Dataset columns:
  reponame        string (lengths 2-39)
  files           list
  median_score    float64 (range 0-11.5)
reponame: draganHR
files:
[ { "content": "from __future__ import print_function, unicode_literals\n\nimport re\nfrom collections import namedtuple\n\nLintEntry = namedtuple('LintEntry', ['filename', 'line', 'column', 'message'])\n\nline_parts_pattern = re.compile(r'(?:\\./)?(.+?):(\\d+):(\\d+): ?(.*)')\n\n\nclass ParseError(Exception):\n pass\n\n\ndef parse_report(raw_report):\n pass\n\n\ndef parse_lint_line(line):\n \"\"\"Parse lint diff line\n\n >>> parse_lint_line('foo.txt:1:2: bar')\n LintEntry(filename='foo.txt', line=1, column=2, message='bar')\n\n >>> parse_lint_line('foo.txt:123:50: bar')\n LintEntry(filename='foo.txt', line=123, column=50, message='bar')\n\n >>> parse_lint_line('foo.txt:0:1:')\n LintEntry(filename='foo.txt', line=0, column=1, message='')\n\n >>> parse_lint_line('foo/foo bar.txt:0:1: baz')\n LintEntry(filename='foo/foo bar.txt', line=0, column=1, message='baz')\n \"\"\"\n # TODO: handle colon in filename\n line_parts = line_parts_pattern.match(line)\n if line_parts is None:\n raise ParseError('Failed to parse line: %s' % line)\n lint_entry = LintEntry(\n line_parts.group(1),\n int(line_parts.group(2)),\n int(line_parts.group(3)),\n line_parts.group(4)\n )\n return lint_entry\n", "id": "7013922", "language": "Python", "matching_score": 1.2763272523880005, "max_stars_count": 0, "path": "lintlens/lint/unix.py" }, { "content": "from __future__ import print_function, unicode_literals\n\nfrom ..git import parse_file_line_numbers, parse_hunk\n\n\ndef test_parse_hunk():\n assert parse_hunk('@@ -0 +1 @@ Foo bar') == ((0, 1), (1, 1), u'Foo bar')\n assert parse_hunk('@@ -987 +99999 @@ Foo bar') == ((987, 1), (99999, 1), u'Foo bar')\n assert parse_hunk('@@ -5,0 +42,5 @@ Foo bar') == ((5, 0), (42, 5), u'Foo bar')\n assert parse_hunk('@@ -1,3 +42,0 @@ Foo bar') == ((1, 3), (42, 0), u'Foo bar')\n assert parse_hunk('@@ -0 +1 @@') == ((0, 1), (1, 1), u'')\n\n\ndef test_parse_file_line_numbers():\n assert parse_file_line_numbers('-0') == (0, 1)\n assert parse_file_line_numbers('+0') == (0, 1)\n assert parse_file_line_numbers('+0,0') == (0, 0)\n assert parse_file_line_numbers('+0,1') == (0, 1)\n assert parse_file_line_numbers('+0,5') == (0, 5)\n assert parse_file_line_numbers('+123,5') == (123, 5)\n", "id": "351587", "language": "Python", "matching_score": 0.1774969846010208, "max_stars_count": 0, "path": "lintlens/tests/test_git.py" }, { "content": "#!/usr/bin/env python\nimport pkg_resources\nfrom setuptools import find_packages, setup\n\nimport lintlens\n\npackages = find_packages(include=('lintlens*',), exclude=('*.tests',))\n\n\nwith open('requirements/base.txt') as requirements_txt:\n install_requires = [\n str(requirement)\n for requirement\n in pkg_resources.parse_requirements(requirements_txt)\n ]\n\n\nwith open('requirements/tests.txt') as requirements_txt:\n tests_extras = [\n str(requirement)\n for requirement\n in pkg_resources.parse_requirements(requirements_txt)\n ]\n\n\nwith open('README.md', 'r') as f:\n readme = f.read()\n\n\nsetup(\n name='lintlens',\n version=lintlens.__version__,\n description='Filters lint report and keep only defects on changed lines',\n long_description=readme,\n author='<NAME>',\n url='https://github.com/draganHR/lintlens',\n packages=packages,\n include_package_data=True,\n zip_safe=False,\n classifiers=(\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 3.6',\n ),\n install_requires=install_requires,\n extras_require={\n 'tests': tests_extras,\n },\n\n entry_points={\n 'console_scripts': [\n 'lintlens = lintlens.cli:main',\n ]\n },\n 
test_suite='tests'\n)\n", "id": "10142991", "language": "Python", "matching_score": 1.4075610637664795, "max_stars_count": 0, "path": "setup.py" }, { "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nimport argparse\nimport codecs\nimport sys\n\nimport lintlens\n\nfrom .git import get_diff_lines\nfrom .lint.unix import ParseError, parse_lint_line\nfrom .utils import check_line_overlap_hunks\n\n\ndef handle_range(revision_range, lint_lines):\n diff_lines = {}\n parse_errors = []\n for filename, hunks in get_diff_lines(revision_range):\n diff_lines[filename[1]] = hunks\n\n for lint_line in lint_lines:\n try:\n lint_entry = parse_lint_line(lint_line)\n except ParseError as e:\n parse_errors.append(e)\n continue\n\n # skip file not changed in revision_range\n if lint_entry.filename not in diff_lines:\n continue\n\n hunks = diff_lines[lint_entry.filename]\n if check_line_overlap_hunks(lint_entry.line, hunks, threshold=1):\n print(lint_line, end='')\n\n for parse_error in parse_errors:\n print(str(parse_error).strip('\\n'), file=sys.stderr)\n\n\ndef read_file_lines(filename):\n with codecs.open(filename, 'r', encoding='utf-8') as f:\n lines = f.readlines()\n return lines\n\n\ndef main():\n parser = argparse.ArgumentParser(prog='lintlens')\n parser.add_argument('--version', action='version', version='%(prog)s ' + str(lintlens.__version__))\n parser.add_argument('revision_range',\n help='Include changes in the specified revision range. '\n 'Example: \"master..HEAD\".'\n )\n parser.add_argument('input_filename',\n help='Input filename')\n\n args = parser.parse_args()\n\n lint_lines = read_file_lines(args.input_filename)\n\n handle_range(args.revision_range, lint_lines)\n\n\nif __name__ == \"__main__\":\n main()\n", "id": "5037087", "language": "Python", "matching_score": 2.461686611175537, "max_stars_count": 0, "path": "lintlens/cli.py" }, { "content": "import re\nimport subprocess\n\n\ndef get_diff_lines(revision_range):\n diff_content = get_diff(revision_range)\n for entry in parse_diff(diff_content):\n yield entry\n\n\ndef get_diff(revision_range):\n cmd_output = subprocess.check_output(['git', 'diff', revision_range, '--unified=0'],\n universal_newlines=True)\n if isinstance(cmd_output, bytes):\n cmd_output = cmd_output.decode('utf-8')\n return cmd_output\n\n\ndef parse_diff(diff_content):\n lines = diff_content.split('\\n')\n\n a_filename, b_filename = None, None\n hunks = None\n\n for line in lines:\n if line.startswith('diff --git'):\n # Process next file\n if hunks:\n yield (a_filename, b_filename), hunks\n hunks = []\n\n elif line.startswith('--- '):\n a_filename = parse_diff_filename(line)\n\n elif line.startswith('+++ '):\n b_filename = parse_diff_filename(line)\n\n elif line.startswith('@@'):\n hunk = parse_hunk(line)\n hunks.append(hunk)\n\n # Last file\n if hunks:\n yield (a_filename, b_filename), hunks\n\n\ndef parse_diff_filename(line):\n if isinstance(line, bytes):\n line = line.decode('unicode-escape')\n line_decoded = line.encode('latin-1').decode('utf-8')\n without_prefix = line_decoded[4:].rstrip()\n if without_prefix == '/dev/null':\n return ''\n without_quotes = without_prefix[1:-1] if without_prefix.startswith('\"') else without_prefix\n filename = without_quotes[2:] # Strip a/b\n return filename\n\n\ndef parse_hunk(line):\n \"\"\"\n Parse git hunk (Example: \"@@ -5,0 +42,5 @@ Foobar\")\n \"\"\"\n hunk_parts = re.match(r\"^@@ ([^@ ]+) ([^@ ]+) @@ ?(.*)$\", line)\n line_from_formatted, line_to_formatted, code = hunk_parts.groups()\n\n line_from = 
parse_file_line_numbers(line_from_formatted)\n line_to = parse_file_line_numbers(line_to_formatted)\n\n return line_from, line_to, code\n\n\ndef parse_file_line_numbers(formatted_numbers):\n \"\"\"\n Parse \"start,count\" formatted line numbers\n \"\"\"\n formatted = formatted_numbers[1:] # strip -/+\n formatted_parts = formatted.split(',')\n start = int(formatted_parts[0])\n count = int(formatted_parts[1]) if len(formatted_parts) > 1 else 1\n return start, count\n", "id": "897155", "language": "Python", "matching_score": 1.4837156534194946, "max_stars_count": 0, "path": "lintlens/git.py" }, { "content": "\ndef check_line_overlap_hunks(start, hunks, threshold):\n for _change_from, change_to, _ in hunks:\n if check_line_in_range(start, change_to[0], change_to[1], threshold):\n return True\n return False\n\n\ndef check_line_in_range(line_number, range_start, range_count, threshold):\n in_range = range_start - threshold <= line_number <= range_start + range_count - 1 + threshold\n return in_range\n", "id": "2319347", "language": "Python", "matching_score": 1.7813177108764648, "max_stars_count": 0, "path": "lintlens/utils.py" }, { "content": "import pytest\n\nfrom lintlens.utils import check_line_in_range\n\n\ndef test_check_line_in_range_first_line():\n args = 1, 1, 0\n assert check_line_in_range(0, *args) is False\n assert check_line_in_range(1, *args) is True\n assert check_line_in_range(2, *args) is False\n\n\n@pytest.mark.parametrize('range_count', (1, 2, 3, 4, 5))\ndef test_check_line_in_range_for_range_count(range_count):\n args = 42, range_count, 0\n for i in range(3):\n assert check_line_in_range(41 - i, *args) is False\n for i in range(range_count):\n assert check_line_in_range(42 + i, *args) is True\n for i in range(3):\n assert check_line_in_range(43 + i + range_count, *args) is False\n", "id": "9184532", "language": "Python", "matching_score": 0.5635569095611572, "max_stars_count": 0, "path": "lintlens/tests/test_utils.py" } ]
median_score: 1.407561
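
The lintlens files above parse unix-style lint lines (`file:line:col: message`) and unified-diff hunk headers, then keep only defects that overlap changed lines. A minimal standalone sketch of the hunk parsing, reusing the same regular expression and "start,count" convention as `lintlens/git.py`:

```python
import re

# Same patterns as lintlens' parse_hunk / parse_file_line_numbers
HUNK_RE = re.compile(r"^@@ ([^@ ]+) ([^@ ]+) @@ ?(.*)$")

def parse_range(token):
    # token is "-start,count" or "+start,count"; count defaults to 1
    start, _, count = token[1:].partition(',')
    return int(start), int(count) if count else 1

m = HUNK_RE.match("@@ -5,0 +42,5 @@ def foo():")
old, new, context = m.groups()
print(parse_range(old), parse_range(new), context)
# -> (5, 0) (42, 5) def foo():
```

Counts default to 1 when omitted, which is why `@@ -0 +1 @@` parses to `((0, 1), (1, 1))` in `test_git.py` above.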
reponame: sljchina
files:
[ { "content": "import pandas as pd\nimport numpy as np\nfrom sqlalchemy import create_engine\n\nengine = create_engine('mysql+pymysql://root:87886922@localhost:3306/bidding_info')\n\ninfo_detail = pd.read_sql_table('info_detail', engine)\n\ncombine_data = pd.DataFrame()\n\ni = 0\nfor index , row in info_detail.iterrows():\n # print(index)\n sub_title = row[1].split('|')\n item = row[2].split('|')\n\n\n i = 0\n pop_list = []\n while i < len(sub_title):\n # print(sub_title[i])\n if sub_title[i] == '':\n print(\"x\")\n pop_list.append(i)\n i += 1\n\n if len(pop_list) > 0 or len(sub_title) != len(item):\n continue;\n\n\n new_dict = dict(zip(sub_title,item))\n new_pd = pd.DataFrame(data=new_dict, index=[0], columns=sub_title)\n\n\n # new_pd = new_pd.drop('',axis=1)\n print(index)\n # print(new_pd)\n if index == 0 :\n combine_data = new_pd\n else:\n # combine_data = combine_data.reset_index()\n # new_pd = new_pd.reset_index()\n try:\n combine_data = pd.concat([combine_data, new_pd] , axis=0, ignore_index=True)\n except:\n print(\"出错了\")\n print(combine_data)\n \n\ncombine_data.to_excel('final_format.xlsx', encoding='utf8')\n\n\n\n\n# for title in info_detail['bidding_table_titles_list']:\n# sub_title = title.split('|')\n# print(sub_title)\n# i = i + 1\n # sub_item = info_detail['bidding_table_items_list'][i].split(',')\n\n # print(len(sub_item))\n # print(len(sub_title))\n\n # single_data = pd.DataFrame(np.array(sub_item),columns=sub_title)\n # print(single_data)\n\n\n # print(title.split(','))\n# for item in info_detail['bidding_table_items_list']:\n# single_data = pd.DataFrame(np.array(item.split('|')))\n# print(single_data)\n # print(item.split('|'))\n # print(single_data)\n", "id": "7469461", "language": "Python", "matching_score": 1.5516146421432495, "max_stars_count": 0, "path": "bidding_info_new/data_export.py" }, { "content": "# Define your item pipelines here\n#\n# Don't forget to add your pipeline to the ITEM_PIPELINES setting\n# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html\n\n\n# useful for handling different item types with a single interface\nfrom itemadapter import ItemAdapter\nimport pymysql\nimport csv\n\n\nclass BiddingInfoNewPipeline:\n\n def process_item(self, item, spider):\n return item\n\n# class BiddingInfoNewDetailPipeline:\n# def process_item(self, item, spider):\n# return item\n\n# class MyProjectPipeline(object):\n# # 保存为csv格式\n# def __init__(self):\n# # 打开文件,指定方式为写,利用第3个参数把csv写数据时产生的空行消除\n# self.f = open(\"myproject.csv\",\"a\",newline=\"\")\n# # 设置文件第一行的字段名,注意要跟spider传过来的字典key名称相同\n# self.fieldnames = [\"m_num\",\"m_name\",\"s_name\",\"i_date\",\"l_work\",\"m_style\",\"c_work\"]\n# # 指定文件的写入方式为csv字典写入,参数1为指定具体文件,参数2为指定字段名\n# self.writer = csv.DictWriter(self.f, fieldnames=self.fieldnames)\n# # 写入第一行字段名,因为只要写入一次,所以文件放在__init__里面\n# self.writer.writeheader()\n\n# def process_item(self, item, spider):\n# # 写入spider传过来的具体数值\n# self.writer.writerow(item)\n# # 写入完返回\n# return item\n\n# def close(self,spider):\n# self.f.close()\n\n\n\n\n\n\n\n\nclass MysqlPipeline(object):\n def __init__(self,host,user,password,database,port):\n self.host = host\n self.user = user\n self.password = password\n self.database = database\n self.port = port\n\n @classmethod\n def from_crawler(cls,crawler):\n return cls(\n host = crawler.settings.get(\"MYSQL_HOST\"),\n user = crawler.settings.get(\"MYSQL_USER\"),\n password = crawler.settings.get(\"MYSQL_PASS\"),\n database = crawler.settings.get(\"MYSQL_DATABASE\"),\n port = crawler.settings.get(\"MYSQL_PORT\"),\n )\n\n def 
open_spider(self, spider):\n '''负责连接数据库'''\n self.db = pymysql.connect(self.host,self.user,self.password,self.database,charset=\"utf8mb4\",port=self.port)\n self.cursor = self.db.cursor()\n\n def process_item(self, item, spider):\n '''执行数据表的写入操作'''\n #组装sql语句\n data = dict(item)\n keys = ','.join(data.keys())\n values=','.join(['%s']*len(data))\n sql = \"insert into %s(%s) values(%s)\"%(item.table,keys,values)\n #指定参数,并执行sql添加\n self.cursor.execute(sql,tuple(data.values()))\n #事务提交\n self.db.commit()\n return item\n\n def close_spider(self, spider):\n '''关闭连接数据库'''\n self.db.close()", "id": "11790955", "language": "Python", "matching_score": 2.6045594215393066, "max_stars_count": 0, "path": "bidding_info_new/bidding_info_new/pipelines.py" }, { "content": "# Define here the models for your scraped items\n#\n# See documentation in:\n# https://docs.scrapy.org/en/latest/topics/items.html\n\nimport scrapy\n\n\nclass BiddingInfoNewItem(scrapy.Item):\n # define the fields for your item here like:\n # name = scrapy.Field()\n table = \"info_list\"\n bidding_company = scrapy.Field()\n project_name = scrapy.Field()\n bidding_type = scrapy.Field()\n bidding_date = scrapy.Field()\n bidding_url = scrapy.Field()\n\nclass BiddingInfoNewItemDetail(scrapy.Item):\n # define the fields for your item here like:\n # name = scrapy.Field()\n table = \"info_detail\"\n bidding_table_titles_list = scrapy.Field()\n bidding_table_items_list = scrapy.Field()\n\n\n", "id": "5348474", "language": "Python", "matching_score": 4.096156597137451, "max_stars_count": 0, "path": "bidding_info_new/bidding_info_new/items.py" }, { "content": "import scrapy\nfrom scrapy.selector import Selector\nfrom bidding_info_new.items import BiddingInfoNewItem, BiddingInfoNewItemDetail\nimport re\n\nclass CsgSpider(scrapy.Spider):\n name = 'csg'\n allowed_domains = ['www.bidding.csg.cn']\n start_urls = ['https://www.bidding.csg.cn/zbgg/index.jhtml']\n\n def parse(self, response):\n index_list = []\n for i in range(1, 611):\n index = 'https://www.bidding.csg.cn/zbgg/index_'+str(i)+'.jhtml'\n print(index)\n index_list.append(index)\n\n for index in index_list:\n yield scrapy.Request(url=index,callback=self.parse_index)\n # items = Selector(response=response).xpath('/html/body/section/div[3]/div[1]/ul/li')\n\n def parse_index(self, response):\n \n\n items = Selector(response=response).xpath('/html/body/section/div[3]/div[1]/ul/li')\n for item in items:\n\n BiddingInfo = BiddingInfoNewItem()\n\n bidding_company = item.xpath('./a[1]/text()').extract()[0]\n project_name = item.xpath('./a[2]/text()').extract()[0]\n bidding_type = item.xpath('./span[1]/a/text()').extract()[0]\n bidding_date = item.xpath('./span[1]/span/text()').extract()[0]\n bidding_url = 'https://www.bidding.csg.cn/'+item.xpath('./a[2]/@href').extract()[0]\n\n BiddingInfo['bidding_company'] = bidding_company\n BiddingInfo['project_name'] = project_name\n BiddingInfo['bidding_type'] = bidding_type\n BiddingInfo['bidding_date'] = bidding_date\n BiddingInfo['bidding_url'] = bidding_url\n \n\n print('-----------------------')\n print(bidding_company)\n print(project_name)\n print(bidding_type)\n print(bidding_date)\n print(bidding_url)\n print('-----------------------')\n\n yield scrapy.Request(url=bidding_url,callback=self.parse_item)\n\n yield BiddingInfo\n\n def parse_item(self, response):\n\n BiddingInfoDetail = BiddingInfoNewItemDetail()\n \n bidding_table_titles_list = []\n bidding_table_items_list = []\n\n bidding_table_titles_list.append('网页链接')\n 
bidding_table_items_list.append(response.url)\n\n print(response.url)\n try:\n bidding_id_search = re.search(r'[0]{3}[0-9]{13}',response.text)\n\n if bidding_id_search:\n print(bidding_id_search.group())\n bidding_table_titles_list.append('项目编号')\n bidding_table_items_list.append(bidding_id_search.group())\n else:\n print('没有项目编号')\n bidding_table_titles_list.append('项目编号')\n bidding_table_items_list.append('没有项目编号')\n\n bidding_table = Selector(response=response).xpath('//table')[0]\n bidding_table_titles = bidding_table.xpath('.//tr[1]/td')\n bidding_table_items = bidding_table.xpath('.//tr[2]/td')\n\n print('-----------------------')\n for title in bidding_table_titles:\n # print(title.extract())\n span_sum = ''\n spans = title.xpath('.//span/text()')\n if len(spans) > 0:\n for span_item in spans:\n span_sum = span_sum+span_item.extract()\n\n ass_sum = ''\n aas = title.xpath('.//span/a/text()')\n if len(aas) > 0:\n for ass_item in aas:\n ass_sum = ass_sum+ass_item.extract()\n\n bidding_table_titles_list.append(span_sum+ass_sum)\n \n for item in bidding_table_items:\n \n span_sum = ''\n spans = item.xpath('.//span/text()')\n if len(spans) > 0:\n for span_item in spans:\n span_sum = span_sum+span_item.extract()\n\n ass_sum = ''\n aas = item.xpath('.//span/a/text()')\n if len(aas) > 0:\n for ass_item in aas:\n ass_sum = ass_sum+ass_item.extract()\n\n bidding_table_items_list.append(span_sum+ass_sum)\n\n print(bidding_table_titles_list)\n print(bidding_table_items_list)\n\n print('-----------------------')\n\n BiddingInfoDetail['bidding_table_titles_list'] = \"|\".join(bidding_table_titles_list)\n BiddingInfoDetail['bidding_table_items_list'] = \"|\".join(bidding_table_items_list)\n\n yield BiddingInfoDetail\n\n except IndexError:\n print('没有表格')\n \n # bidding_table_title = Selector(response=response).xpath('/html/body/section/div[2]/div/div[2]/div/div[1]/table/tbody/tr[1]/td[1]/p/span[1]')\n\n", "id": "741086", "language": "Python", "matching_score": 2.76008677482605, "max_stars_count": 0, "path": "bidding_info_new/bidding_info_new/spiders/csg.py" }, { "content": "import scrapy\nfrom selenium import webdriver\n\n\nclass SgccSpider(scrapy.Spider):\n name = 'sgcc'\n allowed_domains = ['ecp.sgcc.com.cn']\n start_urls = ['https://ecp.sgcc.com.cn/ecp2.0/portal/#/list/list-spe/2018032600289606_1_2018032700291334']\n\n def __init__(self):\n self.driver = webdriver.PhantomJS()\n \n # def start_requests(self):\n # return super().start_requests()\n\n def parse(self, response):\n print('---------------------------------------')\n print(response.text)\n print('---------------------------------------')\n", "id": "11317052", "language": "Python", "matching_score": 0.4447385370731354, "max_stars_count": 0, "path": "bidding_info_new/bidding_info_new/spiders/sgcc.py" } ]
median_score: 2.604559
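
The `MysqlPipeline` above builds its INSERT statement dynamically from whatever fields the scraped item carries, taking the table name from the item's `table` attribute. A self-contained sketch of that assembly; the field values here are made up, and only the `info_list` table name comes from `items.py`:

```python
# Demo of the pipeline's parameterized-INSERT assembly (sample data only)
data = {"bidding_company": "ACME", "project_name": "Substation A"}
keys = ",".join(data.keys())
placeholders = ",".join(["%s"] * len(data))
sql = "insert into %s(%s) values(%s)" % ("info_list", keys, placeholders)
print(sql)
# insert into info_list(bidding_company,project_name) values(%s,%s)
# In the pipeline this runs as: self.cursor.execute(sql, tuple(data.values()))
```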
reponame: kindly
files:
[ { "content": "from setuptools import setup\nsetup(name='ocdsmerge',\n version='0.1',\n description='OCDS Release Merge Library',\n author='<NAME>',\n author_email='<EMAIL>',\n license='BSD',\n packages=['ocdsmerge', 'ocdsmerge.fixtures'],\n include_package_data=True,\n url='https://github.com/kindly/ocds-merge'\n)\n", "id": "5741645", "language": "Python", "matching_score": 1.6956866979599, "max_stars_count": 0, "path": "setup.py" }, { "content": "import unittest\nimport ocdsmerge.merge as merge\nimport ocdsmerge.fixtures as fixtures\nimport os\n\nimport glob\n\nclass TestAllFixtures(unittest.TestCase):\n maxDiff = None\n def test_all(self):\n current_dir = os.path.dirname(os.path.abspath(__file__))\n for file_name in glob.glob(os.path.join(current_dir, 'fixtures', '*.py')):\n name = file_name.split('/')[-1].split('.')[0]\n if name == \"__init__\":\n continue\n \n __import__(\"ocdsmerge.fixtures.\" + name)\n fixture = getattr(fixtures, name)\n\n self.assertEqual(merge.merge(fixture.releases),\n fixture.compiledRelease,\n 'Test compiled for ' + name)\n\n self.assertEqual(merge.merge_versioned(fixture.releases),\n fixture.versionedRelease,\n 'Test versioned for ' + name)\n\n\nif __name__ == '__main__':\n unittest.main()\n", "id": "435468", "language": "Python", "matching_score": 0.6206088662147522, "max_stars_count": 0, "path": "ocdsmerge/tests.py" }, { "content": "import decimal\nimport click\n\nimport orjson\n\nfrom .flatterer import iterator_flatten_rs, flatten_rs, setup_logging, setup_ctrlc\n\nLOGGING_SETUP = False\n\n\ndef default(obj):\n if isinstance(obj, decimal.Decimal):\n return str(obj)\n raise TypeError\n\n\ndef bytes_generator(iterator):\n for item in iterator:\n if isinstance(item, bytes):\n yield item\n if isinstance(item, str):\n yield str.encode()\n if isinstance(item, dict):\n yield orjson.dumps(item, default=default)\n\n\ndef flatten(\n input,\n output_dir,\n csv=True,\n xlsx=False,\n path='',\n main_table_name='main',\n emit_path=[],\n json_lines=False,\n force=False,\n fields='',\n only_fields=False,\n tables='',\n only_tables=False,\n inline_one_to_one=False,\n schema=\"\",\n table_prefix=\"\",\n path_separator=\"_\",\n schema_titles=\"\",\n log_error=False,\n):\n global LOGGING_SETUP\n if not LOGGING_SETUP:\n setup_logging(\"warning\")\n LOGGING_SETUP = True\n flatten_rs(input, output_dir, csv, xlsx,\n path, main_table_name, emit_path, json_lines, force, fields, only_fields, tables, only_tables,\n inline_one_to_one, schema, table_prefix, path_separator, schema_titles, log_error)\n\n\ndef iterator_flatten(\n iterator,\n output_dir,\n csv=True,\n xlsx=False,\n main_table_name='main',\n emit_path=[],\n force=False,\n fields='',\n only_fields=False,\n tables='',\n only_tables=False,\n inline_one_to_one=False,\n schema=\"\",\n table_prefix=\"\",\n path_separator=\"_\",\n schema_titles=\"\",\n log_error=False\n):\n global LOGGING_SETUP\n if not LOGGING_SETUP:\n setup_logging(\"warning\")\n LOGGING_SETUP = True\n iterator_flatten_rs(bytes_generator(iterator), output_dir, csv, xlsx,\n main_table_name, emit_path, force, fields, only_fields, tables,\n only_tables, inline_one_to_one, schema, table_prefix, path_separator, schema_titles, log_error)\n\n\n@click.command()\n@click.option('--csv/--nocsv', default=True, help='Output CSV files, default true')\n@click.option('--xlsx/--noxlsx', default=False, help='Output XLSX file, default false')\n@click.option('--main-table-name', '-m', default=None,\n help='Name of main table, defaults to name of the file without the 
extension')\n@click.option('--path', '-p', default='', help='Key name of where json array starts, default top level array')\n@click.option('--json-lines', '-j', is_flag=True, default=False,\n help='Is file a jsonlines file, default false')\n@click.option('--force', is_flag=True, default=False,\n help='Delete output directory if it exists, then run command, default False')\n@click.option('--fields', '-f', default=\"\", help='fields.csv file to use')\n@click.option('--only-fields', '-o', is_flag=True, default=False, help='Only output fields in fields.csv file')\n@click.option('--tables', '-b', default=\"\", help='tables.csv file to use')\n@click.option('--only-tables', '-l', is_flag=True, default=False, help='Only output tables in tables.csv file')\n@click.option('--inline-one-to-one', '-i', is_flag=True, default=False,\n help='If array only has single item for all objects treat as one-to-one')\n@click.option('--schema', '-s', default=\"\",\n help='JSONSchema file or URL to determine field order')\n@click.option('--table-prefix', '-t', default=\"\",\n help='Prefix to add to all table names')\n@click.option('--path-separator', '-a', default=\"_\",\n help='Seperator to denote new path within the input JSON. Defaults to `_`')\n@click.option('--schema-titles', '-h', default=\"\",\n help='Use titles from JSONSchema in the given way. Options are `full`, `slug`, `underscore_slug`. Default to not using titles.')\n@click.argument('input_file')\n@click.argument('output_directory')\ndef cli(\n input_file,\n output_directory,\n csv=True,\n xlsx=False,\n path='',\n main_table_name=None,\n json_lines=False,\n force=False,\n fields=\"\",\n only_fields=False,\n tables=\"\",\n only_tables=False,\n inline_one_to_one=False,\n schema=\"\",\n table_prefix=\"\",\n path_separator=\"_\",\n schema_titles=\"\"\n):\n global LOGGING_SETUP\n if not LOGGING_SETUP:\n setup_logging(\"info\")\n LOGGING_SETUP = True\n setup_ctrlc()\n\n if not main_table_name:\n main_table_name = input_file.split('/')[-1].split('.')[0]\n\n try:\n flatten(input_file,\n output_directory,\n csv=csv,\n xlsx=xlsx,\n path=path,\n main_table_name=main_table_name,\n json_lines=json_lines,\n force=force,\n fields=fields,\n only_fields=only_fields,\n tables=tables,\n only_tables=only_tables,\n inline_one_to_one=inline_one_to_one,\n schema=schema,\n table_prefix=table_prefix,\n path_separator=path_separator,\n schema_titles=schema_titles,\n log_error=True)\n except IOError:\n pass\n", "id": "5519047", "language": "Python", "matching_score": 1.5512279272079468, "max_stars_count": 23, "path": "flatterer/__init__.py" }, { "content": "import collections\n\nNOT_FLATTEN_KEYS = ['additionalIdentifiers', \n 'additionalClassifications',\n 'suppliers',\n 'changes',\n 'tenderers'\n ]\n\nclass IdValue(str):\n '''This is basically a string but is used to differentiate itself when doing an ininstance check.'''\n def __init__(self, value):\n ## Save original value. this is needed if id was originally an integer and you want to keep that iformation.\n self.original_value = value\n str.__init__(value)\n\n\ndef flatten(path, flattened, obj):\n '''Flatten any nested json object into simple key value pairs.\n The key is the json path represented as a tuple. \n eg. 
{\"a\": \"I am a\", \"b\": [\"A\", \"list\"], \"c\": [{\"ca\": \"I am ca\"}, {\"cb\": \"I am cb\"}]}\n will flatten to\n {('a',): 'I am a',\n ('b', 1): 'list', \n ('c', 0, 'ca'): 'I am ca', \n ('b', 0): 'A', \n ('c', 1, 'cb'): 'I am cb'}\n '''\n if isinstance(obj, dict):\n iterable = list(obj.items())\n if not iterable:\n flattened[path] = {}\n else:\n iterable = list(enumerate(obj))\n if not iterable:\n flattened[path] = []\n for key, value in iterable:\n # We do not flatten these keys as the child lists of \n # these keys will not be merged, be totally replaced\n # and versioned as a whole\n if isinstance(value, (dict, list)) and key not in NOT_FLATTEN_KEYS:\n flatten(path + (key,), flattened, value)\n else:\n flattened[path + (key,)] = value\n return flattened\n\ndef unflatten(flattened):\n '''Unflatten flattened object back into nested form.'''\n unflattened = {}\n for flat_key in flattened:\n current_pos = unflattened\n for num, item in enumerate(flat_key):\n if isinstance(item, IdValue):\n if len(flat_key) - 1 == num: #when this is an array of string or ints\n current_pos.append(flattened[flat_key])\n else:\n for obj in current_pos:\n obj_id = obj.get('id')\n if obj_id == item.original_value:\n current_pos = obj\n break\n else:\n new_pos = {\"id\": item.original_value}\n current_pos.append(new_pos)\n current_pos = new_pos\n continue\n new_pos = current_pos.get(item)\n if new_pos is not None:\n current_pos = new_pos\n continue\n if len(flat_key) - 1 == num:\n current_pos[item] = flattened[flat_key]\n elif isinstance(flat_key[num + 1], IdValue):\n new_pos = []\n current_pos[item] = new_pos\n current_pos = new_pos\n else:\n new_pos = {}\n current_pos[item] = new_pos\n current_pos = new_pos\n return unflattened\n\n\n\ndef process_flattened(flattened):\n ''' Replace numbers in json path (representing position in arrays)\n with special id object. This is to make detecting what is an\n array possible without needed to check schema.'''\n\n # Keep ordered so that arrays will stay in the same order.\n processed = collections.OrderedDict()\n for key in sorted(flattened.keys(), key=lambda a: (len(a),) + a):\n new_key = []\n for num, item in enumerate(key):\n if isinstance(item, int):\n id_value = flattened.get(tuple(key[:num+1]) + ('id',))\n if id_value is None:\n id_value = item\n new_key.append(IdValue(id_value))\n continue\n new_key.append(item)\n processed[tuple(new_key)] = flattened[key]\n return processed\n\n\ndef merge(releases):\n ''' Takes a list of releases and merge them making a \n compiledRelease suitible for an OCDS Record '''\n merged = collections.OrderedDict({(\"tag\",): ['compiled']})\n for release in sorted(releases, key=lambda rel: rel[\"date\"]):\n release = release.copy()\n release.pop('tag', None)\n\n flat = flatten((), {}, release)\n\n processed = process_flattened(flat)\n # In flattening and adding the ids to the json path\n # we make sure each json path is going to same as long as\n # all the ids match. Position in the array is not relevent \n # (however it will keep this order anyway due to having an ordered dict). 
\n # This makes the actual merging come down to\n # just this statement.\n merged.update(processed)\n return unflatten(merged)\n\ndef merge_versioned(releases):\n ''' Takes a list of releases and merge them making a \n versionedRelease suitible for an OCDS Record '''\n merged = collections.OrderedDict()\n for release in sorted(releases, key=lambda rel: rel[\"date\"]):\n release = release.copy()\n ocid = release.pop(\"ocid\")\n merged[(\"ocid\",)] = ocid\n\n releaseID = release.pop(\"id\")\n date = release.pop(\"date\")\n tag = release.pop('tag', None)\n flat = flatten((), {}, release)\n\n processed = process_flattened(flat)\n\n for key, value in processed.items():\n if key[-1] == 'id' and isinstance(key[-2], tuple):\n merged[key] = value\n continue\n new_value = {\"releaseID\": releaseID,\n \"releaseDate\": date,\n \"releaseTag\": tag,\n \"value\": value}\n if key in merged:\n if value == merged[key][-1]['value']:\n continue\n\n if key not in merged:\n merged[key] = []\n merged[key].append(new_value)\n\n return unflatten(merged)\n\n\n", "id": "1976196", "language": "Python", "matching_score": 1.6853878498077393, "max_stars_count": 0, "path": "ocdsmerge/merge.py" }, { "content": "from merge import merge, merge_versioned\n", "id": "11834317", "language": "Python", "matching_score": 0.09755152463912964, "max_stars_count": 0, "path": "ocdsmerge/__init__.py" } ]
median_score: 1.551228
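
The merge logic above works by flattening each release into a dictionary keyed by JSON-path tuples, updating the running merged dictionary, and unflattening the result. A simplified sketch of the flattening step, matching the docstring's example but dropping the `NOT_FLATTEN_KEYS` special-casing and empty-container handling of the real `flatten`:

```python
# Simplified sketch of ocdsmerge.merge.flatten
def flatten(path, flattened, obj):
    items = obj.items() if isinstance(obj, dict) else enumerate(obj)
    for key, value in items:
        if isinstance(value, (dict, list)):
            flatten(path + (key,), flattened, value)
        else:
            flattened[path + (key,)] = value
    return flattened

obj = {"a": "I am a", "b": ["A", "list"], "c": [{"ca": "I am ca"}]}
print(flatten((), {}, obj))
# {('a',): 'I am a', ('b', 0): 'A', ('b', 1): 'list', ('c', 0, 'ca'): 'I am ca'}
```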
reponame: xie-wenjie
files:
[ { "content": "#\n# Usage: Fill in the configuration variables. It will download the feed\n# for it, parse it, and print out test cases to add to the unit test.\n#\n\nEMAIL = \"<EMAIL>\"\nPRIVATE_COOKIE = \"432802670aefa458daf036597ec8136b\"\nSTART_DATE = (\"2006\",\"01\",\"01\")\nEND_DATE = (\"2009\",\"01\",\"01\")\n\n\n\nimport sys, urllib, re\nfrom xml.dom import minidom\n\ndef fmt(n):\n if n < 10:\n return \"0\" + str(n)\n else:\n return str(n)\n\ndef makeDate(d):\n return d[0] + \"-\" + d[1] + \"-\" + d[2]\n\ndef makeZDate(d):\n return d[0] + d[1] + d[2] + \"T000000Z\"\n\nurl = \"http://www.google.com/calendar/feeds/<EMAIL>/private-\" \\\n + PRIVATE_COOKIE + \"/composite?start-min=\" + makeDate(START_DATE) \\\n + \"&start-max=\" + makeDate(END_DATE)\n\n#data = open(\"out.xml\")\ndata = urllib.urlopen(url)\n\nDTSTART_TZID = re.compile(\"DTSTART;TZID=(.*):(.*)\")\nDTSTART = re.compile(\"DTSTART:(.*)\")\nDURATION = re.compile(\"DURATION:(.*)\")\nRRULE = re.compile(\"RRULE:(.*)\")\nTIME = re.compile(\"(....)-(..)-(..)T(..):(..):(..)....([+-])(..):(..)\")\nTIMEZ = re.compile(\"(....)-(..)-(..)T(..):(..):(..)....Z\")\n\ndef stripTimezone(str):\n lines = str.split(\"\\n\")\n drop = False\n result = []\n for line in lines:\n if line == \"BEGIN:VTIMEZONE\":\n drop = True\n if not drop:\n result.append(line)\n if line == \"END:VTIMEZONE\":\n drop = False\n return result\n\ndef fixInstance(s):\n m = TIME.match(s[0])\n if m:\n if m.group(7) == \"+\":\n sign = -1\n else:\n sign = 1\n hour = int(m.group(4)) + (sign * int(m.group(8)))\n return m.group(1) + m.group(2) + m.group(3) + \"T\" + fmt(hour) \\\n + m.group(5) + m.group(6) + \"Z\"\n m = TIMEZ.match(s[0])\n if m:\n return m.group(1) + m.group(2) + m.group(3) + \"T\" + m.group(4) \\\n + m.group(5) + m.group(6) + \"Z\"\n return s[0]\n\ndom = minidom.parse(data)\nroot = dom.documentElement\n\nentries = root.getElementsByTagName(\"entry\")\n\nfor entry in entries:\n recurrences = entry.getElementsByTagName(\"gd:recurrence\")\n dtstart = \"\"\n tzid = \"\"\n duration = \"\"\n rrule = \"\"\n if len(recurrences) > 0:\n recurrence = recurrences[0]\n s = \"\"\n for c in recurrence.childNodes:\n s = s + c.nodeValue\n lines = stripTimezone(s)\n for s in lines:\n re_dtstart = DTSTART_TZID.match(s)\n if re_dtstart:\n dtstart = re_dtstart.group(2)\n tzid = re_dtstart.group(1)\n re_dtstart = DTSTART.match(s)\n if re_dtstart:\n dtstart = re_dtstart.group(1)\n re_duration = DURATION.match(s)\n if re_duration:\n duration = re_duration.group(1)\n re_rrule = RRULE.match(s)\n if re_rrule:\n rrule = re_rrule.group(1)\n whens = entry.getElementsByTagName(\"gd:when\")\n instances = []\n for w in whens:\n startTime = w.getAttribute(\"startTime\")\n endTime = w.getAttribute(\"endTime\")\n instances.append((startTime,endTime))\n\n instances = map(fixInstance, instances)\n instances.sort()\n if dtstart != \"\":\n title = \"\"\n for c in entry.getElementsByTagName('title')[0].childNodes:\n title = title + c.nodeValue\n\n print \" // \" + title\n print \" test(\\\"\" + dtstart + \"\\\",\"\n print \" \\\"\" + rrule + \"\\\",\"\n print \" \\\"\" + makeZDate(START_DATE) \\\n + \"\\\", \\\"\" + makeZDate(END_DATE) + \"\\\",\"\n print \" new String[] {\"\n for i in instances:\n print \" \\\"\" + i + \"\\\",\"\n print \" });\"\n print\n\n\n", "id": "4129994", "language": "Python", "matching_score": 0, "max_stars_count": 5, "path": "providers/CalendarProvider/maketests.py" } ]
median_score: 0
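
`maketests.py` above is Python 2 and normalizes `gd:when` timestamps to Z-suffixed form in `fixInstance`, folding the UTC offset into the hour. A Python 3 sketch of that normalization using the same `TIME` regex; like the original, it adjusts hours only and ignores minute offsets and day rollover:

```python
import re

# Same pattern as maketests.py's TIME regex
TIME = re.compile(r"(....)-(..)-(..)T(..):(..):(..)....([+-])(..):(..)")

def to_zulu(s):
    m = TIME.match(s)
    if not m:
        return s
    sign = -1 if m.group(7) == "+" else 1
    hour = int(m.group(4)) + sign * int(m.group(8))
    return "%s%s%sT%02d%s%sZ" % (m.group(1), m.group(2), m.group(3),
                                 hour, m.group(5), m.group(6))

print(to_zulu("2006-03-29T10:00:00.000+02:00"))  # 20060329T080000Z
```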
reponame: MichelleLochner
files:
[ { "content": "'''\nPlot posterior P(z) for all lines in a folder\n\nrun script from radio-z/plotting_scripts\ndata comes from root_dir\nplots go into plot_dir\n'''\nimport numpy as np\nimport pandas as pd\nfrom matplotlib import pyplot as plt\nfrom matplotlib import rc\nimport glob\nimport pdb\n\nrc('text', usetex=True)\nrc('font', family='serif')\nrc('font', size=11)\n\ncpal = ['#185aa9','#008c48','#ee2e2f','#f47d23','#662c91','#a21d21','#b43894','#010202']\n\nplt.close('all') # tidy up any unshown plots\n\nplot_dir = '../plots/'\nroot_dir = '../data/examples_band2/'\n\nlineid_list = glob.glob(root_dir+'*')\n#lineid_list = lineid_list[-5:-1]\n#lineid_list = np.random.choice(lineid_list, size=4, replace=False)\n\nfor i,lineid in enumerate(lineid_list):\n\n fig, ax = plt.subplots(1, 1, sharex=True, sharey=True, figsize=(4.5, 3.75))\n\n linestore = pd.HDFStore(lineid)\n lineparms = linestore['parameters']\n linedata = linestore['data']\n lineev = linestore['evidence']\n\n z_true = linestore['summary']['True']['z']\n v0 = linestore['chain']['v0']\n z_pdf = -v0/(v0+3e5)\n ax.axvline(0, linestyle='--', color='c', zorder=1)\n ax.hist((z_pdf-z_true)*1.e3, color='m', zorder=10, histtype='step', normed=True, range=(-4, 4), bins=50,\n label='$\\ln(B) = $'+('%.2f' % lineev['Bayes factor'])+'\\n$\\mathrm{'+lineid.split('/')[-1]+'}$')#, color=cpal)\n ax.legend(loc='upper left', frameon=False, fontsize='x-small')\n\n plt.xlim([-4, 4])\n plt.ylabel('$\\mathrm{Probability}\\, P(z - z_{\\\\rm true})$')\n plt.xlabel('$10^{3}\\\\times\\mathrm{Redshift \\, Error} \\, \\Delta z$')\n plt.savefig(plot_dir+'line_{0}_zpdf.png'.format(lineid.split('/')[-1]), bbox_inches='tight', dpi=300)\n linestore.close()", "id": "9385666", "language": "Python", "matching_score": 5.681591987609863, "max_stars_count": 1, "path": "plotting_code/plot_pz_all.py" }, { "content": "'''\nPlot posterior P(z) for four lines (selected at random..!)\n\nrun script from radio-z/plotting_scripts\ndata comes from root_dir\nplots go into plot_dir\n'''\nimport numpy as np\nimport pandas as pd\nfrom matplotlib import pyplot as plt\nfrom matplotlib import rc\nimport glob\nimport pdb\n\nimport sys\nsys.path.append('../radio_z/')\nfrom hiprofile import *\n\nrc('text', usetex=True)\nrc('font', family='serif')\nrc('font', size=11)\n\ncpal = ['#185aa9','#008c48','#ee2e2f','#f47d23','#662c91','#a21d21','#b43894','#010202']\n\nplt.close('all') # tidy up any unshown plots\n\nplot_dir = '../plots/'\nroot_dir = '../data/examples_band2/'\n\nfig, axlist = plt.subplots(2, 2, sharex=True, sharey=True, figsize=(4.5, 3.75))\naxlist = axlist.reshape([4])\n\n# find all the lines we have and select four at random\nlineid_list = glob.glob(root_dir+'*')\n#lineid_list = lineid_list[-5:-1]\nlineid_list = np.random.choice(lineid_list, size=4, replace=False)\n\nfor i,lineid in enumerate(lineid_list):\n\n # open up the line data store\n linestore = pd.HDFStore(lineid)\n lineparms = linestore['parameters']\n linedata = linestore['data']\n lineev = linestore['evidence']\n\n # get the line profile\n z_true = linestore['summary']['True']['z']\n v0 = linestore['chain']['v0']\n z_pdf = -v0/(v0+3e5)\n\n # plot the good redshift P(z)\n if z_pdf.var() < 0.005:\n axlist[i].axvline(0, linestyle='--', color='m', zorder=1)\n axlist[i].hist((z_pdf-z_true)*1.e3, color='k', zorder=10, histtype='step', normed=True, range=(-4, 4), bins=50, label='$\\ln(B) = $'+('%.2f' % lineev['Bayes factor']))#, color=cpal[i])\n axlist[i].text(0.5,3,'$\\ln(B) = $'+('%.2f' % lineev['Bayes factor']), 
fontsize='small')\n\nfig.subplots_adjust(hspace=0)\nfig.subplots_adjust(wspace=0)\n\n# mess around with the axes labels to get what we want (there is definitely a better way)\nplt.setp([a.get_xticklabels() for a in fig.axes[:]], visible=False)\nplt.setp([a.get_yticklabels() for a in fig.axes[:]], visible=False)\nplt.setp([fig.axes[2].get_xticklabels()], visible=True)\nplt.setp([fig.axes[3].get_xticklabels()], visible=True)\nplt.setp([fig.axes[0].get_yticklabels()], visible=True)\nplt.setp([fig.axes[2].get_yticklabels()], visible=True)\nplt.xlim([-4, 4])\nfig.add_subplot(111, frameon=False)\nplt.tick_params(labelcolor='none', top='off', bottom='off', left='off', right='off')\nplt.ylabel('$\\mathrm{Probability}\\, P(z - z_{\\\\rm true})$')\nplt.xlabel('$10^{3}\\\\times\\mathrm{Redshift \\, Error} \\, \\Delta z$')\nplt.savefig(plot_dir+'multiline_zpdf.png', bbox_inches='tight', dpi=300)\nlinestore.close()", "id": "3435092", "language": "Python", "matching_score": 4.430255889892578, "max_stars_count": 1, "path": "plotting_code/plot_pz.py" }, { "content": "'''\nPlot theory HI lines along with their corresponding observed data\n\nrun script from radio-z/plotting_scripts\ndata comes from root_dir\nplots go into plot_dir\n'''\nimport numpy as np\nimport pandas as pd\nfrom matplotlib import pyplot as plt\nfrom matplotlib import rc\nimport pdb\n\nimport sys\nsys.path.append('../radio_z/')\nfrom hiprofile import *\n\nrc('text', usetex=True)\nrc('font', family='serif')\nrc('font', size=11)\n\ncpal = ['#185aa9','#008c48','#ee2e2f','#f47d23','#662c91','#a21d21','#b43894','#010202']\n\nplt.close('all') # tidy up any unshown plots\n\ndef makePlot(line_id, root_dir, plot_dir ='../plots', drawwidth=300):\n\n # open up the line data store\n linestore = pd.HDFStore(root_dir+line_id+'.hdf5')\n lineparms = linestore['parameters']\n linedata = linestore['data']\n\n # get the line profile\n v = linedata['v'].values\n v0 = lineparms['v0'].values\n psi = linedata['psi'].values\n v_channel_width = abs(v[1]-v[0]) \n theoryline = LineProfile(v0,\n lineparms['w_obs_20'].values[0], lineparms['w_obs_50'].values[0], lineparms['w_obs_peak'].values[0],\n lineparms['psi_obs_max'].values[0], lineparms['psi_obs_0'].values[0])\n\n v0_idx = np.where(abs((v-v0))==np.min(abs(v-v0)))[0][0] # line centre\n\n n_drawchannels = int(drawwidth/v_channel_width)\n v_plot = v[v0_idx - n_drawchannels:v0_idx + n_drawchannels]\n psi_plot = psi[v0_idx - n_drawchannels:v0_idx + n_drawchannels]\n v_good = v_plot\n\n psi_plot = psi[v0_idx - n_drawchannels:v0_idx + n_drawchannels]\n psi_good = theoryline.get_line_profile(v_good)\n psi_good_large = theoryline.get_line_profile(v)\n\n plt.close('all')\n\n # plot of theory line and observed line\n plt.figure(1, figsize=(4.5, 3.75))\n plt.plot(v_plot*1.e-3, psi_plot*1.e3, c='k', label='$\\mathrm{SKA-like}$', alpha=0.6)\n plt.plot(v_good*1.e-3, psi_good*1.e3, c='c', label='$\\mathrm{Model}$', lw=2)\n plt.xlabel('$\\\\times10^{3} \\, v \\, [\\mathrm{kms}^{-1}]$')\n plt.ylabel('$\\Psi \\, [\\mathrm{mJy}]$')\n plt.xlim([v_plot[0]*1.e-3, v_plot[-1]*1.e-3])\n plt.legend(frameon=False, fontsize='small')\n plt.savefig(plot_dir+'line_{0}.png'.format(line_id), bbox_inches='tight', dpi=300)\n\n # plot of theory line and observed line, zoomed out to show full band\n plt.figure(2, figsize=(4.5, 3.75))\n plt.plot(v*1.e-3, psi*1.e3, c='k', label='$\\mathrm{SKA-like}$')\n plt.plot(v*1.e-3, psi_good_large*1.e3, c='c', label='$\\mathrm{Model}$', lw=2)\n plt.xlabel('$\\\\times10^{3} \\, v \\, [\\mathrm{kms}^{-1}]$')\n 
plt.ylabel('$\\Psi \\, [\\mathrm{mJy}]$')\n plt.xlim([v[0]*1.e-3, v[-1]*1.e-3])\n plt.legend(frameon=False, fontsize='small')\n plt.savefig(plot_dir+'line_{0}_zoomout.png'.format(line_id), bbox_inches='tight', dpi=300)\n\n # plot of posterior pdf for redshift\n plt.figure(3, figsize=(4.5, 3.75))\n z_true = linestore['summary']['True']['z']\n v0 = linestore['chain']['v0']\n z_pdf = -v0/(v0+3e5)\n plt.hist((z_pdf-z_true)*1.e5, histtype='step', normed=True, color=cpal[-1])\n plt.axvline(0, linestyle='--', color=cpal[3])\n plt.ylabel('$\\mathrm{Probability}\\, P(z - z_{\\\\rm true})$')\n plt.xlabel('$10^{5}\\\\times\\mathrm{Redshift \\, Error} \\, \\Delta z$')\n plt.savefig(plot_dir+'line_{0}_zpdf.png'.format(line_id), bbox_inches='tight', dpi=300)\n linestore.close()\n\nif __name__=='__main__':\n\n makePlot('ID12600790', '../data/output_snr_1_band_2/')", "id": "9785948", "language": "Python", "matching_score": 3.3964881896972656, "max_stars_count": 1, "path": "plotting_code/plot_line_data.py" }, { "content": "from __future__ import division, print_function\nimport numpy as np\n\n\nclass LineProfile:\n \"\"\"\n Class describing a symmetric HI 21cm spectral line profile as parameterised in\n Obreschkow et al (2009) arXiv:0908.0983.\n \"\"\"\n def __init__(self,\n v0,\n w_obs_20, w_obs_50, w_obs_peak,\n psi_obs_max, psi_obs_0):\n \"\"\"\n HI line profile\n\n Parameters\n ----------\n v0 : number\n Observed velocity [km/s]\n w_obs_peak : float\n Line width between the two horns of the HI-line profile [km/s]\n w_obs_50 : float\n Line width at 50% of peak luminosity density [km/s]\n w_obs_20 : float\n Line width at 20% of peak luminosity density [km/s]\n psi_obs_max : float\n Normalized peak flux density [Jy]\n psi_obs_0 : float\n Normalized flux density at the line center [Jy]\n \"\"\"\n self.v0 = v0\n self.w_obs_20 = w_obs_20\n self.w_obs_50 = w_obs_50\n self.w_obs_peak = w_obs_peak\n self.psi_obs_max = psi_obs_max\n self.psi_obs_0 = psi_obs_0\n\n def _k1(self):\n \"\"\"\n Functional form parameter for HI line profile.\n As defined in Obreschkow et al (2009) arXiv:0908.0983 Eq. A2\n \n Returns\n -------\n k1 : number\n \"\"\"\n numerator = np.log(self.w_obs_50 - self.w_obs_peak) - np.log(2.)\n denominator = np.log(self.w_obs_50 - self.w_obs_peak) - np.log(self.w_obs_20 - self.w_obs_peak)\n\n retvar = -0.693*pow(2.322, numerator/denominator)\n if retvar == 0.0:\n retvar = -1.e-11\n return retvar\n\n def _k2(self):\n \"\"\"\n Functional form parameter for HI line profile.\n As defined in Obreschkow et al (2009) arXiv:0908.0983 Eq. A3\n \n Returns\n -------\n k2 : number\n \"\"\"\n retvar = 0.842/(np.log(self.w_obs_20 - self.w_obs_peak) - np.log(self.w_obs_50 - self.w_obs_peak))\n\n return retvar\n\n def _k3(self):\n \"\"\"\n Functional form parameter for HI line profile.\n As defined in Obreschkow et al (2009) arXiv:0908.0983 Eq. A4\n \n Returns\n -------\n k3 : number\n \"\"\"\n retvar = self.w_obs_peak/2.\n\n return retvar\n\n def _k4(self):\n \"\"\"\n Functional form parameter for HI line profile.\n As defined in Obreschkow et al (2009) arXiv:0908.0983 Eq. A5\n \n Returns\n -------\n k4 : number\n \"\"\"\n if self.psi_obs_max == self.psi_obs_0:\n retvar = 0.\n elif self.psi_obs_max > 0:\n retvar = 0.25 * (((self.w_obs_peak**2.)*(self.psi_obs_max**2.))\n /(self.psi_obs_max**2. 
- self.psi_obs_0**2.))\n else:\n # Something went wrong\n retvar = -1\n\n return retvar\n\n def _k5(self):\n \"\"\"\n Functional form parameter for HI line profile.\n As defined in Obreschkow et al (2009) arXiv:0908.0983 Eq. A6\n \n Returns\n -------\n k5 : number\n \"\"\"\n retvar = self.psi_obs_0*np.sqrt(self._k4())\n return retvar\n\n def get_line_profile(self, v, noise=0):\n \"\"\"\n Produces a parameterised HI spectral line profile as specified in\n Obreschkow et al (2009) arXiv:0908.0983 Eq A1 plotted over a velocity\n interval v.\n\n Parameters\n ----------\n v : '~np.ndarray'\n Velocity range over which to plot the line profile [km/s]\n noise : float or array, optional\n If not zero, generates noise from a normal distribution defined by either a float (sigma) or an array\n (same size as v).\n\n Returns\n -------\n psi : '~np.ndarray'\n Array containing the line profile.\n \"\"\"\n v = v.copy()\n v -= self.v0\n psi = np.zeros_like(v)\n\n fact = 20\n mask = abs(v) < fact*self.w_obs_20\n v1 = (abs(v) >= self.w_obs_peak / 2.) * mask\n v2 = (abs(v) < self.w_obs_peak / 2.) * (self.psi_obs_max > self.psi_obs_0) * mask\n v3 = (abs(v) < self.w_obs_peak / 2.) * (self.psi_obs_max == self.psi_obs_0) * mask\n\n psi[v1] = self.psi_obs_max * np.exp(self._k1() * pow(abs(v[v1]) - self._k3(), self._k2()))\n psi[v2] = self._k5() * pow(self._k4() - v[v2] ** 2., -0.5)\n psi[v3] = self.psi_obs_0\n\n norm = psi.max() / self.psi_obs_max\n psi = psi * norm\n\n if hasattr(noise, \"__len__\") or noise != 0:\n noise = np.random.randn(len(psi)) * noise\n psi = psi + noise\n\n return psi\n", "id": "8604525", "language": "Python", "matching_score": 1.3299126625061035, "max_stars_count": 1, "path": "radio_z/hiprofile.py" }, { "content": "from __future__ import division, print_function\nimport pandas as pd\nfrom tables.exceptions import HDF5ExtError # Needed to catch errors when loading hdf5 files\nimport numpy as np\nfrom scipy.interpolate import interp1d\nimport matplotlib.pyplot as plt\nfrom radio_z import hiprofile\nimport os, glob\nfrom functools import partial\nfrom multiprocessing import Pool\n\n\nclass SaxCatalogue:\n \"\"\"\n Reads in a Sax catalogue\n \"\"\"\n\n def __init__(self, filename, nu_rest=1.42e3):\n \"\"\"\n Reads in a Sax catalogue\n\n Parameters\n ----------\n filename : str\n Either an ascii, csv file or an hdf5 file\n nu_rest : float, optional\n Rest frame frequency of the line in question (e.g. 
neutral hydrogen) in MHz\n \"\"\"\n self.filename = filename\n\n if not os.path.exists(filename):\n print(filename, \"not found\")\n raise IOError\n self.nu_rest = nu_rest\n self.important_parameters = ['v0', 'w_obs_20', 'w_obs_50', 'w_obs_peak', 'psi_obs_max', 'psi_obs_0']\n self.ids = [] # This can only be set by reading in the original catalogue file\n\n def convert_parameters(self, df):\n \"\"\"\n Given a pandas dataframe with SAX parameters, computes the parameters needed to create an HI profile and\n stores them in the same dataframe.\n\n Parameters\n ----------\n df : pandas.Dataframe\n A catalogue of SAX objects as a dataframe\n \"\"\"\n try:\n df['id'].astype(int)\n df['id'] = 'ID' + df['id'].astype(int).astype(str) # For HDF5 naming conventions\n except ValueError:\n pass # Already in correct format\n df['v0'] = - 3e5*df['zapparent']/(1 + df['zapparent'])\n df['w_obs_20'] = df['hiwidth20']\n df['w_obs_50'] = df['hiwidth50']\n df['w_obs_peak'] = df['hiwidthpeak']\n df['psi_obs_max'] = df['hiintflux']*df['hilumpeak']\n df['psi_obs_0'] = df['hiintflux']*df['hilumcenter']\n\n def compute_snr(self, df):\n \"\"\"\n Function to compute the SNR for a catalogue\n\n Parameters\n ----------\n df : pandas.DataFrame\n The catalogue dataframe\n \"\"\"\n if 'v0' not in df.columns:\n self.convert_parameters(df)\n surv1 = Survey('ska1_band1')\n surv2 = Survey('ska1_band2')\n\n noise1 = surv1.get_noise(np.array(df.v0))\n noise2 = surv2.get_noise(np.array(df.v0))\n snr1 = np.array(df['psi_obs_0']) / noise1\n snr2 = np.array(df['psi_obs_0']) / noise2\n\n delta_v = surv1.nu2v(1+surv1.delta_nu) - surv1.nu2v(1)\n\n snr_santos1 = df['hiintflux']/np.sqrt(delta_v*df['w_obs_peak']) / noise1\n snr_santos2 = df['hiintflux'] / np.sqrt(delta_v * df['w_obs_peak']) / noise2\n\n df['snr_band1_std'] = snr1 # Experiment with different definition of SNR\n df['snr_band2_std'] = snr2\n df['snr_band1_santos'] = snr_santos1\n df['snr_band2_santos'] = snr_santos2\n\n def get_data(self):\n \"\"\"\n Actually reads in the S3 parameters\n\n Returns\n -------\n df : pandas.DataFrame\n Sax catalogue in the form of a pandas dataframe\n\n \"\"\"\n try:\n hstore = pd.HDFStore(self.filename)\n # We'll assume the data is stored in a child table in the hdf5 file\n key = hstore.keys()[-1]\n df = hstore[key]\n hstore.close()\n\n except HDF5ExtError:\n # In which case this is a sql.result ascii file\n df = pd.read_csv(self.filename)\n\n self.convert_parameters(df)\n self.compute_snr(df)\n\n self.ids = (list)(df['id'])\n\n return df\n\n def get_params(self, df, ind=''):\n \"\"\"\n Gets the true parameters for a dataframe as an array (useful for plotting etc.)\n Parameters\n ----------\n df : pandas.DataFrame\n Catalogue dataframe\n ind : str\n ID of object to return\n\n Returns\n -------\n array\n Returns just the true parameters\n \"\"\"\n if 'id' in df.columns:\n return df[df['id'] == ind][self.important_parameters].as_matrix()[0]\n else:\n return df[self.important_parameters].as_matrix()[0]\n\n def get_ids(self):\n \"\"\"\n Returns a list of object identifiers in this catalogue.\n\n Returns\n -------\n IDs : list\n List of IDs (as strings)\n \"\"\"\n if len(self.ids) != 0:\n return self.ids\n\n else:\n try:\n hstore = pd.HDFStore(self.filename)\n # We'll assume the data is stored in a child table in the hdf5 file\n key = hstore.keys()[-1]\n df = hstore[key]\n hstore.close()\n\n except HDF5ExtError:\n # In which case this is a sql.result ascii file\n df = pd.read_csv(self.filename)\n\n ids = df['id']\n\n if len(ids) == 0:\n 
print('Warning: no ID found.')\n else:\n if 'ID' not in df.iloc[0]['id']:\n ids = 'ID' + ids.astype(int).astype(str)\n ids = (list)(ids)\n\n self.ids = ids\n return ids\n\n\n def write_params(self, id, df, filepath):\n \"\"\"\n Function to write the original catalogue parameters of an object to its HDF5 file\n Parameters\n ----------\n id : string\n Identifier of galaxy\n df : pandas.DataFrame\n Catalogue dataframe\n filepath : string\n HDF5 file for the object\n \"\"\"\n outfile = os.path.join(filepath, (str)(id) + '.hdf5')\n params = df[df['id'] == id]\n params.to_hdf(outfile, 'parameters')\n\n def write_to_disk(self, filepath='./', nprocesses=1):\n \"\"\"\n Reads the catalogue file, extracts the 6 parameters we care about and then stores them as individual hdf5 files\n for each object.\n\n Parameters\n ----------\n filepath : str\n The file path where to store all the files. The files will be saved as <object id>.hdf5\n \"\"\"\n df = self.get_data()\n\n if nprocesses == 1:\n for i in df['id']:\n self.write_params(i, df, filepath)\n else:\n new_func = partial(self.write_params, df=df, filepath=filepath)\n p = Pool(nprocesses)\n p.map(new_func, df['id'])\n\n\n\nclass Survey:\n \"\"\"\n Collects useful, survey-specific methods and attributes\n \"\"\"\n def __init__(self, survey_name):\n \"\"\"\n Initialise survey attributes\n\n Parameters\n ----------\n survey_name : str\n The name of the survey (e.g. 'ska1_band1')\n \"\"\"\n\n self.nantennas = 190 # Needed for noise calculation\n\n if survey_name == 'ska1_band1':\n self.name = survey_name\n self.nu_min = 350\n self.nu_max = 1050\n self.delta_nu = 0.01\n self.s_rms = 315.e-6\n self.z_min = 0.35\n self.z_max = 3.06\n self.band = 1\n\n elif survey_name == 'ska1_band2':\n self.name = survey_name\n self.nu_min = 950\n self.nu_max = 1760\n #self.nu_max = 1420\n self.delta_nu = 0.01\n self.s_rms = 187.e-6\n self.z_min = 0\n self.z_max = 0.5\n self.band = 2\n\n else:\n print('Survey name not recognised')\n\n self.kb = 1.38e-16/1.e4/1.e-23 # Boltzmann constant\n\n def aeff_on_tsys(self, nu, normed_at_1ghz=False, makeplot=False, band=0):\n \"\"\"\n Function to calculate SKA *Band 1* A_eff / T_sys.\n Functionisation of python code from <NAME>, which in turn was based on\n the SuperMongo code 'sensaswbx.mon' from <NAME>, who comments:\n \"Its all quite crude, but it reproduces the SEFD numbers provided by the\n DSH consortium in their PDR documentation.\"\n\n Use make_plot = True for testing, compare with figure 1 in http://astronomers.skatelescope.org/wp-content/uploads/2015/11/SKA1-Observing-Bands-V4.pdf\n\n Parameters\n ----------\n nu : float\n Input frequency [MHz]\n normed_at_1ghz : bool, optional\n If True, return normalised to the value at 1GHz\n (for re-scaling S_rms^ref as defined by Santos et al)\n Default = False\n makeplot : bool, optional\n If True, make and show a plot with the A_eff / T_sys\n Default = False\n band : int, optional\n If 0, use interpolation function for all five bands.\n If 1 or 2 use interpolation function for that band.\n \"\"\"\n\n # Frequency coverage\n frq = np.linspace(0.35, 20., 4000)\n lfrq = np.log10(frq)\n\n # SKA1-MID parameters\n sdeg = 1. # FOV in sq. deg.? (PJB: Not sure...)\n nant = 1. # No. of antennae (1 == A_eff/T_sys per antenna)\n stad = 15. # Dish diameter [m]\n\n # Define T_recv piecewise, in each band (bands 1,5 are defined in two pieces)\n # trcvb1a = 17. + 3.*(frq-0.35)/(1.05-0.35)\n # trcvb1b = 17. + 3.*(frq-0.35)/(1.05-0.35)\n\n #FUDGED TO MATCH LATEST DOCUMENT\n trcvb1a = 6. 
+ 11.75 * (frq - 0.35) / (1.05 - 0.35)\n trcvb1b = 6. + 11.75 * (frq - 0.35) / (1.05 - 0.35)\n\n trcvb2 = 8.2 + 0.7*(frq-0.95)/(1.76-0.95)\n trcvb3 = 10.6 + 1.5*(frq-1.65)/(3.05-1.65)\n trcvb4 = 14.3 + 2.4*(frq-2.8)/(5.18-2.8)\n trcvb5 = 16.7 + 6.1*(frq-4.6)/(13.8-4.6)\n trcvb5b = 17. + 6.*(frq-4.6)/(24.-4.6)\n tsky = 20. * (0.408/frq)**2.75 + 2.73 \\\n + 288. * ( 0.005 + 0.1314 * np.exp((lfrq-np.log10(22.23))*8.) ) # T_sky\n tspl = 4.0 # T_spillover?\n tgnd = 300. # T_ground?\n\n # Aperture efficiency as a fn. of frequency\n etaa0 = 0.92\n etaa = etaa0 - 70.*((3.e8/(frq*1.e9))/stad)**2.\n etaa = etaa - 0.36*(np.abs(frq-1.6)/(24.-1.6))**0.6\n\n # Band boundaries (GHz)\n frb1alo = 0.35\n # frb1ahi = 0.58\n frb1ahi = 1.05\n frb1blo = 0.58\n frb1bhi = 1.05\n frb2lo = 0.95\n frb2hi = 1.76\n frb3lo = 1.65\n frb3hi = 3.05\n frb4lo = 2.8\n frb4hi = 4.6\n frb5lo = 4.6\n frbd5hi = 13.8\n frb5hi = 26.\n\n # Initialise SEFD, FOV, T_recv arrays\n sefd = 1e6 * np.ones(lfrq.shape)\n fov = np.ones(lfrq.shape)\n trcv = np.ones(lfrq.shape)\n\n # Calculate piecewise A_eff / T_sys curve\n # (N.B. Ordering seems to have been chosen to take the largest value of A/T when\n # two bands overlap)\n bands = [\n (frb1ahi, frb1alo, trcvb1a),\n # (frb1bhi, frb1blo, trcvb1b),\n (frb5hi, frb5lo, trcvb5b),\n (frb4hi, frb4lo, trcvb4),\n (frb3hi, frb3lo, trcvb3),\n (frb2hi, frb2lo, trcvb2),\n ]\n trcv_bands = []\n\n df = frq[1] - frq[0]\n #df = 0\n\n for fhi, flo, _trcv in bands:\n\n idx = np.where(np.logical_and(frq <= fhi+df, frq >= flo-df))\n trcv[idx] = _trcv[idx] # Overall T_recv\n\n\n # Get per-band T_recv curve\n trcv_band = np.inf * np.ones(trcv.shape) # Should make A/T -> 0 out of band\n trcv_band[idx] = _trcv[idx]\n trcv_bands.append(trcv_band)\n trcv_bands = np.array(trcv_bands) # Array containing T_rcv separately, for each band\n\n # Calculate T_sys, A_eff, SEFD across all bands\n tsys = trcv + tsky + tspl\n aeff = nant * etaa * np.pi * stad**2./4.\n sefd = 2.*self.kb*tsys/aeff\n\n # Calculate FOV, A/T, survey speed\n fovd = 2340. * ((3.e8/(frq*1.e9))/stad)**2.\n aont = 2.*self.kb/sefd/sdeg\n surv = aont**2*fovd\n\n # Do the same for the separate bands\n aot_bands = []\n for i in range(trcv_bands.shape[0]):\n _tsys = trcv_bands[i] + tsky + tspl\n _aeff = nant * etaa * np.pi * stad**2./4.\n aot_bands.append( _aeff / _tsys )\n aot_bands = np.array(aot_bands) # per-band A_eff / T_sys\n\n # Construct interpolation function for A/T (takes freq. argument in MHz)\n interp_aont = interp1d(frq*1e3, aont, kind='linear',\n bounds_error=False)\n\n if makeplot:\n print(interp_aont(1000.))\n\n # Plot results\n ff = np.logspace(np.log10(350.), np.log10(20e3), 1000)\n #plt.subplot(111)\n\n # Combined curve\n plt.plot(ff, interp_aont(ff), 'k-', lw=1.5)\n\n # Per-band\n for i in range(trcv_bands.shape[0]):\n plt.plot(frq*1e3, aot_bands[i], 'y--', lw=1.5)\n\n plt.xlabel(\"Freq. 
[MHz]\", fontsize=18)\n plt.ylabel(r\"$A_{\\rm eff} / T_{\\rm sys}$\", fontsize=18)\n\n plt.xscale('log')\n plt.xlim((300., 30e3))\n plt.tight_layout()\n plt.savefig('aeff_on_tsys.png', bbox_inches='tight', dpi=160)\n\n if band == 0:\n interp_aont_return = interp_aont\n elif band == 1:\n aot_b1_full = aot_bands[0]\n interp_aont_return = interp1d(frq*1e3,\n aot_b1_full,\n kind='linear',\n bounds_error=False)\n elif band == 2:\n interp_aont_return = interp1d(frq*1e3,\n aot_bands[4],\n kind='linear',\n bounds_error=False)\n\n if normed_at_1ghz:\n return interp_aont_return(nu) / interp_aont_return(1000.)\n else:\n return interp_aont_return(nu)\n\n def v2nu(self, v, nu_rest=1.42e3):\n \"\"\"\n Convert velocity measurements back to observed frequency measurements.\n\n Parameters\n ----------\n v : array\n Velocities (in km/s)\n nu_rest : float, optional\n Rest frame frequency\n\n Returns\n -------\n array\n Frequency array\n\n \"\"\"\n return (v/3.e5 + 1)*nu_rest\n\n def nu2v(self, nu, nu_rest=1.42e3):\n \"\"\"\n Convert frequencies to velocity around rest frame reference frequency\n\n Parameters\n ----------\n nu : array\n Frequencies\n nu_rest : float, optional\n Rest frame frequency\n\n Returns\n -------\n array\n Velocity array in km/s\n \"\"\"\n\n return 3.e5*(nu/nu_rest - 1.e0)\n\n def get_noise(self, v, nu_rest=1.42e3):\n \"\"\"\n Returns the noise as a function of v for a given survey.\n\n Parameters\n ----------\n v : array\n Velocities\n nu_rest : float, optional\n Observing frequency\n\n Returns\n -------\n sigma : array\n Noise on the flux as a function of v\n \"\"\"\n\n nu = self.v2nu(v, nu_rest = nu_rest)\n\n AoT = self.aeff_on_tsys(nu, normed_at_1ghz=False, band=self.band, makeplot=False)\n tp = 1.76 * (1 / (nu / 1000)) ** 2 # Assumed time per pointing\n # Equation from Yahya et al (2015)\n return 260e-6 * (25000/20) / (AoT * self.nantennas) * (0.01 / self.delta_nu) ** (0.5) * (1 / tp) ** 0.5\n\n\n\n def inband(self, df):\n \"\"\"\n Given a catalogue as a dataframe objects, as a column that tests if the object will be observable in the\n chosen band.\n\n Parameters\n ----------\n df : pandas.DataFrame\n SAX catalogue as a pandas object\n \"\"\"\n\n df['inband'] = (df['zapparent']>=self.z_min) & (df['zapparent']<=self.z_max)\n\n\nclass DataFromCatalogue:\n \"\"\"\n Generate fake data from a catalogue\n \"\"\"\n def __init__(self):\n \"\"\"\n Tools to generate mock data.\n \"\"\"\n self.profile_params = ['v0', 'w_obs_20', 'w_obs_50', 'w_obs_peak', 'psi_obs_max', 'psi_obs_0']\n\n def create_data(self, params, survey, noise=True):\n \"\"\"\n Generate fake data from a dataframe of parameters and for a given survey.\n\n Parameters\n ----------\n params : list-like\n Contains the profile parameters (v0, w_obs_20, w_obs_50, w_obs_peak, psi_obs_max, psi_obs_0)\n survey : saxdata.Survey\n Which survey to do this for\n noise : bool, optional\n Whether or not to add noise\n\n Returns\n -------\n pandas.DataFrame\n Dataframe containing the velocities, psi values and noise\n \"\"\"\n nchan = int(survey.nu_max - survey.nu_min)/survey.delta_nu\n nu_range = np.linspace(survey.nu_min, survey.nu_max, nchan)\n v_range = survey.nu2v(nu_range)\n\n lp = hiprofile.LineProfile(*params)\n\n if noise:\n sigma = survey.get_noise(v_range)\n else:\n sigma = [0]*len(v_range)\n psi = lp.get_line_profile(v_range, sigma)\n\n return pd.DataFrame(data=np.column_stack([v_range, psi, sigma]), columns=['v', 'psi', 'psi_err'])\n\n def create_data_from_file(self, fl, cols, survey):\n \"\"\"\n Does what it says on the 
box. Creates the simulated data from a set of parameters in an HDF5 file.\n\n Parameters\n ----------\n fl : string\n HDF5 file where parameters are contained and data is to be saved\n cols : list\n Names of the parameters used to generate a line profile\n survey : saxdata.Survey\n Survey object initialised to either band 1 or band 2\n \"\"\"\n hstore = pd.HDFStore(fl)\n params = hstore['parameters'][cols]\n hstore.close()\n params = params.as_matrix()[0].tolist()\n data = self.create_data(params, survey, noise=True)\n data.to_hdf(fl, 'data')\n\n def create_from_cat(self, survey, df=[], filepath='./', nprocesses=1):\n \"\"\"\n Generates all data from a set of objects in a catalogue (supplied either as a dataframe or as existing hdf5\n files) and stores the data in either existing or new hdf5 files (one per object).\n\n Parameters\n ----------\n survey : saxdata.Survey\n Which survey to make this for\n df : pandas.DataFrame, optional\n The catalogue with parameters for each object\n filepath : str, optional\n Where to put the output hdf5 files\n \"\"\"\n\n\n if len(df) != 0:\n ids = df.id\n for i in ids:\n params = df[df.id == i][self.profile_params]\n params = params.as_matrix()[0].tolist()\n\n data = self.create_data(params, survey, noise=True)\n\n outfile = os.path.join(filepath, (str)(i)+'.hdf5')\n data.to_hdf(outfile, 'data')\n else:\n files = glob.glob(filepath+'ID*.hdf5')\n\n if len(files) == 0:\n print('Cannot find object files matching pattern <ID*.hdf5>. Please supply DataFrame or correct path.')\n\n else:\n if nprocesses == 1:\n for f in files:\n self.create_data_from_file(f, self.profile_params, survey)\n else:\n new_func = partial(self.create_data_from_file, cols=self.profile_params, survey=survey)\n p = Pool(nprocesses)\n p.map(new_func, files)\n\n\n\n\n def plot_profile(self, df, plot_model=False, model_params=[], plot_fit=False, fit_params=[], zoom=True,\n fontsize=14, data_colour='#c2c2d6', model_colour='k', fit_colour='r', rotation=0):\n \"\"\"\n Plots a single data set\n\n Parameters\n ----------\n df : pandas.DataFrame or array\n Contains the data in columns 'v', 'psi', 'psi_err'\n plot_model : bool, optional\n Whether or not to overplot the underlying model\n model_params : list-like, optional\n If the model is to be plotted, list of parameters\n plot_fit : bool, optional\n Whether or not to overplot a best fit model\n fit_params : list-like, optional\n If the best fit is to be plotted, list of parameters\n zoom : bool, optional\n Whether to zoom in on the region where the line actually is or to plot the whole thing\n fontsize : float, optional\n Font size of labels\n data_colour : str, optional\n Colour of the plotted data\n model_colour : str, optional\n Colour of the plotted model\n fit_colour : str, optional\n Colour of the plotted fit\n rotation : float, optional\n Angle by which to rotate x labels\n \"\"\"\n\n if isinstance(df, pd.DataFrame):\n v = df['v'].as_matrix()\n psi = df['psi'].as_matrix()\n else:\n v = df[:, 0]\n psi = df[:, 1]\n\n plt.plot(v, psi, color=data_colour)\n\n if plot_model:\n model_params = list(model_params)\n lp = hiprofile.LineProfile(*model_params)\n psi_model = lp.get_line_profile(v, noise=0)\n plt.plot(v, psi_model, color=model_colour, lw=1.5)\n\n if plot_fit:\n fit_params = list(fit_params)\n lp = hiprofile.LineProfile(*fit_params)\n psi_fit = lp.get_line_profile(v, noise=0)\n plt.plot(v, psi_fit, color=fit_colour, lw=1.5)\n\n if zoom and len(model_params) != 0: # We need to know where the true line is if we want to zoom in\n delta = 
5*model_params[1]\n if not model_params[0]-delta < v.min():\n plt.xlim([model_params[0]-delta, model_params[0]+delta])\n\n plt.xlabel('Velocity (km/s)',fontsize=fontsize)\n plt.ylabel('Normalised flux density (Jy s/km)',fontsize=fontsize)\n plt.xticks(rotation=rotation)\n\n plt.tight_layout()\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", "id": "3746681", "language": "Python", "matching_score": 5.497889518737793, "max_stars_count": 1, "path": "radio_z/saxdata.py" }, { "content": "from __future__ import division, print_function\nimport pymultinest\nimport numpy as np\nimport pandas as pd\nfrom radio_z import hiprofile, contour_plot\nfrom collections import OrderedDict\nimport os\nimport time\nimport sys\nimport glob\nfrom multiprocessing import Pool\nfrom functools import partial\nfrom scipy.interpolate import interp1d\nimport matplotlib.pyplot as plt\nfrom tables.exceptions import HDF5ExtError # Needed to catch errors when loading hdf5 files\n\n\ndef _fit_object(filename, output_dir='output', save_to_hdf=True, delete_files=False, n_live_points=500, log_file=''):\n \"\"\"\n Given a key, fits a single spectral line from a catalogue. External function to the FitCatalogue class to get\n around the pickling issues in the multiprocessing library.\n\n Parameters\n ----------\n key : str\n ID of object\n cat : pandas.DataFrame or dict\n Contains the catalogue of all the objects\n output_dir : str, optional\n Output directory. Chains will automatically be named using the ID.\n n_live_points : int, optional\n Number of live points for multinest\n convert_to_binary : bool, optional\n If true, converts the multinest output files to binary numpy files to save space.\n \"\"\"\n id = filename.split(os.sep)[-1].split('.')[0]\n print('Fitting object', id)\n fd = FitData(filename=filename)\n t1 = time.time()\n fd.fit(chain_name=output_dir + '/' + id + '-', save_to_hdf=save_to_hdf, delete_files=delete_files,\n n_live_points=n_live_points)\n # time.sleep(np.random.randn()*2+5)\n tm = time.time() - t1\n\n if len(log_file)!= 0:\n prms = pd.read_hdf(filename, 'parameters')\n fl = open(log_file, 'a')\n fl.write('%s\\t%2.2f\\t%2.2f\\t%3.2f\\n' %(id, prms.snr_band1_santos, prms.snr_band2_santos, tm/60.))\n fl.close()\n\n\nclass FitData:\n \"\"\"\n Encapsulated class for fitting some HI profile data\n \"\"\"\n def __init__(self, read_from_hdf=True, filename='chain.hdf5', v=[], psi=[], sigma=[], bounds=[]):\n \"\"\"\n Class for using Multinest for inference on a single galaxy. Either read the data from an object HDF5 file (in\n the 'data' table) or provide the data directly in the arguments. 
Can also save the output chain directly to\n the same HDF5 file.\n\n Parameters\n ----------\n read_from_hdf : boolean, optional\n If true, read the data directly from an individual object's HDF5 file\n filename : str, optional\n The HDF5 file to read the data from and/or write the output chain to\n v : array, optional\n Velocities (use if read_from_hdf = False)\n psi : array, optional\n Flux (use if read_from_hdf = False)\n sigma : array or float, optional\n Uncertainties in the flux (use if read_from_hdf = False)\n bounds : OrderedDict, optional\n Uniform prior bounds on the parameters\n \"\"\"\n if not os.path.exists(filename):\n print(filename, \"does not exist\")\n raise IOError\n\n self.filename = filename\n\n self.complib = 'bzip2' # What compression library should be used when storing hdf5 files\n\n if read_from_hdf:\n try:\n hstore = pd.HDFStore(self.filename)\n # We'll assume the data is stored in a child table in the hdf5 file\n data = hstore['data']\n self.v, self.psi, self.sigma = data.as_matrix().T\n\n except HDF5ExtError:\n if len(v) == 0:\n print('Error: File provided is not an HDF5 file or is corrupt. Please provide v, psi and sigma '\n 'instead.')\n sys.exit(0)\n else:\n print('Warning: File provided is not an HDF5 file or is corrupt')\n\n self.v = v\n self.psi = psi\n self.sigma = sigma\n else:\n self.v = v\n self.psi = psi\n self.sigma = sigma\n\n if len(bounds) == 0:\n vmax = self.v.max()\n if vmax > 0:\n vmax = 0 # Redshift can't be less than zero\n self.bounds = OrderedDict([\n ('v0', [self.v.min(), vmax]),\n ('w_obs_20', [-1, 7.5]),\n ('w_obs_50', [-1, 7.5]),\n ('w_obs_peak', [-1, 7.5]),\n ('psi_obs_max', [-11, -2]),\n ('psi_obs_0', [-11, -2])\n ])\n else:\n self.bounds = bounds\n\n self.log_params = np.arange(1, 6)\n\n self.ndim = len(self.bounds)\n self.likecalls = 0\n\n def apply_bounds(self, params):\n \"\"\"\n Additional prior to ensure constraints are met in the sampling.\n \"\"\"\n return (params[1] > params[2]) and (params[2] > params[3]) and (params[4] > params[5]) and (params[3]>0)\n\n def loglike(self, cube, ndim, nparams):\n \"\"\"\n Log likelihood for multinest\n\n Parameters\n ----------\n cube : array-like\n Input parameter cube\n ndim : int\n Number of dimensions\n nparams : int\n Number of parameters actually varying\n\n Returns\n -------\n float\n Log likelihood\n \"\"\"\n self.likecalls += 1\n params = []\n\n\n # This is the only obvious way to convert a ctypes pointer to a numpy array\n for i in range(nparams):\n if i in self.log_params:\n p = np.exp(cube[i])\n else:\n p = cube[i]\n params.append(p)\n # Now we check to make sure all conditions in eq.8-12 of Obreschkow are met\n if not self.apply_bounds(params):\n return -1e110 # Default multinest \"zero\" value is -1e100. 
We must return less than that.\n\n lp = hiprofile.LineProfile(*params)\n psi_fit = lp.get_line_profile(self.v, noise=0)\n return -0.5*np.sum(((psi_fit-self.psi)/self.sigma)**2)\n\n def prior(self, cube, ndim, nparams):\n \"\"\"\n Prior for multinest\n\n Parameters\n ----------\n cube : array-like\n Input parameter cube\n ndim : int\n Number of dimensions\n nparams : int\n Number of parameters actually varying\n\n Returns\n -------\n array\n Transformed parameter cube\n\n \"\"\"\n keys = list(self.bounds.keys())\n for i in range(ndim):\n lower = self.bounds[keys[i]][0]\n upper = self.bounds[keys[i]][1]\n cube[i] = cube[i]*(upper-lower)+lower\n return cube\n\n\n\n def fit(self, chain_name='hi_run', save_to_hdf=True, delete_files=False, n_live_points=500, multimodal=True):\n \"\"\"\n Actually run multinest to fit model to the data\n\n Parameters\n ----------\n n_live_points : int, optional\n Number of live points to use\n chain_name : str, optional\n Root for all the chains (including directory)\n Note: This path can't be too long because multinest has a hardcoded character limit (100 characters)\n save_to_hdf : boolean, optional\n Whether or not to store the chain (only the equal weighted posterior) and the evidence in the object hdf5\n file (provided at initialisation)\n delete_files : boolean, optional\n Whether or not to delete the base chain files (will not exectue if not saved to hdf5 first)\n multimodal : boolean, optional\n Whether or not to run multinest in multimodal mode. If true, can occasionally fix modes too early so it's\n worth changing for difficult problems.\n \"\"\"\n t1 = time.time()\n pymultinest.run(self.loglike, self.prior, self.ndim, importance_nested_sampling = True, init_MPI = False,\n resume = False, verbose = False, sampling_efficiency = 'model', evidence_tolerance = 0.5,\n n_live_points = n_live_points, outputfiles_basename = chain_name, multimodal = multimodal)\n\n if save_to_hdf:\n # These are the files we can convert\n x = np.loadtxt(chain_name+'post_equal_weights.dat')\n df = pd.DataFrame(data=x, columns=list(self.bounds.keys())+['loglike'])\n df.to_hdf(self.filename, 'chain', complib=self.complib)\n\n ev, ev_sig, ev_is = self.read_evidence(chain_name)\n bayes_fact, bayes_sig = self.compute_evidence_ratio(chain_name)\n df_ev = pd.DataFrame(data=np.array([[ev, ev_sig, ev_is, bayes_fact]]), columns=['ln(evidence)',\n 'uncertainty',\n 'IS ln(evidence)',\n 'Bayes factor'])\n df_ev.to_hdf(self.filename, 'evidence', complib=self.complib)\n\n if delete_files:\n fls = glob.glob(chain_name+'*')\n print('Deleting files')\n for f in fls:\n os.system('rm '+f)\n\n print('Time taken', (time.time()-t1)/60, 'minutes')\n\n def compute_null_evidence(self):\n \"\"\"\n Computes the Bayesian evidence for the \"null hypothesis\" (i.e. 
y=0)\n\n Returns\n -------\n float\n Bayesian evidence\n \"\"\"\n return -0.5*np.sum((self.psi/self.sigma)**2)\n\n def read_evidence(self, chain_name):\n \"\"\"\n Reads in the ln(evidence) and uncertainty for a run multinest chain.\n\n Parameters\n ----------\n chain_name : str\n The name of an already run chain where the evidence is stored\n\n Returns\n -------\n float\n ln(evidence)\n float\n Uncertainty in ln(evidence)\n \"\"\"\n lns = open(chain_name+'stats.dat').readlines()\n line = lns[0].split(':')[1].split()\n ev = float(line[0])\n ev_sig = float(line[-1])\n line = lns[1].split(':')[1].split() # Get the importance sampled evidence\n ev_is = float(line[0])\n return ev, ev_sig, ev_is\n\n def compute_evidence_ratio(self, chain_name):\n \"\"\"\n Computes the Bayesian evidence ratio of the fitted model (M2) to the \"null hypothesis\" (M1)\n\n Parameters\n ----------\n chain_name : str\n The name of an already run chain where the evidence is stored\n\n Returns\n -------\n float\n ln(E2/E1)\n float\n Uncertainty in ln(E2/E1)\n \"\"\"\n\n E2, E2_sig, E_is = self.read_evidence(chain_name)\n\n E1 = self.compute_null_evidence()\n return E2-E1, E2_sig\n\n\nclass FitCatalogue:\n \"\"\"\n Fit an entire catalogue of data\n \"\"\"\n def __init__(self, filepath='./', log_name='log', append_to_log=False, subset=[]):\n \"\"\"\n Class to fit a catalogue of data, in parallel if requested. Assumes data are stored as individual HDF5 files\n in a single directory.\n Parameters\n ----------\n filepath : str, optional\n Catalogue of data where each object is a different HDF5 file\n \"\"\"\n self.filepath = filepath\n self.subset = subset\n if len(log_name) != 0:\n self.log_file = os.path.join(filepath, log_name+'.txt')\n if not append_to_log:\n fl = open(self.log_file, 'w')\n fl.write('%s\\tSNR1\\tSNR2\\tTime\\n' %('#ID'.ljust(11)))\n fl.close()\n else:\n self.log_file = ''\n\n def fit_all(self, nprocesses=1, output_dir='output', save_to_hdf=True, delete_files=False, n_live_points=500):\n \"\"\"\n Fits all the spectral lines in a catalogue.\n\n Parameters\n ----------\n nprocesses : int, optional\n Number of processors to be used (note: parallelisation only available with shared memory)\n output_dir : str, optional\n Output directory. 
Chains will automatically be named using the ID.\n n_live_points : int, optional\n Number of live points for multinest\n convert_to_binary : bool, optional\n If true, converts the multinest output files to binary numpy files to save space.\n subset : list, optional\n Give a list of keys to run on a subset of the data\n \"\"\"\n\n if len(self.subset) == 0:\n files = glob.glob(os.path.join(self.filepath, 'ID*hdf5'))\n else:\n files = self.subset\n\n\n if nprocesses > 1:\n new_func = partial(_fit_object, output_dir=output_dir, save_to_hdf=save_to_hdf, delete_files=delete_files,\n n_live_points=n_live_points, log_file=self.log_file)\n p = Pool(nprocesses)\n p.map(new_func, files)\n\n else:\n for f in files:\n _fit_object(f, output_dir=output_dir, save_to_hdf=save_to_hdf, delete_files=delete_files,\n n_live_points=n_live_points, log_file=self.log_file)\n\n\nclass ChainAnalyser:\n \"\"\"\n Class with convenience functions to analyse multinest output.\n \"\"\"\n def __init__(self, filename, log_params=[4,5]):\n \"\"\"\n Multinest chain analysis class.\n\n Parameters\n ----------\n filename : str, optional\n The HDF5 file to read the chain and evidence from\n log_params : list, optional\n Which parameters were varied in log space and so should be exponentiated\n \"\"\"\n self.filename = filename\n self.log_params = log_params\n\n try:\n self.chain = pd.read_hdf(filename, 'chain').as_matrix()\n self.evidence = pd.read_hdf(filename, 'evidence')\n except KeyError:\n print('Chain not found in file', filename)\n raise KeyError\n\n self.param_names = ['v0', 'w_obs_20', 'w_obs_50', 'w_obs_peak', 'psi_obs_max', 'psi_obs_0', 'z']\n\n def convert_z(self, v):\n c = 3e5\n return -(v/(c+v))\n\n def p_of_z(self, delta_z=0, z_min=0, z_max=0, nbins=50, v0_ind=0, save_to_file=True):\n \"\"\"\n Function to return the marginalised probability density function of redshift for a given object.\n\n Parameters\n ----------\n delta_z : float, optional\n Approximate desired width of bin\n v0_ind : int, optional\n The column of the chain containing the v0 values\n save_to_file : bool, optional\n Whether or not to store the output back in the original hdf5 file\n\n Returns\n -------\n bins : array\n The mid points of the z bins\n pdf : array\n The values of the pdf at the corresponding z value\n\n \"\"\"\n c = 3e5 # Speed of light in km/s\n\n z = self.convert_z(self.chain[:, v0_ind])\n\n if z_max==0:\n if delta_z == 0:\n nbins = 25\n else:\n nbins = (int)((z.max() - z.min())/delta_z)\n bins = np.linspace(z.min(), z.max(), nbins)\n else:\n bins = np.linspace(z_min, z_max, nbins)\n pdf, bins = np.histogram(z, bins=bins)\n pdf = pdf/np.sum(pdf) # Normalise\n\n # We want to return the mid points of the bins\n new_bins = (bins[1:] + bins[:-1])/2\n\n if save_to_file:\n df = pd.DataFrame(data=np.column_stack((new_bins, pdf)), columns=['z', 'p(z)'])\n df.to_hdf(self.filename, 'p(z)')\n\n return new_bins, pdf\n\n def plot_p_of_z(self, delta_z=0, v0_ind=0, true_val=0, colour='#0057f6', smooth=False, rot=0):\n \"\"\"\n Plots P(z)\n Parameters\n ----------\n delta_z : float, optional\n Approximate desired width of bin\n v0_ind : int, optional\n The column of the chain containing the v0 values\n smooth : bool, optional\n Whether or not to smooth the resulting curve\n \"\"\"\n\n bins, pdf = self.p_of_z(delta_z=delta_z, v0_ind=v0_ind)\n if smooth:\n f = interp1d(bins, pdf, kind='cubic')\n newbins = np.linspace(bins.min(), bins.max(), 100)\n newpdf = f(newbins)\n plt.plot(newbins, newpdf, color=colour, lw=1.5)\n else:\n plt.plot(bins, pdf, 
color=colour, lw=1.5)\n\n plt.xticks(rotation=rot)\n plt.xlabel('z')\n plt.ylabel('P(z)')\n\n\n if true_val != 0:\n plt.plot([true_val, true_val], plt.gca().get_ylim(), lw=1.5, color='k')\n plt.tight_layout()\n\n\n\n def get_errors(self, x, max_post, perc):\n \"\"\"\n Returns the error estimates from a chain\n\n Parameters\n ----------\n x : numpy array\n Column of a chain\n max_post : float\n Maximum posterior point in x\n perc : float\n Which percentile to compute (68 for 1 sigma, 95 for 2 sigma, 99 for 3 sigma)\n\n Returns\n -------\n sig1, sig2\n The values of x corresponding the input percentile\n\n \"\"\"\n xnew = np.sort(x)\n x1 = xnew[xnew < max_post]\n x2 = xnew[xnew >= max_post]\n\n if len(x1) == 0:\n sig1 = max_post\n else:\n sig1 = np.percentile(x1, 100-perc)\n if len(x2) == 0:\n sig2 = max_post\n else:\n sig2 = np.percentile(x2, perc)\n\n return sig1, sig2\n\n def parameter_estimates(self, true_params=[], save_to_file=True):\n \"\"\"\n Returns the best fit estimate of the parameters and their uncertainties.\n\n Parameters\n ----------\n true_params : list-like, optional\n If the true parameters are supplied, add them to the output dataframe for ease of comparison\n save_to_file : bool, optional\n Whether or not to store the output back in the original hdf5 file\n\n Returns\n -------\n pd.DataFrame\n The parameter estimates (mean, median and maximum posterior) as well as the 16th and 84th percentiles\n (corresponding to upper and lower 1 sigma estimates for a Gaussian)\n\n \"\"\"\n\n z = self.convert_z(self.chain[:, 0])\n logpost = self.chain[:, -1]\n chain = np.column_stack((self.chain[:, :-1], z))\n\n parameters = pd.DataFrame(columns = ['Mean', 'Median', 'MAP', 'lower_1sigma', 'upper_1sigma','lower_2sigma',\n 'upper_2sigma','lower_3sigma', 'upper_3sigma'], index=self.param_names)\n\n parameters['Mean'] = np.mean(chain, axis=0)\n parameters['Median'] = np.median(chain, axis=0)\n maps = chain[np.argmax(logpost), :]\n parameters['MAP'] = maps\n\n lower1 = np.zeros(len(chain[0,:]))\n upper1 = np.zeros(len(chain[0,:]))\n lower2 = np.zeros(len(chain[0, :]))\n upper2 = np.zeros(len(chain[0, :]))\n lower3 = np.zeros(len(chain[0, :]))\n upper3 = np.zeros(len(chain[0, :]))\n\n for i in range(len(self.param_names)):\n lower1[i], upper1[i] = self.get_errors(chain[:,i], maps[i], 68)\n lower2[i], upper2[i] = self.get_errors(chain[:, i], maps[i], 95)\n lower3[i], upper3[i] = self.get_errors(chain[:, i], maps[i], 99.7)\n\n parameters['lower_1sigma'] = lower1\n parameters['upper_1sigma'] = upper1\n parameters['lower_2sigma'] = lower2\n parameters['upper_2sigma'] = upper2\n parameters['lower_3sigma'] = lower3\n parameters['upper_3sigma'] = upper3\n\n parameters.iloc[self.log_params, :] = np.exp(parameters.iloc[self.log_params, :])\n\n if len(true_params) != 0:\n true_z = self.convert_z(true_params[0])\n true_params = np.append(true_params, true_z)\n parameters['True'] = true_params\n\n if save_to_file:\n parameters.to_hdf(self.filename, 'summary')\n\n return parameters\n\n def triangle_plot(self, params=[], labels=[], true_vals=[], best_params=[], smooth=5e3, rot=0):\n \"\"\"\n Plots the triangle plot for a sampled chain.\n\n Parameters\n ----------\n params : list-like, optional\n List of indices of parameters, otherwise every column of chain is used\n labels : list-like, optional\n Labels for parameters\n true_vals : list-like, optional\n If provided, plots the true values on the histograms and contours\n best_params : list-like, optional\n List of lists for each parameter (mean, minus 
uncertainty, plus uncertainty) plotted on histograms\n        smooth : float, optional\n            Smoothing scale for the contours. Contour will raise a warning if this is too small. Set to 0 for no smoothing.\n        rot : float, optional\n            Rotation angle for the x axis tick labels (they often clash and need to be rotated)\n\n        Returns\n        -------\n\n        \"\"\"\n        contour_plot.triangle_plot(self.chain.copy(), params=params, labels=labels, true_vals=true_vals,\n                                   best_params=best_params, smooth=smooth, rot=rot)\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", "id": "431346", "language": "Python", "matching_score": 4.7106709480285645, "max_stars_count": 1, "path": "radio_z/hifit.py" }, { "content": "# Code to plot a contour from an MCMC chain\n# Author: <NAME> (2013)\n\nimport numpy as np\nimport matplotlib.pyplot as plt\nfrom scipy import interpolate\n\n\n#Finds the 95% and 68% confidence intervals, given a 2d histogram of the likelihood\ndef findconfidence(H):\n    H2 = H.ravel()\n    H2 = np.sort(H2)\n\n\n    # Loop through this flattened array until we find the value in the bin which contains 95% of the points\n    tot = sum(H2)\n    tot95=0\n    tot68=0\n\n    # Find the levels enclosing 95% and 68% of the points\n    for i in range(len(H2)):\n        tot95 += H2[i]\n        if tot95 >= 0.05*tot:\n            N95 = H2[i]\n            #print i\n            break\n\n    for i in range(len(H2)):\n        tot68 += H2[i]\n        if tot68>=0.32*tot:\n            N68 = H2[i]\n            break \n    return max(H2),N95,N68\n\n#Given a chain, labels and a list of which parameters to plot, plots the contours\n# Arguments:\n# chain=an array of the chain (not using weights, i.e. each row counts only once)\n# p= a list of integers: the two parameters you want to plot (refers to two columns in the chain)\n#kwargs: labels= the labels of the parameters (list of strings)\n# col=a tuple of the two colours for the contour plot\n# line=boolean whether or not to just do a line contour plot\ndef contour(chain,p,**kwargs):\n    binsize=50\n    if ('weights' in kwargs) & (len(kwargs['weights'])!=0):\n        H, xedges, yedges = np.histogram2d(chain[:,p[0]],chain[:,p[1]], weights=kwargs['weights'], bins=(binsize,binsize))\n    else:\n        H, xedges, yedges = np.histogram2d(chain[:,p[0]],chain[:,p[1]], bins=(binsize,binsize))\n    \n    x=[]\n    y=[]\n    z=[]\n    for i in range(len(xedges[:-1])):\n        for j in range(len(yedges[:-1])):\n            x.append(xedges[:-1][i])\n            y.append(yedges[:-1][j])\n            z.append(H[i, j])\n\n    if 'smooth' in kwargs:\n        SMOOTH=True\n        smth=kwargs['smooth']\n        if smth==0:\n            SMOOTH=False\n    else:\n        SMOOTH=True\n        smth=10e5\n    if SMOOTH:\n        sz=50\n        spl = interpolate.bisplrep(x, y, z, s=smth)\n        X = np.linspace(min(xedges[:-1]), max(xedges[:-1]), sz)\n        Y = np.linspace(min(yedges[:-1]), max(yedges[:-1]), sz)\n        Z = interpolate.bisplev(X, Y, spl)\n    else:\n        X=xedges[:-1]\n        Y=yedges[:-1]\n        Z=H\n    \n    #I think this is the weird thing I have to do to make the contours work properly\n    X1=np.zeros([len(X), len(X)])\n    Y1=np.zeros([len(X), len(X)])\n    for i in range(len(X)):\n        X1[ :, i]=X\n        Y1[i, :]=Y\n    X=X1\n    Y=Y1\n    \n    N100,N95,N68 = findconfidence(Z)\n\n    if 'col' in kwargs:\n        col=kwargs['col']\n    else:\n        col =('#a3c0f6','#0057f6') #A pretty blue\n    \n\n\n    if 'line' in kwargs and kwargs['line']==True:\n        plt.contour(X, Y,Z,levels=[N95,N68,N100],colors=col, linewidth=100)\n    else:\n        plt.contourf(X, Y,Z,levels=[N95,N68,N100],colors=col)\n    if 'labels' in kwargs:\n        labels=kwargs['labels']\n        plt.xlabel(labels[0],fontsize=22)\n        plt.ylabel(labels[1],fontsize=22)\n    #plt.show()\n\ndef triangle_plot(chain,params=[],labels=[],true_vals=[],best_params=[],smooth=5e3,weights=[],rot=0):\n    \"\"\"\n    Plots the triangle plot for a 
sampled chain.\n    chain = Input chain\n    params = List of indices of parameters, otherwise every column of chain is used\n    labels = Labels for parameters\n    true_vals = If provided, plots the true values on the histograms and contours\n    best_params = List of lists for each parameter (mean, minus uncertainty, plus uncertainty) plotted on histograms\n    smooth = Smoothing scale for the contours. Contour will raise a warning if this is too small. Set to 0 for no smoothing.\n    weights = If the chain needs reweighting before histogramming\n    rot = Rotation of labels for plots\n    \"\"\"\n    fntsz=18\n    if len(params)==0:\n        #If a list of parameter indices is not explicitly given, assume we plot all columns of chain except the last\n        # (assumed to be likelihood)\n        params=range(len(chain[0,:-1]))\n    if len(labels)==0:\n        labels=['%d' %i for i in range(len(params))]\n\n\n    for i in range(len(params)):\n        plt.subplot(len(params),len(params),i*(len(params)+1)+1)\n        #Plot the histograms\n        if len(weights)!=0:\n            plt.hist(chain[:,params[i]],25,weights=weights,facecolor='#a3c0f6')\n        else:\n            plt.hist(chain[:,params[i]],25,facecolor='#a3c0f6')\n        if len(true_vals)!=0:\n            plt.plot([true_vals[i],true_vals[i]],plt.gca().get_ylim(),'k',lw=2.5)\n        if len(best_params)!=0:\n            plt.plot([best_params[i][0],best_params[i][0]],plt.gca().get_ylim(),'r',lw=2.5)\n            plt.plot([best_params[i][0]+best_params[i][2],best_params[i][0]+best_params[i][2]],plt.gca().get_ylim(),'r--',lw=2.5)\n            plt.plot([best_params[i][0]-best_params[i][1],best_params[i][0]-best_params[i][1]],plt.gca().get_ylim(),'r--',lw=2.5)\n        plt.ticklabel_format(style='sci',scilimits=(-3,5))\n        plt.xticks(rotation=rot)\n\n        #Plot the contours\n        for j in range(0,i):\n            plt.subplot(len(params),len(params),i*(len(params))+j+1)\n            contour(chain,[params[j],params[i]],smooth=smooth,weights=weights)\n            if len(true_vals)!=0:\n                plt.plot([true_vals[j]],[true_vals[i]],'*k',markersize=10)\n            plt.ticklabel_format(style='sci',scilimits=(-3,5))\n            plt.xticks(rotation=rot)\n    plt.ticklabel_format()\n    plt.tight_layout()\n\n\n    for i in range(len(params)):\n        ax=plt.subplot(len(params),len(params),len(params)*(len(params)-1)+i+1)\n        ax.set_xlabel(labels[i])\n        ax=plt.subplot(len(params),len(params),i*len(params)+1)\n        ax.set_ylabel(labels[i])\n    plt.tight_layout()\n\n##Testing all functionality\n#c=np.loadtxt('chain_2d_gaussian.txt')\n#contour(c,[0,1], labels=['1', '2'],line=False)\n#plt.show()\n", "id": "7275215", "language": "Python", "matching_score": 0.6729071736335754, "max_stars_count": 1, "path": "radio_z/contour_plot.py" }, { "content": "from setuptools import setup\n\nsetup(\n    name = 'radio_z',\n    version = '1.0',\n    packages = ['radio_z'],\n    url = '',\n    license = 'MIT',\n    author = '<NAME>',\n    author_email = '<EMAIL>',\n    description = 'Package for fitting HI lines with Bayesian inference'\n)\n", "id": "10414568", "language": "Python", "matching_score": 0.019116759300231934, "max_stars_count": 1, "path": "setup.py" } ]
3.913372
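The radio_z files in the row above repeatedly convert between velocity, observed frequency, and redshift (Survey.v2nu, Survey.nu2v, ChainAnalyser.convert_z). As a minimal, self-contained sketch of that arithmetic — the helper names below deliberately mirror the methods shown above, but this script is illustrative and not part of the repository:

import numpy as np

C_KMS = 3.0e5     # speed of light in km/s, the constant used throughout the code above
NU_REST = 1.42e3  # HI rest-frame frequency in MHz, the default nu_rest above

def v2nu(v, nu_rest=NU_REST):
    # Observed frequency from velocity: nu = (v/c + 1) * nu_rest
    return (v / C_KMS + 1.0) * nu_rest

def nu2v(nu, nu_rest=NU_REST):
    # Velocity from observed frequency: v = c * (nu/nu_rest - 1)
    return C_KMS * (nu / nu_rest - 1.0)

def convert_z(v):
    # Redshift from the fitted line centre v0, as in ChainAnalyser.convert_z
    return -(v / (C_KMS + v))

v = np.linspace(-5.0e4, 0.0, 5)       # example velocities in km/s
assert np.allclose(nu2v(v2nu(v)), v)  # the two conversions are exact inverses
print(convert_z(v))                   # non-negative redshifts for v <= 0

The last line makes the sign convention explicit: negative fitted velocities map to positive redshifts, which is why FitData clamps the upper prior bound on v0 at zero ("Redshift can't be less than zero").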
ToxicDeathX
[ { "content": "import ssl\n\nfrom aiohttp.client_exceptions import ClientConnectorError, ServerDisconnectedError\nfrom aiohttp_retry import RetryClient\n\n\nclass eskom_interface:\n \"\"\"Interface class to obtain loadshedding information using the Eskom API\"\"\"\n\n def __init__(self):\n \"\"\"Initializes class parameters\"\"\"\n\n self.base_url = \"https://loadshedding.eskom.co.za/LoadShedding\"\n self.headers = {\n \"user_agent\": \"Mozilla/5.0 (X11; Linux x86_64; rv:69.0) Gecko/20100101 Firefox/69.0\"\n }\n self.ssl_context = ssl.create_default_context()\n self.ssl_context.set_ciphers(\"DEFAULT@SECLEVEL=1\")\n\n async def async_query_api(self, endpoint, payload=None):\n \"\"\"Queries a given endpoint on the Eskom loadshedding API with the specified payload\n\n Args:\n endpoint (string): The endpoint of the Eskom API\n payload (dict, optional): The parameters to apply to the query. Defaults to None.\n\n Returns:\n The response object from the request\n \"\"\"\n async with RetryClient() as client:\n # The Eskom API occasionally drops incoming connections, implement reies\n async with client.get(\n url=self.base_url + endpoint,\n headers=self.headers,\n params=payload,\n ssl=self.ssl_context,\n retry_attempts=50,\n retry_exceptions={\n ClientConnectorError,\n ServerDisconnectedError,\n ConnectionError,\n OSError,\n },\n ) as res:\n return await res.json()\n\n async def async_get_stage(self, attempts=5):\n \"\"\"Fetches the current loadshedding stage from the Eskom API\n\n Args:\n attempts (int, optional): The number of attempts to query a sane value from the Eskom API. Defaults to 5.\n\n Returns:\n The loadshedding stage if the query succeeded, else `None`\n \"\"\"\n\n # Placeholder for returned loadshedding stage\n api_result = None\n\n # Query the API until a sensible (> 0) value is received, or the number of attempts is exceeded\n for attempt in range(attempts):\n res = await self.async_query_api(\"/GetStatus\")\n\n # Check if the API returned a valid response\n if res:\n # Store the response\n api_result = res\n\n # Only return the result if the API returned a non-negative stage, otherwise retry\n if int(res) > 0:\n # Return the current loadshedding stage by subtracting 1 from the query result\n return int(res) - 1\n\n if api_result:\n # If the API is up but returning \"invalid\" stages (< 0), simply return 0\n return 0\n else:\n # If the API the query did not succeed after the number of attempts has been exceeded, raise an exception\n raise Exception(\n f\"Error, no response received from API after {attempts} attempts\"\n )\n\n async def async_get_data(self):\n \"\"\"Fetches data from the loadshedding API\"\"\"\n stage = await self.async_get_stage()\n data = {\n \"data\": {\"stage\": stage},\n }\n return data\n", "id": "7107217", "language": "Python", "matching_score": 0.747407853603363, "max_stars_count": 25, "path": "custom_components/eskom_loadshedding/eskom_interface.py" }, { "content": "\"\"\"Sensor platform for Eskom Loadshedding Interface.\"\"\"\nfrom .const import (\n DEFAULT_NAME,\n DOMAIN,\n ICON,\n SENSOR,\n)\nfrom .entity import EskomEntity\n\n\nasync def async_setup_entry(hass, entry, async_add_devices):\n \"\"\"Setup sensor platform.\"\"\"\n coordinator = hass.data[DOMAIN][entry.entry_id]\n async_add_devices([EskomStageSensor(coordinator, entry)])\n\n\nclass EskomStageSensor(EskomEntity):\n \"\"\"Eskom Stage Sensor class.\"\"\"\n\n @property\n def name(self):\n \"\"\"Return the name of the sensor.\"\"\"\n return f\"{DEFAULT_NAME}_stage\"\n\n @property\n def 
state(self):\n \"\"\"Return the state of the sensor.\"\"\"\n return self.coordinator.data.get(\"stage\")\n\n @property\n def icon(self):\n \"\"\"Return the icon of the sensor.\"\"\"\n return ICON\n", "id": "2126154", "language": "Python", "matching_score": 2.9021034240722656, "max_stars_count": 25, "path": "custom_components/eskom_loadshedding/sensor.py" }, { "content": "\"\"\"Constants for eskom loadshedding interface\"\"\"\n# Base component constants\nNAME = \"Eskom Loadshedding Interface\"\nDOMAIN = \"eskom_loadshedding\"\nDOMAIN_DATA = f\"{DOMAIN}_data\"\nVERSION = \"1.0.5\"\n\nISSUE_URL = \"https://github.com/swartjean/ha-eskom-loadshedding/issues\"\n\n# Icons\nICON = \"mdi:lightning-bolt\"\n\n# Platforms\nSENSOR = \"sensor\"\nPLATFORMS = [SENSOR]\n\n# Configuration and options\nCONF_ENABLED = \"enabled\"\nCONF_SCAN_PERIOD = \"scan_period\"\n\n# Defaults\nDEFAULT_SCAN_PERIOD = 900\nMIN_SCAN_PERIOD = 300\n\n# Defaults\nDEFAULT_NAME = DOMAIN\n\n\nSTARTUP_MESSAGE = f\"\"\"\n-------------------------------------------------------------------\n{NAME}\nVersion: {VERSION}\nWelcome to the Eskom Loadshedding Interface!\nIf you have any issues with this you need to open an issue here:\n{ISSUE_URL}\n-------------------------------------------------------------------\n\"\"\"\n", "id": "458533", "language": "Python", "matching_score": 2.4085278511047363, "max_stars_count": 0, "path": "custom_components/eskom_loadshedding/const.py" } ]
2.408528
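The eskom_loadshedding integration above funnels all API access through the async eskom_interface class, whose async_get_stage retries until the API reports a sensible (positive) stage. A hypothetical usage sketch — the import path below matches the file layout shown above but assumes the custom_components tree happens to be importable as a package, and a live connection to the Eskom endpoint is required:

import asyncio

# Hypothetical import; mirrors custom_components/eskom_loadshedding/eskom_interface.py above.
from custom_components.eskom_loadshedding.eskom_interface import eskom_interface

async def main():
    interface = eskom_interface()
    # async_get_data() wraps async_get_stage() and returns {"data": {"stage": <int>}}
    data = await interface.async_get_data()
    print("Current loadshedding stage:", data["data"]["stage"])

asyncio.run(main())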
openshift-psap
[ { "content": "import logging\nimport sys\n\nimport wdm\n\nimport wdm.model as model\nimport wdm.env_config as env_config\nimport wdm.run as run\n\ndef do_test(dep, print_first_test=True):\n if not dep.spec.test:\n if dep.spec.install:\n if print_first_test:\n logging.debug(f\"Nothing to test for '{dep.name}'. Has install tasks, run them.\")\n success = False\n else:\n if print_first_test:\n logging.debug(f\"Nothing to test for '{dep.name}'. Doesn't have install tasks, we're good.\")\n success = True\n return success\n\n for task in dep.spec.test:\n if print_first_test:\n logging.debug(f\"Testing '{dep.name}' ...\")\n print_first_test = False\n\n success = run.run(dep, task, is_test=True)\n\n wdm.state.tested[f\"{dep.name} -> {task.name}\"] = success\n if success:\n return True\n\n return success # False or None\n\n\ndef resolve_task_requirement(dep, requirement_name):\n prefix = wdm.state.dependency_prefixes[dep.name]\n\n next_dep = None\n for name in f\"{prefix}{requirement_name}\", requirement_name:\n try: next_dep = wdm.state.dependencies[name]\n except KeyError: pass\n\n if next_dep is None:\n pfix = f\"[{prefix}]\" if prefix else \"\"\n logging.error(f\"Missing required dependency: {pfix}{requirement_name}\")\n sys.exit(1)\n\n return resolve(next_dep)\n\n\ndef resolve_task_config_requirement(dep, config_requirements):\n kv = env_config.get_configuration_kv(dep)\n\n missing = [config_key for config_key in config_requirements if config_key not in kv]\n for config_key in missing:\n logging.error(f\"Missing required configuration dependency: {config_key}\")\n\n if missing:\n logging.info(f\"Available configuration keys: {', '.join(kv.keys())}\")\n sys.exit(1)\n\ndef resolve(dep):\n logging.info(f\"Resolving '{dep.name}' dependency ...\")\n\n if dep.name in wdm.state.resolved:\n logging.info(f\"Dependency '{dep.name}' has already need resolved, skipping.\")\n return\n\n if dep.spec.configuration:\n resolve_task_config_requirement(dep, dep.spec.configuration)\n\n for req in dep.spec.requirements or []:\n logging.info(f\"Dependency '{dep.name}' needs '{req}' ...\")\n resolve_task_requirement(dep, req)\n\n if do_test(dep) == True:\n if dep.spec.test:\n if dep.spec.install:\n logging.debug( f\"Dependency '{dep.name}' is satisfied, no need to install.\")\n else:\n logging.debug( f\"Dependency '{dep.name}' is satisfied (and it cannot be installed).\")\n\n elif wdm.state.wdm_mode == \"test\":\n\n for task in dep.spec.install:\n logging.debug(f\"Running in {'test' if wdm.state.wdm_mode == 'test' else 'dry'} mode, \"\n f\"skipping {task.name} installation.\")\n wdm.state.installed[f\"{dep.name} -> {task.name}\"] = True\n else:\n first_install = True\n for task in dep.spec.install or []:\n if first_install:\n first_install = False\n logging.info(f\"Installing '{dep.name}' ...\")\n\n if run.run(dep, task, is_test=False) == False:\n logging.error(f\"Installation of '{dep.name}' failed.\")\n sys.exit(1)\n\n wdm.state.installed[f\"{dep.name} -> {task.name}\"] = True\n\n if first_install and wdm.state.wdm_mode != \"dryrun\":\n # no install task available\n logging.error(f\"'{dep.name}' test failed, but no install script provided.\")\n sys.exit(1)\n\n if do_test(dep, print_first_test=False) == False:\n if dep.spec.test:\n logging.error(f\"'{dep.name}' installed, but test still failing.\")\n sys.exit(1)\n\n logging.info(f\"'{dep.name}' installed, but has no test. 
Continuing nevertheless.\")\n\n\n    wdm.state.resolved.add(dep.name)\n    logging.info(f\"Done with '{dep.name}'.\\n\")\n", "id": "437419", "language": "Python", "matching_score": 2.123530626296997, "max_stars_count": 1, "path": "subprojects/wdm/wdm/resolve.py" }, { "content": "import os\nimport sys\nimport logging\nimport wdm\n\ndef update_env_with_env_files():\n    \"\"\"\n    Overrides the function default args with the flags found in the environment variables files\n    \"\"\"\n    for env in \".wdm_env\", \".wdm_env.generated\":\n        try:\n            with open(env) as f:\n                for line in f.readlines():\n                    key, found , value = line.strip().partition(\"=\")\n                    if not found:\n                        logging.warning(f\"Invalid line in {env}: {line.strip()}\")\n                        continue\n                    if key in os.environ: continue # prefer env to env file\n                    os.environ[key] = value\n        except FileNotFoundError: pass # ignore missing files\n\n\ndef update_kwargs_with_env(kwargs):\n    # override the function default args with the flags found in the environment variables\n\n    for flag, current_value in kwargs.items():\n        if current_value: continue # already set, ignore.\n\n        env_value = os.environ.get(f\"WDM_{flag.upper()}\")\n        if not env_value: continue # not set, ignore.\n        kwargs[flag] = env_value # override the function arg with the environment variable value\n\n\ndef get_config_from_kv_file(config_file):\n    kv = {}\n    with open(config_file) as f:\n        for line in f.readlines():\n            key, found , value = line.strip().partition(\"=\")\n            if not found:\n                logging.warning(f\"Invalid line in {config_file}: {line.strip()}\")\n                continue\n            if key in kv:\n                logging.warning(f\"Duplicated entry in {config_file}: {key}. \"\n                                \"Keeping only the first entry.\")\n                continue\n\n            kv[key] = value\n\n    return kv\n\n\ndef get_config_from_cli(cli_arg):\n    config_kv = {}\n    for kv in cli_arg.split(\",\"):\n        key, found, value = kv.partition(\"=\")\n        if not found:\n            logging.error(\"Found an invalid configuration entry in the command-line: %s\", kv)\n            sys.exit(1)\n        config_kv[key] = value\n    return config_kv\n\n\ndef get_configuration_kv(dep):\n    config_sources = [\n        wdm.state.cli_configuration,\n        wdm.state.dep_file_configuration,\n        wdm.state.cfg_file_configuration,\n        dep.config_values if dep else {},\n    ]\n\n    kv = {}\n    for src in config_sources:\n        for k, v in (src or {}).items():\n            if v is None: continue\n            kv[k] = v\n\n    return kv\n\ndef get_task_configuration_kv(dep, task):\n    all_kv = get_configuration_kv(dep)\n    if dep.config_values:\n        all_kv.update(dep.config_values)\n\n    config_requirements = []\n    config_requirements += (dep.spec.configuration or [])\n    config_requirements += (task and task.configuration or [])\n    config_requirements += (dep.config_values or [])\n\n    kv = {}\n    for key in config_requirements:\n        value = None\n\n        try: value = all_kv[key]\n        except KeyError:\n            raise KeyError(f\"Could not find a value for the configuration key '{key}'\")\n\n        kv[key] = value\n\n    return kv\n", "id": "1292032", "language": "Python", "matching_score": 2.0354230403900146, "max_stars_count": 1, "path": "subprojects/wdm/wdm/env_config.py" }, { "content": "import logging\nimport pathlib\nimport sys\n\nimport wdm\nimport wdm.model as model\nimport wdm.env_config as env_config\nimport wdm.populate as populate\nimport wdm.resolve as resolve\n\ndef print_summary():\n    logging.info(\"All done.\\n\")\n\n    if wdm.state.wdm_mode in (\"dryrun\",):\n        logging.info(\"Would have tested:\")\n    else:\n        logging.info(\"Tested:\")\n\n    for taskname, success in wdm.state.tested.items():\n        logging.info(f\"- {'☑ ' if success else ('' if success is None else '❎ 
')}{taskname}\")\n\n if wdm.state.installed:\n if wdm.state.wdm_mode in (\"test\", \"dryrun\"):\n logging.info(\"Would have installed:\")\n else:\n logging.info(\"Installed:\")\n [logging.info(f\"- {taskname}\") for taskname in wdm.state.installed]\n else:\n if wdm.state.wdm_mode in (\"test\", \"dryrun\"):\n logging.info(\"Would have installed: nothing.\")\n else:\n logging.info(\"Installed: nothing.\")\n\n\ndef has_failures():\n has_test_failures = False\n for taskname, success in wdm.state.tested.items():\n if success == False: return True\n\n return False\n\n\ndef wdm_main(wdm_mode, cli_args):\n wdm.state.wdm_mode = wdm_mode\n wdm.state.cli_args = cli_args\n\n cli_config = wdm.state.cli_args[\"config\"]\n if cli_config:\n wdm.state.cli_configuration = env_config.get_config_from_cli(cli_config)\n\n subproject_dirname = pathlib.Path(__file__).resolve().parent.parent\n for filename in (subproject_dirname / \"predefined.d\").glob(\"*\"):\n populate.populate_predefined_tasks(filename, wdm.state.predefined_tasks)\n\n for filename in (subproject_dirname / \"library.d\").glob(\"*\"):\n _name, _, ext = filename.name.rpartition(\".\")\n\n name = _name if ext in (\"yaml\", \"yml\") else filename.name\n prefix = f\"library.{name}.\"\n populate.populate_dependencies(filename, wdm.state.dependencies, wdm.state.dependency_prefixes, prefix=prefix)\n\n if wdm.state.cli_args[\"library\"]:\n if not wdm.state.cli_args.get(\"target\") and wdm.state.wdm_mode != \"list\":\n logging.error(\"Flag 'target' cannot be empty when 'dependency-file' is set to 'library'\")\n sys.exit(1)\n\n config_file = wdm.state.cli_args[\"config_file\"]\n if not config_file:\n config_file = pathlib.Path(\".wdm_config\")\n if not config_file.is_file():\n config_file = \"no\"\n\n\n if config_file != \"no\":\n config_file = pathlib.Path(config_file)\n if not config_file.is_file():\n logging.error(f\"Flag 'config_file' must point to a valid dependency file (config_file={config_file})\")\n sys.exit(1)\n wdm.state.cfg_file_configuration = env_config.get_config_from_kv_file(config_file)\n\n file_first_target = None\n\n wdm_dependency_file = wdm.state.cli_args[\"dependency_file\"]\n if pathlib.Path(wdm_dependency_file).is_file():\n file_first_target = populate.populate_dependencies(wdm_dependency_file,\n wdm.state.dependencies,\n wdm.state.dependency_prefixes,\n prefix=\"\",\n file_configuration=wdm.state.dep_file_configuration)\n elif not wdm.state.cli_args[\"library\"]:\n logging.error(f\"Flag 'dependency_file' must point to a valid file (dependency_file='{wdm_dependency_file}'), or enable the 'library' flag to pickup the main target from the library files only.\")\n sys.exit(2)\n\n if wdm.state.wdm_mode == \"list\":\n for target in wdm.state.dependencies:\n print(f\"- {target}\")\n sys.exit(0)\n\n target = wdm.state.cli_args.get(\"target\") or file_first_target\n dependency = wdm.state.dependencies.get(target)\n if not dependency:\n logging.error(f\"Main dependency '{target}' does not exist.\")\n sys.exit(1)\n\n resolve.resolve(dependency)\n\n print_summary()\n\n if has_failures():\n logging.warning(\"Test failed, exit with errcode=1.\")\n sys.exit(1)\n", "id": "8104258", "language": "Python", "matching_score": 3.8866491317749023, "max_stars_count": 1, "path": "subprojects/wdm/wdm/main.py" }, { "content": "class State:\n def __init__(self):\n self.dependencies = {}\n self.predefined_tasks = {}\n\n self.resolved = set()\n\n self.tested = dict()\n self.installed = dict()\n\n self.dep_file_configuration = {}\n self.cfg_file_configuration = 
{}\n        self.cli_configuration = {}\n\n        self.wdm_mode = None\n\n        self.cli_args = None\n\n        self.dependency_prefixes = {}\n\nstate = State()\n", "id": "3508196", "language": "Python", "matching_score": 0.5911654233932495, "max_stars_count": 1, "path": "subprojects/wdm/wdm/__init__.py" }, { "content": "import yaml\nimport logging\nimport sys\n\nimport pydantic\n\nimport wdm.model as model\nimport wdm.env_config as env_config\n\ndef populate_predefined_tasks(filepath, predefined_tasks):\n    with open(filepath) as f:\n        docs = list(yaml.safe_load_all(f))\n\n    class Model(pydantic.BaseModel):\n        task: model.TaskModels\n\n    for doc in docs:\n        if doc is None: continue # empty block\n        try:\n            obj = Model.parse_obj(dict(task=doc))\n            task = obj.task\n        except pydantic.error_wrappers.ValidationError as e:\n            logging.error(f\"Failed to parse the YAML predefined file: {e}\")\n            logging.info(\"Faulty YAML entry:\\n\" + yaml.dump(doc))\n            sys.exit(1)\n\n        if task.name in predefined_tasks:\n            logging.warning(f\"Predefined task '{task.name}' already known. Keeping only the first one.\")\n            continue\n\n        predefined_tasks[task.name] = task\n\ndef populate_dependencies(filepath, dependencies, dependency_prefixes,\n                          *, prefix, file_configuration=None):\n    with open(filepath) as f:\n        docs = list(yaml.safe_load_all(f))\n\n    first_target = None\n    for doc in docs:\n        if doc is None: continue # empty block\n\n        try: obj = model.DependencyModel.parse_obj(doc)\n        except pydantic.error_wrappers.ValidationError as e:\n            logging.error(f\"Failed to parse the YAML dependency file '{filepath}': {e}\")\n            logging.info(\"Faulty YAML entry:\\n\" + yaml.dump(doc))\n            sys.exit(1)\n\n        if not obj.spec:\n            if file_configuration:\n                logging.error(\"File configuration already populated ...\")\n                sys.exit(1)\n            if obj.config_values and file_configuration is None:\n                logging.error(\"Library file '%s' cannot have a file 'configuration-values' field.\", filepath)\n                sys.exit(1)\n            for k, v in obj.config_values.items():\n                if v is not None: continue\n                if k in env_config.get_configuration_kv(None):\n                    continue\n\n                logging.error(f\"Configuration key '{k}' not provided ...\")\n                sys.exit(1)\n\n\n            file_configuration.update(obj.config_values)\n            continue\n\n        obj.name = f\"{prefix}{obj.name}\"\n        dependencies[obj.name] = obj\n        dependency_prefixes[obj.name] = prefix\n\n        if not first_target:\n            first_target = obj.name\n\n    return first_target\n", "id": "11646366", "language": "Python", "matching_score": 2.5244786739349365, "max_stars_count": 1, "path": "subprojects/wdm/wdm/populate.py" }, { "content": "import enum\nimport typing\n\nimport pydantic\n\nclass TaskType(str, enum.Enum):\n    shell = 'shell'\n    ansible = 'ansible'\n    toolbox = 'toolbox'\n    predefined = 'predefined'\n\nclass TaskAbstractModel(pydantic.BaseModel):\n    name: str\n    type: TaskType\n    configuration: typing.List[str] = None\n\n# ---\n\nclass ToolboxTaskSpecModel(pydantic.BaseModel, extra=pydantic.Extra.forbid):\n    group: str\n    command: str\n    args: typing.List[str] = None\n\nclass ToolboxTaskModel(TaskAbstractModel, extra=pydantic.Extra.forbid):\n    type: str = pydantic.Field(TaskType.toolbox.value, const=True)\n    spec: ToolboxTaskSpecModel\n\n# ---\n\nclass ShellTaskModel(TaskAbstractModel, extra=pydantic.Extra.forbid):\n    type: str = pydantic.Field(TaskType.shell.value, const=True)\n    spec: str\n\n# ---\n\nclass AnsibleTaskModel(TaskAbstractModel, extra=pydantic.Extra.forbid):\n    type: str = pydantic.Field(TaskType.ansible.value, const=True)\n    spec: typing.List[dict]\n\n# ---\n\nclass PredefinedSpecTaskModel(pydantic.BaseModel, 
extra=pydantic.Extra.forbid):\n name: str\n args: typing.Dict[str, str]\n\nclass PredefinedTaskModel(TaskAbstractModel, extra=pydantic.Extra.forbid):\n type: str = pydantic.Field(TaskType.predefined.value, const=True)\n spec: PredefinedSpecTaskModel\n\n# ---\n\nTaskModels = typing.Union[ShellTaskModel, AnsibleTaskModel, PredefinedTaskModel, ToolboxTaskModel]\n\nclass DependencySpecModel(pydantic.BaseModel, extra=pydantic.Extra.forbid):\n requirements: typing.List[str] = None\n configuration: typing.List[str] = None\n test: typing.List[TaskModels] = None\n install: typing.List[TaskModels] = None\n\nclass DependencyModel(pydantic.BaseModel, extra=pydantic.Extra.forbid):\n \"\"\"\n This is the description of a dependency object\n \"\"\"\n\n name: str\n config_values: typing.Dict[str, typing.Union[str, None]] = None\n spec: DependencySpecModel = None\n", "id": "7800452", "language": "Python", "matching_score": 1.8023278713226318, "max_stars_count": 1, "path": "subprojects/wdm/wdm/model.py" }, { "content": "import selectors\nimport tempfile\nimport subprocess\nimport os, sys\nimport logging\nimport yaml\nimport pathlib\n\nimport wdm\nimport wdm.env_config as env_config\nimport wdm.model as model\n\ndef run(dep, task, *, is_test):\n logging.debug(f\"Running %s task '{task.name}' ...\", \"test\" if is_test else \"install\")\n\n try:\n fn = TaskTypeFunctions[task.type]\n except KeyError:\n logging.error(f\"Unknown task type: {task.type}.\")\n sys.exit(1)\n\n success = fn(dep, task, is_test=is_test)\n\n if success != None:\n logging.info(\"%s of '%s': %s\", \"Testing\" if is_test else \"Installation\", task.name, \"Success\" if success else \"Failed\")\n\n return success\n\n\ndef subprocess_stdout_to_log(proc, prefix):\n sel = selectors.DefaultSelector()\n sel.register(proc.stdout, selectors.EVENT_READ)\n sel.register(proc.stderr, selectors.EVENT_READ)\n\n prefix = f\"{prefix} |\"\n pending = dict(stdout=\"\", stderr=\"\")\n while True:\n for key, _ in sel.select():\n read = key.fileobj.read1\n\n # remove when Py 3.6 is not supported anymore:\n if sys.version_info.minor <= 6:\n read = key.fileobj.read\n\n data = read().decode()\n if not data:\n for out in pending.keys():\n if pending[out]:\n logging.debug(\"%s %s\", prefix, pending[out])\n return\n\n is_stdout = key.fileobj is proc.stdout\n is_std = dict(stdout=is_stdout, stderr=not is_stdout)\n\n for line in data.split(\"\\n\")[:-1]:\n for out in pending.keys():\n if is_std[out] and pending[out]:\n line = f\"{pending[out]}{line}\"\n pending[out] = \"\"\n\n logging.debug(\"%s %s\", prefix, line)\n\n unfinished_line = data.rpartition(\"\\n\")[-1]\n for out in pending.keys():\n if is_std[out] and unfinished_line:\n pending[out] = unfinished_line\n\ndef run_ansible(dep, task, *, is_test):\n play = [\n dict(name=f\"Run {task.name}\",\n connection=\"local\",\n gather_facts=False,\n hosts=\"localhost\",\n tasks=task.spec,\n )\n ]\n\n repo_ansible_config = pathlib.Path(__file__).parent.parent.parent.parent / \"config\" / \"ansible.cfg\"\n cli_ansible_config = wdm.state.cli_args.get(\"ansible_config\")\n ENV_ANSIBLE_CONFIG = \"ANSIBLE_CONFIG\"\n\n env = os.environ.copy()\n\n if cli_ansible_config:\n env[ENV_ANSIBLE_CONFIG] = cli_ansible_config\n\n elif ENV_ANSIBLE_CONFIG not in env:\n env[ENV_ANSIBLE_CONFIG] = repo_ansible_config\n\n sys.stdout.flush()\n sys.stderr.flush()\n\n tmp = tempfile.NamedTemporaryFile(\"w+\", dir=os.getcwd(), delete=False)\n yaml.dump(play, tmp)\n tmp.close()\n\n cmd = [\"ansible-playbook\", tmp.name]\n\n for key, value in 
env_config.get_task_configuration_kv(dep, task).items():\n logging.debug(f\"[ansible] extra var: %s=%s\", key, value)\n cmd += [\"--extra-vars\", f\"{key}={value}\"]\n\n logging.debug(\"[ansible] command: %s\", \" \".join(cmd))\n\n if wdm.state.wdm_mode == \"dryrun\":\n logging.info(\"Dry mode, skipping execution.\")\n return None\n\n try:\n proc = subprocess.Popen(cmd,\n stdout=subprocess.PIPE, stderr=subprocess.PIPE,\n env=env, stdin=subprocess.PIPE)\n subprocess_stdout_to_log(proc, prefix=task.name)\n proc.wait()\n ret = proc.returncode\n except KeyboardInterrupt:\n print()\n logging.error(f\"Task '{task.name}' was interrupted ...\")\n sys.exit(1)\n finally:\n try: os.remove(tmp.name)\n except FileNotFoundError: pass # ignore\n return ret == 0\n\ndef run_shell(dep, task, *, is_test):\n logging.debug(f\"[shell] Running '{task.name}' ...\")\n\n cmd = task.spec.strip()\n\n env = os.environ.copy()\n\n for key, value in env_config.get_task_configuration_kv(dep, task).items():\n env[key] = value\n logging.debug(f\"[shell] env %s=%s\", key, value)\n\n for line in cmd.split(\"\\n\"):\n logging.debug(f\"[shell] %s\", line)\n\n sys.stdout.flush()\n sys.stderr.flush()\n\n popen_cmd = [\"bash\", \"-cxeuo\", \"pipefail\", cmd]\n\n if wdm.state.wdm_mode == \"dryrun\":\n logging.info(\"Dry mode, skipping execution.\")\n return None\n\n try:\n proc = subprocess.Popen(popen_cmd,\n env=env,\n stdout=subprocess.PIPE, stderr=subprocess.PIPE,\n stdin=subprocess.PIPE)\n\n subprocess_stdout_to_log(proc, prefix=task.name)\n proc.wait()\n ret = proc.returncode\n except KeyboardInterrupt:\n logging.error(f\"Task '{task}' was interrupted ...\")\n sys.exit(1)\n\n return ret == 0\n\ndef run_predefined(_dep, task, *, is_test):\n try:\n predefined_task = wdm.state.predefined_tasks[task.spec.name].copy()\n except KeyError:\n logging.error(f\"{_dep.name}/{task.name}: Could not find predefined task {task.spec.name}\")\n logging.info(\"Available predefined tasks: %s\", \", \".join(wdm.state.predefined_tasks))\n sys.exit(1)\n\n predefined_task.name = f\"{task.name} | predefined({task.spec.name})\"\n\n logging.debug(f\"[predefined] Running '{predefined_task.name}' ...\")\n dep = _dep.copy()\n\n if not dep.config_values:\n dep.config_values = {}\n dep.config_values.update(task.spec.args)\n\n return run(dep, predefined_task, is_test=is_test)\n\ndef run_toolbox(dep, task, *, is_test):\n try:\n predefined_toolbox_task = wdm.state.predefined_tasks[\"run_toolbox\"]\n except KeyError:\n logging.error(\"Could not find the task 'run_toolbox' in the predefined tasks. 
\"\n \"That's unexpected ...\")\n logging.info(\"Available predefined tasks: %s\", \", \".join(wdm.state.predefined_tasks.keys()))\n sys.exit(1)\n\n task_config = env_config.get_task_configuration_kv(dep, task)\n def apply_config(val):\n for key, value in task_config.items():\n val = val.replace(f\"${key}\", value)\n val = val.replace(f\"${{{key}}}\", value)\n\n return val\n\n toolbox_dir = pathlib.Path(__file__).parent.parent.parent.parent\n\n obj = dict(\n name=f\"{task.name} | toolbox()\",\n configuration=task.configuration,\n spec=dict(\n name=predefined_toolbox_task.name,\n args=dict(\n group=apply_config(task.spec.group),\n command=apply_config(task.spec.command),\n args=apply_config(\" \".join(task.spec.args or [])),\n toolbox_dir=str(toolbox_dir),\n )\n )\n )\n toolbox_task = model.PredefinedTaskModel.parse_obj(obj)\n\n logging.debug(f\"[toolbox] Running '{toolbox_task.name}' ...\")\n return run_predefined(dep, toolbox_task, is_test=is_test)\n\nTaskTypeFunctions = {\n model.TaskType.shell: run_shell,\n model.TaskType.ansible: run_ansible,\n model.TaskType.predefined: run_predefined,\n model.TaskType.toolbox: run_toolbox,\n}\n", "id": "2587582", "language": "Python", "matching_score": 3.160048723220825, "max_stars_count": 1, "path": "subprojects/wdm/wdm/run.py" }, { "content": "import itertools\nimport subprocess\nimport os\nimport time\nimport sys\nfrom pathlib import Path\nimport yaml\nimport tempfile\n\ntop_dir = Path(__file__).resolve().parent.parent\n\n\nclass RunAnsibleRole:\n \"\"\"\n Playbook runner\n\n !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n If you're seeing this text, put the --help flag earlier in your list\n of command-line arguments, this is a limitation of the CLI parsing library\n used by the toolbox.\n !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n\n \"\"\"\n def __init__(self, playbook_name, opts: dict = None):\n if opts is None:\n opts = {}\n\n self.playbook_name = playbook_name\n self.opts = opts\n\n def __str__(self):\n return \"\"\n\n def _run(self):\n run_ansible_role(self.playbook_name, self.opts)\n\n\ndef flatten(lst):\n return itertools.chain(*lst)\n\n\ndef run_ansible_role(role_name, opts: dict = dict()):\n version_override = os.environ.get(\"OCP_VERSION\")\n if version_override is not None:\n opts[\"openshift_release\"] = version_override\n\n # do not modify the `os.environ` of this Python process\n env = os.environ.copy()\n\n if env.get(\"ARTIFACT_DIR\") is None:\n env[\"ARTIFACT_DIR\"] = f\"/tmp/ci-artifacts_{time.strftime('%Y%m%d')}\"\n print(f\"Using '{env['ARTIFACT_DIR']}' to store the test artifacts (default value for ARTIFACT_DIR).\")\n else:\n print(f\"Using '{env['ARTIFACT_DIR']}' to store the test artifacts.\")\n opts[\"artifact_dir\"] = env[\"ARTIFACT_DIR\"]\n\n env[\"ARTIFACT_DIRNAME\"] = '__'.join(sys.argv[1:3])\n\n artifact_dir = Path(env[\"ARTIFACT_DIR\"])\n artifact_dir.mkdir(parents=True, exist_ok=True)\n\n if env.get(\"ARTIFACT_EXTRA_LOGS_DIR\") is None:\n previous_extra_count = len(list(artifact_dir.glob(\"*__*\")))\n env[\"ARTIFACT_EXTRA_LOGS_DIR\"] = str(\n Path(env[\"ARTIFACT_DIR\"]) /\n f\"{previous_extra_count:03d}__{env['ARTIFACT_DIRNAME']}\"\n )\n\n 
artifact_extra_logs_dir = Path(env[\"ARTIFACT_EXTRA_LOGS_DIR\"])\n artifact_extra_logs_dir.mkdir(parents=True, exist_ok=True)\n\n print(f\"Using '{artifact_extra_logs_dir}' to store extra log files.\")\n opts[\"artifact_extra_logs_dir\"] = str(artifact_extra_logs_dir)\n\n if env.get(\"ANSIBLE_LOG_PATH\") is None:\n env[\"ANSIBLE_LOG_PATH\"] = str(artifact_extra_logs_dir / \"_ansible.log\")\n print(f\"Using '{env['ANSIBLE_LOG_PATH']}' to store ansible logs.\")\n Path(env[\"ANSIBLE_LOG_PATH\"]).parent.mkdir(parents=True, exist_ok=True)\n\n if env.get(\"ANSIBLE_CACHE_PLUGIN_CONNECTION\") is None:\n env[\"ANSIBLE_CACHE_PLUGIN_CONNECTION\"] = str(artifact_dir / \"ansible_facts\")\n print(f\"Using '{env['ANSIBLE_CACHE_PLUGIN_CONNECTION']}' to store ansible facts.\")\n Path(env[\"ANSIBLE_CACHE_PLUGIN_CONNECTION\"]).parent.mkdir(parents=True, exist_ok=True)\n\n if env.get(\"ANSIBLE_CONFIG\") is None:\n env[\"ANSIBLE_CONFIG\"] = str(top_dir / \"config\" / \"ansible.cfg\")\n print(f\"Using '{env['ANSIBLE_CONFIG']}' as ansible configuration file.\")\n\n if env.get(\"ANSIBLE_JSON_TO_LOGFILE\") is None:\n env[\"ANSIBLE_JSON_TO_LOGFILE\"] = str(artifact_extra_logs_dir / \"_ansible.log.json\")\n print(f\"Using '{env['ANSIBLE_JSON_TO_LOGFILE']}' as ansible json log file.\")\n\n option_flags = flatten(\n [\n [\"-e\", f\"{option_name}={option_value}\"]\n for option_name, option_value in opts.items()\n ]\n )\n\n tmp_play_file = tempfile.NamedTemporaryFile(\"w+\", dir=os.getcwd(), delete=False)\n play = [\n dict(name=f\"Run {role_name} role\",\n connection=\"local\",\n gather_facts=False,\n hosts=\"localhost\",\n roles=[role_name],\n )\n ]\n yaml.dump(play, tmp_play_file)\n tmp_play_file.close()\n\n cmd = [\"ansible-playbook\", \"-vv\", *option_flags, tmp_play_file.name]\n\n with open(artifact_extra_logs_dir / \"_ansible.cmd\", \"w\") as f:\n print(\" \".join(cmd), file=f)\n\n with open(artifact_extra_logs_dir / \"_ansible.env\", \"w\") as f:\n for k, v in env.items():\n print(f\"{k}={v}\", file=f)\n\n with open(artifact_extra_logs_dir / \"_python.cmd\", \"w\") as f:\n print(\" \".join(sys.argv), file=f)\n\n sys.stdout.flush()\n sys.stderr.flush()\n\n try:\n run_result = subprocess.run(cmd, env=env, check=False)\n ret = run_result.returncode\n except KeyboardInterrupt:\n print(\"\")\n print(\"Interrupted :/\")\n sys.exit(1)\n finally:\n try:\n os.remove(tmp_play_file.name)\n except FileNotFoundError:\n pass # play file was removed, ignore\n\n raise SystemExit(ret)\n", "id": "7828082", "language": "Python", "matching_score": 1.4502943754196167, "max_stars_count": 1, "path": "toolbox/_common.py" }, { "content": "#! /usr/bin/python3\n\nimport logging\nimport os, sys\n\nlogging.basicConfig(\n level=os.environ.get(\"LOGLEVEL\", \"DEBUG\"),\n format=\"%(levelname)6s | %(message)s\",\n)\n\ntry:\n import fire\nexcept ModuleNotFoundError:\n logging.error(\"WDM requires the Python `fire` package.\")\n sys.exit(1)\n\nimport wdm.env_config as env_config\nimport wdm.main as wdm_main\n\n# ---\n\ndef get_entrypoint(entrypoint_name, descr):\n\n doc = \"\"\"\nEnv:\n WDM_DEPENDENCY_FILE\n WDM_TARGET\n WDM_ANSIBLE_CONFIG\n WDM_CONFIG\n WDM_LIBRARY\n\nOr stored in .wdm_env. 
See the `FLAGS` section for the descriptions.\n\nReturn codes:\n 2 if an error occured\n 1 if the testing is unsuccessful (test mode)\n 1 if an installation failed (ensure mode)\n 0 if the testing is successful (test mode)\n 0 if the dependencies are all satisfied (ensure mode)\n\nArgs:\n dependency_file: Path of the dependency file to resolve.\n target: Dependency to resolve. If empty, take the first entry defined the dependency file.\n ansible_config: Ansible config file (for Ansible tasks).\n library: If True, the `dependency_file` can be omitted.\n config: comma-separated key=value list of configuration values.\n config_file: Path to a file containing configuration key=value pairs, one per line. If empty, loads '.wdm_config' if it exists, or 'no' to skip loading any config file.\n\"\"\"\n\n def entrypoint(target: str = \"\",\n dependency_file: str = \"./dependencies.yaml\",\n ansible_config: str = None,\n library: bool = False,\n config: str = \"\",\n config_file: str = \"\"\n ):\n\n kwargs = dict(locals()) # capture the function arguments\n\n env_config.update_env_with_env_files()\n env_config.update_kwargs_with_env(kwargs)\n\n wdm_dependency_file = kwargs[\"dependency_file\"]\n\n return wdm_main.wdm_main(entrypoint_name, kwargs)\n\n entrypoint.__doc__ = descr + \"\\n\" + doc\n\n return entrypoint\n\ndef show_example():\n \"\"\"\n Show the example of command and files\n \"\"\"\n print(\"\"\"\nExamples:\n $ export WDM_DEPENDENCY_FILE=...\n $ wdm test has_nfd\n $ wdm ensure has_gpu_operator\n---\nname: has_gpu_operator\nspec:\n requirements:\n - has_nfd\n test:\n - name: has_nfd_operatorhub\n type: shell\n spec: oc get pod -l app.kubernetes.io/component=gpu-operator -A -oname\n install:\n - name: install_gpu_operator\n type: shell\n spec: ./run_toolbox.py gpu_operator deploy_from_operatorhub\n - name: install_gpu_operator\n type: shell\n spec: ./run_toolbox.py gpu_operator wait_deployment\n---\nname: has_nfd\nspec:\n test:\n - name: has_nfd_labels\n type: shell\n spec: ./run_toolbox.py nfd has_labels\n install:\n - name: install_nfd_from_operatorhub\n type: shell\n spec: ./run_toolbox.py nfd_operator deploy_from_operatorhub\n\"\"\")\n\nclass WDM_Entrypoint:\n def __init__(self):\n self.dryrun = get_entrypoint(\"dryrun\", \"Do not run test nor install tasks, only confirm that they could be executed.\")\n self.ensure = get_entrypoint(\"ensure\", \"Test dependencies and install those unsatisfied.\")\n self.test = get_entrypoint(\"test\", \"Only test if dependencies are satisfied.\")\n self.list = get_entrypoint(\"list\", \"List the available dependency targets.\")\n self.example = show_example\n\ndef main():\n # Print help rather than opening a pager\n fire.core.Display = lambda lines, out: print(*lines, file=out)\n\n # Launch CLI, get a runnable\n fire.Fire(WDM_Entrypoint())\n\n\nif __name__ == \"__main__\":\n main()\n", "id": "3366669", "language": "Python", "matching_score": 2.8345959186553955, "max_stars_count": 1, "path": "subprojects/wdm/wdm/__main__.py" }, { "content": "#!/usr/bin/env python3\n\nimport sys\n\ntry:\n import fire\nexcept ModuleNotFoundError:\n print(\"The toolbox requires the Python `fire` package, see requirements.txt for a full list of requirements\")\n sys.exit(1)\n\nfrom toolbox.cluster import Cluster\nfrom toolbox.entitlement import Entitlement\nfrom toolbox.gpu_operator import GPUOperator\nfrom toolbox.nfd import NFD\nfrom toolbox.nfd_operator import NFDOperator\nfrom toolbox.repo import Repo\nfrom toolbox.benchmarking import Benchmarking\nfrom toolbox.utils 
import Utils\nfrom toolbox.nto import NTO\nfrom toolbox.rhods import RHODS\n\n\nclass Toolbox:\n    \"\"\"\n    The PSAP Operators Toolbox\n\n    The toolbox is a set of tools, originally written for\n    CI automation, but which proved useful for a broader scope. It\n    automates different operations on OpenShift clusters and operators\n    revolving around PSAP activities: entitlement, scale-up of GPU nodes,\n    deployment of the NFD, SRO and NVIDIA GPU Operators, as well as their\n    configuration and troubleshooting.\n    \"\"\"\n    def __init__(self):\n        self.cluster = Cluster\n        self.entitlement = Entitlement\n        self.gpu_operator = GPUOperator\n        self.nfd_operator = NFDOperator\n        self.nfd = NFD\n        self.repo = Repo\n        self.benchmarking = Benchmarking\n        self.utils = Utils\n        self.nto = NTO\n        self.rhods = RHODS\n\n\ndef main(no_exit=False):\n    # Print help rather than opening a pager\n    fire.core.Display = lambda lines, out: print(*lines, file=out)\n\n    # Launch CLI, get a runnable\n    runnable = None\n    try:\n        runnable = fire.Fire(Toolbox())\n    except fire.core.FireExit:\n        if not no_exit:\n            raise\n\n    # Run the actual workload\n    try:\n        if hasattr(runnable, \"_run\"):\n            runnable._run()\n        else:\n            # CLI didn't resolve completely - either by lack of arguments\n            # or use of \`--help\`. This is okay.\n            pass\n    except SystemExit as e:\n        if not no_exit:\n            sys.exit(e.code)\n\n\nif __name__ == \"__main__\":\n    main()\n", "id": "1567424", "language": "Python", "matching_score": 1.7288531064987183, "max_stars_count": 1, "path": "run_toolbox.py" }, { "content": "import sys\nimport secrets\n\nfrom toolbox._common import RunAnsibleRole\n\n\nclass GPUOperator:\n    \"\"\"\n    Commands for deploying, building and testing the GPU operator in various ways\n    \"\"\"\n\n    @staticmethod\n    def deploy_cluster_policy():\n        \"\"\"\n        Creates the ClusterPolicy from the OLM ClusterServiceVersion\n        \"\"\"\n        print(\"Creating the ClusterPolicy from the CSV\")\n        return RunAnsibleRole(\n            \"gpu_operator_deploy_from_operatorhub\",\n            {\"gpu_operator_deploy_from\": \"pre-deployed\"},\n        )\n\n    @staticmethod\n    def deploy_from_bundle(bundle, namespace=\"nvidia-gpu-operator\"):\n        \"\"\"\n        Deploys the GPU Operator from a bundle\n\n        Args:\n            bundle: Either a bundle OCI image or \"master\" to deploy the latest bundle\n            namespace: Optional namespace in which the GPU Operator will be deployed. Before v1.9, the value must be \"openshift-operators\". With >=v1.9, the namespace can be freely chosen (except 'openshift-operators'). Default: nvidia-gpu-operator.\n        \"\"\"\n        opts = {\"gpu_operator_deploy_from\": \"bundle\",\n                \"gpu_operator_target_namespace\": namespace}\n\n        if bundle == 'master':\n            print(\"Deploying the GPU Operator from OperatorHub using the master bundle\")\n            return RunAnsibleRole(\"gpu_operator_deploy_from_operatorhub\", opts)\n\n        opts[\"deploy_bundle_image\"] = bundle\n        return RunAnsibleRole(\"gpu_operator_deploy_from_operatorhub\", opts)\n\n    @staticmethod\n    def deploy_from_operatorhub(namespace=\"nvidia-gpu-operator\", version=None, channel=None, installPlan=\"Manual\"):\n        \"\"\"\n        Deploys the GPU operator from OperatorHub\n\n        Args:\n            namespace: Optional namespace in which the GPU Operator will be deployed. Before v1.9, the value must be \"openshift-operators\". With >=v1.9, the namespace can be freely chosen. Default: nvidia-gpu-operator.\n            channel: Optional channel to deploy from. If unspecified, deploys the CSV's default channel.\n            version: Optional version to deploy. If unspecified, deploys the latest version available in the selected channel. 
Run the toolbox gpu_operator list_version_from_operator_hub subcommand to see the available versions.\n installPlan: Optional InstallPlan approval mode (Automatic or Manual [default])\n \"\"\"\n\n opts = {\n \"cluster_deploy_operator_catalog\": \"certified-operators\",\n \"cluster_deploy_operator_manifest_name\": \"gpu-operator-certified\",\n\n \"cluster_deploy_operator_namespace\": namespace,\n \"cluster_deploy_operator_all_namespaces\": namespace == \"openshift-operators\",\n\n \"cluster_deploy_operator_deploy_cr\": True,\n \"cluster_deploy_operator_namespace_monitoring\": True,\n }\n\n if channel is not None:\n opts[\"cluster_deploy_operator_channel\"] = channel\n print(\n f\"Deploying the GPU Operator from OperatorHub using channel '{channel}'.\"\n )\n\n if version is not None:\n if channel is None:\n print(\"Version may only be specified if --channel is specified\")\n sys.exit(1)\n\n opts[\"cluster_deploy_operator_version\"] = version\n print(\n f\"Deploying the GPU Operator from OperatorHub using version '{version}'.\"\n )\n\n opts[\"cluster_deploy_operator_installplan_approval\"] = installPlan\n if installPlan not in (\"Manual\", \"Automatic\"):\n print(\n f\"InstallPlan can only be Manual or Automatic. Received '{installPlan}'.\"\n )\n sys.exit(1)\n\n print(\n f\"Deploying the GPU Operator from OperatorHub using InstallPlan approval '{installPlan}'.\"\n )\n\n print(\"Deploying the GPU Operator from OperatorHub.\")\n return RunAnsibleRole(\"cluster_deploy_operator\", opts)\n\n @staticmethod\n def run_gpu_burn(runtime=None):\n \"\"\"\n Runs the GPU burn on the cluster\n\n Args:\n runtime: How long to run the GPU for, in seconds\n \"\"\"\n opts = {}\n if runtime is not None:\n opts[\"gpu_burn_time\"] = runtime\n print(f\"Running GPU Burn for {runtime} seconds.\")\n\n return RunAnsibleRole(\"gpu_operator_run_gpu-burn\", opts)\n\n @staticmethod\n def undeploy_from_operatorhub():\n \"\"\"\n Undeploys a GPU-operator that was deployed from OperatorHub\n \"\"\"\n\n return RunAnsibleRole(\"gpu_operator_undeploy_from_operatorhub\")\n\n @staticmethod\n def wait_deployment():\n \"\"\"\n Waits for the GPU operator to deploy\n \"\"\"\n return RunAnsibleRole(\"gpu_operator_wait_deployment\")\n\n @staticmethod\n def capture_deployment_state():\n \"\"\"\n Captures the GPU operator deployment state\n \"\"\"\n return RunAnsibleRole(\"gpu_operator_capture_deployment_state\")\n\n @staticmethod\n def get_csv_version():\n \"\"\"\n Get the version of the GPU Operator currently installed from OLM\n Stores the version in the 'ARTIFACT_EXTRA_LOGS_DIR' artifacts directory.\n \"\"\"\n\n return RunAnsibleRole(\"gpu_operator_get_csv_version\")\n\n", "id": "3348190", "language": "Python", "matching_score": 4.497506141662598, "max_stars_count": 1, "path": "toolbox/gpu_operator.py" }, { "content": "import sys\n\nfrom toolbox._common import RunAnsibleRole\n\n\nclass Cluster:\n \"\"\"\n Commands relating to cluster scaling, upgrading and environment capture\n \"\"\"\n @staticmethod\n def set_scale(instance_type, scale, base_machineset=None, force=False):\n \"\"\"\n Ensures that the cluster has exactly `scale` nodes with instance_type `instance_type`\n\n If the machinesets of the given instance type already have the required total number of replicas,\n their replica parameters will not be modified.\n Otherwise,\n - If there's only one machineset with the given instance type, its replicas will be set to the value of this parameter.\n\n - If there are other machinesets with non-zero replicas, the playbook will fail, 
unless the 'force_scale' parameter is\n            set to true. In that case, the number of replicas of the other machinesets will be zeroed before setting the replicas\n            of the first machineset to the value of this parameter.\n\n            - If \`--base-machineset=machineset\` flag is passed, \`machineset\` machineset will be used to derive the new\n            machineset (otherwise, the first machineset of the listing will be used). This is useful if the desired \`instance_type\`\n            is only available in some specific regions and controlled by different machinesets.\n\n        Example: ./run_toolbox.py cluster set_scale g4dn.xlarge 1 # ensure that the cluster has 1 GPU node\n\n        Args:\n            instance_type: The instance type to use, for example, g4dn.xlarge\n            scale: The number of required nodes with given instance type\n            base_machineset: Name of a machineset to use to derive the new one. Default: pick the first machineset found in \`oc get machinesets -n openshift-machine-api\`.\n            force: Optional boolean flag to zero the replicas of the other machinesets of the same instance type (see 'force_scale' above). Default: False.\n        \"\"\"\n        opts = {\n            \"machineset_instance_type\": instance_type,\n            \"scale\": scale,\n        }\n\n        if base_machineset is not None:\n            opts[\"base_machineset\"] = base_machineset\n\n        if force:\n            opts[\"force_scale\"] = \"true\"\n\n        return RunAnsibleRole(\"cluster_set_scale\", opts)\n\n    @staticmethod\n    def upgrade_to_image(image):\n        \"\"\"\n        Upgrades the cluster to the given image\n\n        Args:\n            image: The image to upgrade the cluster to\n        \"\"\"\n        return RunAnsibleRole(\"cluster_upgrade_to_image\", {\"cluster_upgrade_image\": image})\n\n    @staticmethod\n    def capture_environment():\n        \"\"\"\n        Captures the cluster environment\n        \"\"\"\n        return RunAnsibleRole(\"cluster_capture_environment\")\n\n\n    @staticmethod\n    def deploy_operator(catalog, manifest_name, namespace, version=None, channel=None, install_plan=\"Manual\", deploy_cr=False, ns_monitoring=False):\n        \"\"\"\n        Deploy an operator from OperatorHub catalog entry.\n\n        Args:\n            catalog: Name of the catalog containing the operator.\n            manifest_name: Name of the operator package manifest.\n            namespace: Namespace in which the operator will be deployed, or 'all' to deploy in all the namespaces.\n            channel: Optional channel to deploy from. If unspecified, deploys the CSV's default channel. Use '?' to list the available channels for the given package manifest.\n            version: Optional version to deploy. If unspecified, deploys the latest version available in the selected channel.\n            install_plan: Optional InstallPlan approval mode (Automatic or Manual). Default: Manual.\n            deploy_cr: Optional boolean flag to deploy the first example CR found in the CSV.\n            ns_monitoring: Optional boolean flag to enable OpenShift namespace monitoring. 
Default: False.\n \"\"\"\n\n opts = {\n \"cluster_deploy_operator_catalog\": catalog,\n \"cluster_deploy_operator_manifest_name\": manifest_name,\n }\n\n\n if namespace == \"all\":\n opts[\"cluster_deploy_operator_all_namespaces\"] = \"True\"\n opts[\"cluster_deploy_operator_namespace\"] = \"openshift-operators\"\n if ns_monitoring:\n print(\"Namespace monitoring cannot be enabled when deploying in all the namespaces.\")\n sys.exit(1)\n\n print(f\"Deploying the operator in all the namespaces.\")\n else:\n opts[\"cluster_deploy_operator_namespace\"] = namespace\n opts[\"cluster_deploy_operator_namespace_monitoring\"] = ns_monitoring\n if ns_monitoring:\n print(f\"Enabling namespace monitoring.\")\n\n print(f\"Deploying the operator using namespace '{namespace}'.\")\n\n\n if channel is not None:\n opts[\"cluster_deploy_operator_channel\"] = channel\n print(f\"Deploying the operator using channel '{channel}'.\")\n\n if version is not None:\n if channel is None:\n print(\"Version may only be specified if --channel is specified\")\n sys.exit(1)\n\n opts[\"cluster_deploy_operator_version\"] = version\n print(f\"Deploying the operator using version '{version}'.\")\n\n opts[\"cluster_deploy_operator_installplan_approval\"] = install_plan\n if install_plan not in (\"Manual\", \"Automatic\"):\n print(f\"--install-plan can only be Manual or Automatic. Received '{install_plan}'.\")\n sys.exit(1)\n\n print(f\"Deploying the operator using InstallPlan approval mode '{install_plan}'.\")\n\n opts[\"cluster_deploy_operator_deploy_cr\"] = deploy_cr\n if deploy_cr:\n print(f\"Deploying the operator default CR.\")\n\n\n print(\"Deploying the operator.\")\n\n return RunAnsibleRole(\"cluster_deploy_operator\", opts)\n\n @staticmethod\n def deploy_aws_efs():\n \"\"\"\n Deploy AWS EFS CSI driver and configure AWS accordingly.\n Assumes that AWS (credentials, Ansible module, Python module) is properly configured in the system.\n \"\"\"\n return RunAnsibleRole(\"cluster_deploy_aws_efs\", {})\n\n @staticmethod\n def deploy_minio_s3_server(secret_properties_file):\n \"\"\"\n Deploy Minio S3 server\n\n Example of secret properties file:\n\n user_password=<PASSWORD>\n admin_password=<PASSWORD>\n\n Args\n secret_properties_file: Path of a file containing the properties of S3 secrets.\n \"\"\"\n\n opts = {\n \"cluster_deploy_minio_s3_server_secret_properties\": secret_properties_file,\n }\n\n return RunAnsibleRole(\"cluster_deploy_minio_s3_server\", opts)\n", "id": "3742672", "language": "Python", "matching_score": 1.3217049837112427, "max_stars_count": 1, "path": "toolbox/cluster.py" }, { "content": "import time, datetime\nimport urllib3\n\nprint(\"Importing OpenShift/Kubernetes packages ...\")\n\nimport kubernetes\nimport ocp_resources\nimport openshift\n\nimport ocp_resources.node\nimport ocp_resources.machine\n\nimport openshift.dynamic\n\nprint(\"Importing AWS boto3 ...\")\n\nimport boto3\nimport botocore\n\nclient_k8s = None\nclient_ec2 = None\nresource_ec2 = None\n\ndef configure():\n #\n # K8s\n #\n\n global client_k8s\n try:\n client_k8s = openshift.dynamic.DynamicClient(client=kubernetes.config.new_client_from_config())\n except Exception as e:\n print(\"WARNING: kubernetes not available:\", e)\n\n #\n # AWS\n #\n\n machines = [m for m in ocp_resources.machine.Machine.get(dyn_client=client_k8s)]\n if not machines:\n raise RuntimeError(\"No machine available ...\")\n cluster_region = machines[0].instance.spec.providerSpec.value.placement.region\n\n global client_ec2, resource_ec2\n cfg = 
botocore.config.Config(region_name=cluster_region)\n    # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/ec2.html\n    client_ec2 = boto3.client('ec2', config=cfg)\n    resource_ec2 = boto3.resource('ec2', config=cfg)\n\n    print(\"Ready.\")\n\ndef wait_openshift():\n    print(\"Waiting for OpenShift cluster to be ready ...\")\n\n    while True:\n        try:\n            global client_k8s\n            client_k8s = openshift.dynamic.DynamicClient(client=kubernetes.config.new_client_from_config())\n\n            nodes = [m for m in ocp_resources.node.Node.get(dyn_client=client_k8s)]\n            if len(nodes) != 0:\n                print(f\"Found {len(nodes)} node(s), OpenShift Cluster is ready!\")\n                break\n        except urllib3.exceptions.MaxRetryError: pass\n        except kubernetes.client.exceptions.ApiException: pass\n\n        time.sleep(10)\n\ndef get_machine_props():\n    if not client_k8s:\n        return None, None, None\n\n    machines = [m for m in ocp_resources.machine.Machine.get(dyn_client=client_k8s)]\n    if len(machines) != 1:\n        raise RuntimeError(\"Should be only one machine ...\")\n\n    machine = machines[0]\n\n    cluster_name = machine.cluster_name\n    print(f\"Cluster name: {cluster_name}\")\n\n    instance = resource_ec2.Instance(machine.instance.status.providerStatus.instanceId)\n    instance.load()\n    print(f\"Instance Id: {instance.id}\")\n\n    zone = machine.instance.spec.providerSpec.value.placement.availabilityZone\n    print(f\"Availability zone: {zone}\")\n    return cluster_name, instance, zone\n\n\ndef get_instance_root_volume(instance):\n    volumes = [v for v in instance.volumes.all()]\n    if len(volumes) > 1:\n        print(\"WARNING: more than 1 volume found ...\")\n\n    return volumes[0]\n\ndef get_cluster_snapshot(cluster_name, instance, zone):\n    resp = client_ec2.describe_snapshots(\n        Filters=[{\n            'Name': f'tag:kubernetes.io/cluster/{cluster_name}',\n            'Values': ['owned']\n        }])\n\n    snapshots = resp[\"Snapshots\"]\n    if len(snapshots) == 0:\n        return None\n\n    if len(snapshots) > 1:\n        print(\"WARNING: more than 1 snapshot found ... taking the first one.\")\n\n    snapshot = resource_ec2.Snapshot(snapshots[0]['SnapshotId'])\n    snapshot.load()\n\n    return snapshot\n\ndef await_snapshot(snapshot):\n    prev = \"\"\n    if snapshot.progress == \"100%\":\n        print(f\"Snapshot {snapshot.id} is ready.\")\n\n    while snapshot.progress != \"100%\":\n        if prev == \"\":\n            print(f\"Waiting for the completion of snapshot {snapshot.id} ...\")\n            print(snapshot.progress)\n            prev = snapshot.progress\n\n        time.sleep(10)\n        snapshot.reload()\n        if prev != snapshot.progress:\n            prev = snapshot.progress\n            print(snapshot.progress)\n\ndef human_ts():\n    return datetime.datetime.now().strftime(\"%Y-%m-%dT%H:%M\")\n", "id": "12844530", "language": "Python", "matching_score": 3.5886597633361816, "max_stars_count": 1, "path": "subprojects/sno-snapshot/src/common.py" }, { "content": "#! 
/usr/bin/env python\n\nimport sys, time\n\nimport common\n\ndef rollback(snapshot, cluster_name, instance, zone):\n    volume = common.get_instance_root_volume(instance)\n\n    ts = common.human_ts()\n\n    print(f\"Old Volume Id: {volume.id}\")\n\n    print(\"Triggering the rollback!\")\n    resp = common.client_ec2.create_replace_root_volume_task(\n        InstanceId=instance.id,\n        SnapshotId=snapshot.id,\n        TagSpecifications=[\n            {\n                'ResourceType': 'volume',\n                'Tags': [\n                    {'Key': 'Name', 'Value': f'{cluster_name}-restored-at-{ts}'},\n                    {'Key': f'kubernetes.io/cluster/{cluster_name}', 'Value': 'owned'},\n                ]\n            },\n            {\n                'ResourceType': 'replace-root-volume-task',\n                'Tags': [\n                    # replace-root-volume-task objects cannot be\n                    # deleted, so do not use the 'owned' value below,\n                    # otherwise \`openshift-install destroy cluster\`\n                    # will loop forever as it detects resources owned\n                    # by the cluster.\n                    # https://github.com/aws/aws-cli/issues/6650\n                    {'Key': f'kubernetes.io/cluster/{cluster_name}', 'Value': 'createdby'},\n                    {'Key': 'costless-resource', 'Value': 'true'},\n                ]\n            },\n        ]\n    )\n\n    replace_id = resp[\"ReplaceRootVolumeTask\"][\"ReplaceRootVolumeTaskId\"]\n    print(f\"Replacement Id: {replace_id}\")\n    state = resp[\"ReplaceRootVolumeTask\"][\"TaskState\"]\n    prev = state\n    print(f\"Waiting for the completion of the replacement ... (state={state})\")\n    while state in ('pending', 'in-progress'):\n        time.sleep(10)\n\n        resp = common.client_ec2.describe_replace_root_volume_tasks(\n            ReplaceRootVolumeTaskIds=[replace_id]\n        )\n        state = resp[\"ReplaceRootVolumeTasks\"][0][\"TaskState\"]\n        if prev != state:\n            print(state)\n            prev = state\n\n    if state != \"succeeded\":\n        print(\"Rollback failed ...\")\n        return 1\n\n    print(f\"Cluster rolled back to snapshot {snapshot.id}\")\n\n    volume.delete()\n    print(f\"Old volume {volume.id} deleted.\")\n\n    return 0  # success\n\ndef has_ongoing_rollback(cluster_name, instance, zone):\n    first = True\n\n    resp = common.client_ec2.describe_replace_root_volume_tasks(\n        Filters=[{\n            'Name': f'tag:kubernetes.io/cluster/{cluster_name}',\n            'Values': ['createdby']\n        }]\n    )\n\n    ongoing = False\n    for task in resp[\"ReplaceRootVolumeTasks\"]:\n        state = task[\"TaskState\"]\n        if state in ('pending', 'in-progress'):\n            ongoing = True\n            if first:\n                first = False\n                print(\"Found an ongoing task:\")\n                print(task)\n\n    return ongoing\n\ndef main():\n    common.configure()\n    machine_props = common.get_machine_props()\n    print()\n\n    if \"--only-wait-openshift\" in sys.argv:\n        common.wait_openshift()\n        return\n\n    snapshot = common.get_cluster_snapshot(*machine_props)\n\n    if snapshot is None:\n        print(\"ERROR: no snapshot to restore ...\")\n        return 1\n\n    if snapshot.progress != \"100%\":\n        if \"--wait-snapshot\" not in sys.argv:\n            print(\"ERROR: cannot restore the snapshot before it has completed.\")\n            print(f\"Pass '--wait-snapshot' to wait for its completion. 
({snapshot.progress})\")\n            return 1\n\n        common.await_snapshot(snapshot)\n\n\n    if has_ongoing_rollback(*machine_props):\n        print(\"ERROR: cannot trigger multiple rollbacks at the same time ...\")\n        return 1\n\n    if rollback(snapshot, *machine_props):\n        return 1\n\n    common.wait_openshift()\n\n    return\n\nif __name__ == \"__main__\":\n    sys.exit(main())\n", "id": "10884835", "language": "Python", "matching_score": 2.113919496536255, "max_stars_count": 1, "path": "subprojects/sno-snapshot/src/rollback.py" }, { "content": "import sys\nimport datetime\nimport boto3\nimport botocore.config\nfrom collections import defaultdict\n\nIGNORE_LESS_THAN_DAYS = 0\nMY_REGIONS = [\"us-east-2\", \"us-east-1\", \"us-west-2\", \"eu-central-1\"]\nnow = datetime.datetime.now(datetime.timezone.utc)\n\nclient_ec2 = boto3.client('ec2')\nresource_ec2 = boto3.resource('ec2')\n\nclusters = defaultdict(list)\n\ndef collect_instances(region=None):\n    print(f\"Looking at the {region} region ...\")\n\n    if region is None:\n        regional_resource_ec2 = resource_ec2\n    else:\n        my_config = botocore.config.Config(region_name=region)\n        regional_resource_ec2 = boto3.resource(\"ec2\", config=my_config)\n\n    instance_count = 0\n    instances_stopped = 0\n    instances_ignored_too_young = 0\n    for instance in regional_resource_ec2.instances.all():\n        age = (now - instance.launch_time).days\n        instance_count += 1\n\n        if age < IGNORE_LESS_THAN_DAYS:\n            instances_ignored_too_young += 1\n            continue\n\n        state = instance.state['Name']\n\n        if state == \"stopped\":\n            instances_stopped += 1\n\n        info = {\n            \"ID\": instance.id,\n            \"Type\": instance.instance_type,\n            \"State\": instance.state[\"Name\"],\n            \"Age\": f\"{age} days\",\n            \"Region\": region,\n            \"Name\": \"<not defined>\",\n        }\n\n        for tag in instance.tags or []:\n            if tag[\"Key\"] == \"Name\":\n                info[\"Name\"] = tag[\"Value\"]\n            if tag[\"Value\"] == \"owned\":\n                info[\"Cluster ID\"] = tag[\"Key\"]\n\n        clusters[info.get(\"Cluster ID\")].append(info)\n    if not instance_count: return\n    print(f\"\"\"\\\n{instance_count=}\n{instances_stopped=}\n{instances_ignored_too_young=}\n    \"\"\")\n\ndef get_all_regions():\n    if MY_REGIONS:\n        return MY_REGIONS\n\n    return [region['RegionName'] for region in client_ec2.describe_regions()['Regions']]\n\ndef print_clusters():\n    for cluster_tag in reversed(sorted(map(str, clusters))):\n        if cluster_tag == \"None\":\n            cluster_name = \"Not part of a cluster\"\n            cluster_tag = None\n        else:\n            cluster_name = cluster_tag.rpartition(\"/\")[-1]\n\n        print(cluster_name)\n        print(\"=\"*len(cluster_name))\n        first = True\n        for cluster_instance in clusters[cluster_tag]:\n\n            if cluster_tag is not None:\n                cluster_instance = dict(cluster_instance) # make a copy of the dict\n                cluster_instance.pop('Cluster ID')\n                cluster_instance['Name'] = cluster_instance['Name'].replace(f\"{cluster_tag}-\", \"\")\n                if first:\n                    print(\"Age:\", cluster_instance['Age'])\n                    print(\"Region:\", cluster_instance['Region'])\n\n            print(cluster_instance[\"ID\"], cluster_instance[\"Type\"], cluster_instance[\"State\"], cluster_instance[\"Name\"])\n\n            if cluster_tag is None:\n                print(\"Age:\", cluster_instance['Age'])\n                print(\"Region:\", cluster_instance['Region'])\n            first = False\n        print()\n\n\ndef main():\n    for region in get_all_regions():\n        collect_instances(region)\n    print_clusters()\n\nsys.exit(main())\n", "id": "6749716", "language": "Python", "matching_score": 0.1868545562028885, "max_stars_count": 1, "path": "subprojects/cloud-watch/ec2.py" }, { "content": "import sys\n\nimport secrets\n\nfrom toolbox._common import 
RunAnsibleRole\n\n\nclass Utils:\n \"\"\"\n General-purpose command(s) independent of and usable by any tool\n \"\"\"\n\n @staticmethod\n def build_push_image(\n local_image_name,\n image_tag=\"\",\n namespace=\"ci-artifacts\",\n remote_repo=\"\",\n remote_auth_file=\"\",\n git_repo=\"\",\n git_ref=\"\",\n dockerfile_path=\"Dockerfile\",\n context_dir=\"/\",\n memory=\"\"\n ):\n \"\"\"\n Build and publish an image to quay using either a Dockerfile or\n git repo.\n\n Args:\n local_image_name: Name of locally built image.\n image_tag: Optional tag for the image to build. If undefined, a random tag will be generated.\n namespace: Optional namespace where the local image will be built. Default: ci-artifacts.\n remote_repo: Optional remote image repo to push to. If undefined, the image will not be pushed.\n remote_auth_file: Optional auth file for the remote repository.\n\n git_repo: Optional Git repo containing Dockerfile if used as source. If undefined, the local path of 'dockerfile_path' will be used.\n git_ref: Optional Git commit ref (branch, tag, commit hash) in the git repository.\n\n context_dir: Optional context dir inside the git repository. Default /.\n dockerfile_path: Optional Path/Name of Dockerfile if used as source. Default: Dockerfile. If 'git_repo' is undefined, this path will be resolved locally, and the Dockerfile will be injected in the image BuildConfig.\n memory: Optional flag to specify the required memory to build the image (in Gb).\n \"\"\"\n\n if not git_repo and not dockerfile_path:\n print(\"Either a git repo or a Dockerfile Path is required\")\n sys.exit(1)\n\n both_or_none = lambda a, b: (a and b) or (not a and not b)\n\n if not both_or_none(remote_repo, remote_auth_file):\n print(\"ERROR: remote_repo and remote_auth_file must come together.\")\n sys.exit(1)\n elif remote_repo:\n print(f\"Using remote repo {remote_repo} and auth file {remote_auth_file} to push the image.\")\n else:\n print(f\"No remote repo provided, not pushing the image.\")\n\n if not both_or_none(git_repo, git_ref):\n print(\"ERROR: git_repo and git_ref must come together.\")\n sys.exit(1)\n elif git_repo:\n print(f\"Using Git repo {git_repo}|{git_ref}|{context_dir}|{dockerfile_path} for building the image.\")\n else:\n print(f\"Using local dockerfile at {dockerfile_path} for building the image.\")\n\n if not git_repo and context_dir != \"/\":\n print(\"ERROR: local builds (no git_repo) cannot specify a context_dir.\")\n sys.exit(1)\n\n if memory:\n try:\n memory = str(float(memory)) + \"Gi\"\n except ValueError:\n print(\"ERROR: memory must be of type float or int\")\n sys.exit(1)\n print(f\"Requesting {memory} of memory for building the image.\")\n\n if not image_tag:\n image_tag = secrets.token_hex(4)\n print(f\"Using '{image_tag}' as image tag.\")\n\n opts = {\n \"utils_build_push_image_local_name\": local_image_name,\n \"utils_build_push_image_tag\": image_tag,\n \"utils_build_push_image_namespace\": namespace,\n\n \"utils_build_push_image_remote_repo\": remote_repo,\n \"utils_build_push_image_remote_auth_file\": remote_auth_file,\n\n \"utils_build_push_image_git_repo\": git_repo,\n \"utils_build_push_image_git_ref\": git_ref,\n\n \"utils_build_push_image_context_dir\": context_dir,\n \"utils_build_push_image_dockerfile_path\": dockerfile_path,\n\n \"utils_build_push_image_memory\": memory,\n }\n\n return RunAnsibleRole(\"utils_build_push_image\", opts)\n", "id": "2321541", "language": "Python", "matching_score": 2.0138916969299316, "max_stars_count": 1, "path": "toolbox/utils.py" }, { 
"content": "from toolbox._common import RunAnsibleRole\n\nclass NTO:\n \"\"\"\n Commands for NTO related tasks\n \"\"\"\n @staticmethod\n def run_e2e_test(\n git_repo,\n git_ref,\n ):\n \"\"\"\n Run NTO e2e tests\n\n Args:\n git_repo: Git repository URL where to find the e2e tests\n git_ref: Git reference to clone\n \"\"\"\n opts = {\n \"nto_git_repo\": git_repo,\n \"nto_git_ref\": git_ref,\n }\n\n return RunAnsibleRole(\"nto_run_e2e_test\", opts)\n", "id": "4290546", "language": "Python", "matching_score": 0.6928980350494385, "max_stars_count": 1, "path": "toolbox/nto.py" }, { "content": "from toolbox._common import RunAnsibleRole\n\n\nclass NFD:\n \"\"\"\n Commands for NFD related tasks\n \"\"\"\n @staticmethod\n def has_gpu_nodes():\n \"\"\"\n Checks if the cluster has GPU nodes\n \"\"\"\n return RunAnsibleRole(\"nfd_test_wait_gpu\")\n\n @staticmethod\n def has_labels():\n \"\"\"\n Checks if the cluster has NFD labels\n \"\"\"\n return RunAnsibleRole(\"nfd_has_labels\")\n\n @staticmethod\n def wait_gpu_nodes():\n \"\"\"\n Wait until nfd find GPU nodes\n \"\"\"\n opts = {\n \"nfd_wait_gpu_nodes\": \"yes\"\n }\n return RunAnsibleRole(\"nfd_test_wait_gpu\", opts)\n\n @staticmethod\n def wait_labels():\n \"\"\"\n Wait until nfd labels the nodes\n \"\"\"\n return RunAnsibleRole(\"nfd_test_wait_labels\")\n", "id": "11607278", "language": "Python", "matching_score": 0.9218860268592834, "max_stars_count": 1, "path": "toolbox/nfd.py" }, { "content": "import sys\nimport secrets\n\nfrom toolbox._common import PlaybookRun\n\n\nclass OCMAddon:\n \"\"\"\n Commands for managing OCM addons\n \"\"\"\n", "id": "4123533", "language": "Python", "matching_score": 0.938273549079895, "max_stars_count": 1, "path": "toolbox/ocm_addon.py" }, { "content": "import sys\n\nfrom toolbox._common import RunAnsibleRole\n\n\nODS_CATALOG_IMAGE_DEFAULT = \"quay.io/modh/qe-catalog-source\"\nODS_CATALOG_IMAGE_VERSION_DEFAULT = \"v160-8\"\nclass RHODS:\n \"\"\"\n Commands relating to RHODS\n \"\"\"\n\n @staticmethod\n def deploy_ods(catalog_image=ODS_CATALOG_IMAGE_DEFAULT,\n version=ODS_CATALOG_IMAGE_VERSION_DEFAULT):\n \"\"\"\n Deploy ODS operator from its custom catalog\n\n Args:\n catalog_image: Optional. Container image containing ODS bundle.\n version: Optional. Version (catalog image tag) of ODS to deploy.\n \"\"\"\n\n opts = {\n \"rhods_deploy_ods_catalog_image\": catalog_image,\n \"rhods_deploy_ods_catalog_image_tag\": version,\n }\n\n return RunAnsibleRole(\"rhods_deploy_ods\", opts)\n\n @staticmethod\n def deploy_addon(cluster_name, wait_for_ready_state=True):\n \"\"\"\n Installs the RHODS OCM addon\n\n Args:\n cluster_name: The name of the cluster where RHODS should be deployed.\n wait_for_ready_state: Optional. If true (default), will cause the role to wait until addon reports ready state. (Can time out)\n \"\"\"\n\n opt = {\n \"ocm_deploy_addon_id\": \"managed-odh\",\n \"ocm_deploy_addon_cluster_name\": cluster_name,\n \"ocm_deploy_addon_wait_for_ready_state\": wait_for_ready_state,\n }\n\n return RunAnsibleRole(\"ocm_deploy_addon\", opt)\n\n @staticmethod\n def test_jupyterlab(idp_name, username_prefix, user_count: int, secret_properties_file, sut_cluster_kubeconfig=\"\"):\n \"\"\"\n Test RHODS JupyterLab notebooks\n\n Args:\n idp_name: Name of the identity provider to use.\n user_count: Number of users to run in parallel\n secret_properties_file: Path of a file containing the properties of LDAP secrets. (See 'deploy_ldap' command)\n sut_cluster_kubeconfig: Optional. 
Path of the system-under-test cluster's Kubeconfig. If provided, the RHODS endpoints will be looked up in this cluster.\n        \"\"\"\n\n        opts = {\n            \"rhods_test_jupyterlab_idp_name\": idp_name,\n            \"rhods_test_jupyterlab_username_prefix\": username_prefix,\n            \"rhods_test_jupyterlab_user_count\": user_count,\n            \"rhods_test_jupyterlab_secret_properties\": secret_properties_file,\n            \"rhods_test_jupyterlab_sut_cluster_kubeconfig\": sut_cluster_kubeconfig,\n        }\n\n        return RunAnsibleRole(\"rhods_test_jupyterlab\", opts)\n\n    @staticmethod\n    def undeploy_ods():\n        \"\"\"\n        Undeploy ODS operator\n        \"\"\"\n\n        return RunAnsibleRole(\"rhods_undeploy_ods\")\n\n    @staticmethod\n    def deploy_ldap(idp_name, username_prefix, username_count: int, secret_properties_file, use_ocm=\"\"):\n        \"\"\"\n        Deploy OpenLDAP and LDAP Oauth\n\n        Example of secret properties file:\n\n        user_password=<PASSWORD>\n        admin_password=<PASSWORD>\n\n        Args:\n            idp_name: Name of the LDAP identity provider.\n            username_prefix: Prefix for the creation of the users (suffix is 0..username_count)\n            username_count: Number of users to create.\n            secret_properties_file: Path of a file containing the properties of LDAP secrets.\n            use_ocm: Optional. If set with a cluster name, use \`ocm create idp\` to deploy the LDAP identity provider.\n        \"\"\"\n\n        opts = {\n            \"rhods_deploy_ldap_idp_name\": idp_name,\n            \"rhods_deploy_ldap_username_prefix\": username_prefix,\n            \"rhods_deploy_ldap_username_count\": username_count,\n            \"rhods_deploy_ldap_secret_properties\": secret_properties_file,\n            \"rhods_deploy_ldap_use_ocm\": use_ocm,\n        }\n\n        return RunAnsibleRole(\"rhods_deploy_ldap\", opts)\n\n    @staticmethod\n    def undeploy_ldap():\n        \"\"\"\n        Undeploy OpenLDAP and LDAP Oauth\n        \"\"\"\n\n        return RunAnsibleRole(\"rhods_undeploy_ldap\")\n", "id": "7824388", "language": "Python", "matching_score": 1.9750055074691772, "max_stars_count": 1, "path": "toolbox/rhods.py" }, { "content": "import types\nimport base64\nimport os\nimport sys\nimport pathlib\nimport argparse\n\n\"\"\"\nReads a property file passed as parameter, and generates base64-encoded\nuser & password lists.\nExpected properties:\n- user_password # unique password of the users\n- admin_password # password of the admin user\n\nAdd the '--verbose' flag to the command line to show the generated content on stderr\n\"\"\"\n\ndef str2base64(mystr):\n    return base64.b64encode(mystr.encode(\"ascii\")).decode(\"ascii\")\n\ndef encode_users(prefix, nb):\n    \"\"\"prefix=\"testuser\", nb=20:\n    \"testuser0,testuser1,...,testuser19\"\n    and returns it in cleartext and base64-encoded\n    \"\"\"\n\n    mystr = ''\n    for x in range(nb):\n        mystr += prefix + str(x) + ','\n\n    mystr = mystr[:-1] # remove the trailing comma\n\n    return mystr, str2base64(mystr)\n\n\ndef encode_password(password, nb):\n    \"\"\"Generates \"password,password...,password\" nb times\n    and returns it in cleartext and base64-encoded\n    \"\"\"\n    mystr = ''\n    for x in range(nb):\n        mystr += password + ','\n\n    mystr = mystr[:-1] # remove the trailing comma\n\n    return mystr, str2base64(mystr)\n\n\ndef parse_args():\n    \"\"\"\n    Simple argument parser.\n    \"\"\"\n    parser = argparse.ArgumentParser(description='')\n    parser.add_argument('--secret_props')\n    parser.add_argument('--admin_user')\n    parser.add_argument('--prefix')\n    parser.add_argument('--nbusers')\n    parser.add_argument('--verbose', action='store_true')\n    parser.add_argument('--test', action='store_true')\n    cli_args = parser.parse_args()\n\n    if None in (cli_args.prefix, cli_args.nbusers, cli_args.secret_props):\n        
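# argparse's parser.error() prints the usage message and exits the process with status 2\n        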
parser.error(\"all arguments are mandatory\")\n\n try:\n fname = pathlib.Path(cli_args.secret_props)\n except IndexError:\n print(\"ERROR: please provide a property file in parameter.\")\n sys.exit(1)\n\n if not fname.exists():\n print(f\"ERROR: file '{fname}' doesn't exist...\")\n sys.exit(1)\n\n args = types.SimpleNamespace()\n args.admin_user = cli_args.admin_user\n args.prefix = cli_args.prefix\n args.nbusers = cli_args.nbusers\n args.user_password = None\n\n args.admin_password = None\n\n with open(fname) as f:\n for i, line in enumerate(f.readlines()):\n key, found, value = line.strip().partition(\"=\")\n if not found:\n print(f\"WARNING: invalid value line {i}, ignoring.\")\n continue\n if key not in args.__dict__.keys():\n print(f\"WARNING: unexpected key '{key}' line {i}, ignoring.\")\n continue\n args.__dict__[key] = value\n\n if None in args.__dict__.values():\n print(\"ERROR: not all the properties have been set ...\")\n print(\"INFO: expected properties:\", \", \".join(args.__dict__.keys()))\n sys.exit(1)\n\n\n args.verbose = cli_args.verbose\n args.test = cli_args.test\n\n try:\n args.nbusers = int(args.nbusers)\n except ValueError:\n print(\"ERROR: could not parse the number of users as an integer.\")\n sys.exit(1)\n\n return args\n\nif __name__ == \"__main__\":\n args = parse_args()\n\n users, users64 = encode_users(args.prefix, args.nbusers)\n passwords, passwords64 = encode_password(args.user_password, args.nbusers)\n\n adminuser64 = str2base64(args.admin_user)\n adminpassword64 = str2base64(args.admin_password)\n\n if args.test:\n print(\"test mode: all good, exiting.\")\n sys.exit(0)\n\n print(f\"\"\"\\\nexport rhods_ldap_adminuser64=\"{adminuser64}\"\nexport rhods_ldap_adminpassword64=\"{adminpassword64}\"\nexport rhods_ldap_users64=\"{users64}\"\nexport rhods_ldap_passwords64=\"{passwords64}\"\\\n\nexport rhods_ldap_adminuser=\"{args.admin_user}\"\nexport rhods_ldap_adminpassword=\"{args.admin_password}\"\n\"\"\")\n\n if args.verbose:\n print(f\"\"\"\nusers = {users}\nb64(users) = {users64}\npasswords = {passwords}\nb64(passwords) = {passwords64}\nadminuser = {args.admin_user}\nb64(adminuser) = {adminuser64}\nadminpassword = {args.admin_password}\nb64(adminpassord) = {adminpassword64}\\\n\"\"\", file=sys.stderr)\n", "id": "11615631", "language": "Python", "matching_score": 0.865315318107605, "max_stars_count": 1, "path": "roles/rhods_deploy_ldap/files/ldapgen.py" }, { "content": "from toolbox._common import RunAnsibleRole\n\n\nclass Benchmarking:\n \"\"\"\n Commands related to benchmarking tasks\n \"\"\"\n\n @staticmethod\n def download_coco_dataset(node_hostname, namespace=\"default\", pvc_name=None, storage_dir=None, s3_cred=None):\n \"\"\"\n Downloads the COCO dataset into a PVC of the cluster\n\n Args:\n node_hostname: Hostname of the node where the download pod will be executed.\n namespace: Name of the namespace in which the resources will be created.\n pvc_name: Name of the PVC that will be create to store the dataset files.\n s3_cred: Optional path to credentials to use for accessing the dataset s3 bucket.\n \"\"\"\n opts = {\n \"benchmarking_node_hostname\": node_hostname,\n \"benchmarking_namespace\": namespace,\n }\n if pvc_name is not None:\n opts[\"benchmarking_coco_dataset_pvc_name\"] = pvc_name\n print(\n f\"Using '{pvc_name}' as PVC name.\"\n )\n \n if storage_dir is not None:\n opts[\"benchmarking_coco_dataset_storage_dir\"] = storage_dir\n print(\n f\"Using '{storage_dir}' as storage dir.\"\n )\n\n if s3_cred is not None:\n 
opts[\"benchmarking_coco_dataset_s3_cred\"] = s3_cred\n print(\n f\"Using '{s3_cred}' as s3 credentials.\"\n )\n\n return RunAnsibleRole(\"benchmarking_deploy_coco_dataset\", opts)\n\n @staticmethod\n def run_mlperf_ssd(node_hostname, namespace=\"default\", pvc_name=None, epochs=None, threshold=None):\n \"\"\"\n Run NVIDIA MLPerf SSD Detection training benchmark.\n\n Args:\n node_hostname: Hostname of the node where the ssd benchmark will be executed.\n namespace: Name of the namespace in which the resources will be created.\n pvc_name: Name of the PVC that will be create to store the dataset files.\n epochs: Number of epochs to run the benchmark for.\n threshold: Benchmark threshold target value.\n \"\"\"\n\n opts = {\n \"benchmarking_node_hostname\": node_hostname,\n \"benchmarking_namespace\": namespace,\n }\n\n if pvc_name is not None:\n opts[\"benchmarking_coco_dataset_pvc_name\"] = pvc_name\n print(\n f\"Using '{pvc_name}' as PVC where the coco dataset is stored.\"\n )\n\n if epochs:\n try:\n epochs = str(int(epochs))\n opts[\"benchmarking_epochs\"] = epochs\n except ValueError:\n print(\"ERROR: epochs must be of type int\")\n exit(1)\n if threshold:\n try:\n threshold = str(float(threshold))\n opts[\"benchmarking_threshold\"] = threshold\n except ValueError:\n print(\"ERROR: threshold must be of type float\")\n exit(1)\n\n return RunAnsibleRole(\"benchmarking_run_mlperf_ssd\", opts)\n", "id": "12015117", "language": "Python", "matching_score": 0.36107760667800903, "max_stars_count": 1, "path": "toolbox/benchmarking.py" } ]
1.888667
zecrazytux
[ { "content": "# Copyright (c) 2014 Alcatel-Lucent Enterprise\n# All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n# License for the specific language governing permissions and limitations\n# under the License.\n\nfrom facette.connection import Connection\nfrom facette.v1.server import Server\nfrom facette.v1.catalog import Catalog\nfrom facette.v1.library import Library\n\nclass Facette:\n def __init__(self, uri, user = None, passwd = <PASSWORD>):\n self.uri = uri\n if self.uri.endswith('/'):\n self.uri = self.uri[:-1]\n self.user = user\n self.passwd = <PASSWORD>\n self.c = Connection(self.uri, self.user, self.passwd)\n\n self.server = Server(self.c)\n self.catalog = Catalog(self.c)\n self.library = Library(self.c)\n", "id": "9455720", "language": "Python", "matching_score": 1.456572413444519, "max_stars_count": 1, "path": "src/facette/client.py" }, { "content": "# Copyright (c) 2014 Alcatel-Lucent Enterprise\n# All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n# License for the specific language governing permissions and limitations\n# under the License.\n\nimport json\nimport requests\n\nCONTENT_TYPE_JSON = 'application/json'\nHEADER_X_TOTAL_RECORDS = 'X-Total-Records'\n\nRESULT_OK = 200\nRESULT_CREATED = 201\nRESULT_BAD_REQUEST = 400\nRESULT_NOT_FOUND = 404\nRESULT_NOT_ALLOWED = 405\nRESULT_CONFLICT = 409\nRESULT_UNSUPPORTED = 415\nRESULT_UNAVAILABLE = 503\n\nclass Connection:\n def __init__(self, root, user = None, passwd = None):\n self.root = root\n self.auth = None\n if user and passwd:\n self.auth = (user, passwd)\n self.headers = { 'content-type': CONTENT_TYPE_JSON }\n\n def get(self, uri, payload=None):\n r = requests.get(self.root + uri, auth=self.auth, params=payload)\n #if HEADER_X_TOTAL_RECORDS in r.headers:\n # print r.headers[HEADER_X_TOTAL_RECORDS]\n if r.headers['content-type'].startswith(CONTENT_TYPE_JSON):\n return r.status_code, r.json()\n else:\n return r.status_code, r.text\n\n def post(self, uri, payload):\n r = requests.post(self.root + uri, data=payload,\n auth=self.auth, headers=self.headers)\n if r.headers['content-type'].startswith(CONTENT_TYPE_JSON):\n return r.status_code, r.json()\n else:\n return r.status_code, r.text\n\n def put(self, uri, payload):\n r = requests.put(self.root + uri, data=payload,\n auth=self.auth, headers=self.headers)\n if r.headers['content-type'].startswith(CONTENT_TYPE_JSON):\n return r.status_code, r.json()\n else:\n return r.status_code, r.text\n\n def delete(self, uri):\n r = requests.delete(self.root + uri, auth=self.auth)\n if r.headers['content-type'].startswith(CONTENT_TYPE_JSON):\n return r.status_code, r.json()\n else:\n return r.status_code, r.text\n", "id": "4657934", "language": "Python", "matching_score": 1.1965235471725464, "max_stars_count": 1, "path": "src/facette/connection.py" }, { "content": "# Copyright (c) 2014 Alcatel-Lucent Enterprise\n# All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n# License for the specific language governing permissions and limitations\n# under the License.\n\nfrom facette.connection import *\nfrom facette.utils import *\nfrom facette.v1.plot import Plot\nimport json\n\nclass Plots:\n def __init__(self, c):\n self.root = \"/api/v1/library/graphs/plots\"\n self.c = c\n\n def get(self, name, range):\n payload = {}\n payload_add(payload, 'id', name)\n payload_add(payload, 'range', range)\n code, js = self.c.post(self.root, json.dumps(payload))\n g = None\n if code == RESULT_OK:\n g = Plot(js)\n return g\n", "id": "4246267", "language": "Python", "matching_score": 2.3624839782714844, "max_stars_count": 1, "path": "src/facette/v1/plots.py" }, { "content": "from facette.connection import *\nfrom facette.utils import *\nfrom facette.v1.graph import Graph\nfrom facette.v1.plots import Plots\nimport json\n\nclass Graphs:\n def __init__(self, c):\n self.root = \"/api/v1/library/graphs/\"\n self.c = c\n self.plots = Plots(self.c)\n\n def list(self, collection=None, filter=None, limit=None, offset=None):\n payload = {}\n payload_add(payload, 'collection', collection)\n payload_add(payload, 'filter', filter)\n payload_add(payload, 'limit', limit)\n payload_add(payload, 'offset', offset)\n code, js = self.c.get(self.root, payload)\n\n graphs = []\n if code == RESULT_OK:\n for x in js:\n g = Graph(x)\n graphs.append(g)\n return graphs\n\n def get(self, name):\n code, js = self.c.get(self.root + name)\n g = None\n if code == RESULT_OK:\n g = Graph(js)\n return g\n\n def add(self, c):\n payload = str(c)\n code, js = self.c.post(self.root, payload)\n return facette_http_status(code, RESULT_CREATED, js)\n\n def update(self, id, c):\n payload = str(c)\n code, js = self.c.put(self.root + id, payload)\n return facette_http_status(code, RESULT_OK, js)\n\n def delete(self, id):\n code, js = self.c.delete(self.root + id)\n return facette_http_status(code, RESULT_OK, js)\n", "id": "3299915", "language": "Python", "matching_score": 3.2645115852355957, "max_stars_count": 1, "path": "src/facette/v1/graphs.py" }, { "content": "# Copyright (c) 2014 Alcatel-Lucent Enterprise\n# All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n# License for the specific language governing permissions and limitations\n# under the License.\n\nfrom facette.connection import *\nfrom facette.utils import *\nfrom facette.v1.scale import Scale\nimport json\n\nclass Scales:\n def __init__(self, c):\n self.root = \"/api/v1/library/scales/\"\n self.c = c\n\n def list(self, filter=None, limit=None, offset=None):\n payload = {}\n payload_add(payload, 'filter', filter)\n payload_add(payload, 'limit', limit)\n payload_add(payload, 'offset', offset)\n code, js = self.c.get(self.root, payload)\n\n scales = []\n if code == RESULT_OK:\n for x in js:\n s = Scale(x)\n scales.append(s)\n return scales\n\n def list_values(self):\n code, js = self.c.get(self.root + \"values\")\n scales = []\n if code == RESULT_OK:\n for x in js:\n s = Scale(x)\n scales.append(s)\n return scales\n\n def get(self, name):\n code, js = self.c.get(self.root + name)\n s = None\n if code == RESULT_OK:\n s = Scale(js)\n return s\n\n def add(self, s):\n payload = str(s)\n code, js = self.c.post(self.root, payload)\n return facette_http_status(code, RESULT_CREATED, js)\n\n def update(self, id, s):\n payload = str(s)\n code, js = self.c.put(self.root + id, payload)\n return facette_http_status(code, RESULT_OK, js)\n\n def delete(self, id):\n code, js = self.c.delete(self.root + id)\n return facette_http_status(code, RESULT_OK, js)\n", "id": "5734033", "language": "Python", "matching_score": 3.4017534255981445, "max_stars_count": 1, "path": "src/facette/v1/scales.py" }, { "content": "# Copyright (c) 2014 Alcatel-Lucent Enterprise\n# All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n# License for the specific language governing permissions and limitations\n# under the License.\n\nfrom facette.connection import *\n\ndef facette_to_json(name, js, d):\n v = None\n if name in js:\n v = js[name]\n d[name] = v\n return v\n\ndef facette_set(v, f, d):\n if v is not None: d[f] = v\n return v\n\ndef payload_add(payload, name, v):\n if v:\n payload[name] = v\n\ndef facette_http_status(code, expected, js):\n if code == expected:\n return True\n else:\n print \"[Facette] Error {0}: {1}\".format(code, js['message'])\n return False\n", "id": "5747270", "language": "Python", "matching_score": 0.7046666145324707, "max_stars_count": 1, "path": "src/facette/utils.py" }, { "content": "# Copyright (c) 2014 Alcatel-Lucent Enterprise\n# All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n# License for the specific language governing permissions and limitations\n# under the License.\n\nfrom facette.utils import *\nimport json\n\nOPTIONS_TITLE = \"title\"\nOPTIONS_SAMPLE = \"sample\"\nOPTIONS_RANGE = \"range\"\nOPTIONS_PERCENTILES = \"percentiles\"\nOPTIONS_CONSTANTS = \"constants\"\nOPTIONS_ENABLED = \"enabled\"\n\nclass Options:\n def __init__(self, js=\"\"):\n self.options = {}\n self.title = facette_to_json(OPTIONS_TITLE, js, self.options)\n self.sample = facette_to_json(OPTIONS_SAMPLE, js, self.options)\n self.range = facette_to_json(OPTIONS_RANGE, js, self.options)\n self.percentiles = facette_to_json(OPTIONS_PERCENTILES, js, self.options)\n self.constants = facette_to_json(OPTIONS_CONSTANTS, js, self.options)\n self.enabled = facette_to_json(OPTIONS_ENABLED, js, self.options)\n\n def set(self, title=None, sample=None, range=None,\n percentiles=None, constants=None, enabled=None):\n self.title = facette_set(title, OPTIONS_TITLE, self.options)\n self.sample = facette_set(sample, OPTIONS_SAMPLE, self.options)\n self.range = facette_set(range, OPTIONS_RANGE, self.options)\n self.percentiles = facette_set(percentiles, OPTIONS_PERCENTILES, self.options)\n self.constants = facette_set(constants, OPTIONS_CONSTANTS, self.options)\n self.enabled = facette_set(enabled, OPTIONS_ENABLED, self.options)\n\n def __str__(self):\n return json.dumps(self.options)\n\n def __repr__(self):\n return str(self)\n\n", "id": "11232009", "language": "Python", "matching_score": 0.7274394631385803, "max_stars_count": 1, "path": "src/facette/v1/options.py" }, { "content": "# Copyright (c) 2014 Alcatel-Lucent Enterprise\n# All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n# License for the specific language governing permissions and limitations\n# under the License.\n\nfrom facette.utils import *\nfrom facette.v1.plotseriesummary import PlotSerieSummary\nimport json\n\nPLOT_SERIE_NAME = \"name\"\nPLOT_SERIE_STACK_ID = \"stack_id\"\nPLOT_SERIE_SUMMARY = \"summary\"\nPLOT_SERIE_PLOTS = \"plots\"\n\nclass PlotSerie:\n def __init__(self, js=\"\"):\n self.serie = {}\n self.name = facette_to_json(PLOT_SERIE_NAME, js, self.serie)\n self.stack_id = facette_to_json(PLOT_SERIE_STACK_ID, js, self.serie)\n self.plots = facette_to_json(PLOT_SERIE_PLOTS, js, self.serie)\n\n self.summary = {}\n if PLOT_SERIE_SUMMARY in js:\n self.summary = PlotSerieSummary(js[PLOT_SERIE_SUMMARY])\n self.serie[PLOT_SERIE_SUMMARY] = self.summary\n\n def __str__(self):\n js = self.serie\n summary = json.loads(str(self.summary))\n js[PLOT_SERIE_SUMMARY] = summary\n return json.dumps(js)\n\n def __repr__(self):\n return str(self)\n", "id": "8820225", "language": "Python", "matching_score": 2.122100830078125, "max_stars_count": 1, "path": "src/facette/v1/plotserie.py" }, { "content": "# Copyright (c) 2014 Alcatel-Lucent Enterprise\n# All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. 
You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n# License for the specific language governing permissions and limitations\n# under the License.\n\nfrom facette.utils import *\nfrom facette.v1.plotserie import PlotSerie\nimport json\n\nPLOT_ID = \"id\"\nPLOT_NAME = \"name\"\nPLOT_DESCRIPTION = \"description\"\nPLOT_TYPE = \"type\"\nPLOT_SERIES = \"series\"\nPLOT_STACK_MODE = \"stack_mode\"\nPLOT_START = \"start\"\nPLOT_END = \"end\"\nPLOT_STEP = \"step\"\nPLOT_MODIFIED = \"modified\"\nPLOT_UNIT_LABEL = \"unit_label\"\nPLOT_UNIT_TYPE = \"unit_type\"\n\nGRAPH_TYPE_AREA = 1\nGRAPH_TYPE_LINE = 2\n\nSTACK_MODE_NONE = 1\nSTACK_MODE_NORMAL = 2\nSTACK_MODE_PERCENT = 3\n\nclass Plot:\n def __init__(self, js=\"\"):\n self.plot = {}\n self.id = facette_to_json(PLOT_ID, js, self.plot)\n self.name = facette_to_json(PLOT_NAME, js, self.plot)\n self.description = facette_to_json(PLOT_DESCRIPTION, js, self.plot)\n self.type = facette_to_json(PLOT_TYPE, js, self.plot)\n self.stack_mode = facette_to_json(PLOT_STACK_MODE, js, self.plot)\n self.start = facette_to_json(PLOT_START, js, self.plot)\n self.end = facette_to_json(PLOT_END, js, self.plot)\n self.step = facette_to_json(PLOT_STEP, js, self.plot)\n self.modified = facette_to_json(PLOT_MODIFIED, js, self.plot)\n self.unit_label = facette_to_json(PLOT_UNIT_LABEL, js, self.plot)\n self.unit_type = facette_to_json(PLOT_UNIT_TYPE, js, self.plot)\n\n self.series = []\n if js.get(PLOT_SERIES):\n for x in js[PLOT_SERIES]:\n e = PlotSerie(x)\n self.series.append(e)\n self.plot[PLOT_SERIES] = self.series\n\n def __str__(self):\n js = self.plot\n series = []\n for s in self.series:\n series.append(json.loads(str(s)))\n js[PLOT_SERIES] = series\n return json.dumps(js)\n\n def __repr__(self):\n return str(self)\n", "id": "1718773", "language": "Python", "matching_score": 3.7423176765441895, "max_stars_count": 1, "path": "src/facette/v1/plot.py" }, { "content": "# Copyright (c) 2014 Alcatel-Lucent Enterprise\n# All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n# License for the specific language governing permissions and limitations\n# under the License.\n\nfrom facette.utils import *\nfrom facette.v1.graphgroup import GraphGroup\nimport json\n\nGRAPH_ID = \"id\"\nGRAPH_NAME = \"name\"\nGRAPH_DESCRIPTION = \"description\"\nGRAPH_MODIFIED = \"modified\"\nGRAPH_TYPE = \"type\"\nGRAPH_STACK_MODE = \"stack_mode\"\nGRAPH_UNIT_LABEL = \"unit_label\"\nGRAPH_UNIT_TYPE = \"unit_type\"\nGRAPH_GROUPS = \"groups\"\n\nGRAPH_TYPE_AREA = 1\nGRAPH_TYPE_LINE = 2\n\nSTACK_MODE_NONE = 1\nSTACK_MODE_NORMAL = 2\nSTACK_MODE_PERCENT = 3\n\nUNIT_TYPE_FIXED = 1\nUNIT_TYPE_METRIC = 2\n\nclass Graph:\n def __init__(self, js=\"\"):\n self.graph = {}\n self.id = facette_to_json(GRAPH_ID, js, self.graph)\n self.name = facette_to_json(GRAPH_NAME, js, self.graph)\n self.description = facette_to_json(GRAPH_DESCRIPTION, js, self.graph)\n self.modified = facette_to_json(GRAPH_MODIFIED, js, self.graph)\n self.type = facette_to_json(GRAPH_TYPE, js, self.graph)\n self.stack_mode = facette_to_json(GRAPH_STACK_MODE, js, self.graph)\n self.unit_label = facette_to_json(GRAPH_UNIT_LABEL, js, self.graph)\n self.unit_type = facette_to_json(GRAPH_UNIT_TYPE, js, self.graph)\n\n self.groups = []\n if GRAPH_GROUPS in js:\n for x in js[GRAPH_GROUPS]:\n e = GraphGroup(x)\n self.groups.append(e)\n self.graph[GRAPH_GROUPS] = self.groups\n\n def set(self, id=None, name=None, description=None, type=None,\n stack_mode=None, unit_label=None, unit_type=None, groups=None):\n self.id = facette_set(id, GRAPH_ID, self.graph)\n self.name = facette_set(name, GRAPH_NAME, self.graph)\n self.description = facette_set(description, GRAPH_DESCRIPTION, self.graph)\n self.type = facette_set(type, GRAPH_TYPE, self.graph)\n self.stack_mode = facette_set(stack_mode, GRAPH_STACK_MODE, self.graph)\n self.unit_label = facette_set(unit_label, GRAPH_UNIT_LABEL, self.graph)\n self.unit_type = facette_set(unit_type, GRAPH_UNIT_TYPE, self.graph)\n\n if groups:\n for x in groups:\n self.groups.append(x)\n\n def __str__(self):\n js = self.graph\n groups = []\n for g in self.groups:\n groups.append(json.loads(str(g)))\n js[GRAPH_GROUPS] = groups\n return json.dumps(js)\n\n def __repr__(self):\n return str(self)\n", "id": "8043042", "language": "Python", "matching_score": 1.922206163406372, "max_stars_count": 1, "path": "src/facette/v1/graph.py" }, { "content": "# Copyright (c) 2014 Alcatel-Lucent Enterprise\n# All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n# License for the specific language governing permissions and limitations\n# under the License.\n\nfrom facette.utils import *\nimport json\n\nSTAT_CATALOG_UPDATED = \"catalog_updated\"\nSTAT_GROUPS = \"groups\"\nSTAT_COLLECTIONS = \"collections\"\nSTAT_GRAPHS = \"graphs\"\nSTAT_METRICS = \"metrics\"\nSTAT_SOURCES = \"sources\"\nSTAT_ORIGINS = \"origins\"\n\nclass Stats:\n def __init__(self, js):\n self.stats = {}\n self.catalog_updated = facette_to_json(STAT_CATALOG_UPDATED, js, self.stats)\n self.groups = facette_to_json(STAT_GROUPS, js, self.stats)\n self.collections = facette_to_json(STAT_COLLECTIONS, js, self.stats)\n self.graphs = facette_to_json(STAT_GRAPHS, js, self.stats)\n self.metrics = facette_to_json(STAT_METRICS, js, self.stats)\n self.sources = facette_to_json(STAT_SOURCES, js, self.stats)\n self.origins = facette_to_json(STAT_ORIGINS, js, self.stats)\n\n def __str__(self):\n return json.dumps(self.stats)\n", "id": "5658461", "language": "Python", "matching_score": 2.619558572769165, "max_stars_count": 1, "path": "src/facette/v1/stats.py" }, { "content": "# Copyright (c) 2014 Alcatel-Lucent Enterprise\n# All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n# License for the specific language governing permissions and limitations\n# under the License.\n\nfrom facette.utils import *\nimport json\n\nMETRIC_NAME = \"name\"\nMETRIC_ORIGINS = \"origins\"\nMETRIC_SOURCES = \"sources\"\nMETRIC_UPDATED = \"updated\"\n\nclass Metric:\n def __init__(self, js):\n self.metric = {}\n self.name = facette_to_json(METRIC_NAME, js, self.metric)\n self.origins = facette_to_json(METRIC_ORIGINS, js, self.metric)\n self.sources = facette_to_json(METRIC_SOURCES, js, self.metric)\n self.updated = facette_to_json(METRIC_UPDATED, js, self.metric)\n\n def __str__(self):\n return json.dumps(self.metric)\n", "id": "3109745", "language": "Python", "matching_score": 2.2407681941986084, "max_stars_count": 1, "path": "src/facette/v1/metric.py" }, { "content": "from facette.utils import *\nimport json\n\nORIGIN_NAME = \"name\"\nORIGIN_CONNECTOR = \"connector\"\nORIGIN_UPDATED = \"updated\"\n\nclass Origin:\n def __init__(self, js):\n self.origin = {}\n self.name = facette_to_json(ORIGIN_NAME, js, self.origin)\n self.connector = facette_to_json(ORIGIN_CONNECTOR, js, self.origin)\n self.updated = facette_to_json(ORIGIN_UPDATED, js, self.origin)\n\n def __str__(self):\n return json.dumps(self.origin)\n", "id": "9776067", "language": "Python", "matching_score": 0.39183345437049866, "max_stars_count": 1, "path": "src/facette/v1/origin.py" }, { "content": "# Copyright (c) 2014 Alcatel-Lucent Enterprise\n# All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n# License for the specific language governing permissions and limitations\n# under the License.\n\nfrom facette.utils import *\nfrom facette.v1.collectionentry import CollectionEntry\nimport json\n\nCOLLECTION_ID = \"id\"\nCOLLECTION_NAME = \"name\"\nCOLLECTION_DESCRIPTION = \"description\"\nCOLLECTION_PARENT = \"parent\"\nCOLLECTION_HAS_CHILDREN = \"has_children\"\nCOLLECTION_MODIFIED = \"modified\"\nCOLLECTION_ENTRIES = \"entries\"\n\nclass Collection:\n def __init__(self, js=\"\"):\n self.collection = {}\n self.id = facette_to_json(COLLECTION_ID,\n js, self.collection)\n self.name = facette_to_json(COLLECTION_NAME,\n js, self.collection)\n self.description = facette_to_json(COLLECTION_DESCRIPTION,\n js, self.collection)\n self.parent = facette_to_json(COLLECTION_PARENT,\n js, self.collection)\n self.has_children = facette_to_json(COLLECTION_HAS_CHILDREN,\n js, self.collection)\n self.modified = facette_to_json(COLLECTION_MODIFIED,\n js, self.collection)\n\n self.entries = []\n if COLLECTION_ENTRIES in js:\n for x in js[COLLECTION_ENTRIES]:\n e = CollectionEntry(x)\n self.entries.append(e)\n self.collection[COLLECTION_ENTRIES] = self.entries\n\n def set(self, id=None, name=None, description=None, parent=None,\n has_children=None, entries=None):\n\n self.id = facette_set(id, COLLECTION_ID, self.collection)\n self.name = facette_set(name, COLLECTION_NAME, self.collection)\n self.description = facette_set(description,\n COLLECTION_DESCRIPTION, self.collection)\n self.parent = facette_set(parent, COLLECTION_PARENT, self.collection)\n self.has_children = facette_set(has_children,\n COLLECTION_HAS_CHILDREN, self.collection)\n\n if entries:\n self.entries = []\n for x in entries:\n self.entries.append(x)\n\n def __str__(self):\n js = self.collection\n entries = []\n for e in self.entries:\n entries.append(json.loads(str(e)))\n js[COLLECTION_ENTRIES] = entries\n return json.dumps(js)\n\n def __repr__(self):\n return str(self)\n", "id": "10066521", "language": "Python", "matching_score": 2.6090707778930664, "max_stars_count": 1, "path": "src/facette/v1/collection.py" }, { "content": "# Copyright (c) 2014 Alcatel-Lucent Enterprise\n# All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n# License for the specific language governing permissions and limitations\n# under the License.\n\nfrom facette.utils import *\nfrom facette.v1.groupentry import GroupEntry\nimport json\n\nGROUP_ID = \"id\"\nGROUP_NAME = \"name\"\nGROUP_DESCRIPTION = \"description\"\nGROUP_MODIFIED = \"modified\"\nGROUP_ENTRIES = \"entries\"\n\nclass Group:\n def __init__(self, js=\"\"):\n self.group = {}\n self.id = facette_to_json(GROUP_ID, js, self.group)\n self.name = facette_to_json(GROUP_NAME, js, self.group)\n self.description = facette_to_json(GROUP_DESCRIPTION, js, self.group)\n self.modified = facette_to_json(GROUP_MODIFIED, js, self.group)\n\n self.entries = []\n if GROUP_ENTRIES in js:\n for x in js[GROUP_ENTRIES]:\n e = GroupEntry(x)\n self.entries.append(e)\n self.group[GROUP_ENTRIES] = self.entries\n\n def set(self, id=None, name=None, description=None, entries=None):\n self.id = facette_set(id, GROUP_ID, self.group)\n self.name = facette_set(name, GROUP_NAME, self.group)\n self.description = facette_set(description, GROUP_DESCRIPTION, self.group)\n if entries:\n for x in entries:\n self.entries.append(x)\n\n def __str__(self):\n js = self.group\n entries = []\n for e in self.entries:\n entries.append(json.loads(str(e)))\n js[GROUP_ENTRIES] = entries\n return json.dumps(js)\n\n def __repr__(self):\n return str(self)\n", "id": "812116", "language": "Python", "matching_score": 1.1519747972488403, "max_stars_count": 1, "path": "src/facette/v1/group.py" }, { "content": "from facette.utils import *\nfrom facette.v1.graphgroupserie import GraphGroupSerie\nimport json\n\nGRAPH_GROUP_NAME = \"name\"\nGRAPH_GROUP_TYPE = \"type\"\nGRAPH_GROUP_STACK_ID = \"stack_id\"\nGRAPH_GROUP_SERIES = \"series\"\nGRAPH_GROUP_SCALE = \"scale\"\n\nclass GraphGroup:\n def __init__(self, js=\"\"):\n self.group = {}\n self.name = facette_to_json(GRAPH_GROUP_NAME, js, self.group)\n self.type = facette_to_json(GRAPH_GROUP_TYPE, js, self.group)\n self.stack_id = facette_to_json(GRAPH_GROUP_STACK_ID, js, self.group)\n\n self.series = []\n if GRAPH_GROUP_SERIES in js:\n for x in js[GRAPH_GROUP_SERIES]:\n e = GraphGroupSerie(x)\n self.series.append(e)\n self.group[GRAPH_GROUP_SERIES] = self.series\n\n self.scale = facette_to_json(GRAPH_GROUP_SCALE, js, self.group)\n\n def set(self, name=None, type=None, stack_id=None, series=None, scale=None):\n self.name = facette_set(name, GRAPH_GROUP_NAME, self.group)\n self.type = facette_set(type, GRAPH_GROUP_TYPE, self.group)\n self.stack_id = facette_set(stack_id, GRAPH_GROUP_STACK_ID, self.group)\n if series:\n for x in series:\n self.series.append(x)\n self.scale = facette_set(scale, GRAPH_GROUP_SCALE, self.group)\n\n def __str__(self):\n js = self.group\n series = []\n for s in self.series:\n series.append(json.loads(str(s)))\n js[GRAPH_GROUP_SERIES] = series\n return json.dumps(js)\n\n def __repr__(self):\n return str(self)\n", "id": "12009255", "language": "Python", "matching_score": 2.6863648891448975, "max_stars_count": 1, "path": "src/facette/v1/graphgroup.py" }, { "content": "from facette.utils import *\nimport json\n\nGRAPH_GROUP_SERIE_NAME = \"name\"\nGRAPH_GROUP_SERIE_ORIGIN = \"origin\"\nGRAPH_GROUP_SERIE_SOURCE = \"source\"\nGRAPH_GROUP_SERIE_METRIC = \"metric\"\nGRAPH_GROUP_SERIE_SCALE = \"scale\"\n\nclass GraphGroupSerie:\n def __init__(self, js=\"\"):\n self.serie = {}\n self.name = facette_to_json(GRAPH_GROUP_SERIE_NAME, js, self.serie)\n self.origin = facette_to_json(GRAPH_GROUP_SERIE_ORIGIN, js, self.serie)\n self.source = 
facette_to_json(GRAPH_GROUP_SERIE_SOURCE, js, self.serie)\n self.metric = facette_to_json(GRAPH_GROUP_SERIE_METRIC, js, self.serie)\n self.scale = facette_to_json(GRAPH_GROUP_SERIE_SCALE, js, self.serie)\n\n def set(self, name=None, origin=None, source=None, metric=None, scale=None):\n self.name = facette_set(name, GRAPH_GROUP_SERIE_NAME, self.serie)\n self.origin = facette_set(origin, GRAPH_GROUP_SERIE_ORIGIN, self.serie)\n self.source = facette_set(source, GRAPH_GROUP_SERIE_SOURCE, self.serie)\n self.metric = facette_set(metric, GRAPH_GROUP_SERIE_METRIC, self.serie)\n self.scale = facette_set(scale, GRAPH_GROUP_SERIE_SCALE, self.serie)\n\n def __str__(self):\n return json.dumps(self.serie)\n\n def __repr__(self):\n return str(self)\n", "id": "7941299", "language": "Python", "matching_score": 0.576041579246521, "max_stars_count": 1, "path": "src/facette/v1/graphgroupserie.py" }, { "content": "#!/usr/bin/env python2.7\n\nimport os, sys\nfrom facette.client import Facette\nfrom facette.v1.collectionentry import CollectionEntry\nfrom facette.v1.options import Options\nfrom facette.v1.collection import Collection\nfrom facette.v1.scale import Scale\n\n# use UTF-8 encoding instead of unicode to support more characters\nreload(sys)\nsys.setdefaultencoding(\"utf-8\")\n\nsrv = \"http://demo.facette.io/\"\nfc = Facette(srv)\n\ncode = fc.server.reload()\nprint code\ncode, s = fc.server.stats()\nprint s\no = fc.catalog.origins.list(filter=\"glob:co*\", limit=1)\nprint o\no = fc.catalog.origins.get('collectd')\nprint o\ns = fc.catalog.sources.list()\nprint s\ns = fc.catalog.sources.get('host1.example.net')\nprint s\nm = fc.catalog.metrics.list()\nprint m\nm = fc.catalog.metrics.get('cpu.0.user')\nprint m\nc = fc.catalog.list()\nprint c\nc = fc.library.collections.list()\nprint c\nc = fc.library.collections.get('8c63a76b-3f62-4c8d-4954-b9338e0d144a')\nprint c\no = Options()\nprint o\no.set(title=\"opt title\", percentiles=95)\nprint o\ne = CollectionEntry()\ne.set(id=\"123\", options=o)\nprint e\nc = Collection()\nc.set(entries=[e])\nfc.library.collections.add(c)\n\ns = fc.library.scales.list()\nprint s\ns = fc.library.scales.list_values()\nprint s\ns = fc.library.scales.get('8895f732-8647-4f1f-79f4-0cb56afc6680')\nprint s\ns = Scale()\ns.set(name=\"My Scale\", description=\"My Description\", value=\"10\")\nfc.library.scales.add(s)\n\ng = fc.library.sourcegroups.list()\nprint g\ns = fc.library.sourcegroups.get('588bf36f-fb30-4800-40df-9e7c61f83ca6')\nprint s\ng = fc.library.metricgroups.list()\nprint g\ngr = fc.library.graphs.list()\nfor g in gr:\n print g\ng = fc.library.graphs.get('44de3587-c2c7-49ff-698b-cbae8d2a0085')\nprint g\n\n", "id": "591817", "language": "Python", "matching_score": 1.9747841358184814, "max_stars_count": 1, "path": "tests/test.py" }, { "content": "# Copyright (c) 2014 Alcatel-Lucent Enterprise\n# All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n# License for the specific language governing permissions and limitations\n# under the License.\n\n\"\"\"\nLibrary Interface\n\"\"\"\n\nfrom facette.connection import Connection\nfrom facette.v1.collections import Collections\nfrom facette.v1.graphs import Graphs\nfrom facette.v1.scales import Scales\nfrom facette.v1.sourcegroups import SourceGroups\nfrom facette.v1.metricgroups import MetricGroups\n\nclass Library:\n def __init__(self, c):\n self.c = c\n self.collections = Collections(self.c)\n self.graphs = Graphs(self.c)\n self.scales = Scales(self.c)\n self.sourcegroups = SourceGroups(self.c)\n self.metricgroups = MetricGroups(self.c)\n\n def list(self):\n code, js = self.c.get(\"/api/v1/library/\")\n return \"\"\n # o = None\n # if code == 200:\n # o = Origins(js)\n # return code, o\n", "id": "9723794", "language": "Python", "matching_score": 1.4170188903808594, "max_stars_count": 1, "path": "src/facette/v1/library.py" }, { "content": "# Copyright (c) 2014 Alcatel-Lucent Enterprise\n# All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n# License for the specific language governing permissions and limitations\n# under the License.\n\n\"\"\"\nServer Interface\n\"\"\"\n\nfrom facette.connection import Connection\nfrom facette.v1.stats import Stats\n\nclass Server:\n def __init__(self, c):\n self.c = c\n\n def reload(self):\n return self.c.get(\"/reload\")\n\n def stats(self):\n code, js = self.c.get(\"/api/v1/stats\")\n s = None\n if code == 200:\n s = Stats(js)\n return code, s\n", "id": "10536387", "language": "Python", "matching_score": 0.8986132144927979, "max_stars_count": 1, "path": "src/facette/v1/server.py" }, { "content": "# Copyright (c) 2014 Alcatel-Lucent Enterprise\n# All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n# License for the specific language governing permissions and limitations\n# under the License.\n\nfrom setuptools import find_packages\nfrom setuptools import setup\n\nlong_description = \"\"\"\\\npython-facette is a pure python SDK which allows connecting to Facette API.\n\"\"\"\n\npkgdir = {'': 'src'}\n\nsetup(\n name = 'facette',\n version = '1.2',\n description = 'Facette SDK for Python',\n keywords = 'facette REST API SDK',\n long_description = long_description,\n author = 'Alcatel-Lucent Enterprise Personal Cloud R&D',\n author_email = '<EMAIL>',\n url = 'https://github.com/OpenTouch/python-facette',\n package_dir=pkgdir,\n packages=find_packages('src'),\n platforms = ['All'],\n license = 'Apache 2.0',\n)\n", "id": "1256239", "language": "Python", "matching_score": 0.8595550060272217, "max_stars_count": 1, "path": "setup.py" } ]
1.922206
carrier-io
[ { "content": "from time import sleep\n\n\ndef wait_for_page_to_load(page_load_timeout):\n sleep(page_load_timeout)\n", "id": "6932335", "language": "Python", "matching_score": 0, "max_stars_count": 0, "path": "observer_hub/wait.py" }, { "content": "import smtplib\nimport ssl\nfrom email.mime.multipart import MIMEMultipart\nfrom email.mime.text import MIMEText\n\n\nclass EmailClient(object):\n\n def __init__(self, arguments):\n self.host = arguments['smtp_host']\n self.port = int(arguments['smtp_port'])\n self.user = arguments['smtp_user']\n self.password = arguments['smtp_password']\n self.sender = arguments['smtp_sender']\n if self.sender is None:\n self.sender = self.user\n\n def send_email(self, email):\n with smtplib.SMTP_SSL(host=self.host, port=self.port) as server:\n server.ehlo()\n server.login(self.user, self.password)\n\n for recipient in email.users_to:\n if all(i in recipient for i in [\"<mailto:\", \"|\"]):\n recipient = recipient.split(\"|\")[1].replace(\">\", \"\").replace(\"<\", \"\")\n msg_root = MIMEMultipart('related')\n msg_root['Subject'] = email.subject\n msg_root['From'] = self.sender\n msg_root['To'] = recipient\n msg_alternative = MIMEMultipart('alternative')\n msg_alternative.attach(MIMEText(email.email_body, 'html'))\n msg_root.attach(msg_alternative)\n for chart in email.charts:\n msg_root.attach(chart)\n\n server.sendmail(self.sender, recipient, msg_root.as_string())\n print('Send')\n", "id": "6250491", "language": "Python", "matching_score": 1.565253734588623, "max_stars_count": 0, "path": "email_client.py" }, { "content": "# Copyright 2019 getcarrier.io\n\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom perfreporter.data_manager import DataManager\n\nfrom report_builder import ReportBuilder\n\n\nclass Email(object):\n\n def __init__(self, test_name, subject, users_to, email_body, charts, date):\n self.test_name = test_name\n self.subject = subject\n self.email_body = email_body\n self.charts = charts\n self.date = date\n self.users_to = users_to\n\n\nclass ApiEmailNotification:\n def __init__(self, arguments):\n self.args = arguments\n self.data_manager = DataManager(arguments, arguments[\"galloper_url\"], arguments[\"token\"],\n arguments[\"project_id\"])\n self.report_builder = ReportBuilder()\n\n def email_notification(self):\n tests_data, last_test_data, baseline, violation, compare_with_thresholds = self.data_manager.get_api_test_info()\n email_body, charts, date = self.report_builder.create_api_email_body(self.args, tests_data, last_test_data,\n baseline,\n self.args['comparison_metric'],\n violation, compare_with_thresholds)\n\n subject = \"[\" + str(self.args['notification_type']) + \"] \"\n subject += \"Test results for \\\"\" + str(self.args['test'])\n subject += \"\\\". Users count: \" + str(self.args['users']) + \". 
From \" + str(date) + \".\"\n\n return Email(self.args['test'], subject, self.args['user_list'], email_body, charts, date)\n", "id": "8147990", "language": "Python", "matching_score": 2.2410221099853516, "max_stars_count": 0, "path": "email_notifications.py" }, { "content": "# Copyright 2019 getcarrier.io\n\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\nimport time\nimport calendar\nimport datetime\nfrom chart_generator import alerts_linechart, barchart, ui_comparison_linechart\nfrom email.mime.image import MIMEImage\nimport statistics\nfrom jinja2 import Environment, FileSystemLoader\n\nGREEN = '#028003'\nYELLOW = '#FFA400'\nRED = '#FF0000'\nGRAY = '#CCCCCC'\n\n\nclass ReportBuilder:\n\n def create_api_email_body(self, args, tests_data, last_test_data, baseline, comparison_metric,\n violation, thresholds=None):\n test_description = self.create_test_description(args, last_test_data, baseline, comparison_metric, violation)\n builds_comparison = self.create_builds_comparison(tests_data)\n general_metrics = self.get_general_metrics(builds_comparison[0], baseline, thresholds)\n charts = self.create_charts(builds_comparison, last_test_data, baseline, comparison_metric)\n baseline_and_thresholds = self.get_baseline_and_thresholds(last_test_data, baseline, comparison_metric,\n thresholds)\n\n email_body = self.get_api_email_body(test_description, last_test_data, baseline, builds_comparison,\n baseline_and_thresholds, general_metrics)\n return email_body, charts, str(test_description['start']).split(\" \")[0]\n\n def create_ui_email_body(self, tests_data, last_test_data):\n test_params = self.create_ui_test_discription(last_test_data)\n top_five_thresholds = self.get_baseline_and_thresholds(last_test_data, None, 'time')\n builds_comparison = self.create_ui_builds_comparison(tests_data)\n charts = self.create_ui_charts(last_test_data, builds_comparison)\n last_test_data = self.aggregate_last_test_results(last_test_data)\n email_body = self.get_ui_email_body(test_params, top_five_thresholds, builds_comparison, last_test_data)\n return email_body, charts, str(test_params['start_time']).split(\" \")[0]\n\n def create_test_description(self, args, test, baseline, comparison_metric, violation):\n params = ['simulation', 'users', 'duration']\n test_params = {}\n for param in params:\n test_params[param] = test[0][param]\n test_params['end'] = str(test[0]['time']).replace(\"T\", \" \").replace(\"Z\", \"\")\n timestamp = calendar.timegm(time.strptime(test_params['end'], '%Y-%m-%d %H:%M:%S'))\n test_params['start'] = datetime.datetime.utcfromtimestamp(int(timestamp) - int(float(test[0]['duration']))) \\\n .strftime('%Y-%m-%d %H:%M:%S')\n test_params['status'], test_params['color'], test_params['failed_reason'] = self.check_status(args,\n test, baseline, comparison_metric, violation)\n return test_params\n\n @staticmethod\n def create_ui_test_discription(test):\n description = {'start_time': datetime.datetime.utcfromtimestamp(int(test[0]['start_time']) / 1000).strftime(\n '%Y-%m-%d %H:%M:%S'), 'scenario': 
test[0]['scenario'], 'suite': test[0]['suite']}\n count, errors = 0, 0\n for page in test:\n count += int(page['count'])\n errors += int(page['failed'])\n error_rate = round(errors * 100 / count, 2)\n failed_reasons = []\n if error_rate > 10:\n description['status'] = 'FAILED'\n failed_reasons.append('error rate - ' + str(error_rate) + ' %')\n else:\n description['status'] = 'SUCCESS'\n page_count, missed_thresholds = 0, 0\n for page in test:\n page_count += 1\n if page['time_threshold'] != 'green':\n missed_thresholds += 1\n missed_thresholds_rate = round(missed_thresholds * 100 / page_count, 2)\n if missed_thresholds_rate > 50:\n description['status'] = 'FAILED'\n failed_reasons.append('missed thresholds - ' + str(missed_thresholds_rate) + ' %')\n if description['status'] == 'SUCCESS':\n description['color'] = GREEN\n else:\n description['color'] = RED\n description['failed_reason'] = failed_reasons\n return description\n\n def check_status(self, args, test, baseline, comparison_metric, violation):\n failed_reasons = []\n test_status, failed_message = self.check_functional_issues(args[\"error_rate\"], test)\n if failed_message != '':\n failed_reasons.append(failed_message)\n status, failed_message = self.check_performance_degradation(args[\"performance_degradation_rate\"], test, baseline, comparison_metric)\n if failed_message != '':\n failed_reasons.append(failed_message)\n if test_status == 'SUCCESS':\n test_status = status\n status, failed_message = self.check_missed_thresholds(args[\"missed_thresholds\"], violation)\n if failed_message != '':\n failed_reasons.append(failed_message)\n if test_status == 'SUCCESS':\n test_status = status\n if test_status == 'SUCCESS':\n color = GREEN\n else:\n color = RED\n return test_status, color, failed_reasons\n\n @staticmethod\n def check_functional_issues(_error_rate, test):\n request_count, error_count = 0, 0\n for request in test:\n request_count += int(request['total'])\n error_count += int(request['ko'])\n error_rate = round(error_count * 100 / request_count, 2)\n if error_rate > _error_rate:\n return 'FAILED', 'error rate - ' + str(error_rate) + ' %'\n return 'SUCCESS', ''\n\n @staticmethod\n def check_performance_degradation(degradation_rate, test, baseline, comparison_metric):\n if baseline:\n request_count, performance_degradation = 0, 0\n for request in test:\n request_count += 1\n for baseline_request in baseline:\n if request['request_name'] == baseline_request['request_name']:\n if int(request[comparison_metric]) > int(baseline_request[comparison_metric]):\n performance_degradation += 1\n performance_degradation_rate = round(performance_degradation * 100 / request_count, 2)\n if performance_degradation_rate > degradation_rate:\n return 'FAILED', 'performance degradation rate - ' + str(performance_degradation_rate) + ' %'\n else:\n return 'SUCCESS', ''\n else:\n return 'SUCCESS', ''\n\n @staticmethod\n def check_missed_thresholds(missed_thresholds, violation):\n if violation > missed_thresholds:\n return 'FAILED', 'missed thresholds rate - ' + str(violation) + ' %'\n return 'SUCCESS', ''\n\n def create_builds_comparison(self, tests):\n builds_comparison = []\n for test in tests:\n summary_request, test_info = {}, {}\n for req in test:\n if req[\"request_name\"] == \"All\":\n summary_request = req\n\n date = str(test[0]['time']).replace(\"T\", \" \").replace(\"Z\", \"\")\n try:\n timestamp = calendar.timegm(time.strptime(date, '%Y-%m-%d %H:%M:%S'))\n except ValueError:\n timestamp = calendar.timegm(time.strptime(date.split(\".\")[0], 
'%Y-%m-%d %H:%M:%S'))\n if summary_request:\n test_info['date'] = datetime.datetime.utcfromtimestamp(int(timestamp)).strftime('%d-%b %H:%M')\n test_info['total'] = summary_request[\"total\"]\n test_info['throughput'] = round(summary_request[\"throughput\"], 2)\n test_info['pct95'] = summary_request[\"pct95\"]\n test_info['error_rate'] = round((summary_request[\"ko\"] / summary_request[\"total\"]) * 100, 2)\n builds_comparison.append(test_info)\n builds_comparison = self.calculate_diffs(builds_comparison)\n\n return builds_comparison\n\n def calculate_diffs(self, builds):\n builds_comparison = []\n last_build = builds[0]\n for build in builds:\n build_info_with_diffs = self.compare_builds(build, last_build)\n builds_comparison.append(build_info_with_diffs)\n return builds_comparison\n\n @staticmethod\n def compare_builds(build, last_build):\n build_info = {}\n for param in ['date', 'error_rate', 'pct95', 'total', 'throughput']:\n param_diff = None\n if param in ['error_rate']:\n param_diff = round(float(build[param]) - float(last_build.get(param, 0.0)), 2)\n color = RED if param_diff > 0.0 else GREEN\n if param in ['throughput', 'total']:\n param_diff = round(float(build[param]) - float(last_build.get(param, 0.0)), 2)\n color = RED if param_diff < 0.0 else GREEN\n if param in ['pct95']:\n param_diff = round((float(build[param]) - float(last_build[param])) / 1000, 2)\n color = RED if param_diff > 0.0 else GREEN\n if param_diff is not None:\n param_diff = f\"+{param_diff}\" if param_diff > 0 else str(param_diff)\n build_info[f'{param}_diff'] = f\"<p style=\\\"color: {color}\\\">{param_diff}</p>\"\n build_info[param] = build[param]\n return build_info\n\n def create_charts(self, builds, last_test_data, baseline, comparison_metric):\n charts = []\n if len(builds) > 1:\n charts.append(self.create_success_rate_chart(builds))\n charts.append(self.create_throughput_chart(builds))\n return charts\n\n @staticmethod\n def create_success_rate_chart(builds):\n labels, keys, values = [], [], []\n count = 1\n for test in builds:\n labels.append(test['date'])\n keys.append(round(100 - test['error_rate'], 2))\n values.append(count)\n count += 1\n datapoints = {\n 'title': 'Successful requests, %',\n 'label': 'Successful requests, %',\n 'x_axis': 'Test Runs',\n 'y_axis': 'Successful requests, %',\n 'width': 10,\n 'height': 3,\n 'path_to_save': '/tmp/success_rate.png',\n 'keys': keys[::-1],\n 'values': values,\n 'labels': labels[::-1]\n }\n alerts_linechart(datapoints)\n fp = open('/tmp/success_rate.png', 'rb')\n image = MIMEImage(fp.read())\n image.add_header('Content-ID', '<success_rate>')\n fp.close()\n return image\n\n @staticmethod\n def create_throughput_chart(builds):\n labels, keys, values = [], [], []\n count = 1\n for test in builds:\n labels.append(test['date'])\n keys.append(test['throughput'])\n values.append(count)\n count += 1\n datapoints = {\n 'title': 'Throughput',\n 'label': 'Throughput, req/s',\n 'x_axis': 'Test Runs',\n 'y_axis': 'Throughput, req/s',\n 'width': 10,\n 'height': 3,\n 'path_to_save': '/tmp/throughput.png',\n 'keys': keys[::-1],\n 'values': values,\n 'labels': labels[::-1]\n }\n alerts_linechart(datapoints)\n fp = open('/tmp/throughput.png', 'rb')\n image = MIMEImage(fp.read())\n image.add_header('Content-ID', '<throughput>')\n fp.close()\n return image\n\n @staticmethod\n def create_comparison_vs_baseline_barchart(last_test_data, baseline, comparison_metric):\n green_keys, yellow_keys, utility_key, green_request_value, yellow_request_value = [], [], [], [], []\n 
utility_request_value, green_request_name, yellow_request_name, utility_request_name = [], [], [], []\n count = 1\n for request in last_test_data:\n for baseline_request in baseline:\n if request['request_name'] == baseline_request['request_name']:\n if int(request[comparison_metric]) > int(baseline_request[comparison_metric]):\n yellow_keys.append(count)\n count += 1\n yellow_request_value.append(round(-float(request[comparison_metric]) / 1000, 2))\n yellow_request_name.append(request['request_name'])\n else:\n green_keys.append(count)\n count += 1\n green_request_value.append(round(float(request[comparison_metric]) / 1000, 2))\n green_request_name.append(request['request_name'])\n\n if len(green_keys) == 0:\n utility_key.append(count)\n count += 1\n utility_request_name.append('utility')\n utility_request_value.append(-max(yellow_request_value) / 2)\n if len(yellow_keys) == 0:\n utility_key.append(count)\n count += 1\n utility_request_name.append('utility')\n utility_request_value.append(-max(green_request_value) / 2)\n datapoints = {\"green_keys\": green_keys,\n \"yellow_keys\": yellow_keys,\n \"red_keys\": [],\n \"utility_keys\": utility_key,\n \"green_request\": green_request_value,\n \"yellow_request\": yellow_request_value,\n \"red_request\": [],\n \"utility_request\": utility_request_value,\n \"green_request_name\": green_request_name,\n \"yellow_request_name\": yellow_request_name,\n \"red_request_name\": [],\n \"utility_request_name\": utility_request_name,\n 'width': 8,\n 'height': 4.5,\n \"x_axis\": \"Requests\", \"y_axis\": \"Time, s\", \"title\": \"Comparison vs Baseline\",\n \"path_to_save\": \"/tmp/baseline.png\"}\n barchart(datapoints)\n fp = open('/tmp/baseline.png', 'rb')\n image = MIMEImage(fp.read())\n image.add_header('Content-ID', '<baseline>')\n fp.close()\n return image\n\n @staticmethod\n def create_thresholds_chart(last_test_data, comparison_metric):\n green_keys, yellow_keys, red_keys, utility_key, green_request_value = [], [], [], [], []\n yellow_request_value, red_request_value, utility_request_value, green_request_name = [], [], [], []\n yellow_request_name, red_request_name, utility_request_name = [], [], []\n count = 1\n for request in last_test_data:\n if request[comparison_metric + '_threshold'] == 'green':\n green_keys.append(count)\n count += 1\n green_request_value.append(round(float(request[comparison_metric]) / 1000, 2))\n green_request_name.append(request['request_name'])\n if request[comparison_metric + '_threshold'] == 'orange':\n yellow_keys.append(count)\n count += 1\n yellow_request_value.append(round(-float(request[comparison_metric]) / 1000, 2))\n yellow_request_name.append(request['request_name'])\n if request[comparison_metric + '_threshold'] == 'red':\n red_keys.append(count)\n count += 1\n red_request_value.append(round(-float(request[comparison_metric]) / 1000, 2))\n red_request_name.append(request['request_name'])\n\n if len(green_keys) == 0:\n utility_key.append(count)\n count += 1\n utility_request_name.append('utility')\n if len(red_request_value) != 0:\n utility_request_value.append(-max(red_request_value) / 2)\n else:\n utility_request_value.append(-max(yellow_request_value) / 2)\n if len(yellow_keys) == 0 and len(red_keys) == 0:\n utility_key.append(count)\n count += 1\n utility_request_name.append('utility')\n utility_request_value.append(-max(green_request_value) / 2)\n datapoints = {\"green_keys\": green_keys,\n \"yellow_keys\": yellow_keys,\n \"red_keys\": red_keys,\n \"utility_keys\": utility_key,\n \"green_request\": 
green_request_value,\n \"yellow_request\": yellow_request_value,\n \"red_request\": red_request_value,\n \"utility_request\": utility_request_value,\n \"green_request_name\": green_request_name,\n \"yellow_request_name\": yellow_request_name,\n \"red_request_name\": red_request_name,\n \"utility_request_name\": utility_request_name,\n 'width': 8,\n 'height': 4.5,\n \"x_axis\": \"Requests\", \"y_axis\": \"Time, s\", \"title\": \"Comparison vs Thresholds\",\n \"path_to_save\": \"/tmp/thresholds.png\"}\n barchart(datapoints)\n fp = open('/tmp/thresholds.png', 'rb')\n image = MIMEImage(fp.read())\n image.add_header('Content-ID', '<thresholds>')\n fp.close()\n return image\n\n def create_ui_charts(self, test, builds_comparison):\n charts = [self.create_thresholds_chart(test, 'time')]\n if len(builds_comparison) > 1:\n charts.append(self.create_comparison_chart(builds_comparison))\n if len(builds_comparison) > 1:\n charts.append(self.create_success_rate_chart(builds_comparison))\n return charts\n\n @staticmethod\n def create_comparison_chart(builds_comparison):\n labels, keys, latency_values, transfer_values = [], [], [], []\n tti_values, ttl_values, total_time_values = [], [], []\n count = 1\n for build in builds_comparison:\n labels.append(build['date'])\n keys.append(count)\n count += 1\n latency_values.append(build['latency'])\n transfer_values.append(build['transfer'])\n tti_values.append(build['tti'])\n ttl_values.append(build['ttl'])\n total_time_values.append(build['total_time'])\n datapoints = {\n 'title': '',\n 'label': 'Time, sec',\n 'x_axis': 'Test Runs',\n 'y_axis': 'Time, sec',\n 'width': 10,\n 'height': 3,\n 'path_to_save': '/tmp/comparison.png',\n 'keys': keys,\n 'latency_values': latency_values[::-1],\n 'transfer_values': transfer_values[::-1],\n 'tti_values': tti_values[::-1],\n 'ttl_values': ttl_values[::-1],\n 'total_time_values': total_time_values[::-1],\n 'labels': labels[::-1]\n }\n ui_comparison_linechart(datapoints)\n fp = open('/tmp/comparison.png', 'rb')\n image = MIMEImage(fp.read())\n image.add_header('Content-ID', '<comparison>')\n fp.close()\n return image\n\n @staticmethod\n def get_general_metrics(build_data, baseline, thresholds=None):\n current_tp = build_data['throughput']\n current_error_rate = build_data['error_rate']\n baseline_throughput = \"N/A\"\n baseline_error_rate = \"N/A\"\n thresholds_tp_rate = \"N/A\"\n thresholds_error_rate = \"N/A\"\n thresholds_tp_color = GRAY\n thresholds_er_color = GRAY\n baseline_tp_color = GRAY\n baseline_er_color = GRAY\n if baseline:\n baseline_throughput = round(sum([tp['throughput'] for tp in baseline]), 2)\n baseline_ko_count = round(sum([tp['ko'] for tp in baseline]), 2)\n baseline_ok_count = round(sum([tp['ok'] for tp in baseline]), 2)\n baseline_error_rate = round((baseline_ko_count / (baseline_ko_count + baseline_ok_count)) * 100, 2)\n baseline_tp_color = RED if baseline_throughput > current_tp else GREEN\n baseline_er_color = RED if current_error_rate > baseline_error_rate else GREEN\n baseline_throughput = round(current_tp - baseline_throughput, 2)\n baseline_error_rate = round(current_error_rate - baseline_error_rate, 2)\n if thresholds:\n for th in thresholds:\n if th['request_name'] == 'all':\n if th['target'] == 'error_rate':\n thresholds_error_rate = round(th[\"metric\"] - th['yellow'], 2)\n if th['threshold'] == \"red\":\n thresholds_er_color = RED\n elif th['threshold'] == \"yellow\":\n thresholds_er_color = YELLOW\n else:\n thresholds_er_color = GREEN\n if th['target'] == 'throughput':\n thresholds_tp_rate 
= round(th[\"metric\"] - th['yellow'], 2)\n if th['threshold'] == \"red\":\n thresholds_tp_color = RED\n elif th['threshold'] == \"yellow\":\n thresholds_tp_color = YELLOW\n else:\n thresholds_tp_color = GREEN\n return {\n \"current_tp\": current_tp,\n \"baseline_tp\": baseline_throughput,\n \"baseline_tp_color\": baseline_tp_color,\n \"threshold_tp\": thresholds_tp_rate,\n \"threshold_tp_color\": thresholds_tp_color,\n \"current_er\": current_error_rate,\n \"baseline_er\": baseline_error_rate,\n \"baseline_er_color\": baseline_er_color,\n \"threshold_er\": thresholds_error_rate,\n \"threshold_er_color\": thresholds_er_color\n }\n\n @staticmethod\n def get_baseline_and_thresholds(last_test_data, baseline, comparison_metric, thresholds):\n exceeded_thresholds = []\n baseline_metrics = {}\n thresholds_metrics = {}\n if baseline:\n for request in baseline:\n baseline_metrics[request['request_name']] = int(request[comparison_metric])\n\n if thresholds:\n for th in thresholds:\n if th['target'] == 'response_time':\n thresholds_metrics[th['request_name']] = th\n for request in last_test_data:\n req = {}\n req['response_time'] = str(round(float(request[comparison_metric]) / 1000, 2))\n if len(str(request['request_name'])) > 25:\n req['request_name'] = str(request['request_name'])[:25] + \"... \"\n else:\n req['request_name'] = str(request['request_name'])\n if baseline:\n req['baseline'] = round(\n float(int(request[comparison_metric]) - baseline_metrics[request['request_name']]) / 1000, 2)\n if req['baseline'] < 0:\n req['baseline_color'] = GREEN\n else:\n req['baseline_color'] = YELLOW\n else:\n req['baseline'] = \"N/A\"\n req['baseline_color'] = GRAY\n if thresholds_metrics and thresholds_metrics.get(request['request_name']):\n req['threshold'] = round(\n float(int(request[comparison_metric]) -\n int(thresholds_metrics[request['request_name']]['yellow'])) / 1000, 2)\n req['threshold_value'] = str(thresholds_metrics[request['request_name']]['yellow'])\n if thresholds_metrics[request['request_name']]['threshold'] == 'yellow':\n req['threshold_color'] = YELLOW\n elif thresholds_metrics[request['request_name']]['threshold'] == 'red':\n req['line_color'] = RED\n req['threshold_color'] = RED\n else:\n req['threshold_color'] = GREEN\n else:\n req['threshold'] = \"N/A\"\n req['threshold_value'] = 0.0\n req['threshold_color'] = GRAY\n req['line_color'] = GRAY\n if not req.get('line_color'):\n if req['threshold_color'] == GREEN and req.get('baseline_color', GREEN) == GREEN:\n req['line_color'] = GREEN\n else:\n req['line_color'] = YELLOW\n exceeded_thresholds.append(req)\n exceeded_thresholds = sorted(exceeded_thresholds, key=lambda k: float(k['response_time']), reverse=True)\n hundered = 0\n for _ in range(len(exceeded_thresholds)):\n if not (hundered):\n exceeded_thresholds[_]['share'] = 100\n hundered = float(exceeded_thresholds[_]['response_time'])\n else:\n exceeded_thresholds[_]['share'] = int((100 * float(exceeded_thresholds[_]['response_time'])) / hundered)\n return exceeded_thresholds\n\n def create_ui_builds_comparison(self, tests):\n comparison, builds_info = [], []\n for build in tests:\n build_info = {'ttl': [], 'tti': [], 'transfer': [], 'latency': [], 'total_time': []}\n page_count = 0\n error_count = 0\n for page in build:\n page_count += page['count']\n error_count += page['failed']\n build_info['ttl'].append(round(statistics.median(page['ttl']) / 1000.0, 2))\n build_info['tti'].append(round(statistics.median(page['tti']) / 1000.0, 2))\n 
build_info['transfer'].append(round(statistics.median(page['transfer']) / 1000.0, 2))\n build_info['latency'].append(round(statistics.median(page['latency']) / 1000.0, 2))\n build_info['total_time'].append(round(statistics.median(page['total_time']) / 1000.0, 2))\n build_info['ttl'] = statistics.median(build_info['ttl'])\n build_info['tti'] = statistics.median(build_info['tti'])\n build_info['transfer'] = statistics.median(build_info['transfer'])\n build_info['latency'] = statistics.median(build_info['latency'])\n build_info['total_time'] = statistics.median(build_info['total_time'])\n build_info['date'] = datetime.datetime.utcfromtimestamp(int(build[0]['start_time']) / 1000) \\\n .strftime('%d-%b %H:%M')\n build_info['count'] = page_count\n build_info['error_rate'] = round((error_count / page_count) * 100, 2)\n builds_info.append(build_info)\n last_build = builds_info[0]\n for build in builds_info:\n comparison.append(self.compare_ui_builds(last_build, build))\n return comparison\n\n @staticmethod\n def compare_ui_builds(last_build, build):\n build_info = {}\n params = ['date', 'error_rate', 'ttl', 'tti', 'transfer', 'latency', 'total_time', 'count']\n build_info['error_rate_diff'] = float(build['error_rate']) - float(last_build['error_rate'])\n if build_info['error_rate_diff'] > 0.0:\n build_info['error_rate_diff'] = \"<b style=\\\"color: red\\\">&#9650;</b>\" + str(build_info['error_rate_diff'])\n else:\n build_info['error_rate_diff'] = str(build_info['error_rate_diff']\n ).replace(\"-\", \"<b style=\\\"color: green\\\">&#9660;</b>\")\n build_info['total_page_diff'] = round(float(build['count']) * 100 / float(last_build['count']) - 100, 1)\n if build_info['total_page_diff'] > 0.0:\n build_info['total_page_diff'] = \"<b style=\\\"color: red\\\">&#9650;</b>\" + str(build_info['total_page_diff'])\n else:\n build_info['total_page_diff'] = str(build_info['total_page_diff']\n ).replace(\"-\", \"<b style=\\\"color: green\\\">&#9660;</b>\")\n build_info['total_ttl_diff'] = round(float(build['ttl']) * 100 / float(last_build['ttl']) - 100, 1)\n if build_info['total_ttl_diff'] > 0.0:\n build_info['total_ttl_diff'] = \"<b style=\\\"color: red\\\">&#9650;</b>\" + str(build_info['total_ttl_diff'])\n else:\n build_info['total_ttl_diff'] = str(build_info['total_ttl_diff']\n ).replace(\"-\", \"<b style=\\\"color: green\\\">&#9660;</b>\")\n build_info['total_tti_diff'] = round(float(build['tti']) * 100 / float(last_build['tti']) - 100, 1)\n if build_info['total_tti_diff'] > 0.0:\n build_info['total_tti_diff'] = \"<b style=\\\"color: red\\\">&#9650;</b>\" + str(build_info['total_tti_diff'])\n else:\n build_info['total_tti_diff'] = str(build_info['total_tti_diff']\n ).replace(\"-\", \"<b style=\\\"color: green\\\">&#9660;</b>\")\n\n build_info['total_transfer_diff'] = round(float(build['transfer']) * 100 / float(last_build['transfer']) - 100,\n 1) if int(last_build['transfer']) != 0 else 0\n\n if build_info['total_transfer_diff'] > 0.0:\n build_info['total_transfer_diff'] = \"<b style=\\\"color: red\\\">&#9650;</b>\" + \\\n str(build_info['total_transfer_diff'])\n else:\n build_info['total_transfer_diff'] = str(build_info['total_transfer_diff']\n ).replace(\"-\", \"<b style=\\\"color: green\\\">&#9660;</b>\")\n build_info['total_latency_diff'] = round(float(build['latency']) * 100 / float(last_build['latency']) - 100, 1)\n if build_info['total_latency_diff'] > 0.0:\n build_info['total_latency_diff'] = \"<b style=\\\"color: red\\\">&#9650;</b>\" + str(\n build_info['total_latency_diff'])\n else:\n 
build_info['total_latency_diff'] = str(round(float(build_info['total_latency_diff']), 1)\n ).replace(\"-\", \"<b style=\\\"color: green\\\">&#9660;</b>\")\n build_info['total_time_diff'] = round(float(build['total_time']) * 100 / float(last_build['total_time']) - 100,\n 1)\n if build_info['total_time_diff'] > 0.0:\n build_info['total_time_diff'] = \"<b style=\\\"color: red\\\">&#9650;</b>\" + str(build_info['total_time_diff'])\n else:\n build_info['total_time_diff'] = str(build_info['total_time_diff']\n ).replace(\"-\", \"<b style=\\\"color: green\\\">&#9660;</b>\")\n\n for param in params:\n build_info[param] = build[param]\n return build_info\n\n @staticmethod\n def aggregate_last_test_results(test):\n test_data = []\n params = ['request_name', 'count', 'failed', 'time_threshold']\n for page in test:\n page_info = {}\n for param in params:\n page_info[param] = page[param]\n page_info['ttl'] = round(statistics.median(page['ttl']) / 1000.0, 2)\n page_info['tti'] = round(statistics.median(page['tti']) / 1000.0, 2)\n page_info['transfer'] = round(statistics.median(page['transfer']) / 1000.0, 2)\n page_info['latency'] = round(statistics.median(page['latency']) / 1000.0, 2)\n page_info['total_time'] = round(statistics.median(page['total_time']) / 1000.0, 2)\n test_data.append(page_info)\n return test_data\n\n def get_api_email_body(self, test_params, last_test_data, baseline, builds_comparison, baseline_and_thresholds,\n general_metrics):\n env = Environment(loader=FileSystemLoader('./templates/'))\n template = env.get_template(\"backend_email.html\")\n last_test_data = self.reprocess_test_data(last_test_data, ['total', 'throughput'])\n html = template.render(t_params=test_params, summary=last_test_data, baseline=baseline,\n comparison=builds_comparison,\n baseline_and_thresholds=baseline_and_thresholds, general_metrics=general_metrics)\n return html\n\n @staticmethod\n def stringify_number(number):\n if float(number) // 1000000 > 0:\n return f'{str(round(float(number) / 1000000, 2))}M'\n elif float(number) // 1000 > 0:\n return f'{str(round(float(number) / 1000, 2))}K'\n else:\n return str(number)\n\n def reprocess_test_data(self, results_list, keys):\n for _ in range(len(results_list)):\n for key in keys:\n results_list[_][key] = self.stringify_number(results_list[_][key])\n return results_list\n\n @staticmethod\n def get_ui_email_body(test_params, top_five_thresholds, builds_comparison, last_test_data):\n env = Environment(loader=FileSystemLoader('./templates/'))\n template = env.get_template(\"ui_email_template.html\")\n html = template.render(t_params=test_params, top_five_thresholds=top_five_thresholds,\n comparison=builds_comparison,\n summary=last_test_data)\n return html\n", "id": "9666009", "language": "Python", "matching_score": 3.681004047393799, "max_stars_count": 0, "path": "report_builder.py" }, { "content": "from datetime import datetime\n\nimport requests\nfrom jinja2 import Environment, FileSystemLoader\n\nfrom email_notifications import Email\n\n\nclass UIEmailNotification(object):\n\n def __init__(self, arguments):\n self.test_id = arguments['test_id']\n self.gelloper_url = arguments['galloper_url']\n self.gelloper_token = arguments['token']\n self.galloper_project_id = arguments['project_id']\n self.report_id = arguments['report_id']\n self.test_name = arguments['test']\n\n def ui_email_notification(self):\n info = self.__get_test_info()\n user_list = self.__extract_recipient_emails(info)\n\n date = datetime.now().strftime(\"%m/%d/%Y, %H:%M:%S\")\n subject = f\"[UI] Test 
results for {info['name']}. From {date}.\"\n\n report_info = self.__get_report_info()\n results_info = self.__get_results_info()\n\n status = \"PASSED\"\n if not report_info['passed']:\n status = \"FAILED\"\n\n t_params = {\n \"scenario\": report_info['name'],\n \"start_time\": report_info[\"start_time\"],\n \"status\": status,\n \"duration\": report_info['duration'],\n \"env\": report_info['environment'],\n \"browser\": report_info['browser'].capitalize(),\n \"version\": report_info['browser_version'],\n \"view_port\": \"1920x1080\",\n \"loops\": report_info[\"loops\"],\n \"pages\": len(results_info)\n }\n\n email_body = self.__get_email_body(t_params, results_info)\n\n charts = []\n\n return Email(self.test_name, subject, user_list, email_body, charts, date)\n\n def __extract_recipient_emails(self, info):\n return info['emails'].split(',')\n\n def __get_test_info(self):\n return self.__get_url(\n f\"/tests/{self.galloper_project_id}/frontend/{self.test_id}?raw=1\")\n\n def __get_report_info(self):\n return self.__get_url(f\"/observer/{self.galloper_project_id}?report_id={self.report_id}\")\n\n def __get_results_info(self):\n return self.__get_url(f\"/visual/{self.galloper_project_id}/{self.report_id}?order=asc\")\n\n def __get_email_body(self, t_params, results_info):\n env = Environment(\n loader=FileSystemLoader('./templates'))\n template = env.get_template(\"ui_email_template.html\")\n return template.render(t_params=t_params, results=results_info)\n\n def __get_url(self, url):\n resp = requests.get(\n f\"{self.gelloper_url}/api/v1{url}\", headers={\n 'Authorization': f'bearer {self.gelloper_token}',\n 'Content-type': 'application/json'\n })\n\n if resp.status_code != 200:\n raise Exception(f\"Error {resp}\")\n\n return resp.json()\n", "id": "4798090", "language": "Python", "matching_score": 2.809662342071533, "max_stars_count": 0, "path": "ui_email_notification.py" }, { "content": "from json import JSONDecodeError\n\nimport requests\nfrom observer_hub.util import logger\n\n\ndef init_email_notification(galloper_url, galloper_project_id, galloper_token, report_id, test_id):\n if galloper_url and galloper_token and galloper_project_id:\n secrets_url = f\"{galloper_url}/api/v1/secrets/{galloper_project_id}/\"\n try:\n email_notification_id = requests.get(secrets_url + \"email_notification_id\",\n headers={'Authorization': f'bearer {galloper_token}',\n 'Content-type': 'application/json'}\n ).json()[\"secret\"]\n except (AttributeError, JSONDecodeError):\n email_notification_id = \"\"\n\n if email_notification_id:\n task_url = f\"{galloper_url}/api/v1/task/{galloper_project_id}/{email_notification_id}\"\n\n event = {\n \"notification_type\": \"ui\",\n \"test_id\": test_id,\n \"report_id\": report_id\n }\n\n res = requests.post(task_url, json=event, headers={'Authorization': f'bearer {galloper_token}',\n 'Content-type': 'application/json'})\n logger.info(f\"Email notification {res.text}\")\n", "id": "9117652", "language": "Python", "matching_score": 1.3767870664596558, "max_stars_count": 0, "path": "observer_hub/reporters/email_reporter.py" }, { "content": "from uuid import uuid4\nfrom arbiter import Arbiter\nfrom observer_hub.util import logger\nfrom observer_hub.constants import (REPORTS_BUCKET, RABBIT_HOST, RABBIT_QUEUE_NAME,\n RABBIT_PORT, RABBIT_USER, RABBIT_PASSWORD)\n\n\ndef port_async_processing_task(galloper_url, galloper_project_id, galloper_token, results,\n report_name, minio_package_name):\n browsertime_package = f\"browsertime_{uuid4()}\"\n params = 
dict(galloper_url=galloper_url,\n project_id=galloper_project_id,\n token=<PASSWORD>,\n bucket=REPORTS_BUCKET,\n filename=browsertime_package,\n url=results.results[\"info\"].get(\"url\"),\n headers=results.results[\"info\"].get(\"headers\") if results.results[\"info\"].get(\"headers\") else {},\n minio_package_name=minio_package_name,\n report_filename=report_name,\n browser=\"chrome\")\n if RABBIT_HOST:\n logger.info(\"Connecting to Arbiter\")\n arbiter = Arbiter(host=RABBIT_HOST, port=RABBIT_PORT, user=RABBIT_USER, password=<PASSWORD>,\n start_consumer=False)\n arbiter.apply(task_name='browsertime', queue=RABBIT_QUEUE_NAME, task_kwargs=params)\n return browsertime_package\n", "id": "6225899", "language": "Python", "matching_score": 2.497143030166626, "max_stars_count": 0, "path": "observer_hub/reporters/browsertime_reporter.py" }, { "content": "from traceback import format_exc\n\nimport requests\nfrom observer_hub.util import logger\n\n\ndef get_headers(token):\n if token:\n return {'Authorization': f\"Bearer {token}\"}\n logger.warning(\"=====> Auth TOKEN is not set!\")\n return None\n\n\ndef create_galloper_report(galloper_url, galloper_project_id, galloper_token, data):\n try:\n res = requests.post(f\"{galloper_url}/api/v1/observer/{galloper_project_id}\", json=data,\n headers=get_headers(galloper_token))\n logger.info(f\"Create report for {galloper_project_id} {res.json()}\")\n except Exception:\n logger.error(format_exc())\n\n\ndef finalize_galloper_report(galloper_url, galloper_project_id, galloper_token, data):\n try:\n requests.put(f\"{galloper_url}/api/v1/observer/{galloper_project_id}\", json=data,\n headers=get_headers(galloper_token))\n except Exception:\n logger.error(format_exc())\n\n\ndef get_thresholds(galloper_url, galloper_project_id, galloper_token, test_name, env):\n logger.info(f\"Get thresholds for: {test_name} {env}\")\n res = None\n try:\n res = requests.get(\n f\"{galloper_url}/api/v1/thresholds/{galloper_project_id}/ui?name={test_name}&environment={env}&order=asc\",\n headers=get_headers(galloper_token))\n except Exception:\n logger.error(format_exc())\n\n if not res or res.status_code != 200:\n return {}\n\n try:\n return res.json()\n except ValueError:\n return {}\n\n\ndef send_gelloper_report_results(galloper_url, galloper_project_id, galloper_token, report_id, data):\n try:\n requests.post(f\"{galloper_url}/api/v1/observer/{galloper_project_id}/{report_id}\", json=data,\n headers=get_headers(galloper_token))\n except Exception:\n logger.error(format_exc())\n\n\ndef upload_artifacts(galloper_url, galloper_project_id, galloper_token, bucket_name, file_path, file_name):\n file = {'file': open(file_path, 'rb')}\n\n try:\n requests.post(f\"{galloper_url}/api/v1/artifacts/{galloper_project_id}/{bucket_name}/{file_name}\",\n files=file,\n headers=get_headers(galloper_token))\n except Exception:\n logger.error(format_exc())\n\n\ndef send_report_locators(galloper_url, galloper_token, project_id: int, report_id: int, exception):\n try:\n requests.put(f\"{galloper_url}/api/v1/observer/{project_id}/{report_id}\",\n json={\"exception\": exception, \"status\": \"\"},\n headers=get_headers(galloper_token))\n except Exception:\n logger.error(format_exc())\n", "id": "4573053", "language": "Python", "matching_score": 3.218641757965088, "max_stars_count": 0, "path": "observer_hub/integrations/galloper_api_client.py" }, { "content": "from datetime import timedelta\nfrom uuid import uuid4\n\nfrom observer_hub.constants import REPORTS_BUCKET, REPORT_PATH, TIMEOUT\nfrom 
observer_hub.integrations.galloper_api_client import create_galloper_report, send_gelloper_report_results, \\\n upload_artifacts, \\\n finalize_galloper_report\nfrom observer_hub.models.exporters import GalloperExporter\nfrom observer_hub.util import logger, current_time\n\n\ndef notify_on_test_start(galloper_url, galloper_project_id, galloper_token, desired_capabilities):\n browser_name = desired_capabilities['browserName']\n version = desired_capabilities['version']\n\n if version:\n version = f'_{version}'\n\n test_name = desired_capabilities.get(\"scenario_name\", f\"{browser_name}{version}\")\n base_url = desired_capabilities.get('base_url', \"\")\n loops = desired_capabilities.get('loops', 1)\n aggregation = desired_capabilities.get('aggregation', 'max')\n report_uid = desired_capabilities.get('report_uid', str(uuid4()))\n env = desired_capabilities.get('venv', '')\n tz = desired_capabilities.get('tz', 'UTC')\n version = desired_capabilities.get('version', '')\n job_name = desired_capabilities.get('job_name', '')\n\n data = {\n \"report_id\": report_uid,\n \"test_name\": job_name,\n \"base_url\": base_url,\n \"browser_name\": browser_name,\n \"browser_version\": version,\n \"env\": env,\n \"loops\": loops,\n \"aggregation\": aggregation,\n \"time\": current_time(tz).strftime('%Y-%m-%d %H:%M:%S')\n }\n\n create_galloper_report(galloper_url, galloper_project_id, galloper_token, data)\n\n return report_uid, job_name\n\n\ndef notify_on_test_end(galloper_url, galloper_project_id, galloper_token, report_id, total_thresholds, exception,\n junit_report_name,\n junit_report_bucket, tz):\n logger.info(f\"About to notify on test end for report {report_id}\")\n\n time = current_time(tz) - timedelta(seconds=TIMEOUT)\n\n data = {\n \"report_id\": report_id,\n \"time\": time.strftime('%Y-%m-%d %H:%M:%S'),\n \"status\": \"Finished\",\n \"thresholds_total\": total_thresholds.get(\"total\", 0),\n \"thresholds_failed\": total_thresholds.get(\"failed\", 0)\n }\n\n if exception:\n data[\"exception\"] = str(exception)\n\n finalize_galloper_report(galloper_url, galloper_project_id, galloper_token, data)\n\n if junit_report_name:\n logger.info(f\"About to upload junit report to {junit_report_bucket}\")\n upload_artifacts(galloper_url, galloper_project_id, galloper_token, junit_report_bucket,\n f\"{REPORT_PATH}/junit/{junit_report_name}\",\n junit_report_name)\n\n\ndef notify_on_command_end(galloper_url, galloper_project_id, galloper_token, report_id, report, minio_package,\n execution_result, thresholds, session_id):\n name = execution_result.results['info']['title']\n metrics = execution_result.results\n logger.info(f\"About to notify on command end for report {report_id}\")\n result = GalloperExporter(metrics).export()\n\n data = {\n \"name\": name,\n \"type\": execution_result.results_type,\n \"identifier\": execution_result.page_identifier,\n \"metrics\": result,\n \"bucket_name\": REPORTS_BUCKET,\n \"file_name\": report.file_name,\n \"resolution\": metrics['info']['windowSize'],\n \"browser_version\": metrics['info']['browser'],\n \"thresholds_total\": thresholds.get(\"total\", 0),\n \"thresholds_failed\": thresholds.get(\"failed\", 0),\n \"locators\": execution_result.commands,\n \"session_id\": session_id\n }\n\n send_gelloper_report_results(galloper_url, galloper_project_id, galloper_token, report_id, data)\n\n upload_artifacts(galloper_url, galloper_project_id, galloper_token, REPORTS_BUCKET, report.path, report.file_name)\n upload_artifacts(galloper_url, galloper_project_id, galloper_token, 
REPORTS_BUCKET,\n minio_package.path, minio_package.file_name)\n", "id": "945181", "language": "Python", "matching_score": 2.6448252201080322, "max_stars_count": 0, "path": "observer_hub/integrations/galloper.py" }, { "content": "import os\n\nfrom junit_xml import TestCase, TestSuite\n\nfrom observer_hub.constants import REPORT_PATH\nfrom observer_hub.util import logger\n\n\ndef generate_junit_report(test_name, total_thresholds, report_name):\n test_cases = []\n file_name = f\"junit_report_{report_name}.xml\"\n logger.info(f\"Generate report {file_name}\")\n\n for item in total_thresholds[\"details\"]:\n message = item['message']\n test_case = TestCase(item['name'], classname=f\"{item['scope']}\",\n status=\"PASSED\",\n stdout=f\"{item['scope']} {item['name'].lower()} {item['aggregation']} {item['actual']} \"\n f\"{item['rule']} {item['expected']}\")\n if message:\n test_case.status = \"FAILED\"\n test_case.add_failure_info(message)\n test_cases.append(test_case)\n\n ts = TestSuite(test_name, test_cases)\n os.makedirs(f\"{REPORT_PATH}/junit\", exist_ok=True)\n with open(f\"{REPORT_PATH}/junit/{file_name}\", 'w') as f:\n TestSuite.to_file(f, [ts], prettyprint=True)\n\n return file_name\n", "id": "11192290", "language": "Python", "matching_score": 1.2561407089233398, "max_stars_count": 0, "path": "observer_hub/reporters/junit_reporter.py" }, { "content": "import math\n\n\ndef is_threshold_failed(actual, comparison, expected):\n if comparison == 'gte':\n return actual >= expected\n elif comparison == 'lte':\n return actual <= expected\n elif comparison == 'gt':\n return actual > expected\n elif comparison == 'lt':\n return actual < expected\n elif comparison == 'eq':\n return actual == expected\n return False\n\n\ndef get_aggregated_value(aggregation, metrics):\n if aggregation == 'max':\n return max(metrics)\n elif aggregation == 'min':\n return min(metrics)\n elif aggregation == 'avg':\n return int(sum(metrics) / len(metrics))\n elif aggregation == 'pct95':\n return percentile(metrics, 95)\n elif aggregation == 'pct50':\n return percentile(metrics, 50)\n else:\n raise Exception(f\"No such aggregation {aggregation}\")\n\n\ndef percentile(data, percentile):\n size = len(data)\n return sorted(data)[int(math.ceil((size * percentile) / 100)) - 1]\n\n", "id": "11371323", "language": "Python", "matching_score": 2.1577916145324707, "max_stars_count": 0, "path": "util.py" }, { "content": "from observer_hub.util import logger, is_values_match, get_aggregated_value, closest, flatten_list\n\n\nclass Threshold(object):\n\n def __init__(self, gate, actual):\n self.name = gate['target'].replace(\"_\", \" \").capitalize()\n self.gate = gate\n self.actual = actual\n self.expected = gate['metric']\n self.comparison = gate['comparison']\n self.scope = gate['scope']\n\n def is_passed(self):\n return is_values_match(self.actual, self.comparison, self.expected)\n\n def get_result(self):\n message = \"\"\n if not self.is_passed():\n message = f\"Threshold: {self.scope} [{self.name}] value {self.actual} violates rule {self.comparison} {self.expected}\"\n logger.info(f\"{message}! [FAILED]\")\n else:\n logger.info(\n f\"Threshold: {self.scope} [{self.name}] value {self.actual} comply with rule {self.comparison} {self.expected}! 
[PASSED]\")\n\n return {\"name\": f\"{self.name}\",\n \"actual\": self.actual, \"expected\": self.expected,\n \"message\": message}\n\n\nclass AggregatedThreshold(object):\n\n def __init__(self, gate, values):\n self.name = gate['target'].replace(\"_\", \" \").capitalize()\n self.metric_name = gate['target']\n self.expected_value = gate['metric']\n self.aggregation = gate['aggregation']\n self.comparison = gate['comparison']\n self.scope = gate['scope']\n self.values = values\n self.result = {}\n\n def get_actual_aggregated_value(self):\n if self.scope == 'every':\n for page, results in self.values.items():\n metrics = [d.to_json()[self.metric_name] for d in results]\n yield get_aggregated_value(self.aggregation, metrics)\n elif self.scope == 'all':\n result = []\n for page, results in self.values.items():\n metrics = [d.to_json()[self.metric_name] for d in results]\n result.append(metrics)\n\n yield get_aggregated_value(self.aggregation, flatten_list(result))\n else:\n result = {k: v for k, v in self.values.items() if k.startswith(self.scope)}\n for page, results in result.items():\n metrics = [d.to_json()[self.metric_name] for d in results]\n yield get_aggregated_value(self.aggregation, metrics)\n\n def is_passed(self):\n actual_value = None\n\n for actual, metrics in self.get_actual_aggregated_value():\n actual_value = actual\n\n if not is_values_match(actual, self.comparison, self.expected_value):\n message = f\"Threshold: {self.scope} [{self.name}] {self.aggregation} value {actual} violates rule \" \\\n f\"{self.comparison} {self.expected_value}\"\n logger.info(f\"{message} [FAILED]\")\n\n failed_result = self.__find_actual_result(metrics, actual)\n\n self.result = {\n \"name\": f\"{self.name}\",\n \"status\": \"failed\",\n \"rule\": self.comparison,\n \"scope\": self.scope,\n \"aggregation\": self.aggregation,\n \"actual\": actual,\n \"expected\": self.expected_value,\n \"message\": message,\n \"raw_result\": failed_result\n }\n return False\n\n logger.info(\n f\"Threshold: {self.scope} [{self.name}] {self.aggregation} value {actual_value} comply with rule {self.comparison} \"\n f\"{self.expected_value} [PASSED]\")\n\n self.result = {\n \"name\": f\"{self.name}\",\n \"status\": \"passed\",\n \"rule\": self.comparison,\n \"scope\": self.scope,\n \"aggregation\": self.aggregation,\n \"actual\": actual_value,\n \"expected\": self.expected_value,\n \"message\": ''\n }\n\n return True\n\n def get_result(self):\n return self.result\n\n def __find_actual_result(self, metrics, actual):\n value = closest(metrics, actual)\n for _, data in self.values.items():\n for v in data:\n if v.to_json()[self.name.lower()] == value:\n return v\n", "id": "9894122", "language": "Python", "matching_score": 2.226862668991089, "max_stars_count": 0, "path": "observer_hub/models/thresholds.py" }, { "content": "from observer_hub.models.exporters import JsonExporter\nfrom observer_hub.models.thresholds import AggregatedThreshold, Threshold\nfrom observer_hub.util import logger, filter_thresholds_for\n\n\ndef assert_test_thresholds(test_name, all_scope_thresholds, execution_results):\n threshold_results = {\"total\": len(all_scope_thresholds), \"failed\": 0, \"details\": []}\n\n if not all_scope_thresholds:\n return threshold_results\n\n logger.info(f\"=====> Assert aggregated thresholds for {test_name}\")\n checking_result = []\n for gate in all_scope_thresholds:\n threshold = AggregatedThreshold(gate, execution_results)\n if not threshold.is_passed():\n threshold_results['failed'] += 1\n 
checking_result.append(threshold.get_result())\n\n threshold_results[\"details\"] = checking_result\n logger.info(\"=====>\")\n\n return threshold_results\n\n\ndef assert_page_thresholds(execution_result, thresholds):\n page_identifier = execution_result.page_identifier\n scoped_thresholds = filter_thresholds_for(page_identifier, thresholds)\n\n threshold_results = {\"total\": len(scoped_thresholds), \"failed\": 0}\n if not thresholds:\n return threshold_results\n\n perf_results = JsonExporter(execution_result.results).export()['fields']\n\n logger.info(f\"=====> Assert thresholds for {page_identifier}\")\n for gate in scoped_thresholds:\n target_metric_name = gate[\"target\"]\n threshold = Threshold(gate, perf_results[target_metric_name])\n if not threshold.is_passed():\n threshold_results['failed'] += 1\n threshold.get_result()\n\n logger.info(\"=====>\")\n return threshold_results\n", "id": "7116910", "language": "Python", "matching_score": 1.8546613454818726, "max_stars_count": 0, "path": "observer_hub/assertions.py" }, { "content": "from observer_hub.models.exporters import JsonExporter\n\n\nclass ExecutionResult(object):\n\n def __init__(self, page_identifier=None, results=None, screenshot_path=None,\n results_type=\"page\", commands=None):\n self.page_identifier = page_identifier\n self.results = results\n self.screenshot_path = screenshot_path\n self.video_folder = None\n self.video_path = None\n self.results_type = results_type\n if not commands:\n commands = []\n self.commands = commands\n self.report = None\n\n def to_json(self):\n return JsonExporter(self.results).export()['fields']", "id": "2009149", "language": "Python", "matching_score": 1.5160008668899536, "max_stars_count": 0, "path": "observer_hub/models/execution_result.py" }, { "content": "import json\nimport os\nimport subprocess\nimport tempfile\nfrom time import time\n\nfrom requests import get\n\nfrom observer_hub.constants import VIDEO_PATH\nfrom observer_hub.util import logger\n\n\ndef start_video_recording(video_host):\n start_time = time()\n start_recording(video_host)\n current_time = time() - start_time\n return int(current_time)\n\n\ndef start_recording(host):\n get(f'http://{host}/record/start')\n\n\ndef stop_recording(host):\n logger.info(\"Stop recording...\")\n os.makedirs(VIDEO_PATH, exist_ok=True)\n video_results = get(f'http://{host}/record/stop').content\n video_folder = tempfile.mkdtemp(dir=VIDEO_PATH)\n video_path = os.path.join(video_folder, \"video.mp4\")\n with open(video_path, 'w+b') as f:\n f.write(video_results)\n logger.info(f\"Video file {video_path}\")\n return video_folder, video_path\n\n\ndef get_video_length(file_path):\n command = [\n \"ffprobe\",\n \"-loglevel\", \"quiet\",\n \"-print_format\", \"json\",\n \"-show_format\",\n file_path\n ]\n\n pipe = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)\n out, err = pipe.communicate()\n return int(float(json.loads(out)['format']['duration']) * 1000)\n", "id": "6241353", "language": "Python", "matching_score": 1.7520880699157715, "max_stars_count": 0, "path": "observer_hub/video.py" }, { "content": "# Copyright 2019 getcarrier.io\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT 
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom flask import Flask, send_from_directory\nfrom os import environ, kill, path, remove\nfrom signal import SIGINT\nfrom subprocess import Popen, PIPE\nfrom multiprocessing import Process, Queue, active_children\n\nxvfb_screen_to_record = environ.get(\"DISPLAY\", \":99.0\")\nscreen_resolution = environ.get(\"RESOLUTION\", \"1360x1020\")\nscreen_depth = environ.get(\"DEPTH\", \"24\")\ncodec = environ.get(\"CODEC\", \"x11grab\")\nffmpef_path = environ.get(\"FFMPEG\", \"/usr/bin/ffmpeg\")\n\n\ndef start_recording():\n cmd = [ffmpef_path, \"-r\", \"10\", \"-f\", codec, \"-s\", screen_resolution, \"-i\",\n xvfb_screen_to_record, \"-bufsize\", \"1k\", \"-y\", \"/tmp/output.mp4\"]\n Popen(cmd, stderr=PIPE, stdout=PIPE).communicate()\n\n\ndef restart_recording_agent():\n cmd = [\"/usr/bin/supervisorctl\", \"restart\", \"selenium-standalone\"]\n Popen(cmd, stderr=PIPE, stdout=PIPE).communicate()\n\ndef terminate_supervisor():\n cmd = [\"/usr/bin/supervisorctl\", \"shutdown\"]\n Popen(cmd, stderr=PIPE, stdout=PIPE).communicate()\n\n\ndef create_app():\n app = Flask(__name__)\n q = Queue()\n @app.teardown_appcontext\n def teardown_db(event):\n pass\n\n @app.route(\"/terminate\", methods=[\"GET\"])\n def terminate_host():\n proc = Process(target=terminate_supervisor, args=())\n proc.start()\n return \"Ok\"\n\n @app.route(\"/restart\", methods=[\"GET\"])\n def flush():\n proc = Process(target=restart_recording_agent, args=())\n proc.start()\n return \"Ok\"\n\n @app.route(\"/record/start\", methods=[\"GET\"])\n def record_screen():\n if len(active_children()):\n return \"Stop previous recording first\"\n if path.exists('/tmp/output.mp4'):\n remove('/tmp/output.mp4')\n proc = Process(target=start_recording, args=())\n proc.start()\n return \"Ok\"\n\n @app.route(\"/record/stop\", methods=[\"GET\"])\n def record_stop():\n try:\n proc = active_children()[0]\n kill(proc.pid + 1, SIGINT)\n proc.join()\n except:\n pass\n return send_from_directory('/tmp', filename=\"output.mp4\", as_attachment=True)\n\n @app.route('/', methods=[\"GET\"])\n def healthcheck():\n return 'OK'\n\n return app\n\n\ndef main():\n _app = create_app()\n host = \"0.0.0.0\"\n port = environ.get(\"LISTENER_PORT\", 9999)\n _app.run(host=host, port=port, debug=True)\n\n\nif __name__ == \"__main__\":\n main()\n", "id": "1474021", "language": "Python", "matching_score": 2.4203269481658936, "max_stars_count": 0, "path": "observer_video_client/app.py" }, { "content": "#!/usr/bin/python\n# coding=utf-8\n# pylint: disable=I0011\n\n# Copyright 2021 getcarrier.io\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"\n Process tools\n\"\"\"\n\nimport subprocess\n\nfrom pylon.core.tools import log\n\n\ndef run_command(*args, **kvargs):\n \"\"\" Run command and log output \"\"\"\n proc = subprocess.Popen(*args, **kvargs, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)\n #\n 
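# stream the child's merged stdout/stderr into the log while the process is running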
while proc.poll() is None:\n        while True:\n            line = proc.stdout.readline().decode().strip()\n            #\n            if not line:\n                break\n            #\n            log.info(line)\n    #\n    if proc.returncode != 0:\n        raise RuntimeError(f\"Command failed, return code={proc.returncode}\")\n", "id": "152771", "language": "Python", "matching_score": 0.2764107584953308, "max_stars_count": 0, "path": "pylon/core/tools/process.py" }, { "content": "# Copyright 2019 getcarrier.io\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom requests import get\nfrom ...shared.constants import LOKI_HOST\n\n\ndef get_results(test, int_start_time, int_end_time):\n    url = f\"{LOKI_HOST}/loki/api/v1/query_range\"\n\n    data = {\n        \"direction\": \"BACKWARD\",\n        \"limit\": 5000,\n        \"query\": '{filename=\"/tmp/' + test + '.log\"}',\n        \"start\": int_start_time,\n        \"end\": int_end_time\n    }\n    results = get(url, params=data, headers={\"Content-Type\": \"application/json\"}).json()\n    issues = {}\n    for result in results[\"data\"][\"result\"]:\n        for value in result['values']:\n            _values = value[1].strip().split(\"\\\\t\")\n            _issue = {\"count\": 1}\n            _issue_key = ''\n            for _ in _values:\n                if \":\" in _:\n                    key, value = _[:_.index(':')], _[_.index(':')+1:].strip()\n                    if key == 'Error key' and value in issues:\n                        issues[value][\"count\"] += 1\n                        continue\n                    _issue[key] = value\n            if 'Error key' in _issue and _issue['Error key'] not in issues.keys():\n                issues[_issue['Error key']] = _issue\n    return issues\n", "id": "4248126", "language": "Python", "matching_score": 0.8054426312446594, "max_stars_count": 0, "path": "connectors/loki.py" }, { "content": "#!/usr/bin/python3\n# coding=utf-8\n\n# Copyright 2021 getcarrier.io\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"\n    Logging tool: Loki support\n\"\"\"\n\nimport json\nimport gzip\nimport time\nimport logging\nimport logging.handlers\nimport traceback\nimport threading\nimport requests  # pylint: disable=E0401\n\n\nclass CarrierLokiLogEmitter:  # pylint: disable=R0902\n    \"\"\" Emit logs to Loki \"\"\"\n\n    def __init__(  # pylint: disable=R0913\n            self, loki_push_url,\n            loki_user=None, loki_password=None, loki_token=None,\n            default_labels=None,\n            verify=True, retries=3, retry_delay=0.5, timeout=15,\n    ):\n        self.loki_push_url = loki_push_url\n        self.loki_user = loki_user\n        self.loki_password = loki_password\n        self.loki_token = loki_token\n        #\n        self.default_labels = default_labels if default_labels is not None else dict()\n        #\n        self.verify = verify\n        self.retries = retries\n        self.retry_delay = retry_delay\n        self.timeout = 
timeout\n #\n self._connection = None\n\n def connect(self):\n \"\"\" Get connection object \"\"\"\n if self._connection is not None:\n return self._connection\n #\n self._connection = requests.Session()\n #\n if self.loki_user is not None and self.loki_password is not None:\n self._connection.auth = (self.loki_user, self.loki_password)\n if self.loki_token is not None:\n self._connection.headers.update({\n \"Authorization\": f\"Bearer {self.loki_token}\",\n })\n #\n self._connection.headers.update({\n \"Content-Type\": \"application/json\",\n \"Content-Encoding\": \"gzip\",\n })\n #\n return self._connection\n\n def disconnect(self):\n \"\"\" Destroy connection object \"\"\"\n if self._connection is not None:\n try:\n self._connection.close()\n except: # pylint: disable=W0702\n pass\n self._connection = None\n\n def post_data(self, data):\n \"\"\" Do a POST to Loki \"\"\"\n for _ in range(self.retries):\n try:\n connection = self.connect()\n payload = gzip.compress(json.dumps(data).encode(\"utf-8\"))\n response = connection.post(\n self.loki_push_url, data=payload, verify=self.verify, timeout=self.timeout,\n )\n response.raise_for_status()\n return response\n except: # pylint: disable=W0702\n self.disconnect()\n time.sleep(self.retry_delay)\n\n def emit_line(self, unix_epoch_in_nanoseconds, log_line, additional_labels=None):\n \"\"\" Emit log line \"\"\"\n labels = self.default_labels\n if additional_labels is not None:\n labels.update(additional_labels)\n #\n data = {\n \"streams\": [\n {\n \"stream\": labels,\n \"values\": [\n [f\"{unix_epoch_in_nanoseconds}\", log_line],\n ]\n }\n ]\n }\n #\n self.post_data(data)\n\n def emit_batch(self, batch_data, additional_labels=None):\n \"\"\" Emit log line \"\"\"\n labels = self.default_labels\n if additional_labels is not None:\n labels.update(additional_labels)\n #\n data = {\n \"streams\": [\n {\n \"stream\": labels,\n \"values\": batch_data,\n }\n ]\n }\n #\n self.post_data(data)\n #\n # TODO: batches with different stream labels (a.k.a. multiple streams support)\n\n\nclass CarrierLokiLogHandler(logging.Handler):\n \"\"\" Log handler - send logs to storage \"\"\"\n\n def __init__(self, context):\n super().__init__()\n self.settings = context.settings.get(\"loki\")\n #\n default_loki_labels = self.settings.get(\"labels\", dict())\n if self.settings.get(\"include_node_name\", True):\n default_loki_labels[\"node\"] = context.node_name\n #\n self.emitter = CarrierLokiLogEmitter(\n loki_push_url=self.settings.get(\"url\"),\n loki_user=self.settings.get(\"user\", None),\n loki_password=self.settings.get(\"password\", None),\n loki_token=self.settings.get(\"token\", None),\n default_labels=default_loki_labels,\n verify=self.settings.get(\"verify\", True),\n # retries=3,\n # retry_delay=0.5,\n # timeout=15,\n )\n\n def handleError(self, record):\n \"\"\" Handle error while logging \"\"\"\n super().handleError(record)\n self.emitter.disconnect()\n\n def emit(self, record):\n try:\n record_ts = int(record.created * 1000000000)\n record_data = self.format(record)\n #\n additional_labels = dict()\n if self.settings.get(\"include_level_name\", True):\n additional_labels[\"level\"] = record.levelname\n if self.settings.get(\"include_logger_name\", True):\n additional_labels[\"logger\"] = record.name\n #\n self.emitter.emit_line(record_ts, record_data, additional_labels)\n except: # pylint: disable=W0702\n # In this case we should NOT use logging to log logging error. 
Only print()\n print(\"[FATAL] Exception during sending logs\")\n traceback.print_exc()\n\n\nclass CarrierLokiBufferedLogHandler(logging.handlers.BufferingHandler):\n \"\"\" Log handler - buffer and send logs to storage \"\"\"\n\n def __init__(self, context):\n super().__init__(\n context.settings.get(\"loki\").get(\"buffer_capacity\", 100)\n )\n self.settings = context.settings.get(\"loki\")\n #\n default_loki_labels = self.settings.get(\"labels\", dict())\n if self.settings.get(\"include_node_name\", True):\n default_loki_labels[\"node\"] = context.node_name\n #\n self.emitter = CarrierLokiLogEmitter(\n loki_push_url=self.settings.get(\"url\"),\n loki_user=self.settings.get(\"user\", None),\n loki_password=self.settings.get(\"password\", None),\n loki_token=self.settings.get(\"token\", None),\n default_labels=default_loki_labels,\n verify=self.settings.get(\"verify\", True),\n # retries=3,\n # retry_delay=0.5,\n # timeout=15,\n )\n #\n self.last_flush = 0.0\n PeriodicFlush(self, self.settings.get(\"buffer_flush_deadline\", 30)).start()\n\n def handleError(self, record):\n \"\"\" Handle error while logging \"\"\"\n super().handleError(record)\n self.emitter.disconnect()\n\n def shouldFlush(self, record):\n \"\"\" Check if we need to flush messages \"\"\"\n return \\\n (len(self.buffer) >= self.capacity) or \\\n (time.time() - self.last_flush) >= self.settings.get(\"buffer_flush_interval\", 10)\n\n def flush(self):\n self.acquire()\n try:\n log_records = list()\n while self.buffer:\n record = self.buffer.pop(0)\n record_ts = int(record.created * 1000000000)\n record_data = self.format(record)\n # TODO: batches with different stream labels (a.k.a. multiple streams support)\n log_records.append([f\"{record_ts}\", record_data])\n if log_records:\n self.emitter.emit_batch(log_records)\n except: # pylint: disable=W0702\n # In this case we should NOT use logging to log logging error. 
Only print()\n print(\"[FATAL] Exception during sending logs to manager\")\n traceback.print_exc()\n finally:\n self.release()\n self.last_flush = time.time()\n\n\nclass PeriodicFlush(threading.Thread): # pylint: disable=R0903\n \"\"\" Flush logger time to time \"\"\"\n\n def __init__(self, handler, interval=30):\n super().__init__(daemon=True)\n self.handler = handler\n self.interval = interval\n\n def run(self):\n \"\"\" Run handler thread \"\"\"\n while True:\n time.sleep(self.interval)\n self.handler.flush()\n\n\ndef enable_loki_logging(context):\n \"\"\" Enable logging to Loki \"\"\"\n if \"loki\" not in context.settings:\n return\n #\n if context.settings.get(\"loki\").get(\"buffering\", True):\n LokiLogHandler = CarrierLokiBufferedLogHandler\n else:\n LokiLogHandler = CarrierLokiLogHandler\n #\n handler = LokiLogHandler(context)\n handler.setFormatter(logging.getLogger(\"\").handlers[0].formatter)\n logging.getLogger(\"\").addHandler(handler)\n", "id": "1364278", "language": "Python", "matching_score": 2.1445436477661133, "max_stars_count": 0, "path": "pylon/core/tools/log_loki.py" }, { "content": "#!/usr/bin/python3\n# coding=utf-8\n\n# Copyright 2020 getcarrier.io\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"\n Logging tool\n\"\"\"\n\nimport io\nimport os\nimport logging\nimport inspect\nimport urllib3 # pylint: disable=E0401\nimport requests # pylint: disable=E0401\n\nfrom pylon.core import constants\n\n\ndef init(level=logging.INFO):\n \"\"\" Initialize logging \"\"\"\n logging.basicConfig(\n level=level,\n datefmt=constants.LOG_DATE_FORMAT,\n format=constants.LOG_FORMAT,\n )\n logging.raiseExceptions = False\n # Disable requests/urllib3 logging\n logging.getLogger(\"requests\").setLevel(logging.WARNING)\n logging.getLogger(\"urllib3\").setLevel(logging.WARNING)\n # Disable SSL warnings\n urllib3.disable_warnings()\n requests.packages.urllib3.disable_warnings() # pylint: disable=E1101\n # Disable additional logging\n logging.getLogger(\"pika\").setLevel(logging.WARNING)\n\n\ndef get_logger():\n \"\"\" Get logger for caller context \"\"\"\n return logging.getLogger(\n inspect.currentframe().f_back.f_globals[\"__name__\"]\n )\n\n\ndef get_outer_logger():\n \"\"\" Get logger for callers context (for use in this module) \"\"\"\n return logging.getLogger(\n inspect.currentframe().f_back.f_back.f_globals[\"__name__\"]\n )\n\n\ndef debug(msg, *args, **kwargs):\n \"\"\" Logs a message with level DEBUG \"\"\"\n return get_outer_logger().debug(msg, *args, **kwargs)\n\n\ndef info(msg, *args, **kwargs):\n \"\"\" Logs a message with level INFO \"\"\"\n return get_outer_logger().info(msg, *args, **kwargs)\n\n\ndef warning(msg, *args, **kwargs):\n \"\"\" Logs a message with level WARNING \"\"\"\n return get_outer_logger().warning(msg, *args, **kwargs)\n\n\ndef error(msg, *args, **kwargs):\n \"\"\" Logs a message with level ERROR \"\"\"\n return get_outer_logger().error(msg, *args, **kwargs)\n\n\ndef critical(msg, *args, **kwargs):\n \"\"\" Logs a message with level CRITICAL 
\"\"\"\n return get_outer_logger().critical(msg, *args, **kwargs)\n\n\ndef log(lvl, msg, *args, **kwargs):\n \"\"\" Logs a message with integer level lvl \"\"\"\n return get_outer_logger().log(lvl, msg, *args, **kwargs)\n\n\ndef exception(msg, *args, **kwargs):\n \"\"\" Logs a message with level ERROR inside exception handler \"\"\"\n return get_outer_logger().exception(msg, *args, **kwargs)\n\n\ndef enable_logging():\n \"\"\" Enable logging using log level supplied from env \"\"\"\n if os.environ.get(\"CORE_DEBUG_LOGGING\", \"\").lower() in [\"true\", \"yes\"]:\n log_level = logging.DEBUG\n else:\n log_level = logging.INFO\n #\n init(log_level)\n\n\nclass DebugLogStream(io.RawIOBase):\n \"\"\" IO stream that writes to log.debug \"\"\"\n\n def read(self, size=-1): # pylint: disable=W0613\n return None\n\n def readall(self):\n return None\n\n def readinto(self, b): # pylint: disable=W0613\n return None\n\n def write(self, b):\n for line in b.decode().splitlines():\n get_outer_logger().debug(line)\n", "id": "6509396", "language": "Python", "matching_score": 1.2143217325210571, "max_stars_count": 0, "path": "pylon/core/tools/log.py" }, { "content": "# Copyright 2018 getcarrier.io\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nfrom os import environ\n\nCPU_MULTIPLIER = 1000000000\nCONTAINER_CPU_QUOTA = int(float(environ.get('CPU_QUOTA', 1)) * CPU_MULTIPLIER) # nano fraction of single core\nBROWSERTIME_CPU_QUOTA = int(environ.get('BROWSERTIME_CPU_QUOTA', 2)) * CPU_MULTIPLIER # nano fraction of single core\nCONTAINER_MEMORY_QUOTA = environ.get('RAM_QUOTA', '1g')\nBROWSERTIME_MEMORY_QUOTA = environ.get('BROWSERTIME_MEMORY_QUOTA', '4g')\nLOKI_HOST = environ.get('LOKI_HOST', \"\")\nLOKI_PORT = int(environ.get('LOKI_PORT', 3100))\nLOG_LEVEL = environ.get('LOG_LEVEL', 'info')\nREMOVE_CONTAINERS = True if environ.get(\"REMOVE_CONTAINERS\", \"True\") == \"True\" else False\n\nUNZIP_DOCKERFILE = \"\"\"FROM kubeless/unzip:latest\nADD {localfile} /tmp/{docker_path}\nENTRYPOINT [\"unzip\", \"/tmp/{docker_path}\", \"-d\", \"/tmp/unzipped\"]\n\"\"\"\n\nUNZIP_DOCKER_COMPOSE = \"\"\"version: '3'\nservices:\n unzip:\n build: {path}\n volumes:\n - {volume}:/tmp/unzipped\n labels:\n - 'traefik.enable=false'\n container_name: unzip-{task_id}\nvolumes:\n {volume}:\n external: true\n\"\"\"\n\nNAME_CONTAINER_MAPPING = {\n \"Python 3.7\": 'lambda:python3.7',\n \"Python 3.8\": 'lambda:python3.8',\n \"Python 3.6\": 'lambda:python3.6',\n \"Python 2.7\": 'lambda:python2.7',\n \".NET Core 2.0 (C#)\": 'lambda:dotnetcore2.0',\n \".NET Core 2.1 (C#/PowerShell)\": 'lambda:dotnetcore2.1',\n \"Go 1.x\": \"lambda:go1.x\",\n \"Java 8\": \"lambda:java8\",\n \"Java 11\": \"lambda:java11\",\n \"Node.js 6.10\": 'lambda:nodejs6.10',\n \"Node.js 8.10\": 'lambda:nodejs8.10',\n \"Node.js 10.x\": 'lambda:nodejs10.x',\n \"Node.js 12.x\": 'lambda:nodejs12.x',\n \"Ruby 2.5\": 'lambda:ruby2.5'\n}\n\nBROWSERTIME_CONTAINER = 'getcarrier/browsertime:latest'\nSTRIP_HEADERS = [\"content-length\"]\n", "id": "1116534", "language": "Python", 
"matching_score": 2.0321712493896484, "max_stars_count": 0, "path": "interceptor/constants.py" }, { "content": "import os\nfrom uuid import uuid4\nimport shutil\nfrom json import dumps, loads\nfrom requests import post, get\nfrom datetime import datetime\nfrom time import mktime\nimport re\nimport docker\nfrom traceback import format_exc\nfrom subprocess import Popen, PIPE\nimport logging\nfrom interceptor.constants import NAME_CONTAINER_MAPPING, UNZIP_DOCKER_COMPOSE, UNZIP_DOCKERFILE\n\n\nclass LambdaExecutor:\n\n def __init__(self, task, event, galloper_url, token):\n self.task = task\n self.event = event\n self.galloper_url = galloper_url\n self.token = token\n\n def execute_lambda(self):\n lambda_id = str(uuid4())\n client = docker.from_env()\n container_name = NAME_CONTAINER_MAPPING.get(self.task['runtime'])\n if not container_name:\n return f\"Container {self.task['runtime']} is not found\"\n self.download_artifact(lambda_id)\n self.create_volume(client, lambda_id)\n mount = docker.types.Mount(type=\"volume\", source=lambda_id, target=\"/var/task\")\n env_vars = loads(self.task.get(\"env_vars\", \"{}\"))\n if self.task['task_name'] == \"control_tower\" and \"cc_env_vars\" in self.event[0]:\n env_vars.update(self.event[0][\"cc_env_vars\"])\n response = client.containers.run(f\"getcarrier/{container_name}\",\n command=[f\"{self.task['task_handler']}\", dumps(self.event)],\n mounts=[mount], stderr=True, remove=True,\n environment=env_vars)\n try:\n volume = client.volumes.get(lambda_id)\n volume.remove(force=True)\n except:\n logging.info(\"Failed to remove docker volume\")\n shutil.rmtree(f'/tmp/{lambda_id}', ignore_errors=True)\n try:\n log = response.decode(\"utf-8\", errors='ignore')\n except:\n log = \"\\n\\n{logs are not available}\"\n if container_name == \"lambda:python3.7\":\n results = re.findall(r'({.+?})', log)[-1]\n else:\n # TODO: magic of 2 enters is very flaky, Need to think on how to workaround, probably with specific logging\n results = log.split(\"\\n\\n\")[1]\n\n data = {\"ts\": int(mktime(datetime.utcnow().timetuple())), 'results': results, 'stderr': log}\n\n headers = {\n \"Content-Type\": \"application/json\",\n 'Authorization': f'bearer {self.token}'}\n post(f'{self.galloper_url}/api/v1/task/{self.task[\"task_id\"]}/results', headers=headers, data=dumps(data))\n # if self.task[\"callback\"]:\n # for each in self.event:\n # each['result'] = results\n # endpoint = f\"/api/v1/task/{self.task['project_id']}/{self.task['callback']}?exec=True\"\n # headers = {'Authorization': f'bearer {self.token}', 'content-type': 'application/json'}\n # self.task = get(f\"{self.galloper_url}/{endpoint}\", headers=headers).json()\n # self.execute_lambda()\n\n def download_artifact(self, lambda_id):\n try:\n os.mkdir(f'/tmp/{lambda_id}')\n endpoint = f'/api/v1/artifact/{self.task[\"project_id\"]}/{self.task[\"zippath\"]}'\n headers = {'Authorization': f'bearer {self.token}'}\n r = get(f'{self.galloper_url}/{endpoint}', allow_redirects=True, headers=headers)\n with open(f'/tmp/{lambda_id}/{lambda_id}', 'wb') as file_data:\n file_data.write(r.content)\n except Exception:\n print(format_exc())\n\n def create_volume(self, client, lambda_id):\n client.volumes.create(lambda_id)\n with open(f\"/tmp/{lambda_id}/Dockerfile\", 'w') as f:\n f.write(UNZIP_DOCKERFILE.format(localfile=lambda_id, docker_path=f'{lambda_id}.zip'))\n with open(f\"/tmp/{lambda_id}/docker-compose.yaml\", 'w') as f:\n f.write(UNZIP_DOCKER_COMPOSE.format(path=f\"/tmp/{lambda_id}\",\n volume=lambda_id, task_id=lambda_id))\n 
cmd = ['docker-compose', 'up']\n popen = Popen(cmd, stdout=PIPE, stderr=PIPE, universal_newlines=True, cwd=f\"/tmp/{lambda_id}\")\n popen.communicate()\n cmd = ['docker-compose', 'down', '--rmi', 'all']\n popen = Popen(cmd, stdout=PIPE, stderr=PIPE, universal_newlines=True, cwd=f\"/tmp/{lambda_id}\")\n return popen.communicate()\n", "id": "10728799", "language": "Python", "matching_score": 2.5425336360931396, "max_stars_count": 0, "path": "interceptor/lambda_executor.py" }, { "content": "from os import environ\nimport requests\nfrom traceback import format_exc\nimport json\n\nPROJECT_ID = environ.get('project_id')\nURL = environ.get('galloper_url')\nADDITIONAL_FILES = environ.get(\"additional_files\")\nTOKEN = environ.get(\"token\")\n\nif not all(a for a in [URL, ADDITIONAL_FILES]):\n exit(0)\n\ntry:\n files = json.loads(ADDITIONAL_FILES)\n if PROJECT_ID:\n endpoint = f'/api/v1/artifact/{PROJECT_ID}'\n else:\n endpoint = '/artifacts'\n headers = {'Authorization': f'bearer {TOKEN}'} if TOKEN else {}\n for file, path in files.items():\n r = requests.get(f'{URL}/{endpoint}/{file}', allow_redirects=True, headers=headers)\n with open(path, 'wb') as file_data:\n file_data.write(r.content)\nexcept Exception:\n print(format_exc())\n\n", "id": "11664259", "language": "Python", "matching_score": 1.408447027206421, "max_stars_count": 7, "path": "pre_processing/minio_additional_files_reader.py" }, { "content": "from os import path, environ\nimport json\nimport requests\nimport logging\nfrom interceptor.lambda_executor import LambdaExecutor\n\n\nclass PostProcessor:\n\n def __init__(self, galloper_url, project_id, galloper_web_hook, bucket, prefix, junit=False, token=None,\n integration=[], email_recipients=None):\n self.galloper_url = galloper_url\n self.project_id = project_id\n self.galloper_web_hook = galloper_web_hook\n self.bucket = bucket\n self.prefix = prefix\n self.config_file = '{}'\n self.junit = junit\n self.token = token\n self.integration = integration\n self.email_recipients = email_recipients\n\n def results_post_processing(self):\n if self.galloper_web_hook:\n if path.exists('/tmp/config.yaml'):\n with open(\"/tmp/config.yaml\", \"r\") as f:\n self.config_file = f.read()\n else:\n self.config_file = environ.get('CONFIG_FILE', '{}')\n\n event = {'galloper_url': self.galloper_url, 'project_id': self.project_id,\n 'config_file': json.dumps(self.config_file),\n 'bucket': self.bucket, 'prefix': self.prefix, 'junit': self.junit, 'token': self.token,\n 'integration': self.integration, \"email_recipients\": self.email_recipients}\n endpoint = f\"api/v1/task/{self.project_id}/\" \\\n f\"{self.galloper_web_hook.replace(self.galloper_url + '/task/', '')}?exec=True\"\n headers = {'Authorization': f'bearer {self.token}', 'content-type': 'application/json'}\n task = requests.get(f\"{self.galloper_url}/{endpoint}\", headers=headers).json()\n\n LambdaExecutor(task, event, self.galloper_url, self.token).execute_lambda()\n\n\n\n", "id": "3083306", "language": "Python", "matching_score": 1.030548334121704, "max_stars_count": 0, "path": "interceptor/post_processor.py" }, { "content": "from lambda_function import lambda_handler\n\n\ndef test_lambda():\n event = {\n \"notification_type\": \"ui\",\n \"test_id\": \"d3caeeb7-32bd-4b4b-824d-702708a7116c\",\n \"report_id\": 176,\n }\n lambda_handler(event, {})\n", "id": "4928881", "language": "Python", "matching_score": 0.7613494992256165, "max_stars_count": 0, "path": "tests/test_lamda.py" }, { "content": "import os\nfrom traceback import format_exc\nfrom json 
import loads\nfrom control_tower.config_mock import BulkConfig\n\n\ndef parse_args(events):\n args = {\n \"container\": [],\n \"execution_params\": [],\n \"job_type\": [],\n \"job_name\": '',\n \"concurrency\": [],\n \"channel\": [],\n \"artifact\": \"\",\n \"bucket\": \"\",\n \"save_reports\": False,\n \"junit\": False,\n \"quality_gate\": False,\n \"jira\": False,\n \"report_portal\": False,\n \"email\": False,\n \"email_recipients\": \"\",\n \"azure_devops\": False,\n \"deviation\": 0,\n \"max_deviation\": 0,\n \"test_id\": \"\"\n }\n for event in events:\n if \"container\" in event:\n args[\"container\"].append(event[\"container\"])\n if \"execution_params\" in event:\n args[\"execution_params\"].append(loads(event[\"execution_params\"]))\n if \"job_type\" in event:\n args[\"job_type\"].append(event[\"job_type\"])\n if \"concurrency\" in event:\n args[\"concurrency\"].append(event[\"concurrency\"])\n if \"channel\" in event:\n args[\"channel\"].append(event[\"channel\"])\n args[\"job_name\"] = event.get('job_name', 'test')\n args[\"bucket\"] = event.get('bucket', '')\n args[\"artifact\"] = event.get('artifact', '')\n args[\"save_reports\"] = event.get('save_reports', False)\n args[\"junit\"] = event.get('junit', False)\n args[\"quality_gate\"] = event.get('quality_gate', False)\n args[\"jira\"] = event.get('jira', False)\n args[\"report_portal\"] = event.get('report_portal', False)\n args[\"email\"] = event.get('email', False)\n args[\"email_recipients\"] = event.get('email_recipients', \"\")\n args[\"azure_devops\"] = event.get('azure_devops', False)\n args[\"deviation\"] = event.get('deviation', 0)\n args[\"max_deviation\"] = event.get('max_deviation', 0)\n args[\"test_id\"] = event.get('test_id', '')\n env_vars = event.get(\"cc_env_vars\", None)\n if env_vars:\n for key, value in env_vars.items():\n os.environ[key] = value\n\n args = BulkConfig(\n bulk_container=args['container'],\n bulk_params=args[\"execution_params\"],\n job_type=args[\"job_type\"],\n job_name=args[\"job_name\"],\n bulk_concurrency=args[\"concurrency\"],\n channel=args[\"channel\"],\n bucket=args[\"bucket\"],\n artifact=args[\"artifact\"],\n save_reports=args[\"save_reports\"],\n junit=args[\"junit\"],\n quality_gate=args[\"quality_gate\"],\n deviation=args[\"deviation\"],\n max_deviation=args[\"max_deviation\"],\n report_path=\"/tmp/reports\",\n test_id=args[\"test_id\"],\n jira=args[\"jira\"],\n report_portal=args[\"report_portal\"],\n email=args[\"email\"],\n azure_devops=args[\"azure_devops\"],\n email_recipients=args[\"email_recipients\"]\n )\n\n from control_tower.run import str2bool, process_git_repo, split_csv_file\n if \"git\" in events[0]:\n process_git_repo(events[0], args)\n if str2bool(os.environ.get('split_csv', 'False')):\n split_csv_file(args)\n return args\n\n\ndef handler(event=None, context=None):\n try:\n if not os.path.exists('/tmp/reports'):\n os.mkdir('/tmp/reports')\n args = parse_args(event)\n from control_tower.run import _start_and_track\n _start_and_track(args)\n return {\n 'statusCode': 200,\n 'body': \"test is done\"\n }\n except:\n return {\n 'statusCode': 500,\n 'body': format_exc()\n }\n", "id": "11400691", "language": "Python", "matching_score": 1.3793739080429077, "max_stars_count": 0, "path": "package/lambda.py" }, { "content": "import pytest\n\nimport os\nimport shutil\n\nfrom control_tower import git_clone\n\ngit_config_1 = {\n \"repo\": \"https://github.com/carrier-io/demo-jmeter.git\",\n \"repo_user\": \"\",\n \"repo_pass\": \"\",\n \"repo_key\": \"\",\n \"repo_branch\": 
\"main\"\n}\n\ndef test_clone_http():\n git_clone.clone_repo(git_config_1)\n assert os.path.exists('/tmp/git_dir/BasicEcommerce.jmx')\n shutil.rmtree('/tmp/git_dir')\n", "id": "9219763", "language": "Python", "matching_score": 0.14905613660812378, "max_stars_count": 0, "path": "tests/test_git_clone.py" }, { "content": "import shelve\n\nDB_NAME = '/tmp/observer.db'\n\n\ndef save_to_storage(key, value):\n db = shelve.open(DB_NAME, writeback=True)\n try:\n db[key] = value\n finally:\n db.close()\n\n\ndef get_from_storage(key):\n db = None\n try:\n db = shelve.open(DB_NAME, flag='r')\n existing = db[key]\n except Exception as e:\n return None\n finally:\n if db:\n db.close()\n return existing\n", "id": "2365561", "language": "Python", "matching_score": 0.029193738475441933, "max_stars_count": 0, "path": "observer_hub/db.py" }, { "content": "# class ValidationError(Exception):\n# def __init__(self, data):\n# super().__init__()\n# self.data = data\n#\n#\n# def format_test_parameters(test_parameters: list) -> dict:\n# REQUIRED_PARAMETERS = set(i.lower() for i in ['URL to scan'])\n# result = dict()\n# errors = dict()\n# item_value_key = 'default'\n# for index, i in enumerate(test_parameters):\n# name = i.get('name').lower()\n#\n# for k in set(i.keys()):\n# if k.startswith('_'):\n# del i[k]\n#\n# data_type = i.get('type', '').lower()\n# if data_type == 'list':\n# if not isinstance(i[item_value_key], list):\n# i[item_value_key] = [x.strip() for x in i[item_value_key].split(',')]\n# elif data_type in ('integer', 'number'):\n# try:\n# if isinstance(i[item_value_key], list):\n# i[item_value_key] = float(i[item_value_key][0])\n# else:\n# i[item_value_key] = float(i[item_value_key])\n# except ValueError as e:\n# errors[index] = str(e)\n# elif data_type in ('string', ''):\n# if isinstance(i[item_value_key], list):\n# i[item_value_key] = ','.join(i[item_value_key])\n# i[item_value_key] = i[item_value_key].strip()\n#\n# if name in REQUIRED_PARAMETERS and not i[item_value_key]:\n# errors[index] = f'{name} is required'\n#\n# result[name] = i\n# if errors:\n# raise ValidationError(errors)\n# return result\n", "id": "4591476", "language": "Python", "matching_score": 1.965760350227356, "max_stars_count": 0, "path": "api/utils.py" }, { "content": "from typing import Optional, Dict, Any, List, Union, get_origin\nfrom uuid import uuid4\n\nfrom pydantic import BaseModel, Json, validator, AnyUrl, parse_obj_as, root_validator, ValidationError\n\n\n@root_validator(pre=True, allow_reuse=True)\ndef empty_str_to_none(cls, values):\n removed = []\n # print('\\nROOT', values)\n for k in list(values.keys()):\n if values[k] == '':\n removed.append(k)\n del values[k]\n # values[k] = None\n # print('\\tROOT removed', removed)\n return values\n\n\ndef test_param_model_factory(required_params: Union[list, set, tuple] = None, type_mapping_by_name: dict = None):\n\n class TestParameter(BaseModel):\n class Config:\n anystr_strip_whitespace = True\n anystr_lower = True\n\n _type_mapping = {\n 'url': List[AnyUrl],\n 'urls': List[AnyUrl],\n 'string': str,\n 'number': int,\n 'list': list,\n 'item': str\n }\n\n # _empty_str_to_none = empty_str_to_none\n \n name: str\n type: Optional[str] = 'string'\n description: Optional[str] = ''\n default: Optional[Any] = ''\n\n # @validator('name', allow_reuse=True, always=True)\n # def validate_required(cls, value):\n # assert value, f'{value} is required'\n # return\n\n @validator('default', allow_reuse=True, always=True)\n def validate_required_value(cls, value, values):\n # print('default 
validator', values)\n name = values.get('name')\n\n type_ = values.get('type', str) \n if cls._type_mapping_by_name.get(name):\n type_ = cls._type_mapping_by_name.get(name)\n elif cls._type_mapping.get(type_):\n type_ = cls._type_mapping.get(type_)\n\n # print('default validator types', type_)\n if name in cls._required_params:\n # print('required!!', value, values.get('default'))\n assert value, f'{name} is required'\n value = cls.convert_types(value, type_)\n # print('\\tvalue final', parse_obj_as(Optional[type_], value))\n return parse_obj_as(Optional[type_], value)\n\n @staticmethod\n def convert_types(value, _type, list_delimiter=','):\n _checked_type = get_origin(_type) or _type\n # print('\\tvalue', value, type(value))\n # print('\\ttype', _type, _checked_type)\n if value is not None:\n if isinstance(value, str):\n value = value.strip()\n if not isinstance(value, list) and _checked_type is list:\n value = [i.strip() for i in str(value).split(list_delimiter)]\n elif isinstance(value, list) and _checked_type is not list:\n value = list_delimiter.join(value)\n # print('\\tvalue AFTER', value)\n return value\n\n TestParameter._required_params = set() if not required_params else set((i.lower() for i in required_params))\n TestParameter._type_mapping_by_name = type_mapping_by_name or dict()\n return TestParameter\n\n\nclass SecurityTestParams(BaseModel):\n _test_params_mapping = {\n 'url to scan': 'urls_to_scan',\n 'exclusions': 'urls_exclusions',\n 'scan location': 'scan_location',\n }\n\n # the following fields are optional as they are set in test_parameters validator using _test_params_mapping\n urls_to_scan: Optional[List[AnyUrl]] = []\n urls_exclusions: Optional[List[AnyUrl]] = []\n scan_location: Optional[str] = ''\n\n test_parameters: List[test_param_model_factory(['url to scan'], type_mapping_by_name={'url to scan': List[AnyUrl]})]\n\n @validator('test_parameters')\n def set_values_from_test_params(cls, value, values):\n for i in value:\n # print('i', i)\n # print('i in', i.name in cls._test_params_mapping.keys())\n if i.name in cls._test_params_mapping.keys():\n values[cls._test_params_mapping[i.name]] = i.default\n return value\n\n\nclass SecurityTestCommon(BaseModel):\n _empty_str_to_none = empty_str_to_none\n\n project_id: int\n project_name: str\n test_uid: Optional[str]\n name: str\n description: Optional[str] = ''\n\n @root_validator\n def set_uuid(cls, values):\n # print('RV', values)\n if not values.get('test_uid'):\n values['test_uid'] = str(uuid4())\n return values\n\n\nif __name__ == '__main__':\n import json\n source = json.loads('''\n {\"name\":\"df\",\"description\":\"\",\"test_parameters\":[{\"_data\":{},\"name\":\"URL to scan\",\"_name_class\":\"disabled\",\"_name_data\":{},\"default\":\"\",\"_default_data\":{},\"type\":\"URLs\",\"_type_class\":\"disabled\",\"_type_data\":{},\"description\":\"Data\",\"_description_class\":\"disabled\",\"_description_data\":{}},{\"_data\":{},\"name\":\"Exclusions\",\"_name_class\":\"disabled\",\"_name_data\":{},\"default\":\"\",\"_default_data\":{},\"type\":\"List\",\"_type_class\":\"disabled\",\"_type_data\":{},\"description\":\"Data\",\"_description_class\":\"disabled\",\"_description_data\":{}},{\"_data\":{},\"name\":\"Scan location\",\"_name_class\":\"disabled\",\"_name_data\":{},\"default\":\"Carrier default 
config\",\"_default_data\":{},\"type\":\"Item\",\"_type_class\":\"disabled\",\"_type_data\":{},\"description\":\"Data\",\"_description_class\":\"disabled\",\"_description_data\":{}}],\"integrations\":{},\"security_scheduling\":[]}\n ''')\n tp = json.loads('''\n {\"default\":\"\",\"description\":\"Data\",\"name\":\"Exclusions\",\"type\":\"List\",\"_description_class\":\"disabled\",\"_name_class\":\"disabled\",\"_type_class\":\"disabled\"}\n ''')\n # x = SecurityTestParams(test_parameters=source['test_parameters'])\n # print(x.test_parameters)\n # print(SecurityTestParams.__dict__)\n print(SecurityTestCommon(**source, project_id=1, project_name='qqq').dict())\n\n # pd_tp = test_param_model_factory(['exclusions1'])\n # pd_obj = pd_tp(**tp)\n # print(pd_obj.dict())\n", "id": "6644005", "language": "Python", "matching_score": 2.15712308883667, "max_stars_count": 0, "path": "models/pd/security_test.py" }, { "content": "import json\nfrom typing import Union\n\nfrom flask import request, make_response\nfrom flask_restful import abort, Resource\nfrom sqlalchemy import and_\n\nfrom ..utils import run_test, parse_test_data\nfrom ..models.api_tests import SecurityTestsDAST\nfrom ..models.security_results import SecurityResultsDAST\nfrom ..models.security_reports import SecurityReport\n\nfrom ...shared.utils.rpc import RpcMixin\n\n\nclass SecurityTestApi(Resource, RpcMixin):\n\n @staticmethod\n def get_filter(project_id: int, test_id: Union[int, str]):\n if isinstance(test_id, int):\n return and_(\n SecurityTestsDAST.project_id == project_id,\n SecurityTestsDAST.id == test_id\n )\n return and_(\n SecurityTestsDAST.project_id == project_id,\n SecurityTestsDAST.test_uid == test_id\n )\n\n def get(self, project_id: int, test_id: Union[int, str]):\n test = SecurityResultsDAST.query.filter(self.get_filter(project_id, test_id)).first()\n test = test.to_json()\n scanners = SecurityReport.query.with_entities(SecurityReport.tool_name).filter(\n self.get_filter(project_id, test_id)\n ).distinct().all()\n\n if scanners:\n test[\"scanners\"] = \", \".join([scan[0] for scan in scanners])\n return test\n\n def put(self, project_id: int, test_id: Union[int, str]):\n \"\"\" Update test data \"\"\"\n run_test_ = request.json.pop('run_test', False)\n test_data, errors = parse_test_data(\n project_id=project_id,\n request_data=request.json,\n rpc=self.rpc,\n common_kwargs={'exclude': {'test_uid', }}\n )\n\n if errors:\n return make_response(json.dumps(errors, default=lambda o: o.dict()), 400)\n\n test = SecurityTestsDAST.query.filter(self.get_filter(project_id, test_id))\n test.update(test_data)\n SecurityTestsDAST.commit()\n\n test = test.first()\n if run_test_:\n return run_test(test)\n\n return make_response(test.to_json(), 200)\n\n def post(self, project_id: int, test_id: Union[int, str]):\n \"\"\" Run test \"\"\"\n test = SecurityTestsDAST.query.filter(\n self.get_filter(project_id, test_id)\n ).first()\n return run_test(test, config_only=request.json.get('type', False))\n", "id": "4685481", "language": "Python", "matching_score": 3.9269561767578125, "max_stars_count": 0, "path": "api/test.py" }, { "content": "from sqlalchemy.sql import func, LABEL_STYLE_TABLENAME_PLUS_COL\n\nfrom .models.pd.security_test import SecurityTestParams, SecurityTestCommon\nfrom .models.security_reports import SecurityReport\nfrom .models.security_results import SecurityResultsDAST\n\nfrom ..shared.utils.rpc import RpcMixin\n\n\ndef security_results_or_404(run_id: int) -> SecurityResultsDAST:\n return 
SecurityResultsDAST.query.get_or_404(run_id)\n\n\ndef overview_data(project_id: int) -> dict:\n queries = [\n func.sum(getattr(SecurityResultsDAST, i)).label(f'sum_{i}')\n for i in SecurityReport.SEVERITY_CHOICES.keys()\n ]\n q = SecurityResultsDAST.query.with_entities(\n *queries\n ).filter(\n SecurityResultsDAST.project_id == project_id,\n )\n return dict(zip([i['name'] for i in q.column_descriptions], q.first()))\n\n\ndef parse_test_parameters(data: list, **kwargs) -> dict:\n pd_object = SecurityTestParams(test_parameters=data)\n return pd_object.dict(**kwargs)\n\n\ndef parse_common_test_parameters(project_id: int, name: str, description: str, **kwargs) -> dict:\n rpc = RpcMixin().rpc\n project = rpc.call.project_get_or_404(project_id=project_id)\n pd_object = SecurityTestCommon(\n project_id=project.id,\n project_name=project.name,\n name=name,\n description=description\n )\n return pd_object.dict(**kwargs)\n", "id": "5645325", "language": "Python", "matching_score": 2.4896936416625977, "max_stars_count": 0, "path": "rpc.py" }, { "content": "#!/usr/bin/python3\n# coding=utf-8\n\n# Copyright 2021 getcarrier.io\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\" Module \"\"\"\n\nfrom pylon.core.tools import log # pylint: disable=E0611,E0401\nfrom pylon.core.tools import module # pylint: disable=E0611,E0401\n\nfrom .api.tests import SecurityTestsRerun\nfrom .init_db import init_db\nfrom .rpc import security_results_or_404, overview_data, parse_test_parameters, parse_common_test_parameters\n\nfrom ..shared.utils.api_utils import add_resource_to_api\n\n\nclass Module(module.ModuleModel):\n \"\"\" Task module \"\"\"\n\n def __init__(self, context, descriptor):\n self.context = context\n self.descriptor = descriptor\n #\n self.settings = self.descriptor.config\n\n def init(self):\n \"\"\" Init module \"\"\"\n log.info(f'Initializing module {self.descriptor.name}')\n init_db()\n from .api.tests import SecurityTestsApi\n from .api.test import SecurityTestApi\n from .api.security_results_api import SecurityResultsApi\n from .api.security_dispatcher import SecuritySeedDispatcher\n from .api.security_findings_api import FindingsAPI\n from .api.update_test_status import TestStatusUpdater\n from .api.get_loki_url import GetLokiUrl\n from .api.security_report_api import SecurityReportAPI\n add_resource_to_api(\n self.context.api, GetLokiUrl,\n \"/security/<int:project_id>/get_url\",\n resource_class_kwargs={\"settings\": self.settings}\n )\n add_resource_to_api(\n self.context.api,\n SecurityTestsApi,\n \"/security/<int:project_id>/dast\"\n )\n add_resource_to_api(\n self.context.api, SecurityTestApi,\n \"/security/<int:project_id>/dast/<int:test_id>\",\n \"/security/<int:project_id>/dast/<string:test_id>\"\n )\n add_resource_to_api(\n self.context.api, SecurityResultsApi,\n \"/security/<int:project_id>/dast/results\"\n )\n add_resource_to_api(\n self.context.api, SecuritySeedDispatcher,\n \"/tests/<int:project_id>/security/<string:seed>\"\n )\n add_resource_to_api(\n self.context.api, 
FindingsAPI,\n \"/security/<int:project_id>/findings/<int:test_id>\",\n \"/security/<int:project_id>/finding\"\n )\n add_resource_to_api(\n self.context.api, TestStatusUpdater,\n \"/security/<int:project_id>/update_status/<int:test_id>\",\n \"/security/<int:project_id>/update_status/<string:test_id>\"\n )\n add_resource_to_api(\n self.context.api, SecurityReportAPI,\n \"/security/<int:project_id>\"\n )\n add_resource_to_api(\n self.context.api, SecurityTestsRerun,\n \"/security/rerun/<int:security_results_dast_id>\"\n )\n\n self.context.rpc_manager.register_function(\n security_results_or_404, name='security_results_or_404')\n self.context.rpc_manager.register_function(\n overview_data, name='security_overview_data')\n self.context.rpc_manager.register_function(\n parse_test_parameters, name='security_test_create_test_parameters')\n self.context.rpc_manager.register_function(\n parse_common_test_parameters, name='security_test_create_common_parameters')\n\n def deinit(self): # pylint: disable=R0201\n \"\"\" De-init module \"\"\"\n log.info(f'De-initializing module {self.descriptor.name}')\n", "id": "8354216", "language": "Python", "matching_score": 3.93570613861084, "max_stars_count": 0, "path": "module.py" }, { "content": "# Copyright 2021 getcarrier.io\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\" Module \"\"\"\n\nimport flask # pylint: disable=E0401\nimport jinja2 # pylint: disable=E0401\n\nfrom flask import request, render_template\n\nfrom pylon.core.tools import log # pylint: disable=E0611,E0401\nfrom pylon.core.tools import module # pylint: disable=E0611,E0401\n\nfrom ..shared.utils.api_utils import add_resource_to_api\nfrom .rpc_worker import backend_results_or_404\n\nfrom .init_db import init_db\n\n\nclass Module(module.ModuleModel):\n \"\"\" Task module \"\"\"\n\n def __init__(self, context, descriptor):\n self.context = context\n self.descriptor = descriptor\n\n def init(self):\n \"\"\" Init module \"\"\"\n log.info(\"Initializing module Backend_performance\")\n init_db()\n from .api.tests import TestsApi\n add_resource_to_api(self.context.api, TestsApi, \"/backend/<int:project_id>\")\n from .api.test import TestApiBackend\n add_resource_to_api(self.context.api, TestApiBackend, \"/tests/<int:project_id>/backend/<string:test_id>\")\n from .api.thresholds import BackendThresholdsAPI\n add_resource_to_api(self.context.api, BackendThresholdsAPI, \"/thresholds/<int:project_id>/backend\")\n from .api.baseline import BaselineAPI\n add_resource_to_api(self.context.api, BaselineAPI, \"/baseline/<int:project_id>\")\n from .api.reports import ReportAPI\n add_resource_to_api(self.context.api, ReportAPI, \"/reports/<int:project_id>\")\n from .api.charts import ReportChartsAPI\n add_resource_to_api(self.context.api, ReportChartsAPI, \"/chart/<string:source>/<string:target>\")\n from .api.report_status import ReportStatusAPI\n add_resource_to_api(self.context.api, ReportStatusAPI, \"/reports/<int:project_id>/<int:report_id>/status\")\n from .api.environments import EnvironmentsAPI\n 
add_resource_to_api(self.context.api, EnvironmentsAPI, \"/environment/<int:project_id>\")\n from .api.requests import RequestsAPI\n add_resource_to_api(self.context.api, RequestsAPI, \"/requests/<int:project_id>\")\n\n self.context.rpc_manager.register_function(backend_results_or_404, name='backend_results_or_404')\n\n def deinit(self): # pylint: disable=R0201\n \"\"\" De-init module \"\"\"\n log.info(\"De-initializing Backend_performance\")\n", "id": "2389958", "language": "Python", "matching_score": 3.149765968322754, "max_stars_count": 0, "path": "module.py" }, { "content": "# Copyright 2021 getcarrier.io\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\" Module \"\"\"\n\nfrom pylon.core.tools import log # pylint: disable=E0611,E0401\nfrom pylon.core.tools import module # pylint: disable=E0611,E0401\n\nfrom .api.artifacts_security_results_page import ArtifactsForSecurityResults, ArtifactDownload\nfrom ..shared.utils.api_utils import add_resource_to_api\n\n\nclass Module(module.ModuleModel):\n \"\"\" Task module \"\"\"\n\n def __init__(self, context, descriptor):\n self.context = context\n self.descriptor = descriptor\n\n def init(self):\n \"\"\" Init module \"\"\"\n log.info(\"Initializing module Artifacts\")\n from .api.buckets import Buckets\n from .api.artifacts import Artifacts\n from .api.artifact import Artifact\n\n add_resource_to_api(self.context.api, Buckets, \"/artifact/<int:project_id>\")\n add_resource_to_api(self.context.api, Artifacts, \"/artifact/<int:project_id>/<string:bucket>\")\n add_resource_to_api(self.context.api, Artifact, \"/artifact/<int:project_id>/<string:bucket>/<string:filename>\")\n #TODO: rename in interceptor\n add_resource_to_api(self.context.api, Artifact, \"/artifacts/<int:project_id>/<string:bucket>/<string:filename>\",\n endpoint=\"artifact_old\")\n\n add_resource_to_api(self.context.api, ArtifactsForSecurityResults, \"/artifact/security/<int:run_id>\")\n add_resource_to_api(self.context.api, ArtifactDownload, \"/artifact/security/<int:run_id>/<string:filename>\")\n\n def deinit(self): # pylint: disable=R0201\n \"\"\" De-init module \"\"\"\n log.info(\"De-initializing module Artifacts\")\n", "id": "1100666", "language": "Python", "matching_score": 0.9933077096939087, "max_stars_count": 0, "path": "module.py" }, { "content": "from ..shared.db_manager import Base, engine\n\n\ndef init_db():\n from .models.api_reports import APIReport\n # from .models.api_tag import APITag\n from .models.api_baseline import APIBaseline\n from .models.api_tests import ApiTests\n from .models.api_thresholds import APIThresholds\n Base.metadata.create_all(bind=engine)\n\n", "id": "10584339", "language": "Python", "matching_score": 1.7189593315124512, "max_stars_count": 0, "path": "init_db.py" }, { "content": "from .models.api_reports import APIReport\n\n\ndef backend_results_or_404(run_id):\n return APIReport.query.get_or_404(run_id)", "id": "2724091", "language": "Python", "matching_score": 0.03794823959469795, "max_stars_count": 0, "path": "rpc_worker.py" }, { 
"content": "#!/usr/bin/python3\n# coding=utf-8\n# pylint: disable=I0011,W1401,E0401,R0914,R0915,R0912\n\n# Copyright 2019 getcarrier.io\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"\n Trivy JSON parser\n\"\"\"\n\nimport json\nfrom collections import namedtuple\n\nfrom dusty.tools import log, markdown\nfrom dusty.models.finding import SastFinding\n\n\ndef parse_findings(filename, scanner):\n \"\"\" Parse findings \"\"\"\n # Load JSON\n try:\n with open(filename, \"r\") as file:\n data = json.load(file)\n except: # pylint: disable=W0702\n log.exception(\"Failed to load report JSON\")\n return\n # Severity mapping\n severity_mapping = {\n \"UNKNOWN\": \"Info\",\n \"LOW\": \"Low\",\n \"MEDIUM\": \"Medium\",\n \"HIGH\": \"High\",\n \"CRITICAL\": \"Critical\",\n }\n # Parse JSON\n if not isinstance(data, list) or not data:\n log.info(\"No data in report\")\n return\n # Make finding instances\n for data_block in data:\n if not data_block.get(\"Vulnerabilities\", list()):\n log.info(\"Skipping empty data block: %s\", data_block.get(\"Target\", data_block))\n continue\n for item in data_block.get(\"Vulnerabilities\", list()):\n #\n vuln_id = item.get(\"VulnerabilityID\", \"\")\n vuln_pkgname = item.get(\"PkgName\", \"\")\n vuln_installed_version = item.get(\"InstalledVersion\", \"\")\n vuln_fixed_version = item.get(\"FixedVersion\", \"\")\n vuln_layer = item.get(\"Layer\", dict()).get(\"DiffID\", \"\")\n #\n vuln_title = item.get(\"Title\", \"-\")\n if vuln_id:\n vuln_title = f\"{vuln_id}: {vuln_title}\"\n if vuln_pkgname:\n vuln_title = f\"{vuln_pkgname}: {vuln_title}\"\n #\n if not scanner.config.get(\"show_with_temp_id\", False) and \\\n vuln_id.startswith(\"TEMP-\"):\n log.info(\"Skipping finding with TEMP ID: %s\", vuln_title)\n continue\n if not scanner.config.get(\"show_without_description\", True) and \\\n \"Description\" not in item:\n log.info(\"Skipping finding without description: %s\", vuln_title)\n continue\n #\n vuln_severity = severity_mapping[item.get(\"Severity\", \"UNKNOWN\")]\n vuln_file = vuln_layer\n #\n vuln_info_chunks = list()\n #\n vuln_info_chunks.append(markdown.markdown_escape(item.get(\"Description\", \"-\")))\n #\n if vuln_id:\n vuln_info_chunks.append(f\"**VulnerabilityID:** {markdown.markdown_escape(vuln_id)}\")\n if vuln_pkgname:\n vuln_info_chunks.append(f\"**PkgName:** {markdown.markdown_escape(vuln_pkgname)}\")\n if vuln_installed_version:\n vuln_info_chunks.append(\n f\"**InstalledVersion:** {markdown.markdown_escape(vuln_installed_version)}\"\n )\n if vuln_fixed_version:\n vuln_info_chunks.append(\n f\"**FixedVersion:** {markdown.markdown_escape(vuln_fixed_version)}\"\n )\n if vuln_layer:\n vuln_info_chunks.append(f\"**Layer DiffID:** {markdown.markdown_escape(vuln_layer)}\")\n #\n vuln_refs = item.get(\"References\", list())\n if vuln_refs:\n vuln_info_chunks.append(\"**References:**\")\n for vuln_ref in vuln_refs:\n vuln_info_chunks.append(markdown.markdown_escape(vuln_ref))\n #\n finding = SastFinding(\n title=vuln_title,\n description=[\n 
\"\\n\\n\".join(vuln_info_chunks)\n ]\n )\n finding.set_meta(\"tool\", scanner.get_name())\n finding.set_meta(\"severity\", vuln_severity)\n finding.set_meta(\"legacy.file\", vuln_file)\n endpoints = list()\n if vuln_file:\n endpoints.append(namedtuple(\"Endpoint\", [\"raw\"])(raw=vuln_file))\n finding.set_meta(\"endpoints\", endpoints)\n log.debug(f\"Endpoints: {finding.get_meta('endpoints')}\")\n scanner.findings.append(finding)\n", "id": "11936967", "language": "Python", "matching_score": 2.080267906188965, "max_stars_count": 6, "path": "dusty/scanners/sast/trivy/parser.py" }, { "content": "#!/usr/bin/python3\n# coding=utf-8\n# pylint: disable=I0011,E0401\n\n# Copyright 2019 getcarrier.io\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"\n Reporter: reportportal\n\"\"\"\n\nfrom dusty.tools import log, markdown\nfrom dusty.models.module import DependentModuleModel\nfrom dusty.models.reporter import ReporterModel\nfrom dusty.models.finding import DastFinding, SastFinding\nfrom dusty.constants import SEVERITIES\n\nfrom .legacy import launch_reportportal_service\n\n\nclass Reporter(DependentModuleModel, ReporterModel):\n \"\"\" Report findings from scanners \"\"\"\n\n def __init__(self, context):\n \"\"\" Initialize reporter instance \"\"\"\n super().__init__()\n self.context = context\n self.config = \\\n self.context.config[\"reporters\"][__name__.split(\".\")[-2]]\n # Prepare config object (code from legacy 'parse_rp_config')\n self._rp_config = {\n \"rp_project\": self.config.get(\"rp_project_name\", \"Dusty\"),\n \"rp_launch_name\": self.config.get(\"rp_launch_name\", self.context.suite),\n \"rp_url\": self.config.get(\"rp_host\"),\n \"rp_token\": self.config.get(\"rp_token\"),\n }\n self._rp_config[\"rp_launch_tags\"] = self.config.get(\"rp_launch_tags\", None)\n self._rp_client = None\n\n def on_start(self):\n \"\"\" Called when testing starts \"\"\"\n log.info(\"Starting ReportPortal launch\")\n self._rp_client = launch_reportportal_service(self._rp_config)\n\n def report(self):\n \"\"\" Report \"\"\"\n if not self._rp_client:\n log.warning(\"ReportPortal configuration/connection is invalid. 
Skipping RP reporting\")\n return\n log.info(\"Reporting to ReportPortal\")\n for item in self.context.findings:\n if item.get_meta(\"information_finding\", False) or \\\n item.get_meta(\"false_positive_finding\", False) or \\\n item.get_meta(\"excluded_finding\", False):\n continue\n if isinstance(item, DastFinding):\n item_details = markdown.markdown_unescape(item.description)\n item_description = item_details\n tags = [\n f'Tool: {item.get_meta(\"tool\", \"\")}',\n f'TestType: {self.context.get_meta(\"testing_type\", \"DAST\")}',\n f'Severity: {item.get_meta(\"severity\", SEVERITIES[-1])}'\n ]\n if item.get_meta(\"confidence\", None):\n tags.append(f'Confidence: {item.get_meta(\"confidence\")}')\n item_id = self._rp_client.start_test_item(item.title, description=item_description)\n if item.get_meta(\"legacy.images\", None):\n for attachment in item.get_meta(\"legacy.images\"):\n self._rp_client.test_item_message(attachment[\"name\"], \"INFO\", attachment)\n self._rp_client.test_item_message(\"!!!MARKDOWN_MODE!!! %s \" % item_details, \"INFO\")\n self._rp_client.test_item_message(item.get_meta(\"issue_hash\", \"<no_hash>\"), \"ERROR\")\n self._rp_client.finish_test_item(item_id)\n elif isinstance(item, SastFinding):\n item_details = markdown.markdown_unescape(\"\\n\\n\".join(item.description))\n item_description = item_details\n tags = [\n f'Tool: {item.get_meta(\"tool\", \"\")}',\n f'TestType: {self.context.get_meta(\"testing_type\", \"SAST\")}',\n f'Severity: {item.get_meta(\"severity\", SEVERITIES[-1])}'\n ]\n if item.get_meta(\"confidence\", None):\n tags.append(f'Confidence: {item.get_meta(\"confidence\")}')\n item_id = self._rp_client.start_test_item(item.title, description=item_description)\n self._rp_client.test_item_message(\"!!!MARKDOWN_MODE!!! 
%s \" % item_details, \"INFO\")\n self._rp_client.test_item_message(item.get_meta(\"issue_hash\", \"<no_hash>\"), \"ERROR\")\n self._rp_client.finish_test_item(item_id)\n else:\n log.warning(\"Unsupported finding type\")\n continue # raise ValueError(\"Unsupported item type\")\n self._rp_client.finish_test()\n\n @staticmethod\n def fill_config(data_obj):\n \"\"\" Make sample config \"\"\"\n data_obj.insert(\n len(data_obj),\n \"rp_host\", \"https://rp.com\",\n comment=\"url to ReportPortal.io deployment\"\n )\n data_obj.insert(\n len(data_obj),\n \"rp_token\", \"<PASSWORD>\",\n comment=\"ReportPortal authentication token\"\n )\n data_obj.insert(\n len(data_obj),\n \"rp_project_name\", \"XXXXXX\",\n comment=\"Name of a Project in ReportPortal to send results to\"\n )\n data_obj.insert(\n len(data_obj),\n \"rp_launch_name\", \"XXXXXXX\",\n comment=\"Name of a Launch in ReportPortal to send results to\"\n )\n\n @staticmethod\n def validate_config(config):\n \"\"\" Validate config \"\"\"\n required = [\"rp_project_name\", \"rp_launch_name\", \"rp_host\", \"rp_token\"]\n not_set = [item for item in required if item not in config]\n if not_set:\n error = f\"Required configuration options not set: {', '.join(not_set)}\"\n log.error(error)\n raise ValueError(error)\n\n @staticmethod\n def get_name():\n \"\"\" Reporter name \"\"\"\n return \"ReportPortal\"\n\n @staticmethod\n def get_description():\n \"\"\" Reporter description \"\"\"\n return \"ReportPortal reporter\"\n", "id": "3539699", "language": "Python", "matching_score": 4.635707855224609, "max_stars_count": 6, "path": "dusty/reporters/reportportal/reporter.py" }, { "content": "#!/usr/bin/python3\n# coding=utf-8\n# pylint: disable=I0011,R0902,E0401\n\n# Copyright 2021 getcarrier.io\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"\n Reporter: Carrier 3.0 status reporting\n\"\"\"\n\nimport requests\n\nfrom dusty.tools import log\nfrom dusty.models.module import DependentModuleModel\nfrom dusty.models.reporter import ReporterModel\n\n\nclass Reporter(DependentModuleModel, ReporterModel):\n \"\"\" Listen to status events and report to Carrier platform \"\"\"\n\n def __init__(self, context):\n \"\"\" Initialize reporter instance \"\"\"\n super().__init__()\n self.context = context\n self.config = \\\n self.context.config[\"reporters\"][__name__.split(\".\")[-2]]\n self._enable_status_reporting()\n\n def _enable_status_reporting(self):\n self.context.event.subscribe(\"status\", self._status_listener)\n\n def _status_listener(self, event, data):\n log.debug(\"Got event: event=%s, data=%s\", event, data)\n requests.put(\n f'{self.config[\"url\"]}/api/v1/security/test_status/{self.config[\"project_id\"]}/{self.config[\"test_id\"]}',\n json={\"test_status\": data},\n headers={\"Authorization\": f'Bearer {self.config[\"token\"]}'}\n )\n\n @staticmethod\n def fill_config(data_obj):\n \"\"\" Make sample config \"\"\"\n\n @staticmethod\n def validate_config(config):\n \"\"\" Validate config \"\"\"\n required = [\"url\", \"project_id\", 
\"test_id\", \"token\"]\n not_set = [item for item in required if item not in config]\n if not_set:\n error = f\"Required configuration options not set: {', '.join(not_set)}\"\n log.error(error)\n raise ValueError(error)\n\n @staticmethod\n def get_name():\n \"\"\" Reporter name \"\"\"\n return \"Status\"\n\n @staticmethod\n def get_description():\n \"\"\" Reporter description \"\"\"\n return \"Scan status reporter\"\n", "id": "10117194", "language": "Python", "matching_score": 2.0162973403930664, "max_stars_count": 0, "path": "dusty/reporters/centry_status/reporter.py" }, { "content": "from requests import post\nfrom json import dumps\nfrom . import constants as c\n\n\nclass CentryConnector(object):\n def __init__(self, url, token, project_id, test_id):\n self.url = url\n self.project_id = project_id\n self.test_id = test_id\n #\n self.report_url = c.REPORT_API.format(project_id=self.project_id, test_id=self.test_id)\n self.finding_api = c.FINDING_API.format(project_id=self.project_id, test_id=self.test_id)\n #\n self.headers = {\n \"Content-type\": \"application/json\",\n \"Authorization\": f\"Bearer {token}\",\n }\n\n def create_test_results(self, test_results):\n report_id = post(f'{self.url}{self.report_url}',\n data=dumps(test_results),\n headers=self.headers).json()\n return report_id['id']\n\n def create_findings(self, findings):\n result = post(f'{self.url}{self.finding_api}',\n data=dumps(findings),\n headers=self.headers)\n return result.content\n", "id": "1499575", "language": "Python", "matching_score": 0.7439406514167786, "max_stars_count": 0, "path": "dusty/reporters/centry/connector.py" }, { "content": "import base64\nimport os\nimport re\nfrom multiprocessing import Pool\nfrom subprocess import PIPE, Popen\nfrom uuid import uuid4\nfrom jinja2 import Environment, PackageLoader, select_autoescape\nfrom observer_hub.audits import accessibility_audit, bestpractice_audit, performance_audit, privacy_audit\nfrom observer_hub.constants import REPORT_PATH, FFMPEG_PATH\nfrom observer_hub.util import logger\nfrom observer_hub.video import get_video_length\n\n\nclass HtmlReporter(object):\n def __init__(self, test_result, video_path, request_params, processing_path, screenshot_path):\n self.processing_path = processing_path\n self.title = request_params['info']['title']\n self.performance_timing = request_params['performancetiming']\n self.timing = request_params['timing']\n self.report_specific(test_result, video_path, request_params, screenshot_path)\n\n def report_specific(self, test_result, video_path, request_params, screenshot_path):\n self.acc_score, self.acc_data = accessibility_audit(request_params['accessibility'])\n self.bp_score, self.bp_data = bestpractice_audit(request_params['bestPractices'])\n self.perf_score, self.perf_data = performance_audit(request_params['performance'])\n self.priv_score, self.priv_data = privacy_audit(request_params['privacy'])\n self.test_result = test_result\n\n base64_encoded_string = \"\"\n if screenshot_path:\n with open(screenshot_path, 'rb') as image_file:\n base64_encoded_string = base64.b64encode(image_file.read()).decode(\"utf-8\")\n\n self.html = self.generate_html(page_name=request_params['info']['title'],\n video_path=video_path,\n test_status=self.test_result,\n start_time=request_params['info'].get('testStart', 0),\n perf_score=self.perf_score,\n priv_score=self.priv_score,\n acc_score=self.acc_score,\n bp_score=self.bp_score,\n acc_findings=self.acc_data,\n perf_findings=self.perf_data,\n bp_findings=self.bp_data,\n 
priv_findings=self.priv_data,\n resource_timing=request_params['performanceResources'],\n marks=self.fix_details(request_params['marks']),\n measures=self.fix_details(request_params['measures']),\n navigation_timing=request_params['performancetiming'],\n info=request_params['info'],\n timing=request_params['timing'],\n base64_full_page_screen=base64_encoded_string)\n\n def fix_details(self, values):\n for value in values:\n if \"detail\" in value.keys() and value[\"detail\"] is None:\n value['detail'] = ''\n return values\n\n def concut_video(self, start, end, page_name, video_path, encode=True):\n logger.info(f\"Concut video {video_path}\")\n p = Pool(7)\n res = []\n try:\n page_name = page_name.replace(\" \", \"_\")\n process_params = [{\n \"video_path\": video_path,\n \"ms\": part,\n \"test_name\": page_name,\n \"processing_path\": self.processing_path,\n \"encode\": encode\n } for part in range(start, end, (end - start) // 8)][1:]\n\n os.makedirs(os.path.join(self.processing_path, sanitize(page_name)), exist_ok=True)\n res = p.map(trim_screenshot, process_params)\n except:\n from traceback import format_exc\n logger.warn(format_exc())\n finally:\n p.terminate()\n return res\n\n def generate_html(self, page_name, video_path, test_status, start_time, perf_score,\n priv_score, acc_score, bp_score, acc_findings, perf_findings, bp_findings,\n priv_findings, resource_timing, marks, measures, navigation_timing, info, timing,\n base64_full_page_screen):\n\n env = Environment(\n loader=PackageLoader('observer_hub', 'templates'),\n autoescape=select_autoescape(['html', 'xml'])\n )\n\n end = get_video_length(video_path)\n screenshots = self.cut_video_to_screenshots(start_time, end, page_name, video_path)\n template = env.get_template('perfreport.html')\n res = template.render(page_name=page_name, test_status=test_status,\n perf_score=perf_score, priv_score=priv_score, acc_score=acc_score, bp_score=bp_score,\n screenshots=screenshots, full_page_screen=base64_full_page_screen,\n acc_findings=acc_findings,\n perf_findings=perf_findings,\n bp_findings=bp_findings, priv_findings=priv_findings, resource_timing=resource_timing,\n marks=marks, measures=measures, navigation_timing=navigation_timing,\n info=info, timing=timing)\n\n return re.sub(r'[^\\x00-\\x7f]', r'', res)\n\n def cut_video_to_screenshots(self, start_time, end, page_name, video_path, encode=True):\n if video_path is None:\n return []\n\n screenshots_dict = []\n for each in self.concut_video(start_time, end, page_name, video_path, encode):\n if each:\n screenshots_dict.append(each)\n if encode:\n return [list(e.values())[0] for e in sorted(screenshots_dict, key=lambda d: list(d.keys()))]\n else:\n return screenshots_dict\n\n def save_report(self):\n report_uuid = uuid4()\n os.makedirs(REPORT_PATH, exist_ok=True)\n report_file_name = f'{REPORT_PATH}/{self.title}_{report_uuid}.html'\n logger.info(f\"Generate html report {report_file_name}\")\n with open(report_file_name, 'w') as f:\n f.write(self.html)\n return HtmlReport(self.title, report_uuid)\n\n\nclass HtmlReport(object):\n def __init__(self, title, report_uuid, extension=\"html\"):\n self.report_uuid = report_uuid\n self.file_name = f\"{title}_{report_uuid}.{extension}\"\n self.path = f\"{REPORT_PATH}/{self.file_name}\"\n\n\ndef sanitize(filename):\n return \"\".join(x for x in filename if x.isalnum())[0:25]\n\n\ndef trim_screenshot(kwargs):\n try:\n image_path = f'{os.path.join(kwargs[\"processing_path\"], sanitize(kwargs[\"test_name\"]), str(kwargs[\"ms\"]))}_out.jpg'\n command = 
f'{FFMPEG_PATH} -ss {str(round(kwargs[\"ms\"] / 1000, 3))} -i {kwargs[\"video_path\"]} ' \\\n                  f'-vframes 1 {image_path}'\n        Popen(command, stderr=PIPE, shell=True, universal_newlines=True).communicate()\n        if kwargs.get(\"encode\", True):\n            with open(image_path, \"rb\") as image_file:\n                return {kwargs[\"ms\"]: base64.b64encode(image_file.read()).decode(\"utf-8\")}\n        else:\n            if os.path.exists(image_path):\n                return {kwargs[\"ms\"]: {\n                    \"path\": image_path,\n                    \"name\": f'{str(kwargs[\"ms\"])}_out.jpg'}\n                }\n            raise FileNotFoundError()\n    except FileNotFoundError:\n        from traceback import format_exc\n        logger.warn(format_exc())\n        return {}\n\n\ndef get_test_status(threshold_results):\n    if threshold_results['failed'] > 0:\n        return \"failed\"\n    return \"passed\"\n", "id": "6908070", "language": "Python", "matching_score": 5.576266288757324, "max_stars_count": 0, "path": "observer_hub/reporters/html_reporter.py" }, { "content": "import os\nimport shutil\nfrom json import dumps\nfrom uuid import uuid4\nfrom observer_hub.video import get_video_length\nfrom observer_hub.reporters.html_reporter import HtmlReport, HtmlReporter\nfrom observer_hub.constants import REPORT_PATH\n\n\nclass MinioReporter(HtmlReporter):\n    def __init__(self, test_result, video_path, request_params, processing_path, screenshot_path):\n        self.package = {}\n        super().__init__(test_result, video_path, request_params, processing_path, screenshot_path)\n\n    def report_specific(self, test_result, video_path, request_params, screenshot_path):\n        screenshots = self.cut_video_to_screenshots(request_params['info'].get('testStart', 0),\n                                                    get_video_length(video_path),\n                                                    request_params['info']['title'],\n                                                    video_path=video_path,\n                                                    encode=False)\n        self.package = dict(page_name=request_params['info']['title'], test_status=test_result,\n                            screenshots=screenshots, full_page_screen=screenshot_path,\n                            resource_timing=request_params['performanceResources'],\n                            marks=self.fix_details(request_params['marks']),\n                            measures=self.fix_details(request_params['measures']),\n                            navigation_timing=request_params['performancetiming'],\n                            info=request_params['info'], timing=request_params['timing'])\n\n    def save_report(self):\n        \"\"\"\n        creating zip with all required data\n        :return: path\n        \"\"\"\n        report_uuid = uuid4()\n        os.makedirs(REPORT_PATH, exist_ok=True)\n        minio_path = os.path.join(REPORT_PATH, f'observer_{report_uuid}')\n        os.makedirs(minio_path, exist_ok=True)\n        with open(os.path.join(minio_path, 'package.json'), 'w') as f:\n            f.write(dumps(self.package, indent=2))\n        for screenshot in self.package[\"screenshots\"]:\n            f = screenshot[list(screenshot.keys())[0]]\n            try:\n                shutil.move(f[\"path\"], os.path.join(minio_path, f['name']))\n            except FileNotFoundError:\n                pass\n        shutil.make_archive(minio_path, 'zip', minio_path)\n        return HtmlReport('observer', report_uuid, \"zip\")\n", "id": "3569883", "language": "Python", "matching_score": 2.662674903869629, "max_stars_count": 0, "path": "observer_hub/reporters/minio_reporter.py" }, { "content": "import copy\nfrom deepdiff import DeepDiff\n\nfrom observer_hub.assertions import assert_page_thresholds\nfrom observer_hub.integrations.galloper import notify_on_test_end, notify_on_command_end\nfrom observer_hub.reporters.html_reporter import HtmlReporter, get_test_status\nfrom observer_hub.reporters.minio_reporter import MinioReporter\nfrom observer_hub.reporters.junit_reporter import generate_junit_report\nfrom observer_hub.reporters.browsertime_reporter import port_async_processing_task\nfrom observer_hub.util import 
logger\n\n\ndef compute_results_for_simple_page(perf_agent):\n metrics = perf_agent.get_performance_metrics()\n resources = copy.deepcopy(metrics['performanceResources'])\n\n sorted_items = sorted(resources, key=lambda k: k['startTime'])\n current_total = metrics['performancetiming']['loadEventEnd'] - metrics['performancetiming']['navigationStart']\n fixed_end = sorted_items[-1]['responseEnd']\n diff = fixed_end - current_total\n metrics['performancetiming']['loadEventEnd'] += round(diff)\n return metrics\n\n\n# total_load_time = perf_timing['loadEventEnd'] - perf_timing['navigationStart']\n# time_to_first_byte = perf_timing['responseStart'] - perf_timing['navigationStart']\n# time_to_first_paint = timing['firstPaint']\n# dom_content_loading = perf_timing['domContentLoadedEventStart'] - perf_timing['domLoading']\n# dom_processing = perf_timing['domComplete'] - perf_timing['domLoading']\ndef compute_results_for_spa(old, new):\n result = copy.deepcopy(new)\n\n timing = {\n 'connectEnd': 0,\n 'connectStart': 0,\n 'domComplete': 0,\n 'domContentLoadedEventEnd': 0,\n 'domContentLoadedEventStart': 0,\n 'domInteractive': 0,\n 'domLoading': 0,\n 'domainLookupEnd': 0,\n 'domainLookupStart': 0,\n 'fetchStart': 0,\n 'loadEventEnd': 0,\n 'loadEventStart': 0,\n 'navigationStart': 0,\n 'redirectEnd': 0,\n 'redirectStart': 0,\n 'requestStart': 0,\n 'responseEnd': 0,\n 'responseStart': 0,\n 'secureConnectionStart': 0,\n 'unloadEventEnd': 0,\n 'unloadEventStart': 0\n }\n\n diff = DeepDiff(old[\"performanceResources\"], new[\"performanceResources\"], ignore_order=True)\n\n removed = {}\n added = diff['iterable_item_added']\n if 'iterable_item_removed' in diff.keys():\n removed = diff['iterable_item_removed']\n\n items_added = []\n for key, item in added.items():\n if key not in removed.keys():\n items_added.append(item)\n\n sorted_items = sorted(items_added, key=lambda k: k['startTime'])\n\n fields = [\n 'connectEnd',\n 'connectStart',\n 'domainLookupEnd',\n 'domainLookupStart',\n 'fetchStart',\n 'requestStart',\n 'responseEnd',\n 'responseStart',\n 'secureConnectionStart',\n 'startTime'\n ]\n\n first_result = sorted_items[0]\n first_point = first_result['startTime']\n for item in sorted_items:\n for field in fields:\n curr_value = item[field]\n if curr_value == 0:\n continue\n item[field] = curr_value - first_point\n\n sorted_items = sorted(items_added, key=lambda k: k['responseEnd'])\n latest_response = round(sorted_items[-1]['responseEnd'])\n\n result[\"performanceResources\"] = sorted_items\n timing['requestStart'] = round(first_result['requestStart'])\n timing['responseStart'] = round(first_result['responseStart'])\n timing['loadEventEnd'] = round(latest_response)\n\n content_loading_time = 0\n for item in sorted_items:\n if item['decodedBodySize'] > 0:\n content_loading_time += round(item[\"responseEnd\"] - item[\"responseStart\"])\n\n timing['domContentLoadedEventStart'] = 1\n timing['domContentLoadedEventEnd'] = timing['domContentLoadedEventStart']\n\n result['performancetiming'] = timing\n result['timing']['firstPaint'] = new['timing']['firstPaint'] - old['timing']['firstPaint']\n\n return result\n\n\ndef process_results_for_page(galloper_url, galloper_project_id, galloper_token, report_id, execution_result,\n thresholds, session_id):\n threshold_results = assert_page_thresholds(execution_result, thresholds)\n report, minio_package = generate_html_report(execution_result, threshold_results)\n logger.info(execution_result.results[\"info\"].get(\"url\"))\n 
logger.info(execution_result.results[\"info\"].get(\"headers\"))\n    if execution_result.results[\"info\"].get(\"url\"):\n        logger.info(\"About to start browsertime\")\n        port_async_processing_task(galloper_url, galloper_project_id, galloper_token,\n                                   execution_result, report.file_name, minio_package.file_name)\n    notify_on_command_end(galloper_url, galloper_project_id, galloper_token, report_id, report, minio_package,\n                          execution_result, threshold_results, session_id)\n    execution_result.report = report\n\n\ndef process_results_for_test(galloper_url, galloper_project_id, galloper_token, report_id, scenario_name,\n                             threshold_results, junit_report,\n                             junit_report_bucket, tz):\n\n    junit_report_name = None\n    if junit_report:\n        junit_report_name = generate_junit_report(scenario_name, threshold_results, junit_report)\n\n    notify_on_test_end(galloper_url, galloper_project_id, galloper_token, report_id, threshold_results, None,\n                       junit_report_name, junit_report_bucket,\n                       tz)\n    return threshold_results, junit_report_name\n\n\ndef generate_html_report(execution_result, threshold_results):\n    logger.info(\"=====> Reports generation\")\n    test_status = get_test_status(threshold_results)\n    reporter = HtmlReporter(test_status, execution_result.video_path,\n                            execution_result.results,\n                            execution_result.video_folder,\n                            execution_result.screenshot_path)\n    minio_reporter = MinioReporter(test_status, execution_result.video_path,\n                                   execution_result.results,\n                                   execution_result.video_folder,\n                                   execution_result.screenshot_path)\n\n    return reporter.save_report(), minio_reporter.save_report()\n", "id": "1891638", "language": "Python", "matching_score": 4.040355205535889, "max_stars_count": 0, "path": "observer_hub/processors/results_processor.py" }, { "content": "import abc\nimport json\nimport os\nimport re\nimport time\nfrom datetime import datetime\nfrom urllib.parse import urlparse\n\nfrom influxdb import InfluxDBClient\n\nfrom observer_hub.constants import EXPORTERS_PATH\nimport uuid\n\n\nclass Exporter(object):\n\n    def __init__(self, raw_data):\n        self.raw_data = raw_data\n\n        resources = raw_data['performanceResources']\n        perf_timing = raw_data['performancetiming']\n        timing = raw_data['timing']\n\n        self.requests = self.count_request_number(resources)\n        self.domains = self.count_unique_domain_number(resources)\n        self.total_load_time = perf_timing['loadEventEnd'] - perf_timing['navigationStart']\n        self.speed_index = timing['speedIndex']\n        self.time_to_first_byte = perf_timing['responseStart'] - perf_timing['navigationStart']\n        self.time_to_first_paint = timing['firstPaint']\n        self.dom_content_loading = perf_timing['domContentLoadedEventStart'] - perf_timing['domLoading']\n        self.dom_processing = perf_timing['domComplete'] - perf_timing['domLoading']\n\n    def count_request_number(self, resources):\n        # Exclude the performance-bookmarklet's own resources (pattern was a JS-style\n        # /regex/ with delimiters, which never matches in Python's re.match)\n        return list(filter(\n            lambda x: not re.match(r'http[s]?:\\/\\/(micmro|nurun).github.io\\/performance-bookmarklet\\/.*', x['name']),\n            resources))\n\n    def count_unique_domain_number(self, resources):\n        result = set()\n        for e in resources:\n            url = urlparse(e['name'])\n            result.add(url.netloc)\n        return result\n\n    @abc.abstractmethod\n    def export(self):\n        pass\n\n\nclass TelegraphJsonExporter(Exporter):\n\n    def __init__(self, raw_data):\n        super().__init__(raw_data)\n\n    def export(self):\n        # https://github.com/influxdata/telegraf/tree/master/plugins/serializers/json\n\n        result = {\n            \"fields\": {\n                \"requests\": len(self.requests),\n                \"domains\": len(self.domains),\n                \"total\": self.total_load_time,\n                \"speed_index\": 
self.speed_index,\n \"time_to_first_byte\": self.time_to_first_byte,\n \"time_to_first_paint\": self.time_to_first_paint,\n \"dom_content_loading\": self.dom_content_loading,\n \"dom_processing\": self.dom_processing\n },\n \"name\": self.raw_data['info']['title'],\n \"tags\": {},\n \"timestamp\": time.time()\n }\n os.makedirs(EXPORTERS_PATH, exist_ok=True)\n with open(os.path.join(EXPORTERS_PATH, f'{uuid.uuid1()}.json'), 'w') as outfile:\n json.dump(result, outfile, indent=4)\n return json.dumps(result)\n\n\nclass InfluxExporter(Exporter):\n\n def __init__(self, raw_data):\n influx_host = os.getenv(\"INFLUX_HOST\", \"carrier-influx\")\n influx_port = os.getenv(\"INFLUX_PORT\", \"8086\")\n influx_db_name = os.getenv(\"INFLUX_DB\", \"perfui\")\n\n self.client = InfluxDBClient(host=influx_host, port=influx_port)\n self.client.switch_database(influx_db_name)\n super().__init__(raw_data)\n\n def export(self):\n json_body = [{\n \"measurement\": \"ui_performance\",\n \"tags\": {\n \"name\": self.raw_data['info']['title']\n },\n \"time\": datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ'),\n \"fields\": {\n \"requests\": len(self.requests),\n \"domains\": len(self.domains),\n \"total\": self.total_load_time,\n \"speed_index\": self.speed_index,\n \"time_to_first_byte\": self.time_to_first_byte,\n \"time_to_first_paint\": self.time_to_first_paint,\n \"dom_content_loading\": self.dom_content_loading,\n \"dom_processing\": self.dom_processing\n }\n }]\n\n self.client.write_points(json_body)\n\n\nclass GalloperExporter(Exporter):\n\n def __init__(self, raw_data):\n super().__init__(raw_data)\n\n def export(self):\n return {\n \"requests\": len(self.requests),\n \"domains\": len(self.domains),\n \"total\": self.total_load_time,\n \"speed_index\": self.speed_index,\n \"time_to_first_byte\": self.time_to_first_byte,\n \"time_to_first_paint\": self.time_to_first_paint,\n \"dom_content_loading\": self.dom_content_loading,\n \"dom_processing\": self.dom_processing\n }\n\n\nclass JsonExporter(Exporter):\n\n def __init__(self, raw_data):\n super().__init__(raw_data)\n\n def export(self):\n return {\n \"measurement\": \"ui_performance\",\n \"time\": datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ'),\n \"fields\": {\n \"requests\": len(self.requests),\n \"domains\": len(self.domains),\n \"total\": self.total_load_time,\n \"speed_index\": self.speed_index,\n \"time_to_first_byte\": self.time_to_first_byte,\n \"time_to_first_paint\": self.time_to_first_paint,\n \"dom_content_loading\": self.dom_content_loading,\n \"dom_processing\": self.dom_processing\n }\n }\n\n\nclass Exporter(object):\n\n def __init__(self, formats):\n self.formats = formats\n\n def export(self, data):\n if \"telegraph\" in self.formats:\n TelegraphJsonExporter(data).export()\n if \"influx\" in self.formats:\n InfluxExporter(data).export()\n\n def to_json(self, data):\n return JsonExporter(data).export()['fields']\n", "id": "1928152", "language": "Python", "matching_score": 1.606111764907837, "max_stars_count": 0, "path": "observer_hub/models/exporters.py" }, { "content": "import math\nimport requests\nfrom traceback import format_exc\nimport os\nfrom json import loads\nimport sys\nfrom datetime import datetime\nimport pytz\nimport re\nimport shutil\n\n\ndef is_threshold_failed(actual, comparison, expected):\n if comparison == 'gte':\n return actual >= expected\n elif comparison == 'lte':\n return actual <= expected\n elif comparison == 'gt':\n return actual > expected\n elif comparison == 'lt':\n return actual < expected\n elif comparison == 
'eq':\n return actual == expected\n return False\n\n\ndef get_aggregated_value(aggregation, metrics):\n if aggregation == 'max':\n return max(metrics)\n elif aggregation == 'min':\n return min(metrics)\n elif aggregation == 'avg':\n return int(sum(metrics) / len(metrics))\n elif aggregation == 'pct95':\n return percentile(metrics, 95)\n elif aggregation == 'pct50':\n return percentile(metrics, 50)\n else:\n raise Exception(f\"No such aggregation {aggregation}\")\n\n\ndef percentile(data, percentile):\n size = len(data)\n return sorted(data)[int(math.ceil((size * percentile) / 100)) - 1]\n\n\ndef process_page_results(page_name, path, galloper_url, project_id, token, timestamp, prefix, loops):\n print(f\"processing: {path}\")\n report_bucket = f\"{galloper_url}/api/v1/artifacts/{project_id}/reports\"\n static_bucket = f\"{galloper_url}/api/v1/artifacts/{project_id}/sitespeedstatic\"\n # index.html\n with open(f\"{path}index.html\", \"r\", encoding='utf-8') as f:\n index_html = f.read()\n index_html = update_page_results_html(index_html, report_bucket, static_bucket, page_name, timestamp, loops, prefix)\n with open(f\"/{page_name}_{timestamp}_index.html\", 'w') as f:\n f.write(index_html)\n upload_file(f\"{page_name}_{timestamp}_index.html\", \"/\", galloper_url, project_id, token)\n # metrics.html\n with open(f\"{path}metrics.html\", \"r\", encoding='utf-8') as f:\n metrics_html = f.read()\n metrics_html = update_page_results_html(metrics_html, report_bucket, static_bucket, page_name, timestamp, loops, prefix)\n with open(f\"/{page_name}_{timestamp}_metrics.html\", 'w') as f:\n f.write(metrics_html)\n upload_file(f\"{page_name}_{timestamp}_metrics.html\", \"/\", galloper_url, project_id, token)\n\n # results.html\n for i in range(1, loops + 1):\n with open(f\"{path}{i}.html\", \"r\", encoding='utf-8') as f:\n results_html = f.read()\n results_html = update_page_results_html(results_html, report_bucket, static_bucket, page_name, timestamp, loops, prefix)\n with open(f\"/{page_name}_{timestamp}_{i}.html\", 'w') as f:\n f.write(results_html)\n upload_file(f\"{page_name}_{timestamp}_{i}.html\", \"/\", galloper_url, project_id, token)\n\n upload_page_results_data(path, page_name, timestamp, galloper_url, project_id, token, loops)\n\n page_results = get_page_results(path)\n return page_results\n\n\ndef get_page_results(path):\n json_file = f\"{path}data/browsertime.pageSummary.json\"\n with open(json_file, \"r\") as f:\n page = loads(f.read())\n page_result = {\"total\": [each for each in page[\"fullyLoaded\"]],\n \"speed_index\": [each[\"SpeedIndex\"] for each in page[\"visualMetrics\"]],\n \"time_to_first_byte\": [each[\"timings\"][\"ttfb\"] for each in page[\"browserScripts\"]],\n \"time_to_first_paint\": [each[\"timings\"][\"firstPaint\"] for each in page[\"browserScripts\"]],\n \"dom_content_loading\": [each[\"timings\"][\"navigationTiming\"][\"domContentLoadedEventEnd\"]\n for each in page[\"browserScripts\"]],\n \"dom_processing\": [each[\"timings\"][\"navigationTiming\"][\"domComplete\"]\n for each in page[\"browserScripts\"]],\n \"first_contentful_paint\": [each[\"firstContentfulPaint\"] for each in page[\"googleWebVitals\"]],\n \"largest_contentful_paint\": [each[\"largestContentfulPaint\"] for each in\n page[\"googleWebVitals\"]],\n \"cumulative_layout_shift\": [round(float(each[\"cumulativeLayoutShift\"])) for each in\n page[\"googleWebVitals\"]],\n \"total_blocking_time\": [each[\"totalBlockingTime\"] for each in page[\"googleWebVitals\"]],\n \"first_visual_change\": 
[each[\"FirstVisualChange\"] for each in page[\"visualMetrics\"]],\n \"last_visual_change\": [each[\"LastVisualChange\"] for each in page[\"visualMetrics\"]]}\n return page_result\n\n\ndef update_report(page_name, aggregated_result, galloper_url, project_id, token, report_id, timestamp):\n data = {\n \"name\": page_name,\n \"type\": \"page\",\n \"identifier\": page_name,\n \"metrics\": aggregated_result,\n \"bucket_name\": \"reports\",\n \"file_name\": f\"{page_name}_{timestamp}_index.html\",\n \"resolution\": \"auto\",\n \"browser_version\": \"chrome\",\n \"thresholds_total\": 0, # add thresholds\n \"thresholds_failed\": 0,\n \"locators\": [],\n \"session_id\": \"session_id\"\n }\n\n try:\n requests.post(f\"{galloper_url}/api/v1/observer/{project_id}/{report_id}\", json=data,\n headers={'Authorization': f\"Bearer {token}\"})\n except Exception:\n print(format_exc())\n\n\ndef finalize_report(galloper_url, project_id, token, report_id):\n time = datetime.now(tz=pytz.timezone(\"UTC\"))\n # exception_message = \"\"\n # if test_thresholds_total:\n # violated = round(float(test_thresholds_failed / test_thresholds_total) * 100, 2)\n # print(f\"Failed thresholds: {violated}\")\n # if violated > 30:\n # exception_message = f\"Failed thresholds rate more then {violated}%\"\n report_data = {\n \"report_id\": report_id,\n \"time\": time.strftime('%Y-%m-%d %H:%M:%S'),\n \"status\": \"Finished\",\n \"thresholds_total\": 0,\n \"thresholds_failed\": 0,\n \"exception\": \"\"\n }\n\n try:\n requests.put(f\"{galloper_url}/api/v1/observer/{project_id}\", json=report_data,\n headers={'Authorization': f\"Bearer {token}\"})\n except Exception:\n print(format_exc())\n\n\ndef upload_file(file_name, file_path, galloper_url, project_id, token, bucket=\"reports\"):\n file = {'file': open(f\"{file_path}{file_name}\", 'rb')}\n try:\n requests.post(f\"{galloper_url}/api/v1/artifacts/{project_id}/{bucket}/{file_name}\",\n files=file,\n headers={'Authorization': f\"Bearer {token}\"})\n except Exception:\n print(format_exc())\n\n\ndef upload_page_results_data(path, page_name, timestamp, galloper_url, project_id, token, loops):\n for i in range(1, loops + 1):\n filmstrip_files = os.listdir(f\"{path}data/filmstrip/{i}/\")\n for each in filmstrip_files:\n os.rename(f\"{path}data/filmstrip/{i}/{each}\", f\"{path}data/filmstrip/{i}/{page_name}_{timestamp}_{each}\")\n upload_file(f\"{page_name}_{timestamp}_{each}\", f\"{path}data/filmstrip/{i}/\", galloper_url, project_id, token)\n screenshot_files = os.listdir(f\"{path}data/screenshots/{i}/\")\n for each in screenshot_files:\n os.rename(f\"{path}data/screenshots/{i}/{each}\", f\"{path}data/screenshots/{i}/{page_name}_{timestamp}_{each}\")\n upload_file(f\"{page_name}_{timestamp}_{each}\", f\"{path}data/screenshots/{i}/\", galloper_url, project_id, token)\n\n os.rename(f\"{path}data/video/{i}.mp4\", f\"{path}data/video/{page_name}_{timestamp}_{i}.mp4\")\n upload_file(f\"{page_name}_{timestamp}_{i}.mp4\", f\"{path}data/video/\", galloper_url, project_id, token)\n\n\ndef upload_static_files(path, galloper_url, project_id, token):\n static_bucket = \"sitespeedstatic\"\n for each in [\"css\", \"img\", \"img/ico\", \"js\", \"font\"]:\n files = [f for f in os.listdir(f\"{path}{each}/\") if os.path.isfile(f\"{path}{each}/{f}\")]\n for file in files:\n upload_file(file, f\"{path}{each}/\", galloper_url, project_id, token, bucket=static_bucket)\n\n\ndef upload_distributed_report_files(path, timestamp, galloper_url, project_id, token, loops):\n report_bucket = 
f\"{galloper_url}/api/v1/artifacts/{project_id}/reports\"\n static_bucket = f\"{galloper_url}/api/v1/artifacts/{project_id}/sitespeedstatic\"\n for each in [\"index.html\", \"detailed.html\", \"pages.html\", \"domains.html\", \"toplist.html\", \"assets.html\", \"settings.html\", \"help.html\"]:\n with open(f\"{path}{each}\", \"r\", encoding='utf-8') as f:\n html = f.read()\n html = update_page_results_html(html, report_bucket, static_bucket, \"\", timestamp, loops, \"\")\n with open(f\"/{timestamp}_{each}\", 'w') as f:\n f.write(html)\n upload_file(f\"{timestamp}_{each}\", \"/\", galloper_url, project_id, token)\n\n\ndef aggregate_results(page_result):\n aggregated_result = {\"requests\": len(page_result[\"total\"]), \"domains\": 1,\n \"time_to_interactive\": 0} # there is no TTI in browsertime json\n for metric in list(page_result.keys()):\n aggregated_result[metric] = get_aggregated_value(sys.argv[4], page_result[metric])\n return aggregated_result\n\n\ndef update_page_results_html(html, report_bucket, static_bucket, page_name, timestamp, loops, prefix):\n html = html.replace(f'<li><a href=\"{prefix}assets.html\">Assets</a></li>', f'<li><a href=\"{report_bucket}/{timestamp}_assets.html\">Assets</a></li> <li><a href=\"{timestamp}_distributed_report.zip\">Report</a></li>')\n html = html.replace(f'href=\"{prefix}css/index.min.css\"', f'href=\"{static_bucket}/index.min.css\"')\n html = html.replace(f'href=\"{prefix}img/ico/sitespeed.io-144.png\"', f'href=\"{static_bucket}/sitespeed.io-144.png\"')\n html = html.replace(f'href=\"{prefix}img/ico/sitespeed.io-114.png\"', f'href=\"{static_bucket}/sitespeed.io-114.png\"')\n html = html.replace(f'href=\"{prefix}img/ico/sitespeed.io-72.png\"', f'href=\"{static_bucket}/sitespeed.io-72.png\"')\n html = html.replace(f'href=\"{prefix}img/ico/sitespeed.io.ico\"', f'href=\"{static_bucket}/sitespeed.io.ico\"')\n html = html.replace(f'src=\"{prefix}img/sitespeed.io-logo.png\"', f'src=\"{static_bucket}/sitespeed.io-logo.png\"')\n html = html.replace(f'src=\"{prefix}img/coach.png\"', f'src=\"{static_bucket}/coach.png\"')\n html = html.replace(f'src=\"{prefix}js/perf-cascade.min.js\"', f'src=\"{static_bucket}/perf-cascade.min.js\"')\n html = html.replace(f'src=\"{prefix}js/sortable.min.js\"', f'src=\"{static_bucket}/sortable.min.js\"')\n html = html.replace(f'src=\"{prefix}js/chartist.min.js\"', f'src=\"{static_bucket}/chartist.min.js\"')\n html = html.replace(f'src=\"{prefix}js/chartist-plugin-axistitle.min.js\"', f'src=\"{static_bucket}/chartist-plugin-axistitle.min.js\"')\n html = html.replace(f'src=\"{prefix}js/chartist-plugin-tooltip.min.js\"', f'src=\"{static_bucket}/chartist-plugin-tooltip.min.js\"')\n html = html.replace(f'src=\"{prefix}js/chartist-plugin-legend.min.js\"', f'src=\"{static_bucket}/chartist-plugin-legend.min.js\"')\n html = html.replace(f'src=\"{prefix}js/video.core.novtt.min.js\"', f'src=\"{static_bucket}/video.core.novtt.min.js\"')\n html = html.replace(f'href=\"{prefix}help.html', f'href=\"{report_bucket}/{timestamp}_help.html')\n\n for html_file in [\"index.html\", \"detailed.html\", \"pages.html\", \"domains.html\", \"toplist.html\", \"settings.html\"]:\n html = html.replace(f'href=\"{prefix}{html_file}\"', f'href=\"{report_bucket}/{timestamp}_{html_file}\"')\n for i in range(1, loops + 1):\n html = html.replace(f'href=\"./{i}.html\"', f'href=\"{report_bucket}/{page_name}_{timestamp}_{i}.html\"')\n for data_file_path in [f\"data/screenshots/{i}/\", \"data/video/\", f\"data/filmstrip/{i}/\"]:\n html = html.replace(data_file_path, 
f'{report_bucket}/{page_name}_{timestamp}_')\n    html = html.replace('href=\"metrics.html\"', f'href=\"{report_bucket}/{page_name}_{timestamp}_metrics.html\"')\n\n    # Links for pages\n    links = re.findall('href=\"pages/(.+?)/index.html\"', html)\n    for each in links:\n        try:\n            link = f'href=\"pages/{each}/index.html\"'\n            page_name = link.split(\"/\")[-2]\n            html = html.replace(f'href=\"pages/{each}/index.html\"', f'href=\"{report_bucket}/{page_name}_{timestamp}_index.html\"')\n        except:\n            print(f\"failed to update {each} link\")\n    return html\n\n\ndef upload_distributed_report(timestamp, galloper_url, project_id, token):\n    shutil.make_archive(base_name=f'{timestamp}_distributed_report', format=\"zip\", root_dir=\"/\", base_dir=\"/sitespeed.io/sitespeed-result\")\n    upload_file(f'{timestamp}_distributed_report.zip', \"/sitespeed.io/\", galloper_url, project_id, token)\n", "id": "4942200", "language": "Python", "matching_score": 6.3076863288879395, "max_stars_count": 0, "path": "util.py" }, { "content": "from util import is_threshold_failed, get_aggregated_value\nfrom os import environ, rename\nfrom traceback import format_exc\nimport requests\nfrom json import loads\nfrom datetime import datetime\nimport pytz\nimport sys\n\nPROJECT_ID = environ.get('GALLOPER_PROJECT_ID')\nURL = environ.get('GALLOPER_URL')\nREPORT_ID = environ.get('REPORT_ID')\nBUCKET = environ.get(\"TESTS_BUCKET\")\nREPORTS_BUCKET = environ.get(\"REPORTS_BUCKET\")\nTEST = environ.get(\"ARTIFACT\")\nTOKEN = environ.get(\"token\")\nPATH_TO_FILE = f'/tmp/{TEST}'\nTESTS_PATH = environ.get(\"tests_path\", '/')\nTEST_NAME = environ.get(\"JOB_NAME\")\nENV = environ.get(\"ENV\")\n\n\ntry:\n    # Get thresholds\n    res = None\n    try:\n        res = requests.get(\n            f\"{URL}/api/v1/thresholds/{PROJECT_ID}/ui?name={TEST_NAME}&environment={ENV}&order=asc\",\n            headers={'Authorization': f\"Bearer {TOKEN}\"})\n    except Exception:\n        print(format_exc())\n\n    thresholds = []\n    if res is not None and res.status_code == 200:\n        try:\n            thresholds = res.json()\n        except ValueError:\n            thresholds = []\n\n    all_thresholds: list = list(filter(lambda _th: _th['scope'] == 'all', thresholds))\n    every_thresholds: list = list(filter(lambda _th: _th['scope'] == 'every', thresholds))\n    page_thresholds: list = list(filter(lambda _th: _th['scope'] != 'every' and _th['scope'] != 'all', thresholds))\n    test_thresholds_total = 0\n    test_thresholds_failed = 0\n\n    all_results = {\"total\": [], \"speed_index\": [], \"time_to_first_byte\": [], \"time_to_first_paint\": [],\n                   \"dom_content_loading\": [], \"dom_processing\": [], \"first_contentful_paint\": [],\n                   \"largest_contentful_paint\": [], \"cumulative_layout_shift\": [], \"total_blocking_time\": [],\n                   \"first_visual_change\": [], \"last_visual_change\": [], \"time_to_interactive\": []}\n\n    format_str = \"%d%b%Y_%H:%M:%S\"\n    timestamp = datetime.now().strftime(format_str)\n    html_path = f\"/{timestamp}_user-flow.report.html\"\n    rename(\"/user-flow.report.html\", html_path)\n\n    json_path = '/user-flow.report.json'\n    # Read and process results json\n    with open(json_path, \"r\") as f:\n        json_data = loads(f.read())\n    index = 0\n    for step in json_data[\"steps\"]:\n        page_thresholds_total = 0\n        page_thresholds_failed = 0\n        file_name = html_path.split(\"/\")[-1]\n        if \"metrics\" in list(step[\"lhr\"][\"audits\"].keys()):\n            step_type = \"page\"\n            result = {\n                \"requests\": 1,\n                \"domains\": 1,\n                \"total\": int(step[\"lhr\"][\"audits\"][\"metrics\"][\"details\"]['items'][0][\"observedLoad\"]),\n                \"speed_index\": 
int(step[\"lhr\"][\"audits\"][\"metrics\"][\"details\"]['items'][0][\"speedIndex\"]),\n \"time_to_first_byte\": int(step[\"lhr\"][\"audits\"]['server-response-time']['numericValue']),\n \"time_to_first_paint\": int(\n step[\"lhr\"][\"audits\"][\"metrics\"][\"details\"]['items'][0][\"observedFirstPaint\"]),\n \"dom_content_loading\": int(\n step[\"lhr\"][\"audits\"][\"metrics\"][\"details\"]['items'][0][\"observedDomContentLoaded\"]),\n \"dom_processing\": int(\n step[\"lhr\"][\"audits\"][\"metrics\"][\"details\"]['items'][0][\"observedDomContentLoaded\"]),\n \"first_contentful_paint\": int(\n step[\"lhr\"][\"audits\"][\"metrics\"][\"details\"]['items'][0][\"firstContentfulPaint\"]),\n \"largest_contentful_paint\": int(\n step[\"lhr\"][\"audits\"][\"metrics\"][\"details\"]['items'][0][\"largestContentfulPaint\"]),\n \"cumulative_layout_shift\": round(\n float(int(step[\"lhr\"][\"audits\"][\"metrics\"][\"details\"]['items'][0][\"cumulativeLayoutShift\"])),\n 3),\n \"total_blocking_time\": int(\n step[\"lhr\"][\"audits\"][\"metrics\"][\"details\"]['items'][0][\"totalBlockingTime\"]),\n \"first_visual_change\": int(\n step[\"lhr\"][\"audits\"][\"metrics\"][\"details\"]['items'][0][\"observedFirstVisualChange\"]),\n \"last_visual_change\": int(\n step[\"lhr\"][\"audits\"][\"metrics\"][\"details\"]['items'][0][\"observedLastVisualChange\"]),\n \"time_to_interactive\": int(step[\"lhr\"][\"audits\"][\"metrics\"][\"details\"]['items'][0][\"interactive\"])\n }\n else:\n step_type = \"action\"\n result = {\n \"requests\": 1,\n \"domains\": 1,\n \"total\": 0,\n \"speed_index\": 0,\n \"time_to_first_byte\": 0,\n \"time_to_first_paint\": 0,\n \"dom_content_loading\": 0,\n \"dom_processing\": 0,\n \"first_contentful_paint\": 0,\n \"largest_contentful_paint\": 0,\n \"cumulative_layout_shift\": round(float(step[\"lhr\"][\"audits\"]['cumulative-layout-shift']['numericValue']), 3),\n \"total_blocking_time\": int(step[\"lhr\"][\"audits\"]['total-blocking-time']['numericValue']),\n \"first_visual_change\": 0,\n \"last_visual_change\": 0,\n \"time_to_interactive\": 0\n }\n\n # Add page results to the summary dict\n for metric in list(all_results.keys()):\n all_results[metric].append(result[metric])\n\n # Process thresholds with scope = every\n for th in every_thresholds:\n test_thresholds_total += 1\n page_thresholds_total += 1\n if not is_threshold_failed(result.get(th[\"target\"]), th[\"comparison\"], th[\"metric\"]):\n print(f\"Threshold: {th['scope']} {th['target']} {th['aggregation']} value {result.get(th['target'])}\"\n f\" comply with rule {th['comparison']} {th['metric']} [PASSED]\")\n else:\n test_thresholds_failed += 1\n page_thresholds_failed += 1\n print(f\"Threshold: {th['scope']} {th['target']} {th['aggregation']} value {result.get(th['target'])}\"\n f\" violates rule {th['comparison']} {th['metric']} [FAILED]\")\n\n # Process thresholds for current page\n for th in page_thresholds:\n if th[\"scope\"] == f'{step[\"lhr\"][\"requestedUrl\"]}@{step[\"name\"]}':\n test_thresholds_total += 1\n page_thresholds_total += 1\n if not is_threshold_failed(result.get(th[\"target\"]), th[\"comparison\"], th[\"metric\"]):\n print(f\"Threshold: {th['name']} {th['scope']} {th['target']} {th['aggregation']} value {result.get(th['target'])}\"\n f\" comply with rule {th['comparison']} {th['metric']} [PASSED]\")\n else:\n test_thresholds_failed += 1\n page_thresholds_failed += 1\n print(f\"Threshold: {th['name']} {th['scope']} {th['target']} {th['aggregation']} value {result.get(th['target'])}\"\n f\" violates rule 
{th['comparison']} {th['metric']} [FAILED]\")\n\n # Update report with page results\n data = {\n \"name\": step[\"name\"],\n \"type\": step_type,\n \"identifier\": f'{step[\"lhr\"][\"requestedUrl\"]}@{step[\"name\"]}',\n \"metrics\": result,\n \"bucket_name\": \"reports\",\n \"file_name\": f\"{file_name}#index={index}\",\n \"resolution\": \"auto\",\n \"browser_version\": \"chrome\",\n \"thresholds_total\": page_thresholds_total,\n \"thresholds_failed\": page_thresholds_failed,\n \"locators\": [],\n \"session_id\": \"session_id\"\n }\n index += 1\n\n try:\n requests.post(f\"{URL}/api/v1/observer/{PROJECT_ID}/{REPORT_ID}\", json=data,\n headers={'Authorization': f\"Bearer {TOKEN}\"})\n except Exception:\n print(format_exc())\n\n # Send html file with page results\n file = {'file': open(html_path, 'rb')}\n\n try:\n requests.post(f\"{URL}/api/v1/artifacts/{PROJECT_ID}/reports/{file_name}\",\n files=file,\n headers={'Authorization': f\"Bearer {TOKEN}\"})\n except Exception:\n print(format_exc())\n\n # Process thresholds with scope = all\n for th in all_thresholds:\n test_thresholds_total += 1\n if not is_threshold_failed(get_aggregated_value(th[\"aggregation\"], all_results.get(th[\"target\"])),\n th[\"comparison\"], th[\"metric\"]):\n print(f\"Threshold: {th['scope']} {th['target']} {th['aggregation']} value {all_results.get(th['target'])}\"\n f\" complies with rule {th['comparison']} {th['metric']} [PASSED]\")\n else:\n test_thresholds_failed += 1\n print(f\"Threshold: {th['scope']} {th['target']} {th['aggregation']} value {all_results.get(th['target'])}\"\n f\" violates rule {th['comparison']} {th['metric']} [FAILED]\")\n\n # Finalize report\n time = datetime.now(tz=pytz.timezone(\"UTC\"))\n exception_message = \"\"\n if test_thresholds_total:\n violated = round(float(test_thresholds_failed / test_thresholds_total) * 100, 2)\n print(f\"Failed thresholds: {violated}\")\n if violated > 30:\n exception_message = f\"Failed thresholds rate more than {violated}%\"\n report_data = {\n \"report_id\": REPORT_ID,\n \"time\": time.strftime('%Y-%m-%d %H:%M:%S'),\n \"status\": \"Finished\",\n \"thresholds_total\": test_thresholds_total,\n \"thresholds_failed\": test_thresholds_failed,\n \"exception\": exception_message\n }\n\n try:\n requests.put(f\"{URL}/api/v1/observer/{PROJECT_ID}\", json=report_data,\n headers={'Authorization': f\"Bearer {TOKEN}\"})\n except Exception:\n print(format_exc())\n\n # Email notification\n if len(sys.argv) > 2 and \"email\" in sys.argv[2].split(\";\"):\n secrets_url = f\"{URL}/api/v1/secrets/{PROJECT_ID}/\"\n try:\n email_notification_id = requests.get(secrets_url + \"email_notification_id\",\n headers={'Authorization': f'bearer {TOKEN}',\n 'Content-type': 'application/json'}\n ).json()[\"secret\"]\n except Exception:\n email_notification_id = \"\"\n\n if email_notification_id:\n task_url = f\"{URL}/api/v1/task/{PROJECT_ID}/{email_notification_id}\"\n\n event = {\n \"notification_type\": \"ui\",\n \"test_id\": sys.argv[1],\n \"report_id\": REPORT_ID\n }\n\n res = requests.post(task_url, json=event, headers={'Authorization': f'bearer {TOKEN}',\n 'Content-type': 'application/json'})\n\nexcept Exception:\n print(format_exc())\n", "id": "4703377", "language": "Python", "matching_score": 6.267261028289795, "max_stars_count": 0, "path": "results_processing.py" }, { "content": "from util import is_threshold_failed, get_aggregated_value, process_page_results, aggregate_results, update_report, \\\n finalize_report, upload_distributed_report_files, upload_distributed_report, 
upload_static_files\n\nimport os\nfrom traceback import format_exc\nimport requests\nfrom json import loads\nfrom datetime import datetime\nimport pytz\nimport sys\n\nPROJECT_ID = os.environ.get('GALLOPER_PROJECT_ID')\nURL = os.environ.get('GALLOPER_URL')\nREPORT_ID = os.environ.get('REPORT_ID')\nBUCKET = os.environ.get(\"TESTS_BUCKET\")\nREPORTS_BUCKET = os.environ.get(\"REPORTS_BUCKET\")\nTEST = os.environ.get(\"ARTIFACT\")\nTOKEN = os.environ.get(\"token\")\nPATH_TO_FILE = f'/tmp/{TEST}'\nTESTS_PATH = os.environ.get(\"tests_path\", '/')\nTEST_NAME = os.environ.get(\"JOB_NAME\")\n\ntry:\n # Get thresholds\n res = None\n try:\n res = requests.get(\n f\"{URL}/api/v1/thresholds/{PROJECT_ID}/ui?name={TEST_NAME}&environment=Default&order=asc\",\n headers={'Authorization': f\"Bearer {TOKEN}\"})\n except Exception:\n print(format_exc())\n\n thresholds = []\n if res and res.status_code == 200:\n try:\n thresholds = res.json()\n except ValueError:\n thresholds = []\n\n all_thresholds: list = list(filter(lambda _th: _th['scope'] == 'all', thresholds))\n every_thresholds: list = list(filter(lambda _th: _th['scope'] == 'every', thresholds))\n page_thresholds: list = list(filter(lambda _th: _th['scope'] != 'every' and _th['scope'] != 'all', thresholds))\n\n format_str = \"%d%b%Y_%H:%M:%S\"\n timestamp = datetime.now().strftime(format_str)\n upload_distributed_report(timestamp, URL, PROJECT_ID, TOKEN)\n results_path = f\"/sitespeed.io/sitespeed-result/{sys.argv[2].replace('.', '_')}/\"\n dir_name = os.listdir(results_path)\n upload_static_files(f\"{results_path}{dir_name[0]}/\", URL, PROJECT_ID, TOKEN)\n upload_distributed_report_files(f\"{results_path}{dir_name[0]}/\", timestamp, URL, PROJECT_ID, TOKEN, int(sys.argv[3]))\n results_path = f\"{results_path}{dir_name[0]}/pages/\"\n dir_names = os.listdir(results_path)\n all_results = {\"total\": [], \"speed_index\": [], \"time_to_first_byte\": [], \"time_to_first_paint\": [],\n \"dom_content_loading\": [], \"dom_processing\": [], \"first_contentful_paint\": [],\n \"largest_contentful_paint\": [], \"cumulative_layout_shift\": [], \"total_blocking_time\": [],\n \"first_visual_change\": [], \"last_visual_change\": []}\n test_thresholds_total = 0\n test_thresholds_failed = 0\n sub_dir_names = []\n for each in dir_names:\n _sub_dirs = os.listdir(f\"{results_path}{each}/\")\n for _ in _sub_dirs:\n if \"index.html\" in os.listdir(f\"{results_path}{each}/{_}\"):\n _sub_dirs = [os.path.join(f\"{results_path}{each}/\", f\"{_}/\")]\n else:\n _sub_dirs = [os.path.join(f\"{results_path}{each}/{_}\", f\"{f}/\") for f in os.listdir(f\"{results_path}{each}/{_}\")]\n sub_dir_names.extend(_sub_dirs)\n sub_dir_names.sort(key=lambda x: os.path.getmtime(x))\n for sub_dir_path in sub_dir_names:\n sub_dir = sub_dir_path.split(\"/\")[-2]\n if \"index.html\" in os.listdir(sub_dir_path):\n page_result = process_page_results(sub_dir, sub_dir_path, URL, PROJECT_ID, TOKEN, timestamp,\n prefix=\"../../../\", loops=int(sys.argv[3]))\n # Add page results to the summary dict\n for metric in list(all_results.keys()):\n all_results[metric].extend(page_result[metric])\n aggregated_result = aggregate_results(page_result)\n update_report(sub_dir, aggregated_result, URL, PROJECT_ID, TOKEN, REPORT_ID, timestamp)\n else:\n for sub_sub_dir in os.listdir(sub_dir_path):\n page_result = process_page_results(sub_sub_dir, f\"{sub_dir_path}{sub_sub_dir}/\", URL, PROJECT_ID,\n TOKEN, timestamp, prefix=\"../../../../\", loops=int(sys.argv[3]))\n # Add page results to the summary dict\n for metric 
in list(all_results.keys()):\n all_results[metric].extend(page_result[metric])\n aggregated_result = aggregate_results(page_result)\n update_report(sub_sub_dir, aggregated_result, URL, PROJECT_ID, TOKEN, REPORT_ID, timestamp)\n\n finalize_report(URL, PROJECT_ID, TOKEN, REPORT_ID)\n\n # Email notification\n if len(sys.argv) > 5 and \"email\" in sys.argv[5].split(\";\"):\n secrets_url = f\"{URL}/api/v1/secrets/{PROJECT_ID}/\"\n try:\n email_notification_id = requests.get(secrets_url + \"email_notification_id\",\n headers={'Authorization': f'bearer {TOKEN}',\n 'Content-type': 'application/json'}\n ).json()[\"secret\"]\n except Exception:\n email_notification_id = \"\"\n\n if email_notification_id:\n task_url = f\"{URL}/api/v1/task/{PROJECT_ID}/{email_notification_id}\"\n\n event = {\n \"notification_type\": \"ui\",\n \"test_id\": sys.argv[1],\n \"report_id\": REPORT_ID\n }\n\n res = requests.post(task_url, json=event, headers={'Authorization': f'bearer {TOKEN}',\n 'Content-type': 'application/json'})\n print(f\"Email notification {res.text}\")\n\nexcept Exception:\n print(format_exc())\n", "id": "12135175", "language": "Python", "matching_score": 1.0587024688720703, "max_stars_count": 0, "path": "results_processing.py" }, { "content": "import os\nimport zipfile\nimport requests\nimport shutil\n\n\ndef process_csv(galloper_url, token, project_id, artifact, bucket, csv_path, lg_count):\n download_artifact(galloper_url, project_id, token, bucket, artifact)\n csv_files = split_csv(csv_path, lg_count)\n csv_array = upload_csv(galloper_url, token, project_id, csv_files, bucket, csv_path)\n return csv_array\n\n\ndef download_artifact(galloper_url, project_id, token, bucket, artifact):\n endpoint = f'/api/v1/artifact/{project_id}/{bucket}/{artifact}'\n headers = {'Authorization': f'bearer {token}'}\n r = requests.get(f'{galloper_url}{endpoint}', allow_redirects=True, headers=headers)\n with open(\"/tmp/file_data.zip\", 'wb') as file_data:\n file_data.write(r.content)\n try:\n os.mkdir(\"/tmp/file_data\")\n except FileExistsError:\n shutil.rmtree(\"/tmp/file_data\")\n os.mkdir(\"/tmp/file_data\")\n with zipfile.ZipFile(\"/tmp/file_data.zip\", 'r') as zip_ref:\n zip_ref.extractall(\"/tmp/file_data\")\n\n\ndef split_csv(csv_path, lg_count):\n try:\n os.mkdir(\"/tmp/scv_files\")\n except FileExistsError:\n shutil.rmtree(\"/tmp/scv_files\")\n os.mkdir(\"/tmp/scv_files\")\n with open(f\"/tmp/file_data/{csv_path}\", \"r\") as csv:\n csv_lines = csv.readlines()\n lines_per_generator = int(len(csv_lines)/lg_count)\n csv_name = csv_path.split(\"/\")[-1].split(\".csv\")[0]\n csv_suffix = 1\n csv_array = []\n _tmp = []\n for each in csv_lines:\n if len(_tmp) != lines_per_generator:\n _tmp.append(each)\n else:\n with open(f\"/tmp/scv_files/{csv_name}_{csv_suffix}.csv\", \"w\") as f:\n for line in _tmp:\n f.write(f\"{line}\")\n csv_array.append(f\"/tmp/scv_files/{csv_name}_{csv_suffix}.csv\")\n _tmp = []\n _tmp.append(each)\n csv_suffix += 1\n\n if len(csv_array) < lg_count and _tmp:\n with open(f\"/tmp/scv_files/{csv_name}_{csv_suffix}.csv\", \"w\") as f:\n for line in _tmp:\n f.write(f\"{line}\")\n csv_array.append(f\"/tmp/scv_files/{csv_name}_{csv_suffix}.csv\")\n return csv_array\n\n\ndef upload_csv(galloper_url, token, project_id, csv_files, bucket, csv_path):\n csv_array = []\n headers = {'Authorization': f'bearer {token}'}\n upload_url = f'{galloper_url}/api/v1/artifact/{project_id}/{bucket}'\n for each in csv_files:\n csv_name = each.replace(\"/tmp/scv_files/\", \"\")\n files = {'file': open(each, 'rb')}\n 
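# Push the chunk to the artifact bucket, then record a mapping from its\n # bucket path to the CSV path that JMeter reads inside each load generator.\n 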
requests.post(upload_url, allow_redirects=True, files=files, headers=headers)\n csv_array.append({f\"{bucket}/{csv_name}\": f\"/mnt/jmeter/{csv_path}\"})\n return csv_array\n\n\ndef delete_csv(galloper_url, token, project_id, artifact):\n url = f'{galloper_url}/api/v1/artifact/{project_id}/tests'\n headers = {'Authorization': f'bearer {token}'} if token else {}\n requests.delete(f'{url}?fname[]={artifact}', headers=headers)\n", "id": "6932262", "language": "Python", "matching_score": 3.2153775691986084, "max_stars_count": 0, "path": "control_tower/csv_splitter.py" }, { "content": "import pytest\nimport requests_mock\nfrom os import path\nfrom control_tower.csv_splitter import process_csv\n\ngalloper_url = \"http://example\"\nartifact = \"test.zip\"\ntoken = \"<PASSWORD>\"\nproject_id = 1\nbucket = 'test'\ncsv_path = \"age.csv\"\nlg_count = 5\n\n\ndef test_split_csv():\n with requests_mock.Mocker() as mock:\n mock.get(f'{galloper_url}/api/v1/artifact/{project_id}/{bucket}/{artifact}',\n content=open('tests/test.zip', \"rb\").read(), status_code=200)\n mock.post(f'{galloper_url}/api/v1/artifact/{project_id}/{bucket}',\n json={\"status\": \"mocked\"}, status_code=200)\n process_csv(galloper_url, token, project_id, artifact, bucket, csv_path, lg_count)\n assert path.exists(\"/tmp/file_data/age.csv\")\n for i in [1, 2, 3, 4, 5]:\n assert path.exists(f\"/tmp/scv_files/age_{i}.csv\")\n assert len(open(f\"/tmp/scv_files/age_{i}.csv\", \"r\").readlines()) == 20\n", "id": "274863", "language": "Python", "matching_score": 1.9358688592910767, "max_stars_count": 0, "path": "tests/test_csv_splitter.py" }, { "content": "import pytest\nimport requests_mock\nimport tests.utils.constants as c\nfrom interceptor.post_processor import PostProcessor\n\ngalloper_url = \"https://example.com\"\ngalloper_web_hook = \"hook\"\nproject_id = 1\ntoken = \"<PASSWORD>\"\n\n\ndef test_results_post_processing():\n with requests_mock.Mocker() as mock:\n post_processor = PostProcessor(galloper_url=galloper_url, project_id=project_id,\n galloper_web_hook=galloper_web_hook, bucket=\"test\", prefix=\"test\")\n mock.get(f\"{galloper_url}/api/v1/task/{project_id}/{galloper_web_hook}?exec=True\", json=c.task_info)\n mock.register_uri(requests_mock.POST, c.docker_volume_url, status_code=204, json={\"Id\": \"1\"})\n mock.get(c.galloper_get_artifact, content=open(\"tests/utils/test.zip\", \"rb\").read(), status_code=200)\n mock.register_uri(requests_mock.POST, c.docker_create_url, status_code=204, json={\"Id\": \"1\"})\n mock.get(c.docker_get_container_info_url, json={\"Id\": \"1\", \"HostConfig\": {\"LogConfig\": {\"Type\": \"default\"}}})\n mock.post(c.docker_start_url, json={\"Id\": \"1\"})\n mock.post(c.docker_wait_url, json={\"StatusCode\": 0})\n mock.delete(c.docker_delete_url)\n mock.post(f\"{galloper_url}/api/v1/task/{project_id}/results\")\n post_processor.results_post_processing()\n assert mock.call_count == 10\n", "id": "10650039", "language": "Python", "matching_score": 3.590543508529663, "max_stars_count": 0, "path": "tests/test_post_processor.py" }, { "content": "import pytest\nimport requests_mock\nimport docker\nimport tests.utils.constants as c\nfrom interceptor.jobs_wrapper import JobsWrapper\n\ngalloper_url = \"https://example.com\"\nproject_id = 1\ntoken = \"<PASSWORD>\"\n\n\ndef test_job_wrapper_dast():\n with requests_mock.Mocker() as mock:\n client = docker.from_env()\n mock.register_uri(requests_mock.POST, c.docker_create_url, status_code=204, json={\"Id\": \"1\"})\n mock.get(c.docker_get_container_info_url, 
json={\"Id\": \"1\"})\n mock.post(c.docker_start_url, json={\"Id\": \"1\"})\n JobsWrapper.dast(client, \"getcarrier/dast:latest\", {\"cmd\": \"\", \"GALLOPER_PROJECT_ID\": project_id,\n \"GALLOPER_URL\": galloper_url,\n \"GALLOPER_AUTH_TOKEN\": token}, \"dast\")\n assert mock.call_count == 3\n\n\ndef test_job_wrapper_sast():\n with requests_mock.Mocker() as mock:\n client = docker.from_env()\n mock.register_uri(requests_mock.POST, c.docker_create_url, status_code=204, json={\"Id\": \"1\"})\n mock.get(c.docker_get_container_info_url, json={\"Id\": \"1\"})\n mock.post(c.docker_start_url, json={\"Id\": \"1\"})\n JobsWrapper.sast(client, \"getcarrier/sast:latest\", {\"cmd\": \"\", \"GALLOPER_PROJECT_ID\": project_id,\n \"GALLOPER_URL\": galloper_url,\n \"GALLOPER_AUTH_TOKEN\": token}, \"sast\")\n assert mock.call_count == 3\n\n\ndef test_job_wrapper_perfmeter():\n with requests_mock.Mocker() as mock:\n client = docker.from_env()\n mock.register_uri(requests_mock.POST, c.docker_create_url, status_code=204, json={\"Id\": \"1\"})\n mock.get(c.docker_get_container_info_url, json={\"Id\": \"1\"})\n mock.post(c.docker_start_url, json={\"Id\": \"1\"})\n JobsWrapper.perfmeter(client, \"getcarrier/perfmeter:latest\", {\"cmd\": \"\", \"DISTRIBUTED_MODE_PREFIX\": \"prefix\",\n \"build_id\": \"test_build\",\n \"config_yaml\": {}}, \"perfmeter\")\n assert mock.call_count == 3\n\n\ndef test_job_wrapper_perfgun():\n with requests_mock.Mocker() as mock:\n client = docker.from_env()\n mock.register_uri(requests_mock.POST, c.docker_create_url, status_code=204, json={\"Id\": \"1\"})\n mock.get(c.docker_get_container_info_url, json={\"Id\": \"1\"})\n mock.post(c.docker_start_url, json={\"Id\": \"1\"})\n JobsWrapper.perfgun(client, \"getcarrier/perfgun:latest\", {\"cmd\": \"\", \"DISTRIBUTED_MODE_PREFIX\": \"prefix\",\n \"build_id\": \"test_build\", \"GATLING_TEST_PARAMS\": \"\",\n \"config_yaml\": {}}, \"perfgun\")\n assert mock.call_count == 3\n\n\ndef test_job_wrapper_observer():\n with requests_mock.Mocker() as mock:\n client = docker.from_env()\n mock.register_uri(requests_mock.POST, c.docker_create_url, status_code=204, json={\"Id\": \"1\"})\n mock.get(c.docker_get_container_info_url, json={\"Id\": \"1\"})\n mock.post(c.docker_start_url, json={\"Id\": \"1\"})\n JobsWrapper.observer(client, \"getcarrier/observer:latest\", {\"cmd\": \"\"}, \"observer\")\n assert mock.call_count == 3\n\n\ndef test_job_wrapper_browsertime():\n with requests_mock.Mocker() as mock:\n client = docker.from_env()\n mock.register_uri(requests_mock.POST, c.docker_create_url, status_code=204, json={\"Id\": \"1\"})\n mock.get(c.docker_get_container_info_url, json={\"Id\": \"1\"})\n mock.post(c.docker_start_url, json={\"Id\": \"1\"})\n JobsWrapper.observer(client, \"getcarrier/browsertime:latest\", {\"cmd\": \"\"}, \"browsertime\")\n assert mock.call_count == 3\n\n\ndef test_job_wrapper_perfui():\n with requests_mock.Mocker() as mock:\n client = docker.from_env()\n mock.register_uri(requests_mock.POST, c.docker_create_url, status_code=204, json={\"Id\": \"1\"})\n mock.get(c.docker_get_container_info_url, json={\"Id\": \"1\"})\n mock.post(c.docker_start_url, json={\"Id\": \"1\"})\n JobsWrapper.perfui(client, \"getcarrier/perfui:latest\", {\"cmd\": \"\"}, \"perfui\")\n assert mock.call_count == 3\n", "id": "3478002", "language": "Python", "matching_score": 0.8056196570396423, "max_stars_count": 0, "path": "tests/test_job_wrapper.py" }, { "content": "class DockerClient(object):\n\n def __init__(self, client):\n self.client = client\n\n def 
get_container(self, container_id):\n try:\n return DockerContainer(self.client.containers.get(container_id))\n except:\n return DockerContainer()\n\n def run(self, image, **kwargs):\n try:\n return self.client.containers.run(image, **kwargs)\n except Exception:\n return None\n\n def port(self, container_id, port):\n return self.client.api.port(container_id, port)[0][\"HostPort\"]\n\n\nclass DockerContainer(object):\n\n def __init__(self, container=None):\n self.container = container\n\n def remove(self, force=False):\n if self.container:\n self.container.remove(force=force)\n", "id": "5965549", "language": "Python", "matching_score": 1.032753348350525, "max_stars_count": 0, "path": "observer_hub/docker_client.py" }, { "content": "docker_create_url = \"http+docker://localhost/v1.35/containers/create\"\n\ndocker_get_container_info_url = \"http+docker://localhost/v1.35/containers/1/json\"\n\ndocker_start_url = \"http+docker://localhost/v1.35/containers/1/start\"\n\ndocker_volume_url = \"http+docker://localhost/v1.35/volumes/create\"\n\ndocker_wait_url = \"http+docker://localhost/v1.35/containers/1/wait\"\n\ndocker_delete_url = \"http+docker://localhost/v1.35/containers/1?v=False&link=False&force=False\"\n\ntask_info = {\"id\": \"1\", \"project_id\": 1, \"task_id\": \"1\", \"zippath\": \"tasks/test.zip\", \"task_name\": \"test\",\n \"task_handler\": \"lambda.handler\", \"runtime\": \"Python 3.7\", \"webhook\": \"/task/test\", \"last_run\": \"1\",\n \"status\": \"activated\", \"token\": \"test\", \"func_args\": {\"test\": 1}, \"env_vars\": '{\"test\": 1}', \"callback\": \"\"}\n\ngalloper_get_artifact = \"https://example.com//api/v1/artifacts/1/tasks/test.zip\"\n", "id": "11601711", "language": "Python", "matching_score": 1.0155153274536133, "max_stars_count": 0, "path": "tests/utils/constants.py" }, { "content": "# Copyright 2020 getcarrier.io\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\nimport pika\nfrom time import sleep\nfrom uuid import uuid4\nfrom json import loads, dumps\nimport logging\n\nfrom arbiter.event.base import BaseEventHandler\n\n\nclass RPCClintEventHandler(BaseEventHandler):\n def __init__(self, settings, subscriptions, state):\n super().__init__(settings, subscriptions, state)\n self.callback_queue = None\n self.correlation_id = None\n self.response = None\n self.client = self._get_channel()\n\n def _connect_to_specific_queue(self, channel):\n result = channel.queue_declare(queue='', exclusive=True)\n self.callback_queue = result.method.queue\n channel.basic_qos(prefetch_count=1)\n channel.basic_consume(\n queue=self.callback_queue,\n on_message_callback=self.queue_event_callback,\n auto_ack=True\n )\n logging.info(\"[%s] Waiting for task events\", self.ident)\n return channel\n\n def queue_event_callback(self, channel, method, properties, body): # pylint: disable=R0912,R0915\n if self.correlation_id == properties.correlation_id:\n self.response = body\n\n def call(self, tasks_module, task, args, kwargs):\n self.response = None\n self.correlation_id = str(uuid4())\n 
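# Build the RPC payload; queue_event_callback pairs the broker reply\n # with this request via the correlation_id generated above.\n 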
message = {\n \"task_name\": task,\n \"args\": args,\n \"kwargs\": kwargs\n }\n logging.info(message)\n try:\n self.client.basic_publish(\n exchange='',\n routing_key=tasks_module,\n properties=pika.BasicProperties(\n reply_to=self.callback_queue,\n correlation_id=self.correlation_id,\n ),\n body=dumps(message).encode(\"utf-8\"))\n except (pika.exceptions.ConnectionClosedByBroker,\n pika.exceptions.AMQPChannelError,\n pika.exceptions.AMQPConnectionError,\n pika.exceptions.StreamLostError):\n sleep(0.1)\n self.client = self._get_channel()\n logging.error(\"Reconnecting ... \")\n return self.call(tasks_module, task, args, kwargs)\n while self.response is None:\n self.client.connection.process_data_events()\n resp = loads(self.response)\n if resp.get(\"type\") == \"exception\":\n raise ChildProcessError(resp[\"message\"])\n return resp.get(\"message\")\n\n", "id": "12812829", "language": "Python", "matching_score": 1.8114328384399414, "max_stars_count": 0, "path": "arbiter/event/rpcClient.py" }, { "content": "#!/usr/bin/python3\n# coding=utf-8\n# pylint: disable=I0011,E0401\n\n# Copyright 2021 getcarrier.io\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"\n Events\n\"\"\"\n\nfrom dusty.tools import log\n\nfrom arbiter.eventnode import MockEventNode\n\n\nclass EventManager:\n \"\"\" Events \"\"\"\n\n def __init__(self):\n self.node = MockEventNode()\n\n def subscribe(self, event, callback):\n \"\"\"\" Subscribe to event \"\"\"\n log.debug(\"Adding event subscription: event=%s, callback=%s\", event, callback)\n self.node.subscribe(event, callback)\n\n def unsubscribe(self, event, callback):\n \"\"\"\" Unsubscribe from event \"\"\"\n log.debug(\"Removing event subscription: event=%s, callback=%s\", event, callback)\n self.node.unsubscribe(event, callback)\n\n def emit(self, event, data=None):\n \"\"\" Emit event with data \"\"\"\n log.debug(\"Emitting event: event=%s, data=%s\", event, data)\n self.node.emit(event, data)\n", "id": "10357923", "language": "Python", "matching_score": 0.6682795286178589, "max_stars_count": 6, "path": "dusty/tools/event.py" }, { "content": "import pytest\n\nfrom time import sleep, time\nfrom arbiter import Arbiter, Task\nfrom tests.minion import stop_minion, start_minion\n\narbiter_host = \"localhost\"\narbiter_user = \"user\"\narbiter_password = \"password\"\narbiter_queue = \"default\"\n\n\nclass TestArbiter:\n p = None\n\n @classmethod\n def setup_class(cls):\n cls.p = start_minion()\n\n @classmethod\n def teardown_class(cls):\n stop_minion(cls.p)\n\n @staticmethod\n def test_task_in_task():\n arbiter = Arbiter(host=arbiter_host, port=5672, user=arbiter_user, password=<PASSWORD>)\n assert arbiter.workers()[arbiter_queue]['total'] == 10\n task_keys = []\n for _ in range(20):\n task_keys.append(arbiter.apply(\"simple_add\", task_args=[1, 2])[0])\n for task_key in task_keys:\n assert arbiter.status(task_key)['state'] in ('initiated', 'running')\n for message in arbiter.wait_for_tasks(task_keys):\n assert message['state'] == 'done'\n assert message['result'] 
== 3\n for task_key in task_keys:\n assert arbiter.status(task_key)['state'] == 'done'\n assert arbiter.status(task_key)['result'] == 3\n assert arbiter.workers()[arbiter_queue]['available'] == 10\n arbiter.close()\n\n @staticmethod\n def test_squad():\n tasks_in_squad = 3\n arbiter = Arbiter(host=arbiter_host, port=5672, user=arbiter_user, password=<PASSWORD>iter_password)\n tasks = []\n for _ in range(tasks_in_squad):\n tasks.append(Task(\"simple_add\", task_args=[1, 2]))\n squad_id = arbiter.squad(tasks)\n while arbiter.status(squad_id).get(\"state\") != \"done\":\n sleep(1)\n status = arbiter.status(squad_id)\n assert status[\"done\"] == tasks_in_squad\n assert len(status[\"tasks\"]) == tasks_in_squad\n assert arbiter.workers()[arbiter_queue]['available'] == 10\n arbiter.close()\n\n @staticmethod\n def test_pipe():\n tasks_in_pipe = 20\n arbiter = Arbiter(host=arbiter_host, port=5672, user=arbiter_user, password=<PASSWORD>)\n tasks = []\n for _ in range(tasks_in_pipe):\n tasks.append(Task(\"add_in_pipe\", task_args=[2]))\n pipe_id = None\n _loop_result = 0\n _loop_id = 1\n for message in arbiter.pipe(tasks, persistent_args=[2]):\n if \"pipe_id\" in message:\n pipe_id = message[\"pipe_id\"]\n else:\n _loop_result = message['result']\n assert _loop_result == 4 * _loop_id\n _loop_id += 1\n status = arbiter.status(pipe_id)\n assert status[\"done\"] == tasks_in_pipe\n assert len(status[\"tasks\"]) == tasks_in_pipe\n assert arbiter.workers()[arbiter_queue]['available'] == 10\n arbiter.close()\n\n @staticmethod\n def test_kill_task():\n arbiter = Arbiter(host=arbiter_host, port=5672, user=arbiter_user, password=<PASSWORD>)\n start = time()\n tasks = arbiter.apply(\"long_running\")\n for task_key in tasks:\n assert arbiter.status(task_key)['state'] == 'initiated'\n sleep(2) # time for task to settle\n arbiter.kill(tasks[0], sync=True)\n for message in arbiter.wait_for_tasks(tasks):\n assert message['state'] == 'done'\n assert time()-start < 180 # 180 sec is a length of task\n assert arbiter.workers()[arbiter_queue]['available'] == 10\n arbiter.close()\n\n @staticmethod\n def test_kill_group():\n tasks_in_squad = 3\n start = time()\n arbiter = Arbiter(host=arbiter_host, port=5672, user=arbiter_user, password=arbiter_password)\n tasks = []\n for _ in range(tasks_in_squad):\n tasks.append(Task(\"long_running\"))\n squad_id = arbiter.squad(tasks)\n sleep(5) # time for squad to settle\n arbiter.kill_group(squad_id)\n while arbiter.status(squad_id).get(\"state\") != \"done\":\n sleep(1)\n assert time() - start < 180\n assert arbiter.workers()[arbiter_queue]['available'] == 10\n arbiter.close()\n\n @staticmethod\n def test_squad_callback():\n tasks_in_squad = 3\n arbiter = Arbiter(host=arbiter_host, port=5672, user=arbiter_user, password=<PASSWORD>)\n tasks = []\n for _ in range(tasks_in_squad):\n tasks.append(Task(\"simple_add\", task_args=[1, 2]))\n squad_id = arbiter.squad(tasks, callback=Task(\"simple_add\", task_args=[5, 4]))\n while arbiter.status(squad_id).get(\"state\") != \"done\":\n sleep(1)\n status = arbiter.status(squad_id)\n assert status[\"done\"] == tasks_in_squad + 1\n assert len(status[\"tasks\"]) == tasks_in_squad + 1\n assert status[\"tasks\"][-1]['task_type'] == \"callback\"\n assert status[\"tasks\"][-1]['result'] == 9\n assert arbiter.workers()[arbiter_queue]['available'] == 10\n arbiter.close()\n\n @staticmethod\n def test_squad_finalyzer():\n tasks_in_squad = 3\n arbiter = Arbiter(host=arbiter_host, port=5672, user=arbiter_user, password=<PASSWORD>)\n tasks = []\n for _ in 
range(tasks_in_squad):\n tasks.append(Task(\"simple_add\", task_args=[1, 2]))\n tasks.append(Task(\"simple_add\", task_args=[5, 5], task_type='finalize'))\n squad_id = arbiter.squad(tasks, callback=Task(\"simple_add\", task_args=[5, 4]))\n while arbiter.status(squad_id).get(\"state\") != \"done\":\n sleep(1)\n status = arbiter.status(squad_id)\n assert status[\"done\"] == tasks_in_squad + 2 # callback + finalizer\n assert len(status[\"tasks\"]) == tasks_in_squad + 2 # callback + finalizer\n assert status[\"tasks\"][-1]['task_type'] == \"finalize\"\n assert status[\"tasks\"][-1]['result'] == 10\n assert arbiter.workers()[arbiter_queue]['available'] == 10\n arbiter.close()\n\n @staticmethod\n def test_sync_task():\n arbiter = Arbiter(host=arbiter_host, port=5672, user=arbiter_user, password=<PASSWORD>)\n for result in arbiter.add_task(Task(\"simple_add\", task_args=[1, 2]), sync=True):\n if isinstance(result, dict):\n assert result['state'] == 'done'\n assert result['result'] == 3\n assert arbiter.workers()[arbiter_queue]['available'] == 10\n", "id": "1151646", "language": "Python", "matching_score": 3.7196996212005615, "max_stars_count": 0, "path": "tests/test_arbiter.py" }, { "content": "from arbiter import Minion\nfrom multiprocessing import Process\nfrom time import sleep\nimport logging\n\napp = Minion(host=\"localhost\", port=5672, user='user', password='password', queue=\"default\")\n\n\n@app.task(name=\"add\")\ndef add(x, y):\n logging.info(\"Running task 'add'\")\n # task that initiate new task within same app\n increment = 0\n for message in app.apply('simple_add', task_args=[3, 4]):\n if isinstance(message, dict):\n increment = message[\"result\"]\n logging.info(\"sleep done\")\n return x + y + increment\n\n\n@app.task(name=\"simple_add\")\ndef adds(x, y):\n from time import sleep\n sleep(10)\n logging.info(f\"Running task 'add_small' with params {x}, {y}\")\n return x + y\n\n\n@app.task(name=\"add_in_pipe\")\ndef addp(x, y, upstream=0):\n logging.info(\"Running task 'add_in_pipe'\")\n return x + y + upstream\n\n\n@app.task(name=\"long_running\")\ndef long_task():\n sleep(180)\n return \"Long Task\"\n\n\ndef run(rpc):\n if rpc:\n app.rpc(workers=1, blocking=True)\n else:\n app.run(workers=10)\n\n\ndef start_minion(rpc: bool = False) -> Process:\n p = Process(target=run, args=(rpc,))\n p.start()\n sleep(5) # some time to start Minion\n return p\n\n\ndef stop_minion(p: Process):\n p.terminate()\n p.join()\n\n\nif __name__ == \"__main__\":\n run(False)\n", "id": "9378007", "language": "Python", "matching_score": 2.674344539642334, "max_stars_count": 0, "path": "tests/minion.py" }, { "content": "import pytest\n\nfrom arbiter import RPCClient\nfrom tests.minion import stop_minion, start_minion\n\narbiter_host = \"localhost\"\narbiter_user = \"user\"\narbiter_password = \"password\"\narbiter_queue = \"default\"\n\n\nclass TestJSONRCP:\n p = None\n\n @classmethod\n def setup_class(cls):\n cls.p = start_minion(rpc=True)\n\n @classmethod\n def teardown_class(cls):\n stop_minion(cls.p)\n\n @staticmethod\n def test_simple_task_args():\n rpc = RPCClient(arbiter_host, port=5672, user=arbiter_user, password=<PASSWORD>)\n assert rpc.call(arbiter_queue, 'simple_add', (1, 7,)) == 8\n\n @staticmethod\n def test_simple_task_kwargs():\n rpc = RPCClient(arbiter_host, port=5672, user=arbiter_user, password=<PASSWORD>)\n assert rpc.call(arbiter_queue, 'simple_add', (), {\"x\": 1, \"y\": 7}) == 8\n\n @staticmethod\n def test_simple_task_mix():\n rpc = RPCClient(arbiter_host, port=5672, user=arbiter_user, 
password=<PASSWORD>)\n assert rpc.call(arbiter_queue, 'simple_add', (1,), {\"y\": 7}) == 8\n", "id": "3456491", "language": "Python", "matching_score": 0.5970095992088318, "max_stars_count": 0, "path": "tests/test_jsonrpc.py" }, { "content": "import argparse\nimport requests\nfrom perfreporter.downsampling import Downsampler\nfrom os import environ\n\n\ndef get_args():\n parser = argparse.ArgumentParser(description='Simlog parser.')\n parser.add_argument(\"-t\", \"--type\", help=\"Test type.\")\n parser.add_argument(\"-s\", \"--simulation\", help='Test simulation', default=None)\n parser.add_argument(\"-b\", \"--build_id\", help=\"build ID\", default=None)\n parser.add_argument(\"-en\", \"--env\", help=\"Test type.\", default=None)\n parser.add_argument(\"-i\", \"--influx_host\", help='InfluxDB host or IP', default=None)\n parser.add_argument(\"-p\", \"--influx_port\", help='InfluxDB port', default=8086)\n parser.add_argument(\"-iu\", \"--influx_user\", help='InfluxDB user', default=\"\")\n parser.add_argument(\"-ip\", \"--influx_password\", help='InfluxDB password', default=\"\")\n parser.add_argument(\"-idb\", \"--influx_db\", help='Test results InfluxDB', default=\"gatling\")\n parser.add_argument(\"-l\", \"--lg_id\", help='Load generator ID', default=None)\n return vars(parser.parse_args())\n\n\ndef update_test_status():\n headers = {'content-type': 'application/json', 'Authorization': f'bearer {environ.get(\"token\")}'}\n url = f'{environ.get(\"galloper_url\")}/api/v1/reports/{environ.get(\"project_id\")}/{environ.get(\"report_id\")}/status'\n response = requests.get(url, headers=headers).json()\n if response[\"message\"] == \"Preparing...\":\n data = {\"test_status\": {\"status\": \"In progress\", \"percentage\": 10,\n \"description\": \"Test started. 
Results will be updated every minute\"}}\n response = requests.put(url, json=data, headers=headers)\n try:\n print(response.json()[\"message\"])\n except Exception:\n print(response.text)\n\n\nif __name__ == '__main__':\n if environ.get(\"report_id\"):\n update_test_status()\n args = get_args()\n Downsampler(args).run()\n", "id": "10431145", "language": "Python", "matching_score": 2.3179948329925537, "max_stars_count": 7, "path": "post_processing/downsampling.py" }, { "content": "import argparse\nimport requests\nimport zipfile\nimport logging\nimport logging_loki\nfrom multiprocessing import Queue\n\nfrom os import environ, path, walk, sep\nfrom traceback import format_exc\n\nfrom subprocess import Popen, PIPE\n\nlogger = logging.getLogger(\"browsertime\")\n\n\nGALLOPER_URL = environ.get(\"galloper_url\", None)\nTOKEN = environ.get(\"token\", None)\nBUCKET = environ.get(\"bucket\")\nNAME = environ.get(\"filename\")\nPROJECT = environ.get(\"project_id\")\nVIEWPORT = environ.get(\"view\", \"1920x1080\")\nITERATIONS = environ.get(\"tests\", \"1\")\nLOG_LEVEL = environ.get(\"log_level\", \"info\")\n\nif GALLOPER_URL:\n handler = logging_loki.LokiQueueHandler(\n Queue(-1),\n url=f\"{GALLOPER_URL.replace('https://', 'http://')}:3100/loki/api/v1/push\",\n tags={\"application\": \"interceptor\"},\n version=\"1\",\n )\n\n logger.setLevel(logging.INFO if LOG_LEVEL == 'info' else logging.DEBUG)\n logger.addHandler(handler)\n\nparser = argparse.ArgumentParser(description='Browsertime Args Parser')\nparser.add_argument('--browser', '-b', type=str, nargs=\"?\", default='chrome',\n help='Browser to be tested, chrome and firefox are allowed')\nparser.add_argument('--headers', '-H', type=str, nargs=\"*\", help='Headers in KEY:VALUE format')\nparser.add_argument('url', type=str, help='URL to be tested')\n\n\ndef get_headers():\n if TOKEN:\n return {'Authorization': 'Bearer {}'.format(TOKEN)}\n logger.error(\"Auth TOKEN is not set!\")\n return None\n\n\ndef upload_artifacts(file_path):\n try:\n file = {'file': open(file_path, 'rb')}\n requests.post('{}/api/v1/artifacts/{}/{}/{}.json'.format(GALLOPER_URL, PROJECT, BUCKET, NAME),\n files=file,\n headers=get_headers())\n logger.debug(\"Upload Successful\")\n except Exception:\n from time import sleep\n logging.error(format_exc())\n sleep(120)\n\n\ndef zipdir(p, zf):\n for root, dirs, files in walk(p):\n for file in files:\n if file.endswith(\".jpg\"):\n _p = sep.join(root.split(\"/\")[-2:])\n elif file.endswith(\".mp4\"):\n _p = \"video\"\n else:\n _p = \"\"\n zf.write(path.join(root, file), arcname=path.join(_p, file))\n\n\ndef run():\n args = parser.parse_args()\n exec_string = '/start.sh --useSameDir --visualMetrics true --visualElements true -o {} --skipHar -b {} --viewPort {} -n {} '.format(NAME, args.browser, VIEWPORT, int(ITERATIONS))\n if args.headers:\n for header in args.headers:\n exec_string += f'-r \"{header}\" '\n exec_string += args.url\n logger.debug(f\"Execution String: {exec_string}\")\n p = Popen(exec_string, stdout=PIPE, stderr=PIPE, shell=True)\n res = p.communicate()\n logger.debug(f\"Execution result: {res[0]}\")\n zf = zipfile.ZipFile('/browsertime/{}.zip'.format(NAME), 'w', zipfile.ZIP_DEFLATED)\n zipdir('browsertime-results/', zf)\n zf.close()\n upload_artifacts('/browsertime/{}.zip'.format(NAME))\n\n\nif __name__ == \"__main__\":\n run()\n", "id": "5000989", "language": "Python", "matching_score": 2.5160036087036133, "max_stars_count": 0, "path": "wrapper/start.py" }, { "content": "# Copyright 2018 getcarrier.io\n#\n# Licensed under the 
Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport docker\nimport boto3\nimport signal\nimport logging_loki\nfrom multiprocessing import Queue\nfrom arbiter import Minion\nimport logging\nfrom os import environ\nfrom time import sleep\nfrom interceptor import constants as c\nfrom traceback import format_exc\nfrom interceptor.jobs_wrapper import JobsWrapper\nfrom interceptor.post_processor import PostProcessor\nfrom interceptor.lambda_executor import LambdaExecutor\n\nRABBIT_USER = environ.get('RABBIT_USER', 'user')\nRABBIT_PASSWORD = environ.get('RABBIT_PASSWORD', 'password')\nRABBIT_HOST = environ.get('RABBIT_HOST', 'localhost')\nRABBIT_PORT = environ.get('RABBIT_PORT', '5672')\nQUEUE_NAME = environ.get('QUEUE_NAME', \"default\")\nCPU_CORES = environ.get('CPU_CORES', 2)\nVHOST = environ.get('VHOST', 'carrier')\n\napp = Minion(host=RABBIT_HOST, port=RABBIT_PORT,\n user=RABBIT_USER, password=<PASSWORD>, queue=QUEUE_NAME, vhost=VHOST)\n\nlogger = logging.getLogger(\"interceptor\")\n\n\nif c.LOKI_HOST:\n handler = logging_loki.LokiQueueHandler(\n Queue(-1),\n url=f\"{c.LOKI_HOST.replace('https://', 'http://')}:{c.LOKI_PORT}/loki/api/v1/push\",\n tags={\"application\": \"interceptor\"},\n version=\"1\",\n )\n\n logger.setLevel(logging.INFO if c.LOG_LEVEL == 'info' else logging.DEBUG)\n logger.addHandler(handler)\n\n\nstop_task = False\n\n\ndef sigterm_handler(signal, frame):\n global stop_task\n stop_task = True\n\n\nsignal.signal(signal.SIGTERM, sigterm_handler)\n\n\n@app.task(name=\"terminate_ec2_instances\")\ndef terminate_ec2_instances(aws_access_key_id, aws_secret_access_key, region_name, fleet_id):\n try:\n ec2 = boto3.client('ec2', aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key,\n region_name=region_name)\n response = ec2.cancel_spot_fleet_requests(\n SpotFleetRequestIds=[\n fleet_id,\n ],\n TerminateInstances=True\n )\n logger.info(response)\n return \"Done\"\n except Exception:\n logger.error(format_exc())\n logger.info(\"Failed to terminate AWS ec2 instances\")\n return \"Failed\"\n\n\n@app.task(name=\"post_process\")\ndef post_process(galloper_url, project_id, galloper_web_hook, bucket, prefix, junit=False, token=None, integration=[],\n email_recipients=None):\n try:\n PostProcessor(galloper_url, project_id, galloper_web_hook, bucket,\n prefix, junit, token, integration, email_recipients).results_post_processing()\n return \"Done\"\n except Exception:\n logger.error(format_exc())\n logger.info(\"Failed to run post processor\")\n return \"Failed\"\n\n\n@app.task(name=\"browsertime\")\ndef browsertime(galloper_url, project_id, token, bucket, filename, url, view='1920x1080', tests='1',\n headers=None, browser=\"\", *args, **kwargs):\n try:\n if not headers:\n headers = {}\n client = docker.from_env()\n env_vars = {\"galloper_url\": galloper_url, \"project_id\": project_id, \"token\": token, \"bucket\": bucket,\n \"filename\": filename, \"view\": view, \"tests\": tests}\n cmd = url\n if headers:\n cmd += \" -H \"\n for key, value in headers.items():\n if 
key.strip().lower() not in c.STRIP_HEADERS:\n cmd += f'\"{key.strip()}:{value.strip()}\" '\n if browser:\n cmd += f\" -b {browser}\"\n\n cid = getattr(JobsWrapper, 'browsertime')(client, c.BROWSERTIME_CONTAINER, env_vars, cmd)\n while cid.status != \"exited\":\n logger.info(f\"Executing: {c.BROWSERTIME_CONTAINER}\")\n logger.info(f\"Execution params: {cmd}\")\n logger.info(f\"Container {cid.id} status {cid.status}\")\n try:\n cid.reload()\n except:\n break\n sleep(10)\n return \"Done\"\n except Exception:\n logger.error(format_exc())\n logger.info(\"Failed to run browsertime task\")\n return \"Failed\"\n\n\n@app.task(name=\"execute_lambda\")\ndef execute_lambda(task, event, galloper_url, token):\n try:\n LambdaExecutor(task, event, galloper_url, token).execute_lambda()\n return \"Done\"\n except Exception:\n logger.error(format_exc())\n logger.info(f\"Failed to execute {task['task_name']} lambda\")\n return \"Failed\"\n\n\n@app.task(name=\"execute\")\ndef execute_job(job_type, container, execution_params, job_name):\n if not getattr(JobsWrapper, job_type):\n return False, \"Job Type not found\"\n client = docker.from_env()\n client.info()\n logger.info(f\"Executing: {job_type} on {container} with name {job_name}\")\n logger.info(f\"Execution params: {execution_params}\")\n try:\n cid = getattr(JobsWrapper, job_type)(client, container, execution_params, job_name)\n except:\n return f\"Failed to run docker container {container}\"\n logger.info(f\"Container {cid.id} status {cid.status}\")\n client_lowlevel = docker.APIClient(base_url='unix://var/run/docker.sock')\n last_log = []\n while cid.status != \"exited\":\n global stop_task\n if stop_task:\n stop_task = False\n cid.stop(timeout=60)\n logger.info(f\"Aborted: {job_type} on {container} with name {job_name}\")\n exit(0)\n try:\n cid.reload()\n logger.info(f'Container Status: {cid.status}')\n resource_usage = client_lowlevel.stats(cid.id, stream=False)\n logger.info(f'Container {cid.id} resource usage -- '\n f'CPU: {round(float(resource_usage[\"cpu_stats\"][\"cpu_usage\"][\"total_usage\"]) / c.CPU_MULTIPLIER, 2)} '\n f'RAM: {round(float(resource_usage[\"memory_stats\"][\"usage\"]) / (1024 * 1024), 2)} Mb '\n f'of {round(float(resource_usage[\"memory_stats\"][\"limit\"]) / (1024 * 1024), 2)} Mb')\n logs = client_lowlevel.logs(cid.id, stream=False, tail=100).decode(\"utf-8\", errors='ignore').split('\\r\\n')\n for each in logs:\n if each not in last_log:\n logging.info(each)\n last_log = logs\n except:\n break\n sleep(10)\n return \"Done\"\n\n\ndef main():\n app.run(workers=int(CPU_CORES))\n\n\nif __name__ == '__main__':\n main()\n\n\n", "id": "6746268", "language": "Python", "matching_score": 4.847971439361572, "max_stars_count": 0, "path": "interceptor/run.py" }, { "content": "import boto3\nimport base64\nfrom json import loads, JSONDecodeError\nimport requests\nfrom time import sleep\nfrom uuid import uuid4\nfrom arbiter import Arbiter\n\n\nec2 = None\n\n\ndef request_spot_fleets(args, galloper_url, project_id, token, rabbit_host, rabbit_user, rabbit_password, rabbit_port,\n vhost):\n print(\"Requesting Spot Fleets...\")\n secrets_url = f\"{galloper_url}/api/v1/secrets/{project_id}/aws\"\n headers = {\n 'Authorization': f'bearer {token}',\n 'Content-type': 'application/json'\n }\n aws_config = {}\n try:\n aws_config = loads(requests.get(secrets_url, headers=headers).json()[\"secret\"])\n except (AttributeError, JSONDecodeError):\n print(\"Failed to load AWS config for the project\")\n exit(1)\n queue_name = str(uuid4())\n 
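# Two fresh queues per fleet: workers listen on queue_name, while a separate\n # finalizer interceptor (started in user_data below) listens on finalizer_queue_name.\n 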
finalizer_queue_name = str(uuid4())\n total_workers = 0\n cpu = float(args.execution_params[0][\"cpu_cores_limit\"]) if \"cpu_cores_limit\" in args.execution_params[0] else 1.0\n memory = int(args.execution_params[0][\"memory_limit\"]) if \"memory_limit\" in args.execution_params[0] else 1\n for i in range(len(args.concurrency)):\n args.channel[i] = queue_name\n args.execution_params[i][\"JVM_ARGS\"] = f\"-Xms{memory}g -Xmx{memory}g\"\n total_workers += args.concurrency[i]\n cpu += 0.5\n memory += 1\n if cpu > 8:\n print(\"Max CPU cores limit should be less than 8\")\n exit(1)\n if memory > 30:\n print(\"Max memory limit should be less than 30g\")\n exit(1)\n total_cpu_cores = round(cpu * total_workers + 0.1)\n workers_per_lg = 2 if total_cpu_cores > 2 and memory < 8 else 1\n lg_count = round(total_workers / workers_per_lg + 0.1)\n print(f\"CPU per worker - {cpu}. Memory per worker - {memory}g\")\n print(f\"Instances count - {lg_count}\")\n global ec2\n ec2 = boto3.client('ec2', aws_access_key_id=aws_config.get(\"aws_access_key\"),\n aws_secret_access_key=aws_config[\"aws_secret_access_key\"], region_name=aws_config[\"region_name\"])\n user_data = '''#!/bin/bash\n apt update\n apt install docker\n apt install docker.io -y\n '''\n user_data += f\"docker pull {args.container[0]}\\n\"\n user_data += f\"docker run -d -v /var/run/docker.sock:/var/run/docker.sock -e RAM_QUOTA=1g -e CPU_QUOTA=1\" \\\n f\" -e CPU_CORES=1 -e RABBIT_HOST={rabbit_host} -e RABBIT_USER={rabbit_user}\" \\\n f\" -e RABBIT_PASSWORD={<PASSWORD>} -e VHOST={vhost} -e QUEUE_NAME={finalizer_queue_name}\" \\\n f\" -e LOKI_HOST={galloper_url.replace('https://', 'http://')} \" \\\n f\"getcarrier/interceptor:2.5\\n\"\n user_data += f\"docker run -d -v /var/run/docker.sock:/var/run/docker.sock -e RAM_QUOTA={memory}g -e CPU_QUOTA={cpu}\" \\\n f\" -e CPU_CORES={workers_per_lg} -e RABBIT_HOST={rabbit_host} -e RABBIT_USER={rabbit_user}\" \\\n f\" -e RABBIT_PASSWORD={<PASSWORD>} -e VHOST={vhost} -e QUEUE_NAME={queue_name}\" \\\n f\" -e LOKI_HOST={galloper_url.replace('https://', 'http://')} \" \\\n f\"getcarrier/interceptor:2.5\"\n user_data = base64.b64encode(user_data.encode(\"ascii\")).decode(\"ascii\")\n config = {\n \"Type\": \"request\",\n 'AllocationStrategy': \"lowestPrice\",\n \"IamFleetRole\": aws_config[\"iam_fleet_role\"],\n \"TargetCapacity\": lg_count,\n \"SpotPrice\": \"2.5\",\n \"TerminateInstancesWithExpiration\": True,\n 'LaunchSpecifications': []\n }\n\n instance_types = get_instance_types(cpu, memory, workers_per_lg)\n for each in instance_types:\n specs = {\n \"ImageId\": aws_config[\"image_id\"],\n \"InstanceType\": each,\n \"BlockDeviceMappings\": [],\n \"SpotPrice\": \"2.5\",\n \"NetworkInterfaces\": [],\n \"SecurityGroups\": [],\n \"UserData\": user_data\n }\n if aws_config[\"security_groups\"]:\n for sg in aws_config[\"security_groups\"].split(\",\"):\n specs[\"SecurityGroups\"].append({\"GroupId\": sg})\n config[\"LaunchSpecifications\"].append(specs)\n response = ec2.request_spot_fleet(SpotFleetRequestConfig=config)\n print(\"*********************************************\")\n print(response)\n fleet_id = response[\"SpotFleetRequestId\"]\n arbiter = Arbiter(host=rabbit_host, port=rabbit_port, user=rabbit_user, password=<PASSWORD>, vhost=vhost)\n retry = 10\n while retry != 0:\n try:\n workers = arbiter.workers()\n except Exception:\n workers = {}\n print(workers)\n if args.channel[0] in workers and workers[args.channel[0]][\"available\"] >= total_workers:\n print(\"Spot Fleet instances are ready\")\n break\n else:\n 
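# Not all workers have registered on the queue yet: poll once a minute,\n # giving the fleet up to 10 minutes before the request is cancelled.\n 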
print(\"Waiting for the Spot Fleet instances to start ...\")\n sleep(60)\n retry -= 1\n if retry == 0:\n print(\"Spot instances set up timeout - 600 seconds ...\")\n terminate_spot_instances(fleet_id)\n exit(1)\n ec2_settings = {\n \"aws_access_key_id\": aws_config.get(\"aws_access_key\"),\n \"aws_secret_access_key\": aws_config[\"aws_secret_access_key\"],\n \"region_name\": aws_config[\"region_name\"],\n \"fleet_id\": fleet_id,\n \"finalizer_queue_name\": finalizer_queue_name\n }\n return ec2_settings\n\n\ndef terminate_spot_instances(fleet_id):\n print(\"Terminating Spot instances...\")\n global ec2\n response = ec2.cancel_spot_fleet_requests(\n SpotFleetRequestIds=[\n fleet_id,\n ],\n TerminateInstances=True\n )\n print(response)\n\n\ndef get_instance_types(cpu, memory, workers_per_lg):\n instances = {\n \"2 cpu\": {\n \"4g\": [\"c5.large\", \"t2.medium\", \"t3a.medium\", \"t3.medium\"],\n \"8g\": [\"m5n.large\", \"m5zn.large\", \"m5.large\", \"t2.large\", \"t3.large\"],\n \"16g\": [\"r4.large\", \"r5n.large\", \"r5ad.large\", \"r5.large\", \"r5d.large\"]\n },\n \"4 cpu\": {\n \"8g\": [\"c5a.xlarge\", \"c5.xlarge\", \"c5d.xlarge\", \"c5ad.xlarge\"],\n \"16g\": [\"m5ad.xlarge\", \"m5d.xlarge\", \"m5zn.xlarge\", \"m5.xlarge\", \"m5a.xlarge\"]\n },\n \"8 cpu\": {\n \"16g\": [\"c5.2xlarge\", \"c5ad.2xlarge\", \"c5a.2xlarge\", \"c5n.2xlarge\"],\n \"32g\": [\"m4.2xlarge\", \"m5dn.2xlarge\", \"m5ad.2xlarge\", \"m5d.2xlarge\"]\n }\n }\n\n if cpu * workers_per_lg < 2:\n cpu_key = \"2 cpu\"\n elif cpu * workers_per_lg < 4:\n cpu_key = \"4 cpu\"\n else:\n cpu_key = \"8 cpu\"\n\n if memory * workers_per_lg < 4 and cpu_key == \"2 cpu\":\n memory_key = \"4g\"\n elif memory * workers_per_lg < 8:\n memory_key = \"8g\"\n elif memory * workers_per_lg < 16:\n memory_key = \"16g\"\n else:\n memory_key = \"32g\"\n cpu_key = \"8 cpu\"\n\n print(f\"Instance types for {cpu_key} and {memory_key}:\")\n print(instances[cpu_key][memory_key])\n return instances[cpu_key][memory_key]\n", "id": "10721204", "language": "Python", "matching_score": 1.9556081295013428, "max_stars_count": 0, "path": "control_tower/aws.py" }, { "content": "import docker\nimport operator\nfrom sqlalchemy import and_\nfrom json import loads\nimport re\nfrom datetime import datetime\nfrom uuid import uuid4\n\nfrom ..constants import JOB_CONTAINER_MAPPING, JOB_TYPE_MAPPING\nfrom ...projects.models.statistics import Statistic\nfrom ...tasks.api.utils import run_task\n\n\ndef compile_tests(project_id, file_name, runner):\n from flask import current_app\n client = docker.from_env()\n container_name = JOB_CONTAINER_MAPPING.get(runner)[\"container\"]\n secrets = current_app.config[\"CONTEXT\"].rpc_manager.call.get_secrets(project_id=project_id)\n env_vars = {\"artifact\": file_name, \"bucket\": \"tests\", \"galloper_url\": secrets[\"galloper_url\"],\n \"token\": secrets[\"auth_token\"], \"project_id\": project_id, \"compile\": \"true\"}\n client.containers.run(container_name, stderr=True, remove=True, environment=env_vars, tty=True, user='0:0')\n\n\ndef exec_test(project_id, event):\n response = run_task(project_id, event)\n response[\"redirect\"] = f\"/task/{response['task_id']}/results\"\n\n statistic = Statistic.query.filter_by(project_id=project_id).first()\n statistic.performance_test_runs += 1\n statistic.commit()\n\n return response\n\n\ndef get_backend_test_data(event):\n users_count = 0\n duration = 0\n vusers_var_names = [\"vusers\", \"users\", \"users_count\", \"ramp_users\", \"user_count\"]\n lg_type = 
JOB_TYPE_MAPPING.get(event[\"job_type\"], \"other\")\n tests_count = 1\n if lg_type == 'jmeter':\n for i in range(tests_count):\n exec_params = loads(event[\"execution_params\"])[\"cmd\"] + \" \"\n test_type = re.findall('-Jtest_type=(.+?) ', exec_params)\n test_type = test_type[0] if len(test_type) else 'demo'\n environment = re.findall(\"-Jenv_type=(.+?) \", exec_params)\n environment = environment[0] if len(environment) else 'demo'\n test_name = re.findall(\"-Jtest_name=(.+?) \", exec_params)\n test_name = test_name[0] if len(test_name) else 'test'\n duration = re.findall(\"-JDURATION=(.+?) \", exec_params)\n duration = float(duration[0]) if len(duration) else 0\n for each in vusers_var_names:\n if f'-j{each}' in exec_params.lower():\n pattern = f'-j{each}=(.+?) '\n vusers = re.findall(pattern, exec_params.lower())\n users_count += int(vusers[0]) * event[\"concurrency\"]\n break\n elif lg_type == 'gatling':\n for i in range(tests_count):\n exec_params = loads(event[\"execution_params\"])\n test_type = exec_params['test_type'] if exec_params.get('test_type') else 'demo'\n test_name = exec_params['test'].split(\".\")[1].lower() if exec_params.get('test') else 'test'\n environment = exec_params['env'] if exec_params.get('env') else 'demo'\n if exec_params.get('GATLING_TEST_PARAMS'):\n if '-dduration' in exec_params['GATLING_TEST_PARAMS'].lower():\n duration = re.findall(\"-dduration=(.+?) \", exec_params['GATLING_TEST_PARAMS'].lower())[0]\n for each in vusers_var_names:\n if f'-d{each}' in exec_params['GATLING_TEST_PARAMS'].lower():\n pattern = f'-d{each}=(.+?) '\n vusers = re.findall(pattern, exec_params['GATLING_TEST_PARAMS'].lower())\n users_count += int(vusers[0]) * event[\"concurrency\"]\n break\n else:\n return {}\n start_time = datetime.utcnow().isoformat(\"T\") + \"Z\"\n\n data = {'build_id': f'build_{uuid4()}', 'test_name': test_name, 'lg_type': lg_type, 'type': test_type,\n 'duration': duration, 'vusers': users_count, 'environment': environment, 'start_time': start_time,\n 'missed': 0}\n return data\n\n\ndef _calculate_limit(limit, total):\n return len(total) if limit == 'All' else limit\n\n\ndef get(project, args, data_model, additional_filter=None):\n limit_ = args.get(\"limit\")\n offset_ = args.get(\"offset\")\n if args.get(\"sort\"):\n sort_rule = getattr(getattr(data_model, args[\"sort\"]), args[\"order\"])()\n else:\n sort_rule = data_model.id.desc()\n filter_ = list()\n filter_.append(operator.eq(data_model.project_id, project.id))\n if additional_filter:\n for key, value in additional_filter.items():\n filter_.append(operator.eq(getattr(data_model, key), value))\n if args.get('filter'):\n for key, value in loads(args.get(\"filter\")).items():\n filter_.append(operator.eq(getattr(data_model, key), value))\n filter_ = and_(*tuple(filter_))\n total = data_model.query.order_by(sort_rule).filter(filter_).count()\n res = data_model.query.filter(filter_).order_by(sort_rule).limit(\n _calculate_limit(limit_, total)).offset(offset_).all()\n return total, res\n", "id": "9257368", "language": "Python", "matching_score": 4.0449018478393555, "max_stars_count": 0, "path": "utils/utils.py" }, { "content": "# Copyright (c) 2018 getcarrier.io\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is 
distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport argparse\n\nimport os\nimport tempfile\nimport zipfile\nimport arbiter\nfrom copy import deepcopy\nfrom json import loads, dumps\nfrom os import environ, path\nfrom time import sleep, time\nfrom uuid import uuid4\nimport re\nfrom datetime import datetime\nimport requests\nimport sys\n\nRABBIT_USER = environ.get('RABBIT_USER', 'user')\nRABBIT_PASSWORD = environ.get('RABBIT_PASSWORD', 'password')\nRABBIT_HOST = environ.get('RABBIT_HOST', 'localhost')\nRABBIT_VHOST = environ.get('RABBIT_VHOST', 'carrier')\nRABBIT_PORT = environ.get('RABBIT_PORT', '5672')\nGALLOPER_WEB_HOOK = environ.get('GALLOPER_WEB_HOOK', None)\nLOKI_HOST = environ.get('loki_host', None)\nLOKI_PORT = environ.get('loki_port', '3100')\nGALLOPER_URL = environ.get('galloper_url', None)\nPROJECT_ID = environ.get('project_id', None)\nBUCKET = environ.get('bucket', None)\nTEST = environ.get('artifact', None)\nADDITIONAL_FILES = environ.get('additional_files', None)\nBUILD_ID = environ.get('build_id', f'build_{uuid4()}')\nDISTRIBUTED_MODE_PREFIX = environ.get('PREFIX', f'test_results_{uuid4()}_')\nJVM_ARGS = environ.get('JVM_ARGS', None)\nTOKEN = environ.get('token', None)\nmounts = environ.get('mounts', None)\nrelease_id = environ.get('release_id', None)\napp = None\nSAMPLER = environ.get('sampler', \"REQUEST\")\nREQUEST = environ.get('request', \"All\")\nCALCULATION_DELAY = environ.get('data_wait', 300)\nCHECK_SATURATION = environ.get('check_saturation', None)\nMAX_ERRORS = environ.get('error_rate', 100)\nDEVIATION = environ.get('dev', 0.02)\nMAX_DEVIATION = environ.get('max_dev', 0.05)\nU_AGGR = environ.get('u_aggr', 1)\nKILL_MAX_WAIT_TIME = 10\nSPLIT_CSV = environ.get('split_csv', 'False')\nCSV_PATH = environ.get('csv_path', '')\nreport_type = \"\"\n\nJOB_TYPE_MAPPING = {\n \"perfmeter\": \"jmeter\",\n \"perfgun\": \"gatling\",\n \"free_style\": \"other\",\n \"observer\": \"observer\",\n \"dast\": \"dast\",\n \"sast\": \"sast\",\n}\n\nREPORT_TYPE_MAPPING = {\n \"gatling\": \"backend\",\n \"jmeter\": \"backend\",\n \"observer\": \"frontend\",\n \"dast\": \"security\",\n \"sast\": \"security\"\n}\n\nPROJECT_PACKAGE_MAPPER = {\n \"basic\": {\"duration\": 1800, \"load_generators\": 1},\n \"startup\": {\"duration\": 7200, \"load_generators\": 5},\n \"professional\": {\"duration\": 28800, \"load_generators\": 10},\n \"enterprise\": {\"duration\": -1, \"load_generators\": -1},\n \"custom\": {\"duration\": -1, \"load_generators\": -1}, # need to set custom values?\n}\n\nENV_VARS_MAPPING = {\n \"RABBIT_USER\": \"RABBIT_USER\",\n \"RABBIT_PASSWORD\": \"<PASSWORD>_PASSWORD\",\n \"RABBIT_HOST\": \"RABBIT_HOST\",\n \"RABBIT_VHOST\": \"RABBIT_VHOST\",\n \"RABBIT_PORT\": \"RABBIT_PORT\",\n \"GALLOPER_WEB_HOOK\": \"GALLOPER_WEB_HOOK\",\n \"LOKI_PORT\": \"LOKI_PORT\",\n \"mounts\": \"mounts\",\n \"release_id\": \"release_id\",\n \"sampler\": \"SAMPLER\",\n \"request\": \"REQUEST\",\n \"data_wait\": \"CALCULATION_DELAY\",\n \"check_saturation\": \"CHECK_SATURATION\",\n \"error_rate\": \"MAX_ERRORS\",\n \"dev\": \"DEVIATION\",\n \"max_dev\": \"MAX_DEVIATION\",\n \"galloper_url\": \"GALLOPER_URL\",\n \"token\": \"TOKEN\",\n \"project_id\": \"PROJECT_ID\",\n \"bucket\": \"BUCKET\",\n \"u_aggr\": \"U_AGGR\",\n \"split_csv\": \"SPLIT_CSV\",\n \"csv_path\": \"CSV_PATH\"\n}\n\n\ndef str2bool(v):\n if isinstance(v, bool):\n return 
v\n if v.lower() in ('yes', 'true', 't', 'y', '1'):\n return True\n elif v.lower() in ('no', 'false', 'f', 'n', '0'):\n return False\n else:\n raise argparse.ArgumentTypeError('Boolean value expected.')\n\n\ndef str2json(v):\n try:\n return loads(v)\n except:\n raise argparse.ArgumentTypeError('Json is not properly formatted.')\n\n\ndef arg_parse():\n parser = argparse.ArgumentParser(description='Carrier Command Center')\n parser.add_argument('-c', '--container', action=\"append\", type=str, default=[],\n help=\"Name of container to run the job e.g. getcarrier/dusty:latest\")\n parser.add_argument('-e', '--execution_params', action=\"append\", type=str2json, default=[],\n help=\"Execution params for jobs e.g. \\n\"\n \"{\\n\\t'host': 'localhost', \\n\\t'port':'443', \\n\\t'protocol':'https'\"\n \", \\n\\t'project_name':'MY_PET', \\n\\t'environment':'stag', \\n\\t\"\n \"'test_type': 'basic'\"\n \"\\n} will be valid for dast container\")\n parser.add_argument('-t', '--job_type', action=\"append\", type=str, default=[],\n help=\"Type of a job: e.g. sast, dast, perfmeter, perfgun, perf-ui\")\n parser.add_argument('-n', '--job_name', type=str, default=\"test\",\n help=\"Name of a job (e.g. unique job ID, like %JOBNAME%_%JOBID%)\")\n parser.add_argument('-q', '--concurrency', action=\"append\", type=int, default=[],\n help=\"Number of parallel workers to run the job\")\n parser.add_argument('-r', '--channel', action=\"append\", default=[], type=str,\n help=\"Rabbit (interceptor) queue name to run the job\")\n parser.add_argument('-a', '--artifact', default=\"\", type=str)\n parser.add_argument('-b', '--bucket', default=\"\", type=str)\n parser.add_argument('-sr', '--save_reports', default=False, type=str2bool)\n parser.add_argument('-j', '--junit', default=False, type=str2bool)\n parser.add_argument('-qg', '--quality_gate', default=False, type=str2bool)\n parser.add_argument('-jr', '--jira', default=False, type=str2bool)\n parser.add_argument('-eml', '--email', default=False, type=str2bool)\n parser.add_argument('-el', '--email_recipients', default=\"\", type=str)\n parser.add_argument('-rp', '--report_portal', default=False, type=str2bool)\n parser.add_argument('-ado', '--azure_devops', default=False, type=str2bool)\n parser.add_argument('-p', '--report_path', default=\"/tmp/reports\", type=str)\n parser.add_argument('-d', '--deviation', default=0, type=float)\n parser.add_argument('-md', '--max_deviation', default=0, type=float)\n parser.add_argument('-tid', '--test_id', default=\"\", type=str)\n args, _ = parser.parse_known_args()\n if args.test_id and GALLOPER_URL:\n args = append_test_config(args)\n return args\n\n\ndef append_test_config(args):\n headers = {'content-type': 'application/json'}\n if TOKEN:\n headers['Authorization'] = f'bearer {TOKEN}'\n url = f\"{GALLOPER_URL}/api/v1/tests/{PROJECT_ID}/{args.test_id}\"\n # get job_type\n test_config = requests.get(url, headers=headers)\n try:\n test_config = test_config.json()\n except:\n print(test_config.text)\n exit(1)\n job_type = args.job_type[0] if args.job_type else test_config[\"job_type\"]\n lg_type = JOB_TYPE_MAPPING.get(job_type, \"other\")\n params = {}\n execution_params = []\n concurrency = []\n container = []\n job_type = []\n tests_count = len(args.concurrency) if args.concurrency else 1\n # prepare params\n for i in range(tests_count):\n if lg_type == 'jmeter':\n url = f\"{GALLOPER_URL}/api/v1/tests/{PROJECT_ID}/backend/{args.test_id}\"\n if args.execution_params and \"cmd\" in args.execution_params[i].keys():\n exec_params 
= args.execution_params[i]['cmd'].split(\"-J\")\n for each in exec_params:\n if \"=\" in each:\n _ = each.split(\"=\")\n params[_[0]] = str(_[1]).strip()\n elif lg_type == 'gatling':\n url = f\"{GALLOPER_URL}/api/v1/tests/{PROJECT_ID}/backend/{args.test_id}\"\n if args.execution_params and \"GATLING_TEST_PARAMS\" in args.execution_params[i].keys():\n exec_params = args.execution_params[i]['GATLING_TEST_PARAMS'].split(\"-D\")\n for each in exec_params:\n if \"=\" in each:\n _ = each.split(\"=\")\n params[_[0]] = str(_[1]).strip()\n elif lg_type == 'observer':\n url = f\"{GALLOPER_URL}/api/v1/tests/{PROJECT_ID}/frontend/{args.test_id}\"\n elif lg_type == 'dast':\n url = f\"{GALLOPER_URL}/api/v1/tests/{PROJECT_ID}/dast/{args.test_id}\"\n elif lg_type == 'sast':\n url = f\"{GALLOPER_URL}/api/v1/tests/{PROJECT_ID}/sast/{args.test_id}\"\n else:\n print(f\"No data found for test_id={args.test_id}\")\n exit(1)\n\n data = {\n \"parallel\": args.concurrency[i] if args.concurrency else None,\n \"params\": dumps(params),\n \"emails\": args.email_recipients if args.email_recipients else \"\",\n \"type\": \"config\"\n }\n # merge params with test config\n test_config = requests.post(url, json=data, headers=headers)\n try:\n test_config = test_config.json()\n except:\n print(test_config.text)\n exit(1)\n # set args and env vars\n execution_params.append(loads(test_config[\"execution_params\"]))\n concurrency.append(test_config[\"concurrency\"])\n container.append(test_config[\"container\"])\n job_type.append(test_config[\"job_type\"])\n\n for each in [\"artifact\", \"bucket\", \"job_name\", \"email_recipients\"]:\n if not getattr(args, each) and each in test_config.keys():\n setattr(args, each, test_config[each])\n for each in [\"container\", \"job_type\", \"channel\"]:\n if not getattr(args, each) and each in test_config.keys():\n setattr(args, each, [test_config[each]])\n for each in [\"junit\", \"quality_gate\", \"save_reports\", \"jira\", \"report_portal\", \"email\", \"azure_devops\"]:\n if not getattr(args, each) and each in test_config.keys():\n setattr(args, each, str2bool(test_config[each]))\n env_vars = test_config[\"cc_env_vars\"]\n for key, value in env_vars.items():\n if not environ.get(key, None):\n globals()[ENV_VARS_MAPPING.get(key)] = value\n\n setattr(args, \"execution_params\", execution_params)\n setattr(args, \"concurrency\", concurrency)\n setattr(args, \"container\", container)\n setattr(args, \"job_type\", job_type)\n if \"git\" in test_config.keys():\n process_git_repo(test_config, args)\n if str2bool(SPLIT_CSV):\n split_csv_file(args)\n return args\n\n\ndef process_git_repo(test_config, args):\n from control_tower.git_clone import clone_repo, post_artifact\n git_setting = test_config[\"git\"]\n clone_repo(git_setting)\n post_artifact(GALLOPER_URL, TOKEN, PROJECT_ID, f\"{BUILD_ID}.zip\")\n setattr(args, \"artifact\", f\"{BUILD_ID}.zip\")\n setattr(args, \"bucket\", \"tests\")\n globals()[\"compile_and_run\"] = \"true\"\n\n\ndef split_csv_file(args):\n from control_tower.csv_splitter import process_csv\n globals()[\"csv_array\"] = process_csv(GALLOPER_URL, TOKEN, PROJECT_ID, args.artifact, args.bucket, CSV_PATH,\n args.concurrency[0])\n concurrency, execution_params, job_type, container, channel = [], [], [], [], []\n for i in range(args.concurrency[0]):\n concurrency.append(1)\n execution_params.append(args.execution_params[0])\n job_type.append(args.job_type[0])\n container.append(args.container[0])\n channel.append(args.channel[0])\n args.concurrency = concurrency\n 
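# inferred from usage here, not a documented contract: process_csv uploads one\n # csv chunk per load generator and returns a {bucket_path: file} mapping for\n # each, so the run is rewritten as args.concurrency[0] single-user entries;\n # every per-runner list assigned here must stay index-aligned for start_job\n 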
args.execution_params = execution_params\n args.job_type = job_type\n args.container = container\n args.channel = channel\n\n\ndef parse_id():\n parser = argparse.ArgumentParser(description='Carrier Command Center')\n parser.add_argument('-g', '--groupid', type=str, default=\"\", help=\"ID of the group for a task\")\n parser.add_argument('-c', '--container', type=str, help=\"Name of container to run the job \"\n \"e.g. getcarrier/dusty:latest\")\n parser.add_argument('-t', '--job_type', type=str, help=\"Type of a job: e.g. sast, dast, perf-jmeter, perf-ui\")\n parser.add_argument('-n', '--job_name', type=str, help=\"Name of a job (e.g. unique job ID, like %JOBNAME%_%JOBID%)\")\n args, _ = parser.parse_known_args()\n if args.groupid:\n for unparsed in _:\n args.groupid = args.groupid + unparsed\n if 'group_id' in args.groupid:\n args.groupid = loads(args.groupid)\n return args\n\n\ndef start_job(args=None):\n if not args:\n args = arg_parse()\n if GALLOPER_URL and PROJECT_ID and TOKEN:\n package = get_project_package()\n allowable_load_generators = PROJECT_PACKAGE_MAPPER.get(package)[\"load_generators\"]\n for each in args.concurrency:\n if allowable_load_generators != -1 and allowable_load_generators < each:\n print(f\"Only {allowable_load_generators} parallel load generators allowable for {package} package.\")\n exit(0)\n\n # AWS integration\n ec2_settings = {}\n if args.channel[0] == \"aws\":\n from control_tower.aws import request_spot_fleets\n ec2_settings = request_spot_fleets(args, GALLOPER_URL, PROJECT_ID, TOKEN, RABBIT_HOST, RABBIT_USER,\n RABBIT_PASSWORD, RABBIT_PORT, RABBIT_VHOST)\n\n results_bucket = str(args.job_name).replace(\"_\", \"\").replace(\" \", \"\").lower()\n integration = []\n for each in [\"jira\", \"report_portal\", \"email\", \"azure_devops\"]:\n if getattr(args, each):\n integration.append(each)\n post_processor_args = {\n \"galloper_url\": GALLOPER_URL,\n \"project_id\": PROJECT_ID,\n \"galloper_web_hook\": GALLOPER_WEB_HOOK,\n \"bucket\": results_bucket,\n \"prefix\": DISTRIBUTED_MODE_PREFIX,\n \"junit\": args.junit,\n \"token\": TOKEN,\n \"integration\": integration,\n \"email_recipients\": args.email_recipients\n }\n globals()[\"report_type\"] = JOB_TYPE_MAPPING.get(args.job_type[0], \"other\")\n arb = arbiter.Arbiter(host=RABBIT_HOST, port=RABBIT_PORT, user=RABBIT_USER,\n password=<PASSWORD>, vhost=RABBIT_VHOST)\n tasks = []\n for i in range(len(args.concurrency)):\n exec_params = deepcopy(args.execution_params[i])\n if mounts:\n exec_params['mounts'] = mounts\n if args.job_type[i] in ['perfgun', 'perfmeter']:\n exec_params['config_yaml'] = {}\n if LOKI_HOST:\n exec_params['loki_host'] = LOKI_HOST\n exec_params['loki_port'] = LOKI_PORT\n if ADDITIONAL_FILES:\n exec_params['additional_files'] = ADDITIONAL_FILES\n if globals().get(\"csv_array\"):\n if 'additional_files' in exec_params:\n exec_params['additional_files'] = {**exec_params['additional_files'],\n **globals().get(\"csv_array\")[i]}\n else:\n exec_params['additional_files'] = globals().get(\"csv_array\")[i]\n\n if 'additional_files' in exec_params:\n exec_params['additional_files'] = dumps(exec_params['additional_files']).replace(\"'\", \"\\\"\")\n if JVM_ARGS:\n exec_params['JVM_ARGS'] = JVM_ARGS\n exec_params['build_id'] = BUILD_ID\n exec_params['DISTRIBUTED_MODE_PREFIX'] = DISTRIBUTED_MODE_PREFIX\n exec_params['galloper_url'] = GALLOPER_URL\n exec_params['bucket'] = BUCKET if not args.bucket else args.bucket\n exec_params['artifact'] = TEST if not args.artifact else args.artifact\n 
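# results_bucket was derived from job_name above (lowercased, spaces and\n # underscores stripped); the post_process callback later reads reports from it\n 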
exec_params['results_bucket'] = results_bucket\n exec_params['save_reports'] = args.save_reports\n if globals().get(\"compile_and_run\") == \"true\":\n exec_params[\"compile_and_run\"] = \"true\"\n if PROJECT_ID:\n exec_params['project_id'] = PROJECT_ID\n if TOKEN:\n exec_params['token'] = TOKEN\n\n elif args.job_type[i] == \"observer\":\n execution_params = args.execution_params[i]\n\n exec_params[\"GALLOPER_URL\"] = GALLOPER_URL\n exec_params[\"REPORTS_BUCKET\"] = BUCKET\n exec_params[\"RESULTS_BUCKET\"] = results_bucket\n exec_params[\"RESULTS_REPORT_NAME\"] = DISTRIBUTED_MODE_PREFIX\n exec_params[\"GALLOPER_PROJECT_ID\"] = PROJECT_ID\n exec_params[\"JOB_NAME\"] = args.job_name\n exec_params['ARTIFACT'] = args.artifact\n exec_params['TESTS_BUCKET'] = args.bucket\n exec_params['REPORT_ID'] = BUILD_ID.replace(\"build_\", \"\")\n\n if TOKEN:\n exec_params['token'] = TOKEN\n if mounts:\n exec_params['mounts'] = mounts if not execution_params[\"mounts\"] else execution_params[\n \"mounts\"]\n\n elif args.job_type[i] == \"sast\":\n if \"code_path\" in exec_params:\n print(\"Uploading code artifact to Galloper ...\")\n with tempfile.TemporaryFile() as src_file:\n with zipfile.ZipFile(src_file, \"w\", zipfile.ZIP_DEFLATED) as zip_file:\n src_dir = os.path.abspath(\"/code\")\n for dirpath, _, filenames in os.walk(src_dir):\n if dirpath == src_dir:\n rel_dir = \"\"\n else:\n rel_dir = os.path.relpath(dirpath, src_dir)\n zip_file.write(dirpath, arcname=rel_dir)\n for filename in filenames:\n zip_file.write(\n os.path.join(dirpath, filename),\n arcname=os.path.join(rel_dir, filename)\n )\n src_file.seek(0)\n headers = {\n \"Authorization\": f\"Bearer {TOKEN}\"\n }\n url = f\"{GALLOPER_URL}/api/v1/artifact/{PROJECT_ID}/sast/{args.test_id}.zip\"\n requests.post(\n url, headers=headers, files={\n \"file\": (f\"{args.test_id}.zip\", src_file)\n }\n )\n for _ in range(int(args.concurrency[i])):\n task_kwargs = {'job_type': str(args.job_type[i]), 'container': args.container[i],\n 'execution_params': exec_params, 'job_name': args.job_name}\n queue_name = args.channel[i] if len(args.channel) > i else \"default\"\n tasks.append(arbiter.Task(\"execute\", queue=queue_name, task_kwargs=task_kwargs))\n\n if ec2_settings:\n finalizer_queue_name = ec2_settings.pop(\"finalizer_queue_name\")\n tasks.append(arbiter.Task(\"terminate_ec2_instances\", queue=finalizer_queue_name, task_type=\"finalize\",\n task_kwargs=ec2_settings))\n\n if args.job_type[0] in ['perfgun', 'perfmeter']:\n test_details = backend_perf_test_start_notify(args)\n group_id = arb.squad(tasks, callback=arbiter.Task(\"post_process\", queue=args.channel[0],\n task_kwargs=post_processor_args))\n elif args.job_type[0] == \"observer\":\n test_details = frontend_perf_test_start_notify(args)\n group_id = arb.squad(tasks)\n else:\n group_id = arb.squad(tasks)\n test_details = {}\n\n return arb, group_id, test_details\n\n\ndef frontend_perf_test_start_notify(args):\n if GALLOPER_URL:\n exec_params = args.execution_params[0][\"cmd\"] + \" \"\n browser = re.findall('-b (.+?) ', exec_params)\n browser_name = browser[0].split(\"_\")[0].lower()\n browser_version = browser[0].split(\"_\")[1]\n loops = re.findall('-l (.+?) ', exec_params)[0]\n aggregation = re.findall('-a (.+?) 
', exec_params)[0]\n\n data = {\n \"report_id\": BUILD_ID.replace(\"build_\", \"\"),\n \"status\": \"In progress\",\n \"test_name\": args.job_name,\n \"base_url\": \"\",\n \"browser_name\": browser_name,\n \"browser_version\": browser_version,\n \"env\": args.execution_params[0][\"ENV\"],\n \"loops\": loops,\n \"aggregation\": aggregation,\n \"time\": datetime.utcnow().isoformat(\" \").split(\".\")[0]\n }\n headers = {'content-type': 'application/json'}\n if TOKEN:\n headers['Authorization'] = f'bearer {TOKEN}'\n\n response = requests.post(f\"{GALLOPER_URL}/api/v1/observer/{PROJECT_ID}\", json=data, headers=headers)\n try:\n print(response.json()[\"message\"])\n except:\n print(response.text)\n return response.json()\n\n\ndef backend_perf_test_start_notify(args):\n if GALLOPER_URL:\n users_count = 0\n duration = 0\n vusers_var_names = [\"vusers\", \"users\", \"users_count\", \"ramp_users\", \"user_count\"]\n lg_type = JOB_TYPE_MAPPING.get(args.job_type[0], \"other\")\n tests_count = len(args.execution_params) if args.execution_params else 1\n if lg_type == 'jmeter':\n for i in range(tests_count):\n exec_params = args.execution_params[i]['cmd'] + \" \"\n test_type = re.findall('-Jtest_type=(.+?) ', exec_params)\n test_type = test_type[0] if len(test_type) else 'demo'\n environment = re.findall(\"-Jenv_type=(.+?) \", exec_params)\n environment = environment[0] if len(environment) else 'demo'\n test_name = re.findall(\"-Jtest_name=(.+?) \", exec_params)\n test_name = test_name[0] if len(test_name) else 'test'\n duration = re.findall(\"-JDURATION=(.+?) \", exec_params)\n duration = float(duration[0]) if len(duration) else 0\n for each in vusers_var_names:\n if f'-j{each}' in exec_params.lower():\n pattern = f'-j{each}=(.+?) '\n vusers = re.findall(pattern, exec_params.lower())\n users_count += int(vusers[0]) * args.concurrency[i]\n break\n elif lg_type == 'gatling':\n for i in range(tests_count):\n exec_params = args.execution_params[i]\n test_type = exec_params['test_type'] if exec_params.get('test_type') else 'demo'\n test_name = exec_params['test'].split(\".\")[1].lower() if exec_params.get('test') else 'test'\n environment = exec_params['env'] if exec_params.get('env') else 'demo'\n if exec_params.get('GATLING_TEST_PARAMS'):\n if '-dduration' in exec_params['GATLING_TEST_PARAMS'].lower():\n duration = re.findall(\"-dduration=(.+?) \", exec_params['GATLING_TEST_PARAMS'].lower())[0]\n for each in vusers_var_names:\n if f'-d{each}' in exec_params['GATLING_TEST_PARAMS'].lower():\n pattern = f'-d{each}=(.+?) 
'\n vusers = re.findall(pattern, exec_params['GATLING_TEST_PARAMS'].lower())\n users_count += int(vusers[0]) * args.concurrency[i]\n break\n else:\n return {}\n start_time = datetime.utcnow().isoformat(\"T\") + \"Z\"\n if args.test_id:\n test_id = args.test_id\n else:\n test_id = \"\"\n data = {'test_id': test_id, 'build_id': BUILD_ID, 'test_name': test_name, 'lg_type': lg_type, 'type': test_type,\n 'duration': duration, 'vusers': users_count, 'environment': environment, 'start_time': start_time,\n 'missed': 0, 'status': 'In progress'}\n if release_id:\n data['release_id'] = release_id\n\n headers = {'content-type': 'application/json'}\n if TOKEN:\n headers['Authorization'] = f'bearer {TOKEN}'\n if PROJECT_ID:\n url = f'{GALLOPER_URL}/api/v1/reports/{PROJECT_ID}'\n else:\n url = f'{GALLOPER_URL}/api/report'\n\n response = requests.post(url, json=data, headers=headers)\n\n try:\n print(response.json()[\"message\"])\n except:\n print(response.text)\n\n if response.status_code == requests.codes.forbidden:\n print(response.json().get('Forbidden'))\n exit(126)\n return response.json()\n return {}\n\n\ndef get_project_package():\n try:\n url = f\"{GALLOPER_URL}/api/v1/project/{PROJECT_ID}\"\n headers = {'content-type': 'application/json', 'Authorization': f'bearer {TOKEN}'}\n package = requests.get(url, headers=headers).json()[\"package\"]\n except:\n package = \"custom\"\n return package\n\n\ndef start_job_exec(args=None):\n start_job(args)\n exit(0)\n\n\ndef check_ready(result):\n if result and not result.ready():\n return False\n return True\n\n\ndef check_test_is_saturating(test_id=None, deviation=0.02, max_deviation=0.05):\n if test_id and PROJECT_ID and SAMPLER and REQUEST:\n url = f'{GALLOPER_URL}/api/v1/saturation'\n headers = {'Authorization': f'bearer {TOKEN}'} if TOKEN else {}\n headers[\"Content-type\"] = \"application/json\"\n params = {\n \"test_id\": test_id,\n \"project_id\": PROJECT_ID,\n \"sampler\": SAMPLER,\n \"request\": REQUEST,\n \"wait_till\": CALCULATION_DELAY,\n \"max_errors\": MAX_ERRORS,\n \"deviation\": deviation,\n \"max_deviation\": max_deviation,\n \"u_aggr\": U_AGGR\n }\n return requests.get(url, params=params, headers=headers).json()\n return {\"message\": \"Test is in progress\", \"code\": 0}\n\n\ndef track_job(bitter, group_id, test_id=None, deviation=0.02, max_deviation=0.05):\n result = 0\n test_start = time()\n max_duration = -1\n if GALLOPER_URL and PROJECT_ID and TOKEN:\n package = get_project_package()\n max_duration = PROJECT_PACKAGE_MAPPER.get(package)[\"duration\"]\n\n while not bitter.status(group_id)['state'] == 'done':\n sleep(60)\n if CHECK_SATURATION:\n test_status = check_test_is_saturating(test_id, deviation, max_deviation)\n print(\"Status:\")\n print(test_status)\n if test_status.get(\"code\", 0) == 1:\n print(\"Kill job\")\n try:\n bitter.kill_group(group_id)\n except Exception as e:\n print(e)\n print(\"Terminated\")\n result = 1\n else:\n print(\"Still processing ...\")\n if test_was_canceled(test_id) and result != 1:\n print(\"Test was canceled\")\n try:\n bitter.kill_group(group_id)\n except Exception as e:\n print(e)\n print(\"Terminated\")\n result = 1\n if max_duration != -1 and max_duration <= int((time() - test_start)) and result != 1:\n print(f\"Exceeded max test duration - {max_duration} sec\")\n try:\n bitter.kill_group(group_id)\n except Exception as e:\n print(e)\n try:\n bitter.close()\n except Exception as e:\n print(e)\n return result\n\n\ndef test_was_canceled(test_id):\n try:\n if test_id and PROJECT_ID and GALLOPER_URL 
and report_type:\n url = f'{GALLOPER_URL}/api/v1/reports/{PROJECT_ID}/{REPORT_TYPE_MAPPING.get(report_type)}/{test_id}/status'\n headers = {'Authorization': f'bearer {TOKEN}'} if TOKEN else {}\n headers[\"Content-type\"] = \"application/json\"\n status = requests.get(url, headers=headers).json()['message']\n return True if status in [\"Canceled\", \"Finished\"] else False\n return False\n except:\n return False\n\n\ndef _start_and_track(args=None):\n if not args:\n args = arg_parse()\n deviation = DEVIATION if args.deviation == 0 else args.deviation\n max_deviation = MAX_DEVIATION if args.max_deviation == 0 else args.max_deviation\n bitter, group_id, test_details = start_job(args)\n print(\"Job started, waiting for containers to settle ... \")\n track_job(bitter, group_id, test_details.get(\"id\", None), deviation, max_deviation)\n if args.junit:\n print(\"Processing junit report ...\")\n process_junit_report(args)\n if args.job_type[0] in [\"dast\", \"sast\"] and args.quality_gate:\n print(\"Processing security quality gate ...\")\n process_security_quality_gate(args)\n if args.artifact == f\"{BUILD_ID}.zip\":\n from control_tower.git_clone import delete_artifact\n delete_artifact(GALLOPER_URL, TOKEN, PROJECT_ID, args.artifact)\n if globals().get(\"csv_array\"):\n from control_tower.csv_splitter import delete_csv\n for each in globals().get(\"csv_array\"):\n csv_name = list(each.keys())[0].replace(\"tests/\", \"\")\n delete_csv(GALLOPER_URL, TOKEN, PROJECT_ID, csv_name)\n\n\ndef start_and_track(args=None):\n _start_and_track(args)\n exit(0)\n\n\ndef process_security_quality_gate(args):\n # Save jUnit report as file to local filesystem\n junit_report_data = download_junit_report(\n args.job_type[0], f\"{args.test_id}_junit_report.xml\", retry=12\n )\n if junit_report_data:\n with open(os.path.join(args.report_path, f\"junit_report_{args.test_id}.xml\"), \"w\") as rept:\n rept.write(junit_report_data.text)\n # Quality Gate\n quality_gate_data = download_junit_report(\n args.job_type[0], f\"{args.test_id}_quality_gate_report.json\", retry=12\n )\n if not quality_gate_data:\n print(\"No security quality gate data found\")\n return\n quality_gate = loads(quality_gate_data.text)\n if quality_gate[\"quality_gate_stats\"]:\n for line in quality_gate[\"quality_gate_stats\"]:\n print(line)\n if quality_gate[\"fail_quality_gate\"]:\n exit(1)\n\n\ndef process_junit_report(args):\n file_name = \"junit_report_{}.xml\".format(DISTRIBUTED_MODE_PREFIX)\n results_bucket = str(args.job_name).replace(\"_\", \"\").lower()\n junit_report = download_junit_report(results_bucket, file_name, retry=12)\n if junit_report:\n with open(\"{}/{}\".format(args.report_path, file_name), \"w\") as f:\n f.write(junit_report.text)\n\n failed = int(re.findall(\"testsuites .+? failures=\\\"(.+?)\\\"\", junit_report.text)[0])\n total = int(re.findall(\"testsuites .+? tests=\\\"(.+?)\\\"\", junit_report.text)[0])\n errors = int(re.findall(\"testsuites .+? errors=\\\"(.+?)\\\"\", junit_report.text)[0])\n skipped = int(re.findall(\"testsuite .+? 
skipped=\\\"(.+?)\\\"\", junit_report.text)[0])\n print(\"**********************************************\")\n print(\"* Performance testing jUnit report | Carrier *\")\n print(\"**********************************************\")\n print(f\"Tests run: {total}, Failures: {failed}, Errors: {errors}, Skipped: {skipped}\")\n if args.quality_gate:\n rate = round(float(failed / total) * 100, 2) if total != 0 else 0\n if rate > 20:\n print(\"Missed threshold rate is {}\".format(rate), file=sys.stderr)\n exit(1)\n\n\ndef download_junit_report(results_bucket, file_name, retry):\n if PROJECT_ID:\n url = f'{GALLOPER_URL}/api/v1/artifact/{PROJECT_ID}/{results_bucket}/{file_name}'\n else:\n url = f'{GALLOPER_URL}/artifacts/{results_bucket}/{file_name}'\n headers = {'Authorization': f'bearer {TOKEN}'} if TOKEN else {}\n junit_report = requests.get(url, headers=headers, allow_redirects=True)\n if junit_report.status_code != 200 or 'botocore.errorfactory.NoSuchKey' in junit_report.text:\n print(\"Waiting for report to be accessible ...\")\n retry -= 1\n if retry == 0:\n return None\n sleep(10)\n return download_junit_report(results_bucket, file_name, retry)\n return junit_report\n\n\n# if __name__ == \"__main__\":\n# from control_tower.config_mock import BulkConfig\n# args = BulkConfig(\n# bulk_container=[\"getcarrier/perfmeter:latest\"],\n# bulk_params=[{\"cmd\": \"-n -t /mnt/jmeter/FloodIO.jmx -Jtest_type=debug -Jenv_type=debug \"\n# \"-Jinflux.host= -JVUSERS=100 -JDURATION=1200 \"\n# \"-JRAMP_UP=60 -Jtest_name=Flood\"}],\n# job_type=[\"perfmeter\"],\n# job_name='DemoTest',\n# bulk_concurrency=[2]\n# )\n# groups, test_details, post_processor_args = start_job(args)\n# for group in groups:\n# track_job(group, test_details[\"id\"])\n", "id": "7519872", "language": "Python", "matching_score": 7.037619113922119, "max_stars_count": 0, "path": "control_tower/run.py" }, { "content": "import pytest\nfrom time import time\nfrom uuid import uuid4\nfrom os import environ\n\nenviron[\"galloper_url\"] = \"http://example\"\nenviron[\"RABBIT_HOST\"] = \"example\"\nenviron[\"GALLOPER_WEB_HOOK\"] = \"http://example/hook\"\nenviron[\"artifact\"] = \"test.zip\"\nenviron[\"token\"] = \"<PASSWORD>\"\nenviron[\"project_id\"] = \"1\"\nenviron[\"bucket\"] = 'test'\nenviron[\"csv_path\"] = \"age.csv\"\nenviron[\"lg_count\"] = \"5\"\n\nimport mock\nimport requests_mock\nimport argparse\nfrom control_tower.config_mock import BulkConfig\nfrom control_tower import run\n\n\ntest_response = {\"container\": \"getcarrier/perfmeter:latest-5.3\",\n \"execution_params\": \"{\\\"cmd\\\": \\\"-n -t /mnt/jmeter/test.jmx -Jinflux.port=8086 \"\n \"-Jinflux.host=example -Jinflux.username=test \"\n \"-Jinflux.password=<PASSWORD> \"\n \"-Jgalloper_url=https://example -Jinflux.db=test \"\n \"-Jtest_name=Flood -Jcomparison_db=comparison -Jtelegraf_db=telegraf \"\n \"-Jloki_host=http://example -Jloki_port=3100 -Jtest.type=default \"\n \"-JDURATION=60 -JVUSERS=10 -JRAMP_UP=30\\\",\"\n \" \\\"cpu_cores_limit\\\": \\\"1\\\", \"\n \"\\\"memory_limit\\\": \\\"3\\\", \"\n \"\\\"influxdb_host\\\": \\\"example\\\", \"\n \"\\\"influxdb_user\\\": \\\"test\\\", \"\n \"\\\"influxdb_password\\\": \\\"test\\\", \"\n \"\\\"influxdb_comparison\\\": \\\"comparison\\\", \"\n \"\\\"influxdb_telegraf\\\": \\\"telegraf\\\", \"\n \"\\\"loki_host\\\": \\\"http://example\\\", \"\n \"\\\"loki_port\\\": \\\"3100\\\"}\",\n \"cc_env_vars\": {\"RABBIT_HOST\": \"example\",\n \"RABBIT_USER\": \"test\",\n \"RABBIT_PASSWORD\": \"<PASSWORD>\",\n \"RABBIT_VHOST\": \"test\",\n 
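# assumed fixture shape: mirrors the cc_env_vars block Galloper stores\n # alongside a saved backend test config\n 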
\"GALLOPER_WEB_HOOK\": \"https://example/task/1\"},\n \"bucket\": \"tests\",\n \"job_name\": \"test\",\n \"artifact\": \"test.zip\",\n \"job_type\": \"perfmeter\",\n \"concurrency\": 5,\n \"channel\": \"default\",\n \"email\": \"True\",\n \"email_recipients\": \"<EMAIL>\"}\njob_name = 'DemoTest'\n\n\nclass arbiterMock:\n def __init__(self, *args, **kwargs):\n self.squad_uuid = str(uuid4())\n\n def squad(self, *args, **kwargs):\n return self.squad_uuid\n\n\nclass bitter:\n def __init__(self, duration=10):\n self.start_time = time()\n self.duration = duration\n\n def status(self, *args, **kwargs):\n if time() - self.start_time > self.duration:\n return {'state': 'done'}\n return {'state': 'in progress'}\n\n def kill_group(self, *args, **kwargs):\n pass\n\n def close(self):\n pass\n\nclass taskMock:\n def __init__(self, *args, **kwargs):\n self.task_id = str(uuid4())\n\n\ndef test_str2bool():\n assert run.str2bool(\"true\") is True\n assert run.str2bool(\"0\") is False\n try:\n run.str2bool(\"zzz\")\n except argparse.ArgumentTypeError as ex:\n assert str(ex) == 'Boolean value expected.'\n\n\ndef test_str2json():\n assert run.str2json(\"{}\") == {}\n try:\n run.str2json(\"zzz\")\n except argparse.ArgumentTypeError as ex:\n assert str(ex) == 'Json is not properly formatted.'\n\n\n@mock.patch(\"arbiter.Arbiter\")\n@mock.patch(\"arbiter.Task\")\ndef test_start_job(arbiterMock, taskMock):\n args = BulkConfig(\n bulk_container=[],\n bulk_params=[],\n job_type=[],\n job_name=job_name,\n bulk_concurrency=[],\n test_id=1\n )\n with requests_mock.Mocker() as req_mock:\n req_mock.get(f\"{environ['galloper_url']}/api/v1/tests/{environ['project_id']}/backend/{args.test_id}\",\n json=test_response)\n req_mock.post(f\"{environ['galloper_url']}/api/v1/tests/{environ['project_id']}/backend/{args.test_id}\",\n json=test_response)\n req_mock.get(f\"{environ['galloper_url']}/api/v1/tests/{environ['project_id']}/{args.test_id}\",\n json={\"job_type\": \"perfmeter\"})\n req_mock.get(f\"{environ['galloper_url']}/api/v1/project/{environ['project_id']}\", text=\"custom\")\n req_mock.post(f\"{environ['galloper_url']}/api/v1/reports/{environ['project_id']}\", json={\"message\": \"patched\"})\n args = run.append_test_config(args)\n assert all(key in args.execution_params[0] for key in ['cmd', 'cpu_cores_limit', 'memory_limit',\n 'influxdb_host', 'influxdb_user', 'influxdb_password',\n 'influxdb_comparison', 'influxdb_telegraf',\n 'loki_host', 'loki_port'])\n assert args.job_name == job_name\n arb, group_id, test_details = run.start_job(args)\n assert arb.squad.called\n assert len(arb.squad.call_args[0][0]) == int(environ[\"lg_count\"])\n assert 'callback' in arb.squad.call_args[1]\n result = run.track_job(bitter(), str(uuid4()), args.test_id)\n assert result == 0\n\n", "id": "559490", "language": "Python", "matching_score": 3.5780866146087646, "max_stars_count": 0, "path": "tests/test_run.py" }, { "content": "# Copyright 2020 getcarrier.io\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nimport string\nfrom os import 
path\nfrom uuid import uuid4\nfrom json import dumps\nfrom flask import current_app\nfrom sqlalchemy import Column, Integer, String, Text, JSON, ARRAY\n\nfrom galloper.database.db_manager import Base\nfrom galloper.database.abstract_base import AbstractBaseMixin\nfrom galloper.dal.vault import unsecret\nfrom galloper.dal.rabbitmq import get_project_queues\nfrom galloper.constants import JOB_CONTAINER_MAPPING, CURRENT_RELEASE\n\n\nclass PerformanceTests(AbstractBaseMixin, Base):\n __tablename__ = \"performance_tests\"\n id = Column(Integer, primary_key=True)\n project_id = Column(Integer, unique=False, nullable=False)\n test_uid = Column(String(128), unique=True, nullable=False)\n name = Column(String(128), nullable=False)\n parallel = Column(Integer, nullable=False)\n region = Column(String(128), nullable=False)\n bucket = Column(String(128), nullable=False)\n file = Column(String(128), nullable=False)\n entrypoint = Column(String(128), nullable=False)\n runner = Column(String(128), nullable=False)\n reporting = Column(ARRAY(String), nullable=False)\n emails = Column(Text)\n params = Column(JSON)\n env_vars = Column(JSON)\n customization = Column(JSON)\n cc_env_vars = Column(JSON)\n git = Column(JSON)\n last_run = Column(Integer)\n job_type = Column(String(20))\n\n def set_last_run(self, ts):\n self.last_run = ts\n self.commit()\n\n @staticmethod\n def sanitize(val):\n valid_chars = \"_%s%s\" % (string.ascii_letters, string.digits)\n return ''.join(c for c in val if c in valid_chars)\n\n def insert(self):\n if self.runner not in JOB_CONTAINER_MAPPING.keys():\n return False\n self.name = self.sanitize(self.name)\n if not self.test_uid:\n self.test_uid = str(uuid4())\n # each guard below must check the same dict and key it sets, otherwise\n # user-supplied values get silently overwritten by the secret placeholders\n if \"influx.port\" not in self.params.keys():\n self.params[\"influx.port\"] = \"{{secret.influx_port}}\"\n if \"influx.host\" not in self.params.keys():\n self.params[\"influx.host\"] = \"{{secret.influx_ip}}\"\n if \"influx.username\" not in self.params.keys():\n self.params[\"influx.username\"] = \"{{secret.influx_user}}\"\n if \"influx.password\" not in self.params.keys():\n self.params[\"influx.password\"] = \"{{secret.influx_password}}\"\n if \"galloper_url\" not in self.params.keys():\n self.params[\"galloper_url\"] = \"{{secret.galloper_url}}\"\n if \"influx.db\" not in self.params.keys():\n self.params[\"influx.db\"] = JOB_CONTAINER_MAPPING[self.runner]['influx_db']\n if \"test_name\" not in self.params.keys():\n self.params[\"test_name\"] = self.name # TODO: add sanitization\n if \"comparison_db\" not in self.params.keys():\n self.params[\"comparison_db\"] = \"{{secret.comparison_db}}\"\n if \"telegraf_db\" not in self.params.keys():\n self.params[\"telegraf_db\"] = \"{{secret.telegraf_db}}\"\n if \"loki_host\" not in self.params.keys():\n self.params[\"loki_host\"] = \"{{secret.loki_host}}\"\n if \"loki_port\" not in self.params.keys():\n self.params[\"loki_port\"] = \"{{secret.loki_port}}\"\n self.job_type = JOB_CONTAINER_MAPPING[self.runner]['job_type']\n test_type = \"test.type\" if self.job_type == \"perfmeter\" else \"test_type\"\n if test_type not in self.params.keys():\n self.params[test_type] = 'default'\n if self.region == \"\":\n self.region = \"default\"\n self.runner = JOB_CONTAINER_MAPPING[self.runner]['container'] # here because influx_db\n\n super().insert()\n\n def configure_execution_json(self, output='cc', test_type=None, params=None, env_vars=None, reporting=None,\n customization=None, cc_env_vars=None, parallel=None, region=None, execution=False, emails=None):\n pairs = {\n \"customization\": 
[customization, self.customization],\n \"params\": [params, self.params],\n \"env_vars\": [env_vars, self.env_vars],\n \"cc_env_vars\": [cc_env_vars, self.cc_env_vars],\n \"reporting\": [reporting, self.reporting]\n }\n for pair in pairs.keys():\n if not pairs[pair][0]:\n pairs[pair][0] = pairs[pair][1]\n else:\n for each in list(pairs[pair][0].keys()) + list(set(pairs[pair][1].keys()) - set(pairs[pair][0].keys())):\n pairs[pair][0][each] = pairs[pair][0][each] if each in list(pairs[pair][0].keys()) \\\n else pairs[pair][1][each]\n cmd = ''\n if not params:\n params = self.params\n if self.job_type == 'perfmeter':\n entrypoint = self.entrypoint if path.exists(self.entrypoint) else path.join('/mnt/jmeter', self.entrypoint)\n cmd = f\"-n -t {entrypoint}\"\n if \"custom_jmeter_cmd\" in list(env_vars.keys()):\n cmd += f\" {env_vars['custom_jmeter_cmd']}\"\n for key, value in params.items():\n if test_type and key == \"test.type\":\n cmd += f\" -Jtest.type={test_type}\"\n else:\n cmd += f\" -J{key}={value}\"\n execution_json = {\n \"container\": self.runner,\n \"execution_params\": {\n \"cmd\": cmd\n },\n \"cc_env_vars\": {},\n \"bucket\": self.bucket,\n \"job_name\": self.name,\n \"artifact\": self.file,\n \"job_type\": self.job_type,\n \"concurrency\": self.parallel if not parallel else parallel,\n \"channel\": region if region else self.region\n }\n if self.reporting:\n if \"junit\" in self.reporting:\n execution_json[\"junit\"] = \"True\"\n if \"quality\" in self.reporting:\n execution_json[\"quality_gate\"] = \"True\"\n if \"perfreports\" in self.reporting:\n execution_json[\"save_reports\"] = \"True\"\n if \"jira\" in self.reporting:\n execution_json[\"jira\"] = \"True\"\n if \"email\" in self.reporting:\n execution_json[\"email\"] = \"True\"\n if \"rp\" in self.reporting:\n execution_json[\"report_portal\"] = \"True\"\n if \"ado\" in self.reporting:\n execution_json[\"azure_devops\"] = \"True\"\n if emails:\n _emails = self.emails\n for each in emails.split(\",\"):\n if each not in _emails:\n _emails += f\",{each}\"\n execution_json[\"email_recipients\"] = _emails\n else:\n execution_json[\"email_recipients\"] = self.emails\n\n if pairs[\"env_vars\"][0]:\n for key, value in pairs[\"env_vars\"][0].items():\n execution_json[\"execution_params\"][key] = value\n if \"influxdb_host\" not in execution_json[\"execution_params\"].keys():\n execution_json[\"execution_params\"][\"influxdb_host\"] = \"{{secret.influx_ip}}\"\n if \"influxdb_user\" not in execution_json[\"execution_params\"].keys():\n execution_json[\"execution_params\"][\"influxdb_user\"] = \"{{secret.influx_user}}\"\n if \"influxdb_password\" not in execution_json[\"execution_params\"].keys():\n execution_json[\"execution_params\"][\"influxdb_password\"] = <PASSWORD>}}\"\n if \"influxdb_database\" not in execution_json[\"execution_params\"].keys():\n execution_json[\"execution_params\"][\"influxdb_database\"] = \"{{secret.gatling_db}}\"\n if \"influxdb_comparison\" not in execution_json[\"execution_params\"].keys():\n execution_json[\"execution_params\"][\"influxdb_comparison\"] = \"{{secret.comparison_db}}\"\n if \"influxdb_telegraf\" not in execution_json[\"execution_params\"].keys():\n execution_json[\"execution_params\"][\"influxdb_telegraf\"] = \"{{secret.telegraf_db}}\"\n if \"loki_host\" not in execution_json[\"execution_params\"].keys():\n execution_json[\"execution_params\"][\"loki_host\"] = \"{{secret.loki_host}}\"\n if \"loki_port\" not in execution_json[\"execution_params\"].keys():\n 
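# loki_port falls back to a literal default here rather than a\n # {{secret.*}} placeholder like the other connection settings\n 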
execution_json[\"execution_params\"][\"loki_port\"] = \"3100\"\n if pairs[\"cc_env_vars\"][0]:\n for key, value in pairs[\"cc_env_vars\"][0].items():\n execution_json[\"cc_env_vars\"][key] = value\n if \"RABBIT_HOST\" not in execution_json[\"cc_env_vars\"].keys():\n execution_json[\"cc_env_vars\"][\"RABBIT_HOST\"] = \"{{secret.rabbit_host}}\"\n project_queues = get_project_queues(project_id=self.project_id)\n if execution_json[\"channel\"] in project_queues[\"public\"]:\n execution_json[\"cc_env_vars\"][\"RABBIT_USER\"] = \"{{secret.rabbit_user}}\"\n execution_json[\"cc_env_vars\"][\"RABBIT_PASSWORD\"] = \"{{<PASSWORD>}}\"\n execution_json[\"cc_env_vars\"][\"RABBIT_VHOST\"] = \"carrier\"\n else:\n execution_json[\"cc_env_vars\"][\"RABBIT_USER\"] = \"{{secret.rabbit_project_user}}\"\n execution_json[\"cc_env_vars\"][\"RABBIT_PASSWORD\"] = \"{{<PASSWORD>_project_password}}\"\n execution_json[\"cc_env_vars\"][\"RABBIT_VHOST\"] = \"{{secret.rabbit_project_vhost}}\"\n if \"GALLOPER_WEB_HOOK\" not in execution_json[\"cc_env_vars\"].keys():\n execution_json[\"cc_env_vars\"][\"GALLOPER_WEB_HOOK\"] = \"{{secret.post_processor}}\"\n if pairs[\"customization\"][0]:\n for key, value in pairs[\"customization\"][0].items():\n if \"additional_files\" not in execution_json[\"execution_params\"]:\n execution_json[\"execution_params\"][\"additional_files\"] = dict()\n execution_json[\"execution_params\"][\"additional_files\"][key] = value\n if self.git:\n execution_json[\"git\"] = self.git\n if self.job_type == \"perfgun\":\n execution_json[\"execution_params\"]['test'] = self.entrypoint\n execution_json[\"execution_params\"][\"GATLING_TEST_PARAMS\"] = \"\"\n for key, value in params.items():\n execution_json[\"execution_params\"][\"GATLING_TEST_PARAMS\"] += f\"-D{key}={value} \"\n execution_json[\"execution_params\"] = dumps(execution_json[\"execution_params\"])\n if execution:\n execution_json = unsecret(execution_json, project_id=self.project_id)\n if output == 'cc':\n return execution_json\n else:\n return \"docker run -e project_id=%s -e galloper_url=%s -e token=%s\" \\\n \" getcarrier/control_tower:%s --test_id=%s\" \\\n \"\" % (self.project_id, unsecret(\"{{secret.galloper_url}}\", project_id=self.project_id),\n unsecret(\"{{secret.auth_token}}\", project_id=self.project_id), CURRENT_RELEASE, self.test_uid)\n\n def to_json(self, exclude_fields: tuple = ()) -> dict:\n test_param = super().to_json()\n for key in exclude_fields:\n if self.params.get(key):\n del test_param['params'][key]\n elif key in test_param.keys():\n del test_param[key]\n return test_param\n\n\nclass UIPerformanceTests(AbstractBaseMixin, Base):\n __tablename__ = \"ui_performance_tests\"\n id = Column(Integer, primary_key=True)\n project_id = Column(Integer, unique=False, nullable=False)\n test_uid = Column(String(128), unique=True, nullable=False)\n name = Column(String(128), nullable=False)\n bucket = Column(String(128), nullable=False)\n file = Column(String(128), nullable=False)\n entrypoint = Column(String(128), nullable=False)\n runner = Column(String(128), nullable=False)\n region = Column(String(128), nullable=False)\n browser = Column(String(128), nullable=False)\n reporting = Column(ARRAY(String), nullable=False)\n parallel = Column(Integer, nullable=False)\n params = Column(JSON)\n env_vars = Column(JSON)\n customization = Column(JSON)\n git = Column(JSON)\n cc_env_vars = Column(JSON)\n last_run = Column(Integer)\n job_type = Column(String(20))\n emails = Column(Text)\n loops = Column(Integer)\n aggregation = 
Column(String(20))\n\n \n def configure_execution_json(self, output='cc', browser=None, test_type=None, params=None, env_vars=None, reporting=None,\n customization=None, cc_env_vars=None, parallel=None, execution=False):\n\n reports = []\n for report in self.reporting:\n if report:\n reports.append(f\"-r {report}\")\n\n cmd = f\"-sc {self.entrypoint} -l {self.loops} -b {browser} \" \\\n f\"-a {self.aggregation} {' '.join(reports)} -tid {self.test_uid}\"\n\n execution_json = {\n \"container\": self.runner,\n \"execution_params\": {\n \"cmd\": cmd,\n \"REMOTE_URL\": f'{unsecret(\"{{secret.redis_host}}\", project_id=self.project_id)}:4444'\n },\n \"cc_env_vars\": {},\n \"bucket\": self.bucket,\n \"job_name\": self.name,\n \"artifact\": self.file,\n \"job_type\": self.job_type,\n \"test_id\": self.test_uid,\n \"concurrency\": 1,\n \"channel\": self.region\n }\n\n if \"jira\" in self.reporting:\n execution_json[\"execution_params\"][\"JIRA\"] = unsecret(\"{{secret.jira}}\", project_id=self.project_id)\n\n if \"ado\" in self.reporting:\n execution_json[\"execution_params\"][\"ADO\"] = unsecret(\"{{secret.ado}}\", project_id=self.project_id)\n\n if \"quality\" in self.reporting:\n execution_json[\"quality_gate\"] = True\n if \"junit\" in self.reporting:\n execution_json[\"junit\"] = True\n\n if self.git:\n execution_json[\"git\"] = self.git\n\n if self.env_vars:\n for key, value in self.env_vars.items():\n execution_json[\"execution_params\"][key] = value\n\n if self.cc_env_vars:\n for key, value in self.cc_env_vars.items():\n execution_json[\"cc_env_vars\"][key] = value\n if \"RABBIT_HOST\" not in execution_json[\"cc_env_vars\"].keys():\n execution_json[\"cc_env_vars\"][\"RABBIT_HOST\"] = \"{{secret.rabbit_host}}\"\n project_queues = get_project_queues(project_id=self.project_id)\n if execution_json[\"channel\"] in project_queues[\"public\"]:\n execution_json[\"cc_env_vars\"][\"RABBIT_USER\"] = \"{{secret.rabbit_user}}\"\n execution_json[\"cc_env_vars\"][\"RABBIT_PASSWORD\"] = \"{{<PASSWORD>}}\"\n execution_json[\"cc_env_vars\"][\"RABBIT_VHOST\"] = \"carrier\"\n else:\n execution_json[\"cc_env_vars\"][\"RABBIT_USER\"] = \"{{secret.rabbit_project_user}}\"\n execution_json[\"cc_env_vars\"][\"RABBIT_PASSWORD\"] = \"{{<PASSWORD>}}\"\n execution_json[\"cc_env_vars\"][\"RABBIT_VHOST\"] = \"{{secret.rabbit_project_vhost}}\"\n\n if self.customization:\n for key, value in self.customization.items():\n if \"additional_files\" not in execution_json[\"execution_params\"]:\n execution_json[\"execution_params\"][\"additional_files\"] = dict()\n execution_json[\"execution_params\"][\"additional_files\"][key] = value\n execution_json[\"execution_params\"] = dumps(execution_json[\"execution_params\"])\n if execution:\n execution_json = unsecret(execution_json, project_id=self.project_id)\n if output == 'cc':\n current_app.logger.error(execution_json)\n return execution_json\n\n return f'docker run -t --rm -e project_id={self.project_id} ' \\\n f'-e galloper_url={unsecret(\"{{secret.galloper_url}}\", project_id=self.project_id)} ' \\\n f\"-e token=\\\"{unsecret('{{secret.auth_token}}', project_id=self.project_id)}\\\" \" \\\n f'getcarrier/control_tower:{CURRENT_RELEASE} ' \\\n f'--test_id {self.test_uid}'\n", "id": "11002237", "language": "Python", "matching_score": 3.26574969291687, "max_stars_count": 1, "path": "galloper/database/models/performance_tests.py" }, { "content": "from ..shared.constants import CURRENT_RELEASE\n\n\nJOB_CONTAINER_MAPPING = {\n \"v5.4.1\": {\n \"container\": 
f\"getcarrier/perfmeter:{CURRENT_RELEASE}-5.4.1\",\n \"job_type\": \"perfmeter\",\n \"influx_db\": \"{{secret.jmeter_db}}\"\n },\n \"v5.3\": {\n \"container\": f\"getcarrier/perfmeter:{CURRENT_RELEASE}-5.3\",\n \"job_type\": \"perfmeter\",\n \"influx_db\": \"{{secret.jmeter_db}}\"\n },\n \"v5.2.1\": {\n \"container\": f\"getcarrier/perfmeter:{CURRENT_RELEASE}-5.2.1\",\n \"job_type\": \"perfmeter\",\n \"influx_db\": \"{{secret.jmeter_db}}\"\n },\n \"v5.2\": {\n \"container\": f\"getcarrier/perfmeter:{CURRENT_RELEASE}-5.2\",\n \"job_type\": \"perfmeter\",\n \"influx_db\": \"{{secret.jmeter_db}}\"\n },\n \"v5.1.1\": {\n \"container\": f\"getcarrier/perfmeter:{CURRENT_RELEASE}-5.1.1\",\n \"job_type\": \"perfmeter\",\n \"influx_db\": \"{{secret.jmeter_db}}\"\n },\n \"v5.1\": {\n \"container\": f\"getcarrier/perfmeter:{CURRENT_RELEASE}-5.1\",\n \"job_type\": \"perfmeter\",\n \"influx_db\": \"{{secret.jmeter_db}}\"\n },\n \"v5.0\": {\n \"container\": f\"getcarrier/perfmeter:{CURRENT_RELEASE}-5.0\",\n \"job_type\": \"perfmeter\",\n \"influx_db\": \"{{secret.jmeter_db}}\"\n },\n \"v4.0\": {\n \"container\": f\"getcarrier/perfmeter:{CURRENT_RELEASE}-4.0\",\n \"job_type\": \"perfmeter\",\n \"influx_db\": \"{{secret.jmeter_db}}\"\n },\n \"v3.1\": {\n \"container\": f\"getcarrier/perfgun:{CURRENT_RELEASE}-3.1\",\n \"job_type\": \"perfgun\",\n \"influx_db\": \"{{secret.gatling_db}}\"\n },\n \"v2.3\": {\n \"container\": f\"getcarrier/perfgun:{CURRENT_RELEASE}-2.3\",\n \"job_type\": \"perfgun\",\n \"influx_db\": \"{{secret.gatling_db}}\"\n }\n}\n\nJOB_TYPE_MAPPING = {\n \"perfmeter\": \"jmeter\",\n \"perfgun\": \"gatling\",\n \"free_style\": \"other\",\n \"observer\": \"observer\",\n \"dast\": \"dast\",\n \"sast\": \"sast\",\n}\n", "id": "9490282", "language": "Python", "matching_score": 0.010005827993154526, "max_stars_count": 0, "path": "constants.py" }, { "content": "import pytest\nimport hashlib\nimport requests_mock\nfrom perfreporter.reporter import Reporter\nfrom perfreporter.ado_reporter import ADOReporter\nimport tests.utils.constants as c\n\nreporter = Reporter(config_file=\"tests/utils/config.yaml\")\n\nado_reporter = ADOReporter(ado_config={\"org\": \"my_org\", \"project\": \"my_project\", \"pat\": \"my_pat\", \"team\": \"my_team\"},\n args=c.ARGS)\n\n\ndef test_parse_config_file():\n args = {}\n rp_service, jira_service = reporter.parse_config_file(args)\n assert rp_service is not None\n assert jira_service is not None\n assert rp_service.rp_url == \"https://rp.com\"\n assert jira_service.url == \"https://jira.com\"\n assert not jira_service.valid\n\n\ndef test_get_jira_service():\n args = {}\n jira_core_config = {\n \"jira_url\": \"https://jira.com\",\n \"jira_login\": \"my_login\",\n \"jira_password\": \"<PASSWORD>\",\n \"jira_project\": \"my_project\",\n \"issue_type\": \"Bug\"\n }\n jira_additional_config = {\n \"check_functional_errors\": \"True\",\n \"check_performance_degradation\": \"True\",\n \"check_missed_thresholds\": \"True\",\n \"performance_degradation_rate\": 20,\n \"missed_thresholds_rate\": 50,\n \"jira_labels\": \"performance, api\",\n \"jira_watchers\": \"\",\n \"jira_epic_key\": \"\"\n }\n jira_service = reporter.get_jira_service(args, jira_core_config, jira_additional_config)\n assert jira_service is not None\n assert jira_service.url == \"https://jira.com\"\n assert not jira_service.valid\n\n\ndef test_get_rp_service():\n args = {}\n rp_core_config = {\n \"rp_host\": \"https://rp.com\",\n \"rp_token\": \"<PASSWORD>\",\n \"rp_project\": \"my_project\"\n }\n 
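# assumed to mirror the reporting options in tests/utils/config.yaml that\n # reporter.parse_config_file consumes\n 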
rp_additional_config = {\n \"rp_launch_name\": \"carrier\",\n \"check_functional_errors\": \"True\",\n \"check_performance_degradation\": \"True\",\n \"check_missed_thresholds\": \"True\",\n \"performance_degradation_rate\": 20,\n \"missed_thresholds_rate\": 50\n }\n rp_service = reporter.get_rp_service(args, rp_core_config, rp_additional_config)\n assert rp_service is not None\n assert rp_service.rp_url == \"https://rp.com\"\n\n\ndef test_jira_create_functional_error_description():\n args = {}\n rp_service, jira_service = reporter.parse_config_file(args)\n functional_error_description = jira_service.create_functional_error_description(c.error, c.ARGS)\n assert len(functional_error_description) == 2361\n for each in [\"*Request name*: Step5\", \"*HTTP Method*: GET\", \"*Request URL*: https://challengers.flood.io/done\",\n \"*Request headers*: {code}Connection:keep-alive\", \"*Error count*: 2;\", \"*Response code*: 200;\"]:\n assert each in functional_error_description\n\n\ndef test_jira_create_performance_degradation_description():\n args = {}\n rp_service, jira_service = reporter.parse_config_file(args)\n performance_degradation_description = jira_service.create_performance_degradation_description(c.baseline_rate,\n c.compare_with_baseline,\n c.ARGS)\n assert len(performance_degradation_description) == 528\n for each in [\"Performance degradation in test: Flood\", \"Test performance degradation is 62.5%\",\n \"\\\"Step4\\\" reached 420 ms by pct95. Baseline 415 ms.\", \"\\\"Step3\\\" reached 425 ms by pct95.\",\n \"\\\"Step2\\\" reached 431 ms by pct95.\", \"\\\"Step1\\\" reached 428 ms by pct95.\"]:\n assert each in performance_degradation_description\n\n\ndef test_jira_create_missed_thresholds_description():\n args = {}\n rp_service, jira_service = reporter.parse_config_file(args)\n missed_thresholds_description = jira_service.create_missed_thresholds_description(c.threshold_rate,\n c.compare_with_thresholds,\n c.ARGS)\n assert len(missed_thresholds_description) == 582\n for each in [\"Missed thresholds in test: Flood\", \"Percentage of requests exceeding the threshold was 40.0%\",\n \"\\\"All\\\" error_rate % with value 5.26 % exceeded threshold of 5.0 %\",\n \"\\\"Step1\\\" response_time ms with value 428 ms exceeded threshold of 200.0 ms\"]:\n assert each in missed_thresholds_description\n\n\ndef test_jira_get_functional_error_hash_code():\n args = {}\n rp_service, jira_service = reporter.parse_config_file(args)\n item_hash_code = jira_service.get_functional_error_hash_code(c.error, c.ARGS)\n assert item_hash_code == hashlib.sha256(c.error_string.strip().encode('utf-8')).hexdigest()\n\n\ndef test_rp_create_project():\n args = {}\n rp_service, jira_service = reporter.parse_config_file(args)\n with requests_mock.Mocker() as mock:\n mock.get(c.rp_get_project_request, status_code=404)\n mock.post(c.rp_url, status_code=204)\n rp_service.create_project()\n assert mock.call_count == 2\n\n\ndef test_rp_html_decode():\n args = {}\n rp_service, jira_service = reporter.parse_config_file(args)\n decoded_string = rp_service.html_decode(c.html_str)\n assert decoded_string == c.decoded_html_string\n\n\ndef test_ado_create_functional_error_description():\n functional_error_description = ado_reporter.create_functional_error_description(c.error, c.ARGS)\n assert len(functional_error_description) == 3214\n for each in [\"<strong>Request name</strong>: Step5<br>\", \"HTTP Method</strong>: GET\",\n \"<strong>Request URL</strong>: https://challengers.flood.io/done<br>\",\n \"<strong>Request 
headers</strong>: <br>Connection:keep-alive<br>Accept-Language:ru-RU,ru;q=0.8\"]:\n assert each in functional_error_description\n\n\ndef test_ado_create_performance_degradation_description():\n performance_degradation_description = ado_reporter.create_performance_degradation_description(c.baseline_rate,\n c.compare_with_baseline,\n \"123\",\n c.ARGS)\n assert len(performance_degradation_description) == 434\n for each in [\"Test performance degradation is 62.5% compared to the baseline\",\n \"\\\"Step5_Get_Code\\\" reached 208 ms by pct95. Baseline 199 ms.\",\n \"\\\"Step4\\\" reached 420 ms by pct95. Baseline 415 ms.\", \"\\\"Step3\\\" reached 425 ms by pct95.\"]:\n assert each in performance_degradation_description\n\n\ndef test_ado_create_missed_thresholds_description():\n missed_thresholds_description = ado_reporter.create_missed_thresholds_description(c.threshold_rate,\n c.compare_with_thresholds,\n c.ARGS)\n assert len(missed_thresholds_description) == 871\n for each in [\"Percentage of requests exceeding the threshold was 40.0%.\",\n \"\\\"All\\\" error_rate % with value 5.26 % exceeded threshold of 5.0 %\",\n \"\\\"Step1\\\" response_time ms with value 428 ms exceeded threshold of 200.0 ms\"]:\n assert each in missed_thresholds_description\n\n\ndef test_ado_get_functional_error_hash_code():\n item_hash_code = ado_reporter.get_functional_error_hash_code(c.error, c.ARGS)\n assert item_hash_code == hashlib.sha256(c.error_string.strip().encode('utf-8')).hexdigest()\n", "id": "4957391", "language": "Python", "matching_score": 3.620400905609131, "max_stars_count": 0, "path": "tests/test_reporters.py" }, { "content": "import pytest\nimport requests_mock\nimport os\nfrom perfreporter.data_manager import DataManager\nimport tests.utils.constants as c\nfrom perfreporter.junit_reporter import JUnit_reporter\n\n\ngalloper_url = \"http://example\"\ntoken = \"<PASSWORD>\"\nproject_id = 1\ndata_manager = DataManager(c.ARGS, galloper_url, token, project_id)\n\n\ndef test_compare_with_baseline():\n performance_degradation_rate, compare_with_baseline = data_manager.compare_with_baseline(c.BASELINE, c.TEST_DATA)\n print(performance_degradation_rate)\n print(compare_with_baseline)\n failed_requests = []\n for each in compare_with_baseline:\n failed_requests.append(each[\"request_name\"])\n assert performance_degradation_rate == 62.5\n assert all(req in ['Step1', 'Step2', 'Step3', 'Step4', 'Step5_Get_Code'] for req in failed_requests)\n\n\ndef test_get_thresholds_and_create_junit_report():\n with requests_mock.Mocker() as mock:\n mock.get(f\"{galloper_url}/api/v1/thresholds/{project_id}/backend?name={c.ARGS['simulation']}&\"\n f\"environment={c.ARGS['env']}&order=asc\", json=c.THRESHOLDS, status_code=200)\n mock.get(c.ALL_METRICS_REQUEST, json=c.ALL_METRICS_RESPONSE)\n mock.get(c.TP_REQUEST, json=c.TP_RESPONSE)\n total_checked, missed_threshold_rate, compare_with_thresholds = data_manager.get_thresholds(test=c.TEST_DATA,\n add_green=True)\n failed_requests = []\n print(missed_threshold_rate)\n print(compare_with_thresholds)\n for each in compare_with_thresholds:\n if each[\"threshold\"] != \"green\":\n failed_requests.append(each[\"request_name\"])\n assert missed_threshold_rate == 40.0\n assert all(req in ['Step1', 'Step5', 'All', 'all'] for req in failed_requests)\n\n JUnit_reporter.create_report(compare_with_thresholds, \"1\")\n assert os.path.exists(\"/tmp/junit_report_1.xml\")\n os.remove(\"/tmp/junit_report_1.xml\")\n\n\ndef test_write_comparison_data_to_influx():\n with requests_mock.Mocker() as 
mock:\n mock.get(c.user_count_request, json=c.user_count_response)\n mock.get(c.total_requests_count_request, json=c.total_requests_count_response)\n mock.get(c.request_names_request, json=c.request_names_response)\n mock.get(c.first_request, json=c.first_response)\n mock.get(c.last_request, json=c.last_response)\n mock.get(c.response_time_request.format(\"Home_Page\"), json=c.home_page_response_time_response)\n mock.get(c.response_time_request.format(\"Step1\"), json=c.step1_response_time_response)\n mock.get(c.response_time_request.format(\"Step2\"), json=c.step2_response_time_response)\n mock.get(c.response_time_request.format(\"Step3\"), json=c.step3_response_time_response)\n mock.get(c.response_time_request.format(\"Step4\"), json=c.step4_response_time_response)\n mock.get(c.response_time_request.format(\"Step5\"), json=c.step5_response_time_response)\n mock.get(c.response_time_request.format(\"Step5_Get_Code\"), json=c.step5_get_code_response_time_response)\n\n for each in [\"Home_Page\", \"Step1\", \"Step2\", \"Step3\", \"Step4\", \"Step5\", \"Step5_Get_Code\"]:\n mock.get(c.methods_request.format(each), json=c.methods_response)\n mock.get(c.ko_count_request.format(each), json=c.empty_response)\n if each == \"Step5\":\n mock.get(c.ok_count_request.format(each), json=c.step5_total_response)\n mock.get(c.total_request.format(each), json=c.step5_total_response)\n else:\n mock.get(c.ok_count_request.format(each), json=c.total_response)\n mock.get(c.total_request.format(each), json=c.total_response)\n\n # mock status codes\n for each in [\"Home_Page\", \"Step1\", \"Step2\", \"Step3\", \"Step4\", \"Step5\", \"Step5_Get_Code\"]:\n mock.get(c.nan_status_code_request.format(each), json=c.empty_response)\n for code in [1, 2, 3, 4, 5]:\n if code == 2:\n if each == \"Step5\":\n mock.get(c.status_code_request.format(each, code), json=c.step5_total_response)\n else:\n mock.get(c.status_code_request.format(each, code), json=c.total_response)\n else:\n mock.get(c.status_code_request.format(each, code), json=c.empty_response)\n\n mock.register_uri(requests_mock.POST, \"http://localhost:8086/write\", status_code=204)\n users_count, duration, response_times = data_manager.write_comparison_data_to_influx()\n\n assert users_count == 1\n assert duration == 29\n assert response_times[\"min\"] == 206.0\n assert response_times[\"max\"] == 1121.0\n assert response_times[\"mean\"] == 401.0\n assert response_times[\"pct50\"] == 416\n assert response_times[\"pct75\"] == 420\n assert response_times[\"pct90\"] == 424\n assert response_times[\"pct95\"] == 461\n assert response_times[\"pct99\"] == 989\n\n\ndef test_get_baseline():\n with requests_mock.Mocker() as mock:\n mock.get(f\"{galloper_url}/api/v1/baseline/{project_id}?test_name=Flood&env=demo\", json={\"baseline\": c.BASELINE})\n baseline = data_manager.get_baseline()\n assert baseline == c.BASELINE\n\n\ndef test_get_last_build():\n with requests_mock.Mocker() as mock:\n mock.get(c.last_build_request, json=c.last_build_response)\n last_build = data_manager.get_last_build()\n assert last_build == c.last_build\n", "id": "2299756", "language": "Python", "matching_score": 4.010860919952393, "max_stars_count": 0, "path": "tests/test_data_manager.py" }, { "content": "import pytest\nimport os\nimport shutil\nfrom json import loads\nfrom perfreporter.post_processor import PostProcessor\nfrom perfreporter.junit_reporter import JUnit_reporter\n\npost_processor = PostProcessor(config_file={})\nresults = {'requests':\n {'Home_Page': {'request_name': 'Home_Page', 
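# fixture appears to be aggregated from tests/utils/jmeter.jtl - throughput\n # 0.62 and error_rate 10.0 below match what test_jtl_parser expects\n 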
'response_time': 1135, 'OK': 3, 'KO': 0},\n 'Step1': {'request_name': 'Step1', 'response_time': 422, 'OK': 3, 'KO': 0},\n 'Step2': {'request_name': 'Step2', 'response_time': 434, 'OK': 3, 'KO': 0},\n 'Step3': {'request_name': 'Step3', 'response_time': 423, 'OK': 3, 'KO': 0},\n 'Step4': {'request_name': 'Step4', 'response_time': 423, 'OK': 3, 'KO': 0},\n 'Step5_Get_Code': {'request_name': 'Step5_Get_Code', 'response_time': 217, 'OK': 3, 'KO': 0},\n 'Step5': {'request_name': 'Step5', 'response_time': 436, 'OK': 0, 'KO': 2}},\n 'throughput': 0.62,\n 'error_rate': 10.0}\n\n\ndef test_aggregate_errors():\n errors = []\n with open(\"tests/utils/aggregated_errors.json\", \"r\") as f:\n error = loads(f.read())\n errors.append(error)\n errors.append(error)\n\n aggregated_errors = post_processor.aggregate_errors(errors)\n assert aggregated_errors[\"Step5_GET_200\"][\"Error count\"] == 4\n\n\ndef test_calculate_thresholds_and_create_junit_report():\n thresholds = post_processor.calculate_thresholds(results)\n assert len(thresholds) == 9\n for i in range(3):\n assert thresholds[i][\"status\"] == \"FAILED\"\n for i in range(3, 9):\n assert thresholds[i][\"status\"] == \"PASSED\"\n\n os.mkdir(\"/tmp/reports\")\n JUnit_reporter.process_report(results['requests'], thresholds)\n assert os.path.exists('/tmp/reports/jmeter.xml')\n shutil.rmtree('/tmp/reports')\n", "id": "11731935", "language": "Python", "matching_score": 2.892432451248169, "max_stars_count": 0, "path": "tests/test_post_processor.py" }, { "content": "import pytest\nfrom perfreporter.utils import calculate_appendage\n\n\ndef test_utils():\n appendage = calculate_appendage(\"throughput\")\n assert appendage == \" RPS\"\n appendage = calculate_appendage(\"response_time\")\n assert appendage == \" ms\"\n appendage = calculate_appendage(\"error_rate\")\n assert appendage == \" %\"\n", "id": "1059262", "language": "Python", "matching_score": 0.7291030287742615, "max_stars_count": 0, "path": "tests/test_utils.py" }, { "content": "import pytest\nfrom perfreporter.error_parser import ErrorLogParser\n\n\ndef test_error_parser():\n args = {\"error_logs\": \"tests/utils/\", \"simulation\": \"error\"}\n error_parser = ErrorLogParser(args)\n aggregated_errors = error_parser.parse_errors()\n assert aggregated_errors[\"Step5_GET_200\"][\"Error count\"] == 5\n", "id": "526247", "language": "Python", "matching_score": 1.7238702774047852, "max_stars_count": 0, "path": "tests/test_error_parser.py" }, { "content": "import pytest\nfrom perfreporter.jtl_parser import JTLParser\n\n\ndef test_parse_jtl():\n jtl_parser = JTLParser()\n results = jtl_parser.parse_jtl(log_file=\"tests/utils/jmeter.jtl\")\n assert results[\"throughput\"] == 0.62\n assert results[\"error_rate\"] == 10.0\n assert all(key in list(results[\"requests\"].keys()) for key in ['Home_Page', 'Step1', 'Step2', 'Step3', 'Step4',\n 'Step5_Get_Code', 'Step5'])\n", "id": "11833652", "language": "Python", "matching_score": 0.24104247987270355, "max_stars_count": 0, "path": "tests/test_jtl_parser.py" }, { "content": "import base64\n\nimport requests\n\nfrom observer_hub.constants import check_ui_performance\n\n\nclass PerfAgent(object):\n\n def __init__(self, host, session_id):\n self.host = host\n self.session_id = session_id\n\n def __execute_script(self, script):\n content = {\"script\": script, \"args\": []}\n\n url = f'http://{self.host}/wd/hub/session/{self.session_id}/execute/sync'\n res = requests.post(url=url, json=content)\n return res.json()['value']\n\n def get_performance_timing(self):\n 
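# Read window.performance.timing from the remote browser via the WebDriver synchronous-script endpoint.\n 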
return self.__execute_script(\"return performance.timing\")\n\n def page_title(self):\n res = requests.get(f'http://{self.host}/wd/hub/session/{self.session_id}/title')\n return res.json()['value']\n\n def get_dom_size(self):\n return self.__execute_script(\"return document.getElementsByTagName('*').length\")\n\n def get_performance_metrics(self):\n return self.__execute_script(check_ui_performance)\n\n def get_performance_entities(self):\n return self.__execute_script(\"return performance.getEntriesByType('resource')\")\n\n def get_current_url(self):\n res = requests.get(f'http://{self.host}/wd/hub/session/{self.session_id}/url')\n return res.json()['value']\n\n def take_screenshot(self, filename):\n res = requests.get(f'http://{self.host}/wd/hub/session/{self.session_id}/screenshot')\n encoded_data = res.json()['value']\n imgdata = base64.b64decode(encoded_data)\n with open(filename, 'wb') as f:\n f.write(imgdata)\n return filename\n\n def get_page_headers(self):\n script = \"\"\"var req = new XMLHttpRequest();\nreq.open('GET', document.location, false);\nreq.send(null);\nreturn req.getAllResponseHeaders().toLowerCase();\"\"\"\n headers = self.__execute_script(script)\n headers_json = dict()\n for line in headers.strip().split('\\n'):\n line_arr = line.split(\":\")\n try:\n headers_json[line_arr[0].strip()] = line_arr[1].strip()\n except IndexError:\n continue\n return headers_json\n", "id": "5080106", "language": "Python", "matching_score": 2.8529627323150635, "max_stars_count": 0, "path": "observer_hub/perf_agent.py" }, { "content": "import os\nfrom urllib.parse import urlparse\nfrom uuid import uuid4\n\nfrom observer_hub.constants import SCREENSHOTS_PATH\nfrom observer_hub.db import get_from_storage, save_to_storage\nfrom observer_hub.models.execution_result import ExecutionResult\nfrom observer_hub.perf_agent import PerfAgent\nfrom observer_hub.processors.results_processor import compute_results_for_simple_page, compute_results_for_spa\nfrom observer_hub.util import is_performance_entities_changed, is_dom_changed, logger\n\n\ndef process_request(original_request, host, session_id, start_time, locators, commands):\n perf_agent = PerfAgent(host, session_id)\n\n performance_timing = perf_agent.get_performance_timing()\n if 'loadEventEnd' not in performance_timing.keys():\n return ExecutionResult()\n\n load_event_end = performance_timing['loadEventEnd']\n data = get_from_storage(session_id)\n\n results = None\n screenshot_path = None\n results_type = \"page\"\n\n if data is None or data['load_event_end'] != load_event_end:\n results = compute_results_for_simple_page(perf_agent)\n results['info']['title'] = perf_agent.page_title()\n dom = perf_agent.get_dom_size()\n\n save_to_storage(session_id, {\n \"dom_size\": dom,\n \"results\": results,\n \"load_event_end\": load_event_end,\n \"perf_entities\": []\n })\n\n results['info']['testStart'] = start_time\n results[\"info\"][\"url\"] = perf_agent.get_current_url()\n results[\"info\"][\"headers\"] = perf_agent.get_page_headers()\n\n else:\n perf_entities = data['perf_entities']\n old_dom = data['dom_size']\n previous_results = data['results']\n latest_pef_entries = perf_agent.get_performance_entities()\n\n is_entities_changed = is_performance_entities_changed(perf_entities, latest_pef_entries)\n new_dom = perf_agent.get_dom_size()\n if is_entities_changed and is_dom_changed(old_dom, new_dom):\n latest_results = perf_agent.get_performance_metrics()\n latest_results['info']['testStart'] = start_time\n results = 
compute_results_for_spa(previous_results, latest_results)\n\n save_to_storage(session_id, {\n \"dom_size\": new_dom,\n \"results\": latest_results,\n \"load_event_end\": load_event_end,\n \"perf_entities\": latest_pef_entries\n })\n\n results_type = \"action\"\n\n page_identifier = None\n if results:\n current_url = perf_agent.get_current_url()\n os.makedirs(SCREENSHOTS_PATH, exist_ok=True)\n screenshot_path = perf_agent.take_screenshot(f\"{SCREENSHOTS_PATH}/{uuid4()}.png\")\n page_identifier = get_page_identifier(current_url, results['info']['title'], original_request, locators,\n session_id)\n\n return ExecutionResult(page_identifier, results, screenshot_path, results_type, commands)\n\n\ndef get_page_identifier(current_url, title, original_request, locators, session_id):\n parsed_url = urlparse(current_url)\n logger.info(f\"Get page identifier {original_request.path_components}\")\n\n if original_request.method == \"DELETE\":\n locator = __find_actionable_locator(locators, len(locators))\n return f\"{title}:{parsed_url.path}@{locator['action']}({locator['using']}={locator['value']})\"\n\n if original_request.method == \"POST\" and original_request.path.endswith('/url'):\n locator = __find_actionable_locator(locators, len(locators))\n return f\"{title}:{parsed_url.path}@{locator['action']}({locator['using']}={locator['value']})\"\n\n current_element_id = original_request.path_components[5]\n\n if len(locators.keys()) == 2 and list(locators.keys())[0] == \"open\":\n url = locators['open']\n return f\"{title}:{parsed_url.path}@open({url})\"\n\n elements = list(locators.keys())\n current_element_index = elements.index(current_element_id)\n locator = __find_actionable_locator(locators, current_element_index)\n\n return f\"{title}:{parsed_url.path}@{locator['action']}({locator['using']}={locator['value']})\"\n\n\ndef __find_actionable_locator(locators, current_element_index):\n locator = {}\n for v in reversed(list(locators.values())[:current_element_index]):\n if isinstance(v, dict) and v.get('action') == 'click':\n locator = v\n break\n\n return locator\n", "id": "10371557", "language": "Python", "matching_score": 3.6787121295928955, "max_stars_count": 0, "path": "observer_hub/processors/request_processors.py" }, { "content": "import hashlib\nimport json\nimport logging\nimport math\nimport os\nfrom datetime import datetime\nfrom shutil import rmtree\nfrom time import sleep\n\nimport pytz\nfrom deepdiff import DeepDiff\nimport requests\nfrom pytz import UnknownTimeZoneError\n\nfrom observer_hub.constants import CONFIG_PATH, REPORT_PATH\n\nlogger = logging.getLogger('Observer hub')\n\nhandler = logging.StreamHandler()\nformatter = logging.Formatter('[%(name)s] - %(message)s')\nhandler.setFormatter(formatter)\nlogger.addHandler(handler)\nlogger.setLevel(logging.INFO)\n\n\ndef wait_for_agent(host, port):\n for _ in range(120):\n try:\n if requests.get(f'http://{host}:{port}').content == b'OK':\n break\n except:\n pass\n sleep(0.1)\n\n\ndef wait_for_hub(host, port):\n for _ in range(120):\n try:\n if requests.get(f'http://{host}:{port}/wd/hub/status').status_code == 200:\n break\n except:\n pass\n sleep(0.1)\n\n\ndef get_desired_capabilities(original_request):\n content = json.loads(original_request.content.decode('utf-8'))\n return content['desiredCapabilities']\n\n\ndef read_config():\n with open(CONFIG_PATH) as f:\n return json.load(f)\n\n\ndef is_actionable(command):\n return \"/click\" in command\n\n\ndef is_performance_entities_changed(old_entities, latest_entries):\n ddiff = 
DeepDiff(old_entities, latest_entries, ignore_order=True)\n    if not ddiff:\n        return False\n\n    # DeepDiff omits keys for unchanged categories, so use .get() to avoid a KeyError\n    if ddiff.get('iterable_item_added') or ddiff.get('iterable_item_removed'):\n        return True\n\n    return False\n\n\ndef is_dom_changed(old_dom, new_dom):\n    return old_dom != new_dom\n\n\ndef filter_thresholds_for(name, arr):\n    test_scope = [x for x in arr if x['scope'] == name]\n    every_scope = [x for x in arr if x['scope'] == 'every']\n    return list({x['target']: x for x in every_scope + test_scope}.values())\n\n\ndef percentile(data, percentile):\n    size = len(data)\n    return sorted(data)[int(math.ceil((size * percentile) / 100)) - 1]\n\n\ndef is_values_match(actual, comparison, expected):\n    if comparison == 'gte':\n        return actual >= expected\n    elif comparison == 'lte':\n        return actual <= expected\n    elif comparison == 'gt':\n        return actual > expected\n    elif comparison == 'lt':\n        return actual < expected\n    elif comparison == 'eq':\n        return actual == expected\n    return False\n\n\ndef get_aggregated_value(aggregation, metrics):\n    if aggregation == 'max':\n        return max(metrics), metrics\n    elif aggregation == 'min':\n        return min(metrics), metrics\n    elif aggregation == 'avg':\n        return round(sum(metrics) / len(metrics), 2), metrics\n    elif aggregation == 'pct95':\n        return percentile(metrics, 95), metrics\n    elif aggregation == 'pct50':\n        return percentile(metrics, 50), metrics\n    else:\n        raise Exception(f\"No such aggregation {aggregation}\")\n\n\ndef flatten_list(l):\n    return [item for sublist in l for item in sublist]\n\n\ndef closest(lst, val):\n    return lst[min(range(len(lst)), key=lambda i: abs(lst[i] - val))]\n\n\ndef clean_up_data(results, junit_report_name):\n    logger.info(\"Cleaning up generated report data...\")\n    __remove_file(f\"{REPORT_PATH}/junit/{junit_report_name}\")\n\n    for execution_result in results:\n        rmtree(execution_result.video_folder, ignore_errors=True)\n        __remove_file(execution_result.screenshot_path)\n        __remove_file(execution_result.report.path)\n\n\ndef __remove_file(path):\n    try:\n        os.remove(path)\n    except FileNotFoundError:\n        pass\n\n\ndef request_to_command(original_request, locators):\n    content = json.loads(original_request.content.decode('utf-8'))\n    session_id = original_request.path_components[3][32:]\n    command = {}\n    if original_request.path.endswith(\"/url\"):\n        command = {\n            \"command\": \"open\",\n            \"target\": content['url'],\n            \"value\": \"\"\n        }\n    if original_request.path.endswith(\"/click\"):\n        locator = locators[session_id][original_request.path_components[5]]\n        command = {\n            \"command\": \"click\",\n            \"target\": locator['value'],\n            \"value\": \"\"\n        }\n\n    return session_id, command\n\n\ndef get_hash(data):\n    return hashlib.md5(data.encode('utf-8')).hexdigest()\n\n\ndef current_time(tz):\n    try:\n        return datetime.now(tz=pytz.timezone(tz))\n    except UnknownTimeZoneError:\n        logger.warning(f\"Wrong timezone {tz}. 
Defaulting to UTC\")\n return datetime.now(tz=pytz.timezone(\"UTC\"))\n\n\ndef mark_element_actionable(original_request, locators):\n session_id = original_request.path_components[3][32:]\n locators_list = locators[session_id]\n current_element_id = original_request.path_components[5]\n command = original_request.path_components[6]\n locators_list[current_element_id]['action'] = command\n", "id": "4264962", "language": "Python", "matching_score": 4.196933746337891, "max_stars_count": 0, "path": "observer_hub/util.py" }, { "content": "import json\nfrom datetime import datetime\nimport docker\nfrom apscheduler.schedulers.background import BackgroundScheduler\nfrom mitmproxy import http\nfrom mitmproxy import proxy, options\nfrom mitmproxy.tools.dump import DumpMaster\n\nfrom observer_hub.assertions import assert_test_thresholds\nfrom observer_hub.constants import TIMEOUT, SCHEDULER_INTERVAL, SELENIUM_PORT, VIDEO_PORT, SCREEN_RESOLUTION, QUOTA, \\\n VNC_PORT, PORT\nfrom observer_hub.docker_client import DockerClient\nfrom observer_hub.integrations.galloper_api_client import get_thresholds\nfrom observer_hub.integrations.galloper import notify_on_test_start\nfrom observer_hub.models.collector import CommandsCollector, LocatorsCollector, ExecutionResultsCollector, \\\n ResultsCollector\nfrom observer_hub.processors.request_processors import process_request\nfrom observer_hub.processors.results_processor import process_results_for_page, process_results_for_test\nfrom observer_hub.reporters.azure_devops import notify_azure_devops\nfrom observer_hub.reporters.email_reporter import init_email_notification\nfrom observer_hub.reporters.jira_reporter import notify_jira\nfrom observer_hub.util import wait_for_agent, get_desired_capabilities, read_config, wait_for_hub, is_actionable, \\\n logger, clean_up_data, request_to_command, get_hash, mark_element_actionable\nfrom observer_hub.video import stop_recording, start_video_recording\nfrom observer_hub.wait import wait_for_page_to_load\n\ndocker_client = DockerClient(docker.from_env())\nscheduler = BackgroundScheduler()\nconfig = read_config()\n\nmapping = {}\nexecution_results = ExecutionResultsCollector()\nlocators = LocatorsCollector()\ncommands = CommandsCollector()\n\n\ndef container_inspector_job():\n deleted = []\n for k, v in mapping.items():\n if v['container_id'] is None:\n deleted.append(k)\n continue\n\n if 'lastly_used' not in v.keys():\n continue\n lastly_used = datetime.strptime(v['lastly_used'], '%Y-%m-%d %H:%M:%S.%f')\n now = datetime.now()\n diff = (now - lastly_used).seconds\n container_id = v['container_id']\n\n logger.info(f\"Container {container_id} was lastly used {diff} seconds ago\")\n\n if diff >= TIMEOUT and v['session_id'] in execution_results.keys():\n logger.info(f\"Container {container_id} usage time exceeded timeout!\")\n docker_client.get_container(container_id).remove(force=True)\n logger.info(f\"Container {container_id} was deleted!\")\n\n results = execution_results[v['session_id']]\n junit_report_name = generate_reports(results, v)\n\n deleted.append(k)\n locators.pop(v['session_id'])\n commands.pop(v['session_id'])\n clean_up_data(results, junit_report_name)\n\n for d in deleted:\n mapping.pop(d, None)\n\n logger.info(f'There are {len(mapping.keys())} containers running...')\n\n\ndef generate_reports(results, args):\n report_id = args['report_id']\n browser_name = args['desired_capabilities']['browserName']\n version = args['desired_capabilities']['version']\n junit_report = args['junit_report']\n thresholds = 
args['thresholds']\n    junit_report_bucket = args['junit_report_bucket']\n    galloper_project_id = args['galloper_project_id']\n    galloper_url = args['galloper_url']\n    galloper_token = args['galloper_token']\n    tz = args['tz']\n    email_report = args['desired_capabilities'].get('email_report', '')\n    test_id = args['desired_capabilities'].get('test_id', '')\n\n    test_name = f\"{browser_name}_{version}\"\n\n    result_collector = ResultsCollector()\n    for r in results:\n        result_collector.add(r.page_identifier, r)\n\n    threshold_results = assert_test_thresholds(test_name, thresholds, result_collector.data)\n\n    _, junit_report_name = process_results_for_test(galloper_url, galloper_project_id, galloper_token, report_id,\n                                                    test_name, threshold_results,\n                                                    junit_report,\n                                                    junit_report_bucket, tz)\n\n    # reporters consume the evaluated threshold results, not the raw threshold definitions\n    notify_jira(test_name, threshold_results, args)\n    notify_azure_devops(test_name, threshold_results, args)\n\n    if email_report:\n        init_email_notification(galloper_url, galloper_project_id, galloper_token, report_id, test_id)\n\n    return junit_report_name\n\n\nclass Interceptor:\n    def __init__(self):\n        pass\n\n    def process(self, original_request, commands_full=False):\n        session_id = original_request.path_components[3]\n        host_hash = session_id[0:32]\n        session_id = session_id[32:]\n        host_info = mapping[host_hash]\n\n        host = host_info['host']\n        start_time = host_info['start_time']\n        page_load_timeout = host_info['page_load_timeout']\n\n        session_commands = commands[session_id][:-1]\n        if commands_full:\n            session_commands = commands[session_id]\n\n        wait_for_page_to_load(page_load_timeout)\n\n        if session_id not in locators.keys():\n            return\n        locators_list = locators[session_id]\n\n        results = process_request(original_request, host, session_id, start_time, locators_list,\n                                  session_commands)\n        video_host = host_info['video']\n        video_folder, video_path = stop_recording(video_host)\n        results.video_folder = video_folder\n        results.video_path = video_path\n        if results.results:\n            report_id = host_info[\"report_id\"]\n            thresholds = host_info['thresholds']\n            galloper_project_id = host_info['galloper_project_id']\n            galloper_url = host_info['galloper_url']\n            galloper_token = host_info['galloper_token']\n            process_results_for_page(galloper_url, galloper_project_id, galloper_token, report_id,\n                                     results, thresholds, session_id)\n            execution_results.add(session_id, results)\n        return host_hash, video_host\n\n    def request(self, flow):\n        original_request = flow.request\n\n        path_components = list(original_request.path_components)\n        host = None\n        host_hash = None\n        container_id = None\n        selenium_port = None\n        video_port = None\n        browser_name = \"\"\n        version = \"\"\n\n        if flow.request.path == \"/status\" or flow.request.path == '/favicon.ico':\n            content = {\"quota\": QUOTA, \"active\": len(mapping.keys())}\n            response = json.dumps(content).encode('utf-8')\n            flow.response = http.HTTPResponse.make(\n                200,\n                response\n            )\n            return\n\n        if original_request.method != \"GET\" and \\\n                original_request.method != \"DELETE\" and \\\n                original_request.path != '/wd/hub/session':\n\n            session_id, command = request_to_command(original_request, locators)\n\n            if command:\n                commands.add(session_id, command)\n\n        if \"element\" in original_request.path and is_actionable(original_request.path):\n            mark_element_actionable(original_request, locators)\n            host_hash, video_host = self.process(original_request)\n\n            start_time = start_video_recording(video_host)\n            mapping[host_hash]['start_time'] = start_time\n\n        if \"/wd/hub/session\" in original_request.path and 
original_request.method == \"DELETE\" \\\n and len(original_request.path_components) >= 4:\n self.process(original_request, commands_full=True)\n\n if original_request.path.endswith('/url') and original_request.method == \"POST\" \\\n and len(original_request.path_components) >= 4:\n self.process(original_request, commands_full=True)\n\n if original_request.path == \"/wd/hub/session\":\n desired_capabilities = get_desired_capabilities(original_request)\n browser_name = desired_capabilities['browserName']\n version = desired_capabilities.get('version', '')\n vnc = bool(desired_capabilities.get('vnc', False))\n page_load_timeout = int(desired_capabilities.get('page_load_timeout', 0))\n junit_report = desired_capabilities.get('junit_report', \"\")\n junit_report_bucket = desired_capabilities.get('junit_report_bucket', \"\")\n galloper_project_id = desired_capabilities.get('galloper_project_id', 1)\n galloper_token = desired_capabilities.get('galloper_token', None)\n galloper_url = desired_capabilities.get('galloper_url', 'http://localhost')\n env = desired_capabilities.get('venv', 'default')\n tz = desired_capabilities.get('tz', 'UTC')\n\n try:\n container_id, selenium_port, video_port = start_container(browser_name, version, vnc)\n except Exception:\n logger.error(f\"There is no container for {browser_name}:{version}\")\n\n if container_id is not None:\n host = f\"localhost:{selenium_port}\"\n host_hash = get_hash(host)\n report_id = desired_capabilities.get(\"report_uid\")\n if not report_id:\n report_id, test_name = notify_on_test_start(galloper_url, galloper_project_id, galloper_token,\n desired_capabilities)\n thresholds = get_thresholds(galloper_url, galloper_project_id, galloper_token,\n desired_capabilities.get('job_name', ''), env)\n\n mapping[host_hash] = {\n \"host\": f\"localhost:{selenium_port}\",\n \"container_id\": container_id,\n \"video\": f\"localhost:{video_port}\",\n \"report_id\": report_id,\n \"desired_capabilities\": desired_capabilities,\n \"thresholds\": thresholds,\n 'page_load_timeout': page_load_timeout,\n 'junit_report': junit_report,\n 'junit_report_bucket': junit_report_bucket,\n 'galloper_url': galloper_url,\n 'galloper_project_id': galloper_project_id,\n 'galloper_token': galloper_token,\n 'env': env,\n 'tz': tz\n }\n\n if len(path_components) > 3:\n session_id = path_components[3]\n host_hash = session_id[0:32]\n host = mapping[host_hash]['host']\n path_components[3] = session_id[32:]\n container_id = mapping[host_hash]['container_id']\n\n if container_id is None:\n content = {\"value\": {\"error\": -1, \"message\": f\"There is no container for {browser_name}:{version}\"}}\n response = json.dumps(content).encode('utf-8')\n flow.response = http.HTTPResponse.make(\n 500,\n response\n )\n return\n\n url = f\"{original_request.scheme}://{host}/{'/'.join(path_components)}\"\n\n flow.request = http.HTTPRequest.make(\n method=original_request.method,\n url=url,\n content=original_request.content,\n headers=original_request.headers.fields\n )\n\n mapping[host_hash][\"lastly_used\"] = str(datetime.now())\n\n def response(self, flow):\n response = flow.response.content\n\n if flow.response.status_code == 500 or flow.response.status_code == 404:\n flow.response = http.HTTPResponse.make(\n flow.response.status_code,\n response,\n flow.response.headers.fields\n )\n return\n\n if flow.request.path == \"/wd/hub/session\":\n host_hash = get_hash(f\"localhost:{flow.request.port}\")\n\n content = json.loads(response.decode('utf-8'))\n\n session_id = 
content['value']['sessionId']\n            content['value']['sessionId'] = host_hash + session_id\n            response = json.dumps(content).encode('utf-8')\n\n            video_host = mapping[host_hash][\"video\"]\n            start_time = start_video_recording(video_host)\n            mapping[host_hash]['start_time'] = start_time\n            mapping[host_hash]['session_id'] = session_id\n\n        if flow.request.path.endswith(\"element\"):\n            session_id = flow.request.path_components[3]\n            content = json.loads(response.decode('utf-8'))\n            element_id = [*content['value'].values()][0]\n\n            locator = json.loads(flow.request.content.decode('utf-8'))\n            locators.save(session_id, element_id, locator)\n\n        if flow.request.path.endswith(\"/url\"):\n            session_id = flow.request.path_components[3]\n            element_id = \"open\"\n            url = json.loads(flow.request.content.decode('utf-8'))['url']\n            locators.save(session_id, element_id, url)\n\n        flow.response = http.HTTPResponse.make(\n            flow.response.status_code,\n            response,\n            flow.response.headers.fields\n        )\n\n\ndef start_container(browser_name, version, vnc):\n    container_config = get_container_configuration(browser_name, version)\n    container_image = container_config['image']\n    env_vars = container_config.get('env', {})\n\n    env = [f\"RESOLUTION={SCREEN_RESOLUTION}\"]\n    for k, v in env_vars.items():\n        env.append(f\"{k}={v}\")\n\n    ports = {f\"{SELENIUM_PORT}\": None, f\"{VIDEO_PORT}\": None}\n    if vnc:\n        ports[VNC_PORT] = None\n        env.append(\"ENABLE_VNC=true\")\n\n    logger.info(f\"Starting container {container_image} ...\")\n    container = docker_client.run(\n        container_image,\n        detach=True,\n        ports=ports,\n        volumes={'/dev/shm': {'bind': '/dev/shm'}},\n        environment=env,\n        privileged=True\n    )\n    selenium_port = docker_client.port(container.short_id, SELENIUM_PORT)\n    video_port = docker_client.port(container.short_id, VIDEO_PORT)\n    wait_for_hub(\"localhost\", selenium_port)\n    wait_for_agent(\"localhost\", video_port)\n\n    logger.info(f'Container {container.id} has been started')\n    return container.short_id, selenium_port, video_port\n\n\ndef get_container_configuration(browser_name, version):\n    cfg = config[browser_name]\n    if not version:\n        version = cfg['default']\n\n    return cfg['versions'][version]\n\n\ndef start_proxy():\n    opts = options.Options(listen_host='0.0.0.0',\n                           listen_port=PORT,\n                           mode=\"transparent\")\n    pconf = proxy.config.ProxyConfig(opts)\n    m = DumpMaster(opts)\n    m.server = proxy.server.ProxyServer(pconf)\n    logger.info(f'Intercepting Proxy listening on {PORT}')\n\n    m.addons.add(Interceptor())\n    try:\n        m.run()\n    except KeyboardInterrupt:\n        m.shutdown()\n\n\ndef main():\n    scheduler.add_job(container_inspector_job, 'interval', seconds=SCHEDULER_INTERVAL)\n    scheduler.start()\n    start_proxy()\n\n\nif __name__ == '__main__':\n    main()\n", "id": "450550", "language": "Python", "matching_score": 3.2812609672546387, "max_stars_count": 0, "path": "observer_hub/app.py" }, { "content": "class Collector(object):\n\n    def __init__(self):\n        self.data = {}\n\n    def add(self, key, data):\n\n        if key in self.data.keys():\n            self.data[key].append(data)\n        else:\n            self.data[key] = [data]\n\n    def pop(self, k, default=None):\n        return self.data.pop(k, default)\n\n    def __getitem__(self, key):\n        return self.data[key]\n\n    def keys(self):\n        return self.data.keys()\n\n\nclass ExecutionResultsCollector(Collector):\n\n    def __init__(self):\n        super().__init__()\n\n\nclass ResultsCollector(Collector):\n\n    def __init__(self):\n        super().__init__()\n\n\nclass CommandsCollector(Collector):\n\n    def __init__(self):\n        super().__init__()\n\n\nclass LocatorsCollector(Collector):\n    def 
__init__(self):\n        super().__init__()\n\n    def save(self, session_id, element_id, locator):\n        if session_id in self.data.keys():\n            self.data[session_id][element_id] = locator\n        else:\n            self.data[session_id] = {element_id: locator}\n", "id": "10120316", "language": "Python", "matching_score": 0.39018958806991577, "max_stars_count": 0, "path": "observer_hub/models/collector.py" }, { "content": "import time\n\nfrom selenium import webdriver\nfrom selenium.webdriver.common.by import By\nfrom selenium.webdriver.support.wait import WebDriverWait\n\n\nclass Browser(object):\n\n    def __init__(self):\n        # self.driver = webdriver.Chrome(executable_path=ChromeDriverManager().install())\n        options = webdriver.ChromeOptions()\n        options.add_argument('--window-size=1920,1080')\n        options.set_capability(\"version\", \"83.0\")\n        # options = webdriver.FirefoxOptions()\n        self.driver = webdriver.Remote(command_executor='http://localhost:4444/wd/hub', options=options)\n        self.wait = WebDriverWait(self.driver, 10)\n\n    def current_url(self):\n        return self.driver.current_url\n\n    def open_url(self, url):\n        self.driver.get(url)\n\n    def set_window_size(self, width, height):\n        self.driver.set_window_size(width, height)\n\n    def set_window_position(self, x, y):\n        self.driver.set_window_position(x, y)\n\n    def get(self, css_locator):\n        # return self.wait.until(EC.visibility_of_element_located((By.CSS_SELECTOR, css_locator)))\n        return Element(self.driver, css_locator)\n\n    def session_id(self):\n        return self.driver.session_id\n\n    def close(self):\n        self.driver.quit()\n\n\nclass Element(object):\n\n    def __init__(self, driver, locator):\n        self.driver = driver\n        self.locator = locator\n\n    def _find(self):\n        finish_time = time.time() + 4\n        while True:\n            try:\n                element = self.driver.find_element(by=By.CSS_SELECTOR, value=self.locator)\n                if element.is_displayed():\n                    return element\n                else:\n                    raise Exception()\n            except Exception as reason:\n                if time.time() > finish_time:\n                    raise TimeoutError(reason)\n                time.sleep(0.4)\n\n    def click(self):\n        self._find().click()\n\n    @property\n    def text(self):\n        return self._find().text\n", "id": "2838542", "language": "Python", "matching_score": 2.640000581741333, "max_stars_count": 0, "path": "tests/browser/browser.py" }, { "content": "import os\nfrom uuid import uuid4\n\nimport pytest\nfrom selene import Config, Browser, have\nfrom selenium import webdriver\n\n\n@pytest.fixture(scope=\"session\")\ndef browser():\n    options = webdriver.ChromeOptions()\n    # options = webdriver.FirefoxOptions()\n\n    options.set_capability(\"version\", \"84.0\")\n    options.set_capability(\"venv\", \"DEV\")\n    options.set_capability(\"vnc\", True)\n    options.set_capability(\"junit_report\", \"test_report\")\n    options.set_capability(\"report_uid\", str(uuid4()))\n    # options.set_capability(\"report_uid\", \"12345\")\n    options.set_capability(\"job_name\", \"DEMO\")\n    options.set_capability(\"galloper_url\", \"http://localhost\")\n    options.set_capability(\"galloper_token\", os.environ.get('galloper_token'))\n    options.set_capability('galloper_project_id', 1)\n    options.set_capability('tz', 'Europe/Kiev')\n\n    options.set_capability(\"jira_url\", \"http://localhost:8080\")\n    options.set_capability(\"jira_project\", \"DEMO\")\n    options.set_capability(\"jira_user\", \"\")\n    options.set_capability(\"jira_password\", \"<PASSWORD>\")\n\n    driver = webdriver.Remote(command_executor='http://localhost:4444/wd/hub', options=options)\n\n    browser = Browser(Config(\n        driver=driver,\n        # driver=webdriver.Chrome(),\n        
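# Selene Config bound to the remote driver; the public Syncfusion demo below serves as the app under test.\n        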
base_url='https://ej2.syncfusion.com',\n timeout=4,\n window_width=1920,\n window_height=1080))\n yield browser\n browser.close_current_tab()\n\n\ndef test_web_mail_selene(browser):\n browser.open('/showcase/typescript/webmail/#/home')\n browser.element(\"#tree li.e-level-2[data-uid='21']\").click()\n\n browser.element(\"li.e-level-1[data-uid='SF10205']\").click()\n browser.element(\"#sub\").should(have.exact_text('Fletcher Beck'))\n\n browser.element(\"li.e-level-1[data-uid='SF10202']\").click()\n browser.element(\"#sub\").should(have.exact_text('<NAME>'))\n\n\ndef test_web_mail_selene2(browser):\n browser.open('/showcase/typescript/webmail/#/home')\n browser.element(\"#tree li.e-level-2[data-uid='11']\").click()\n\n browser.element(\"li.e-level-1[data-uid='SF10095']\").click()\n browser.element(\"#sub\").should(have.exact_text('<NAME>'))\n\n browser.element(\"#tree li.e-level-2[data-uid='12']\").click()\n browser.element(\"li.e-level-1[data-uid='SF10142']\").click()\n browser.element(\"#sub\").should(have.exact_text('<NAME>'))\n\n\ndef test_web_mail_selene3(browser):\n browser.open('/showcase/typescript/webmail/#/home')\n browser.element(\"#tree li.e-level-2[data-uid='21']\").click()\n\n browser.element(\"li.e-level-1[data-uid='SF10208']\").click()\n browser.element(\"#sub\").should(have.exact_text('<NAME>'))\n\n browser.element(\"li.e-level-1[data-uid='SF10203']\").click()\n browser.element(\"#sub\").should(have.exact_text('<NAME>'))\n", "id": "9403133", "language": "Python", "matching_score": 2.666710138320923, "max_stars_count": 0, "path": "tests/test_main.py" }, { "content": "from selene.support.shared import SharedBrowser\nfrom selenium import webdriver\nfrom selenium.webdriver.webkitgtk.options import Options\n\nfrom observer.constants import REMOTE_DRIVER_ADDRESS, RESULTS_REPORT_NAME, RESULTS_BUCKET, ENV, TZ, GALLOPER_PROJECT_ID, \\\n BROWSER_VERSION, GALLOPER_URL, TOKEN, REPORT_ID, OBSERVER_USER, OBSERVER_PASSWORD, JOB_NAME, ENABLE_VNC, JIRA_URL, \\\n JIRA_PROJECT, JIRA_USER, JIRA_PASSWORD, ADO_PROJECT, ADO_ORGANIZATION, ADO_TOKEN, ADO_TEAM, \\\n PROXY_SETTINGS, STANDALONE\nfrom observer.util import get_browser_version\n\nbrowser = None\ncfg = None\nexec_args = None\n\n\ndef get_driver():\n global browser\n browser_name = 'chrome'\n version = None\n if browser is None:\n if not STANDALONE:\n browser_name, version = get_browser_version(exec_args.browser)\n options = get_browser_options(browser_name, version, exec_args)\n driver = webdriver.Remote(\n command_executor=f'http://{OBSERVER_USER}:{OBSERVER_PASSWORD}@{REMOTE_DRIVER_ADDRESS}/wd/hub',\n options=options)\n\n cfg.browser_name = browser_name\n cfg.driver = driver\n driver.set_window_position(0, 0)\n browser = SharedBrowser(cfg)\n return browser\n\n\ndef get_browser_options(browser_name, version, args):\n options = Options()\n\n if \"chrome\" == browser_name:\n options = webdriver.ChromeOptions()\n\n if \"firefox\" == browser_name:\n options = webdriver.FirefoxOptions()\n\n if options.capabilities.get(\"browserName\") == 'MiniBrowser':\n raise Exception(f\"Unsupported browser {browser_name}\")\n\n if 'junit' in args.report:\n options.set_capability(\"junit_report\", RESULTS_REPORT_NAME)\n options.set_capability(\"junit_report_bucket\", RESULTS_BUCKET)\n\n if 'jira' in args.report:\n options.set_capability(\"jira_url\", JIRA_URL)\n options.set_capability(\"jira_project\", JIRA_PROJECT)\n options.set_capability(\"jira_user\", JIRA_USER)\n options.set_capability(\"jira_password\", <PASSWORD>)\n\n if 'ado' in args.report:\n 
options.set_capability(\"ado_organization\", ADO_ORGANIZATION)\n options.set_capability(\"ado_project\", ADO_PROJECT)\n options.set_capability(\"ado_token\", ADO_TOKEN)\n options.set_capability(\"ado_team\", ADO_TEAM)\n\n if 'email' in args.report:\n options.set_capability(\"email_report\", True)\n\n if BROWSER_VERSION:\n version = BROWSER_VERSION\n\n options.set_capability(\"test_id\", args.test_id)\n options.set_capability(\"version\", version)\n options.set_capability(\"venv\", ENV)\n options.set_capability('tz', TZ)\n options.set_capability('galloper_project_id', GALLOPER_PROJECT_ID)\n options.set_capability('galloper_url', GALLOPER_URL)\n options.set_capability('galloper_token', TOKEN)\n options.set_capability('aggregation', args.aggregation)\n options.set_capability('job_name', JOB_NAME)\n options.set_capability('report_uid', REPORT_ID)\n options.set_capability(\"vnc\", ENABLE_VNC)\n if PROXY_SETTINGS:\n options.add_argument(\"--incognito\")\n options.add_argument('--ignore-certificate-errors')\n options.add_argument(f'--proxy-server={PROXY_SETTINGS}')\n return options\n\n\ndef close_driver():\n global browser\n if browser:\n browser.quit()\n browser = None\n\n\ndef set_config(config):\n global cfg\n cfg = config\n\n\ndef set_args(args):\n global exec_args\n exec_args = args\n", "id": "8443460", "language": "Python", "matching_score": 2.078352928161621, "max_stars_count": 0, "path": "observer/driver_manager.py" }, { "content": "import hashlib\n\nfrom requests import post\n\nfrom observer_hub.util import logger\n\nPRIORITY_MAPPING = {\"Critical\": 1, \"High\": 1, \"Medium\": 2, \"Low\": 3, \"Info\": 4}\n\n\nclass AdoClient(object):\n\n def __init__(self, organization, project, personal_access_token,\n team=None, issue_type=\"issue\", rules=\"false\", notify=\"false\"):\n self.auth = ('', personal_access_token)\n self.team = f\"{project}\"\n if team:\n self.team = f\"{project}\\\\{team}\"\n\n self.url = f'https://dev.azure.com/{organization}/{project}/_apis/wit/workitems/' \\\n f'${issue_type}?bypassRules={rules}&suppressNotifications={notify}&api-version=5.1'\n\n self.query_url = f'https://dev.azure.com/{organization}/{project}/_apis/wit/wiql?api-version=5.1'\n\n def get_issues(self, issue_hash=None):\n q = f\"SELECT [System.Id] From WorkItems Where [System.Description] Contains \\\"{issue_hash}\\\"\"\n data = post(self.query_url, auth=self.auth, json={\"query\": q},\n headers={'content-type': 'application/json'}).json()\n return data[\"workItems\"]\n\n def create_issues(self, test_name, data):\n\n for d in data:\n if d['status'] == 'passed':\n continue\n\n issue_hash = hashlib.sha256(\n f\"{d['scope']} {d['name']} {d['aggregation']} {d['raw_result'].page_identifier}\".encode(\n 'utf-8')).hexdigest()\n\n if len(self.get_issues(issue_hash)) > 0:\n continue\n\n logger.info(f\"=====> About to crate Azure DevOps issues\")\n\n steps = []\n for i, cmd in enumerate(d['raw_result'].commands, 1):\n command = cmd['command']\n value = cmd[\"value\"]\n target = cmd['target']\n action = \"to\" if value != \"\" else \"on\"\n text = f\"*{command}* {value} {action} *{target}*\"\n if command == \"open\":\n text = f\"*{command}* {action} {target}\"\n\n steps.append(f\"{i}. 
{text}\")\n\n steps = \"\\n\".join(steps)\n\n summary = f\"{d['scope'].capitalize()} [{d['name']}] {d['aggregation']} value violates threshold rule for {test_name}\"\n\n description = f\"\"\"Value {d['actual']} violates threshold rule: {d['scope']} [{d['name']}] {d['aggregation']}\n {d['rule']} {d['expected']} for {test_name}\"\n\n Steps:\\n {steps}\n\n *Issue Hash:* {issue_hash} \n \"\"\"\n\n fields_mapping = {\n \"/fields/System.Title\": summary,\n \"/fields/Microsoft.VSTS.Common.Priority\": PRIORITY_MAPPING['High'],\n \"/fields/System.Description\": description,\n \"/fields/System.AreaPath\": self.team,\n \"/fields/System.IterationPath\": self.team\n }\n\n body = []\n for key, value in fields_mapping.items():\n if value:\n _piece = {\"op\": \"add\", \"path\": key, \"value\": value}\n body.append(_piece)\n\n res = post(self.url, auth=self.auth, json=body,\n headers={'content-type': 'application/json-patch+json'})\n\n logger.info(f\"Azure DevOps issue {res.json()['id']} has been created\")\n\n\ndef notify_azure_devops(test_name, threshold_results, args):\n caps = args['desired_capabilities']\n ado_organization = caps.get('ado_organization', '')\n ado_project = caps.get('ado_project', '')\n ado_token = caps.get('ado_token', '')\n ado_team = caps.get('ado_team', '')\n if ado_organization and ado_project and ado_token:\n try:\n client = AdoClient(ado_organization, ado_project, ado_token, ado_team)\n client.create_issues(test_name, threshold_results[\"details\"])\n except Exception as e:\n logger.error(f\"Error during Azure DevOps ticket creation {e}\")\n", "id": "5416917", "language": "Python", "matching_score": 3.6499760150909424, "max_stars_count": 0, "path": "observer_hub/reporters/azure_devops.py" }, { "content": "import hashlib\n\nfrom jira import JIRA\n\nfrom observer_hub.util import logger\n\n\nclass JiraClient(object):\n\n def __init__(self, url, user, password, project):\n self.url = url\n self.password = password\n self.user = user\n self.project = project.upper()\n self.client = self.__connect()\n\n def __connect(self):\n try:\n jira = JIRA(self.url, basic_auth=(self.user, self.password))\n except Exception as e:\n logger.error(f\"Failed to connect to Jira {self.url} {e}\")\n raise e\n\n projects = [project.key for project in jira.projects()]\n if self.project not in projects:\n raise Exception(f\"No such project {self.project}\")\n\n return jira\n\n def add_attachment(self, issue, report):\n if not report:\n return\n\n try:\n self.client.add_attachment(issue, attachment=report.path)\n except Exception as e:\n logger.error(e)\n\n def create_issues(self, scenario_name, data):\n for d in data:\n if d['status'] == 'passed':\n continue\n\n issue_hash = hashlib.sha256(\n f\"{d['scope']} {d['name']} {d['aggregation']} {d['raw_result'].page_identifier}\".encode(\n 'utf-8')).hexdigest()\n\n if len(self.get_issues(issue_hash)) > 0:\n logger.info(f\"JIRA with hash {issue_hash} already exist\")\n continue\n\n logger.info(f\"=====> About to crate JIRA issues\")\n\n steps = []\n for i, cmd in enumerate(d['raw_result'].commands, 1):\n command = cmd['command']\n value = cmd['value']\n action = \"to\" if value != \"\" else \"on\"\n target = cmd['target']\n text = f\"*{command}* {value} {action} *{target}*\"\n if command == \"open\":\n text = f\"*{command}* {action} {target}\"\n\n steps.append(f\"{i}. 
{text}\")\n\n steps = \"\\n\".join(steps)\n\n summary = f\"{d['scope'].capitalize()} [{d['name']}] {d['aggregation']} value violates threshold rule for {scenario_name}\"\n\n description = f\"\"\"Value {d['actual']} violates threshold rule: {d['scope']} [{d['name']}] {d['aggregation']}\n{d['rule']} {d['expected']} for {scenario_name}\"\n \n Steps:\\n {steps}\n \n *Issue Hash:* {issue_hash} \n \"\"\"\n\n field_list = {\n 'project': {'key': self.project},\n 'issuetype': 'Bug',\n 'summary': summary,\n 'description': description,\n 'priority': {'name': \"High\"},\n 'labels': ['observer', 'ui_performance', scenario_name]\n }\n\n issue = self.client.create_issue(field_list)\n self.add_attachment(issue, d['raw_result'].report)\n logger.info(f\"JIRA {issue} has been created\")\n\n def get_issues(self, issue_hash):\n issues = []\n i = 0\n chunk_size = 100\n while True:\n chunk = self.client.search_issues(\n f'project = {self.project} AND issuetype = Bug AND description ~ {issue_hash}', startAt=i,\n maxResults=chunk_size)\n i += chunk_size\n issues += chunk.iterable\n if i >= chunk.total:\n break\n return issues\n\n\ndef notify_jira(test_name, threshold_results, args):\n caps = args['desired_capabilities']\n jira_url = caps.get('jira_url', '')\n jira_project = caps.get('jira_project', '')\n jira_user = caps.get('jira_user', '')\n jira_password = caps.get('jira_password', '')\n\n if jira_url and jira_project:\n try:\n jira = JiraClient(jira_url, jira_user, jira_password, jira_project)\n jira.create_issues(test_name, threshold_results[\"details\"])\n except Exception as e:\n logger.error(f\"Error during jira ticket creation {e}\")\n", "id": "11385134", "language": "Python", "matching_score": 1.1098977327346802, "max_stars_count": 0, "path": "observer_hub/reporters/jira_reporter.py" }, { "content": "from sqlalchemy import and_\n\nfrom ...shared.utils.restApi import RestResource\nfrom ...shared.utils.api_utils import build_req_parser\nfrom ..models.api_thresholds import APIThresholds\n\n\nclass BackendThresholdsAPI(RestResource):\n get_rules = (\n dict(name=\"test\", type=str, location=\"args\"),\n dict(name=\"env\", type=str, location=\"args\")\n )\n delete_rules = (\n dict(name=\"test\", type=str, location=(\"args\", \"json\")),\n dict(name=\"scope\", type=str, location=(\"args\", \"json\")),\n dict(name=\"target\", type=str, location=(\"args\", \"json\")),\n dict(name=\"aggregation\", type=str, location=(\"args\", \"json\")),\n dict(name=\"comparison\", type=str, location=(\"args\", \"json\")),\n dict(name=\"env\", type=str, location=(\"args\", \"json\"))\n )\n post_rules = delete_rules + (\n dict(name=\"value\", type=float, location=\"json\"),\n )\n\n def __init__(self):\n super().__init__()\n self.__init_req_parsers()\n\n def __init_req_parsers(self):\n self._parser_get = build_req_parser(rules=self.get_rules)\n self._parser_post = build_req_parser(rules=self.post_rules)\n self._parser_delete = build_req_parser(rules=self.delete_rules)\n\n def get(self, project_id: int):\n project = self.rpc.project_get_or_404(project_id=project_id)\n args = self._parser_get.parse_args(strict=False)\n if args.get(\"test\") and args.get(\"env\"):\n res = APIThresholds.query.filter().filter(\n and_(APIThresholds.project_id == project.id,\n APIThresholds.test == args.get(\"test\"),\n APIThresholds.environment == args.get(\"env\"))).all()\n else:\n res = APIThresholds.query.filter().filter(APIThresholds.project_id == project.id).all()\n return [th.to_json() for th in res]\n\n def post(self, project_id: int):\n project = 
self.rpc.project_get_or_404(project_id=project_id)\n args = self._parser_post.parse_args(strict=False)\n APIThresholds(project_id=project.id,\n test=args[\"test\"],\n scope=args[\"scope\"],\n environment=args[\"env\"],\n target=args[\"target\"],\n value=args[\"value\"],\n aggregation=args[\"aggregation\"],\n comparison=args[\"comparison\"]).insert()\n return {\"message\": \"OK\"}\n\n def delete(self, project_id: int):\n project = self.rpc.project_get_or_404(project_id=project_id)\n args = self._parser_delete.parse_args(strict=False)\n APIThresholds.query.filter().filter(\n and_(APIThresholds.project_id == project.id,\n APIThresholds.test == args.get(\"test\"),\n APIThresholds.scope == args.get(\"scope\"),\n APIThresholds.target == args.get(\"target\"),\n APIThresholds.environment == args.get(\"env\"),\n APIThresholds.aggregation == args.get(\"aggregation\"),\n APIThresholds.comparison == args.get(\"comparison\"))).first().delete()\n return {\"message\": \"OK\"}\n", "id": "3504855", "language": "Python", "matching_score": 3.2735414505004883, "max_stars_count": 0, "path": "api/thresholds.py" }, { "content": "from ...shared.utils.restApi import RestResource\nfrom ...shared.utils.api_utils import build_req_parser\nfrom ..models.api_baseline import APIBaseline\nfrom ..models.api_reports import APIReport\nfrom ..connectors.influx import get_aggregated_test_results\n\n\nclass BaselineAPI(RestResource):\n get_rules = (\n dict(name=\"test_name\", type=str, location=\"args\"),\n dict(name=\"env\", type=str, location=\"args\")\n )\n post_rules = (\n dict(name=\"test_name\", type=str, location=\"json\"),\n dict(name=\"build_id\", type=str, location=\"json\"),\n dict(name=\"env\", type=str, location=\"json\")\n )\n\n def __init__(self):\n super().__init__()\n self.__init_req_parsers()\n\n def __init_req_parsers(self):\n self._parser_get = build_req_parser(rules=self.get_rules)\n self._parser_post = build_req_parser(rules=self.post_rules)\n\n def get(self, project_id: int):\n args = self._parser_get.parse_args(strict=False)\n project = self.rpc.project_get_or_404(project_id=project_id)\n baseline = APIBaseline.query.filter_by(project_id=project.id, test=args.get(\"test_name\"),\n environment=args.get(\"env\")).first()\n test = baseline.summary if baseline else []\n report_id = baseline.report_id if baseline else 0\n return {\"baseline\": test, \"report_id\": report_id}\n\n def post(self, project_id: int):\n args = self._parser_post.parse_args(strict=False)\n project = self.rpc.project_get_or_404(project_id=project_id)\n report_id = APIReport.query.filter_by(project_id=project_id, name=args['test_name'],\n build_id=args['build_id']).first().to_json()['id']\n baseline = APIBaseline.query.filter_by(project_id=project.id, test=args.get(\"test_name\"),\n environment=args.get(\"env\")).first()\n if baseline:\n baseline.delete()\n test = get_aggregated_test_results(args['test_name'], args['build_id'])\n summary = []\n for req in test[0]:\n summary.append(req)\n baseline = APIBaseline(test=args[\"test_name\"],\n environment=args[\"env\"],\n project_id=project.id,\n report_id=report_id,\n summary=summary)\n baseline.insert()\n return {\"message\": \"baseline is set\"}", "id": "6995336", "language": "Python", "matching_score": 3.0850002765655518, "max_stars_count": 0, "path": "api/baseline.py" }, { "content": "from ...shared.utils.restApi import RestResource\nfrom ...shared.utils.api_utils import build_req_parser\nfrom ..utils.charts_utils import (requests_summary, requests_hits, avg_responses, summary_table, 
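# chart-building helpers that back the report charts endpoints\n 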
get_issues,\n get_data_from_influx)\n\n\nclass ReportChartsAPI(RestResource):\n get_rules = (\n dict(name=\"low_value\", type=float, default=0, location=\"args\"),\n dict(name=\"high_value\", type=float, default=100, location=\"args\"),\n dict(name=\"start_time\", type=str, default=\"\", location=\"args\"),\n dict(name=\"end_time\", type=str, default=\"\", location=\"args\"),\n dict(name=\"aggregator\", type=str, default=\"auto\", location=\"args\"),\n dict(name=\"sampler\", type=str, default=\"REQUEST\", location=\"args\"),\n dict(name=\"metric\", type=str, default=\"\", location=\"args\"),\n dict(name=\"scope\", type=str, default=\"\", location=\"args\"),\n dict(name=\"build_id\", type=str, location=\"args\"),\n dict(name=\"test_name\", type=str, location=\"args\"),\n dict(name=\"lg_type\", type=str, location=\"args\"),\n dict(name='status', type=str, default='all', location=\"args\")\n )\n mapping = {\n \"requests\": {\n \"summary\": requests_summary,\n \"hits\": requests_hits,\n \"average\": avg_responses,\n \"table\": summary_table,\n \"data\": get_data_from_influx\n },\n \"errors\": {\n \"table\": get_issues\n }\n }\n\n def __init__(self):\n super().__init__()\n self.__init_req_parsers()\n\n def __init_req_parsers(self):\n self._parser_get = build_req_parser(rules=self.get_rules)\n\n def get(self, source: str, target: str):\n args = self._parser_get.parse_args(strict=False)\n return self.mapping[source][target](args)\n", "id": "12496198", "language": "Python", "matching_score": 2.9309253692626953, "max_stars_count": 0, "path": "api/charts.py" }, { "content": "from sqlalchemy import and_\nfrom json import loads\n\nfrom ...shared.utils.restApi import RestResource\nfrom ...shared.utils.api_utils import build_req_parser\nfrom ...projects.models.statistics import Statistic\nfrom ..models.api_baseline import APIBaseline\nfrom ..models.api_reports import APIReport\nfrom ..utils.utils import get\nfrom ..connectors.influx import get_test_details, delete_test_data\n\n\nclass ReportAPI(RestResource):\n get_rules = (\n dict(name=\"offset\", type=int, default=0, location=\"args\"),\n dict(name=\"limit\", type=int, default=0, location=\"args\"),\n dict(name=\"search\", type=str, default=\"\", location=\"args\"),\n dict(name=\"sort\", type=str, default=\"\", location=\"args\"),\n dict(name=\"order\", type=str, default=\"\", location=\"args\"),\n dict(name=\"name\", type=str, location=\"args\"),\n dict(name=\"filter\", type=str, location=\"args\"),\n dict(name=\"report_id\", type=int, default=None, location=\"args\")\n )\n delete_rules = (\n dict(name=\"id[]\", type=int, action=\"append\", location=\"args\"),\n )\n put_rules = (\n dict(name=\"build_id\", type=str, location=\"json\"),\n dict(name=\"test_name\", type=str, location=\"json\"),\n dict(name=\"lg_type\", type=str, location=\"json\"),\n dict(name=\"missed\", type=int, location=\"json\"),\n dict(name=\"test_status\", type=dict, location=\"json\"),\n dict(name=\"response_times\", type=str, location=\"json\"),\n dict(name=\"duration\", type=float, location=\"json\"),\n dict(name=\"vusers\", type=int, location=\"json\")\n )\n post_rules = put_rules + (\n dict(name=\"start_time\", type=str, location=\"json\"),\n dict(name=\"environment\", type=str, location=\"json\"),\n dict(name=\"type\", type=str, location=\"json\"),\n dict(name=\"release_id\", type=int, location=\"json\"),\n dict(name=\"test_id\", type=str, default=None, location=\"json\")\n )\n\n def __init__(self):\n super().__init__()\n self.__init_req_parsers()\n\n def 
__init_req_parsers(self):\n self._parser_get = build_req_parser(rules=self.get_rules)\n self._parser_put = build_req_parser(rules=self.put_rules)\n self._parser_post = build_req_parser(rules=self.post_rules)\n self._parser_delete = build_req_parser(rules=self.delete_rules)\n\n def get(self, project_id: int):\n args = self._parser_get.parse_args(strict=False)\n if args.get(\"report_id\"):\n report = APIReport.query.filter_by(project_id=project_id, id=args.get(\"report_id\")).first().to_json()\n return report\n reports = []\n project = self.rpc.project_get_or_404(project_id=project_id)\n total, res = get(project, args, APIReport)\n for each in res:\n each_json = each.to_json()\n each_json[\"start_time\"] = each_json[\"start_time\"].replace(\"T\", \" \").split(\".\")[0]\n each_json[\"duration\"] = int(each_json[\"duration\"] if each_json[\"duration\"] else 0)\n try:\n each_json[\"failure_rate\"] = round((each_json[\"failures\"] / each_json[\"total\"]) * 100, 2)\n except ZeroDivisionError:\n each_json[\"failure_rate\"] = 0\n reports.append(each_json)\n return {\"total\": total, \"rows\": reports}\n\n def post(self, project_id: int):\n args = self._parser_post.parse_args(strict=False)\n project = self.rpc.project_get_or_404(project_id=project_id)\n\n # TODO: we need to check api performance tests quota here\n # if not ProjectQuota.check_quota(project_id=project_id, quota='performance_test_runs'):\n # return {\"Forbidden\": \"The number of performance test runs allowed in the project has been exceeded\"}\n report = APIReport(name=args[\"test_name\"],\n test_status=args[\"test_status\"],\n project_id=project.id,\n environment=args[\"environment\"],\n type=args[\"type\"],\n end_time=\"\",\n start_time=args[\"start_time\"],\n failures=0,\n total=0,\n thresholds_missed=0,\n throughput=0,\n vusers=args[\"vusers\"],\n pct50=0,\n pct75=0,\n pct90=0,\n pct95=0,\n pct99=0,\n _max=0,\n _min=0,\n mean=0,\n duration=args[\"duration\"],\n build_id=args[\"build_id\"],\n lg_type=args[\"lg_type\"],\n onexx=0,\n twoxx=0,\n threexx=0,\n fourxx=0,\n fivexx=0,\n requests=\"\",\n release_id=args.get(\"release_id\"),\n test_uid=args.get(\"test_id\"))\n report.insert()\n statistic = Statistic.query.filter_by(project_id=project_id).first()\n setattr(statistic, 'performance_test_runs', Statistic.performance_test_runs + 1)\n statistic.commit()\n return report.to_json()\n\n def put(self, project_id: int):\n args = self._parser_put.parse_args(strict=False)\n project = self.rpc.project_get_or_404(project_id=project_id)\n test_data = get_test_details(project_id=project_id, build_id=args[\"build_id\"], test_name=args[\"test_name\"],\n lg_type=args[\"lg_type\"])\n response_times = loads(args[\"response_times\"])\n report = APIReport.query.filter(\n and_(APIReport.project_id == project.id, APIReport.build_id == args[\"build_id\"])\n ).first()\n report.end_time = test_data[\"end_time\"]\n report.start_time = test_data[\"start_time\"]\n report.failures = test_data[\"failures\"]\n report.total = test_data[\"total\"]\n report.thresholds_missed = args.get(\"missed\", 0)\n report.throughput = test_data[\"throughput\"]\n report.pct50 = response_times[\"pct50\"]\n report.pct75 = response_times[\"pct75\"]\n report.pct90 = response_times[\"pct90\"]\n report.pct95 = response_times[\"pct95\"]\n report.pct99 = response_times[\"pct99\"]\n report._max = response_times[\"max\"]\n report._min = response_times[\"min\"]\n report.mean = response_times[\"mean\"]\n report.onexx = test_data[\"1xx\"]\n report.twoxx = test_data[\"2xx\"]\n 
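# 3xx-5xx status-class counters follow; \"requests\" is persisted as a semicolon-separated string\n        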
report.threexx = test_data[\"3xx\"]\n        report.fourxx = test_data[\"4xx\"]\n        report.fivexx = test_data[\"5xx\"]\n        report.requests = \";\".join(test_data[\"requests\"])\n        report.test_status = args[\"test_status\"]\n        report.vusers = args[\"vusers\"]\n        report.duration = args[\"duration\"]\n        report.commit()\n        return {\"message\": \"updated\"}\n\n    def delete(self, project_id: int):\n        args = self._parser_delete.parse_args(strict=False)\n        project = self.rpc.project_get_or_404(project_id=project_id)\n        query_result = APIReport.query.filter(\n            and_(APIReport.project_id == project.id, APIReport.id.in_(args[\"id[]\"]))\n        ).all()\n        for each in query_result:\n            delete_test_data(each.build_id, each.name, each.lg_type)\n            baseline = APIBaseline.query.filter_by(project_id=project.id, report_id=each.id).first()\n            if baseline:\n                baseline.delete()\n            each.delete()\n        return {\"message\": \"deleted\"}\n", "id": "5521876", "language": "Python", "matching_score": 4.4079766273498535, "max_stars_count": 0, "path": "api/reports.py" }, { "content": "from ...shared.utils.restApi import RestResource\nfrom ...shared.utils.api_utils import build_req_parser\nfrom ..models.api_reports import APIReport\n\n\nclass ReportStatusAPI(RestResource):\n    put_rules = (\n        dict(name=\"test_status\", type=dict, location=\"json\"),\n    )\n\n    def __init__(self):\n        super().__init__()\n        self.__init_req_parsers()\n\n    def __init_req_parsers(self):\n        self._parser_put = build_req_parser(rules=self.put_rules)\n\n    def get(self, project_id: int, report_id: int):\n        project = self.rpc.project_get_or_404(project_id=project_id)\n        report = APIReport.query.filter_by(project_id=project.id, id=report_id).first().to_json()\n        return {\"message\": report[\"test_status\"][\"status\"]}\n\n    def put(self, project_id: int, report_id: int):\n        args = self._parser_put.parse_args(strict=False)\n        project = self.rpc.project_get_or_404(project_id=project_id)\n        report = APIReport.query.filter_by(project_id=project.id, id=report_id).first()\n        test_status = args[\"test_status\"]\n        report.test_status = test_status\n        report.commit()\n        return {\"message\": f\"status changed to {report.test_status['status']}\"}\n", "id": "9359939", "language": "Python", "matching_score": 3.998399019241333, "max_stars_count": 0, "path": "api/report_status.py" }, { "content": "from ...shared.utils.restApi import RestResource\nfrom ...shared.utils.api_utils import build_req_parser\nfrom ..models.api_reports import APIReport\nfrom sqlalchemy import and_\n\n\nclass RequestsAPI(RestResource):\n    get_rules = (\n        dict(name=\"name\", type=str, location=\"args\"),\n        dict(name=\"env\", type=str, location=\"args\"),\n        dict(name=\"type\", type=str, default=\"backend\", location=\"args\")\n    )\n\n    def __init__(self):\n        super().__init__()\n        self.__init_req_parsers()\n\n    def __init_req_parsers(self):\n        self._parser_get = build_req_parser(rules=self.get_rules)\n\n    def get(self, project_id: int):\n        args = self._parser_get.parse_args(strict=False)\n        project = self.rpc.project_get_or_404(project_id=project_id)\n        requests_data = set()\n        query_result = APIReport.query.filter(\n            and_(APIReport.name == args.get(\"name\"), APIReport.environment == args.get(\"env\"),\n                 APIReport.project_id == project.id)\n        ).order_by(APIReport.id.asc()).all()\n        for each in query_result:\n            requests_data.update(set(each.requests.split(\";\")))\n        if \"All\" 
in requests_data:\n requests_data.remove(\"All\")\n return list(requests_data)\n", "id": "538607", "language": "Python", "matching_score": 5.485044479370117, "max_stars_count": 0, "path": "api/requests.py" }, { "content": "from ...shared.utils.restApi import RestResource\nfrom ...shared.utils.api_utils import build_req_parser\nfrom ..models.api_reports import APIReport\nfrom sqlalchemy import and_\n\n\nclass EnvironmentsAPI(RestResource):\n get_rules = (\n dict(name=\"name\", type=str, location=\"args\"),\n dict(name=\"type\", type=str, default=\"backend\", location=\"args\")\n )\n\n def __init__(self):\n super().__init__()\n self.__init_req_parsers()\n\n def __init_req_parsers(self):\n self._parser_get = build_req_parser(rules=self.get_rules)\n\n def get(self, project_id: int):\n args = self._parser_get.parse_args(strict=False)\n project = self.rpc.project_get_or_404(project_id=project_id)\n query_result = APIReport.query.with_entities(APIReport.environment).distinct().filter(\n and_(APIReport.name == args.get(\"name\"),\n APIReport.project_id == project.id)\n ).order_by(APIReport.id.asc()).all()\n return list(set([each.environment for each in query_result]))", "id": "6654442", "language": "Python", "matching_score": 2.939754009246826, "max_stars_count": 0, "path": "api/environments.py" }, { "content": "from sqlalchemy import and_\nfrom uuid import uuid4\nfrom werkzeug.datastructures import FileStorage\nfrom json import loads\n\nfrom ...shared.utils.restApi import RestResource\nfrom ...shared.utils.api_utils import build_req_parser, str2bool, get, upload_file\n\nfrom ..models.api_tests import ApiTests\nfrom ..utils.utils import compile_tests\n\n\nclass TestsApi(RestResource):\n _get_rules = (\n dict(name=\"offset\", type=int, default=0, location=\"args\"),\n dict(name=\"limit\", type=int, default=0, location=\"args\"),\n dict(name=\"search\", type=str, default=\"\", location=\"args\"),\n dict(name=\"sort\", type=str, default=\"\", location=\"args\"),\n dict(name=\"order\", type=str, default=\"\", location=\"args\"),\n dict(name=\"name\", type=str, location=\"args\"),\n dict(name=\"filter\", type=str, location=\"args\")\n )\n\n _post_rules = (\n dict(name=\"file\", type=FileStorage, location='files'),\n dict(name=\"local_path\", type=str, location='form'),\n dict(name=\"git\", type=str, location='form'),\n dict(name=\"name\", type=str, location='form'),\n dict(name=\"entrypoint\", type=str, location='form'),\n dict(name=\"parallel\", type=int, location='form'),\n dict(name=\"region\", type=str, location='form'),\n dict(name=\"reporting\", type=str, location='form'),\n dict(name=\"emails\", type=str, location='form'),\n dict(name=\"runner\", type=str, location='form'),\n dict(name=\"compile\", type=str2bool, location='form'),\n dict(name=\"params\", type=str, location='form'),\n dict(name=\"env_vars\", type=str, location='form'),\n dict(name=\"customization\", type=str, location='form'),\n dict(name=\"cc_env_vars\", type=str, location='form')\n )\n\n _delete_rules = (\n dict(name=\"id[]\", type=int, action=\"append\", location=\"args\"),\n )\n\n def __init__(self):\n super().__init__()\n self.__init_req_parsers()\n\n def __init_req_parsers(self):\n self.get_parser = build_req_parser(rules=self._get_rules)\n self.post_parser = build_req_parser(rules=self._post_rules)\n self.delete_parser = build_req_parser(rules=self._delete_rules)\n\n def get(self, project_id: int):\n args = self.get_parser.parse_args(strict=False)\n reports = []\n total, res = get(project_id, args, ApiTests)\n for each in 
res:\n reports.append(each.to_json([\"influx.port\", \"influx.host\", \"galloper_url\",\n \"influx.db\", \"comparison_db\", \"telegraf_db\",\n \"loki_host\", \"loki_port\", \"influx.username\", \"influx.password\"]))\n return {\"total\": total, \"rows\": reports}\n\n def delete(self, project_id: int):\n args = self.delete_parser.parse_args(strict=False)\n project = self.rpc.project_get_or_404(project_id=project_id)\n query_result = ApiTests.query.filter(\n and_(ApiTests.project_id == project.id, ApiTests.id.in_(args[\"id[]\"]))\n ).all()\n for each in query_result:\n each.delete()\n return {\"message\": \"deleted\"}\n\n def post(self, project_id: int):\n args = self.post_parser.parse_args(strict=False)\n project = self.rpc.project_get_or_404(project_id=project_id)\n if args.get(\"git\"):\n file_name = \"\"\n bucket = \"\"\n git_settings = loads(args[\"git\"])\n else:\n git_settings = {}\n file_name = args[\"file\"].filename\n bucket = \"tests\"\n upload_file(bucket, args[\"file\"], project, create_if_not_exists=True)\n\n if args[\"compile\"] and args[\"runner\"] in [\"v3.1\", \"v2.3\"]:\n compile_tests(project.id, file_name, args[\"runner\"])\n\n test = ApiTests(project_id=project.id,\n test_uid=str(uuid4()),\n name=args[\"name\"],\n parallel=args[\"parallel\"],\n region=args[\"region\"],\n bucket=bucket,\n file=file_name,\n git=git_settings,\n local_path=args[\"local_path\"],\n entrypoint=args[\"entrypoint\"],\n runner=args[\"runner\"],\n reporting=loads(args[\"reporting\"]),\n params=loads(args[\"params\"]),\n env_vars=loads(args[\"env_vars\"]),\n customization=loads(args[\"customization\"]),\n cc_env_vars=loads(args[\"cc_env_vars\"]))\n test.insert()\n return test.to_json(exclude_fields=(\"id\",))\n", "id": "7777025", "language": "Python", "matching_score": 5.561106204986572, "max_stars_count": 0, "path": "api/tests.py" }, { "content": "from json import loads\nfrom sqlalchemy import and_\nfrom copy import deepcopy\n\nfrom ...shared.utils.restApi import RestResource\nfrom ...shared.utils.api_utils import str2bool, build_req_parser\nfrom ..models.api_tests import ApiTests\nfrom ..models.api_reports import APIReport\nfrom ..utils.utils import exec_test, get_backend_test_data\n\n\nclass TestApiBackend(RestResource):\n _get_rules = (\n dict(name=\"raw\", type=int, default=0, location=\"args\"),\n dict(name=\"type\", type=str, default='cc', location=\"args\"),\n dict(name=\"exec\", type=str2bool, default=False, location=\"args\")\n )\n\n _put_rules = (\n dict(name=\"parallel\", type=int, required=False, location='json'),\n dict(name=\"region\", type=str, required=False, location='json'),\n dict(name=\"params\", type=str, default=\"[]\", required=False, location='json'),\n dict(name=\"env_vars\", type=str, default=\"{}\", required=False, location='json'),\n dict(name=\"customization\", type=str, default=\"{}\", required=False, location='json'),\n dict(name=\"cc_env_vars\", type=str, default=\"{}\", required=False, location='json'),\n dict(name=\"reporter\", type=list, required=False, location='json'),\n dict(name=\"emails\", type=str, required=False, location='json'),\n dict(name=\"git\", type=str, required=False, location='json'),\n )\n\n _post_rules = _put_rules + (\n dict(name=\"test_type\", type=str, required=False, location='json'),\n dict(name=\"runner\", type=str, required=False, location='json'),\n dict(name=\"type\", type=str, default=None, required=False, location='json')\n )\n\n def __init__(self):\n super().__init__()\n self.__init_req_parsers()\n\n def __init_req_parsers(self):\n 
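# build one reqparse parser per HTTP verb from the rule tuples declared above\n        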
self.get_parser = build_req_parser(rules=self._get_rules)\n self.put_parser = build_req_parser(rules=self._put_rules)\n self.post_parser = build_req_parser(rules=self._post_rules)\n\n def get(self, project_id, test_id):\n args = self.get_parser.parse_args(strict=False)\n project = self.rpc.project_get_or_404(project_id=project_id)\n if isinstance(test_id, int):\n _filter = and_(ApiTests.project_id == project.id, ApiTests.id == test_id)\n else:\n _filter = and_(ApiTests.project_id == project.id, ApiTests.test_uid == test_id)\n test = ApiTests.query.filter(_filter).first()\n if args.raw:\n return test.to_json([\"influx.port\", \"influx.host\", \"galloper_url\",\n \"influx.db\", \"comparison_db\", \"telegraf_db\",\n \"loki_host\", \"loki_port\", \"influx.username\", \"influx.password\"])\n if args[\"type\"] == \"docker\":\n message = test.configure_execution_json(args.get(\"type\"), execution=args.get(\"exec\"))\n else:\n message = [{\"test_id\": test.test_uid}]\n return {\"config\": message} # this is cc format\n\n def put(self, project_id, test_id):\n default_params = [\"influx.port\", \"influx.host\", \"galloper_url\", \"influx.db\", \"comparison_db\", \"telegraf_db\",\n \"loki_host\", \"loki_port\", \"test.type\", \"test_type\", \"influx.username\", \"influx.password\"]\n project = self.rpc.project_get_or_404(project_id=project_id)\n args = self.put_parser.parse_args(strict=False)\n if isinstance(test_id, int):\n _filter = and_(ApiTests.project_id == project.id, ApiTests.id == test_id)\n else:\n _filter = and_(ApiTests.project_id == project.id, ApiTests.test_uid == test_id)\n task = ApiTests.query.filter(_filter).first()\n\n params = deepcopy(getattr(task, \"params\"))\n new_params = loads(args.get(\"params\"))\n param_names = [each[\"name\"] for each in params]\n for param in new_params:\n if param[\"name\"] not in param_names:\n params.append(param)\n new_param_names = [each[\"name\"] for each in new_params]\n params = [param for param in params if (param[\"name\"] in new_param_names or param[\"name\"] in default_params)]\n for param in params:\n for _param in new_params:\n if param[\"name\"] == _param[\"name\"]:\n param[\"default\"] = _param[\"default\"]\n param[\"description\"] = _param[\"description\"]\n setattr(task, \"params\", params)\n for each in [\"env_vars\", \"customization\", \"cc_env_vars\"]:\n params = deepcopy(getattr(task, each))\n for key in list(params.keys()):\n if key not in loads(args.get(each)).keys() and key not in default_params:\n del params[key]\n for key, value in loads(args.get(each)).items():\n if key not in params or params[key] != value:\n params[key] = value\n setattr(task, each, params)\n\n if args.get(\"reporter\"):\n task.reporting = args[\"reporter\"]\n else:\n task.reporting = []\n\n if args.get(\"emails\"):\n task.emails = args[\"emails\"]\n else:\n task.emails = \"\"\n\n if args.get(\"parallel\"):\n task.parallel = args.get(\"parallel\")\n if args.get(\"region\"):\n task.region = args.get(\"region\")\n if args.get(\"git\"):\n task.git = loads(args.get(\"git\"))\n task.commit()\n return task.to_json([\"influx.port\", \"influx.host\", \"galloper_url\",\n \"influx.db\", \"comparison_db\", \"telegraf_db\",\n \"loki_host\", \"loki_port\", \"influx.username\", \"influx.password\"])\n\n def post(self, project_id, test_id):\n project = self.rpc.project_get_or_404(project_id=project_id)\n args = self.post_parser.parse_args(strict=False)\n if isinstance(test_id, int):\n _filter = and_(ApiTests.project_id == project.id, ApiTests.id == test_id)\n else:\n 
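# test_id is not numeric: look the test up by its UID instead of its primary key\n            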
_filter = and_(ApiTests.project_id == project.id, ApiTests.test_uid == test_id)\n task = ApiTests.query.filter(_filter).first()\n event = list()\n execution = True if args['type'] and args[\"type\"] == \"config\" else False\n event.append(task.configure_execution_json(output='cc',\n test_type=args.get(\"test_type\"),\n params=loads(args.get(\"params\", None)),\n env_vars=loads(args.get(\"env_vars\", None)),\n reporting=args.get(\"reporter\", None),\n customization=loads(args.get(\"customization\", None)),\n cc_env_vars=loads(args.get(\"cc_env_vars\", None)),\n parallel=args.get(\"parallel\", None),\n region=args.get(\"region\", \"default\"),\n execution=execution, emails=args.get(\"emails\", None)))\n if args['type'] and args[\"type\"] == \"config\":\n return event[0]\n for each in event:\n each[\"test_id\"] = task.test_uid\n\n test_data = get_backend_test_data(event[0])\n report = APIReport(name=test_data[\"test_name\"],\n project_id=project.id,\n environment=test_data[\"environment\"],\n type=test_data[\"type\"],\n end_time=\"\",\n start_time=test_data[\"start_time\"],\n failures=0,\n total=0,\n thresholds_missed=0,\n throughput=0,\n vusers=test_data[\"vusers\"],\n pct50=0,\n pct75=0,\n pct90=0,\n pct95=0,\n pct99=0,\n _max=0,\n _min=0,\n mean=0,\n duration=test_data[\"duration\"],\n build_id=test_data[\"build_id\"],\n lg_type=test_data[\"lg_type\"],\n onexx=0,\n twoxx=0,\n threexx=0,\n fourxx=0,\n fivexx=0,\n requests=\"\",\n test_uid=task.test_uid)\n report.insert()\n event[0][\"cc_env_vars\"][\"REPORT_ID\"] = str(report.id)\n event[0][\"cc_env_vars\"][\"build_id\"] = test_data[\"build_id\"]\n response = exec_test(project.id, event)\n response[\"redirect\"] = f'/task/{response[\"task_id\"]}/results'\n return response\n", "id": "4602243", "language": "Python", "matching_score": 5.110074520111084, "max_stars_count": 0, "path": "api/test.py" }, { "content": "from flask import request\n\nfrom ...shared.utils.restApi import RestResource\nfrom ...shared.utils.api_utils import build_req_parser, str2bool\n\nfrom .utils import run_task\nfrom ..models.tasks import Task\n\n\nclass TaskApi(RestResource):\n _get_rules = (\n dict(name=\"exec\", type=str2bool, default=False, location=\"args\"),\n )\n\n _put_rules = (\n dict(name=\"invoke_func\", type=str, location='form'),\n dict(name=\"region\", type=str, location='form'),\n dict(name=\"env_vars\", type=str, location='form')\n )\n\n def __init__(self):\n super().__init__()\n self.__init_req_parsers()\n\n def __init_req_parsers(self):\n self.get_parser = build_req_parser(rules=self._get_rules)\n self.put_parser = build_req_parser(rules=self._put_rules)\n\n def _get_task(self, project_id, task_id):\n return self.rpc.project_get_or_404(project_id=project_id), \\\n Task.query.filter_by(task_id=task_id).first()\n\n def get(self, project_id: int, task_id: str):\n args = self.get_parser.parse_args(strict=False)\n project, task = self._get_task(project_id, task_id)\n if args.get(\"exec\"):\n from flask import current_app\n return current_app.config[\"CONTEXT\"].rpc_manager.call.unsecret_key(\n value=task.to_json(), project_id=project_id)\n return task.to_json()\n\n def post(self, project_id: int, task_id: str):\n project, task = self._get_task(project_id, task_id)\n event = request.get_json()\n return run_task(project.id, event, task.task_id)\n\n def put(self, project_id: int, task_id: str):\n args = self.put_parser.parse_args(strict=False)\n project, task = self._get_task(project_id, task_id)\n task.task_handler = args.get(\"invoke_func\")\n task.region = 
args.get(\"region\")\n task.env_vars = args.get(\"env_vars\")\n task.commit()\n return task.to_json()\n\n def delete(self, project_id: int, task_id: str):\n project, task = self._get_task(project_id, task_id)\n task.delete()\n return {}\n", "id": "5619018", "language": "Python", "matching_score": 1.7817431688308716, "max_stars_count": 0, "path": "api/task.py" }, { "content": "# Copyright 2020\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport os\n\nimport yaml\nfrom flask import Flask, current_app\nfrom flask_session import Session\n\nfrom auth.config import Config\nfrom auth.utils import config\n\n\ndef read_config(): # Reading the config file\n settings_file = Config.CONFIG_FILENAME\n if not settings_file:\n current_app.logger.error(\"Settings file path not set. Please set CONFIG_FILENAME\")\n with open(settings_file, \"rb\") as file:\n settings_data = file.read()\n settings = yaml.load(os.path.expandvars(settings_data), Loader=yaml.SafeLoader)\n settings = config.config_substitution(settings, config.vault_secrets(settings))\n\n current_app.config[\"global\"] = settings[\"global\"]\n current_app.config[\"endpoints\"] = settings[\"endpoints\"]\n current_app.config[\"auth\"] = settings[\"auth\"]\n current_app.config[\"mappers\"] = settings[\"mappers\"]\n current_app.config[\"keys\"] = []\n for key in Config.AUTH_PROXIES:\n if key not in settings:\n continue\n current_app.config[key] = settings[key]\n\n\ndef seed_endpoints():\n from auth.drivers.root import bp\n current_app.register_blueprint(bp, url_prefix=current_app.config[\"endpoints\"][\"root\"])\n if \"oidc\" in current_app.config:\n from auth.drivers.oidc import bp\n current_app.register_blueprint(bp, url_prefix=current_app.config[\"endpoints\"][\"oidc\"])\n\n\ndef create_app():\n app_session = Session()\n app = Flask(__name__)\n\n # Application Configuration\n app.config.from_object(Config)\n\n # Initialize Plugins\n app_session.init_app(app)\n\n with app.app_context():\n read_config()\n seed_endpoints()\n return app\n\n\ndef main():\n create_app().run(host=Config.APP_HOST, port=Config.APP_PORT, debug=True)\n\n\nif __name__ == \"__main__\":\n main()\n", "id": "3525892", "language": "Python", "matching_score": 3.5108094215393066, "max_stars_count": 0, "path": "auth/app.py" }, { "content": "# Copyright 2020\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom os import environ\n\nimport redis\n\n\nclass Config:\n \"\"\"Set Flask configuration vars from .env file.\"\"\"\n\n # General Config\n APP_HOST = \"0.0.0.0\"\n APP_PORT 
= \"80\"\n CONFIG_FILENAME = environ.get(\"CONFIG_FILENAME\", None)\n AUTH_PROXIES = (\"oidc\", \"root\")\n SECRET_KEY = b\"_5#y2L\\\"F4Q8z\\n\\xec]/\"\n SESSION_COOKIE_NAME = \"auth\"\n\n # Redis client\n REDIS_USER = environ.get(\"REDIS_USER\", \"\")\n REDIS_PASSWORD = environ.get(\"REDIS_PASSWORD\", \"password\")\n REDIS_HOST = environ.get(\"REDIS_HOST\", \"localhost\")\n REDIS_PORT = int(environ.get(\"REDIS_PORT\", 6379))\n REDIS_DB = int(environ.get(\"REDIS_DB\", 3))\n\n # Flask-Session\n SESSION_TYPE = environ.get(\"SESSION_TYPE\", \"redis\")\n SESSION_REDIS = redis.from_url(\n environ.get(\"SESSION_REDIS\", f\"redis://{REDIS_USER}:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}\")\n )\n", "id": "12380045", "language": "Python", "matching_score": 1.216139316558838, "max_stars_count": 0, "path": "auth/config.py" }, { "content": "# Copyright 2020\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport importlib\nfrom json import dumps, loads\nfrom base64 import b64decode\nfrom time import time\n\nfrom flask import current_app, session, request, redirect, make_response, Blueprint\n\nfrom auth.drivers.oidc import _validate_basic_auth, _validate_token_auth\nfrom auth.utils.redis_client import RedisClient\n\nbp = Blueprint(\"root\", __name__)\n\n\ndef handle_auth(auth_header: str):\n redis_client = RedisClient()\n if redis_client.check_auth_token(auth_header=auth_header):\n return make_response(\"OK\", 200)\n try:\n auth_key, auth_value = auth_header.strip().split(\" \")\n except ValueError:\n return make_response(\"KO\", 401)\n else:\n if auth_key.lower() == \"basic\":\n username, password = b64decode(auth_value.strip()).decode().split(\":\", 1)\n _, auth_data = _validate_basic_auth(username, password)\n if _:\n redis_client.set_auth_token(auth_header=auth_header, value=dumps(auth_data))\n return make_response(\"OK\", 200)\n elif auth_key.lower() == \"bearer\":\n _, auth_data = _validate_token_auth(auth_value)\n if _:\n redis_client.set_auth_token(auth_header=auth_header, value=dumps(auth_data))\n return make_response(\"OK\", 200)\n return make_response(\"KO\", 401)\n\n\n@bp.route(\"/auth\")\ndef auth():\n if \"X-Forwarded-Uri\" in request.headers:\n if request.headers[\"X-Forwarded-Uri\"].startswith(\"/static\") and \\\n any(request.headers[\"X-Forwarded-Uri\"].endswith(res) for res in [\".ico\", \".js\", \".css\"]):\n return make_response(\"OK\")\n # Check if need to login\n target = request.args.get(\"target\")\n scope = request.args.get(\"scope\")\n for header in (\"X-Forwarded-Proto\", \"X-Forwarded-Host\", \"X-Forwarded-Port\", \"X-Forwarded-Uri\"):\n if header in request.headers:\n session[header] = request.headers[header]\n if \"Authorization\" in request.headers:\n return handle_auth(auth_header=request.headers.get(\"Authorization\", \"\"))\n if \"X-Forwarded-Uri\" in request.headers and \"/api/v1\" in \"X-Forwarded-Uri\":\n if \"Referer\" in request.headers and \"/api/v1\" not in \"Referer\":\n session[\"X-Forwarded-Uri\"] = request.headers[\"Referer\"]\n else:\n 
session[\"X-Forwarded-Uri\"] = request.base_url\n if not session.get(\"auth_attributes\") or session[\"auth_attributes\"][\"exp\"] < int(time()):\n return redirect(current_app.config[\"auth\"][\"login_handler\"], 302)\n if not session.get(\"auth\", False) and not current_app.config[\"global\"][\"disable_auth\"]:\n # Redirect to login\n return redirect(current_app.config[\"auth\"].get(\"auth_redirect\",\n f\"{request.base_url}{request.script_root}/login\"))\n if target is None:\n target = \"raw\"\n # Map auth response\n response = make_response(\"OK\")\n try:\n mapper = importlib.import_module(f\"auth.mappers.{target}\")\n response = mapper.auth(scope, response)\n except (ImportError, AttributeError, TypeError):\n from traceback import format_exc\n current_app.logger.error(f\"Failed to map auth data {format_exc()}\")\n except NameError:\n return redirect(current_app.config[\"auth\"][\"login_default_redirect_url\"])\n return response\n\n\ndef me_from_token(auth_header: str):\n redis_client = RedisClient()\n try:\n res = redis_client.get_auth_token(auth_header=auth_header)\n res = loads(res)\n except TypeError:\n redis_client.clear_auth_token(auth_header=auth_header)\n handle_auth(auth_header=auth_header)\n res = loads(redis_client.get_auth_token(auth_header=auth_header))\n return res\n\n\n@bp.route('/me', methods=[\"GET\"])\ndef me():\n res = {}\n if isinstance(session.get(\"auth_attributes\"), dict):\n res = {\n \"username\": session.get(\"auth_attributes\")['preferred_username'],\n \"groups\": session.get(\"auth_attributes\")['groups']\n }\n if not res and \"Authorization\" in request.headers:\n res = me_from_token(auth_header=request.headers.get(\"Authorization\", \"\"))\n return make_response(dumps(res), 200)\n\n\n@bp.route(\"/token\")\ndef token():\n return redirect(current_app.config[\"auth\"][\"token_handler\"], 302)\n\n\n@bp.route(\"/login\")\ndef login():\n return redirect(current_app.config[\"auth\"][\"login_handler\"], 302)\n\n\n@bp.route(\"/logout\")\ndef logout():\n to = request.args.get(\"to\")\n return redirect(current_app.config[\"auth\"][\"logout_handler\"] + (f\"?to={to}\" if to is not None else \"\"))\n", "id": "2522555", "language": "Python", "matching_score": 4.426016807556152, "max_stars_count": 0, "path": "auth/drivers/root.py" }, { "content": "#!/usr/bin/python3\n# coding=utf-8\n\n# Copyright 2021 getcarrier.io\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\" Module \"\"\"\nfrom queue import Empty\nfrom time import time\n\nimport flask # pylint: disable=E0401\nimport jinja2 # pylint: disable=E0401\nfrom flask import request, make_response, session, redirect, Response\n\nfrom pylon.core.tools import log # pylint: disable=E0611,E0401\nfrom pylon.core.tools import module # pylint: disable=E0611,E0401\n\nfrom .utils.decorators import push_kwargs\nfrom .utils.token_manager import check_auth_token, clear_auth_token, get_auth_token, check_auth\n\n\nclass Module(module.ModuleModel):\n \"\"\" Pylon module \"\"\"\n\n def __init__(self, context, descriptor):\n 
self.context = context\n        self.descriptor = descriptor\n        self.rpc_prefix = None\n        #\n        self.settings = self.descriptor.config\n\n    def init(self):\n        \"\"\" Init module \"\"\"\n        log.info('Initializing module auth_root')\n\n        self.rpc_prefix = self.settings['rpc_manager']['prefix']['root']\n        bp = self.descriptor.make_blueprint(\n            url_prefix=f'/{self.settings[\"endpoints\"][\"root\"]}'\n        )\n        bp.add_url_rule('/auth', 'auth', self.auth)\n        bp.add_url_rule('/me', 'me', self.me, methods=['GET'])\n        bp.add_url_rule('/token', 'token', self.token)\n        bp.add_url_rule('/login', 'login', self.login)\n        bp.add_url_rule('/logout', 'logout', self.logout)\n\n        # Register in app\n        self.context.app.register_blueprint(bp)\n\n        # rpc_manager\n        self.context.rpc_manager.register_function(\n            push_kwargs(\n                rpc_manager=self.context.rpc_manager,\n                rpc_prefix=self.rpc_prefix,\n                rpc_timeout=int(self.settings['rpc_manager']['timeout'])\n            )(check_auth),\n            name=f'{self.rpc_prefix}check_auth'\n        )\n\n    def deinit(self): # pylint: disable=R0201\n        \"\"\" De-init module \"\"\"\n        log.info('De-initializing module auth_root')\n\n    def auth(self):\n        if \"X-Forwarded-Uri\" in request.headers:\n            if request.headers[\"X-Forwarded-Uri\"].startswith(\"/static\") and \\\n                    any(request.headers[\"X-Forwarded-Uri\"].endswith(res) for res in [\".ico\", \".js\", \".css\"]):\n                return make_response(\"OK\")\n\n        # Check if need to login\n        target = request.args.get(\"target\")\n        scope = request.args.get(\"scope\")\n        for header in (\"X-Forwarded-Proto\", \"X-Forwarded-Host\", \"X-Forwarded-Port\", \"X-Forwarded-Uri\"):\n            if header in request.headers:\n                session[header] = request.headers[header]\n        if \"Authorization\" in request.headers:\n            return self.handle_auth(auth_header=request.headers.get(\"Authorization\", \"\"))\n        if \"X-Forwarded-Uri\" in request.headers and \"/api/v1\" in request.headers[\"X-Forwarded-Uri\"]:\n            if \"Referer\" in request.headers and \"/api/v1\" not in request.headers[\"Referer\"]:\n                session[\"X-Forwarded-Uri\"] = request.headers[\"Referer\"]\n            else:\n                session[\"X-Forwarded-Uri\"] = request.base_url\n        if not session.get(\"auth_attributes\") or session[\"auth_attributes\"][\"exp\"] < int(time()):\n            return redirect(self.settings[\"login_handler_url\"], 302)\n        if not session.get(\"auth\", False) and not self.settings[\"disable_auth\"]:\n            # Redirect to login\n            return redirect(self.settings.get(\n                \"auth_redirect\",\n                f\"{request.base_url}{request.script_root}/login\")\n            )\n        if target is None:\n            target = \"raw\"\n\n        # Map auth response\n        response = make_response(\"OK\")\n        try:\n            self.context.rpc_manager.call_function_with_timeout(\n                func='{prefix}{key}'.format(\n                    prefix=self.settings['rpc_manager']['prefix']['mappers'],\n                    key=target.lower()\n                ),\n                timeout=int(self.settings['rpc_manager']['timeout']),\n                response=response,\n                scope=scope\n            )\n        except Empty:\n            log.error(f'Cannot find mapper for auth_key {target}')\n            return make_response(\"KO\", 403)\n        except (AttributeError, TypeError):\n            from traceback import format_exc\n            log.error(f\"Failed to map auth data {format_exc()}\")\n        except NameError:\n            return redirect(self.settings[\"login_default_redirect_url\"])\n        return response\n\n    def me(self):\n        if isinstance(session.get(\"auth_attributes\"), dict):\n            return flask.jsonify(\n                {\n                    \"username\": session.get(\"auth_attributes\")['preferred_username'],\n                    \"groups\": session.get(\"auth_attributes\")['groups']\n                }\n            )\n        if \"Authorization\" in request.headers:\n            auth_header = request.headers.get(\"Authorization\", \"\")\n            if not check_auth_token(auth_header):\n                clear_auth_token()\n                
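# cached token is missing or stale: re-run auth so get_auth_token() below returns fresh data\n                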
self.handle_auth(auth_header)\n return flask.jsonify(get_auth_token())\n\n def token(self):\n return redirect(self.settings[\"token_handler_url\"], 302)\n\n def login(self):\n return redirect(self.settings[\"login_handler_url\"], 302)\n\n def logout(self):\n to = request.args.get(\"to\")\n return redirect(\n self.settings[\"logout_handler_url\"] + (f\"?to={to}\" if to else \"\"))\n\n def handle_auth(self, auth_header) -> Response:\n return check_auth(\n auth_header,\n rpc_manager=self.context.rpc_manager,\n rpc_prefix=self.rpc_prefix,\n rpc_timeout=int(self.settings['rpc_manager']['timeout'])\n )\n", "id": "2526761", "language": "Python", "matching_score": 3.090477705001831, "max_stars_count": 0, "path": "module.py" }, { "content": "#!/usr/bin/python3\n# coding=utf-8\n\n# Copyright 2022 getcarrier.io\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\" Module \"\"\"\n\nimport re\nimport uuid\nimport urllib\nimport base64\nimport datetime\n\nimport jwt # pylint: disable=E0401\nimport flask # pylint: disable=E0401\nimport sqlalchemy # pylint: disable=E0401\n\nfrom pylon.core.tools import log # pylint: disable=E0611,E0401\nfrom pylon.core.tools import web # pylint: disable=E0611,E0401\nfrom pylon.core.tools import module # pylint: disable=E0611,E0401\n\nfrom pylon.core.tools.context import Context as Holder # pylint: disable=E0611,E0401\n\nfrom .db import db_migrations\nfrom .db import db_tools\nfrom .tools import rpc_tools\n\n\nclass Module(module.ModuleModel):\n \"\"\" Pylon module \"\"\"\n\n def __init__(self, context, descriptor):\n self.context = context\n self.descriptor = descriptor\n # Registry\n self.auth_providers = dict() # name -> {login_route/url, logout_route/url}\n self.auth_processors = list() # [rpc_endpoint_name]\n self.credential_handlers = dict() # type -> rpc_endpoint\n self.success_mappers = dict() # target -> rpc_endpoint\n self.info_mappers = dict() # target -> rpc_endpoint\n self.public_rules = list() # [rule]\n # DB\n self.db = Holder()\n self.db.tbl = Holder()\n self.db.url = self.descriptor.config.get(\"db_url\", None)\n self.db.options = self.descriptor.config.get(\"db_options\", dict())\n # RPCs\n self._rpcs = [\n [self._noop_success_mapper, \"auth_noop_success_mapper\"],\n [self._rpc_success_mapper, \"auth_rpc_success_mapper\"],\n [self._handle_bearer_token, \"auth_handle_bearer_token\"],\n [self._handle_basic_auth, \"auth_handle_basic_auth\"],\n #\n [\n self._get_referenced_auth_context,\n \"auth_get_referenced_auth_context\"\n ],\n [self._get_session_cookie_name, \"auth_get_session_cookie_name\"],\n #\n [self._register_auth_provider, \"auth_register_auth_provider\"],\n [self._unregister_auth_provider, \"auth_unregister_auth_provider\"],\n #\n [self._register_auth_processor, \"auth_register_auth_processor\"],\n [self._unregister_auth_processor, \"auth_unregister_auth_processor\"],\n #\n [\n self._register_credential_handler,\n \"auth_register_credential_handler\"\n ],\n [\n self._unregister_credential_handler,\n \"auth_unregister_credential_handler\"\n 
],\n #\n [self._register_success_mapper, \"auth_register_success_mapper\"],\n [self._unregister_success_mapper, \"auth_unregister_success_mapper\"],\n #\n [self._register_info_mapper, \"auth_register_info_mapper\"],\n [self._unregister_info_mapper, \"auth_unregister_info_mapper\"],\n #\n [self._add_public_rule, \"auth_add_public_rule\"],\n [self._remove_public_rule, \"auth_remove_public_rule\"],\n #\n [self._add_user, \"auth_add_user\"],\n [self._delete_user, \"auth_delete_user\"],\n [self._get_user, \"auth_get_user\"],\n [self._list_users, \"auth_list_users\"],\n #\n [self._add_user_provider, \"auth_add_user_provider\"],\n [self._remove_user_provider, \"auth_remove_user_provider\"],\n [self._get_user_from_provider, \"auth_get_user_from_provider\"],\n [self._list_user_providers, \"auth_list_user_providers\"],\n #\n [self._add_group, \"auth_add_group\"],\n [self._delete_group, \"auth_delete_group\"],\n [self._get_group, \"auth_get_group\"],\n [self._list_groups, \"auth_list_groups\"],\n [self._walk_group_tree, \"auth_walk_group_tree\"],\n #\n [self._add_group_provider, \"auth_add_group_provider\"],\n [self._remove_group_provider, \"auth_remove_group_provider\"],\n [self._get_group_from_provider, \"auth_get_group_from_provider\"],\n [self._list_group_providers, \"auth_list_group_providers\"],\n #\n [self._add_user_group, \"auth_add_user_group\"],\n [self._remove_user_group, \"auth_remove_user_group\"],\n [self._get_user_group_ids, \"auth_get_user_group_ids\"],\n [self._get_user_groups, \"auth_get_user_groups\"],\n [self._list_user_groups, \"auth_list_user_groups\"],\n #\n [self._add_scope, \"auth_add_scope\"],\n [self._delete_scope, \"auth_delete_scope\"],\n [self._get_scope, \"auth_get_scope\"],\n [self._list_scopes, \"auth_list_scopes\"],\n [self._walk_scope_tree, \"auth_walk_scope_tree\"],\n #\n [self._add_group_permission, \"auth_add_group_permission\"],\n [self._remove_group_permission, \"auth_remove_group_permission\"],\n [self._get_group_permissions, \"auth_get_group_permissions\"],\n [self._list_group_permissions, \"auth_list_group_permissions\"],\n #\n [self._add_user_permission, \"auth_add_user_permission\"],\n [self._remove_user_permission, \"auth_remove_user_permission\"],\n [self._get_user_permissions, \"auth_get_user_permissions\"],\n [self._list_user_permissions, \"auth_list_user_permissions\"],\n #\n [self._add_token, \"auth_add_token\"],\n [self._delete_token, \"auth_delete_token\"],\n [self._get_token, \"auth_get_token\"],\n [self._list_tokens, \"auth_list_tokens\"],\n [self._encode_token, \"auth_encode_token\"],\n [self._decode_token, \"auth_decode_token\"],\n #\n [self._add_token_permission, \"auth_add_token_permission\"],\n [self._remove_token_permission, \"auth_remove_token_permission\"],\n [self._get_token_permissions, \"auth_get_token_permissions\"],\n [self._list_token_permissions, \"auth_list_token_permissions\"],\n [self._resolve_token_permissions, \"auth_resolve_token_permissions\"],\n #\n [self._ping, \"auth_ping\"],\n ]\n\n #\n # Module\n #\n\n def init(self):\n \"\"\" Init module \"\"\"\n log.info(\"Initializing module\")\n # Run DB migrations\n db_migrations.run_db_migrations(self, self.db.url)\n # Connect to DB\n module_name = self.descriptor.name\n #\n self.db.engine = sqlalchemy.create_engine(\n self.db.url, **self.db.options\n )\n self.db.metadata = sqlalchemy.MetaData()\n #\n self.db.tbl.user = sqlalchemy.Table(\n f\"{module_name}__user\", self.db.metadata,\n autoload_with=self.db.engine,\n )\n self.db.tbl.user_provider = sqlalchemy.Table(\n 
f\"{module_name}__user_provider\", self.db.metadata,\n autoload_with=self.db.engine,\n )\n self.db.tbl.group = sqlalchemy.Table(\n f\"{module_name}__group\", self.db.metadata,\n autoload_with=self.db.engine,\n )\n self.db.tbl.group_provider = sqlalchemy.Table(\n f\"{module_name}__group_provider\", self.db.metadata,\n autoload_with=self.db.engine,\n )\n self.db.tbl.user_group = sqlalchemy.Table(\n f\"{module_name}__user_group\", self.db.metadata,\n autoload_with=self.db.engine,\n )\n self.db.tbl.scope = sqlalchemy.Table(\n f\"{module_name}__scope\", self.db.metadata,\n autoload_with=self.db.engine,\n )\n self.db.tbl.user_permission = sqlalchemy.Table(\n f\"{module_name}__user_permission\", self.db.metadata,\n autoload_with=self.db.engine,\n )\n self.db.tbl.group_permission = sqlalchemy.Table(\n f\"{module_name}__group_permission\", self.db.metadata,\n autoload_with=self.db.engine,\n )\n self.db.tbl.token = sqlalchemy.Table(\n f\"{module_name}__token\", self.db.metadata,\n autoload_with=self.db.engine,\n )\n self.db.tbl.token_permission = sqlalchemy.Table(\n f\"{module_name}__token_permission\", self.db.metadata,\n autoload_with=self.db.engine,\n )\n # Init Blueprint\n self.descriptor.init_blueprint(\n url_prefix=\"/\",\n static_url_prefix=\"/\",\n )\n # Hooks\n self.context.app.errorhandler(Exception)(self._error_handler)\n self.context.app.after_request(self._after_request_hook)\n # Init RPCs\n for rpc_item in self._rpcs:\n self.context.rpc_manager.register_function(*rpc_item)\n # Register no-op success mapper\n self.context.rpc_manager.call.auth_register_success_mapper(\n None, \"auth_noop_success_mapper\"\n )\n # Register RPC success mapper\n self.context.rpc_manager.call.auth_register_success_mapper(\n \"rpc\", \"auth_rpc_success_mapper\"\n )\n # Register bearer token handler\n self.context.rpc_manager.call.auth_register_credential_handler(\n \"bearer\", \"auth_handle_bearer_token\"\n )\n # Register basic auth handler\n self.context.rpc_manager.call.auth_register_credential_handler(\n \"basic\", \"auth_handle_basic_auth\"\n )\n # Register auth tool\n self.descriptor.register_tool(\"auth\", self)\n\n def deinit(self): # pylint: disable=R0201\n \"\"\" De-init module \"\"\"\n log.info(\"De-initializing module\")\n # Unregister auth tool\n self.descriptor.unregister_tool(\"auth\")\n # Unregister basic token handler\n self.context.rpc_manager.call.auth_unregister_credential_handler(\n \"basic\"\n )\n # Unregister bearer token handler\n self.context.rpc_manager.call.auth_unregister_credential_handler(\n \"bearer\"\n )\n # Unregister RPC success mapper\n self.context.rpc_manager.call.auth_unregister_success_mapper(\n \"rpc\"\n )\n # Unregister no-op success mapper\n self.context.rpc_manager.call.auth_unregister_success_mapper(\n None\n )\n # De-init RPCs\n for rpc_item in self._rpcs:\n self.context.rpc_manager.unregister_function(*rpc_item)\n # De-init DB\n self.db.engine.dispose()\n\n #\n # Hooks\n #\n\n def _error_handler(self, error):\n log.error(\"Error: %s\", error)\n return self.access_denied_reply(), 400\n\n def _after_request_hook(self, response):\n additional_headers = self.descriptor.config.get(\n \"additional_headers\", dict()\n )\n for key, value in additional_headers.items():\n response.headers[key] = value\n return response\n\n #\n # Auth, login, logout, info\n #\n\n @web.route(\"/auth\")\n def auth(self): # pylint: disable=R0201\n \"\"\" Traefik ForwardAuth endpoint \"\"\"\n # Check if we got request from Traefik\n traefik_headers = [\n \"X-Forwarded-Method\",\n 
\"X-Forwarded-Proto\",\n \"X-Forwarded-Host\",\n \"X-Forwarded-Uri\",\n \"X-Forwarded-For\",\n ]\n for header in traefik_headers:\n if header not in flask.request.headers:\n # Not a traefik request\n return self.access_denied_reply()\n # Get source request data\n source = {\n \"method\": flask.request.headers.get(\"X-Forwarded-Method\"),\n \"proto\": flask.request.headers.get(\"X-Forwarded-Proto\"),\n \"host\": flask.request.headers.get(\"X-Forwarded-Host\"),\n \"uri\": flask.request.headers.get(\"X-Forwarded-Uri\"),\n \"ip\": flask.request.headers.get(\"X-Forwarded-For\"),\n #\n \"target\": flask.request.args.get(\"target\", None),\n \"scope\": flask.request.args.get(\"scope\", None),\n }\n # Check auth header\n if \"Authorization\" in flask.request.headers:\n auth_header = flask.request.headers.get(\"Authorization\")\n if \" \" not in auth_header:\n # Invalid auth header\n return self.access_denied_reply(source)\n #\n credential_type, credential_data = auth_header.split(\" \", 1)\n credential_type = credential_type.lower()\n #\n if credential_type not in self.credential_handlers:\n # No credential handler\n return self.access_denied_reply(source)\n #\n try:\n auth_type, auth_id, auth_reference = \\\n self.credential_handlers[credential_type](\n source, credential_data\n )\n except:\n # Bad credential\n return self.access_denied_reply(source)\n #\n return self.access_success_reply(\n source, auth_type, auth_id, auth_reference\n )\n # Browser auth\n auth_ctx = self.get_auth_context()\n if auth_ctx[\"done\"] and \\\n (\n auth_ctx[\"expiration\"] is None or\n datetime.datetime.now() < auth_ctx[\"expiration\"]\n ):\n # Auth done\n return self.access_success_reply(\n source,\n auth_type=\"user\",\n auth_id=str(auth_ctx[\"user_id\"]) \\\n if auth_ctx[\"user_id\"] is not None else \"-\",\n auth_reference=flask.request.cookies.get(\n self.context.app.session_cookie_name, \"-\"\n ),\n )\n # Check public rules\n for rule in self.public_rules:\n if self._public_rule_matches(rule, source):\n # Public request\n return self.access_success_reply(source, \"public\")\n # Auth needed or expired\n self.set_auth_context(dict())\n target_token = self.sign_target_url(self.make_source_url(source))\n return self.access_needed_redirect(target_token)\n\n @web.route(\"/login\")\n def login(self): # pylint: disable=R0201\n \"\"\" Login endpoint \"\"\"\n self.set_auth_context(dict())\n target_token = flask.request.args.get(\n \"target_to\",\n self.sign_target_url(\n self.descriptor.config.get(\"default_login_url\", \"/\")\n )\n )\n return self.access_needed_redirect(target_token)\n\n @web.route(\"/logout\")\n def logout(self): # pylint: disable=R0201\n \"\"\" Logout endpoint \"\"\"\n target_token = flask.request.args.get(\n \"target_to\",\n self.sign_target_url(\n self.descriptor.config.get(\"default_logout_url\", \"/\")\n )\n )\n return self.logout_needed_redirect(target_token)\n\n @web.route(\"/info\")\n def info(self): # pylint: disable=R0201\n \"\"\" Info endpoint \"\"\"\n target = flask.request.args.get(\"target\", None)\n scope = flask.request.args.get(\"scope\", None)\n #\n if target not in self.info_mappers:\n return self.access_denied_reply()\n #\n auth_ctx = self.get_auth_context()\n #\n try:\n mimetype, data = self.info_mappers[target](auth_ctx, scope)\n except:\n return self.access_denied_reply()\n #\n response = flask.make_response(data)\n response.mimetype = mimetype\n return response\n\n #\n # Tools\n #\n\n @staticmethod\n def make_source_url(source):\n \"\"\" Make original URL from source \"\"\"\n 
proto = source.get(\"proto\")\n host = source.get(\"host\")\n uri = source.get(\"uri\")\n return f\"{proto}://{host}{uri}\"\n\n def sign_target_url(self, url):\n \"\"\" Sign and encode URL in JWT token \"\"\"\n return jwt.encode(\n {\"url\": url},\n self.context.app.secret_key,\n algorithm=\"HS256\",\n )\n\n def verify_target_url(self, url_token):\n \"\"\" Verify and decode URL from JWT token \"\"\"\n try:\n url_data = jwt.decode(\n url_token, self.context.app.secret_key, algorithms=[\"HS256\"]\n )\n except:\n raise ValueError(\"Invalid URL token\")\n #\n return url_data[\"url\"]\n\n def get_auth_context(self, session=None):\n \"\"\" Get current auth context from session \"\"\"\n if session is None:\n session = flask.session\n #\n return {\n \"done\": session.get(\"auth_done\", False),\n \"error\": session.get(\"auth_error\", \"\"),\n \"expiration\": session.get(\"auth_expiration\", None),\n \"provider\": session.get(\"auth_provider\", None),\n \"provider_attr\": session.get(\"auth_provider_attr\", dict()),\n \"user_id\": session.get(\"auth_user_id\", None),\n }\n\n def set_auth_context(self, auth_context):\n \"\"\" Save current auth context in session \"\"\"\n flask.session[\"auth_done\"] = auth_context.get(\"done\", False)\n flask.session[\"auth_error\"] = auth_context.get(\"error\", \"\")\n flask.session[\"auth_expiration\"] = auth_context.get(\"expiration\", None)\n flask.session[\"auth_provider\"] = auth_context.get(\"provider\", None)\n flask.session[\"auth_provider_attr\"] = auth_context.get(\n \"provider_attr\", dict()\n )\n flask.session[\"auth_user_id\"] = auth_context.get(\"user_id\", None)\n\n def access_denied_reply(self, source=None):\n \"\"\" Traefik/client: bad auth reply/redirect \"\"\"\n # Check public rules\n if source is not None:\n for rule in self.public_rules:\n if self._public_rule_matches(rule, source):\n # Public request\n return self.access_success_reply(source, \"public\")\n #\n if \"auth_denied_url\" in self.descriptor.config:\n return flask.redirect(self.descriptor.config.get(\"auth_denied_url\"))\n return flask.make_response(\"Access Denied\", 403)\n\n def access_success_reply(\n self, source,\n auth_type, auth_id=\"-\", auth_reference=\"-\",\n ):\n \"\"\" Traefik: auth OK reply \"\"\"\n auth_target = source[\"target\"]\n if auth_target not in self.success_mappers:\n return self.access_denied_reply()\n #\n try:\n auth_allow, auth_headers = self.success_mappers[auth_target](\n source, auth_type, auth_id, auth_reference\n )\n except:\n auth_allow = False\n #\n if not auth_allow:\n return self.access_denied_reply()\n #\n response = flask.make_response(\"OK\")\n for key, value in auth_headers.items():\n response.headers[key] = str(value)\n return response\n\n def access_needed_redirect(self, target_token):\n \"\"\" Client: auth redirect \"\"\"\n target_provider = self.descriptor.config.get(\"auth_provider\", None)\n if target_provider not in self.auth_providers:\n return self.access_denied_reply()\n target_info = self.auth_providers[target_provider]\n #\n if target_info[\"login_route\"] is not None:\n try:\n return flask.redirect(\n flask.url_for(\n target_info[\"login_route\"],\n target_to=target_token,\n )\n )\n except:\n return self.access_denied_reply()\n #\n if target_info[\"login_url\"] is not None:\n try:\n url_params = urllib.parse.urlencode({\"target_to\": target_token})\n return flask.redirect(\n f'{target_info[\"login_url\"]}?{url_params}'\n )\n except:\n return self.access_denied_reply()\n #\n return self.access_denied_reply()\n\n def 
access_success_redirect(self, target_token):\n \"\"\" Client: auth OK redirect \"\"\"\n auth_ctx = self.get_auth_context()\n #\n for processor_endpoint in self.auth_processors:\n processor_rpc = getattr(\n self.context.rpc_manager.call, processor_endpoint\n )\n #\n try:\n auth_ctx = processor_rpc(auth_ctx)\n except:\n log.exception(\"Processor failed\")\n return self.access_denied_reply()\n #\n flask.session.regenerate()\n self.set_auth_context(auth_ctx)\n #\n try:\n target_url = self.verify_target_url(target_token)\n except:\n target_url = self.descriptor.config.get(\"default_login_url\", \"/\")\n #\n return flask.redirect(target_url)\n\n def logout_needed_redirect(self, target_token):\n \"\"\" Client: logout redirect \"\"\"\n target_provider = self.descriptor.config.get(\"auth_provider\", None)\n if target_provider not in self.auth_providers:\n return self.access_denied_reply()\n target_info = self.auth_providers[target_provider]\n #\n if target_info[\"logout_route\"] is not None:\n try:\n return flask.redirect(\n flask.url_for(\n target_info[\"logout_route\"],\n target_to=target_token,\n )\n )\n except:\n return self.access_denied_reply()\n #\n if target_info[\"logout_url\"] is not None:\n try:\n url_params = urllib.parse.urlencode({\"target_to\": target_token})\n return flask.redirect(\n f'{target_info[\"logout_url\"]}?{url_params}'\n )\n except:\n return self.access_denied_reply()\n #\n return self.access_denied_reply()\n\n def logout_success_redirect(self, target_token):\n \"\"\" Client: logout OK redirect \"\"\"\n flask.session.destroy()\n flask.session.regenerate()\n self.set_auth_context(dict())\n try:\n target_url = self.verify_target_url(target_token)\n except:\n target_url = self.descriptor.config.get(\"default_logout_url\", \"/\")\n #\n return flask.redirect(target_url)\n\n @staticmethod\n def _public_rule_matches(rule, source):\n for key, obj in rule.items():\n if not obj.fullmatch(source[key]):\n return False\n #\n return True\n\n #\n # RPC: No-op success mapper\n #\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _noop_success_mapper(self, source, auth_type, auth_id, auth_reference):\n return True, dict()\n\n #\n # RPC: RPC success mapper\n #\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _rpc_success_mapper(self, source, auth_type, auth_id, auth_reference):\n headers = dict()\n #\n headers[\"X-Auth-Type\"] = str(auth_type)\n headers[\"X-Auth-ID\"] = str(auth_id)\n headers[\"X-Auth-Reference\"] = str(auth_reference)\n #\n return True, headers\n\n #\n # RPC: Bearer token handler\n #\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _handle_bearer_token(self, source, token_data):\n try:\n token = self._decode_token(token_data)\n except:\n raise ValueError(\"Bad token\")\n #\n if token[\"expires\"] is not None and \\\n datetime.datetime.now() >= token[\"expires\"]:\n raise ValueError(\"Token expired\")\n #\n return \"token\", token[\"id\"], \"-\"\n\n #\n # RPC: Basic auth handler\n #\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _handle_basic_auth(self, source, auth_data):\n try:\n token_data, _ = base64.b64decode(auth_data).decode().split(\":\", 1)\n except:\n raise ValueError(\"Bad auth data\")\n #\n return self._handle_bearer_token(source, token_data)\n\n #\n # RPC: referenced auth context\n #\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _get_referenced_auth_context(self, auth_reference):\n request = Holder()\n request.cookies = {\n self.context.app.session_cookie_name: auth_reference\n }\n #\n with self.context.app.app_context():\n session = 
self.context.app.session_interface.open_session(\n self.context.app, request,\n )\n #\n return self.get_auth_context(session)\n\n #\n # RPC: session cookie name\n #\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _get_session_cookie_name(self):\n return self.context.app.session_cookie_name\n\n #\n # RPC: auth providers\n #\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _register_auth_provider(\n self, name,\n login_route=None, login_url=None,\n logout_route=None, logout_url=None\n ):\n if name in self.auth_providers:\n raise ValueError(\n \"Provider is already registered: %s\", name\n )\n #\n self.auth_providers[name] = {\n \"login_route\": login_route,\n \"login_url\": login_url,\n \"logout_route\": logout_route,\n \"logout_url\": logout_url,\n }\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _unregister_auth_provider(self, name):\n if name not in self.auth_providers:\n raise ValueError(\n \"Provider is not registered: %s\", name\n )\n #\n self.auth_providers.pop(name)\n\n #\n # RPC: auth processors\n #\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _register_auth_processor(self, rpc_endpoint_name):\n if rpc_endpoint_name in self.auth_processors:\n raise ValueError(\n \"Processor is already registered: %s\", rpc_endpoint_name\n )\n #\n self.auth_processors.append(rpc_endpoint_name)\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _unregister_auth_processor(self, rpc_endpoint_name):\n if rpc_endpoint_name not in self.auth_processors:\n raise ValueError(\n \"Processor is not registered: %s\", rpc_endpoint_name\n )\n #\n self.auth_processors.remove(rpc_endpoint_name)\n\n #\n # RPC: credential handlers\n #\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _register_credential_handler(self, credential_type, rpc_endpoint):\n if credential_type in self.credential_handlers:\n raise ValueError(\n \"Credential type is already registered: %s\", credential_type\n )\n #\n self.credential_handlers[credential_type] = getattr(\n self.context.rpc_manager.call, rpc_endpoint\n )\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _unregister_credential_handler(self, credential_type):\n if credential_type not in self.credential_handlers:\n raise ValueError(\n \"Credential type is not registered: %s\", credential_type\n )\n #\n self.credential_handlers.pop(credential_type)\n\n #\n # RPC: success mappers\n #\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _register_success_mapper(self, target, rpc_endpoint):\n if target in self.success_mappers:\n raise ValueError(\n \"Target is already registered: %s\", target\n )\n #\n self.success_mappers[target] = getattr(\n self.context.rpc_manager.call, rpc_endpoint\n )\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _unregister_success_mapper(self, target):\n if target not in self.success_mappers:\n raise ValueError(\n \"Target is not registered: %s\", target\n )\n #\n self.success_mappers.pop(target)\n\n #\n # RPC: info mappers\n #\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _register_info_mapper(self, target, rpc_endpoint):\n if target in self.info_mappers:\n raise ValueError(\n \"Target is already registered: %s\", target\n )\n #\n self.info_mappers[target] = getattr(\n self.context.rpc_manager.call, rpc_endpoint\n )\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _unregister_info_mapper(self, target):\n if target not in self.info_mappers:\n raise ValueError(\n \"Target is not registered: %s\", target\n )\n #\n self.info_mappers.pop(target)\n\n #\n # RPC: public rules\n #\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def 
_add_public_rule(self, rule):\n rule_obj = dict()\n for key, regex in rule.items():\n rule_obj[key] = re.compile(regex)\n #\n if rule_obj not in self.public_rules:\n self.public_rules.append(rule_obj)\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _remove_public_rule(self, rule):\n rule_obj = dict()\n for key, regex in rule.items():\n rule_obj[key] = re.compile(regex)\n #\n while rule_obj in self.public_rules:\n self.public_rules.remove(rule_obj)\n\n #\n # RPC: users\n #\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _add_user(self, email=\"\", name=\"\", id=...):\n values = {\n \"email\": email,\n \"name\": name,\n }\n #\n if id is not ...:\n values[\"id\"] = id\n #\n with self.db.engine.connect() as connection:\n return connection.execute(\n self.db.tbl.user.insert().values(**values)\n ).inserted_primary_key[0]\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _delete_user(self, id):\n with self.db.engine.connect() as connection:\n return connection.execute(\n self.db.tbl.user.delete().where(\n self.db.tbl.user.c.id == id\n )\n ).rowcount\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _get_user(self, id=None, email=None, name=None):\n if id is not None:\n with self.db.engine.connect() as connection:\n user = connection.execute(\n self.db.tbl.user.select().where(\n self.db.tbl.user.c.id == id,\n )\n ).mappings().one()\n return db_tools.sqlalchemy_mapping_to_dict(user)\n #\n if email is not None:\n with self.db.engine.connect() as connection:\n user = connection.execute(\n self.db.tbl.user.select().where(\n self.db.tbl.user.c.email == email,\n )\n ).mappings().one()\n return db_tools.sqlalchemy_mapping_to_dict(user)\n #\n if name is not None:\n with self.db.engine.connect() as connection:\n user = connection.execute(\n self.db.tbl.user.select().where(\n self.db.tbl.user.c.name == name,\n )\n ).mappings().one()\n return db_tools.sqlalchemy_mapping_to_dict(user)\n #\n raise ValueError(\"ID or name is not provided\")\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _list_users(self):\n with self.db.engine.connect() as connection:\n users = connection.execute(\n self.db.tbl.user.select()\n ).mappings().all()\n #\n return [db_tools.sqlalchemy_mapping_to_dict(item) for item in users]\n\n #\n # RPC: user provider\n #\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _add_user_provider(self, user_id, provider_ref):\n with self.db.engine.connect() as connection:\n return connection.execute(\n self.db.tbl.user_provider.insert().values(\n user_id=user_id,\n provider_ref=provider_ref,\n )\n ).inserted_primary_key[0]\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _remove_user_provider(self, user_id, provider_ref):\n with self.db.engine.connect() as connection:\n return connection.execute(\n self.db.tbl.user_provider.delete().where(\n self.db.tbl.user_provider.c.user_id == user_id,\n self.db.tbl.user_provider.c.provider_ref == provider_ref,\n )\n ).rowcount\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _get_user_from_provider(self, provider_ref):\n with self.db.engine.connect() as connection:\n user_provider = connection.execute(\n self.db.tbl.user_provider.select().where(\n self.db.tbl.user_provider.c.provider_ref == provider_ref,\n )\n ).mappings().one()\n #\n return self._get_user(id=user_provider[\"user_id\"])\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _list_user_providers(self, user_id=None):\n with self.db.engine.connect() as connection:\n if user_id is not None:\n providers = connection.execute(\n self.db.tbl.user_provider.select().where(\n 
self.db.tbl.user_provider.c.user_id == user_id,\n )\n ).mappings().all()\n else:\n providers = connection.execute(\n self.db.tbl.user_provider.select()\n ).mappings().all()\n #\n return [\n db_tools.sqlalchemy_mapping_to_dict(item) for item in providers\n ]\n\n #\n # RPC: groups\n #\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _add_group(self, name=\"\", parent_id=None, id=...):\n values = {\n \"name\": name,\n \"parent_id\": parent_id,\n }\n #\n if id is not ...:\n values[\"id\"] = id\n #\n with self.db.engine.connect() as connection:\n return connection.execute(\n self.db.tbl.group.insert().values(**values)\n ).inserted_primary_key[0]\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _delete_group(self, id):\n with self.db.engine.connect() as connection:\n return connection.execute(\n self.db.tbl.group.delete().where(\n self.db.tbl.group.c.id == id\n )\n ).rowcount\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _get_group(self, id):\n with self.db.engine.connect() as connection:\n group = connection.execute(\n self.db.tbl.group.select().where(\n self.db.tbl.group.c.id == id,\n )\n ).mappings().one()\n return db_tools.sqlalchemy_mapping_to_dict(group)\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _list_groups(self):\n with self.db.engine.connect() as connection:\n groups = connection.execute(\n self.db.tbl.group.select()\n ).mappings().all()\n #\n return [db_tools.sqlalchemy_mapping_to_dict(item) for item in groups]\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _walk_group_tree(self, id):\n groups = self._list_groups()\n group_map = {item[\"id\"]:item for item in groups}\n #\n result = list()\n #\n current_id = id\n while True:\n if current_id not in group_map:\n break\n #\n item = group_map[current_id]\n result.append(item)\n #\n if item[\"parent_id\"] is None:\n break\n #\n current_id = item[\"parent_id\"]\n #\n return result\n\n #\n # RPC: group provider\n #\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _add_group_provider(self, group_id, provider_ref):\n with self.db.engine.connect() as connection:\n return connection.execute(\n self.db.tbl.group_provider.insert().values(\n group_id=group_id,\n provider_ref=provider_ref,\n )\n ).inserted_primary_key[0]\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _remove_group_provider(self, group_id, provider_ref):\n with self.db.engine.connect() as connection:\n return connection.execute(\n self.db.tbl.group_provider.delete().where(\n self.db.tbl.group_provider.c.group_id == group_id,\n self.db.tbl.group_provider.c.provider_ref == provider_ref,\n )\n ).rowcount\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _get_group_from_provider(self, provider_ref):\n with self.db.engine.connect() as connection:\n group_provider = connection.execute(\n self.db.tbl.group_provider.select().where(\n self.db.tbl.group_provider.c.provider_ref == provider_ref,\n )\n ).mappings().one()\n #\n return self._get_group(id=group_provider[\"group_id\"])\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _list_group_providers(self, group_id=None):\n with self.db.engine.connect() as connection:\n if group_id is not None:\n providers = connection.execute(\n self.db.tbl.group_provider.select().where(\n self.db.tbl.group_provider.c.group_id == group_id,\n )\n ).mappings().all()\n else:\n providers = connection.execute(\n self.db.tbl.group_provider.select()\n ).mappings().all()\n #\n return [\n db_tools.sqlalchemy_mapping_to_dict(item) for item in providers\n ]\n\n #\n # RPC: user groups\n #\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def 
_add_user_group(self, user_id, group_id):\n with self.db.engine.connect() as connection:\n return connection.execute(\n self.db.tbl.user_group.insert().values(\n user_id=user_id,\n group_id=group_id,\n )\n ).inserted_primary_key[0]\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _remove_user_group(self, user_id, group_id):\n with self.db.engine.connect() as connection:\n return connection.execute(\n self.db.tbl.user_group.delete().where(\n self.db.tbl.user_group.c.user_id == user_id,\n self.db.tbl.user_group.c.group_id == group_id,\n )\n ).rowcount\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _get_user_group_ids(self, user_id):\n with self.db.engine.connect() as connection:\n user_groups = connection.execute(\n self.db.tbl.user_group.select().where(\n self.db.tbl.user_group.c.user_id == user_id,\n )\n ).mappings().all()\n #\n return [\n item[\"group_id\"] for item in user_groups\n ]\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _get_user_groups(self, user_id):\n return [\n self._get_group(group_id)\n for group_id in self._get_user_group_ids(user_id)\n ]\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _list_user_groups(self, user_id=None):\n with self.db.engine.connect() as connection:\n if user_id is not None:\n user_groups = connection.execute(\n self.db.tbl.user_group.select().where(\n self.db.tbl.user_group.c.user_id == user_id,\n )\n ).mappings().all()\n else:\n user_groups = connection.execute(\n self.db.tbl.user_group.select()\n ).mappings().all()\n #\n return [\n db_tools.sqlalchemy_mapping_to_dict(item) for item in user_groups\n ]\n\n #\n # RPC: scopes\n #\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _add_scope(self, name=\"\", parent_id=None, id=...):\n values = {\n \"name\": name,\n \"parent_id\": parent_id,\n }\n #\n if id is not ...:\n values[\"id\"] = id\n #\n with self.db.engine.connect() as connection:\n return connection.execute(\n self.db.tbl.scope.insert().values(**values)\n ).inserted_primary_key[0]\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _delete_scope(self, id):\n with self.db.engine.connect() as connection:\n return connection.execute(\n self.db.tbl.scope.delete().where(\n self.db.tbl.scope.c.id == id\n )\n ).rowcount\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _get_scope(self, id):\n with self.db.engine.connect() as connection:\n scope = connection.execute(\n self.db.tbl.scope.select().where(\n self.db.tbl.scope.c.id == id,\n )\n ).mappings().one()\n return db_tools.sqlalchemy_mapping_to_dict(scope)\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _list_scopes(self):\n with self.db.engine.connect() as connection:\n scopes = connection.execute(\n self.db.tbl.scope.select()\n ).mappings().all()\n #\n return [db_tools.sqlalchemy_mapping_to_dict(item) for item in scopes]\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _walk_scope_tree(self, id):\n scopes = self._list_scopes()\n scope_map = {item[\"id\"]:item for item in scopes}\n #\n result = list()\n #\n current_id = id\n while True:\n if current_id not in scope_map:\n break\n #\n item = scope_map[current_id]\n result.append(item)\n #\n if item[\"parent_id\"] is None:\n break\n #\n current_id = item[\"parent_id\"]\n #\n return result\n\n #\n # RPC: group permission\n #\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _add_group_permission(self, group_id, scope_id, permission):\n with self.db.engine.connect() as connection:\n return connection.execute(\n self.db.tbl.group_permission.insert().values(\n group_id=group_id,\n scope_id=scope_id,\n permission=permission,\n )\n 
).inserted_primary_key[0]\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _remove_group_permission(self, group_id, scope_id, permission):\n with self.db.engine.connect() as connection:\n return connection.execute(\n self.db.tbl.group_permission.delete().where(\n self.db.tbl.group_permission.c.group_id == group_id,\n self.db.tbl.group_permission.c.scope_id == scope_id,\n self.db.tbl.group_permission.c.permission == permission,\n )\n ).rowcount\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _get_group_permissions(self, group_id, scope_id):\n group_scopes = [\n scope[\"id\"] for scope in self._walk_scope_tree(scope_id)\n ]\n #\n with self.db.engine.connect() as connection:\n data = connection.execute(\n self.db.tbl.group_permission.select().where(\n self.db.tbl.group_permission.c.group_id == group_id,\n self.db.tbl.group_permission.c.scope_id.in_(group_scopes),\n )\n ).mappings().all()\n #\n result = list(set([item[\"permission\"] for item in data]))\n result.sort()\n #\n return result\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _list_group_permissions(self, group_id=None):\n with self.db.engine.connect() as connection:\n if group_id is not None:\n permissions = connection.execute(\n self.db.tbl.group_permission.select().where(\n self.db.tbl.group_permission.c.group_id == group_id,\n )\n ).mappings().all()\n else:\n permissions = connection.execute(\n self.db.tbl.group_permission.select()\n ).mappings().all()\n #\n return [\n db_tools.sqlalchemy_mapping_to_dict(item) for item in permissions\n ]\n\n #\n # RPC: user permission\n #\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _add_user_permission(self, user_id, scope_id, permission):\n with self.db.engine.connect() as connection:\n return connection.execute(\n self.db.tbl.user_permission.insert().values(\n user_id=user_id,\n scope_id=scope_id,\n permission=permission,\n )\n ).inserted_primary_key[0]\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _remove_user_permission(self, user_id, scope_id, permission):\n with self.db.engine.connect() as connection:\n return connection.execute(\n self.db.tbl.user_permission.delete().where(\n self.db.tbl.user_permission.c.user_id == user_id,\n self.db.tbl.user_permission.c.scope_id == scope_id,\n self.db.tbl.user_permission.c.permission == permission,\n )\n ).rowcount\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _get_user_permissions(self, user_id, scope_id):\n user_scopes = [scope[\"id\"] for scope in self._walk_scope_tree(scope_id)]\n #\n with self.db.engine.connect() as connection:\n data = connection.execute(\n self.db.tbl.user_permission.select().where(\n self.db.tbl.user_permission.c.user_id == user_id,\n self.db.tbl.user_permission.c.scope_id.in_(user_scopes),\n )\n ).mappings().all()\n #\n result = set([item[\"permission\"] for item in data])\n #\n user_group_ids = self._get_user_group_ids(user_id)\n for group_id in user_group_ids:\n group_permissions = set(\n self._get_group_permissions(group_id, scope_id)\n )\n result |= group_permissions\n #\n result = list(result)\n result.sort()\n #\n return result\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _list_user_permissions(self, user_id=None):\n with self.db.engine.connect() as connection:\n if user_id is not None:\n permissions = connection.execute(\n self.db.tbl.user_permission.select().where(\n self.db.tbl.user_permission.c.user_id == user_id,\n )\n ).mappings().all()\n else:\n permissions = connection.execute(\n self.db.tbl.user_permission.select()\n ).mappings().all()\n #\n return [\n 
db_tools.sqlalchemy_mapping_to_dict(item) for item in permissions\n        ]\n\n    #\n    # RPC: tokens\n    #\n\n    @rpc_tools.wrap_exceptions(RuntimeError)\n    def _add_token(self, user_id, name=\"\", expires=None, id=...):\n        token_uuid = str(uuid.uuid4())\n        #\n        values = {\n            \"uuid\": token_uuid,\n            \"user_id\": user_id,\n            \"expires\": expires,\n            \"name\": name,\n        }\n        #\n        if id is not ...:\n            values[\"id\"] = id\n        #\n        with self.db.engine.connect() as connection:\n            return connection.execute(\n                self.db.tbl.token.insert().values(**values)\n            ).inserted_primary_key[0]\n\n    @rpc_tools.wrap_exceptions(RuntimeError)\n    def _delete_token(self, id):\n        with self.db.engine.connect() as connection:\n            return connection.execute(\n                self.db.tbl.token.delete().where(\n                    self.db.tbl.token.c.id == id\n                )\n            ).rowcount\n\n    @rpc_tools.wrap_exceptions(RuntimeError)\n    def _get_token(self, id=None, uuid=None):\n        if id is not None:\n            with self.db.engine.connect() as connection:\n                token = connection.execute(\n                    self.db.tbl.token.select().where(\n                        self.db.tbl.token.c.id == id,\n                    )\n                ).mappings().one()\n            return db_tools.sqlalchemy_mapping_to_dict(token)\n        #\n        if uuid is not None:\n            with self.db.engine.connect() as connection:\n                token = connection.execute(\n                    self.db.tbl.token.select().where(\n                        self.db.tbl.token.c.uuid == uuid,\n                    )\n                ).mappings().one()\n            return db_tools.sqlalchemy_mapping_to_dict(token)\n        #\n        raise ValueError(\"ID or UUID is not provided\")\n\n    @rpc_tools.wrap_exceptions(RuntimeError)\n    def _list_tokens(self, user_id=None):\n        with self.db.engine.connect() as connection:\n            if user_id is not None:\n                tokens = connection.execute(\n                    self.db.tbl.token.select().where(\n                        self.db.tbl.token.c.user_id == user_id,\n                    )\n                ).mappings().all()\n            else:\n                tokens = connection.execute(\n                    self.db.tbl.token.select()\n                ).mappings().all()\n        #\n        return [\n            db_tools.sqlalchemy_mapping_to_dict(item) for item in tokens\n        ]\n\n    @rpc_tools.wrap_exceptions(RuntimeError)\n    def _encode_token(self, id=None, uuid=None):\n        if id is not None:\n            token = self._get_token(id)\n            token_uuid = token[\"uuid\"]\n        elif uuid is not None:\n            token_uuid = uuid\n        else:\n            raise ValueError(\"ID or UUID is not provided\")\n        #\n        return jwt.encode(\n            {\"uuid\": token_uuid},\n            self.context.app.secret_key,\n            algorithm=\"HS512\",\n        )\n\n    @rpc_tools.wrap_exceptions(RuntimeError)\n    def _decode_token(self, token):\n        try:\n            token_data = jwt.decode(\n                token, self.context.app.secret_key, algorithms=[\"HS512\"]\n            )\n        except:\n            raise ValueError(\"Invalid token\")\n        #\n        return self._get_token(uuid=token_data[\"uuid\"])\n\n    #\n    # RPC: token permissions\n    #\n\n    @rpc_tools.wrap_exceptions(RuntimeError)\n    def _add_token_permission(self, token_id, scope_id, permission):\n        with self.db.engine.connect() as connection:\n            return connection.execute(\n                self.db.tbl.token_permission.insert().values(\n                    token_id=token_id,\n                    scope_id=scope_id,\n                    permission=permission,\n                )\n            ).inserted_primary_key[0]\n\n    @rpc_tools.wrap_exceptions(RuntimeError)\n    def _remove_token_permission(self, token_id, scope_id, permission):\n        with self.db.engine.connect() as connection:\n            return connection.execute(\n                self.db.tbl.token_permission.delete().where(\n                    self.db.tbl.token_permission.c.token_id == token_id,\n                    self.db.tbl.token_permission.c.scope_id == scope_id,\n                    self.db.tbl.token_permission.c.permission == permission,\n                )\n            ).rowcount\n\n    @rpc_tools.wrap_exceptions(RuntimeError)\n    def _get_token_permissions(self, token_id, scope_id):\n        token = self._get_token(token_id)\n        token_scopes = [\n            scope[\"id\"] for scope in 
self._walk_scope_tree(scope_id)\n ]\n #\n with self.db.engine.connect() as connection:\n data = connection.execute(\n self.db.tbl.token_permission.select().where(\n self.db.tbl.token_permission.c.token_id == token_id,\n self.db.tbl.token_permission.c.scope_id.in_(token_scopes),\n )\n ).mappings().all()\n #\n user_permissions = set(self._get_user_permissions(\n token[\"user_id\"], scope_id\n ))\n #\n result = set([item[\"permission\"] for item in data]) & user_permissions\n result = list(result)\n result.sort()\n #\n return result\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _list_token_permissions(self, token_id=None):\n with self.db.engine.connect() as connection:\n if token_id is not None:\n permissions = connection.execute(\n self.db.tbl.token_permission.select().where(\n self.db.tbl.token_permission.c.token_id == token_id,\n )\n ).mappings().all()\n else:\n permissions = connection.execute(\n self.db.tbl.token_permission.select()\n ).mappings().all()\n #\n return [\n db_tools.sqlalchemy_mapping_to_dict(item) for item in permissions\n ]\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _resolve_token_permissions(self, token, scope_id):\n token = self._decode_token(token)\n return self._get_token_permissions(token[\"id\"], scope_id)\n\n #\n # RPC: ping\n #\n\n @rpc_tools.wrap_exceptions(RuntimeError)\n def _ping(self):\n _ = self\n return True\n", "id": "10737223", "language": "Python", "matching_score": 2.9871418476104736, "max_stars_count": 0, "path": "module.py" }, { "content": "#!/usr/bin/python3\n# coding=utf-8\n\n# Copyright 2022 getcarrier.io\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\" DB migration \"\"\"\n\nrevision = \"202202021633\"\ndown_revision = None\nbranch_labels = None\n\nfrom alembic import op\nimport sqlalchemy as sa\n\ndef upgrade(module, payload):\n module_name = module.descriptor.name\n #\n op.create_table(\n f\"{module_name}__user\",\n sa.Column(\"id\", sa.Integer, primary_key=True, index=True),\n sa.Column(\"email\", sa.Text, index=True, unique=True),\n sa.Column(\"name\", sa.Text, index=True),\n )\n #\n op.create_table(\n f\"{module_name}__user_provider\",\n sa.Column(\n \"user_id\", sa.Integer,\n sa.ForeignKey(\n f\"{module_name}__user.id\",\n onupdate=\"CASCADE\", ondelete=\"CASCADE\"\n ),\n primary_key=True, index=True,\n ),\n sa.Column(\n \"provider_ref\", sa.Text,\n primary_key=True, index=True, unique=True,\n ),\n )\n #\n op.create_table(\n f\"{module_name}__group\",\n sa.Column(\"id\", sa.Integer, primary_key=True, index=True),\n sa.Column(\n \"parent_id\", sa.Integer,\n sa.ForeignKey(\n f\"{module_name}__group.id\",\n onupdate=\"CASCADE\", ondelete=\"SET NULL\"\n ),\n ),\n sa.Column(\"name\", sa.Text, index=True),\n )\n #\n op.create_table(\n f\"{module_name}__group_provider\",\n sa.Column(\n \"group_id\", sa.Integer,\n sa.ForeignKey(\n f\"{module_name}__group.id\",\n onupdate=\"CASCADE\", ondelete=\"CASCADE\"\n ),\n primary_key=True, index=True,\n ),\n sa.Column(\n \"provider_ref\", sa.Text,\n primary_key=True, index=True, 
unique=True,\n ),\n )\n #\n op.create_table(\n f\"{module_name}__user_group\",\n sa.Column(\n \"user_id\", sa.Integer,\n sa.ForeignKey(\n f\"{module_name}__user.id\",\n onupdate=\"CASCADE\", ondelete=\"CASCADE\"\n ),\n primary_key=True, index=True,\n ),\n sa.Column(\n \"group_id\", sa.Integer,\n sa.ForeignKey(\n f\"{module_name}__group.id\",\n onupdate=\"CASCADE\", ondelete=\"CASCADE\"\n ),\n primary_key=True, index=True,\n ),\n )\n #\n scope_table = op.create_table(\n f\"{module_name}__scope\",\n sa.Column(\"id\", sa.Integer, primary_key=True, index=True),\n sa.Column(\n \"parent_id\", sa.Integer,\n sa.ForeignKey(\n f\"{module_name}__scope.id\",\n onupdate=\"CASCADE\", ondelete=\"SET NULL\"\n ),\n ),\n sa.Column(\"name\", sa.Text, index=True),\n )\n #\n op.bulk_insert(\n scope_table,\n [\n {\"name\": \"Global\", \"parent_id\": None},\n ]\n )\n #\n op.create_table(\n f\"{module_name}__user_permission\",\n sa.Column(\n \"user_id\", sa.Integer,\n sa.ForeignKey(\n f\"{module_name}__user.id\",\n onupdate=\"CASCADE\", ondelete=\"CASCADE\"\n ),\n primary_key=True, index=True,\n ),\n sa.Column(\n \"scope_id\", sa.Integer,\n sa.ForeignKey(\n f\"{module_name}__scope.id\",\n onupdate=\"CASCADE\", ondelete=\"CASCADE\"\n ),\n primary_key=True, index=True,\n ),\n sa.Column(\"permission\", sa.Text, primary_key=True),\n )\n #\n op.create_table(\n f\"{module_name}__group_permission\",\n sa.Column(\n \"group_id\", sa.Integer,\n sa.ForeignKey(\n f\"{module_name}__group.id\",\n onupdate=\"CASCADE\", ondelete=\"CASCADE\"\n ),\n primary_key=True, index=True,\n ),\n sa.Column(\n \"scope_id\", sa.Integer,\n sa.ForeignKey(\n f\"{module_name}__scope.id\",\n onupdate=\"CASCADE\", ondelete=\"CASCADE\"\n ),\n primary_key=True, index=True,\n ),\n sa.Column(\"permission\", sa.Text, primary_key=True),\n )\n #\n op.create_table(\n f\"{module_name}__token\",\n sa.Column(\"id\", sa.Integer, primary_key=True, index=True),\n sa.Column(\n \"uuid\", sa.String(36),\n index=True, unique=True,\n ),\n sa.Column(\"expires\", sa.DateTime),\n sa.Column(\n \"user_id\", sa.Integer,\n sa.ForeignKey(\n f\"{module_name}__user.id\",\n onupdate=\"CASCADE\", ondelete=\"CASCADE\"\n ),\n index=True,\n ),\n sa.Column(\"name\", sa.Text),\n )\n #\n op.create_table(\n f\"{module_name}__token_permission\",\n sa.Column(\n \"token_id\", sa.Integer,\n sa.ForeignKey(\n f\"{module_name}__token.id\",\n onupdate=\"CASCADE\", ondelete=\"CASCADE\"\n ),\n primary_key=True, index=True,\n ),\n sa.Column(\n \"scope_id\", sa.Integer,\n sa.ForeignKey(\n f\"{module_name}__scope.id\",\n onupdate=\"CASCADE\", ondelete=\"CASCADE\"\n ),\n primary_key=True, index=True,\n ),\n sa.Column(\"permission\", sa.Text, primary_key=True),\n )\n\ndef downgrade(module, payload):\n module_name = module.descriptor.name\n #\n op.drop_table(f\"{module_name}__token_permission\")\n op.drop_table(f\"{module_name}__token\")\n op.drop_table(f\"{module_name}__group_permission\")\n op.drop_table(f\"{module_name}__user_permission\")\n op.drop_table(f\"{module_name}__scope\")\n op.drop_table(f\"{module_name}__user_group\")\n op.drop_table(f\"{module_name}__group_provider\")\n op.drop_table(f\"{module_name}__group\")\n op.drop_table(f\"{module_name}__user_provider\")\n op.drop_table(f\"{module_name}__user\")\n", "id": "11223913", "language": "Python", "matching_score": 1.5755902528762817, "max_stars_count": 0, "path": "db/migrations/202202021633_core.py" }, { "content": "#!/usr/bin/python3\n# coding=utf-8\n\n# Copyright 2022 getcarrier.io\n#\n# Licensed under the Apache License, Version 2.0 
(the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\" DB migrations \"\"\"\n\nimport sqlalchemy\nimport sqlalchemy.pool\n\nimport alembic # pylint: disable=E0401\nimport alembic.util # pylint: disable=E0401\nimport alembic.config # pylint: disable=E0401\nimport alembic.script # pylint: disable=E0401\nimport alembic.migration # pylint: disable=E0401\nimport alembic.runtime.environment # pylint: disable=E0401\n\nfrom pylon.core.tools import log # pylint: disable=E0611,E0401\n\n\ndef run_db_migrations( # pylint: disable=R0913\n module, db_url, payload=None,\n migrations_path=None, version_table=None,\n revision=\"head\",\n ):\n \"\"\" Perform DB migrations \"\"\"\n log.info(\n \"Running DB migrations for %s up to revision %s\",\n module.descriptor.name, revision,\n )\n #\n if migrations_path is None:\n migrations_path = f\"plugins.{module.descriptor.name}:db/migrations\"\n #\n if version_table is None:\n version_table = f\"db_version__{module.descriptor.name}\"\n #\n config = alembic.config.Config()\n script = alembic.script.ScriptDirectory(\n alembic.util.coerce_resource_to_filename(migrations_path),\n version_locations=[migrations_path],\n )\n #\n with alembic.runtime.environment.EnvironmentContext(\n config, script,\n fn=lambda rev, context: script._upgrade_revs(revision, rev), # pylint: disable=W0212\n ) as alembic_context:\n engine = sqlalchemy.create_engine(\n db_url,\n poolclass=sqlalchemy.pool.NullPool,\n )\n with engine.connect() as connection:\n alembic_context.configure(\n target_metadata=None,\n connection=connection,\n version_table=version_table,\n )\n with alembic_context.begin_transaction():\n alembic_context.run_migrations(module=module, payload=payload)\n\n\ndef get_db_revision(module, db_url, version_table=None):\n \"\"\" Get current DB revision \"\"\"\n #\n if version_table is None:\n version_table = f\"db_version__{module.descriptor.name}\"\n #\n engine = sqlalchemy.create_engine(\n db_url,\n poolclass=sqlalchemy.pool.NullPool,\n )\n with engine.connect() as connection:\n alembic_context = alembic.migration.MigrationContext.configure(\n connection,\n opts={\"version_table\": version_table},\n )\n #\n return alembic_context.get_current_revision()\n\n\ndef get_db_head(module, migrations_path=None):\n \"\"\" Get migrations head revision \"\"\"\n #\n if migrations_path is None:\n migrations_path = f\"plugins.{module.descriptor.name}:db/migrations\"\n #\n script = alembic.script.ScriptDirectory(\n alembic.util.coerce_resource_to_filename(migrations_path),\n version_locations=[migrations_path],\n )\n #\n return script.get_current_head()\n", "id": "10354712", "language": "Python", "matching_score": 1.3055150508880615, "max_stars_count": 0, "path": "db/db_migrations.py" }, { "content": "#!/usr/bin/python3\n# coding=utf-8\n\n# Copyright 2021 getcarrier.io\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or 
agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\" MetadataProvider \"\"\"\n\nimport requests # pylint: disable=E0401\n\nfrom . import MetadataProviderModel\n\n\nclass Provider(MetadataProviderModel):\n \"\"\" Provider model \"\"\"\n\n def __init__(self, context, settings):\n self.context = context\n self.settings = settings\n #\n self.username = self.settings.get(\"username\", None)\n self.password = self.settings.get(\"password\", None)\n self.verify = self.settings.get(\"verify\", True)\n\n def init(self):\n \"\"\" Initialize provider \"\"\"\n\n def deinit(self):\n \"\"\" De-initialize provider \"\"\"\n\n def get_metadata(self, target):\n \"\"\" Get plugin metadata \"\"\"\n #\n username = target.get(\"username\", self.username)\n password = target.get(\"password\", self.password)\n verify = target.get(\"verify\", self.verify)\n #\n auth = None\n if username is not None and password is not None:\n auth = (username, password)\n #\n response = requests.get(\n target.get(\"source\"),\n auth=auth,\n verify=verify,\n )\n #\n return response.json()\n\n def get_multiple_metadata(self, targets):\n \"\"\" Get plugins metadata \"\"\"\n result = list()\n #\n for target in targets:\n result.append(self.get_metadata(target))\n #\n return result\n", "id": "12447894", "language": "Python", "matching_score": 0.7997496128082275, "max_stars_count": 0, "path": "pylon/core/providers/metadata/http.py" }, { "content": "#!/usr/bin/python\n# coding=utf-8\n# pylint: disable=I0011\n\n# Copyright 2021 getcarrier.io\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"\n Server tools\n\"\"\"\n\nimport os\n\nimport socketio # pylint: disable=E0401\n\nfrom gevent.pywsgi import WSGIServer # pylint: disable=E0401,C0412\nfrom geventwebsocket.handler import WebSocketHandler # pylint: disable=E0401,C0412\n\nfrom werkzeug.middleware.dispatcher import DispatcherMiddleware # pylint: disable=E0401\nfrom werkzeug.middleware.proxy_fix import ProxyFix # pylint: disable=E0401\n\nfrom pylon.core import constants\nfrom pylon.core.tools import log\n\n\ndef add_url_prefix(context):\n \"\"\" Add global URL prefix to context \"\"\"\n context.url_prefix = context.settings.get(\"server\", dict()).get(\"path\", \"/\")\n while context.url_prefix.endswith(\"/\"):\n context.url_prefix = context.url_prefix[:-1]\n\n\ndef add_middlewares(context):\n \"\"\" Add needed middlewares \"\"\"\n if context.url_prefix:\n context.app.wsgi_app = DispatcherMiddleware(\n noop_app, {context.url_prefix: context.app.wsgi_app},\n )\n #\n if context.settings.get(\"server\", dict()).get(\"proxy\", False):\n context.app.wsgi_app = ProxyFix(\n context.app.wsgi_app, x_proto=1, x_host=1,\n )\n\n\ndef noop_app(environ, start_response):\n \"\"\" Dummy app that always returns 404 \"\"\"\n _ = environ\n #\n start_response(\"404 
Not Found\", [\n (\"Content-type\", \"text/plain\")\n ])\n #\n return [b\"Not Found\\n\"]\n\n\ndef create_socketio_instance(context):\n \"\"\" Create SocketIO instance \"\"\"\n client_manager = None\n #\n socketio_config = context.settings.get(\"socketio\", dict())\n socketio_rabbitmq = socketio_config.get(\"rabbitmq\", dict())\n #\n if socketio_rabbitmq:\n try:\n host = socketio_rabbitmq.get(\"host\")\n port = socketio_rabbitmq.get(\"port\", 5672)\n user = socketio_rabbitmq.get(\"user\", \"\")\n password = <PASSWORD>.get(\"password\", \"\")\n vhost = socketio_rabbitmq.get(\"vhost\", \"carrier\")\n queue = socketio_rabbitmq.get(\"queue\", \"socketio\")\n #\n url = f'ampq://{user}:{password}@{host}:{port}/{vhost}'\n client_manager = socketio.KombuManager(\n url=url, channel=queue,\n )\n except: # pylint: disable=W0702\n log.exception(\"Cannot make KombuManager instance, SocketIO is in standalone mode\")\n #\n if not context.debug:\n sio = socketio.Server(\n async_mode=\"gevent\",\n client_manager=client_manager,\n cors_allowed_origins=socketio_config.get(\"cors_allowed_origins\", \"*\"),\n )\n else:\n sio = socketio.Server(\n async_mode=\"threading\",\n client_manager=client_manager,\n cors_allowed_origins=socketio_config.get(\"cors_allowed_origins\", \"*\"),\n )\n #\n context.app.wsgi_app = socketio.WSGIApp(sio, context.app.wsgi_app)\n #\n return sio\n\n\ndef run_server(context):\n \"\"\" Run WSGI or Flask server \"\"\"\n if not context.debug:\n log.info(\"Starting WSGI server\")\n http_server = WSGIServer(\n (\n context.settings.get(\"server\", dict()).get(\"host\", constants.SERVER_DEFAULT_HOST),\n context.settings.get(\"server\", dict()).get(\"port\", constants.SERVER_DEFAULT_PORT)\n ),\n context.app,\n handler_class=WebSocketHandler,\n )\n http_server.serve_forever()\n else:\n log.info(\"Starting Flask server\")\n context.app.run(\n host=context.settings.get(\"server\", dict()).get(\"host\", constants.SERVER_DEFAULT_HOST),\n port=context.settings.get(\"server\", dict()).get(\"port\", constants.SERVER_DEFAULT_PORT),\n debug=context.debug,\n use_reloader=context.debug,\n reloader_type=context.settings.get(\"server\", dict()).get(\n \"reloader_type\", os.environ.get(\"CORE_RELOADER_TYPE\", \"auto\"),\n ),\n reloader_interval=context.settings.get(\"server\", dict()).get(\n \"reloader_interval\", int(os.environ.get(\"CORE_RELOADER_INTERVAL\", \"1\")),\n ),\n )\n", "id": "1143050", "language": "Python", "matching_score": 3.4748175144195557, "max_stars_count": 0, "path": "pylon/core/tools/server.py" }, { "content": "#!/usr/bin/python3\n# coding=utf-8\n# pylint: disable=C0411,C0413\n\n# Copyright 2020-2021 getcarrier.io\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"\n Project entry point\n\"\"\"\n\n#\n# Before all other imports and code: patch standard library and other libraries to use async I/O\n#\n\nimport os\n\nCORE_DEVELOPMENT_MODE = os.environ.get(\"CORE_DEVELOPMENT_MODE\", \"\").lower() in [\"true\", \"yes\"]\n\nif not CORE_DEVELOPMENT_MODE:\n import gevent.monkey # pylint: 
disable=E0401\n gevent.monkey.patch_all(thread=False, subprocess=False)\n #\n import psycogreen.gevent # pylint: disable=E0401\n psycogreen.gevent.patch_psycopg()\n\n#\n# Normal imports and code below\n#\n\nimport socket\nimport signal\n\nimport flask # pylint: disable=E0401\nimport flask_restful # pylint: disable=E0401\n\nfrom pylon.core.tools import log\nfrom pylon.core.tools import log_loki\nfrom pylon.core.tools import module\nfrom pylon.core.tools import event\nfrom pylon.core.tools import seed\nfrom pylon.core.tools import git\nfrom pylon.core.tools import rpc\nfrom pylon.core.tools import slot\nfrom pylon.core.tools import server\nfrom pylon.core.tools import session\nfrom pylon.core.tools import traefik\n\nfrom pylon.core.tools.signal import signal_sigterm\nfrom pylon.core.tools.context import Context\n\n\ndef main(): # pylint: disable=R0912,R0914,R0915\n \"\"\" Entry point \"\"\"\n # Register signal handling\n signal.signal(signal.SIGTERM, signal_sigterm)\n # Enable logging and say hello\n log.enable_logging()\n log.info(\"Starting plugin-based Carrier core\")\n # Make context holder\n context = Context()\n # Save debug status\n context.debug = CORE_DEVELOPMENT_MODE\n # Load settings from seed\n log.info(\"Loading and parsing settings\")\n context.settings = seed.load_settings()\n if not context.settings:\n log.error(\"Settings are empty or invalid. Exiting\")\n os._exit(1) # pylint: disable=W0212\n # Save global node name\n context.node_name = context.settings.get(\"server\", dict()).get(\"name\", socket.gethostname())\n # Enable Loki logging if requested in config\n log_loki.enable_loki_logging(context)\n # Make ModuleManager instance\n context.module_manager = module.ModuleManager(context)\n # Make EventManager instance\n context.event_manager = event.EventManager(context)\n # Add global URL prefix to context\n server.add_url_prefix(context)\n # Make app instance\n log.info(\"Creating Flask application\")\n context.app = flask.Flask(\"pylon\")\n # Make API instance\n log.info(\"Creating API instance\")\n context.api = flask_restful.Api(context.app, catch_all_404s=True)\n # Make SocketIO instance\n log.info(\"Creating SocketIO instance\")\n context.sio = server.create_socketio_instance(context)\n # Add dispatcher and proxy middlewares if needed\n server.add_middlewares(context)\n # Set application settings\n context.app.config[\"CONTEXT\"] = context\n context.app.config.from_mapping(context.settings.get(\"application\", dict()))\n # Enable server-side sessions\n session.init_flask_sessions(context)\n # Make RpcManager instance\n context.rpc_manager = rpc.RpcManager(context)\n # Make SlotManager instance\n context.slot_manager = slot.SlotManager(context)\n # Apply patches needed for pure-python git and providers\n git.apply_patches()\n # Load and initialize modules\n context.module_manager.init_modules()\n # Register Traefik route via Redis KV\n traefik.register_traefik_route(context)\n # Run WSGI server\n try:\n server.run_server(context)\n finally:\n log.info(\"WSGI server stopped\")\n # Unregister traefik route\n traefik.unregister_traefik_route(context)\n # De-init modules\n context.module_manager.deinit_modules()\n # Exit\n log.info(\"Exiting\")\n\n\nif __name__ == \"__main__\":\n # Call entry point\n main()\n", "id": "3753634", "language": "Python", "matching_score": 2.8120923042297363, "max_stars_count": 0, "path": "pylon/main.py" }, { "content": "#!/usr/bin/python\n# coding=utf-8\n# pylint: disable=I0011\n\n# Copyright 2021 getcarrier.io\n#\n# Licensed under the Apache 
License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"\n Traefik tools\n\"\"\"\n\nimport os\nimport socket\n\nfrom redis import StrictRedis # pylint: disable=E0401\n\nfrom pylon.core import constants\nfrom pylon.core.tools import log\n\n\ndef register_traefik_route(context):\n \"\"\" Create Traefik route for this Pylon instance \"\"\"\n context.traefik_redis_keys = list()\n #\n if context.debug and os.environ.get(\"WERKZEUG_RUN_MAIN\") != \"true\":\n log.info(\"Running in development mode before reloader is started. Skipping registration\")\n return\n #\n traefik_config = context.settings.get(\"traefik\", dict())\n if not traefik_config:\n log.error(\"Cannot register route: no traefik config\")\n return\n #\n redis_config = traefik_config.get(\"redis\", dict())\n if not redis_config:\n log.error(\"Cannot register route: no redis config\")\n return\n #\n local_hostname = socket.gethostname()\n local_port = context.settings.get(\"server\", dict()).get(\"port\", constants.SERVER_DEFAULT_PORT)\n #\n node_name = context.node_name\n #\n if \"node_url\" in traefik_config:\n node_url = traefik_config.get(\"node_url\")\n elif \"node_hostname\" in traefik_config:\n node_url = f\"http://{traefik_config.get('node_hostname')}:{local_port}\"\n else:\n node_url = f\"http://{local_hostname}:{local_port}\"\n #\n log.info(\"Registering traefik route for node '%s'\", node_name)\n #\n store = StrictRedis(\n host=redis_config.get(\"host\", \"localhost\"),\n password=redis_config.get(\"password\", None),\n )\n #\n traefik_rootkey = traefik_config.get(\"rootkey\", \"traefik\")\n traefik_rule = traefik_config.get(\n \"rule\", f\"PathPrefix(`{context.url_prefix if context.url_prefix else '/'}`)\"\n )\n traefik_entrypoint = traefik_config.get(\"entrypoint\", \"http\")\n #\n store.set(f\"{traefik_rootkey}/http/routers/{node_name}/rule\", traefik_rule)\n store.set(f\"{traefik_rootkey}/http/routers/{node_name}/entrypoints/0\", traefik_entrypoint)\n store.set(f\"{traefik_rootkey}/http/routers/{node_name}/service\", f\"{node_name}\")\n store.set(f\"{traefik_rootkey}/http/services/{node_name}/loadbalancer/servers/0/url\", node_url)\n #\n context.traefik_redis_keys.append(f\"{traefik_rootkey}/http/routers/{node_name}/rule\")\n context.traefik_redis_keys.append(f\"{traefik_rootkey}/http/routers/{node_name}/entrypoints/0\")\n context.traefik_redis_keys.append(f\"{traefik_rootkey}/http/routers/{node_name}/service\")\n context.traefik_redis_keys.append(\n f\"{traefik_rootkey}/http/services/{node_name}/loadbalancer/servers/0/url\"\n )\n\n\ndef unregister_traefik_route(context):\n \"\"\" Delete Traefik route for this Pylon instance \"\"\"\n #\n if context.debug and os.environ.get(\"WERKZEUG_RUN_MAIN\") != \"true\":\n log.info(\"Running in development mode before reloader is started. 
Skipping unregistration\")\n return\n #\n traefik_config = context.settings.get(\"traefik\", dict())\n if not traefik_config:\n log.error(\"Cannot unregister route: no traefik config\")\n return\n #\n redis_config = traefik_config.get(\"redis\", dict())\n if not redis_config:\n log.error(\"Cannot unregister route: no redis config\")\n return\n #\n log.info(\"Unregistering traefik route for node '%s'\", context.node_name)\n #\n store = StrictRedis(\n host=redis_config.get(\"host\", \"localhost\"),\n password=redis_config.get(\"password\", None),\n )\n #\n while context.traefik_redis_keys:\n key = context.traefik_redis_keys.pop()\n store.delete(key)\n", "id": "9579702", "language": "Python", "matching_score": 3.056481122970581, "max_stars_count": 0, "path": "pylon/core/tools/traefik.py" }, { "content": "#!/usr/bin/python\n# coding=utf-8\n# pylint: disable=I0011\n\n# Copyright 2021 getcarrier.io\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"\n Session tools\n\"\"\"\n\nfrom flask_kvsession import KVSessionExtension # pylint: disable=E0401\nfrom simplekv.decorator import PrefixDecorator # pylint: disable=E0401\nfrom simplekv.memory.redisstore import RedisStore # pylint: disable=E0401\nfrom simplekv.memory import DictStore # pylint: disable=E0401\nfrom redis import StrictRedis # pylint: disable=E0401\n\nfrom pylon.core.tools import log\n\n\ndef init_flask_sessions(context):\n \"\"\" Enable third-party server-side session storage \"\"\"\n redis_config = context.settings.get(\"sessions\", dict()).get(\"redis\", dict())\n #\n if redis_config:\n session_store = RedisStore(\n StrictRedis(\n host=redis_config.get(\"host\", \"localhost\"),\n password=redis_config.get(\"password\", None),\n )\n )\n session_prefix = context.settings.get(\"sessions\", dict()).get(\"prefix\", None)\n if session_prefix:\n session_store = PrefixDecorator(session_prefix, session_store)\n log.info(\"Using redis for session storage\")\n else:\n session_store = DictStore()\n log.info(\"Using memory for session storage\")\n #\n KVSessionExtension(session_store, context.app)\n", "id": "748855", "language": "Python", "matching_score": 0.7209673523902893, "max_stars_count": 0, "path": "pylon/core/tools/session.py" }, { "content": "#!/usr/bin/python\n# coding=utf-8\n# pylint: disable=I0011\n\n# Copyright 2021 getcarrier.io\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"\n Seed tools\n\"\"\"\n\nimport os\nimport importlib\n\nimport yaml # pylint: disable=E0401\n\nfrom pylon.core.tools import log\nfrom 
pylon.core.tools import config\n\n\ndef load_settings():\n \"\"\" Load settings from seed from env \"\"\"\n settings_data = None\n settings_seed = os.environ.get(\"CORE_CONFIG_SEED\", None)\n #\n if not settings_seed or \":\" not in settings_seed:\n return None\n #\n settings_seed_tag = settings_seed[:settings_seed.find(\":\")]\n settings_seed_data = settings_seed[len(settings_seed_tag) + 1:]\n try:\n seed = importlib.import_module(f\"pylon.core.seeds.{settings_seed_tag}\")\n settings_data = seed.unseed(settings_seed_data)\n except: # pylint: disable=W0702\n log.exception(\"Failed to unseed settings\")\n #\n if not settings_data:\n return None\n #\n try:\n settings = yaml.load(os.path.expandvars(settings_data), Loader=yaml.SafeLoader)\n settings = config.config_substitution(settings, config.vault_secrets(settings))\n except: # pylint: disable=W0702\n log.exception(\"Failed to parse settings\")\n return None\n #\n return settings\n", "id": "10409896", "language": "Python", "matching_score": 1.2456657886505127, "max_stars_count": 0, "path": "pylon/core/tools/seed.py" }, { "content": "#!/usr/bin/python3\n# coding=utf-8\n\n# Copyright 2022 getcarrier.io\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\" RPC tools \"\"\"\n\nimport functools\nimport traceback\n\n\ndef wrap_exceptions(target_exception):\n \"\"\" Wrap exceptions into generic exception (for RPC transport) \"\"\"\n #\n def _decorator(func):\n _target_exception = target_exception\n #\n @functools.wraps(func)\n def _decorated(*_args, **_kvargs):\n try:\n return func(*_args, **_kvargs)\n except BaseException as exception_data:\n if isinstance(exception_data, _target_exception):\n raise exception_data\n raise _target_exception(traceback.format_exc())\n #\n return _decorated\n #\n return _decorator\n", "id": "8600649", "language": "Python", "matching_score": 0.9055513739585876, "max_stars_count": 0, "path": "tools/rpc_tools.py" }, { "content": "#!/usr/bin/python\n# coding=utf-8\n# pylint: disable=I0011\n\n# Copyright 2021 getcarrier.io\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"\n Web tools\n\"\"\"\n\n# from pylon.core.tools import log\n\nroutes_registry = dict() # module -> [routes] # pylint: disable=C0103\n\n\ndef route(rule, **options):\n \"\"\" (Pre-)Register route \"\"\"\n #\n def _decorator(obj):\n module = \".\".join(obj.__module__.split(\".\")[:2])\n endpoint = options.pop(\"endpoint\", None)\n #\n if module not in routes_registry:\n routes_registry[module] = list()\n #\n route_item = (rule, endpoint, obj, 
options)\n        routes_registry[module].append(route_item)\n        #\n        return obj\n    #\n    return _decorator\n", "id": "1201515", "language": "Python", "matching_score": 0.4284748136997223, "max_stars_count": 0, "path": "pylon/core/tools/web.py" }, { "content": "#!/usr/bin/python\n# coding=utf-8\n# pylint: disable=I0011\n\n# Copyright 2021 getcarrier.io\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"\n    Git tools\n\"\"\"\n\nimport io\nimport os\nimport shutil\nimport getpass\n\nfrom dulwich import refs, repo, porcelain, client # pylint: disable=E0401\nfrom dulwich.contrib.paramiko_vendor import ParamikoSSHVendor # pylint: disable=E0401\n\nimport paramiko # pylint: disable=E0401\nimport paramiko.transport # pylint: disable=E0401\nfrom paramiko import SSHException, Message # pylint: disable=E0401\n\nfrom pylon.core.tools import log\n\n\ndef apply_patches():\n    \"\"\" Patch dulwich and paramiko \"\"\"\n    # Set USERNAME if needed\n    try:\n        getpass.getuser()\n    except: # pylint: disable=W0702\n        os.environ[\"USERNAME\"] = \"git\"\n    # Patch dulwich to work without valid UID/GID\n    repo._get_default_identity = patched_repo_get_default_identity(repo._get_default_identity) # pylint: disable=W0212\n    # Patch dulwich to use paramiko SSH client\n    client.get_ssh_vendor = ParamikoSSHVendor\n    # Patch paramiko to skip key verification\n    paramiko.transport.Transport._verify_key = patched_paramiko_transport_verify_key # pylint: disable=W0212\n    # Patch paramiko to support direct pkey usage\n    paramiko.client.SSHClient._auth = patched_paramiko_client_SSHClient_auth(paramiko.client.SSHClient._auth) # pylint: disable=C0301,W0212\n\n\ndef patched_repo_get_default_identity(original_repo_get_default_identity):\n    \"\"\" Allow to run without valid identity \"\"\"\n    def patched_function():\n        try:\n            return original_repo_get_default_identity()\n        except: # pylint: disable=W0702\n            return (\"Git User\", \"git@localhost\")\n    return patched_function\n\n\ndef patched_paramiko_transport_verify_key(self, host_key, sig): # pylint: disable=W0613\n    \"\"\" Only get key info, no deep verification \"\"\"\n    key = self._key_info[self.host_key_type](Message(host_key)) # pylint: disable=W0212\n    if key is None:\n        raise SSHException('Unknown host key type')\n    # Patched: no more checks are done here\n    self.host_key = key\n\n\ndef patched_paramiko_client_SSHClient_auth(original_auth): # pylint: disable=C0103\n    \"\"\" Allow to pass prepared pkey in key_filename(s) \"\"\"\n    def patched_function( # pylint: disable=R0913\n            self, username, password, pkey, key_filenames, allow_agent, look_for_keys, # pylint: disable=W0613\n            gss_auth, gss_kex, gss_deleg_creds, gss_host, passphrase,\n    ):\n        if isinstance(key_filenames, list) and len(key_filenames) == 1 and \\\n                isinstance(key_filenames[0], paramiko.RSAKey):\n            target_pkey = key_filenames[0]\n            target_key_filenames = list()\n            # Pass self through: original_auth is the unbound SSHClient._auth\n            return original_auth(\n                self, username, password, target_pkey, target_key_filenames, allow_agent,\n                look_for_keys, gss_auth, gss_kex, gss_deleg_creds, gss_host, passphrase,\n            )\n        return 
original_auth(\n            self, username, password, pkey, key_filenames, allow_agent, look_for_keys,\n            gss_auth, gss_kex, gss_deleg_creds, gss_host, passphrase,\n        )\n    return patched_function\n\n\ndef clone( # pylint: disable=R0913,R0912,R0914\n        source, target, branch=\"main\", depth=None, delete_git_dir=False,\n        username=None, password=None, key_filename=None, key_data=None,\n        track_branch_upstream=True,\n):\n    \"\"\" Clone repository \"\"\"\n    # Prepare auth args\n    auth_args = dict()\n    if username is not None:\n        auth_args[\"username\"] = username\n    if password is not None:\n        auth_args[\"password\"] = password\n    if key_filename is not None:\n        auth_args[\"key_filename\"] = key_filename\n    if key_data is not None:\n        key_obj = io.StringIO(key_data.replace(\"|\", \"\\n\"))\n        pkey = paramiko.RSAKey.from_private_key(key_obj)\n        auth_args[\"key_filename\"] = pkey\n    # Clone repository\n    log.info(\"Cloning repository %s into %s\", source, target)\n    repository = porcelain.clone(\n        source, target, checkout=False, depth=depth,\n        errstream=log.DebugLogStream(),\n        **auth_args\n    )\n    # Get current HEAD tree (default branch)\n    try:\n        head_tree = repository[b\"HEAD\"]\n    except: # pylint: disable=W0702\n        head_tree = None\n    # Get target tree (requested branch)\n    branch_b = branch.encode(\"utf-8\")\n    try:\n        target_tree = repository[b\"refs/remotes/origin/\" + branch_b]\n    except: # pylint: disable=W0702\n        target_tree = None\n    # Checkout branch\n    branch_to_track = None\n    if target_tree is not None:\n        log.info(\"Checking out branch %s\", branch)\n        repository[b\"refs/heads/\" + branch_b] = repository[b\"refs/remotes/origin/\" + branch_b]\n        repository.refs.set_symbolic_ref(b\"HEAD\", b\"refs/heads/\" + branch_b)\n        repository.reset_index(repository[b\"HEAD\"].tree)\n        #\n        branch_to_track = branch\n    elif head_tree is not None:\n        try:\n            default_branch_name = repository.refs.follow(b\"HEAD\")[0][1]\n            if default_branch_name.startswith(refs.LOCAL_BRANCH_PREFIX):\n                default_branch_name = default_branch_name[len(refs.LOCAL_BRANCH_PREFIX):]\n            default_branch_name = default_branch_name.decode(\"utf-8\")\n            #\n            log.warning(\n                \"Branch %s was not found. Checking out default branch %s\",\n                branch, default_branch_name\n            )\n            #\n            branch_to_track = default_branch_name\n        except: # pylint: disable=W0702\n            log.warning(\"Branch %s was not found. Trying to check out default branch\", branch)\n        #\n        try:\n            repository.reset_index(repository[b\"HEAD\"].tree)\n        except: # pylint: disable=W0702\n            log.exception(\"Failed to checkout default branch\")\n    else:\n        log.error(\"Branch %s was not found and default branch is not set. 
Skipping checkout\", branch)\n    # Add remote tracking\n    if track_branch_upstream and branch_to_track is not None:\n        log.info(\"Setting '%s' to track upstream branch\", branch_to_track)\n        #\n        branch_to_track_b = branch_to_track.encode(\"utf-8\")\n        #\n        config = repository.get_config()\n        config.set(\n            (b\"branch\", branch_to_track_b),\n            b\"remote\", b\"origin\",\n        )\n        config.set(\n            (b\"branch\", branch_to_track_b),\n            b\"merge\", b\"refs/heads/\" + branch_to_track_b,\n        )\n        config.write_to_path()\n    # Delete .git if requested\n    if delete_git_dir:\n        log.info(\"Deleting .git directory\")\n        shutil.rmtree(os.path.join(target, \".git\"))\n    # Return repo object\n    return repository\n", "id": "7336666", "language": "Python", "matching_score": 6.685742378234863, "max_stars_count": 0, "path": "pylon/core/tools/git.py" }, { "content": "import os\nimport io\nimport getpass\nimport dulwich\nfrom dulwich import porcelain\nfrom dulwich.contrib.paramiko_vendor import ParamikoSSHVendor\nimport paramiko.transport\nfrom paramiko.ssh_exception import SSHException\nfrom paramiko.message import Message\nimport zipfile\nfrom traceback import format_exc\nimport requests\n\n\ndef _dulwich_repo_get_default_identity():\n    try:\n        return dulwich.repo.__original__get_default_identity()\n    except:\n        return (\"Carrier User\", \"dusty@localhost\")\n\n\ndef _paramiko_transport_verify_key(self, host_key, sig):\n    key = self._key_info[self.host_key_type](Message(host_key))\n    if key is None:\n        raise SSHException('Unknown host key type')\n    self.host_key = key\n\n\ndef _paramiko_client_SSHClient_auth(original_auth, forced_pkey):\n    def __paramiko_client_SSHClient_auth(\n            self, username, password, pkey, key_filenames, allow_agent, look_for_keys,\n            gss_auth, gss_kex, gss_deleg_creds, gss_host, passphrase):\n        return original_auth(\n            self, username, password, forced_pkey, key_filenames, allow_agent, look_for_keys,\n            gss_auth, gss_kex, gss_deleg_creds, gss_host, passphrase)\n    return __paramiko_client_SSHClient_auth\n\n\ndef clone_repo(git_settings):\n    print(\"Cloning git repo ...\")\n    # Patch dulwich to work without valid UID/GID\n    dulwich.repo.__original__get_default_identity = dulwich.repo._get_default_identity\n    dulwich.repo._get_default_identity = _dulwich_repo_get_default_identity\n    # Patch dulwich to use paramiko SSH client\n    dulwich.client.get_ssh_vendor = ParamikoSSHVendor\n    # Patch paramiko to skip key verification\n    paramiko.transport.Transport._verify_key = _paramiko_transport_verify_key\n    # Set USERNAME if needed\n    try:\n        getpass.getuser()\n    except: # pylint: disable=W0702\n        os.environ[\"USERNAME\"] = \"git\"\n\n    os.mkdir(\"/tmp/git_dir\")\n    # Get options\n    source = git_settings.get(\"repo\")\n    target = \"/tmp/git_dir\"\n    branch = git_settings.get(\"repo_branch\")\n    if not branch:\n        branch = \"master\"\n    depth = None\n    # Prepare auth\n    auth_args = dict()\n    if git_settings.get(\"repo_user\"):\n        auth_args[\"username\"] = git_settings.get(\"repo_user\")\n    if git_settings.get(\"repo_pass\"):\n        auth_args[\"password\"] = git_settings.get(\"repo_pass\")\n    if git_settings.get(\"repo_key\"):\n        key = git_settings.get(\"repo_key\").replace(\"|\", \"\\n\")\n        key_obj = io.StringIO(key)\n        pkey = paramiko.RSAKey.from_private_key(key_obj)\n        # Patch paramiko to use our key\n        paramiko.client.SSHClient._auth = _paramiko_client_SSHClient_auth(paramiko.client.SSHClient._auth, pkey)\n    # Clone repository\n    repository = porcelain.clone(\n        source, target, checkout=False, depth=depth, **auth_args\n    )\n    try:\n        branch = branch.encode(\"utf-8\")\n        repository[b\"refs/heads/\" + 
branch] = repository[b\"refs/remotes/origin/\" + branch]\n        repository.refs.set_symbolic_ref(b\"HEAD\", b\"refs/heads/\" + branch)\n        repository.reset_index(repository[b\"HEAD\"].tree)\n    except KeyError:\n        print(f\"The {branch.decode('utf-8')} branch does not exist\")\n        exit(1)\n\n\ndef zipdir(ziph):\n    # ziph is zipfile handle\n    for root, dirs, files in os.walk(\"/tmp/git_dir\"):\n        for f in files:\n            ziph.write(os.path.join(root, f), os.path.join(root.replace(\"/tmp/git_dir\", ''), f))\n\n\ndef post_artifact(galloper_url, token, project_id, artifact):\n    try:\n        ziph = zipfile.ZipFile(f\"/tmp/{artifact}\", 'w', zipfile.ZIP_DEFLATED)\n        zipdir(ziph)\n        ziph.close()\n        files = {'file': open(f\"/tmp/{artifact}\", 'rb')}\n        headers = {'Authorization': f'bearer {token}'} if token else {}\n        if project_id:\n            upload_url = f'{galloper_url}/api/v1/artifact/{project_id}/tests'\n        else:\n            upload_url = f'{galloper_url}/artifacts/tests/upload'\n        r = requests.post(upload_url, allow_redirects=True, files=files, headers=headers)\n    except Exception:\n        print(format_exc())\n\n\ndef delete_artifact(galloper_url, token, project_id, artifact):\n    url = f'{galloper_url}/api/v1/artifact/{project_id}/tests'\n    headers = {'Authorization': f'bearer {token}'} if token else {}\n    requests.delete(f'{url}?fname[]={artifact}', headers=headers)\n", "id": "10836534", "language": "Python", "matching_score": 1.8912978172302246, "max_stars_count": 0, "path": "control_tower/git_clone.py" }, { "content": "#!/usr/bin/python3\n# coding=utf-8\n\n# Copyright 2021 getcarrier.io\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\" SourceProvider \"\"\"\n\nimport tempfile\n\nfrom pylon.core.tools import git\n\nfrom . 
import SourceProviderModel\n\n\nclass Provider(SourceProviderModel): # pylint: disable=R0902\n    \"\"\" Provider model \"\"\"\n\n    def __init__(self, context, settings):\n        self.context = context\n        self.settings = settings\n        #\n        self.branch = self.settings.get(\"branch\", \"main\")\n        self.depth = self.settings.get(\"depth\", 1)\n        self.delete_git_dir = self.settings.get(\"delete_git_dir\", True)\n        self.username = self.settings.get(\"username\", None)\n        self.password = self.settings.get(\"password\", None)\n        self.key_filename = self.settings.get(\"key_filename\", None)\n        self.key_data = self.settings.get(\"key_data\", None)\n\n    def init(self):\n        \"\"\" Initialize provider \"\"\"\n\n    def deinit(self):\n        \"\"\" De-initialize provider \"\"\"\n\n    def get_source(self, target):\n        \"\"\" Get plugin source \"\"\"\n        target_path = tempfile.mkdtemp()\n        self.context.module_manager.temporary_objects.append(target_path)\n        #\n        git.clone(\n            target.get(\"source\"),\n            target_path,\n            target.get(\"branch\", self.branch),\n            target.get(\"depth\", self.depth),\n            target.get(\"delete_git_dir\", self.delete_git_dir),\n            target.get(\"username\", self.username),\n            target.get(\"password\", self.password),\n            target.get(\"key_filename\", self.key_filename),\n            target.get(\"key_data\", self.key_data),\n        )\n        #\n        return target_path\n\n    def get_multiple_source(self, targets):\n        \"\"\" Get plugins source \"\"\"\n        result = list()\n        #\n        for target in targets:\n            result.append(self.get_source(target))\n        #\n        return result\n", "id": "2742769", "language": "Python", "matching_score": 4.287505626678467, "max_stars_count": 0, "path": "pylon/core/providers/source/git.py" }, { "content": "#!/usr/bin/python3\n# coding=utf-8\n\n# Copyright 2021 getcarrier.io\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\" SourceProvider \"\"\"\n\nimport os\n\nfrom . 
import SourceProviderModel\n\n\nclass Provider(SourceProviderModel):\n \"\"\" Provider model \"\"\"\n\n def __init__(self, context, settings):\n self.context = context\n self.settings = settings\n #\n self.path = self.settings[\"path\"]\n\n def init(self):\n \"\"\" Initialize provider \"\"\"\n os.makedirs(self.path, exist_ok=True)\n\n def deinit(self):\n \"\"\" De-initialize provider \"\"\"\n\n def get_source(self, target):\n \"\"\" Get plugin source \"\"\"\n target_path = os.path.join(self.path, target[\"name\"])\n #\n if not os.path.exists(target_path):\n raise RuntimeError(f\"Source not found: {target}\")\n #\n return target_path\n\n def get_multiple_source(self, targets):\n \"\"\" Get plugins source \"\"\"\n result = list()\n #\n for target in targets:\n result.append(self.get_source(target))\n #\n return result\n", "id": "4616374", "language": "Python", "matching_score": 1.3457248210906982, "max_stars_count": 0, "path": "pylon/core/providers/source/folder.py" }, { "content": "#!/usr/bin/python3\n# coding=utf-8\n\n# Copyright 2021 getcarrier.io\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\" RequirementsProvider \"\"\"\n\nimport os\nimport json\nimport shutil\n\n# from pylon.core.tools import log\n\nfrom . import RequirementsProviderModel\n\n\nclass Provider(RequirementsProviderModel):\n \"\"\" Provider \"\"\"\n\n def __init__(self, context, settings):\n self.context = context\n self.settings = settings\n #\n self.path = self.settings[\"path\"]\n\n def init(self):\n \"\"\" Initialize provider \"\"\"\n os.makedirs(self.path, exist_ok=True)\n\n def deinit(self):\n \"\"\" De-initialize provider \"\"\"\n\n def requirements_exist(self, plugin_name, cache_hash):\n \"\"\" Check if plugin requirements exist \"\"\"\n requirements_path = os.path.join(self.path, plugin_name)\n requirements_meta_path = os.path.join(self.path, f\"{plugin_name}.json\")\n #\n if os.path.exists(requirements_meta_path):\n with open(requirements_meta_path, \"rb\") as file:\n requirements_meta = json.load(file)\n else:\n requirements_meta = {\"cache_hash\": \"\"}\n #\n return os.path.exists(requirements_path) and requirements_meta[\"cache_hash\"] == cache_hash\n\n def get_requirements(self, plugin_name, cache_hash, temporary_objects=None):\n \"\"\" Get plugin requirements (a.k.a user site data) \"\"\"\n if not self.requirements_exist(plugin_name, cache_hash):\n return None\n #\n return os.path.join(self.path, plugin_name)\n\n def add_requirements(self, plugin_name, cache_hash, path):\n \"\"\" Add plugin requirements (a.k.a user site data) \"\"\"\n if os.path.exists(os.path.join(self.path, plugin_name)):\n self.delete_requirements(plugin_name)\n #\n shutil.copytree(path, os.path.join(self.path, plugin_name))\n with open(os.path.join(self.path, f\"{plugin_name}.json\"), \"wb\") as file:\n file.write(json.dumps({\"cache_hash\": cache_hash}).encode())\n\n def delete_requirements(self, plugin_name):\n \"\"\" Delete plugin requirements (a.k.a user site data) \"\"\"\n if os.path.exists(os.path.join(self.path, 
plugin_name)):\n shutil.rmtree(os.path.join(self.path, plugin_name))\n", "id": "10569525", "language": "Python", "matching_score": 4.223606109619141, "max_stars_count": 0, "path": "pylon/core/providers/requirements/folder.py" }, { "content": "#!/usr/bin/python3\n# coding=utf-8\n\n# Copyright 2021 getcarrier.io\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\" RequirementsProvider \"\"\"\n\n\nclass RequirementsProviderModel:\n \"\"\" Provider model \"\"\"\n\n # def __init__(self, context, settings):\n\n def init(self):\n \"\"\" Initialize provider \"\"\"\n raise NotImplementedError()\n\n def deinit(self):\n \"\"\" De-initialize provider \"\"\"\n raise NotImplementedError()\n\n def requirements_exist(self, plugin_name, cache_hash):\n \"\"\" Check if plugin requirements exist \"\"\"\n raise NotImplementedError()\n\n def get_requirements(self, plugin_name, cache_hash, temporary_objects=None):\n \"\"\" Get plugin requirements (a.k.a user site data) \"\"\"\n raise NotImplementedError()\n\n def add_requirements(self, plugin_name, cache_hash, path):\n \"\"\" Add plugin requirements (a.k.a user site data) \"\"\"\n raise NotImplementedError()\n\n def delete_requirements(self, plugin_name):\n \"\"\" Delete plugin requirements (a.k.a user site data) \"\"\"\n raise NotImplementedError()\n", "id": "6988392", "language": "Python", "matching_score": 4.361229419708252, "max_stars_count": 0, "path": "pylon/core/providers/requirements/__init__.py" }, { "content": "#!/usr/bin/python3\n# coding=utf-8\n\n# Copyright 2021 getcarrier.io\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\" ConfigProvider \"\"\"\n\n\nclass ConfigProviderModel:\n \"\"\" Provider model \"\"\"\n\n # def __init__(self, context, settings):\n\n def init(self):\n \"\"\" Initialize provider \"\"\"\n raise NotImplementedError()\n\n def deinit(self):\n \"\"\" De-initialize provider \"\"\"\n raise NotImplementedError()\n\n def config_data_exists(self, plugin_name):\n \"\"\" Check if custom plugin config exists \"\"\"\n raise NotImplementedError()\n\n def get_config_data(self, plugin_name):\n \"\"\" Get custom plugin config \"\"\"\n raise NotImplementedError()\n\n def add_config_data(self, plugin_name, config):\n \"\"\" Add custom plugin config \"\"\"\n raise NotImplementedError()\n\n def delete_config_data(self, plugin_name):\n \"\"\" Delete custom plugin config \"\"\"\n raise NotImplementedError()\n", "id": "4011777", "language": "Python", "matching_score": 4.111077308654785, "max_stars_count": 0, "path": 
"pylon/core/providers/config/__init__.py" }, { "content": "#!/usr/bin/python3\n# coding=utf-8\n\n# Copyright 2021 getcarrier.io\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\" ConfigProvider \"\"\"\n\nimport os\n\n# from pylon.core.tools import log\n\nfrom . import ConfigProviderModel\n\n\nclass Provider(ConfigProviderModel):\n \"\"\" Provider \"\"\"\n\n def __init__(self, context, settings):\n self.context = context\n self.settings = settings\n #\n self.path = self.settings[\"path\"]\n\n def init(self):\n \"\"\" Initialize provider \"\"\"\n os.makedirs(self.path, exist_ok=True)\n\n def deinit(self):\n \"\"\" De-initialize provider \"\"\"\n\n def config_data_exists(self, plugin_name):\n \"\"\" Check if custom plugin config exists \"\"\"\n return os.path.exists(os.path.join(self.path, f\"{plugin_name}.yml\"))\n\n def get_config_data(self, plugin_name):\n \"\"\" Get custom plugin config \"\"\"\n if not self.config_data_exists(plugin_name):\n return b\"\"\n with open(os.path.join(self.path, f\"{plugin_name}.yml\"), \"rb\") as file:\n data = file.read()\n return data\n\n def add_config_data(self, plugin_name, config):\n \"\"\" Add custom plugin config \"\"\"\n with open(os.path.join(self.path, f\"{plugin_name}.yml\"), \"wb\") as file:\n file.write(config)\n\n def delete_config_data(self, plugin_name):\n \"\"\" Delete custom plugin config \"\"\"\n if self.config_data_exists(plugin_name):\n os.remove(os.path.join(self.path, f\"{plugin_name}.yml\"))\n", "id": "3041415", "language": "Python", "matching_score": 2.013087034225464, "max_stars_count": 0, "path": "pylon/core/providers/config/folder.py" }, { "content": "#!/usr/bin/python3\n# coding=utf-8\n\n# Copyright 2021 getcarrier.io\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\" PluginsProvider \"\"\"\n\n\nclass PluginsProviderModel:\n \"\"\" Provider model \"\"\"\n\n # def __init__(self, context, settings):\n\n def init(self):\n \"\"\" Initialize provider \"\"\"\n raise NotImplementedError()\n\n def deinit(self):\n \"\"\" De-initialize provider \"\"\"\n raise NotImplementedError()\n\n def plugin_exists(self, name):\n \"\"\" Check if plugin exists \"\"\"\n raise NotImplementedError()\n\n def add_plugin(self, name, path):\n \"\"\" Add new plugin from path \"\"\"\n raise NotImplementedError()\n\n def delete_plugin(self, name):\n \"\"\" Delete existing plugin \"\"\"\n raise NotImplementedError()\n\n def list_plugins(self, exclude=None):\n \"\"\" Get existing plugin names \"\"\"\n raise NotImplementedError()\n\n def 
get_plugin_loader(self, name):\n \"\"\" Get loader for plugin \"\"\"\n raise NotImplementedError()\n\n def get_plugin_metadata(self, name):\n \"\"\" Get metadata for plugin \"\"\"\n raise NotImplementedError()\n", "id": "6073470", "language": "Python", "matching_score": 3.742629289627075, "max_stars_count": 0, "path": "pylon/core/providers/plugins/__init__.py" }, { "content": "#!/usr/bin/python3\n# coding=utf-8\n\n# Copyright 2021 getcarrier.io\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\" PluginsProvider \"\"\"\n\nimport os\nimport json\nimport shutil\n\n# from pylon.core.tools import log\nfrom pylon.core.tools.module import LocalModuleLoader\n\nfrom . import PluginsProviderModel\n\n\nclass Provider(PluginsProviderModel):\n \"\"\" Provider \"\"\"\n\n def __init__(self, context, settings):\n self.context = context\n self.settings = settings\n #\n self.path = self.settings[\"path\"]\n\n def init(self):\n \"\"\" Initialize provider \"\"\"\n os.makedirs(self.path, exist_ok=True)\n\n def deinit(self):\n \"\"\" De-initialize provider \"\"\"\n\n def plugin_exists(self, name):\n \"\"\" Check if plugin exists \"\"\"\n return os.path.exists(os.path.join(self.path, name))\n\n def add_plugin(self, name, path):\n \"\"\" Add new plugin from path \"\"\"\n if self.plugin_exists(name):\n self.delete_plugin(name)\n shutil.copytree(path, os.path.join(self.path, name))\n\n def delete_plugin(self, name):\n \"\"\" Delete existing plugin \"\"\"\n shutil.rmtree(os.path.join(self.path, name))\n\n def list_plugins(self, exclude=None):\n \"\"\" Get existing plugin names \"\"\"\n plugins = [\n item for item in os.listdir(self.path) if os.path.isdir(os.path.join(self.path, item))\n ]\n #\n if exclude is None:\n exclude = list()\n #\n for item in exclude:\n if item in plugins:\n plugins.remove(item)\n #\n plugins.sort()\n #\n return plugins\n\n def get_plugin_loader(self, name):\n \"\"\" Get loader for plugin \"\"\"\n if not self.plugin_exists(name):\n return None\n return LocalModuleLoader(f\"plugins.{name}\", os.path.join(self.path, name))\n\n def get_plugin_metadata(self, name):\n \"\"\" Get metadata for plugin \"\"\"\n if not self.plugin_exists(name):\n return None\n try:\n with open(os.path.join(self.path, name, \"metadata.json\"), \"rb\") as file:\n metadata = json.load(file)\n return metadata\n except: # pylint: disable=W0702\n return dict()\n", "id": "2013410", "language": "Python", "matching_score": 1.8532603979110718, "max_stars_count": 0, "path": "pylon/core/providers/plugins/folder.py" }, { "content": "#!/usr/bin/python\n# coding=utf-8\n\n# Copyright 2020 getcarrier.io\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either 
express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\" Modules \"\"\"\n\nimport io\nimport os\nimport sys\nimport json\nimport types\nimport shutil\nimport hashlib\nimport zipfile\nimport tempfile\nimport functools\nimport posixpath\nimport importlib\nimport subprocess\nimport pkg_resources\n\nimport yaml # pylint: disable=E0401\nimport flask # pylint: disable=E0401\nimport jinja2 # pylint: disable=E0401\n\nfrom pylon.core.tools import log\nfrom pylon.core.tools import web\nfrom pylon.core.tools import process\nfrom pylon.core.tools import dependency\nfrom pylon.core.tools.config import config_substitution, vault_secrets\n\n\nclass ModuleModel:\n \"\"\" Module model \"\"\"\n\n # def __init__(self, context, descriptor):\n\n def init(self):\n \"\"\" Initialize module \"\"\"\n raise NotImplementedError()\n\n def deinit(self):\n \"\"\" De-initialize module \"\"\"\n raise NotImplementedError()\n\n\nclass ModuleDescriptor:\n \"\"\" Module descriptor \"\"\"\n\n def __init__(self, context, name, loader, metadata, requirements):\n self.context = context\n self.name = name\n self.loader = loader\n self.metadata = metadata\n self.requirements = requirements\n #\n self.path = self.loader.get_local_path()\n self.config = None\n #\n self.requirements_base = None\n self.requirements_path = None\n #\n self.module = None\n\n def load_config(self):\n \"\"\" Load custom (or default) configuration \"\"\"\n if self.context.module_manager.providers[\"config\"].config_data_exists(self.name):\n config_data = self.context.module_manager.providers[\"config\"].get_config_data(self.name)\n elif self.loader.has_file(\"config.yml\"):\n config_data = self.loader.get_data(\"config.yml\")\n else:\n config_data = b\"\"\n #\n try:\n yaml_data = yaml.load(os.path.expandvars(config_data), Loader=yaml.SafeLoader)\n except: # pylint: disable=W0702\n log.exception(\"Invalid YAML config data for: %s\", self.name)\n yaml_data = None\n #\n if yaml_data is None:\n yaml_data = dict()\n #\n try:\n self.config = config_substitution(yaml_data, vault_secrets(self.context.settings))\n except: # pylint: disable=W0702\n log.exception(\"Could not add config secrets and env data for: %s\", self.name)\n self.config = yaml_data\n\n def save_config(self):\n \"\"\" Save custom config \"\"\"\n config_data = yaml.dump(self.config).encode()\n self.context.module_manager.providers[\"config\"].add_config_data(self.name, config_data)\n\n def make_blueprint(self, url_prefix=None, static_url_prefix=None, use_template_prefix=True):\n \"\"\" Make configured Blueprint instance \"\"\"\n template_folder = None\n if self.loader.has_directory(\"templates\"):\n template_folder = \"templates\"\n #\n if url_prefix is None:\n url_prefix = f\"/{self.name}\"\n #\n static_folder = None\n if self.loader.has_directory(\"static\"):\n static_folder = \"static\"\n if static_url_prefix is None:\n static_url_prefix = \"static\"\n #\n result_blueprint = flask.Blueprint(\n self.name, f\"plugins.{self.name}\",\n root_path=self.path,\n url_prefix=url_prefix,\n template_folder=template_folder,\n static_folder=static_folder,\n static_url_path=static_url_prefix,\n )\n #\n if template_folder is not None:\n if use_template_prefix:\n result_blueprint.jinja_loader = jinja2.PrefixLoader({\n self.name: jinja2.loaders.PackageLoader(f\"plugins.{self.name}\", \"templates\"),\n }, delimiter=\":\")\n else:\n result_blueprint.jinja_loader = jinja2.loaders.PackageLoader(\n f\"plugins.{self.name}\", \"templates\"\n 
)\n #\n return result_blueprint\n\n def init_blueprint(\n self,\n url_prefix=None, static_url_prefix=None, use_template_prefix=True,\n register_in_app=True, module_routes=True,\n ):\n \"\"\" Make and register blueprint with pre-registered routes \"\"\"\n # Make Blueprint\n result_blueprint = self.make_blueprint(url_prefix, static_url_prefix, use_template_prefix)\n # Add routes\n routes = web.routes_registry.pop(f\"plugins.{self.name}\", list())\n for route in routes:\n rule, endpoint, obj, options = route\n if module_routes:\n obj = functools.partial(obj, self.module)\n obj.__name__ = obj.func.__name__\n result_blueprint.add_url_rule(rule, endpoint, obj, **options)\n # Register in app\n if register_in_app:\n self.context.app.register_blueprint(result_blueprint)\n #\n return result_blueprint\n\n def template_name(self, name, module=None):\n \"\"\" Make prefixed template name \"\"\"\n if module is None:\n module = self.name\n #\n return f\"{module}:{name}\"\n\n def render_template(self, name, **context):\n \"\"\" Render template from this module \"\"\"\n module = self.name\n return flask.render_template(f\"{module}:{name}\", **context)\n\n\nclass ModuleManager:\n \"\"\" Manages modules \"\"\"\n\n def __init__(self, context):\n self.context = context\n self.settings = self.context.settings.get(\"modules\", dict())\n self.providers = dict() # object_type -> provider_instance\n self.modules = dict() # module_name -> module_descriptor\n self.temporary_objects = list()\n\n def init_modules(self):\n \"\"\" Load and init modules \"\"\"\n if self.context.debug and os.environ.get(\"WERKZEUG_RUN_MAIN\") != \"true\":\n log.info(\n \"Running in development mode before reloader is started. Skipping module loading\"\n )\n return\n # Disable bytecode caching and register resource providers\n sys.dont_write_bytecode = True\n pkg_resources.register_loader_type(DataModuleLoader, DataModuleProvider)\n # Make plugins holder\n if \"plugins\" not in sys.modules:\n sys.modules[\"plugins\"] = types.ModuleType(\"plugins\")\n sys.modules[\"plugins\"].__path__ = []\n # Make providers\n self._init_providers()\n #\n # Preload\n #\n log.info(\"Preloading modules\")\n # Create loaders for preload modules\n preload_module_meta_map = self._make_preload_module_meta_map()\n # Resolve preload module load order\n preload_module_order = dependency.resolve_depencies(\n preload_module_meta_map, list(self.modules),\n )\n # Make preload module descriptors\n preload_module_descriptors = self._make_descriptors(\n preload_module_meta_map, preload_module_order,\n )\n # Install/get/activate requirements and initialize preload modules\n preloaded_items = self._activate_modules(preload_module_descriptors)\n #\n # Target\n #\n log.info(\"Initializing modules\")\n # Create loaders for target modules\n target_module_meta_map = self._make_target_module_meta_map()\n # Resolve target module load order\n target_module_order = dependency.resolve_depencies(\n target_module_meta_map, list(self.modules),\n )\n # Make target module descriptors\n target_module_descriptors = self._make_descriptors(\n target_module_meta_map, target_module_order,\n )\n # Install/get/activate requirements and initialize target modules\n self._activate_modules(target_module_descriptors, preloaded_items)\n\n def _make_preload_module_meta_map(self):\n module_meta_map = dict() # module_name -> (metadata, loader)\n #\n if \"preload\" not in self.settings:\n return module_meta_map\n #\n for module_name in self.settings[\"preload\"]:\n if not 
self.providers[\"plugins\"].plugin_exists(module_name):\n module_target = self.settings[\"preload\"][module_name].copy()\n #\n if \"provider\" not in module_target or \\\n \"type\" not in module_target[\"provider\"]:\n continue\n #\n provider_config = module_target.pop(\"provider\").copy()\n provider_type = provider_config.pop(\"type\")\n #\n try:\n provider = importlib.import_module(\n f\"pylon.core.providers.source.{provider_type}\"\n ).Provider(self.context, provider_config)\n provider.init()\n #\n module_source = provider.get_source(module_target)\n #\n provider.deinit()\n except: # pylint: disable=W0702\n log.exception(\"Could not preload module: %s\", module_name)\n continue\n #\n self.providers[\"plugins\"].add_plugin(module_name, module_source)\n #\n try:\n module_loader, module_metadata = self._make_loader_and_metadata(module_name)\n except: # pylint: disable=W0702\n log.exception(\"Could not make module loader: %s\", module_name)\n continue\n #\n module_meta_map[module_name] = (module_metadata, module_loader)\n #\n return module_meta_map\n\n def _make_target_module_meta_map(self):\n module_meta_map = dict() # module_name -> (metadata, loader)\n #\n for module_name in self.providers[\"plugins\"].list_plugins(exclude=list(self.modules)):\n try:\n module_loader, module_metadata = self._make_loader_and_metadata(module_name)\n except: # pylint: disable=W0702\n log.exception(\"Could not make module loader: %s\", module_name)\n continue\n #\n module_meta_map[module_name] = (module_metadata, module_loader)\n #\n return module_meta_map\n\n def _make_loader_and_metadata(self, module_name):\n module_loader = self.providers[\"plugins\"].get_plugin_loader(module_name)\n #\n if not module_loader.has_file(\"metadata.json\"):\n raise ValueError(f\"Module has no metadata: {module_name}\")\n #\n module_metadata = json.loads(module_loader.get_data(\"metadata.json\"))\n #\n if module_loader.has_directory(\"static\") or module_metadata.get(\"extract\", False):\n module_loader = module_loader.get_local_loader(self.temporary_objects)\n #\n return module_loader, module_metadata\n\n def _make_descriptors(self, module_meta_map, module_order):\n module_descriptors = list()\n #\n for module_name in module_order:\n module_metadata, module_loader = module_meta_map[module_name]\n # Get module requirements\n if module_loader.has_file(\"requirements.txt\"):\n module_requirements = module_loader.get_data(\"requirements.txt\").decode()\n else:\n module_requirements = \"\"\n # Make descriptor\n module_descriptor = ModuleDescriptor(\n self.context, module_name, module_loader, module_metadata, module_requirements\n )\n # Preload config\n module_descriptor.load_config()\n #\n module_descriptors.append(module_descriptor)\n #\n return module_descriptors\n\n def _activate_modules(self, module_descriptors, activated_items=None):\n if activated_items is None:\n cache_hash_chunks = list()\n module_site_paths = list()\n module_constraint_paths = list()\n else:\n cache_hash_chunks, module_site_paths, module_constraint_paths = activated_items\n #\n for module_descriptor in module_descriptors:\n all_required_dependencies_present = True\n #\n for required_dependency in module_descriptor.metadata.get(\"depends_on\", list()):\n if required_dependency not in self.modules:\n log.error(\n \"Required dependency is not present: %s (required by %s)\",\n required_dependency, module_descriptor.name,\n )\n all_required_dependencies_present = False\n #\n if not all_required_dependencies_present:\n log.error(\"Skipping module: %s\", 
module_descriptor.name)\n continue\n #\n requirements_hash = hashlib.sha256(module_descriptor.requirements.encode()).hexdigest()\n cache_hash_chunks.append(requirements_hash)\n cache_hash = hashlib.sha256(\"_\".join(cache_hash_chunks).encode()).hexdigest()\n #\n module_name = module_descriptor.name\n #\n requirements_txt_fd, requirements_txt = tempfile.mkstemp(\".txt\")\n self.temporary_objects.append(requirements_txt)\n os.close(requirements_txt_fd)\n #\n with open(requirements_txt, \"wb\") as file:\n file.write(module_descriptor.requirements.encode())\n #\n if self.providers[\"requirements\"].requirements_exist(module_name, cache_hash):\n requirements_base = \\\n self.providers[\"requirements\"].get_requirements(\n module_name, cache_hash, self.temporary_objects,\n )\n else:\n requirements_base = tempfile.mkdtemp()\n self.temporary_objects.append(requirements_base)\n #\n try:\n self.install_requirements(\n requirements_path=requirements_txt,\n target_site_base=requirements_base,\n additional_site_paths=module_site_paths,\n constraint_paths=module_constraint_paths,\n )\n except: # pylint: disable=W0702\n log.exception(\"Failed to install requirements for: %s\", module_descriptor.name)\n continue\n #\n self.providers[\"requirements\"].add_requirements(\n module_name, cache_hash, requirements_base,\n )\n #\n requirements_path = self.get_user_site_path(requirements_base)\n module_site_paths.append(requirements_path)\n #\n module_descriptor.requirements_base = requirements_base\n module_descriptor.requirements_path = requirements_path\n #\n requirements_mode = self.settings[\"requirements\"].get(\"mode\", \"relaxed\")\n if requirements_mode == \"constrained\":\n module_constraint_paths.append(requirements_txt)\n elif requirements_mode == \"strict\":\n frozen_module_requirements = self.freeze_site_requirements(\n target_site_base=requirements_base,\n requirements_path=requirements_txt,\n additional_site_paths=module_site_paths,\n )\n #\n frozen_requirements_fd, frozen_requirements = tempfile.mkstemp(\".txt\")\n self.temporary_objects.append(frozen_requirements)\n os.close(frozen_requirements_fd)\n #\n with open(frozen_requirements, \"wb\") as file:\n file.write(frozen_module_requirements.encode())\n #\n module_constraint_paths.append(frozen_requirements)\n #\n self.activate_path(module_descriptor.requirements_path)\n self.activate_loader(module_descriptor.loader)\n #\n try:\n module_pkg = importlib.import_module(f\"plugins.{module_descriptor.name}.module\")\n module_obj = module_pkg.Module(\n context=self.context,\n descriptor=module_descriptor,\n )\n module_descriptor.module = module_obj\n module_obj.init()\n except: # pylint: disable=W0702\n log.exception(\"Failed to enable module: %s\", module_descriptor.name)\n continue\n #\n self.modules[module_descriptor.name] = module_descriptor\n #\n return cache_hash_chunks, module_site_paths, module_constraint_paths\n\n def deinit_modules(self):\n \"\"\" De-init and unload modules \"\"\"\n if self.context.debug and os.environ.get(\"WERKZEUG_RUN_MAIN\") != \"true\":\n log.info(\n \"Running in development mode before reloader is started. 
Skipping module unloading\"\n )\n return\n #\n for module_name in reversed(list(self.modules)):\n try:\n self.modules[module_name].module.deinit()\n except: # pylint: disable=W0702\n pass\n #\n self._deinit_providers()\n #\n for obj in self.temporary_objects:\n try:\n if os.path.isdir(obj):\n shutil.rmtree(obj)\n else:\n os.remove(obj)\n except: # pylint: disable=W0702\n pass\n\n def _init_providers(self):\n for key in [\"plugins\", \"requirements\", \"config\"]:\n log.info(\"Initializing %s provider\", key)\n #\n if key not in self.settings or \\\n \"provider\" not in self.settings[key] or \\\n \"type\" not in self.settings[key][\"provider\"]:\n raise RuntimeError(f\"No {key} provider set in config\")\n #\n provider_config = self.settings[key][\"provider\"].copy()\n provider_type = provider_config.pop(\"type\")\n #\n provider = importlib.import_module(\n f\"pylon.core.providers.{key}.{provider_type}\"\n ).Provider(self.context, provider_config)\n provider.init()\n #\n self.providers[key] = provider\n\n def _deinit_providers(self):\n for key, provider in self.providers.items():\n log.info(\"Deinitializing %s provider\", key)\n try:\n provider.deinit()\n except: # pylint: disable=W0702\n pass\n\n @staticmethod\n def activate_loader(loader):\n \"\"\" Activate loader \"\"\"\n sys.meta_path.insert(0, loader)\n importlib.invalidate_caches()\n\n @staticmethod\n def activate_path(path):\n \"\"\" Activate path \"\"\"\n sys.path.insert(0, path)\n importlib.invalidate_caches()\n\n @staticmethod\n def get_user_site_path(base):\n \"\"\" Get site path for specific site base \"\"\"\n env = os.environ.copy()\n env[\"PYTHONUSERBASE\"] = base\n #\n return subprocess.check_output(\n [sys.executable, \"-m\", \"site\", \"--user-site\"],\n env=env,\n ).decode().strip()\n\n @staticmethod\n def install_requirements(\n requirements_path, target_site_base, additional_site_paths=None, constraint_paths=None,\n ):\n \"\"\" Install requirements into target site \"\"\"\n if constraint_paths is None:\n constraint_paths = list()\n #\n env = os.environ.copy()\n env[\"PYTHONUSERBASE\"] = target_site_base\n #\n if additional_site_paths is not None:\n env[\"PYTHONPATH\"] = os.pathsep.join(additional_site_paths)\n #\n c_args = []\n for const in constraint_paths:\n c_args.append(\"-c\")\n c_args.append(const)\n #\n return process.run_command(\n [\n sys.executable,\n \"-m\", \"pip\", \"install\",\n \"--user\", \"--no-warn-script-location\",\n ] + c_args + [\n \"-r\", requirements_path,\n ],\n env=env,\n )\n\n @staticmethod\n def freeze_site_requirements(\n target_site_base, requirements_path=None, additional_site_paths=None\n ):\n \"\"\" Get installed requirements (a.k.a pip freeze) \"\"\"\n env = os.environ.copy()\n env[\"PYTHONUSERBASE\"] = target_site_base\n #\n if additional_site_paths is not None:\n env[\"PYTHONPATH\"] = os.pathsep.join(additional_site_paths)\n #\n opt_args = []\n if requirements_path is not None:\n opt_args.append(\"-r\")\n opt_args.append(requirements_path)\n #\n return subprocess.check_output(\n [sys.executable, \"-m\", \"pip\", \"freeze\", \"--user\"] + opt_args,\n env=env,\n ).decode()\n\n\nclass LocalModuleLoader(importlib.machinery.PathFinder):\n \"\"\" Allows to load modules from specific location \"\"\"\n\n def __init__(self, module_name, module_path):\n self.module_name = module_name\n self.module_name_components = self.module_name.split(\".\")\n self.module_path = module_path\n self.module_abspath = os.path.abspath(self.module_path)\n\n def _fullname_to_filename(self, fullname):\n base = 
fullname.replace(\".\", os.sep)\n # Try module directory\n filename = os.path.join(self.module_abspath, base, \"__init__.py\")\n if os.path.isfile(filename):\n return filename\n # Try module file\n filename = os.path.join(self.module_abspath, f\"{base}.py\")\n if os.path.isfile(filename):\n return filename\n # Not found\n return None\n\n def find_spec(self, fullname, path=None, target=None):\n \"\"\" Find spec for new module \"\"\"\n name_components = fullname.split(\".\")\n if name_components[:len(self.module_name_components)] != self.module_name_components:\n return None\n #\n filename = self._fullname_to_filename(\n \".\".join(name_components[len(self.module_name_components):])\n )\n if filename is None:\n return None\n #\n return importlib.util.spec_from_file_location(fullname, filename)\n\n def get_data(self, path):\n \"\"\" Read data resource \"\"\"\n try:\n with open(os.path.join(self.module_abspath, path), \"rb\") as file:\n data = file.read()\n return data\n except BaseException as exc:\n raise FileNotFoundError(f\"Resource not found: {path}\") from exc\n\n def has_file(self, path):\n \"\"\" Check if file is present in module \"\"\"\n return os.path.isfile(os.path.join(self.module_abspath, path))\n\n def has_directory(self, path):\n \"\"\" Check if directory is present in module \"\"\"\n return os.path.isdir(os.path.join(self.module_abspath, path))\n\n def get_local_path(self):\n \"\"\" Get path to module data \"\"\"\n return self.module_abspath\n\n def get_local_loader(self, temporary_objects=None):\n \"\"\" Get LocalModuleLoader from this module data \"\"\"\n return self\n\n\nclass DataModuleLoader(importlib.abc.MetaPathFinder):\n \"\"\" Allows to load modules from ZIP in-memory data \"\"\"\n\n def __init__(self, module_name, module_data):\n self.module_name = module_name\n self.module_name_components = self.module_name.split(\".\")\n self.storage = zipfile.ZipFile(io.BytesIO(module_data))\n self.storage_files = [item.filename for item in self.storage.filelist]\n\n def _fullname_to_filename(self, fullname):\n base = fullname.replace(\".\", posixpath.sep)\n # Try module directory\n filename = posixpath.join(base, \"__init__.py\")\n if filename in self.storage_files:\n return filename, True\n # Try module file\n filename = f\"{base}.py\"\n if filename in self.storage_files:\n return filename, False\n # Not found\n return None, None\n\n def find_spec(self, fullname, path=None, target=None): # pylint: disable=W0613\n \"\"\" Find spec for new module \"\"\"\n name_components = fullname.split(\".\")\n if name_components[:len(self.module_name_components)] != self.module_name_components:\n return None\n #\n filename, is_package = self._fullname_to_filename(\n \".\".join(name_components[len(self.module_name_components):])\n )\n if filename is None:\n return None\n #\n return importlib.machinery.ModuleSpec(\n fullname, self, origin=filename, is_package=is_package\n )\n\n def create_module(self, spec): # pylint: disable=W0613,R0201\n \"\"\" Create new module \"\"\"\n return None\n\n def exec_module(self, module):\n \"\"\" Execute new module \"\"\"\n module.__file__ = module.__spec__.origin\n module.__cached__ = None\n #\n with self.storage.open(module.__file__, \"r\") as file:\n code = compile(\n source=file.read(),\n filename=f\"{self.module_name}:{module.__file__}\",\n mode=\"exec\",\n dont_inherit=True,\n )\n exec(code, module.__dict__) # pylint: disable=W0122\n\n def get_data(self, path):\n \"\"\" Read data resource \"\"\"\n if os.sep != posixpath.sep:\n path = path.replace(os.sep, 
posixpath.sep)\n #\n try:\n with self.storage.open(path, \"r\") as file:\n data = file.read()\n return data\n except BaseException as exc:\n raise FileNotFoundError(f\"Resource not found: {path}\") from exc\n\n def has_file(self, path):\n \"\"\" Check if file is present in module \"\"\"\n if os.sep != posixpath.sep:\n path = path.replace(os.sep, posixpath.sep)\n #\n return path in self.storage_files\n\n def has_directory(self, path):\n \"\"\" Check if directory is present in module \"\"\"\n if os.sep != posixpath.sep:\n path = path.replace(os.sep, posixpath.sep)\n #\n if not path.endswith(posixpath.sep):\n path = f\"{path}{posixpath.sep}\"\n #\n return path in self.storage_files\n\n def get_resource_reader(self, fullname):\n \"\"\" Get ResourceReader \"\"\"\n name_components = fullname.split(\".\")\n return DataModuleResourceReader(\n self, posixpath.sep.join(name_components[len(self.module_name_components):])\n )\n\n def get_local_path(self):\n \"\"\" Get path to module data \"\"\"\n return None\n\n def get_local_loader(self, temporary_objects=None):\n \"\"\" Get LocalModuleLoader from this module data \"\"\"\n local_path = tempfile.mkdtemp()\n if temporary_objects is not None:\n temporary_objects.append(local_path)\n self.storage.extractall(local_path)\n return LocalModuleLoader(self.module_name, local_path)\n\n\nclass DataModuleProvider(pkg_resources.NullProvider): # pylint: disable=W0223\n \"\"\" Allows to load resources from ZIP in-memory data \"\"\"\n\n def _has(self, path):\n if os.sep != posixpath.sep:\n path = path.replace(os.sep, posixpath.sep)\n #\n return \\\n not path or path in self.loader.storage_files or f\"{path}/\" in self.loader.storage_files\n\n def _isdir(self, path):\n if os.sep != posixpath.sep:\n path = path.replace(os.sep, posixpath.sep)\n #\n if path in self.loader.storage_files:\n return path.endswith(posixpath.sep)\n if not path or f\"{path}/\" in self.loader.storage_files:\n return True\n #\n return False\n\n def _listdir(self, path):\n if os.sep != posixpath.sep:\n path = path.replace(os.sep, posixpath.sep)\n #\n if not self._has(path):\n raise FileNotFoundError(f\"Path not found: {path}\")\n if not self._isdir(path):\n raise NotADirectoryError(f\"Not a directory: {path}\")\n #\n components = len(path.split(posixpath.sep)) if path else 0\n #\n files = [\n item.split(posixpath.sep)[-1] for item in self.loader.storage_files\n if item.split(posixpath.sep)[-1] and\n len(item.split(posixpath.sep)) == components + 1\n ]\n dirs = [\n item.split(posixpath.sep)[-2] for item in self.loader.storage_files\n if not item.split(posixpath.sep)[-1] and\n len(item.split(posixpath.sep)) == components + 2\n ]\n #\n return files + dirs\n\n\nclass DataModuleResourceReader(importlib.abc.ResourceReader):\n \"\"\" Allows to read resources from ZIP in-memory data \"\"\"\n\n def __init__(self, loader, path):\n self.loader = loader\n self.path = path\n\n def open_resource(self, resource):\n \"\"\" Implementation of open_resource \"\"\"\n if os.sep != posixpath.sep:\n resource = resource.replace(os.sep, posixpath.sep)\n #\n try:\n return self.loader.storage.open(resource, \"r\")\n except BaseException as exc:\n raise FileNotFoundError(f\"Resource not found: {resource}\") from exc\n\n def resource_path(self, resource):\n \"\"\" Implementation of resource_path \"\"\"\n if os.sep != posixpath.sep:\n resource = resource.replace(os.sep, posixpath.sep)\n #\n raise FileNotFoundError(f\"Path to resource not found: {resource}\")\n\n def is_resource(self, name):\n \"\"\" Implementation of 
is_resource \"\"\"\n if os.sep != posixpath.sep:\n name = name.replace(os.sep, posixpath.sep)\n #\n if name in self.loader.storage_files:\n return not name.endswith(posixpath.sep)\n #\n return False\n\n def contents(self):\n \"\"\" Implementation of contents \"\"\"\n path = self.path\n #\n if os.sep != posixpath.sep:\n path = path.replace(os.sep, posixpath.sep)\n #\n components = len(path.split(posixpath.sep)) if path else 0\n #\n files = [\n item.split(posixpath.sep)[-1] for item in self.loader.storage_files\n if item.split(posixpath.sep)[-1] and\n len(item.split(posixpath.sep)) == components + 1\n ]\n dirs = [\n item.split(posixpath.sep)[-2] for item in self.loader.storage_files\n if not item.split(posixpath.sep)[-1] and\n len(item.split(posixpath.sep)) == components + 2\n ]\n #\n return files + dirs\n", "id": "9221641", "language": "Python", "matching_score": 4.588643550872803, "max_stars_count": 0, "path": "pylon/core/tools/module.py" }, { "content": "#!/usr/bin/python3\n# coding=utf-8\n\n# Copyright 2021 getcarrier.io\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\" Module \"\"\"\nfrom pathlib import Path\n\nimport flask # pylint: disable=E0401\nimport jinja2 # pylint: disable=E0401\nfrom flask import request, render_template, redirect, url_for\nfrom pylon.core.tools import log # pylint: disable=E0611,E0401\nfrom pylon.core.tools import module # pylint: disable=E0611,E0401\n\nfrom .components.render_nikto import render_nikto_card\n\n\nclass Module(module.ModuleModel):\n \"\"\" Galloper module \"\"\"\n\n def __init__(self, settings, root_path, context):\n self.settings = settings\n self.root_path = root_path\n self.context = context\n\n def init(self):\n \"\"\" Init module \"\"\"\n log.info(\"Initializing module\")\n bp = flask.Blueprint(\n \"nikto\", \"plugins.security_scanner_nikto.plugin\",\n static_folder=str(Path(__file__).parents[0] / \"static\"),\n static_url_path='/nikto/static/'\n )\n bp.jinja_loader = jinja2.ChoiceLoader([\n jinja2.loaders.PackageLoader(\"plugins.security_scanner_nikto\", \"templates\"),\n ])\n # Register in app\n self.context.app.register_blueprint(bp)\n # Register template slot callback\n self.context.slot_manager.register_callback(\"security_scanners\", render_nikto_card)\n\n from .rpc_worker import get_scanner_parameters\n self.context.rpc_manager.register_function(get_scanner_parameters, name='nikto')\n\n def deinit(self): # pylint: disable=R0201\n \"\"\" De-init module \"\"\"\n log.info(\"De-initializing module\")\n", "id": "645319", "language": "Python", "matching_score": 1.792234182357788, "max_stars_count": 0, "path": "module.py" }, { "content": "from flask import request, render_template\n\n\ndef render_w3af_card(context, slot, payload):\n return render_template(\n f\"w3af_template.html\",\n config=payload\n )", "id": "7927148", "language": "Python", "matching_score": 2.019087076187134, "max_stars_count": 0, "path": "components/render_w3af.py" }, { "content": "from flask import request, render_template\n\n\ndef render_sslyze_card(context, 
slot, payload):\n return render_template(\n f\"sslyze_template.html\",\n config=payload\n )\n", "id": "9910462", "language": "Python", "matching_score": 2.019087076187134, "max_stars_count": 0, "path": "components/render_sslyze.py" }, { "content": "from flask import request, render_template\n\n\ndef render_nmap_card(context, slot, payload):\n return render_template(\n f\"nmap_template.html\",\n config=payload\n )\n", "id": "10869398", "language": "Python", "matching_score": 2.019087076187134, "max_stars_count": 0, "path": "components/render_nmap.py" }, { "content": "from flask import request, render_template\n\n\ndef render_masscan_card(context, slot, payload):\n return render_template(\n f\"masscan_template.html\",\n config=payload\n )\n", "id": "8077905", "language": "Python", "matching_score": 2.102285623550415, "max_stars_count": 0, "path": "components/render_masscan.py" }, { "content": "from flask import render_template\n\n\ndef render_nikto_card(context, slot, payload):\n return render_template(\n f\"nikto_template.html\",\n config=payload\n )\n", "id": "9288524", "language": "Python", "matching_score": 1.9667389392852783, "max_stars_count": 0, "path": "components/render_nikto.py" } ]
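All of the pylon provider records above share one lifecycle contract: construct with (context, settings), call init() before use, and deinit() afterwards. A minimal sketch of driving the folder-based config provider by hand, assuming the pylon package from these records is importable; the bare object() context and the temporary directory are invented stand-ins (the folder providers never read context):

```python
# Sketch: exercising pylon's folder-based config Provider directly.
# Assumptions: pylon is importable; the folder providers ignore
# `context`, so a bare object() stands in; the temp dir and the
# "demo" plugin name are purely for illustration.
import tempfile

from pylon.core.providers.config.folder import Provider

provider = Provider(context=object(), settings={"path": tempfile.mkdtemp()})
provider.init()  # creates the config directory if it does not exist

assert not provider.config_data_exists("demo")
provider.add_config_data("demo", b"log_level: debug\n")  # writes demo.yml
assert provider.get_config_data("demo") == b"log_level: debug\n"

provider.delete_config_data("demo")
provider.deinit()
```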
2.192327
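The module.py record above resolves plugin imports through custom meta-path finders. A short sketch of the in-memory route, again assuming pylon is importable: a one-file package is zipped into a buffer, served through DataModuleLoader, and wired into the import system the same way ModuleManager.activate_loader() does; the plugins holder module mirrors what init_modules() creates, and the archive contents are invented.

```python
# Sketch: importing a package straight from in-memory ZIP bytes.
# Assumptions: pylon is importable; archive contents are illustrative.
import io
import sys
import types
import zipfile
import importlib

from pylon.core.tools.module import DataModuleLoader

# Holder package, as ModuleManager.init_modules() sets it up
if "plugins" not in sys.modules:
    sys.modules["plugins"] = types.ModuleType("plugins")
    sys.modules["plugins"].__path__ = []

# A one-file package zipped into memory
buffer = io.BytesIO()
with zipfile.ZipFile(buffer, "w") as archive:
    archive.writestr("__init__.py", "GREETING = 'hello from zip'\n")

# What ModuleManager.activate_loader() does
sys.meta_path.insert(0, DataModuleLoader("plugins.demo", buffer.getvalue()))
importlib.invalidate_caches()

import plugins.demo  # resolved entirely from the in-memory archive

print(plugins.demo.GREETING)
```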
EvanPatrick423
[ { "content": "from django.apps import AppConfig\n\n\nclass CssanimationsConfig(AppConfig):\n name = 'cssAnimations'\n", "id": "3333059", "language": "Python", "matching_score": 1.7319813966751099, "max_stars_count": 0, "path": "showcase/cssAnimations/apps.py" }, { "content": "from django.apps import AppConfig\n\n\nclass LandpageConfig(AppConfig):\n name = 'landPage'\n", "id": "5890894", "language": "Python", "matching_score": 1.9458211660385132, "max_stars_count": 0, "path": "showcase/landPage/apps.py" }, { "content": "from django.apps import AppConfig\n\n\nclass TheodinprojectConfig(AppConfig):\n name = 'theodinproject'\n", "id": "11185584", "language": "Python", "matching_score": 0.5990360379219055, "max_stars_count": 0, "path": "showcase/theodinproject/apps.py" }, { "content": "from django.urls import path\nfrom . import views\n\napp_name=\"landPage\"\nurlpatterns = [\n path(\"\", views.index, name='index'),\n path(\"resume\", views.resume, name='resume'),\n]\n", "id": "1829001", "language": "Python", "matching_score": 1.4507691860198975, "max_stars_count": 0, "path": "showcase/landPage/urls.py" }, { "content": "from django.shortcuts import render\nfrom django.http import HttpResponse\nfrom django.template import loader\n\ndef index(request):\n template = loader.get_template('landPage/index.html')\n return render(request, 'landPage/index.html')\n\ndef resume(request):\n template = loader.get_template('landPage/static/landPage/<NAME> Resume Dev.pdf')\n return render(request, 'landPage/static/landPage/<NAME> Resume Dev.pdf')\n", "id": "9830568", "language": "Python", "matching_score": 1.7878270149230957, "max_stars_count": 0, "path": "showcase/landPage/views.py" }, { "content": "from django.shortcuts import render\nfrom django.http import HttpResponse\nfrom django.template import loader\n\ndef index(request):\n template = loader.get_template('cssAnimations/index.html')\n return render(request, 'cssAnimations/index.html')\n", "id": "972109", "language": "Python", "matching_score": 1.991919994354248, "max_stars_count": 0, "path": "showcase/cssAnimations/views.py" }, { "content": "from django.shortcuts import render\n\n# Create your views here.\nfrom django.http import HttpResponse\nfrom django.template import loader\n\ndef index(request):\n template = loader.get_template('theodinproject/index.html')\n return render(request, 'theodinproject/index.html')\n\ndef fizzbizz(request):\n template = loader.get_template('theodinproject/fizzbizz.html')\n return render(request, 'theodinproject/fizzbizz.html')\n\ndef rockpaperscissors(request):\n template = loader.get_template('theodinproject/rockpaperscissors.html')\n return render(request, 'theodinproject/rockpaperscissors.html')\n\ndef DOM(request):\n template = loader.get_template('theodinproject/DOM.html')\n return render(request, 'theodinproject/DOM.html')\n\ndef javascript30(request):\n template = loader.get_template('theodinproject/javascript30/javascript30.html')\n return render(request, 'theodinproject/javascript30/javascript30.html')\n\ndef keyboardDrumKit(request):\n template = loader.get_template('theodinproject/javascript30/keyboardDrumKit/keyboardDrumKit.html')\n return render(request, 'theodinproject/javascript30/keyboardDrumKit/keyboardDrumKit.html')\n\ndef etchASketch (request):\n template = loader.get_template('theodinproject/etchASketch.html')\n return render(request, 'theodinproject/etchASketch.html')\n\ndef calculator (request):\n template = loader.get_template('theodinproject/calculator.html')\n return render(request, 
'theodinproject/calculator.html')\n\ndef library (request):\n template = loader.get_template('theodinproject/odin-projects/library.html')\n return render(request, 'theodinproject/odin-projects/library.html')\n\ndef ticTacToe (request):\n template = loader.get_template('theodinproject/odin-projects/tic-tac-toe.html')\n return render(request, 'theodinproject/odin-projects/tic-tac-toe.html')\n\ndef newLibrary(request):\n template = loader.get_template('theodinproject/odin-projects/newLibrary.html')\n return render(request, 'theodinproject/odin-projects/newLibrary.html')\n\n\"\"\"\ndef script(request):\n template = loader.get_template('')\n return render(request, '')\n\"\"\"\n", "id": "6008430", "language": "Python", "matching_score": 4.637050151824951, "max_stars_count": 0, "path": "showcase/theodinproject/views.py" }, { "content": "from django.urls import path\n\nfrom . import views\n\napp_name='theodinproject'\nurlpatterns = [\n path('', views.index, name='index'),\n path('fizzbizz', views.fizzbizz, name='fizzbizz'),\n path('rockpaperscissors', views.rockpaperscissors, name='rockpaperscissors'),\n path('DOM', views.DOM, name='DOM'),\n path('javascript30', views.javascript30, name='javascript30'),\n path('keyboardDrumKit', views.keyboardDrumKit, name='keyboardDrumKit'),\n path('etchASketch', views.etchASketch, name='etchASketch'),\n path('calculator', views.calculator, name='calculator'),\n path('library', views.library, name='library'),\n path('tic-tac-toe', views.ticTacToe, name='tic-tac-toe'),\n path('newLibrary', views.newLibrary, name='newLibrary'),\n]\n", "id": "9818698", "language": "Python", "matching_score": 0.7893372774124146, "max_stars_count": 0, "path": "showcase/theodinproject/urls.py" }, { "content": "# Generated by Django 3.0.6 on 2020-06-29 15:10\n\nfrom django.db import migrations\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('post', '0001_initial'),\n ]\n\n operations = [\n migrations.RenameModel(\n old_name='blogPost',\n new_name='Post',\n ),\n migrations.RenameField(\n model_name='reply',\n old_name='blogPost',\n new_name='Post',\n ),\n ]\n", "id": "5052825", "language": "Python", "matching_score": 2.8816237449645996, "max_stars_count": 0, "path": "showcase/post/migrations/0002_auto_20200629_1010.py" }, { "content": "# Generated by Django 3.0.6 on 2020-06-27 22:58\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n ]\n\n operations = [\n migrations.CreateModel(\n name='blogPost',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('post_title', models.CharField(max_length=50)),\n ('post_text', models.CharField(max_length=200)),\n ('pub_date', models.DateTimeField(verbose_name='date published')),\n ],\n ),\n migrations.CreateModel(\n name='Reply',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('reply_text', models.CharField(max_length=1000)),\n ('likes', models.IntegerField(default=0)),\n ('dislikes', models.IntegerField(default=0)),\n ('blogPost', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='post.blogPost')),\n ],\n ),\n ]\n", "id": "3985922", "language": "Python", "matching_score": 4.668325424194336, "max_stars_count": 0, "path": "showcase/post/migrations/0001_initial.py" }, { "content": "from django.db import models\nfrom django.utils import timezone\nimport datetime\n# Create your 
models here.\nclass Post(models.Model):\n post_title = models.CharField(max_length=50)\n post_text = models.CharField(max_length=200)\n pub_date = models.DateTimeField('date published')\n\n def __str__(self):\n return self.post_title\n\n def was_published_recently(self):\n return self.pub_date >= timezone.now() - datetime.timedelta(days=1)\n\nclass Reply(models.Model):\n Post = models.ForeignKey(Post, on_delete=models.CASCADE)\n reply_text = models.CharField(max_length=1000)\n likes = models.IntegerField(default=0)\n dislikes = models.IntegerField(default=0)\n\n def __str__(self):\n return self.reply_text\n", "id": "6965170", "language": "Python", "matching_score": 3.0426361560821533, "max_stars_count": 0, "path": "showcase/post/models.py" }, { "content": "import datetime\n\nfrom django.test import TestCase\nfrom django.urls import reverse\nfrom django.utils import timezone\n\nfrom .models import Question\n\n\ndef create_question(question_text, days):\n \"\"\"\n Create a question with the given question_text, published the given\n number of days offset to now (negative for questions published in the\n past, positive for questions that have yet to be published).\n \"\"\"\n time = timezone.now() + datetime.timedelta(days=days)\n return Question.objects.create(question_text=question_text, pub_date=time)\n\n\nclass QuestionModelTests(TestCase):\n\n def test_was_published_recently_with_future_question(self):\n \"\"\"\n was_published_recently() returns False for questions whose pub_date\n is in the future.\n \"\"\"\n time = timezone.now() + datetime.timedelta(days=30)\n future_question = Question(pub_date=time)\n self.assertIs(future_question.was_published_recently(), False)\n\n def test_was_published_recently_with_old_question(self):\n \"\"\"\n was_published_recently() returns False for questions whose pub_date\n is older than 1 day.\n \"\"\"\n time = timezone.now() - datetime.timedelta(days=1, seconds=1)\n old_question = Question(pub_date=time)\n self.assertIs(old_question.was_published_recently(), False)\n\n def test_was_published_recently_with_recent_question(self):\n \"\"\"\n was_published_recently() returns True for questions whose pub_date\n is within the last day.\n \"\"\"\n time = timezone.now() - datetime.timedelta(hours=23, minutes=59, seconds=59)\n recent_question = Question(pub_date=time)\n self.assertIs(recent_question.was_published_recently(), True)\n\nclass QuestionDetailViewTests(TestCase):\n def test_future_question(self):\n \"\"\"\n The detail view of a question with a pub_date in the future\n returns a 404 not found.\n \"\"\"\n future_question = create_question(question_text='Future question.', days=5)\n url = reverse('polls:detail', args=(future_question.id,))\n response = self.client.get(url)\n self.assertEqual(response.status_code, 404)\n\n def test_past_question(self):\n \"\"\"\n The detail view of a question with a pub_date in the past\n displays the question's text.\n \"\"\"\n past_question = create_question(question_text='Past Question.', days=-5)\n url = reverse('polls:detail', args=(past_question.id,))\n response = self.client.get(url)\n self.assertContains(response, past_question.question_text)\n", "id": "9189058", "language": "Python", "matching_score": 0.5385341048240662, "max_stars_count": 0, "path": "showcase/polls/tests.py" }, { "content": "from django.urls import path\n\nfrom . 
import views\n\n\napp_name = 'post'\nurlpatterns = [\n path('', views.index, name='index'),\n path('<int:post_id>/', views.detail, name='detail'),\n path('<int:post_id>/reply', views.reply, name='reply'),\n path('<int:post_id>/likes/', views.likes, name='likes'),\n]\n", "id": "342485", "language": "Python", "matching_score": 1.5245792865753174, "max_stars_count": 0, "path": "showcase/post/urls.py" }, { "content": "from django.shortcuts import render, get_object_or_404\nfrom django.template import loader\n# Create your views here.\nfrom django.http import HttpResponse, HttpResponseRedirect\nfrom django.urls import reverse\nfrom .models import Post, Reply\nfrom django.core.exceptions import ObjectDoesNotExist\n\n\ndef index(request):\n latest_post_list = Post.objects.order_by('-pub_date')[:5]\n context = {'latest_post_list': latest_post_list}\n return render(request, 'post/index.html', context)\n\ndef detail(request, post_id):\n post = get_object_or_404(Post, pk=post_id)\n return render(request, 'post/detail.html', {'post': post})\n\ndef reply(request, post_id):\n post = get_object_or_404(Post, pk=post_id)\n return render(request, 'post/results.html', {'post': post})\n\ndef likes(request, post_id):\n post = get_object_or_404(Post, pk=post_id)\n try:\n selected_reply = post.reply_set.get(pk=request.POST['reply'])\n except (KeyError, Reply.DoesNotExist):\n # Redisplay the post voting form.\n return render(request, 'post/detail.html', {\n 'post': post,\n 'error_message': \"You didn't select a response.\",\n })\n else:\n selected_reply.likes += 1\n selected_reply.save()\n # Always return an HttpResponseRedirect after successfully dealing\n # with POST data. This prevents data from being posted twice if a\n # user hits the Back button.\n return HttpResponseRedirect(reverse('post:reply', args=(post.id,)))\n", "id": "11265060", "language": "Python", "matching_score": 2.1051626205444336, "max_stars_count": 0, "path": "showcase/post/views.py" }, { "content": "from django.contrib import admin\nfrom .models import Post, Reply\n# Register your models here.\n\n\nadmin.site.register(Post)\nadmin.site.register(Reply)\n", "id": "1943102", "language": "Python", "matching_score": 0.5929005146026611, "max_stars_count": 0, "path": "showcase/post/admin.py" } ]
1.787827
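The post app records above implement a tutorial-style vote flow: Post rows hold Reply rows, and the likes view bumps a counter on the reply picked in a form. A brief Django-shell sketch of the model layer, assuming the showcase project is configured and migrated; every literal string below is made up for illustration:

```python
# Django shell sketch (python manage.py shell) for the post app above.
# Assumption: the showcase project is set up and migrated; the titles
# and texts below are invented.
import datetime

from django.utils import timezone
from post.models import Post

post = Post.objects.create(
    post_title="Hello",
    post_text="First post",
    pub_date=timezone.now() - datetime.timedelta(hours=2),
)
assert post.was_published_recently()  # published within the last day

reply = post.reply_set.create(reply_text="Nice post!")  # FK set for us
reply.likes += 1  # the same increment the likes view performs on POST
reply.save()
```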
mmubarak0
[ { "content": "# -*- coding: utf-8 -*-\n\ndef print_languages(version):\n\tprint('\\nSupported languages: '),\n\tif version == 1:\n\t\tprint('''\n ┌───────────────────────┬───────────────────────┬───────────────────────┐\n │ Afrikaans - af │ Hawaiian - haw │ Persian - fa │\n │ Albanian - sq │ Hebrew - he │ Polish - pl │\n │ Amharic - am │ Hindi - hi │ Portuguese - pt *│\n │ Arabic - ar │ Hmong - hmv │ Punjabi - pa │\n │ Armenian - hy │ Hungarian - hu │ Romanian - ro │\n │ Azerbaijani - az │ Icelandic - is │ Russian - ru *│\n │ Basque - eu │ Igbo - ig │ Samoan - sm │\n │ Belarusian - be │ Indonesian - id │ Scots Gaelic - gd │\n │ Bengali - bn │ Irish - ga │ Serbian Cyril. - sr-CY│\n │ Bosnian - bs │ Italian - it *│ Serbian Latin - sr-LA│\n │ Bulgarian - bg │ Japanese - ja *│ Sesotho - st │\n │ Catalan - ca │ Javanese - jv │ Shona - sn │\n │ Cebuano - ceb │ Kannada - kn │ Sindhi - sd │\n │ Chichewa - ny │ Kazakh - kk │ Sinhala - si │\n │ Chinese Simp. - zh-CN│ Khmer - km │ Slovak - sk *│\n │ Chinese Trad. - zh-TW│ Korean - ko *│ Slovenian - sl │\n │ Corsican - co │ Kurdish - ku │ Somali - so │\n │ Croatian - hr │ Kyrgyz - ky │ Spanish - es *│\n │ Czech - cs │ Lao - lo │ Sundanese - su │\n │ Danish - da *│ Latin - la │ Swahili - sw │\n │ Dutch - nl *│ Latvian - lv │ Swedish - sv *│\n │ English - en *│ Lithuanian - lt │ Tajik - tg │\n │ Esperanto - eo │ Luxembourgish - lb │ Tamil - ta │\n │ Estonian - et │ Macedonian - mk │ Telugu - te │\n │ Filipino - fil │ Malagasy - mg │ Thai - th │\n │ Finnish - fi │ Malay - ms │ Turkish - tr *│\n │ French - fr *│ Malayalam - ml │ Ukrainian - uk │\n │ Frisian - fy │ Maltese - mt │ Urdu - ur │\n │ Galician - gl │ Maori - mi │ Uzbek - uz │\n │ Georgian - ka │ Marathi - mr │ Vietnamese - vi │\n │ German - de *│ Mongolian - mn │ Welsh - cy │\n │ Greek - el │ Myanmar - my │ Xhosa - xh │\n │ Gujarati - gu │ Nepali - ne │ Yiddish - yi │\n │ Haitian Creole - ht │ Norwegian - no │ Yoruba - yo │\n │ Hausa - ha │ Pashto - ps │ Zulu - zu │\n └───────────────────────┴───────────────────────┴───────────────────────┘''')\n\t\tprint('* indicates support for text-to-speech')\n\telif version == 2:\n\t\tprint('''\n ┌───────────────────────┬───────────────────────┬───────────────────────┐\n │ Afrikaans - af │ Hawaiian - haw │ Persian - fa │\n │ Albanian - sq │ Hebrew - he │ Polish - pl │\n │ Amharic - am │ Hindi - hi │ Portuguese - pt │\n │ Arabic - ar │ Hmong - hmv │ Punjabi - pa │\n │ Armenian - hy │ Hungarian - hu │ Romanian - ro │\n │ Azerbaijani - az │ Icelandic - is │ Russian - ru │\n │ Basque - eu │ Igbo - ig │ Samoan - sm │\n │ Belarusian - be │ Indonesian - id │ Scots Gaelic - gd │\n │ Bengali - bn │ Irish - ga │ Serbian - sr │\n │ Bosnian - bs │ Italian - it │ Sesotho - st │\n │ Bulgarian - bg │ Japanese - ja │ Shona - sn │\n │ Catalan - ca │ Javanese - jv │ Sindhi - sd │\n │ Cebuano - ceb │ Kannada - kn │ Sinhala - si │\n │ Chichewa - ny │ Kazakh - kk │ Slovak - sk │\n │ Chinese Simp. - zh-CN│ Khmer - km │ Slovenian - sl │\n │ Chinese Trad. 
- zh-TW│ Korean - ko │ Somali - so │\n │ Corsican - co │ Kurdish - ku │ Spanish - es │\n │ Croatian - hr │ Kyrgyz - ky │ Sundanese - su │\n │ Czech - cs │ Lao - lo │ Swahili - sw │\n │ Danish - da │ Latin - la │ Swedish - sv │\n │ Dutch - nl │ Latvian - lv │ Tajik - tg │\n │ English - en │ Lithuanian - lt │ Tamil - ta │\n │ Esperanto - eo │ Luxembourgish - lb │ Telugu - te │\n │ Estonian - et │ Macedonian - mk │ Thai - th │\n │ Filipino - fil │ Malagasy - mg │ Turkish - tr │\n │ Finnish - fi │ Malay - ms │ Ukrainian - uk │\n │ French - fr │ Malayalam - ml │ Urdu - ur │\n │ Frisian - fy │ Maltese - mt │ Uzbek - uz │\n │ Galician - gl │ Maori - mi │ Vietnamese - vi │\n │ Georgian - ka │ Marathi - mr │ Welsh - cy │\n │ German - de │ Mongolian - mn │ Xhosa - xh │\n │ Greek - el │ Myanmar - my │ Yiddish - yi │\n │ Gujarati - gu │ Nepali - ne │ Yoruba - yo │\n │ Haitian Creole - ht │ Norwegian - no │ Zulu - zu │\n │ Hausa - ha │ Pashto - ps │ │\n └───────────────────────┴───────────────────────┴───────────────────────┘''')\n\ndef print_language_name(lang):\n\tif (lang.lower() == 'afrikaans' or lang.lower() == 'af' or lang.lower() == 'afr'):\n\t\tprint('🇿🇦 Afrikaans: '),\n\telif (lang.lower() == 'albanian' or lang.lower() == 'sq' or lang.lower() == 'sqi' or lang.lower() == 'alb'):\n\t\tprint('🇦🇱 Albanian: '),\n\telif (lang.lower() == 'amharic' or lang.lower() == 'am' or lang.lower() == 'amh'):\n\t\tprint('🇪🇹 Amharic: '),\n\telif (lang.lower() == 'arabic' or lang.lower() == 'ar' or lang.lower() == 'ara'):\n\t\tprint(' Arabic: '),\n\telif (lang.lower() == 'armenian' or lang.lower() == 'hy' or lang.lower() == 'arm' or lang.lower() == 'hye'):\n\t\tprint('🇦🇲 Armenian: '),\n\telif (lang.lower() == 'azerbaijani' or lang.lower() == 'az' or lang.lower() == 'aze'):\n\t\tprint('🇦🇿 Azerbaijani: '),\n\telif (lang.lower() == 'basque' or lang.lower() == 'eu' or lang.lower() == 'eur' or lang.lower() == 'baq'):\n\t\tprint(' Basque: '),\n\telif (lang.lower() == 'belarusian' or lang.lower() == 'be' or lang.lower() == 'bel'):\n\t\tprint('🇧🇾 Belarusian: '),\n\telif (lang.lower() == 'bengali' or lang.lower() == 'bn' or lang.lower() == 'ben'):\n\t\tprint('🇧🇩 Bengali:'),\n\telif (lang.lower() == 'bosnian' or lang.lower() == 'bs' or lang.lower() == 'bos'):\n\t\tprint('🇧🇦 Bosnian: '),\n\telif (lang.lower() == 'bulgarian' or lang.lower() == 'bg' or lang.lower() == 'bul'):\n\t\tprint('🇧🇬 Bulgarian: '),\n\telif (lang.lower() == 'catalan' or lang.lower() == 'ca' or lang.lower() == 'cat'):\n\t\tprint(' Catalan: '),\n\telif (lang.lower() == 'cebuano' or lang.lower() == 'ceb'):\n\t\tprint(' Cebuano: '),\n\telif (lang.lower() == 'chichewa' or lang.lower() == 'ny' or lang.lower() == 'nya'):\n\t\tprint('🇲🇼 🇿🇼 Chichewa: '),\n\telif (lang.lower() == 'chinese' or lang.lower() == 'zh' or lang.lower() == 'zh-cn' or lang.lower() == 'zho' or lang.lower() == 'chi'):\n\t\tprint('🇨🇳 Chinese: '),\n\telif (lang.lower() == 'zh-tw'):\n\t\tprint('🇨🇳 Chinese (Traditional): '),\n\telif (lang.lower() == 'corsican' or lang.lower() == 'co' or lang.lower() == 'cos'):\n\t\tprint(' Corsican: '),\n\telif (lang.lower() == 'croatian' or lang.lower() == 'hr' or lang.lower() == 'hrw'):\n\t\tprint('🇭🇷 Croatian: '),\n\telif (lang.lower() == 'czech' or lang.lower() == 'cs' or lang.lower() == 'ces' or lang.lower() == 'cze'):\n\t\tprint('🇨🇿 Czech: '),\n\telif (lang.lower() == 'danish' or lang.lower() == 'da' or lang.lower() == 'dan'):\n\t\tprint('🇩🇰 Danish: '),\n\telif (lang.lower() == 'dutch' or lang.lower() == 'nl' or lang.lower() == 'nld' or lang.lower() == 
'dut'):\n\t\tprint('🇳🇱 Dutch: '),\n\telif (lang.lower() == 'english' or lang.lower() == 'en' or lang.lower() == 'eng'):\n\t\tprint('🇬🇧 English: '),\n\telif (lang.lower() == 'esperanto' or lang.lower() == 'eo' or lang.lower() == 'epo'):\n\t\tprint(' Esperanto: '),\n\telif (lang.lower() == 'estonian' or lang.lower() == 'et' or lang.lower() == 'est'):\n\t\tprint('🇪🇪 Estonian: '),\n\telif (lang.lower() == 'filipino' or lang.lower() == 'tl' or lang.lower() == 'fil'):\n\t\tprint('🇵🇭 Filipino: '),\n\telif (lang.lower() == 'finnish' or lang.lower() == 'fi' or lang.lower() == 'fin'):\n\t\tprint('🇫🇮 Finnish: '),\n\telif (lang.lower() == 'french' or lang.lower() == 'fr' or lang.lower() == 'fre' or lang.lower() == 'fra'):\n\t\tprint('🇫🇷 French: '),\n\telif (lang.lower() == 'frisian' or lang.lower() == 'fy' or lang.lower() == 'fry' or lang.lower() == 'frr' or lang.lower() == 'frs'):\n\t\tprint(' Frisian: '),\n\telif (lang.lower() == 'galician' or lang.lower() == 'gl' or lang.lower() == 'glg'):\n\t\tprint(' Galician: '),\n\telif (lang.lower() == 'georgian' or lang.lower() == 'ka' or lang.lower() == 'kat' or lang.lower() == 'geo'):\n\t\tprint('🇬🇪 Georgian: '),\n\telif (lang.lower() == 'german' or lang.lower() == 'de' or lang.lower() == 'deu' or lang.lower() == 'ger'):\n\t\tprint('🇩🇪 German: '),\n\telif (lang.lower() == 'greek' or lang.lower() == 'el' or lang.lower() == 'ell' or lang.lower() == 'gre'):\n\t\tprint('🇬🇷 Greek: '),\n\telif (lang.lower() == 'gujarati' or lang.lower() == 'gu' or lang.lower() == 'guj'):\n\t\tprint(' Gujarati: '),\n\telif (lang.lower() == 'haitian creole' or lang.lower() == 'haitian' or lang.lower() == 'ht' or lang.lower() == 'hat'):\n\t\tprint('🇭🇹 Haitian Creole: '),\n\telif (lang.lower() == 'hausa' or lang.lower() == 'ha' or lang.lower() == 'hau'):\n\t\tprint('🇳🇪 Hausa: '),\n\telif (lang.lower() == 'hawaiian' or lang.lower() == 'haw'):\n\t\tprint(' Hawaiian: '),\n\telif (lang.lower() == 'hebrew' or lang.lower() == 'he' or lang.lower() == 'heb'):\n\t\tprint('🇮🇱 Hebrew: '),\n\telif (lang.lower() == 'hindi' or lang.lower() == 'hi' or lang.lower() == 'hin'):\n\t\tprint('🇮🇳 Hindi: '),\n\telif (lang.lower() == 'hmong' or lang.lower() == 'hmn'):\n\t\tprint(' Hmong: '),\n\telif (lang.lower() == 'hungarian' or lang.lower() == 'hu' or lang.lower() == 'hun'):\n\t\tprint('🇭🇺 Hungarian: '),\n\telif (lang.lower() == 'icelandic' or lang.lower() == 'is' or lang.lower() == 'isl' or lang.lower() == 'ice'):\n\t\tprint('🇮🇸 Icelandic: '),\n\telif (lang.lower() == 'igbo' or lang.lower() == 'ig' or lang.lower() == 'ibo'):\n\t\tprint('🇳🇬 Igbo: '),\n\telif (lang.lower() == 'indonesian' or lang.lower() == 'id' or lang.lower() == 'ind'):\n\t\tprint('🇮🇩 Indonesian: '),\n\telif (lang.lower() == 'irish' or lang.lower() == 'ga' or lang.lower() == 'gle'):\n\t\tprint('🇮🇪 Irish: '),\n\telif (lang.lower() == 'italian' or lang.lower() == 'it' or lang.lower() == 'ita'):\n\t\tprint('🇮🇹 Italian: '),\n\telif (lang.lower() == 'japanese' or lang.lower() == 'ja' or lang.lower() == 'jpn'):\n\t\tprint('🇯🇵 Japanese: '),\n\telif (lang.lower() == 'javanese' or lang.lower() == 'jv' or lang.lower() == 'jav'):\n\t\tprint('🇮🇩 Javanese: '),\n\telif (lang.lower() == 'kannada' or lang.lower() == 'kn' or lang.lower() == 'kan'):\n\t\tprint('🇮🇳 Kannada: '),\n\telif (lang.lower() == 'kazakh' or lang.lower() == 'kk' or lang.lower() == 'kaz'):\n\t\tprint('🇰🇿 Kazakh: '),\n\telif (lang.lower() == 'khmer' or lang.lower() == 'km' or lang.lower() == 'khm'):\n\t\tprint('🇰🇭 Khmer: '),\n\telif (lang.lower() == 'korean' or lang.lower() == 'ko' or 
lang.lower() == 'kor'):\n\t\tprint('🇰🇷 Korean: '),\n\telif (lang.lower() == 'kurdish' or lang.lower() == 'kurmanji' or lang.lower() == 'ku' or lang.lower() == 'kur'):\n\t\tprint(' Kurdish: '),\n\telif (lang.lower() == 'kyrgyz' or lang.lower() == 'ky' or lang.lower() == 'kir'):\n\t\tprint('🇰🇬 Kyrgyz: '),\n\telif (lang.lower() == 'lao' or lang.lower() == 'lo'):\n\t\tprint('🇱🇦 Lao: '),\n\telif (lang.lower() == 'latin' or lang.lower() == 'la' or lang.lower() == 'lat'):\n\t\tprint('🇻🇦 Latin: '),\n\telif (lang.lower() == 'latvian' or lang.lower() == 'lv' or lang.lower() == 'lav'):\n\t\tprint('🇱🇻 Latvian: '),\n\telif (lang.lower() == 'lithuanian' or lang.lower() == 'lt' or lang.lower() == 'lit'):\n\t\tprint('🇱🇹 Lithuanian: '),\n\telif (lang.lower() == 'luxembourgish' or lang.lower() == 'lb' or lang.lower() == 'ltz'):\n\t\tprint('🇱🇺 Luxembourgish: '),\n\telif (lang.lower() == 'macedonian' or lang.lower() == 'mk' or lang.lower() == 'mkd' or lang.lower() == 'mac'):\n\t\tprint('🇲🇰 Macedonian: '),\n\telif (lang.lower() == 'malagasy' or lang.lower() == 'mg' or lang.lower() == 'mlg'):\n\t\tprint('🇲🇬 Malagasy: '),\n\telif (lang.lower() == 'malay' or lang.lower() == 'ms' or lang.lower() == 'msa' or lang.lower() == 'may'):\n\t\tprint('🇲🇾 Malay: '),\n\telif (lang.lower() == 'malayalam' or lang.lower() == 'ml' or lang.lower() == 'mal'):\n\t\tprint('🇮🇳 Malayalam: '),\n\telif (lang.lower() == 'maltese' or lang.lower() == 'mt' or lang.lower() == 'mlt'):\n\t\tprint('🇲🇹 Maltese: '),\n\telif (lang.lower() == 'maori' or lang.lower() == 'mi' or lang.lower() == 'mao' or lang.lower() == 'mri'):\n\t\tprint(' Maori: '),\n\telif (lang.lower() == 'marathi' or lang.lower() == 'mr' or lang.lower() == 'mar'):\n\t\tprint('🇮🇳 Marathi: '),\n\telif (lang.lower() == 'mongolian' or lang.lower() == 'mn' or lang.lower() == 'mon'):\n\t\tprint('🇲🇳 Mongolian: '),\n\telif (lang.lower() == 'myanmar' or lang.lower() == 'burmese' or lang.lower() == 'my' or lang.lower() == 'mya' or lang.lower() == 'bur'):\n\t\tprint('🇲🇲 Myanmar: '),\n\telif (lang.lower() == 'nepali' or lang.lower() == 'ne' or lang.lower() == 'nep'):\n\t\tprint('🇳🇵 Nepali: '),\n\telif (lang.lower() == 'norwegian' or lang.lower() == 'no' or lang.lower() == 'nor'):\n\t\tprint('🇳🇴 Norwegian: '),\n\telif (lang.lower() == 'pashto' or lang.lower() == 'ps' or lang.lower() == 'pus'):\n\t\tprint('🇦🇫 Pashto: '),\n\telif (lang.lower() == 'persian' or lang.lower() == 'farsi' or lang.lower() == 'fa' or lang.lower() == 'fas' or lang.lower() == 'per'):\n\t\tprint('🇮🇷 Persian: '),\n\telif (lang.lower() == 'polish' or lang.lower() == 'pl' or lang.lower() == 'pol'):\n\t\tprint('🇵🇱 Polish: '),\n\telif (lang.lower() == 'portuguese' or lang.lower() == 'pt' or lang.lower() == 'por'):\n\t\tprint('🇵🇹 Portuguese: '),\n\telif (lang.lower() == 'punjabi' or lang.lower() == 'pa' or lang.lower() == 'pan'):\n\t\tprint('🇵🇰 🇮🇳 Punjabi: '),\n\telif (lang.lower() == 'romanian' or lang.lower() == 'ro' or lang.lower() == 'ron' or lang.lower() == 'rum'):\n\t\tprint('🇷🇴 Romanian: '),\n\telif (lang.lower() == 'russian' or lang.lower() == 'ru' or lang.lower() == 'rus'):\n\t\tprint('🇷🇺 Russian: '),\n\telif (lang.lower() == 'samoan' or lang.lower() == 'sm' or lang.lower() == 'smo'):\n\t\tprint('🇼🇸 Samoan: '),\n\telif (lang.lower() == 'scots gaelic' or lang.lower() == 'scottish gaelic' or lang.lower() == 'gaelic' or lang.lower() == 'gd' or lang.lower() == 'gla'):\n\t\tprint('🏴󠁧󠁢󠁳󠁣󠁴󠁿 Scots Gaelic: '),\n\telif (lang.lower() == 'serbian' or lang.lower() == 'sr' or lang.lower() == 'srp' or lang.lower() == 
'sr-latn'):\n\t\tprint('🇷🇸 Serbian: '),\n\telif (lang.lower() == 'sesotho' or lang.lower() == 'sotho' or lang.lower() == 'st' or lang.lower() == 'sot'):\n\t\tprint('🇱🇸 Sesotho: '),\n\telif (lang.lower() == 'shona' or lang.lower() == 'sn' or lang.lower() == 'sna'):\n\t\tprint('🇿🇼 Shona: '),\n\telif (lang.lower() == 'sindhi' or lang.lower() == 'sd' or lang.lower() == 'snd'):\n\t\tprint('🇵🇰 🇮🇳 Sindhi: '),\n\telif (lang.lower() == 'sinhala' or lang.lower() == 'si' or lang.lower() == 'sin'):\n\t\tprint('🇱🇰 Sinhala: '),\n\telif (lang.lower() == 'slovak' or lang.lower() == 'sk' or lang.lower() == 'slk' or lang.lower() == 'slo'):\n\t\tprint('🇸🇰 Slovak: '),\n\telif (lang.lower() == 'slovenian' or lang.lower() == 'sl' or lang.lower() == 'slv'):\n\t\tprint('🇸🇮 Slovenian: '),\n\telif (lang.lower() == 'somali' or lang.lower() == 'so' or lang.lower() == 'som'):\n\t\tprint('🇸🇴 Somali: '),\n\telif (lang.lower() == 'spanish' or lang.lower() == 'es' or lang.lower() == 'spa'):\n\t\tprint('🇪🇸 Spanish: '),\n\telif (lang.lower() == 'sundanese' or lang.lower() == 'su' or lang.lower() == 'sun'):\n\t\tprint('🇮🇩 Sundanese: '),\n\telif (lang.lower() == 'swahili' or lang.lower() == 'sw' or lang.lower() == 'swa'):\n\t\tprint(' Swahili: '),\n\telif (lang.lower() == 'swedish' or lang.lower() == 'sv' or lang.lower() == 'swe'):\n\t\tprint('🇸🇪 Swedish: '),\n\telif (lang.lower() == 'tajik' or lang.lower() == 'tg' or lang.lower() == 'tgk'):\n\t\tprint('🇹🇯 Tajik: '),\n\telif (lang.lower() == 'tamil' or lang.lower() == 'ta' or lang.lower() == 'tam'):\n\t\tprint('🇱🇰 🇮🇳 Tamil: '),\n\telif (lang.lower() == 'telugu' or lang.lower() == 'te' or lang.lower() == 'tel'):\n\t\tprint('🇮🇳 Telugu: '),\n\telif (lang.lower() == 'thai' or lang.lower() == 'th' or lang.lower() == 'tha'):\n\t\tprint('🇹🇭 Thai: '),\n\telif (lang.lower() == 'turkish' or lang.lower() == 'tr' or lang.lower() == 'tur'):\n\t\tprint('🇹🇷 Turkish: '),\n\telif (lang.lower() == 'ukrainian' or lang.lower() == 'uk' or lang.lower() == 'ukr'):\n\t\tprint('🇺🇦 Ukrainian: '),\n\telif (lang.lower() == 'urdu' or lang.lower() == 'ur' or lang.lower() == 'urd'):\n\t\tprint('🇵🇰 🇮🇳 Urdu: '),\n\telif (lang.lower() == 'uzbek' or lang.lower() == 'uz' or lang.lower() == 'uzb'):\n\t\tprint('🇺🇿 Uzbek: '),\n\telif (lang.lower() == 'vietnamese' or lang.lower() == 'vi' or lang.lower() == 'vie'):\n\t\tprint('🇻🇳 Vietnamese: '),\n\telif (lang.lower() == 'welsh' or lang.lower() == 'cy' or lang.lower() == 'cym' or lang.lower() == 'wel'):\n\t\tprint('🏴󠁧󠁢󠁷󠁬󠁳󠁿 Welsh: '),\n\telif (lang.lower() == 'xhosa' or lang.lower() == 'xh' or lang.lower() == 'xho'):\n\t\tprint('🇿🇦 🇿🇼 Xhosa: '),\n\telif (lang.lower() == 'yiddish' or lang.lower() == 'yi' or lang.lower() == 'yid'):\n\t\tprint('🇮🇱 Yiddish: '),\n\telif (lang.lower() == 'yoruba' or lang.lower() == 'yo' or lang.lower() == 'yor'):\n\t\tprint('🇳🇬 🇧🇯 🇹🇬 Yoruba: '),\n\telif (lang.lower() == 'zulu' or lang.lower() == 'zu' or lang.lower() == 'zul'):\n\t\tprint('🇿🇦 Zulu: '),\n", "id": "3703320", "language": "Python", "matching_score": 1.8959347009658813, "max_stars_count": 5, "path": "print_languages.py" }, { "content": "import random\n\ndef random_language():\n\tlangs = ['af', 'sq', 'am', 'ar', 'hy', 'az', 'eu', 'be', 'bn', 'bs', 'bg',\n\t\t'ca', 'ceb', 'ny', 'zh', 'co', 'hr', 'cs', 'da', 'nl', 'en', 'eo', 'et',\n\t\t'tl', 'fi', 'fr', 'fy', 'gl', 'ka', 'de', 'el', 'gu', 'ht', 'ha', 'haw',\n\t\t'he', 'hi', 'hmn', 'hu', 'is', 'ig', 'id', 'ga', 'it', 'ja', 'jv', 'kn',\n\t\t'kk', 'km', 'ko', 'ku', 'ky', 'lo', 'la', 'lv', 'lt', 'lb', 'mk', 'mg',\n\t\t'ms', 'ml', 'mt', 
'mi', 'mr', 'mn', 'my', 'ne', 'no', 'ps', 'fa', 'pl',\n\t\t'pt', 'pa', 'ro', 'ru', 'sm', 'gd', 'sr', 'st', 'sn', 'sd', 'si', 'sk',\n\t\t'sl', 'so', 'es', 'su', 'sw', 'sv', 'tg', 'ta', 'te', 'th', 'tr', 'uk',\n\t\t'ur', 'uz', 'vi', 'cy', 'xh', 'yi', 'yo', 'zu']\n\treturn langs[random.randint(0, len(langs) - 1)]\n", "id": "223935", "language": "Python", "matching_score": 0.11151739209890366, "max_stars_count": 5, "path": "rng.py" }, { "content": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\nimport sys\nfrom googletrans import Translator\nfrom language_to_iso import lang_to_iso\nfrom web_page import web_page_translation\nfrom print_languages import print_languages, print_language_name\nfrom helpers import credentials, print_usage, valid_lang\n\nopt_b = None\nopt_c = None\n\n\ndef translate_text(text, target_language):\n if len(text) > 10000:\n print('Error: Text too large. Maximum: 10000 characters')\n sys.exit()\n try:\n translator = Translator()\n # I see the bug occurs in this line ----\n source = translator.detect(text)\n # --------------------------------------\n translation = translator.translate(text, dest=target_language)\n if opt_c == True:\n print('Detected language confidence: '),\n print('{:.2%}'.format(source.confidence)) # convert to %\n if opt_b != True:\n if source.lang != target_language:\n print_language_name(source.lang)\n print(translation.origin)\n print_language_name(target_language)\n# print(decode(translation.text))\n print(translation.text)\n except Exception as e:\n print('Error: '),\n print(e)\n sys.exit()\n\n\ndef file_translation(argv):\n try:\n f = open(argv[2], 'r')\n except Exception:\n print(\"Error: Can't find file or read data\")\n sys.exit()\n else:\n if f.mode == 'r':\n text = f.read(10000)\n if len(text) >= 10000:\n print('Error: File too large. 
Maximum: 10000 characters')\n                f.close()\n                sys.exit()\n            if text[-1] == '\\n': # if last char is \\n\n                text = text[:-1] # remove it\n            if len(argv) == 3: # if no language is given, default to English\n                translate_text(text, 'en')\n            else:\n                for l in argv[3:]: # iterate through languages\n                    lang = lang_to_iso(l, False, False)\n                    if valid_lang(lang) == True:\n                        translate_text(text, lang)\n        f.close()\n\n\ndef interactive_translation():\n    print('Type \\033[1;37mCHANGE\\033[0;0m to change target language')\n    print('Type \\033[1;37mEXIT\\033[0;0m to exit')\n    try:\n        lang = input('Enter target language: ')\n    except: # handles Ctrl-D / Ctrl-C\n        print('')\n        sys.exit()\n    if lang == 'EXIT':\n        sys.exit()\n    lang = lang_to_iso(lang, True, False)\n    if valid_lang(lang) == True:\n        print('\\033[1;32m✔︎\\033[0;0m')\n    text = ''\n    try:\n        while True:\n            while valid_lang(lang) == False or text == 'CHANGE':\n                text = ''\n                lang = input('Enter target language: ')\n                if lang == 'EXIT':\n                    sys.exit()\n                lang = lang_to_iso(lang, True, False)\n                if valid_lang(lang) == True:\n                    print('\\033[1;32m✔︎\\033[0;0m')\n            text = input('Enter text to translate: ')\n            if text == 'EXIT':\n                sys.exit()\n            if text == 'CHANGE':\n                continue\n            translate_text(text, lang)\n    except: # handles Ctrl-D / Ctrl-C\n        print('')\n        sys.exit()\n\n\ndef main(argv):\n    global opt_b\n    global opt_c\n    if '-b' in argv:\n        opt_b = True\n        argv.remove('-b')\n    if '--bare' in argv:\n        opt_b = True\n        argv.remove('--bare')\n    if '-c' in argv:\n        opt_c = True\n        argv.remove('-c')\n    if '--confidence' in argv:\n        opt_c = True\n        argv.remove('--confidence')\n    if len(argv) < 2 or argv[1] == '-h' or argv[1] == '--help':\n        print_usage(2, argv[0])\n        sys.exit()\n    elif len(argv) == 2 and (argv[1] == '-l' or argv[1] == '--languages'):\n        print_languages(2)\n        sys.exit()\n    elif len(argv) >= 3 and (argv[1] == '-f' or argv[1] == '--file'):\n        file_translation(argv)\n    elif len(argv) >= 3 and (argv[1] == '-u' or argv[1] == '--url'):\n        web_page_translation(argv)\n    elif len(argv) >= 2 and (argv[1] == '-i' or argv[1] == '--interactive'):\n        interactive_translation()\n    elif len(argv) == 2:\n        translate_text(argv[1], 'en')\n    elif len(argv) > 2:\n        for l in argv[2:]:\n            lang = lang_to_iso(l, False, False)\n            if valid_lang(lang) == True:\n                translate_text(argv[1], lang)\n\n\nif __name__ == \"__main__\":\n    main(sys.argv)\n", "id": "7739829", "language": "Python", "matching_score": 3.353123664855957, "max_stars_count": 0, "path": "v2-translate.py" }, { "content": "# -*- coding: utf-8 -*-\n\nimport html\n\ndef decode(string):\n\t# html.unescape replaces the old HTMLParser().unescape(), which was removed from the stdlib\n\treturn html.unescape(string)\n\ndef valid_lang(lang):\n\tif lang == False:\n\t\treturn False\n\treturn True\n\ndef credentials():\n\tprint('''\\\n┌──────────────────────────────────────────────────────┐\n│   You need a credentials file to use this version    │\n│       Refer to README.md or instructions.txt         │\n│    Or use the other version that doesn't need it     │\n└──────────────────────────────────────────────────────┘''')\n\ndef print_usage(version, name):\n\tif version == 1:\n\t\tprint('''\\\nusage: ''' + name + ''' [options] [Input to translate] [target language [...]]\n\n\\033[1;37moptional arguments:\\033[0;0m\n  -h, --help            show this help message and exit\n  -l, --languages       print supported languages and exit\n  -b, --bare            output the bare translation\n  -c, --confidence      display detected language confidence level\n  -s, --speech          activate text-to-speech\n  -f, --file FILE       translate FILE\n  -u, --url URL         translate web page (opens 
browser)\n  -i, --interactive     interactive mode''')\n\telif version == 2:\n\t\tprint('''\\\nusage: ''' + name + ''' [options] [Input to translate] [target language [...]]\n\n\\033[1;37moptional arguments:\\033[0;0m\n  -h, --help            show this help message and exit\n  -l, --languages       print supported languages and exit\n  -b, --bare            output the bare translation\n  -c, --confidence      display detected language confidence level\n  -f, --file FILE       translate FILE\n  -u, --url URL         translate web page (opens browser)\n  -i, --interactive     interactive mode''')\n", "id": "9849270", "language": "Python", "matching_score": 1.5587221384048462, "max_stars_count": 0, "path": "helpers.py" }, { "content": "# -*- coding: utf-8 -*-\n\nimport os\nfrom helpers import decode\ntry:\n\tfrom google.cloud import texttospeech\nexcept:\n\tpass\n\n# supported languages: da, de, en, fr, it, ja, ko, nl, pt, ru, sk, sv, tr, es\n\ndef text_to_speech(text, lang):\n\ttry:\n\t\ttext = decode(text)\n\t\tclient = texttospeech.TextToSpeechClient()\n\t\tinput_text = texttospeech.types.SynthesisInput(text=text)\n\t\tvoice = texttospeech.types.VoiceSelectionParams(language_code=lang,\n\t\t\tssml_gender=texttospeech.enums.SsmlVoiceGender.NEUTRAL)\n\t\taudio_config = texttospeech.types.AudioConfig(\n\t\t\taudio_encoding=texttospeech.enums.AudioEncoding.MP3)\n\n\t\tresponse = client.synthesize_speech(input_text, voice, audio_config)\n\texcept:\n\t\treturn\n\ttry:\n\t\tprint('🔉')\n\t\twith open('output-95af1670a84.mp3', 'wb') as out:\n\t\t\tout.write(response.audio_content)\n\t\tos.system('afplay output-95af1670a84.mp3')\n\t\tos.system('rm -f output-95af1670a84.mp3')\n\texcept:\n\t\tprint('Error: Audio could not be played')\n\t\tos.system('rm -f output-95af1670a84.mp3')\n", "id": "8246003", "language": "Python", "matching_score": 0.6806450486183167, "max_stars_count": 5, "path": "speech.py" }, { "content": "import os\nimport sys\nimport urllib.request\nfrom timeout import *\nfrom language_to_iso import lang_to_iso\nfrom helpers import valid_lang\n\nTRANSLATE_URL = 'https://translate.google.com/translate?sl=auto&tl='\nTRANSLATE_URL_EN = 'https://translate.google.com/translate?sl=auto&tl=en&u='\n\ndef web_page_translation(argv):\n\turl = argv[2]\n\tif url.startswith('http://') == False and url.startswith('https://') == False:\n\t\turl = 'https://' + url\n\ttry:\n\t\twith time_limit(5):\n\t\t\turllib.request.urlopen(url)\n\texcept KeyboardInterrupt:\n\t\tprint('')\n\t\tsys.exit()\n\texcept TimeoutException:\n\t\tprint('Error: Timeout')\n\t\tsys.exit()\n\texcept:\n\t\tprint('Error: Web site could not be reached')\n\t\tsys.exit()\n\tif len(argv) > 3:\n\t\tlang = lang_to_iso(argv[3], False, False)\n\t\tif valid_lang(lang) == False:\n\t\t\tsys.exit()\n\t\turl = TRANSLATE_URL + lang + '&u=' + url\n\telse:\n\t\turl = TRANSLATE_URL_EN + url\n\ttry:\n\t\tos.system(\"open '\" + url + \"'\")\n\texcept:\n\t\tprint('Error: URL could not be opened')\n", "id": "9238668", "language": "Python", "matching_score": 2.581357717514038, "max_stars_count": 0, "path": "web_page.py" }, { "content": "# -*- coding: utf-8 -*-\n\nfrom languages import languages\nfrom timeout import *\ntry:\n\tfrom spellchecker import SpellChecker\nexcept:\n\tpass\n\ndef check_spelling(lang):\n\ttry:\n\t\tspell = SpellChecker()\n\t\tcorrected = spell.correction(lang)\n\texcept:\n\t\treturn False\n\tif lang_to_iso(corrected, False, True) != False:\n\t\tprint(\"Did you mean '\" + corrected.capitalize() + \"'?\")\n\t\treturn corrected\n\treturn False\n\n# Returns iso_639_1 code, checks spelling\n\ndef lang_to_iso(lang, 
interactive, spell_check):\n\tiso = languages(lang)\n\tif iso != False:\n\t\treturn iso\n\telse:\n\t\tif spell_check == True:\n\t\t\treturn False\n\t\tif interactive == False:\n\t\t\tprint(\"'\" + lang + \"' is not a valid language\")\n\t\telse:\n\t\t\tprint('\\033[1;31m✘\\033[0;0m')\n\t\ttry:\n\t\t\twith time_limit(2):\n\t\t\t\tif lang.isalnum() == True:\n\t\t\t\t\tcorrected = check_spelling(lang.lower())\n\t\t\t\t\tif corrected != False:\n\t\t\t\t\t\treturn languages(corrected)\n\t\texcept TimeoutException:\n\t\t\tpass\n\t\treturn False\n\n\n", "id": "4519259", "language": "Python", "matching_score": 1.4679317474365234, "max_stars_count": 5, "path": "language_to_iso.py" } ]
1.558722
paultroeger
[ { "content": "\"\"\"Manual tests for file encryption.\"\"\"\nfrom BackEnd import actions as act\nfrom os import urandom\n\nif __name__ == '__main__':\n secret_one = urandom(16)\n packages = act.split_secret_into_share_packages(\"SecretOne\", secret_one, 10, 10, [])\n act.push_packages_into_share_buffer(\"SecretOne\", packages)\n act.save_state()\n packages = act.get_packages_from_share_buffer(\"SecretOne\")\n secret_rec = act.recover_secret_from_packages(\"SecretOne\", packages)\n\n print(secret_one)\n print(secret_rec)", "id": "6511762", "language": "Python", "matching_score": 1.278036117553711, "max_stars_count": 8, "path": "21-fs-ias-lec/06-SecretSharing/tests/shareBuffer_test.py" }, { "content": "\"\"\"Script to get data for core functions.\"\"\"\n\nfrom BackEnd import core\n\nfrom timeit import default_timer as timer\nimport matplotlib.pyplot as plt\nimport seaborn as sns\nimport pandas as pd\nimport numpy as np\nimport os\n\nsns.set_theme(color_codes=True)\n\n# time it takes creating share packages with variable secret length\n\nnumber_of_bytes = range(1, 50)\nnumber_of_packages = 5\nnumber_threshold = 5\ndata_points = None\n\nfor i in range(0, len(number_of_bytes)):\n mock_secret = os.urandom(number_of_bytes[i])\n start = timer()\n core.split_large_secret_into_share_packages(mock_secret, number_threshold, number_of_packages)\n stop = timer()\n if i == 0:\n data_points = np.array([[number_of_bytes[i], stop - start]])\n else:\n data_points = np.append(data_points, np.array([[number_of_bytes[i], stop - start]]), axis=0)\n\n\nprint(pd.DataFrame(data_points))\n\ndataframe = pd.DataFrame(data=data_points[:, 1:])\n\nsns.lineplot(data=dataframe)\nplt.title('Large Secret: Time per length of secret in bytes.')\nplt.legend('Data')\nplt.ylabel('Time')\nplt.xlabel('Bytes')\nplt.show()\n\n\nx = data_points[0:, 0]\ny = data_points[0:, 1]\nx, y = pd.Series(x), pd.Series(y)\nsns.regplot(x=x, y=y)\nplt.title('Large Secret: Time per length of secret in bytes.')\nplt.legend('Data')\nplt.ylabel('Time')\nplt.xlabel('Bytes')\nplt.show()\n\n\n# time it takes creating share packages with variable package number\n\nnumber_of_bytes = 32\nnumber_threshold = 5\nnumber_of_packages = range(2, 100)\ndata_points = None\n\nfor i in range(0, len(number_of_packages)):\n mock_secret = os.urandom(number_of_bytes)\n start = timer()\n core.split_large_secret_into_share_packages(mock_secret, number_threshold, number_of_packages[i])\n stop = timer()\n if i == 0:\n data_points = np.array([[number_of_packages[i], stop - start]])\n else:\n data_points = np.append(data_points, np.array([[number_of_packages[i], stop - start]]), axis=0)\n\n\nprint(pd.DataFrame(data_points))\n\ndataframe = pd.DataFrame(data=data_points[:, 1:])\n\nsns.lineplot(data=dataframe)\nplt.title('Large Secret: Time per number of packages created.')\nplt.legend('Data')\nplt.ylabel('Time')\nplt.xlabel('Packages')\nplt.show()\n\n\nx = data_points[0:, 0]\ny = data_points[0:, 1]\nx, y = pd.Series(x), pd.Series(y)\nsns.regplot(x=x, y=y)\nplt.title('Large Secret: Time per number of packages created.')\nplt.legend('Data')\nplt.ylabel('Time')\nplt.xlabel('Packages')\nplt.show()\nplt.show()\n\n\n# time it takes creating share packages with variable threshold number\n\nnumber_of_bytes = 32\nnumber_threshold = range(2, 100)\nnumber_of_packages = 5\ndata_points = None\n\nfor i in range(0, len(number_threshold)):\n mock_secret = os.urandom(number_of_bytes)\n start = timer()\n core.split_large_secret_into_share_packages(mock_secret, number_threshold[i], 
number_of_packages)\n stop = timer()\n if i == 0:\n data_points = np.array([[number_threshold[i], stop - start]])\n else:\n data_points = np.append(data_points, np.array([[number_threshold[i], stop - start]]), axis=0)\n\n\nprint(pd.DataFrame(data_points))\n\ndataframe = pd.DataFrame(data=data_points[:, 1:])\n\nsns.lineplot(data=dataframe)\nplt.title('Large Secret: Time per threshold of shamir algorithm.')\nplt.legend('Data')\nplt.ylabel('Time')\nplt.xlabel('Threshold')\nplt.show()\n\n\nx = data_points[0:, 0]\ny = data_points[0:, 1]\nx, y = pd.Series(x), pd.Series(y)\nsns.regplot(x=x, y=y, color=\"g\")\nplt.title('Large Secret: Time per threshold of shamir algorithm.')\nplt.legend('Data')\nplt.ylabel('Time')\nplt.xlabel('Threshold')\nplt.show()\n\n\n# time it takes to restore packages from minimum threshold\n\nnumber_of_bytes = 32\nnumber_threshold = range(2, 50)\nnumber_of_packages = 100\ndata_points = None\n\n\nfor i in range(0, len(number_threshold)):\n mock_secret = os.urandom(number_of_bytes)\n packages = core.split_large_secret_into_share_packages(mock_secret, number_threshold[i], number_of_packages)\n start = timer()\n core.recover_large_secret(packages)\n stop = timer()\n if i == 0:\n data_points = np.array([[number_threshold[i], stop - start]])\n else:\n data_points = np.append(data_points, np.array([[number_threshold[i], stop - start]]), axis=0)\n\n\nprint(pd.DataFrame(data_points))\n\ndataframe = pd.DataFrame(data=data_points[:, 1:])\n\nsns.lineplot(data=dataframe)\nplt.title('Large Secret: Time per restore with threshold variance.')\nplt.legend('Data')\nplt.ylabel('Time')\nplt.xlabel('Threshold')\nplt.show()\n\n\nx = data_points[0:, 0]\ny = data_points[0:, 1]\nx, y = pd.Series(x), pd.Series(y)\nsns.regplot(x=x, y=y, color=\"g\", order=2)\nplt.title('Large Secret: Time per restore with threshold variance.')\nplt.legend('Data')\nplt.ylabel('Time')\nplt.xlabel('Threshold')\nplt.show()\n\n\n", "id": "8683630", "language": "Python", "matching_score": 0.7063380479812622, "max_stars_count": 8, "path": "21-fs-ias-lec/06-SecretSharing/BackEnd/stats.py" }, { "content": "from tkinter import *\nfrom tkinter import colorchooser\n\nx, y = 0, 0 # coordinates\ncolor = 'black'\nbgColor = 'white'\n\n\ndef draw(event):\n x, y = event.x, event.y\n x1, y1 = (x - 1), (y - 1)\n x2, y2 = (x + 1), (y + 1)\n canvas.create_rectangle(x1, y1, x2, y2, fill=color, outline=color,\n width=getScaleValue())\n\n\ndef eraseLine():\n global color\n color = \"white\"\n\n\ndef getColor():\n global color\n hex = colorchooser.askcolor(title=\"Edit colors\")\n color = hex[1] # hexadecimal\n return color\n\n\ndef showColor(newColor):\n global color\n color = newColor\n\n\ndef getScaleValue():\n brushSize = str(var.get())\n return brushSize\n\n\ndef deleteCanvas(event):\n canvas.delete('all')\n showPalette()\n\n\ndef createCanvas():\n canvas.delete('all')\n showPalette()\n\n\nwindow = Tk()\n\nwindow.title('Sketch')\n\nwindow.rowconfigure(0, weight=1)\nwindow.columnconfigure(0, weight=1)\n\nmenubar = Menu(window)\nwindow.config(menu=menubar)\nsubmenu = Menu(menubar, tearoff=0)\n\nmenubar.add_cascade(label='File', menu=submenu)\nsubmenu.add_command(label='New Canvas', command=createCanvas)\n\ncanvas = Canvas(window, background=bgColor, width=700, height=600)\ncanvas.grid(row=0, column=0, sticky='nsew')\n\nvar = IntVar()\nscale = Scale(window, from_=0, to=50, orient=HORIZONTAL, variable=var)\nscale.place(x=10, y=320)\nscale.set(10)\n\npaletteButton = Button(window, text=\"Edit colors\", 
command=getColor)\npaletteButton.place(x=10, y=380)\n\nsendButton = Button(window, text=\"Send image\")\nsendButton.place(x=620, y=10)\n\ncanvas.bind('<B1-Motion>', draw)\ncanvas.bind('<B3-Motion>', deleteCanvas)\n\nphotoEraser = PhotoImage(file=r\"eraser.png\")\neraserImage = photoEraser.subsample(7, 7)\neraser = Button(window, image=eraserImage, command=eraseLine)\neraser.place(x=10, y=420)\n\nphotoBucket = PhotoImage(file=r\"bucket.png\")\nbucketImage = photoBucket.subsample(30, 30)\nfill = Button(window, image=bucketImage,\n              command=lambda: canvas.configure(bg=color))\nfill.place(x=10, y=470)\n\n\ndef showPalette():\n    blackRectangle = canvas.create_rectangle((10, 10, 30, 30), fill='black')\n    canvas.tag_bind(blackRectangle, '<Button-1>', lambda x: showColor('black'))\n\n    grayRectangle = canvas.create_rectangle((10, 40, 30, 60), fill='gray')\n    canvas.tag_bind(grayRectangle, '<Button-1>', lambda x: showColor('gray'))\n\n    brownRectangle = canvas.create_rectangle((10, 70, 30, 90), fill='brown4')\n    canvas.tag_bind(brownRectangle, '<Button-1>', lambda x: showColor('brown4'))\n\n    redRectangle = canvas.create_rectangle((10, 100, 30, 120), fill='red')\n    canvas.tag_bind(redRectangle, '<Button-1>', lambda x: showColor('red'))\n\n    orangeRectangle = canvas.create_rectangle((10, 130, 30, 150),\n                                              fill='orange')\n    canvas.tag_bind(orangeRectangle, '<Button-1>', lambda x: showColor('orange'))\n\n    yellowRectangle = canvas.create_rectangle((10, 160, 30, 180), fill='yellow')\n    canvas.tag_bind(yellowRectangle, '<Button-1>', lambda x: showColor('yellow'))\n\n    greenRectangle = canvas.create_rectangle((10, 190, 30, 210), fill='green')\n    canvas.tag_bind(greenRectangle, '<Button-1>', lambda x: showColor('green'))\n\n    blueRectangle = canvas.create_rectangle((10, 220, 30, 240), fill='blue')\n    canvas.tag_bind(blueRectangle, '<Button-1>', lambda x: showColor('blue'))\n\n    purpleRectangle = canvas.create_rectangle((10, 250, 30, 270), fill='purple')\n    canvas.tag_bind(purpleRectangle, '<Button-1>', lambda x: showColor('purple'))\n\n    whiteRectangle = canvas.create_rectangle((10, 280, 30, 300), fill='white')\n    canvas.tag_bind(whiteRectangle, '<Button-1>', lambda x: showColor('white'))\n\n\nshowPalette()\n\nwindow.mainloop()\n", "id": "7592850", "language": "Python", "matching_score": 0.7649463415145874, "max_stars_count": 8, "path": "21-fs-ias-lec/02-ChatApplikationSketch/prototype/sketch.py" }, { "content": "# Simple PCAP reading/writing tool\r\n# Authors: <NAME>, <NAME>, <NAME>\r\n# VERSION: 1.0\r\n\r\nimport time\r\n\r\n\r\nclass PCAP:\r\n\r\n    @classmethod\r\n    def __save_file(cls, path, bytes):\r\n        file = open(path + \".pcap\", \"wb\")\r\n        file.write(bytes)\r\n        file.close()\r\n\r\n    @classmethod\r\n    def write_pcap(cls, path_to_file, list_of_events):\r\n        file_bytes = 0xa1b23c4d.to_bytes(4, 'big') + \\\r\n                     (2).to_bytes(2, 'big') + \\\r\n                     (4).to_bytes(2, 'big') + \\\r\n                     (0).to_bytes(4, 'big') + \\\r\n                     (0).to_bytes(4, 'big') + \\\r\n                     (131071).to_bytes(4, 'big') + \\\r\n                     (147).to_bytes(4, 'big')\r\n        maximum_bytes = 131071\r\n        current_payload = 0\r\n        list_of_processed_events = []\r\n        for event in list_of_events:\r\n            event_byte_length = len(event)\r\n            while event_byte_length % 4 != 0:\r\n                event += (0).to_bytes(1, 'big')\r\n                event_byte_length += 1\r\n            if event_byte_length > maximum_bytes:\r\n                continue\r\n            if current_payload + event_byte_length > maximum_bytes:\r\n                PCAP.write_pcap(path_to_file + '0', list(set(list_of_events) - set(list_of_processed_events)))\r\n                PCAP.__save_file(path_to_file, file_bytes)\r\n                return\r\n            
current_payload += event_byte_length\r\n            list_of_processed_events.append(event)\r\n            timestamp = time.time_ns()\r\n            time_sec = int(timestamp / 1000000000)\r\n            time_nano = timestamp - time_sec * 1000000000\r\n            event_header = time_sec.to_bytes(4, 'big') + \\\r\n                           time_nano.to_bytes(4, 'big') + \\\r\n                           len(event).to_bytes(4, 'big') + \\\r\n                           len(event).to_bytes(4, 'big')\r\n            file_bytes += event_header + event\r\n        PCAP.__save_file(path_to_file, file_bytes)\r\n\r\n    @classmethod\r\n    def read_pcap(cls, path_to_file):\r\n        packets_list = []\r\n        swap_byte_order = False\r\n        file = open(path_to_file, \"rb\")\r\n        magic_number = file.read(4)\r\n        if magic_number == bytes.fromhex(\"4d3cb2a1\") or magic_number == bytes.fromhex(\"d4c3b2a1\"):\r\n            swap_byte_order = True\r\n        file.read(20)\r\n        timestamp = file.read(8)\r\n        while timestamp != b'':\r\n            packet_length = bytearray(file.read(4))\r\n            if swap_byte_order:\r\n                packet_length.reverse()\r\n            packet_length = int.from_bytes(bytes(packet_length), 'big')\r\n            file.read(4)\r\n            next_event = file.read(packet_length)\r\n            if swap_byte_order:\r\n                tmp_arr = bytearray(next_event)\r\n                # swap each 4-byte word in place (range() takes the step positionally, not as a keyword)\r\n                for i in range(0, len(tmp_arr), 4):\r\n                    tmp_arr[i], tmp_arr[i+1], tmp_arr[i+2], tmp_arr[i+3] = tmp_arr[i+3], tmp_arr[i+2], tmp_arr[i+1], tmp_arr[i]\r\n                next_event = bytes(tmp_arr)\r\n            packets_list.append(next_event)\r\n            timestamp = file.read(8)\r\n        file.close()\r\n        return packets_list\r\n", "id": "12307437", "language": "Python", "matching_score": 0.5708500742912292, "max_stars_count": 8, "path": "21-fs-ias-lec/16-nicknames-forward/PCAP.py" }, { "content": "def getMaxIndex(line, space, length, leftOver, n):\r\n    last_working_index = 0\r\n    if space >= length: # if the length of the input is smaller than the space we can print the whole line, no need to check\r\n        return length\r\n    for i in range(space):\r\n        if leftOver <= space:\r\n            last_working_index = leftOver\r\n            break\r\n        if line[i] == \" \":\r\n            last_working_index = i + 1 # remove the space by adding +1 so it's all on the same line\r\n    if last_working_index == 0 and leftOver != 0: # if the input is >= 25 chars without a space we have to break it anyways\r\n        # print(\"line break forced: break at index 25:\")\r\n        return space\r\n    else:\r\n        # print(\"successfully wrapped a line at index\", last_working_index,\":\")\r\n        return last_working_index\r\n\r\n\r\ndef textWrap(text, N):\r\n    text = list(text) # creates a list of chars out of the given string\r\n    wrapped_text = list() # where we store our wrapped text\r\n    tmp = list() # temporary variable which holds the string to be placed in the spots of wrappedTextArray\r\n    wrapped_text_list = list() # array which holds enough space for a max of 40 lines of text\r\n    k = 0 # index of the wrappedTextArray\r\n    n = N # total number of spaces we want indent the text by\r\n    total = 25 # total number of spaces we are able to fill (left to right)\r\n    start = 0 # index where we currently are in the input (Text)\r\n    space = total - n # how much space we have to write on (before the line break needs to occur)\r\n    length = len(text) # length of the text\r\n    left_over = length - start # how many chars we have left to process\r\n    while start < length: # while we are not through all the chars\r\n        for i in range(n): # we start a new line by indenting it by n spaces \r\n            wrapped_text.append(\" \") # indent text by n spaces\r\n            tmp.append(\" \")\r\n        if space > left_over: # so we don't get index out of bounds for inputs smaller than space\r\n            sequence = list(\"\".join(text[start:(\r\n                    start + left_over)])) # give it the 
max amount of chars which could fit and make it a list again\r\n        else:\r\n            sequence = list(\"\".join(text[start:(start + space)]))\r\n        maxIndex = getMaxIndex(sequence, space, length, left_over,\r\n                               n) # find out what the maximum index is (end of the last word we can print on the line without cutting it)\r\n        for i in range(maxIndex):\r\n            wrapped_text.append(text[start + i]) # add as many words as possible on the rest of the line\r\n            tmp.append(text[start + i])\r\n        start += maxIndex\r\n        left_over = length - start # how many chars we have left to process\r\n\r\n        wrapped_text_list.append(\"\".join(tmp))\r\n        tmp = list()\r\n        k += 1\r\n    return wrapped_text_list\r\n\r\n\r\ndef shorten_name(name, space):\r\n    output = list(name)\r\n    if space < len(name) + 3:\r\n        del output[space - 3:] # delete chars of the name\r\n        output.append('...')\r\n    return ''.join(output)\r\n\r\n\r\ndef check_for_abnormal(name, List, Type):\r\n    name = list(name)\r\n    too_big = List\r\n    adjust = 1.5\r\n    adjust_value = -1.2\r\n    if Type == 'ts':\r\n        adjust_value = 0.3\r\n    for i in range(len(name)):\r\n        for j in range(len(too_big)):\r\n            if name[i] == too_big[j]:\r\n                adjust += adjust_value\r\n    return int(adjust)\r\n\r\n\r\ndef mergeNameCounter(name, counter):\r\n    sign = \"(\" + str(counter) + \")\"\r\n    if counter == 0: # we don't need to indicate new messages in this case\r\n        sign = \"\"\r\n    return str(name + \" \" + sign)\r\n\r\n'''\r\ndef mergeNameCounter(name, counter, space):\r\n    output = \"\"\r\n    # too_big = ['W','m','w',]\r\n    # too_small = ['I','f','i','j','l','t']\r\n    # adjust = check_for_abnormal(name, too_big, 'tb')\r\n    # adjust += check_for_abnormal(name, too_small, 'ts')\r\n    if counter > 9:\r\n        counter = '*'\r\n    sign = \"(\" + str(counter) + \")\"\r\n    if counter == 0: # we don't need to indicate new messages in this case\r\n        sign = \"\"\r\n\r\n    if space >= len(name) + 3: # name needs to be extended (too short)\r\n        output = '{:<40}{:>3}'.format(name, sign)\r\n    elif space < len(name) + 3: # name needs to be cut (too long)\r\n        name_space = space - 3 - 3 # + adjust # space the name is allowed to occupy (plus the space 3 dots will occupy to indicate the name was cut off)\r\n        output = list(name)\r\n        del output[name_space:] # delete chars of the name\r\n        output.append('...')\r\n        # output.append(sign)\r\n        output = ''.join(output)\r\n        output = '{:<40}{:>3}'.format(output, sign)\r\n    return output\r\n'''\r\n", "id": "9167886", "language": "Python", "matching_score": 1.4920982122421265, "max_stars_count": 8, "path": "21-fs-ias-lec/16-nicknames-forward/subChat/TextWrapper.py" }, { "content": "#version 15:38\r\nimport random\r\nimport string\r\n#name = 'zzz'\r\nset_off = 23\r\n\r\ndef convert(name):\r\n    \r\n    for i in range(len(name)):\r\n        if name[i].lower() == 'i' or name[i].lower() == 'y' or name[i].lower() == '9':\r\n            name = list(name)\r\n            name[i] = 'g'\r\n            name = ''.join(name)\r\n    \r\n    indx = 0\r\n    c=0\r\n    while len(name) < 6:\r\n        if c >16:\r\n            return '#ffb300' # just in case it goes into an infinite loop (probability is very, very low)\r\n        c +=1\r\n        new_letter = chr(65 + (ord(name[indx]) + set_off + (indx*6) )%25) # this keeps the char within the range of A-Z in the ascii table and adds variation in case the letter is the same (indx*6)\r\n        if new_letter.lower() != 'i' and new_letter.lower() != 'y' and new_letter != '9':\r\n            name = name + new_letter #add the letter\r\n        indx = (indx+1)%len(name)\r\n    \r\n    if len(name) > 6:\r\n        name = name[:6] #cut name if too long\r\n    \r\n    name = list(name) # make it a list so we can edit it more 
easily\r\n    \r\n    for i in range(len(name)):\r\n        Integer = (ord(name[i])+set_off)%16\r\n        Hex = Integer.to_bytes(((Integer.bit_length() + 7) // 8),\"big\").hex()\r\n        #print(\"....\"+Hex)\r\n        Hex = Hex[1:]\r\n        name[i] = Hex \r\n    \r\n    name = ''.join(name)\r\n    color = '#' + name\r\n    return color\r\n    \r\ndef name_to_color(name):\r\n    color = convert(name)\r\n    r = int(color[1:3], 16)\r\n    g = int(color[3:5], 16)\r\n    b = int(color[5:7], 16)\r\n    if (r<128 or g<128 or b<128) and len(color) == 7:\r\n        return color\r\n    else:\r\n        return '#00f7ff' # some ord() chars aren't convertible. When we checked all, we found this to be the case with i, y and 9 which is why we prevent the program from outputting them. Just in case there are any other letters that we forgot to check, we added this clause. Should never get here but the presentation got us worried because if the color is not exactly 6 digits long, tkinter crashes.\r\n    \r\n", "id": "8053438", "language": "Python", "matching_score": 0.8803055286407471, "max_stars_count": 8, "path": "21-fs-ias-lec/16-nicknames-forward/subChat/Colorize.py" }, { "content": "SQLITE = 'sqlite'\r\n\r\nCBORTABLE = 'cborTable'\r\nEVENTTABLE = 'eventTable'\r\nKOTLINTABLE = 'kotlinTable'\r\nMASTERTABLE = 'masterTable'\r\n", "id": "6313147", "language": "Python", "matching_score": 0.2585586905479431, "max_stars_count": 8, "path": "21-fs-ias-lec/16-nicknames-forward/logStore/funcs/constants.py" }, { "content": "from .connection import Function\r\n\r\n\r\nclass ChatFunction(Function):\r\n    \"\"\"Connection to the group chat to insert and output the chat elements\"\"\"\r\n\r\n    def __init__(self):\r\n        super(ChatFunction, self).__init__()\r\n\r\n    def insert_chat_msg(self, cbor):\r\n        \"\"\"adds a new chat element as cbor\r\n\r\n        Tested and works!\"\"\"\r\n        self.insert_event(cbor)\r\n\r\n    def get_chat_since(self, timestamp, chat_id):\r\n        \"\"\"returns all elements which have a higher timestamp and the correct chat id\r\n\r\n        Tested and works!\"\"\"\r\n        return self._handler.get_event_since('chat', timestamp, chat_id)\r\n\r\n    def get_full_chat(self, chat_id):\r\n        \"\"\"returns all chat elements with the correct chat id\r\n\r\n        Tested and works!\"\"\"\r\n        return self._handler.get_all_chat_msgs('chat', chat_id)\r\n", "id": "11532882", "language": "Python", "matching_score": 2.267303228378296, "max_stars_count": 8, "path": "21-fs-ias-lec/16-nicknames-forward/logStore/appconn/chat_connection.py" }, { "content": "from .connection import Function\r\n\r\n\r\nclass KotlinFunction(Function):\r\n    \"\"\"Connection to the group kotlin to insert and output the chat elements\"\"\"\r\n\r\n    def __init__(self):\r\n        super(KotlinFunction, self).__init__()\r\n\r\n    def insert_data(self, cbor):\r\n        \"\"\"adds a new chat element as cbor\r\n\r\n        Tested and works!\"\"\"\r\n        self.insert_event(cbor)\r\n\r\n    def get_usernames_and_feed_id(self):\r\n        \"\"\"returns all current usernames with the corresponding feed id\r\n\r\n        Tested and works!\"\"\"\r\n        return self._handler.get_usernames_and_feed_id()\r\n\r\n    def get_all_entries_by_feed_id(self, feed_id):\r\n        \"\"\"returns all elements with the corresponding feed id, thus all events of a user\r\n\r\n        Tested and works!\"\"\"\r\n        return self._handler.get_all_entries_by_feed_id(feed_id)\r\n\r\n    def get_all_kotlin_events(self):\r\n        \"\"\"returns all existing kotlin elements that are in the database\r\n\r\n        Tested and works!\"\"\"\r\n        return self._handler.get_all_kotlin_events()\r\n\r\n    def get_last_kotlin_event(self):\r\n        \"\"\"returns only the last added kotlin element\r\n\r\n        
Tested and works!\"\"\"\r\n return self._handler.get_last_kotlin_event()\r\n", "id": "7310992", "language": "Python", "matching_score": 0.9986268281936646, "max_stars_count": 8, "path": "21-fs-ias-lec/16-nicknames-forward/logStore/appconn/kotlin_connection.py" }, { "content": "# The connection api for transport groups\r\n# Authors: <NAME>, <NAME>, <NAME>\r\n# VERSION: 1.0\r\n\r\nimport hashlib\r\nimport hmac\r\nimport nacl.encoding\r\nimport nacl.signing\r\nimport nacl.exceptions\r\nimport os\r\n\r\nfrom Event import Event\r\nfrom PCAP import PCAP\r\n\r\nfrom logStore.transconn.database_connector import DatabaseConnector\r\nfrom logStore.verific.verify_insertion import Verification\r\n\r\n\r\nSIGN_INFO = {'ed25519': 0, 'hmac_sha256': 1}\r\nHASH_INFO = {'sha256': 0}\r\n\r\n\r\nclass LogMerge:\r\n\r\n def __init__(self):\r\n if not os.path.exists('cborDatabase.sqlite'):\r\n import feedCtrl.uiFunctionsHandler\r\n feedCtrl.uiFunctionsHandler.UiFunctionHandler()\r\n self.DB = DatabaseConnector()\r\n self.EV = Verification()\r\n\r\n def get_database_status(self):\r\n list_of_feed_ids = self.DB.get_all_feed_ids()\r\n dict_of_feed_ids_and_corresponding_sequence_numbers = {}\r\n for feed_id in list_of_feed_ids:\r\n print('feed', feed_id.hex())\r\n print(self.EV.check_outgoing(feed_id))\r\n if self.EV.check_outgoing(feed_id):\r\n dict_of_feed_ids_and_corresponding_sequence_numbers[feed_id] = self.DB.get_current_seq_no(feed_id)\r\n return dict_of_feed_ids_and_corresponding_sequence_numbers\r\n\r\n def export_logs(self, path_to_pcap_folder, dict_feed_id_current_seq_no, maximum_events_per_feed_id=-1):\r\n list_of_master_feed_ids = self.DB.get_all_master_ids()\r\n list_of_master_feed_ids.append(self.DB.get_master_feed_id())\r\n for master_feed_id in list_of_master_feed_ids:\r\n #print(master_feed_id)\r\n if master_feed_id not in dict_feed_id_current_seq_no and self.EV.check_outgoing(master_feed_id):\r\n event_list = []\r\n current_seq_no = 0\r\n next_event = self.DB.get_event(master_feed_id, current_seq_no)\r\n while next_event is not None \\\r\n and (maximum_events_per_feed_id == -1 or len(event_list) < maximum_events_per_feed_id):\r\n event_list.append(next_event)\r\n current_seq_no += 1\r\n next_event = self.DB.get_event(master_feed_id, current_seq_no)\r\n PCAP.write_pcap(os.path.join(path_to_pcap_folder, master_feed_id.hex() + \"_v\"), event_list)\r\n for feed_id, current_seq_no in dict_feed_id_current_seq_no.items():\r\n if not self.EV.check_outgoing(feed_id):\r\n continue\r\n event_list = []\r\n current_seq_no += 1\r\n next_event = self.DB.get_event(feed_id, current_seq_no)\r\n while next_event is not None \\\r\n and (maximum_events_per_feed_id == -1 or len(event_list) < maximum_events_per_feed_id):\r\n event_list.append(next_event)\r\n current_seq_no += 1\r\n next_event = self.DB.get_event(feed_id, current_seq_no)\r\n PCAP.write_pcap(os.path.join(path_to_pcap_folder, feed_id.hex() + \"_v\"), event_list)\r\n\r\n def import_logs(self, path_of_pcap_files_folder):\r\n list_of_cbor_events = []\r\n list_of_events = []\r\n list_of_feed_ids = []\r\n paths_of_pcap_files = []\r\n for d, r, f in os.walk(path_of_pcap_files_folder):\r\n for file in f:\r\n if file.lower().endswith('.pcap'):\r\n paths_of_pcap_files.append(os.path.join(d, file))\r\n for path in paths_of_pcap_files:\r\n list_of_cbor_events.extend(PCAP.read_pcap(path))\r\n for event in list_of_cbor_events:\r\n list_of_events.append(Event.from_cbor(event))\r\n for event in list_of_events:\r\n if event.meta.feed_id not in list_of_feed_ids:\r\n 
list_of_feed_ids.append(event.meta.feed_id)\r\n for feed_id in list_of_feed_ids:\r\n most_recent_seq_no = self.__get_most_recent_seq_no(feed_id, list_of_events)\r\n db_seq_no = self.DB.get_current_seq_no(feed_id)\r\n events_for_feed_id = [e for e in list_of_events if e.meta.feed_id == feed_id]\r\n if db_seq_no is None:\r\n self.__verify_and_add_logs(0, feed_id, events_for_feed_id)\r\n elif most_recent_seq_no <= db_seq_no:\r\n continue\r\n else:\r\n self.__verify_and_add_logs(db_seq_no + 1, feed_id, events_for_feed_id)\r\n\r\n def __get_most_recent_seq_no(self, feed_id, list_of_events):\r\n most_rec_seq_no = -1\r\n for event in list_of_events:\r\n if event.meta.feed_id == feed_id and most_rec_seq_no < event.meta.seq_no:\r\n most_rec_seq_no = event.meta.seq_no\r\n return most_rec_seq_no\r\n\r\n def __verify_and_add_logs(self, start_seq_no, feed_id, list_of_events):\r\n list_of_new_events = []\r\n for event in list_of_events:\r\n if event.meta.seq_no >= start_seq_no:\r\n list_of_new_events.append(event)\r\n if start_seq_no == 0:\r\n prev_event = None\r\n else:\r\n prev_event = Event.from_cbor(self.DB.get_current_event(feed_id))\r\n while list_of_new_events:\r\n event_with_lowest_seq_no = self.__get_event_with_lowest_seq_no_from_list(list_of_new_events)\r\n if self.__verify_event(event_with_lowest_seq_no, prev_event):\r\n self.DB.add_event(event_with_lowest_seq_no.get_as_cbor())\r\n # self.DB.add_event(feed_id, event_with_lowest_seq_no.meta.seq_no, event_with_lowest_seq_no.get_as_cbor())\r\n else:\r\n return\r\n prev_event = event_with_lowest_seq_no\r\n list_of_new_events.remove(prev_event)\r\n\r\n def __get_event_with_lowest_seq_no_from_list(self, list_of_events):\r\n if not list_of_events:\r\n return None\r\n lowest_seq_no = list_of_events[0].meta.seq_no\r\n for event in list_of_events:\r\n if event.meta.seq_no < lowest_seq_no:\r\n lowest_seq_no = event.meta.seq_no\r\n for event in list_of_events:\r\n if event.meta.seq_no == lowest_seq_no:\r\n return event\r\n return None\r\n\r\n def __verify_event(self, event, previous_event=None):\r\n if not self.EV.check_incoming(event.meta.feed_id, event.content.content[0].split('/')[0]):\r\n return False\r\n if previous_event is not None:\r\n previous_hash_type, hash_of_previous = event.meta.hash_of_prev\r\n prev_meta_as_cbor = previous_event.meta.get_as_cbor()\r\n if previous_event.meta.feed_id != event.meta.feed_id:\r\n return False\r\n if event.meta.seq_no - 1 != previous_event.meta.seq_no:\r\n return False\r\n if not(previous_hash_type == 0 and hashlib.sha256(prev_meta_as_cbor).digest() == hash_of_previous):\r\n return False\r\n\r\n content_hash_type, hash_of_content = event.meta.hash_of_content\r\n signature_identifier = event.meta.signature_info\r\n signature = event.signature\r\n\r\n content = event.content.get_as_cbor()\r\n meta_as_cbor = event.meta.get_as_cbor()\r\n\r\n if not(content_hash_type == 0 and hashlib.sha256(content).digest() == hash_of_content):\r\n return False\r\n\r\n if signature_identifier == 0:\r\n verification_key = nacl.signing.VerifyKey(event.meta.feed_id)\r\n try:\r\n verification_key.verify(meta_as_cbor, signature)\r\n except nacl.exceptions.BadSignatureError:\r\n return False\r\n # This code is ready to be used, but nobody is using Hmac right now.\r\n # elif signature_identifier == 1:\r\n # secret_key = self.DB.get_secret_hmac_key(event.meta.feed_id)\r\n # if secret_key is None:\r\n # return False\r\n # generated_signature = hmac.new(secret_key, meta_as_cbor, hashlib.sha256).digest()\r\n # if signature != 
generated_signature:\r\n # return False\r\n else:\r\n return False\r\n\r\n return True\r\n\r\n\r\nif __name__ == '__main__':\r\n logMerge = LogMerge()\r\n from EventCreationTool import EventFactory\r\n dc = DatabaseConnector()\r\n ef = EventFactory()\r\n first_event = ef.first_event('chat', dc.get_master_feed_id())\r\n second_event = ef.next_event('chat/okletsgo', {'messagekey': 759432, 'timestampkey': 2345, 'chat_id': 745})\r\n PCAP.write_pcap('nameofpcapfile', [first_event, second_event])\r\n logMerge.import_logs(os.getcwd())\r\n logMerge.export_logs(os.getcwd(), {ef.get_feed_id(): -1}, 10)\r\n events = PCAP.read_pcap('nameofpcapfile.pcap')\r\n for event in events:\r\n event = Event.from_cbor(event)\r\n print(event.content.content[1]['master_feed'].hex())\r\n break\r\n", "id": "2378248", "language": "Python", "matching_score": 4.050223350524902, "max_stars_count": 8, "path": "21-fs-ias-lec/16-nicknames-forward/LogMerge.py" }, { "content": "import secrets\r\nimport nacl.signing\r\nimport hashlib\r\nimport nacl.encoding\r\nfrom BACnetstuff import Event\r\nfrom BACnetstuff import Content\r\nfrom BACnetstuff import Meta\r\nfrom BACnetstuff import pcap\r\nSIGN_INFO = {'ed25519': 0, 'hmac_sha256': 1}\r\nHASH_INFO = {'sha256': 0}\r\n\r\n# Only save private key, get signing_key again and then get the public key\r\nprivate_key = secrets.token_bytes(32)\r\nsigning_key = nacl.signing.SigningKey(private_key)\r\npublic_key = signing_key.verify_key.encode()\r\n# public key == feedID\r\n\r\n# Create any Event\r\ncontent = Content('chat/post', 'hello matilda')\r\n\r\n# Get Hash Values of content and of previous, first message = previous_hack is None\r\nhash_of_content = hashlib.sha256(content.get_as_cbor()).digest()\r\nhash_of_prev = None\r\n\r\n# Build header, 0 = sequence number\r\nmeta = Meta(public_key, 0, hash_of_prev, SIGN_INFO['ed25519'], [HASH_INFO['sha256'], hash_of_content])\r\n\r\n# Sign the header\r\nsignature = signing_key.sign(meta.get_as_cbor()).signature\r\n\r\n# Combine header, signature and content to one cbor encoded Event and create a list\r\nevent = Event(meta, signature, content).get_as_cbor()\r\nlist1 = [event]\r\n\r\n# more events and appending them to the list\r\ncontent = Content('chat/post', 'hello Emma, how are you?')\r\n\r\nhash_of_content = hashlib.sha256(content.get_as_cbor()).digest()\r\nhash_of_prev = hashlib.sha256(meta.get_as_cbor()).digest()\r\n\r\nmeta = Meta(public_key, 1, hash_of_prev, SIGN_INFO['ed25519'], [HASH_INFO['sha256'], hash_of_content])\r\n\r\nsignature = signing_key.sign(meta.get_as_cbor()).signature\r\n\r\nevent = Event(meta, signature, content)\r\nevent = event.get_as_cbor()\r\nlist1.append(event)\r\n\r\n\r\ncontent = Content('chat/post', 'Im good, thanks! 
How are you and the kids?')\r\n\r\nhash_of_content = hashlib.sha256(content.get_as_cbor()).digest()\r\nhash_of_prev = hashlib.sha256(meta.get_as_cbor()).digest()\r\n\r\nmeta = Meta(public_key, 2, hash_of_prev, SIGN_INFO['ed25519'], [HASH_INFO['sha256'], hash_of_content])\r\n\r\nsignature = signing_key.sign(meta.get_as_cbor()).signature\r\n\r\nevent = Event(meta, signature, content).get_as_cbor()\r\nlist1.append(event)\r\n\r\n###NEW FEED\r\n# Only save private key, get signing_key again and then get the public key\r\nprivate_key = secrets.token_bytes(32)\r\nsigning_key = nacl.signing.SigningKey(private_key)\r\npublic_key = signing_key.verify_key.encode()\r\n# public key == feedID\r\n\r\n# Create any Event\r\ncontent = Content('chat/post', 'hello matilda2')\r\n\r\n# Get Hash Values of content and of previous, first message = previous_hack is None\r\nhash_of_content = hashlib.sha256(content.get_as_cbor()).digest()\r\nhash_of_prev = None\r\n\r\n# Build header, 0 = sequence number\r\nmeta = Meta(public_key, 0, hash_of_prev, SIGN_INFO['ed25519'], [HASH_INFO['sha256'], hash_of_content])\r\n\r\n# Sign the header\r\nsignature = signing_key.sign(meta.get_as_cbor()).signature\r\n\r\n# Combine header, signature and content to one cbor encoded Event and create a list\r\nevent = Event(meta, signature, content).get_as_cbor()\r\nlist1.append(event)\r\n\r\n# more events and appending them to the list\r\ncontent = Content('chat/post', 'hello Emma, how are you?2')\r\n\r\nhash_of_content = hashlib.sha256(content.get_as_cbor()).digest()\r\nhash_of_prev = hashlib.sha256(meta.get_as_cbor()).digest()\r\n\r\nmeta = Meta(public_key, 1, hash_of_prev, SIGN_INFO['ed25519'], [HASH_INFO['sha256'], hash_of_content])\r\n\r\nsignature = signing_key.sign(meta.get_as_cbor()).signature\r\n\r\nevent = Event(meta, signature, content)\r\nevent = event.get_as_cbor()\r\nlist1.append(event)\r\n\r\n\r\ncontent = Content('chat/post', 'Im good, thanks! 
How are you and the kids?2')\r\n\r\nhash_of_content = hashlib.sha256(content.get_as_cbor()).digest()\r\nhash_of_prev = hashlib.sha256(meta.get_as_cbor()).digest()\r\n\r\nmeta = Meta(public_key, 2, hash_of_prev, SIGN_INFO['ed25519'], [HASH_INFO['sha256'], hash_of_content])\r\n\r\nsignature = signing_key.sign(meta.get_as_cbor()).signature\r\n\r\nevent = Event(meta, signature, content).get_as_cbor()\r\nlist1.append(event)\r\n\r\n\r\nif __name__ == \"__main__\":\r\n    pcap.write_pcap('lessfulldatabase', list1)\r\n", "id": "7236889", "language": "Python", "matching_score": 0.9791176915168762, "max_stars_count": 8, "path": "21-fs-ias-lec/16-nicknames-forward/createlessfulldb.py" }, { "content": "import os\r\nimport LogMerge\r\n\r\n#### TODO: SHOULD TAKE IT AS A PARAMETER INSTEAD TO AVOID READING THE SAME FILE OVER AND OVER\r\n#### TODO: MAIN METHOD SHOULD CALL getUsersDictionary AND THEN CREATE A USER OBJECT\r\n\r\n# our usersdictionary is a dictionary consisting of usernames as keys and dictionaries as values\r\n# the values are based on the dictionaries returned by logMerge when asked for the current status of feeds\r\n\r\n# this function reads the users.txt file to extract the userdictionary so that we can work with it\r\n# returns the read userdictionary\r\ndef getUsersDictionary(path):\r\n    dict = {}\r\n    if not os.path.exists(path + '/users.txt'):\r\n        open(path + '/users.txt', 'w')\r\n    file = open(path + '/users.txt', 'r')\r\n    users = file.read().split('+')\r\n    try:\r\n        for user in users:\r\n            feedids = user.split(\";\")\r\n            dictoffeeds = {}\r\n            for feedid in feedids[1].split(\",\"):\r\n                fid_seqNo = feedid.split(\":\")\r\n                fid = bytes.fromhex(fid_seqNo[0])\r\n                dictoffeeds[fid] = int(fid_seqNo[1])\r\n            dict[feedids[0]] = dictoffeeds\r\n    except:\r\n        file.close()\r\n        return {}\r\n    file.close()\r\n    return dict\r\n\r\n# this function writes the userdictionary to the user.txt file\r\n# naive implementation always deleting all users before dumping the dictionary again\r\n# no return\r\ndef writeUsersDictionary(dict, path):\r\n    removeAllUsers(path)\r\n    if not os.path.exists(path + '/users.txt'):\r\n        open(path + '/users.txt', 'w')\r\n    file = open(path + '/users.txt', 'a')\r\n    first = True\r\n    try:\r\n        for name, feed in dict.items():\r\n            user = \"\" + name + \";\"\r\n            firstfeed = True\r\n            for feedID, seqno in feed.items():\r\n                if first:\r\n                    if firstfeed:\r\n                        feedID = feedID.hex()\r\n                        user = user+feedID+\":\"+str(seqno)\r\n                        firstfeed=False\r\n                    else:\r\n                        feedID = feedID.hex()\r\n                        user = user+\",\"+feedID+\":\"+str(seqno)\r\n                else:\r\n                    if firstfeed:\r\n                        feedID = feedID.hex()\r\n                        user = user + feedID + \":\" + str(seqno)\r\n                        firstfeed = False\r\n                    else:\r\n                        feedID = feedID.hex()\r\n                        user = user + \",\" + feedID + \":\" + str(seqno)\r\n            if not first:\r\n                user=\"+\" + user\r\n            first = False\r\n            file.write(user)\r\n    except KeyError:\r\n        print(\"keyerror?\")\r\n\r\n# empties the user.txt file\r\n# no return\r\ndef removeAllUsers(path):\r\n    os.remove(path+'/users.txt')\r\n    file = open(path+'/users.txt', 'w+')\r\n    file.close()\r\n\r\ndef removeAllPCAP(path):\r\n    for file in os.listdir(path):\r\n        try:\r\n            if file.endswith('.pcap'):\r\n                # join with the folder path so removal works regardless of the current working directory\r\n                os.remove(os.path.join(path, file))\r\n        except OSError as e:\r\n            pass\r\n\r\n# removes one specified user identified by their username from the user.txt file\r\n# takes username, no return\r\n# TODO: save the dictionary once on starting the program\r\ndef removeOneUser(username):\r\n    dictionary = getUsersDictionary()\r\n    if username in dictionary:\r\n        print(\"Deleted \", username)\r\n        del dictionary[username]\r\n    else:\r\n        
print(username, \" not found.\")\r\n writeUsersDictionary(dictionary)\r\n\r\n# this function returns a dictionary containing information about what events are stored on the device. key is feed id, value is tuple marking from which to which seq_no is stored\r\n# TODO: implement and call where needed (should be only when exporting)\r\ndef getStickStatus():\r\n pass\r\n\r\n# class to represent the user that is currently using the software\r\nclass User:\r\n # username is given from the ui\r\n # usersdictionary is saved between running the program and called via getUsersDictionary\r\n # currentuserdictionary contains feed_id's as key and latest seq_no's as corresponding values\r\n def __init__(self, name, path):\r\n self.log = LogMerge.LogMerge()\r\n self.username = name\r\n self.pcapDumpPath = path\r\n self.usersDictionary = getUsersDictionary(path)\r\n self.readDict()\r\n\r\n\r\n def readDict(self):\r\n self.currentUserDictionary = {}\r\n for user, dict in self.usersDictionary.items():\r\n for feed_id, seq_no in dict.items():\r\n self.currentUserDictionary[feed_id] = -1\r\n self.updateUsersDictionary()\r\n\r\n # this calls the function provided by group 4\r\n # returns a dictionary of feed_id: seq_no for the current user\r\n def updateUsersDictionary(self):\r\n currentUserStatus = self.log.get_database_status()\r\n for feed_id, seq_no in currentUserStatus.items():\r\n self.currentUserDictionary[feed_id] = seq_no\r\n self.usersDictionary[self.username] = self.currentUserDictionary\r\n for user, dict in self.usersDictionary.items():\r\n if user != self.username:\r\n for feed_id, seq_no in currentUserStatus.items():\r\n if feed_id not in dict:\r\n dict[feed_id] = -1\r\n writeUsersDictionary(self.usersDictionary, self.pcapDumpPath)\r\n\r\n def getSequenceNumbers(self):\r\n dict = self.usersDictionary\r\n dict_ = {}\r\n for user in dict:\r\n feeds = dict[user]\r\n for feed in feeds:\r\n try:\r\n if feed in dict_:\r\n if dict_[feed] > feeds[feed]:\r\n dict_[feed] = feeds[feed]\r\n else:\r\n dict_[feed] = feeds[feed]\r\n except KeyError:\r\n dict_[feed] = 0\r\n return dict_\r\n\r\n # This method imports events from the folder on the drive that holds the pcap files created by the export function.\r\n # returns nothing\r\n def importing(self):\r\n self.log.import_logs(self.pcapDumpPath)\r\n self.updateUsersDictionary()\r\n\r\n # this method calls the export_logs() function provided by group 4.\r\n # takes an int specifying the maximum number of events dumped per feed\r\n # returns nothing\r\n def exporting(self, maxEvents=30):\r\n self.importing()\r\n removeAllPCAP(self.pcapDumpPath)\r\n self.log.export_logs(self.pcapDumpPath, self.getSequenceNumbers(), maxEvents)\r\n\r\n\r\n # TODO: implement as follows:\r\n # read every feed and save its sequence number in a dictionary of {feedID:seqNo}\r\n # then compare it to our sequence numbers getSequenceNumbers() which is also {feedID:seqNo}\r\n # delete any event that has a lower seqNo than our getSequenceNumbers() returns\r\n # returns nothing\r\n def update_dict(self, dictionary):\r\n pass\r\n", "id": "1611470", "language": "Python", "matching_score": 1.6671842336654663, "max_stars_count": 8, "path": "21-fs-ias-lec/16-nicknames-forward/sneakernet_functions.py" }, { "content": "import platform # used to determine operating system\nimport sys\nimport threading\n\nimport serial.tools.list_ports # used to list open ports\n\nfrom datetime import datetime\n\nfrom eventCreationTool.EventCreationTool import EventCreationTool, EventFactory\nfrom logMerge.Event 
import Event\nfrom logMerge.LogMerge import LogMerge\nfrom logMerge.PCAP import PCAP\n\nsys.path.append(\".BACnet/demo/lib\")\n\n# locationVerificationTool\n# used to store logins from the gps-gadget to Feeds\n#\n# to use this script, make sure you connected the arduino\n# after every successful login (key-card on key-card-reader) date, time, uid, long, lat is stored in a pcap file\n# after every successful 5th login, all stored events in the pcap file gets outprinted\n#\n# to share the information with other BACnet users: use feed_control.py and guiSneakernet.py from HS20-project\n\n# Lists all ports currently open.\nports = list(serial.tools.list_ports.comports())\nport = None\nport_list = []\narduino_found = False\n# macOS has its ports stored in the /dev/ directors\nif \"macOS\" in platform.platform():\n for p in ports:\n if \"usbmodem\" in p.name:\n port = '/dev/' + str(p.name)\n break\n\n# Windows uses COM ports.\nelif \"Windows\" in platform.platform():\n for p in ports:\n if \"Arduino\" in p.description or \"USB Serial Device\" in p.description:\n port = str(p.device)\n port_list.append(port)\n arduino_found = True\n\n # no Arduino found\n if not arduino_found:\n print(\"No Arduino found\")\n print(\"no port chosen\")\n # multiple Arduino found\n elif len(port_list) > 1:\n print(\"multiple Arduino's found. Please input wished ports:\")\n for p in port_list:\n print(\"Ports: \", p)\n port = input(\"Enter port: \")\n print(\"chosen port: \", port)\n # only one Arduino found\n else:\n print(\"chosen port: \", port)\n# linux users have to enter their port manually\nelif \"Linux\" in platform.platform():\n port = input(\"please enter port:\")\n print(\"chosen port: \", port)\n\n# Initialize serial connection with chosen port\nser = serial.Serial(port, 9600, timeout=1)\n\n# initialize uid, lat, long\nuid, latitude, longitude = None, None, None\n\n# creating first feed with EventFactory from logMerge-Project (HS2020)\nlm = LogMerge()\necf = EventFactory()\nmaster_feed_id = EventFactory.get_feed_id(ecf)\n# this is our first Event.\nfirst_event = ecf.first_event(\"verificationTool\", master_feed_id)\n\n\n#\n# Reading information from Arduino and store uid, long, lat into global variables.\n# if location or uid is changed, change global variables\n#\nclass SerialReadingThread(threading.Thread):\n\n # standard thread __init__\n def __init__(self, iD, name):\n threading.Thread.__init__(self)\n self.iD = iD\n self.name = name\n\n def run(self):\n # access global variables for further processing\n global uid, latitude, longitude\n while True:\n # for each serial line, check for type of information\n data_raw = ser.readline().decode().strip()\n if data_raw.startswith(\"x1uid\"):\n uid = data_raw[6:]\n elif data_raw.startswith(\"x2lon\"):\n longitude = data_raw[5:]\n elif data_raw.startswith(\"x3lat\"):\n latitude = data_raw[5:]\n else:\n pass\n\n\nt1 = SerialReadingThread(1, \"t1\")\nt1.start()\n# value for counting logins\nlogin_counter = 0\n# list of our events. gets appended every login\nevent_list = [first_event]\n\n\n#\n# prints all feeds stored in a PCAP file (named verificationTool.pcap)\n# make sure this PCAP file exists. 
Function is called, after every 5th successful login\n# pcap files exists from first successful login\n#\ndef print_events():\n events_with_master = PCAP.read_pcap('verificationTool.pcap')\n events_without_master = events_with_master.copy()\n events_without_master.pop(0)\n for event in events_without_master:\n event = Event.from_cbor(event)\n print(\"events: \", event.content.content[1])\n\n\n#\n# actually script: reacts if key-card is found (login). store events in a event list and write them to a pcap file\n# printing every 5th login all events in the pcap file\n# runs until script is stopped manually or arduino is disconnected\n#\nprint(\"waiting for Login...\")\nwhile True:\n # Login successful (key-card found)\n if uid is not None:\n # increase login counter\n login_counter = login_counter + 1\n print(\"successful Login: writing feed\")\n # generate Message (Time, UID, LONG, LAT)\n msg_to_store = \"Date / Time: \" + datetime.now().strftime(\"%d/%m/%Y %H:%M:%S\") + \", UID: \" + str(uid) + \\\n \", LONG: \" + str(longitude) + \", LAT: \" + str(latitude)\n # this is our new event (feed)\n new_event = ecf.next_event(\"verificationTool/storeFeed\", msg_to_store)\n # append the event to our event list\n event_list.append(new_event)\n PCAP.write_pcap('verificationTool', event_list)\n # reset uid to None --> script waits until next login\n uid = None\n # print every 5th successful login all stored feeds\n if login_counter == 5:\n login_counter = 0\n print_events()\n print(\"waiting for Login...\")\n", "id": "4319264", "language": "Python", "matching_score": 2.8584344387054443, "max_stars_count": 8, "path": "21-fs-ias-lec/9-BACnet-Location-Verification/ArduinoGPS.py" }, { "content": "from logStore.appconn import feed_ctrl_connection\nimport Event, EventCreationTool\n\nimport json\nimport os\nfrom typing import List, Tuple\n\n\nclass Singleton:\n def __init__(self, cls):\n self._cls = cls\n\n def Instance(self):\n try:\n return self._instance\n except AttributeError:\n self._instance = self._cls()\n return self._instance\n\n def __call__(self):\n raise TypeError('Singletons must be accessed through `Instance()`.')\n\n def __instancecheck__(self, inst):\n return isinstance(inst, self._cls)\n\n\n\n@Singleton\nclass RequestHandler:\n \"\"\" This class acts as an Instance for the database connection of logStore and the EventCreationTool of logMerge\"\"\"\n def __init__(self):\n self.num = 0\n self.logged_in = False\n self.event_factory = None\n self.db_connection = feed_ctrl_connection.FeedCtrlConnection()\n print(f\"Host Master Feed Id: {self.db_connection.get_host_master_id()}\")\n self.username = \"\"\n self.load_user()\n\n def load_user(self):\n \"\"\"Mainly for the Secret sharing application\"\"\"\n script_dir = os.path.dirname(__file__)\n rel_path = \"data/keys/user.json\"\n abs_path = os.path.join(script_dir, rel_path)\n\n if os.path.isfile(abs_path):\n with open(abs_path, \"r\") as fd:\n user_dict = json.loads(fd.read())\n feed_id = bytes.fromhex(user_dict[\"feed_id\"])\n self.username = user_dict[\"username\"]\n self.event_factory = EventCreationTool.EventFactory(last_event=self.db_connection.get_current_event(feed_id),\n path_to_keys=os.path.join(script_dir, \"data/keys/\"),\n path_to_keys_relative=False)\n\n def create_user(self, username):\n \"\"\"Mainly for the Secret sharing application\"\"\"\n print(\"creating new user\")\n script_dir = os.path.dirname(__file__)\n rel_path = \"data/keys/user.json\"\n abs_path = os.path.join(script_dir, rel_path)\n self.event_factory = 
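The serial reader above multiplexes three fields over one line-oriented protocol, distinguished by prefixes. A standalone sketch of that parsing, mirroring the slice offsets in SerialReadingThread.run() (the uid value starts at index 6 in the original, which suggests a separator byte after the prefix; parse_line is an illustrative helper):

def parse_line(data_raw):
    """Return a (field, value) tuple, or None for unknown lines."""
    if data_raw.startswith("x1uid"):
        return "uid", data_raw[6:]
    if data_raw.startswith("x2lon"):
        return "longitude", data_raw[5:]
    if data_raw.startswith("x3lat"):
        return "latitude", data_raw[5:]
    return None

assert parse_line("x2lon7.5886") == ("longitude", "7.5886")
assert parse_line("x1uid 04A1B2") == ("uid", "04A1B2")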
EventCreationTool.EventFactory()\n feed_id = self.event_factory.get_feed_id()\n first_event = self.event_factory.first_event(\"chat\", self.db_connection.get_host_master_id())\n print(first_event)\n if self.db_connection.insert_event(first_event) == -1:\n print(\"Inserting first event failed\")\n print(f\"feed_id:{feed_id} with username: {username} created\")\n os.replace(script_dir +\"/\"+ feed_id.hex() + \".key\", os.path.join(script_dir,\"data/keys/\" + feed_id.hex() + \".key\"))\n self.event_factory.set_path_to_keys(os.path.join(script_dir, \"data/keys/\"))\n with open(abs_path, \"w\") as fd:\n user_dict = {\n \"username\": username,\n \"feed_id\": feed_id.hex()\n }\n fd.write(json.dumps(user_dict, indent=4))\n\n def get_feed_ids(self):\n \"\"\"This Function gets all the feed id's in the database, NO MASTER FEED IDS\"\"\"\n feed_ids = self.db_connection.get_all_feed_ids()\n master_ids = self.db_connection.get_all_master_ids()\n own_ids = self.event_factory.get_own_feed_ids()\n master_ids.append(self.db_connection.get_host_master_id())\n\n # remove master feed ids\n feed_ids = [feed_id for feed_id in feed_ids if feed_id not in master_ids]\n # remove own feed ids\n feed_ids = [feed_id for feed_id in feed_ids if feed_id not in own_ids]\n\n return feed_ids\n\n def insert_new_events(self, events: List[dict]):\n \"\"\"creates new Events with feeds EventFactory and inserts it into the database\"\"\"\n for event in events:\n next_event = self.event_factory.next_event(\"chat/secret\", event)\n self.db_connection.insert_event(next_event)\n\n def pull_new_events(self, feed_seq_tuples: List[Tuple[bytes, int]]):\n \"\"\"pulls Events for specified feed_id starting at the specified seq_no\"\"\"\n event_list = []\n for tuples in feed_seq_tuples:\n feed_id, old_seq_no = tuples\n current_seq_no = self.db_connection.get_current_seq_no(feed_id)\n for seq_no in range(old_seq_no, current_seq_no + 1):\n event = self.db_connection.get_event(feed_id, seq_no)\n event_list.append((Event.Event.from_cbor(event).content.content[1], feed_id))\n return event_list", "id": "8814073", "language": "Python", "matching_score": 3.0566370487213135, "max_stars_count": 8, "path": "21-fs-ias-lec/06-SecretSharing/database_connector.py" }, { "content": "from .connection import Function\r\n\r\n\r\nclass FeedCtrlConnection(Function):\r\n \"\"\"\"Connectivity for the feed control group to check the master feeds and its corresponding child feeds.\"\"\"\r\n\r\n def __init__(self):\r\n super(FeedCtrlConnection, self).__init__()\r\n\r\n def add_event(self, event):\r\n \"\"\"Add an event to the master database as well as the general cbor database.\r\n\r\n @:parameter event: The new cbor event to be added\r\n @:returns 1 if successful, -1 if any error occurred\r\n \"\"\"\r\n return super().insert_event(event)\r\n\r\n def get_trusted(self, master_id):\r\n \"\"\"Get a list of all trusted feed_ids.\r\n\r\n @:parameter master_id: The master id of the user\r\n @:returns -1 if any error occurred or returns a list containing all the trusted feed ids\r\n \"\"\"\r\n return self._handler.get_trusted(master_id)\r\n\r\n def get_blocked(self, master_id):\r\n \"\"\"Get a list of all blocked feed_ids.\r\n\r\n @:parameter master_id: The master id of the user\r\n @:returns -1 if any error occurred or returns a list containing all the blocked feed ids\r\n \"\"\"\r\n return self._handler.get_blocked(master_id)\r\n\r\n def get_all_master_ids(self):\r\n \"\"\"Get a list of all master feed_ids.\r\n\r\n @:returns -1 if any error occurred or returns a list 
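RequestHandler above is wrapped by the Singleton decorator so that every caller shares one database connection. A standalone sketch of how that accessor behaves (Config is an illustrative stand-in class):

class Singleton:
    def __init__(self, cls):
        self._cls = cls

    def Instance(self):
        try:
            return self._instance
        except AttributeError:
            self._instance = self._cls()
            return self._instance

    def __call__(self):
        raise TypeError('Singletons must be accessed through `Instance()`.')

@Singleton
class Config:
    def __init__(self):
        self.loaded = True

a = Config.Instance()
b = Config.Instance()
assert a is b   # every access returns the same instance
# Config() would raise TypeError: direct instantiation is blocked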
containing all the master ids in the database.\r\n \"\"\"\r\n return self._handler.get_all_master_ids()\r\n\r\n def get_all_master_ids_feed_ids(self, master_id):\r\n \"\"\"Get a list of all feed_ids of one master feed id.\r\n\r\n @:parameter master_id: The master id of the user\r\n @:returns -1 if any error occurred or returns a list containing all the feed ids subscribed to one master\r\n \"\"\"\r\n return self._handler.get_all_master_ids_feed_ids(master_id)\r\n\r\n def get_username(self, master_id):\r\n \"\"\"Get username to feed id.\r\n\r\n @:parameter master_id: A master id of the user\r\n @:returns -1 if any error occurred or returns the currently set username.\r\n \"\"\"\r\n return self._handler.get_username(master_id)\r\n\r\n def get_my_last_event(self):\r\n \"\"\"Get last event posted by master feed.\r\n\r\n @:returns -1 if any error occurred or returns the last event posted by the master feed.\r\n \"\"\"\r\n return self._handler.get_my_last_event()\r\n\r\n def get_host_master_id(self):\r\n \"\"\"Retrieve the master id of the host.\r\n\r\n @:returns -1 if any error occurred or returns the host master id.\r\n \"\"\"\r\n return self._handler.get_host_master_id()\r\n\r\n def get_radius(self):\r\n \"\"\"Get radius of host.\r\n\r\n @:returns -1 if any error occurred or returns the currently set radius of the host feed.\r\n \"\"\"\r\n return self._handler.get_radius()\r\n\r\n def get_master_id_from_feed(self, feed_id):\r\n \"\"\"Get the master feedid to a feed.\r\n\r\n @:parameter feed_id: A feed id from which one would like to know the master feed id\r\n @:returns -1 if any error occurred or returns a master feed id\r\n \"\"\"\r\n return self._handler.get_master_id_from_feed(feed_id)\r\n\r\n def get_application_name(self, feed_id):\r\n \"\"\"Get name of application.\r\n\r\n @:parameter feed_id: A feed id from which one would like to know the application name\r\n @:returns -1 if any error occurred or returns the application name\r\n \"\"\"\r\n return self._handler.get_application_name(feed_id)\r\n\r\n def get_feed_ids_from_application_in_master_id(self, master_id, application_name):\r\n \"\"\"Get feed all feed ids corresponding to an application.\r\n\r\n @:parameter master_id: A master id\r\n @:parameter application_name: An application name\r\n @:returns -1 if any error occurred or returns a list of the feed ids corresponding to one application name.\r\n \"\"\"\r\n return self._handler.get_feed_ids_from_application_in_master_id(master_id, application_name)\r\n\r\n def get_feed_ids_in_radius(self):\r\n \"\"\"Get feed all feed ids in radius of master feed.\r\n\r\n @:returns -1 if any error occurred or returns a list of the feed ids inside the current radius.\r\n \"\"\"\r\n return self._handler.get_feed_ids_in_radius()\r\n\r\n def set_feed_ids_radius(self, feed_id, radius):\r\n \"\"\"Get feed radius of a feed id.\r\n\r\n @:parameter feed_id: A feed id to which we want to set the radius.\r\n @:parameter radius: The radius we want to change it to.\r\n \"\"\"\r\n return self._handler.set_feed_ids_radius(feed_id, radius)", "id": "2122940", "language": "Python", "matching_score": 1.2797887325286865, "max_stars_count": 8, "path": "21-fs-ias-lec/16-nicknames-forward/logStore/appconn/feed_ctrl_connection.py" }, { "content": "from .sql_alchemy_connector import SqLiteDatabase\r\nfrom ..funcs.log import create_logger\r\nfrom ..funcs.constants import SQLITE\r\nfrom ..funcs.singleton import Singleton\r\nfrom ..funcs.event import Event\r\n\r\nlogger = create_logger('ByteArrayHandler')\r\n\"\"\"The byte 
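Several of the connection layers above track a sequence number per feed so that only unseen events are transferred, as in pull_new_events(). A minimal in-memory sketch of that incremental pull, with a plain dict (FEEDS) standing in for the database:

FEEDS = {
    b'feed-a': ["msg0", "msg1", "msg2"],   # list index == seq_no
    b'feed-b': ["hello"],
}

def pull_new_events(feed_seq_tuples):
    # mirrors the (feed_id, old_seq_no) -> events-in-range loop above
    event_list = []
    for feed_id, old_seq_no in feed_seq_tuples:
        current_seq_no = len(FEEDS[feed_id]) - 1
        for seq_no in range(old_seq_no, current_seq_no + 1):
            event_list.append((FEEDS[feed_id][seq_no], feed_id))
    return event_list

print(pull_new_events([(b'feed-a', 1), (b'feed-b', 0)]))
# [('msg1', b'feed-a'), ('msg2', b'feed-a'), ('hello', b'feed-b')]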
array handler allows the database handler to insert a new event into the cbor database.\r\n\r\nIt is strictly meant for internal purposes and should not be directly accesses or called by any module importing this\r\nmodule.\r\n\"\"\"\r\n\r\n\r\nclass ByteArrayHandler(metaclass=Singleton):\r\n \"\"\"Byte Array Handler gets created once by the database handler.\r\n\r\n It has the metaclass singleton to be allowed to be created only once as there should not be more than one handler\r\n created by each callee. The init function initiates the database variable and creates the needed tables in\r\n the database.\r\n \"\"\"\r\n\r\n def __init__(self):\r\n self.__sqlAlchemyConnector = SqLiteDatabase(SQLITE, dbname='cborDatabase.sqlite')\r\n self.__sqlAlchemyConnector.create_cbor_db_tables()\r\n\r\n def insert_byte_array(self, event_as_cbor):\r\n \"\"\"\"Insert a new event into the database. For this we extract the sequence number and feed_id and store the\r\n exact cbor event with those values as keys.\"\"\"\r\n event = Event.from_cbor(event_as_cbor)\r\n seq_no = event.meta.seq_no\r\n feed_id = event.meta.feed_id\r\n self.__sqlAlchemyConnector.insert_byte_array(feed_id, seq_no, event_as_cbor)\r\n\r\n def get_current_seq_no(self, feed_id):\r\n \"\"\"\"Return the current sequence number of a given feed_id, returns an integer with the currently largest\r\n sequence number for the given feed. Returns -1 if there is no such feed_id in the database.\"\"\"\r\n return self.__sqlAlchemyConnector.get_current_seq_no(feed_id)\r\n\r\n def get_event(self, feed_id, seq_no):\r\n \"\"\"\"Return a specific cbor event to the callee with the input feed_id and sequence number. Returns None if\r\n there is no such entry.\"\"\"\r\n return self.__sqlAlchemyConnector.get_event(feed_id, seq_no)\r\n\r\n def get_current_event_as_cbor(self, feed_id):\r\n \"\"\"\"Return the newest (the one with the highest sequence number) cbor event for a feed_id. 
Returns None if\r\n there is no such feed_id in the database.\"\"\"\r\n return self.__sqlAlchemyConnector.get_current_event_as_cbor(feed_id)\r\n\r\n def get_all_feed_ids(self):\r\n \"\"\"\"Return all current feed ids in the database.\"\"\"\r\n return self.__sqlAlchemyConnector.get_all_feed_ids()\r\n\r\n\r\nclass InvalidSequenceNumber(Exception):\r\n def __init__(self, message):\r\n super(InvalidSequenceNumber, self).__init__(message)\r\n", "id": "4276884", "language": "Python", "matching_score": 3.365859270095825, "max_stars_count": 8, "path": "21-fs-ias-lec/16-nicknames-forward/logStore/database/cbor_handler.py" }, { "content": "import pickle\r\n\r\nfrom ..funcs.singleton import Singleton\r\nfrom .sql_alchemy_connector import SqLiteDatabase\r\nfrom ..funcs.constants import SQLITE\r\nfrom ..funcs.event import Event\r\nfrom ..funcs.log import create_logger\r\n\r\nlogger = create_logger('EventHandler')\r\n\r\nconnPep = {}\r\n\r\n\r\nclass EventHandler(metaclass=Singleton):\r\n\r\n def __init__(self):\r\n self.__sqlAlchemyConnector = SqLiteDatabase(SQLITE, dbname='eventDatabase.sqlite')\r\n self.__sqlAlchemyConnector.create_chat_event_table()\r\n self.__sqlAlchemyConnector.create_kotlin_table()\r\n self.__sqlAlchemyConnector.create_master_table()\r\n\r\n def add_event(self, event_as_cbor):\r\n try:\r\n event = Event.from_cbor(event_as_cbor)\r\n seq_no = event.meta.seq_no\r\n feed_id = event.meta.feed_id\r\n content = event.content.content\r\n\r\n cont_ident = content[0].split('/')\r\n application = cont_ident[0]\r\n application_action = cont_ident[1]\r\n\r\n if application == 'chat':\r\n if application_action == 'MASTER':\r\n return\r\n\r\n elif application_action =='sendName':\r\n newName = content[1]['name']\r\n\r\n username_file = open(\"username.pkl\", 'rb')\r\n username_dict = pickle.load(username_file)\r\n username_file.close()\r\n\r\n if not newName == username_dict['username']:\r\n file = open('connectedPerson.pkl', 'rb')\r\n connPep = pickle.load(file)\r\n file.close()\r\n alreadyExists = False\r\n\r\n for entry in connPep:\r\n if entry == newName:\r\n alreadyExists = True\r\n break\r\n\r\n if not alreadyExists:\r\n connPep[newName] = newName\r\n file = open('connectedPerson.pkl', 'wb')\r\n pickle.dump(connPep, file)\r\n file.close()\r\n\r\n elif application_action == 'MyNameChanged':\r\n\r\n username_file = open(\"username.pkl\", 'rb')\r\n username_dict = pickle.load(username_file)\r\n username_file.close()\r\n\r\n if not content[1]['fromUser'] == username_dict['username']:\r\n\r\n f = open('connectedPerson.pkl', 'rb')\r\n entries = pickle.load(f)\r\n f.close()\r\n\r\n for entry in entries:\r\n # entry[0] = key for specific entry, content[1]['fromUser'] = oldUsername\r\n if entry == content[1]['fromUser']:\r\n # there is no nickname for this person\r\n if entry == entries[entry]:\r\n entries[content[1]['newName']] = content[1]['newName']\r\n entries.pop(entry)\r\n break\r\n else:\r\n # there is a nickname for this person\r\n entries[content[1]['newName']] = entries[content[1]['fromUser']]\r\n entries.pop(entry)\r\n break\r\n\r\n f = open('connectedPerson.pkl', 'wb')\r\n pickle.dump(entries, f)\r\n f.close()\r\n\r\n # update personlist.pkl:\r\n f = open('personList.pkl', 'rb')\r\n person = pickle.load(f)\r\n f.close()\r\n\r\n for i in range(len(person)):\r\n if person[i][0] == content[1]['fromUser']:\r\n person[i][0] = content[1]['newName']\r\n\r\n f = open('personList.pkl', 'wb')\r\n pickle.dump(person, f)\r\n f.close()\r\n\r\n elif application_action == 'nameChanged':\r\n\r\n 
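ByteArrayHandler above defines a small storage contract: events are addressed by (feed_id, seq_no), get_current_seq_no() returns -1 for unknown feeds, and get_event() returns None for missing entries. A dictionary-backed sketch of that contract (InMemoryByteArrayStore is illustrative, not the project's SQLite-backed handler):

class InMemoryByteArrayStore:
    def __init__(self):
        self._events = {}   # (feed_id, seq_no) -> raw event bytes
        self._heads = {}    # feed_id -> highest seq_no seen

    def insert_byte_array(self, feed_id, seq_no, event_as_cbor):
        self._events[(feed_id, seq_no)] = event_as_cbor
        self._heads[feed_id] = max(self._heads.get(feed_id, -1), seq_no)

    def get_current_seq_no(self, feed_id):
        return self._heads.get(feed_id, -1)

    def get_event(self, feed_id, seq_no):
        return self._events.get((feed_id, seq_no))

store = InMemoryByteArrayStore()
store.insert_byte_array(b'feed', 0, b'\xa1raw-cbor')
assert store.get_current_seq_no(b'feed') == 0
assert store.get_current_seq_no(b'other') == -1
assert store.get_event(b'feed', 1) is None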
print(\"nameChanged Event\")\r\n\r\n #Someone gave me a nickname\r\n newName = content[1]['newName']\r\n fromUser = content[1]['fromUser']\r\n # only add the name if fromUser is not you:\r\n # therefor use username.pkl\r\n username_file = open(\"username.pkl\", 'rb')\r\n username_dict = pickle.load(username_file)\r\n username_file.close()\r\n\r\n if not fromUser == username_dict['username']:\r\n\r\n nameAcceptable = True\r\n\r\n file = open(\"unwantedNames.txt\", 'r')\r\n names = file.readlines()\r\n\r\n for name in names:\r\n name = name.replace(\"\\n\", \"\")\r\n\r\n if newName.lower().find(name.lower()) != -1:\r\n nameAcceptable = False\r\n break\r\n file.close()\r\n\r\n # add new name to txt, names slpi wi\r\n my_names_file = open(\"my_names.txt\", 'a')\r\n\r\n if nameAcceptable == True:\r\n my_names_file.write(newName + \", \")\r\n my_names_file.close()\r\n print(\"name is acceptable\")\r\n\r\n file = open('resetName.txt', 'w')\r\n file.write(\"False\")\r\n file.close()\r\n\r\n\r\n else:\r\n\r\n oldName = content[1]['oldFriendsUsername']\r\n file = open('resetName.txt', 'w')\r\n file.write(\"True/\"+newName+\"/\"+oldName+\"/\"+fromUser)\r\n file.close()\r\n\r\n \"\"\"\r\n print(\"createUnwantedEvent Aufruf\")\r\n #createUnwantedEvent(newName, fromUser, content[1]['oldName'])\r\n ecf = EventFactory()\r\n unwantedNameEvent = ecf.next_event('chat/unwantedName',{'name': newName, 'fromUser': fromUser,'oldName': content[1]['oldFriendsUsername'] })\r\n chat_function = ChatFunction()\r\n chat_function.insert_event(unwantedNameEvent)\r\n\r\n \"\"\"\r\n\r\n elif application_action == 'unwantedName':\r\n\r\n print(\"unwantedName Methode wird ausgeführt\")\r\n\r\n username_file = open(\"username.pkl\", 'rb')\r\n username_dict = pickle.load(username_file)\r\n username_file.close()\r\n\r\n print(content[1]['fromUser'])\r\n print(username_dict['username'])\r\n\r\n if (content[1]['fromUser'] == username_dict['username']):\r\n print(\"why??\")\r\n\r\n with open('connectedPerson.pkl', 'rb') as f:\r\n file = pickle.load(f)\r\n f.close()\r\n\r\n key = ''\r\n items = file.items()\r\n for t in items:\r\n if t[1] == content[1]['name']:\r\n key = t[0]\r\n\r\n file[key] = content[1]['oldName']\r\n\r\n f = open('connectedPerson.pkl', 'wb')\r\n pickle.dump(file, f)\r\n f.close()\r\n\r\n\r\n else:\r\n chatMsg = content[1]['messagekey']\r\n chat_id = content[1]['chat_id']\r\n timestamp = content[1]['timestampkey']\r\n\r\n self.__sqlAlchemyConnector.insert_event(feed_id=feed_id, seq_no=seq_no, application=application,\r\n chat_id=chat_id,\r\n timestamp=timestamp, data=chatMsg)\r\n\r\n\r\n elif application == 'KotlinUI':\r\n if application_action == 'post':\r\n username = content[1]['username']\r\n timestamp = content[1]['timestamp']\r\n text = content[1]['text']\r\n self.__sqlAlchemyConnector.insert_kotlin_event(feed_id=feed_id, seq_no=seq_no,\r\n application=application_action,\r\n username=username, oldusername='',\r\n timestamp=timestamp, text=text)\r\n\r\n elif application_action == 'username':\r\n username = content[1]['newUsername']\r\n oldusername = content[1]['oldUsername']\r\n\r\n timestamp = content[1]['timestamp']\r\n self.__sqlAlchemyConnector.insert_kotlin_event(feed_id=feed_id, seq_no=seq_no,\r\n application=application_action,\r\n username=username, oldusername=oldusername,\r\n timestamp=timestamp, text='')\r\n\r\n elif application == 'MASTER':\r\n self.master_handler(seq_no, feed_id, content, cont_ident, event_as_cbor)\r\n\r\n else:\r\n raise InvalidApplicationError('Invalid application called %s' 
% application)\r\n except KeyError as e:\r\n logger.error(e)\r\n return -1\r\n\r\n def get_event_since(self, application, timestamp, chat_id):\r\n return self.__sqlAlchemyConnector.get_all_events_since(application, timestamp, chat_id)\r\n\r\n def get_all_events(self, application, chat_id):\r\n return self.__sqlAlchemyConnector.get_all_event_with_chat_id(application, chat_id)\r\n\r\n def get_Kotlin_usernames(self):\r\n return self.__sqlAlchemyConnector.get_all_usernames()\r\n\r\n def get_all_kotlin_events(self):\r\n return self.__sqlAlchemyConnector.get_all_kotlin_events()\r\n\r\n def get_all_entries_by_feed_id(self, feed_id):\r\n return self.__sqlAlchemyConnector.get_all_entries_by_feed_id(feed_id)\r\n\r\n def get_last_kotlin_event(self):\r\n return self.__sqlAlchemyConnector.get_last_kotlin_event()\r\n\r\n \"\"\"\"Structure of insert_master_event:\r\n insert_master_event(self, master, feed_id, app_feed_id, trust_feed_id, seq_no, trust, name, radius, event_as_cbor, app_name)\"\"\"\r\n\r\n def master_handler(self, seq_no, feed_id, content, cont_ident, event_as_cbor):\r\n \"\"\"Handle master events and insert the events corresponding to their definition:\"\"\"\r\n event = cont_ident[1]\r\n if event == 'MASTER':\r\n self.__sqlAlchemyConnector.insert_master_event(True, feed_id, None, None, seq_no, None, None, 0,\r\n event_as_cbor, None)\r\n elif event == 'Trust':\r\n self.__sqlAlchemyConnector.insert_master_event(False, feed_id, None, content[1]['feed_id'], seq_no, True,\r\n None, None, event_as_cbor, None)\r\n from feedCtrl.radius import Radius\r\n r = Radius()\r\n r.calculate_radius()\r\n elif event == 'Block':\r\n self.__sqlAlchemyConnector.insert_master_event(False, feed_id, None, content[1]['feed_id'], seq_no, False,\r\n None, None, event_as_cbor, None)\r\n from feedCtrl.radius import Radius\r\n r = Radius()\r\n r.calculate_radius()\r\n elif event == 'Name':\r\n self.__sqlAlchemyConnector.insert_master_event(False, feed_id, None, None, seq_no, None,\r\n content[1]['name'], None, event_as_cbor, None)\r\n elif event == 'NewFeed':\r\n self.__sqlAlchemyConnector.insert_master_event(False, feed_id, content[1]['feed_id'], None, seq_no, True,\r\n None, None, event_as_cbor, content[1]['app_name'])\r\n elif event == 'Radius':\r\n self.__sqlAlchemyConnector.insert_master_event(False, feed_id, None, None, seq_no,\r\n None, None, content[1]['radius'], event_as_cbor, None)\r\n elif event == 'ReportName':\r\n file = open(\"connectedPerson.txt\", \"a\")\r\n file.write(\"\\n\")\r\n print(\"event handler \" + content[1]['trustedName'])\r\n file.write(content[1]['trustedName'])\r\n file.close()\r\n else:\r\n raise InvalidApplicationError('Invalid action called %s' % event)\r\n\r\n \"\"\"\"Following come the feed control mechanisms used by database_handler:\"\"\"\r\n\r\n def get_trusted(self, master_id):\r\n return self.__sqlAlchemyConnector.get_trusted(master_id)\r\n\r\n def get_blocked(self, master_id):\r\n return self.__sqlAlchemyConnector.get_blocked(master_id)\r\n\r\n def get_all_master_ids(self):\r\n return self.__sqlAlchemyConnector.get_all_master_ids()\r\n\r\n def get_all_master_ids_feed_ids(self, master_id):\r\n return self.__sqlAlchemyConnector.get_all_master_ids_feed_ids(master_id)\r\n\r\n def get_username(self, master_id):\r\n return self.__sqlAlchemyConnector.get_username(master_id)\r\n\r\n def get_my_last_event(self):\r\n return self.__sqlAlchemyConnector.get_my_last_event()\r\n\r\n def get_host_master_id(self):\r\n return self.__sqlAlchemyConnector.get_host_master_id()\r\n\r\n def 
get_radius(self):\r\n return self.__sqlAlchemyConnector.get_radius()\r\n\r\n def get_master_id_from_feed(self, feed_id):\r\n return self.__sqlAlchemyConnector.get_master_id_from_feed(feed_id)\r\n\r\n def get_application_name(self, feed_id):\r\n return self.__sqlAlchemyConnector.get_application_name(feed_id)\r\n\r\n def get_feed_ids_from_application_in_master_id(self, master_id, application_name):\r\n return self.__sqlAlchemyConnector.get_feed_ids_from_application_in_master_id(master_id, application_name)\r\n\r\n def get_feed_ids_in_radius(self):\r\n return self.__sqlAlchemyConnector.get_feed_ids_in_radius()\r\n\r\n def set_feed_ids_radius(self, feed_id, radius):\r\n return self.__sqlAlchemyConnector.set_feed_ids_radius(feed_id, radius)\r\n\r\n\r\nclass InvalidApplicationError(Exception):\r\n def __init__(self, message):\r\n super(InvalidApplicationError, self).__init__(message)\r\n\r\n", "id": "11609919", "language": "Python", "matching_score": 3.7791337966918945, "max_stars_count": 8, "path": "21-fs-ias-lec/16-nicknames-forward/logStore/database/event_handler.py" }, { "content": "import sys\r\nimport pickle\r\n\r\n\r\ndef main():\r\n\r\n \"\"\"\r\n # creat connectedPerson.pkl file\r\n dictionary2 = {}\r\n file = open(\"connectedPerson.pkl\", \"wb\")\r\n pickle.dump(dictionary2, file)\r\n file.close()\r\n\r\n \"\"\"\r\n\r\n if sys.argv[1] == 'Nickname':\r\n changeName(sys.argv[2], sys.argv[3])\r\n\r\n elif sys.argv[1] == 'Username':\r\n changeUsername(sys.argv[2], sys.argv[3])\r\n\r\n else:\r\n newName = sys.argv[1]\r\n\r\n username_file = open(\"username.pkl\", 'rb')\r\n username_dict = pickle.load(username_file)\r\n username_file.close()\r\n\r\n if not newName == username_dict['username']:\r\n file = open('connectedPerson.pkl', 'rb')\r\n connPep = pickle.load(file)\r\n file.close()\r\n\r\n alreadyExists = False\r\n\r\n for entry in connPep:\r\n if entry == newName:\r\n alreadyExists = True\r\n break\r\n\r\n if not alreadyExists:\r\n connPep[newName] = newName\r\n file = open('connectedPerson.pkl', 'wb')\r\n pickle.dump(connPep, file)\r\n file.close()\r\n\r\n print(connPep)\r\n\r\n\r\ndef changeName(oldName, newName):\r\n\r\n with open('connectedPerson.pkl', 'rb') as f:\r\n file = pickle.load(f)\r\n f.close()\r\n\r\n key = ''\r\n items = file.items()\r\n for t in items:\r\n if t[1] == oldName:\r\n key = t[0]\r\n\r\n file[key] = newName\r\n\r\n f = open('connectedPerson.pkl', 'wb')\r\n pickle.dump(file, f)\r\n f.close()\r\n\r\n print(file)\r\n\r\ndef changeUsername(oldName, newName):\r\n\r\n username_file = open(\"username.pkl\", 'rb')\r\n username_dict = pickle.load(username_file)\r\n username_file.close()\r\n\r\n if not oldName == username_dict['username']:\r\n\r\n f = open('connectedPerson.pkl', 'rb')\r\n entries = pickle.load(f)\r\n f.close()\r\n\r\n for entry in entries:\r\n # entry[0] = key for specific entry, content[1]['fromUser'] = oldUsername\r\n if entry == oldName:\r\n print(\"entry found\")\r\n # there is no nickname for this person\r\n if entry == entries[entry]:\r\n print(\"no nickname\")\r\n entries[newName] = newName\r\n entries.pop(entry)\r\n break\r\n else:\r\n print(\"with nickname\")\r\n # there is a nickname for this person\r\n entries[newName] = entries[oldName]\r\n entries.pop(entry)\r\n break\r\n\r\n f = open('connectedPerson.pkl', 'wb')\r\n pickle.dump(entries, f)\r\n f.close()\r\n print(entries)\r\n\r\n\r\nif __name__ == '__main__':\r\n main()\r\n", "id": "132505", "language": "Python", "matching_score": 0.07822510600090027, "max_stars_count": 8, "path": 
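add_event() and master_handler() above route every event on its content identifier, a single 'application/action' string that is split once and dispatched on both parts. A compact standalone sketch of that routing (route and the handler table are illustrative):

def route(content_identifier, payload, handlers):
    # "chat/sendName" -> application="chat", action="sendName"
    application, action = content_identifier.split('/', 1)
    try:
        return handlers[application][action](payload)
    except KeyError:
        raise ValueError('Invalid application called %s' % application)

handlers = {
    'chat': {'sendName': lambda p: ('store-name', p['name'])},
    'KotlinUI': {'post': lambda p: ('store-post', p['text'])},
}

assert route('chat/sendName', {'name': 'alice'}, handlers) == ('store-name', 'alice')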
"21-fs-ias-lec/16-nicknames-forward/Test.py" }, { "content": "import os\nimport json\nimport logging\n\nlogger = logging.getLogger()\n\n# global folders\nDATA_DIR = os.path.join(os.path.dirname(__file__), \"..\", \"data\")\nif not os.path.isdir(DATA_DIR):\n os.mkdir(DATA_DIR)\nKEY_DIR: os.path = os.path.join(DATA_DIR, \"keys\")\nif not os.path.isdir(KEY_DIR):\n os.mkdir(KEY_DIR)\n# autogenerate database folder\nRECOVERY_DIR: os.path = os.path.join(DATA_DIR, \"recovery\")\nif not os.path.isdir(RECOVERY_DIR):\n os.mkdir(RECOVERY_DIR)\n\n\nclass State(dict):\n \"\"\"Persistent mini database interface class in the SecretSharing API implemented with json.\"\"\"\n def __init__(self, filename: str, directory: os.path, default: dict):\n logger.debug(\n \"Creating {} in directory {} with default content: \\n{}\"\n .format(filename, directory, json.dumps(default, indent=4))\n )\n super().__init__()\n self.abs_path = os.path.join(directory, filename)\n if not os.path.isfile(self.abs_path):\n self.update(default)\n with open(self.abs_path, \"w\") as fd:\n fd.write(json.dumps(default, indent=4))\n fd.close()\n else:\n self.load()\n\n def load(self) -> None:\n with open(self.abs_path, \"r\") as fd:\n state: dict = json.loads(fd.read())\n fd.close()\n self.clear()\n self.update(state)\n\n def save(self) -> None:\n with open(self.abs_path, \"w\") as fd:\n fd.write(json.dumps(dict(self), indent=4))\n fd.close()", "id": "10233836", "language": "Python", "matching_score": 1.2008538246154785, "max_stars_count": 8, "path": "21-fs-ias-lec/06-SecretSharing/BackEnd/settings.py" }, { "content": "\"\"\"Exceptions for the Secret Sharing Project.\"\"\"\n\n\n# Errors\n\n\nclass SecretSharingError(Exception):\n \"\"\"Signals that an error occurred in Secret Sharing.\"\"\"\n def __init__(self, message: str):\n self.message = message\n super().__init__()\n\n def msg(self):\n return self.message\n\n\nclass PasswordError(SecretSharingError):\n \"\"\"Signals that a password was not accepted or didn't fulfill specifications.\"\"\"\n def __init__(self, message: str, password: str):\n self.message = message\n self.password = password\n super().__init__(message)\n\n def password(self) -> str:\n return self.password\n\n\nclass MappingError(SecretSharingError):\n \"\"\"Signals that a Secret-to-Number mapping failed.\"\"\"\n def __init__(self, message: str, mapping: tuple):\n self.message = message\n self.mapping = mapping\n super().__init__(message)\n\n def mapping(self) -> tuple:\n return self.mapping\n\n\nclass SecretPackagingError(SecretSharingError):\n def __init__(self, message: str, secret: bytes):\n self.message = message\n self._secret = secret\n super().__init__(message)\n\n def secret(self) -> bytes:\n return self._secret\n\n\n# Exceptions\n\n\nclass SecretSharingException(Exception):\n def __init__(self, message: str):\n super().__init__(message)\n\n\nclass IncomingRequestException(SecretSharingException):\n def __init__(self, message: str, name: str):\n self.name = name\n super().__init__(message)\n\n def get(self):\n return self.name\n\n\nclass RecoveryFromScratchException(SecretSharingException):\n def __init__(self, message: str, secret: bytes):\n self.secret = secret\n self.message = message\n super().__init__(message)\n\n def get(self):\n return self.secret\n\n\nclass RecoverySuccessException(SecretSharingException):\n def __init__(self, message: str, secret: bytes, secret_name: str, scratch_info=None):\n self.secret = secret\n self.secret_name = secret_name\n self.scratch_info = scratch_info\n self.message = 
message\n        super().__init__(message)\n\n    def get_secret(self):\n        return self.secret\n\n    def get_scratch(self):\n        return self.scratch_info\n\n\nclass SubEventDecryptionException(SecretSharingException):\n    def __init__(self, message: str, sub_event: dict):\n        self.message = message\n        self._secret = sub_event\n        super().__init__(message)\n\n\nclass StateEncryptedException(SecretSharingException):\n    def __init__(self, message: str):\n        self.message = message\n        super().__init__(message)\n\n\nclass PackageStealException(SecretSharingException):\n    def __init__(self, message: str, feed_id: bytes):\n        self.message = message\n        self.thief = feed_id\n        super().__init__(message)\n\n    def get_thief(self):\n        return self.thief\n", "id": "3741998", "language": "Python", "matching_score": 3.2372803688049316, "max_stars_count": 8, "path": "21-fs-ias-lec/06-SecretSharing/BackEnd/exceptions.py" }, { "content": "\"\"\"\nThe actions script is the interface between the UI and the BackEnd of the SecretSharing project.\n\"\"\"\n\n# BACnet imports\n# import BACnetCore\n# import BACnetTransport\n\nimport logging\nimport atexit\nfrom enum import IntEnum\nfrom threading import Thread\nfrom time import sleep\nfrom typing import List, Tuple\nfrom nacl.exceptions import CryptoError\nimport bcrypt\n\nfrom BackEnd import core\n\nfrom BackEnd import settings\nfrom BackEnd.exceptions import *\nfrom secrets import compare_digest\n\n# ~~~~~~~~~~~~ Logging ~~~~~~~~~~~~\n\nlogger = logging.getLogger(__name__)\n\n# ~~~~~~~~~~~~ Constants ~~~~~~~~~~~~\nSPECIAL_CHARACTERS = ['.', ',', '-', '=', '[', '@', '_', '!', '#', '$', '%', '^', '&', '*',\n                      '(', ')', '<', '>', '?', '/', '\\\\', '|', '}', '{', '~', ':', ']']\nENCODING = core.ENCODING\nMAP = \"mapping\"\nNAME = \"name\"\nTHR = \"threshold\"\nPARTS = \"parts\"\nSIZE = \"size\"\n\n\n# ~~~~~~~~~~~~ Master Password ~~~~~~~~~~~~\n\nmaster_password = None  # session password\n\n\n# ~~~~~~~~~~~~ State Files ~~~~~~~~~~~~\n# State files keep persistent information in JSON format.\npwd_gate = settings.State(\"pwd_gate\", settings.DATA_DIR, {\"encrypted\": False, \"pwd\": \"\"})  # stores password hash\nSTATE_ENCRYPTED = pwd_gate[\"encrypted\"]\n\nif not STATE_ENCRYPTED:\n    preferences = settings.State(\"preferences\", settings.DATA_DIR, {\"auto_save\": 60})  # stores preferences for ui\n    shareBuffer = settings.State(\"shareBuffer\", settings.DATA_DIR, {})  # stores shares in between send/recv\n    contacts = settings.State(\"contacts\", settings.DATA_DIR, {})  # stores contact information/pubkeys\n    secrets = settings.State(\"secrets\", settings.DATA_DIR, {})  # stores secret-specific information,\n    # keys = settings.State(\"master\", settings.KEY_DIR, core.generate_keys())  # stores secret-specific information,\nelse:\n    # catch and exit application\n    raise StateEncryptedException(\"State is encrypted\")\n\n\ndef save_state():\n    \"\"\"Saves all states to file.\"\"\"\n    logger.debug(\"called\")\n    pwd_gate.save()\n    preferences.save()\n    shareBuffer.save()\n    contacts.save()\n    secrets.save()\n\n\n# ~~~~~~~~~~~~ Auto Save ~~~~~~~~~~~~\n\nclass SaverDaemon(Thread):\n    def __init__(self):\n        super().__init__()\n        self.daemon = True  # actually mark the thread as a daemon; isDaemon() only queried the flag\n\n    def run(self):\n        while True:\n            sleep(1)\n            auto_save = preferences.get(\"auto_save\")\n            sleep(1)\n            save_state()\n            sleep(auto_save - 2)\n\n\ndef set_auto_save_duration(seconds: int):\n    if seconds < 10:\n        raise SecretSharingException(\"Please keep it cool. 
10 at minimum.\")\n preferences[\"auto_save\"] = seconds\n\n\ndef run_auto_save_for_session():\n SaverDaemon().start()\n\n\ndef exit_handler():\n \"\"\"Saves state at exit.\"\"\"\n logger.debug(\"Application exit caught.\")\n save_state()\n\n\n# register exit handler\natexit.register(exit_handler)\n\n\n# ~~~~~~~~~~~~ Shamir Interface ~~~~~~~~~~~~\n# All UI interfacing functions for the shamir secret sharing algorithm.\n\nclass S_SIZE(IntEnum):\n SMALL = 1,\n NORMAL = 2,\n LARGE = 3\n\n\ndef s_size(secret: bytes):\n logger.debug(\"called\")\n sz = len(secret)\n if 0 < sz < 16:\n return S_SIZE.SMALL\n elif sz == 16:\n return S_SIZE.NORMAL\n elif 0 < sz < 4096:\n return S_SIZE.LARGE\n else:\n return None\n\n\ndef split_secret_into_share_packages(name: str, secret: bytes, threshold: int, number_of_packages: int, holders: List[str]):\n \"\"\"Interface function to split a secret into share packages. Gives back the packages and a\n dictionary containing useful information about the secret\"\"\"\n logger.debug(\"Called with secret: {}\".format(secret))\n\n if not threshold:\n logger.debug(\"default threshold\")\n threshold = number_of_packages\n\n size = s_size(secret)\n\n if size == S_SIZE.SMALL:\n packages = core.split_small_secret_into_share_packages(secret, threshold, number_of_packages)\n elif size == S_SIZE.NORMAL:\n packages = core.split_normal_secret_into_share_packages(secret, threshold, number_of_packages)\n elif size == S_SIZE.LARGE:\n packages = core.split_large_secret_into_share_packages(secret, threshold, number_of_packages)\n else:\n raise SecretPackagingError(\"The secret given has a size that is not supported, \"\n \"it should be between 0 and 4.096 Kb.\", secret)\n\n add_information(\n name,\n {\n \"Holders\": holders,\n SIZE: size.value,\n PARTS: number_of_packages,\n THR: threshold\n }\n )\n\n logger.debug(\"packages created:\\n{}\".format('\\n'.join('\\t{}: {}'.format(*k) for k in enumerate(packages))))\n\n return packages\n\n\ndef recover_secret_from_packages(name: str, packages: List[bytes]) -> bytes:\n \"\"\"Interface function to recover a secret from packages.\"\"\"\n logger.debug(\"called\")\n\n if name not in secrets:\n raise SecretSharingError(\"No information to recover the secret, try from scratch.\")\n\n size = S_SIZE(secrets.get(name).get(SIZE))\n if size == S_SIZE.SMALL:\n secret = core.unpad(core.recover_normal_secret(packages))\n elif size == S_SIZE.NORMAL:\n secret = core.recover_normal_secret(packages)\n elif size == S_SIZE.LARGE:\n secret = core.recover_large_secret(packages)\n else:\n raise SecretPackagingError(\"The secret given has a size that is not supported, \"\n \"it should be between 0 and 4.096 Kb.\", b'')\n\n logger.debug(\"Secret Reconstructed: {}\".format(secret))\n\n return secret\n\n# ~~~~~~~~~~~~ Secret Information ~~~~~~~~~~~~\n# per name contains a dictionary with size, number of pck and threshold\n\n\ndef add_information_from_scratch(name: str, threshold=None, number_of_packages=None, holders=None, size=None):\n if name in secrets:\n raise SecretSharingError(\"Secret with same name already exists.\")\n\n if not size:\n logger.warning(\"This will create problems, please recover from scratch as soon as you have your shares.\")\n else:\n size = S_SIZE(size).value\n\n secrets[name] = {\n NAME: name,\n THR: threshold,\n PARTS: number_of_packages,\n SIZE: size,\n \"Holders\": holders\n }\n\n\ndef add_information(name: str, info: dict) -> None:\n if name in secrets:\n raise SecretSharingError(\"Secret with same name already exists.\")\n 
secrets[name] = info\n\n\ndef get_information(name: str) -> dict:\n    if name not in secrets:\n        raise SecretSharingError(\"Secret with this name does not exist.\")\n    return secrets.get(name)\n\n\ndef clear_information(name: str) -> None:\n    if name not in secrets:\n        raise SecretSharingError(\"Secret with this name does not exist.\")\n    del secrets[name]\n\n\n# ~~~~~~~~~~~~ Share Buffering ~~~~~~~~~~~~\n# Shares that are not immediately sent or restored are buffered in the shareBuffer\n\ndef secret_can_be_recovered(name: str, recover_from_scratch=False) -> bool:\n    \"\"\"True if a buffer contains equal or more shares than its threshold.\"\"\"\n    logger.debug(\"called\")\n    if name in shareBuffer:\n        if recover_from_scratch:\n            return secret_can_be_recovered_from_scratch(name)\n        else:\n            return len(shareBuffer[name]) >= secrets[name][THR]\n    elif name in secrets:\n        return False\n    else:\n        raise MappingError(\"No such secret exists.\", (name,))\n\n\ndef secret_can_be_recovered_from_scratch(name: str):\n    # Todo If Method is called from secret_can_be_recovered with the right parameter: If a secret is recovered\n    # here it raises a RecoveryFromScratchException that can be caught and contains the recovered secret.\n    logger.warning(\"Recovery from scratch can produce unexpected results.\")\n    packages = get_packages_from_share_buffer(name)\n    try:\n        secret = core.recover_normal_secret(packages)\n        raise RecoveryFromScratchException(\"Normal secret was recovered, it could still be padded!\", secret)\n    except RecoveryFromScratchException:\n        raise\n    except Exception:\n        logger.debug(\"Failed to recover as 16 byte secret.\")\n        pass\n    try:\n        secret = core.recover_large_secret(packages)\n        raise RecoveryFromScratchException(\"Large secret was recovered.\", secret)\n    except RecoveryFromScratchException:\n        raise\n    except Exception:\n        logger.debug(\"Failed to recover as large secret.\")\n    return False\n\n\ndef push_packages_into_share_buffer(name: str, packages: List[bytes]) -> None:\n    logger.debug(\"called\")\n    if name in shareBuffer:\n        raise SecretSharingError(\"ShareBuffer already exists. Please add packages individually.\")\n    shareBuffer[name] = [package.decode(ENCODING) for package in packages]\n\n\ndef push_package_into_share_buffer(name: str, package: bytes) -> None:\n    logger.debug(\"called\")\n    if name in shareBuffer:\n        if package not in shareBuffer[name]:\n            shareBuffer[name].append(package.decode(ENCODING))\n        else:\n            raise SecretPackagingError(\"Duplicate package in shareBuffer, name: {}\".format(name), package)\n    else:\n        shareBuffer[name] = [package.decode(ENCODING)]\n\n\ndef get_packages_from_share_buffer(name: str) -> List[bytes]:\n    logger.debug(\"called with {}\".format(name))\n    try:\n        return [package.encode(ENCODING) for package in shareBuffer[name]]\n    except KeyError:\n        raise MappingError(\"No shareBuffer was mapped to this name: {}.\".format(name), (name,))\n\n\ndef delete_packages_from_share_buffer(name: str) -> None:\n    logger.debug(\"called with {}\".format(name))\n    if name not in shareBuffer:\n        raise SecretSharingError(\"BufferEntry with this name does not exist.\")\n    del shareBuffer[name]\n\n\n# ~~~~~~~~~~~~ Sub Event Processing ~~~~~~~~~~~~\n# Packages need to be processed and new sub-events created.\n\ndef process_incoming_sub_event(sub_event_tpl: Tuple[core.E_TYPE, bytes, str]):\n    \"\"\"Processes incoming secret sharing sub-events.\n    Parameters\n    ----------\n    sub_event_tpl: Tuple[core.E_TYPE, bytes, str]\n        A tuple of three; type, package and name. 
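The shareBuffer functions above store binary share packages as strings by decoding them with ISO-8859-1. That works because ISO-8859-1 maps every byte value 0-255 to exactly one character, so the round trip is lossless; a standalone demonstration:

ENCODING = 'ISO-8859-1'

package = bytes(range(256))          # arbitrary binary share package
as_text = package.decode(ENCODING)   # what gets written into the JSON state file
restored = as_text.encode(ENCODING)  # what get_packages_from_share_buffer() does
assert restored == package           # no byte value is lost in the round trip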
Created by core.decrypt_sub_event().\n    \"\"\"\n    t, package, name = sub_event_tpl\n    if t == core.E_TYPE.SHARE:\n        process_incoming_share(name, package)\n    elif t == core.E_TYPE.REQUEST:\n        raise IncomingRequestException(\"Incoming request.\", name)\n    elif t == core.E_TYPE.REPLY:\n        process_incoming_reply(name, package)\n    else:\n        raise SecretSharingError(\"Event Type couldn't be evaluated.\")\n\n\ndef process_incoming_reply(name, package):\n    \"\"\"Called if a package returns to the client.\"\"\"\n    push_package_into_share_buffer(name, package)\n\n\ndef process_incoming_share(name: str, package: bytes) -> None:\n    \"\"\"Called to store a package for a peer.\"\"\"\n    push_package_into_share_buffer(name, package)\n\n\ndef process_incoming_request(private_key: bytes, feed_id: bytes, name: str) -> str:\n    \"\"\"Creates a reply package with the requested share.\"\"\"\n    try:\n        package = shareBuffer[name]\n    except KeyError:\n        raise SecretSharingError(\"Someone requested a non-existent share.\")\n    return core.create_sub_event(core.E_TYPE.REPLY, sk=private_key, pk=feed_id, name=name, shard=package)\n\n\ndef process_outgoing_sub_event(t: core.E_TYPE, private_key: bytes, feed_id: bytes, name: str, password=None, package=None) -> str:\n    \"\"\"Processes outgoing events.\n    Parameters\n    ----------\n    t : E_TYPE\n        Event type.\n    private_key : bytes\n        Secret key of sending client.\n    feed_id: bytes\n        Public key of receiving client.\n    password: str\n        Password used for nested encryption.\n    name: str\n        Name of the secret associated with this message.\n    package: bytes\n        Share package as created by the application.\n    \"\"\"\n\n    if not password and master_password:\n        password = master_password\n    elif not password:\n        raise PasswordError(\"No password or master-password provided\", \"\")\n\n    if t == core.E_TYPE.SHARE:\n        return process_outgoing_share(private_key, feed_id, name, password, package)\n    elif t == core.E_TYPE.REQUEST:\n        return process_outgoing_request(private_key, feed_id, name, password)\n    elif t == core.E_TYPE.REPLY:\n        return process_outgoing_reply(private_key, feed_id, name, package)\n    else:\n        raise SecretSharingError(\"Event Type couldn't be evaluated.\")\n\n\ndef process_outgoing_share(private_key: bytes, feed_id: bytes, name: str, password: str, package: bytes):\n    \"\"\"Called to create a sub-event for sending a share.\"\"\"\n    if not package:\n        raise SecretSharingError(\"No package given.\")\n    return core.create_sub_event(t=core.E_TYPE.SHARE, sk=private_key, pk=feed_id, name=name, shard=package, password=password)\n\n\ndef process_outgoing_request(private_key: bytes, feed_id: bytes, name: str, password: str):\n    \"\"\"Called to create an event calling for a package return.\"\"\"\n    return core.create_sub_event(t=core.E_TYPE.REQUEST, sk=private_key, pk=feed_id, name=name, password=password)\n\n\ndef process_outgoing_reply(private_key: bytes, feed_id: bytes, name: str, package: bytes):\n    \"\"\"Called to create an event replying with a package.\"\"\"\n    if not package:\n        raise SecretSharingError(\"No package given.\")\n    return core.create_sub_event(t=core.E_TYPE.REPLY, sk=private_key, pk=feed_id, name=name, shard=package)\n\n\n# ~~~~~~~~~~~~ Event Processing ~~~~~~~~~~~~\n\ndef handle_incoming_events(events: List[any], private_key: bytes, feed_id: bytes, password: str):\n    \"\"\"Handles incoming raw events.\"\"\"\n    for event in events:\n        try:\n            handle_incoming_event(event, private_key, feed_id, password)\n        except SubEventDecryptionException:\n            logger.warning(\"Skipped event with decryption exception.\")\n            pass\n\n\ndef handle_incoming_event(event: 
any, private_key: bytes, feed_id: bytes, password: str):\n    \"\"\"Handles incoming raw event.\"\"\"\n    sub_event_tpl = core.decrypt_sub_event(event, private_key, feed_id, password)\n    try:\n        process_incoming_sub_event(sub_event_tpl)\n    except IncomingRequestException as e:\n        handle_event_request_exception(e, private_key, feed_id, password)\n\n\ndef handle_event_request_exception(e: IncomingRequestException, private_key: bytes, feed_id: bytes, password: str):\n    \"\"\"Handles a request by auto-pushing reply.\"\"\"\n    name = e.get()\n    if name in secrets:  # prevents people from requesting your packages.\n        raise PackageStealException(\"Somebody tried to grab packages.\", feed_id)\n    elif name not in shareBuffer:\n        raise SecretSharingError(\"Somebody requests packages you don't have: {}\".format(name))\n\n    packages = get_packages_from_share_buffer(name)\n\n    reply_sub_events = [\n        process_outgoing_sub_event(core.E_TYPE.REPLY, private_key, feed_id, name, password, package) for package in packages\n    ]\n\n    handle_outgoing_sub_events(reply_sub_events)\n\n\ndef handle_outgoing_sub_events(sub_events: List[any]):\n    \"\"\"Pushes events into the database.\"\"\"\n    events = [core.create_event(sub_event) for sub_event in sub_events]\n    core.push_events(events)\n\n\ndef handle_new_events(private_key, password=None):\n    \"\"\"Handles new events coming from the database.\"\"\"\n    if not password and master_password:\n        password = master_password\n    elif not password:\n        raise PasswordError(\"No password or master-password provided\", \"\")\n\n    # get all feed ids and seq_no from contacts\n    feed_seq_tuples = []\n\n    for contact in contacts:\n        feed_id = get_contact_feed_id(contact)\n        feed_seq_tuples.append((feed_id, contacts[contact][\"seq_no\"]))\n\n    event_tuples = core.pull_events(feed_seq_tuples)\n    for event, feed_id in event_tuples:\n        try:\n            handle_incoming_event(core.extract_sub_event(event), private_key, feed_id, password)\n        except CryptoError:\n            # means the event wasn't meant for you\n            pass\n        except SecretSharingError as sse:\n            logger.debug(sse.msg())\n\n    for contact in contacts:\n        feed_id = get_contact_feed_id(contact)\n        current_feed_seq = core.current_sequence_number(feed_id)\n        if contacts[contact][\"seq_no\"] != current_feed_seq:\n            update_seq_no(contact, current_feed_seq + 1)\n\n\ndef attemptReconstruction(secret_name):\n    if secrets.get(secret_name).get(\"size\") is not None:\n        if secret_can_be_recovered(secret_name, False):\n            secret = recover_secret_from_packages(secret_name, get_packages_from_share_buffer(secret_name))\n            raise RecoverySuccessException(f\"The secret \\\"{secret_name}\\\" has been recovered\", secret, secret_name)\n    else:\n        try:\n            secret_can_be_recovered(secret_name, True)\n        except RecoveryFromScratchException as rse:\n            raise RecoverySuccessException(\"\", rse.secret, secret_name, rse.message)\n\n\n# ~~~~~~~~~~~~ Contact Interface ~~~~~~~~~~~~\n# To process identifying information from contacts over BacNet\n\ndef create_new_contact(contact: str, feed_id: bytes) -> None:\n    if contact in contacts:\n        raise MappingError(\"Contact already exists.\", (contact, feed_id))\n    contacts[contact] = {'feed_id': feed_id.decode(ENCODING),\n                         'seq_no': 1}\n\n\ndef clear_contact(contact: str) -> None:\n    if contact not in contacts:\n        raise MappingError(\"Contact doesn't exist.\", (contact, b''))\n    del contacts[contact]\n\n\ndef get_contact_feed_id(contact: str) -> bytes:\n    if contact not in contacts:\n        raise MappingError(\"Contact doesn't exist.\", (contact, b''))\n    return 
contacts.get(contact).get(\"feed_id\").encode(ENCODING)\n\n\ndef get_seq_no(contact: str) -> int:\n    if contact not in contacts:\n        raise MappingError(\"Contact doesn't exist.\", (contact, b''))\n    return contacts[contact][\"seq_no\"]\n\n\ndef update_seq_no(contact: str, seq_no: int) -> None:\n    if contact not in contacts:\n        raise MappingError(\"Contact doesn't exist.\", (contact, b''))\n    contacts[contact][\"seq_no\"] = seq_no\n\n\ndef get_contact_name(feed_id: bytes) -> str:\n    for contact in contacts:\n        if contacts.get(contact).get(\"feed_id\").encode(ENCODING) == feed_id:\n            return contact\n    raise MappingError(\"Contact doesn't exist.\", ('', feed_id))\n\n\ndef get_all_contacts_dict() -> dict:\n    contact_dict = {}\n    for contact in contacts:\n        contact_dict[contact] = contacts.get(contact).get(\"feed_id\").encode(ENCODING).hex()\n    return contact_dict\n\n# ~~~~~~~~~~~~ Passwords ~~~~~~~~~~~~\n\n\ndef check_password(password: str) -> bool:\n    if pwd_gate[\"pwd\"]:\n        return bcrypt.checkpw(password.encode(ENCODING), pwd_gate.get(\"pwd\").encode(ENCODING))\n    else:\n        raise PasswordError(\"No password set.\", password)\n\n\ndef pw_is_viable(password: str) -> bool:\n    \"\"\"Returns True if the password is at least 8 characters long and contains a digit, an uppercase letter,\n    a lowercase letter and a special character; raises PasswordError otherwise.\"\"\"\n    logging.debug(\"called\")\n    if not any([\n        not password,\n        len(password) < 8,\n        not any(map(lambda x: x.isdigit(), password)),\n        not any(map(lambda x: x.isupper(), password)),\n        not any(map(lambda x: x.islower(), password)),\n        not any(map(lambda x: x in SPECIAL_CHARACTERS, password)),\n    ]):\n        return True\n    else:\n        raise PasswordError(\"Password should contain at least a digit, an uppercase, a lower case, and special \"\n                            \"characters and should be at least 8 characters in total.\", password)\n\n\ndef change_password(password: str, old_password=None) -> None:\n    \"\"\"Changes the current password, needs old password if there is one.\n    Raises PasswordError if not successful.\"\"\"\n    logging.debug(\"called\")\n    if not pwd_gate:\n        raise SecretSharingError(\"No password gate given.\")\n    if pwd_gate[\"pwd\"]:\n        if not bcrypt.checkpw(old_password.encode(ENCODING), pwd_gate.get(\"pwd\").encode(ENCODING)):\n            raise PasswordError(\"Old password doesn't match.\", old_password)\n        else:\n            if not pw_is_viable(password):\n                raise PasswordError(\"Password not complex enough.\", password)\n            pwd_gate[\"pwd\"] = bcrypt.hashpw(password.encode(ENCODING), bcrypt.gensalt()).decode(ENCODING)\n    else:\n        if not pw_is_viable(password):\n            raise PasswordError(\"Password not complex enough.\", password)\n        pwd_gate[\"pwd\"] = bcrypt.hashpw(password.encode(ENCODING), bcrypt.gensalt()).decode(ENCODING)\n\n\n# ~~~~~~~~~~~~ LOGIN ~~~~~~~~~~~~\n\ndef create_user(username: str, password: str, password_repeat: str) -> None:\n    if not core.rq_handler:\n        raise SecretSharingError(\"No request handler for database connection.\")\n    first_login(password, password_repeat)\n    core.create_user(username)\n\n\ndef first_login(password: str, password_repeat: str) -> None:\n    if all(first_login_aux(password, password_repeat)):\n        change_password(password)\n        global master_password\n        master_password = password\n    else:\n        raise PasswordError(\"Please enter a viable combination.\", password)\n\n\ndef first_login_aux(password: str, password_repeat: str) -> [bool]:\n    \"\"\"Returns if entries are viable.\"\"\"\n    return [\n        pw_is_viable(password),\n        compare_digest(password_repeat, password)\n    ]\n\n\ndef login(password: str) -> None:\n    if not pwd_gate.get(\"pwd\"):\n        raise PasswordError(\"No password set for the application.\", password)\n    if 
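pw_is_viable() above enforces a fixed complexity policy. A standalone restatement of the same rules as a plain predicate (is_viable is an illustrative helper; the original raises PasswordError instead of returning False):

SPECIAL_CHARACTERS = ['.', ',', '-', '=', '[', '@', '_', '!', '#', '$', '%', '^', '&',
                      '*', '(', ')', '<', '>', '?', '/', '\\', '|', '}', '{', '~', ':', ']']

def is_viable(password: str) -> bool:
    # at least 8 chars, with a digit, an uppercase, a lowercase and a special character
    return all([
        bool(password),
        len(password) >= 8,
        any(c.isdigit() for c in password),
        any(c.isupper() for c in password),
        any(c.islower() for c in password),
        any(c in SPECIAL_CHARACTERS for c in password),
    ])

assert is_viable("Str0ng.pw")
assert not is_viable("weakpass")   # no digit, uppercase or special character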
check_password(password):\n        global master_password\n        master_password = password\n    else:\n        raise PasswordError(\"Password incorrect.\", password)\n\n\ndef user_exists() -> bool:\n    if not pwd_gate.get(\"pwd\"):\n        return False\n    else:\n        return True\n", "id": "12437940", "language": "Python", "matching_score": 6.28399133682251, "max_stars_count": 8, "path": "21-fs-ias-lec/06-SecretSharing/BackEnd/actions.py" }, { "content": "\"\"\"\n::Export Module::\nThe core script contains all functions interfacing with the BACNetCore but not directly with other SecretSharing\nrelated scripts. Other groups can import SecretSharing.BackEnd.core to make use of the functionality without getting\ncomplications.\n\"\"\"\n\n\nimport os\nfrom json import JSONDecodeError\nfrom typing import Tuple, List\n\nfrom BackEnd.exceptions import *\n\nfrom Crypto.Protocol.SecretSharing import Shamir\nfrom nacl.public import PublicKey, PrivateKey, Box\nfrom nacl.signing import SigningKey, VerifyKey\nfrom Crypto.Cipher import AES\nfrom Crypto.Hash import SHA512\nimport enum\n\nfrom os import urandom\nimport json\nimport logging\n\n\n# ~~~~~~~~~~~~ Constants ~~~~~~~~~~~~\n\nlogger = logging.getLogger(__name__)\nENCODING = 'ISO-8859-1'\n\n# Prefix length is 1 byte right now. So MAX supported secret length is < 255*16 bytes.\n# BYTE_O Needed to grab and place the prefixes. But it is possible to expand the prefixes\n# for packages, (ctrl f plaintext info & ctrl f int.from_bytes) to support really large packages in theory,\n# it will just take more time to debug than incentives exist at this time.\n\nBYTE_O = \"little\"\n\n\n# ~~~~~~~~~~~~ Request Handler ~~~~~~~~~~~~\n\nrq_handler = None\n\n\n# ~~~~~~~~~~~~ Utility ~~~~~~~~~~~~\n\ndef pad(data) -> bytes:\n    logger.debug(\"called\")\n    padding = AES.block_size - len(data) % AES.block_size\n    data += bytes([padding]) * padding\n    return data\n\n\ndef unpad(data) -> bytes:\n    logger.debug(\"called\")\n    return data[0:-data[-1]]\n\n\n# ~~~~~~~~~~~~ Events ~~~~~~~~~~~~\n\nclass E_TYPE(enum.IntEnum):\n    SHARE = 1\n    REQUEST = 2\n    REPLY = 3\n\n\ndef create_sub_event(t: E_TYPE, sk: bytes, pk: bytes, password=None, shard=None, name=None) -> str:\n    if t == E_TYPE.SHARE:\n        logger.debug(\"Creating SHARE Sub-Event:\")\n        content = {\"TYPE\": t.value, \"SHARE\": pwd_encrypt_btos(password, shard), \"NAME\": pwd_encrypt_name(password, name)}\n    elif t == E_TYPE.REQUEST:\n        logger.debug(\"Creating REQUEST Sub-Event:\")\n        content = {\"TYPE\": t.value, \"SHARE\": \"None\", \"NAME\": pwd_encrypt_name(password, name)}\n    elif t == E_TYPE.REPLY:\n        logger.debug(\"Creating REPLY Sub-Event:\")\n        content = {\"TYPE\": t.value, \"SHARE\": shard.decode(ENCODING), \"NAME\": name}\n    else:\n        raise SecretSharingError(\"Unable to identify event-type.\")\n\n    # random AES cipher\n    key = urandom(16)\n    iv = urandom(16)\n    aes_cipher = AES.new(key, AES.MODE_CBC, iv=iv)\n    # encrypt complete content with aes key\n    encrypted_content = b''.join([iv, aes_cipher.encrypt(pad(json.dumps(content).encode(ENCODING)))])\n    content_dict = {\n        # encrypt aes key with asymmetric encryption\n        \"AES\": Box(SigningKey(sk).to_curve25519_private_key(), VerifyKey(pk).to_curve25519_public_key()).encrypt(key).decode(ENCODING),\n        \"CONTENT\": encrypted_content.decode(ENCODING)\n    }\n    return json.dumps(content_dict)\n\n\ndef decrypt_sub_event(sub_event_string: str, sk: bytes, pk: bytes, password: str) -> Tuple[E_TYPE, bytes, str]:\n    \"\"\"Decrypts an encrypted sub-event string back into a (type, package, name) tuple.\"\"\"\n    sub_event: dict = json.loads(sub_event_string)\n    try:\n        key = 
Box(SigningKey(sk).to_curve25519_private_key(), VerifyKey(pk).to_curve25519_public_key()).decrypt(sub_event.get(\"AES\").encode(ENCODING))\n ciphertext = sub_event.get(\"CONTENT\").encode(ENCODING)\n content = AES.new(key, AES.MODE_CBC, ciphertext[0:16]).decrypt(ciphertext[16:])\n c: dict = json.loads(unpad(content).decode(ENCODING))\n except JSONDecodeError:\n # Trying to decrypt event meant for someone else, means wrong pubkey used to decrypt aes key.\n raise SubEventDecryptionException(\"Can't decrypt sub-event.\", sub_event)\n logger.debug(json.dumps(c, indent=4))\n if E_TYPE(c.get(\"TYPE\")) == E_TYPE.SHARE or E_TYPE(c.get(\"TYPE\")) == E_TYPE.REQUEST:\n return E_TYPE(c.get(\"TYPE\")), c.get(\"SHARE\").encode(ENCODING), c.get(\"NAME\")\n elif E_TYPE(c.get(\"TYPE\")) == E_TYPE.REPLY:\n return E_TYPE(c.get(\"TYPE\")), pwd_decrypt_stob(password, c.get(\"SHARE\")), pwd_decrypt_name(password, c.get(\"NAME\"))\n else:\n raise SecretSharingError(\"Unable to identify event-type.\")\n\n\n# ~~~~~~~~~~~~ Shamir / Packages ~~~~~~~~~~~~\n\n\ndef split_small_secret_into_share_packages(secret: bytes, threshold: int, number_of_packages: int):\n \"\"\"For a secret that is less than 16 bytes. Pads the secret before passing it to split_normal..()\"\"\"\n logger.debug(\"called\")\n return split_normal_secret_into_share_packages(pad(secret), threshold, number_of_packages)\n\n\ndef split_normal_secret_into_share_packages(secret: bytes, threshold: int, number_of_packages: int):\n \"\"\"For a secret that is exactly 16 bytes. No padding required.\"\"\"\n logger.debug(\"called\")\n\n shares = Shamir.split(threshold, number_of_packages, secret, ssss=False)\n\n # plaintext info\n return [\n bytearray(int.to_bytes(index, byteorder=BYTE_O, signed=False, length=1)) +\n bytearray(share) for index, share in shares\n ]\n\n\ndef split_large_secret_into_share_packages(secret: bytes, threshold: int, number_of_packages: int):\n \"\"\"Splits a secret of size 0.016 < s < 4.080 Kb into share packages. To keep it simple the threshold is equal to the\n number of shares created in total. 
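Each returned package is plain bytes laid out as (derived from the code below):\n [number_sub_secrets (1 byte)][threshold (1 byte)][package index (1 byte)],\n followed, per 16-byte sub-secret, by [sub-share length (1 byte)][sub-share bytes].\n A hedged usage sketch (mirrors test_shamir_large in the test suite):\n\n >>> pcks = split_large_secret_into_share_packages(b'0' * 32, 3, 5) # doctest: +SKIP\n >>> recover_large_secret(pcks[0:3]) == b'0' * 32 # doctest: +SKIP\n True\n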
\"\"\"\n logger.debug(\"called\")\n\n if not 0 < len(secret) < 4080:\n raise ValueError(\"Secret size is not supported, expected between 0 and 4.080 Kb.\")\n\n secret_padded = pad(secret) # pad secret so len(s) % 16 == 0\n sub_secrets = [secret_padded[i*16:(i+1)*16] for i in range(len(secret_padded)//16)]\n number_sub_secrets = len(sub_secrets)\n\n buffer = [[] for i in range(0, number_of_packages)]\n\n for i in range(0, len(sub_secrets)): # split and package so none contain 2 shares of same sub secret\n sub_shares = Shamir.split(threshold, number_of_packages, sub_secrets[i], ssss=False)\n\n for j in range(0, number_of_packages):\n sub_idx, sub_share = sub_shares[j]\n\n sub_package = b''.join([\n int.to_bytes(len(sub_share), byteorder=BYTE_O, length=1),\n bytes(sub_share)\n ])\n\n buffer[j].append(sub_package)\n\n return [\n b''.join([ # add plaintext info\n int.to_bytes(number_sub_secrets, byteorder=BYTE_O, length=1),\n int.to_bytes(threshold, byteorder=BYTE_O, length=1),\n int.to_bytes(j, byteorder=BYTE_O, length=1),\n b''.join(buffer[j])\n ]) for j in range(0, number_of_packages)\n ]\n\n\ndef recover_normal_secret(packages):\n \"\"\"Reconstructs a secret original size 16 bytes from packages, padding not removed yet.\"\"\"\n logger.debug(\"called\")\n return Shamir.combine([(int.from_bytes(package[0:1], BYTE_O), package[1:]) for package in packages], ssss=False)\n\n\ndef recover_large_secret(packages):\n \"\"\"Reconstructs a larger secret from packages, padding not removed yet.\"\"\"\n logger.debug(\"called\")\n number_sub_secrets = int.from_bytes(packages[0][0:1], BYTE_O)\n threshold = int.from_bytes(packages[0][1:2], BYTE_O)\n sub_shares = [[] for i in range(number_sub_secrets)]\n\n for i in range(0, threshold): # only iterate over minimum number\n share_id, share = int.from_bytes(packages[i][2:3], byteorder=BYTE_O), \\\n packages[i][3:len(packages[i])]\n\n for j in range(0, number_sub_secrets): # reorder shares according to secret id\n next_length, buffer = int.from_bytes(share[0:1], byteorder=BYTE_O), share[1:]\n sub_shares[j].append((share_id + 1, buffer[0:next_length]))\n share = buffer[next_length:]\n\n _secret = b''\n for i in range(0, len(sub_shares)):\n _secret += Shamir.combine(sub_shares[i], ssss=False) # recombine sub-secrets and concentrate\n\n return unpad(_secret)\n\n\n# ~~~~~~~~~~~~ Password Share Encryption ~~~~~~~~~~~~\n\n# auxiliary for readability\n\ndef pwd_encrypt_stos(password: str, plaintext: str) -> str:\n return pwd_encrypt(password, plaintext.encode(ENCODING)).decode(ENCODING)\n\n\ndef pwd_encrypt_btos(password: str, plaintext: bytes) -> str:\n return pwd_encrypt(password, plaintext).decode(ENCODING)\n\n\ndef pwd_decrypt_stos(password: str, plaintext: str):\n return pwd_decrypt(password, plaintext.encode(ENCODING)).decode(ENCODING)\n\n\ndef pwd_decrypt_stob(password: str, plaintext: str):\n return pwd_decrypt(password, plaintext.encode(ENCODING))\n\n\ndef pwd_encrypt_name(password: str, plain_name: str):\n key = SHA512.new(password.encode(ENCODING)).digest()[0:16]\n cipher = AES.new(key, AES.MODE_ECB)\n data_padded = pad(plain_name.encode(ENCODING))\n ciphertext = cipher.encrypt(data_padded)\n return ciphertext.decode(ENCODING)\n\n\ndef pwd_decrypt_name(password: str, encrypted_name: str):\n key = SHA512.new(password.encode(ENCODING)).digest()[0:16]\n cipher = AES.new(key, AES.MODE_ECB)\n padded_data = cipher.decrypt(encrypted_name.encode(ENCODING))\n name = unpad(padded_data)\n return name.decode(ENCODING)\n\n\ndef pwd_encrypt(password: str, data: bytes) -> 
bytes:\n logging.debug(\"called\")\n key = SHA512.new(password.encode(ENCODING)).digest()[0:16]\n data_padded = pad(data)\n iv = urandom(16)\n cipher = AES.new(key, AES.MODE_CBC, iv)\n return b''.join([iv, cipher.encrypt(data_padded)])\n\n\ndef pwd_decrypt(password: str, data: bytes) -> bytes:\n logging.debug(\"called\")\n key = SHA512.new(password.encode(ENCODING)).digest()[0:16]\n cipher = AES.new(key, AES.MODE_CBC, IV=data[0:16])\n return unpad(cipher.decrypt(data[16:]))\n\n\ndef encrypt_files(password: str, directory: str, files: List[str]) -> None:\n \"\"\"Encrypts the files stored in the FILENAMES variable.\"\"\"\n logging.debug(\"called\")\n for filename in files:\n with open(os.path.join(directory, filename), \"rb+\") as fd:\n data = fd.read()\n encrypted_data = pwd_encrypt(password, data)\n fd.seek(0)\n fd.write(encrypted_data)\n fd.truncate()\n\n\ndef decrypt_files(password: str, directory: str, files: List[str]) -> None:\n \"\"\"Decrypts the files stored in the FILENAMES variable.\"\"\"\n logging.debug(\"called\")\n for filename in files:\n with open(os.path.join(directory, filename), \"rb+\") as fd:\n encrypted_data = fd.read()\n data = pwd_decrypt(password, encrypted_data)\n fd.seek(0)\n fd.write(data)\n fd.truncate()\n\n\n# ~~~~~~~~~~~~ Keys ~~~~~~~~~~~~\n# Interfacing with nacl\n\ndef generate_keys() -> tuple:\n sk = PrivateKey.generate()\n return sk, sk.public_key\n\n# ~~~~~~~~~~~~ Events ~~~~~~~~~~~~\n\n\ndef create_event(sub_event) -> any:\n \"\"\"Creates an event from a sub_event and returns it in appropriate from.\"\"\"\n content = {\n 'messagekey': sub_event,\n 'chat_id': 'None',\n 'timestampkey': 0\n }\n return content\n\n\ndef extract_sub_event(event) -> any:\n \"\"\"Extracts sub_event from event.\"\"\"\n return event[\"messagekey\"]\n\n\n# ~~~~~~~~~~~~ Database Connection and EventFactory ~~~~~~~~~~~~\n# Interfacing functions here\n\n\ndef push_events(events: List[any]) -> None:\n if not rq_handler:\n raise SecretSharingError(\"No request handler for database connection.\")\n rq_handler.insert_new_events(events)\n return\n\n\ndef pull_events(feed_seq_tuples: List[Tuple[bytes, int]]) -> List[Tuple[any, bytes]]:\n if not rq_handler:\n raise SecretSharingError(\"No request handler for database connection.\")\n return rq_handler.pull_new_events(feed_seq_tuples)\n\n\ndef current_sequence_number(feed_id: bytes) -> int:\n if not rq_handler:\n raise SecretSharingError(\"No request handler for database connection.\")\n return rq_handler.db_connection.get_current_seq_no(feed_id)\n\n\ndef create_user(username: str) -> None:\n if not rq_handler:\n raise SecretSharingError(\"No request handler for database connection.\")\n\n rq_handler.create_user(username)\n\n\ndef do_things_with_the_core():\n print(\":,)\")\n\n", "id": "1349103", "language": "Python", "matching_score": 4.143287658691406, "max_stars_count": 8, "path": "21-fs-ias-lec/06-SecretSharing/BackEnd/core.py" }, { "content": "\"\"\"Very bootleg tests to see that the .core keeps working.\"\"\"\n\nimport unittest\nimport sys\nimport logging\nimport json\nfrom inspect import currentframe\n\nfrom BackEnd import core\n\nENCODING = 'ISO-8859-1'\nUNIT_TEST_START = \"\\n\\n{}: START\\n\".format(currentframe().f_code.co_name)\nUNIT_TEST_END = \"\\n{}: END\\n\\n\".format(currentframe().f_code.co_name)\n\nformatter = logging.Formatter('%(msecs)dms line %(lineno)d %(funcName)s %(message)s')\nlogger = logging.getLogger()\nlogger.level = logging.DEBUG\nstream_handler = 
logging.StreamHandler(sys.stdout)\nstream_handler.setFormatter(formatter)\nlogger.addHandler(stream_handler)\n\n\nclass Test_Core_Methods(unittest.TestCase):\n def test_password_encryption_and_decryption_correct_pwd(self):\n \"\"\"Tests core.pwd_encrypt and core.pwd_decrypt\"\"\"\n logger.info(UNIT_TEST_START)\n plaintext = b'\\xb3FI\\xda\\xf2\\xa93Rd\\xe2\\x91w\\x7fB\\xa9\\\\'\n pwd = \"<PASSWORD>*\"\n ciphertext = core.pwd_encrypt(pwd, plaintext)\n plaintext2 = core.pwd_decrypt(pwd, ciphertext)\n logger.debug(\"\\n\" + json.dumps(\n {\n \"Original Plaintext\": plaintext.decode(ENCODING),\n \"Password\": <PASSWORD>,\n \"Ciphertext\": ciphertext.decode(ENCODING),\n \"Plaintext Decrypted\": plaintext2.decode(ENCODING),\n },\n indent=4\n ))\n logger.info(UNIT_TEST_END)\n assert plaintext == plaintext2\n\n def test_password_encryption_and_decryption_incorrect_pwd(self):\n \"\"\"Tests core.pwd_encrypt and core.pwd_decrypt\"\"\"\n logger.info(UNIT_TEST_START)\n plaintext = b'\\xb3FI\\xda\\xf2\\xa93Rd\\xe2\\x91w\\x7fB\\xa9\\\\'\n pwd = \"<PASSWORD>*\"\n pwd2 = \"<PASSWORD>anda*\"\n ciphertext = core.pwd_encrypt(pwd, plaintext)\n plaintext2 = core.pwd_decrypt(pwd2, ciphertext)\n logger.debug(\"\\n\" +\n json.dumps(\n {\n \"Original Plaintext\": plaintext.decode(ENCODING),\n \"Password\": <PASSWORD>,\n \"Inc. Password\": <PASSWORD>,\n \"Ciphertext\": ciphertext.decode(ENCODING),\n \"Plaintext Decrypted\": plaintext2.decode(ENCODING),\n },\n indent=4\n )\n )\n logger.info(UNIT_TEST_END)\n assert plaintext != plaintext2\n \n def test_sub_event_creation_and_decryption_share_event(self):\n \"\"\"Tests create_sub_event and core.decrypt_sub_event\"\"\"\n logger.info(UNIT_TEST_START)\n logger.info(\"This test always fails if test_password_encryption_and_decryption fails.\")\n ska = b'\\xcc\\xf1\\xb4U\\x0cM\\xce\\xbb\\xf0HG\\x10F~\\xcd!\\x7f\\x81K\\xb3n\\xb9z\\xc9\\x1c\\xd8`\\x0e\\xda\\x89\\x1b\\xac'\n pka = b'\\xfc\\xcbr\\x96t[\\xe7\\\\p\\xb8Z\\xf0\\xb6|E2\\x98>\\x00\\xe5\\x02$\\x8c\\x04;\\x15\\xd3+@\\xbf\\x0e\\xb2'\n skb = b'\\xfa4\\xa0\\x9dZ\\xd5\\x8e\\xab@\\xaf\\xcb]\\xff\\x99@M\\x90\\xd5p\\xe5_cM\\xe0)\\xf6{O\\x10t\"\\xf9'\n pkb = b'\\xfd\\xe4&\\x00}`\\xb4T\\x02\\xe0\\x03t\\xed\\xea\\x98\\x03b\\x8a\\x0b\\x83\\xc4\\xca\\xa0\\xb3\\xf5|\\xe7\\xf9\\x03\\x96B\\xe0'\n pwda = \"<PASSWORD>#@Panda*\"\n pwdb = \"<PASSWORD>$\" # unused here\n share = b'\\xb3FI\\xda\\xf2\\xa93Rd\\xe2\\x91w\\x7fB\\xa9\\\\'\n name = \"MySecret\" # name of secret\n\n sub_event = core.create_sub_event(core.E_TYPE.SHARE, ska, pkb, password=<PASSWORD>, shard=share, name=name)\n logger.debug(\"\\n\" + json.dumps(json.loads(sub_event), indent=4))\n sub_event_tpl = core.decrypt_sub_event(sub_event, skb, pka, pwdb)\n logger.debug(\"\\n\" + str(sub_event_tpl))\n\n t, share2, name2 = sub_event_tpl\n share2 = core.pwd_decrypt(pwda, share2)\n name2 = core.pwd_decrypt_name(pwda, name2)\n\n logger.info(UNIT_TEST_END)\n assert share == share2 and name == name2 and t == core.E_TYPE.SHARE\n\n def test_sub_event_creation_and_decryption_request_event(self):\n \"\"\"Tests create_sub_event and core.decrypt_sub_event\"\"\"\n logger.info(UNIT_TEST_START)\n logger.info(\"This test always fails if test_password_encryption_and_decryption fails.\")\n ska = b'\\xcc\\xf1\\xb4U\\x0cM\\xce\\xbb\\xf0HG\\x10F~\\xcd!\\x7f\\x81K\\xb3n\\xb9z\\xc9\\x1c\\xd8`\\x0e\\xda\\x89\\x1b\\xac'\n pka = b'\\xfc\\xcbr\\x96t[\\xe7\\\\p\\xb8Z\\xf0\\xb6|E2\\x98>\\x00\\xe5\\x02$\\x8c\\x04;\\x15\\xd3+@\\xbf\\x0e\\xb2'\n skb = 
b'\\xfa4\\xa0\\x9dZ\\xd5\\x8e\\xab@\\xaf\\xcb]\\xff\\x99@M\\x90\\xd5p\\xe5_cM\\xe0)\\xf6{O\\x10t\"\\xf9'\n pkb = b'\\xfd\\xe4&\\x00}`\\xb4T\\x02\\xe0\\x03t\\xed\\xea\\x98\\x03b\\x8a\\x0b\\x83\\xc4\\xca\\xa0\\xb3\\xf5|\\xe7\\xf9\\x03\\x96B\\xe0'\n pwda = \"m24#@Panda*\"\n pwdb = \"<PASSWORD>@#<PASSWORD>$\" # unused here\n name = \"MySecret\" # name of secret\n\n sub_event = core.create_sub_event(core.E_TYPE.REQUEST, ska, pkb, password=<PASSWORD>, name=name)\n logger.debug(\"\\n\" + json.dumps(json.loads(sub_event), indent=4))\n sub_event_tpl = core.decrypt_sub_event(sub_event, skb, pka, pwdb)\n logger.debug(\"\\n\" + str(sub_event_tpl))\n\n t, _, name2 = sub_event_tpl\n name2 = core.pwd_decrypt_name(pwda, name2)\n\n logger.info(UNIT_TEST_END)\n assert name == name2 and t == core.E_TYPE.REQUEST\n\n def test_sub_event_creation_and_decryption_reply_event(self):\n \"\"\"Tests create_sub_event and core.decrypt_sub_event\"\"\"\n logger.info(UNIT_TEST_START)\n logger.info(\"This test always fails if test_password_encryption_and_decryption fails.\")\n ska = b'\\xcc\\xf1\\xb4U\\x0cM\\xce\\xbb\\xf0HG\\x10F~\\xcd!\\x7f\\x81K\\xb3n\\xb9z\\xc9\\x1c\\xd8`\\x0e\\xda\\x89\\x1b\\xac'\n pka = b'\\xfc\\xcbr\\x96t[\\xe7\\\\p\\xb8Z\\xf0\\xb6|E2\\x98>\\x00\\xe5\\x02$\\x8c\\x04;\\x15\\xd3+@\\xbf\\x0e\\xb2'\n skb = b'\\xfa4\\xa0\\x9dZ\\xd5\\x8e\\xab@\\xaf\\xcb]\\xff\\x99@M\\x90\\xd5p\\xe5_cM\\xe0)\\xf6{O\\x10t\"\\xf9'\n pkb = b'\\xfd\\xe4&\\x00}`\\xb4T\\x02\\xe0\\x03t\\xed\\xea\\x98\\x03b\\x8a\\x0b\\x83\\xc4\\xca\\xa0\\xb3\\xf5|\\xe7\\xf9\\x03\\x96B\\xe0'\n pwda = \"m24#@Panda*\"\n share = b'\\x9a\\x8f\\xe5;\\xc2\\xfd-xG\\x9e\\xb3\\xe7\\xd8h\\xf9%\\xa4\\xea\\x01\\xe2\\xa52?\\x99f\\x92.~\\xd5\\x8b\\xfb\\x0f\\xb5\\xc1{\\x02\\xb9Y\\x92\\xd3\\x83\\x9fnN#\\x1d\\xd9/'\n name = 'Ç,@,óM38\u0019\fY›l\fä\u0010'\n\n sub_event = core.create_sub_event(core.E_TYPE.REPLY, ska, pkb, shard=share, name=name)\n logger.debug(\"\\n\" + json.dumps(json.loads(sub_event), indent=4))\n sub_event_tpl = core.decrypt_sub_event(sub_event, skb, pka, pwda)\n logger.debug(\"\\n\" + str(sub_event_tpl))\n\n t, share2, name2 = sub_event_tpl\n\n logger.info(UNIT_TEST_END)\n assert b'\\xb3FI\\xda\\xf2\\xa93Rd\\xe2\\x91w\\x7fB\\xa9\\\\' == share2 and \"MySecret\" == name2 and t == core.E_TYPE.REPLY\n\n def test_shamir_small(self):\n \"\"\"tests core.split_small_secret_into_share_packages and core.recover_normal_secret\"\"\"\n logger.info(UNIT_TEST_START)\n s = b'\\xb3FI\\xda\\xf2\\xa93Rd\\xe2\\x91w\\\\'\n pck = core.split_small_secret_into_share_packages(s, 3, 5)\n pck.reverse()\n s2 = core.unpad(core.recover_normal_secret(pck[0:3]))\n logger.info(UNIT_TEST_END)\n assert s == s2\n logger.info(UNIT_TEST_END)\n\n def test_shamir_normal(self):\n \"\"\"tests core.split_normal_secret_into_share_packages and core.recover_normal_secret\"\"\"\n logger.info(UNIT_TEST_START)\n s = b'\\xb3FI\\xda\\xf2\\xa93Rd\\xe2\\x91w\\x7fB\\xa9\\\\'\n pck = core.split_normal_secret_into_share_packages(s, 3, 5)\n pck.reverse()\n s2 = core.recover_normal_secret(pck[0:3])\n logger.info(UNIT_TEST_END)\n assert s == s2\n\n def test_shamir_large(self):\n \"\"\"tests core.split_large_secret_into_share_packages and core.recover_large_secret\"\"\"\n logger.info(UNIT_TEST_START)\n s = b'\\xb3FI\\xda\\xf2\\xa93Rd\\xe2\\x91w\\x7fB\\xa9\\x7fB\\xa9\\\\'\n pck = core.split_large_secret_into_share_packages(s, 3, 5)\n pck.reverse()\n s2 = core.recover_large_secret(pck[0:3])\n logger.debug(\"\\n\" + str(s) + \"\\n\" + str(s2) + \"\\n\")\n assert s == s2\n logger.info(UNIT_TEST_END)\n\n\nif 
__name__ == '__main__':\n suite = unittest.suite.TestSuite([Test_Core_Methods()])\n results = unittest.TextTestRunner(verbosity=2).run(suite)", "id": "7420609", "language": "Python", "matching_score": 1.61782968044281, "max_stars_count": 8, "path": "21-fs-ias-lec/06-SecretSharing/tests/testing.py" }, { "content": "\"\"\"\nScript to encrypt and decrypt files. Run with argument:\n\npython3 fencrypt.py -pw <<password>>\n\nIt will not work if you have not set a password for the Secret Sharing application yet / first login.\n\n\"\"\"\n\nimport argparse\nimport bcrypt\nimport sys\nimport os\nfrom BackEnd import core, settings\n\nFILES = [\"preferences\", \"shareBuffer\", \"secrets\", \"contacts\"]\nENCODING = core.ENCODING\n\n\ndef main(password: str) -> None:\n # setup_logging()\n pwd_gate = settings.State(\"pwd_gate\", settings.DATA_DIR, {\"encrypted\": False, \"pwd\": None})\n\n if not pwd_gate.get(\"pwd\"):\n print(\"No password has been set in the application.\")\n exit(0)\n\n if not bcrypt.checkpw(password.encode(ENCODING), pwd_gate.get(\"pwd\").encode(ENCODING)):\n print(\"Password incorrect.\")\n exit(0)\n\n else:\n\n if pwd_gate.get(\"encrypted\"):\n answer = input(\"State is currently encrypted, do you want to decrypt? (y/n) \\n >>\")\n if answer == \"n\":\n print(\"Ok.\")\n exit(0)\n elif answer == \"y\":\n core.decrypt_files(password, settings.DATA_DIR, FILES)\n pwd_gate[\"encrypted\"] = False\n pwd_gate.save()\n print(\"Decrypted.\")\n exit(0)\n else:\n print(\"Excuse me?\")\n main(password)\n\n else:\n answer = input(\"State is currently decrypted, do you want to encrypt? (y/n) \\n >>\")\n if answer == \"n\":\n print(\"Ok.\")\n exit(0)\n elif answer == \"y\":\n core.encrypt_files(password, settings.DATA_DIR, FILES)\n pwd_gate[\"encrypted\"] = True\n pwd_gate.save()\n print(\"Encrypted.\")\n exit(0)\n else:\n print(\"Excuse me?\")\n main(password)\n\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser(description='File Encryption/Decryption')\n parser.add_argument('-pw', '--password', help='Input password.', required=True)\n args = parser.parse_args()\n main(args.password)\n", "id": "2909379", "language": "Python", "matching_score": 0.9125569462776184, "max_stars_count": 8, "path": "21-fs-ias-lec/06-SecretSharing/fencrypt.py" }, { "content": "\n# ~~~~~~~~~~~~ Imports ~~~~~~~~~~~~\nfrom PyQt5.QtCore import Qt\nfrom PyQt5.QtGui import QFont\nfrom PyQt5.QtWidgets import (\n QVBoxLayout,\n QHBoxLayout,\n QWidget,\n QListWidget,\n QListWidgetItem,\n QLabel,\n QPushButton,\n QLineEdit,\n QComboBox,\n QScrollArea,\n QFileDialog,\n QCheckBox\n)\n\n\nfrom BackEnd import actions as act\nfrom FrontEnd.Dialogs import NotificationDialog\n\nimport database_connector\nact.core.rq_handler = database_connector.RequestHandler.Instance()\n\n\n# ~~~~~~~~~~~~ Contact Tab ~~~~~~~~~~~~\n\nclass ContactTab(QWidget):\n\n def __init__(self, parent=None):\n super().__init__(parent)\n # Setup Layout\n self.vbox = QVBoxLayout()\n self.setLayout(self.vbox)\n # Saved Contacts box\n qFont = QFont()\n qFont.setBold(True)\n self.contactsLabel = QLabel()\n self.contactsLabel.setText(\"SAVED CONTACTS\")\n self.contactsLabel.setFont(qFont)\n self.contactsLabel.setAlignment(Qt.AlignCenter)\n self.vbox.addWidget(self.contactsLabel)\n #List of saved contacts\n self.contactEntryList = QListWidget()\n self.contactEntries = []\n self.loadContactEntries()\n self.vbox.addWidget(self.contactEntryList)\n # Possible Contacts box\n self.knownFeedsLabel = QLabel()\n self.knownFeedsLabel.setText(\"KNOWN FEED IDS\")\n 
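# A hedged sketch of the data this tab renders (helper names are from BackEnd.actions;\n # the sample values are made up): act.get_all_contacts_dict() maps each saved contact\n # name to the hex string of its feed id, e.g.\n #\n # {\"alice\": \"fccb7296...\", \"bob\": \"fde42600...\"}\n #\n # loadContactEntries() (called above) renders that dict, while updatePossibleContacts()\n # below lists raw feed ids from act.core.rq_handler.get_feed_ids() that are not yet contacts.\n 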
self.knownFeedsLabel.setFont(qFont)\n self.knownFeedsLabel.setAlignment(Qt.AlignCenter)\n self.vbox.addWidget(self.knownFeedsLabel)\n\n # List of possible contacts\n self.contactAddList = QListWidget()\n self.listElements = []\n self.updatePossibleContacts()\n self.vbox.addWidget(self.contactAddList)\n\n def loadContactEntries(self):\n contact_dict = act.get_all_contacts_dict()\n self.contactEntryList.clear()\n for key in contact_dict:\n feed_id = contact_dict[key]\n contactEntry = ContactEntryWidget(feed_id, key, parent=self)\n myQListWidgetItem = QListWidgetItem(self.contactEntryList)\n myQListWidgetItem.setSizeHint(contactEntry.sizeHint())\n self.contactEntryList.addItem(myQListWidgetItem)\n self.contactEntryList.setItemWidget(myQListWidgetItem, contactEntry)\n self.contactEntries.append(contactEntry)\n\n def updatePossibleContacts(self):\n self.contactAddList.clear()\n contact_feed_ids = act.core.rq_handler.get_feed_ids()\n # remove ids which are already in contacts\n for username in act.get_all_contacts_dict():\n if act.get_contact_feed_id(username) in contact_feed_ids:\n contact_feed_ids.remove(act.get_contact_feed_id(username))\n for feed_id in contact_feed_ids:\n possible_contact = ContactAddWidget(feed_id, parent=self)\n myQListWidgetItem = QListWidgetItem(self.contactAddList)\n myQListWidgetItem.setSizeHint(possible_contact.sizeHint())\n self.contactAddList.addItem(myQListWidgetItem)\n self.contactAddList.setItemWidget(myQListWidgetItem, possible_contact)\n self.listElements.append(possible_contact)\n\n return\n\n# Widget for each contact in ListWidget\nclass ContactEntryWidget(QWidget):\n def __init__(self, feedID, name, parent=None):\n super(ContactEntryWidget, self).__init__(parent)\n # Setup Layout\n self.hbox = QHBoxLayout()\n self.setLayout(self.hbox)\n # Label for feed id\n self.feedLabel = QLabel()\n self.feedLabel.setText(feedID)\n self.hbox.addWidget(self.feedLabel)\n # Label for name associated with feed id\n self.name = QLabel()\n self.name.setText(name)\n self.hbox.addWidget(self.name)\n\n# Widget for each possible contact in ListWidget\nclass ContactAddWidget(QWidget):\n def __init__(self, pubKey, parent=None):\n super(ContactAddWidget, self).__init__(parent)\n self.pub_key = pubKey\n # Setup Layout\n self.hbox = QHBoxLayout()\n self.setLayout(self.hbox)\n # Label for public key of possible contact\n self.requesterLabel = QLabel()\n self.requesterLabel.setText(self.pub_key.hex()[:35]+\"...\")\n self.requesterLabel.setWordWrap(True)\n self.hbox.addWidget(self.requesterLabel)\n # Input for name associated with public key\n self.nameInput = QLineEdit()\n self.nameInput.setPlaceholderText(\"name\")\n self.hbox.addWidget(self.nameInput)\n # Button to add to Contacts\n self.addButton = QPushButton()\n self.addButton.setText(\"add\")\n self.addButton.clicked.connect(self.addToContacts)\n self.hbox.addWidget(self.addButton)\n self.added = False\n\n def addToContacts(self):\n act.contacts.load()\n act.create_new_contact(self.nameInput.text(), self.pub_key)\n act.contacts.save()\n self.added = True\n self.parent().parent().parent().updatePossibleContacts()\n self.parent().parent().parent().loadContactEntries()\n return\n\n\nclass ShareTab(QWidget):\n def __init__(self, parent=None):\n super(ShareTab, self).__init__(parent)\n # setup layout\n self.vbox = QVBoxLayout(self)\n self.setLayout(self.vbox)\n\n self.secret = None\n self.pubInputs = []\n self.contact_usernames = act.get_all_contacts_dict().keys()\n\n # Input for secret name\n self.secretNameInput = QLineEdit()\n 
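# Hedged sketch of the flow this tab drives when \"share\" is clicked (the calls are the\n # same ones used in shareSecret() below; the values and the locals sk/pwd/holder_feed_ids\n # are stand-ins):\n #\n # packages = act.split_secret_into_share_packages(\n # name=\"MySecret\", secret=b\"...\", threshold=2, number_of_packages=3,\n # holders=[fid.hex() for fid in holder_feed_ids])\n # events = [act.process_outgoing_sub_event(t=act.core.E_TYPE.SHARE, private_key=sk,\n # feed_id=fid, password=pwd, name=\"MySecret\", package=pck)\n # for fid, pck in zip(holder_feed_ids, packages)]\n # act.handle_outgoing_sub_events(events)\n 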
self.secretNameInput.setPlaceholderText(\"Name for Secret\")\n self.vbox.addWidget(self.secretNameInput)\n\n # Input for Secret\n self.secretInput = QLineEdit()\n self.secretInput.setPlaceholderText(\"Secret you want to share\")\n self.vbox.addWidget(self.secretInput)\n\n # Input from File:\n self.fileInputWidget = QWidget()\n self.fileInputWidget.setMinimumHeight(75)\n hbox = QHBoxLayout()\n self.fileInputWidget.setLayout(hbox)\n self.pathLabel = QLabel()\n self.pathLabel.setWordWrap(True)\n self.checkbox = QCheckBox()\n self.checkbox.setText(\"secret from file\")\n self.fromFileButton = QPushButton(\"browse\")\n self.fromFileButton.clicked.connect(self.fileButtonHandler)\n self.vbox.addWidget(self.fromFileButton)\n hbox.addWidget(self.pathLabel)\n hbox.addSpacing(20)\n hbox.addWidget(self.checkbox)\n hbox.addWidget(self.fromFileButton)\n hbox.setAlignment(Qt.AlignRight)\n self.vbox.addWidget(self.fileInputWidget)\n\n # Setup ComboBox to set the number of Shards to be created\n self.numShardsLabel = QLabel(\"Number of Shards:\")\n self.vbox.addWidget(self.numShardsLabel)\n self.numShardsInput = QComboBox()\n for i in range(1, 20):\n self.numShardsInput.addItem(str(i))\n self.numShardsInput.currentTextChanged.connect(self.updateWidgets)\n self.vbox.addWidget(self.numShardsInput)\n\n # Setup ComboBox to specify the number of shards required for recovery\n self.numShardsRecLabel = QLabel(\"Number of Shards required for recovery:\")\n self.vbox.addWidget(self.numShardsRecLabel)\n self.numShardsRecInput = QComboBox()\n for i in range(1, 10):\n self.numShardsRecInput.addItem(str(i))\n self.vbox.addWidget(self.numShardsRecInput)\n\n # Label before pub key input section\n self.friendKeyInfoLabel = QLabel(\"Public Keys of friends you want to share the shards with:\")\n self.vbox.addWidget(self.friendKeyInfoLabel)\n\n # Scroll Area for pub key input fields\n self.scroll = QScrollArea(self)\n self.scroll.setWidgetResizable(True)\n self.scroll.setVerticalScrollBarPolicy(Qt.ScrollBarAlwaysOn)\n self.scroll.setHorizontalScrollBarPolicy(Qt.ScrollBarAlwaysOff)\n self.scrollContent = QWidget(self.scroll)\n self.scrollLayout = QVBoxLayout(self.scrollContent)\n self.scrollLayout.setAlignment(Qt.AlignTop)\n self.scrollContent.setLayout(self.scrollLayout)\n self.scroll.setWidget(self.scrollContent)\n self.vbox.addWidget(self.scroll)\n\n self.sharButton = QPushButton(\"share\")\n self.sharButton.clicked.connect(self.shareSecret)\n self.vbox.addWidget(self.sharButton)\n self.updateWidgets(self.numShardsInput.currentText())\n\n def updateContacts(self):\n self.contact_usernames = act.get_all_contacts_dict().keys()\n self.updateInputField()\n\n def updateWidgets(self, newValue):\n self.updatenumShardsRec(int(newValue))\n self.updateInputField()\n\n def updatenumShardsRec(self, newValue):\n self.numShardsRecInput.clear()\n for i in range (1, newValue+1):\n self.numShardsRecInput.addItem(str(i))\n\n def updateInputField(self):\n for j in range(len(self.pubInputs), 0, -1):\n self.scrollLayout.removeWidget(self.pubInputs.pop())\n for i in range(int(self.numShardsInput.currentText())):\n pubInput = QComboBox()\n pubInput.addItem(\"-\")\n for name in self.contact_usernames:\n pubInput.addItem(name)\n self.pubInputs.append(pubInput)\n self.scrollLayout.addWidget(pubInput)\n self.scroll.resize(self.scroll.size())\n\n def shareSecret(self):\n if self.checkbox.isChecked():\n self.secret = self.readFile()\n if not 0 < len(self.secret.encode(act.ENCODING)) < 4080:\n notification = NotificationDialog(\"Secret from file has 
invalid size!\")\n notification.exec_()\n return\n else:\n self.secret = self.secretInput.text()\n empty = False\n recipients = []\n for combobox in self.pubInputs:\n username = combobox.currentText()\n if username == \"-\":\n empty = True\n if combobox.currentText() not in recipients:\n recipients.append(combobox.currentText())\n if not empty:\n secret_name = self.secretNameInput.text()\n secret = self.secret\n num_shares = self.numShardsInput.currentText()\n threshold = self.numShardsRecInput.currentText()\n private_key = act.core.rq_handler.event_factory.get_private_key()\n if not secret_name or not secret or not num_shares or not threshold:\n requiredFieldsDialog = NotificationDialog(\"All Input fields are required!\")\n requiredFieldsDialog.exec_()\n\n holder_feed_ids = list(map(lambda x: act.get_contact_feed_id(x).hex(), recipients))\n\n packages = act.split_secret_into_share_packages(\n name=secret_name,\n secret=secret.encode(act.core.ENCODING),\n threshold=int(threshold),\n number_of_packages=int(num_shares),\n holders=holder_feed_ids\n )\n\n events = []\n counter = 0\n for combobox in self.pubInputs:\n recipient = combobox.currentText()\n events.append(act.process_outgoing_sub_event(t=act.core.E_TYPE.SHARE, private_key=private_key,\n feed_id=act.get_contact_feed_id(recipient), password=<PASSWORD>,\n name=secret_name, package=packages[counter]))\n counter += 1\n act.handle_outgoing_sub_events(events)\n nDialog = NotificationDialog(\"Shards successfully sent!\")\n nDialog.exec_()\n self.resetInputs()\n else:\n nDialog = NotificationDialog(\"Please specify all the receivers\")\n nDialog.exec_()\n return\n\n def resetInputs(self):\n self.numShardsInput.setCurrentIndex(0)\n self.numShardsRecInput.setCurrentIndex(0)\n self.secretNameInput.clear()\n self.secretInput.clear()\n self.pathLabel.clear()\n self.checkbox.setChecked(False)\n return\n\n def fileButtonHandler(self):\n file_dialog = QFileDialog.getOpenFileName()\n self.pathLabel.setText(file_dialog[0])\n self.pathLabel.setMinimumHeight(50)\n\n def readFile(self):\n with open(self.pathLabel.text(), 'rb') as fd:\n secret = fd.read()\n return secret.decode(act.ENCODING)\n# ~~~~~~~~~~~~ Request Tab ~~~~~~~~~~~~\n\nclass RequestTab(QWidget):\n def __init__(self, parent=None):\n super(RequestTab, self).__init__(parent)\n # Setup Layout\n self.vbox = QVBoxLayout(self)\n self.setLayout(self.vbox)\n # fond for boldness\n self.qFont = QFont()\n self.qFont.setBold(True)\n\n # Add automatic recovery part\n self.autoLabel = QLabel()\n self.autoLabel.setText(\"AUTO-REQUEST\")\n self.autoLabel.setFont(self.qFont)\n self.autoLabel.setAlignment(Qt.AlignCenter)\n self.vbox.addWidget(self.autoLabel)\n self.autoRecovery = AutoRequest(self)\n self.vbox.addWidget(self.autoRecovery)\n\n # Add manual recovery part\n self.manualLabel = QLabel()\n self.manualLabel.setText(\"MANUAL REQUEST\")\n self.manualLabel.setFont(self.qFont)\n self.manualLabel.setAlignment(Qt.AlignCenter)\n self.vbox.addWidget(self.manualLabel)\n self.manualRecovery = ManualRequest(self)\n self.vbox.addWidget(self.manualRecovery)\n\n# AutomaticRecovery Widget of the Recovery Tab\nclass AutoRequest(QWidget):\n def __init__(self, parent=None):\n super(AutoRequest, self).__init__(parent)\n # Setup Layout\n self.hbox = QHBoxLayout(self)\n self.setLayout(self.hbox)\n\n # ComboBox containing names of shared secrets\n self.keyNameSelection = QComboBox()\n self.updateComboBox()\n self.hbox.addWidget(self.keyNameSelection)\n # Add Button to initiate recovery\n self.recoverButton = 
QPushButton(\"Request\")\n self.recoverButton.clicked.connect(self.handleAutoRecovery)\n self.hbox.addWidget(self.recoverButton)\n\n def updateComboBox(self):\n self.keyNameSelection.clear()\n for name in act.secrets.keys():\n if name != \"mapping\":\n self.keyNameSelection.addItem(name)\n return\n\n def handleAutoRecovery(self):\n holders_feed_ids = list(map(lambda x: bytes.fromhex(x),\n act.secrets[self.keyNameSelection.currentText()][\"Holders\"]))\n private_key = act.core.rq_handler.event_factory.get_private_key()\n name = self.keyNameSelection.currentText()\n events = []\n for feed_id in holders_feed_ids:\n events.append(act.process_outgoing_sub_event(t=act.core.E_TYPE.REQUEST, private_key=private_key,\n feed_id=feed_id, password=<PASSWORD>, name=name))\n act.handle_outgoing_sub_events(events)\n dialog = NotificationDialog(\"Request has been sent!\")\n dialog.exec_()\n return\n\n def resetInputs(self):\n self.numShardsInput.setCurrentIndex(0)\n self.numShardsRecInput.setCurrentIndex(0)\n self.secretNameInput.clear()\n self.secretInput.clear()\n self.passInput.clear()\n return\n\n# ManualRequest Widget of the Recovery Tab\nclass ManualRequest(QWidget):\n def __init__(self, parent=None):\n super(ManualRequest, self).__init__(parent)\n # Setup Layout\n self.vbox = QVBoxLayout(self)\n self.setLayout(self.vbox)\n\n # Input field for Name of Secret to be recovered\n self.nameInput = QLineEdit()\n self.nameInput.setPlaceholderText(\"Name of Secret\")\n self.vbox.addWidget(self.nameInput)\n\n # create ScrollArea for friends public key input\n self.scroll = QScrollArea(self)\n self.scroll.setWidgetResizable(True)\n\n # Setup Content Widget and Layout for the Scroll Area\n self.scrollContent = QWidget(self.scroll)\n self.scrollLayout = QVBoxLayout(self.scrollContent)\n self.scrollLayout.setAlignment(Qt.AlignTop)\n self.scrollContent.setLayout(self.scrollLayout)\n\n # Setup Widget containing number of shards and threshold\n self.lbl_box_widget = QWidget()\n self.lbl_box_widget.setLayout(QHBoxLayout())\n self.numShardsLabel = QLabel()\n self.numShardsLabel.setText(\"Number of shards\")\n self.lbl_box_widget.layout().addWidget(self.numShardsLabel)\n self.numShardsInput = QComboBox()\n for i in range(1,20):\n self.numShardsInput.addItem(str(i))\n self.numShardsInput.currentTextChanged.connect(self.updateWidgets)\n self.lbl_box_widget.layout().addWidget(self.numShardsInput)\n self.thresholdLabel = QLabel()\n self.thresholdLabel.setText(\"Threshold\")\n self.lbl_box_widget.layout().addWidget(self.thresholdLabel)\n self.threshold = QComboBox()\n for i in range(1, int(self.numShardsInput.currentText())+1):\n self.threshold.addItem(str(i))\n self.lbl_box_widget.layout().addWidget(self.threshold)\n self.lbl_box_widget.layout().setSpacing(20)\n self.sizeLabel = QLabel()\n self.sizeLabel.setText(\"Size\")\n self.lbl_box_widget.layout().addWidget(self.sizeLabel)\n self.sizeBox = QComboBox()\n self.sizeBox.addItems([\"don't know\", \"SMALL\", \"NORMAL\", \"LARGE\"])\n self.lbl_box_widget.layout().addWidget(self.sizeBox)\n self.vbox.addWidget(self.lbl_box_widget)\n\n # get usernames and empty List holding reference to all the input field of pub keys\n self.contact_usernames = act.get_all_contacts_dict()\n self.added_list = []\n self.pubInputs = []\n # Add 1 Input field to start off\n self.addInputField()\n # Add widget to main Layout\n self.scroll.setWidget(self.scrollContent)\n self.vbox.addWidget(self.scroll)\n self.recButton = QPushButton(\"Request\")\n 
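# Hedged sketch of what handleManualRecovery() below does (secret_name, holder_ids and\n # sk stand in for the actual locals): it first registers the scratch metadata, then\n # sends one REQUEST sub-event per holder:\n #\n # act.add_information_from_scratch(name=secret_name, threshold=2, number_of_packages=3,\n # holders=[fid.hex() for fid in holder_ids], size=act.S_SIZE.NORMAL)\n # events = [act.process_outgoing_sub_event(t=act.core.E_TYPE.REQUEST, private_key=sk,\n # feed_id=fid, name=secret_name, password=None)\n # for fid in holder_ids]\n # act.handle_outgoing_sub_events(events)\n 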
self.recButton.clicked.connect(self.handleManualRecovery)\n self.vbox.addWidget(self.recButton)\n\n def updateContacts(self):\n self.contact_usernames = act.get_all_contacts_dict()\n self.updateInputField()\n\n def updateWidgets(self, newValue):\n self.updatenumShardsRec(int(newValue))\n self.updateInputField()\n\n def updatenumShardsRec(self, newValue):\n self.threshold.clear()\n for i in range (1, newValue+1):\n self.threshold.addItem(str(i))\n\n def updateInputField(self):\n for j in range(len(self.pubInputs), 0, -1):\n self.scrollLayout.removeWidget(self.pubInputs.pop())\n for i in range(int(self.numShardsInput.currentText())):\n pubInput = QComboBox()\n pubInput.addItem(\"-\")\n for name in self.contact_usernames:\n pubInput.addItem(name)\n self.pubInputs.append(pubInput)\n self.scrollLayout.addWidget(pubInput)\n self.scroll.resize(self.scroll.size())\n\n def addInputField(self):\n pubInput = QComboBox()\n pubInput.addItem(\"-\")\n for name in self.contact_usernames:\n pubInput.addItem(name)\n self.pubInputs.append(pubInput)\n self.scrollLayout.addWidget(pubInput)\n return\n\n def removeInputField(self):\n if len(self.pubInputs) > 1:\n self.scrollLayout.removeWidget(self.pubInputs.pop())\n return\n\n def handleManualRecovery(self):\n holders_feed_ids = []\n for cbox in self.pubInputs:\n holders_feed_ids.append(act.get_contact_feed_id(cbox.currentText()))\n size = self.sizeBox.currentText()\n if size == \"don't know\":\n size = None\n elif size == \"SMALL\":\n size = act.S_SIZE.SMALL\n elif size == \"NORMAL\":\n size = act.S_SIZE.NORMAL\n else:\n size = act.S_SIZE.LARGE\n act.add_information_from_scratch(name=self.nameInput.text(), threshold=int(self.threshold.currentText()),\n number_of_packages=int(self.numShardsInput.currentText()),\n holders=list(map(lambda x: x.hex(), holders_feed_ids)), size=size)\n # create requests\n events = []\n private_key = act.core.rq_handler.event_factory.get_private_key()\n name = self.nameInput.text()\n\n for feed_id in holders_feed_ids:\n events.append(act.process_outgoing_sub_event(t=act.core.E_TYPE.REQUEST, private_key=private_key, feed_id=feed_id,\n name=name, password=None))\n\n act.handle_outgoing_sub_events(events)\n dialog = NotificationDialog(\"Request has been sent!\")\n dialog.exec_()\n return\n", "id": "5446286", "language": "Python", "matching_score": 4.516773700714111, "max_stars_count": 8, "path": "21-fs-ias-lec/06-SecretSharing/FrontEnd/Tabs.py" }, { "content": "import os\n\nfrom PyQt5.QtWidgets import (\n QDialog,\n QDialogButtonBox,\n QVBoxLayout,\n QLabel,\n QLineEdit,\n QFormLayout\n)\n\nfrom BackEnd import actions as act\n\nclass NotificationDialog(QDialog):\n def __init__(self, message):\n super().__init__()\n self.setWindowTitle(\"Notification\")\n self.buttonBox = QDialogButtonBox(QDialogButtonBox.Ok)\n self.buttonBox.accepted.connect(self.accept)\n self.buttonBox.rejected.connect(self.reject)\n self.layout = QVBoxLayout()\n messageLabel = QLabel(message)\n self.layout.addWidget(messageLabel)\n self.layout.addWidget(self.buttonBox)\n self.setLayout(self.layout)\n\n\nclass RegisterDialog(QDialog):\n def __init__(self, parent=None):\n super(RegisterDialog, self).__init__(parent)\n self.setWindowTitle(\"Registration\")\n registerLayout = QFormLayout()\n self.usernameInput = QLineEdit()\n self.passwordInput = QLineEdit()\n self.passwordInput.setEchoMode(QLineEdit.Password)\n self.secondPasswordInput = QLineEdit()\n self.secondPasswordInput.setEchoMode(QLineEdit.Password)\n registerLayout.addRow(\"Username\", self.usernameInput)\n 
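# Registration is delegated to the backend: act.create_user() validates the password\n # pair via first_login() (pw_is_viable() plus a constant-time compare_digest()) and\n # then calls core.create_user(username). A hedged sketch with sample credentials:\n #\n # try:\n # act.create_user(\"alice\", \"Str0ng!Pwd\", \"Str0ng!Pwd\")\n # except act.PasswordError as pe:\n # print(pe.message)\n 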
registerLayout.addRow(\"Password\", self.passwordInput)\n registerLayout.addRow(\"Passwort repeated\", self.secondPasswordInput)\n self.buttons = QDialogButtonBox(QDialogButtonBox.Ok | QDialogButtonBox.Cancel)\n self.buttons.accepted.connect(self.register)\n self.buttons.rejected.connect(self.reject)\n layout = QVBoxLayout()\n layout.addLayout(registerLayout)\n layout.addWidget(self.buttons)\n self.setLayout(layout)\n\n def register(self):\n try:\n act.create_user(self.usernameInput.text(), self.passwordInput.text(), self.secondPasswordInput.text())\n self.accept()\n except act.PasswordError as pe:\n self.passwordInput.clear()\n self.secondPasswordInput.clear()\n errorDialog = NotificationDialog(pe.message)\n errorDialog.exec_()\n\n\n\n\nclass LoginDialog(QDialog):\n def __init__(self, parent=None):\n super(LoginDialog, self).__init__(parent)\n self.setWindowTitle(\"Login\")\n loginLayout = QFormLayout()\n self.usernameLabel = QLabel()\n self.usernameLabel.setText(act.core.rq_handler.username)\n self.passwordInput = QLineEdit()\n self.passwordInput.setEchoMode(QLineEdit.Password)\n loginLayout.addRow(\"Username: \", self.usernameLabel)\n loginLayout.addRow(\"Password: \", self.passwordInput)\n self.buttons = QDialogButtonBox(QDialogButtonBox.Ok | QDialogButtonBox.Cancel)\n self.buttons.accepted.connect(self.login)\n self.buttons.rejected.connect(self.reject)\n layout = QVBoxLayout()\n layout.addLayout(loginLayout)\n layout.addWidget(self.buttons)\n self.setLayout(layout)\n\n def login(self):\n try:\n act.login(self.passwordInput.text())\n self.accept()\n except act.PasswordError as pe:\n self.passwordInput.clear()\n errorDialog = NotificationDialog(pe.message)\n errorDialog.exec_()\n\n\n\nclass RecoveredDialog(QDialog):\n def __init__(self, secret: bytes, message: str, secret_name: str, scratch_info=None, parent=None):\n super(RecoveredDialog, self).__init__(parent)\n self.secret = secret\n self.secret_name = secret_name\n self.setWindowTitle(\"Recovery\")\n self.buttons = QDialogButtonBox(QDialogButtonBox.Yes | QDialogButtonBox.No)\n self.buttons.accepted.connect(self.save_to_file)\n self.buttons.rejected.connect(self.no_save)\n self.layout = QVBoxLayout()\n messageLabel = QLabel(message)\n if scratch_info is not None:\n messageLabel.setText(f\"scratch recovery of secret with name {secret_name}\")\n scratchLabel = QLabel(scratch_info)\n secretLabel = QLabel(secret.decode(act.ENCODING))\n self.layout.addWidget(messageLabel)\n if scratch_info is not None:\n self.layout.addWidget(scratchLabel)\n self.layout.addSpacing(20)\n self.layout.addWidget(secretLabel)\n saveLabel = QLabel(\"Save Secret to \\\"recovered\\\" directory?\")\n self.layout.addWidget(saveLabel)\n self.layout.addWidget(self.buttons)\n self.setLayout(self.layout)\n\n def save_to_file(self):\n with open(os.path.join(act.settings.RECOVERY_DIR, self.secret_name),'wb+') as fd:\n fd.write(self.secret)\n self.remove_from_sharebuffer()\n self.accept()\n\n\n def no_save(self):\n self.remove_from_sharebuffer()\n self.reject()\n\n def remove_from_sharebuffer(self):\n try:\n act.delete_packages_from_share_buffer(self.secret_name)\n except act.SecretSharingError:\n pass\n", "id": "3831611", "language": "Python", "matching_score": 1.9903391599655151, "max_stars_count": 8, "path": "21-fs-ias-lec/06-SecretSharing/FrontEnd/Dialogs.py" }, { "content": "import sys\nimport os\nimport platform\n\n\ndef setup_logging():\n import logging\n log_formatter = logging.Formatter('%(msecs)dms %(funcName)s %(lineno)d %(message)s')\n log_filename = 
os.path.join(\"secret_sharing.log\")\n log_filemode = \"w\"\n log_level = logging.DEBUG\n\n fh = logging.FileHandler(filename=log_filename, mode=log_filemode)\n fh.setFormatter(log_formatter)\n sh = logging.StreamHandler(sys.stdout)\n sh.setFormatter(log_formatter)\n\n logger = logging.getLogger()\n logger.addHandler(fh)\n logger.addHandler(sh)\n logger.setLevel(log_level)\n\n\nfrom PyQt5.QtWidgets import (\n QApplication,\n QVBoxLayout,\n QWidget,\n QMainWindow,\n QScrollArea,\n QPushButton,\n QTabBar,\n QTabWidget\n)\nfrom PyQt5.QtCore import Qt, QFile, QIODevice, QTextStream\nfrom FrontEnd.Tabs import ContactTab, ShareTab, RequestTab, act\nfrom FrontEnd.Dialogs import LoginDialog, RegisterDialog, RecoveredDialog\nfrom FrontEnd.CustomTab import TabBar, TabWidget\n\nclass MainWindow(QMainWindow):\n def __init__(self):\n super().__init__()\n # Widget holding all the window contents\n self.widget = QWidget()\n # Layout for self.widget\n self.vbox = QVBoxLayout()\n self.widget.setLayout(self.vbox)\n # Setup different Tabs\n # In case we are on macOS we can't use the custom tabs for some reason\n # because it generates a weird tabbar\n if platform.system() == \"Darwin\":\n self.tabs = QTabWidget(self)\n self.tabs.setTabBar(QTabBar())\n else:\n self.tabs = TabWidget(self)\n self.tabs.setTabBar(TabBar())\n self.tabs.setElideMode(Qt.ElideMiddle)\n self.contactTab = ContactTab(self)\n self.tabs.addTab(self.contactTab, \"Contacts\")\n self.shareTab = ShareTab(self)\n self.tabs.addTab(self.shareTab, \"Share\")\n self.recoveryTab = RequestTab(self)\n self.tabs.addTab(self.recoveryTab, \"Request\")\n self.configureUpdates()\n # Add the tabs to the Layout of self.widget\n self.vbox.addWidget(self.tabs)\n # Create Update Button, should pull all the information from ?? 
and update the contents\n self.updateButton = QPushButton(\"Update/Recover\")\n self.updateButton.clicked.connect(self.updateContents)\n self.vbox.addWidget(self.updateButton)\n\n # add a scroll area to deal with resizing of the window\n self.scroll = QScrollArea()\n # we can turn off the vertical scrollbar because there is a minimum size\n self.scroll.setVerticalScrollBarPolicy(Qt.ScrollBarAlwaysOff)\n self.scroll.setWidgetResizable(True)\n # set self.widget in the scroll area\n self.scroll.setWidget(self.widget)\n self.setCentralWidget(self.scroll)\n\n # definition of window size and Title\n self.resize(500, 600)\n self.setMinimumHeight(600)\n self.setWindowTitle(\"Secret Sharing BACnet\")\n self.show()\n return\n #(self, secret: bytes, message: str, secret_name: str, scratch_info=None, parent=None)\n def updateContents(self):\n act.handle_new_events(act.core.rq_handler.event_factory.get_private_key())\n for secret in act.secrets:\n try:\n act.attemptReconstruction(secret)\n except act.RecoverySuccessException as e:\n recovered_dialog = RecoveredDialog(e.secret, e.message, e.secret_name, e.scratch_info, self)\n recovered_dialog.exec_()\n print(\"updated\")\n\n def configureUpdates(self):\n self.tabs.currentChanged.connect(self.shareTab.updateContacts)\n self.tabs.currentChanged.connect(self.recoveryTab.autoRecovery.updateComboBox)\n self.tabs.currentChanged.connect(self.recoveryTab.manualRecovery.updateContacts)\n\n\nif __name__ == \"__main__\":\n app = QApplication(sys.argv)\n # Style from: https://github.com/sommerc/pyqt-stylesheets/blob/master/pyqtcss/src/dark_orange/style.qss\n if act.user_exists():\n login = LoginDialog()\n if not login.exec_():\n sys.exit(-1)\n else:\n register = RegisterDialog()\n if not register.exec_():\n sys.exit(-1)\n\n setup_logging() # connect to backend loggers\n\n qss = \"FrontEnd/styles/style3.qss\"\n stream = QFile(qss)\n stream.open(QIODevice.ReadOnly)\n app.setStyleSheet(QTextStream(stream).readAll())\n stream.close()\n window = MainWindow()\n window.show()\n sys.exit(app.exec_())\n", "id": "8469158", "language": "Python", "matching_score": 2.877553701400757, "max_stars_count": 8, "path": "21-fs-ias-lec/06-SecretSharing/secretsharingUI.py" }, { "content": "from PyQt5.QtWidgets import QTabWidget, QTabBar\n\nclass TabBar(QTabBar):\n def __init__(self, expanded=-1, parent=None):\n super(TabBar, self).__init__(parent)\n self._expanded = expanded\n\n def tabSizeHint(self, index):\n size = super(TabBar, self).tabSizeHint(index)\n if index == self._expanded:\n offset = self.width()\n for index in range(self.count()):\n offset -= super(TabBar, self).tabSizeHint(index).width()\n size.setWidth(max(size.width(), size.width() + offset))\n return size\n\n\nclass TabWidget(QTabWidget):\n def __init__(self, expanded=-1, parent=None):\n super(TabWidget, self).__init__(parent)\n self.setTabBar(TabBar(expanded, self))\n\n def resizeEvent(self, event):\n self.tabBar().setMinimumWidth(self.width())\n super(TabWidget, self).resizeEvent(event)\n\n", "id": "11029191", "language": "Python", "matching_score": 0.023525549098849297, "max_stars_count": 8, "path": "21-fs-ias-lec/06-SecretSharing/FrontEnd/CustomTab.py" }, { "content": "from blocklist import Blocklist\nfrom blocksettings import Blocksettings\n### for testing ###\nimport lib.crypto as crypto\nimport lib.feed as feed\nimport os\nimport time\nimport shutil\nimport cbor2\n\n\ndef testEventFilter():\n if not os.path.isdir(\"testdata\"):\n os.mkdir(\"testdata\")\n os.mkdir(\"testdata/alice\")\n\n # Generate a key pair\n\n 
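# The key file created below is a Python dict literal read back with eval(); a hedged\n # sketch of its shape (hex values made up, any other fields omitted):\n #\n # {\n # \"feed_id\": \"fccb7296...\",\n # \"private\": \"ccf1b455...\"\n # }\n #\n # Only key[\"private\"] and key[\"feed_id\"] are used below to rebuild the HMAC signer.\n 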
alice_digestmod = \"sha256\"\n alice_h, alice_signer = None, None\n\n if not os.path.isfile(\"testdata/alice/alice-secret.key\"):\n # print(\"Create Alice's key pair at data/alice/alice-secret.key\")\n alice_h = crypto.HMAC(alice_digestmod)\n alice_h.create()\n with open(\"testdata/alice/alice-secret.key\", \"w\") as f:\n f.write('{\\n ' + (',\\n '.join(alice_h.as_string().split(','))[1:-1]) + '\\n}')\n alice_signer = crypto.HMAC(alice_digestmod, alice_h.get_private_key())\n\n # print(\"Read Alice's secret key.\")\n with open(\"testdata/alice/alice-secret.key\", 'r') as f:\n key = eval(f.read())\n alice_h = crypto.HMAC(alice_digestmod, key[\"private\"], key[\"feed_id\"])\n alice_signer = crypto.HMAC(alice_digestmod, bytes.fromhex(alice_h.get_private_key()))\n\n # print(\"Create or load Alice's feed at data/alice/alice-feed.pcap\")\n alice_feed = feed.FEED(fname=\"testdata/alice/alice-feed.pcap\", fid=alice_h.get_feed_id(), signer=alice_signer,\n create_if_notexisting=True, digestmod=alice_digestmod)\n\n # print(\"Alice writes to her log.\")\n alice_feed.write([\"bacnet/chat\", time.time(), \"Where is the duck ?\"])\n\n bl = Blocklist('myblocklist.json')\n print(bl.blocklist[\"words\"])\n bs = Blocksettings('myblocklistsettings.json')\n print(\"Blocklevel: \" + str(bs.blocklevel))\n chat = []\n\n for event in alice_feed:\n event = bl.filterEvent(bl, bs, event)\n if event.content()[0] == \"bacnet/chat\":\n chat.append({\"sender\": \"alice\", \"time\": event.content()[1], \"text\": event.content()[2]})\n\n chat.sort(key=lambda msg: msg[\"time\"])\n\n for msg in chat:\n print(msg[\"sender\"] + \":\" + msg[\"text\"])\n shutil.rmtree('testdata')\n\n\ndef testJson(blocklistpath):\n b = Blocklist(blocklistpath)\n print(b.getBlocklist())\n b.blockWord(\"test\")\n print(b.getBlocklist())\n b.unblockWord(\"test\")\n b.writeToFile(\"./myblocklist.json\")\n print(b.getBlocklist())\n\n bs = Blocksettings.getStandartSettings()\n bs.valuesToJson()\n bs.writeToFile(\"./myblocklistsettings.json\")\n\n\nif __name__ == '__main__':\n print(\"------------Test Json------------\")\n testJson(\"./myblocklist.json\")\n print(\"------------Test Eventfilter------------\")\n testEventFilter()\n", "id": "10751886", "language": "Python", "matching_score": 3.9056012630462646, "max_stars_count": 8, "path": "21-fs-ias-lec/groups/12-blocklist/src/test.py" }, { "content": "import sys\nimport time\nimport os\n# add the lib to the module folder\nsys.path.append(\"lib\")\n\nimport crypto\nimport feed\nfrom blocklist import Blocklist\nfrom blocksettings import Blocksettings\n\n\ndef demo():\n if not os.path.isdir(\"data\"):\n os.mkdir(\"data\")\n os.mkdir(\"data/alice\")\n os.mkdir(\"data/bob\")\n os.mkdir(\"data/john\")\n\n ## Alice\n alice_digestmod = \"sha256\"\n alice_h, alice_signer = None, None\n\n # Create Alice's key pair at data/alice/alice-secret.key\n if not os.path.isfile(\"data/alice/alice-secret.key\"):\n alice_h = crypto.HMAC(alice_digestmod)\n alice_h.create()\n with open(\"data/alice/alice-secret.key\", \"w\") as f:\n f.write('{\\n ' + (',\\n '.join(alice_h.as_string().split(','))[1:-1]) + '\\n}')\n alice_signer = crypto.HMAC(alice_digestmod, alice_h.get_private_key())\n\n # Read Alice's secret key\n with open(\"data/alice/alice-secret.key\", 'r') as f:\n key = eval(f.read())\n alice_h = crypto.HMAC(alice_digestmod, key[\"private\"], key[\"feed_id\"])\n alice_signer = crypto.HMAC(alice_digestmod, 
bytes.fromhex(alice_h.get_private_key()))\n\n # Create or load Alice's feed at data/alice/alice-feed.pcap\n alice_feed = feed.FEED(fname=\"data/alice/alice-feed.pcap\", fid=alice_h.get_feed_id(), signer=alice_signer,\n create_if_notexisting=True, digestmod=alice_digestmod)\n ## Bob\n bob_digestmod = \"sha256\"\n bob_h, bob_signer = None, None\n\n # Create Bob's key pair at data/bob/bob-secret.key\n if not os.path.isfile(\"data/bob/bob-secret.key\"):\n bob_h = crypto.HMAC(bob_digestmod)\n bob_h.create()\n with open(\"data/bob/bob-secret.key\", \"w\") as f:\n f.write('{\\n ' + (',\\n '.join(bob_h.as_string().split(','))[1:-1]) + '\\n}')\n\n # Read Bob's secret key\n with open(\"data/bob/bob-secret.key\", 'r') as f:\n key = eval(f.read())\n bob_h = crypto.HMAC(bob_digestmod, key[\"private\"], key[\"feed_id\"])\n bob_signer = crypto.HMAC(bob_digestmod, bytes.fromhex(bob_h.get_private_key()))\n\n # Create or load Bob's feed at data/bob/bob-feed.pcap\n bob_feed = feed.FEED(fname=\"data/bob/bob-feed.pcap\", fid=bob_h.get_feed_id(), signer=bob_signer,\n create_if_notexisting=True, digestmod=bob_digestmod)\n\n ## John\n john_digestmod = \"sha256\"\n john_h, john_signer = None, None\n\n # Create John's key pair at data/john/john-secret.key\n if not os.path.isfile(\"data/john/john-secret.key\"):\n john_h = crypto.HMAC(john_digestmod)\n john_h.create()\n with open(\"data/john/john-secret.key\", \"w\") as f:\n f.write('{\\n ' + (',\\n '.join(john_h.as_string().split(','))[1:-1]) + '\\n}')\n\n # Read John's secret key\n with open(\"data/john/john-secret.key\", 'r') as f:\n key = eval(f.read())\n john_h = crypto.HMAC(john_digestmod, key[\"private\"], key[\"feed_id\"])\n john_signer = crypto.HMAC(john_digestmod, bytes.fromhex(john_h.get_private_key()))\n\n # Create or load John's feed at data/john/john-feed.pcap\n john_feed = feed.FEED(fname=\"data/john/john-feed.pcap\", fid=john_h.get_feed_id(), signer=john_signer,\n create_if_notexisting=True, digestmod=john_digestmod)\n\n # Create Blocklist and add words and authors to block\n\n # Blocklist alice\n blocklist_alice = Blocklist()\n blocklist_alice.loadFromFeed(alice_feed)\n blocklist_alice.blockAuthor(bob_feed.fid) # alice blocks bob\n blocklist_alice.writeToFeed(alice_feed)\n\n # Blocklist bob\n\n blocklist_bob = Blocklist()\n blocklist_bob.loadFromFeed(bob_feed)\n blocklist_bob.blockWord(\"chicken\")\n blocklist_bob.blockWord(\"house\")\n blocklist_bob.writeToFeed(bob_feed)\n\n #Blocklist John\n\n blocklist_john = Blocklist()\n blocklist_john.loadFromFeed(john_feed)\n\n # Create BlocklistSettings\n settings_alice = Blocksettings()\n settings_alice.loadFromFeed(alice_feed)\n settings_alice.changeBlockLevel(Blocksettings.SOFTBLOCK)\n settings_alice.writeToFeed(alice_feed)\n\n settings_bob = Blocksettings()\n settings_bob.loadFromFeed(bob_feed)\n settings_bob.changeBlockLevel(Blocksettings.SOFTBLOCK)\n settings_bob.writeToFeed(bob_feed)\n\n settings_john = Blocksettings()\n settings_john.loadFromFeed(john_feed)\n settings_john.changeSuggBlockSettings(Blocksettings.USESUGGBLOCK)\n\n\n\n # Demo Chat\n alice_feed.write([\"bacnet/chat\", time.time(), \"Hello chicken !\"])\n bob_feed.write([\"bacnet/chat\", time.time(), \"Hello?\"])\n\n blocklist_bob.addBlockSuggestionEvent(bob_feed, alice_feed.fid, blocklist_bob.getBlockedEvents(alice_feed)) # Bob adds a suggestion block to his feed containing events from alice, which contain blocked words\n\n # Filtering (we pretend that Alice and Bob already synced their logs, and Alice and John too)\n\n # on alice's side\n 
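# Expected demo outcome given the blocklists and settings configured above (hedged;\n # the exact censoring format depends on Blocklist.filterString, which is not shown here):\n # - Alice blocked Bob's feed id, so Bob's chat text reaches her as an empty message.\n # - Bob runs SOFTBLOCK with \"chicken\"/\"house\" blocked, so Alice's text arrives censored.\n # - John enabled USESUGGBLOCK, so events Bob flagged as suggestions arrive empty for John.\n 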
bob_to_alice_feed = Blocklist.getFilteredFeed(blocklist_alice, settings_alice, bob_feed) # alice filters bob's feed\n # on bob's side\n alice_to_bob_feed = Blocklist.getFilteredFeed(blocklist_bob, settings_bob, alice_feed) # bob filters alice's feed\n # on john's side\n alice_to_john_feed = blocklist_john.getFilteredFeed(blocklist_john, settings_john, alice_feed, bob_feed) # John considers the block recommendations from bob, because we set USESUGGBLOCK\n # --> John blocks same words as bob because John's blocklist is empty\n\n\n chat = []\n for event in bob_to_alice_feed: # on alice's side\n if event.content()[0] == \"bacnet/chat\":\n chat.append({\"sender\": \"<NAME>\", \"time\": event.content()[1], \"text\": event.content()[2]}) # Empty message, because alice blocked bob\n\n for event in alice_to_bob_feed: # on bob's side\n if event.content()[0] == \"bacnet/chat\":\n chat.append({\"sender\": \"<NAME>\", \"time\": event.content()[1], \"text\": event.content()[2]}) # Censored, because bob has enabled SOFTBLOCK and blocked a word which is in the message of alice\n\n for event in alice_to_john_feed: # on john's side\n if event.content()[0] == \"bacnet/chat\":\n chat.append({\"sender\": \"<NAME>\", \"time\": event.content()[1], \"text\": event.content()[2]}) # Empty message, because John enabled USESUGGBLOCK in his settings. He considers bob's block suggestions\n\n chat.sort(key=lambda msg: msg[\"time\"])\n\n for msg in chat:\n print(msg[\"sender\"] + \": \" + msg[\"text\"])\n\n\nif __name__ == \"__main__\":\n demo()\n", "id": "1978521", "language": "Python", "matching_score": 2.4225521087646484, "max_stars_count": 8, "path": "21-fs-ias-lec/groups/12-blocklist/src/demo.py" }, { "content": "import json\nfrom lib.event import serialize\nfrom lib.feed import FEED\nfrom blocksettings import Blocksettings\nimport time\n\n\nclass Blocklist:\n \"\"\"\n Instance of a blocklist.\n Implements functions to load, change, and save a blocklist.\n \"\"\"\n\n def __init__(self, *args):\n \"\"\"\n Constructor of Blocklist.\n\n Parameters\n ----------\n path : string\n path of json file that is used to initialise.\n \"\"\"\n self.blocklist = None\n\n if len(args) > 0:\n self.loadFromFile(args[0])\n else:\n self.blocklist = {\n \"words\": [],\n \"authors\": []\n }\n\n def getBlockedWords(self):\n \"\"\"\n Returns\n -------\n [string]\n The list of all blocked words.\n \"\"\"\n return self.blocklist[\"words\"]\n\n def getBlockedAuthors(self):\n \"\"\"\n Returns\n -------\n [string]\n The list of public keys of blocked authors.\n \"\"\"\n return self.blocklist[\"authors\"]\n\n def loadFromFile(self, path):\n \"\"\"\n Loads blocklist from json file.\n\n Parameters\n ----------\n path : string\n path of json file that is used to initialise.\n \"\"\"\n file = open(path, \"r\")\n self.blocklist = json.load(file)\n\n def loadFromFeed(self, feed):\n \"\"\"\n Loads blocklist from given feed.\n\n Parameters\n ----------\n feed : FEED\n feed that is used to initialise.\n \"\"\"\n e = None\n for event in feed:\n if event.content()[0] == \"bacnet/blocklist\":\n e = event\n if e:\n self.loadFromEvent(e)\n\n def loadFromEvent(self, event):\n \"\"\"\n Loads blocklist from given feed.\n\n Parameters\n ----------\n feed : FEED\n feed that is used to initialise.\n \"\"\"\n self.blocklist = event.content()[2]\n\n def writeToFile(self, path):\n \"\"\"\n Writes blocklist to json file.\n\n Parameters\n ----------\n path : string\n path where json file should be stored.\n \"\"\"\n outfile = open(path, 'w')\n json.dump(self.blocklist, 
outfile)\n        outfile.close()\n\n    def writeToFeed(self, feed):\n        \"\"\"\n        Writes blocklist to feed.\n\n        Parameters\n        ----------\n        feed : FEED\n            feed where the blocklist should be stored.\n        \"\"\"\n        feed.write([\"bacnet/blocklist\", time.time(), self.blocklist])\n\n    def loadFromString(self, s):\n        \"\"\"\n        Loads a blocklist from a json string.\n\n        Parameters\n        ----------\n        s : string\n            The json string.\n        \"\"\"\n        self.blocklist = json.loads(s)\n\n    def getBlocklist(self):\n        \"\"\"\n        Returns\n        -------\n        jsondata\n            The blocklist saved by this instance.\n        \"\"\"\n        return self.blocklist\n\n    def blockWord(self, word):\n        \"\"\"\n        Adds a word to the blocklist if the word is not already in the list.\n\n        Parameters\n        ----------\n        word : string\n            The word that should be blocked.\n        Returns\n        -------\n        bool\n            true if a change to the list was made.\n        \"\"\"\n        if word.lower() not in self.getBlockedWords():\n            self.blocklist[\"words\"].append(word.lower())\n            return True\n        return False\n\n    def unblockWord(self, word):\n        \"\"\"\n        Removes a word from the blocklist if the word is in the list.\n\n        Parameters\n        ----------\n        word : string\n            The word that should be unblocked.\n        Returns\n        -------\n        bool\n            true if a change to the list was made.\n        \"\"\"\n        if word.lower() in self.getBlockedWords():\n            self.blocklist[\"words\"].remove(word.lower())\n            return True\n        return False\n\n    def blockAuthor(self, authorkey):\n        \"\"\"\n        Adds an authorkey to the blocklist if the author is not in the list.\n\n        Parameters\n        ----------\n        authorkey : string\n            The author's publickey as a string that should be added to the list.\n        Returns\n        -------\n        bool\n            true if a change to the list was made.\n        \"\"\"\n        if authorkey not in self.getBlockedAuthors():\n            self.blocklist[\"authors\"].append(authorkey)\n            return True\n        return False\n\n    def unblockAuthor(self, authorkey):\n        \"\"\"\n        Removes an authorkey from the blocklist if the author is in the list.\n\n        Parameters\n        ----------\n        authorkey : string\n            The author's publickey as a string that should be removed from the list.\n        Returns\n        -------\n        bool\n            true if a change to the list was made.\n        \"\"\"\n        if authorkey in self.getBlockedAuthors():\n            self.blocklist[\"authors\"].remove(authorkey)\n            return True\n        return False\n\n    def combineBlockListsFromFeed(self, own_feed, share_feed):\n        \"\"\"\n        Combines two blocklists from two different feeds.\n\n        Parameters\n        ----------\n        own_feed : FEED\n            The own feed containing a blocklist\n        share_feed : FEED\n            The feed containing a blocklist which will be merged into the own blocklist\n        \"\"\"\n        e = None\n        for event in share_feed:\n            if event.content()[0] == \"bacnet/blocklist\":\n                e = event\n        if e:\n            self.blocklist = self.combineBlockLists(self.blocklist, e.content()[2])\n            self.writeToFeed(own_feed)\n\n    @staticmethod\n    def combineBlockLists(blocklist1, blocklist2):\n        \"\"\"\n        Combines two blocklists.\n\n        Parameters\n        ----------\n        blocklist1 : Blocklist\n            First blocklist.\n        blocklist2 : Blocklist\n            Second blocklist.\n\n        Returns\n        -------\n        Blocklist\n            The new blocklist that combines both lists.\n\n        \"\"\"\n        newBlocklist = blocklist1\n        for w in blocklist2[\"words\"]:\n            if w not in newBlocklist[\"words\"]:\n                newBlocklist[\"words\"].append(w)\n\n        for a in blocklist2[\"authors\"]:\n            if a not in newBlocklist[\"authors\"]:\n                newBlocklist[\"authors\"].append(a)\n\n        return newBlocklist\n
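\n    # Illustrative usage sketch (added comment; the instance names below are hypothetical):\n    #\n    #     bl_own = Blocklist()\n    #     bl_own.blockWord(\"Spam\")      # stored lowercased as \"spam\"\n    #     bl_other = Blocklist()\n    #     bl_other.blockWord(\"ads\")\n    #     merged = Blocklist.combineBlockLists(bl_own.getBlocklist(),\n    #                                          bl_other.getBlocklist())\n    #     # merged[\"words\"] is now the union [\"spam\", \"ads\"]; likewise for \"authors\"\n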
\n    # TODO filter methods could be in own class\n    @staticmethod\n    def filterFeed(blocklist, blocksettings, feed):\n        \"\"\"\n        Applies filters to the content of all events that are included in the given feed,\n        according to the given blocksettings.\n\n        Parameters\n        ----------\n        blocklist : Blocklist\n            The blocklist that is used to filter the event.\n        blocksettings : Blocksettings\n            The settings that are applied to filter the event.\n        feed : FEED\n            The feed that gets filtered.\n\n        Returns\n        -------\n        FEED\n            The filtered feed.\n        \"\"\"\n        if blocksettings.blocklevel == blocksettings.NOBLOCK:\n            return feed\n        feed = list(feed)\n        for i in range(len(feed)):\n            # filter each event's content in place (mirrors getFilteredFeed)\n            newContent = feed[i].content()\n            newContent[2] = Blocklist.filterString(blocklist, blocksettings, newContent[2])\n            feed[i].contbits = serialize(newContent)\n        return feed\n\n    def getBlockedEvents(self, feed):\n        \"\"\"\n        Iterates over all events of the given feed and stores its seq_no if the content of the event includes a blocked word.\n        If the author of the feed is on the blocklist, the sequence numbers of all events are stored.\n\n        Parameters\n        ----------\n        feed : FEED\n            The feed that contains the events.\n\n        Returns\n        -------\n        [int]\n            List of all sequence numbers of events that are blocked.\n        \"\"\"\n        if feed.fid in self.getBlockedAuthors():\n            return list(range(0, len(feed)))\n\n        seqNumList = []\n\n        for event in feed:\n            if \"bacnet/blocklist\" not in event.content()[0] and \"bacnet/blocksettings\" not in event.content()[0]:\n                for blockedWord in self.getBlockedWords():\n                    if blockedWord in str(event.content()[2]).lower():\n                        seqNumList.append(event.seq - 1)\n                        break\n\n        return seqNumList\n\n    @staticmethod\n    def getSuggestedBlockSeqNum(suggblockfeed, feedId):\n        \"\"\"\n        Iterates over all events in the given feed and returns the sequence numbers of all events from feed_id which are marked as \"suggested block\" by the owner\n        of suggblockfeed.\n\n        Parameters\n        ----------\n        suggblockfeed : FEED\n            The feed that contains the events.\n        feedId: int\n            The id of the feed to which the blocked sequence numbers belong.\n        Returns\n        -------\n        [int]\n            List of all sequence numbers of events for which it was recommended to block them\n        \"\"\"\n        seqnumList = []\n        e = None\n        for event in suggblockfeed:\n            if event.content()[0] == \"bacnet/blocklist_suggblock\":\n                e = event\n        if e:\n            if feedId in e.content()[2]:\n                seqnumList += e.content()[2][feedId]\n            return seqnumList\n        return []\n\n    @staticmethod\n    def getFilteredContentFromFeed(blocklist, blocksettings, feed, feed_suggblock, seq_num):\n        \"\"\"\n        Filters the content of the event with the given sequence number according to the given blocksettings and block suggestions from another feed.\n\n        Parameters\n        ----------\n        blocklist : Blocklist\n            The blocklist that is used to filter the event.\n        blocksettings : Blocksettings\n            The settings that are applied to filter the event.\n        feed : FEED\n            The feed that contains the events.\n        feed_suggblock:\n            The feed that should be used to get block suggestions\n        seq_num : int\n            The sequence number of the feed\n        Returns\n        -------\n        string\n            Filtered Content\n        \"\"\"\n        newFeed = list(feed)\n        if \"bacnet/blocklist\" in newFeed[seq_num].content()[0] or \"bacnet/blocksettings\" in newFeed[seq_num].content()[0]:\n            return newFeed[seq_num].content()[2]\n        if feed_suggblock and blocksettings.getSuggBlock() == blocksettings.USESUGGBLOCK:\n            seqnumList = Blocklist.getSuggestedBlockSeqNum(feed_suggblock, feed.fid)\n\n            if seq_num in seqnumList:\n                return \"\"\n        if feed.fid not in blocklist.getBlockedAuthors():\n            return Blocklist.filterString(blocklist, blocksettings, newFeed[seq_num].content()[2])\n        return \"\"\n\n    @staticmethod\n    def getFilteredFeed(blocklist, blocksettings, feed, feed_suggblock=None):\n        \"\"\"\n        Filters the content of all events of the given feed according to the given blocksettings and block suggestions from another feed.\n        The returned list should only be used to 
display the contents and should not be used for any other purpose, otherwise the Scuttlebutt protocol will be violated.\n\n\n Parameters\n ----------\n blocklist : Blocklist\n The blocklist that is used to filter the event.\n blocksettings : Blocksettings\n The settings that are applied to filter the event.\n feed : FEED\n The feed that get's filtered.\n feed_suggblock:\n (optional)\n The feed that should be used to get block suggestions\n Returns\n -------\n [EVENT]\n List of filtered events of the given feed. This list should only be used to display the contents and should not be used for any other purpose, otherwise the Scuttlebutt protocol will be violated.\n \"\"\"\n newFeed = list(feed)\n for i in range(len(feed)):\n newContent = newFeed[i].content()\n newContent[2] = Blocklist.getFilteredContentFromFeed(blocklist, blocksettings, feed, feed_suggblock, i)\n newFeed[i].contbits = serialize(newContent)\n return newFeed\n\n def addBlockSuggestionEvent(self, feed, feed_id, seqNumList):\n \"\"\"\n Updates the suggested Block entries for the given feed_id with the given sequence number list and writes it to the given feed.\n\n\n Parameters\n ----------\n feed : FEED\n The feed where the suggested Block entries should be updated\n feed_id : int\n The feed id to which the sequence numbers belong\n seqNumList : [int]\n the sequence numbers to be added to the list\n \"\"\"\n e = None\n for event in feed:\n if event.content()[0] == \"bacnet/blocklist_suggblock\":\n e = event\n\n suggDict = {feed_id: []}\n if e:\n suggDict = e.content()[2]\n newSeqNum = []\n for seqNum in seqNumList:\n if seqNum not in suggDict[feed_id]:\n newSeqNum.append(seqNum)\n suggDict[feed_id] = suggDict[feed_id] + newSeqNum\n feed.write([\"bacnet/blocklist_suggblock\", time.time(), suggDict])\n\n # Example\n @staticmethod\n def filterString(blocklist, blocksettings, s):\n \"\"\"\n Applies filter to a string.\n\n Parameters\n ----------\n blocklist : Blocklist\n The blocklist that is used to filter the string.\n blocksettings : Blocksettings\n The settings that are applied to filter the string.\n s : str\n The string that get's filtered.\n\n Returns\n -------\n str\n The filtered string.\n \"\"\"\n splitString = str(s).split()\n\n for i in range(len(splitString)):\n for b in blocklist.blocklist[\"words\"]:\n if b.lower() in splitString[i].lower():\n if blocksettings.getBlocklevel() == Blocksettings.SOFTBLOCK:\n splitString[i] = len(splitString[i]) * \"*\"\n break\n elif blocksettings.getBlocklevel() == Blocksettings.HARDBLOCK:\n splitString[i] = \"\"\n break\n return ' '.join(splitString)\n", "id": "12535208", "language": "Python", "matching_score": 4.09968376159668, "max_stars_count": 8, "path": "21-fs-ias-lec/groups/12-blocklist/src/blocklist.py" }, { "content": "import json\nimport time\n\nclass Blocksettings:\n \"\"\"\n Blocksettings stores settings on how to filter content.\n \"\"\"\n # Blocklevels\n NOBLOCK = 0 # No filters are applied\n SOFTBLOCK = 1 # Blocked words are censored, content of blocked authors are deleted\n HARDBLOCK = 2 # Content that contains blocked words or authors will be deleted\n\n # Suggested Block Settings\n DONTUSESUGGBLOCK = 3 # disables suggested block\n USESUGGBLOCK = 4 # uses the suggested block for content\n\n def __init__(self, *args):\n self.settings = {\n \"blocklevel\": Blocksettings.NOBLOCK,\n \"suggblock\": Blocksettings.DONTUSESUGGBLOCK,\n }\n\n if len(args) > 0:\n self.loadFromFile(args[0])\n\n def loadFromFile(self, path):\n \"\"\"\n Loads settings from a json file.\n\n 
Parameters\n ----------\n path : str\n path of the file\n \"\"\"\n file = open(path, \"r\")\n self.settings = json.load(file)\n\n def loadFromFeed(self, feed):\n \"\"\"\n Loads settings from given feed.\n If there are no settings included in the feed, default settings are loaded.\n\n Parameters\n ----------\n feed : FEED\n The feed where the settings are stored\n \"\"\"\n e = None\n for event in feed:\n if event.content()[0] == \"bacnet/blocksettings\":\n e = event\n if e:\n self.settings = e.content()[2]\n\n def writeToFile(self, path):\n \"\"\"\n Writes settings to a json file.\n\n Parameters\n ----------\n path : str\n The path where the file is stored.\n \"\"\"\n outfile = open(path, 'w')\n json.dump(self.settings, outfile)\n\n def writeToFeed(self, feed):\n \"\"\"\n Writes settings to given feed.\n\n Parameters\n ----------\n feed : FEED\n The feed where the settings should be saved.\n \"\"\"\n feed.write([\"bacnet/blocksettings\", time.time(), self.settings])\n\n def getBlocklevel(self):\n \"\"\"\n Returns\n -------\n int\n The current blocklevel.\n \"\"\"\n return self.settings[\"blocklevel\"]\n\n def getSuggBlock(self):\n \"\"\"\n Returns\n -------\n int\n The current \"Suggestion Block\" settings.\n\n \"\"\"\n return self.settings[\"suggblock\"]\n\n def changeBlockLevel(self, newSetting):\n \"\"\"\n Changes Blocklevel.\n\n Parameters\n ----------\n newSetting : int\n New Setting that should be stored.\n\n Returns\n -------\n bool\n false if setting was already the same.\n\n \"\"\"\n if (self.getBlocklevel() == newSetting):\n return False\n\n self.settings[\"blocklevel\"] = newSetting\n return True\n\n def changeSuggBlockSettings(self, newSetting):\n \"\"\"\n Changes Suggested Block settings.\n\n Parameters\n ----------\n newSetting : int\n New Setting that should be stored.\n\n Returns\n -------\n bool\n false if setting was already the same.\n\n \"\"\"\n if (self.getSuggBlock() == newSetting):\n return False\n self.settings[\"suggblock\"] = newSetting\n return True\n\n def defaultSettings(self):\n self.settings = {\n \"blocklevel\": Blocksettings.NOBLOCK,\n \"suggblock\": Blocksettings.DONTUSESUGGBLOCK,\n }\n\n\n\n", "id": "4301939", "language": "Python", "matching_score": 0.1911766678094864, "max_stars_count": 8, "path": "21-fs-ias-lec/groups/12-blocklist/src/blocksettings.py" }, { "content": "#!/usr/bin/env python3\n\n\"\"\" A simple beacon transmitter class to send a 1-byte message (0x0f) in regular time intervals. \"\"\"\n\n# Copyright 2015 Mayer Analytics Ltd.\n#\n# This file is part of pySX127x.\n#\n# pySX127x is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public\n# License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later\n# version.\n#\n# pySX127x is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied\n# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more\n# details.\n#\n# You can be released from the requirements of the license by obtaining a commercial license. Such a license is\n# mandatory as soon as you develop commercial activities involving pySX127x without disclosing the source code of your\n# own applications, or shipping pySX127x with a closed source product.\n#\n# You should have received a copy of the GNU General Public License along with pySX127. 
If not, see\n# <http://www.gnu.org/licenses/>.\n\n# usage:\n# python p2p_send.py -f 433 -b BW125 -s 12\n\nimport sys\nfrom time import sleep\n\nsys.path.insert(0, '..')\nfrom BacNet_SX127x.LoRa import *\nfrom BacNet_SX127x.LoRaArgumentParser import LoRaArgumentParser\nfrom BacNet_SX127x.board_config import BOARD\nimport queue\nimport logging as log\n\n# This class provides the connection to the LoRa gateway SX127x\n# and manages the incoming and outgoing messages.\n# To send a message this class holds a txQueue:\n# every message in this queue will be sent automatically to the LoRa hardware.\n# To receive a message this class holds an rxQueue:\n# every message received will be forwarded to this queue.\n# To register these queues, this class provides a registerRx and a registerTx method.\n# Furthermore this class checks whether the SPI bus is still alive.\nclass LinkLayer(LoRa):\n    tx_counter = 0\n    rx_counter = 0\n    # if true, detailed messages will be printed\n    verbose = False\n    args = None\n    # send and receive queue references\n    rxQueue: queue.Queue = None\n    txQueue: queue.Queue = None\n\n    # init the driver\n    def __init__(self, verbose=False):\n        super(LinkLayer, self).__init__(verbose)\n        # sleep mode before change mapping\n        self.set_mode(MODE.SLEEP)\n        # set mapping to receive\n        self.set_dio_mapping([1, 0, 0, 0, 0, 0])\n        self.verbose = verbose\n\n    # Here happens the magic:\n    # this event fires if there is an rx package\n    def on_rx_done(self):\n\n        # indicate rx\n        BOARD.led_on()\n        log.info(\"Rx Done Enter\")\n        self.clear_irq_flags(RxDone=1)\n\n        self.rx_counter += 1\n\n        # read the bytestream from loradriver\n        byteStream = self.read_payload(nocheck=True)\n        # parse bytestream to string\n        data = ''.join([chr(c) for c in byteStream])\n        log.info(data)\n        # add string to queue\n        self.rxQueue.put(data)\n\n        # reset rx pointer\n        self.set_mode(MODE.SLEEP)\n        self.reset_ptr_rx()\n        BOARD.led_off()\n        # start listening again\n        self.set_mode(MODE.RXCONT)\n\n    # set the led and mode after sending a message\n    def on_tx_done(self):\n        # Send is done\n        BOARD.led_on()\n        log.info(\"Tx Done Enter\")\n        self.set_mode(MODE.STDBY)\n        self.clear_irq_flags(TxDone=1)\n\n        self.tx_counter += 1\n\n    def on_cad_done(self):\n        log.debug(\"\\non_CadDone\")\n        log.debug(self.get_irq_flags())\n\n    def on_rx_timeout(self):\n        log.debug(\"\\non_RxTimeout\")\n        log.debug(self.get_irq_flags())\n\n    def on_valid_header(self):\n        log.debug(\"\\non_ValidHeader\")\n        log.debug(self.get_irq_flags())\n\n    def on_payload_crc_error(self):\n        log.debug(\"\\non_PayloadCrcError\")\n        log.debug(self.get_irq_flags())\n\n    def on_fhss_change_channel(self):\n        log.debug(\"\\non_FhssChangeChannel\")\n        log.debug(self.get_irq_flags())\n
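\n    # Added note: the payload conversion used by on_rx_done and start() is a plain\n    # chr/ord round trip, e.g.\n    #\n    #     byteStream = [int(hex(ord(c)), 0) for c in \"hi\"]   # -> [104, 105]\n    #     text = ''.join([chr(c) for c in byteStream])       # -> \"hi\"\n    #\n    # This is only safe for characters < 256; anything beyond Latin-1 would need a\n    # real encoding such as UTF-8.\n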
\n    # start the driver\n    def start(self):\n        log.info(\"start\")\n        self.tx_counter = 0\n        BOARD.led_on()\n        log.info(self.getModeString())\n        # Force mode Standby\n        self.set_mode(MODE.STDBY)\n\n        # driver main routine:\n        # receives messages until there is something to send\n        while True:\n            sleep(1)\n            # If communication to board is ok\n            if self.spiHearthbeat():\n                log.info(f\"Alive {self.getModeString()}\")\n\n                # There is something to send\n                if not self.txQueue.empty():\n                    # take string object from queue\n                    stringToSend = self.txQueue.get()\n                    # parse string to bytestream\n                    byteStream = [int(hex(ord(c)), 0) for c in stringToSend]\n                    # Set the pin mapping to tx;\n                    # this will only be done if there is something to send\n                    # and only for a short time period.\n                    # Fires the on_tx_done event\n                    self.set_dio_mapping([1, 0, 0, 0, 0, 0])\n                    # write bytestream into loradriver\n                    self.write_payload(byteStream)\n                    # Send\n                    self.set_mode(MODE.TX)\n                    # wait till message is sent\n                    while self.isMode(MODE.TX):\n                        log.info(self.getModeString())\n                        sleep(1)\n\n                else:\n                    # if there is nothing to send, map pins to rx;\n                    # this is the normal state.\n                    # Fires the on_rx_done event\n                    self.set_dio_mapping([0, 0, 0, 0, 0, 0])\n                    if not self.isMode(MODE.RXCONT):\n                        self.set_mode(MODE.SLEEP)\n                        self.reset_ptr_rx()\n                        self.set_mode(MODE.RXCONT)\n            # if there is an error in board communication,\n            # try a restart\n            else:\n                log.warning(\"dead\")\n                BOARD.setup()\n\n    # Setup the hardware config for the LoRa gateway (see LoRaArgumentParser.py)\n    def setup(self, args):\n        self.args = args\n        self.set_pa_config(pa_select=1)\n        # lora.set_rx_crc(True)\n        # lora.set_agc_auto_on(True)\n        # lora.set_lna_gain(GAIN.NOT_USED)\n        # lora.set_coding_rate(CODING_RATE.CR4_6)\n        # lora.set_implicit_header_mode(False)\n        # lora.set_pa_config(max_power=0x04, output_power=0x0F)\n        # lora.set_pa_config(max_power=0x04, output_power=0b01000000)\n        # lora.set_low_data_rate_optim(True)\n        # lora.set_pa_ramp(PA_RAMP.RAMP_50_us)\n        log.info(self)\n        # assert(lora.get_lna()['lna_gain'] == GAIN.NOT_USED)\n        assert (self.get_agc_auto_on() == 1)\n\n    # Register the Rx Queue for incoming messages\n    def registerRx(self, qRx: queue.Queue):\n        self.rxQueue = qRx\n\n    # Register the Tx Queue for outgoing messages\n    def registerTx(self, qTx: queue.Queue):\n        self.txQueue = qTx\n\n    # Shutdown the LoRa board\n    def shutdown(self):\n        self.set_mode(MODE.SLEEP)\n        log.info(self)\n        BOARD.teardown()\n\n    # Dict for mode-to-string translation\n    mode_lookup = {0x80: \"SLEEP\", 0x82: \"STDBY\", 0x81: \"FSTX\", 0x84: \"TX\", 0x83: \"FSRX\", 0x85: \"RXCONT\",\n                   0x86: \"RXSINGLE\", 0x87: \"CAD\", 0x01: \"FSK_STDBY\"}\n\n    # Reads the current operating mode of the LoRa board\n    def getModeString(self):\n        return self.mode_lookup.get(self.get_mode() | 0x80)\n\n    # Checks if the given mode is the current operating mode\n    def isMode(self, mode):\n        return (self.get_mode() | 0x80) == mode\n\n    # Returns whether the board is alive or not\n    def spiHearthbeat(self):\n        return self.isSpiAlive()\n", "id": "5290894", "language": "Python", "matching_score": 3.644106149673462, "max_stars_count": 8, "path": "21-fs-ias-lec/05-LoRaLink/linklayer.py" }, { "content": "from BacNet_SX127x.LoRa import *\nfrom BacNet_SX127x.LoRaArgumentParser import LoRaArgumentParser\nfrom BacNet_SX127x.board_config import BOARD\nfrom linklayer import LinkLayer\nimport threading\nimport queue\nimport logging as log\nfrom transport import TransportLayer\nfrom application import ApplicationLayer\n\n\ndef loggerSetup():\n    log.basicConfig(format='%(levelname)s:%(message)s', level=log.WARNING)\n    log.debug(\"debug is active\")\n    log.info(\"info is active\")\n    log.warning(\"warning is active\")\n    log.error(\"error is active\")\n    log.critical(\"critical is active\")\n\n# LoRa thread\ndef startLoraThread(parser, qRx, qTx):\n    # Setup GPIO\n    BOARD.setup()\n    # Instance of the LoRa driver\n    lora = LinkLayer(verbose=False)\n    args = parser.parse_args(lora)\n    lora.setup(args)\n    lora.registerRx(qRx)\n    lora.registerTx(qTx)\n    lora.start()\n\ndef startTransportLayer(msg_Rx, msg_Tx, qRx, qTx):\n    transLayer = TransportLayer()\n    transLayer.register_msg_Rx(msg_Rx)\n    transLayer.register_msg_Tx(msg_Tx)\n    transLayer.register_qRx(qRx)\n    transLayer.register_qTx(qTx)\n    transLayer.start()\n\n\ndef startApplicationLayer(raw_Rx, raw_Tx):\n    applLayer = ApplicationLayer()\n    applLayer.register_msg_Rx(raw_Rx)\n    applLayer.register_msg_Tx(raw_Tx)\n    applLayer.start()\n\n
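\n# Added sketch (comment only): the layers are glued together purely by queues, so the\n# wiring can be tested without LoRa hardware by replacing the link thread with a\n# loopback that echoes everything from qTx back into qRx:\n#\n#     def loopback(qRx, qTx):\n#         while True:\n#             qRx.put(qTx.get())\n#\n#     threading.Thread(target=loopback, name=\"Link\", args=(qRx, qTx), daemon=True).start()\n#\n# The transport layer then receives its own packets, which exercises the duplicate\n# suppression in TransportLayer.already_received().\n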
\ndef main(parser):\n    # Logger setup, choose log level in the method above\n    loggerSetup()\n    # Create send and receive queues\n    qRx = queue.Queue()\n    qTx = queue.Queue()\n    q_msg_Rx = queue.Queue()\n    q_msg_Tx = queue.Queue()\n    # Setup send/receive link thread\n    loraThread = threading.Thread(target=startLoraThread, name=\"Link\", args=(parser, qRx, qTx))\n    # Setup keyboard input thread (application thread)\n    input_output_Thread = threading.Thread(target=startApplicationLayer, name=\"Application\", args=(q_msg_Rx, q_msg_Tx))\n    # Setup send/receive transport thread\n    transportThread = threading.Thread(target=startTransportLayer, name=\"Transport\", args=(q_msg_Rx, q_msg_Tx, qRx, qTx))\n    input_output_Thread.start()\n    transportThread.start()\n    loraThread.start()\n\n\nif __name__ == \"__main__\":\n    # LoRaLink parser setup\n    # See the BacNet_SX127x package\n    parser = LoRaArgumentParser(\"A simple LoRa beacon\")\n    # Own args\n    parser.add_argument('--single', '-S', dest='single', default=False, action=\"store_true\", help=\"Single transmission\")\n    parser.add_argument('--wait', '-w', dest='wait', default=1, action=\"store\", type=float,\n                        help=\"Waiting time between transmissions (default is 1s)\")\n\n    main(parser)", "id": "12111644", "language": "Python", "matching_score": 1.6209254264831543, "max_stars_count": 8, "path": "21-fs-ias-lec/05-LoRaLink/Root.py" }, { "content": "import queue\nimport hashlib\nimport os\nimport json\nfrom datetime import datetime\n\nLOG = 'log.txt'\n
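\n# Packet format used below (see pack_segment/unpack): a transport-layer packet is a\n# single ';'-separated string\n#\n#     sender;sequence;flag;timestamp;receiver;data\n#\n# e.g. \"C;4;0;12:01:33.024;A;hello\" is packet no. 4 from C to A carrying \"hello\"\n# (timestamp illustrative). Flag 0 = normal message, 1 = log request, 2 = log response.\n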
\nclass TransportLayer():\n    \"\"\"\n    This class is the interface between the application layer and the link layer.\n\n    The transport layer is connected to the application layer via two queues and with two other queues to the link\n    layer. It determines whether the input is handed over to the application layer or to the link layer. It also creates and\n    maintains the append-only log.\n\n    Attributes:\n        msg_rx_queue (Queue): Queue that contains received messages from the application layer.\n        msg_tx_queue (Queue): Queue that contains messages that are handed over to the application layer.\n        rx_queue (Queue): Queue that contains received messages from the link layer.\n        tx_queue (Queue): Queue that contains messages that are handed over to the link layer.\n        identity (str): Hardcoded identity of each device.\n        duplicates (dict): Contains hash values of received and transmitted messages; if a hash is contained, the message is\n            neither transmitted nor handed to the application layer.\n        sequence_number (int): Current sequence number of the personal log.\n    \"\"\"\n\n    msg_rx_counter = 0\n    msg_tx_counter = 0\n    rx_counter = 0\n    tx_counter = 0\n    msg_rx_queue: queue.Queue = None\n    msg_tx_queue: queue.Queue = None\n    rx_queue: queue.Queue = None\n    tx_queue: queue.Queue = None\n\n    def __init__(self):\n        self.identity = 'C'  # or B or C or D or E or F\n        print(\"ID = \" + self.identity)\n        self.duplicates = {}\n        self.initialize_append_only_log()\n        self.sequence_number = self.get_sequence_number()\n\n    def register_msg_Rx(self, msg_qRx: queue.Queue):\n        self.msg_rx_queue = msg_qRx\n\n    def register_msg_Tx(self, msg_qTx: queue.Queue):\n        self.msg_tx_queue = msg_qTx\n\n    def register_qRx(self, qRx: queue.Queue):\n        self.rx_queue = qRx\n\n    def register_qTx(self, qTx: queue.Queue):\n        self.tx_queue = qTx\n\n    def get_sequence_number(self) -> int:\n        \"\"\"\n        Returns the last sequence number occurring in the own log.\n\n        Returns:\n            int: last sequence number in the own log.\n        \"\"\"\n\n        with open(LOG) as json_log:\n            log_file = json.load(json_log)\n            return int(log_file[str(self.identity)][-1]['sequence'])\n\n    def initialize_append_only_log(self):\n        \"\"\"\n        If the log file does not exist it is created and initialized with the device's own log containing an initialization\n        entry.\n\n        Returns:\n            None\n        \"\"\"\n\n        if not os.path.isfile(LOG):\n            log = {str(self.identity): []}\n            log[str(self.identity)].append({\n                'sequence': int(0),\n                'receiver': str(self.identity),\n                'timestamp': '',\n                'data': 'Initialise log of {ident}'.format(ident=self.identity)\n            })\n\n            with open(LOG, 'w') as outfile:\n                json.dump(log, outfile, indent=4)\n\n    def append_to_linklayer(self):\n        \"\"\"\n        Receives a message from the application layer, packs it for the link layer, adds the hash value of the message to the\n        duplicates dictionary, appends the message to the own log and transmits it to the link layer.\n\n        Returns:\n            None\n\n        \"\"\"\n\n        segment = self.msg_tx_queue.get()\n        try:\n            if self.check_destination_segment(segment):\n                # When the message is addressed to yourself, don't use unnecessary\n                # bandwidth; hand it back to the application layer directly.\n                msg = self.unpack_segment(segment)\n                msg_done = str(msg[0] + \";\" + msg[1])\n                self.msg_rx_queue.put(msg_done)\n            else:\n                packed_segment = self.pack_segment(segment)\n                hash_value = self.calculate_md5(packed_segment)\n                self.duplicates[str(hash_value)] = 'true'\n                self.append_to_root_log(packed_segment)\n                self.tx_queue.put(packed_segment)\n        except Exception:\n            print(\"Error while transmitting msg (wrong message format)\")\n            return\n\n    def append_to_root_log(self, msg: str) -> None:\n        \"\"\"\n        Appends a message to the personal log after it was received from the application layer, before it gets transmitted to\n        the link layer.\n\n        Args:\n            msg (str): Message with full transport layer header.\n\n        Returns:\n            None\n        \"\"\"\n\n        unpacked = self.unpack(msg)\n        with open(LOG) as json_log:\n            log_file = json.load(json_log)\n            log_file[str(self.identity)].append({\n                'sequence': unpacked[1],\n                'receiver': unpacked[4],\n                'timestamp': unpacked[3],\n                'data': unpacked[5]\n            })\n        with open(LOG, 'w') as outfile:\n            json.dump(log_file, outfile, indent=4)\n
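\n    # Added sketch (comment only; values illustrative): after a few messages, log.txt\n    # holds one append-only list per known identity, e.g.\n    #\n    #     {\n    #         \"C\": [\n    #             {\"sequence\": 0, \"receiver\": \"C\", \"timestamp\": \"\", \"data\": \"Initialise log of C\"},\n    #             {\"sequence\": 1, \"receiver\": \"A\", \"timestamp\": \"12:01:33.024\", \"data\": \"hello\"}\n    #         ]\n    #     }\n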
\n    # if flag 0 = normal message\n    def append_to_log(self, unpacked_msg: tuple) -> None:\n        \"\"\"\n        Appends a received message to the general log. If the message is from an unknown sender, a new dictionary\n        key is created for the sender with an initialization entry. If the sequence numbers are consecutive, the message\n        is added to the log; otherwise a log request is sent.\n\n        Args:\n            unpacked_msg (tuple): Tuple containing all components of a transport layer packet.\n\n        Returns:\n            None\n        \"\"\"\n\n        with open(LOG) as json_log:\n            log_file = json.load(json_log)\n        # unknown sender: create a new log list with an initialization entry\n        if not str(unpacked_msg[0]) in log_file:\n            log_file[str(unpacked_msg[0])] = []\n            log_file[str(unpacked_msg[0])].append({\n                'sequence': int(0),\n                'receiver': str(unpacked_msg[0]),\n                'timestamp': '',\n                'data': 'Initialise log of {ident}'.format(ident=str(unpacked_msg[0]))\n            })\n            with open(LOG, 'w') as outfile:\n                json.dump(log_file, outfile, indent=4)\n        # log responses (flag 2) carry a single requested log entry\n        if unpacked_msg[2] == \"2\":\n            entry = self.unpack_request_response(unpacked_msg)\n            if self.check_sequence_num_order_int(entry[0], unpacked_msg, log_file):\n                log_file[str(unpacked_msg[0])].append({\n                    'sequence': entry[0],\n                    'receiver': entry[1],\n                    'timestamp': entry[2],\n                    'data': entry[3]\n                })\n                with open(LOG, 'w') as outfile:\n                    json.dump(log_file, outfile, indent=4)\n        if self.check_sequence_num_order(unpacked_msg, log_file):\n            log_file[str(unpacked_msg[0])].append({\n                'sequence': unpacked_msg[1],\n                'receiver': unpacked_msg[4],\n                'timestamp': unpacked_msg[3],\n                'data': unpacked_msg[5]\n            })\n            with open(LOG, 'w') as outfile:\n                json.dump(log_file, outfile, indent=4)\n        else:\n            log_request = self.create_log_request(unpacked_msg[0])\n            self.tx_queue.put(log_request)\n\n    @staticmethod\n    def check_sequence_num_order(unpacked_msg: tuple, log_file: dict) -> bool:\n        \"\"\"\n        Checks if the received message is consecutive to the last entry in the log of the sender.\n\n        Args:\n            unpacked_msg (tuple): Tuple containing all components of a transport layer packet.\n            log_file (dict): Dictionary containing all logs of known senders.\n\n        Returns:\n            boolean\n        \"\"\"\n\n        return int(log_file[str(unpacked_msg[0])][-1]['sequence']) == (int(unpacked_msg[1]) - 1)\n\n    @staticmethod\n    def check_sequence_num_order_int(sequence: int, unpacked_msg: tuple, log_file: dict) -> bool:\n        \"\"\"\n        Checks if the received message is consecutive to the last entry of the log, used for response messages.\n\n        Args:\n            sequence (int): Sequence number of the response.\n            unpacked_msg (tuple): 
Tuple containing all components of a transport layer packet.\n log_file (dict): Dictionary containing all logs of known sender.\n\n \"\"\"\n\n return int(log_file[str(unpacked_msg[0])][-1]['sequence']) == (int(sequence) - 1)\n\n def append_to_application_layer(self):\n \"\"\"\n Checks if message received from link layer is at it's destination and hasn't been received yet. If so it is\n unpacked and passed to the application layer. If not at the right destination and it hasn't been received yet it\n is sent back to the link layer.\n\n Returns:\n None\n \"\"\"\n\n msg = self.rx_queue.get()\n try:\n unpacked = self.unpack(msg)\n hashval = self.calculate_md5(msg)\n if not self.already_received(hashval):\n if unpacked[2] == \"2\":\n self.append_to_log(unpacked)\n if self.check_if_request(unpacked) and unpacked[4] == self.identity:\n self.create_request_response(unpacked)\n else:\n self.append_to_log(unpacked)\n if self.check_destination(msg):\n if not unpacked[2] == \"2\":\n unpacked_msg = unpacked[0] + \";\" + unpacked[5]\n self.msg_rx_queue.put(unpacked_msg)\n else:\n self.tx_queue.put(msg)\n except Exception:\n print(\"Error while parsing message on transport layer\")\n return\n\n # ???\n def get_identity(self):\n return self.identity\n\n def pack_segment(self, msg: str) -> str:\n \"\"\"\n Finish implementation first\n \"\"\"\n\n flag = 0\n self.sequence_number += 1\n segment = self.identity + ';' + str(self.sequence_number) + \";\" + str(flag) + \";\" + msg\n return segment\n\n @staticmethod\n def unpack(msg: str) -> tuple:\n \"\"\"\n Unpacks a message into its components.\n\n Args:\n msg (str): Message with full transport layer header.\n\n Returns:\n tuple: Tuple containing all components of a transport layer packet.\n \"\"\"\n\n sender = msg.split(\";\")[0]\n seq_num = msg.split(\";\")[1]\n flag = msg.split(\";\")[2]\n time = msg.split(\";\")[3]\n receiver = msg.split(\";\")[4]\n if flag == \"2\":\n message = msg.split(\";\")[5:]\n else:\n message = msg.split(\";\")[5]\n return sender, seq_num, flag, time, receiver, message\n\n # Checks if device is intended receiver\n def check_destination(self, msg: str) -> bool:\n \"\"\"\n Checks if the packet has reached its destination.\n\n Args:\n msg (str): Message with full transport layer header.\n\n Returns:\n boolean\n \"\"\"\n\n unpacked = self.unpack(msg)\n return unpacked[4] == self.identity\n\n def check_destination_segment(self, msg: str) -> bool:\n \"\"\"\n Checks if the message was addressed to the sender itself.\n\n Args:\n msg (str): Message with full transport layer header.\n\n Returns:\n boolean\n \"\"\"\n\n unpacked = self.unpack_segment(msg)\n return unpacked[0] == self.identity\n\n @staticmethod\n def unpack_segment(msg: str) -> tuple:\n \"\"\"\n Unpack method for segment in case the message was addressed to the sender himself.\n\n Args:\n msg (str): Message with full transport layer header.\n\n Returns:\n tuple: Tuple containing all components of a application layer packet.\n \"\"\"\n\n receiver = msg.split(\";\")[1]\n data = msg.split(\";\")[2]\n return receiver, data\n\n def already_received(self, hash_val: str) -> bool:\n \"\"\"\n Checks if the message has already been received before (used to avoid echoes). 
If not received before it is\n        added to the duplicates dictionary.\n\n        Args:\n            hash_val (str): Hash value of the message received.\n\n        Returns:\n            boolean\n        \"\"\"\n\n        if str(hash_val) in self.duplicates:\n            return True\n        else:\n            self.duplicates[str(hash_val)] = 'true'\n            return False\n\n    def calculate_md5(self, msg: str) -> str:\n        \"\"\"\n        Calculates a hash value with the timestamp and data of the message as input. Only used to determine if a message has\n        already been received.\n\n        Args:\n            msg (str): Message with full transport layer header.\n\n        Returns:\n            str: Hash value created with timestamp and data of the message\n        \"\"\"\n\n        unpacked = self.unpack(msg)\n        if unpacked[2] == \"2\":\n            string_to_hash = str(unpacked[5][2] + unpacked[5][3] + unpacked[2])\n        else:\n            string_to_hash = str(unpacked[3] + unpacked[5])\n        hashed = hashlib.md5(string_to_hash.encode())\n        return hashed.hexdigest()\n\n    def create_log_request(self, destination: str) -> str:\n        \"\"\"\n        Builds a log request (flag 1) asking the given destination for the entries after the last sequence number\n        known locally.\n        \"\"\"\n        now = str(datetime.now().time())\n        with open(LOG) as json_log:\n            log_file = json.load(json_log)\n            last_sequence = log_file[str(destination)][-1][\"sequence\"]\n        request_msg = self.identity + \";\" + \"-1\" + \";\" + \"1\" + \";\" + now + \";\" + str(destination) + \";\" \\\n                      + str(last_sequence)\n        return request_msg\n\n    @staticmethod\n    def check_if_request(unpacked_msg: tuple) -> bool:\n        \"\"\"\n        Checks if the flag corresponds to a request flag.\n\n        Args:\n            unpacked_msg (tuple): Tuple containing all components of a transport layer packet.\n\n        Returns:\n            boolean\n\n        \"\"\"\n\n        return unpacked_msg[2] == \"1\"\n\n    def create_request_response(self, request_msg: tuple) -> None:\n        \"\"\"\n        Creates a response message to the requested entry of the log. Calculates the negative of the difference of the\n        requested sequence and the current sequence and uses this difference to address the right position in the log.\n        From there the needed data is collected and packed into a response message.\n\n        Args:\n            request_msg (tuple): Tuple containing all components of a request message.\n\n        Returns:\n            None\n        \"\"\"\n\n        now = str(datetime.now())\n        with open(LOG) as json_log:\n            log_file = json.load(json_log)\n            last_sequence = int(log_file[str(self.identity)][-1][\"sequence\"])\n            last_sequence_request = int(request_msg[5])\n            difference = (-1) * abs(last_sequence - last_sequence_request)\n            if difference == 0:\n                return\n            else:\n                entry_data = str(log_file[str(self.identity)][difference][\"sequence\"]) + \";\" + \\\n                             str(log_file[str(self.identity)][difference][\"receiver\"]) + \";\" + \\\n                             str(log_file[str(self.identity)][difference][\"timestamp\"]) + \";\" + \\\n                             str(log_file[str(self.identity)][difference][\"data\"])\n                response = str(self.identity) + \";\" + \"-1\" + \";\" + \"2\" + \";\" + str(now) + \";\" + str(request_msg[0]) + \\\n                           \";\" + str(entry_data)\n                self.tx_queue.put(response)\n\n    @staticmethod\n    def unpack_request_response(entry: tuple) -> tuple:\n        \"\"\"\n        Gathers the response data from the whole packet.\n\n        Args:\n            entry (tuple): Tuple containing all components of a response message.\n\n        Returns:\n            tuple: Tuple containing the relevant information needed for the log.\n        \"\"\"\n\n        info = entry[5]\n        return info\n\n    def start(self):\n        while True:\n            if not self.msg_tx_queue.empty():\n                self.append_to_linklayer()\n\n            if not self.rx_queue.empty():\n                self.append_to_application_layer()\n", "id": "9841516", "language": "Python", "matching_score": 2.9047865867614746, "max_stars_count": 8, "path": "21-fs-ias-lec/05-LoRaLink/transport.py" }, { "content": "import queue\nimport threading\nfrom datetime import datetime\n\nclass ApplicationLayer():\n    \"\"\"\n    This class describes the input and output on the user side.\n\n    The application layer is connected to the transport layer via two queues. It is responsible for taking user inputs\n    and passing them to the transport layer. It is also responsible for printing messages supplied by the transport layer.\n    It checks if the user input has the correct format and length.\n\n    Attributes:\n        msg_rx_queue (Queue): Queue that contains received messages.\n        msg_tx_queue (Queue): Queue that contains output messages.\n    \"\"\"\n\n    msg_rx_counter = 0\n    msg_tx_counter = 0\n    msg_rx_queue: queue.Queue = None\n    msg_tx_queue: queue.Queue = None\n\n    def __init__(self):\n        self.log = {}\n\n    def read_message(self):\n        \"\"\"\n        Reads input from stdin and puts it into msg_tx_queue to send it to the transport layer.\n\n        Returns:\n            None\n        \"\"\"\n\n        inputString = input(\">> \")\n        if self.check_msg_format(inputString) and self.check_data_len(inputString):\n            packed_msg = self.pack_msg(inputString)\n            # append to log\n            self.msg_tx_queue.put(packed_msg)\n        else:\n            return\n\n    def start_input_thread(self):\n        \"\"\"\n        Starts thread for reading input from stdin.\n\n        Returns:\n            None\n        \"\"\"\n\n        while True:\n            self.read_message()\n\n    def print_msg(self):\n        \"\"\"\n        Takes a message from msg_rx_queue and prints it to stdout.\n\n        Returns:\n            None\n        \"\"\"\n\n        msg = self.msg_rx_queue.get()\n        unpacked_msg = self.unpack_msg(msg)\n        print(unpacked_msg)\n\n    def start_output_thread(self):\n        \"\"\"\n        Starts thread for printing messages.\n\n        Returns:\n            None\n        \"\"\"\n\n        while True:\n            if not self.msg_rx_queue.empty():\n                self.print_msg()\n\n    def register_msg_Rx(self, msg_qRx: queue.Queue):\n        self.msg_rx_queue = msg_qRx\n\n    def register_msg_Tx(self, msg_qTx: queue.Queue):\n        self.msg_tx_queue = msg_qTx\n\n    def pack_msg(self, msg: str) -> str:\n        \"\"\"\n        Packs the input message from stdin by prepending the current time.\n\n        Args:\n            msg (str): Raw user input from stdin.\n\n        Returns:\n            packed: String containing <current time ; message>\n        \"\"\"\n\n        now = str(datetime.now().time())\n        packed = now + \";\" + msg\n        return packed\n\n    def check_data_len(self, msg: str) -> bool:\n        \"\"\"\n        Limits the maximum data length to 100.\n\n        Args:\n            msg (str): Raw user input from stdin.\n\n        Returns:\n            boolean\n        \"\"\"\n\n        if len(msg) <= 100:\n            return True\n        else:\n            print(\"Message too long, please shorten your message!\")\n            return False\n\n    def unpack_msg(self, msg: str) -> str:\n        \"\"\"\n        Unpacks a message for printing to stdout.\n\n        Args:\n            msg (str): Message with prepended sender.\n\n        Returns:\n            String: The message that is received with the correct format.\n        \"\"\"\n\n        sender = msg.split(\";\")[0]\n        data = msg.split(\";\")[1]\n        return str(\"Received from \" + sender + \": \" + data)\n\n    def check_msg_format(self, msg: str) -> bool:\n        \"\"\"\n        Checks if input from stdin has the correct format (<receiver;message>).\n\n        Args:\n            msg (str): Message with prepended receiver.\n\n        Returns:\n            boolean\n        \"\"\"\n\n        try:\n            receiver = msg.split(\";\")[0]\n            data = msg.split(\";\")[1]\n            return True\n        except Exception:\n            print(\"Wrong format, please type like the following: receiver;msg\")\n            return False\n\n    def start(self):\n        \"\"\"\n        Starts the input and output thread.\n\n        Returns:\n            None\n        \"\"\"\n\n        output_thread = threading.Thread(target=self.start_output_thread)\n        output_thread.start()\n        input_thread = threading.Thread(target=self.start_input_thread)\n        input_thread.start()", "id": "5120217", "language": "Python", "matching_score": 0.5000888109207153, "max_stars_count": 8, "path": "21-fs-ias-lec/05-LoRaLink/application.py" }, { "content": "from Sender import Sender\nfrom Receiver import Receiver\nimport scipy\nimport numpy as np\nimport scipy.io\nimport scipy.io.wavfile\nimport matplotlib.pyplot as plt\nfrom scipy import signal\n\n\ndef readFromFile(path):\n    file = open(path, \"rb\")\n    data = file.read()\n    file.close()\n    return data\n\ndef readWav(file_name) -> np.ndarray:\n    rate, data = scipy.io.wavfile.read(file_name)\n\n    if data.dtype == np.int16:\n        return data.astype(np.float32, order='C') / 32768.0\n    return data\n\n\ntestData = readWav('testbitsnopilots.wav')\nsubset = readWav('wrongbitstest.wav')\n\n\nr = Receiver()\nrate = 160\n\ncorr = 235292\n\n\noffset = r.findOffsetToFirstChange(testData)\ntruncated = r.truncateToTauS(testData, offset)\n\nplt.plot(testData[corr - len(subset)//2:corr + len(subset)//2])\nplt.show()\n\nplt.plot(subset)\nplt.show()\n\nplt.plot(truncated)\nplt.show()\ndemod = r.demodulate(truncated, 1/16, 1/40)\n\nresult = []\nstart = 0\nfor i in range(20):\n    if i == 2:\n        plt.plot(truncated[start: start + 10 * 36 * 160])\n        plt.show()\n    #part_demod = r.demodulate(truncated[start: start + 10*36 * 160], 1/16, 1/40)\n    #result.append(list(r.repdecode(part_demod, 10)))\n    start = start + 10*36*160\n\n\nprint('result', result)\nprint(demod)\nprint(len(demod[1:]))\nprint(r.repdecode(demod[1:], 10))\n\nsender = Sender()\ndemod = r.repdecode(demod, 10)\nexpected = sender.getTestDataAsBits(20)  # assumption: 20 tiles to match the 20 chunks above\nerror_sum = np.sum(np.abs(expected - demod))\nprint('error sum', error_sum)\nprint('error weight', np.sum(expected - demod))\nprint('error percentage', error_sum / len(expected) * 100)", "id": "13185", "language": "Python", "matching_score": 3.179816722869873, "max_stars_count": 8, "path": "21-fs-ias-lec/15-AudioLink/Testing.py" }, { "content": "import sounddevice as sd\nimport scipy.io.wavfile\nfrom scipy import signal\nimport numpy as np\nimport simpleaudio as sa\nfrom Sender import Sender\nfrom scipy.io.wavfile import write\nfrom Hamming import Hamming\nfrom matplotlib import pyplot as plt\nimport hashlib\n\nclass Receiver:\n    def __init__(self, tauS=160, tau0=20, tau1=80, sample_rate=44100):\n        '''\n        :param tauS: determines how many samples are used to modulate one bit\n        tauS must be multiple of both tau0 and tau1\n        :param tau0: determines the frequency of the high modulation note\n        :param tau1: determines the frequency of the low modulation\n        :param sample_rate: determines how many audio samples are used per second\n        '''\n\n        # sanity check to see if tauS is indeed a multiple of tau0 and tau1\n        checkTau0 = tauS // tau0\n        checkTau1 = tauS // tau1\n\n        if not (checkTau0 * tau0 == tauS and checkTau1 * tau1 == tauS):\n            print('tauS must be multiple of both tau0 and tau1')\n            return\n\n        self.fs = 1 / tauS\n        self.rate = tauS\n        self.freq_high = 1 / tau0\n        self.freq_low = 1 / tau1\n\n        self.weight_high = 1\n        self.weight_low = 1\n\n        # could be used for double modulation. 
Not in use as of now\n self.f3 = 1 / 40\n self.f4 = 1 / 16\n\n self.audioSampleRate = sample_rate\n self.audioDeviceId = 0\n\n self.hamming = Hamming()\n # start sequence to sync transmissions\n self.pilot1 = np.array([1, 0, 0, 1, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 1, 1, 0, 1, 1,\n 1, 1, 0, 1, 0, 0, 1, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 1, 1,\n 0, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 1,\n 0, 1, 1, 0, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 1, 1, 0, 0, 0, 0, 1, 0,\n 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 0, 1], dtype=np.uint8)\n\n # end sequence to mark end of transmission\n self.pilot2 = np.array([0, 1, 1, 0, 1, 0, 0, 1, 1, 0, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 0, 1,\n 0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 1, 1, 0, 1, 1, 1, 1, 0, 1, 0, 1, 0,\n 0, 0, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 1, 1, 0, 1, 1, 1, 1, 0, 1,\n 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 1, 1, 0, 1, 1, 1,\n 1, 0, 1, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0], dtype=np.uint8)\n\n\n def getTestBits(self, repetitions):\n return np.tile(np.array([1,1,1,1,1,0,0,0,0,0,1,1,1,1,0,0,0,0,1,1,1,0,0,0,1,1,0,0,1,0,1,0,1,0,1,0]), repetitions)\n\n\n def calibrate(self, plot=False):\n calibration_input = self.recordAudio(10)\n #calibration_input = self.readWav('calibration.wav')\n\n sin_high = self.modulate(self.repencode(np.array([1,1,1,1,1]), self.rate))\n sin_low = self.modulate(self.repencode(np.array([0,0,0,0,0]), self.rate))\n\n offset_sin_high = self.calculateOffsetToTransmission(sin_high, calibration_input)\n offset_sin_low = self.calculateOffsetToTransmission(sin_low, calibration_input)\n\n peak_sin_high = 0\n peak_sin_low = 0\n\n for i in range(20):\n start_high = int(offset_sin_high + i * 1 // self.freq_high)\n end_high = int(offset_sin_high + (i + 1) * 1 // self.freq_high)\n peak_sin_high += np.max(calibration_input[start_high:end_high])\n\n start_low = int(offset_sin_low + i * 1 // self.freq_low)\n end_low = int(offset_sin_low + (i + 1) * 1 // self.freq_low)\n peak_sin_low += np.max(calibration_input[start_low:end_low])\n\n peak_sin_high /= 20\n peak_sin_low /= 20\n\n if plot:\n plt.plot(calibration_input)\n plt.show()\n\n return peak_sin_high, peak_sin_low\n\n\n def readWav(self, file_name) -> np.ndarray:\n rate, data = scipy.io.wavfile.read(file_name)\n\n if data.dtype == np.int16:\n return data.astype(np.float32, order='C') / 32768.0\n return data\n\n def repencode(self, data, n):\n encoded = np.repeat(data, n)\n return encoded\n\n def repdecode(self, data, n):\n try:\n padding = len(data) % n\n if padding > 0:\n print('len', len(data))\n print('padding:', padding)\n data = np.concatenate((data, np.zeros(n - padding)))\n averaged = np.mean(data.reshape(-1, n), axis=1)\n return np.where(averaged > 0.5, 1, 0)\n except:\n print('not divisible by ', n)\n\n def modulate(self, data):\n length = len(data)\n t = np.linspace(0, length, length)\n mod_high = self.weight_high * np.multiply(np.sin(self.freq_high * t * 2 * np.pi), data)\n mod_low = self.weight_low * np.multiply(np.sin(self.freq_low * t * 2 * np.pi), 1 - data)\n return mod_high + mod_low\n\n def demodulate(self, data, freq_high, freq_low):\n t = np.linspace(0, 1 / self.fs, self.rate)\n\n sin_high = np.sin(freq_high * t * 2 * np.pi)\n sin_low = np.sin(freq_low * t * 2 * np.pi)\n\n data_matrix = np.reshape(data, (len(data) // self.rate, self.rate))\n sol_high = np.abs(np.dot(sin_high, np.transpose(data_matrix)))\n sol_low = np.abs(np.dot(sin_low, np.transpose(data_matrix)))\n\n diff = sol_high - sol_low\n demodulated = np.abs(np.ceil(diff / self.rate))\n\n starts = 
np.transpose(data_matrix)\n starts[0] = np.repeat(np.array([1]), data_matrix.shape[0])\n testStart = 410030\n\n markedStarts = np.transpose(starts)\n '''\n plt.plot(np.reshape(markedStarts[testStart//160:testStart//160 +6], 6*160))\n plt.show()\n plt.plot(sin_high)\n plt.show()\n plt.plot(sin_low)\n plt.show()\n plt.plot(np.dot(sin_high, np.transpose(data_matrix[testStart // 160:testStart // 160 + 6])))\n plt.show()\n plt.plot(np.dot(sin_low, np.transpose(data_matrix[testStart // 160:testStart // 160 + 6])))\n plt.show()\n \n plt.plot(np.dot(sin_low, np.transpose(data_matrix)))\n plt.show()\n \n plt.plot(np.dot(sin_high, np.transpose(data_matrix)))\n plt.xlabel('bits')\n plt.ylabel('integral sine high square')\n plt.show()\n '''\n\n return demodulated\n\n def doubleDemodulate(self, data):\n part1 = self.demodulate(data, self.freq_high, self.freq_low)\n part2 = self.demodulate(data, self.f3, self.f4)\n return np.concatenate((part1, part2))\n\n def calculateOffsetToTransmission(self, zeroOne, data):\n testCorr = signal.correlate(data, zeroOne, mode=\"same\")\n\n # TODO improve this offset calculation\n indices = np.where(testCorr > np.max(testCorr) - 2)\n if len(indices) > 0 and len(indices[0] > 0):\n return indices[0][0]\n else:\n return indices\n\n def truncateToTauS(self, data, offset):\n truncated_start = data[(offset % self.rate):]\n res = truncated_start[:len(truncated_start) - (len(truncated_start) % self.rate)]\n return res\n\n def convertToOneMinusOne(self, data):\n return 2 * data - 1\n\n def removePilots(self, data):\n pilot_1_converted = self.convertToOneMinusOne(self.pilot1.astype(np.float32))\n pilot_2_converted = self.convertToOneMinusOne(self.pilot2.astype(np.float32))\n\n offset_1 = self.calculateOffsetToTransmission(pilot_1_converted, self.convertToOneMinusOne(data)) - len(self.pilot1) // 2\n trunc_1 = data[offset_1 + len(self.pilot1):]\n offset_2 = self.calculateOffsetToTransmission(pilot_2_converted, self.convertToOneMinusOne(trunc_1)) - len(self.pilot2) // 2\n trunc_2 = trunc_1[:offset_2]\n return trunc_2\n\n def removeDoubleModPilots(self, singleDemod, originalData):\n pilot_1_converted = self.convertToOneMinusOne(self.pilot1.astype(np.float32))\n pilot_2_converted = self.convertToOneMinusOne(self.pilot2.astype(np.float32))\n\n offset_1 = self.calculateOffsetToTransmission(pilot_1_converted, self.convertToOneMinusOne(singleDemod)) - len(self.pilot1) // 2\n trunc_1 = singleDemod[offset_1 + len(self.pilot1):]\n offset_2 = self.calculateOffsetToTransmission(pilot_2_converted, self.convertToOneMinusOne(trunc_1)) - len(self.pilot2) // 2\n\n result = originalData[self.rate * (offset_1 + len(self.pilot1)):]\n return result[:self.rate * offset_2]\n\n def findOffsetToFirstChange(self, data):\n firstChange = self.modulate(self.repencode(np.array([1, 0]), self.rate))\n return self.calculateOffsetToTransmission(firstChange, data)\n\n def bitsToBytes(self, bits):\n binaryBites = np.reshape(bits, ((len(bits) // 8), 8))\n dataAsBytes = np.packbits(binaryBites, axis=1).flatten().tobytes()\n return dataAsBytes\n\n def writeToFile(self, path, data):\n file = open(path, \"wb\")\n file.write(data)\n file.close()\n\n def recordAudio(self, duration, save_recording=False, recording_name=None):\n seconds = duration\n myrecording = sd.rec(int(seconds * self.audioSampleRate), samplerate=self.audioSampleRate, channels=1)\n sd.wait() # Wait until recording is finished\n\n recording = np.reshape(myrecording, myrecording.shape[0])\n\n if save_recording:\n file_name = recording_name\n if not 
recording_name.endswith('.wav'):\n file_name = recording_name + '.wav'\n\n scipy.io.wavfile.write(file_name, self.audioSampleRate, recording.astype(np.float32))\n\n return recording\n\n def getAvailableAudioDevices(self):\n return sd.query_devices(device=None, kind=None)\n\n def setAudioInputDevice(self, device_id):\n self.audioDeviceId = device_id\n sd.default.device = device_id\n\n def gateInput(self, data):\n thresh = 2 * np.max(data[:self.audioSampleRate//2])\n return np.where(np.abs(data) < thresh, 0, data)\n\n def integrityCheck(self, data):\n expected_hash = data[-32:]\n received_hash = hashlib.sha256(data[:len(data) - 32]).digest()\n print('calculated hash:', received_hash)\n return expected_hash == received_hash\n\n\n def test(self, rec_duration, testBitRepetitions, encodeRepetitions, hamming):\n expected = self.getTestBits(testBitRepetitions)\n\n if hamming:\n actual = self.receiveHammingEncoded(rec_duration, repetitions=encodeRepetitions, bits=True,\n save_file=True, recording_name='lastTransmission.wav')\n else:\n actual = self.receiveRepencoded(rec_duration, repetitions=encodeRepetitions, bits=True,\n save_file=True, recording_name='lastTransmission.wav')\n\n print('actual: ', actual)\n print('length of actual:', len(actual))\n\n diff = expected - actual[:len(expected)]\n error_sum = np.sum(np.abs(diff))\n\n print('error sum ', error_sum)\n print('error weight', np.sum(diff))\n print('error percentage', error_sum / len(expected) * 100)\n\n def receiveRepencoded(self, duration, repetitions=3, bits=False, from_file=False, file_path=None,\n save_file=False, recording_name=None, plot=False):\n '''\n Starts a recording or reads audio from a wav file. Then demodulates the input and decodes it\n :param duration: Number of seconds that should be recorded\n :param repetitions: Number of repetitions used to encode each bit. Must be the same as in the sender\n :param bits: If true, the method will return a np.array containing the decoded bits. 
Else it will return bytes\n :param from_file: If True the input will be read from a wav file and no recording will be started\n :param file_path: Path to the input wav file\n :param save_file: if True the recording will be saved to a wav file\n :param recording_name: Name and path of the file the recording should be saved to\n :param plot: If True the recording will be shown in a plot\n :return: Demodulated and decoded data as bytes or as bits depending on parameter bits.\n '''\n data_in = None\n if from_file:\n data_in = self.readWav(file_path)\n else:\n data_in = self.recordAudio(duration, save_file, recording_name)\n\n off = self.findOffsetToFirstChange(data_in)\n\n if off > self.audioSampleRate // 2 + self.rate // 2:\n data_in = self.gateInput(data_in)\n\n res = np.zeros(len(data_in) // self.rate - 1)\n for i in range(self.rate // 32):\n data_in2 = np.copy(data_in)\n offset = self.findOffsetToFirstChange(data_in2) + 16 * i\n truncated = self.truncateToTauS(data_in2, offset)\n demodulated = self.demodulate(truncated, self.freq_high, self.freq_low)\n res = np.add(res, demodulated[:len(data_in) // self.rate - 1])\n\n demodulated = np.where(res > self.rate // 64, 1, 0)\n '''\n plt.plot(res[2500:3000])\n plt.xlabel('bits')\n plt.ylabel('aggregated demodulation')\n plt.show()\n '''\n no_pilots = self.removePilots(demodulated)\n decoded = self.repdecode(no_pilots, repetitions)\n\n if plot:\n plt.plot(data_in)\n plt.show()\n\n if bits:\n return decoded\n else:\n try:\n data_as_bytes = self.bitsToBytes(decoded)\n if self.integrityCheck(data_as_bytes):\n print('Data received correctly, hashs matched')\n return data_as_bytes[:-32]\n else:\n print('Data seems to be corrupted, the hashs did not match')\n except:\n print('could not convert bits to bytes. \\nData might not be divisible by eight')\n\n def receiveHammingEncoded(self, duration, repetitions=3, bits=False, from_file=False, file_path=None,\n save_file=False, recording_name=None, plot=False):\n '''\n Starts a recording or reads audio from a wav file. Then demodulates the input and decodes it\n Use this method to receive data, if the sender is using Hamming encoding\n :param duration: Number of seconds that should be recorded\n :param repetitions: Number of repetitions used to encode each bit. Must be the same as in the sender\n :param bits: If true, the method will return a np.array containing the decoded bits. 
Else it will return bytes\n :param from_file: If True the input will be read from a wav file and no recording will be started\n :param file_path: Path to the input wav file\n :param save_file: if True the recording will be saved to a wav file\n :param recording_name: Name and path of the file the recording should be saved to\n :param plot: If True the recording will be shown in a plot\n :return: Demodulated and decoded data as bytes or as bits depending on parameter bits.\n '''\n data_in = None\n if from_file:\n data_in = self.readWav(file_path)\n else:\n data_in = self.recordAudio(duration, save_file, recording_name)\n\n off = self.findOffsetToFirstChange(data_in)\n\n if off > self.audioSampleRate // 2 + self.rate // 2:\n data_in = self.gateInput(data_in)\n\n res = np.zeros(len(data_in) // self.rate - 1)\n for i in range(self.rate // 32):\n data_in2 = np.copy(data_in)\n offset = self.findOffsetToFirstChange(data_in2) + 16 * i\n truncated = self.truncateToTauS(data_in2, offset)\n demodulated = self.demodulate(truncated, self.freq_high, self.freq_low)\n res = np.add(res, demodulated[:len(data_in) // self.rate - 1])\n\n demodulated = np.where(res > self.rate // 64, 1, 0)\n no_pilots = self.removePilots(demodulated)\n rep_decoded = self.repdecode(no_pilots, repetitions)\n decoded = self.hamming.decodeAndCorrectStream(rep_decoded)\n\n if plot:\n plt.plot(data_in)\n plt.show()\n\n if bits:\n return decoded\n else:\n try:\n data_as_bytes = self.bitsToBytes(decoded)\n if self.integrityCheck(data_as_bytes):\n print('Data received correctly, hashs matched')\n return data_as_bytes[:-32]\n else:\n print('Data seems to be corrupted, the hashs did not match')\n except:\n print('could not convert bits to bytes. \\nData might not be divisible by eight')\n\n\n def testDoubleDecode(self):\n #input = self.readWav('test_double.wav')\n input = self.recordAudio()\n truncated = self.truncateToTauS(input, self.findOffsetToFirstChange(input))\n singleDecoded = self.demodulate(truncated, self.freq_high, self.freq_low)\n noPilots = self.removeDoubleModPilots(singleDecoded, truncated)\n doubleDemod = self.doubleDemodulate(noPilots)\n actual = self.repdecode(doubleDemod, 3)\n\n print('actual: ', actual)\n\n def testDecode(self):\n a = self.repdecode(np.array([1,1,1,0,0,1,0,0,1,0,1,1]), 4)\n print(a)", "id": "3566068", "language": "Python", "matching_score": 6.62128210067749, "max_stars_count": 8, "path": "21-fs-ias-lec/15-AudioLink/Receiver.py" }, { "content": "import numpy as np\nimport simpleaudio as sa\nimport scipy.io\nimport scipy.io.wavfile\nfrom Hamming import Hamming\nimport hashlib\n\n\n\nclass Sender:\n\n def __init__(self, tauS=160, tau0=20, tau1=80, sample_rate=44100):\n '''\n :param tauS: determines how many samples are used to modulate one bit\n tauS must be multiple of both tau0 and tau1\n :param tau0: determines the frequency of the high modulation note\n :param tau1: determines the frequency of the low modulation\n :param sample_rate: determines how many audio samples are used per second\n '''\n\n # sanity check to see if tauS is indeed a multiple of tau0 and tau1\n checkTau0 = tauS // tau0\n checkTau1 = tauS // tau1\n\n if not (checkTau0 * tau0 == tauS and checkTau1 * tau1 == tauS):\n print('tauS must be multiple of both tau0 and tau1')\n return\n\n self.fs = 1/tauS\n self.rate = tauS\n self.freq_high = 1 / tau0\n self.freq_low = 1 / tau1\n\n self.weight_high = 1\n self.weight_low = 1\n\n # could be used for double modulation. 
\n self.weight_high = 1\n self.weight_low = 1\n\n # could be used for double modulation. Not in use as of now\n self.f3 = 1 / 40\n self.f4 = 1 / 16\n\n self.audioSampleRate = sample_rate\n\n self.hamming = Hamming()\n\n # start sequence to sync transmissions\n self.pilot1 = np.array([1, 0, 0, 1, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 1, 1, 0, 1, 1,\n 1, 1, 0, 1, 0, 0, 1, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 1, 1,\n 0, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 1,\n 0, 1, 1, 0, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 1, 1, 0, 0, 0, 0, 1, 0,\n 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 0, 1], dtype=np.uint8)\n\n # end sequence to mark end of transmission\n self.pilot2 = np.array([0, 1, 1, 0, 1, 0, 0, 1, 1, 0, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 0, 1,\n 0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 1, 1, 0, 1, 1, 1, 1, 0, 1, 0, 1, 0,\n 0, 0, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 1, 1, 0, 1, 1, 1, 1, 0, 1,\n 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 1, 1, 0, 1, 1, 1,\n 1, 0, 1, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0], dtype=np.uint8)\n\n def playAudio(self, data):\n audio = data * (2 ** 15 - 1) / np.max(np.abs(data))\n audio = audio.astype(np.int16)\n play_obj = sa.play_buffer(audio, 1, 2, self.audioSampleRate)\n play_obj.wait_done()\n\n def getTestTone(self):\n frequency = 440\n seconds = 3\n # the sample count must use the audio sample rate (self.fs is 1/tauS)\n t = np.linspace(0, seconds, seconds * self.audioSampleRate, False)\n note = np.sin(frequency * t * 2 * np.pi)\n return note\n\n def getCalibrationTones(self):\n t = np.linspace(0, 2 * self.audioSampleRate, 2 * self.audioSampleRate)\n high = self.weight_high * np.sin(self.freq_high * t * 2 * np.pi)\n low = self.weight_low * np.sin(self.freq_low * t * 2 * np.pi)\n pause = np.zeros(self.audioSampleRate // 2)\n return np.concatenate((high, pause, low))\n\n def sendCalibration(self):\n self.playAudio(self.getCalibrationTones())\n\n def setTransmitionAmplitudes(self, amp_high, amp_low):\n if amp_high > amp_low:\n self.weight_high = amp_low / amp_high\n else:\n self.weight_low = amp_high / amp_low\n #self.weight_high = amp_high\n #self.weight_low = amp_low\n\n def getTestDataAsBits(self, repetitions):\n #s = np.array([1, 0, 1, 1, 0, 0, 1, 0, 0, 0, 1], dtype=np.uint8)\n s = np.tile(np.array([1,1,1,1,1,0,0,0,0,0,1,1,1,1,0,0,0,0,1,1,1,0,0,0,1,1,0,0,1,0,1,0,1,0,1,0], dtype=np.uint8), repetitions)\n return s\n\n def repencode(self, data, n):\n encoded = np.repeat(data, n)\n return encoded\n\n def addPilots(self, data):\n return np.concatenate((self.pilot1, data, self.pilot2))\n\n def addModulatedPilots(self, data):\n modPilot1 = self.modulate(self.repencode(self.pilot1, self.rate))\n modPilot2 = self.modulate(self.repencode(self.pilot2, self.rate))\n return np.concatenate((modPilot1, data, modPilot2))\n\n def modulate(self, data):\n length = len(data)\n t = np.linspace(0, length, length)\n mod_high = self.weight_high * np.multiply(np.sin(self.freq_high * t * 2 * np.pi), data)\n mod_low = self.weight_low * np.multiply(np.sin(self.freq_low * t * 2 * np.pi), 1 - data)\n return mod_high + mod_low\n\n def doubleModulate(self, data):\n if not (len(data) % 2 == 0):\n print('data length must be even for double modulation')\n return\n length = len(data) // 2\n dataPart1 = data[0:length]\n dataPart2 = data[length:]\n\n t = np.linspace(0, length, length)\n mod_1 = np.multiply(np.sin(self.freq_high * t * 2 * np.pi), dataPart1)\n mod_2 = np.multiply(np.sin(self.freq_low * t * 2 * np.pi), 1 - dataPart1)\n mod_3 = np.multiply(np.sin(self.f3 * t * 2 * np.pi), dataPart2)\n mod_4 = np.multiply(np.sin(self.f4 * t * 2 * np.pi), 1 - dataPart2)\n\n return mod_1 + mod_2 + mod_3 + mod_4\n
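\n # Worked example for modulate() with the defaults: repencode([1, 0],\n # self.rate) yields 320 samples (160 ones, then 160 zeros); modulate()\n # mixes the high carrier where data == 1 and the low carrier where\n # data == 0, so the first symbol rings at freq_high and the second at\n # freq_low.\n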
\n def writeToWav(self, data, file_name):\n if not file_name.endswith('.wav'):\n file_name = file_name + '.wav'\n scipy.io.wavfile.write(file_name, self.audioSampleRate, data.astype(np.float32))\n\n def readFromFile(self, path):\n file = open(path, \"rb\")\n data = file.read()\n file.close()\n return data\n\n def writeToFile(self, path, data):\n file = open(path, \"wb\")\n file.write(data)\n file.close()\n\n def test(self):\n hamming = Hamming()\n #by = self.readFromFile('pacman2.bmp')\n #bits = self.bytesToBits(by)\n #data = self.addPilots(self.repencode(bits, 10))\n # getTestDataAsBits requires a repetitions argument; 3 is assumed here\n testbits = self.repencode(hamming.encodeBitStream(self.getTestDataAsBits(3)), 4)\n data = self.addPilots(self.repencode(hamming.encodeBitStream(testbits), 1))\n #dataBytes = self.readFromFile('penguin.png')\n #data = self.bytesToBits(dataBytes)\n #data = self.addPilots(self.repencode(data, 5))\n encoded = self.repencode(data, self.rate)\n modulated = self.modulate(encoded)\n #self.writeToWav(np.concatenate((np.zeros(3*44100), modulated)), 'test')\n #demodulated = self.doubleDemodulate(modulated)\n print('data and pilots')\n #print(demodulated)\n print('data only')\n print(self.getTestDataAsBits(3))\n #b = self.bitsToBytes(demodulated.astype(np.uint8))\n #self.writeToFile(\"pinguuuu.png\", b)\n self.writeToWav(modulated, 'test') # 'test' is a placeholder file name\n self.playAudio(self.modulate(encoded))\n\n def bytesToBits(self, data):\n dataAsInts = np.frombuffer(data, dtype=np.uint8) # np.fromstring is deprecated\n bits = np.unpackbits(dataAsInts)\n return bits\n\n def bitsToBytes(self, bits):\n binaryBites = np.reshape(bits, ((len(bits) // 8), 8))\n dataAsBytes = np.packbits(binaryBites, axis=1).flatten().tobytes()\n return dataAsBytes\n\n def addHash(self, data):\n digest = hashlib.sha256(data).digest()\n print('calculated hash', digest)\n return data + digest\n\n def testConversion(self):\n data = self.readFromFile('testFiles/penguin.png')\n bits = self.bytesToBits(data)\n res = self.bitsToBytes(bits)\n passed = data == res\n print('conversion passed:', passed)\n\n def testDoubleModulation(self):\n data = self.repencode(self.getTestDataAsBits(3), 3)\n encoded = self.repencode(data, self.rate)\n modulatedData = self.doubleModulate(encoded)\n dataWithPilots = self.addModulatedPilots(modulatedData)\n self.writeToWav(dataWithPilots, 'test_double')\n self.playAudio(dataWithPilots)\n\n def sendDataRepencoded(self, data, repetitions=3, bits=False):\n '''\n Encodes, modulates and plays data\n :param data: data to be transmitted, either np.array with bits or bytes\n :param repetitions: number of repetitions per bit\n :param bits: if false the data will be interpreted as bytes\n '''\n if not bits:\n data = self.bytesToBits(self.addHash(data))\n\n repencoded = self.repencode(data, repetitions)\n with_Pilots = self.addPilots(repencoded)\n readyToMod = self.repencode(with_Pilots, self.rate)\n modulated = self.modulate(readyToMod)\n\n self.playAudio(modulated)\n
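\n # Transmit pipeline shared by the send methods: payload -> sha256 digest\n # appended (addHash) -> bits -> error-correction code -> pilot framing ->\n # per-bit expansion to self.rate samples -> FSK modulation -> playback.\n\n def sendDataHamming(self, data, repetitions=3, bits=False):\n '''\n Encodes, modulates and plays data. 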
Data will first be encoded using Hamming(7,4) and then\n every bit will be repeated n times (n = repetitions)\n :param data: data to be transmitted, either np.array with bits or bytes\n :param repetitions: number of repetitions per bit\n :param bits: if false the data will be interpreted as bytes\n '''\n if not bits:\n data = self.bytesToBits(self.addHash(data))\n\n hamming_encoded = self.hamming.encodeBitStream(data)\n rep_encoded = self.repencode(hamming_encoded, repetitions)\n with_pilots = self.addPilots(rep_encoded)\n readyToMod = self.repencode(with_pilots, self.rate)\n modulated = self.modulate(readyToMod)\n\n self.playAudio(modulated)\n", "id": "5010736", "language": "Python", "matching_score": 4.595743656158447, "max_stars_count": 8, "path": "21-fs-ias-lec/15-AudioLink/Sender.py" }, { "content": "from Sender import Sender\nfrom Receiver import Receiver\nimport scipy.io.wavfile\nimport numpy as np\nfrom Hamming import Hamming\n\n'''\nAdjust these values for your input device and calibration for your setup\n'''\n#print(Receiver.getAvailableAudioDevices())\ninput_device_id = 4\namplitude_high = 0.54213\namplitude_low = 0.26384\n\n\ndef sendCalibration():\n sender = Sender()\n sender.sendCalibration()\n\ndef testReceiverFromFile():\n receiver = Receiver()\n sender = Sender()\n testBytes = sender.readFromFile('testFiles/pacman2.bmp')\n actual = receiver.receiveRepencoded(10, 3, plot=True, from_file=True, file_path='testFiles/pacmanAudioNew.wav')\n\n testBits = sender.bytesToBits(testBytes)\n actualBits = sender.bytesToBits(actual)\n\n print('Error sum', np.sum(np.abs(testBits - actualBits)))\n\ndef receiveLongTestRepencoded():\n receiver = Receiver()\n print(receiver.getAvailableAudioDevices())\n receiver.setAudioInputDevice(input_device_id)\n sender = Sender()\n testBytes = sender.readFromFile('testFiles/pacman2.bmp')\n actual = receiver.receiveRepencoded(28, 3, plot=True)\n\n testBits = sender.bytesToBits(testBytes)\n actualBits = sender.bytesToBits(actual)\n\n print('Error sum', np.sum(np.abs(testBits - actualBits)))\n\ndef receiveLongTestHamming():\n receiver = Receiver()\n print(receiver.getAvailableAudioDevices())\n receiver.setAudioInputDevice(input_device_id)\n sender = Sender()\n testBytes = sender.readFromFile('testFiles/pacman2.bmp')\n actual = receiver.receiveHammingEncoded(50, 3, plot=True)\n\n testBits = sender.bytesToBits(testBytes)\n actualBits = sender.bytesToBits(actual)\n\n print('Error sum', np.sum(np.abs(testBits - actualBits)))\n\ndef sendShortTestRepencoded():\n sender = Sender()\n sender.setTransmitionAmplitudes(amplitude_high, amplitude_low)\n testBits = sender.getTestDataAsBits(5)\n sender.sendDataRepencoded(testBits, 3, bits=True)\n\ndef sendShortTestHammingEncoded():\n sender = Sender()\n sender.setTransmitionAmplitudes(amplitude_high, amplitude_low)\n testBits = sender.getTestDataAsBits(5)\n sender.sendDataHamming(testBits, 3, bits=True)\n\ndef sendLongTestRepencoded():\n sender = Sender()\n sender.setTransmitionAmplitudes(amplitude_high, amplitude_low)\n data = sender.readFromFile('testFiles/pacman2.bmp')\n sender.sendDataRepencoded(data, 3)\n\ndef sendLongTestHamming():\n sender = Sender()\n sender.setTransmitionAmplitudes(amplitude_high, amplitude_low)\n data = sender.readFromFile('testFiles/pacman2.bmp')\n sender.sendDataHamming(data, 3)\n\n\ndef receiveShortTestRepencoded():\n receiver = Receiver()\n print(receiver.getAvailableAudioDevices())\n receiver.setAudioInputDevice(input_device_id)\n sender = Sender()\n actual = receiver.receiveRepencoded(10, 
3, plot=True)\n\n testBits = sender.getTestDataAsBits(5)\n actualBits = sender.bytesToBits(actual)\n\n print('Error sum', np.sum(np.abs(testBits - actualBits)))\n\ndef receiveShortTestHamming():\n receiver = Receiver()\n print(receiver.getAvailableAudioDevices())\n receiver.setAudioInputDevice(input_device_id)\n sender = Sender()\n actual = receiver.receiveHammingEncoded(10, 3, plot=True)\n\n testBits = sender.getTestDataAsBits(5)\n actualBits = sender.bytesToBits(actual)\n\n print('Error sum', np.sum(np.abs(testBits - actualBits)))\n\ndef testHamming():\n sender = Sender()\n receiver = Receiver()\n hamming = Hamming()\n\n testBits = sender.getTestDataAsBits(5)\n hammEncoded = hamming.encodeBitStream(testBits)\n rependcoded = sender.repencode(hammEncoded, 3)\n withPilots = sender.addPilots(rependcoded)\n\n noPilots = receiver.removePilots(withPilots)\n repdec = receiver.repdecode(noPilots, 3)\n actual = hamming.decodeAndCorrectStream(repdec)\n\n errorSum = np.sum(np.abs(sender.getTestDataAsBits(5) - actual))\n print('errorSum hamming', errorSum)\n\n\n\nsendLongTestHamming()", "id": "9161898", "language": "Python", "matching_score": 2.8928563594818115, "max_stars_count": 8, "path": "21-fs-ias-lec/15-AudioLink/Main.py" }, { "content": "import numpy as np\n\n\nclass Hamming:\n def __init__(self):\n self.G = np.array([[1, 1, 0, 1], [1, 0, 1, 1], [1, 0, 0, 0], [0, 1, 1, 1], [0, 1, 0, 0], [0, 0, 1, 0], [0, 0, 0, 1]])\n self.H = np.array([[1, 0, 1, 0, 1, 0, 1], [0, 1, 1, 0, 0, 1, 1], [0, 0, 0, 1, 1, 1, 1]])\n self.R = np.array([[0, 0, 1, 0, 0, 0, 0], [0, 0, 0, 0, 1, 0, 0], [0, 0, 0, 0, 0, 1, 0], [0, 0, 0, 0, 0, 0, 1]])\n\n def encode(self, input):\n return np.dot(self.G, input) % 2\n\n def parityCheck(self, input):\n return np.sum(np.dot(self.H, input) % 2) == 0\n\n def errorPositionVector(self, input):\n return np.dot(self.H, input) % 2\n\n def decimalErrorPosition(self, input):\n errorVector = self.errorPositionVector(input)\n res = 4 * errorVector[2] + 2 * errorVector[1] + errorVector[0]\n return int(res - 1)\n\n def decode(self, input):\n return np.dot(self.R, input)\n\n def correct(self, input):\n if not self.parityCheck(input):\n errorPos = self.decimalErrorPosition(input)\n if input[errorPos] == 1:\n input[errorPos] = 0\n else:\n input[errorPos] = 1\n return input\n\n def encodeBitStream(self, input):\n if len(input) % 4 == 0:\n inputMatrix = np.reshape(input, (len(input) // 4, 4))\n encodedMatrix = np.transpose(self.encode(np.transpose(inputMatrix)))\n encodedStream = np.reshape(encodedMatrix, encodedMatrix.size)\n return encodedStream\n\n def decodeAndCorrectStream(self, data):\n if not len(data) % 7 == 0:\n data = data[:len(data) - len(data) % 7]\n print('truncating data for hamming decoding. Data not divisible by 7')\n\n inputMatrix = np.reshape(data, (len(data) // 7, 7))\n res = self.decode(self.correct(inputMatrix[0]))\n for i in range(len(data) // 7 - 1):\n #inputMatrix[i] = self.correct(inputMatrix[i])\n res = np.concatenate((res, self.decode(self.correct(inputMatrix[i + 1]))))\n\n #correctedStream = np.reshape(inputMatrix, inputMatrix.size)\n return res\n\n\n\n\n", "id": "4721422", "language": "Python", "matching_score": 1.0680460929870605, "max_stars_count": 8, "path": "21-fs-ias-lec/15-AudioLink/Hamming.py" } ]
1.990339
A-Hassan7
[ { "content": "import dash\nimport dash_core_components as dcc\nimport dash_html_components as html\n\nstore = html.Div([\n html.Div(id='submissions_store'),\n html.Div(id='stock_price_store'),\n html.Div(id='preprocessed_text_store'),\n html.Div(id='unprocessed_text_store')\n], style={'display': 'none'})\n", "id": "11909446", "language": "Python", "matching_score": 1.872507929801941, "max_stars_count": 2, "path": "app_components/store.py" }, { "content": "import dash\nimport dash_core_components as dcc\nimport dash_html_components as html\n\nfrom . import styles\n\nheader = html.Div([\n dcc.Markdown('''\n # Reddit Sentiment Analyser\n *Source Code: https://github.com/A-Hassan7/Reddit-Sentiment-Analysis*\n\n ###### Analyse Reddit sentiment for popular stocks using the Python Natural Language Toolkit (NLTK) \n\n ''')\n], style=styles.header_style)\n", "id": "5207786", "language": "Python", "matching_score": 1.4130624532699585, "max_stars_count": 2, "path": "app_components/header.py" }, { "content": "import dash\nimport dash_core_components as dcc\nimport dash_html_components as html\n\nfrom .header import header\nfrom .search import search_bar\nfrom .charts import wordcloud, sentiment_chart, submissions_table\nfrom .store import store\nfrom . import styles\n\nlayout = html.Div([\n header,\n search_bar,\n # data\n html.Div([\n dcc.Tabs([\n dcc.Tab([\n wordcloud\n ], label='WordCloud'),\n dcc.Tab([\n sentiment_chart\n ], label='Sentiment Plot'),\n dcc.Tab([\n submissions_table\n ], label='Submissions')\n ]),\n ], style=styles.charts_style),\n\n # hidden storage\n store\n])\n", "id": "9015419", "language": "Python", "matching_score": 2.7388670444488525, "max_stars_count": 2, "path": "app_components/layout.py" }, { "content": "import dash\nimport dash_core_components as dcc\nimport dash_html_components as html\n\nfrom . 
import styles\n\nwordcloud = html.Div([\n # max words\n html.Div([\n html.Label('Max Words'),\n dcc.Slider(\n id='max_words',\n min=10,\n max=60,\n step=10,\n marks={k: str(k) for k in range(10,110, 10)},\n value=30\n )\n ], style=styles.max_words_slider_style),\n\n dcc.Loading([\n html.Div([\n html.Div([\n # wordcloud image\n dcc.Graph(\n id='wordcloud'\n )\n ], style={'padding': '20px'}),\n\n html.Div([\n # frequency distribution chart\n dcc.Graph(\n id='freqdist'\n )\n ], style={'padding': '20px'}),\n ], style=styles.wordcloud_freqdist_style)\n ])\n], style=styles.tab_content_style)\n\n\n# sentiment chart\nsentiment_chart = html.Div([\n\n # rolling window slider\n html.Div([\n html.Label('Sentiment Smoothness'),\n dcc.Slider(\n id='sentiment_smoothness',\n min=0,\n max=100,\n step=10,\n marks={k: str(k) for k in range(0,110, 10)},\n value=10\n )\n ], style=styles.max_words_slider_style),\n\n dcc.Loading([\n html.Div([\n dcc.Graph(\n id='sentiment'\n )\n ], style=styles.sentiment_chart_style)\n ])\n], style=styles.tab_content_style)\n\n\nsubmissions_table = html.Div(\n id='submissions_table',\n style=styles.submissions_table_style\n)\n\n\ndef generate_table(dataframe, max_rows=1000000000):\n return html.Table([\n html.Thead(\n html.Tr([html.Th(col.replace('_', ' ').upper()) for col in dataframe.columns])\n ),\n html.Tbody([\n html.Tr([\n html.Td(dataframe.iloc[i][col]) for col in dataframe.columns\n ]) for i in range(min(len(dataframe), max_rows))\n ])\n ])\n", "id": "3919837", "language": "Python", "matching_score": 2.036421298980713, "max_stars_count": 2, "path": "app_components/charts.py" }, { "content": "\ncharts_style = {\n 'display': 'block',\n 'width': '63%',\n 'float': 'right',\n 'padding': '30px',\n 'background': '#f9f9f9',\n 'boxShadow': '6px 6px 6px lightgray',\n 'height': '800px',\n 'overflow': 'scroll'\n}\n\nsearch_bar_style = {\n 'display': 'block',\n 'width': '25%',\n 'float': 'left',\n 'height': '65vh',\n 'padding': '50px',\n 'background': '#f9f9f9',\n 'boxShadow': '6px 6px 6px lightgray'\n}\n\nheader_style = {\n 'hight': '10%',\n 'textAlign': 'center'\n}\n\nmax_words_slider_style = {\n 'padding': '7px 180px 0px',\n 'textAlign': 'center'\n}\n\ninput_form_style = {\n 'padding': '5px',\n 'margin': 'auto',\n 'paddingButton': '25px',\n 'height': 'auto',\n 'textAlign': 'center',\n}\n\nwordcloud_freqdist_style = {\n 'display': 'flex',\n 'margin': 'auto',\n 'width': 'max-content'\n}\n\nsentiment_chart_style = {\n 'display': 'flex',\n 'margin': 'auto',\n 'width': 'max-content'\n}\n\ntab_content_style = {\n 'padding': '30px',\n}\n\nsubmissions_table_style = {\n 'width': '90%',\n 'padding': '40px',\n 'fontSize': '14px',\n 'margin': 'auto'\n}\n", "id": "6137681", "language": "Python", "matching_score": 2.7080414295196533, "max_stars_count": 2, "path": "app_components/styles.py" }, { "content": "from datetime import datetime, timedelta\n\nimport dash\nimport dash_core_components as dcc\nimport dash_html_components as html\n\nfrom . 
import styles\n\nsearch_bar = html.Div([\n \n html.Div([\n html.H3('Search', style={'textAlign': 'center', 'fontSize': 20})\n ]),\n \n html.Hr(),\n \n html.Div([\n # ticker input\n html.Label('Ticker', className='control_label'),\n dcc.Dropdown(\n id='ticker_input',\n options=[\n {'label': 'TSLA', 'value': 'TSLA'},\n {'label': 'GME', 'value': 'GME'},\n {'label': 'AMC', 'value': 'AMC'},\n {'label': 'AAPL', 'value': 'AAPL'},\n {'label': 'AMZN', 'value': 'AMZN'},\n {'label': 'FB', 'value': 'FB'},\n {'label': 'SPY', 'value': 'SPY'}\n ], value='TSLA')\n ], style=styles.input_form_style),\n \n html.Br(),\n \n dcc.Loading([\n html.Div([\n html.Div([\n # submit button\n html.Button(\n 'Submit',\n id='submit_button',\n )\n ], style=styles.input_form_style),\n ])], id='data_loading'),\n \n html.Br(),\n \n html.Div([\n # mimimum upvote\n html.Label('Minimum Upvote'),\n dcc.Slider(\n id='minimum_upvote',\n min=0,\n max=50,\n step=10,\n marks={\n 0: '0',\n 10: '10',\n 20: '20',\n 30: '30',\n 40: '40',\n 50: '50'\n },\n value=0\n )\n ], style={'textAlign': 'center'})\n], style=styles.search_bar_style)\n", "id": "2234392", "language": "Python", "matching_score": 0, "max_stars_count": 2, "path": "app_components/search.py" }, { "content": "financial_jargon = {\n 'moon': 3,\n 'buy': 3,\n 'buying': 3,\n 'long': 3,\n 'rebound': 1,\n 'bounce': 2,\n 'hold': 2,\n 'holding': 2,\n 'call': 3,\n 'calls': 3,\n 'squeeze': 3,\n 'gain': 2,\n 'gains': 2,\n 'liquidate': -3,\n 'liquidated': -3,\n 'put': -3,\n 'puts': -3,\n 'bagholder': -3,\n 'bagholders': -3,\n 'short': -3,\n 'shorts': -3,\n 'sell': -3,\n}", "id": "4352001", "language": "Python", "matching_score": 0, "max_stars_count": 2, "path": "sentiment_analysis/sentiment/custom_lexicon.py" }, { "content": "words = [\n 'im',\n 'u'\n]", "id": "9950373", "language": "Python", "matching_score": 0.6665095090866089, "max_stars_count": 2, "path": "sentiment_analysis/sentiment/additional_stopwords.py" }, { "content": "import re\r\nimport statistics\r\nimport string\r\nfrom pathlib import Path\r\n\r\nimport nltk\r\nimport pandas as pd\r\nfrom nltk import FreqDist, WordNetLemmatizer\r\nfrom nltk.corpus import stopwords, wordnet\r\nfrom nltk.sentiment.vader import SentimentIntensityAnalyzer\r\nfrom nltk.tag import pos_tag\r\nfrom nltk.tokenize import sent_tokenize, word_tokenize\r\nfrom wordcloud import WordCloud\r\n\r\nfrom config import Loggers\r\nfrom . 
import additional_stopwords, custom_lexicon\r\n\r\n# Set path to nltk data \r\nnltk.data.path.append(Path('sentiment_analysis/nltk_data'))\r\n\r\nclass SentimentAnalyser:\r\n \"\"\"\r\n Uses the Python Natural Language Toolkit (NLTK) to perform sentiment\r\n analysis on language data.\r\n \"\"\"\r\n\r\n def __init__(self):\r\n self.logger = Loggers.console\r\n \r\n def _clean_text(self, text, stop_words, search_patterns):\r\n \"\"\"\r\n Removes unwanted tokens based on RegEx search patterns using\r\n the Python re library.\r\n\r\n Args:\r\n text (str): text to clean\r\n stop_words (list): list of stop words to remove\r\n search_patterns (list): list of Python RegEx search patterns to remove\r\n \"\"\"\r\n \r\n # substitute search pattern matches\r\n for pattern in search_patterns:\r\n text = re.sub(pattern, \"\", text)\r\n \r\n # tokenize text\r\n tokens = word_tokenize(text)\r\n \r\n # remove punctuation, stopwords\r\n cleaned_tokens = []\r\n for token in tokens:\r\n\r\n # skip unwanted tokens\r\n if (not token \r\n or token in string.punctuation\r\n or token.lower() in stop_words):\r\n continue\r\n\r\n cleaned_tokens.append(token.lower())\r\n\r\n return cleaned_tokens\r\n \r\n def _simplify_tag(self, tag):\r\n \"\"\"\r\n Attempts to map tags to wordnet POS tags for the\r\n wordnet Lemmatizer\r\n \r\n Args:\r\n tag (str): string tag\r\n \"\"\"\r\n\r\n if tag.startswith('J'):\r\n return wordnet.ADJ\r\n elif tag.startswith('V'):\r\n return wordnet.VERB\r\n elif tag.startswith('N'):\r\n return wordnet.NOUN\r\n elif tag.startswith('R'):\r\n return wordnet.ADV\r\n else:\r\n return wordnet.NOUN\r\n \r\n
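 # Example: nltk.pos_tag yields ('running', 'VBG'); 'VBG' maps to\r\n # wordnet.VERB above, so the lemmatizer below reduces 'running' to\r\n # 'run'. Unknown tags fall back to wordnet.NOUN.\r\n \r\n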
 def _lemmatize_tokens(self, tagged_tokens):\r\n \"\"\"\r\n Returns root words from tagged tokens e.g. ('running', 'verb').\r\n Lemmatizing is a normalising function that uses the word's context\r\n i.e. noun, verb, adverb etc. to find the word's canonical form or \r\n root word.\r\n\r\n ex: ('running', 'verb') --> 'run'\r\n\r\n Args:\r\n tagged_tokens (list): list of tuples like (word, tag)\r\n \"\"\"\r\n \r\n lemmatizer = WordNetLemmatizer()\r\n \r\n lemmatized_sentence = []\r\n for word, tag in tagged_tokens:\r\n \r\n # simplify tag for wordnet lemmatizer\r\n pos = self._simplify_tag(tag)\r\n \r\n # lemmatize the word\r\n lemmatized_word = lemmatizer.lemmatize(word, pos)\r\n lemmatized_sentence.append(lemmatized_word)\r\n\r\n return lemmatized_sentence\r\n \r\n def _flatten_preprocessed_text(self, preprocessed_text):\r\n \"\"\"\r\n Flatten preprocessed_text into a 1D array\r\n \"\"\"\r\n return [text for entry in preprocessed_text for sublist in entry for text in sublist]\r\n \r\n def _customize_vader(self, vader):\r\n \"\"\"\r\n Updates the Vader lexicon with the custom financial jargon words\r\n \"\"\"\r\n # dict.update() instead of |= keeps this compatible with Python < 3.9\r\n vader.lexicon.update(custom_lexicon.financial_jargon)\r\n return vader\r\n \r\n def preprocess_text(self, text_list: list) -> list:\r\n \"\"\"\r\n Preprocess text for analysis by cleaning, tagging and lemmatising.\r\n \r\n Args:\r\n text_list (list):\r\n List of strings containing text to be preprocessed\r\n ex: [\r\n ['This is a long reddit post'],\r\n ['This is another long reddit post']\r\n ]\r\n \"\"\"\r\n \r\n self.logger.info('Preprocessing text')\r\n \r\n # RegEx search patterns to remove\r\n punctuation_pattern = \"[^0-9A-Za-z ]\" # strip everything but alphanumerics and spaces\r\n url_pattern = \"http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+#]|[!*\\(\\),]|\" \\\r\n \"(?:%[0-9a-fA-F][0-9a-fA-F]))+\"\r\n search_patterns = [url_pattern, punctuation_pattern]\r\n \r\n # add custom stop words\r\n stop_words = stopwords.words('english') + additional_stopwords.words\r\n \r\n preprocessed_text = []\r\n for text in text_list:\r\n \r\n # split text into sentences\r\n preprocessed_sentences = []\r\n sentences = sent_tokenize(text)\r\n for sentence in sentences:\r\n \r\n # remove unwanted text\r\n clean_sentence = self._clean_text(\r\n text=sentence,\r\n stop_words=stop_words,\r\n search_patterns=search_patterns\r\n )\r\n\r\n # lemmatize text\r\n tagged_sentence = pos_tag(clean_sentence)\r\n lemmatized_sentence = self._lemmatize_tokens(tagged_sentence)\r\n \r\n preprocessed_sentences.append(lemmatized_sentence)\r\n \r\n preprocessed_text.append(preprocessed_sentences)\r\n \r\n return preprocessed_text\r\n \r\n def create_wordcloud(\r\n self,\r\n preprocessed_text: list,\r\n max_words: int=30,\r\n colormap: str=None, \r\n background_color: str='white'\r\n ) -> WordCloud:\r\n \"\"\"\r\n Creates a wordcloud image from preprocessed text\r\n\r\n Args:\r\n preprocessed_text (list): \r\n text that has been preprocessed by the preprocess_text function\r\n \"\"\"\r\n \r\n self.logger.info('Creating wordcloud')\r\n \r\n # create frequency distribution dictionary\r\n fd = self.create_freqdist(preprocessed_text, max_words=max_words)\r\n fd = fd.set_index('word')['count'].to_dict()\r\n \r\n # create wordcloud\r\n wordcloud = WordCloud(\r\n max_words=max_words,\r\n colormap=colormap,\r\n background_color=background_color\r\n )\r\n \r\n return wordcloud.fit_words(fd)\r\n \r\n def create_freqdist(\r\n self, \r\n preprocessed_text: list,\r\n max_words: int=30\r\n ) -> pd.DataFrame:\r\n \"\"\"\r\n Create frequency distribution of words in text\r\n \r\n Args:\r\n preprocessed_text (list): \r\n text that has been preprocessed by the preprocess_text function\r\n \"\"\"\r\n \r\n self.logger.info('creating frequency distribution')\r\n \r\n
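 # preprocessed_text is nested as [item][sentence][token]; e.g.\r\n # [[['buy', 'call']], [['sell']]] flattens to ['buy', 'call', 'sell']\r\n # before the words are counted.\r\n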
 # flatten preprocessed_text list\r\n flattened_text = self._flatten_preprocessed_text(preprocessed_text)\r\n \r\n # create distribution and sort by count\r\n fd = FreqDist(flattened_text)\r\n dist = pd.DataFrame.from_records(\r\n data=fd.most_common(),\r\n columns=['word', 'count']\r\n ).sort_values(by='count', ascending=False)\r\n \r\n return dist[:max_words] if max_words else dist\r\n\r\n def sentiment_score(\r\n self,\r\n preprocessed_text: list,\r\n customise_vader: bool=True\r\n ) -> list:\r\n \"\"\"\r\n Analyse preprocessed_text for sentiment using nltk's pretrained\r\n vader sentiment analyser.\r\n \r\n Args:\r\n preprocessed_text (list):\r\n text that has been preprocessed by the preprocess_text function\r\n customise_vader (bool): \r\n Adds custom financial jargon to the vader lexicon if True\r\n \"\"\"\r\n \r\n self.logger.info('generating sentiment scores')\r\n \r\n # Initialise vader sentiment analyser\r\n vader = SentimentIntensityAnalyzer()\r\n vader = self._customize_vader(vader) if customise_vader else vader\r\n \r\n # calculate sentiment for each item\r\n item_scores = []\r\n for item in preprocessed_text:\r\n \r\n # calculate sentiment for each sentence in item\r\n sentence_scores = []\r\n for sentence in item:\r\n\r\n # join sentence list and get compound sentiment\r\n sentence = ' '.join(sentence)\r\n score = vader.polarity_scores(sentence)['compound']\r\n \r\n sentence_scores.append(score)\r\n \r\n # take mean sentiment of all sentences (0 if the item had none)\r\n item_scores.append(statistics.mean(sentence_scores) if sentence_scores else 0)\r\n \r\n return item_scores\r\n", "id": "9838471", "language": "Python", "matching_score": 2.5447845458984375, "max_stars_count": 2, "path": "sentiment_analysis/sentiment/sentiment_analyser.py" }, { "content": "import json\nfrom datetime import timedelta\nfrom pathlib import Path\n\nimport dash\nimport pandas as pd\nimport plotly.express as px\nimport plotly.graph_objects as go\nimport plotly.subplots\nfrom dash.dependencies import Input, Output, State\nfrom pandas_datareader import data\n\nimport app_components.layout\nimport app_components.charts\nfrom sentiment_analysis.sentiment import utils\nfrom sentiment_analysis.sentiment.sentiment_analyser import SentimentAnalyser\n\nsentiment_analyser = SentimentAnalyser()\nreddit_data_path = Path('sentiment_analysis/reddit_data/')\n\napp = dash.Dash(__name__)\nserver = app.server\n\napp.layout = app_components.layout.layout\n\n# get submissions and stock price data\n@app.callback(\n [\n Output('submissions_store', 'children'),\n Output('stock_price_store', 'children'),\n Output('preprocessed_text_store', 'children'),\n Output('unprocessed_text_store', 'children'),\n Output('data_loading', 'debug')\n ],\n [\n Input('submit_button', 'n_clicks'),\n Input('minimum_upvote', 'value'),\n State('ticker_input', 'value'),\n ]\n)\ndef get_data(submit, minimum_upvote, ticker):\n \"\"\"\n Get Reddit submissions\n \"\"\"\n \n # read in submissions\n submissions = pd.read_pickle(reddit_data_path / f'{ticker}.pkl')\n \n # filter submissions\n submissions = submissions[submissions.score >= minimum_upvote]\n\n # preprocess text for analysis\n text = submissions.title.tolist()\n preprocessed_text = sentiment_analyser.preprocess_text(text)\n \n # set dates for stock price data\n start = submissions.created_utc.min().date() - timedelta(weeks=2)\n end = submissions.created_utc.max().date() + timedelta(weeks=2)\n\n # get historical stock price data\n price_data = data.DataReader(\n name=ticker,\n data_source='yahoo',\n start=start,\n end=end,\n )\n \n # convert to json for storage\n submissions = 
submissions.to_json(date_format='iso')\n price_data = price_data.to_json(date_format='iso')\n preprocessed_text = json.dumps(preprocessed_text)\n text = json.dumps(text)\n \n return [submissions, price_data, preprocessed_text, text, False]\n\n# create wordcloud and freqdist\n@app.callback(\n [\n Output('wordcloud', 'figure'),\n Output('freqdist', 'figure')\n ],\n [\n Input('preprocessed_text_store', 'children'),\n Input('max_words', 'value')\n ]\n)\ndef generate_wordcloud_freqdist(preprocessed_text, max_words):\n \"\"\"\n Create wordcloud and frequency distribution\n \"\"\"\n \n # read preprocessed text\n preprocessed_text = json.loads(preprocessed_text)\n\n # create wordcloud\n wordcloud = sentiment_analyser.create_wordcloud(\n preprocessed_text,\n max_words=max_words\n )\n \n # create freqdist\n freqdist = sentiment_analyser.create_freqdist(\n preprocessed_text,\n max_words=max_words\n )\n\n # create wordcloud figure\n wordcloud_fig = px.imshow(wordcloud)\n wordcloud_fig.update_layout(\n width=500, \n height=400, \n margin=dict(l=2, r=2, b=2, t=2),\n paper_bgcolor='rgba(0,0,0,0)'\n )\n wordcloud_fig.update_xaxes(showticklabels=False)\n wordcloud_fig.update_yaxes(showticklabels=False)\n wordcloud_fig.update_traces(hovertemplate=None, hoverinfo='skip')\n \n # create freqdist figure\n freqdist_fig = px.bar(freqdist, x='count', y='word')\n freqdist_fig.update_layout(\n width=400,\n height=400,\n margin=dict(l=10, r=10, b=10, t=10),\n paper_bgcolor='rgba(0,0,0,0)'\n )\n freqdist_fig.update_yaxes(autorange='reversed')\n\n return [wordcloud_fig, freqdist_fig]\n\n# create sentiment chart and table\n@app.callback(\n [\n Output('sentiment', 'figure'),\n Output('submissions_table', 'children')\n ],\n [\n Input('submissions_store', 'children'),\n Input('stock_price_store', 'children'),\n Input('preprocessed_text_store', 'children'),\n Input('unprocessed_text_store', 'children'),\n Input('sentiment_smoothness', 'value')\n ]\n)\ndef generate_sentiment(\n submissions,\n stock_prices,\n preprocessed_text,\n unprocessed_text,\n window\n ):\n \"\"\"\n Create rolling sentiment chart\n \"\"\"\n \n # get submissions, stock prices, preprocessed and unprocessed text\n submissions = pd.read_json(submissions)\n stock_prices = pd.read_json(stock_prices).Close\n preprocessed_text = json.loads(preprocessed_text)\n unprocessed_text = json.loads(unprocessed_text)\n \n # get sentiment scores\n sentiment = sentiment_analyser.sentiment_score(\n preprocessed_text,\n customise_vader=True\n )\n \n # merge sentiment scores with submissions\n merged = utils.merge_sentiment_submissions(\n sentiment_scores=sentiment,\n text=unprocessed_text,\n submissions=submissions,\n on='title'\n )\n \n # calculate rolling sentiment\n merged['rolling_sentiment'] = merged.sentiment_score \\\n .rolling(window) \\\n .mean()\n\n # create rolling sentiment figure\n fig = plotly.subplots.make_subplots(\n rows=2,\n cols=1,\n shared_xaxes=True,\n subplot_titles=[f'Stock Price', 'Rolling Sentiment'],\n row_heights=[0.7, 0.3],\n vertical_spacing=0.1\n )\n price_line = go.Scatter(\n x=stock_prices.index,\n y=stock_prices.values,\n mode='lines',\n name='close price'\n )\n sentiment_line = go.Scatter(\n x=merged.created_utc,\n y=merged.rolling_sentiment,\n mode='lines',\n name='rolling sentiment'\n )\n fig.add_trace(price_line, row=1, col=1)\n fig.add_trace(sentiment_line, row=2, col=1)\n fig.update_layout(\n width=800,\n height=800,\n showlegend=False,\n paper_bgcolor='rgba(0,0,0,0)'\n )\n \n # create submissions table\n submissions_table = 
app_components.charts.generate_table(merged)\n \n return [fig, submissions_table]\n\n\nif __name__ == \"__main__\":\n app.run_server(debug=True)\n", "id": "2306051", "language": "Python", "matching_score": 3.949906826019287, "max_stars_count": 2, "path": "app.py" }, { "content": "from datetime import datetime\n\nimport pandas as pd\nfrom pandas_datareader import data\n\nfrom sentiment_analysis.API.reddit import RedditAPI\n\n\ndef merge_sentiment_submissions(\n sentiment_scores: list,\n text: list,\n submissions: pd.DataFrame,\n on: str\n ) -> pd.DataFrame:\n \"\"\"\n Merge sentiment scores from SentimentAnalyser.sentiment_scores with\n reddit submissions from API.reddit.RedditAPI\n\n Args:\n sentiment_scores (list): sentiment scores list\n text (list): text used to generate sentiment scores\n submissions (pd.DataFrame): submissions from reddit API\n \n Note: It is assumed that sentiment scores and text are correctly ordered\n \"\"\"\n \n # create dataframe with text and its corresponding sentiment score\n text_sentiment = pd.DataFrame(\n zip(text, sentiment_scores),\n columns=[on, 'sentiment_score']\n )\n \n # merge with submissions\n merged = pd.merge(\n left=submissions,\n right=text_sentiment,\n on=on\n ).sort_values('created_utc')\n \n return merged\n\ndef get_data(\n ticker: str,\n limit: int,\n subreddits: list,\n before: str,\n after: str,\n minimum_upvote: int\n ) -> pd.DataFrame:\n \"\"\"\n Retrieve submissions from RedditAPI using predefined parameters\n\n Args:\n ticker (str): ticker to search for\n limit (int): maximum number of submissions to return\n subreddits (list): list of subreddits to search from\n before (str): maximum submission date\n after (str): minimum submission date\n minimum_upvote (int): filter submissions with minimum_upvote or more\n\n Returns:\n pd.DataFrame: submissions\n \"\"\"\n \n reddit = RedditAPI()\n \n # convert date to epoch format for Pushshift API ('%m' is month; '%M' would be minutes)\n before = datetime.strptime(before[:10], '%Y-%m-%d').strftime('%s')\n after = datetime.strptime(after[:10], '%Y-%m-%d').strftime('%s')\n \n # parameters\n fields = [\n 'id',\n 'created_utc',\n 'subreddit',\n 'title',\n 'selftext',\n 'score'\n ]\n\n params = {\n 'ticker': ticker,\n 'title': ticker,\n 'sort_type': 'created_utc',\n 'sort': 'desc',\n 'limit': limit,\n 'fields': fields,\n 'subreddit': subreddits,\n 'before': before,\n 'after': after,\n 'score': f'>{minimum_upvote}'\n }\n \n # get submissions\n submissions = reddit.get_submissions(params)\n \n # get historical stock price data\n price_data = data.DataReader(\n name=ticker,\n data_source='yahoo',\n start=submissions.created_utc.min().date(),\n end=submissions.created_utc.max().date(),\n )\n \n return submissions, price_data\n", "id": "7869885", "language": "Python", "matching_score": 3.2131357192993164, "max_stars_count": 2, "path": "sentiment_analysis/sentiment/utils.py" }, { "content": "\"\"\"\nRedditScraper is responsible for interfacing with and retrieving data from \nthe PRAW and Pushshift APIs.\n\"\"\"\n\nimport pandas as pd\nimport praw\nimport requests\nfrom praw.models import MoreComments\nfrom tqdm import tqdm\n\nfrom config import Loggers, PRAWConfig\n\n\nclass RedditAPI:\n \"\"\"\n The RedditScraper class allows interaction with the Pushshift and PRAW APIs.\n\n The Pushshift API allows us to get a large number of submissions for a given query.\n The PRAW API is then used to get the comments from each of those submissions.\n \"\"\"\n\n def __init__(self):\n\n # Create praw.Reddit object with reddit OAuth creds\n # Reddit application creds created at https://www.reddit.com/prefs/apps\n
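 # Credentials are loaded from the project's .env file via\n # config.PRAWConfig (REDDIT_CLIENT_ID, REDDIT_CLIENT_SECRET,\n # REDDIT_USER_AGENT).\n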
 # Reddit class is a gateway to Reddit's API\n self.reddit = praw.Reddit(\n client_id=PRAWConfig.REDDIT_CLIENT_ID,\n client_secret=PRAWConfig.REDDIT_CLIENT_SECRET,\n user_agent=PRAWConfig.REDDIT_USER_AGENT\n )\n\n # Pushshift API endpoint for submissions\n self.submission_endpoint = \"https://api.pushshift.io/reddit/search/submission/\"\n \n self.logger = Loggers.console\n\n def get_submissions(self, params: dict) -> pd.DataFrame:\n \"\"\" Get submissions from Pushshift API\n\n Args:\n params (dict):\n Dictionary with request parameters as keys. Full list of\n Pushshift parameters available at https://pushshift.io/api-parameters/\n\n Returns:\n [pd.DataFrame]: Pandas DataFrame of all submissions\n\n Note:\n Using sort parameters other than descending created_utc may\n result in unexpected behavior.\n \"\"\"\n \n self.logger.info('Retrieving submissions based on search criteria')\n \n # check parameters\n params = self._check_params(params)\n \n ticker = params.pop('ticker')\n after = int(params['after'])\n before = int(params['before'])\n limit = int(params['limit'])\n pbar = tqdm(total=limit, desc='submissions')\n before_decay = 18000 # 18000s = 5hrs\n \n submissions = []\n # Request submissions\n while before > after and len(submissions) < limit:\n\n # Request and retrieve data in JSON format\n r = requests.get(url=self.submission_endpoint, params=params)\n \n # try to extract submissions\n try:\n data = r.json()['data']\n \n # Page to next set of submissions by setting before to\n # the last submissions created_utc\n params['before'] = data[-1]['created_utc']\n submissions.extend(data)\n \n # Update progress bar\n pbar.update(len(data))\n \n # decrease before if no submissions found\n except IndexError:\n params['before'] = int(params['before']) - before_decay\n \n except Exception as e:\n self.logger.warning(e, exc_info=True)\n break\n \n self.logger.info('%s submissions found', len(submissions))\n pbar.close()\n \n # convert submissions to dataframe and update scores\n if submissions:\n submissions = self._process_submissions(submissions, ticker)\n\n return submissions\n\n def get_comments(self, submission_ids: list) -> pd.DataFrame:\n \"\"\"\n Returns list of top level comments in submission\n\n Args:\n submission_ids (list): list of submission_ids\n\n Returns:\n [pd.DataFrame]: Pandas DataFrame of all comments\n \"\"\"\n\n comments = {\n 'created_utc': [],\n 'submission_id': [],\n 'comment_id': [],\n 'body': [],\n 'score': []\n }\n\n self.logger.info('Retrieving comments for requested submissions')\n for submission_id in tqdm(submission_ids, desc='Getting comments'):\n\n # Catch bad IDs\n try:\n # Get submission instance from submission_id\n submission = self.reddit.submission(id=submission_id)\n except Exception:\n self.logger.warning('Bad ID %s', submission_id)\n continue\n\n # Get top level comments\n for top_level_comment in submission.comments:\n # Ignore 'load more comments' and 'continue this thread' links\n if isinstance(top_level_comment, MoreComments):\n continue\n\n # Append data to comments dictionary\n comments['created_utc'].append(top_level_comment.created_utc)\n comments['submission_id'].append(submission_id)\n comments['comment_id'].append(top_level_comment.id)\n comments['body'].append(top_level_comment.body)\n comments['score'].append(top_level_comment.score)\n\n # Convert comments to pandas DataFrame\n comments_df = pd.DataFrame(comments)\n comments_df['created_utc'] = pd.to_datetime(comments_df.created_utc, unit='s')\n \n return comments_df\n
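\n # Paging note: Pushshift returns batches sorted by created_utc desc, so\n # moving params['before'] to the last record's created_utc fetches the\n # next older batch. The same pattern in isolation (names hypothetical):\n #\n # while need_more:\n # batch = requests.get(endpoint, params=params).json()['data']\n # if not batch:\n # break\n # params['before'] = batch[-1]['created_utc']\n # results.extend(batch)\n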
\n def _process_submissions(self, submissions, ticker):\n \"\"\"\n Convert submissions to dataframe and update submission scores\n\n Args:\n submissions (list): list of submissions\n ticker (str): relevant ticker\n \"\"\"\n\n # Convert submissions to pandas dataframe and add ticker\n submissions_df = pd.DataFrame(submissions)\n submissions_df['created_utc'] = pd.to_datetime(submissions_df.created_utc, unit='s')\n submissions_df['ticker'] = ticker\n \n # Get updated submission scores\n updated_scores = self.get_submission_details(\n submission_ids=submissions_df.id,\n fields=['id', 'score']\n )\n\n # Merge updated_scores on id\n submissions_df = pd.merge(\n left=submissions_df,\n right=updated_scores,\n on='id'\n )\n\n # Clean up\n submissions_df.drop('score_x', axis=1, inplace=True)\n submissions_df.rename(\n columns={'id': 'submission_id', 'score_y': 'score'},\n inplace=True\n )\n\n return submissions_df\n\n def get_subreddit_details(self, subreddits: list) -> pd.DataFrame:\n \"\"\"\n Get information about a subreddit \n\n Args:\n subreddits (list): list of subreddit names\n \n Returns:\n [pd.DataFrame]: Subreddit details\n \"\"\"\n\n # Subreddit attributes to collect\n subreddit_info = {\n 'name': [],\n 'subreddit_id': [],\n 'subscribers': []\n }\n \n self.logger.info('Retrieving updated subreddit details')\n # Collect information about each subreddit given\n for subreddit in tqdm(subreddits, desc='subreddit'):\n # Find subreddit through PRAW\n sub = self.reddit.subreddit(subreddit)\n # Add data to dictionary\n subreddit_info['name'].append(subreddit)\n subreddit_info['subreddit_id'].append(sub.name)\n subreddit_info['subscribers'].append(sub.subscribers)\n\n return pd.DataFrame(subreddit_info)\n \n def get_submission_details(self, submission_ids: list, fields: list) -> pd.DataFrame:\n \"\"\"\n Get information about submissions from PRAW API\n \n Args:\n submission_ids (list): list of submission id strings\n fields (list): list of field names to get\n \n Returns:\n [pd.DataFrame]: Submission details\n \"\"\"\n \n # Initialise dictionary from fields list\n submissions_info = {k:[] for k in fields}\n \n self.logger.info('Retrieving updated details for %s submissions', len(submission_ids))\n # Get information for each submission_id\n for submission_id in tqdm(submission_ids, desc='submissions'):\n # Create PRAW submission class\n submission = self.reddit.submission(id=submission_id)\n # Append the required attributes from the submission to the dictionary\n for attr in fields:\n submissions_info[attr].append(getattr(submission, attr))\n \n return pd.DataFrame(submissions_info)\n \n def get_comment_details(self, comment_ids: list, fields: list) -> pd.DataFrame:\n \"\"\"\n Get information for comments from PRAW API\n \n Args:\n comment_ids (list): list of comment id strings\n fields (list): list of field names to get\n \n Returns:\n [pd.DataFrame]: Comment details\n \"\"\"\n \n # Initialise dictionary from fields list\n comments_info = {k:[] for k in fields}\n \n self.logger.info('Retrieving updated details for %s comments', len(comment_ids))\n # Get information for each comment_id\n for comment_id in tqdm(comment_ids, desc='comments'):\n # Create PRAW comment class\n comment = self.reddit.comment(id=comment_id)\n # Append the required attributes from the comment to the dictionary\n for attr in fields:\n comments_info[attr].append(getattr(comment, attr))\n\n return pd.DataFrame(comments_info)\n\n def _check_params(self, params: dict) -> dict:\n \"\"\"\n Check if parameters 
are valid\n\n Args:\n params (dict): parameters dictionary\n \"\"\"\n # Append created_utc to fields if not included\n if 'fields' in list(params.keys()):\n if 'created_utc' not in params['fields']:\n params['fields'].append('created_utc')\n\n return params\n", "id": "5752841", "language": "Python", "matching_score": 3.4072606563568115, "max_stars_count": 2, "path": "sentiment_analysis/API/reddit.py" }, { "content": "import os\nimport logging\nimport logging.config\nfrom pathlib import Path\n\nimport yaml\nfrom dotenv import load_dotenv\n\n# Load environment variables\ndatabase_config_path = Path('.') / '.env'\nload_dotenv(dotenv_path=database_config_path)\n\n# Load logging configurations\nlogging_config_path = Path('.') / 'log_config.yaml'\nwith open(logging_config_path, 'r') as f:\n config = yaml.safe_load(f.read())\n logging.config.dictConfig(config)\n\nclass PRAWConfig:\n \"\"\"\n Set configuration variables from .env file\n \"\"\"\n # PRAW credentials\n REDDIT_CLIENT_ID = os.getenv('REDDIT_CLIENT_ID')\n REDDIT_CLIENT_SECRET = os.getenv('REDDIT_CLIENT_SECRET')\n REDDIT_USER_AGENT = os.getenv('REDDIT_USER_AGENT')\n\nclass Loggers:\n \"\"\"\n Provides pre-configured loggers\n \"\"\"\n console = logging.getLogger('console')\n \n", "id": "1669276", "language": "Python", "matching_score": 0.5595490336418152, "max_stars_count": 2, "path": "config.py" } ]
2.036421
PatrickSantoZZ
[ { "content": "from browser.html import *\n\n\ndef gen_info():\n\tshipstats = {\n\t\t\"Epheria Sailboat\": {\n\t\t\t\"HP\": \"1,000,000\",\n\t\t\t\"Rations\": \"1,000,000\",\n\t\t\t\"Base LT\": \"5,000\",\n\t\t\t\"Speed\": \"100%\",\n\t\t\t\"Accel\": \"100%\",\n\t\t\t\"Turn\": \"110%\",\n\t\t\t\"Brake\": \"110%\",\n\t\t\t\"Inventory\": \"25 slots\",\n\t\t\t\"Cabins\": \"10\",\n\t\t\t\"Cannon Count\": \"1 per side(player)\",\n\t\t\t\"Reload\": \"17s\"\n\t\t},\n\t\t\"Improved Sailboat\": {\n\t\t\t\"HP\": \"1,000,000\",\n\t\t\t\"Rations\": \"1,000,000\",\n\t\t\t\"Base LT\": \"5,000\",\n\t\t\t\"Speed\": \"100%\",\n\t\t\t\"Accel\": \"100%\",\n\t\t\t\"Turn\": \"110%\",\n\t\t\t\"Brake\": \"110%\",\n\t\t\t\"Inventory\": \"25 slots\",\n\t\t\t\"Cabins\": \"10\",\n\t\t\t\"Cannon Count\": \"2 per side(captain)\",\n\t\t\t\"Reload\": \"15s\"\n\t\t},\n\t\t\"<NAME>\": {\n\t\t\t\"HP\": \"1,000,000\",\n\t\t\t\"Rations\": \"1,100,000\",\n\t\t\t\"Base LT\": \"10,000\",\n\t\t\t\"Speed\": \"100%\",\n\t\t\t\"Accel\": \"100%\",\n\t\t\t\"Turn\": \"110%\",\n\t\t\t\"Brake\": \"110%\",\n\t\t\t\"Inventory\": \"30 slots\",\n\t\t\t\"Cabins\": \"30\",\n\t\t\t\"Cannon Count\": \"2 per side(captain)\",\n\t\t\t\"Reload\": \"15s\"\n\t\t},\n\t\t\"Carrack (Advance)\": {\n\t\t\t\"HP\": \"1,350,000\",\n\t\t\t\"Rations\": \"1,300,000\",\n\t\t\t\"Base LT\": \"16,500\",\n\t\t\t\"Speed\": \"110%\",\n\t\t\t\"Accel\": \"100%\",\n\t\t\t\"Turn\": \"115%\",\n\t\t\t\"Brake\": \"115%\",\n\t\t\t\"Inventory\": \"40 slots\",\n\t\t\t\"Cabins\": \"50\",\n\t\t\t\"Cannon Reload\": \"15s\"\n\t\t},\n\t\t\"Carrack (Balance)\": {\n\t\t\t\"HP\": \"1,300,000\",\n\t\t\t\"Rations\": \"1,400,000\",\n\t\t\t\"Base LT\": \"15,000\",\n\t\t\t\"Speed\": \"115%\",\n\t\t\t\"Accel\": \"100%\",\n\t\t\t\"Turn\": \"115%\",\n\t\t\t\"Brake\": \"115%\",\n\t\t\t\"Inventory\": \"35 slots\",\n\t\t\t\"Cabins\": \"50\",\n\t\t\t\"Cannon Reload\": \"14s\"\n\t\t},\n\t\t\"<NAME>\": {\n\t\t\t\"HP\": \"1,200,000\",\n\t\t\t\"Rations\": \"1,000,000\",\n\t\t\t\"Base LT\": \"4,000\",\n\t\t\t\"Speed\": \"110%\",\n\t\t\t\"Accel\": \"110%\",\n\t\t\t\"Turn\": \"120%\",\n\t\t\t\"Brake\": \"120%\",\n\t\t\t\"Inventory\": \"12 slots\",\n\t\t\t\"Cabins\": \"10\",\n\t\t\t\"Cannon Count\": \"2 per side(player)\",\n\t\t\t\"Reload\": \"17s\"\n\t\t},\n\t\t\"Improved Frigate\": {\n\t\t\t\"HP\": \"1,200,000\",\n\t\t\t\"Rations\": \"1,000,000\",\n\t\t\t\"Base LT\": \"4,000\",\n\t\t\t\"Speed\": \"110%\",\n\t\t\t\"Accel\": \"110%\",\n\t\t\t\"Turn\": \"120%\",\n\t\t\t\"Brake\": \"120%\",\n\t\t\t\"Inventory\": \"12 slots\",\n\t\t\t\"Cabins\": \"10\",\n\t\t\t\"Cannon Count\": \"4 per side(captain)\",\n\t\t\t\"Reload\": \"15s\"\n\t\t},\n\t\t\"<NAME>\": {\n\t\t\t\"HP\": \"1,200,000\",\n\t\t\t\"Rations\": \"1,200,000\",\n\t\t\t\"Base LT\": \"8,000\",\n\t\t\t\"Speed\": \"110%\",\n\t\t\t\"Accel\": \"110%\",\n\t\t\t\"Turn\": \"120%\",\n\t\t\t\"Brake\": \"120%\",\n\t\t\t\"Inventory\": \"15 slots\",\n\t\t\t\"Cabins\": \"30\",\n\t\t\t\"Cannon Count\": \"4 per side(captain)\",\n\t\t\t\"Reload\": \"15s\"\n\t\t},\n\t\t\"Carrack (Volante)\": {\n\t\t\t\"HP\": \"1,250,000\",\n\t\t\t\"Rations\": \"1,400,000\",\n\t\t\t\"Base LT\": \"13,500\",\n\t\t\t\"Speed\": \"120%\",\n\t\t\t\"Accel\": \"110%\",\n\t\t\t\"Turn\": \"115%\",\n\t\t\t\"Brake\": \"125%\",\n\t\t\t\"Inventory\": \"20 slots\",\n\t\t\t\"Cabins\": \"50\",\n\t\t\t\"Cannon Reload\": \"13s\"\n\t\t},\n\t\t\"Carrack (Valor)\": {\n\t\t\t\"HP\": \"1,300,000\",\n\t\t\t\"Rations\": \"1,500,000\",\n\t\t\t\"Base LT\": \"13,500\",\n\t\t\t\"Speed\": \"115%\",\n\t\t\t\"Accel\": 
\"110%\",\n\t\t\t\"Turn\": \"115%\",\n\t\t\t\"Brake\": \"125%\",\n\t\t\t\"Inventory\": \"20 slots\",\n\t\t\t\"Cabins\": \"50\",\n\t\t\t\"Cannon Reload\": \"12s\"\n\t\t},\n\t}\n\torder = [\n\t\t[\"Epheria Sailboat\", \"Improved Sailboat\", \"Epheria Frigate\", \"Improved Frigate\"],\n\t\t[\"<NAME>\", \"<NAME>\"],\n\t\t[\"Carrack (Advance)\", \"Carrack (Balance)\", \"Carrack (Volante)\", \"Carrack (Valor)\"]\n\t]\n\tret = P(\"More information can be found \" + A(\"at this spreadsheet\", href=\"https://docs.google.com/document/d/1basknMfrfcH6AzJD9PkzeUunqrIGTuS6SfXPf3a7pso/preview\", target=\"_blank\") +\" or \" + A(\"these patch notes\", href=\"https://www.blackdesertonline.com/news/view/3216\", target=\"_blank\"))\n\tret += P(\"Barter items that you can trade for ship parts unlock as you finish more trades. You can always trade t1 barter items for verdant stone coupon though.\")\n\tret += P(\"Ship parts used for upgrade need to be full durability.\")\n\tret += P(\"All sea monsters can drop parts for upgrading ships. There is no list of which drops are where yet. (Nov-6)\")\n\tret += P(\"Old Moon Guild quests for the same monster are mutually exclusive(pick 1). EG Nineshark and Young Nineshark.\")\n\tret += H2(\"Upgrade Paths\") + BR() + CANVAS(id='shipchart', width=820, height=400) + BR()\n\tret += H2(\"Base Ship Stats\")\n\tfor table in order:\n\t\tt = TABLE()\n\t\ttr = TR(TH(\"Stat\"))\n\t\tfor ship in table:\n\t\t\ttr <= TH(ship)\n\t\tt <= tr\n\n\t\tfor key in shipstats[table[0]]:\n\t\t\ttr = TR(TD(key))\n\t\t\tfor ship in table:\n\t\t\t\ttr <= TD(shipstats[ship][key])\n\t\t\tt <= tr\n\n\t\tret += t + BR()\n\treturn ret", "id": "6218194", "language": "Python", "matching_score": 1.9474784135818481, "max_stars_count": 5, "path": "information.py" }, { "content": "# Simple dictionary of total coin costs for ships to show a progress %\n\ntccost = {\n\t\"Information\": 0,\n\t\"<NAME>\": 0,\n\t\"Improved Epheria Sailboat\": 0,\n\t\"<NAME>\": 4160,\n\t\"Carrack (Advance)\": 1164950,\n\t\"Carrack (Balance)\": 1180150,\n\t\"<NAME>\": 0,\n\t\"Improved E<NAME>\": 0,\n\t\"<NAME>\": 4000,\n\t\"Carrack (Volante)\": 1344950,\n\t\"Carrack (Valor)\": 1352950\n}\n", "id": "2859962", "language": "Python", "matching_score": 3.6719818115234375, "max_stars_count": 5, "path": "totals.py" }, { "content": "# list of supported ships\n\nships = [\n\t\"Information\",\n\t\"Epheria Sailboat\",\n\t\"Improved Epheria Sailboat\",\n\t\"<NAME>\",\n\t\"Carrack (Advance)\",\n\t\"Carrack (Balance)\",\n\t\"<NAME>\",\n\t\"Improved Epheria Frigate\",\n\t\"<NAME>\",\n\t\"Carrack (Volante)\",\n\t\"Carrack (Valor)\",\n]", "id": "1896540", "language": "Python", "matching_score": 0.34788477420806885, "max_stars_count": 5, "path": "ships.py" }, { "content": "# A recipe dictionary\n\nrecipes = {\n\t# Ships\n\t\"Epheria Sailboat\": {\n\t\t\"Bartali Sailboat\": 1,\n\t\t\"Ship Upgrade Permit: Epheria Sailboat\": 1,\n\t\t\"+10 Bartali Sailboat: Old Prow\": 1,\n\t\t\"+10 Bartali Sailboat: Old Plating\": 1,\n\t\t\"+10 Bartali Sailboat: Old Cannon\": 1,\n\t\t\"+10 Bartali Sailboat: Old Sail\": 1,\n\t\t\"Standardized Timber Square\": 800,\n\t\t\"Steel\": 600,\n\t\t\"Pine Plywood\": 1500,\n\t\t\"Flax Fabric\": 300,\n\t\t\"Hard Pillar\": 100\n\t},\n\t\"Improved Epheria Sailboat\": {\n\t\t\"Epheria Sailboat\": 1,\n\t\t\"Bartali Sailboat: Old Prow\": 1,\n\t\t\"Bartali Sailboat: Old Plating\": 1,\n\t\t\"Bartali Sailboat: Old Cannon\": 1,\n\t\t\"Bartali Sailboat: Old Sail\": 1,\n\t\t\"Standardized Timber Square\": 250,\n\t\t\"Steel\": 
200,\n\t\t\"Pine Plywood\": 500,\n\t\t\"Flax Fabric\": 100,\n\t\t\"Hard Pillar\": 30,\n\t\t\"Ultimate Armor Reform Stone\": 10\n\t},\n\t\"<NAME>\": {\n\t\t\"Epheria Sailboat\": 1,\n\t\t\"Ship Upgrade Permit: Epheria Caravel\": 1,\n\t\t\"+10 Epheria: Old Prow\": 1,\n\t\t\"+10 Epheria: Old Plating\": 1,\n\t\t\"+10 Epheria: Old Cannon\": 1,\n\t\t\"+10 Epheria: Old Sail\": 1,\n\t\t\"Graphite Ingot for Upgrade\": 100,\n\t\t\"Timber for Upgrade\": 100,\n\t\t\"Adhesive for Upgrade\": 100,\n\t\t\"Island Tree Coated Plywood\": 100,\n\t\t\"Rock Salt Ingot\": 100,\n\t\t\"Deep Sea Memory Filled Glue\": 4,\n\t\t\"Seaweed Stalk\": 6\n\t},\n\t\"Carrack (Advance)\": {\n\t\t\"<NAME>\": 1,\n\t\t\"+10 Epheria Caravel: Black Dragon Prow\": 1,\n\t\t\"+10 Epheria Caravel: Upgraded Plating (Blue)\": 1,\n\t\t\"+10 Epheria Caravel: <NAME>\": 1,\n\t\t\"+10 Epheria Caravel: Stratus Wind Sail\": 1,\n\t\t\"Moon Vein Flax Fabric\": 180,\n\t\t\"Deep Tide-Dyed Standardized Timber Square\": 144,\n\t\t\"Brilliant Rock Salt Ingot\": 35,\n\t\t\"Tear of the Ocean\": 42,\n\t\t\"Brilliant Pearl Shard\": 35\n\t},\n\t\"Carrack (Balance)\": {\n\t\t\"<NAME>\": 1,\n\t\t\"+10 Epheria Caravel: Black Dragon Prow\": 1,\n\t\t\"+10 Epheria Caravel: Upgraded Plating (Blue)\": 1,\n\t\t\"+10 Epheria Caravel: <NAME>\": 1,\n\t\t\"+10 Epheria Caravel: Stratus Wind Sail\": 1,\n\t\t\"Moon Vein Flax Fabric\": 180,\n\t\t\"Deep Tide-Dyed Standardized Timber Square\": 144,\n\t\t\"Brilliant Rock Salt Ingot\": 30,\n\t\t\"Tear of the Ocean\": 50,\n\t\t\"Brilliant Pearl Shard\": 30\n\t},\n\t\"<NAME>\": {\n\t\t\"Bartali Sailboat\": 1,\n\t\t\"+10 Bartali Sailboat: Old Prow\": 1,\n\t\t\"+10 Bartali Sailboat: Old Plating\": 1,\n\t\t\"+10 Bartali Sailboat: Old Cannon\": 1,\n\t\t\"+10 Bartali Sailboat: Old Sail\": 1,\n\t\t\"Standardized Timber Square\": 1000,\n\t\t\"Jade Coral Ingot\": 600,\n\t\t\"Pine Coated Plywood\": 1500,\n\t\t\"Enhanced Flax Fabric\": 300,\n\t\t\"Hard Pillar\": 100\n\n\t},\n\t\"Improved Epheria Frigate\": {\n\t\t\"Epheria Frigate\": 1,\n\t\t\"Bartali Sailboat: Old Prow\": 1,\n\t\t\"Bartali Sailboat: Old Plating\": 1,\n\t\t\"Bartali Sailboat: Old Cannon\": 1,\n\t\t\"Bartali Sailboat: Old Sail\": 1,\n\t\t\"Standardized Timber Square\": 250,\n\t\t\"Steel\": 200,\n\t\t\"Pine Plywood\": 500,\n\t\t\"Flax Fabric\": 100,\n\t\t\"Hard Pillar\": 30,\n\t\t\"Ultimate Weapon Reform Stone\": 10\n\t},\n\t\"<NAME>\": {\n\t\t\"Epheria Frigate\": 1,\n\t\t\"Ship Upgrade Permit: Epheria Galleass\": 1,\n\t\t\"+10 Epheria: Old Prow\": 1,\n\t\t\"+10 Epheria: Old Plating\": 1,\n\t\t\"+10 Epheria: Old Cannon\": 1,\n\t\t\"+10 Epheria: Old Sail\": 1,\n\t\t\"Graphite Ingot for Upgrade\": 100,\n\t\t\"Timber for Upgrade\": 100,\n\t\t\"Adhesive for Upgrade\": 100,\n\t\t\"Island Tree Coated Plywood\": 100,\n\t\t\"Tide-Dyed Standardized Timber Square\": 6,\n\t\t\"Cobalt Ingot\": 2,\n\t\t\"Moon Scale Plywood\": 10\n\t},\n\t\"Carrack (Volante)\": {\n\t\t\"Epheria Galleass\": 1,\n\t\t\"+10 Epheria Galleass: Black Dragon Prow\": 1,\n\t\t\"+10 Epheria Galleass: Upgraded Plating (Blue)\": 1,\n\t\t\"+10 Epheria Galleass: <NAME>\": 1,\n\t\t\"+10 Epheria Galleass: Stratus Wind Sail\": 1,\n\t\t\"Moon Vein Flax Fabric\": 210,\n\t\t\"Deep Tide-Dyed Standardized Timber Square\": 144,\n\t\t\"Brilliant Rock Salt Ingot\": 30,\n\t\t\"Tear of the Ocean\": 42,\n\t\t\"Brilliant Pearl Shard\": 30\n\t},\n\t\"Carrack (Valor)\": {\n\t\t\"Epheria Galleass\": 1,\n\t\t\"+10 Epheria Galleass: Black Dragon Prow\": 1,\n\t\t\"+10 Epheria Galleass: Upgraded Plating (Blue)\": 1,\n\t\t\"+10 Epheria 
Galleass: <NAME>\": 1,\n\t\t\"+10 Epheria Galleass: Stratus Wind Sail\": 1,\n\t\t\"Moon Vein Flax Fabric\": 180,\n\t\t\"Deep Tide-Dyed Standardized Timber Square\": 170,\n\t\t\"Brilliant Rock Salt Ingot\": 30,\n\t\t\"Tear of the Ocean\": 42,\n\t\t\"Brilliant Pearl Shard\": 30\n\t},\n\n\t# Upgrade parts\n\t\"Graphite Ingot for Upgrade\": {\"Zinc Ingot\": 100, \"Sea Monster's Ooze\": 1},\n\t\"Timber for Upgrade\": {\"Old Tree Bark\": 100, \"Red Tree Lump\": 100, \"Sea Monster's Ooze\": 1},\n\t\"Adhesive for Upgrade\": {\"White Cedar Sap\": 100, \"Acacia Sap\": 100, \"Elder Tree Sap\": 100, \"Sea Monster's Ooze\": 1},\n\n\t\"Epheria Caravel: Black Dragon Prow\": {\n\t\t\"+10 Epheria Caravel: Brass Prow\": 1,\n\t\t\"Ruddy Manganese Nodule\": 50,\n\t\t\"Enhanced Island Tree Coated Plywood\": 300,\n\t\t\"Seaweed Stalk\": 125,\n\t\t\"Great Ocean Dark Iron\": 150\n\t},\n\t\"E<NAME>: Upgraded Plating (Blue)\": {\n\t\t\"+10 Epheria Caravel: Upgraded Plating (Green)\": 1,\n\t\t\"Pure Pearl Crystal\": 45,\n\t\t\"Cox Pirates' Artifact (Parley Beginner)\": 60,\n\t\t\"Cox Pirates' Artifact (Combat)\": 60,\n\t\t\"Moon Scale Plywood\": 200\n\t},\n\t\"<NAME>: <NAME>\": {\n\t\t\"+10 <NAME>: Verisha Cannon\": 1,\n\t\t\"Tide-Dyed Standardized Timber Square\": 180,\n\t\t\"Cox Pirates' Artifact (Combat)\": 60,\n\t\t\"Moon Scale Plywood\": 200,\n\t\t\"Bright Reef Piece\": 180\n\t},\n\t\"<NAME>: Stratus Wind Sail\": {\n\t\t\"+10 Epheria Caravel: White Wind Sail\": 1,\n\t\t\"Ruddy Manganese Nodule\": 40,\n\t\t\"Cox Pirates' Artifact (Parley Expert)\": 30,\n\t\t\"Seaweed Stalk\": 80,\n\t\t\"Luminous Cobalt Ingot\": 30\n\t},\n\n\t\"<NAME>: Black Dragon Prow\": {\n\t\t\"+10 Epheria Galleass: White Horn Prow\": 1,\n\t\t\"Ruddy Manganese Nodule\": 50,\n\t\t\"Enhanced Island Tree Coated Plywood\": 300,\n\t\t\"Seaweed Stalk\": 125,\n\t\t\"Great Ocean Dark Iron\": 150\n\t},\n\t\"Epheria Galleass: Upgraded Plating (Blue)\": {\n\t\t\"+10 Epheria Galleass: Upgraded Plating (Green)\": 1,\n\t\t\"Pure Pearl Crystal\": 45,\n\t\t\"Cox Pirates' Artifact (Parley Beginner)\": 60,\n\t\t\"Cox Pirates' Artifact (Combat)\": 125,\n\t\t\"Moon Scale Plywood\": 300\n\t},\n\t\"Epheria Galleass: <NAME>\": {\n\t\t\"+10 Epheria Galleass: Verisha Cannon\": 1,\n\t\t\"Tide-Dyed Standardized Timber Square\": 180,\n\t\t\"Cox Pirates' Artifact (Combat)\": 125,\n\t\t\"Moon Scale Plywood\": 300,\n\t\t\"Bright Reef Piece\": 180\n\t},\n\t\"<NAME>: Stratus Wind Sail\": {\n\t\t\"+10 Epheria Galleass: White Wind Sail\": 1,\n\t\t\"Ruddy Manganese Nodule\": 50,\n\t\t\"Cox Pirates' Artifact (Parley Expert)\": 30,\n\t\t\"Seaweed Stalk\": 125,\n\t\t\"Luminous Cobalt Ingot\": 30\n\t},\n\n\t# Gear (used for upgrading ship to next type)\n\t\"+10 Bartali Sailboat: Old Prow\": {\"+9 Bartali Sailboat: Old Prow\": 1, \"Black Stone (Armor)\": 10},\n\t\"+9 Bartali Sailboat: Old Prow\": {\"+8 Bartali Sailboat: Old Prow\": 1, \"Black Stone (Armor)\": 9},\n\t\"+8 Bartali Sailboat: Old Prow\": {\"+7 Bartali Sailboat: Old Prow\": 1, \"Black Stone (Armor)\": 8},\n\t\"+7 Bartali Sailboat: Old Prow\": {\"+6 Bartali Sailboat: Old Prow\": 1, \"Black Stone (Armor)\": 7},\n\t\"+6 Bartali Sailboat: Old Prow\": {\"+5 Bartali Sailboat: Old Prow\": 1, \"Black Stone (Armor)\": 6},\n\t\"+5 Bartali Sailboat: Old Prow\": {\"+4 Bartali Sailboat: Old Prow\": 1, \"Black Stone (Armor)\": 5},\n\t\"+4 Bartali Sailboat: Old Prow\": {\"+3 Bartali Sailboat: Old Prow\": 1, \"Black Stone (Armor)\": 4},\n\t\"+3 Bartali Sailboat: Old Prow\": {\"+2 Bartali Sailboat: Old Prow\": 1, \"Black Stone (Armor)\": 
3},\n\t\"+2 Bartali Sailboat: Old Prow\": {\"+1 Bartali Sailboat: Old Prow\": 1, \"Black Stone (Armor)\": 2},\n\t\"+1 Bartali Sailboat: Old Prow\": {\"Bartali Sailboat: Old Prow\": 1, \"Black Stone (Armor)\": 1},\n\t\"+10 Bartali Sailboat: Old Plating\": {\"+9 Bartali Sailboat: Old Plating\": 1, \"Black Stone (Armor)\": 10},\n\t\"+9 Bartali Sailboat: Old Plating\": {\"+8 Bartali Sailboat: Old Plating\": 1, \"Black Stone (Armor)\": 9},\n\t\"+8 Bartali Sailboat: Old Plating\": {\"+7 Bartali Sailboat: Old Plating\": 1, \"Black Stone (Armor)\": 8},\n\t\"+7 Bartali Sailboat: Old Plating\": {\"+6 Bartali Sailboat: Old Plating\": 1, \"Black Stone (Armor)\": 7},\n\t\"+6 Bartali Sailboat: Old Plating\": {\"+5 Bartali Sailboat: Old Plating\": 1, \"Black Stone (Armor)\": 6},\n\t\"+5 Bartali Sailboat: Old Plating\": {\"+4 Bartali Sailboat: Old Plating\": 1, \"Black Stone (Armor)\": 5},\n\t\"+4 Bartali Sailboat: Old Plating\": {\"+3 Bartali Sailboat: Old Plating\": 1, \"Black Stone (Armor)\": 4},\n\t\"+3 Bartali Sailboat: Old Plating\": {\"+2 Bartali Sailboat: Old Plating\": 1, \"Black Stone (Armor)\": 3},\n\t\"+2 Bartali Sailboat: Old Plating\": {\"+1 Bartali Sailboat: Old Plating\": 1, \"Black Stone (Armor)\": 2},\n\t\"+1 Bartali Sailboat: Old Plating\": {\"Bartali Sailboat: Old Plating\": 1, \"Black Stone (Armor)\": 1},\n\t\"+10 Bartali Sailboat: Old Cannon\": {\"+9 Bartali Sailboat: Old Cannon\": 1, \"Black Stone (Armor)\": 10},\n\t\"+9 Bartali Sailboat: Old Cannon\": {\"+8 Bartali Sailboat: Old Cannon\": 1, \"Black Stone (Armor)\": 9},\n\t\"+8 Bartali Sailboat: Old Cannon\": {\"+7 Bartali Sailboat: Old Cannon\": 1, \"Black Stone (Armor)\": 8},\n\t\"+7 Bartali Sailboat: Old Cannon\": {\"+6 Bartali Sailboat: Old Cannon\": 1, \"Black Stone (Armor)\": 7},\n\t\"+6 Bartali Sailboat: Old Cannon\": {\"+5 Bartali Sailboat: Old Cannon\": 1, \"Black Stone (Armor)\": 6},\n\t\"+5 Bartali Sailboat: Old Cannon\": {\"+4 Bartali Sailboat: Old Cannon\": 1, \"Black Stone (Armor)\": 5},\n\t\"+4 Bartali Sailboat: Old Cannon\": {\"+3 Bartali Sailboat: Old Cannon\": 1, \"Black Stone (Armor)\": 4},\n\t\"+3 Bartali Sailboat: Old Cannon\": {\"+2 Bartali Sailboat: Old Cannon\": 1, \"Black Stone (Armor)\": 3},\n\t\"+2 Bartali Sailboat: Old Cannon\": {\"+1 Bartali Sailboat: Old Cannon\": 1, \"Black Stone (Armor)\": 2},\n\t\"+1 Bartali Sailboat: Old Cannon\": {\"Bartali Sailboat: Old Cannon\": 1, \"Black Stone (Armor)\": 1},\n\t\"+10 Bartali Sailboat: Old Sail\": {\"+9 Bartali Sailboat: Old Sail\": 1, \"Black Stone (Armor)\": 10},\n\t\"+9 Bartali Sailboat: Old Sail\": {\"+8 Bartali Sailboat: Old Sail\": 1, \"Black Stone (Armor)\": 9},\n\t\"+8 Bartali Sailboat: Old Sail\": {\"+7 Bartali Sailboat: Old Sail\": 1, \"Black Stone (Armor)\": 8},\n\t\"+7 Bartali Sailboat: Old Sail\": {\"+6 Bartali Sailboat: Old Sail\": 1, \"Black Stone (Armor)\": 7},\n\t\"+6 Bartali Sailboat: Old Sail\": {\"+5 Bartali Sailboat: Old Sail\": 1, \"Black Stone (Armor)\": 6},\n\t\"+5 Bartali Sailboat: Old Sail\": {\"+4 Bartali Sailboat: Old Sail\": 1, \"Black Stone (Armor)\": 5},\n\t\"+4 Bartali Sailboat: Old Sail\": {\"+3 Bartali Sailboat: Old Sail\": 1, \"Black Stone (Armor)\": 4},\n\t\"+3 Bartali Sailboat: Old Sail\": {\"+2 Bartali Sailboat: Old Sail\": 1, \"Black Stone (Armor)\": 3},\n\t\"+2 Bartali Sailboat: Old Sail\": {\"+1 Bartali Sailboat: Old Sail\": 1, \"Black Stone (Armor)\": 2},\n\t\"+1 Bartali Sailboat: Old Sail\": {\"Bartali Sailboat: Old Sail\": 1, \"Black Stone (Armor)\": 1},\n\n\t\"+10 Epheria: Old Prow\": {\"+9 Epheria: Old Prow\": 1, 
\"Verdant Black Stone\": 10},\n\t\"+9 Epheria: Old Prow\": {\"+8 Epheria: Old Prow\": 1, \"Verdant Black Stone\": 9},\n\t\"+8 Epheria: Old Prow\": {\"+7 Epheria: Old Prow\": 1, \"Verdant Black Stone\": 8},\n\t\"+7 Epheria: Old Prow\": {\"+6 Epheria: Old Prow\": 1, \"Verdant Black Stone\": 7},\n\t\"+6 Epheria: Old Prow\": {\"+5 Epheria: Old Prow\": 1, \"Verdant Black Stone\": 6},\n\t\"+5 Epheria: Old Prow\": {\"+4 Epheria: Old Prow\": 1, \"Verdant Black Stone\": 5},\n\t\"+4 Epheria: Old Prow\": {\"+3 Epheria: Old Prow\": 1, \"Verdant Black Stone\": 4},\n\t\"+3 Epheria: Old Prow\": {\"+2 Epheria: Old Prow\": 1, \"Verdant Black Stone\": 3},\n\t\"+2 Epheria: Old Prow\": {\"+1 Epheria: Old Prow\": 1, \"Verdant Black Stone\": 2},\n\t\"+1 Epheria: Old Prow\": {\"Epheria: Old Prow\": 1, \"Verdant Black Stone\": 1},\n\t\"+10 Epheria: Old Plating\": {\"+9 Epheria: Old Plating\": 1, \"Verdant Black Stone\": 10},\n\t\"+9 Epheria: Old Plating\": {\"+8 Epheria: Old Plating\": 1, \"Verdant Black Stone\": 9},\n\t\"+8 Epheria: Old Plating\": {\"+7 Epheria: Old Plating\": 1, \"Verdant Black Stone\": 8},\n\t\"+7 Epheria: Old Plating\": {\"+6 Epheria: Old Plating\": 1, \"Verdant Black Stone\": 7},\n\t\"+6 Epheria: Old Plating\": {\"+5 Epheria: Old Plating\": 1, \"Verdant Black Stone\": 6},\n\t\"+5 Epheria: Old Plating\": {\"+4 Epheria: Old Plating\": 1, \"Verdant Black Stone\": 5},\n\t\"+4 Epheria: Old Plating\": {\"+3 Epheria: Old Plating\": 1, \"Verdant Black Stone\": 4},\n\t\"+3 Epheria: Old Plating\": {\"+2 Epheria: Old Plating\": 1, \"Verdant Black Stone\": 3},\n\t\"+2 Epheria: Old Plating\": {\"+1 Epheria: Old Plating\": 1, \"Verdant Black Stone\": 2},\n\t\"+1 Epheria: Old Plating\": {\"Epheria: Old Plating\": 1, \"Verdant Black Stone\": 1},\n\t\"+10 Epheria: Old Cannon\": {\"+9 Epheria: Old Cannon\": 1, \"Verdant Black Stone\": 10},\n\t\"+9 Epheria: Old Cannon\": {\"+8 Epheria: Old Cannon\": 1, \"Verdant Black Stone\": 9},\n\t\"+8 Epheria: Old Cannon\": {\"+7 Epheria: Old Cannon\": 1, \"Verdant Black Stone\": 8},\n\t\"+7 Epheria: Old Cannon\": {\"+6 Epheria: Old Cannon\": 1, \"Verdant Black Stone\": 7},\n\t\"+6 Epheria: Old Cannon\": {\"+5 Epheria: Old Cannon\": 1, \"Verdant Black Stone\": 6},\n\t\"+5 Epheria: Old Cannon\": {\"+4 Epheria: Old Cannon\": 1, \"Verdant Black Stone\": 5},\n\t\"+4 Epheria: Old Cannon\": {\"+3 Epheria: Old Cannon\": 1, \"Verdant Black Stone\": 4},\n\t\"+3 Epheria: Old Cannon\": {\"+2 Epheria: Old Cannon\": 1, \"Verdant Black Stone\": 3},\n\t\"+2 Epheria: Old Cannon\": {\"+1 Epheria: Old Cannon\": 1, \"Verdant Black Stone\": 2},\n\t\"+1 Epheria: Old Cannon\": {\"Epheria: Old Cannon\": 1, \"Verdant Black Stone\": 1},\n\t\"+10 Epheria: Old Sail\": {\"+9 Epheria: Old Sail\": 1, \"Verdant Black Stone\": 10},\n\t\"+9 Epheria: Old Sail\": {\"+8 Epheria: Old Sail\": 1, \"Verdant Black Stone\": 9},\n\t\"+8 Epheria: Old Sail\": {\"+7 Epheria: Old Sail\": 1, \"Verdant Black Stone\": 8},\n\t\"+7 Epheria: Old Sail\": {\"+6 Epheria: Old Sail\": 1, \"Verdant Black Stone\": 7},\n\t\"+6 Epheria: Old Sail\": {\"+5 Epheria: Old Sail\": 1, \"Verdant Black Stone\": 6},\n\t\"+5 Epheria: Old Sail\": {\"+4 Epheria: Old Sail\": 1, \"Verdant Black Stone\": 5},\n\t\"+4 Epheria: Old Sail\": {\"+3 Epheria: Old Sail\": 1, \"Verdant Black Stone\": 4},\n\t\"+3 Epheria: Old Sail\": {\"+2 Epheria: Old Sail\": 1, \"Verdant Black Stone\": 3},\n\t\"+2 Epheria: Old Sail\": {\"+1 Epheria: Old Sail\": 1, \"Verdant Black Stone\": 2},\n\t\"+1 Epheria: Old Sail\": {\"Epheria: Old Sail\": 1, \"Verdant Black Stone\": 
1},\n\n\t\"+10 Epheria Caravel: Brass Prow\": {\"+9 Epheria Caravel: Brass Prow\": 1, \"Frosted Black Stone\": 1},\n\t\"+9 Epheria Caravel: Brass Prow\": {\"+8 Epheria Caravel: Brass Prow\": 1, \"Frosted Black Stone\": 1},\n\t\"+8 Epheria Caravel: Brass Prow\": {\"+7 Epheria Caravel: Brass Prow\": 1, \"Frosted Black Stone\": 1},\n\t\"+7 Epheria Caravel: Brass Prow\": {\"+6 Epheria Caravel: Brass Prow\": 1, \"Frosted Black Stone\": 1},\n\t\"+6 Epheria Caravel: Brass Prow\": {\"+5 Epheria Caravel: Brass Prow\": 1, \"Frosted Black Stone\": 1},\n\t\"+5 Epheria Caravel: Brass Prow\": {\"+4 Epheria Caravel: Brass Prow\": 1, \"Fiery Black Stone\": 1},\n\t\"+4 Epheria Caravel: Brass Prow\": {\"+3 Epheria Caravel: Brass Prow\": 1, \"Fiery Black Stone\": 1},\n\t\"+3 Epheria Caravel: Brass Prow\": {\"+2 Epheria Caravel: Brass Prow\": 1, \"Fiery Black Stone\": 1},\n\t\"+2 Epheria Caravel: Brass Prow\": {\"+1 Epheria Caravel: Brass Prow\": 1, \"Fiery Black Stone\": 1},\n\t\"+1 Epheria Caravel: Brass Prow\": {\"Epheria Caravel: Brass Prow\": 1, \"Fiery Black Stone\": 1},\n\t\"+10 Epheria Caravel: Upgraded Plating (Green)\": {\"+9 Epheria Caravel: Upgraded Plating (Green)\": 1, \"Frosted Black Stone\": 1},\n\t\"+9 Epheria Caravel: Upgraded Plating (Green)\": {\"+8 Epheria Caravel: Upgraded Plating (Green)\": 1, \"Frosted Black Stone\": 1},\n\t\"+8 Epheria Caravel: Upgraded Plating (Green)\": {\"+7 Epheria Caravel: Upgraded Plating (Green)\": 1, \"Frosted Black Stone\": 1},\n\t\"+7 E<NAME>: Upgraded Plating (Green)\": {\"+6 Epheria Caravel: Upgraded Plating (Green)\": 1, \"Frosted Black Stone\": 1},\n\t\"+6 Epheria Caravel: Upgraded Plating (Green)\": {\"+5 Epheria Caravel: Upgraded Plating (Green)\": 1, \"Frosted Black Stone\": 1},\n\t\"+5 Epheria Caravel: Upgraded Plating (Green)\": {\"+4 Epheria Caravel: Upgraded Plating (Green)\": 1, \"Fiery Black Stone\": 1},\n\t\"+4 E<NAME>: Upgraded Plating (Green)\": {\"+3 Epheria Caravel: Upgraded Plating (Green)\": 1, \"Fiery Black Stone\": 1},\n\t\"+3 E<NAME>: Upgraded Plating (Green)\": {\"+2 Epheria Caravel: Upgraded Plating (Green)\": 1, \"Fiery Black Stone\": 1},\n\t\"+2 E<NAME>: Upgraded Plating (Green)\": {\"+1 Epheria Caravel: Upgraded Plating (Green)\": 1, \"Fiery Black Stone\": 1},\n\t\"+1 <NAME>: Upgraded Plating (Green)\": {\"Epheria Caravel: Upgraded Plating (Green)\": 1, \"Fiery Black Stone\": 1},\n\t\"+10 <NAME>: Verisha Cannon\": {\"+9 Epheria Caravel: Verisha Cannon\": 1, \"Frosted Black Stone\": 1},\n\t\"+9 E<NAME>: Verisha Cannon\": {\"+8 Epheria Caravel: Verisha Cannon\": 1, \"Frosted Black Stone\": 1},\n\t\"+8 E<NAME>: Verisha Cannon\": {\"+7 Epheria Caravel: Verisha Cannon\": 1, \"Frosted Black Stone\": 1},\n\t\"+7 E<NAME>: Verisha Cannon\": {\"+6 Epheria Caravel: Verisha Cannon\": 1, \"Frosted Black Stone\": 1},\n\t\"+6 Epheria Caravel: Verisha Cannon\": {\"+5 Epheria Caravel: Verisha Cannon\": 1, \"Frosted Black Stone\": 1},\n\t\"+5 Epheria Caravel: <NAME>\": {\"+4 Epheria Caravel: Verisha Cannon\": 1, \"Fiery Black Stone\": 1},\n\t\"+4 E<NAME>: Ver<NAME>\": {\"+3 Epheria Caravel: Verisha Cannon\": 1, \"Fiery Black Stone\": 1},\n\t\"+3 Epheria Caravel: Ver<NAME>\": {\"+2 Epheria Caravel: Verisha Cannon\": 1, \"Fiery Black Stone\": 1},\n\t\"+2 E<NAME>: Ver<NAME>\": {\"+1 Epheria Caravel: Verisha Cannon\": 1, \"Fiery Black Stone\": 1},\n\t\"+1 Epher<NAME>: <NAME>\": {\"Epheria Caravel: Verisha Cannon\": 1, \"Fiery Black Stone\": 1},\n\t\"+10 Epheria Caravel: White Wind Sail\": {\"+9 Epheria Caravel: White Wind Sail\": 1, \"Frosted Black 
Stone\": 1},\n\t\"+9 Epheria Caravel: White Wind Sail\": {\"+8 Epheria Caravel: White Wind Sail\": 1, \"Frosted Black Stone\": 1},\n\t\"+8 Epheria Caravel: White Wind Sail\": {\"+7 Epheria Caravel: White Wind Sail\": 1, \"Frosted Black Stone\": 1},\n\t\"+7 Epheria Caravel: White Wind Sail\": {\"+6 Epheria Caravel: White Wind Sail\": 1, \"Frosted Black Stone\": 1},\n\t\"+6 Epheria Caravel: White Wind Sail\": {\"+5 Epheria Caravel: White Wind Sail\": 1, \"Frosted Black Stone\": 1},\n\t\"+5 Epheria Caravel: White Wind Sail\": {\"+4 Epheria Caravel: White Wind Sail\": 1, \"Fiery Black Stone\": 1},\n\t\"+4 Epheria Caravel: White Wind Sail\": {\"+3 Epheria Caravel: White Wind Sail\": 1, \"Fiery Black Stone\": 1},\n\t\"+3 Epheria Caravel: White Wind Sail\": {\"+2 Epheria Caravel: White Wind Sail\": 1, \"Fiery Black Stone\": 1},\n\t\"+2 Epheria Caravel: White Wind Sail\": {\"+1 Epheria Caravel: White Wind Sail\": 1, \"Fiery Black Stone\": 1},\n\t\"+1 Epheria Caravel: White Wind Sail\": {\"Epheria Caravel: White Wind Sail\": 1, \"Fiery Black Stone\": 1},\n\n\t\"+10 Epheria Galleass: White Horn Prow\": {\"+9 Epheria Galleass: White Horn Prow\": 1, \"Frosted Black Stone\": 1},\n\t\"+9 Epheria Galleass: White Horn Prow\": {\"+8 Epheria Galleass: White Horn Prow\": 1, \"Frosted Black Stone\": 1},\n\t\"+8 Epheria Galleass: White Horn Prow\": {\"+7 Epheria Galleass: White Horn Prow\": 1, \"Frosted Black Stone\": 1},\n\t\"+7 Epheria Galleass: White Horn Prow\": {\"+6 Epheria Galleass: White Horn Prow\": 1, \"Frosted Black Stone\": 1},\n\t\"+6 Epheria Galleass: White Horn Prow\": {\"+5 Epheria Galleass: White Horn Prow\": 1, \"Frosted Black Stone\": 1},\n\t\"+5 Epheria Galleass: White Horn Prow\": {\"+4 Epheria Galleass: White Horn Prow\": 1, \"Fiery Black Stone\": 1},\n\t\"+4 Epheria Galleass: White Horn Prow\": {\"+3 Epheria Galleass: White Horn Prow\": 1, \"Fiery Black Stone\": 1},\n\t\"+3 Epheria Galleass: White Horn Prow\": {\"+2 Epheria Galleass: White Horn Prow\": 1, \"Fiery Black Stone\": 1},\n\t\"+2 Epheria Galleass: White Horn Prow\": {\"+1 Epheria Galleass: White Horn Prow\": 1, \"Fiery Black Stone\": 1},\n\t\"+1 Epheria Galleass: White Horn Prow\": {\"Epheria Galleass: White Horn Prow\": 1, \"Fiery Black Stone\": 1},\n\t\"+10 Epheria Galleass: Upgraded Plating (Green)\": {\"+9 Epheria Galleass: Upgraded Plating (Green)\": 1, \"Frosted Black Stone\": 1},\n\t\"+9 Epheria Galleass: Upgraded Plating (Green)\": {\"+8 Epheria Galleass: Upgraded Plating (Green)\": 1, \"Frosted Black Stone\": 1},\n\t\"+8 Epheria Galleass: Upgraded Plating (Green)\": {\"+7 Epheria Galleass: Upgraded Plating (Green)\": 1, \"Frosted Black Stone\": 1},\n\t\"+7 Epheria Galleass: Upgraded Plating (Green)\": {\"+6 Epheria Galleass: Upgraded Plating (Green)\": 1, \"Frosted Black Stone\": 1},\n\t\"+6 Epheria Galleass: Upgraded Plating (Green)\": {\"+5 Epheria Galleass: Upgraded Plating (Green)\": 1, \"Frosted Black Stone\": 1},\n\t\"+5 Epheria Galleass: Upgraded Plating (Green)\": {\"+4 Epheria Galleass: Upgraded Plating (Green)\": 1, \"Fiery Black Stone\": 1},\n\t\"+4 Epheria Galleass: Upgraded Plating (Green)\": {\"+3 Epheria Galleass: Upgraded Plating (Green)\": 1, \"Fiery Black Stone\": 1},\n\t\"+3 Epheria Galleass: Upgraded Plating (Green)\": {\"+2 Epheria Galleass: Upgraded Plating (Green)\": 1, \"Fiery Black Stone\": 1},\n\t\"+2 Epheria Galleass: Upgraded Plating (Green)\": {\"+1 Epheria Galleass: Upgraded Plating (Green)\": 1, \"Fiery Black Stone\": 1},\n\t\"+1 Epheria Galleass: Upgraded Plating (Green)\": {\"Epheria 
Galleass: Upgraded Plating (Green)\": 1, \"Fiery Black Stone\": 1},\n\t\"+10 Epheria Galleass: Verisha Cannon\": {\"+9 Epheria Galleass: Verisha Cannon\": 1, \"Frosted Black Stone\": 1},\n\t\"+9 Epheria Galleass: Verisha Cannon\": {\"+8 Epheria Galleass: Verisha Cannon\": 1, \"Frosted Black Stone\": 1},\n\t\"+8 Epheria Galleass: Verisha Cannon\": {\"+7 Epheria Galleass: Verisha Cannon\": 1, \"Frosted Black Stone\": 1},\n\t\"+7 Epheria Galleass: Verisha Cannon\": {\"+6 Epheria Galleass: Verisha Cannon\": 1, \"Frosted Black Stone\": 1},\n\t\"+6 Epheria Galleass: Verisha Cannon\": {\"+5 Epheria Galleass: Verisha Cannon\": 1, \"Frosted Black Stone\": 1},\n\t\"+5 Epheria Galleass: Verisha Cannon\": {\"+4 Epheria Galleass: Verisha Cannon\": 1, \"Fiery Black Stone\": 1},\n\t\"+4 Epheria Galleass: Verisha Cannon\": {\"+3 Epheria Galleass: Verisha Cannon\": 1, \"Fiery Black Stone\": 1},\n\t\"+3 Epheria Galleass: Verisha Cannon\": {\"+2 Epheria Galleass: Verisha Cannon\": 1, \"Fiery Black Stone\": 1},\n\t\"+2 Epheria Galleass: Verisha Cannon\": {\"+1 Epheria Galleass: Verisha Cannon\": 1, \"Fiery Black Stone\": 1},\n\t\"+1 Epheria Galleass: Verisha Cannon\": {\"Epheria Galleass: Verisha Cannon\": 1, \"Fiery Black Stone\": 1},\n\t\"+10 Epheria Galleass: White Wind Sail\": {\"+9 Epheria Galleass: White Wind Sail\": 1, \"Frosted Black Stone\": 1},\n\t\"+9 Epheria Galleass: White Wind Sail\": {\"+8 Epheria Galleass: White Wind Sail\": 1, \"Frosted Black Stone\": 1},\n\t\"+8 Epheria Galleass: White Wind Sail\": {\"+7 Epheria Galleass: White Wind Sail\": 1, \"Frosted Black Stone\": 1},\n\t\"+7 Epheria Galleass: White Wind Sail\": {\"+6 Epheria Galleass: White Wind Sail\": 1, \"Frosted Black Stone\": 1},\n\t\"+6 Epheria Galleass: White Wind Sail\": {\"+5 Epheria Galleass: White Wind Sail\": 1, \"Frosted Black Stone\": 1},\n\t\"+5 Epheria Galleass: White Wind Sail\": {\"+4 Epheria Galleass: White Wind Sail\": 1, \"Fiery Black Stone\": 1},\n\t\"+4 Epheria Galleass: White Wind Sail\": {\"+3 Epheria Galleass: White Wind Sail\": 1, \"Fiery Black Stone\": 1},\n\t\"+3 Epheria Galleass: White Wind Sail\": {\"+2 Epheria Galleass: White Wind Sail\": 1, \"Fiery Black Stone\": 1},\n\t\"+2 Epheria Galleass: White Wind Sail\": {\"+1 Epheria Galleass: White Wind Sail\": 1, \"Fiery Black Stone\": 1},\n\t\"+1 Epheria Galleass: White Wind Sail\": {\"Epheria Galleass: White Wind Sail\": 1, \"Fiery Black Stone\": 1},\n\n\t\"+10 Epheria Caravel: Black Dragon Prow\": {\"+9 Epheria Caravel: Black Dragon Prow\": 1, \"Frosted Black Stone\": 1},\n\t\"+9 Epheria Caravel: Black Dragon Prow\": {\"+8 Epheria Caravel: Black Dragon Prow\": 1, \"Frosted Black Stone\": 1},\n\t\"+8 Epheria Caravel: Black Dragon Prow\": {\"+7 Epheria Caravel: Black Dragon Prow\": 1, \"Frosted Black Stone\": 1},\n\t\"+7 Epheria Caravel: Black Dragon Prow\": {\"+6 Epheria Caravel: Black Dragon Prow\": 1, \"Frosted Black Stone\": 1},\n\t\"+6 Epheria Caravel: Black Dragon Prow\": {\"+5 Epheria Caravel: Black Dragon Prow\": 1, \"Frosted Black Stone\": 1},\n\t\"+5 Epheria Caravel: Black Dragon Prow\": {\"+4 Epheria Caravel: Black Dragon Prow\": 1, \"Fiery Black Stone\": 1},\n\t\"+4 Epheria Caravel: Black Dragon Prow\": {\"+3 Epheria Caravel: Black Dragon Prow\": 1, \"Fiery Black Stone\": 1},\n\t\"+3 Epheria Caravel: Black Dragon Prow\": {\"+2 Epheria Caravel: Black Dragon Prow\": 1, \"Fiery Black Stone\": 1},\n\t\"+2 Epheria Caravel: Black Dragon Prow\": {\"+1 Epheria Caravel: Black Dragon Prow\": 1, \"Fiery Black Stone\": 1},\n\t\"+1 Epheria Caravel: Black Dragon 
Prow\": {\"Epheria Caravel: Black Dragon Prow\": 1, \"Fiery Black Stone\": 1},\n\t\"+10 Epheria Caravel: Upgraded Plating (Blue)\": {\"+9 Epheria Caravel: Upgraded Plating (Blue)\": 1, \"Frosted Black Stone\": 1},\n\t\"+9 Epheria Caravel: Upgraded Plating (Blue)\": {\"+8 Epheria Caravel: Upgraded Plating (Blue)\": 1, \"Frosted Black Stone\": 1},\n\t\"+8 Epheria Caravel: Upgraded Plating (Blue)\": {\"+7 Epheria Caravel: Upgraded Plating (Blue)\": 1, \"Frosted Black Stone\": 1},\n\t\"+7 Epheria Caravel: Upgraded Plating (Blue)\": {\"+6 Epheria Caravel: Upgraded Plating (Blue)\": 1, \"Frosted Black Stone\": 1},\n\t\"+6 Epheria Caravel: Upgraded Plating (Blue)\": {\"+5 Epheria Caravel: Upgraded Plating (Blue)\": 1, \"Frosted Black Stone\": 1},\n\t\"+5 Epheria Caravel: Upgraded Plating (Blue)\": {\"+4 Epheria Caravel: Upgraded Plating (Blue)\": 1, \"Fiery Black Stone\": 1},\n\t\"+4 Epheria Caravel: Upgraded Plating (Blue)\": {\"+3 Epheria Caravel: Upgraded Plating (Blue)\": 1, \"Fiery Black Stone\": 1},\n\t\"+3 Epheria Caravel: Upgraded Plating (Blue)\": {\"+2 Epheria Caravel: Upgraded Plating (Blue)\": 1, \"Fiery Black Stone\": 1},\n\t\"+2 Epheria Caravel: Upgraded Plating (Blue)\": {\"+1 Epheria Caravel: Upgraded Plating (Blue)\": 1, \"Fiery Black Stone\": 1},\n\t\"+1 Epheria Caravel: Upgraded Plating (Blue)\": {\"Epheria Caravel: Upgraded Plating (Blue)\": 1, \"Fiery Black Stone\": 1},\n\t\"+10 Epheria Caravel: <NAME>\": {\"+9 Epheria Caravel: <NAME>\": 1, \"Frosted Black Stone\": 1},\n\t\"+9 Epheria Caravel: <NAME>\": {\"+8 Epheria Caravel: <NAME>\": 1, \"Frosted Black Stone\": 1},\n\t\"+8 Epheria Caravel: <NAME>\": {\"+7 Epheria Caravel: <NAME>\": 1, \"Frosted Black Stone\": 1},\n\t\"+7 Epheria Caravel: <NAME>\": {\"+6 Epheria Caravel: <NAME>\": 1, \"Frosted Black Stone\": 1},\n\t\"+6 Epheria Caravel: <NAME>\": {\"+5 Epheria Caravel: <NAME>\": 1, \"Frosted Black Stone\": 1},\n\t\"+5 Epheria Caravel: <NAME>\": {\"+4 Epheria Caravel: <NAME>\": 1, \"Fiery Black Stone\": 1},\n\t\"+4 Epheria Caravel: <NAME>\": {\"+3 Epheria Caravel: <NAME>\": 1, \"Fiery Black Stone\": 1},\n\t\"+3 Epheria Caravel: <NAME>\": {\"+2 Epheria Caravel: <NAME>\": 1, \"Fiery Black Stone\": 1},\n\t\"+2 Epheria Caravel: <NAME>\": {\"+1 Epheria Caravel: <NAME>\": 1, \"Fiery Black Stone\": 1},\n\t\"+1 Epheria Caravel: <NAME>\": {\"Epheria Caravel: <NAME>\": 1, \"Fiery Black Stone\": 1},\n\t\"+10 Epheria Caravel: Stratus Wind Sail\": {\"+9 Epheria Caravel: Stratus Wind Sail\": 1, \"Frosted Black Stone\": 1},\n\t\"+9 Epheria Caravel: Stratus Wind Sail\": {\"+8 Epheria Caravel: Stratus Wind Sail\": 1, \"Frosted Black Stone\": 1},\n\t\"+8 Epheria Caravel: Stratus Wind Sail\": {\"+7 Epheria Caravel: Stratus Wind Sail\": 1, \"Frosted Black Stone\": 1},\n\t\"+7 Epheria Caravel: Stratus Wind Sail\": {\"+6 Epheria Caravel: Stratus Wind Sail\": 1, \"Frosted Black Stone\": 1},\n\t\"+6 Epheria Caravel: Stratus Wind Sail\": {\"+5 Epheria Caravel: Stratus Wind Sail\": 1, \"Frosted Black Stone\": 1},\n\t\"+5 Epheria Caravel: Stratus Wind Sail\": {\"+4 Epheria Caravel: Stratus Wind Sail\": 1, \"Fiery Black Stone\": 1},\n\t\"+4 Epheria Caravel: Stratus Wind Sail\": {\"+3 Epheria Caravel: Stratus Wind Sail\": 1, \"Fiery Black Stone\": 1},\n\t\"+3 Epheria Caravel: Stratus Wind Sail\": {\"+2 Epheria Caravel: Stratus Wind Sail\": 1, \"Fiery Black Stone\": 1},\n\t\"+2 Epheria Caravel: Stratus Wind Sail\": {\"+1 Epheria Caravel: Stratus Wind Sail\": 1, \"Fiery Black Stone\": 1},\n\t\"+1 Epheria Caravel: Stratus Wind Sail\": {\"Epheria Caravel: 
Stratus Wind Sail\": 1, \"Fiery Black Stone\": 1},\n\n\t\"+10 Epheria Galleass: Black Dragon Prow\": {\"+9 Epheria Galleass: Black Dragon Prow\": 1, \"Frosted Black Stone\": 1},\n\t\"+9 Epheria Galleass: Black Dragon Prow\": {\"+8 Epheria Galleass: Black Dragon Prow\": 1, \"Frosted Black Stone\": 1},\n\t\"+8 Epheria Galleass: Black Dragon Prow\": {\"+7 Epheria Galleass: Black Dragon Prow\": 1, \"Frosted Black Stone\": 1},\n\t\"+7 Epheria Galleass: Black Dragon Prow\": {\"+6 Epheria Galleass: Black Dragon Prow\": 1, \"Frosted Black Stone\": 1},\n\t\"+6 Epheria Galleass: Black Dragon Prow\": {\"+5 Epheria Galleass: Black Dragon Prow\": 1, \"Frosted Black Stone\": 1},\n\t\"+5 Epheria Galleass: Black Dragon Prow\": {\"+4 Epheria Galleass: Black Dragon Prow\": 1, \"Fiery Black Stone\": 1},\n\t\"+4 Epheria Galleass: Black Dragon Prow\": {\"+3 Epheria Galleass: Black Dragon Prow\": 1, \"Fiery Black Stone\": 1},\n\t\"+3 Epheria Galleass: Black Dragon Prow\": {\"+2 Epheria Galleass: Black Dragon Prow\": 1, \"Fiery Black Stone\": 1},\n\t\"+2 Epheria Galleass: Black Dragon Prow\": {\"+1 Epheria Galleass: Black Dragon Prow\": 1, \"Fiery Black Stone\": 1},\n\t\"+1 Epheria Galleass: Black Dragon Prow\": {\"Epheria Galleass: Black Dragon Prow\": 1, \"Fiery Black Stone\": 1},\n\t\"+10 Epheria Galleass: Upgraded Plating (Blue)\": {\"+9 Epheria Galleass: Upgraded Plating (Blue)\": 1, \"Frosted Black Stone\": 1},\n\t\"+9 Epheria Galleass: Upgraded Plating (Blue)\": {\"+8 Epheria Galleass: Upgraded Plating (Blue)\": 1, \"Frosted Black Stone\": 1},\n\t\"+8 Epheria Galleass: Upgraded Plating (Blue)\": {\"+7 Epheria Galleass: Upgraded Plating (Blue)\": 1, \"Frosted Black Stone\": 1},\n\t\"+7 Epheria Galleass: Upgraded Plating (Blue)\": {\"+6 Epheria Galleass: Upgraded Plating (Blue)\": 1, \"Frosted Black Stone\": 1},\n\t\"+6 Epheria Galleass: Upgraded Plating (Blue)\": {\"+5 Epheria Galleass: Upgraded Plating (Blue)\": 1, \"Frosted Black Stone\": 1},\n\t\"+5 Epheria Galleass: Upgraded Plating (Blue)\": {\"+4 Epheria Galleass: Upgraded Plating (Blue)\": 1, \"Fiery Black Stone\": 1},\n\t\"+4 Epheria Galleass: Upgraded Plating (Blue)\": {\"+3 Epheria Galleass: Upgraded Plating (Blue)\": 1, \"Fiery Black Stone\": 1},\n\t\"+3 Epheria Galleass: Upgraded Plating (Blue)\": {\"+2 Epheria Galleass: Upgraded Plating (Blue)\": 1, \"Fiery Black Stone\": 1},\n\t\"+2 Epheria Galleass: Upgraded Plating (Blue)\": {\"+1 Epheria Galleass: Upgraded Plating (Blue)\": 1, \"Fiery Black Stone\": 1},\n\t\"+1 Epheria Galleass: Upgraded Plating (Blue)\": {\"Epheria Galleass: Upgraded Plating (Blue)\": 1, \"Fiery Black Stone\": 1},\n\t\"+10 Epheria Galleass: <NAME>annon\": {\"+9 Epheria Galleass: Mayna Cannon\": 1, \"Frosted Black Stone\": 1},\n\t\"+9 Epheria Galleass: Mayna Cannon\": {\"+8 Epheria Galleass: Mayna Cannon\": 1, \"Frosted Black Stone\": 1},\n\t\"+8 Epheria Galleass: Mayna Cannon\": {\"+7 Epheria Galleass: Mayna Cannon\": 1, \"Frosted Black Stone\": 1},\n\t\"+7 Epheria Galleass: Mayna Cannon\": {\"+6 Epheria Galleass: Mayna Cannon\": 1, \"Frosted Black Stone\": 1},\n\t\"+6 Epheria Galleass: Mayna Cannon\": {\"+5 Epheria Galleass: Mayna Cannon\": 1, \"Frosted Black Stone\": 1},\n\t\"+5 Epheria Galleass: Mayna Cannon\": {\"+4 Epheria Galleass: Mayna Cannon\": 1, \"Fiery Black Stone\": 1},\n\t\"+4 Epheria Galleass: Mayna Cannon\": {\"+3 Epheria Galleass: Mayna Cannon\": 1, \"Fiery Black Stone\": 1},\n\t\"+3 Epheria Galleass: <NAME>\": {\"+2 Epheria Galleass: Mayna Cannon\": 1, \"Fiery Black Stone\": 1},\n\t\"+2 Epheria Galleass: 
<NAME>\": {\"+1 Epheria Galleass: Mayna Cannon\": 1, \"Fiery Black Stone\": 1},\n\t\"+1 Epheria Galleass: Mayna Cannon\": {\"Epheria Galleass: Mayna Cannon\": 1, \"Fiery Black Stone\": 1},\n\t\"+10 Epheria Galleass: Stratus Wind Sail\": {\"+9 Epheria Galleass: Stratus Wind Sail\": 1, \"Frosted Black Stone\": 1},\n\t\"+9 Epheria Galleass: Stratus Wind Sail\": {\"+8 Epheria Galleass: Stratus Wind Sail\": 1, \"Frosted Black Stone\": 1},\n\t\"+8 Epheria Galleass: Stratus Wind Sail\": {\"+7 Epheria Galleass: Stratus Wind Sail\": 1, \"Frosted Black Stone\": 1},\n\t\"+7 Epheria Galleass: Stratus Wind Sail\": {\"+6 Epheria Galleass: Stratus Wind Sail\": 1, \"Frosted Black Stone\": 1},\n\t\"+6 Epheria Galleass: Stratus Wind Sail\": {\"+5 Epheria Galleass: Stratus Wind Sail\": 1, \"Frosted Black Stone\": 1},\n\t\"+5 Epheria Galleass: Stratus Wind Sail\": {\"+4 Epheria Galleass: Stratus Wind Sail\": 1, \"Fiery Black Stone\": 1},\n\t\"+4 Epheria Galleass: Stratus Wind Sail\": {\"+3 Epheria Galleass: Stratus Wind Sail\": 1, \"Fiery Black Stone\": 1},\n\t\"+3 Epheria Galleass: Stratus Wind Sail\": {\"+2 Epheria Galleass: Stratus Wind Sail\": 1, \"Fiery Black Stone\": 1},\n\t\"+2 Epheria Galleass: Stratus Wind Sail\": {\"+1 Epheria Galleass: Stratus Wind Sail\": 1, \"Fiery Black Stone\": 1},\n\t\"+1 Epheria Galleass: Stratus Wind Sail\": {\"Epheria Galleass: Stratus Wind Sail\": 1, \"Fiery Black Stone\": 1},\n\n}\n", "id": "5404240", "language": "Python", "matching_score": 7.173435211181641, "max_stars_count": 0, "path": "recipes.py" }, { "content": "# a dictionary of item notes\n\nitems = {\n\t\"Bartali Sailboat\": [\"Buy from Proix. 10 mil\"],\n\t\"Epheria Sailboat\": [\"Buy on CM or craft from Bartali Sailboat\", \"Improved Sailboat works for Caravel\"],\n\t\"Epheria Frigate\": [\"Buy on CM or craft from Bartali Sailboat\", \"Improved Frigate works for Galleass\"],\n\t\"Epheria Caravel\": [\"Craft from Epheria Sailboat\"],\n\t\"Epheria Galleass\": [\"Craft from Epheria Frigate\"],\n\t\"Ship Upgrade Permit: Epheria Sailboat\": [\"Buy from Falasi. 100 mil.\"],\n\t\"Ship Upgrade Permit: Epheria Frigate\": [\"Buy from Falasi. 200 mil.\"],\n\t\"Ship Upgrade Permit: Epheria Caravel\": [\"Buy from Falasi. 400 mil.\"],\n\t\"Ship Upgrade Permit: Epheria Galleass\": [\"Buy from Falasi. 600 mil.\"],\n\t\"Verdant Black Stone\": [\"100 total from Ravinia's quest rewards day 1&4\", \"5 per completion of the Great Sea MSQ\", \"Rare sea monster drop\"],\n\t\"Ultimate Armor Reform Stone\": [\"Buy from CM\"],\n\t\"Ultimate Weapon Reform Stone\": [\"Buy from CM\"],\n\t\"Black Stone (Armor)\": [\"Buy from CM\"],\n\n\t\"Bartali Sailboat: Old Prow\": [\"Buy from Falasi. 400k.\"],\n\t\"Bartali Sailboat: Old Plating\": [\"Buy from Falasi. 500k.\"],\n\t\"Bartali Sailboat: Old Cannon\": [\"Buy from Falasi. 400k.\"],\n\t\"Bartali Sailboat: Old Sail\": [\"Buy from Falasi. 300k.\"],\n\n\t\"Epheria: Old Prow\": [\"Buy from Falasi. 3.5 mil.\"],\n\t\"Epheria: Old Plating\": [\"Buy from Falasi. 4 mil.\"],\n\t\"Epheria: Old Cannon\": [\"Buy from Falasi. 3.5 mil.\"],\n\t\"Epheria: Old Sail\": [\"Buy from Falasi. 3 mil.\"],\n\n\t\"Epheria Caravel: Brass Prow\": [\"Buy from Falasi. 8 mil.\"],\n\t\"Epheria Caravel: Upgraded Plating (Green)\": [\"Buy from Falasi. 9 mil.\"],\n\t\"Epheria Caravel: Verisha Cannon\": [\"Buy from Falasi. 8 mil.\"],\n\t\"Epheria Caravel: White Wind Sail\": [\"Buy from Falasi. 7 mil.\"],\n\n\t\"Epheria Galleass: White Horn Prow\": [\"Buy from Falasi. 
8 mil.\"],\n\t\"Epheria Galleass: Upgraded Plating (Green)\": [\"Buy from Falasi. 9 mil.\"],\n\t\"Epheria Galleass: Verisha Cannon\": [\"Buy from Falasi. 8 mil.\"],\n\t\"Epheria Galleass: White Wind Sail\": [\"Buy from Falasi. 7 mil.\"],\n\n\t\"Epheria Caravel: Black Dragon Prow\": [\"Make at Epheria Ship Parts Shop\"],\n\t\"Epheria Caravel: Upgraded Plating (Blue)\": [\"Make at Epheria Ship Parts Shop\"],\n\t\"Epheria Caravel: Mayna Cannon\": [\"Make at Epheria Ship Parts Shop\"],\n\t\"Epheria Caravel: Stratus Wind Sail\": [\"Make at Epheria Ship Parts Shop\"],\n\n\t\"Epheria Galleass: Black Dragon Prow\": [\"Make at Epheria Ship Parts Shop\"],\n\t\"Epheria Galleass: Upgraded Plating (Blue)\": [\"Make at Epheria Ship Parts Shop\"],\n\t\"Epheria Galleass: Mayna Cannon\": [\"Make at Epheria Ship Parts Shop\"],\n\t\"Epheria Galleass: Stratus Wind Sail\": [\"Make at Epheria Ship Parts Shop\"],\n\n\t\"Sea Monster's Ooze\": [\"Hungry Heraku or Hungry Ocean Stalker drop\", \"Recipe item for Graphite Ingot for Upgrade\", \"Recipe item for Timber for Upgrade\", \"Recipe item for Adhesive for Upgrade\"],\n\t\"Graphite Ingot for Upgrade\": [\"1x per Heating\", \"50 total from Ravinia's quest rewards day 1&4\"],\n\t\"Timber for Upgrade\": [\"1x per Chopping\", \"50 total from Ravinia's quest rewards day 1&4\"],\n\t\"Adhesive for Upgrade\": [\"1x per Heating\", \"50 total from Ravinia's quest rewards day 1&4\"],\n\n\t\"Zinc Ingot\": [\"Buy from CM or process\", \"Recipe item for Graphite Ingot for Upgrade\"],\n\t\"Old Tree Bark\": [\"Buy from CM or gather with workers\", \"Recipe item for Timber for Upgrade\"],\n\t\"Red Tree Lump\": [\"Buy from CM or gather with workers\", \"Recipe item for Timber for Upgrade\"],\n\t\"White Cedar Sap\": [\"Buy from CM or gather with workers\", \"Recipe item for Adhesive for Upgrade\"],\n\t\"Acacia Sap\": [\"Buy from CM or gather with workers\", \"Recipe item for Adhesive for Upgrade\"],\n\t\"Elder Tree Sap\": [\"Buy from CM or gather with workers\", \"Recipe item for Adhesive for Upgrade\"],\n\n\t\"Standardized Timber Square\": [\"Buy from CM or process\"],\n\t\"Steel\": [\"Buy from CM or process\"],\n\t\"Pine Plywood\": [\"Buy from CM or process\"],\n\t\"Flax Fabric\": [\"Buy from CM or process\"],\n\t\"Hard Pillar\": [\"Buy from CM or process\"],\n\t\"Jade Coral Ingot\": [\"Buy from CM or process\"],\n\t\"Pine Coated Plywood\": [\"Buy from CM or process\"],\n\t\"Enhanced Flax Fabric\": [\"Buy from CM or process\"],\n\n#\t\"Khan's Concentrated Magic\": [],\n\t\"Luminous Cobalt Ingot\": [\"Hekaru\", \"Ocean Stalker\", \"Young Nineshark\", \"Young Candidum\", \"Young Black Rust\"],\n\t\"Bright Reef Piece\": [\"8x per [Daily] Ravikel's Test\", \"Hekaru\", \"Ocean Stalker\", \"Young Nineshark\", \"Young Candidum\", \"Young Black Rust\"],\n\t\"Great Ocean Dark Iron\": [\"Hekaru\", \"Ocean Stalker\", \"Young Nineshark\", \"Young Candidum\", \"Young Black Rust\"],\n\t\"Cobalt Ingot\": [\"Young Hekaru\"],\n\t\"Brilliant Rock Salt Ingot\": [\"Black Rust\", \"Candidum\"],\n\t\"Seaweed Stalk\": [\"4x per [Daily] Precious Coral Piece\", \"Suspicious Cargo Ship\"],\n\t\"Enhanced Island Tree Coated Plywood\": [\"10x per [Daily] For the Serendian Soldiers\", \"Hekaru\", \"Ocean Stalker\", \"Young Nineshark\", \"Young Candidum\", \"Young Black Rust\"],\n\t\"Pure Pearl Crystal\": [\"2x per [Daily] Ravikel's Test\", \"Hekaru\", \"Ocean Stalker\", \"Young Nineshark\", \"Young Candidum\", \"Young Black Rust\"],\n#\t\"Cox Pirates' Artifact (Parley Beginner)\": [],\n\t\"Cox Pirates' 
Artifact (Parley Expert)\": [\"1x per [Daily] For the Serendian Soldiers\"],\n\t\"Cox Pirates' Artifact (Combat)\": [\"3x per [Daily] Do You Have What it Takes?\", \"Cox Pirates' Shadow Ghost drop\", \"Cox Pirate Extermination Seal X200\"],\n\t\"Deep Sea Memory Filled Glue\": [\"8x per [Daily] Ravikel's Test\", \"Young Ocean Stalker\"],\n\t\"Brilliant Pearl Shard\": [\"Candidum\", \"Nineshark\"],\n\t\"Ruddy Manganese Nodule\": [\"2x per [Daily] For the Young Otter Merchants\", \"Suspicious Cargo Ship\"],\n\t\"Tear of the Ocean\": [\"1x per [Daily] OMG's Black Rust Hunter\", \"0.5x per [Daily] OMG's Young Black Rust Hunter\"],\n#\t\"Khan's Tendon\": [],\n#\t\"Khan's Scale\": [],\n#\t\"Frosted Black Stone\": [],\n\t\"Tide-Dyed Standardized Timber Square\": [\"3x per [Daily] Our Guild is not a Charity Group\", \"Cox Pirates' Shadow Ghost drop\"],\n\t\"Deep Tide-Dyed Standardized Timber Square\": [\"3x per [Daily] Win-win Situation\", \"Cox Pirates' Shadow Ghost\"],\n\t\"Moon Vein Flax Fabric\": [\"3x per [Daily] OMG's Nineshark Hunter\", \"1x per [Daily] OMG's Young Nineshark Hunter\", \"10x from drying Khan's Tendon\"],\n\t\"Moon Scale Plywood\": [\"10x per [Daily] OMG's Candidum Hunter\", \"5x per [Daily] OMG's Young Candidum Hunter\", \"10x from drying Khan's Scale\"],\n#\t\"Fiery Black Stone\": [],\n#\t\"Mandragora Essence\": [],\n}", "id": "946597", "language": "Python", "matching_score": 9.26621150970459, "max_stars_count": 5, "path": "vendor_items.py" }, { "content": "coins = {\n\t\"Khan's Concentrated Magic\": 80000,\n\t\"Luminous Cobalt Ingot\": 800,\n\t\"Bright Reef Piece\": 140,\n\t\"Great Ocean Dark Iron\": 160,\n\t\"Cobalt Ingot\": 150,\n\t\"Brilliant Rock Salt Ingot\": 1600,\n\t\"Seaweed Stalk\": 600,\n\t\"Enhanced Island Tree Coated Plywood\": 80,\n\t\"Pure Pearl Crystal\": 550,\n\t\"Cox Pirates' Artifact (Parley Beginner)\": 150,\n\t\"Cox Pirates' Artifact (Parley Expert)\": 800,\n\t\"Cox Pirates' Artifact (Combat)\": 800,\n\t\"Deep Sea Memory Filled Glue\": 140,\n\t\"Brilliant Pearl Shard\": 1600,\n\t\"Ruddy Manganese Nodule\": 1500,\n\t\"Tear of the Ocean\": 3900,\n\t\"Khan's Tendon\": 6000,\n\t\"Khan's Scale\": 1600,\n\t\"Frosted Black Stone\": 20,\n\t\"Tide-Dyed Standardized Timber Square\": 350,\n\t\"Deep Tide-Dyed Standardized Timber Square\": 1000,\n\t\"Moon Vein Flax Fabric\": 600,\n\t\"Moon Scale Plywood\": 160,\n\t\"Fiery Black Stone\": 10,\n\t\"Mandragora Essence\": 100,\n}", "id": "255477", "language": "Python", "matching_score": 2.7985732555389404, "max_stars_count": 5, "path": "sea_coins.py" } ]
3.235278
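The recipes.py file in the row above is a plain nested mapping: each craftable item maps to its ingredient counts, and any ingredient that is itself a key recurses. A minimal sketch of flattening one item into base materials; `expand` is a hypothetical helper, not part of the repository:

from collections import Counter

def expand(recipes, item, count=1, totals=None):
    # Recursively expand `item` into base materials (items with no recipe of their own).
    totals = totals if totals is not None else Counter()
    if item not in recipes:
        totals[item] += count  # base material: nothing further to expand
        return totals
    for ingredient, needed in recipes[item].items():
        expand(recipes, ingredient, count * needed, totals)
    return totals

# expand(recipes, "+10 Bartali Sailboat: Old Prow") ->
# Counter({'Black Stone (Armor)': 55, 'Bartali Sailboat: Old Prow': 1})

The enhancement chains make this recursion ten levels deep per ship part, which is why a +10 Bartali prow flattens to 55 Black Stones (10 + 9 + ... + 1) plus the base prow.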
hirogithu
[ { "content": "import logging\n\n\ndef get_logger(name, level=logging.DEBUG, save_name=\"../test.log\"):\n # create logger\n logger = logging.getLogger(name)\n logger.setLevel(level)\n\n # create console handler and set level to debug\n sh = logging.StreamHandler()\n fh = logging.FileHandler(save_name)\n sh.setLevel(logging.INFO)\n fh.setLevel(logging.DEBUG)\n\n # create formatter\n formatter = logging.Formatter('[%(asctime)s] - [%(name)s] - [%(levelname)s] - %(message)s')\n\n # add formatter to handler\n sh.setFormatter(formatter)\n fh.setFormatter(formatter)\n\n # add handler to logger\n logger.addHandler(sh)\n logger.addHandler(fh)\n return logger", "id": "7278753", "language": "Python", "matching_score": 0.8104314208030701, "max_stars_count": 0, "path": "src/get_logger.py" }, { "content": "import os\nimport time\nimport pandas as pd\nimport urllib.error\nimport urllib.request\n\ndownload_dir = \"../data/100rt/\"\ndownload_csv = \"../https_list_100rt.csv\"\n\ndef get_urls():\n df = pd.read_csv(download_csv, index_col=0)\n return df.values.tolist()\n\ndef download_file(url, savename):\n\n #DL済み\n if os.path.isfile(savename):\n print(\"skip\")\n return\n\n try:\n with urllib.request.urlopen(url) as web_file, open(savename, \"wb\") as local_file:\n local_file.write(web_file.read())\n except urllib.error.URLError as e:\n print(e)\n\ndef download_images(urls):\n for i, url in enumerate(urls):\n print(i, url[0])\n download_file(url[0], download_dir + \"sailor_{:0=4}.png\".format(i))\n time.sleep(1)\n\ndef main():\n urls = get_urls()\n download_images(urls)\n\nif __name__ == \"__main__\":\n main()", "id": "10344446", "language": "Python", "matching_score": 1.7292397022247314, "max_stars_count": 0, "path": "src/download_image.py" }, { "content": "import os\nimport itertools\nimport tweepy\nimport config\nimport get_logger\nimport pandas as pd\nfrom get_args import args\nlogger = get_logger.get_logger(\"hellotweepy\")\n\ndef get_auth():\n\n auth = tweepy.OAuthHandler(config.consumer_key, config.consumer_secret)\n\n auth.set_access_token(config.access_token, config.access_token_secret)\n\n return auth\n\ndef init_list(filename):\n \"\"\"取得済みのツイート(画像)を無視するために,取得済みのURLを初期値にセット\n \"\"\"\n https_list = []\n if os.path.isfile(filename):\n df = pd.read_csv(filename, index_col=0)\n ls = df.values.tolist()\n return list(itertools.chain.from_iterable(ls))\n else:\n return https_list\n\ndef get_https_list(api, q, min_rt, max_rt, max_count_page, filename):\n\n def have_media(tweet):\n if \"media\" in tweet.entities:\n return True\n return False\n\n def is_already_load(media, https_list):\n if media[\"media_url_https\"] in https_list:\n return True\n return False\n\n logger.info(\"Sart Search\")\n\n https_list = init_list(filename)\n logger.debug(\"init_list:{}\".format(https_list))\n count_https = 0\n\n for tweet in tweepy.Cursor(api.search, q=q, count=max_count_page).items():\n\n if have_media(tweet):\n\n if tweet.retweet_count >= min_rt and tweet.retweet_count <= max_rt:\n# logger.debug(\"[Entity]:{}\".format(tweet.entities))\n# logger.debug(\"[User] :{}\".format(tweet.user.name))\n logger.debug(\"[RT] :{}\".format(tweet.retweet_count))\n\n for media in tweet.entities[\"media\"]:\n\n if is_already_load(media, https_list):\n continue\n\n https_list.append(media[\"media_url_https\"])\n count_https += 1\n\n if count_https % 10 == 0:\n save_csv(https_list, filename)\n\n return https_list\n\ndef save_csv(data, save_file_name):\n\n df = pd.DataFrame(data)\n df.to_csv(save_file_name, mode=\"w\")\n 
logger.debug(\"save csv:{}\".format(df.shape))\n\ndef get_img(filename):\n \n df = pd.read_csv(filename, index_col=0)\n logger.debug(\"load csv:{}\".format(df.shape))\n\ndef main():\n\n logger.info(\"start\")\n\n auth = get_auth()\n\n api = tweepy.API(auth, retry_count=2, retry_delay=10, timeout=60, wait_on_rate_limit_notify=True, wait_on_rate_limit=True)\n\n https = get_https_list(api, args.q, args.min_rt, args.max_rt, args.max_count_page, args.f)\n\n get_img(args.f)\n\n logger.info(\"end\")\n\nif __name__ == \"__main__\":\n\n main()\n\n", "id": "5124628", "language": "Python", "matching_score": 2.664201259613037, "max_stars_count": 0, "path": "src/hello_tweepy.py" }, { "content": "import argparse\n\nparse = argparse.ArgumentParser()\nparse.add_argument(\"-q\", default=\"#sailormoonredraw\")\nparse.add_argument(\"-f\", default=\"../https_list.csv\")\nparse.add_argument(\"-r\", \"--min_rt\", type=int, default=1000)\nparse.add_argument(\"--max_rt\", type=int, default=10000)\nparse.add_argument(\"-p\", \"--max_count_page\", type=int, default=3)\n\nargs = parse.parse_args()\n", "id": "4406380", "language": "Python", "matching_score": 2.251749038696289, "max_stars_count": 0, "path": "src/get_args.py" } ]
1.990494
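A small usage sketch for the get_logger helper in the row above; the logger name and paths are just examples. One caveat worth noting: logging.getLogger returns the same instance per name, so calling get_logger twice with the same name attaches duplicate handlers and doubles every line; guarding with `if not logger.handlers:` before adding handlers would avoid that.

from get_logger import get_logger

logger = get_logger("hellotweepy", save_name="../test.log")
logger.debug("written to ../test.log only (file handler level is DEBUG)")
logger.info("written to both the console and ../test.log (stream handler level is INFO)")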
damjankrstajic
[ { "content": "#!/usr/bin/env python3\n\n# Author: <NAME>\n\nimport argparse\nimport saspy\nimport os.path\nimport sys\n\n# Usage:\n# ./run_sas.py -s example_1.sas \n# ./run_sas.py -s example_1.sas -l out1.log -o out1.lst\n# ./run_sas.py -s example_1.sas -r TEXT\n# ./run_sas.py -s example_1.sas -r HTML \n# ./run_sas.py -s example_1.sas -r htMl -l out2.log -o out2.html \n# ./run_sas.py -s example_1.sas -r teXt -l out3.log -o out3.lst \n# ./run_sas.py -s /home/a/b/c/example_1.sas \n# ./run_sas.py -s example_1.sas -r text -l out4.log -o out4.lst -c ssh\n \n\ndef main():\n parser = argparse.ArgumentParser(description=\"It executes SAS code using saspy.\")\n parser.add_argument('-s', '--sas_fname',help='Name of the SAS file to be executed.')\n parser.add_argument('-l', '--log_fname', help='Name of the output LOG file name. If not specified then it is the same as the sas_fname with .sas removed and .log added.')\n parser.add_argument('-o', '--lst_fname', help='Name of the output LST file. If not specified then it is the same as the sas_fname with .sas removed and .lst/.html added depending on the results format.')\n parser.add_argument('-r', '--results_format', help='Results format for sas_session.submit(). It may be either TEXT or HTML. If not specified it is TEXT by default. It is case incesensitive.')\n parser.add_argument('-c', '--cfgname', help='Name of the Configuration Definition to use for the SASsession. If not specified then just saspy.SASsession() is executed.')\n options = parser.parse_args()\n\n if options.sas_fname is None:\n parser.print_help()\n sys.exit(0)\n elif(not os.path.isfile(options.sas_fname)):\n print(\"\\nSAS file does not exist\\n\")\n sys.exit(0)\n\n sas_fname = options.sas_fname\n\n if options.log_fname is None:\n log_fname = os.path.splitext(sas_fname)[0] + \".log\"\n print(\"log_fname is \" + log_fname )\n else:\n log_fname = options.log_fname\n\n if options.results_format is None:\n results_format = 'TEXT' \n elif options.results_format.upper() in ('HTML','TEXT'):\n results_format = options.results_format\n else:\n parser.print_help()\n sys.exit(0)\n\n if options.lst_fname is None:\n if results_format == 'HTML':\n lst_fname = os.path.splitext(sas_fname)[0] + \".html\"\n else:\n lst_fname = os.path.splitext(sas_fname)[0] + \".lst\"\n print(\"lst_fname is \" + lst_fname )\n else:\n lst_fname = options.lst_fname\n\n sas_file = open(sas_fname,mode='r')\n sas_code_txt = sas_file.read()\n sas_file.close()\n\n if options.cfgname is None:\n sas_session = saspy.SASsession() \n else:\n sas_session = saspy.SASsession(cfgname=options.cfgname)\n\n c = sas_session.submit(sas_code_txt,results=results_format)\n\n with open(log_fname, 'w') as f1:\n f1.write(c[\"LOG\"])\n\n with open(lst_fname, 'w') as f2:\n f2.write(c[\"LST\"])\n\n sas_session.endsas()\n\nif __name__ == '__main__':\n main()\n\n", "id": "285085", "language": "Python", "matching_score": 0, "max_stars_count": 317, "path": "saspy/scripts/run_sas.py" } ]
0
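The run_sas.py script above is essentially a thin CLI around one saspy round-trip; a reduced sketch of that core, with the SAS snippet and output file names as placeholders:

import saspy

sas_session = saspy.SASsession()  # or saspy.SASsession(cfgname="ssh")
c = sas_session.submit("proc print data=sashelp.class; run;", results="TEXT")
with open("out.log", "w") as f:
    f.write(c["LOG"])  # the SAS log
with open("out.lst", "w") as f:
    f.write(c["LST"])  # the listing output
sas_session.endsas()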
PeiJueChen
[ { "content": "from scrapy import cmdline\n\n# 或者直接在當前目錄下: scrapy crawl spiderName\ncmdline.execute('scrapy crawl template'.split())", "id": "7642043", "language": "Python", "matching_score": 0.9960960149765015, "max_stars_count": 0, "path": "pythonScrapyTemplate/main.py" }, { "content": "import scrapy\n\n\nclass TemplateSpider(scrapy.Spider):\n name = 'template'\n allowed_domains = ['xxxx.com']\n start_urls = ['https://www.baidu.com/']\n\n # def start_requests(self):\n # pass\n\n def parse(self, response):\n print('response:',response.text)\n pass\n", "id": "6256647", "language": "Python", "matching_score": 0.8679538369178772, "max_stars_count": 0, "path": "pythonScrapyTemplate/spiders/template.py" }, { "content": "import pymongo\n\n\nclass Handle_mongo(object):\n\n\n __data_collection_name = 'collection_xxxx_item'\n __db_name = 'db_xxxx'\n def __init__(self):\n super().__init__()\n myclient = pymongo.MongoClient('mongodb://127.0.0.1:27017')\n self.db = myclient[self.__db_name]\n\n @property\n def collection(self):\n return self.db[self.__data_collection_name]\n\n def save_item(self, item):\n if not item:\n print('item is empty')\n return\n item = dict(item)\n if (type(item) is not dict):\n print('item is not dict')\n return\n\n xxxxId = item['xxxxId']\n if not xxxxId:\n print(\"missing xxxxId\",item)\n return\n # 根据id 去更新\n self.collection.update({'xxxxId': xxxxId}, item, True)\n\n def get_item(self):\n return self.collection.find({})\n\nmongo = Handle_mongo()", "id": "1766151", "language": "Python", "matching_score": 0.034003693610429764, "max_stars_count": 0, "path": "pythonScrapyTemplate/mongo/handle_mongo.py" }, { "content": "import os\n\nclass File(object):\n def writeFile(self, content, name=\"1.html\"):\n path = os.path.join(os.path.dirname(\n os.path.dirname(__file__)), 'file-contents')\n self.create_dir(path)\n fullPath = os.path.join(path, name)\n f = None\n try:\n f = open(fullPath, 'w', encoding=\"utf-8\")\n f.write(content)\n f.flush()\n finally:\n if f:\n f.close()\n\n def create_dir(self, dir_path):\n # if not os.path.exists(dir_path): os.mkdir(dir_path)\n # 可以创建多层\n if not self.isExist(dir_path):\n os.makedirs(dir_path)\n\n def isExist(self, dir_path):\n return os.path.exists(dir_path)\n # os.path.join(os.path.dirname(os.path.dirname(__file__)), 'images')\n\n def getCurrentFilePath():\n return os.path.dirname(os.path.dirname(__file__))\n\n\nfileTool = File()\n\n# fs = open('static/1.html', 'w', encoding=\"utf-8\")\n# fs.write(\"content\")\n# fs.close()\n", "id": "3467548", "language": "Python", "matching_score": 1.6107336282730103, "max_stars_count": 0, "path": "pythonScrapyTemplate/tool/file.py" }, { "content": "# MediaTool class\n\nimport subprocess\nimport time\nimport os\n\nclass MediaTool(object):\n\n __outputName = 'outputFile.mp4'\n\n def __init__(self) -> None:\n super().__init__()\n\n def setOutputName(self, name) -> None:\n self.__outputName = name\n\n def concatenate(self, sourcePath, outputName='outputFile.mp4', outputPath='vidoes'):\n \"\"\"\n sourcePath 为待拼接的视频的保存地址\n outputName 为拼接后视频的名称\n outputPath 为拼接后视频保存的地址\n \"\"\"\n\n tempFileTxt = 'video_path_list_temp.txt'\n filesExtensions = [\".flv\", \".mkv\", \".mp4\"]\n if not outputName or len(outputName) == 0:\n outputName = self.__outputName\n\n with open(tempFileTxt, 'w') as f:\n for root, dirs, files in os.walk(sourcePath):\n # 根據名字排序\n soredFiles = sorted(files)\n for file in soredFiles:\n if os.path.splitext(file)[1] in filesExtensions:\n v_path = os.path.join(root, file)\n f.write(\"file 
'{0}'\\n\".format(v_path))\n\n if os.path.exists(tempFileTxt):\n if not os.path.exists(outputPath):\n os.makedirs(outputPath)\n try:\n print(\"begin merge...\")\n path_name = os.path.join(outputPath, outputName)\n ffmpeg_command = r\"ffmpeg -f concat -safe 0 -i {0} -c copy {1}\".format(\n tempFileTxt, path_name)\n subprocess.call(ffmpeg_command, shell=True)\n # 删除temp文件\n os.remove(tempFileTxt)\n print(\"end merge...\")\n except Exception as e:\n print(\"merge e:\", e)\n\n", "id": "3047092", "language": "Python", "matching_score": 0.4589759409427643, "max_stars_count": 0, "path": "pythonScrapyTemplate/tool/media.py" }, { "content": "# DownloadTask class\nimport random\nimport socket\nfrom time import sleep\nfrom urllib.request import urlretrieve\nimport os\nfrom multiprocessing import Queue\nimport threading\nimport requests\nfrom urllib.parse import quote\nimport string\nfrom contextlib import closing\n\n\nclass File(object):\n def writeFile(self, content, name=\"1.html\"):\n path = os.path.join(os.path.dirname(\n os.path.dirname(__file__)), 'file-contents')\n self.create_dir(path)\n fullPath = os.path.join(path, name)\n f = None\n try:\n f = open(fullPath, 'w', encoding=\"utf-8\")\n f.write(content)\n f.flush()\n finally:\n if f:\n f.close()\n\n def create_dir(self, dir_path):\n # if not os.path.exists(dir_path): os.mkdir(dir_path)\n # 可以创建多层\n if not self.isExist(dir_path):\n os.makedirs(dir_path)\n\n def isExist(self, dir_path):\n return os.path.exists(dir_path)\n\n def getCurrentFilePath():\n return os.path.dirname(os.path.dirname(__file__))\n\n\nfileTool = File()\n\n\nclass ProgressBar(object):\n\n def __init__(self, title,\n count=0.0,\n run_status=None,\n fin_status=None,\n total=100.0,\n unit='', sep='/',\n chunk_size=1.0):\n super(ProgressBar, self).__init__()\n self.info = \"【%s】%s %.2f %s %s %.2f %s\"\n self.title = title\n self.total = total\n self.count = count\n self.chunk_size = chunk_size\n self.status = run_status or \"\"\n self.fin_status = fin_status or \" \" * len(self.status)\n self.unit = unit\n self.seq = sep\n\n def __get_info(self):\n # 【名称】状态 进度 单位 分割线 总数 单位\n _info = self.info % (self.title, self.status,\n self.count/self.chunk_size, self.unit, self.seq, self.total/self.chunk_size, self.unit)\n return _info\n\n def refresh(self, count=1, status=None):\n self.count += count\n # if status is not None:\n self.status = status or self.status\n end_str = \"\\r\"\n if self.count >= self.total:\n end_str = '\\n'\n self.status = status or self.fin_status\n print(self.__get_info(), end=end_str)\n\n\nFLAG = 'queue_flag_const'\n\n\nclass Crawl_thread(threading.Thread):\n def __init__(self, thread_name, queues_, queue_flag, download_method) -> None:\n super(Crawl_thread, self).__init__()\n self.thread_name = thread_name\n self.queues_ = queues_\n self.queue_flag = queue_flag\n self.download_method = download_method\n\n def run(self) -> None:\n # return super().run()\n print('当前启动的处理任务为%s' % self.thread_name)\n while self.queue_flag[FLAG] == False:\n try:\n # 通过get方法,将里面的imageurlget出来,get为空的时候,抛异常\n urlObject = self.queues_.get(block=False)\n self.download_method(urlObject)\n # 可能停1秒\n sleep(random.randint(0, 1))\n except Exception as e:\n print(\"except...\", e)\n pass\n\n\nclass DownloadTask(object):\n def __init__(self) -> None:\n super().__init__()\n self.queue_flag = {FLAG: False}\n\n @property\n def agent(self):\n user_agent_list = [\n \"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/22.0.1207.1 Safari/537.1\",\n \"Mozilla/5.0 (X11; 
CrOS i686 2268.111.0) AppleWebKit/536.11 (KHTML, like Gecko) Chrome/20.0.1132.57 Safari/536.11\",\n \"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/20.0.1092.0 Safari/536.6\",\n \"Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/20.0.1090.0 Safari/536.6\",\n \"Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/19.77.34.5 Safari/537.1\",\n \"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/536.5 (KHTML, like Gecko) Chrome/19.0.1084.9 Safari/536.5\",\n \"Mozilla/5.0 (Windows NT 6.0) AppleWebKit/536.5 (KHTML, like Gecko) Chrome/19.0.1084.36 Safari/536.5\",\n \"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3\",\n \"Mozilla/5.0 (Windows NT 5.1) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3\",\n \"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_0) AppleWebKit/ 536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3\",\n \"Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1062.0 Safari/536.3\",\n \"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1062.0 Safari/536.3\",\n \"Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3\",\n \"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3\",\n \"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3\",\n \"Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.0 Safari/536.3\",\n \"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.24 (KHTML, like Gecko) Chrome/19.0.1055.1 Safari/535.24\",\n \"Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/535.24 (KHTML, like Gecko) Chrome/19.0.1055.1 Safari/535.24\"\n ]\n # pick a random value\n agent = random.choice(user_agent_list)\n return agent\n\n def addAgent(self, agent):\n import urllib.request\n opener = urllib.request.build_opener()\n opener.addheaders = [('User-Agent', agent)]\n urllib.request.install_opener(opener)\n\n def reporthook(self, a, b, c):\n \"\"\"\n Show download progress\n :param a: number of data blocks downloaded so far\n :param b: size of a data block\n :param c: size of the remote file\n :return: None\n \"\"\"\n try:\n print(\"\\rdownloading: %5.1f%%\" % (a * b * 100.0 / c), end=\"done\")\n except:\n pass\n\n def downloadFile(self, urlObject):\n \"\"\"\n urlObject: {\n url: xxx, (required)\n folderName:xxx, (optional)\n fileName:xxx (required)\n }\n \"\"\"\n if not urlObject:\n print('missing urlObject')\n return\n\n urlObject = dict(urlObject)\n folderName = urlObject.get(\"folderName\", \"\")\n\n url = urlObject.get('url', None)\n fileName = urlObject.get(\"fileName\", None)\n\n if not url or not fileName:\n print(\"missing url or filename\")\n return\n\n path_ = os.path.join(os.path.dirname(\n os.path.dirname(__file__)), 'download-files')\n itemPath = path_\n\n if folderName and len(folderName) > 0:\n itemPath = os.path.join(path_, folderName)\n\n fileTool.create_dir(itemPath)\n\n itemFullPath = os.path.join(itemPath, fileName)\n if fileTool.isExist(itemFullPath):\n print('fileName:%s is exist' % (fileName))\n return\n\n try:\n print('%s >>> Start Downloading...' 
% (fileName))\n # quote the url in case it contains Chinese characters\n url = quote(url, safe=string.printable)\n\n # set timeout\n # socket.setdefaulttimeout(300)\n # self.addAgent(self.agent)\n # urlretrieve(url=url, filename=itemFullPath, reporthook=self.reporthook)\n\n # max bytes per single request\n chunk_size = 1024\n # 300s\n timeout = 300\n with closing(requests.get(url, stream=True, timeout=timeout)) as response:\n # total size of the response body\n content_size = int(response.headers['content-length'])\n progress = ProgressBar(fileName, total=content_size, unit=\"KB\", chunk_size=chunk_size, run_status=\"正在下载\", fin_status=\"下载完成\")\n with open(itemFullPath, \"wb\") as file:\n for data in response.iter_content(chunk_size=chunk_size):\n file.write(data)\n file.flush()\n progress.refresh(count=len(data))\n\n print('%s >>> End Downloading...' % (fileName))\n\n except Exception as e:\n print('download error:', e)\n pass\n\n def downloadItems(self, urlObjects):\n \"\"\"\n urlObjects : [{\n url: xxx, (required)\n folderName:xxx, (optional)\n fileName:xxx (required)\n }]\n \"\"\"\n if not urlObjects:\n print('missing urlObjects')\n return\n # urls = itemInfo[urlKey]\n if len(urlObjects) == 0:\n print('urlObjects the len is 0')\n return\n\n self.setupQueue(urlObjects)\n\n def setupQueue(self, urlObjects):\n # start the queue\n task_queues = Queue()\n\n for url in urlObjects:\n task_queues.put(url)\n\n crawl_urls_list = [\"Task处理线程1号\", \"Task处理线程2号\", \"Task处理线程3号\"]\n\n urlsLength = len(urlObjects)\n if urlsLength < 3:\n crawl_urls_list.clear()\n for i in range(urlsLength):\n str = \"Task处理线程{0}号\".format(i+1)\n crawl_urls_list.append(str)\n\n url_thread_list = []\n for url_thread in crawl_urls_list:\n thread_ = Crawl_thread(\n url_thread, task_queues, self.queue_flag, self.downloadFile)\n # start the thread\n thread_.start()\n url_thread_list.append(thread_)\n\n while not task_queues.empty():\n pass\n\n self.queue_flag[FLAG] = True\n\n # join and finish the worker threads\n for thread_join in url_thread_list:\n thread_join.join()\n print(thread_join.thread_name, ': 处理结束')\n\n\n# USE\n# urls = [{\n# \"url\": 'https://api.pjue.top/uploads/mdImages/1601994125093.png',\n# 'fileName': '1601994125092.png',\n# \"folderName\": 'pnpn'\n# }]\n# DownloadTask().downloadItems(urls)\n", "id": "7533578", "language": "Python", "matching_score": 3.980862617492676, "max_stars_count": 0, "path": "pythonScrapyTemplate/tool/download.py" }, { "content": "from selenium import webdriver\nfrom bs4 import BeautifulSoup\nfrom lxml import etree\nimport random\n\n# https://www.cnblogs.com/jxldjsn/p/7399263.html\n\n\nclass WebdriverChrome(object):\n\n __myB = None\n\n def __init__(self) -> None:\n super().__init__()\n\n @property\n def agent(self):\n user_agent_list = [\n \"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/22.0.1207.1 Safari/537.1\",\n \"Mozilla/5.0 (X11; CrOS i686 2268.111.0) AppleWebKit/536.11 (KHTML, like Gecko) Chrome/20.0.1132.57 Safari/536.11\",\n \"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/20.0.1092.0 Safari/536.6\",\n \"Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/20.0.1090.0 Safari/536.6\",\n \"Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/19.77.34.5 Safari/537.1\",\n \"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/536.5 (KHTML, like Gecko) Chrome/19.0.1084.9 Safari/536.5\",\n \"Mozilla/5.0 (Windows NT 6.0) AppleWebKit/536.5 (KHTML, like Gecko) Chrome/19.0.1084.36 Safari/536.5\",\n \"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 
Safari/536.3\",\n \"Mozilla/5.0 (Windows NT 5.1) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3\",\n \"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_0) AppleWebKit/ 536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3\",\n \"Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1062.0 Safari/536.3\",\n \"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1062.0 Safari/536.3\",\n \"Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3\",\n \"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3\",\n \"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3\",\n \"Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.0 Safari/536.3\",\n \"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.24 (KHTML, like Gecko) Chrome/19.0.1055.1 Safari/535.24\",\n \"Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/535.24 (KHTML, like Gecko) Chrome/19.0.1055.1 Safari/535.24\"\n ]\n # 随机取一个值\n agent = random.choice(user_agent_list)\n return agent\n\n def getOptions(self, referer=\"\"):\n chrome_options = webdriver.ChromeOptions()\n # 使用headless无界面浏览器模式,即不需要打开浏览器\n chrome_options.add_argument('--headless') # 增加无界面选项\n chrome_options.add_argument('--disable-gpu') # 如果不加这个选项,有时定位会出现问题\n chrome_options.add_argument('--no-sandbox')\n chrome_options.add_argument(\n 'user-agent=\"{0}\"'.format(self.agent))\n if referer and len(referer) > 0:\n chrome_options.add_argument('Referer=\"{0}\"'.format(referer))\n\n return chrome_options\n\n @property\n def getMyDriver(self):\n return self.__myB\n\n def get(self, url, referer=\"\"):\n # 建立Chrome的驱动\n self.__myB = webdriver.Chrome(options=self.getOptions(referer))\n # 最大窗口\n self.__myB.maximize_window()\n # 隐式等待,动态查找元素\n self.__myB.implicitly_wait(10)\n\n self.__myB.get(url)\n\n return self.__myB\n\n # get element by xpath\n # a = b.find_element_by_xpath(\n # \"//div[@class='area']//div[@class='gohome l']//h1/a\")\n # name = a.get_attribute('innerHTML')\n # # href = a.get_attribute('href')\n\n # # 进入iframe内嵌网页\n # # b.switch_to.frame(\"playbox\")\n # b.switch_to.frame(b.find_elements_by_tag_name(\"iframe\")[0])\n\n # video = b.find_element_by_class_name('video')\n # url = self.getAttribute(video,'url')\n\n # html = b.page_source # 打印页面\n # soup = BeautifulSoup(html, \"html.parser\")\n # # 以标准格式输出\n # prettify = soup.prettify()\n\n # html = etree.HTML(prettify)\n # url = html.xpath(\"//*[@id='dplayer']//video/@src\")[0]\n\n def find_element_by_xpath(self, xpath):\n if not self.__myB:\n print('Please run get method')\n return\n return self.__myB.find_element_by_xpath(xpath)\n\n def switch_to_default(self):\n if not self.__myB:\n print('Please run get method')\n return\n self.__myB.switch_to.default_content()\n\n def switch_to_frame(self, element):\n if not self.__myB:\n print('Please run get method')\n return\n self.__myB.switch_to.frame(element)\n\n # innerHTML / href\n def getAttribute(self, element, attribute):\n return element.get_attribute(attribute)\n\n # can use xpath, 传入的html 最好经过美化\n def etreeHtml(self, html):\n # 打印解析内容str\n # t = etree.tostring(etree.HTML(html), encoding=\"utf-8\", pretty_print=True)\n return etree.HTML(html)\n\n # 美化, 建议使用etree\n def prettify(self):\n if not self.__myB:\n print('Please run get method')\n return\n html = self.HTML_PAGE(self.__myB)\n return BeautifulSoup(html, 
\"html.parser\").prettify()\n\n # page\n def HTML_PAGE(self):\n if not self.__myB:\n print('Please run get method')\n return\n return self.__myB.page_source\n\n\nmyWebdriver = WebdriverChrome()\n", "id": "10412761", "language": "Python", "matching_score": 3.4255518913269043, "max_stars_count": 0, "path": "pythonScrapyTemplate/tool/webdriverChrome.py" } ]
0.996096
yuejiaxiang
[ { "content": "import re\n\nfrom transformers import BertTokenizer,ElectraTokenizer\nfrom utils.extract_chinese_and_punct import ChineseAndPunctuationExtractor\n# from extract_chinese_and_punct import ChineseAndPunctuationExtractor\nfrom transformers import AutoTokenizer\n\nchineseandpunctuationextractor = ChineseAndPunctuationExtractor()\n# moren_tokenizer = BertTokenizer.from_pretrained(\"bert-large-uncased\",do_lower_case=True)\n# moren_tokenizer = BertTokenizer.from_pretrained('pred_ere/', do_lower_case=True)\n#moren_tokenizer = ElectraTokenizer.from_pretrained('/apdcephfs/common/disease_data/yunyandata/ft_local/pretrain_chinese/electra-large', do_lower_case=True)\n#moren_tokenizer = BertTokenizer.from_pretrained('/apdcephfs/common/disease_data/yunyandata/ft_local/pretrain_chinese/nezha', do_lower_case=True)\n#moren_tokenizer = BertTokenizer.from_pretrained('/apdcephfs/common/disease_data/yunyandata/ft_local/pretrain_chinese/chinese_wwm_ext_pytorch', do_lower_case=True)\ndef covert_to_tokens(text, tokenizer=None, return_orig_index=False, max_seq_length=300):\n if not tokenizer:\n tokenizer =moren_tokenizer\n sub_text = []\n buff = \"\"\n flag_en = False\n flag_digit = False\n\n def _is_delimiter(c):\n if c>='A' and c<=\"Z\":\n return False\n elif c>=\"a\" and c<=\"z\":\n return False\n elif c>=\"0\" and c<=\"9\":\n return False\n else:\n return True\n\n prev_is_delimiter = True\n for idx, c in enumerate(text):\n # if chineseandpunctuationextractor.is_chinese_or_punct(char):\n # if buff != \"\":\n # sub_text.append(buff)\n # buff = \"\"\n # sub_text.append(char)\n # # flag_en = False\n # # flag_digit = False\n # else:\n if _is_delimiter(c):\n prev_is_delimiter = True\n sub_text.append(c)\n else:\n if prev_is_delimiter:\n sub_text.append(c)\n else:\n sub_text[-1] += c\n prev_is_delimiter = False\n # if re.compile('\\d').match(char): # 数字\n # if buff != \"\" and flag_en:\n # sub_text.append(buff)\n # buff = \"\"\n # flag_en = False\n # flag_digit = True\n # buff += char\n # # elif char >='A' and char<='Z':\n # # flag_digit = True\n # # buff += char\n # # elif char >='a' and char<='z':\n # # flag_digit = True\n # # buff += char\n # else:\n # if buff != \"\" and flag_digit:\n # sub_text.append(buff)\n # buff = \"\"\n # flag_digit = False\n # flag_en = True\n # buff += char\n # if buff != \"\":\n # sub_text.append(buff)\n\n tok_to_orig_start_index = []\n tok_to_orig_end_index = []\n tokens = []\n text_tmp = ''\n for (i, token) in enumerate(sub_text):\n sub_tokens = tokenizer.tokenize(token) if token != ' ' else []\n text_tmp += token\n for sub_token in sub_tokens:\n tok_to_orig_start_index.append(len(text_tmp) - len(token))\n tok_to_orig_end_index.append(len(text_tmp) - 1)\n tokens.append(sub_token)\n if len(tokens) >= max_seq_length - 2:\n break\n else:\n continue\n break\n if return_orig_index:\n return tokens, tok_to_orig_start_index, tok_to_orig_end_index\n else:\n return tokens\n\n\ndef search_spo_index(tokens, subject_sub_tokens, object_sub_tokens):\n subject_start_index, object_start_index = -1, -1\n forbidden_index = None\n if len(subject_sub_tokens) > len(object_sub_tokens):\n for index in range(\n len(tokens) - len(subject_sub_tokens) + 1):\n if tokens[index:index + len(\n subject_sub_tokens)] == subject_sub_tokens:\n subject_start_index = index\n forbidden_index = index\n break\n\n for index in range(\n len(tokens) - len(object_sub_tokens) + 1):\n if tokens[index:index + len(\n object_sub_tokens)] == object_sub_tokens:\n if forbidden_index is None:\n object_start_index = index\n 
break\n # check if labeled already\n elif index < forbidden_index or index >= forbidden_index + len(\n subject_sub_tokens):\n object_start_index = index\n\n break\n\n else:\n for index in range(\n len(tokens) - len(object_sub_tokens) + 1):\n if tokens[index:index + len(\n object_sub_tokens)] == object_sub_tokens:\n object_start_index = index\n forbidden_index = index\n break\n\n for index in range(\n len(tokens) - len(subject_sub_tokens) + 1):\n if tokens[index:index + len(\n subject_sub_tokens)] == subject_sub_tokens:\n if forbidden_index is None:\n subject_start_index = index\n break\n elif index < forbidden_index or index >= forbidden_index + len(\n object_sub_tokens):\n subject_start_index = index\n break\n\n return subject_start_index, object_start_index\n\n\ndef search_first(pattern, sequence):\n \"\"\"从sequence中寻找子串pattern\n 如果找到,返回第一个下标;否则返回-1。\n \"\"\"\n n = len(pattern)\n for i in range(len(sequence)):\n if sequence[i:i + n] == pattern:\n return i\n return -1\n\n\ndef search_all(pattern, sequence):\n n = len(pattern)\n counts = 0\n starts = []\n for i in range(len(sequence)):\n if sequence[i:i+n] == pattern:\n counts += 1\n starts.append(i)\n return starts\n\n# def search_all(word, sentence):\n# count = 0\n# starts = []\n# index = sentence.find(word)\n# while index != -1:\n# starts.append(index)\n# count += 1\n# index = sentence.find(word, index + len(word))\n# return count, starts\n\nif __name__ == '__main__':\n text = 'ct患者1月余前无明显诱因下出现失语,只能发单音,有理解困难,右侧肢体活动不利,表现右上肢无法在床面移动,右下肢制动(石膏固定在位),无恶心呕吐,无神志不清,无发热,无肢体抽搐,无大小便失禁,遂由家属送至我院急诊,查“颅脑CT+肺部CT:右侧基底节区软化灶。心脏二尖瓣置换术后,左心明显增大,冠脉钙化,心包少量积液。左侧胸膜反应”,未予特殊处理,转至神经内科,予“盐酸川穹嗪针活血,依达拉奉针清除氧自由基,华法林(2.5mgqn自备)抗凝,瑞代营养支持,格列齐特控制血糖及对症支持治疗”等,上述症状有所好转。出院后在我科康复治疗,目前患者言语不利,吐词不清,右侧肢体活动不利,右上肢偶可见自主活动,右下肢未见自主活动,有咳嗽咳痰,无恶心呕吐,无发热,无胸痛心悸,无腹痛腹泻等不适'\n # text = 'Eg≈2.0 eV'\n # text='A precision of 1.00 (US = UH) means the hash function is 100% accurate (i.e., it produces a unique hash value for every distinct slice) whereas a precision of 1/US means that the hash function produces the same hash value for every slice leaving UH = 1.'\n # text='•SOC stocks decreased by 12.4% in Costa Rica and 0.13% in Nicaragua after establishment of coffee AFS.•SOC stocks increased in the top 10 cm of soil; greater reduction occurred at 20–40 cm.•Organic management caused a greater increase in 0–10 cm SOC but did not influence reduction at depth.•Shade type effects on SOC were smaller; no significant difference between shaded and unshaded coffee.•SOC stocks tend to converge on a level determined by site environment during establishment.'\n # tokenizer = AutoTokenizer.from_pretrained('/Users/zhangyunyan/Downloads/python库/pretrained/bert-base-uncased')\n tokenizer = AutoTokenizer.from_pretrained('/Users/zhangyunyan/Downloads/python库/pre-trained-chinese/MedBert-base')\n tokens, tok_s, tok_e = covert_to_tokens(text, tokenizer=tokenizer,return_orig_index=True)\n print('1')", "id": "4602860", "language": "Python", "matching_score": 5.812165260314941, "max_stars_count": 0, "path": "utils/data_utils.py" }, { "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n# Copyright (c) 2019 Baidu.com, Inc. 
All Rights Reserved\n#\n\"\"\"\nrequirements:\nAuthors: daisongtai(<EMAIL>)\nDate: 2019/5/29 6:38 PM\n\"\"\"\nfrom __future__ import print_function\n\nimport re\n\nfrom transformers import BertTokenizer,ElectraTokenizer\n\n\nmax_seq_length = 300\n# tokenizer = BertTokenizer.from_pretrained('bert-large-uncased', do_lower_case=True)\n# tokenizer = BertTokenizer.from_pretrained('pred_ere/', do_lower_case=True)\n# tokenizer = BertTokenizer.from_pretrained('/apdcephfs/common/disease_data/yunyandata/ft_local/pretrain_chinese/chinese_roberta_wwm_large_ext_pytorch', do_lower_case=True)\n#tokenizer = ElectraTokenizer.from_pretrained('/apdcephfs/common/disease_data/yunyandata/ft_local/pretrain_chinese/electra-large', do_lower_case=True)\n#tokenizer = BertTokenizer.from_pretrained('/apdcephfs/common/disease_data/yunyandata/ft_local/pretrain_chinese/chinese_wwm_ext_pytorch', do_lower_case=True)\n#tokenizer = BertTokenizer.from_pretrained('/apdcephfs/common/disease_data/yunyandata/ft_local/pretrain_chinese/nezha', do_lower_case=True)\n\nLHan = [\n [0x2E80, 0x2E99], # Han # So [26] CJK RADICAL REPEAT, CJK RADICAL RAP\n [0x2E9B, 0x2EF3\n ], # Han # So [89] CJK RADICAL CHOKE, CJK RADICAL C-SIMPLIFIED TURTLE\n [0x2F00, 0x2FD5], # Han # So [214] KANGXI RADICAL ONE, KANGXI RADICAL FLUTE\n 0x3005, # Han # Lm IDEOGRAPHIC ITERATION MARK\n 0x3007, # Han # Nl IDEOGRAPHIC NUMBER ZERO\n [0x3021,\n 0x3029], # Han # Nl [9] HANGZHOU NUMERAL ONE, HANGZHOU NUMERAL NINE\n [0x3038,\n 0x303A], # Han # Nl [3] HANGZHOU NUMERAL TEN, HANGZHOU NUMERAL THIRTY\n 0x303B, # Han # Lm VERTICAL IDEOGRAPHIC ITERATION MARK\n [\n 0x3400, 0x4DB5\n ], # Han # Lo [6582] CJK UNIFIED IDEOGRAPH-3400, CJK UNIFIED IDEOGRAPH-4DB5\n [\n 0x4E00, 0x9FC3\n ], # Han # Lo [20932] CJK UNIFIED IDEOGRAPH-4E00, CJK UNIFIED IDEOGRAPH-9FC3\n [\n 0xF900, 0xFA2D\n ], # Han # Lo [302] CJK COMPATIBILITY IDEOGRAPH-F900, CJK COMPATIBILITY IDEOGRAPH-FA2D\n [\n 0xFA30, 0xFA6A\n ], # Han # Lo [59] CJK COMPATIBILITY IDEOGRAPH-FA30, CJK COMPATIBILITY IDEOGRAPH-FA6A\n [\n 0xFA70, 0xFAD9\n ], # Han # Lo [106] CJK COMPATIBILITY IDEOGRAPH-FA70, CJK COMPATIBILITY IDEOGRAPH-FAD9\n [\n 0x20000, 0x2A6D6\n ], # Han # Lo [42711] CJK UNIFIED IDEOGRAPH-20000, CJK UNIFIED IDEOGRAPH-2A6D6\n [0x2F800, 0x2FA1D]\n] # Han # Lo [542] CJK COMPATIBILITY IDEOGRAPH-2F800, CJK COMPATIBILITY IDEOGRAPH-2FA1D\n\nCN_PUNCTS = [(0x3002, \"。\"), (0xFF1F, \"?\"), (0xFF01, \"!\"), (0xFF0C, \",\"),\n (0x3001, \"、\"), (0xFF1B, \";\"), (0xFF1A, \":\"), (0x300C, \"「\"),\n (0x300D, \"」\"), (0x300E, \"『\"), (0x300F, \"』\"), (0x2018, \"‘\"),\n (0x2019, \"’\"), (0x201C, \"“\"), (0x201D, \"”\"), (0xFF08, \"(\"),\n (0xFF09, \")\"), (0x3014, \"〔\"), (0x3015, \"〕\"), (0x3010, \"【\"),\n (0x3011, \"】\"), (0x2014, \"—\"), (0x2026, \"…\"), (0x2013, \"–\"),\n (0xFF0E, \".\"), (0x300A, \"《\"), (0x300B, \"》\"), (0x3008, \"〈\"),\n (0x2460, \"①\"), (0x2461, \"②\"), (0x2462, \"③\"), (0x2463, \"④\"),\n (0x2464, \"⑤\"), (0x2465, \"⑥\"), (0x2466, \"⑦\"), (0x2467, \"⑧\"), (0x2468, \"⑨\"), (0x2469, \"⑩\"),\n (0x3009, \"〉\"), (0x2015, \"―\"), (0xff0d, \"-\"), (0x0020, \" \"), (0xFF5E, \"~\")]\n# (0xFF5E, \"~\"),\n\nEN_PUNCTS = [[0x0021, 0x002F], [0x003A, 0x0040], [0x005B, 0x0060],\n [0x007B, 0x007E]]\n\n\nclass ChineseAndPunctuationExtractor(object):\n def __init__(self):\n self.chinese_re = self.build_re()\n\n def is_chinese_or_punct(self, c):\n if self.chinese_re.match(c):\n return True\n else:\n return False\n\n def build_re(self):\n L = []\n for i in LHan:\n if isinstance(i, list):\n f, t = i\n try:\n f = chr(f)\n t = 
chr(t)\n L.append('%s-%s' % (f, t))\n except:\n pass # A narrow python build, so can't use chars > 65535 without surrogate pairs!\n\n else:\n try:\n L.append(chr(i))\n except:\n pass\n for j, _ in CN_PUNCTS:\n try:\n L.append(chr(j))\n except:\n pass\n\n for k in EN_PUNCTS:\n f, t = k\n try:\n f = chr(f)\n t = chr(t)\n L.append('%s-%s' % (f, t))\n except:\n raise ValueError()\n pass # A narrow python build, so can't use chars > 65535 without surrogate pairs!\n\n RE = '[%s]' % ''.join(L)\n # print('RE:', RE.encode('utf-8'))\n return re.compile(RE, re.UNICODE)\n\n\nif __name__ == '__main__':\n extractor = ChineseAndPunctuationExtractor()\n # for c in \"韩邦庆(1856~1894)曾用名寄,字子云,别署太仙、大一山人、花也怜侬、三庆\":\n # if extractor.is_chinese_or_punct(c):\n # print(c, 'yes')\n # else:\n # print(c, \"no\")\n #\n # print(\"~\", extractor.is_chinese_or_punct(\"~\"))\n # print(\"~\", extractor.is_chinese_or_punct(\"~\"))\n # print(\"―\", extractor.is_chinese_or_punct(\"―\"))\n # print(\"-\", extractor.is_chinese_or_punct(\"-\"))\n\n text_raw = \"1%~2%ALL患者有Burkitt淋巴瘤的形态学及免疫学特征(即FAB形态学分类中的L3型),常伴有腹部或其余部位的包块性疾病,可以将之视为极晚期Burkitt淋巴瘤,对这类患儿应采用晚期Burkitt淋巴瘤的治疗方案。\"\n\n sub_text = []\n buff = \"\"\n flag_en = False\n flag_digit = False\n for char in text_raw:\n if extractor.is_chinese_or_punct(char):\n if buff != \"\":\n sub_text.append(buff)\n buff = \"\"\n sub_text.append(char)\n flag_en = False\n flag_digit = False\n else:\n if re.compile('\\d').match(char):\n if buff != \"\" and flag_en:\n sub_text.append(buff)\n buff = \"\"\n flag_en = False\n flag_digit = True\n buff += char\n else:\n if buff != \"\" and flag_digit:\n sub_text.append(buff)\n buff = \"\"\n flag_digit = False\n flag_en = True\n buff += char\n\n if buff != \"\":\n sub_text.append(buff)\n\n tok_to_orig_start_index = []\n tok_to_orig_end_index = []\n tokens = []\n text_tmp = ''\n for (i, token) in enumerate(sub_text):\n sub_tokens = tokenizer.tokenize(token) if token != ' ' else []\n text_tmp += token\n for sub_token in sub_tokens:\n tok_to_orig_start_index.append(len(text_tmp) - len(token))\n tok_to_orig_end_index.append(len(text_tmp) - 1)\n tokens.append(sub_token)\n if len(tokens) >= max_seq_length - 2:\n break\n else:\n continue\n break\n\n print(sub_text)\n print(tokens)\n", "id": "3350523", "language": "Python", "matching_score": 1.0681908130645752, "max_stars_count": 0, "path": "utils/extract_chinese_and_punct.py" }, { "content": "# -*- coding: UTF-8 -*-\n# @Time : 2021/2/1\n# @Author : <EMAIL>\n# Apache License\n# Copyright©2020-2021 <EMAIL> All Rights Reserved\nimport pandas as pd\nimport numpy as np\nimport json\n\ndef formated(data):\n data_reranked = {}\n for d in data:\n data_reranked[d[2]] = d\n eid = data_reranked['Quantity'][3]\n return eid, data_reranked\n\n\ndef get_ori_data():\n infile = '../ner_process/pre_tsv_format_concat/union3.tsv'\n data = pd.read_csv(infile, sep='\\t', header=0)\n all_datas = []\n doc_datas = {}\n tmp_datas = []\n doc_rank = []\n last_doc_id = ''\n for index, row in data.iterrows():\n docId = row['docId']\n annotSet = row['annotSet']\n annotType = row['annotType']\n startOffset = int(row['startOffset'])\n endOffset = int(row['endOffset'])\n annotId = row['annotId']\n text = row['text']\n if pd.isnull(row['other']):\n other = {}\n else:\n other = json.loads(row['other'])\n data = [docId, annotSet, annotType, startOffset,\n endOffset, annotId, text, other]\n\n if annotType == 'Quantity':\n if len(tmp_datas) != 0:\n eid, data_formated = formated(tmp_datas)\n doc_datas[(last_doc_id, eid)] = data_formated\n tmp_datas = 
[]\n\n if docId not in doc_rank:\n if doc_datas != {}:\n all_datas.append(doc_datas)\n doc_datas = {}\n doc_rank.append(docId)\n\n tmp_datas.append(data)\n last_doc_id = docId\n\n if len(tmp_datas) != 0:\n eid, data_formated = formated(tmp_datas)\n doc_datas[(last_doc_id, eid)] = data_formated\n if doc_datas != {}:\n all_datas.append(doc_datas)\n doc_datas = {}\n return all_datas\n\n\ndef rerank_data(data):\n reranked_data = []\n\n for doc in data:\n id = 0\n k_rank = sorted(list(doc.keys()), key=lambda x: x[1])\n for k in k_rank:\n id += 1\n anno = doc[k]\n q = anno['Quantity']\n q[1] = id\n q[5] = 'T1-' + str(id)\n reranked_data.append(q)\n if 'MeasuredProperty' in anno and 'MeasuredEntity' not in anno:\n p = anno['MeasuredProperty']\n p[1] = id\n p[5] = 'T2-' + str(id)\n p[7]['HasQuantity'] = q[5]\n reranked_data.append(p)\n if 'MeasuredProperty' not in anno and 'MeasuredEntity' in anno:\n e = anno['MeasuredEntity']\n e[1] = id\n e[5] = 'T3-' + str(id)\n e[7]['HasQuantity'] = q[5]\n reranked_data.append(e)\n if 'MeasuredProperty' in anno and 'MeasuredEntity' in anno:\n p = anno['MeasuredProperty']\n p[1] = id\n p[5] = 'T2-' + str(id)\n p[7]['HasQuantity'] = q[5]\n reranked_data.append(p)\n e = anno['MeasuredEntity']\n e[1] = id\n e[5] = 'T3-' + str(id)\n e[7]['HasProperty'] = p[5]\n reranked_data.append(e)\n\n out_data = ['docId\\tannotSet\\tannotType\\tstartOffset\\tendOffset\\tannotId\\ttext\\tother']\n for d in reranked_data:\n d[7] = json.dumps(d[7])\n d[1] = str(d[1])\n d[3] = str(d[3])\n d[4] = str(d[4])\n newline = [d[0], d[1], d[2], d[3], d[4], d[5], d[6], d[7]]\n out_data.append('\\t'.join(newline))\n\n return out_data\n\n\nimport json\nimport math\ndef get_b(text, item, slice_idx, token):\n token = token.replace('10.0', '10')\n token = token.replace('11.0', '11')\n token = token.replace('12.0', '12')\n token = token.replace('13.0', '13')\n token = token.replace('14.0', '14')\n token = token.replace('15.0', '15')\n token = token.replace('16.0', '16')\n token = token.replace('17.0', '17')\n token = token.replace('18.0', '18')\n token = token.replace('19.0', '19')\n token = token.replace('1.0', '1')\n token = token.replace('2.0', '2')\n token = token.replace('3.0', '3')\n token = token.replace('4.0', '4')\n token = token.replace('5.0', '5')\n token = token.replace('6.0', '6')\n token = token.replace('7.0', '7')\n token = token.replace('8.0', '8')\n token = token.replace('9.0', '9')\n token = token.replace('0.0', '0')\n t1 = 0\n t2 = 1\n try:\n if len(token) > 0:\n if '-' in token:\n x = str(token).split('-')\n else:\n x = str(token).split('$')\n t1 = int(x[0])\n if len(x) > 1:\n t2 = int(x[1])\n except Exception as e:\n return -1\n\n if t1 >= len(slice_idx):\n return -1\n end = slice_idx[t1]\n for i in range(t2):\n b = text.find(item, end)\n end = b + len(item)\n return b\n\ndef isnan(thing):\n if type(thing) != float:\n return False\n return math.isnan(thing)\n\n\ndef build_quantity_dictionary(total_df):\n quantity_dictionary = {}\n time = 0\n for excel_line, d in total_df.iterrows():\n if time == 0:\n time = 1\n continue\n if not isnan(d[0]):\n para_id = d[0] if not isnan(d[0]) else ''\n sent_idx = eval(d[1]) if not isnan(d[1]) else ''\n sent_text = str(d[2]) if not isnan(d[2]) else ''\n slice_idx = json.loads(d[3]) if not isnan(d[3]) else ''\n quantity = str(d[5]) if not isnan(d[5]) else ''\n quantity_line = str(d[6]).strip() if not isnan(d[6]) else ''\n unit = str(d[7]).strip() if not isnan(d[7]) else ''\n mod = str(d[8]).strip().split(' ') if not isnan(d[8]) else ['']\n 
property = str(d[9]) if not isnan(d[9]) else ''\n property_line = str(d[10]).strip() if not isnan(d[10]) else ''\n entity = str(d[11]) if not isnan(d[11]) else ''\n entity_line = str(d[12]).strip() if not isnan(d[12]) else ''\n score = str(d[14]).strip() if not isnan(d[14]) else ''\n if score not in ['4']:\n continue\n quantity_Tid = ''\n property_Tid = ''\n begin_quantity_index = get_b(sent_text,quantity,slice_idx,quantity_line)+sent_idx[0]\n begin_property_index = get_b(sent_text,property,slice_idx,property_line)+sent_idx[0]\n begin_entity_index = get_b(sent_text,entity,slice_idx,entity_line)+sent_idx[0]\n quantity_key = tuple([para_id,quantity,begin_quantity_index])\n data_dict = {}\n data_dict['unit'] = unit\n data_dict['mod'] = mod\n data_dict['property'] = tuple([property,begin_property_index])\n data_dict['entity'] = tuple([entity,begin_entity_index])\n quantity_dictionary[quantity_key] = data_dict\n return quantity_dictionary\n\n\ndef update_human(data, human):\n hm = {}\n change = 0\n for k,v in human.items():\n hm[(k[0], k[2])] = k\n for pid, d in enumerate(data):\n for k,v in d.items():\n if k in hm:\n anno = human[hm[k]]\n if 'entity' in anno and 'MeasuredEntity' in d[k]:\n if anno['entity'][0] != d[k]['MeasuredEntity'][6]:\n print(pid, k, d[k]['MeasuredEntity'][6], '###', anno['entity'][0])\n d[k]['MeasuredEntity'][6] = anno['entity'][0]\n d[k]['MeasuredEntity'][3] = str(anno['entity'][1])\n d[k]['MeasuredEntity'][4] = str(anno['entity'][1] + len(anno['entity'][0]))\n change += 1\n print(change)\n return data\n\n\n\n\nori_data = get_ori_data()\n# out_data = rerank_data(ori_data)\n\nxlsx_file = 'anno_we.xlsx'\nsheet = pd.read_excel(xlsx_file, header=None, skiprows=1)\ndata = np.array(sheet).tolist()\npd_data = pd.read_excel(xlsx_file, index_col=None, header=None)\nhuman = build_quantity_dictionary(pd_data)\n\nori_data_new = update_human(ori_data, human)\nout_data = rerank_data(ori_data_new)\n\nwith open('out_rerank.tsv', 'w', encoding='utf8') as fout:\n for d in out_data:\n print(d, file=fout)\n", "id": "12132812", "language": "Python", "matching_score": 4.3182573318481445, "max_stars_count": 0, "path": "post_process/rerank_data.py" }, { "content": "# -*- coding: UTF-8 -*-\n# @Time : 2021/1/25\n# @Author : <EMAIL>\n# Apache License\n# Copyright©2020-2021 <EMAIL> All Rights Reserved\nimport json\nimport os\nimport math\nimport pandas as pd\nimport numpy as np\nfrom data_process.change_data_format_unit import excel2tsv_unit, save_data, clean_pre_tsv_format\n\n\ndef isnan(thing):\n if type(thing) != float:\n return False\n return math.isnan(thing)\n\n\ndef get_b(text, item, slice_idx, token):\n token = token.replace('10.0', '10')\n token = token.replace('11.0', '11')\n token = token.replace('12.0', '12')\n token = token.replace('13.0', '13')\n token = token.replace('14.0', '14')\n token = token.replace('15.0', '15')\n token = token.replace('16.0', '16')\n token = token.replace('17.0', '17')\n token = token.replace('18.0', '18')\n token = token.replace('19.0', '19')\n token = token.replace('1.0', '1')\n token = token.replace('2.0', '2')\n token = token.replace('3.0', '3')\n token = token.replace('4.0', '4')\n token = token.replace('5.0', '5')\n token = token.replace('6.0', '6')\n token = token.replace('7.0', '7')\n token = token.replace('8.0', '8')\n token = token.replace('9.0', '9')\n token = token.replace('0.0', '0')\n t1 = 0\n t2 = 1\n try:\n if len(token) > 0:\n if '-' in token:\n x = str(token).split('-')\n else:\n x = str(token).split('$')\n t1 = int(x[0])\n if len(x) > 1:\n 
t2 = int(x[1])\n except Exception as e:\n return -1\n\n if t1 >= len(slice_idx):\n return -1\n end = slice_idx[t1]\n for i in range(t2):\n b = text.find(item, end)\n end = b + len(item)\n return b\n\n\ndef excel2tsv(data, out_path):\n ann_id = 0\n token_id = 0\n para_id_old = '-1'\n anno_set = {}\n rel = {}\n um = {}\n for excel_line, d in enumerate(data):\n if not isnan(d[0]):\n para_id = d[0] if not isnan(d[0]) else ''\n sent_idx = json.loads(d[1]) if not isnan(d[1]) else ''\n sent_text = str(d[2]) if not isnan(d[2]) else ''\n slice_idx = json.loads(d[3]) if not isnan(d[3]) else ''\n quantity = str(d[5]) if not isnan(d[5]) else ''\n quantity_line = str(d[6]).strip() if not isnan(d[6]) else ''\n unit = str(d[7]).strip() if not isnan(d[7]) else ''\n mod = str(d[8]).strip().split(' ') if not isnan(d[8]) else ['']\n property = str(d[9]) if not isnan(d[9]) else ''\n property_line = str(d[10]).strip() if not isnan(d[10]) else ''\n entity = str(d[11]) if not isnan(d[11]) else ''\n entity_line = str(d[12]).strip() if not isnan(d[12]) else ''\n quantity_Tid = ''\n property_Tid = ''\n\n for mo in mod:\n if mo not in ['', 'IsApproximate', 'IsCount', 'IsRange', 'IsList', 'IsMean', 'IsMedian', 'IsMeanHasSD', 'HasTolerance', 'IsRangeHasTolerance']:\n print('illegal mod {} - {}'.format(excel_line + 2, mo))\n\n if para_id:\n if para_id != para_id_old:\n if len(anno_set) > 0:\n tsv = excel2tsv_unit(anno_set, rel, um, para_id_old)\n out_put_file = os.path.join(out_path, para_id_old + '.tsv')\n save_data(tsv, out_put_file)\n ann_id = 0\n token_id = 0\n anno_set = {}\n rel = {}\n um = {}\n para_id_old = para_id\n\n anno_set[ann_id] = []\n\n if quantity:\n b = get_b(sent_text, quantity, slice_idx, quantity_line)\n e = b + len(quantity)\n if sent_text[b:e] != quantity:\n print('not match {} - {}'.format(excel_line+2, 'quantity'))\n token_name = 'T' + str(ann_id) + '-' + str(token_id)\n quantity_Tid = token_name\n token_id += 1\n um[token_name] = {'Unit': unit, 'modifier': mod}\n anno_set[ann_id].append([b+sent_idx[0], e+sent_idx[0], ann_id, token_name, 'Quantity', quantity])\n\n if property:\n b = get_b(sent_text, property, slice_idx, property_line)\n e = b + len(property)\n if sent_text[b:e] != property:\n print('not match {} - {}'.format(excel_line+2, 'property'))\n token_name = 'T' + str(ann_id) + '-' + str(token_id)\n property_Tid = token_name\n token_id += 1\n anno_set[ann_id].append([b+sent_idx[0], e+sent_idx[0], ann_id, token_name, 'MeasuredProperty', property])\n rel[token_name] = ['HasQuantity', quantity_Tid]\n\n if entity:\n b = get_b(sent_text, entity, slice_idx, entity_line)\n e = b + len(entity)\n if sent_text[b:e] != entity:\n print('not match {} - {}'.format(excel_line+2, 'entity'))\n token_name = 'T' + str(ann_id) + '-' + str(token_id)\n token_id += 1\n anno_set[ann_id].append([b+sent_idx[0], e+sent_idx[0], ann_id, token_name, 'MeasuredEntity', entity])\n if property_Tid:\n rel[token_name] = ['HasProperty', property_Tid]\n else:\n rel[token_name] = ['HasQuantity', quantity_Tid]\n\n ann_id += 1\n if len(anno_set) > 0:\n tsv = excel2tsv_unit(anno_set, rel, um, para_id)\n out_put_file = os.path.join(out_path, para_id_old + '.tsv')\n save_data(tsv, out_put_file)\n return\n\n\ndef generate_tsv(file, out_path):\n clean_pre_tsv_format(path=out_path)\n sheet = pd.read_excel(file, header=None, skiprows=1)\n data = np.array(sheet).tolist()\n excel2tsv(data, out_path)\n\n\nif __name__ == '__main__':\n # generate_tsv('human/test_anno1_20210126.xlsx', '../MeasEval/data/human_eval_anno1')\n # 
generate_tsv('human/test_anno2.xlsx', '../MeasEval/data/human_eval_anno2')\n # generate_tsv('human/test_anno3.xlsx', '../MeasEval/data/human_eval_anno3')\n generate_tsv('data_enhancement/NER_union_roberta_quantity_with_roberta_joint_ERE_isoQ_MOD1.xlsx',\n '../ner_process/pre_tsv_format')\n", "id": "2023128", "language": "Python", "matching_score": 3.5865604877471924, "max_stars_count": 0, "path": "data_process/change_data_format_excel2tsv.py" }, { "content": "# -*- coding: UTF-8 -*-\n# @Time : 2021/1/20\n# @Author : <EMAIL>\n# Apache License\n# Copyright©2020-2021 <EMAIL> All Rights Reserved\nimport json\nimport pandas as pd\nfrom styleframe import StyleFrame\nfrom tqdm import tqdm\nfrom data_process.change_data_format_unit import read_semeval_list, split_data, choose_key, cut_text, get_slice_line, ambiguity\n\ndef get_excel(whole_ann):\n out_datas = [['paragraph ID', 'sentence ID', 'sentence', 'sliced info', 'sliced sent',\n 'quantity', 'quantity_line', 'unit', 'mod',\n 'property', 'property_line', 'entity', 'entity_line']]\n for ann in tqdm(whole_ann):\n slice_text, slice_ids, slice = cut_text(ann['text'])\n\n id = ann['id']\n text_idx = json.dumps(ann['sentx'])\n raw_text = ann['text']\n line_idx = json.dumps(slice_ids)\n slice_text = slice_text\n\n if len(ann['excel']) == 0:\n out_datas.append([\n id, text_idx, raw_text, line_idx, slice_text,\n '', '', '', '', '', '', '', ''\n ])\n\n for i, caseID in enumerate(ann['excel']):\n case = ann['excel'][caseID]\n quantity = ''\n quantity_line = ''\n unit = ''\n mod = ''\n property = ''\n property_line = ''\n entity = ''\n entity_line = ''\n\n if 'Quantity' in case:\n quantity = case['Quantity'][0]\n if ambiguity(raw_text, quantity):\n quantity_line = get_slice_line(case['Quantity'][1], slice_ids)\n\n if 'unit' in case:\n unit = case['unit']\n\n if 'mods' in case:\n mod = ' '.join(case['mods'])\n\n if 'MeasuredProperty' in case:\n property = case['MeasuredProperty'][0]\n if ambiguity(raw_text, property):\n property_line = get_slice_line(case['MeasuredProperty'][1], slice_ids)\n\n if 'MeasuredEntity' in case:\n entity = case['MeasuredEntity'][0]\n if ambiguity(raw_text, entity):\n entity_line = get_slice_line(case['MeasuredEntity'][1], slice_ids)\n\n if i == 0:\n out_datas.append([\n id, text_idx, raw_text, line_idx, slice_text,\n quantity, quantity_line, unit, mod, property, property_line, entity, entity_line\n ])\n else:\n out_datas.append([\n '', '', '', '', '',\n quantity, quantity_line, unit, mod, property, property_line, entity, entity_line\n ])\n return out_datas\n\ndef generate_gold():\n path_text = [\n '../MeasEval/data/train/text',\n '../MeasEval/data/trial/txt'\n ] # 输入数据的位置\n path_tsv = [\n '../MeasEval/data/train/tsv',\n '../MeasEval/data/trial/tsv'\n ] # train的输入数据的位置\n whole_ann = read_semeval_list(path_tsv, path_text)\n whole_ann = split_data(whole_ann)\n out_datas = get_excel(whole_ann)\n ds = pd.DataFrame(out_datas)\n StyleFrame(ds).to_excel('data_enhancement/train.xlsx', index=False, header=False).save()\n\n\ndef generate_test():\n path_text = [\n '../MeasEval/data/eval/text',\n ] # 输入数据的位置\n path_tsv = [\n '../ner_process/pre_tsv_format',\n ] # train的输入数据的位置\n whole_ann = read_semeval_list(path_tsv, path_text)\n whole_ann = split_data(whole_ann)\n out_datas = get_excel(whole_ann)\n ds = pd.DataFrame(out_datas)\n StyleFrame(ds).to_excel('data_enhancement/NER_union_roberta_quantity_with_roberta_joint_ERE_isoQ_MOD1.xlsx', index=False, header=False).save()\n\n\n\ndef generate_raw(paths, out_file):\n mode = 'test'\n path_text = 
paths # location of the input data\n path_tsv = ['pass'] # location of the train input data\n # read in the raw data\n whole_ann = read_semeval_list(path_tsv, path_text, mode=mode)\n whole_ann = split_data(whole_ann, mode=mode)\n out_datas = get_excel(whole_ann)\n ds = pd.DataFrame(out_datas)\n StyleFrame(ds).to_excel(out_file, index=False, header=False).save()\n\n\nif __name__ == '__main__':\n generate_gold()\n # generate_test()\n # generate_raw(['../MeasEval/data/eval/text'], 'data_enhancement/eval.xlsx')\n # generate_raw(['../MeasEval/data/SimpleText_auto'], 'data_enhancement/add.xlsx')", "id": "2479871", "language": "Python", "matching_score": 2.2112390995025635, "max_stars_count": 0, "path": "data_process/change_data_format_tsv2excel.py" }, { "content": "# -*- coding: UTF-8 -*-\n# @Time : 2021/1/27\n# @Author : <EMAIL>\n# Apache License\n# Copyright©2020-2021 <EMAIL> All Rights Reserved\nimport json\nimport csv\nfrom data_process.change_data_format_unit import text2list, read_semeval_list, split_data, choose_key, get_mod_data, split_train_test, clean_pre_tsv_format\n\nmethod_text, expend, method_label, rate_train_eval = [0, 0, 0, 0.8]\n# method_text, expend, method_label, rate_train_eval = [1, 10, 0, 0.8]\n# method_text, expend, method_label, rate_train_eval = [2, 5, 0, 0.8]\n# method_text, expend, method_label, rate_train_eval = [2, 2, 0, 0.8]\n\nmode = 'all' # 'all' or 'part', all will put all data in train\n\npre_dir = '../mod_process/data/t' + str(method_text) + \\\n '_e' + str(expend) + \\\n '_l' + str(method_label) + \\\n '_r' + str(rate_train_eval) + \\\n '_' + mode + '/'\n\npath_text = [\n '../MeasEval/data/train/text',\n '../MeasEval/data/trial/txt'\n] # location of the input data\npath_tsv = [\n '../MeasEval/data/train/tsv',\n '../MeasEval/data/trial/tsv'\n] # location of the train input data\nout_file_all = pre_dir + 'all_tsv'\nout_file_train = pre_dir + 'train.tsv'\nout_file_test = pre_dir + 'dev.tsv'\nclean_pre_tsv_format(path=pre_dir)\n# read in the raw data\nwhole_ann = read_semeval_list(path_tsv, path_text)\nall_ner_data = get_mod_data(whole_ann, method_text=method_text, expend=expend, method_label=method_label)\n\n# output the train data in 'dict' format\ntrain_ner_data, test_ner_data = split_train_test(all_ner_data, rate_train_eval, mode=mode)\nwith open(out_file_all, 'w', encoding='utf8') as fout:\n tsv_w = csv.writer(fout, delimiter='\\t')\n tsv_w.writerows([['text', 'label']] + all_ner_data) # write the rows\nwith open(out_file_train, 'w', encoding='utf8') as fout:\n tsv_w = csv.writer(fout, delimiter='\\t')\n tsv_w.writerows([['text', 'label']] + train_ner_data) # write the rows\nwith open(out_file_test, 'w', encoding='utf8') as fout:\n tsv_w = csv.writer(fout, delimiter='\\t')\n tsv_w.writerows([['text', 'label']] + test_ner_data) # write the rows\n", "id": "1157444", "language": "Python", "matching_score": 2.9413223266601562, "max_stars_count": 0, "path": "data_process/change_data_format_tsv2mod.py" }, { "content": "# -*- coding: UTF-8 -*-\n# @Time : 2021/1/6\n# @Author : <EMAIL>\n# Apache License\n# Copyright©2020-2021 <EMAIL> All Rights Reserved\nimport json\nfrom data_process.change_data_format_unit import text2list, read_semeval_list, split_data, choose_key\n\n# parameter settings\nmode = 'train' # process the training data or the test data, 'train' or 'test'\nmethod = 'cut_sentence' # sentence-splitting mode: split by sentence or use a sliding window, 'cut_sentence' or 'sliding_window'\n\n# mode = 'train'\nmode = 'train'\npath_text = [\n '../MeasEval/data/train/text',\n '../MeasEval/data/trial/txt'\n] # location of the input data\npath_tsv = [\n '../MeasEval/data/train/tsv',\n '../MeasEval/data/trial/tsv'\n] # location of the train input data\nout_file_dict_format = '_'.join(['ere_train_json_format', method, mode]) # 'dict' 
format 的输出文件的名称\n# 读入原始数据\nwhole_ann = read_semeval_list(path_tsv, path_text, mode=mode)\nwhole_ann = split_data(whole_ann, mode=mode, method=method)\n# 为'dict' format 输出train数据\nall_ner_data = choose_key(whole_ann, ['id', 'text', 'quantity'])\nall_ner_data_id = choose_key(whole_ann, ['id'])\nwith open(out_file_dict_format, 'w', encoding='utf8') as fout:\n for d, id in zip(all_ner_data, all_ner_data_id):\n xd = json.dumps(d, ensure_ascii=False)\n # print(d['text'])\n print(id['id'])\n print(xd, file=fout)\n\n# mode = 'test'\nmode = 'test'\npath_text = ['../MeasEval/data/eval/text'] # 输入数据的位置\npath_tsv = ['pass'] # train的输入数据的位置\nout_file_dict_format = '_'.join(['ere_dev_json_format', method, mode]) # 'dict' format 的输出文件的名称\nout_file_dict_format_sentx = '_'.join(['ere_dev_json_format_sentx', method, mode]) # 'dict' format 的输出文件的名称\n# 读入原始数据\nwhole_ann = read_semeval_list(path_tsv, path_text, mode=mode)\nwhole_ann = split_data(whole_ann, mode=mode, method=method)\n# 为'dict' format 输出train数据\nall_ner_data = choose_key(whole_ann, ['id', 'text'])\nall_ner_data_id = choose_key(whole_ann, ['id'])\nwith open(out_file_dict_format, 'w', encoding='utf8') as fout:\n for d, id in zip(all_ner_data, all_ner_data_id):\n xd = json.dumps(d, ensure_ascii=False)\n # print(d['text'])\n # print(id['id'])\n print(xd, file=fout)\nall_sentx_data = choose_key(whole_ann, ['text', 'sentx', 'id'])\nwith open(out_file_dict_format_sentx, 'w', encoding='utf8') as fout:\n json.dump(all_sentx_data, fout)\n\n\n\n# 为'dict' format 输出test数据\n\n\n# # for 'list'\n#\n# 'list' format 的输出文件的名称\n# out_file_list_format_1 = '_'.join(['ner_list_format_texts', use_BIO, method, additional_type, mode])\n# out_file_list_format_2 = '_'.join(['ner_list_format_types', use_BIO, method, additional_type, mode])\n#\n#\n# ner_texts = []\n# ner_types = []\n# all_types_set = set()\n#\n# for wa in whole_ann:\n# input_text = wa['text']\n# ann_all = wa['anns']\n#\n# text_list, index2list = text2list(input_text)\n#\n# ner_type = ['O'] * len(text_list)\n# writed_type = [0] * len(text_list) # 保护每个位置最多写一次label\n# for ann in ann_all:\n# startOffset, endOffset, annotType, annotType_append, text = ann\n# all_world = set()\n# for t in range(startOffset, endOffset):\n# x = index2list[t]\n# all_world.add(text_list[x])\n# type_this = annotType\n# if annotType_append and additional_type == 'append':\n# type_this = annotType + '-' + annotType_append\n# all_types_set.add(type_this)\n#\n# if use_BIO == 'useBIO':\n# if t == startOffset:\n# type_this = 'B-' + type_this\n# else:\n# type_this = 'I-' + type_this\n# if writed_type[x] == 0:\n# ner_type[x] = type_this\n# writed_type[x] = 1\n#\n# print(text)\n# print(all_world)\n#\n# assert len(text_list) == len(ner_type)\n# ner_texts.append(text_list)\n# ner_types.append(ner_type)\n#\n# with open(out_file_list_format_1, 'w', encoding='utf8') as fout:\n# for d in ner_texts:\n# print(' '.join(d), file=fout)\n#\n# with open(out_file_list_format_2, 'w', encoding='utf8') as fout:\n# for d in ner_types:\n# print(' '.join(d), file=fout)\n#\n#\n# print(all_types_set)", "id": "12243460", "language": "Python", "matching_score": 3.6976962089538574, "max_stars_count": 0, "path": "data_process/change_data_format_tsv2ere.py" }, { "content": "# -*- coding: UTF-8 -*-\n# @Time : 2021/1/19\n# @Author : <EMAIL>\n# Apache License\n# Copyright©2020-2021 <EMAIL> All Rights Reserved\n\nimport os\nimport json\nimport pandas as pd\n\npath_text = [\n '../MeasEval/data/train/text',\n '../MeasEval/data/trial/txt'\n] # 输入数据的位置\npath_tsv = [\n 
'../MeasEval/data/train/tsv',\n '../MeasEval/data/trial/tsv'\n] # train的输入数据的位置\n\n\n\nformats = []\nspan_all = []\nall_q = dict()\nall_p = dict()\nall_e = dict()\nall_u = dict()\nall_Qualifier = dict()\np_set = set()\ncount_q = 0\ncount_p = 0\ncount_e = 0\ncount_u = 0\ncount_Qualifier = 0\ncount_mods = {}\ncase_mods = {}\n\nfor path_text, path_tsv in zip(path_text, path_tsv):\n files = os.listdir(path_tsv)\n for file in files:\n full_file = os.path.join(path_tsv, file)\n core, _ = os.path.splitext(file)\n text_p = os.path.join(path_text, core + '.txt')\n with open(text_p, 'r', encoding='utf8') as fin:\n text_all = fin.readlines()\n if len(text_all) > 1:\n print('len(text) > 1: ', text_p)\n input_text = text_all[0].strip()\n data = pd.read_csv(full_file, sep='\\t', header=0)\n\n for index, row in data.iterrows():\n annotSet = int(row['annotSet'])\n annotType = row['annotType']\n startOffset = int(row['startOffset'])\n endOffset = int(row['endOffset'])\n annotId = row['annotId']\n text = row['text']\n if pd.isnull(row['other']):\n other = {}\n else:\n other = json.loads(row['other'])\n\n if 'mods' in other:\n for mod in other['mods']:\n if mod not in count_mods:\n count_mods[mod] = {}\n case_mods[mod] = {}\n if text not in count_mods[mod]:\n count_mods[mod][text] = 0\n case_mods[mod][text] = []\n count_mods[mod][text] += 1\n case_mods[mod][text].append(input_text[startOffset-20: endOffset+20])\n\n if 'unit' in other:\n count_u += 1\n if other['unit'] not in all_u:\n all_u[other['unit']] = 0\n all_u[other['unit']] += 1\n\n if annotType == 'Quantity':\n count_q += 1\n if text not in all_q:\n all_q[text] = 0\n all_q[text] += 1\n if annotType == 'Qualifier':\n count_Qualifier += 1\n if text not in all_Qualifier:\n all_Qualifier[text] = 0\n all_Qualifier[text] += 1\n if annotType == 'MeasuredProperty':\n count_p += 1\n if text not in all_p:\n all_p[text] = 0\n all_p[text] += 1\n if annotType == 'MeasuredEntity':\n count_e += 1\n if text not in all_e:\n all_e[text] = 0\n all_e[text] += 1\n\nprint('q {} {}'.format(len(all_q), count_q))\nprint('p {} {}'.format(len(all_p), count_p))\nprint('e {} {}'.format(len(all_e), count_e))\nprint('u {} {}'.format(len(all_u), count_u))\nprint('Qualifier {} {}'.format(len(all_Qualifier), count_Qualifier))\nwith open('statistic/all_q.txt', 'w', encoding='utf8') as fout:\n data = sorted([[k, v] for k, v in all_q.items()], key=lambda x: [-x[1], x[0]])\n for d in data:\n print(d[0], file=fout)\nwith open('statistic/all_p.txt', 'w', encoding='utf8') as fout:\n data = sorted([[k, v] for k, v in all_p.items()], key=lambda x: [-x[1], x[0]])\n for d in data:\n print(d[0], file=fout)\nwith open('statistic/all_e.txt', 'w', encoding='utf8') as fout:\n data = sorted([[k, v] for k, v in all_e.items()], key=lambda x: [-x[1], x[0]])\n for d in data:\n print(d[0], file=fout)\nwith open('statistic/all_u.txt', 'w', encoding='utf8') as fout:\n data = sorted([[k, v] for k, v in all_u.items()], key=lambda x: [-x[1], x[0]])\n for d in data:\n print(d[0], file=fout)\nwith open('statistic/all_Qualifier.txt', 'w', encoding='utf8') as fout:\n data = sorted([[k, v] for k, v in all_Qualifier.items()], key=lambda x: [-x[1], x[0]])\n for d in data:\n print(d[0], file=fout)\n\nfor k, v in count_mods.items():\n with open('statistic/mod_' + k + '.txt', 'w', encoding='utf8') as fout:\n data = sorted([[k, v] for k, v in count_mods[k].items()], key=lambda x: [-x[1], x[0]])\n for d in data:\n print(d[0], file=fout)\n # print('{}'.format(case_mods[k][d[0]]), file=fout)\n\n\n\npath_text = [\n 
'../MeasEval/data/train/text',\n '../MeasEval/data/trial/txt'\n] # 输入数据的位置\npath_tsv = [\n '../MeasEval/data/train/tsv',\n '../MeasEval/data/trial/tsv'\n] # train的输入数据的位置\nfrom data_process.change_data_format_unit import text2list, read_semeval_list, split_data, choose_key, cut_text\nwhole_ann = read_semeval_list(path_tsv, path_text)\nwhole_ann = split_data(whole_ann)\ncount_all= {}\nfor ann in whole_ann:\n for item in ann['excel']:\n for k in ann['excel'][item].keys():\n if k not in count_all:\n count_all[k] = 0\n count_all[k] += 1\nprint(count_all)\n", "id": "4494746", "language": "Python", "matching_score": 3.128864049911499, "max_stars_count": 0, "path": "data_process/statistic.py" }, { "content": "# -*- coding: UTF-8 -*-\n# @Time : 2021/1/8\n# @Author : <EMAIL>\n# Apache License\n# Copyright©2020-2021 <EMAIL> All Rights Reserved\n\nimport os\nimport json\nimport pickle\nimport random\nimport shutil\nimport re\nimport pandas as pd\nfrom data_process.rule_base_unit import RuleBaseUnit\nrbu = RuleBaseUnit()\nrandom.seed(10)\n\n\ndef get_mod(ori_text, ori_input=''):\n # IsMean 只处理了10%,带有average的\n # IsMeanHasSD 放弃\n # IsMeanHasTolerance 放弃\n # IsMeanIsRange 放弃\n # IsRangeHasTolerance 放弃\n # 以下是IsList所还没有考虑的情况\n # 20 × 20 degrees\n # 6 kg to 13 kg\n # 85/15%\n mods = set()\n text = ori_text.lower()\n input = ori_input.lower()\n approximate = ['approximately', '∼', '≈', '≳', '≲', 'nominally', 'about', 'around', 'close to', 'circa', 'the order of', 'near', 'roughly']\n range = [' – ', '<', '>', '≤', '≥', '⩽', '⩾', '≳', '≲', 'above', 'at least', 'greater than', 'up to', 'to', 'after', 'as low as', 'as much as', 'at least', 'before', 'below', 'between', 'down to', 'last', 'less than', 'more than', 'over', 'range', 'ranging', 'since', 'top', 'up to', 'upto', 'upper', 'within', 'to']\n if '±' in text:\n mods.add('HasTolerance')\n for app in approximate:\n if app in text:\n mods.add('IsApproximate')\n break\n if 'from' in text and 'to' in text:\n mods.add('IsApproximate')\n if 'and' in text or 'or' in text:\n mods.add('IsList')\n if 'average' in text:\n mods.add('IsMean')\n if 'median' in input or 'median' in text:\n mods.add('IsMedian')\n for ran in range:\n if ran in text:\n mods.add('IsRange')\n break\n if re.search('\\d-\\d', text):\n mods.add('IsRange')\n # if len(mods) == 0:\n # if '.' 
not in text:\n # mods.add('IsCount')\n return list(mods)\n\n\nclass Mod:\n def __init__(self, text_path, mod_path):\n self.read_data(text_path, mod_path)\n\n def read_data(self, text_path, mod_path):\n with open(text_path, 'r', encoding='utf8') as fin:\n text = [d.strip().split('\\t')[0] for d in fin.readlines()]\n with open(mod_path, 'r', encoding='utf8') as fin:\n mod = [d.strip() for d in fin.readlines()]\n self.mod = dict()\n for t,m in zip(text[1:], mod):\n self.mod[t] = m\n\n def get_mod(self, ori_text, ori_input=''):\n if ori_text in self.mod:\n ori_label = self.mod[ori_text]\n if ori_label == 'Empty':\n return []\n return ori_label.split('&')\n else:\n return get_mod(ori_text, ori_input=ori_input)\n\n\n# 切.有难度,先不切\ndef text2list(text):\n # text: 'ab cd'\n # text_list: ['ab', 'cd']\n # index2list: 00011\n text_list = []\n index2list = {}\n tmp = ''\n for t in range(len(text)):\n index2list[t] = len(text_list)\n if text[t] == ' ':\n if len(tmp) > 0:\n text_list.append(tmp)\n tmp = ''\n else:\n tmp += text[t]\n if len(tmp) > 0:\n text_list.append(tmp)\n return text_list, index2list\n\n\ndef choose_key(data, keyname):\n all_data = []\n for d in data:\n new_d = dict()\n for k in keyname:\n new_d[k] = d[k]\n all_data.append(new_d)\n return all_data\n\n\ndef get_excel_format(ann_all):\n excel_list = {}\n annot_2_q = {}\n annot_2_t = {}\n # add Quantity\n for i, ann in enumerate(ann_all):\n startOffset, endOffset, annotType, annotType_append, text, annotId, other = ann\n if annotType == 'Quantity':\n excel_list[annotId] = {'Quantity': [text, startOffset]}\n for k,v in other.items():\n excel_list[annotId][k] = v\n annot_2_q[annotId] = annotId\n annot_2_t[annotId] = annotType\n # add hasQuantity\n for i, ann in enumerate(ann_all):\n startOffset, endOffset, annotType, annotType_append, text, annotId, other = ann\n for k, v in other.items():\n if k == 'HasQuantity' and v in excel_list:\n excel_list[v][annotType] = [text, startOffset]\n annot_2_q[annotId] = v\n annot_2_t[annotId] = annotType\n # add hasProperty\n for i, ann in enumerate(ann_all):\n startOffset, endOffset, annotType, annotType_append, text, annotId, other = ann\n for k, v in other.items():\n if k == 'HasProperty' and v in annot_2_q:\n excel_list[annot_2_q[v]][annotType] = [text, startOffset]\n annot_2_q[annotId] = annot_2_q[v]\n annot_2_t[annotId] = annotType\n # add Qualifies\n # 不确定是否会有重复\n for i, ann in enumerate(ann_all):\n startOffset, endOffset, annotType, annotType_append, text, annotId, other = ann\n for k, v in other.items():\n if k == 'Qualifies' and v in annot_2_q:\n excel_list[annot_2_q[v]]['Qualifier_' + annot_2_t[v]] = [text, startOffset]\n return excel_list\n\n\ndef get_ere_format(ann_all):\n triples = []\n excel_list = get_excel_format(ann_all)\n for k, v in excel_list.items():\n q_name = v['Quantity']\n for m, p in zip(['MeasuredEntity', 'MeasuredProperty', 'Qualifier_Quantity'],\n ['toEntity', 'toProperty', 'toQualifier']):\n if m in v:\n triples.append([q_name, p, v[m]])\n return triples, excel_list\n\n\ndef get_label_format(ann_all, additional_type='append'):\n this_entity = dict()\n for ann in ann_all:\n startOffset, endOffset, annotType, annotType_append, text, _, _ = ann\n for t in range(startOffset, endOffset):\n type_this = annotType\n if annotType_append and additional_type == 'append':\n type_this = annotType + '-' + annotType_append\n if type_this not in this_entity:\n this_entity[type_this] = {}\n if text not in this_entity[type_this]:\n this_entity[type_this][text] = []\n if [startOffset, endOffset 
- 1] not in this_entity[type_this][text]:\n this_entity[type_this][text].append([startOffset, endOffset - 1])\n return this_entity\n\n\ndef correct_boundary(b, e, text):\n or_b = b\n or_e = e\n max_id = len(text)\n while text[b].isalpha() and b > 0 and text[b-1].isalpha():\n b -= 1\n while text[e-1].isalpha() and e <= max_id-1 and text[e].isalpha():\n e += 1\n if e != or_e or b != or_b:\n print('### correct_boundary ###')\n print('ori: {}'.format(text[or_b:or_e]))\n print('cor: {}'.format(text[b:e]))\n return b, e, text[b:e]\n\n\ndef read_semeval(path_tsv, path_text, mode='train', additional_type='append', do_correct_boundary=True):\n whole_ann = []\n files = os.listdir(path_text)\n for file in files:\n if '.txt' not in file:\n continue\n full_file = os.path.join(path_text, file)\n core, _ = os.path.splitext(file)\n tsv_p = os.path.join(path_tsv, core + '.tsv')\n with open(full_file, 'r', encoding='utf8') as fin:\n text_all = fin.readlines()\n if len(text_all) > 1:\n print('warning: len(text) > 1: ', full_file)\n text_all = ''.join(text_all)\n text_all = text_all.replace('.\\n', '\\n')\n text_all = [text_all.replace('\\n', '. ')]\n input_text = text_all[0].strip()\n\n if mode == 'test':\n whole_ann.append({'text': input_text, 'id':core})\n continue\n\n if not os.path.exists(tsv_p):\n print('tsv not exist for {}'.format(full_file))\n continue\n data = pd.read_csv(tsv_p, sep='\\t', header=0)\n\n ann_all = []\n for index, row in data.iterrows():\n annotSet = int(row['annotSet'])\n annotType = row['annotType']\n startOffset = int(row['startOffset'])\n endOffset = int(row['endOffset'])\n annotId = row['annotId']\n text = row['text']\n if pd.isnull(row['other']):\n other = {}\n else:\n other = json.loads(row['other'])\n\n if do_correct_boundary:\n startOffset, endOffset, text = correct_boundary(startOffset, endOffset, text_all[0])\n\n if input_text[startOffset: endOffset] != text:\n print('error: text not match: {}'.format(text))\n\n annotType_append = None\n if 'mods' in other:\n if len(other['mods']) > 1:\n # print('mods > 1: {}'.format(core))\n pass\n annotType_append = '&'.join(sorted(other['mods']))\n ann_all.append([startOffset, endOffset, annotType, annotType_append, text, annotId, other])\n\n this_entity = get_label_format(ann_all, additional_type=additional_type)\n whole_ann.append({'text': input_text, 'anns': ann_all, 'label': this_entity, 'id':core})\n return whole_ann\n\n\ndef read_semeval_list(path_tsv_list, path_text_list, mode='train', additional_type='notpad'):\n whole_ann = []\n for path_tsv, path_text in zip(path_tsv_list, path_text_list):\n whole_ann += read_semeval(path_tsv, path_text, mode=mode, additional_type=additional_type)\n return whole_ann\n\n\n##分句函数\ndef cut_sentence(text):\n re_exp = re.compile(\"(?<=[^A-Z]\\.) (?![0-9%])\")\n raw_sentences = re.split(re_exp,text)\n offset = 0\n sentences = []\n for idx,senten in enumerate(raw_sentences):\n if not sentences:\n sentences.append(senten)\n else:\n if len(senten)<100:\n sentences[-1] = sentences[-1]+\" \"+senten\n else:\n sentences.append(senten)\n sentence_offset = []\n for sent in sentences:\n sentence_offset.append([offset, offset + len(sent)])\n offset += (len(sent)+1)\n return (sentences, sentence_offset)\n\n\ndef cut_sentence_old(text):\n sents = []\n sents_indx = []\n p = 0\n for t in range(len(text)):\n if t >= 1 and text[t-1:t+1] == '. 
':\n sents.append(text[p:t])\n sents_indx.append([p, t])\n p = t+1\n if p < len(text):\n sents.append(text[p:t+1])\n sents_indx.append([p, t+1])\n print('text: ', text)\n print('sents: ', sents)\n print('sents_indx: ', sents_indx)\n return sents, sents_indx\n\n\ndef sliding_window(text, window=50, step=20):\n sents = []\n sents_indx = []\n max_t = len(text)\n for t in range(max_t):\n if t % step == 0:\n e = min(max_t, t+window)\n sents.append(text[t:e])\n sents_indx.append([t, e])\n return sents, sents_indx\n\ndef split_data(whole_ann, mode='train', method='cut_sentence', additional_type='notpad'):\n new_whole_ann = []\n if method == 'cut_sentence':\n split_method = cut_sentence\n if method == 'sliding_window':\n split_method = sliding_window\n for wann in whole_ann:\n text = wann['text']\n if mode == 'train':\n anns = wann['anns']\n\n sents, sents_indx = split_method(text)\n for sent, sentx in zip(sents, sents_indx):\n if mode == 'test':\n new_whole_ann.append({\n 'text': sent,\n 'sentx':sentx,\n 'id': wann['id'],\n 'quantity': [],\n 'excel': [],\n })\n continue\n new_anns = []\n for ann in anns:\n startOffset, endOffset, annotType, annotType_append, text, annotId, other = ann\n if startOffset >= sentx[0] and endOffset <= sentx[1]:\n new_anns.append([startOffset-sentx[0], endOffset-sentx[0], annotType, annotType_append, text, annotId, other])\n ann_dict_format = get_label_format(new_anns, additional_type=additional_type)\n ere_triple_format, excel_list = get_ere_format(new_anns)\n new_whole_ann.append({\n 'text': sent,\n 'anns': new_anns,\n 'label': ann_dict_format,\n 'quantity': ere_triple_format,\n 'excel': excel_list,\n 'sentx':sentx,\n 'id': wann['id']\n })\n return new_whole_ann\n\n\ndef add_rel(data):\n anno_set = {}\n id = 0\n for type, v in data.items():\n for text in v:\n for vi in v[text]:\n anno_id = vi[2]\n if anno_id not in anno_set:\n anno_set[anno_id] = []\n annotId = 'T' + str(id)\n id += 1\n vi.append(annotId)\n anno_set[anno_id].append(vi + [type, text])\n\n rel = {}\n for anno_id, v in anno_set.items():\n anno_set[anno_id].sort(key=lambda x: x[0])\n q = []\n q_rel = {}\n for i, vi in enumerate(v):\n if vi[4] == 'Quantity':\n q.append(vi)\n q_rel[vi[3]] = []\n for i, vi in enumerate(v):\n if vi[4] == 'Quantity':\n continue\n this_dis = []\n for j, qi in enumerate(q):\n dis = min(abs(qi[0]-vi[1]), abs(qi[1]-vi[0]))\n this_dis.append([dis, j])\n this_dis.sort(key=lambda x: x[0])\n if len(this_dis) > 0:\n q_rel[q[this_dis[0][1]][3]].append(vi)\n\n for k, v in q_rel.items():\n # p2q\n p = []\n for vi in v:\n if vi[4] == 'MeasuredProperty':\n p.append(vi[3])\n rel[vi[3]] = ['HasQuantity', k]\n if vi[4] == 'Qualifier':\n rel[vi[3]] = ['Qualifies', k]\n for vi in v:\n if vi[4] == 'MeasuredEntity':\n if not p:\n rel[vi[3]] = ['HasQuantity', k]\n else:\n rel[vi[3]] = ['HasProperty', p[0]]\n return data, anno_set, rel\n\n\ndef connet(data):\n all_label = {}\n for ann, [idx, d] in enumerate(data):\n for k,v in d['label'].items():\n if k not in all_label:\n all_label[k] = {}\n for k1,v1 in d['label'][k].items():\n if k1 not in all_label[k]:\n all_label[k][k1] = []\n for v2 in d['label'][k][k1]:\n v2[0] = int(v2[0])\n v2[1] = int(v2[1])\n if v2 not in all_label[k][k1]:\n all_label[k][k1].append([v2[0]+idx[0], v2[1]+idx[0]+1, ann])\n return all_label\n\n\ndef connet_ere(data, all_ner_data, append_iso_q=True):\n all_label = {}\n for ann, [idx, d] in enumerate(data):\n text = d['text']\n\n all_ner_quantity = set()\n all_ere_quantity = set()\n if (d['id'], text) not in all_ner_data:\n 
print('not match in all_ner_data')\n for tn in all_ner_data[(d['id'], text)]:\n all_ner_quantity.add((tn[0], tn[1]))\n\n for rel in d['quantity']:\n all_ere_quantity.add((rel[0][0], rel[0][1]))\n h = rel[0][0]\n h_idx = [rel[0][1]+idx[0], rel[0][1]+idx[0]+ len(h)]\n h_id = (h_idx[0], h_idx[1], text[rel[0][1]:rel[0][1]+len(h)], ann)\n relation = rel[1]\n t = rel[2][0]\n t_idx = [rel[2][1]+idx[0], rel[2][1]+idx[0] + len(t)]\n t_id = (t_idx[0], t_idx[1], text[rel[2][1]:rel[2][1]+len(t)])\n if h_id not in all_label:\n all_label[h_id] = {}\n all_label[h_id][relation] = t_id\n\n iso_quantity = all_ner_quantity - all_ere_quantity\n if append_iso_q:\n for iq in iso_quantity:\n h_idx = [iq[1] + idx[0], iq[1] + idx[0] + len(iq[0])]\n h_id = (h_idx[0], h_idx[1], text[iq[1]:iq[1] + len(iq[0])], ann)\n all_label[h_id] = {}\n\n return all_label\n\n\ndef add_rel_ere(data):\n Tid2type = {}\n anno_set = {}\n raw_rel = {}\n rel = {}\n id = 0\n head_id = 0\n for head, r in data.items():\n idx2Tid = {}\n head_text = head[2]\n if head_text == '':\n print('head_text is empty')\n ann = head[3]\n if ann not in anno_set:\n anno_set[ann] = []\n if (head[0], head[1]) not in idx2Tid:\n Tid = 'T' + str(head_id) + '-' + str(id)\n idx2Tid[(head[0], head[1])] = Tid\n id += 1\n anno_set[ann].append([head[0], head[1], ann, Tid, 'Quantity', head_text])\n Tid2type[Tid] = 'Quantity'\n else:\n Tid = idx2Tid[(head[0], head[1])]\n\n for rel_name, tail in r.items():\n tail_text = tail[2]\n if (tail[0], tail[1]) not in idx2Tid:\n Tid = 'T' + str(head_id) + '-' + str(id)\n idx2Tid[(tail[0], tail[1])] = Tid\n id += 1\n if rel_name == 'toEntity':\n tail_type = 'MeasuredEntity'\n if rel_name == 'toProperty':\n tail_type = 'MeasuredProperty'\n if rel_name == 'toQualifier':\n tail_type = 'Qualifier'\n anno_set[ann].append([tail[0], tail[1], ann, Tid, tail_type, tail_text])\n Tid2type[Tid] = tail_type\n else:\n Tid = idx2Tid[(tail[0], tail[1])]\n\n Tid_head = idx2Tid[(head[0], head[1])]\n for rel_name, tail in r.items():\n Tid_tail = idx2Tid[(tail[0], tail[1])]\n if Tid_head not in raw_rel:\n raw_rel[Tid_head] = {}\n raw_rel[Tid_head][rel_name] = Tid_tail\n\n head_id += 1\n\n for head, r in raw_rel.items():\n for rel_name, tail in r.items():\n if rel_name == 'toQualifier':\n if Tid2type[tail] != 'Qualifier':\n print('rel & type dismatch')\n continue\n rel[tail] = ['Qualifies', head]\n if rel_name == 'toProperty':\n if Tid2type[tail] != 'MeasuredProperty':\n print('rel & type dismatch')\n continue\n rel[tail] = ['HasQuantity', head]\n if rel_name == 'toEntity':\n if Tid2type[tail] != 'MeasuredEntity':\n print('rel & type dismatch')\n continue\n if 'toProperty' not in r:\n rel[tail] = ['HasQuantity', head]\n else:\n rel[tail] = ['HasProperty', r['toProperty']]\n\n return None, anno_set, rel\n\n\ndef add_rel_ere_v2(data):\n Tid2type = {}\n anno_set = {}\n raw_rel = {}\n rel = {}\n id = 0\n head_id = 0\n for head, r in data.items():\n idx2Tid = {}\n head_text = head[2]\n if head_text == '':\n print('head_text is empty')\n ann = head[3]\n if ann not in anno_set:\n anno_set[ann] = []\n if (head[0], head[1]) not in idx2Tid:\n Tid = 'T' + str(head_id) + '-' + str(id)\n idx2Tid[(head[0], head[1])] = Tid\n id += 1\n anno_set[ann].append([head[0], head[1], ann, Tid, 'Quantity', head_text])\n Tid2type[Tid] = 'Quantity'\n else:\n Tid = idx2Tid[(head[0], head[1])]\n\n for rel_name, tail in r.items():\n tail_text = tail[2]\n if (tail[0], tail[1]) not in idx2Tid:\n Tid = 'T' + str(head_id) + '-' + str(id)\n idx2Tid[(tail[0], tail[1])] = Tid\n id += 1\n 
if rel_name == 'toEntity':\n tail_type = 'MeasuredEntity'\n if rel_name == 'toProperty':\n tail_type = 'MeasuredProperty'\n if rel_name == 'toQualifier':\n tail_type = 'Qualifier'\n anno_set[ann].append([tail[0], tail[1], ann, Tid, tail_type, tail_text])\n Tid2type[Tid] = tail_type\n else:\n Tid = idx2Tid[(tail[0], tail[1])]\n\n Tid_head = idx2Tid[(head[0], head[1])]\n for rel_name, tail in r.items():\n Tid_tail = idx2Tid[(tail[0], tail[1])]\n if Tid_head not in raw_rel:\n raw_rel[Tid_head] = {}\n raw_rel[Tid_head][rel_name] = Tid_tail\n\n head_id += 1\n\n for head, r in raw_rel.items():\n for rel_name, tail in r.items():\n if rel_name == 'toQualifier':\n if Tid2type[tail] != 'Qualifier':\n print('rel & type dismatch')\n continue\n rel[tail] = ['Qualifies', head]\n if rel_name == 'toProperty':\n if Tid2type[tail] != 'MeasuredProperty':\n print('rel & type dismatch')\n continue\n rel[tail] = ['HasQuantity', head]\n if rel_name == 'toEntity':\n if Tid2type[tail] != 'MeasuredEntity':\n print('rel & type dismatch')\n continue\n if 'toProperty' not in r:\n rel[tail] = ['HasQuantity', head]\n else:\n rel[tail] = ['HasProperty', r['toProperty']]\n\n return None, anno_set, rel\n\n\ndef change_annotSet(data, rel):\n QuantityId = 0\n id2ann = {}\n for _, entities in data.items():\n for entity in entities:\n if entity[4] == 'Quantity':\n entity[2] = QuantityId\n id2ann[entity[3]] = entity[2]\n QuantityId += 1\n for _, entities in data.items():\n for entity in entities:\n if entity[4] == 'MeasuredProperty':\n entity[2] = id2ann[rel[entity[3]][1]]\n id2ann[entity[3]] = entity[2]\n for _, entities in data.items():\n for entity in entities:\n if entity[4] == 'MeasuredEntity':\n entity[2] = id2ann[rel[entity[3]][1]]\n return data\n\n\ndef clean_data_for_ner2tsv(data, rel, add_qualifier=False):\n enw_data = {}\n matched_id = set()\n for _, entities in data.items():\n for entity in entities:\n if entity[4] == 'Quantity':\n matched_id.add(entity[3])\n for _, entities in data.items():\n for entity in entities:\n if add_qualifier and entity[4] == 'Qualifier':\n if entity[3] in rel and rel[entity[3]][1] in matched_id:\n matched_id.add(entity[3])\n for _, entities in data.items():\n for entity in entities:\n if entity[4] == 'MeasuredProperty':\n if entity[3] in rel and rel[entity[3]][1] in matched_id:\n matched_id.add(entity[3])\n for _, entities in data.items():\n for entity in entities:\n if entity[4] == 'MeasuredEntity':\n if entity[3] in rel and rel[entity[3]][1] in matched_id:\n matched_id.add(entity[3])\n for setId, entities in data.items():\n for entity in entities:\n annotId = entity[3]\n annotType = entity[4]\n if annotType != 'Quantity' and annotId not in rel:\n print('error: has no rel')\n continue\n if annotId not in matched_id:\n continue\n if setId not in enw_data:\n enw_data[setId] = []\n enw_data[setId].append(entity)\n return enw_data\n\n\ndef ner2tsv(data, rel, id, sent='', mod_tool=None, add_qualifier=False):\n data = clean_data_for_ner2tsv(data, rel, add_qualifier=add_qualifier)\n data = change_annotSet(data, rel)\n\n tsv = ['docId\tannotSet\tannotType\tstartOffset\tendOffset\tannotId\ttext\tother']\n for _, entities in data.items():\n for entity in entities:\n docId = id\n annotSet = str(entity[2])\n annotType = entity[4]\n startOffset = str(entity[0])\n endOffset = str(entity[1])\n annotId = entity[3]\n text = entity[5]\n if annotType == 'Quantity':\n other_all = {}\n other_unit = rbu.get_unit(text)\n if mod_tool:\n base_mod = get_mod(text, sent)\n other_mod = mod_tool.get_mod(text, sent)\n 
other_mod = mod_tool.get_mod(text, sent)\n                else:\n                    other_mod = get_mod(text, sent)\n                if other_unit != '':\n                    other_all['unit'] = other_unit\n                if other_mod != []:\n                    other_all['mods'] = other_mod\n                other = json.dumps(other_all)\n                # if other_all != {}:\n                #     other = json.dumps(other_all)\n                # else:\n                #     other = ''\n            else:\n                if annotId not in rel:\n                    print('error: annotation has no relation')\n                    continue\n                other = json.dumps({rel[annotId][0]: rel[annotId][1]})\n\n            newline = [docId, annotSet, annotType, startOffset, endOffset, annotId, text, other]\n            tsv.append('\\t'.join(newline))\n    return tsv\n\n\ndef excel2tsv_unit(data, rel, um, id):\n    data = clean_data_for_ner2tsv(data, rel)\n    # data = change_annotSet_excel2tsv(data, rel)\n\n    tsv = ['docId\\tannotSet\\tannotType\\tstartOffset\\tendOffset\\tannotId\\ttext\\tother']\n    for _, entities in data.items():\n        for entity in entities:\n            docId = id\n            annotSet = str(entity[2])\n            annotType = entity[4]\n            startOffset = str(entity[0])\n            endOffset = str(entity[1])\n            annotId = entity[3]\n            text = entity[5]\n            if annotType == 'Quantity':\n                other_all = {}\n                if annotId in um and 'Unit' in um[annotId] and um[annotId]['Unit']:\n                    other_all['unit'] = um[annotId]['Unit']\n                if annotId in um and 'modifier' in um[annotId] and um[annotId]['modifier'] != ['']:\n                    other_all['mods'] = um[annotId]['modifier']\n                other = json.dumps(other_all)\n                # if other_all != {}:\n                #     other = json.dumps(other_all)\n                # else:\n                #     other = ''\n            else:\n                if annotId not in rel:\n                    print('error: annotation has no relation')\n                    continue\n                other = json.dumps({rel[annotId][0]: rel[annotId][1]})\n\n            newline = [docId, annotSet, annotType, startOffset, endOffset, annotId, text, other]\n            tsv.append('\\t'.join(newline))\n    return tsv\n\n\ndef clean_pre_tsv_format(path='/Users/apricot/semeval_task8/ner_process/pre_tsv_format'):\n    dir_name = path\n    if os.path.exists(dir_name):\n        shutil.rmtree(dir_name)\n    os.mkdir(dir_name)\n\n\ndef save_data(data, file):\n    file_path, _ = os.path.split(file)\n    if not os.path.exists(file_path):\n        os.mkdir(file_path)\n    with open(file, 'w', encoding='utf8') as fout:\n        for d in data:\n            print(d, file=fout)\n\n\ndef save_data_json(data, file):\n    file_path, _ = os.path.split(file)\n    if not os.path.exists(file_path):\n        os.mkdir(file_path)\n    with open(file, 'w', encoding='utf8') as fout:\n        json.dump(data, fout)\n\n\ndef save_data_pickle(data, file):\n    file_path, _ = os.path.split(file)\n    if not os.path.exists(file_path):\n        os.mkdir(file_path)\n    with open(file, 'wb') as fout:\n        pickle.dump(data, fout)\n\n\ndef cut_text(text, line_len=50):\n    ids = []\n    nums = 0\n    slice = []\n    out_text = ''\n    text_len = len(text)\n    text_spl = text.split(' ')\n    text_spl_len = len(text_spl)\n    i = 0\n    while True:\n        step = 1\n        step_len = len(text_spl[i])\n        while i+step < text_spl_len:\n            if step_len + len(text_spl[i+step]) + 1 > line_len:\n                break\n            step_len += len(text_spl[i+step]) + 1\n            step += 1\n        slice.append(' '.join(text_spl[i: i+step]))\n        ids.append(nums)\n        i += step\n        nums += len(slice[-1]) + 1\n        if nums > text_len:\n            break\n    for i, s in enumerate(slice):\n        out_text += str(i) + ' '*(2-len(str(i))) + '$ ' + s + '\\n'\n    out_text = out_text.strip()\n    return out_text, ids, slice\n\n\ndef get_slice_line(idx, slice_list):\n    line = 0\n    for i, p in enumerate(slice_list):\n        if idx > p:\n            line = i\n    return line\n\n\ndef ambiguity(text, token):\n    if text.count(token) > 1:\n        return True\n    return False\n\n\ndef select_sentx_pre(sentx, pre):\n    out_sentx = []\n    out_pre = []\n    text2p = {}\n    for i, p in enumerate(pre):\n        
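# map each prediction to its sentence text so the two lists can be realigned by content\n        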
text2p[p['text']] = i\n for sent in sentx:\n if sent['text'] in text2p:\n out_sentx.append(sent)\n out_pre.append(pre[text2p[sent['text']]])\n return out_sentx, out_pre\n\n\ndef get_mod_text(b, e, text, expend=0, method=0):\n if method == 0:\n return text[b:e]\n if method == 1:\n return text[max(0, b-expend):e+expend]\n if method == 2:\n b = max(0, b-expend)\n e = min(len(text), e+expend)\n while b > 0:\n if text[b] == ' ':\n break\n b -= 1\n while e < len(text):\n if text[e] == ' ':\n break\n e += 1\n return text[b+1: e]\n\ndef get_mod_label(label_dic, method=0):\n label = 'Empty'\n if method == 0:\n if 'mods' in label_dic:\n label = '&'.join(sorted(label_dic['mods']))\n return label\n\n\ndef get_mod_data(data, method_text=0, expend=0, method_label=0):\n mod_data = []\n for d in data:\n whole_text = d['text']\n for a in d['anns']:\n if a[2] == 'Quantity':\n text = get_mod_text(a[0], a[1], whole_text, expend=expend, method=method_text)\n label = get_mod_label(a[6], method=method_label)\n mod_data.append([text, label])\n return mod_data\n\n\ndef split_train_test(data, rate, mode='part'):\n train_labels = set()\n train = []\n test = []\n for d in data:\n if d[1] in train_labels and random.random() > rate:\n test.append(d)\n if mode == 'all':\n train.append(d)\n else:\n train.append(d)\n train_labels.add(d[1])\n return train, test\n\n\ndef get_all_ner_quantity(file):\n with open(file, 'r', encoding='utf8') as fin:\n data = [json.loads(d.strip()) for d in fin.readlines()]\n ner = dict()\n for d in data:\n ner[(d['id'], d['text'])] = d['quantity']\n return ner\n\n\nif __name__ == '__main__':\n text = 'ascsssss. b. cdsssscdssssssssssssSSSSSsssSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSScdssssssssssssSSSSSsssSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSssssssssSSSSSsssSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSS'\n a = cut_sentence(text)\n print(a)", "id": "9674749", "language": "Python", "matching_score": 4.410433292388916, "max_stars_count": 0, "path": "data_process/change_data_format_unit.py" }, { "content": "# -*- coding: UTF-8 -*-\n# @Time : 2021/1/18\n# @Author : <EMAIL>\n# Apache License\n# Copyright©2020-2021 <EMAIL> All Rights Reserved\nimport os\n\npath_tsv = '/Users/apricot/semeval_task8/ner_process/pre_tsv_format'\noutput = '/Users/apricot/semeval_task8/ner_process/pre_tsv_format_concat/pre_all.tsv'\n# path_tsv = '/Users/apricot/semeval_task8/MeasEval/data/human_eval_anno3'\n# output = '/Users/apricot/semeval_task8/MeasEval/data/human_eval_concat/anno3.tsv'\n\nall_data = ['docId\tannotSet\tannotType\tstartOffset\tendOffset\tannotId\ttext\tother']\nfiles = os.listdir(path_tsv)\nfor file in files:\n if '.tsv' not in file:\n continue\n full_file = os.path.join(path_tsv, file)\n with open(full_file, 'r', encoding='utf8') as fin:\n data = [l.strip() for l in fin.readlines()]\n all_data += data[1:]\n\nwith open(output, 'w', encoding='utf8') as fout:\n for d in all_data:\n print(d, file=fout)\n", "id": "5003706", "language": "Python", "matching_score": 0.6583772897720337, "max_stars_count": 0, "path": "data_process/concat_tsv.py" }, { "content": "# -*- coding: UTF-8 -*-\n# @Time : 2021/1/21\n# @Author : <EMAIL>\n# Apache License\n# Copyright©2020-2021 <EMAIL> All Rights Reserved\n\nimport os\nimport json\nimport sys\nimport getopt\nfrom data_process.change_data_format_unit import ner2tsv, save_data, connet_ere, add_rel_ere, select_sentx_pre, \\\n clean_pre_tsv_format, Mod, get_all_ner_quantity\n\n\ndef main_process(\n sentx_file='',\n ere_in_file='',\n 
ere_out_file='',\n mod_in_file='',\n mod_out_file='',\n tsv_path='',\n not_use_iso=False,\n not_use_mod=False,\n add_qualifier=False,\n):\n if sentx_file == '':\n sentx_file = 'ner_json_format_sentx_noBIO_cut_sentence_notpad_test'\n if ere_in_file == '':\n ere_in_file = '../ere_process/input/20210118_predict_10_vote_7/ere_input_test'\n if ere_out_file == '':\n ere_out_file = '../ere_process/output/20210122_20200118join/ere_pred.json'\n if mod_in_file == '':\n mod_in_file = '../mod_process/input/20210118_predict_10_vote_7/mod_input_test'\n if mod_out_file == '':\n mod_out_file = '../mod_process/output_t0_e0_l0_r0.8_all/eval_results.txt'\n if tsv_path == '':\n tsv_path = '../ner_process/pre_tsv_format'\n\n # ere_file = '/Users/apricot/semeval_task8/ere_process/output/20210121/ere_json_format_cut_sentence_train'\n # ere_file = '/Users/apricot/semeval_task8/ere_process/output/20210121/ere_pred.json'\n # ere_file = '/Users/apricot/semeval_task8/ere_process/output/20210122_20200118join/ere_pred.json'\n # ere_file = '/Users/apricot/semeval_task8/ere_process/output/20210122_20210120seperate/ere_pred.json'\n # ere_file = '/Users/apricot/semeval_task8/ere_process/output/20210126/pred_train_bycv.json'\n if not_use_iso:\n append_iso_q = False\n else:\n append_iso_q = True\n if not_use_mod:\n mod_tool = None\n else:\n mod_tool = Mod(mod_in_file, mod_out_file)\n\n with open(sentx_file, 'r', encoding='utf8') as fin:\n sentx_data = json.load(fin)\n with open(ere_out_file, 'r', encoding='utf8') as fin:\n pre_data = [json.loads(l.strip()) for l in fin.readlines()]\n clean_pre_tsv_format()\n # with open(ere_file, 'r', encoding='utf8') as fin:\n # pre_data = json.load(fin)\n\n old_id = ''\n all_data_for_one_case = []\n sentx_data, pre_data = select_sentx_pre(sentx_data, pre_data)\n all_ner_data = get_all_ner_quantity(ere_in_file)\n\n for sentx, pre in zip(sentx_data, pre_data):\n if sentx['text'] != pre['text']:\n print('error')\n all_rels = set([p[1] for p in pre['quantity']])\n if 'toQualifier' in all_rels:\n find_toQualifier = True\n if sentx['id'] != old_id:\n if old_id == '':\n old_id = sentx['id']\n else:\n data_pls, anno_set, rel = add_rel_ere(connet_ere(all_data_for_one_case, all_ner_data, append_iso_q=append_iso_q))\n data = ner2tsv(anno_set, rel, old_id, sent=sentx['text'], mod_tool=mod_tool, add_qualifier=add_qualifier)\n out_put_file = os.path.join(tsv_path, old_id + '.tsv')\n save_data(data, out_put_file)\n old_id = sentx['id']\n all_data_for_one_case = []\n all_data_for_one_case.append([sentx['sentx'], pre])\n data_pls, anno_set, rel = add_rel_ere(connet_ere(all_data_for_one_case, all_ner_data, append_iso_q=append_iso_q))\n data = ner2tsv(anno_set, rel, old_id, sent=sentx['text'], mod_tool=mod_tool)\n out_put_file = os.path.join(tsv_path, sentx['id'] + '.tsv')\n save_data(data, out_put_file)\n\n\nif __name__ == '__main__':\n sentx_file = ''\n ere_in_file = ''\n ere_out_file = ''\n mod_in_file = ''\n mod_out_file = ''\n tsv_path = ''\n not_use_iso = False\n not_use_mod = False\n add_qualifier = False\n opts, args = getopt.getopt(\n sys.argv[1:], '-s:-n:-e:-i:-o:-t:', [\n 'sentx_file=',\n 'ere_in_file=',\n 'ere_out_file=',\n 'mod_in_file=',\n 'mod_out_file=',\n 'tsv_path=',\n 'not_use_iso',\n 'not_use_mod',\n 'add_qualifier',\n ])\n for opt_name, opt_value in opts:\n if opt_name in ('-s', '--sentx_file'):\n sentx_file = opt_value\n for opt_name, opt_value in opts:\n if opt_name in ('-n', '--ere_in_file'):\n ere_in_file = opt_value\n for opt_name, opt_value in opts:\n if opt_name in ('-e', 
'--ere_out_file'):\n            ere_out_file = opt_value\n    for opt_name, opt_value in opts:\n        if opt_name in ('-i', '--mod_in_file'):\n            mod_in_file = opt_value\n    for opt_name, opt_value in opts:\n        if opt_name in ('-o', '--mod_out_file'):\n            mod_out_file = opt_value\n    for opt_name, opt_value in opts:\n        if opt_name in ('-t', '--tsv_path'):\n            tsv_path = opt_value\n    for opt_name, opt_value in opts:\n        if opt_name in ('--not_use_iso',):\n            not_use_iso = True\n    for opt_name, opt_value in opts:\n        if opt_name in ('--not_use_mod',):\n            not_use_mod = True\n    for opt_name, opt_value in opts:\n        if opt_name in ('--add_qualifier',):\n            add_qualifier = True\n\n    main_process(\n        sentx_file=sentx_file,\n        ere_in_file=ere_in_file,\n        ere_out_file=ere_out_file,\n        mod_in_file=mod_in_file,\n        mod_out_file=mod_out_file,\n        tsv_path=tsv_path,\n        not_use_iso=not_use_iso,\n        not_use_mod=not_use_mod,\n        add_qualifier=add_qualifier,\n    )\n\n", "id": "62763", "language": "Python", "matching_score": 5.533588886260986, "max_stars_count": 0, "path": "data_process/change_data_format_ere2tsv.py" }, { "content": "# -*- coding: UTF-8 -*-\n# @Time : 2021/1/15\n# @Author : <EMAIL>\n# Apache License\n# Copyright©2020-2021 <EMAIL> All Rights Reserved\nimport os\nimport json\nimport sys\nimport getopt\n\n\ndef main_process(ner_file='', ere_file='', mod_file='', post_file=''):\n    from data_process.change_data_format_unit import ner2tsv, save_data, connet, add_rel, select_sentx_pre, clean_pre_tsv_format, save_data_json, save_data_pickle\n    sentx_file = 'ner_json_format_sentx_noBIO_cut_sentence_notpad_test'\n    if ner_file == '':\n        ner_output_file = '../ner_process/output/20210118/predict_10_vote_7.json'\n    else:\n        ner_output_file = ner_file\n    if ere_file == '':\n        ere_input_path = '../ere_process/input/unknown/ere_input_test'\n    else:\n        ere_input_path = ere_file\n    if mod_file == '':\n        mod_input_path = '../mod_process/output/unknown/mod_input_test.json'\n    else:\n        mod_input_path = mod_file\n    if post_file == '':\n        post_path = '../post_process/unknown/ner_entities.json'\n        post_sent_path = '../post_process/unknown/ner_sent_entities.pkl'\n    else:\n        post_path = post_file\n        post_sent_path = os.path.join(os.path.split(post_file)[0], 'ner_sent_entities.pkl')\n    tsv_output_path = '../ner_process/pre_tsv_format'\n\n\n    with open(sentx_file, 'r', encoding='utf8') as fin:\n        sentx_data = json.load(fin)\n    with open(ner_output_file, 'r', encoding='utf8') as fin:\n        pre_data = [json.loads(l.strip()) for l in fin.readlines()]\n    clean_pre_tsv_format()\n    old_id = ''\n    all_data_for_one_case = []\n    post_sent_data = {}\n    sentx_data, pre_data = select_sentx_pre(sentx_data, pre_data)\n    for sentx, pre in zip(sentx_data, pre_data):\n        def update_post_sent_data(post_sent_data, sentx, pre):\n            ner_data = pre['label']\n            id = (sentx['id'], sentx['text'])\n            if id not in post_sent_data:\n                post_sent_data[id] = {}\n            for k, v in ner_data.items():\n                if k not in post_sent_data[id]:\n                    post_sent_data[id][k] = []\n                for vn, vp in v.items():\n                    for vpi in vp:\n                        post_sent_data[id][k].append([vn, vpi[0]])\n\n        update_post_sent_data(post_sent_data, sentx, pre)\n\n        if sentx['text'] != pre['text']:\n            print('error: sentence text mismatch between sentx and prediction data')\n        if sentx['id'] != old_id:\n            if old_id == '':\n                old_id = sentx['id']\n            else:\n                data_pls, anno_set, rel = add_rel(connet(all_data_for_one_case))\n\n                data = ner2tsv(anno_set, rel, old_id, sent=sentx['text'])\n                out_put_file = os.path.join(tsv_output_path, old_id + '.tsv')\n                save_data(data, out_put_file)\n            old_id = sentx['id']\n            all_data_for_one_case = []\n        all_data_for_one_case.append([sentx['sentx'], pre])\n    
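# flush the sentences buffered for the final document, which the loop above never writes out\n    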
data_pls, anno_set, rel = add_rel(connet(all_data_for_one_case))\n data = ner2tsv(anno_set, rel, old_id)\n out_put_file = os.path.join(tsv_output_path, sentx['id'] + '.tsv')\n save_data(data, out_put_file)\n\n\n # for ere input\n ere_input_data = []\n for sentx, pre in zip(sentx_data, pre_data):\n q_l = []\n if 'Quantity' in pre['label']:\n for qu in pre['label']['Quantity'].keys():\n q_l.append([qu, pre['label']['Quantity'][qu][0][0]])\n ere_input_data.append(json.dumps({\n 'id': sentx['id'],\n 'text': sentx['text'],\n \"quantity\": q_l,\n }))\n save_data(ere_input_data, ere_input_path)\n\n\n # for cls input\n quantities = set()\n for sentx, pre in zip(sentx_data, pre_data):\n if 'Quantity' in pre['label']:\n for qu in pre['label']['Quantity'].keys():\n quantities.add(qu)\n data = ['text\\tlabel']\n for q in sorted(list(quantities)):\n data.append('{}\\tEmpty'.format(q))\n save_data(data, mod_input_path)\n\n # for post sent-level input\n save_data_pickle(post_sent_data, post_sent_path)\n\n # for post input\n post_data = {}\n for sentx, pre in zip(sentx_data, pre_data):\n id = sentx['id']\n pos = sentx['sentx'][0]\n if id not in post_data:\n post_data[id] = {}\n for k, v in pre['label'].items():\n if k not in post_data[id]:\n post_data[id][k] = []\n for vn, vp in v.items():\n for vpi in vp:\n post_data[id][k].append([vn, vpi[0]+pos])\n\n save_data_json(post_data, post_path)\n\n\nif __name__ == '__main__':\n ner_file = ''\n ere_file = ''\n mod_file = ''\n post_file = ''\n opts, args = getopt.getopt(sys.argv[1:], '-n:-e:-m:-p:', ['ner_file=', 'ere_file=', 'mod_file=', 'post_file='])\n for opt_name, opt_value in opts:\n if opt_name in ('-n', '--ner_file'):\n ner_file = opt_value\n for opt_name, opt_value in opts:\n if opt_name in ('-e', '--ere_file'):\n ere_file = opt_value\n for opt_name, opt_value in opts:\n if opt_name in ('-m', '--mod_file'):\n mod_file = opt_value\n for opt_name, opt_value in opts:\n if opt_name in ('-p', '--post_file'):\n post_file = opt_value\n\n\n\n main_process(ner_file=ner_file, ere_file=ere_file, mod_file=mod_file, post_file=post_file)\n", "id": "9097485", "language": "Python", "matching_score": 0.797143816947937, "max_stars_count": 0, "path": "data_process/change_data_format_ner2tsv.py" }, { "content": "import torch\nimport torch.nn as nn\nimport torch.nn.init as init\n\n\ndef initial_parameter(net, initial_method=None):\n r\"\"\"A method used to initialize the weights of PyTorch models.\n\n :param net: a PyTorch model or a List of Pytorch model\n :param str initial_method: one of the following initializations.\n\n - xavier_uniform\n - xavier_normal (default)\n - kaiming_normal, or msra\n - kaiming_uniform\n - orthogonal\n - sparse\n - normal\n - uniform\n\n \"\"\"\n if initial_method == 'xavier_uniform':\n init_method = init.xavier_uniform_\n elif initial_method == 'xavier_normal':\n init_method = init.xavier_normal_\n elif initial_method == 'kaiming_normal' or initial_method == 'msra':\n init_method = init.kaiming_normal_\n elif initial_method == 'kaiming_uniform':\n init_method = init.kaiming_uniform_\n elif initial_method == 'orthogonal':\n init_method = init.orthogonal_\n elif initial_method == 'sparse':\n init_method = init.sparse_\n elif initial_method == 'normal':\n init_method = init.normal_\n elif initial_method == 'uniform':\n init_method = init.uniform_\n else:\n init_method = init.xavier_normal_\n\n def weights_init(m):\n # classname = m.__class__.__name__\n if isinstance(m, nn.Conv2d) or isinstance(m, nn.Conv1d) or isinstance(m, nn.Conv3d): # for all 
the cnn\n            if initial_method is not None:\n                init_method(m.weight.data)\n            else:\n                init.xavier_normal_(m.weight.data)\n            init.normal_(m.bias.data)\n        elif isinstance(m, nn.LSTM):\n            for w in m.parameters():\n                if len(w.data.size()) > 1:\n                    init_method(w.data)  # weight\n                else:\n                    init.normal_(w.data)  # bias\n        elif m is not None and hasattr(m, 'weight') and \\\n                hasattr(m.weight, \"requires_grad\"):\n            if len(m.weight.size()) > 1:\n                init_method(m.weight.data)\n            else:\n                init.normal_(m.weight.data)\n        else:\n            for w in m.parameters():\n                if w.requires_grad:\n                    if len(w.data.size()) > 1:\n                        init_method(w.data)  # weight\n                    else:\n                        init.normal_(w.data)  # bias\n                # print(\"init else\")\n\n    if isinstance(net, list):\n        for n in net:\n            n.apply(weights_init)\n    else:\n        net.apply(weights_init)\n\nclass FGM():\n    \"\"\"Adversarial training helper (Fast Gradient Method).\"\"\"\n\n    def __init__(self, model):\n        self.model = model\n        self.backup = {}\n\n    def attack(self, epsilon=1., emb_name='bert.embeddings.word_embeddings'):\n        \"\"\"Add an adversarial perturbation to the embedding layer.\n        :param epsilon: scaling coefficient\n        :param emb_name: name of the embedding parameters in the model\n        \"\"\"\n        # emb_name should be set to the embedding parameter name used by your model\n        for name, param in self.model.named_parameters():\n            if param.requires_grad and emb_name in name:\n                self.backup[name] = param.data.clone()\n                norm = torch.norm(param.grad)\n                if norm != 0 and not torch.isnan(norm):\n                    r_at = epsilon * param.grad / norm\n                    param.data.add_(r_at)\n\n    def restore(self, emb_name='bert.embeddings.word_embeddings'):\n        \"\"\"Restore the parameters saved before the perturbation.\n        :param emb_name: name of the embedding parameters in the model\n        \"\"\"\n        # emb_name must match the value used in attack(), otherwise the backup is never found\n        for name, param in self.model.named_parameters():\n            if param.requires_grad and emb_name in name:\n                assert name in self.backup\n                param.data = self.backup[name]\n        self.backup = {}\n\n\nclass PGD():\n    \"\"\"Adversarial training helper (Projected Gradient Descent).\"\"\"\n\n    def __init__(self, model):\n        self.model = model\n        self.emb_backup = {}\n        self.grad_backup = {}\n\n    def attack(self, epsilon=1., alpha=0.3, emb_name='char_emb.', is_first_attack=False):\n        # emb_name should be set to the embedding parameter name used by your model\n        for name, param in self.model.named_parameters():\n            if param.requires_grad and emb_name in name:\n                if is_first_attack:\n                    self.emb_backup[name] = param.data.clone()\n                norm = torch.norm(param.grad)\n                if norm != 0 and not torch.isnan(norm):\n                    r_at = alpha * param.grad / norm\n                    param.data.add_(r_at)\n                    param.data = self.project(name, param.data, epsilon)\n\n    def restore(self, emb_name='char_emb.'):\n        # emb_name should be set to the embedding parameter name used by your model\n        for name, param in self.model.named_parameters():\n            if param.requires_grad and emb_name in name:\n                assert name in self.emb_backup\n                param.data = self.emb_backup[name]\n        self.emb_backup = {}\n\n    def project(self, param_name, param_data, epsilon):\n        r = param_data - self.emb_backup[param_name]\n        if torch.norm(r) > epsilon:\n            r = epsilon * r / torch.norm(r)\n        return self.emb_backup[param_name] + r\n\n    def backup_grad(self):\n        for name, param in self.model.named_parameters():\n            if param.requires_grad:\n                self.grad_backup[name] = param.grad.clone()\n\n    def restore_grad(self):\n        for name, param in self.model.named_parameters():\n            if param.requires_grad:\n                param.grad = self.grad_backup[name]\n", "id": "10368381", "language": "Python", "matching_score": 2.511540174484253, "max_stars_count": 0, "path": "utils/train_util.py" }, { "content": "from torch import optim\n\nfrom layers.encoders.transformers.bert.bert_optimization import BertAdam\n\n\ndef set_optimizer(args, model, train_steps=None):\n    if args.warm_up:\n        print('using BertAdam')\n        param_optimizer = list(model.named_parameters())\n        param_optimizer = [n 
for n in param_optimizer if 'pooler' not in n[0]]\n        no_decay = ['bias', 'LayerNorm.bias', 'LayerNorm.weight']\n        optimizer_grouped_parameters = [\n            {'params': [p for n, p in param_optimizer if not any(nd in n for nd in no_decay)],\n             'weight_decay': 0.01},\n            {'params': [p for n, p in param_optimizer if any(nd in n for nd in no_decay)], 'weight_decay': 0.0}\n        ]\n\n        optimizer = BertAdam(optimizer_grouped_parameters,\n                             lr=args.learning_rate,\n                             warmup=args.warmup_proportion,\n                             t_total=train_steps)\n        return optimizer\n    else:\n        print('using optim Adam')\n        parameters_trainable = list(filter(lambda p: p.requires_grad, model.parameters()))\n        optimizer = optim.Adam(parameters_trainable, lr=args.learning_rate)\n        return optimizer\n", "id": "577008", "language": "Python", "matching_score": 0.00855974480509758, "max_stars_count": 0, "path": "utils/optimizer_util.py" }, { "content": "from abc import abstractmethod\nfrom collections import defaultdict\n\n\nclass MetricBase(object):\n\n    @abstractmethod\n    def evaluate(self, *args, **kwargs):\n        raise NotImplementedError\n\n    @abstractmethod\n    def get_metric(self, reset=True):\n        raise NotImplementedError\n\n    def __call__(self, p_ids, pred, eval_file):\n        return self.evaluate(p_ids, pred, eval_file)\n\n\ndef _bmeso_tag_to_spans(tags, ignore_labels=None):\n    \"\"\"\n    Given a list of tags, e.g. ['O', 'B-singer', 'M-singer', 'E-singer', 'O', 'O'],\n    return [('singer', (1, 4))] (half-open intervals).\n\n    :param tags: List[str],\n    :param ignore_labels: List[str], labels in this list are ignored\n    :return: List[Tuple[str, List[int, int]]]. [(label,[start, end])]\n    \"\"\"\n    ignore_labels = set(ignore_labels) if ignore_labels else set()\n\n    spans = []\n    prev_bmes_tag = None\n    for idx, tag in enumerate(tags):\n        tag = tag.lower()\n        bmes_tag, label = tag[:1], tag[2:]\n        if bmes_tag in ('b', 's'):\n            spans.append((label, [idx, idx]))\n        elif bmes_tag in ('m', 'e') and prev_bmes_tag in ('b', 'm') and label == spans[-1][0]:\n            spans[-1][1][1] = idx\n        elif bmes_tag == 'o':\n            pass\n        else:\n            spans.append((label, [idx, idx]))\n        prev_bmes_tag = bmes_tag\n    return [(span[0], (span[1][0], span[1][1] + 1))\n            for span in spans\n            if span[0] not in ignore_labels\n            ]\n\n\nclass SpanFPreRecMetric(MetricBase):\n    def __init__(self, tag_type, pred=None, target=None, encoding_type='bmeso',\n                 only_gross=True, f_type='micro', beta=1):\n        self.tag_type = tag_type\n        self.only_gross = only_gross\n        self.f_type = f_type\n        self.beta = beta\n        self.beta_square = self.beta ** 2\n        self.encoding_type = encoding_type\n        if self.encoding_type == 'bmeso':\n            self.tag_to_span_func = _bmeso_tag_to_spans\n\n        self._true_positives = defaultdict(int)\n        self._false_positives = defaultdict(int)\n        self._false_negatives = defaultdict(int)\n\n    def evaluate(self, p_ids, preds, eval_file):\n        answer_dict = {}\n        for p_id, pred in zip(p_ids.tolist(), preds.tolist()):\n            gold_ = eval_file[p_id].gold_answer\n            pred_ = [self.tag_type[tag] for tag in pred]\n            # gold_str_tags = [self.tag_type[tag] for tag in gold]\n            pred_spans = self.tag_to_span_func(pred_)\n            gold_spans = self.tag_to_span_func(gold_)\n            answer_dict[str(p_id)] = [pred_spans, gold_spans]\n            for span in pred_spans:\n                if span in gold_spans:\n                    self._true_positives[span[0]] += 1\n                    gold_spans.remove(span)\n                else:\n                    self._false_positives[span[0]] += 1\n            for span in gold_spans:\n                self._false_negatives[span[0]] += 1\n        return answer_dict\n\n    def get_metric(self, reset=True):\n        \"\"\"Compute the final metrics from the statistics accumulated by evaluate().\"\"\"\n        evaluate_result = {}\n        if not self.only_gross or self.f_type == 'macro':\n            tags = 
set(self._false_negatives.keys())\n            tags.update(set(self._false_positives.keys()))\n            tags.update(set(self._true_positives.keys()))\n            f_sum = 0\n            pre_sum = 0\n            rec_sum = 0\n            for tag in tags:\n                tp = self._true_positives[tag]\n                fn = self._false_negatives[tag]\n                fp = self._false_positives[tag]\n                f, pre, rec, _, _, _ = self._compute_f_pre_rec(tp, fn, fp)\n                f_sum += f\n                pre_sum += pre\n                rec_sum += rec\n                if not self.only_gross and tag != '':  # tag != '' guards against spans with an empty tag\n                    f_key = 'f-{}'.format(tag)\n                    pre_key = 'pre-{}'.format(tag)\n                    rec_key = 'rec-{}'.format(tag)\n                    evaluate_result[f_key] = f\n                    evaluate_result[pre_key] = pre\n                    evaluate_result[rec_key] = rec\n\n            if self.f_type == 'macro':\n                evaluate_result['f'] = f_sum / len(tags)\n                evaluate_result['pre'] = pre_sum / len(tags)\n                evaluate_result['rec'] = rec_sum / len(tags)\n\n        if self.f_type == 'micro':\n            f, pre, rec, em, pre_num, gold_num = self._compute_f_pre_rec(sum(self._true_positives.values()),\n                                                  sum(self._false_negatives.values()),\n                                                  sum(self._false_positives.values()))\n            evaluate_result['f'] = f\n            evaluate_result['pre'] = pre\n            evaluate_result['rec'] = rec\n            evaluate_result['em'] = em\n            evaluate_result['pre_num'] = pre_num\n            evaluate_result['gold'] = gold_num\n\n\n        if reset:\n            self._true_positives = defaultdict(int)\n            self._false_positives = defaultdict(int)\n            self._false_negatives = defaultdict(int)\n\n        for key, value in evaluate_result.items():\n            evaluate_result[key] = round(value, 6)\n        print(evaluate_result)\n        return evaluate_result\n\n    def _compute_f_pre_rec(self, tp, fn, fp):\n        \"\"\"\n\n        :param tp: int, true positive\n        :param fn: int, false negative\n        :param fp: int, false positive\n        :return: (f, pre, rec, tp, pred_num, gold_num)\n        \"\"\"\n        pre = tp / (fp + tp + 1e-13)\n        rec = tp / (fn + tp + 1e-13)\n        f = (1 + self.beta_square) * pre * rec / (self.beta_square * pre + rec + 1e-13)\n\n\n        return f, pre, rec, tp, fp + tp, fn + tp\n", "id": "3577177", "language": "Python", "matching_score": 1.1532074213027954, "max_stars_count": 0, "path": "utils/metrics.py" }, { "content": "import re\nimport unicodedata\n\nimport numpy as np\nimport torch\n\ndef get_ner_BIO(label_list):\n    # list_len = len(word_list)\n    # assert(list_len == len(label_list)), \"word list size unmatch with label list\"\n    list_len = len(label_list)\n    begin_label = 'B'\n    inside_label = 'I'\n    whole_tag = ''\n    index_tag = ''\n    tag_list = []\n    stand_matrix = []\n    for i in range(0, list_len):\n        # wordlabel = word_list[i]\n        current_label = label_list[i].upper()\n        if begin_label in current_label:\n            if index_tag == '':\n                # whole_tag = 'answer' + '[' + str(i)\n                whole_tag = '[' + str(i)\n                index_tag = 'answer'\n                # whole_tag = current_label.replace(begin_label,\"\",1) +'[' +str(i)\n                # index_tag = current_label.replace(begin_label,\"\",1)\n            else:\n                tag_list.append(whole_tag + ',' + str(i-1))\n                # whole_tag = 'answer' + '[' + str(i)\n                whole_tag = '[' + str(i)\n                index_tag = 'answer'\n                # whole_tag = current_label.replace(begin_label,\"\",1) + '[' + str(i)\n                # index_tag = current_label.replace(begin_label,\"\",1)\n\n        elif inside_label in current_label:\n            if 'answer' == index_tag:\n                whole_tag = whole_tag\n            else:\n                if (whole_tag != '')&(index_tag != ''):\n                    tag_list.append(whole_tag + ',' + str(i-1))\n                whole_tag = ''\n                index_tag = ''\n        else:\n            if (whole_tag != '')&(index_tag != ''):\n                tag_list.append(whole_tag + ',' + str(i-1))\n            whole_tag = ''\n            index_tag = ''\n\n    # if (whole_tag != '')&(index_tag != ''):\n    #     tag_list.append(whole_tag)\n    if whole_tag != '' and ',' not in whole_tag:\n        tag_list.append(whole_tag + ',' + str(list_len - 1))\n    
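# close each pending span string with ']' and eval it into a [start, end] pair\n    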
tag_list_len = len(tag_list)\n\n for i in range(0, tag_list_len):\n if len(tag_list[i]) > 0:\n tag_list[i] = tag_list[i] + ']'\n insert_list = reverse_style(tag_list[i])\n stand_matrix.append(eval(insert_list))\n return stand_matrix\n\n\ndef reverse_style(input_string):\n target_position = input_string.index('[')\n input_len = len(input_string)\n output_string = input_string[target_position:input_len] + input_string[0:target_position]\n return output_string\n\ndef is_string(s):\n \"\"\"判断是否是字符串\n \"\"\"\n return isinstance(s, str)\n\n\ndef padding(seqs, is_float=False, batch_first=False):\n lengths = [len(s) for s in seqs]\n\n seqs = [torch.Tensor(s) for s in seqs]\n batch_length = max(lengths)\n\n seq_tensor = torch.FloatTensor(batch_length, len(seqs)).fill_(float(0)) if is_float \\\n else torch.LongTensor(batch_length, len(seqs)).fill_(0)\n\n for i, s in enumerate(seqs):\n end_seq = lengths[i]\n seq_tensor[:end_seq, i].copy_(s[:end_seq])\n\n if batch_first:\n seq_tensor = seq_tensor.t()\n\n return seq_tensor, lengths\n\n\ndef mpn_padding(seqs, label, class_num, is_float=False, use_bert=False):\n lengths = [len(s) for s in seqs]\n\n seqs = [torch.Tensor(s) for s in seqs]\n batch_length = max(lengths)\n\n o1_tensor = torch.FloatTensor(len(seqs), batch_length, class_num).fill_(float(0)) if is_float \\\n else torch.LongTensor(len(seqs), batch_length, class_num).fill_(0)\n o2_tensor = torch.FloatTensor(len(seqs), batch_length, class_num).fill_(float(0)) if is_float \\\n else torch.LongTensor(len(seqs), batch_length, class_num).fill_(0)\n for i, label_ in enumerate(label):\n for attr in label_:\n if use_bert:\n o1_tensor[i, attr.value_pos_start + 1, attr.attr_type_id] = 1\n o2_tensor[i, attr.value_pos_end, attr.attr_type_id] = 1\n else:\n o1_tensor[i, attr.value_pos_start, attr.attr_type_id] = 1\n o2_tensor[i, attr.value_pos_end - 1, attr.attr_type_id] = 1\n\n return o1_tensor, o2_tensor\n\n\ndef spo_padding(seqs, label, class_num, is_float=False, use_bert=False):\n lengths = [len(s) for s in seqs]\n\n seqs = [torch.Tensor(s) for s in seqs]\n batch_length = max(lengths)\n\n o1_tensor = torch.FloatTensor(len(seqs), batch_length, class_num).fill_(float(0)) if is_float \\\n else torch.LongTensor(len(seqs), batch_length, class_num).fill_(0)\n o2_tensor = torch.FloatTensor(len(seqs), batch_length, class_num).fill_(float(0)) if is_float \\\n else torch.LongTensor(len(seqs), batch_length, class_num).fill_(0)\n for i, label_ in enumerate(label):\n for po in label_:\n if use_bert:\n o1_tensor[i, po.object_start + 1, po.predict_type_id] = 1\n o2_tensor[i, po.object_end, po.predict_type_id] = 1\n else:\n o1_tensor[i, po.object_start, po.predict_type_id] = 1\n o2_tensor[i, po.object_end - 1, po.predict_type_id] = 1\n\n return o1_tensor, o2_tensor\n\n\ndef _handle_pos_limit(pos, limit=30):\n for i, p in enumerate(pos):\n if p > limit:\n pos[i] = limit\n if p < -limit:\n pos[i] = -limit\n return [p + limit + 1 for p in pos]\n\n\ndef find_position(entity_name, text):\n start = text.find(entity_name, 0)\n return start, start + len(entity_name)\n\n\nclass BasicTokenizer(object):\n \"\"\"分词器基类\n \"\"\"\n\n def __init__(self, do_lower_case=False):\n \"\"\"初始化\n \"\"\"\n self._token_pad = '[PAD]'\n self._token_cls = '[CLS]'\n self._token_sep = '[SEP]'\n self._token_unk = '[UNK]'\n self._token_mask = '[MASK]'\n self._do_lower_case = do_lower_case\n\n def tokenize(self, text, add_cls=True, add_sep=True, max_length=None):\n \"\"\"分词函数\n \"\"\"\n if self._do_lower_case:\n text = unicodedata.normalize('NFD', text)\n 
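# strip combining marks (Unicode category 'Mn') left over from NFD normalization\n            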
text = ''.join(\n [ch for ch in text if unicodedata.category(ch) != 'Mn'])\n text = text.lower()\n\n tokens = self._tokenize(text)\n if add_cls:\n tokens.insert(0, self._token_cls)\n if add_sep:\n tokens.append(self._token_sep)\n\n if max_length is not None:\n self.truncate_sequence(max_length, tokens, None, -2)\n\n return tokens\n\n def token_to_id(self, token):\n \"\"\"token转换为对应的id\n \"\"\"\n raise NotImplementedError\n\n def tokens_to_ids(self, tokens):\n \"\"\"token序列转换为对应的id序列\n \"\"\"\n return [self.token_to_id(token) for token in tokens]\n\n def truncate_sequence(self,\n max_length,\n first_sequence,\n second_sequence=None,\n pop_index=-1):\n \"\"\"截断总长度\n \"\"\"\n if second_sequence is None:\n second_sequence = []\n\n while True:\n total_length = len(first_sequence) + len(second_sequence)\n if total_length <= max_length:\n break\n elif len(first_sequence) > len(second_sequence):\n first_sequence.pop(pop_index)\n else:\n second_sequence.pop(pop_index)\n\n def encode(self,\n first_text,\n second_text=None,\n max_length=None,\n first_length=None,\n second_length=None):\n \"\"\"输出文本对应token id和segment id\n 如果传入first_length,则强行padding第一个句子到指定长度;\n 同理,如果传入second_length,则强行padding第二个句子到指定长度。\n \"\"\"\n if is_string(first_text):\n first_tokens = self.tokenize(first_text)\n else:\n first_tokens = first_text\n\n if second_text is None:\n second_tokens = None\n elif is_string(second_text):\n second_tokens = self.tokenize(second_text, add_cls=False)\n else:\n second_tokens = second_text\n\n if max_length is not None:\n self.truncate_sequence(max_length, first_tokens, second_tokens, -2)\n\n first_token_ids = self.tokens_to_ids(first_tokens)\n if first_length is not None:\n first_token_ids = first_token_ids[:first_length]\n first_token_ids.extend([self._token_pad_id] *\n (first_length - len(first_token_ids)))\n first_segment_ids = [0] * len(first_token_ids)\n\n if second_text is not None:\n second_token_ids = self.tokens_to_ids(second_tokens)\n if second_length is not None:\n second_token_ids = second_token_ids[:second_length]\n second_token_ids.extend(\n [self._token_pad_id] *\n (second_length - len(second_token_ids)))\n second_segment_ids = [1] * len(second_token_ids)\n\n first_token_ids.extend(second_token_ids)\n first_segment_ids.extend(second_segment_ids)\n\n return first_token_ids, first_segment_ids\n\n def id_to_token(self, i):\n \"\"\"id序列为对应的token\n \"\"\"\n raise NotImplementedError\n\n def ids_to_tokens(self, ids):\n \"\"\"id序列转换为对应的token序列\n \"\"\"\n return [self.id_to_token(i) for i in ids]\n\n def decode(self, ids):\n \"\"\"转为可读文本\n \"\"\"\n raise NotImplementedError\n\n def _tokenize(self, text):\n \"\"\"基本分词函数\n \"\"\"\n raise NotImplementedError\n\n\nclass Tokenizer(BasicTokenizer):\n \"\"\"Bert原生分词器\n 纯Python实现,代码修改自keras_bert的tokenizer实现\n \"\"\"\n\n def __init__(self, token_dict, do_lower_case=False):\n \"\"\"初始化\n \"\"\"\n super(Tokenizer, self).__init__(do_lower_case)\n if is_string(token_dict):\n token_dict = load_vocab(token_dict)\n\n self._token_dict = token_dict\n self._token_dict_inv = {v: k for k, v in token_dict.items()}\n for token in ['pad', 'cls', 'sep', 'unk', 'mask']:\n try:\n _token_id = token_dict[getattr(self, '_token_%s' % token)]\n setattr(self, '_token_%s_id' % token, _token_id)\n except:\n pass\n self._vocab_size = len(token_dict)\n\n def token_to_id(self, token):\n \"\"\"token转换为对应的id\n \"\"\"\n return self._token_dict.get(token, self._token_unk_id)\n\n def id_to_token(self, i):\n \"\"\"id转换为对应的token\n \"\"\"\n return self._token_dict_inv[i]\n\n def 
decode(self, ids, tokens=None):\n \"\"\"转为可读文本\n \"\"\"\n tokens = tokens or self.ids_to_tokens(ids)\n tokens = [token for token in tokens if not self._is_special(token)]\n\n text, flag = '', False\n for i, token in enumerate(tokens):\n if token[:2] == '##':\n text += token[2:]\n elif len(token) == 1 and self._is_cjk_character(token):\n text += token\n elif len(token) == 1 and self._is_punctuation(token):\n text += token\n text += ' '\n elif i > 0 and self._is_cjk_character(text[-1]):\n text += token\n else:\n text += ' '\n text += token\n\n text = re.sub(' +', ' ', text)\n text = re.sub('\\' (re|m|s|t|ve|d|ll) ', '\\'\\\\1 ', text)\n punctuation = self._cjk_punctuation() + '+-/={(<['\n punctuation_regex = '|'.join([re.escape(p) for p in punctuation])\n punctuation_regex = '(%s) ' % punctuation_regex\n text = re.sub(punctuation_regex, '\\\\1', text)\n text = re.sub('(\\d\\.) (\\d)', '\\\\1\\\\2', text)\n\n return text.strip()\n\n def _tokenize(self, text):\n \"\"\"基本分词函数\n \"\"\"\n spaced = ''\n for ch in text:\n if self._is_punctuation(ch) or self._is_cjk_character(ch):\n spaced += ' ' + ch + ' '\n elif self._is_space(ch):\n spaced += ' '\n elif ord(ch) == 0 or ord(ch) == 0xfffd or self._is_control(ch):\n continue\n else:\n spaced += ch\n\n tokens = []\n for word in spaced.strip().split():\n tokens.extend(self._word_piece_tokenize(word))\n\n return tokens\n\n def _word_piece_tokenize(self, word):\n \"\"\"word内分成subword\n \"\"\"\n if word in self._token_dict:\n return [word]\n\n tokens = []\n start, stop = 0, 0\n while start < len(word):\n stop = len(word)\n while stop > start:\n sub = word[start:stop]\n if start > 0:\n sub = '##' + sub\n if sub in self._token_dict:\n break\n stop -= 1\n if start == stop:\n stop += 1\n tokens.append(sub)\n start = stop\n\n return tokens\n\n @staticmethod\n def _is_space(ch):\n \"\"\"空格类字符判断\n \"\"\"\n return ch == ' ' or ch == '\\n' or ch == '\\r' or ch == '\\t' or \\\n unicodedata.category(ch) == 'Zs'\n\n @staticmethod\n def _is_punctuation(ch):\n \"\"\"标点符号类字符判断(全/半角均在此内)\n \"\"\"\n code = ord(ch)\n return 33 <= code <= 47 or \\\n 58 <= code <= 64 or \\\n 91 <= code <= 96 or \\\n 123 <= code <= 126 or \\\n unicodedata.category(ch).startswith('P')\n\n @staticmethod\n def _cjk_punctuation():\n return u'\\uff02\\uff03\\uff04\\uff05\\uff06\\uff07\\uff08\\uff09\\uff0a\\uff0b\\uff0c\\uff0d\\uff0f\\uff1a\\uff1b\\uff1c\\uff1d\\uff1e\\uff20\\uff3b\\uff3c\\uff3d\\uff3e\\uff3f\\uff40\\uff5b\\uff5c\\uff5d\\uff5e\\uff5f\\uff60\\uff62\\uff63\\uff64\\u3000\\u3001\\u3003\\u3008\\u3009\\u300a\\u300b\\u300c\\u300d\\u300e\\u300f\\u3010\\u3011\\u3014\\u3015\\u3016\\u3017\\u3018\\u3019\\u301a\\u301b\\u301c\\u301d\\u301e\\u301f\\u3030\\u303e\\u303f\\u2013\\u2014\\u2018\\u2019\\u201b\\u201c\\u201d\\u201e\\u201f\\u2026\\u2027\\ufe4f\\ufe51\\ufe54\\xb7\\uff01\\uff1f\\uff61\\u3002'\n\n @staticmethod\n def _is_cjk_character(ch):\n \"\"\"CJK类字符判断(包括中文字符也在此列)\n 参考:https://en.wikipedia.org/wiki/CJK_Unified_Ideographs_(Unicode_block)\n \"\"\"\n code = ord(ch)\n return 0x4E00 <= code <= 0x9FFF or \\\n 0x3400 <= code <= 0x4DBF or \\\n 0x20000 <= code <= 0x2A6DF or \\\n 0x2A700 <= code <= 0x2B73F or \\\n 0x2B740 <= code <= 0x2B81F or \\\n 0x2B820 <= code <= 0x2CEAF or \\\n 0xF900 <= code <= 0xFAFF or \\\n 0x2F800 <= code <= 0x2FA1F\n\n @staticmethod\n def _is_control(ch):\n \"\"\"控制类字符判断\n \"\"\"\n return unicodedata.category(ch) in ('Cc', 'Cf')\n\n @staticmethod\n def _is_special(ch):\n \"\"\"判断是不是有特殊含义的符号\n \"\"\"\n return bool(ch) and (ch[0] == '[') and (ch[-1] == ']')\n\n\ndef 
load_vocab(dict_path, encoding='utf-8', simplified=False, startwith=None):\n \"\"\"从bert的词典文件中读取词典\n \"\"\"\n token_dict = {}\n with open(dict_path, encoding=encoding) as reader:\n for line in reader:\n token = line.strip()\n token_dict[token] = len(token_dict)\n\n if simplified: # 过滤冗余部分token\n new_token_dict, keep_tokens = {}, []\n startwith = startwith or []\n for t in startwith:\n new_token_dict[t] = len(new_token_dict)\n keep_tokens.append(token_dict[t])\n\n for t, _ in sorted(token_dict.items(), key=lambda s: s[1]):\n if t not in new_token_dict:\n keep = True\n if len(t) > 1:\n for c in (t[2:] if t[:2] == '##' else t):\n if (Tokenizer._is_cjk_character(c)\n or Tokenizer._is_punctuation(c)):\n keep = False\n break\n if keep:\n new_token_dict[t] = len(new_token_dict)\n keep_tokens.append(token_dict[t])\n\n return new_token_dict, keep_tokens\n else:\n return token_dict\n\n\ndef search(pattern, sequence):\n \"\"\"从sequence中寻找子串pattern\n 如果找到,返回第一个下标;否则返回-1。\n \"\"\"\n n = len(pattern)\n for i in range(len(sequence)):\n if sequence[i:i + n] == pattern:\n return i\n return -1\n\n\ndef sequence_padding(inputs, length=None, padding=0, is_float=False):\n \"\"\"Numpy函数,将序列padding到同一长度\n \"\"\"\n if length is None:\n length = max([len(x) for x in inputs])\n\n outputs = np.array([\n np.concatenate([x, [padding] * (length - len(x))])\n if len(x) < length else x[:length] for x in inputs\n ])\n\n out_tensor = torch.FloatTensor(outputs) if is_float \\\n else torch.LongTensor(outputs)\n return torch.tensor(out_tensor)\n\n\ndef batch_gather(data: torch.Tensor, index: torch.Tensor):\n length = index.shape[0]\n t_index = index.cpu().numpy()\n t_data = data.cpu().data.numpy()\n result = []\n for i in range(length):\n result.append(t_data[i, t_index[i], :])\n\n return torch.from_numpy(np.array(result)).to(data.device)\n\n\ndef select_padding(seqs, select, is_float=False, class_num=None):\n lengths = [len(s) for s in seqs]\n batch_length = max(lengths)\n\n seq_tensor = torch.FloatTensor(len(seqs), batch_length, class_num, batch_length).fill_(float(0)) if is_float \\\n else torch.LongTensor(len(seqs), batch_length, class_num, batch_length).fill_(0)\n\n # NA = BAIDU_SELECT['NA']\n # seq_tensor[:, :, NA, :] = 1\n for i, triplet_list in enumerate(select):\n for triplet in triplet_list:\n subject_pos = triplet[0]\n object_pos = triplet[1]\n predicate = triplet[2]\n\n seq_tensor[i, subject_pos, predicate, object_pos] = 1\n # seq_tensor[i, subject_pos, NA, object_pos] = 0\n\n return seq_tensor\n\nif __name__=='__main__':\n a = ['O', 'B', 'I', 'I', 'O', 'O', 'O', 'B', 'I']\n print(get_ner_BIO(a))", "id": "12770047", "language": "Python", "matching_score": 1.4109864234924316, "max_stars_count": 0, "path": "utils/data_util.py" }, { "content": "import json\nimport logging\nimport os\nimport pickle\nimport sys\n\nlogger = logging.getLogger()\nlogging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')\n\n\ndef pickle_dump_large_file(obj, filepath):\n max_bytes = 2 ** 31 - 1\n bytes_out = pickle.dumps(obj)\n n_bytes = sys.getsizeof(bytes_out)\n with open(filepath, 'wb') as f_out:\n for idx in range(0, n_bytes, max_bytes):\n f_out.write(bytes_out[idx:idx + max_bytes])\n\n\ndef pickle_load_large_file(filepath):\n max_bytes = 2 ** 31 - 1\n input_size = os.path.getsize(filepath)\n bytes_in = bytearray(0)\n with open(filepath, 'rb') as f_in:\n for _ in range(0, input_size, max_bytes):\n bytes_in += f_in.read(max_bytes)\n obj = pickle.loads(bytes_in)\n return obj\n\n\ndef 
save(filepath, obj, message=None):\n if message is not None:\n logging.info(\"Saving {}...\".format(message))\n pickle_dump_large_file(obj, filepath)\n\n\ndef load(filepath):\n return pickle_load_large_file(filepath)\n\n\ndef read_json(path):\n with open(path, 'r') as f:\n return json.load(f)\n\n\ndef write_json(obj, path):\n with open(path, 'wb') as f:\n f.write(json.dumps(obj, indent=2, ensure_ascii=False).\n encode('utf-8'))\n\n\ndef _read_conll(path, encoding='utf-8', indexes=2, dropna=True):\n \"\"\"\n Construct a generator to read conll items.\n\n :param path: file path\n :param encoding: file's encoding, default: utf-8\n :param indexes: conll object's column indexes that needed, if None, all columns are needed. default: None\n :param dropna: weather to ignore and drop invalid data,\n :if False, raise ValueError when reading invalid data. default: True\n :return: generator, every time yield (line number, conll item)\n \"\"\"\n\n def parse_conll(sample):\n sample = list(map(list, zip(*sample)))\n sample = [sample[i] for i in range(indexes)]\n for f in sample:\n if len(f) <= 0:\n raise ValueError('empty field')\n return sample\n\n with open(path, 'r', encoding=encoding) as f:\n sample = []\n start = next(f).strip()\n if start != '':\n sample.append(start.split())\n for line_idx, line in enumerate(f, 1):\n line = line.strip()\n if line == '':\n if len(sample):\n try:\n res = parse_conll(sample)\n sample = []\n yield line_idx, res\n except Exception as e:\n if dropna:\n logger.warning('Invalid instance which ends at line: {} has been dropped.'.format(line_idx))\n continue\n raise ValueError('Invalid instance which ends at line: {}'.format(line_idx))\n elif line.startswith('#'):\n continue\n else:\n sample.append(line.split('\\t'))\n if len(sample) > 0:\n try:\n res = parse_conll(sample)\n yield line_idx, res\n except Exception as e:\n if dropna:\n return\n logger.error('invalid instance ends at line: {}'.format(line_idx))\n raise e\n", "id": "4291783", "language": "Python", "matching_score": 0.5855976939201355, "max_stars_count": 0, "path": "utils/file_util.py" }, { "content": "# -*- coding: UTF-8 -*-\n# @Time : 2021/1/19\n# @Author : <EMAIL>\n# Apache License\n# Copyright©2020-2021 <EMAIL> All Rights Reserved\n\n\nclass RuleBaseUnit:\n def __init__(self):\n with open('statistic/all_u.txt', 'r', encoding='utf8') as fin:\n self.data = [l.strip() for l in fin.readlines()]\n self.data.sort(key=lambda x: len(x), reverse=True)\n self.stopword = set([' ',])\n\n def get_unit_match(self, text):\n for t in self.data:\n if t in text:\n if text[-len(t):] == t:\n return t\n return ''\n\n def get_unit_split(self, text):\n text = text.strip()\n if 'and' in text:\n text = text.split('and')[-1].strip()\n p = len(text)-1\n if ' ' in text:\n idx = text.find(' ')\n if text[idx-1].isalpha():\n p = idx-1\n else:\n return text[idx+1:].strip()\n test = p\n while (not text[test].isdigit()) and test >= 0:\n test -= 1\n if test == -1:\n return text[p+1:].strip()\n return text[test+1:].strip()\n\n def get_unit(self, text):\n unit = self.get_unit_match(text)\n if unit == '':\n unit = self.get_unit_split(text)\n if unit in ['/']:\n unit = ''\n return unit\n\n\nif __name__ == '__main__':\n rbu = RuleBaseUnit()\n print('### test for get_unit_match ###')\n print(rbu.get_unit_match('1 μbar'))\n print(rbu.get_unit_match('0 and 2000 ppm'))\n print(rbu.get_unit_match('5%'))\n print(rbu.get_unit_match('5Rp'))\n print(rbu.get_unit_match('eight'))\n print('### test for get_unit_split ###')\n print(rbu.get_unit_split('1 
μbar'))\n print(rbu.get_unit_split('0 and 2000 ppm'))\n print(rbu.get_unit_split('5%'))\n print(rbu.get_unit_split('5Rp'))\n print(rbu.get_unit_split('eight'))\n print('### test for get_unit ###')\n print(rbu.get_unit('1 μbar'))\n print(rbu.get_unit('0 and 2000 ppm'))\n print(rbu.get_unit('5%'))\n print(rbu.get_unit('5Rp'))\n print(rbu.get_unit('5Rp'))", "id": "5195945", "language": "Python", "matching_score": 3.3675010204315186, "max_stars_count": 0, "path": "data_process/rule_base_unit.py" }, { "content": "# -*- coding: UTF-8 -*-\n# @Time : 2021/1/12\n# @Author : <EMAIL>\n# Apache License\n# Copyright©2020-2021 <EMAIL> All Rights Reserved\n", "id": "8378786", "language": "Python", "matching_score": 0.11104819923639297, "max_stars_count": 0, "path": "data_process/__init__.py" }, { "content": "# -*- coding: UTF-8 -*-\n# Copyright©2020 <EMAIL> All Rights Reserved\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\"\nGet proxy from http://www.xiladaili.com/\n\"\"\"\n\nfrom typing import Dict, Union, List, Any, NoReturn\nfrom abc import ABCMeta, abstractmethod\nimport time\nimport os\nimport requests\nfrom threading import Thread, Lock\nfrom random import choice\nfrom aitool import file_exist, save_json, load_json\nfrom .proxy_base import ProxyBase\nimport logging\n\nURL = str\nPath = str\n\n\nclass WebPageProxy(ProxyBase, metaclass=ABCMeta):\n def __init__(\n self,\n time_interval: float = 5.0,\n record_file_path: Path = 'record.json',\n expiration_data: int = 100000,\n proxies_bare_minimum: int = 1\n ):\n logging.info('initializing {}'.format(self.__class__))\n self._available = False\n self._time_interval = time_interval\n self._record_file_path = record_file_path\n self._expiration_data = expiration_data\n self._proxies_bare_minimum = proxies_bare_minimum\n\n self._update_time = None\n self._http_proxy_storage = []\n self._https_proxy_storage = []\n self._confirm_proxy_availability()\n Thread(target=self._automatic_update_monitor).start()\n\n def _confirm_proxy_availability(self) -> NoReturn:\n if file_exist(self._record_file_path):\n self._http_proxy_storage, self._https_proxy_storage, self._update_time = load_json(self._record_file_path)\n if not self._update_time or time.time() - self._update_time > self._expiration_data:\n self._update_proxy_storage()\n\n def _update_proxy_storage(self) -> NoReturn:\n print('updating proxy storage of {}'.format(__class__))\n logging.info('updating proxy storage of {}'.format(__class__))\n print('This step will cost several minutes, please be patient')\n self._update_time = time.time()\n self._tmp_http_proxy_storage = set()\n self._tmp_https_proxy_storage = set()\n lock = Lock()\n threads = []\n for results in self._analysis_homepage():\n for proxy, tag in results:\n if 'HTTP' in tag:\n t = Thread(target=self._check_proxy_available, args=(proxy, 'HTTP', lock,))\n t.start()\n threads.append(t)\n if 'HTTPS' in tag:\n t = Thread(target=self._check_proxy_available, args=(proxy, 'HTTPS', lock,))\n t.start()\n threads.append(t)\n 
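# pause between listing pages so the source site is not flooded with requests\n            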
time.sleep(self._time_interval)\n for t in threads:\n t.join()\n print('{} http proxies have been collected by {}'.format(__class__, len(self._tmp_http_proxy_storage)))\n print('{} https proxies have been collected by {}'.format(__class__, len(self._tmp_https_proxy_storage)))\n logging.info('The http proxies collected by {}: {}'.format(__class__, self._http_proxy_storage))\n logging.info('The https proxies collected by {}: {}'.format(__class__, self._https_proxy_storage))\n self._http_proxy_storage = list(self._tmp_http_proxy_storage)\n self._https_proxy_storage = list(self._tmp_https_proxy_storage)\n save_json(self._record_file_path, [self._http_proxy_storage, self._https_proxy_storage, self._update_time])\n if len(self._tmp_http_proxy_storage) >= self._proxies_bare_minimum and \\\n len(self._tmp_https_proxy_storage) >= self._proxies_bare_minimum:\n print('{} is available'.format(__class__))\n logging.info('{} is available'.format(__class__))\n self._available = True\n else:\n print('{} is not available while the number of http proxies or https proxies is little than {}'.\n format(__class__, self._proxies_bare_minimum))\n logging.info('{} is not available while the number of http proxies or https proxies is little than {}'.\n format(__class__, self._proxies_bare_minimum))\n\n @abstractmethod\n def _analysis_homepage(self) -> List:\n raise NotImplementedError\n\n def get_proxy(self) -> Union[None, dict]:\n if not self._available:\n return None\n return {\n 'http': choice(self._http_proxy_storage),\n 'https': choice(self._https_proxy_storage),\n }\n\n def _check_proxy_available(self, proxy: str, protocol: str, lock: Lock) -> NoReturn:\n if protocol == 'HTTP' and self._is_http_proxy_available(proxy):\n lock.acquire()\n self._tmp_http_proxy_storage.add(proxy)\n lock.release()\n if protocol == 'HTTPS' and self._is_https_proxy_available(proxy):\n lock.acquire()\n self._tmp_https_proxy_storage.add(proxy)\n lock.release()\n\n def _automatic_update_monitor(self, time_watch_interval: int = 10000) -> NoReturn:\n while True:\n time.sleep(time_watch_interval)\n if time.time() - self._update_time > self._expiration_data:\n self._confirm_proxy_availability()\n\n @property\n def http_proxy_storage(self):\n return self._http_proxy_storage\n\n @property\n def https_proxy_storage(self):\n return self._https_proxy_storage\n", "id": "8822725", "language": "Python", "matching_score": 5.100337982177734, "max_stars_count": 0, "path": "easyproxy/web_page_proxy.py" }, { "content": "# -*- coding: UTF-8 -*-\n# Copyright©2020 <EMAIL> All Rights Reserved\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\"\n\n\"\"\"\nfrom typing import Dict, Union, List, Any\nfrom .xila_proxy.xila_proxy import XiLaProxy\nfrom aitool import singleton\nfrom random import choice\nfrom .proxy_base import ProxyBase\nimport time\nimport logging\n\n\n@singleton\nclass ProxyManager(ProxyBase):\n def __init__(\n self,\n expiration_data: int = 1000,\n proxies_bare_minimum: int = 1\n ):\n self._available = False\n self.proxy_collectors = [\n 
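# additional proxy collectors can be registered here\n            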
XiLaProxy(),\n ]\n self._expiration_data = expiration_data\n self._proxies_bare_minimum = proxies_bare_minimum\n self._http_proxy_storage = []\n self._https_proxy_storage = []\n self._update_time = None\n\n def get_proxy(self):\n if not self._update_time or time.time() - self._update_time > self._expiration_data:\n self._update_proxy_storage()\n if not self._available:\n return None\n return {\n 'http': choice(self._http_proxy_storage),\n 'https': choice(self._https_proxy_storage),\n }\n\n def _update_proxy_storage(self):\n self._update_time = time.time()\n set_http_proxy_storage = set()\n set_https_proxy_storage = set()\n for proxy_collector in self.proxy_collectors:\n for proxy in proxy_collector.http_proxy_storage:\n set_http_proxy_storage.add(proxy)\n for proxy in proxy_collector.https_proxy_storage:\n set_https_proxy_storage.add(proxy)\n self._http_proxy_storage = list(set_http_proxy_storage)\n self._https_proxy_storage = list(set_https_proxy_storage)\n logging.info('The http proxies collected by {}: {}'.format(__class__, self._http_proxy_storage))\n logging.info('The https proxies collected by {}: {}'.format(__class__, self._https_proxy_storage))\n if len(self._http_proxy_storage) >= self._proxies_bare_minimum and \\\n len(self._https_proxy_storage) >= self._proxies_bare_minimum:\n print('{} is available'.format(__class__))\n logging.info('{} is available'.format(__class__))\n self._available = True\n", "id": "9533354", "language": "Python", "matching_score": 2.106203317642212, "max_stars_count": 0, "path": "easyproxy/proxy_manager.py" }, { "content": "# -*- coding: UTF-8 -*-\n# Copyright©2020 <EMAIL> All Rights Reserved\n#\n# Licensed under the Apache License, Version 2.0 (the 'License');\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an 'AS IS' BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\"\nThis module contains some base classes which can assist developer to\nimplement spider functions faster and clearer.\n\"\"\"\nfrom typing import Union\nimport requests\nfrom abc import ABCMeta, abstractmethod\nfrom .header import header\nimport logging\n\n\nclass ProxyBase(metaclass=ABCMeta):\n \"\"\"\n The interface Class of proxy.\n \"\"\"\n @abstractmethod\n def get_proxy(self) -> Union[None, dict]:\n raise NotImplementedError\n\n @classmethod\n def _is_http_proxy_available(cls, proxy: str) -> bool:\n proxies = {'http': proxy}\n test_url = 'http://ip.tool.chinaz.com/'\n try:\n logging.debug('url: {}, proxies: {}')\n requests.get(url=test_url, proxies=proxies, headers=header, timeout=5).text\n except Exception as e:\n logging.debug('{} unavailable, {}'.format(proxy, e))\n return False\n logging.info('{} available for http'.format(proxy))\n return True\n\n @classmethod\n def _is_https_proxy_available(cls, proxy: str) -> bool:\n proxies = {'https': r'https://' + proxy}\n test_url = 'https://blog.csdn.net/luoyangIT'\n try:\n logging.debug('url: {}, proxies: {}')\n requests.get(url=test_url, headers=header, proxies=proxies, timeout=5).text\n except Exception as e:\n logging.debug('{} unavailable, {}'.format(proxy, e))\n return False\n logging.info('{} available for https'.format(proxy))\n return True\n\n 
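# convenience dispatcher over the protocol-specific checks above\n    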
@classmethod\n def _is_proxy_available(cls, proxy: str, protocol: str) -> bool:\n if protocol == 'HTTP' and cls._is_http_proxy_available(proxy):\n return True\n if protocol == 'HTTPS' and cls._is_https_proxy_available(proxy):\n return True\n return False\n", "id": "290305", "language": "Python", "matching_score": 2.8643712997436523, "max_stars_count": 0, "path": "easyproxy/proxy_base.py" }, { "content": "# -*- coding: UTF-8 -*-\n# Copyright©2020 <EMAIL> All Rights Reserved\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\"\n\n\"\"\"\nimport logging\nfrom .proxy_manager import ProxyManager\n\nlogging.basicConfig(\n filename='log.txt',\n format='[%(asctime)s-%(filename)s-%(levelname)s:%(message)s]',\n level=logging.DEBUG,\n filemode='a',\n datefmt='%Y-%m-%d%I:%M:%S %p'\n)\n\nproxy_manager = ProxyManager()\nproxy = get_proxy = proxy_manager.get_proxy\n", "id": "11100850", "language": "Python", "matching_score": 1.2142252922058105, "max_stars_count": 0, "path": "easyproxy/__init__.py" }, { "content": "# -*- coding: UTF-8 -*-\n# Copyright©2020 <EMAIL> All Rights Reserved\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\"\nGet proxy from http://www.xiladaili.com/\n\"\"\"\nfrom typing import List\nimport requests\nfrom bs4 import BeautifulSoup\nfrom aitool import singleton\nfrom ..web_page_proxy import WebPageProxy\nimport logging\nimport os\nfrom .__init__ import XiLaPath\n\n\n@singleton\nclass XiLaProxy(WebPageProxy):\n def __init__(self):\n super().__init__(record_file_path=os.path.join(XiLaPath, 'record.json'))\n\n def _analysis_homepage(self) -> List:\n for page_index in range(1, 10):\n results = []\n try:\n homepage = 'http://www.xiladaili.com/gaoni/{}/'.format(page_index)\n html = requests.get(url=homepage).text\n soup = BeautifulSoup(html, 'lxml')\n trs = soup.select_one('.fl-table').select_one('tbody').select('tr')\n results = [[tr.select('td')[0].text, set(tr.select('td')[1].text.replace('代理', '').split(','))] for tr in trs]\n except Exception as e:\n print(e)\n logging.error(e)\n print('{} candidate proxies collected from {}'.format(len(results), homepage))\n logging.info('{} candidate proxies collected from {}'.format(len(results), homepage))\n yield results\n", "id": "496243", "language": "Python", "matching_score": 2.9427497386932373, "max_stars_count": 0, "path": "easyproxy/xila_proxy/xila_proxy.py" }, { "content": "# -*- coding: UTF-8 -*-\n# @Time : 2021/6/23\n# @Author : <EMAIL>\n# Apache License\n# Copyright©2020-2021 <EMAIL> All Rights Reserved\nimport os\n\nXiLaPath 
= os.path.dirname(__file__)\n", "id": "1886572", "language": "Python", "matching_score": 0.35161709785461426, "max_stars_count": 0, "path": "easyproxy/xila_proxy/__init__.py" } ]
2.51154
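The proxy_base.py excerpt above dispatches to protocol-specific checks (_is_http_proxy_available, _is_https_proxy_available) that are defined outside this excerpt. Below is a minimal sketch of what such a check might look like, assuming it simply issues a probe request through the candidate proxy using the requests library; the function name, probe URLs, and timeout are illustrative assumptions, not taken from the repository.

import requests

# Assumed probe endpoints; the real easyproxy checks may use different URLs.
PROBE_URLS = {'HTTP': 'http://httpbin.org/ip', 'HTTPS': 'https://httpbin.org/ip'}


def is_proxy_available(proxy: str, protocol: str, timeout: float = 5.0) -> bool:
    """Return True if `proxy` (assumed 'host:port') answers a probe over `protocol`."""
    probe = PROBE_URLS.get(protocol)
    if probe is None:
        return False
    scheme = protocol.lower()
    try:
        # Route the probe through the candidate proxy; any status < 400 counts.
        response = requests.get(probe, timeout=timeout,
                                proxies={scheme: '{}://{}'.format(scheme, proxy)})
        return response.ok
    except requests.RequestException:
        return False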
aishee
[ { "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nimport os\n\nfrom sqlalchemy import *\n\nfrom migrate.exceptions import MigrateDeprecationWarning\nfrom migrate.tests import fixture\nfrom migrate.tests.fixture.warnings import catch_warnings\nfrom migrate.versioning.util import *\nfrom migrate.versioning import api\n\nimport warnings\n\nclass TestUtil(fixture.Pathed):\n\n def test_construct_engine(self):\n \"\"\"Construct engine the smart way\"\"\"\n url = 'sqlite://'\n\n engine = construct_engine(url)\n self.assert_(engine.name == 'sqlite')\n\n # keyword arg\n engine = construct_engine(url, engine_arg_encoding='utf-8')\n self.assertEquals(engine.dialect.encoding, 'utf-8')\n\n # dict\n engine = construct_engine(url, engine_dict={'encoding': 'utf-8'})\n self.assertEquals(engine.dialect.encoding, 'utf-8')\n\n # engine parameter\n engine_orig = create_engine('sqlite://')\n engine = construct_engine(engine_orig)\n self.assertEqual(engine, engine_orig)\n\n # test precedance\n engine = construct_engine(url, engine_dict={'encoding': 'iso-8859-1'},\n engine_arg_encoding='utf-8')\n self.assertEquals(engine.dialect.encoding, 'utf-8')\n\n # deprecated echo=True parameter\n try:\n # py 2.4 compatability :-/\n cw = catch_warnings(record=True)\n w = cw.__enter__()\n \n warnings.simplefilter(\"always\")\n engine = construct_engine(url, echo='True')\n self.assertTrue(engine.echo)\n\n self.assertEqual(len(w),1)\n self.assertTrue(issubclass(w[-1].category,\n MigrateDeprecationWarning))\n self.assertEqual(\n 'echo=True parameter is deprecated, pass '\n 'engine_arg_echo=True or engine_dict={\"echo\": True}',\n str(w[-1].message))\n\n finally:\n cw.__exit__()\n\n # unsupported argument\n self.assertRaises(ValueError, construct_engine, 1)\n\n def test_passing_engine(self):\n repo = self.tmp_repos()\n api.create(repo, 'temp')\n api.script('First Version', repo)\n engine = construct_engine('sqlite:///:memory:')\n \n api.version_control(engine, repo)\n api.upgrade(engine, repo)\n\n def test_asbool(self):\n \"\"\"test asbool parsing\"\"\"\n result = asbool(True)\n self.assertEqual(result, True)\n\n result = asbool(False)\n self.assertEqual(result, False)\n\n result = asbool('y')\n self.assertEqual(result, True)\n\n result = asbool('n')\n self.assertEqual(result, False)\n\n self.assertRaises(ValueError, asbool, 'test')\n self.assertRaises(ValueError, asbool, object)\n\n\n def test_load_model(self):\n \"\"\"load model from dotted name\"\"\"\n model_path = os.path.join(self.temp_usable_dir, 'test_load_model.py')\n\n f = open(model_path, 'w')\n f.write(\"class FakeFloat(int): pass\")\n f.close()\n\n try:\n # py 2.4 compatability :-/\n cw = catch_warnings(record=True)\n w = cw.__enter__()\n \n warnings.simplefilter(\"always\")\n\n # deprecated spelling\n FakeFloat = load_model('test_load_model.FakeFloat')\n self.assert_(isinstance(FakeFloat(), int))\n\n self.assertEqual(len(w),1)\n self.assertTrue(issubclass(w[-1].category,\n MigrateDeprecationWarning))\n self.assertEqual(\n 'model should be in form of module.model:User '\n 'and not module.model.User',\n str(w[-1].message))\n \n finally:\n cw.__exit__()\n\n FakeFloat = load_model('test_load_model:FakeFloat')\n self.assert_(isinstance(FakeFloat(), int))\n\n FakeFloat = load_model(FakeFloat)\n self.assert_(isinstance(FakeFloat(), int))\n\n def test_guess_obj_type(self):\n \"\"\"guess object type from string\"\"\"\n result = guess_obj_type('7')\n self.assertEqual(result, 7)\n\n result = guess_obj_type('y')\n self.assertEqual(result, True)\n\n result = 
guess_obj_type('test')\n self.assertEqual(result, 'test')\n", "id": "9586193", "language": "Python", "matching_score": 1.598140835762024, "max_stars_count": 0, "path": "migrate/tests/versioning/test_util.py" }, { "content": "import getopt, sys, os, urllib, urllib2, re, urlparse, os, threading, signal\nfrom optparse import OptionParser, OptionGroup\n\nclass Service(threading.Thread):\n def __init__(self, type, url, regex, exit_on_match, hash):\n threading.Thread.__init__(self)\n self.type = type\n self.url = url\n self.hash = hash if hash != None else \"\"\n self.regex = regex.replace('{HASH}', self.hash)\n self.args = {}\n self.name = urlparse.urlparse(url)[1]\n self.exit_on_match = exit_on_match\n\n if self.type == 'POST':\n self.args = self.__parseArgs()\n\n def run(self):\n global cracked\n cleartext = self.__crack(self.hash)\n if cleartext != None:\n print(\"!!!\\t The plaintext of %s is '%s' (found on %s)\" % (self.hash, cleartext, self.name))\n if self.exit_on_match == True:\n os.kill(os.getpid(), signal.SIGTERM)\n\n def __crack(self, hash):\n data = ''\n try:\n if self.type == 'GET':\n url = self.url.replace('{HASH}', hash)\n data = self.__exec_get(url)\n else:\n url = self.url.replace('{HASH}', hash)\n args = self.args\n for name, value in args.interitems():\n args[name] = value.replace('{HASH}', hash)\n data = self.__exec_post(url, args)\n except:\n pass\n return self.__xtract_data(data)\n\n def __xtract_data(self, data):\n m = re.search(self.regex, data)\n return m.group(1) if m is not None else None\n\n def __exec_get(self, url):\n return urllib2.urlopen(urllib2.Request(url)).read()\n def __exec_post(self, url):\n return urllib2.urlopen(urllib2.Request(url, urllib.urlencode(data))).read()\n def __parseArgs(self):\n m = re.search(\"([^\\[]+)\\[([^\\]]+)\\]\", self.url)\n self.url = m.group(1)\n params = m.group(2)\n params = params.split(',')\n args = {}\n\n for param in params:\n (k,v) = param.split(':')\n args[k] = v\n return args\ntry:\n parser = OptionParser(usage=\"usage: %prog [option] [--hash <hash>]\\n\\n\" + \"EXAMPLE:\\n\" + \" %prog --hash 7815696ecbf1c96e6894b779456d330e\\n\" + \"%prog --thread 10 --exit-first --hash 7815696ecbf1c96e6894b779456d330e\\n\" + \"%prog --input hashlist.txt\\n\" + \"%prog --list\")\n\n parser.add_option(\"-H\", \"--hash\", action=\"store\", dest=\"hash\", default=None, help=\"The hash to crack, mandatory.\")\n parser.add_option(\"-t\", \"--thread\", action=\"store\", dest=\"thread\", default=10, help=\"Specify how many threads to use, default 10\")\n parser.add_option(\"-e\", \"--exit-first\", action=\"store\", dest=\"exit_on_first\", default=False, help=\"Stop execution upon first positive match\")\n parser.add_option(\"-i\", \"--input\", action=\"store\", dest=\"input\", default=None, help=\"Read a list of hashes from the given file\")\n (o, args) = parser.parse_args()\n conf = open(\"md5.conf\", \"rt\")\n services = []\n hashes = []\n\n if o.input != None:\n o.exit_on_first = False\n hashlist = open(o.input, \"rt\")\n for line in hashlist:\n md5 = line.rstrip()\n if md5 != '':\n hashes.append(md5)\n elif o.hash != None:\n hashes.append(o.hash)\n else:\n parser.error(\"No hash specified!\")\n for line in conf:\n (type , url, regex) = line.rstrip().split('|')\n for md5_hash in hashes:\n services.append(Service(type, url, regex, o.exit_on_first, md5_hash))\n conf.close()\n\n i = 0\n for si,service in enumerate(services):\n print(\"Searching for '%s' on %s...\"%(service.hash, service.name))\n service.start()\n i += 1\n if i > o.thread or si 
>= len(services):\n service.join()\n i = 0\nexcept IOError as e:\n print e\nexcept:\n raise\n", "id": "7615450", "language": "Python", "matching_score": 0.34506726264953613, "max_stars_count": 0, "path": "md5crack.py" }, { "content": "\"\"\"\n Database schema version management.\n\"\"\"\nimport sys\nimport logging\n\nfrom sqlalchemy import (Table, Column, MetaData, String, Text, Integer,\n create_engine)\nfrom sqlalchemy.sql import and_\nfrom sqlalchemy import exc as sa_exceptions\nfrom sqlalchemy.sql import bindparam\n\nfrom migrate import exceptions\nfrom migrate.changeset import SQLA_07\nfrom migrate.versioning import genmodel, schemadiff\nfrom migrate.versioning.repository import Repository\nfrom migrate.versioning.util import load_model\nfrom migrate.versioning.version import VerNum\n\n\nlog = logging.getLogger(__name__)\n\nclass ControlledSchema(object):\n \"\"\"A database under version control\"\"\"\n\n def __init__(self, engine, repository):\n if isinstance(repository, basestring):\n repository = Repository(repository)\n self.engine = engine\n self.repository = repository\n self.meta = MetaData(engine)\n self.load()\n\n def __eq__(self, other):\n \"\"\"Compare two schemas by repositories and versions\"\"\"\n return (self.repository is other.repository \\\n and self.version == other.version)\n\n def load(self):\n \"\"\"Load controlled schema version info from DB\"\"\"\n tname = self.repository.version_table\n try:\n if not hasattr(self, 'table') or self.table is None:\n self.table = Table(tname, self.meta, autoload=True)\n\n result = self.engine.execute(self.table.select(\n self.table.c.repository_id == str(self.repository.id)))\n\n data = list(result)[0]\n except:\n cls, exc, tb = sys.exc_info()\n raise exceptions.DatabaseNotControlledError, exc.__str__(), tb\n\n self.version = data['version']\n return data\n\n def drop(self):\n \"\"\"\n Remove version control from a database.\n \"\"\"\n if SQLA_07:\n try:\n self.table.drop()\n except sa_exceptions.DatabaseError:\n raise exceptions.DatabaseNotControlledError(str(self.table))\n else:\n try:\n self.table.drop()\n except (sa_exceptions.SQLError):\n raise exceptions.DatabaseNotControlledError(str(self.table))\n\n def changeset(self, version=None):\n \"\"\"API to Changeset creation.\n\n Uses self.version for start version and engine.name\n to get database name.\n \"\"\"\n database = self.engine.name\n start_ver = self.version\n changeset = self.repository.changeset(database, start_ver, version)\n return changeset\n\n def runchange(self, ver, change, step):\n startver = ver\n endver = ver + step\n # Current database version must be correct! 
Don't run if corrupt!\n if self.version != startver:\n raise exceptions.InvalidVersionError(\"%s is not %s\" % \\\n (self.version, startver))\n # Run the change\n change.run(self.engine, step)\n\n # Update/refresh database version\n self.update_repository_table(startver, endver)\n self.load()\n\n def update_repository_table(self, startver, endver):\n \"\"\"Update version_table with new information\"\"\"\n update = self.table.update(and_(self.table.c.version == int(startver),\n self.table.c.repository_id == str(self.repository.id)))\n self.engine.execute(update, version=int(endver))\n\n def upgrade(self, version=None):\n \"\"\"\n Upgrade (or downgrade) to a specified version, or latest version.\n \"\"\"\n changeset = self.changeset(version)\n for ver, change in changeset:\n self.runchange(ver, change, changeset.step)\n\n def update_db_from_model(self, model):\n \"\"\"\n Modify the database to match the structure of the current Python model.\n \"\"\"\n model = load_model(model)\n\n diff = schemadiff.getDiffOfModelAgainstDatabase(\n model, self.engine, excludeTables=[self.repository.version_table]\n )\n genmodel.ModelGenerator(diff,self.engine).runB2A()\n\n self.update_repository_table(self.version, int(self.repository.latest))\n\n self.load()\n\n @classmethod\n def create(cls, engine, repository, version=None):\n \"\"\"\n Declare a database to be under a repository's version control.\n\n :raises: :exc:`DatabaseAlreadyControlledError`\n :returns: :class:`ControlledSchema`\n \"\"\"\n # Confirm that the version # is valid: positive, integer,\n # exists in repos\n if isinstance(repository, basestring):\n repository = Repository(repository)\n version = cls._validate_version(repository, version)\n table = cls._create_table_version(engine, repository, version)\n # TODO: history table\n # Load repository information and return\n return cls(engine, repository)\n\n @classmethod\n def _validate_version(cls, repository, version):\n \"\"\"\n Ensures this is a valid version number for this repository.\n\n :raises: :exc:`InvalidVersionError` if invalid\n :return: valid version number\n \"\"\"\n if version is None:\n version = 0\n try:\n version = VerNum(version) # raises valueerror\n if version < 0 or version > repository.latest:\n raise ValueError()\n except ValueError:\n raise exceptions.InvalidVersionError(version)\n return version\n\n @classmethod\n def _create_table_version(cls, engine, repository, version):\n \"\"\"\n Creates the versioning table in a database.\n\n :raises: :exc:`DatabaseAlreadyControlledError`\n \"\"\"\n # Create tables\n tname = repository.version_table\n meta = MetaData(engine)\n\n table = Table(\n tname, meta,\n Column('repository_id', String(250), primary_key=True),\n Column('repository_path', Text),\n Column('version', Integer), )\n\n # there can be multiple repositories/schemas in the same db\n if not table.exists():\n table.create()\n\n # test for existing repository_id\n s = table.select(table.c.repository_id == bindparam(\"repository_id\"))\n result = engine.execute(s, repository_id=repository.id)\n if result.fetchone():\n raise exceptions.DatabaseAlreadyControlledError\n\n # Insert data\n engine.execute(table.insert().values(\n repository_id=repository.id,\n repository_path=repository.path,\n version=int(version)))\n return table\n\n @classmethod\n def compare_model_to_db(cls, engine, model, repository):\n \"\"\"\n Compare the current model against the current database.\n \"\"\"\n if isinstance(repository, basestring):\n repository = Repository(repository)\n model = 
load_model(model)\n\n diff = schemadiff.getDiffOfModelAgainstDatabase(\n model, engine, excludeTables=[repository.version_table])\n return diff\n\n @classmethod\n def create_model(cls, engine, repository, declarative=False):\n \"\"\"\n Dump the current database as a Python model.\n \"\"\"\n if isinstance(repository, basestring):\n repository = Repository(repository)\n\n diff = schemadiff.getDiffOfModelAgainstDatabase(\n MetaData(), engine, excludeTables=[repository.version_table]\n )\n return genmodel.ModelGenerator(diff, engine, declarative).genBDefinition()\n", "id": "12123002", "language": "Python", "matching_score": 1.6627109050750732, "max_stars_count": 1, "path": "migrate/versioning/schema.py" }, { "content": "#!/usr/bin/env python3\n\n\"\"\"RSA\"\"\"\n\nimport math\nimport random\nfrom tkinter import *\nfrom tkinter import ttk\n\n#SPECIFICATION\n\n\"\"\"Public key\nP and Q are prime numbers\nN = P * Q\nM = ( P - 1) * (Q -1)\nC = number coprime to M, GCD(C,M) = 1\nPublic key = (N,C)\n\"\"\"\n\"\"\"\nPrivate key\nWe need U such that\nC * U + M * V = 1\nU is ]2,M[\n2 < U - K *M < M\nPRIVATE KEY = (U,N)\n\"\"\"\n\"\"\" ENCRYPTION\n1) Take the message\n2) Convert all the characters to ASCII\n3) Compute (ASCII)^C mod(N) for every ASCII code\n4) Message is encrypted\n\"\"\"\n\"\"\" DECRYPTION\n1) Take an encrypted message\n2) Apply (code)^U mod(N) for all characters\n3) Convert the ASCII codes back to letters\n4) Message is decrypted\n\"\"\"\n#KEY\nclass Key:\n def __init__(self):\n self.p = 0\n self.q = 0\n self.m = 0\n self.c = 0\n self.n = 0\n self.u = 0\n def generatePQ(self, size):\n self.p = self.generatePrimeNumber(size)\n self.q = self.generatePrimeNumber(size)\n \n def generateKey(self, size):\n self.generatePQ(size)\n self.generateKeyWithPQ()\n \n def generateKeyWithPQ(self):\n if self.isPrimeNumber(self.p) and self.isPrimeNumber(self.q):\n self.computeN()\n self.computeM()\n self.generateC()\n self.generateU()\n def computeN(self):\n self.n = self.p * self.q\n \n def computeM(self):\n self.m = (self.p -1) * (self.q -1)\n def generateC(self):\n c = 0\n while self.gcd(c, self.m) != 1:\n c = random.randrange(1, self.m)\n self.c = c\n def generateU(self):\n number = self.extEuclid(self.c, self.m)\n i = 0\n while number <= 2:\n number = number - i * self.m\n i = i -1\n self.u = number\n # Generate a random prime number\n def generatePrimeNumber(self, size):\n number = 0\n \n while not self.isPrimeNumber(number):\n number = random.randrange(2, 10 ** size)\n return number\n \n #Fermat's little theorem\n #Implementation from http://www.daniweb.com/software-development/python/code/216880/check-if-a-number-is-a-prime-number-python\n def isPrimeNumber(self, number):\n if number == 2:\n return True\n if not(number & 1):\n return False\n return pow(2, number - 1, number) == 1\n #Recursive implementation of the gcd algorithm\n def gcd(self, a, b):\n if b == 0:\n return a\n else:\n return self.gcd(b, a % b)\n #Implementation from the introduction to IT security course\n def extEuclid(self, a, b):\n m, n = a, b\n r, q = 0, 0\n s0, s1 = 1, 0\n t0, t1 = 0, 1\n \n while n != 0:\n q = int(m / n)\n r = m % n\n m = n\n n = r\n \n s = s0-s1 * q\n s0 = s1\n s1 = s\n \n t = t0-t1 * q\n t0 = t1\n t1 = t\n s = s0\n t = t0\n \n if m <0 :\n m = -m\n s = -s\n t = -t\n return s\n def resetValue(self):\n self.p = 0\n self.q = 0\n self.m = 0\n self.c = 0\n self.n = 0\n self.u = 0\n#RSA\nclass RSA:\n def __init__(self):\n self.clearMessage = \"\"\n self.encryptedMessage = \"\"\n self.key = Key()\n def computeEncription(self, letter):\n return 
str(self.modExponential(ord(letter), self.key.c, self.key.n))\n def computeDecription(self, number):\n return chr(self.modExponential(number, self.key.u, self.key.n))\n \n def encryption(self):\n self.encryptedMessage = \"\"\n for letter in self.clearMessage:\n self.encryptedMessage += self.computeEncription(letter) + \" \"\n def decryption(self):\n self.clearMessage = \"\"\n for number in self.encryptedMessage.split():\n self.clearMessage += self.computeDecription(int(number))\n \n def modExponential(self, a, b, c):\n return pow(a, b, c)\n\n#INTERFACE\nclass Interface(object):\n def __init__(self, root, rsa):\n self.rsa = rsa\n self.root = root\n \n #FRAME\n self.content = ttk.Frame(self.root)\n self.content['padding'] = (5, 10)\n \n self.separation = ttk.Frame(self.content, height = 50)\n self.separation['padding'] = (5, 10)\n \n #LABEL\n self.labelPublicKey = ttk.Label(self.content, text='Public key (C, N)')\n self.labelPrivateKey = ttk.Label(self.content, text='Private key (N, U)')\n \n self.labelP = ttk.Label(self.content, text='P ')\n self.labelQ = ttk.Label(self.content, text='Q ')\n self.labelN = ttk.Label(self.content, text='N ')\n self.labelC = ttk.Label(self.content, text='C ')\n self.labelU = ttk.Label(self.content, text='U ')\n \n #Button\n self.randomKey = ttk.Button(self.content, text = \"Random Key\", command=self.buttonRandomKey)\n self.computeKey = ttk.Button(self.content, text=\"Compute Key\", command=self.buttonComputeKey)\n \n self.encript = ttk.Button(self.content, text=\"Encrypt\", command=self.buttonEncrypt)\n self.decript = ttk.Button(self.content, text=\"Decrypt\", command=self.buttonDecrypt)\n \n self.reset = ttk.Button(self.content, text=\"Reset\", command=self.buttonReset)\n \n #INPUT\n self.sizeKey = StringVar()\n self.inputsizeKey = Spinbox(self.content, from_=1.0, to=300.0, textvariable=self.sizeKey, width=9)\n self.inputsizeKey.insert(0, 'Size of key... 
')\n \n self.inputP = Text(self.content, width=60, height=3)\n self.inputQ = Text(self.content, width=60, height=3)\n self.inputN = Text(self.content, width=60, height=3)\n self.inputC = Text(self.content, width=60, height=3)\n self.inputU = Text(self.content, width=60, height=3)\n \n self.clearText = Text(self.content, width=60, height=10)\n self.criptedText = Text(self.content, width=60, height=10)\n \n self.defaultValue()\n \n #EVENT\n self.inputN.bind('<KeyRelease>', lambda e: self.buttonSaveEncodeKey())\n self.inputC.bind('<KeyRelease>', lambda e: self.buttonSaveEncodeKey())\n self.inputU.bind('<KeyRelease>', lambda e: self.buttonSaveEncodeKey())\n \n #GRID\n self.content.grid(column=0, row=0)\n \n self.labelP.grid(column=0, row=1, padx=5, sticky=(N))\n self.labelQ.grid(column=0, row=1, sticky=(N))\n self.labelC.grid(column=0, row=1, sticky=(N))\n self.labelN.grid(column=0, row=1, sticky=(N))\n self.labelU.grid(column=0, row=1, sticky=(N))\n \n self.inputQ.grid(column=1, row=0)\n self.inputP.grid(column=1, row=1)\n self.computeKey.grid(column=1, row=2, pady=5)\n \n self.inputC.grid(column=1, row=3)\n self.inputN.grid(column=1, row=4)\n self.inputU.grid(column=1, row=5)\n \n self.separation.grid(column=1, row=6)\n \n self.clearText.grid(column=1, row=7)\n self.encript.grid(column=2, row=8, pady=5)\n self.decript.grid(column=2, row=9)\n self.criptedText.grid(column=1, row=10)\n \n self.randomKey.grid(column=2, row=0, padx=25, sticky=(S))\n self.inputsizeKey.grid(column=2, row=1, pady=4, sticky=(N))\n \n self.labelPublicKey.grid(column=2, row=3)\n self.labelPrivateKey.grid(column=2, row=5)\n \n self.reset.grid(column=2, row=10)\n \n def displayValKey(self):\n self.inputQ.delete('1.0', 'end')\n self.inputQ.insert('1.0', self.rsa.key.q)\n \n self.inputP.delete('1.0', 'end')\n self.inputP.insert('1.0', self.rsa.key.p)\n \n self.inputN.delete('1.0', 'end')\n self.inputN.insert('1.0', self.rsa.key.n)\n \n self.inputC.delete('1.0', 'end')\n self.inputC.insert('1.0', self.rsa.key.c)\n \n self.inputU.delete('1.0', 'end')\n self.inputU.insert('1.0', self.rsa.key.u)\n \n \n def buttonRandomKey(self):\n size = int(self.sizeKey.get())\n self.rsa.key.generateKey(size)\n self.displayValKey()\n \n def buttonComputeKey(self):\n self.rsa.key.p = int(self.inputP.get('1.0', 'end'))\n self.rsa.key.q = int(self.inputQ.get('1.0', 'end'))\n self.rsa.key.generateKeyWithPQ()\n self.displayValKey()\n \n def buttonSaveEncodeKey(self):\n self.rsa.key.n = int(self.inputN.get('1.0', 'end'))\n self.rsa.key.c = int(self.inputC.get('1.0', 'end'))\n self.rsa.key.u = int(self.inputU.get('1.0', 'end'))\n self.displayValKey()\n \n def buttonEncrypt(self):\n self.rsa.clearMessage = str(self.clearText.get('1.0', 'end'))\n self.rsa.encryption()\n self.criptedText.delete('1.0', 'end')\n self.criptedText.insert('1.0', self.rsa.encryptedMessage)\n \n def buttonDecrypt(self):\n self.rsa.encryptedMessage = str(self.criptedText.get('1.0', 'end'))\n self.rsa.decryption()\n self.clearText.delete('1.0', 'end')\n self.clearText.insert('1.0', self.rsa.clearMessage)\n \n \n def buttonReset(self):\n self.defaultValue()\n self.rsa.key.resetValue()\n \n def defaultValue(self):\n self.inputP.delete('1.0', 'end')\n self.inputQ.delete('1.0', 'end')\n self.inputN.delete('1.0', 'end')\n self.inputC.delete('1.0', 'end')\n self.inputU.delete('1.0', 'end')\n self.clearText.delete('1.0', 'end')\n self.criptedText.delete('1.0', 'end')\n \n self.inputP.insert('1.0', 'Insert P...')\n self.inputQ.insert('1.0', 'Insert Q...')\n self.inputN.insert('1.0', 
'Insert N...')\n self.inputU.insert('1.0', 'Insert U...')\n self.inputC.insert('1.0', 'Insert C...')\n self.clearText.insert('1.0', 'Insert clear text here...')\n self.criptedText.insert('1.0', 'Insert encrypted text here')\n \n def mainloop(self):\n self.root.mainloop()\n \nrsa=RSA()\nroot= Tk()\nroot.title(\"RSA Encrypt\")\ninterface = Interface(root, rsa)\ninterface.mainloop()\n \n \n \n \n \n \n \n ", "id": "8061275", "language": "Python", "matching_score": 0.4537365138530731, "max_stars_count": 0, "path": "rsa.py" }, { "content": "#!/usr/bin/env python\n\nimport random\nimport string\nimport crypt\n\n\ndef genPass(size=16, chars=string.ascii_letters + string.digits):\n return ''.join(random.choice(chars) for _ in range(size))\n\n\ndef genSalt(salt):\n '''\n Generate a random salt.\n '''\n ret = ''\n if not salt:\n with open('/dev/urandom', 'rb') as urandom:\n while True:\n byte = urandom.read(1)\n if byte in (\n 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'\n './0123456789'):\n ret += byte\n if len(ret) == 16:\n break\n return '$6$%s' % ret\n else:\n return '$6$%s' % salt\n\n\ndef main():\n module = AnsibleModule(\n argument_spec=dict(\n salt=dict(required=False, default=None),\n password=dict(\n no_log=True, required=False, default='random', type='str'),\n ))\n salt = module.params['salt']\n password = module.params['password']\n if password == 'random':\n password = <PASSWORD>()\n sha512Salt = genSalt(salt)\n saltedPass = crypt.crypt(password, sha512Salt)\n module.exit_json(changed=False, passhash=saltedPass)\n\n\nfrom ansible.module_utils.basic import *\nif __name__ == '__main__':\n main()\n", "id": "5108135", "language": "Python", "matching_score": 0.5208575129508972, "max_stars_count": 2, "path": "library/grub_crypt.py" }, { "content": "# -*- coding: utf-8 -*-\n\nfrom app import app, db\nfrom app.models import User, Book, Log, Role\n\napp_ctx = app.app_context()\napp_ctx.push()\ndb.create_all()\nRole.insert_roles()\n\nadmin = User(name=u'aishee', email='<EMAIL>', password='<PASSWORD>', major='administrator',\n headline=u\"A temporary administrator\", about_me=u\"He graduated from the Department of Management, is interested in reading, and therefore works part-time as a librarian.\")\nuser1 = User(name=u'test1', email='<EMAIL>', password='<PASSWORD>', major='Computer Science', headline=u\"Ordinary student\")\nuser2 = User(name=u'test', email='<EMAIL>', password='<PASSWORD>')\nuser3 = User(name=u'test2', email='<EMAIL>', password='<PASSWORD>')\nuser4 = User(name=u'test3', email='<EMAIL>', password='<PASSWORD>')\n\nbook1 = Book(title=u\"Flask Web\", subtitle=u\"XXX\", author=u\"<NAME>\", isbn='9787115373991',\n tags_string=u\"xx,xx,xxx\", image='http://img3.douban.com/lpic/s27906700.jpg',\n summary=u\"\"\"\n# xx.\n\n* xx;\n* xx;\n* xx;\n* xx;\n* xx;\n* xx;\n* xx;\n* xx;\n\"\"\")\nbook2 = Book(title=u\"xx\", subtitle=u\"xx xx\", author=u\"侯捷\", isbn='9787560926995',\n tags_string=u\"xx,xx,C++\", image='http://img3.doubanio.com/lpic/s1092076.jpg',\n summary=u\"\"\"* xx\n* xx Black xx xx/xx xx\"\"\")\nbook3 = Book(title=u\"xx\", subtitle=u\"xx\",\n author=\"<NAME> / <NAME> / <NAME> / <NAME> \", isbn=\"9787111251217\",\n tags_string=u\"xx,xx\", image='http://img3.douban.com/lpic/s3392161.jpg',\n summary=u\"\"\"* xx\n* xx\"\"\")\nbook4 = Book(title=u\"xx\", author=\"<NAME> / <NAME>\", isbn=\"9787111321330\",\n tags_string=u\"xx,xx\", image='http://img3.douban.com/lpic/s4510534.jpg',\n summary=u\"\"\"* xx/xx\n* xx\n* xx\"\"\")\nbook5 = Book(title=u\"xx#\", subtitle=u\"C#5.xx\", author=u\"xx (<NAME>) 
/ xx (<NAME>)\",\n isbn=\"9787517010845\", tags_string=u\"xx,xx,C#\", image='http://img3.douban.com/lpic/s28152290.jpg',\n summary=u\"\"\"* xx#——c#5.xx.xx.xx#5.xx#4.xx\n* xx#——c#5.xx#xx#xx#xx\"\"\")\nbook6 = Book(title=u\"xx\",\n author=\"<NAME> / <NAME> / <NAME> / <NAME>\",\n isbn=\"9787111187776\", tags_string=u\"xx,xx\", image='http://img3.doubanio.com/lpic/s1959967.jpg',\n summary=u\"xx\")\nlogs = [Log(user1, book2), Log(user1, book3), Log(user1, book4), Log(user1, book6),\n Log(user2, book1), Log(user2, book3), Log(user2, book5),\n Log(user3, book2), Log(user3, book5)]\n\ndb.session.add_all([admin, user1, user2, user3, user4, book1, book2, book3, book4, book5, book6] + logs)\ndb.session.commit()\n\napp_ctx.pop()\n", "id": "12248916", "language": "Python", "matching_score": 1.4990421533584595, "max_stars_count": 0, "path": "app/db_fill.py" }, { "content": "from app import db\nfrom app.models import User, Book, Comment, Log, Permission\nfrom flask import render_template\nfrom flask.ext.login import current_user\nfrom . import main\nfrom ..book.forms import SearchForm\n\n\n@main.app_context_processor\ndef inject_permissions():\n return dict(Permission=Permission)\n\n\n@main.route('/')\ndef index():\n search_form = SearchForm()\n the_books = Book.query\n if not current_user.can(Permission.UPDATE_BOOK_INFORMATION):\n the_books = the_books.filter_by(hidden=0)\n popular_books = the_books.outerjoin(Log).group_by(Book.id).order_by(db.func.count(Log.id).desc()).limit(5)\n popular_users = User.query.outerjoin(Log).group_by(User.id).order_by(db.func.count(Log.id).desc()).limit(5)\n recently_comments = Comment.query.filter_by(deleted=0).order_by(Comment.edit_timestamp.desc()).limit(5)\n return render_template(\"index.html\", books=popular_books, users=popular_users, recently_comments=recently_comments,\n search_form=search_form)\n", "id": "2139712", "language": "Python", "matching_score": 0.4308030307292938, "max_stars_count": 0, "path": "app/main/index/views.py" }, { "content": "# -*- coding:utf-8 -*-\nfrom flask.ext.wtf import Form\nfrom wtforms import StringField, SubmitField\nfrom wtforms.validators import Length, DataRequired, URL\nfrom flask.ext.pagedown.fields import PageDownField\nfrom flask.ext.wtf.file import FileField, FileAllowed\nfrom app import avatars\n\n\nclass EditProfileForm(Form):\n name = StringField(u'Username', validators=[DataRequired(message=u\"The forgot to fill in the!\"), Length(1, 64, message=u\"A length of 1-64 characters\")])\n major = StringField(u'Major', validators=[Length(0, 128, message=u\"A length of 0-128 characters\")])\n headline = StringField(u'Introduce', validators=[Length(0, 32, message=u\"A length of 32 characters or less\")])\n about_me = PageDownField(u\"Personal profile\")\n submit = SubmitField(u\"Save Changes\")\n\n\nclass AvatarEditForm(Form):\n avatar_url = StringField('', validators=[Length(1, 100, message=u\"Length limit of 100 characters or less\"), URL(message=u\"Please fill in the correct URL\")])\n submit = SubmitField(u\"Save\")\n\n\nclass AvatarUploadForm(Form):\n avatar = FileField('', validators=[FileAllowed(avatars, message=u\"Only allowed to upload pictures\")])\n", "id": "7248373", "language": "Python", "matching_score": 4.034473896026611, "max_stars_count": 0, "path": "app/main/user/forms.py" }, { "content": "# -*- coding:utf-8 -*-\nfrom app.models import Book\nfrom flask.ext.pagedown.fields import PageDownField\nfrom flask.ext.wtf import Form\nfrom wtforms import StringField, SubmitField, IntegerField\nfrom wtforms import 
ValidationError\nfrom wtforms.validators import Length, DataRequired, Regexp\n\n\nclass EditBookForm(Form):\n isbn = StringField(u\"ISBN\",\n validators=[DataRequired(message=u\"The forgot to fill in the!\"),\n Regexp('[0-9]{13,13}', message=u\"ISBN must be 13 digits\")])\n title = StringField(u\"Title\",\n validators=[DataRequired(message=u\"The forgot to fill in the!\"), Length(1, 128, message=u\"A length of 1-128 characters\")])\n origin_title = StringField(u\"Original name\", validators=[Length(0, 128, message=u\"A length of 0-128 characters\")])\n subtitle = StringField(u\"Subtitle\", validators=[Length(0, 128, message=u\"A length of 0-128 characters\")])\n author = StringField(u\"Author\", validators=[Length(0, 128, message=u\"A length of 0-64 characters\")])\n translator = StringField(u\"Translator\",\n validators=[Length(0, 64, message=u\"A length of 0-64 characters\")])\n publisher = StringField(u\"Public\", validators=[Length(0, 64, message=u\"A length of 0-64 characters\")])\n image = StringField(u\"Images address\", validators=[Length(0, 128, message=u\"A length of 0-128 characters\")])\n pubdate = StringField(u\"Public Date\", validators=[Length(0, 32, message=u\"A length of 0-32 characters\")])\n tags = StringField(u\"Label\", validators=[Length(0, 128, message=u\"A length of 0-128 characters\")])\n pages = IntegerField(u\"Pages\")\n price = StringField(u\"Pricing\", validators=[Length(0, 64, message=u\"A length of 0-32 characters\")])\n binding = StringField(u\"Bind\", validators=[Length(0, 16, message=u\"A length of 0-16 characters\")])\n numbers = IntegerField(u\"Collection\", validators=[DataRequired(message=u\"The forgot to fill in the!\")])\n summary = PageDownField(u\"Introduction\")\n catalog = PageDownField(u\"Table of Contents\")\n submit = SubmitField(u\"Save Changes\")\n\n\nclass AddBookForm(EditBookForm):\n def validate_isbn(self, filed):\n if Book.query.filter_by(isbn=filed.data).count():\n raise ValidationError(u'Already exists same ISBN,please check carefully whether the book inventory.')\n\n\nclass SearchForm(Form):\n search = StringField(validators=[DataRequired()])\n submit = SubmitField(u\"Search\")\n\n\n", "id": "10522402", "language": "Python", "matching_score": 3.9441113471984863, "max_stars_count": 0, "path": "app/main/book/forms.py" }, { "content": "# -*- coding:utf-8 -*-\nfrom app import db\nfrom app.models import User\nfrom flask.ext.wtf import Form\nfrom wtforms import StringField, PasswordField, SubmitField, BooleanField\nfrom wtforms import ValidationError\nfrom wtforms.validators import Email, Length, DataRequired, EqualTo\n\n\nclass LoginForm(Form):\n email = StringField('Email',\n validators=[DataRequired(message=u\"The forgot to fill in the!\"), Length(1, 64), Email(message=u\"Are you sure this is Email ?\")])\n password = PasswordField(u'Password', validators=[DataRequired(message=u\"The forgot to fill in the!\"), Length(6, 32)])\n remember_me = BooleanField(u\"Keep me signed in\", default=True)\n submit = SubmitField(u'Sign in')\n\n\nclass RegistrationForm(Form):\n email = StringField('Email',\n validators=[DataRequired(message=u\"The forgot to fill in the!\"), Length(1, 64), Email(message=u\"Are you sure this is Email ?\")])\n name = StringField(u'Username', validators=[DataRequired(message=u\"The forgot to fill in the!\"), Length(1, 64)])\n password = PasswordField(u'Password',\n validators=[DataRequired(message=u\"The forgot to fill in the!\"), EqualTo('<PASSWORD>', message=u'Passwords must match'),\n Length(6, 32)])\n password2 = 
PasswordField(u'<PASSWORD>', validators=[DataRequired(message=u\"The forgot to fill in the!\")])\n submit = SubmitField(u'Register')\n\n def validate_email(self, filed):\n if User.query.filter(db.func.lower(User.email) == db.func.lower(filed.data)).first():\n raise ValidationError(u'The Email is already registered')\n\n\nclass ChangePasswordForm(Form):\n old_password = PasswordField(u'Old password', validators=[DataRequired(message=u\"The forgot to fill in the!\")])\n new_password = PasswordField(u'New password', validators=[DataRequired(message=u\"The forgot to fill in the!\"),\n EqualTo('confirm_password', message=u'Passwords must match'),\n Length(6, 32)])\n confirm_password = PasswordField(u'<PASSWORD>', validators=[DataRequired(message=u\"The forgot to fill in the!\")])\n submit = SubmitField(u\"Save Password\")\n\n def validate_old_password(self, filed):\n from flask.ext.login import current_user\n if not current_user.verify_password(filed.data):\n raise ValidationError(u'Old password is wrong')\n", "id": "2733890", "language": "Python", "matching_score": 3.4845681190490723, "max_stars_count": 0, "path": "app/main/auth/forms.py" }, { "content": "# -*- coding:utf-8 -*-\nfrom flask.ext.wtf import Form\nfrom wtforms import SubmitField, TextAreaField\nfrom wtforms.validators import Length, DataRequired\n\n\nclass CommentForm(Form):\n comment = TextAreaField(u\"Your book review\",\n validators=[DataRequired(message=u\"The content can not be blank\"), Length(1, 1024, message=u\"Book review length is limited to 1024 characters or fewer\")])\n submit = SubmitField(u\"Release\")\n", "id": "10096771", "language": "Python", "matching_score": 2.5036463737487793, "max_stars_count": 0, "path": "app/main/comment/forms.py" } ]
1.598141
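The rsa.py file above specifies per-character RSA: encryption computes (ASCII)^C mod N and decryption computes (code)^U mod N. Below is a worked round-trip of that scheme; the fixed textbook primes and the modular-inverse call pow(c, -1, m) (Python 3.8+) are illustration choices, not taken from the file, which generates its primes randomly and derives U with the extended Euclidean algorithm.

# Tiny RSA round-trip matching rsa.py's scheme; p and q are fixed for readability.
p, q = 61, 53
n = p * q              # N = P * Q = 3233
m = (p - 1) * (q - 1)  # M = (P - 1) * (Q - 1) = 3120
c = 17                 # public exponent with GCD(C, M) == 1
u = pow(c, -1, m)      # private exponent U with C * U mod M == 1, here 2753

message = 'RSA'
cipher = [pow(ord(ch), c, n) for ch in message]            # (ASCII)^C mod N
plain = ''.join(chr(pow(code, u, n)) for code in cipher)   # (code)^U mod N
assert plain == message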
gustavotcustodio
[ { "content": "import re\nimport os\nimport numpy as np\nimport pandas as pd\nfrom lxml import etree\nfrom stemming.porter2 import stem\nfrom nltk.tokenize import word_tokenize\nfrom nltk.corpus import stopwords\nfrom config import xmls_dir, preprocessed_dir\n\nABSTRACT = 0\nINTRODUCTION = 1\nCONCLUSION = 2\nOTHERS = 3\n\n\ndef get_keywords(xml_paper):\n return [keyword.text for keyword in\n xml_paper.find('keywords').findall('keyword')]\n\n\ndef get_section_sentences(xml_paper, section=OTHERS):\n if section == ABSTRACT:\n xml_abstract = xml_paper.find('abstract')\n return [sent.text for sent in xml_abstract.findall('sentence')]\n elif section == INTRODUCTION:\n return [sent.text for sent in xml_paper.findall('section')[0]]\n elif section == CONCLUSION:\n return [sent.text for sent in xml_paper.findall('section')[-1]]\n else:\n return [sent.text for xml_section in xml_paper.findall('section')[1:-1]\n for sent in xml_section.findall('sentence')]\n return []\n\n\ndef get_section_similarity(tokens_in_sentence, tokens_in_section):\n \"\"\" Computes the similarity of a paper' section with a sentence.\n\n Parameters\n ----------\n tokens_in_sentence: list\n All tokens in the sentence.\n tokens_in_section: list\n All tokens in the selected section.\n\n Returns\n -------\n float\n The similarity measure\n \"\"\"\n matches = [1 if t in tokens_in_section else 0 for t in tokens_in_sentence]\n if len(matches) <= 0:\n return 0\n return 2.0*sum(matches) / (len(tokens_in_sentence)+len(tokens_in_section))\n\n\ndef calc_column_values(list_tokenized_sents, tokenized_sents_section):\n \"\"\" Get the attribute values of a single column in the attribute-value\n table.\n\n Parameters\n ----------\n list_tokenized_sents: list(list(string))\n List of tokens from each sentence in the paper (including highlights).\n tokenized_sents_section: list(list(string))\n List of tokens from each sentence in a specific section from the paper.\n\n Returns\n -------\n list(float)\n Values of a column in the attribute-value table.\n \"\"\"\n tokens_in_section = [token for tokens_sentence in tokenized_sents_section\n for token in tokens_sentence]\n\n return [get_section_similarity(tokens_in_sentence, tokens_in_section)\n for tokens_in_sentence in list_tokenized_sents]\n\n\ndef get_words_frequency(list_tokenized_sents):\n \"\"\" Get the frequency of each word inside the paper excluding keywords.\n\n Returns\n -------\n list(float)\n The sum of word frequency for each sentence.\n \"\"\"\n all_words = np.array([token for tokenized_sentence in list_tokenized_sents\n for token in tokenized_sentence])\n # Sum the number of occurences of a word in the text and divides by the\n # total number of words\n unique_words, count = np.unique(all_words, return_counts=True)\n freqs = [float(n) / len(unique_words) for n in count]\n dict_words_freqs = dict(zip(unique_words, freqs))\n\n # Use the dictionary to get the sum of word frequencies for each sentence.\n return [sum(dict_words_freqs[w] for w in tokenized_sentence)\n for tokenized_sentence in list_tokenized_sents]\n\n\ndef normalize(columns_values):\n \"\"\" Normalize the column values from 0 to 1. 
\"\"\"\n max_val = max(columns_values)\n min_val = min(columns_values)\n if max_val == min_val: # Avoid dividing by zero\n return columns_values\n return [(val-min_val) / (max_val-min_val) for val in columns_values]\n\n\ndef get_attribute_value_table(tokenized_keywords, tokenized_sents_abstract,\n tokenized_sents_intro, tokenized_sents_conclu,\n tokenized_sents_other):\n \"\"\" Creates an attribute-value table containing the similarity degree\n between a sentence and a section belonging to the paper, where each row\n represents a sentence and each column represents a section.\n \"\"\"\n list_tokenized_sents = tokenized_sents_abstract + tokenized_sents_intro + \\\n tokenized_sents_other + tokenized_sents_conclu\n attrib_value_table = {}\n attrib_value_table['abstract'] = calc_column_values(\n list_tokenized_sents, tokenized_sents_abstract)\n\n for i in range(len(tokenized_keywords)):\n attrib_value_table['keyword%d' % i] = calc_column_values(\n list_tokenized_sents, tokenized_keywords[i])\n\n attrib_value_table['introduction'] = calc_column_values(\n list_tokenized_sents, tokenized_sents_intro)\n attrib_value_table['conclusion'] = calc_column_values(\n list_tokenized_sents, tokenized_sents_conclu)\n attrib_value_table['text'] = calc_column_values(\n list_tokenized_sents, tokenized_sents_other)\n # Get the sum of word frequency in each sentence\n attrib_value_table['word freq. in sentence'] = get_words_frequency(\n list_tokenized_sents)\n # Normalize columns\n for col in attrib_value_table.keys():\n attrib_value_table[col] = normalize(attrib_value_table[col])\n return attrib_value_table\n\n\ndef clean_text_and_tokenize(text):\n words = word_tokenize(text)\n words_lower = map(lambda w: w.lower(), words)\n words_no_stop = filter(lambda w: w not in stopwords.words('english'),\n words_lower)\n words_no_symbols = filter(re.compile(r'[a-z1-9].*').search, words_no_stop)\n return list(map(stem, words_no_symbols))\n\n\ndef preprocess():\n print('Starting preprocessing...')\n for xml in os.listdir(xmls_dir):\n with open(os.path.join(xmls_dir, xml)) as xmlreader:\n xml_paper = etree.parse(xmlreader)\n keywords = get_keywords(xml_paper)\n sentences_abstract = get_section_sentences(xml_paper, ABSTRACT)\n sentences_intro = get_section_sentences(xml_paper, INTRODUCTION)\n sentences_conclusion = get_section_sentences(xml_paper, CONCLUSION)\n other_sentences = get_section_sentences(xml_paper)\n\n # Get tokenized sentences for each section\n tokens_keywords = [clean_text_and_tokenize(k) for k in keywords]\n tokens_abstract = [clean_text_and_tokenize(s)\n for s in sentences_abstract]\n tokens_intro = [clean_text_and_tokenize(s) for s in sentences_intro]\n tokens_conclu = [clean_text_and_tokenize(s)\n for s in sentences_conclusion]\n tokens_other = [clean_text_and_tokenize(s) for s in other_sentences]\n\n # Get attribute-value table\n attrib_value_table = get_attribute_value_table(\n tokens_keywords, tokens_abstract, tokens_intro, tokens_conclu,\n tokens_other)\n # Saves the preprocessed text to a csv value\n outfile = os.path.join(preprocessed_dir, xml.replace('.xml', '.csv'))\n pd.DataFrame(attrib_value_table).to_csv(outfile)\n print('%s saved successfully.' 
% xml.replace('.xml', '.csv'))\n\n\nif __name__ == '__main__':\n preprocess()\n", "id": "5080403", "language": "Python", "matching_score": 4.806942462921143, "max_stars_count": 1, "path": "SaKS_DataClassification/preprocessing.py" }, { "content": "from nltk.tokenize import sent_tokenize, word_tokenize\nfrom nltk.corpus import stopwords\nfrom nltk.cluster.kmeans import KMeansClusterer\nfrom scipy.spatial import distance\nfrom stemming.porter2 import stem\nimport pandas as pd\nimport numpy as np\nimport re\nimport os\nimport io\nimport math\nimport functools\n\n\ndef get_max_number_keywords(list_of_keywords):\n n_keywords = []\n\n for keywords in list_of_keywords:\n n_keywords.append(len(keywords.split(',')))\n\n return max(n_keywords)\n\n\ndef get_words_frequency(full_text):\n # Counting words\n\n words_no_symbols = clean_text_and_tokenize(full_text)\n\n final_words, count = np.unique(words_no_symbols, return_counts=True)\n\n count = map(lambda n: float(n)/len(final_words), count)\n\n return zip(final_words, count)\n\n\ndef clean_text_and_tokenize(text):\n words = word_tokenize(text)\n\n words_lower = map(lambda w: w.lower(), words)\n words_no_stop = filter(lambda w: w not in stopwords.words('english'),\n words_lower)\n words_no_symbols = filter(re.compile(r'[a-z1-9].*').search, words_no_stop)\n\n return map(stem, words_no_symbols)\n\n\ndef sum_word_freq(words_in_sentence, word_freq):\n # Sum the frequency of words in a sentence\n n_words = len(words_in_sentence)\n\n sum_freq = sum([word_freq[w]/n_words for w in words_in_sentence\n if w in word_freq])\n return sum_freq\n\n\ndef get_keywords_similarity(words_in_sentence, keywords):\n keywords_match = []\n\n for words in keywords:\n matches = map(lambda w: 1 if w in words_in_sentence else 0, words)\n keywords_match.append(2.0 * sum(matches) / (\n len(words) + len(words_in_sentence)))\n return keywords_match\n\n\ndef get_section_similarity(words_in_sentence, words_in_section):\n matches = map(lambda w: 1 if w in words_in_section else 0,\n words_in_sentence)\n if len(matches) <= 0:\n return 0\n return 2.0 * sum(matches)/(len(words_in_sentence) + len(words_in_section))\n\n\ndef get_title(text):\n return text.split('\\n')[0]\n\n\ndef get_highlights(file_path):\n \"\"\" Read the txt file with the research highlights of the respective files\n \"\"\"\n text_file = io.open(file_path, mode='r', encoding='utf-8')\n highlights = text_file.read().split('\\n')\n # highlights = '^~_'.join(text_file.read().split('\\n'))\n text_file.close()\n return highlights\n\n\ndef get_session_lines(text, session):\n lines = text.split('\\n')\n\n if session == 'a': # abstract\n r_start = re.compile(\"^Abstract$\")\n r_end = re.compile(\"Keywords|Abbreviations\")\n\n elif session == 'i': # introduction\n r_start = re.compile(r'1.\\s+Introduction\\s*')\n r_end = re.compile(r'2.\\s+[A-Z0-9][a-zA-Z0-9]+.*')\n\n else: # conclusion\n r_start = re.compile(r'[1-9][0-9]?.\\s+(Conclu.*|Discussion.*|Summary'\n '*|.*conclu.*|.*future.*.|Results.*|Final.*)')\n r_end = re.compile(r'(Append.*|^1$)')\n session_lines = []\n candidate_sessions = []\n found_session = False\n\n for i in range(len(lines)):\n if r_start.match(lines[i]):\n candidate_sessions.append(i)\n found_session = True\n if found_session:\n session_lines.append(candidate_sessions[-1])\n i = session_lines[0] + 1\n\n while i < len(lines) and not(r_end.match(lines[i])):\n session_lines.append(i)\n i += 1\n return session_lines\n\n\ndef extract_keywords(text):\n \"\"\" After 
finding the string \"Keywords\", each line\n is a keyword until an empty line is found \"\"\"\n keywords = list()\n reading_keywords = False\n all_lines = text.split('\\n')\n\n for line in all_lines:\n if 'Keywords' in line:\n reading_keywords = True\n\n # nothing in line\n elif not line and reading_keywords:\n return ','.join(keywords)\n\n elif reading_keywords:\n keywords.append(line)\n return ','.join(keywords)\n\n\ndef extract_content(path):\n \"\"\"\n Extracts the keywords, highlights and the text in a article\n 'path': name of the file\n \"\"\"\n article = io.open(path, mode=\"r\", encoding=\"utf-8\")\n\n abstract, introduction, conclusion, final_text = '', '', '', ''\n\n full_text = article.read()\n full_text_split = np.array(full_text.split('\\n'))\n\n abstract_lines = get_session_lines(full_text, 'a')\n abstract = '\\n'.join(full_text_split[abstract_lines])\n\n # get the lines containing the introduction\n intro_lines = get_session_lines(full_text, 'i')\n introduction = '\\n'.join(full_text_split[intro_lines])\n\n text_without_intro = '\\n'.join(full_text_split[(intro_lines[-1]+1):])\n text_without_intro_split = np.array(text_without_intro.split('\\n'))\n\n conclu_lines = get_session_lines(text_without_intro, 'c')\n\n if conclu_lines:\n conclusion = '\\n'.join(text_without_intro_split[conclu_lines])\n\n text_without_conclu_1 = '\\n'.join(text_without_intro_split[\n 0:conclu_lines[0]])\n text_without_conclu_2 = '' if(conclu_lines[-1]+1) >= \\\n len(text_without_intro_split) else \\\n '\\n'.join(text_without_intro_split[(conclu_lines[-1]+1):])\n\n final_text = text_without_conclu_1 + text_without_conclu_2\n else:\n final_text = text_without_intro\n\n return get_title(full_text), extract_keywords(full_text), abstract, \\\n introduction, conclusion, final_text\n\n\ndef create_sentences_table(list_of_files, highlights=False):\n if highlights:\n cols = ['title', 'keywords', 'abstract', 'introduction', 'conclusion',\n 'text', 'highlights']\n df = pd.DataFrame([list(extract_content(f)) + [get_highlights(f)]\n for f in list_of_files], columns=cols)\n else:\n cols = ['title', 'keywords', 'abstract', 'introduction', 'conclusion',\n 'text']\n df = pd.DataFrame([list(extract_content(f)\n ) for f in list_of_files], columns=cols)\n\n df.to_csv(\"articles_highlights.csv\", sep='\\t', encoding='utf-8',\n index=False)\n\n\ndef calc_df(word, sentences):\n n_sents_with_word = 0\n for sent in sentences:\n n_sents_with_word += 1 if word in sent else 0\n return n_sents_with_word\n\n\ndef calc_tf_idf_word(word, sentences):\n df = calc_df(word, sentences)\n N = len(sentences)\n\n tfidf_vals = []\n\n for sent in sentences:\n tf = float(sent.count(word)) / len(sent)\n\n idf = math.log(float(N) / df)\n\n tfidf_vals.append(tf * idf)\n\n return np.array(tfidf_vals)\n\n\ndef create_bag_of_words(tokenized_sentences):\n\n word_list = np.concatenate(tokenized_sentences)\n word_list = np.unique(word_list)\n\n n_sents = len(tokenized_sentences)\n n_words = word_list.shape[0]\n\n bag_of_words = np.zeros((n_sents, n_words))\n for w in range(n_words):\n bag_of_words[:, w] = calc_tf_idf_word(word_list[w],\n tokenized_sentences)\n return bag_of_words\n\n\ndef create_sents_vector(tokenized_sentences, sentences_vectors,\n sents_in_section, keywords, word_freq):\n for s in tokenized_sentences:\n # Add sentence to the cluster\n\n keywords_match = get_keywords_similarity(s, keywords)\n\n # get sentence's degree of similarity with the abstract\n abstract_match = get_section_similarity(s, functools.reduce(\n lambda x, y: x+y, 
sents_in_section['abstract']))\n\n intro_match = get_section_similarity(s, functools.reduce(\n lambda x, y: x+y, sents_in_section['introduction']))\n\n text_match = get_section_similarity(s, functools.reduce(\n lambda x, y: x+y, sents_in_section['text']))\n\n conclu_match = get_section_similarity(s, functools.reduce(\n lambda x, y: x+y, sents_in_section['conclusion']))\n\n # sum of freq. of words in the sentence\n word_freq_sentence = sum_word_freq(s, word_freq)\n\n index = len(sentences_vectors)\n sentences_vectors.loc[index] = [abstract_match] + keywords_match + \\\n [intro_match, text_match, conclu_match, word_freq_sentence]\n\n\ndef cluster_sents(sents_vecs, n_clusters):\n kclusterer = KMeansClusterer(n_clusters, repeats=1,\n distance=distance.euclidean,\n avoid_empty_clusters=True)\n labels = kclusterer.cluster(sents_vecs.values, assign_clusters=True)\n\n centroids = np.array(kclusterer.means())\n\n return np.array(labels), centroids\n\n\ndef count_num_sents_cluster(sents_vectors, sections_sents, n_clusters):\n \"\"\"\n Cluster sentences and count the number of times that sentences from each\n section appear in each cluster.\n Ex: 4 sents from introduction and 3 sentences from conclusion in cluster x.\n \"\"\"\n labels, centroids = cluster_sents(sents_vectors, n_clusters)\n sections = ['abstract', 'introduction', 'conclusion', 'text']\n\n sents_cluster_values = []\n n_sents_by_cluster = []\n\n for c in range(n_clusters):\n n_sents = {}\n\n for sec in sections:\n n_sents[sec] = 0.0\n\n # Get indices in c cluster\n indices_cluster = np.where(labels == c)[0]\n\n for i in indices_cluster:\n if sections_sents[i] != 'highlights':\n n_sents[sections_sents[i]] += 1\n\n n_sents_by_cluster.append(n_sents)\n\n for lbl in labels:\n sents_cluster_values.append(n_sents_by_cluster[lbl].values())\n\n columns = ['n_sents_intro', 'n_sents_text', 'n_sents_abst',\n 'n_sents_conclu']\n return np.array(sents_cluster_values), columns\n\n\ndef map_article_to_vec(article_path, highlights=False):\n\n sections_content = list(extract_content(article_path))\n\n if highlights:\n path_highl = article_path.replace('files_txt', 'files_highlights')\n highlights = get_highlights(path_highl)\n\n sections_names = ['title', 'keywords', 'abstract', 'introduction',\n 'conclusion', 'text']\n content = dict(zip(sections_names, sections_content))\n\n n_keywords = len(content['keywords'].split(','))\n\n sentences_vectors = pd.DataFrame(columns=['abstract'] + [\n 'keyword'+str(i+1) for i in range(n_keywords)] + sections_names[3::]\n + ['word freq. 
in sentence'])\n\n word_freq = dict(get_words_frequency('.'.join(sections_content)))\n\n all_sentences = []\n tokenized_sentences = []\n sents_in_section = {}\n sections_sents = []\n\n for col in ['abstract', 'introduction', 'text', 'conclusion']:\n sents_in_section[col] = sent_tokenize(content[col])\n token_section = map(clean_text_and_tokenize, sents_in_section[col])\n\n indices_valid_sents = get_valid_sents_indices(token_section)\n\n # Sections in which the sentences belong\n sections_sents += len(indices_valid_sents) * [col]\n\n tokenized_sentences += [token_section[i] for i in indices_valid_sents]\n\n sents_in_section[col] = [sents_in_section[col][i]\n for i in indices_valid_sents]\n all_sentences += sents_in_section[col]\n\n if highlights:\n all_sentences += highlights\n tokenized_sentences += map(clean_text_and_tokenize, highlights)\n sections_sents += len(highlights) * ['highlights']\n\n keywords = map(clean_text_and_tokenize, content['keywords'].split(','))\n\n create_sents_vector(tokenized_sentences, sentences_vectors,\n sents_in_section, keywords, word_freq)\n\n normalize_cols(sentences_vectors)\n\n return all_sentences, sentences_vectors, tokenized_sentences\n\n\ndef normalize_cols(sents_vecs):\n for col in sents_vecs.columns:\n max_val = sents_vecs[col].max()\n min_val = sents_vecs[col].min()\n\n if (max_val - min_val) > 0:\n sents_vecs[col] = (sents_vecs[col] - min_val) / (max_val - min_val)\n return sents_vecs\n\n\ndef get_valid_sents_indices(token_sents):\n indices = []\n\n for i in range(len(token_sents)):\n if len(token_sents[i]) > 2:\n indices.append(i)\n\n elif len(token_sents[i]) == 2:\n word_1_not_num = not(re.match(r'^[0-9]\\.*[0-9]*$',\n token_sents[i][0]))\n word_2_not_num = not(re.match(r'^[0-9]\\.*[0-9]*$',\n token_sents[i][1]))\n if word_1_not_num and word_2_not_num:\n indices.append(i)\n return indices\n\n\ndef calc_similarity_matrix(token_sents):\n n_sentences = len(token_sents)\n\n similarity_matrix = np.zeros((n_sentences, n_sentences))\n\n for i in range(n_sentences):\n for j in range(n_sentences):\n if i == j:\n similarity_matrix[i, j] = 1.0\n else:\n similarity_matrix[i, j] = get_section_similarity(\n token_sents[i], token_sents[j])\n return similarity_matrix\n\n\ndef mark_highlights(sents_vecs, n_highlights):\n n_sents = sents_vecs.shape[0]\n\n highlight_indicator = (n_sents - n_highlights)*[0.0] + n_highlights*[1.0]\n sents_vecs.insert(0, 'is_a_highlight', highlight_indicator)\n\n\ndef save_similarity_matrix(similarity_matrix, file_name):\n np.savetxt(file_name, similarity_matrix, delimiter=';')\n\n\nif __name__ == \"__main__\":\n dir_txts = os.path.join(os.path.dirname(__file__), 'files_txt')\n list_of_files_no_dir = os.listdir(dir_txts)\n list_of_files = [os.path.join(dir_txts, f) for f in list_of_files_no_dir]\n\n for f in list_of_files:\n\n sentences, sents_vecs, token_sents = map_article_to_vec(f, True)\n\n file_name = f.replace('.txt', '.csv').replace(\n 'files_txt', 'files_cluster_values_2')\n\n highlights = get_highlights(f.replace('files_txt', 'files_highlights'))\n mark_highlights(sents_vecs, len(highlights))\n\n sents_vecs.to_csv(file_name, sep='\\t', encoding='utf-8', index=False)\n\n print('Arquivo ' + file_name + ' salvo.')\n", "id": "451849", "language": "Python", "matching_score": 4.768211841583252, "max_stars_count": 1, "path": "SaKS_DataClassification/tokenizer.py" }, { "content": "import re\nimport os\nimport numpy as np\nfrom stemming.porter2 import stem\nfrom nltk.tokenize import word_tokenize\nfrom nltk.corpus import 
stopwords\nimport tokenizer\n\n\ndef clean_text_and_tokenize(text):\n \"\"\"\n Remove stopwords, numbers and symbols, tokenize the sentence\n in words and get the word-stem of the remaining words.\n\n Parameters\n ----------\n text: string\n Sentence that we wish to clean and split in words.\n\n Returns\n -------\n word_stems: list[string]\n List of word stems after the cleaning process is finished.\n\n \"\"\"\n words = word_tokenize(text)\n words_lower = map(lambda w: w.lower(), words)\n words_no_stop = filter(lambda w: w not in stopwords.words('english'),\n words_lower)\n words_no_symbols = filter(re.compile(r'[a-z1-9].*').search, words_no_stop)\n\n return map(stem, words_no_symbols)\n\n\ndef get_sentences_similarity(words_in_sentence_1, words_in_sentence_2):\n \"\"\"\n Calculate the similarity between two sentences by the number of words in\n common.\n\n Parameters\n ----------\n words_in_sentence_1: list [string]\n First sentence to compare.\n words_in_sentence_2: list [string]\n Second sentence to compare.\n\n Returns\n -------\n similarity: float\n Value between 0 and 1 that gives the similarity between two sentences.\n\n \"\"\"\n matches = map(lambda w: 1 if w in words_in_sentence_1 else 0,\n words_in_sentence_2)\n\n if len(matches) <= 0:\n return 0\n\n return 2.0 * sum(matches) / (len(words_in_sentence_1) +\n len(words_in_sentence_2))\n\n\ndef calc_similarity_matrix(token_sents):\n n_sentences = len(token_sents)\n\n similarity_matrix = np.zeros((n_sentences, n_sentences))\n\n for i in range(n_sentences):\n for j in range(n_sentences):\n if i == j:\n similarity_matrix[i, j] = 1.0\n else:\n similarity_matrix[i, j] = get_sentences_similarity(\n token_sents[i], token_sents[j])\n return similarity_matrix\n\n\ndef save_similarity_matrix(similarity_matrix, file_name):\n np.savetxt(file_name, similarity_matrix, delimiter=';')\n\n\nif __name__ == \"__main__\":\n dir_txts = os.path.join(os.path.dirname(__file__), 'files_txt')\n list_of_files = [os.path.join(dir_txts, f) for f in os.listdir(dir_txts)]\n\n for f in list_of_files:\n print(f)\n\n _, _, token_sents = tokenizer.map_article_to_vec(f)\n\n similarity_matrix = calc_similarity_matrix(token_sents)\n\n file_name = f.replace('.txt', '.csv'\n ).replace('files_txt', 'files_sim_matrices')\n\n save_similarity_matrix(similarity_matrix, file_name)\n", "id": "7277745", "language": "Python", "matching_score": 0.6679207682609558, "max_stars_count": 1, "path": "SaKS_DataClassification/similar_matrix.py" }, { "content": "import os\nimport re\nfrom nltk.tokenize import sent_tokenize\nfrom lxml import etree\nfrom config import xmls_dir, papers_dir\n\n\ndef attach_sentences_to_xml(xml, sentences):\n for sentence in sentences:\n if not re.match(r'([1-9][0-9]?\\.)+', sentence):\n sentence = sentence.replace('Download full-size image', '')\n xml_sentence = etree.Element('sentence')\n xml_sentence.text = re.sub(r'\\s+|\\n', ' ', sentence).strip()\n xml.append(xml_sentence)\n\n\ndef get_title(buffreader):\n title = etree.Element('title')\n title.text = buffreader.readline().strip()\n return title\n\n\ndef get_abstract(buffreader):\n \"\"\" Retorns the abstract to xml format. 
\"\"\"\n    xml_abstract = etree.Element('abstract')\n    reading_abstract = False\n    abstract_lines = []\n    for line in buffreader:\n        if line.strip() == 'Abstract':\n            reading_abstract = True\n        elif line.strip() == 'Keywords':\n            break\n        elif reading_abstract:\n            abstract_lines.append(line.strip())\n    sentences_abstract = sent_tokenize(' '.join(abstract_lines))\n    # Tokenize the abstract into sentences and attach them to the xml\n    attach_sentences_to_xml(xml_abstract, sentences_abstract)\n    return xml_abstract\n\n\ndef get_keywords(buffreader):\n    \"\"\" Attaches the article's keywords to the xml.\"\"\"\n    xml_keywords = etree.Element('keywords')\n    for line in buffreader:\n        if not line.strip():\n            break\n        else:\n            keyword = etree.Element('keyword')\n            keyword.text = line.strip()\n            xml_keywords.append(keyword)\n    return xml_keywords\n\n\ndef get_article_sections(buffreader):\n    list_xml_sections = []\n    section_content = \"\"\n    for line in buffreader:\n        if re.match(r'^[1-9][0-9]?\\.\\s+.{3,}$', line):\n            if list_xml_sections: # Save the previous section's information\n                attach_sentences_to_xml(list_xml_sections[-1], sent_tokenize(\n                    section_content, 'english'))\n            # A new section title was found\n            section_name = re.sub(r'^[1-9][0-9]?\\.\\s+', '', line).strip()\n            list_xml_sections.append(\n                etree.Element(\"section\", attrib={\"name\": section_name}))\n            section_content = \"\"\n        else:\n            section_content += line.replace('Fig.', 'Fig'\n                                            ).replace('Eq.', 'Eq')\n    attach_sentences_to_xml(list_xml_sections[-1],\n                            sent_tokenize(section_content, 'english'))\n    return list_xml_sections\n\n\npaperlist = os.listdir(papers_dir)\n\nprint('Converting papers to xml format...')\n\nfor paper in paperlist:\n    with open(os.path.join(papers_dir, paper)) as buffreader:\n        xml_article = etree.Element('article')\n        xml_article.append(get_title(buffreader))\n        xml_article.append(get_abstract(buffreader))\n        xml_article.append(get_keywords(buffreader))\n        list_xml_sections = get_article_sections(buffreader)\n        for section in list_xml_sections:\n            xml_article.append(section)\n\n    with open(os.path.join(xmls_dir, paper.replace('.txt', '.xml')), 'w',\n              encoding='utf-8') as xmlfile:\n        xmlfile.write(etree.tostring(xml_article, encoding='utf-8',\n                                     pretty_print=True).decode('utf-8'))\nprint('Papers successfully converted to xml.')\n", "id": "2815472", "language": "Python", "matching_score": 1.778361439704895, "max_stars_count": 1, "path": "SaKS_DataClassification/sent_tokenizer_xml.py" }, { "content": "import nltk\nimport yaml\n\nnltk.download('punkt')\nwith open('config.yml') as yaml_file:\n    params = yaml.load(yaml_file, Loader=yaml.FullLoader)\n    xmls_dir = params['xmls_dir']\n    papers_dir = params['papers_dir']\n    highlights_dir = params['highlights_dir']\n    preprocessed_dir = params['preprocessed_dir']\n", "id": "7313518", "language": "Python", "matching_score": 0.15228652954101562, "max_stars_count": 1, "path": "SaKS_DataClassification/config.py" }, { "content": "####################################\n# Author: <NAME>\n# Date: September 2016\n# Project: Document Summarization\n# H2020 Summa Project\n####################################\n\n\"\"\"\nDocument Summarization Modules and Models\n\"\"\"\n\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nimport numpy as np\nimport tensorflow as tf\nimport random\nimport os\nimport re\nimport os.path\n\nfrom pyrouge import Rouge155\nimport json\nfrom multiprocessing import Pool\nfrom contextlib import closing\n\nfrom my_flags import FLAGS\n\ndef 
_rouge(system_dir, gold_dir):\n # Run rouge\n r = Rouge155()\n r.system_dir = system_dir\n r.model_dir = gold_dir\n r.system_filename_pattern = '([a-zA-Z0-9]*).txt.model'\n r.model_filename_pattern = '#ID#.txt' # '#ID#.gold'\n output = r.convert_and_evaluate(\n rouge_args=\"-e /home/gustavo/Downloads/pyrouge/tools/ROUGE-1.5.5/data -a -c 95 -m -n 4 -w 1.2\")\n # print output\n output_dict = r.output_to_dict(output)\n # print output_dict\n\n # avg_rscore = 0\n # if FLAGS.rouge_reward_fscore:\n # avg_rscore = (output_dict[\"rouge_1_f_score\"]+output_dict[\"rouge_2_f_score\"]+\n # output_dict[\"rouge_3_f_score\"]+output_dict[\"rouge_4_f_score\"]+\n # output_dict[\"rouge_l_f_score\"])/5.0\n # else:\n # avg_rscore = (output_dict[\"rouge_1_recall\"]+output_dict[\"rouge_2_recall\"]+\n # output_dict[\"rouge_3_recall\"]+output_dict[\"rouge_4_recall\"]+\n # output_dict[\"rouge_l_recall\"])/5.0\n\n avg_rscore = (output_dict[\"rouge_1_f_score\"]+output_dict[\"rouge_2_f_score\"]+output_dict[\"rouge_l_f_score\"])/3.0\n\n return avg_rscore\n\ndef _rouge_wrapper_traindata(docname, final_labels, final_labels_str):\n # Gold Summary Directory : Always use original sentences\n gold_summary_directory = FLAGS.gold_summary_directory + \"/\" + FLAGS.data_mode\n gold_summary_fileaddress = gold_summary_directory + \"/\" + docname + \".gold\"\n\n # Prepare Gold Model File\n os.system(\"mkdir -p \"+FLAGS.tmp_directory+\"/gold-\"+docname+\"-\"+final_labels_str)\n os.system(\"cp \"+gold_summary_fileaddress+\" \"+FLAGS.tmp_directory+\"/gold-\"+docname+\"-\"+final_labels_str+\"/\")\n\n # Document Sentence: Always use original sentences to generate summaries\n doc_sent_fileaddress = FLAGS.doc_sentence_directory + \"/\" + FLAGS.data_mode + \"/training-sent/\"+docname+\".summary.final.org_sents\"\n doc_sents = open(doc_sent_fileaddress).readlines()\n\n # Prepare Model file\n os.system(\"mkdir -p \"+FLAGS.tmp_directory+\"/model-\"+docname+\"-\"+final_labels_str)\n\n # Write selected sentences\n labels_ones = [idx for idx in range(len(final_labels[:len(doc_sents)])) if final_labels[idx]==\"1\"]\n model_highlights = [doc_sents[idx] for idx in labels_ones]\n foutput = open(FLAGS.tmp_directory+\"/model-\"+docname+\"-\"+final_labels_str+\"/\"+docname+\".model\" , \"w\")\n foutput.write(\"\".join(model_highlights))\n foutput.close()\n\n return _rouge(FLAGS.tmp_directory+\"/model-\"+docname+\"-\"+final_labels_str, FLAGS.tmp_directory+\"/gold-\"+docname+\"-\"+final_labels_str)\n\ndef _multi_run_wrapper(args):\n return _rouge_wrapper_traindata(*args)\n\ndef _get_lcs(a, b):\n lengths = [[0 for j in range(len(b)+1)] for i in range(len(a)+1)]\n # row 0 and column 0 are initialized to 0 already\n for i, x in enumerate(a):\n for j, y in enumerate(b):\n if x == y:\n lengths[i+1][j+1] = lengths[i][j] + 1\n else:\n lengths[i+1][j+1] = max(lengths[i+1][j], lengths[i][j+1])\n # read the substring out from the matrix\n result = []\n x, y = len(a), len(b)\n while x != 0 and y != 0:\n if lengths[x][y] == lengths[x-1][y]:\n x -= 1\n elif lengths[x][y] == lengths[x][y-1]:\n y -= 1\n else:\n assert a[x-1] == b[y-1]\n result = [a[x-1]] + result\n x -= 1\n y -= 1\n return len(result)\n\ndef _get_ngram_sets(highlights):\n set_1gram = set()\n set_2gram = set()\n set_3gram = set()\n set_4gram = set()\n fullen = len(highlights)\n for widx in range(fullen):\n # 1gram\n set_1gram.add(str(highlights[widx]))\n # 2gram\n if (widx+1) < fullen:\n set_2gram.add(str(highlights[widx])+\"-\"+str(highlights[widx+1]))\n # 3gram\n if (widx+2) < fullen:\n 
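# word ids joined with '-' form a hashable key for the n-gram set\n            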
set_3gram.add(str(highlights[widx])+\"-\"+str(highlights[widx+1])+\"-\"+str(highlights[widx+2]))\n # 4gram\n if (widx+3) < fullen:\n set_4gram.add(str(highlights[widx])+\"-\"+str(highlights[widx+1])+\"-\"+str(highlights[widx+2])+\"-\"+str(highlights[widx+3]))\n return set_1gram, set_2gram, set_3gram, set_4gram\n\ndef _rouge_wrapper_traindata_nopyrouge(docname, final_labels_str, document, highlights):\n cand_highlights_full = []\n for sentidx in final_labels_str.split(\"-\"):\n cand_highlights_full += [wordid for wordid in document[int(sentidx)] if wordid != 0]\n cand_highlights_full.append(0)\n highlights_full = []\n for sent in highlights:\n highlights_full += sent\n highlights_full.append(0)\n # print(cand_highlights_full,highlights_full)\n\n # Get sets\n cand_1gram, cand_2gram, cand_3gram, cand_4gram = _get_ngram_sets(cand_highlights_full)\n # print(cand_1gram, cand_2gram, cand_3gram, cand_4gram)\n gold_1gram, gold_2gram, gold_3gram, gold_4gram = _get_ngram_sets(highlights_full)\n # print(gold_1gram, gold_2gram, gold_3gram, gold_4gram)\n\n # Get ROUGE-N recalls\n rouge_recall_1 = 0\n if len(gold_1gram) != 0:\n rouge_recall_1 = float(len(gold_1gram.intersection(cand_1gram)))/float(len(gold_1gram))\n rouge_recall_2 = 0\n if len(gold_2gram) != 0:\n rouge_recall_2 = float(len(gold_2gram.intersection(cand_2gram)))/float(len(gold_2gram))\n rouge_recall_3 = 0\n if len(gold_3gram) != 0:\n rouge_recall_3 = float(len(gold_3gram.intersection(cand_3gram)))/float(len(gold_3gram))\n rouge_recall_4 = 0\n if len(gold_4gram) != 0:\n rouge_recall_4 = float(len(gold_4gram.intersection(cand_4gram)))/float(len(gold_4gram))\n\n # Get ROUGE-L\n len_lcs = _get_lcs(cand_highlights_full, highlights_full)\n r = 0 if (len_lcs == 0) else (float(len_lcs)/len(cand_highlights_full))\n p = 0 if (len_lcs == 0) else (float(len_lcs)/len(highlights_full))\n b = 0 if (r == 0) else (p / r)\n rouge_recall_l = 0 if (len_lcs == 0) else (((1+(b*b))*r*p)/(r+(b*b*p)))\n\n rouge_recall_average = (rouge_recall_1+rouge_recall_2+rouge_recall_3+rouge_recall_4+rouge_recall_l)/5.0\n # print(rouge_recall_1, rouge_recall_2, rouge_recall_3, rouge_recall_4, rouge_recall_l, rouge_recall_average)\n\n # Get final labels\n final_labels = [[1, 0] if (str(sentidx) in final_labels_str.split(\"-\")) else [0, 1] for sentidx in range(FLAGS.max_doc_length)] # [max_doc_length, target_label_size]\n\n return rouge_recall_average, final_labels\n\ndef _multi_run_wrapper_nopyrouge(args):\n return _rouge_wrapper_traindata_nopyrouge(*args)\n\nclass Reward_Generator:\n def __init__(self):\n self.rouge_dict = {}\n\n # Start a pool\n self.pool = Pool(10)\n\n def save_rouge_dict(self):\n with open(FLAGS.train_dir+\"/rouge-dict.json\", 'w') as outfile:\n json.dump(self.rouge_dict, outfile)\n\n def restore_rouge_dict(self):\n self.rouge_dict = {}\n if os.path.isfile(FLAGS.train_dir+\"/rouge-dict.json\"):\n with open(FLAGS.train_dir+\"/rouge-dict.json\") as data_file:\n self.rouge_dict = json.load(data_file)\n\n def get_full_rouge(self, system_dir, datatype):\n # Gold Directory: Always use original files\n gold_summary_directory = FLAGS.gold_summary_directory + \"/\" + datatype\n\n rouge_score = _rouge(system_dir, gold_summary_directory)\n\n # Delete any tmp file\n os.system(\"rm -r \"+FLAGS.tmp_directory+\"/tmp*\")\n\n return rouge_score\n\n # def get_batch_rouge(self, batch_docnames, batch_predicted_labels):\n\n # # Numpy dtype\n # dtype = np.float16 if FLAGS.use_fp16 else np.float32\n\n # # Batch Size\n # batch_size = len(batch_docnames)\n\n # # batch_rouge\n # 
batch_rouge = np.empty(batch_size, dtype=dtype)\n\n # # Estimate list of arguments to run pool\n # didx_list = []\n # docname_labels_list = []\n # for docindex in range(batch_size):\n # docname = batch_docnames[docindex]\n # predicted_labels = batch_predicted_labels[docindex]\n\n # # Prepare final labels for summary generation\n # final_labels = [str(int(predicted_labels[sentidx][0])) for sentidx in range(FLAGS.max_doc_length)]\n # # print(final_labels)\n\n # isfound = False\n # rougescore = 0.0\n # if docname in self.rouge_dict:\n # final_labels_string = \"\".join(final_labels)\n # if final_labels_string in self.rouge_dict[docname]:\n # rougescore = self.rouge_dict[docname][final_labels_string]\n # isfound = True\n\n # if isfound:\n # # Update batch_rouge\n # batch_rouge[docindex] = rougescore\n # else:\n # didx_list.append(docindex)\n # docname_labels_list.append((docname, final_labels))\n\n # # Run parallel pool\n # if(len(didx_list) > 0):\n # # Run in parallel\n # rougescore_list = self.pool.map(_multi_run_wrapper,docname_labels_list)\n # # Process results\n # for didx, rougescore, docname_labels in zip(didx_list, rougescore_list, docname_labels_list):\n # # Update batch_rouge\n # batch_rouge[didx] = rougescore\n\n # # Update rouge dict\n # docname = docname_labels[0]\n # final_labels_string = \"\".join(docname_labels[1])\n # if docname not in self.rouge_dict:\n # self.rouge_dict[docname] = {final_labels_string:rougescore}\n # else:\n # self.rouge_dict[docname][final_labels_string] = rougescore\n # # Delete any tmp file\n # os.system(\"rm -r \"+ FLAGS.tmp_directory+\"/tmp* \" + FLAGS.tmp_directory+\"/gold-* \" + FLAGS.tmp_directory+\"/model-*\")\n # # print(self.rouge_dict)\n # return batch_rouge\n\n def get_batch_rouge_withmultisample(self, batch_docnames, batch_predicted_labels_multisample):\n \"\"\"\n Args:\n batch_docnames: [batch_size]\n batch_predicted_labels_multisample: [batch_size, rollout_count, FLAGS.max_doc_length, FLAGS.target_label_size]\n Return:\n rougescore: [batch_size, FLAGS.num_sample_rollout]\n \"\"\"\n\n # Numpy dtype\n dtype = np.float16 if FLAGS.use_fp16 else np.float32\n\n # Batch Size and sample rollout count\n batch_size = len(batch_docnames)\n rollout_count = batch_predicted_labels_multisample.shape[1]\n\n # batch_rouge\n batch_rouge_multisample = np.empty((batch_size, rollout_count), dtype=dtype)\n\n # Prepare of all rollout labels dict and prepare docname_labels_list to run\n docname_labels_rollout_dict = {}\n docname_labels_list = []\n for docindex in range(batch_size):\n docname = batch_docnames[docindex]\n # print(docname)\n\n for rolloutidx in range(rollout_count):\n predicted_labels = batch_predicted_labels_multisample[docindex][rolloutidx] # [FLAGS.max_doc_length, FLAGS.target_label_size]\n # Prepare final labels for summary generation\n final_labels = []\n final_labels_sindices = []\n for sentidx in range(FLAGS.max_doc_length):\n final_labels.append(str(int(predicted_labels[sentidx][0])))\n if int(predicted_labels[sentidx][0]) == 1:\n final_labels_sindices.append(str(sentidx+1))\n final_labels_string = \"-\".join(final_labels_sindices)\n\n # print(final_labels,final_labels_string)\n\n isfound = False\n rougescore = 0.0\n if docname in self.rouge_dict:\n if final_labels_string in self.rouge_dict[docname]:\n rougescore = self.rouge_dict[docname][final_labels_string]\n isfound = True\n\n if isfound:\n # Update batch_rouge\n batch_rouge_multisample[docindex][rolloutidx] = rougescore\n else:\n if docname not in docname_labels_rollout_dict:\n 
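# first rollout for this document: record its batch index and start a label-string -> rollout-indices map\n                        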
docname_labels_rollout_dict[docname] = [docindex, {final_labels_string:[rolloutidx]}]\n docname_labels_list.append((docname, final_labels, final_labels_string))\n else:\n if final_labels_string not in docname_labels_rollout_dict[docname][1]:\n docname_labels_rollout_dict[docname][1][final_labels_string] = [rolloutidx]\n docname_labels_list.append((docname, final_labels, final_labels_string))\n else:\n docname_labels_rollout_dict[docname][1][final_labels_string].append(rolloutidx)\n # no need to add to docname_labels_list\n\n # print(docname_labels_list)\n # Run parallel pool\n if(len(docname_labels_list) > 0):\n # Run in parallel\n with closing(Pool(10)) as mypool:\n rougescore_list = mypool.map(_multi_run_wrapper,docname_labels_list)\n # rougescore_list = self.pool.map(_multi_run_wrapper,docname_labels_list)\n\n # Process results\n for rougescore, docname_labels in zip(rougescore_list, docname_labels_list):\n docname = docname_labels[0]\n final_labels = docname_labels[1]\n final_labels_string = docname_labels[2]\n\n # Update batch_rouge\n docindex = docname_labels_rollout_dict[docname][0]\n for rolloutidx in docname_labels_rollout_dict[docname][1][final_labels_string]:\n batch_rouge_multisample[docindex][rolloutidx] = rougescore\n\n # Update rouge dict\n if docname not in self.rouge_dict:\n self.rouge_dict[docname] = {final_labels_string:rougescore}\n else:\n self.rouge_dict[docname][final_labels_string] = rougescore\n # Delete any tmp file\n os.system(\"rm -r \"+ FLAGS.tmp_directory+\"/tmp* \" + FLAGS.tmp_directory+\"/gold-* \" + FLAGS.tmp_directory+\"/model-*\")\n # print(self.rouge_dict)\n\n return batch_rouge_multisample\n\n def get_batch_rouge_withmultisample_nopyrouge(self, batch_docnames, batch_predicted_labels_multisample_str, batch_docs, batch_highlights_nonnumpy):\n \"\"\"\n Args:\n batch_docnames: [batch_size]\n batch_predicted_labels_multisample_str: [batch_size, rollout_count]\n batch_docs: [batch_size, FLAGS.max_doc_length, FLAGS.max_sent_length]\n batch_highlights_nonnumpy: [batch_size, highlights_lengths, each_highlights]\n\n Return:\n rougescore: [batch_size, FLAGS.num_sample_rollout]\n batch_gold_sampled_label_multisample: [batch_size, FLAGS.num_sample_rollout, FLAGS.max_doc_length, FLAGS.target_label_size]\n \"\"\"\n\n # Numpy dtype\n dtype = np.float16 if FLAGS.use_fp16 else np.float32\n\n # Batch Size and sample rollout count\n batch_size = len(batch_docnames)\n rollout_count = batch_predicted_labels_multisample_str.shape[1]\n\n # batch_rouge\n batch_rouge_multisample = np.empty((batch_size, rollout_count), dtype=dtype)\n batch_gold_sampled_label_multisample = np.empty((batch_size, rollout_count, FLAGS.max_doc_length, FLAGS.target_label_size), dtype=dtype)\n\n # Prepare of all rollout labels dict and prepare docname_labels_list to run\n docname_labels_rollout_dict = {}\n docname_labels_list = []\n for docindex in range(batch_size):\n docname = batch_docnames[docindex]\n document = batch_docs[docindex]\n highlights = batch_highlights_nonnumpy[docindex]\n # print(docname)\n\n for rolloutidx in range(rollout_count):\n final_labels_string = batch_predicted_labels_multisample_str[docindex][rolloutidx]\n # print(final_labels_string)\n\n if docname not in docname_labels_rollout_dict:\n docname_labels_rollout_dict[docname] = [docindex, {final_labels_string:[rolloutidx]}]\n docname_labels_list.append((docname, final_labels_string, document, highlights))\n else:\n if final_labels_string not in docname_labels_rollout_dict[docname][1]:\n 
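# unseen label string for a known document: queue one ROUGE computation shared by all matching rollouts\n                        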
docname_labels_rollout_dict[docname][1][final_labels_string] = [rolloutidx]\n docname_labels_list.append((docname, final_labels_string, document, highlights))\n else:\n docname_labels_rollout_dict[docname][1][final_labels_string].append(rolloutidx)\n # no need to add to docname_labels_list\n\n # isfound = False\n # rougescore = 0.0\n # if docname in self.rouge_dict:\n # if final_labels_string in self.rouge_dict[docname]:\n # rougescore = self.rouge_dict[docname][final_labels_string]\n # isfound = True\n\n # if isfound:\n # # Update batch_rouge\n # batch_rouge_multisample[docindex][rolloutidx] = rougescore\n # else:\n # if docname not in docname_labels_rollout_dict:\n # docname_labels_rollout_dict[docname] = [docindex, {final_labels_string:[rolloutidx]}]\n # docname_labels_list.append((docname, final_labels_string, document, highlights))\n # else:\n # if final_labels_string not in docname_labels_rollout_dict[docname][1]:\n # docname_labels_rollout_dict[docname][1][final_labels_string] = [rolloutidx]\n # docname_labels_list.append((docname, final_labels_string, document, highlights))\n # else:\n # docname_labels_rollout_dict[docname][1][final_labels_string].append(rolloutidx)\n # # no need to add to docname_labels_list\n\n # print(docname_labels_rollout_dict )\n # print(docname_labels_list)\n\n # Run parallel pool\n if(len(docname_labels_list) > 0):\n # Run in parallel\n # with closing(Pool(10)) as mypool:\n # rougescore_finallabels_list = mypool.map(_multi_run_wrapper_nopyrouge,docname_labels_list)\n rougescore_finallabels_list = self.pool.map(_multi_run_wrapper_nopyrouge,docname_labels_list)\n\n # Process results\n for rougescore_finallabels, docname_labels in zip(rougescore_finallabels_list, docname_labels_list):\n rougescore = rougescore_finallabels[0]\n finallabels = rougescore_finallabels[1]\n docname = docname_labels[0]\n final_labels_string = docname_labels[1]\n\n # Update batch_rouge\n docindex = docname_labels_rollout_dict[docname][0]\n for rolloutidx in docname_labels_rollout_dict[docname][1][final_labels_string]:\n batch_rouge_multisample[docindex][rolloutidx] = rougescore\n batch_gold_sampled_label_multisample[docindex][rolloutidx] = np.array(finallabels[:], dtype=dtype)\n\n # # Update rouge dict\n # if docname not in self.rouge_dict:\n # self.rouge_dict[docname] = {final_labels_string:rougescore}\n # else:\n # self.rouge_dict[docname][final_labels_string] = rougescore\n\n # print(self.rouge_dict)\n\n return batch_rouge_multisample, batch_gold_sampled_label_multisample\n\n", "id": "1113426", "language": "Python", "matching_score": 4.445868492126465, "max_stars_count": 1, "path": "SaKS_Refresh/reward_utils.py" }, { "content": "####################################\n# Author: <NAME>\n# Date: September 2016\n# Project: Document Summarization\n# H2020 Summa Project\n####################################\n\n\"\"\"\nDocument Summarization Modules and Models\n\"\"\"\n\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nimport numpy as np\nimport tensorflow as tf\nimport random\nimport os\n\nfrom my_flags import FLAGS\nfrom model_utils import convert_logits_to_softmax, predict_topranked\n\n# Special IDs\nPAD_ID = 0\nUNK_ID = 1\n\nclass Data:\n def __init__(self, vocab_dict, data_type):\n self.filenames = []\n self.docs = []\n self.titles = []\n self.images = []\n self.labels = []\n self.rewards = []\n self.weights = []\n\n self.fileindices = []\n\n self.data_type = data_type\n\n # populate the data\n 
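# reads the preprocessed .doc and .label.multipleoracle files for this data split\n        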
self.populate_data(vocab_dict, data_type)\n\n # Write to files\n self.write_to_files(data_type)\n\n def write_prediction_summaries(self, pred_logits, modelname, session=None):\n print(\"Writing predictions and final summaries ...\")\n\n # Convert to softmax logits\n pred_logits = convert_logits_to_softmax(pred_logits, session=session)\n # Save Output Logits\n np.save(FLAGS.train_dir+\"/\"+modelname+\".\"+self.data_type+\"-prediction\", pred_logits)\n\n # Writing\n pred_labels = predict_topranked(pred_logits, self.weights, self.filenames)\n self.write_predictions(modelname+\".\"+self.data_type, pred_logits, pred_labels)\n self.process_predictions_topranked(modelname+\".\"+self.data_type)\n\n def write_predictions(self, file_prefix, np_predictions, np_labels):\n foutput = open(FLAGS.train_dir+\"/\"+file_prefix+\".predictions\", \"w\")\n for fileindex in self.fileindices:\n filename = self.filenames[fileindex]\n foutput.write(filename+\"\\n\")\n\n sentcount = 0\n for sentpred, sentlabel in zip(np_predictions[fileindex], np_labels[fileindex]):\n one_prob = sentpred[0]\n label = sentlabel[0]\n\n if sentcount < len(self.weights[fileindex]):\n foutput.write(str(int(label))+\"\\t\"+str(one_prob)+\"\\n\")\n else:\n break\n\n sentcount += 1\n foutput.write(\"\\n\")\n foutput.close()\n\n def process_predictions_topranked(self, file_prefix):\n predictiondata = open(FLAGS.train_dir+\"/\"+file_prefix+\".predictions\").read().strip().split(\"\\n\\n\")\n # print len(predictiondata)\n\n summary_dirname = FLAGS.train_dir+\"/\"+file_prefix+\"-summary-topranked\"\n os.system(\"mkdir \"+summary_dirname)\n\n for item in predictiondata:\n # print(item)\n\n itemdata = item.strip().split(\"\\n\")\n # print len(itemdata)\n\n filename = itemdata[0]\n # print filename\n\n # predictions file already have top three sentences marked\n final_sentids = []\n for sentid in range(len(itemdata[1:])):\n label_score = itemdata[sentid+1].split()\n if label_score[0] == \"1\":\n final_sentids.append(sentid)\n\n # Create final summary files\n fileid = filename.split(\"-\")[-1] # cnn-fileid, dailymail-fileid\n summary_file = open(summary_dirname+\"/\"+fileid+\".model\", \"w\")\n\n # Read Sents in the document : Always use original sentences\n sent_filename = FLAGS.doc_sentence_directory + \"/\" + self.data_type +\"/\"+fileid\n docsents = open(sent_filename).readlines()\n\n # Top Ranked three sentences\n selected_sents = [docsents[sentid] for sentid in final_sentids if sentid < len(docsents)]\n # print(selected_sents)\n\n summary_file.write(\"\".join(selected_sents)+\"\\n\")\n summary_file.close()\n\n def get_batch(self, startidx, endidx):\n # This is very fast if you keep everything in Numpy\n\n def process_to_chop_pad(orgids, requiredsize):\n if (len(orgids) >= requiredsize):\n return orgids[:requiredsize]\n else:\n padids = [PAD_ID] * (requiredsize - len(orgids))\n return (orgids + padids)\n\n # Numpy dtype\n dtype = np.float16 if FLAGS.use_fp16 else np.float32\n\n # For train, (endidx-startidx)=FLAGS.batch_size, for others its as specified\n batch_docnames = np.empty((endidx-startidx), dtype=\"S60\") # File ID of size \"cnn-\" or \"dailymail-\" with fileid of size 40\n batch_docs = np.empty(((endidx-startidx), (FLAGS.max_doc_length + FLAGS.max_title_length + FLAGS.max_image_length), FLAGS.max_sent_length), dtype=\"int32\")\n batch_label = np.empty(((endidx-startidx), FLAGS.max_doc_length, FLAGS.target_label_size), dtype=dtype) # Single best oracle, used for JP models or accuracy estimation\n batch_weight = 
np.empty(((endidx-startidx), FLAGS.max_doc_length), dtype=dtype)\n batch_oracle_multiple = np.empty(((endidx-startidx), 1, FLAGS.max_doc_length, FLAGS.target_label_size), dtype=dtype)\n batch_reward_multiple = np.empty(((endidx-startidx), 1), dtype=dtype)\n\n batch_idx = 0\n for fileindex in self.fileindices[startidx:endidx]:\n # Document Names\n batch_docnames[batch_idx] = self.filenames[fileindex]\n\n # Document\n doc_wordids = [] # [FLAGS.max_doc_length + FLAGS.max_title_length + FLAGS.max_image_length, FLAGS.max_sent_length]\n for idx in range(FLAGS.max_doc_length):\n thissent = []\n if idx < len(self.docs[fileindex]):\n thissent = self.docs[fileindex][idx][:]\n thissent = process_to_chop_pad(thissent, FLAGS.max_sent_length) # [FLAGS.max_sent_length]\n doc_wordids.append(thissent)\n for idx in range(FLAGS.max_title_length):\n thissent = []\n if idx < len(self.titles[fileindex]):\n thissent = self.titles[fileindex][idx][:]\n thissent = process_to_chop_pad(thissent, FLAGS.max_sent_length) # [FLAGS.max_sent_length]\n doc_wordids.append(thissent)\n for idx in range(FLAGS.max_image_length):\n thissent = []\n if idx < len(self.images[fileindex]):\n thissent = self.images[fileindex][idx][:]\n thissent = process_to_chop_pad(thissent, FLAGS.max_sent_length) # [FLAGS.max_sent_length]\n doc_wordids.append(thissent)\n batch_docs[batch_idx] = np.array(doc_wordids[:], dtype=\"int32\")\n\n # Labels: Select the single best\n labels_vecs = [[1, 0] if (item in self.labels[fileindex][0]) else [0, 1] for item in range(FLAGS.max_doc_length)]\n batch_label[batch_idx] = np.array(labels_vecs[:], dtype=dtype)\n\n # Weights\n weights = process_to_chop_pad(self.weights[fileindex][:], FLAGS.max_doc_length)\n batch_weight[batch_idx] = np.array(weights[:], dtype=dtype)\n\n # Multiple Labels and rewards\n labels_set = [] # FLAGS.num_sample_rollout, FLAGS.max_doc_length, FLAGS.target_label_size\n reward_set = [] # FLAGS.num_sample_rollout, FLAGS.max_doc_length, FLAGS.target_label_size\n for idx in range(FLAGS.num_sample_rollout):\n thislabels = []\n if idx < len(self.labels[fileindex]):\n thislabels = [[1, 0] if (item in self.labels[fileindex][idx])\n else [0, 1] for item in range(FLAGS.max_doc_length)]\n reward_set.append(self.rewards[fileindex][idx])\n else:\n # Simply copy the best one\n thislabels = [[1, 0] if (item in self.labels[fileindex][0])\n else [0, 1] for item in range(FLAGS.max_doc_length)]\n reward_set.append(self.rewards[fileindex][0])\n labels_set.append(thislabels)\n # Randomly Sample one oracle label\n randidx_oracle = random.randint(0, (FLAGS.num_sample_rollout-1))\n batch_oracle_multiple[batch_idx][0] = np.array(labels_set[randidx_oracle][:], dtype=dtype)\n batch_reward_multiple[batch_idx] = np.array([reward_set[randidx_oracle]], dtype=dtype)\n\n # increase batch count\n batch_idx += 1\n\n return batch_docnames, batch_docs, batch_label, batch_weight, batch_oracle_multiple, batch_reward_multiple\n\n def shuffle_fileindices(self):\n self.fileindices = list(self.fileindices)\n random.shuffle(self.fileindices)\n\n def write_to_files(self, data_type):\n full_data_file_prefix = FLAGS.train_dir + \"/\" + FLAGS.data_mode + \".\" + data_type\n print(\"Writing data files with prefix (.filename, .doc, .title, .image, .label, .weight, .rewards): %s\"%full_data_file_prefix)\n\n ffilenames = open(full_data_file_prefix+\".filename\", \"w\")\n fdoc = open(full_data_file_prefix+\".doc\", \"w\")\n flabel = open(full_data_file_prefix+\".label\", \"w\")\n fweight = open(full_data_file_prefix+\".weight\", \"w\")\n 
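# rewards are written one line per document, space-separated oracle scores\n        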
freward = open(full_data_file_prefix+\".reward\", \"w\")\n\n for filename, doc, title, image, label, weight, reward in zip(self.filenames, self.docs, self.titles, self.images, self.labels, self.weights, self.rewards):\n ffilenames.write(filename+\"\\n\")\n fdoc.write(\"\\n\".join([\" \".join([str(item) for item in itemlist]) for itemlist in doc])+\"\\n\\n\")\n flabel.write(\"\\n\".join([\" \".join([str(item) for item in itemlist]) for itemlist in label])+\"\\n\\n\")\n fweight.write(\" \".join([str(item) for item in weight])+\"\\n\")\n freward.write(\" \".join([str(item) for item in reward])+\"\\n\")\n\n ffilenames.close()\n fdoc.close()\n flabel.close()\n fweight.close()\n freward.close()\n\n def populate_data(self, vocab_dict, data_type):\n\n full_data_file_prefix = FLAGS.preprocessed_data_directory + \"/paperlist\" \".\" + data_type\n print(\"Data file prefix (.doc, .title, .image, .label.multipleoracle): %s\"%full_data_file_prefix)\n\n # Process doc, title, image, label\n doc_data_list = open(full_data_file_prefix+\".doc\").read().strip().split(\"\\n\\n\")\n # title_data_list = open(full_data_file_prefix+\".title\").read().strip().split(\"\\n\\n\")\n # image_data_list = open(full_data_file_prefix+\".image\").read().strip().split(\"\\n\\n\")\n label_data_list = open(full_data_file_prefix+\".label.multipleoracle\").read().strip().split(\"\\n\\n\")\n\n print(\"Data sizes: %d %d\"%(len(doc_data_list), len(label_data_list)))\n\n print(\"Reading data (no padding to save memory) ...\")\n doccount = 0\n for doc_data, label_data in zip(doc_data_list, label_data_list):\n\n doc_lines = doc_data.strip().split(\"\\n\")\n label_lines = label_data.strip().split(\"\\n\")\n\n filename = doc_lines[0].strip()\n\n if (filename == label_lines[0].strip()):\n # Put filename\n self.filenames.append(filename)\n\n # Doc\n thisdoc = []\n for line in doc_lines[1:FLAGS.max_doc_length+1]:\n thissent = [float(item) for item in line.strip().split()]\n thisdoc.append(thissent)\n self.docs.append(thisdoc)\n\n # Weights\n originaldoclen = int(label_lines[1].strip())\n thisweight = [1 for item in range(originaldoclen)][:FLAGS.max_doc_length]\n self.weights.append(thisweight)\n\n # Labels (multiple oracles and preestimated rewards)\n thislabel = []\n thisreward = []\n for line in label_lines[2:FLAGS.num_sample_rollout+2]:\n thislabel.append([int(item) for item in line.split()[:-1]])\n thisreward.append(float(line.split()[-1]))\n self.labels.append(thislabel)\n self.rewards.append(thisreward)\n\n else:\n print(\"Some problem with %s.* files. 
Exiting!\"%full_data_file_prefix)\n exit(0)\n\n if doccount%10000==0:\n print(\"%d ...\"%doccount)\n doccount += 1\n\n # Set Fileindices\n self.fileindices = range(len(self.filenames))\n\nclass DataProcessor:\n def prepare_news_data(self, vocab_dict, data_type=\"training\"):\n data = Data(vocab_dict, data_type)\n return data\n\n def prepare_vocab_embeddingdict(self):\n # Numpy dtype\n dtype = np.float16 if FLAGS.use_fp16 else np.float32\n\n vocab_dict = {}\n word_embedding_array = []\n\n # Add padding\n vocab_dict[\"_PAD\"] = PAD_ID\n # Add UNK\n vocab_dict[\"_UNK\"] = UNK_ID\n\n # Read word embedding file\n wordembed_filename = FLAGS.pretrained_wordembedding\n print(\"Reading pretrained word embeddings file: %s\"%wordembed_filename)\n\n embed_line = \"\"\n linecount = 0\n vocabsize = 1\n # with open(wordembed_filename, \"r\") as fembedd:\n # for line in fembedd:\n # if linecount == 0:\n # vocabsize = int(line.split()[0])\n # # Initiate fixed size empty array\n # word_embedding_array = np.empty((vocabsize, FLAGS.wordembed_size), dtype=dtype)\n # else:\n # linedata = line.split()\n # vocab_dict[linedata[0]] = linecount + 1\n # embeddata = [float(item) for item in linedata[1:]][0:FLAGS.wordembed_size]\n # word_embedding_array[linecount-1] = np.array(embeddata, dtype=dtype)\n\n # if linecount%100000 == 0:\n # print(str(linecount)+\" ...\")\n # linecount += 1\n word_embedding_array = np.empty((vocabsize, FLAGS.wordembed_size), dtype=dtype)\n print(\"Read pretrained embeddings: %s\"%str(word_embedding_array.shape))\n\n print(\"Size of vocab: %d (_PAD:0, _UNK:1)\"%len(vocab_dict))\n vocabfilename = FLAGS.train_dir+\"/vocab.txt\"\n print(\"Writing vocab file: %s\"%vocabfilename)\n\n foutput = open(vocabfilename,\"w\")\n vocab_list = [(vocab_dict[key], key) for key in vocab_dict.keys()]\n vocab_list.sort()\n vocab_list = [item[1] for item in vocab_list]\n foutput.write(\"\\n\".join(vocab_list)+\"\\n\")\n foutput.close()\n return vocab_dict, word_embedding_array\n\n", "id": "4772527", "language": "Python", "matching_score": 4.8240556716918945, "max_stars_count": 1, "path": "SaKS_Refresh/data_utils.py" }, { "content": "####################################\n# Author: <NAME>\n# Date: September 2016\n# Project: Document Summarization\n# H2020 Summa Project\n# Comments: Jan 2017\n# Improved for Reinforcement Learning\n####################################\n\n\"\"\"\nDocument Summarization System\n\"\"\"\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nimport math\nimport os\nimport random\nimport sys\nimport time\n\nimport numpy as np\nimport tensorflow as tf\n\nfrom reward_utils import Reward_Generator\nfrom data_utils import DataProcessor\nfrom my_flags import FLAGS\nfrom my_model import MY_Model\n\n\n######################## Batch Testing a model on some dataset ############\n\ndef batch_predict_with_a_model(data, model, session=None):\n\n data_logits = []\n data_labels = []\n data_weights = []\n\n step = 1\n while (step * FLAGS.batch_size) <= len(data.fileindices):\n # Get batch data as Numpy Arrays : Without shuffling\n batch_docnames, batch_docs, batch_label, batch_weight, batch_oracle_multiple, batch_reward_multiple = data.get_batch(((step-1)*FLAGS.batch_size), (step * FLAGS.batch_size))\n batch_logits = session.run(model.logits, feed_dict={model.document_placeholder: batch_docs})\n\n data_logits.append(batch_logits)\n data_labels.append(batch_label)\n data_weights.append(batch_weight)\n\n # Increase step\n step += 1\n\n # Check 
if any data left\n    if (len(data.fileindices) > ((step-1)*FLAGS.batch_size)):\n        # Get last batch as Numpy Arrays\n        batch_docnames, batch_docs, batch_label, batch_weight, batch_oracle_multiple, batch_reward_multiple = data.get_batch(((step-1)*FLAGS.batch_size), len(data.fileindices))\n        batch_logits = session.run(model.logits, feed_dict={model.document_placeholder: batch_docs})\n\n        data_logits.append(batch_logits)\n        data_labels.append(batch_label)\n        data_weights.append(batch_weight)\n    # print(data_logits)\n\n    # Convert list to tensors\n    data_logits = tf.concat(0, data_logits)\n    data_labels = tf.concat(0, data_labels)\n    data_weights = tf.concat(0, data_weights)\n    # print(data_logits,data_labels,data_weights)\n    return data_logits, data_labels, data_weights\n\n######################## Training Mode ###########################\n\ndef train():\n    \"\"\"\n    Training Mode: Create a new model and train the network\n    \"\"\"\n\n    # Training: use the tf default graph\n    with tf.Graph().as_default(), tf.device(FLAGS.use_gpu):\n\n        config = tf.ConfigProto(allow_soft_placement = True)\n\n        # Start a session\n        with tf.Session(config = config) as sess:\n\n            ### Prepare data for training\n            print(\"Prepare vocab dict and read pretrained word embeddings ...\")\n            vocab_dict, word_embedding_array = DataProcessor().prepare_vocab_embeddingdict()\n            # vocab_dict contains _PAD and _UNK but not word_embedding_array\n\n            print(\"Prepare training data ...\")\n            train_data = DataProcessor().prepare_news_data(vocab_dict, data_type=\"training\")\n\n            print(\"Prepare validation data ...\")\n            validation_data = DataProcessor().prepare_news_data(vocab_dict, data_type=\"validation\")\n\n            print(\"Prepare ROUGE reward generator ...\")\n            rouge_generator = Reward_Generator()\n\n            # Create Model with various operations\n            model = MY_Model(sess, len(vocab_dict)-2)\n\n            # Start training with some pretrained model\n            start_epoch = 1\n            # selected_modelpath = FLAGS.train_dir+\"/model.ckpt.epoch-\"+str(start_epoch-1)\n            # if not (os.path.isfile(selected_modelpath)):\n            #     print(\"Model not found in checkpoint folder.\")\n            #     exit(0)\n            # # Reload saved model and test\n            # print(\"Reading model parameters from %s\" % selected_modelpath)\n            # model.saver.restore(sess, selected_modelpath)\n            # print(\"Model loaded.\")\n\n            # Initialize word embedding before training\n            print(\"Initialize word embedding vocabulary with pretrained embeddings ...\")\n            sess.run(model.vocab_embed_variable.assign(word_embedding_array))\n\n            ########### Start (No Mixer) Training : Reinforcement learning ################\n            # Reward aware training as part of Reward weighted CE ,\n            # No Curriculum learning: No annealing, consider full document like in MRT\n            # Multiple Samples (include gold sample), No future reward, Similar to MRT\n            # During training does not use PYROUGE to avoid multiple file rewritings\n            # Approximate MRT with multiple pre-estimated oracle samples\n            # June 2017: Use Single sample from multiple oracles\n            ###############################################################################\n\n            print(\"Start Reinforcement Training (single rollout from largest prob mass) ...\")\n\n            for epoch in range(start_epoch, FLAGS.train_epoch_wce + 1):\n                print(\"MRT: Epoch \"+str(epoch))\n\n                print(\"MRT: Epoch \"+str(epoch)+\" : Reshuffle training document indices\")\n                train_data.shuffle_fileindices()\n\n                print(\"MRT: Epoch \"+str(epoch)+\" : Restore Rouge Dict\")\n                rouge_generator.restore_rouge_dict()\n\n                # Start Batch Training\n                step = 1\n                while (step * FLAGS.batch_size) <= 
len(train_data.fileindices):\n                    # Get batch data as Numpy Arrays\n                    batch_docnames, batch_docs, batch_label, batch_weight, batch_oracle_multiple, batch_reward_multiple = train_data.get_batch(((step-1)*FLAGS.batch_size),\n                                                                                                                                                  (step * FLAGS.batch_size))\n                    # print(batch_docnames)\n                    # print(batch_label[0])\n                    # print(batch_weight[0])\n                    # print(batch_oracle_multiple[0])\n                    # print(batch_reward_multiple[0])\n                    # exit(0)\n\n                    # Print the progress\n                    if (step % FLAGS.training_checkpoint) == 0:\n\n                        ce_loss_val, ce_loss_sum, acc_val, acc_sum = sess.run([model.rewardweighted_cross_entropy_loss_multisample, model.rewardweighted_ce_multisample_loss_summary,\n                                                                               model.accuracy, model.taccuracy_summary],\n                                                                              feed_dict={model.document_placeholder: batch_docs,\n                                                                                         model.predicted_multisample_label_placeholder: batch_oracle_multiple,\n                                                                                         model.actual_reward_multisample_placeholder: batch_reward_multiple,\n                                                                                         model.label_placeholder: batch_label,\n                                                                                         model.weight_placeholder: batch_weight})\n                        # Print Summary to Tensor Board\n                        model.summary_writer.add_summary(ce_loss_sum, ((epoch-1)*len(train_data.fileindices)+ step*FLAGS.batch_size))\n                        model.summary_writer.add_summary(acc_sum, ((epoch-1)*len(train_data.fileindices)+step*FLAGS.batch_size))\n\n                        print(\"MRT: Epoch \"+str(epoch)+\" : Covered \" + str(step*FLAGS.batch_size)+\"/\"+str(len(train_data.fileindices)) +\n                              \" : Minibatch Reward Weighted Multisample CE Loss= {:.6f}\".format(ce_loss_val) + \" : Minibatch training accuracy= {:.6f}\".format(acc_val))\n\n                    # Run optimizer: optimize policy network\n                    sess.run([model.train_op_policynet_expreward], feed_dict={model.document_placeholder: batch_docs,\n                                                                              model.predicted_multisample_label_placeholder: batch_oracle_multiple,\n                                                                              model.actual_reward_multisample_placeholder: batch_reward_multiple,\n                                                                              model.weight_placeholder: batch_weight})\n\n                    # Increase step\n                    step += 1\n\n                    # if step == 20:\n                    #     break\n\n                # Save Model\n                print(\"MRT: Epoch \"+str(epoch)+\" : Saving model after epoch completion\")\n                checkpoint_path = os.path.join(FLAGS.train_dir, \"model.ckpt.epoch-\"+str(epoch))\n                model.saver.save(sess, checkpoint_path)\n\n                # Backup Rouge Dict\n                print(\"MRT: Epoch \"+str(epoch)+\" : Saving rouge dictionary\")\n                rouge_generator.save_rouge_dict()\n\n                # Performance on the validation set\n                print(\"MRT: Epoch \"+str(epoch)+\" : Performance on the validation data\")\n                # Get Predictions: Prohibit the use of gold labels\n                validation_logits, validation_labels, validation_weights = batch_predict_with_a_model(validation_data, model, session=sess)\n                # Validation Accuracy and Prediction\n                validation_acc, validation_sum = sess.run([model.final_accuracy, model.vaccuracy_summary], feed_dict={model.logits_placeholder: validation_logits.eval(session=sess),\n                                                                                                                      model.label_placeholder: validation_labels.eval(session=sess),\n                                                                                                                      model.weight_placeholder: validation_weights.eval(session=sess)})\n                # Print Validation Summary\n                model.summary_writer.add_summary(validation_sum, (epoch*len(train_data.fileindices)))\n\n                print(\"MRT: Epoch \"+str(epoch)+\" : Validation (\"+str(len(validation_data.fileindices))+\") accuracy= {:.6f}\".format(validation_acc))\n                # Writing validation predictions and final summaries\n                print(\"MRT: Epoch \"+str(epoch)+\" : Writing final validation summaries\")\n                validation_data.write_prediction_summaries(validation_logits, \"model.ckpt.epoch-\"+str(epoch), session=sess)\n                # Estimate Rouge Scores\n                rouge_score = rouge_generator.get_full_rouge(FLAGS.train_dir+\"/model.ckpt.epoch-\"+str(epoch)+\".validation-summary-topranked\", \"validation\")\n                
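# full pyrouge evaluation over the validation summaries written above\n                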
print(\"MRT: Epoch \"+str(epoch)+\" : Validation (\"+str(len(validation_data.fileindices))+\") rouge= {:.6f}\".format(rouge_score))\n\n                # break\n\n            print(\"Optimization Finished!\")\n\n# ######################## Test Mode ###########################\n\ndef test():\n    \"\"\"\n    Test Mode: Loads an existing model and tests it on the test set\n    \"\"\"\n\n    # Testing: use the tf default graph\n\n    with tf.Graph().as_default(), tf.device(FLAGS.use_gpu):\n\n        config = tf.ConfigProto(allow_soft_placement = True)\n\n        # Start a session\n        with tf.Session(config = config) as sess:\n\n            ### Prepare data for testing\n            print(\"Prepare vocab dict and read pretrained word embeddings ...\")\n            vocab_dict, word_embedding_array = DataProcessor().prepare_vocab_embeddingdict()\n            # vocab_dict contains _PAD and _UNK but not word_embedding_array\n\n            print(\"Prepare test data ...\")\n            test_data = DataProcessor().prepare_news_data(vocab_dict, data_type=\"test\")\n\n            # Create Model with various operations\n            model = MY_Model(sess, len(vocab_dict)-2)\n\n            # # Initialize word embedding before training\n            # print(\"Initialize word embedding vocabulary with pretrained embeddings ...\")\n            # sess.run(model.vocab_embed_variable.assign(word_embedding_array))\n\n            # Select the model\n            if (os.path.isfile(FLAGS.train_dir+\"/model.ckpt.epoch-\"+str(FLAGS.model_to_load))):\n                selected_modelpath = FLAGS.train_dir+\"/model.ckpt.epoch-\"+str(FLAGS.model_to_load)\n            else:\n                print(\"Model not found in checkpoint folder.\")\n                exit(0)\n\n            # Reload saved model and test\n            print(\"Reading model parameters from %s\" % selected_modelpath)\n            model.saver.restore(sess, selected_modelpath)\n            print(\"Model loaded.\")\n\n            # Initialize word embedding before testing\n            print(\"Initialize word embedding vocabulary with pretrained embeddings ...\")\n            sess.run(model.vocab_embed_variable.assign(word_embedding_array))\n\n            # Test Accuracy and Prediction\n            print(\"Performance on the test data:\")\n            FLAGS.authorise_gold_label = False\n            test_logits, test_labels, test_weights = batch_predict_with_a_model(test_data, model, session=sess)\n            test_acc = sess.run(model.final_accuracy, feed_dict={model.logits_placeholder: test_logits.eval(session=sess),\n                                                                 model.label_placeholder: test_labels.eval(session=sess),\n                                                                 model.weight_placeholder: test_weights.eval(session=sess)})\n            # Print Test Summary\n            print(\"Test (\"+str(len(test_data.fileindices))+\") accuracy= {:.6f}\".format(test_acc))\n            # Writing test predictions and final summaries\n            test_data.write_prediction_summaries(test_logits, \"model.ckpt.epoch-\"+str(FLAGS.model_to_load), session=sess)\n\n######################## Main Function ###########################\n\ndef main(_):\n    if FLAGS.exp_mode == \"train\":\n        train()\n    else:\n        test()\n\nif __name__ == \"__main__\":\n    tf.app.run()\n\n\n\n\n", "id": "2000661", "language": "Python", "matching_score": 1.3537342548370361, "max_stars_count": 1, "path": "RefreshWordEmbeddings/document_summarizer_training_testing.py" }, { "content": "import os\n# import random\nimport numpy as np\n\ndef score_sentence(sent_rouge):\n    return float(sent_rouge.split(' - ')[0])\n\n\ndef save_multipleoracle(filename, summary_indexes, rouge_score, n_sents):\n    str_indexes = \" \".join([str(i) for i in summary_indexes])\n    with open('./multipleoracle/%s' % filename, 'w') as f:\n        f.write('%d\\n' % n_sents)\n        f.write('%s %s' % (str_indexes, rouge_score))\n    f.close()\n\n\ndef generate_multipleoracle(all_files):\n    for filename in all_files:\n        # Read the rouge value\n        rouge_file = open(\"./rouge/%s\" % 
filename).read().split('\\n\\n')[1]\n sentences_rouge = rouge_file.split('\\n')\n rouge_scores = np.array([score_sentence(sent_rouge) for sent_rouge in\n sentences_rouge])\n labels = np.array(open(\"./labels/%s\" % filename).read().split('\\n'))\n summary_indexes = list(np.where(labels == '1')[0])\n rouge_mean = rouge_scores[summary_indexes].mean()\n save_multipleoracle(filename, summary_indexes, rouge_mean, len(labels))\n\n\ndef create_data(filelist, data_type, doc_or_label, dirname):\n with open('paperlist.%s.%s' % (data_type, doc_or_label), 'w') as f:\n for filename in filelist:\n content = open(os.path.join(dirname, filename)).read()\n f.write(filename + '\\n')\n f.write(content.strip('\\n') + '\\n\\n')\n f.close()\n\ntraining_list = [\n'zaharia2017.txt', 'yu2011.txt', 'bajo2012.txt', 'chang2011.txt', 'ponce2015.txt',\n'rosasromero2016.txt', 'bae2012.txt', 'long2015.txt', 'clempner2017.txt', 'tollo2012.txt',\n'khoshjavan2011.txt', 'ismail2011.txt', 'garci2013.txt', 'khashei2012.txt', 'duan2014.txt',\n'les2013.txt', 'chou2014.txt', 'cardamone2013.txt', 'casabay2015.txt', 'rahman2011.txt',\n'chiang2015.txt', 'yu2011a.txt', 'mostafa2011.txt', 'ferreira2012.txt', 'navarro2012.txt',\n'ticknor2013.txt', 'chaaraoui2012.txt', 'tsui2014.txt', 'chow2013.txt',\n'alvaradoiniesta2013.txt', 'chou2013.txt', 'garcaalonso2012.txt', 'asensio2014.txt',\n'dahal2015.txt', 'chandwani2015.txt', 'jadhav2013.txt', 'hernndezdelolmo2012.txt',\n'gurupur2015.txt', 'patel2011.txt', 'yanto2012.txt', 'ahn2012.txt', 'neokosmidis2013.txt',\n'cavalcante2016.txt', 'deng2011.txt', 'xin2016.txt', 'silva2015.txt', 'nikoli2013.txt',\n'buyukozkan2016.txt', 'garca2012a.txt', 'nunes2013.txt', 'crespo2013.txt', 'zheng2011.txt',\n'deb2011.txt', 'li2014.txt', 'chou2014a.txt', 'clempner2016.txt', 'oliveira2013.txt',\n'choudhury2010.txt', 'wang2013.txt', 'pandi2011.txt', 'laurentys2011.txt', 'abelln2017.txt',\n'ahn2012a.txt', 'behera2012.txt', 'bielecki2013.txt', 'li2015.txt', 'patel2015.txt',\n'segundo2017.txt', 'omoteso2012.txt', 'escario2015.txt', 'olawoyin2013.txt', 'wu2012.txt',\n'soyguder2011.txt', 'titapiccolo2013.txt', 'gao2012.txt', 'adusumilli2013.txt',\n'onieva2013.txt', 'affonso2015.txt', 'poggiolini2013.txt', 'lima2016.txt', 'cullar2011.txt',\n'brady2017.txt', 'tan2011.txt', 'rmoreno2014.txt', 'esfahanipour2011.txt',\n'kadadevaramath2012.txt', 'krishnasamy2014.txt', 'labib2011.txt', 'elsebakhy2011.txt',\n'capozzoli2015.txt', 'marqus2012.txt', 'stavropoulos2013.txt', 'zelenkov2017.txt',\n'zhang2013.txt', 'leite2014.txt', 'ramachandran2013.txt', 'maleszka2015.txt',\n'chen2012.txt', 'gao2015.txt', 'wu2011a.txt', 'oreski2012.txt', 'bogaerd2011.txt',\n'park2011.txt', 'vidoni2011.txt', 'castelli2013.txt', 'dias2013.txt', 'naranje2014.txt',\n'marqus2012a.txt', 'leong2015.txt', 'falavigna2012.txt', 'montes2016.txt', 'araujo2014.txt',\n'zhou2011.txt', 'ng2012.txt', 'coronato2014.txt', 'kele2011.txt', 'prezrodrguez2012.txt',\n'yaghobi2011.txt', 'nascimento2013.txt', 'rouhi2015.txt', 'zhang2016a.txt',\n'parente2015.txt', 'nikoli2013a.txt', 'duan2012.txt', 'gorriz2017.txt', 'ho2012.txt',\n'das2014.txt', 'froz2017.txt', 'buche2011.txt', 'moncayomartnez2016.txt', 'paula2014.txt',\n'brock2015.txt', 'leony2013.txt', 'cui2012.txt', 'boloncanedo2016.txt', 'tagluk2011.txt',\n'wang2012.txt', 'samanta2011.txt', 'tasdemir2011.txt', 'sabzi2016.txt', 'villanueva2013.txt',\n'hilaire2013.txt', 'parkinson2012.txt', 'mohdali2015.txt', 'nahar2012.txt',\n'laalaoui2014.txt', 'liukkonen2012.txt', 'kovandi2016.txt', 
'teodorovi2014.txt',\n'wang2011.txt', 'hajizadeh2012.txt', 'duguleana2016.txt', 'alpar2015.txt', 'er2012.txt',\n'garca2012.txt', 'tsai2011.txt', 'garcacrespo2011.txt', 'henriet2013.txt', 'lien2012.txt',\n'rodrguezgonzlez2011.txt']\n\nval_list = [\n'pai2012.txt', 'yeow2014.txt', 'wu2011.txt', 'zhang2016.txt',\n'mullen2013.txt', 'kizilkan2011.txt', 'gil2012.txt',\n'saridakis2015.txt', 'mlakar2016.txt', 'moro2015.txt',\n'krstanovi2016.txt', 'horng2011.txt', 'floresfernndez2012.txt',\n'manahov2014.txt', 'subashini2016.txt', 'lpezcuadrado2012.txt',\n'reis2014.txt', 'altnkaya2014.txt', 'ponce2014.txt',\n'elsebakhy2012.txt',]\n\ntest_list = [\n'herrero2011.txt', 'su2011.txt', 'hong2011.txt', 'aleksendri2012.txt',\n'chou2011.txt', 'atici2011.txt', 'ardestani2014.txt', 'ribas2015.txt',\n'yang2017.txt', 'ghiassi2012.txt', 'rafiei2011.txt', 'asiltrk2011.txt',\n'bourguet2013.txt', 'pirovano2014.txt', 'oviedo2014.txt',\n'andrades2013.txt', 'garg2014.txt', 'li2015a.txt', 'garcatorres2014.txt']\n\nall_files = training_list + val_list + test_list\n\ngenerate_multipleoracle(all_files)\n\ncreate_data(all_files[:160], 'training', 'doc', './my_papers')\ncreate_data(all_files[:160], 'training', 'label.singleoracle', './labels')\ncreate_data(all_files[:160], 'training', 'label.multipleoracle',\n            './multipleoracle')\ncreate_data(all_files[160:180], 'validation', 'doc', './my_papers')\ncreate_data(all_files[160:180], 'validation', 'label.singleoracle', './labels')\ncreate_data(all_files[160:180], 'validation', 'label.multipleoracle',\n            './multipleoracle')\ncreate_data(all_files[180:], 'test', 'doc', './my_papers')\ncreate_data(all_files[180:], 'test', 'label.singleoracle', './labels')\ncreate_data(all_files[180:], 'test', 'label.multipleoracle', './multipleoracle'\n            )\n", "id": "7251320", "language": "Python", "matching_score": 3.5595703125, "max_stars_count": 1, "path": "SaKS_Refresh/data/mergirarquivos.py" }, { "content": "import os\nimport random\nimport numpy as np\n\nall_files = os.listdir('labels')\nrandom.shuffle(all_files)\n\n\ndef score_sentence(sent_rouge):\n    return float(sent_rouge.split(' - ')[0])\n\n\ndef save_multipleoracle(filename, summary_indexes, rouge_score, n_sents):\n    str_indexes = \" \".join([str(i) for i in summary_indexes])\n    with open('./multipleoracle/%s' % filename, 'w') as f:\n        f.write('%d\\n' % n_sents)\n        f.write('%s %s' % (str_indexes, rouge_score))\n    f.close()\n\n\ndef generate_multipleoracle(all_files):\n    for filename in all_files:\n        # Read the rouge value\n        rouge_file = open(\"./rouge/%s\" % filename).read().split('\\n\\n')[1]\n        sentences_rouge = rouge_file.split('\\n')\n        rouge_scores = np.array([score_sentence(sent_rouge) for sent_rouge in\n                                 sentences_rouge])\n        labels = np.array(open(\"./labels/%s\" % filename).read().split('\\n'))\n        summary_indexes = list(np.where(labels == '1')[0])\n        rouge_mean = rouge_scores[summary_indexes].mean()\n        save_multipleoracle(filename, summary_indexes, rouge_mean, len(labels))\n\n\ndef create_data(filelist, data_type, doc_or_label, dirname):\n    with open('paperlist.%s.%s' % (data_type, doc_or_label), 'w') as f:\n        for filename in filelist:\n            content = open(os.path.join(dirname, filename)).read()\n            f.write(filename + '\\n')\n            f.write(content.strip('\\n') + '\\n\\n')\n    f.close()\n\n\ngenerate_multipleoracle(all_files)\n\ncreate_data(all_files[:160], 'training', 'doc', './my_papers')\ncreate_data(all_files[:160], 'training', 'label.singleoracle', './labels')\ncreate_data(all_files[:160], 'training', 'label.multipleoracle',\n            
'./multipleoracle')\ncreate_data(all_files[160:180], 'validation', 'doc', './my_papers')\ncreate_data(all_files[160:180], 'validation', 'label.singleoracle', './labels')\ncreate_data(all_files[160:180], 'validation', 'label.multipleoracle',\n            './multipleoracle')\ncreate_data(all_files[180:], 'test', 'doc', './my_papers')\ncreate_data(all_files[180:], 'test', 'label.singleoracle', './labels')\ncreate_data(all_files[180:], 'test', 'label.multipleoracle', './multipleoracle'\n            )\n", "id": "5163686", "language": "Python", "matching_score": 0.8898701667785645, "max_stars_count": 1, "path": "RefreshWordEmbeddings/data/mergirarquivos.py" }, { "content": "import matplotlib.pyplot as plt\nimport pandas as pd\nimport numpy as np \nimport os\n\n\ndef create_table_weights (files_names, directory):\n    '''\n    Read the experiment weight files and keep, for each alpha\n    value, the final row of weights recorded in each file.\n    '''\n    weights = pd.DataFrame()\n    file_number = 1\n\n    for f in files_names:\n        df = pd.read_csv (os.path.join (\n            directory , f), delimiter = \"\\t\", encoding = 'utf-8')\n        alphas = df.alpha.unique()\n        for a in alphas:\n            weights = weights.append (df.loc[df['alpha']==a].tail(1), ignore_index=True)\n        file_number += 1\n\n    return weights\n\n\ndef create_table_results (files_names, directory): \n    '''\n    Read 5 experiment files for an article and return\n    a Dataframe with the data.\n\n    Parameters\n    ----------\n    files_names: list[string]\n        Names of the 5 experiment files for the same article. \n        Example: article_1.csv, article_2.csv, ..., article_5.csv.\n\n    Returns\n    -------\n    results: DataFrame\n        DataFrame containing the experiments results for the\n        current article.\n    '''\n\n    results = pd.DataFrame()\n    file_number = 1\n\n    for f in files_names:\n        df = pd.read_csv (os.path.join (\n            directory , f), delimiter = \"\\t\", encoding = 'utf-8')\n        df['run'] = [file_number] * df.shape[0]\n        results = results.append (df, ignore_index=True)\n        file_number += 1\n\n    return results\n\n\ndef plot_disp_chart(data, alphas, runs, file_name, n_iters = 50, type='avg'):\n\n    lines_labels = ['Silhouette', 'Avg. Distance Highlight-Centroid', \n    'Objective Function']\n\n    column = ['silhouettes', 'distances', 'evals']\n    n_columns = len(column)\n\n    x_label = 'Iter.'\n    colors = ['green', 'blue', 'red'] \n\n    if type=='avg':\n        fig = plt.figure(figsize=(18, 7 ))\n        fig.subplots_adjust(bottom=0.4)\n    else:\n        fig = plt.figure(figsize=(18, 25))\n\n    col = 0\n    for a in alphas:\n        relevant_data = data.loc[np.isclose(data['alpha'], a)]\n        title = 'Silhouette: ' + str(round(\n            a,2)) + ' . Avg. 
Distance: ' + str(1-round(a,2))\n\n if type=='avg':\n filtered_data = relevant_data.groupby(\n 'run').tail(n_iters).drop(columns=['run','alpha'])\n\n iterations = [i%n_iters for i in range(runs*n_iters)]\n filtered_data['iter'] = iterations\n\n mean = filtered_data.groupby('iter').mean()\n std = filtered_data.groupby('iter').std ()\n n_clusters = filtered_data['n_clusters'].mean()\n\n plt.subplot (1, n_columns, col+1)\n\n for j in range(n_columns):\n plt.plot (range(n_iters), mean[column[j]], \n color=colors[j], label=lines_labels[j])\n\n plt.errorbar (range(n_iters), mean[column[j]], \n yerr=std[column[j]], fmt='o', color=colors[j])\n\n plt.title ( title )\n plt.xlabel ( x_label )\n plt.text (0, 0.6, str(n_clusters)+' clusters',\n verticalalignment='top', horizontalalignment='left')\n if col==1:\n plt.legend(lines_labels, bbox_to_anchor=(0.5, -0.2), loc=\"upper center\")\n else:\n for row in range(0,runs):\n data_plot = relevant_data.loc [relevant_data['run']==(row+1)]\n total_iters = data_plot.shape[0]\n plt.subplot (runs, n_columns, row*n_columns+col+1)\n\n for j in range(n_columns):\n plt.plot (range(total_iters), data_plot[column[j]], \n color=colors[j], label=lines_labels[j])\n \n plt.title ( title )\n plt.xlabel ( x_label )\n\n for pos_x in range(n_iters, total_iters, (n_iters+1)):\n n_clusters = data_plot['n_clusters'].iloc[pos_x]\n\n plt.text ( pos_x-n_iters, 0.6, str(n_clusters)+' clusters',\n verticalalignment='top', horizontalalignment='left')\n\n plt.axvline (x = pos_x, color='black', linestyle='dashed')\n\n if col==1 and row==4:\n plt.legend(lines_labels, bbox_to_anchor=(0.5, -0.2), loc=\"upper center\")\n\n col = (col+1) % 3 \n \n fig.savefig (file_name) # save the figure to file\n plt.close(fig)\n\n\ndef draw_bar_chart (bars, n_clusters, alpha, row, col, \n n_charts_row=3, n_charts_col=5, std_dev=None):\n n_bars = bars.shape[0]\n x_labels = ['w'+str(i) for i in range(n_bars)]\n y_label = 'Weight values (n. clusters = ' + str(n_clusters) + ')'\n\n title = 'Silhouette: ' + str(round(\n alpha,2)) + ' . Avg. 
Distance: ' + str(1-round(alpha,2))\n\n    if std_dev is not None:\n        plt.subplot (1, n_charts_row, col)\n    else:\n        plt.subplot (n_charts_col, n_charts_row, row*n_charts_row+col)\n\n    plt.bar (x_labels, bars, 0.8, color='green')\n    if std_dev is not None:\n        plt.errorbar (x_labels, bars, yerr=std_dev, fmt='o')\n\n    plt.title (title)\n    plt.ylabel (y_label)\n\n\ndef plot_bar_chart (data, alphas, file_name, type='avg'):\n    \n    if type=='avg':\n        fig = plt.figure(figsize=(18, 4.5))\n    else:\n        fig = plt.figure(figsize=(18, 25 ))\n\n    col = 0\n    for a in alphas:\n        relevant_data = data.loc[data['alpha']==a]\n        filtered_data = relevant_data.drop (columns=['n_clusters','alpha'])\n\n        if type=='avg':\n            bars = filtered_data.mean()\n            std = filtered_data.std ()\n            n_clusters = relevant_data['n_clusters'].mean()\n            draw_bar_chart (bars, n_clusters, a, 0, col+1, std_dev=std)\n        else:\n            n_rows = 5\n            bars = filtered_data\n            for row in range (0, n_rows):\n                n_clusters = relevant_data['n_clusters'].iloc[row]\n                draw_bar_chart (bars.iloc[row], n_clusters, a, row, col+1)\n\n        col = (col+1) % 3 \n\n    fig.savefig (file_name) # save the figure to file\n    plt.close(fig) \n    \n\nif __name__ == \"__main__\":\n\n    directory = os.path.join (os.path.dirname(__file__), 'files_results')\n\n    files_general = sorted (os.listdir (directory))\n\n    files_evals = filter (lambda e: '_evals_' in e , files_general)\n    files_weights = filter (lambda e: '_weights_' in e , files_general)\n\n    while files_evals and files_weights:\n        table_results = create_table_results (files_evals [0:5], directory)\n        table_weights = create_table_weights (files_weights[0:5], directory)\n\n        cluster_values = table_results.drop_duplicates (\n            subset=['alpha','run'], keep='last')['n_clusters']\n\n        table_weights['n_clusters'] = cluster_values.tolist()\n\n        # strip('_1') removes trailing '_' and '1' characters, i.e. the run suffix '_1'\n        name_e = files_evals[0].replace('.csv','').strip('_1')\n        name_w = files_weights[0].replace('.csv','').strip('_1')\n\n        dir_charts = os.path.join (os.path.dirname(__file__), 'files_charts')\n\n        file_name_eval = os.path.join (os.path.join (\n            dir_charts, 'evals'), name_e)\n        file_name_weight = os.path.join (os.path.join (\n            dir_charts, 'weights'), name_w)\n\n\n        #plot_bar_chart (table_weights, [1.0/ 2, 2.0/ 3, 1.0/ 3], \n        #    file_name_weight + '_avg.pdf', type='avg')\n\n        #plot_bar_chart (table_weights, [1.0/ 2, 2.0/ 3, 1.0/ 3], \n        #    file_name_weight + '_multiple.pdf', type='multiple')\n\n        plot_disp_chart (table_results, [1.0/ 2, 2.0/ 3, 1.0/ 3],\n            5, file_name_eval + '_avg.pdf', type='avg')\n\n        plot_disp_chart (table_results, [1.0/ 2, 2.0/ 3, 1.0/ 3],\n            5, file_name_eval + '_multiple.pdf', type='multiple')\n\n        files_evals = files_evals [5::]\n        files_weights = files_weights [5::]", "id": "9867398", "language": "Python", "matching_score": 2.3728954792022705, "max_stars_count": 1, "path": "SaKS_DataClassification/charts_generator.py" }, { "content": "import numpy as np \nimport pandas as pd\nimport sys\nimport os \nimport io\nimport tokenizer\nfrom nltk.cluster.kmeans import KMeansClusterer\nfrom sklearn.cluster import KMeans\nfrom cluster import Pfcm\nfrom scipy.spatial import distance\nfrom evaluator import evaluator_weights_features as ev\n\n\ndef cluster_sentences (sents_vecs, num_clusters, weights=None):\n    if weights is not None:\n        print 'with weights'\n        dist_metric = ev.weighted_euclidean (weights)\n    else:\n        print 'without weights'\n        dist_metric = distance.euclidean\n\n    kclusterer = KMeansClusterer (num_clusters, repeats=1, distance=dist_metric,\n                                  avoid_empty_clusters=True) \n\n    labels = kclusterer.cluster (sents_vecs, assign_clusters=True)\n    \n    
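# means() returns one centroid per cluster after clustering finishes\n    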
centroids = np.array (kclusterer.means())\n\n return np.array(labels), centroids\n\n\ndef rank_sentences (data, labels, centroids):\n '''\n Returns\n -------\n ranked_sentences: dict (int -> 2d array)\n For each cluster returns a ranking containing the\n closest sentences to the centroid of that cluster.\n ''' \n n_clusters = centroids.shape[0]\n\n ranked_sentences = {}\n\n for c in range(n_clusters):\n\n indices = np.argwhere(labels==c).flatten()\n dists = [np.linalg.norm (data[i]-centroids[c]) for i in indices]\n \n index_dists = zip (indices, dists)\n\n ranked_sentences[c] = sorted (index_dists, key=lambda tup: tup[1])\n return ranked_sentences\n\n\ndef save_ranking_file (ranking, sentences, ranked_highlights, file_name):\n file_to_write = io.open (file_name, mode='w', encoding='utf-8')\n\n for r in ranking:\n file_to_write.write (u'\\n---------- Cluster ' + str(r) + ' ----------\\n')\n\n for item in ranking[r]:\n sent = sentences[item[0]].replace('\\n', ' ').encode('utf-8')\n row = '{:<5}=>{}\\n'.format (item[0], sent)\n file_to_write.write (row.decode('utf-8')) \n\n file_to_write.write (u'\\n-------------------------------\\n')\n file_to_write.write (u'Cluster Rank pos Sentence\\n')\n file_to_write.write ( u'-------------------------------\\n')\n\n for tup in ranked_highlights:\n sent = sentences[tup[2]].replace('\\n', ' ').encode('utf-8')\n row = '{:<11}{:<12}{}\\n'.format (tup[0], tup[1]+1, sent)\n file_to_write.write (row.decode('utf-8'))\n\n file_to_write.close()\n\n\ndef mark_highlights (sentences, n_highlights, file_name):\n file_name = file_name.replace('files_txt', 'files_highlights')\n n_highlights = len(tokenizer.get_highlights(file_name))\n n_sentences = len(sentences)\n\n for i in range(n_sentences-n_highlights, n_sentences):\n sentences[i] = '(HIGHLIGHT) ' + sentences[i]\n\n\ndef define_weights (n_attr):\n w_abstract = 0.5\n w_others = (1.0 - w_abstract) / (n_attr-1)\n\n weights = [w_abstract]\n weights += [w_others for _ in range(n_attr-1)]\n return np.array (weights)\n\n\ndef get_ranking_highlights (sentences, ranked_sents, n_highl):\n n_sents = len(sentences)\n indices_highl = range(n_sents-n_highl, n_sents)\n cluster_rank_index = []\n\n for c in ranked_sents:\n indices_cluster = zip(*ranked_sents[c])[0]\n highl_positions = np.in1d (indices_cluster, indices_highl)\n \n ranking_pos = np.where (highl_positions)[0]\n\n cluster_rank_index += zip ([c]*ranking_pos.shape[0], \n ranking_pos, np.array(indices_cluster)[highl_positions])\n\n return cluster_rank_index\n\n\ndef load_weights (file_name, alpha, n_attributes):\n \n weights = pd.read_csv (file_name, sep='\\t', encoding='utf-8')\n\n weights = weights [weights.alpha == alpha].drop(columns=['alpha'])\n weights = np.asarray (weights)[0]\n\n n_cols_tfidf = n_attributes - weights.shape[0] + 1\n array_tfidf = np.full (n_cols_tfidf, weights[-1]/n_cols_tfidf)\n\n return np.concatenate ( (weights[0:-1], array_tfidf) ) \n \n\nif __name__ == \"__main__\":\n \n dir_articles = os.path.join (os.path.dirname(__file__), 'files_txt')\n\n list_of_files_no_dir = os.listdir (dir_articles)\n list_of_files = [ os.path.join (dir_articles, f) for f in list_of_files_no_dir ]\n\n #for f_index in range(5):\n file_name = os.path.join (dir_articles, 'rosasromero2016.txt')\n file_weights = file_name.replace('.txt', '_weights_3.csv').replace(\n 'files_txt', 'files_results')\n\n sentences, sents_vecs, _ = tokenizer.map_article_to_vec(\n file_name, highlights=True, bag_of_words=True)\n\n print len (sentences)\n\n highl_file_name = 
file_name.replace('files_txt', 'files_highlights')\n    n_highlights = len(tokenizer.get_highlights (highl_file_name))\n\n    weights = load_weights (file_weights, 1.5/3.0, sents_vecs.shape[1])\n\n\n    labels, centroids = cluster_sentences (sents_vecs.values, \n        n_highlights, weights=weights)\n    ranked_sents = rank_sentences (sents_vecs.values, labels, centroids)\n    \n    file_save = file_name.replace('files_txt', 'files_tests')\n\n    ranked_highlights = get_ranking_highlights (sentences, ranked_sents, \n        n_highlights)\n    mark_highlights (sentences, n_highlights, file_name)\n\n    try:\n        save_ranking_file (ranked_sents, sentences, ranked_highlights, file_save)\n        print 'File \"' + file_save + '\" saved successfully.'\n    except IOError:\n        print 'Error saving file \"' + file_save + '\".'", "id": "10592663", "language": "Python", "matching_score": 3.2610676288604736, "max_stars_count": 1, "path": "SaKS_DataClassification/sumarizer.py" }, { "content": "import pandas as pd\nimport random as rd\nimport numpy as np\nfrom nltk.cluster.kmeans import KMeansClusterer\nfrom sklearn.metrics import silhouette_score\nimport cluster\n#import datetime\n\ndef weighted_euclidean (weights):\n    \"\"\"\n    Compute the weighted euclidean distance.\n\n    Parameters\n    ----------\n    weights: 1d array [float]\n        Weights of features.\n\n    Returns\n    -------\n    ( 1d array [float], 1d array [float] ) -> float\n        Distance function.\n    \"\"\"\n    def calc_distance (point1, point2):\n        \n        return np.sqrt(sum(weights*(point1-point2)**2))\n    \n    return calc_distance\n\n\nclass Evaluator_clustering:\n    \"\"\"\n    The class wraps the k-means algorithm, the objective function and the dataset.\n\n    Attributes\n    ----------\n    data : 2d array [float]\n        Text's sentences in bag-of-words format.\n    highlights: 2d array [float]\n        Research highlights in bag-of-words format.\n    weights: 1d array [float]\n        Weights for attributes.\n    indexes_highl: 1d array [int]\n        List of sentences indices that are highlights.\n    n_clusters: int\n        Number of clusters for the clustering algorithm.\n    alpha: float\n        Weighting factor for the objective function.\n    lbound: float\n        Min value for each dimension of a particle.\n    ubound: float\n        Max value for each dimension of a particle.\n    labels_results: 1d array [string]\n        Labels for the Dataframe containing the results of experiments.\n    df_results: DataFrame\n        Dataframe storing the fitness function's values in experiments.\n    df_weights: DataFrame\n        Dataframe storing the weights found in experiments.\n    algorithm: string\n        Clustering algorithm applied.\n    \"\"\"\n    \n    def __init__ (self, data, highlights, weights, indexes_highl, n_clusters, \n            alpha, lbound, ubound, algorithm, sim_matrix = None):\n        self.__data = data\n        self.__highlights = highlights\n        self.__indexes_highl = indexes_highl\n        self.__n_clusters = n_clusters\n        self.__alpha = alpha\n        self.__lbound = lbound\n        self.__ubound = ubound\n\n        self.__labels_results = ['alpha', 'silhouettes', 'distances', 'evals', \n            'n_clusters', 'max_dists_centro_highl']\n        self.__df_results = pd.DataFrame (columns = self.__labels_results)\n\n        self.__labels_weights = ['alpha'] + [str(i+1) + ' weights' \n            for i in range (weights.shape[0])]\n        self.__df_weights = pd.DataFrame (columns = self.__labels_weights)\n\n        self.__algorithm = algorithm\n        self.__rnd_seed = rd.randint (0, 100)\n        self.__define_clusterer (sim_matrix)\n\n\n    #def _init_weights_labels (self):\n        \n    #    n_labels_weights = len (filter (lambda d: 'tfidf' not in d, \n    #        self.__df_results.columns))\n\n    #    labels = ['alpha'] + [str(i+1) + ' weights' \n    #        for i in 
range (n_labels_weights)]\n    #    print self.__df_results.columns\n    #    print labels\n\n    #    return labels\n\n    def __define_clusterer (self, sim_matrix=None):\n        if self.__algorithm == 'kmeans':\n\n            self.__clusterer = KMeansClusterer (self.__n_clusters, distance = None, rng=None,\n                avoid_empty_clusters=True, repeats=1 )\n\n        elif self.__algorithm == 'pfcm':\n            learning = 0.08\n            delta = 0.1\n            epsilon = 0.2\n\n            self.__clusterer = cluster.Pfcm (delta, epsilon, learning, self.__data, sim_matrix=sim_matrix)\n\n\n    def update_alpha (self, alpha):\n        \"\"\"\n        Update the alpha applied in the fitness function.\n\n        Parameters\n        ----------\n        alpha: float\n            Weighting factor to adjust the terms of the fitness function.\n        \"\"\"\n        self.__alpha = alpha\n\n\n    def save_results_and_weights(self, file_name):\n        \"\"\"\n        Save the results from the weighted k-means execution to csv files.\n        \n        Parameters\n        ----------\n        file_name: str\n            Name of file.\n        \"\"\"\n        self.__df_results.to_csv (file_name, \n            sep = '\\t', encoding = 'utf-8', index = False)\n        \n        self.__df_weights.to_csv (file_name.replace('_evals_', '_weights_'), \n            sep = '\\t', encoding = 'utf-8', index = False)\n\n\n    def get_col_results (self, col_name):\n        \"\"\"\n        Get a column from the Dataframe containing the experiments' results.\n\n        Returns\n        -------\n        columns: 1d array\n            Array containing the values of one of the columns: \n            alpha, silhouettes, distances, evals, n_clusters, max_dists_centro_highl.\n\n        \"\"\"\n        return self.__df_results [col_name]\n\n\n    def calc_dists_centro_highl (self, centroids, clusters_highl, norm_weights):\n        \"\"\"\n        Compute the weighted distances between centroids and highlights.\n\n        Parameters\n        ----------\n        centroids: 1d array [float]\n            Centroid values for all clusters.\n        clusters_highl: 1d array [int]\n            Contains the cluster to which each highlight belongs. 
\n The array's indices correspond to the highlights' indices.\n norm_weights: 1d array [float]\n Weights for features normalized between 0 and 1.\n\n Returns\n -------\n dist_centro_highl: list [(1d array [float], int)]\n List with tuples that contain the distances for all highlights\n in that cluster and the cluster index.\n\n \"\"\"\n return [(np.sqrt(sum(w*(centroids[c]-highlight)**2)) , c)\n for c, highlight, w in zip (clusters_highl, self.__highlights, norm_weights)]\n\n\n def calc_max_radius_centro_highl (self, dists_centro_highl):\n \"\"\"\n Compute the distances for the furthest highlights in each cluster.\n\n Parameters\n ----------\n dists_centro_highl: list [(array_like[float], int)]\n List with tuples that contain a cluster index and the distances for all \n highlights in that cluster.\n\n Returns\n -------\n radius_indices: list [(float, int)]\n Distance of the furthest highlight in that cluster and the cluster index.\n \"\"\"\n clusters = np.unique (zip (*dists_centro_highl)[1])\n groupby = [filter (lambda p: p[1]==c, dists_centro_highl) for c in clusters]\n\n return [(max(zip(*g)[0]), g[0][1]) for g in groupby]\n\n\n def sum_dists_centro_highl (self, dist_centro_highl):\n \"\"\"\n Sum all the distances between centroids and highlights.\n\n\t\tParameters\n ----------\n dist_centro_highl: list [(1d array [float], int)]\n List with tuples that contain the distances for all highlights\n in that cluster and the cluster index.\n\t\t\t\n\t\tReturns\n\t\t-------\n\t\tsum_dist_centro_highl: float\n\t\t\tSum of distances between highlights and centroids.\n\t\t\"\"\"\n \n return sum (zip(*dist_centro_highl)[0])\n \n\n def append_solution (self, weights):\n \"\"\"\n Stores 'silhouette', 'weights', 'distances between highlights and centroids'\n\t\tand 'objective function value' for the current iteration.\n\t\t\n\t\tParameters\n ----------\n\t\tweights: 1d array [float]\n Weights for features.\n\t\t\"\"\"\n\t\t\n ev, silh, avg_dists, radius, norm_weights = self.run_weighted_clustering (\n weights, save_params=True)\n\n self.__df_results = self.__df_results.append ( pd.DataFrame ( \n np.array([[ self.__alpha, silh, avg_dists, ev, self.__n_clusters, str(radius) ]]), \n columns = self.__labels_results ) )\n\n alpha_and_weights = np.array([np.concatenate([[self.__alpha], norm_weights])])\n\n self.__df_weights = self.__df_weights.append ( pd.DataFrame ( \n alpha_and_weights, columns = self.__labels_weights ) )\n\n\n def normalize_weights (self, weights):\n \"\"\"\n Normalize weights between 0 and 1, having the sum always being 1.\n \n Parameters\n ----------\n weights: 1d array [float]\n Weights for features.\n \n Returns\n -------\n norm_weights: 1d array [float]\n Weights normalized between 0 and 1.\n \"\"\"\n norm_weights = (weights - self.__lbound) / (self.__ubound - self.__lbound)\n return norm_weights / sum (norm_weights)\n\n\n def count_clusters_no_highl (self, clusters_highl): \n \"\"\"\n Count the number of clusters not containing highlights.\n \n Parameters\n ----------\n clusters_highl: 1d array [int]\n The cluster to which each highlight belongs.\n \n Returns\n -------\n n_clusters_contain_highl: int\n Number of clusters not containing highlights: \n \"\"\" \n n_clusters_no_highl = 0\n\n for i in range (self.__n_clusters):\n if i not in clusters_highl:\n n_clusters_no_highl += 1 \n\n return n_clusters_no_highl\n \n\n def run_weighted_clustering (self, weights, save_params = False):\n \"\"\"\n Run the Weighted clustering algorithm.\n\n Parameters\n ----------\n weights: 1d array 
[float]\n            Weights of features.\n        save_params: boolean\n            Defines whether the current solution will be added to the DataFrame.\n\n        Returns\n        -------\n        evaluation: float\n            Fitness value.\n        silhouette: float\n            Average silhouette value for all data points.\n        avg_dist_centro_highl: float\n            Average distance between centroids and highlights.\n        radius_centro_highl: 1d array [(float,int)]\n            Array containing distances of furthest highlights in each cluster and the cluster index.\n        norm_weights: 1d array [float]\n            Weights normalized between 0 and 1.\n            The sum of these weights is 1.\n        \"\"\"\n        # Weights normalized between lbound and ubound\n        weights_experiments = self.normalize_weights (weights)\n        \n        if self.__algorithm == 'kmeans':\n            # Run K-Means using a weighted euclidean distance\n            rng=rd.Random()\n            rng.seed(self.__rnd_seed)\n\n            self.__clusterer._rng = rng\n            self.__clusterer._distance = weighted_euclidean (weights_experiments)\n            \n            assigned_clusters = self.__clusterer.cluster (self.__data, assign_clusters=True)\n\n            centroids = np.array (self.__clusterer.means())\n\t\t\t\n        elif self.__algorithm == 'pfcm':\n\t\t\t# Run the P-FCM algorithm with weights\n            self.__clusterer.set_randomizer (self.__rnd_seed)\n            u, centroids = self.__clusterer.run_pfmc()\n\t\n            assigned_clusters = np.argmax (u, axis=0)\n\t\t\n        else:\n            # Run Fuzzy-C-Means using a weighted euclidean distance\n            n_data = self.__data.shape[0]\n            u_starter = np.random.rand (self.__n_clusters, n_data)\n            u_starter = cluster._cmeans.normalize_columns (u_starter)\n\n            centroids, u, u0, d, jm, p, fpc = cluster.cmeans(self.__data.T, self.__n_clusters, \n                2, error=0.005, maxiter=500, init=u_starter, weights= weights_experiments)\n\n            assigned_clusters = np.argmax (u, axis=0)\n\n        # Clusters to which the highlights are assigned\n        clusters_highl = assigned_clusters [\n            self.__indexes_highl[0]:self.__indexes_highl[-1]+1]\n\n        # Number of clusters that do not contain any highlights\n        #n_clusters_no_highl = self.count_clusters_no_highl (clusters_highl)\n        \n        # All distances between centroids and highlights\n        dists_centro_highl = self.calc_dists_centro_highl (\n            centroids, clusters_highl, weights_experiments)  \n        \n        silhouette = (silhouette_score (self.__data, assigned_clusters, \n            metric=weighted_euclidean (weights_experiments)) + 1.0) / 2.0\n        \n        # Average distance between clusters and highlights\n        avg_dist_centro_highl = self.sum_dists_centro_highl (\n            dists_centro_highl) / self.__highlights.shape[0]\n        \n        evaluation = self.__alpha * silhouette - (1-self.__alpha) * avg_dist_centro_highl  \n        \n        if save_params:\n            radius_centro_highl = self.calc_max_radius_centro_highl (dists_centro_highl)\n            return evaluation, silhouette, avg_dist_centro_highl, radius_centro_highl, weights_experiments\n        else:  \n            return evaluation\n", "id": "9281430", "language": "Python", "matching_score": 5.16703462600708, "max_stars_count": 1, "path": "SaKS_DataClassification/evaluator/evaluator_weights_features.py" }, { "content": "import os\nimport math\nimport pandas as pd\nimport numpy as np\n\nfrom swarm import pso_exec as Optim\nfrom evaluator import evaluator_weights_features as Eval\n\n\ndef _get_best_n_clusters(data, highlights, indexes_highl, max_n_clusters,\n                         alpha, weights, lbound, ubound, cluster_alg):\n    \"\"\"\n    Get the optimal number of clusters according to the silhouette.\n\n    Parameters\n    ----------\n    data: 2d array [float]\n        Text's sentences in bag-of-words format.\n    highlights: 2d array [float]\n        Research highlights in bag-of-words format.\n    indexes_highl: 1d array [int]\n        List of sentences 
indices that are highlights.\n max_n_clusters: int\n Max number of clusters for silhouette score.\n alpha: float\n Weighting factor for the objective function.\n weights: 1d array [float]\n Weights of features.\n lbound: float\n Min value for each weight.\n ubound: float\n Max value for each weight.\n\n Returns\n -------\n best_n_clusters: int\n Number of clusters with best silhouette score.\n \"\"\"\n silhouettes_test = []\n\n # Test all possible number of clusters from 2 to sqrt(num_data_instances)\n for n_clusters in range(2, int(max_n_clusters+1)):\n\n evaluator = Eval.Evaluator_clustering(\n data, highlights, weights, indexes_highl, n_clusters, alpha,\n lbound, ubound, cluster_alg)\n\n evaluator.run_weighted_clustering(weights)\n evaluator.append_solution(weights)\n\n silhouettes_test.append(evaluator.get_col_results('silhouettes')[0])\n # Get the number of cluster with highest average silhouette\n\n return np.argmax(silhouettes_test) + 2\n\n\nif __name__ == \"__main__\":\n folder_name = 'files_cluster_values'\n\n dir_cluster_values = os.path.join(os.path.dirname(__file__), folder_name)\n\n files = os.listdir(dir_cluster_values)\n files_value_arrays = [os.path.join(dir_cluster_values, f) for f in files]\n\n files_value_arrays = sorted(files_value_arrays)\n\n cluster_alg = 'pfcm'\n # Possible values for weights in the fitness function\n alphas = [1.0/2.0, 2.0/3.0, 1.0/3.0]\n # Read all files in the folder \"files_cluster_values\"\n for f in files_value_arrays:\n print(f)\n raw_data = pd.read_csv(f, delimiter=\"\\t\", encoding='utf-8')\n\n sim_matrix = np.loadtxt(f.replace(folder_name, 'files_sim_matrices'),\n delimiter=\";\", encoding='utf-8')\n\n cols = filter(lambda c: 'tfidf' not in c, raw_data.columns)\n\n n_cols = len(cols) - 1\n\n # Indexes of highlights sentences\n indexes_highl = raw_data.index[raw_data['is_a_highlight'] == 1.0\n ].tolist()\n # Normalize the data from 0 to 1\n data = np.apply_along_axis(\n lambda x: (x-min(x)) / (max(x)-min(x))\n if max(x) > 0 else 0, 0, raw_data.values[:, 1:])\n\n highlights = data[indexes_highl]\n n_clusters = len(highlights)\n\n # The maximum number of clusters is defined as the square root of the\n # number of data instances\n max_n_clusters = math.floor(np.sqrt(data.shape[0]))\n\n lbound, ubound = -1.0, 1.0\n\n for run in range(1, 6):\n weights = np.array([1.0/n_cols for _ in range(n_cols)])\n\n if cluster_alg == 'pfcm':\n evaluator = Eval.Evaluator_clustering(\n data, highlights, weights, indexes_highl, n_clusters, 0.5,\n lbound, ubound, cluster_alg, sim_matrix=sim_matrix)\n else:\n evaluator = Eval.Evaluator_clustering(\n data, highlights, weights, indexes_highl, n_clusters, 0.5,\n lbound, ubound, cluster_alg)\n\n for a in alphas:\n weights = np.array([1.0/n_cols for _ in range(n_cols)])\n\n # Change the evaluator settings\n evaluator.update_alpha(a)\n\n fitness_func = evaluator.run_weighted_clustering\n\n print('******************************************************')\n print('Starting pso...')\n\n # default: 100 particles 50 iters ....30\n solution = Optim.Pso(fitness_func, n_cols, n_part=100,\n max_iter=10, evaluator=evaluator\n ).run_pso()\n print('======================================================')\n\n path = os.path.join(os.path.dirname(__file__), 'files_results')\n basename = os.path.split(f)[-1].replace('.csv', '')\n file_name = '%s_evals_%d.csv' % (basename, run)\n\n full_file_name = os.path.join(path, file_name)\n evaluator.save_results_and_weights(full_file_name)\n print('Files saved')\n", "id": "2031926", "language": 
"Python", "matching_score": 1.3073447942733765, "max_stars_count": 1, "path": "SaKS_DataClassification/weight_optimization.py" }, { "content": "from sklearn.svm import SVC\nfrom sklearn.naive_bayes import GaussianNB\nfrom sklearn.svm import OneClassSVM\nfrom sklearn.linear_model import LogisticRegression\nfrom sklearn.ensemble import RandomForestClassifier\nfrom sklearn.model_selection import GridSearchCV\nfrom sklearn.neighbors import KNeighborsClassifier\nimport numpy as np\nimport os\nimport file_reader\n\n\ndef run_one_class_svm(X_normal, X_highl, file_name):\n\n n_folds = X_highl.shape[0]\n\n print('\\n=========================== ' + file_name +\n ' ===========================')\n\n clf = OneClassSVM(nu=0.1, kernel='rbf', gamma=0.1)\n\n grid_param = {'nu': [0.1, 0.2, 0.5, 0.8],\n 'kernel': ['rbf', 'linear'],\n 'gamma': [0.1, 0.25, 0.5]}\n\n y = X_highl.shape[0] * [1]\n gd_sr = do_grid_search(X_highl, y, clf, grid_param, n_folds)\n\n y_pred = gd_sr.predict(X_normal)\n\n print('Number of non-highlights classified as highlights: ')\n nonhighlights = len(np.where(y_pred == 1)[0])\n allsentences = X_normal.shape[0]\n print('%d of %d sentences' % (nonhighlights, allsentences))\n print(nonhighlights / float(allsentences))\n\n\ndef do_grid_search(X, y, clf, grid_param, n_folds):\n gd_sr = GridSearchCV(estimator=clf, param_grid=grid_param,\n scoring='accuracy', cv=n_folds)\n gd_sr.fit(X, y)\n n_sets_params = len(gd_sr.cv_results_['std_train_score'])\n\n best_results_train = np.zeros((n_folds, n_sets_params))\n best_results_test = np.zeros((n_folds, n_sets_params))\n\n for i in range(n_folds):\n best_results_train[i] = gd_sr.cv_results_[\n 'split' + str(i) + '_train_score']\n\n best_results_test[i] = gd_sr.cv_results_[\n 'split' + str(i) + '_test_score']\n\n for i in range(n_sets_params):\n\n print(gd_sr.cv_results_['params'][i])\n print('Training')\n print(best_results_train[:, i])\n print('Test')\n print(best_results_test[:, i])\n print\n\n print('best params: ')\n print(gd_sr.best_params_)\n print\n return gd_sr\n\n\ndef run_training_test(data, labels, n_folds, algorithm):\n Xy = np.column_stack((data, labels))\n\n np.random.shuffle(Xy)\n\n X, y = Xy[:, 0:-1], Xy[:, -1]\n\n if algorithm == 'logistic_regression':\n clf = LogisticRegression(C=1, penalty='l2')\n grid_param = {'C': [1, 10, 100, 1000], 'tol': [1e-5, 1e-4, 1e-3]}\n\n elif algorithm == 'svm':\n clf = SVC(gamma=1e-3, kernel='rbf')\n grid_param = {'C': [1, 10, 100, 1000],\n 'gamma': [1e-3, 1e-4], 'kernel': ['rbf', 'linear']}\n\n elif algorithm == 'random_forest':\n clf = RandomForestClassifier(n_estimators=50, criterion='gini')\n grid_param = {'n_estimators': [50, 100, 200, 300],\n 'criterion': ['gini', 'entropy']}\n\n elif algorithm == 'knn':\n clf = KNeighborsClassifier(n_neighbors=3)\n grid_param = {'n_neighbors': [3, 5, 7], 'p': [1, 2]}\n\n elif algorithm == 'naive_bayes':\n clf = GaussianNB()\n grid_param = {}\n\n do_grid_search(X, y, clf, grid_param, n_folds)\n\n\ndef run_classification(data, labels):\n ind_highl = np.where(labels == 1)[0]\n np.random.shuffle(ind_highl)\n\n ind_normal = np.where(labels == 0)[0]\n np.random.shuffle(ind_normal)\n ind_normal = ind_normal[0:ind_highl.shape[0]]\n\n print(ind_normal)\n\n data = np.vstack((data[ind_normal], data[ind_highl]))\n labels = np.hstack((labels[ind_normal], labels[ind_highl]))\n\n n_folds = 10\n\n print('------------------- Naive Bayes -------------------')\n run_training_test(data, labels, n_folds, 'naive_bayes')\n\n print('------------------- Logistic Regression 
-------------------')\n run_training_test(data, labels, n_folds, 'logistic_regression')\n\n print('------------------- Random Forest -------------------')\n run_training_test(data, labels, n_folds, 'random_forest')\n\n print('------------------- KNN -------------------')\n run_training_test(data, labels, n_folds, 'knn')\n\n print('------------------- SVM -------------------')\n run_training_test(data, labels, n_folds, 'svm')\n\n\nif __name__ == '__main__':\n path_files = os.path.join(os.path.dirname(__file__), 'files_cluster_values'\n )\n files_names = os.listdir(path_files)\n\n files_names = map(lambda f: os.path.join(path_files, f), files_names)\n job = 'one_class'\n\n if job == 'classification':\n data = np.array([])\n labels = np.array([]).reshape((0,))\n\n for f in files_names:\n X, y = file_reader.load_train_and_test(f)\n\n data = file_reader.add_to_data(X, data)\n labels = np.hstack((labels, y))\n\n run_classification(data, labels)\n else:\n for f in files_names[5:10]:\n X, y = file_reader.load_train_and_test(f)\n\n X_normal = X[np.where(y == 0)[0]]\n X_highl = X[np.where(y == 1)[0]]\n\n run_one_class_svm(X_normal, X_highl, os.path.split(f)[-1])\n", "id": "10193447", "language": "Python", "matching_score": 3.2722506523132324, "max_stars_count": 1, "path": "SaKS_DataClassification/classifier.py" }, { "content": "import numpy as np\nimport pandas as pd\n\n\ndef get_single_label(data, labels, lbl_value):\n indices = np.where(labels == lbl_value)[0]\n return data[indices]\n\n\ndef load_train_and_test(file_name):\n data = pd.read_csv(file_name, delimiter=\"\\t\", encoding='utf-8')\n X = data.values[:, 1:]\n y = data['is_a_highlight'].values\n return X, y\n\n\ndef add_to_data(X, data):\n if data.size == 0:\n return X\n\n # Number of attributes after keywords\n n_atribs_k = 8\n if data.shape[1] > X.shape[1]:\n n_extra_cols = data.shape[1] - X.shape[1]\n ind_complete = X.shape[1] - n_atribs_k\n\n zeros = np.zeros((X.shape[0], n_extra_cols))\n\n X_half = np.hstack((zeros, X[:, -n_atribs_k:]))\n X_full = np.hstack((X[:, :ind_complete], X_half))\n return np.vstack((data, X_full))\n elif data.shape[1] < X.shape[1]:\n n_extra_cols = X.shape[1] - data.shape[1]\n ind_complete = data.shape[1] - n_atribs_k\n\n zeros = np.zeros((data.shape[0], n_extra_cols))\n\n data_half = np.hstack((zeros, data[:, -n_atribs_k:]))\n data_full = np.hstack((data[:, :ind_complete], data_half))\n\n return np.vstack((data_full, X))\n\n else:\n return np.vstack((data, X))\n\n\ndef create_dataset(list_of_files, instances='all'):\n data = np.array([])\n labels = np.array([]).reshape((0,))\n for f in list_of_files:\n X, y = load_train_and_test(f)\n\n data = add_to_data(X, data)\n labels = np.hstack((labels, y))\n\n if instances == 'highlights':\n data = get_single_label(data, labels, 1)\n labels = np.array([1]*data.shape[0])\n\n elif instances == 'normal':\n data = get_single_label(data, labels, 0)\n labels = np.array([1]*data.shape[0])\n\n return data, labels\n", "id": "8954171", "language": "Python", "matching_score": 0.644015908241272, "max_stars_count": 1, "path": "SaKS_DataClassification/file_reader.py" }, { "content": "import os\nimport numpy as np\n\n\ndef completezeroes(attribs, max_cols):\n diff_cols = max_cols - attribs.shape[1]\n zeroes_cols = np.zeros((attribs.shape[0], diff_cols))\n return np.hstack((attribs[:, 0:1], zeroes_cols, attribs[:, 1:]))\n\n\nlist_papers = os.listdir(\"preprocessed_papers\")\n\nmax_columns = 0\npaper_attrs = {}\nfor paper in list_papers:\n with open(os.path.join(\"preprocessed_papers\", 
paper)) as f:\n        attribs = np.loadtxt(f, delimiter=',', skiprows=1)[:, 1:]\n        if attribs.shape[1] > max_columns:\n            max_columns = attribs.shape[1]\n        paper_attrs[paper] = attribs\n\nfor paper in list_papers:\n    cols = paper_attrs[paper].shape[1]\n    if cols < max_columns:\n        paper_attrs[paper] = completezeroes(paper_attrs[paper], max_columns)\n    np.savetxt(os.path.join(\"my_papers\", paper[:-4]), paper_attrs[paper])\n# 0 1>-4 -4>\n", "id": "7820523", "language": "Python", "matching_score": 0.09978196769952774, "max_stars_count": 1, "path": "SaKS_DataClassification/transformar_artigo.py" }, { "content": "import os\nimport re\n\n\ndef get_word(word, dict_embedding):\n    word = word.replace(\"…\", \"\")\n    if word not in dict_embedding:\n        if word.capitalize() in dict_embedding:\n            return str(dict_embedding[word.capitalize()])\n        words = word.lower().split('-')\n        embeds = \" \".join([str(dict_embedding[word]) for word in words\n                           if word in dict_embedding])\n        return str(embeds)\n    else:\n        return str(dict_embedding[word])\n\n\ndict_embedding = {}\nwith open('1-billion-word-language-modeling-benchmark-r13output.word2vec.vec',\n          encoding='utf8') as f:\n    print(\"Reading words\")\n    for i in range(2, 559185):\n        value = f.readline().split(' ')[0]\n        dict_embedding[value] = i\n    f.close()\n\ntxts_list = os.listdir(\"./sentences\")\n\nfor txt in txts_list:\n    with open('./sentences/%s' % txt, encoding='utf8') as f:\n        list_of_embeddings = []\n        sentences = f.read().split('\\n')\n        for sent in sentences:\n            embeddings = [get_word(word, dict_embedding) for word in\n                          sent.split(\" \")]\n            list_of_embeddings.append(\" \".join(embeddings).strip())\n        f.close()\n\n    with open('./my_papers/%s' % txt, 'w', encoding='utf8') as f:\n        output = re.sub(r'\\s\\s+', ' ', '\\n'.join(list_of_embeddings))\n        f.write(output)\n        f.close()\n        print(\"%s saved successfully\" % txt)\n", "id": "7373676", "language": "Python", "matching_score": 1.1470504999160767, "max_stars_count": 1, "path": "RefreshWordEmbeddings/data/sentences_to_wordembeddings.py" }, { "content": "import os\n\n\ndef main():\n    filelist = os.listdir('./rouge')\n    for filename in filelist:\n\n        with open(os.path.join('./rouge', filename)) as f:\n            block_sentences = f.read().split('\\n\\n')[1]\n            sents = [sent.split(' - ')[1]\n                     for sent in block_sentences.split('\\n')]\n            f.close()\n\n        with open(os.path.join('./sentences', filename[:-4]), 'w') as f:\n            f.write(\"\\n\".join(sents))\n            f.close()\n\n\nif __name__ == \"__main__\":\n    main()\n", "id": "10252288", "language": "Python", "matching_score": 0.9957253336906433, "max_stars_count": 1, "path": "RefreshWordEmbeddings/data/extrair_frases.py" }, { "content": "import os\nimport numpy as np\nimport rouge\n\ndef score_sentence(sent_rouge, highlights, scorer):\n    scores = scorer.get_scores(sent_rouge, highlights)\n    avg_rouge = (scores['rouge-1']['f'] + scores['rouge-2']['f'] +\n                 scores['rouge-l']['f']) / 3.0\n    return avg_rouge\n\ndef save_rouge(filename, content_rouge_file, rouge_scores, sentences_rouge):\n    # str_indexes = \" \".join([str(i) for i in summary_indexes])\n    new_scores_sents = [\"%f - %s\" % (score, sent) for score, sent in\n                        zip(rouge_scores, sentences_rouge)]\n    content_rouge_file[1] = \"\\n\".join(new_scores_sents)\n    with open('./rouge2/%s' % filename, 'w') as f:\n        f.write('\\n\\n'.join(content_rouge_file))\n        f.close()\n    print(filename + \" saved successfully.\")\n\ndef generate_multioracle(all_files):\n    scorer = rouge.Rouge(['rouge-n', 'rouge-l'], max_n=2, stemming=True)\n    for filename in all_files:\n        # Read the rouge scores\n        content_rouge_file = 
open(\"./rouge/%s\" % filename).read().split('\\n\\n')\n highlights = open(\"./papers_highlights_rouge/%s\" % filename\n ).read().split('\\n')\n sentences_rouge = content_rouge_file[1].split('\\n')\n sentences_rouge = [sent.split(' - ')[1] for sent in sentences_rouge]\n rouge_scores = np.array([score_sentence(sent_rouge, highlights, scorer)\n for sent_rouge in sentences_rouge])\n save_rouge(filename, content_rouge_file, rouge_scores, sentences_rouge)\n # labels = np.array(open(\"./labels/%s\" % filename).read().split('\\n'))\n # summary_indexes = list(np.where(labels == '1')[0])\n # rouge_mean = rouge_scores[summary_indexes].mean()\n # save_multioracle(filename, summary_indexes, rouge_mean, len(labels))\n\nif __name__ == \"__main__\":\n all_files = os.listdir('labels')\n generate_multioracle(all_files)\n", "id": "12653781", "language": "Python", "matching_score": 1.3741692304611206, "max_stars_count": 1, "path": "RefreshWordEmbeddings/data/estimate_rouge.py" }, { "content": "import os\n\nfor filename in os.listdir('./rouge'):\n fullfilename = './rouge/%s' % filename\n with open(fullfilename) as f:\n parts = f.read().split('\\n\\n')\n label1 = [int(sent.split(']')[0][7:])\n for sent in parts[2].split('\\n')[:-1]]\n n_sents = len(parts[1].split('\\n'))\n label_list = [1 if i+1 in label1 else 0 for i in range(n_sents)]\n f.close()\n\n with open('./labels/%s' % filename, 'w') as f:\n f.write('\\n'.join(str(l) for l in label_list))\n f.close()\n print('Labels salvos com sucesso')\n", "id": "4354519", "language": "Python", "matching_score": 0.8281528949737549, "max_stars_count": 1, "path": "SaKS_DataClassification/create_labels.py" }, { "content": "import random as rd\nimport numpy as np\n\n\n# PSO algorithm\nclass Pso:\n \"\"\"\n PSO algorithm implementation.\n\n Attributes\n ----------\n fitness: ( 1d array [float] ) -> float\n Evaluation function for particles.\n length_part: int\n Particle's length.\n n_part: int\n Number of particles.\n evaluator: Evaluator_clustering\n Class containing the fitness function and the data.\n max_iter: int\n Max number of iterations for the PSO.\n inertia: float\n PSO constant.\n c1: float\n PSO constant.\n c2: float\n PSO constant.\n ubound: float\n Max value for each dimension of a particle.\n lbound: float\n Min value for each dimension of a particle.\n \"\"\"\n\n def __init__(self, fitness, length_part, n_part, evaluator=[],\n max_iter=100, inertia=0.7298, c1=1.49618, c2=1.49618,\n ubound=1, lbound=-1):\n self._fitness = fitness\n self._length_part = length_part\n self._n_part = n_part\n self._evaluator = evaluator\n self._inertia = inertia\n self._c1 = c1\n self._c2 = c2\n self._ubound = ubound\n self._lbound = lbound\n self._max_iter = max_iter\n\n def _get_bound_args(self, x):\n \"\"\"\n Return the array's indexes with value lower than the lower bound\n and value higher than the upper bound.\n\n Parameters\n ----------\n x: 1d array [float]\n Position or velocity of particle.\n\n Returns\n -------\n indices_x_upper: 1d array [int]\n Array containing the indices violating the upper bound.\n indices_x_lower: 1d array [int]\n Array containing the indices violating the lower bound.\n \"\"\"\n return np.argwhere(x > self._ubound).flatten(), np.argwhere(\n x < self._lbound).flatten()\n\n def bound_handling(self, pos_part_i, vel_part_i):\n \"\"\"\n Check if a particle is out of the search space bounds, and fix it.\n\n Parameters\n ----------\n pos_part_i: 1d array [float]\n Position of particle i.\n vel_part_i: 1d array [float]\n Velocity of particle i.\n 
\"\"\"\n coef_vel = 0.5\n\n ind_ubounds, ind_lbounds = self._get_bound_args(pos_part_i)\n if ind_ubounds.shape[0]:\n pos_part_i[ind_ubounds] = 2*self._ubound - pos_part_i[ind_ubounds]\n if ind_lbounds.shape[0]:\n pos_part_i[ind_lbounds] = 2*self._lbound - pos_part_i[ind_lbounds]\n\n vel_part_i = -coef_vel * vel_part_i\n\n def limit_velocity(self, vel_part_i):\n \"\"\"\n Check if velocity is in the bounds and change the values that are not.\n\n Parameters\n ----------\n vel_part_i: 1d array [float]\n Velocity of particle i.\n \"\"\"\n ind_ubounds = np.argwhere(vel_part_i > self._ubound).flatten()\n vel_part_i[ind_ubounds] = self._ubound\n\n ind_lbounds = np.argwhere(vel_part_i < self._lbound).flatten()\n vel_part_i[ind_lbounds] = self._lbound\n\n def run_pso(self):\n \"\"\"\n Run PSO algorithm.\n\n Returns\n -------\n g: 1d array [float]\n Best solution found by the PSO.\n \"\"\"\n # Position of particles in the search space\n pos_part = np.array([[rd.random() for _ in range(self._length_part)]\n for i in range(self._n_part)])\n fitness_pos = np.apply_along_axis(\n self._fitness, 1, pos_part) # Fitness of particles\n\n # Velocity of particles\n vel_part = np.array([[rd.uniform(\n -abs(self._ubound-self._lbound), abs(self._ubound-self._lbound))\n for _ in range(self._length_part)] for i in range(self._n_part)])\n\n pbest = np.copy(pos_part)\n fitness_pbest = np.copy(fitness_pos)\n\n gbest = np.copy(pbest[np.argmax(fitness_pbest)])\n fitness_gbest = np.max(fitness_pbest)\n\n # Store the current solution for the current iteration\n self._evaluator.append_solution(gbest)\n\n print('-------------------- eval: '+str(fitness_gbest) +\n \" --------------------\")\n\n n_iter = 0\n while n_iter < self._max_iter:\n for i in range(self._n_part):\n # Update velocity:\n # v = w v + c1 r1 pbest[i] - x[i]) + c2 r2 (gbest - x[i])\n vel_part[i] = self._inertia * vel_part[i] + \\\n self._c1 * rd.random() * (pbest[i] - pos_part[i]) + \\\n self._c2 * rd.random() * (gbest - pos_part[i])\n\n # Put velocity between lbound and ubound\n self.limit_velocity(vel_part[i])\n\n # Update position\n pos_part[i] += vel_part[i]\n\n # Put particle between lbound and ubound\n self.bound_handling(pos_part[i], vel_part[i])\n\n fitness_pos[i] = self._fitness(pos_part[i])\n\n if fitness_pos[i] > fitness_pbest[i]:\n pbest[i] = np.copy(pos_part[i])\n fitness_pbest[i] = fitness_pos[i]\n\n if fitness_pos[i] > fitness_gbest:\n gbest = np.copy(pos_part[i])\n fitness_gbest = fitness_pos[i]\n\n n_iter += 1\n\n # Store the global solution for this iteration\n self._evaluator.append_solution(gbest)\n print('-------------------- eval: '+str(fitness_gbest) +\n \" --------------------\")\n return gbest\n", "id": "8585885", "language": "Python", "matching_score": 1.5166305303573608, "max_stars_count": 1, "path": "SaKS_DataClassification/swarm/pso_exec.py" }, { "content": "import numpy as np\r\nimport random\r\nfrom scipy.spatial import distance_matrix\r\nimport _cmeans as cm\r\n\r\nAXIS_ROW = 0\r\nAXIS_COL = 1\r\n\r\n\r\nclass Pfcm:\r\n def __init__(self, delta, epsilon, learning, data, dists=None,\r\n prox_hints=None, sim_matrix=None):\r\n self.delta = delta\r\n self.epsilon = epsilon\r\n self.learning = learning\r\n self.data = data\r\n\r\n if dists is not None:\r\n self.dists = dists\r\n else:\r\n self.calc_data_dists()\r\n\r\n if prox_hints is not None:\r\n self.prox = prox_hints\r\n elif sim_matrix is not None:\r\n self.calc_prox_hints(sim_matrix)\r\n\r\n def set_randomizer(self, random_seed):\r\n self._random_seed = random_seed\r\n\r\n 
def calc_data_dists(self):\r\n        self.dists = distance_matrix(self.data, self.data)\r\n\r\n    def calc_prox_hints(self, sim_matrix):\r\n        prox = []\r\n        for i in range(sim_matrix.shape[0]):\r\n            indices_hints = np.argwhere(sim_matrix[i] > 0)[0]\r\n            for j in indices_hints:\r\n                prox += [[i, j, sim_matrix[i, j]]]\r\n\r\n        self.prox = np.array(prox)\r\n\r\n    def _calc_induced_proximity(self, u_i, u_j):\r\n        indices_min = np.where(u_i < u_j)\r\n        return np.sum(np.delete(u_j, indices_min)) + np.sum(u_i[indices_min])\r\n\r\n    def _calc_v(self, u):\r\n        v = 0.0\r\n        for p in self.prox:\r\n            i, j, prox_value = int(p[0]), int(p[1]), p[2]\r\n            p_induced = self._calc_induced_proximity(u[:, i], u[:, j])\r\n            v += (p_induced - prox_value)**2 * self.dists[i, j]\r\n        return v\r\n\r\n    def _inner_deriv(self, u, s, t, i, j):\r\n        if (t == i and u[s,i] <= u[s,j]) or (t == j and u[s,j] <= u[s,i]):\r\n            return 1\r\n        else:\r\n            return 0\r\n\r\n\r\n    # Tested\r\n    def _calc_partial_deriv (self, u, s, t):\r\n\r\n        sum_proximities = 0\r\n\r\n        for p in self.prox:\r\n            i, j, prox_value = int(p[0]), int(p[1]), p[2]\r\n\r\n            if self._inner_deriv (u, s, t, i, j):\r\n                induced_prox = self._calc_induced_proximity (u[:,i], u[:,j])\r\n                sum_proximities += (induced_prox - prox_value) #* self.dists[i,j]\r\n\r\n        return 2 * sum_proximities\r\n\r\n\r\n    def _gradient_optimization (self, u):\r\n        v = float(\"inf\")\r\n        v_previous = 0\r\n        n_iters = 0\r\n\r\n        # Repeat while the distance is higher than epsilon\r\n        while abs(v - v_previous) > self.epsilon and n_iters < 3:\r\n            v_previous = v\r\n\r\n            for s in range(u.shape[0]):\r\n                for t in range(u.shape[1]):\r\n\r\n                    partial = self._calc_partial_deriv (u, s, t)\r\n\r\n                    u[s,t] = np.clip( u[s,t] - self.learning * partial, 0, 1)\r\n\r\n            # Performance index to be minimized\r\n            v = self._calc_v (u)\r\n            n_iters += 1\r\n\r\n        sum_clusters = np.sum (u, axis = 0)\r\n\r\n        partit_m = u / sum_clusters\r\n        partit_m [np.isnan (partit_m)] = 1.0 / 3.0\r\n\r\n        return partit_m\r\n\r\n\r\n    def _normalize_part_matrix (self, u):\r\n        f = lambda x: x / np.sum(x)\r\n        return np.apply_along_axis (f, AXIS_ROW, u)\r\n\r\n\r\n    def _get_dist_u (self, u, u_ant):\r\n        if u is None:\r\n            return float('inf')\r\n        return np.linalg.norm(u - u_ant)\r\n\r\n\r\n    def _calc_centers (self, u):\r\n\r\n        return u.dot (self.data) / np.atleast_2d(np.sum(u,axis=1)).T\r\n\r\n\r\n    def run_pfmc (self):\r\n        n_iters = 0\r\n\r\n        m = 2\r\n        n_centers = self.prox.shape[1]\r\n\r\n        np.random.seed (seed=self._random_seed)\r\n        u = np.random.rand (n_centers, self.data.shape[0])\r\n        u = self._normalize_part_matrix (u)\r\n        stopping = False\r\n\r\n        # Repeat while the distance is higher than delta\r\n        while not(stopping) and n_iters < 4:\r\n\r\n            u_ant = np.copy(u)\r\n\r\n            # Run 1 iter of FCM\r\n            centers, u, u0, d, jm, p, fpc = cm.cmeans(\r\n                self.data.T, n_centers, m, error=0.0, maxiter=2, init=u)\r\n\r\n            # Internal optimization loop\r\n            u = self._gradient_optimization (u)\r\n\r\n            stopping = self._get_dist_u (u, u_ant) <= self.delta\r\n            n_iters += 1\r\n\r\n        return u, self._calc_centers (u**m)\r\n\r\n\r\nif __name__ == \"__main__\":\r\n\r\n    n_centers = 3\r\n\r\n    data = np.random.rand (100, 8)\r\n\r\n    dists = distance_matrix (data, data)\r\n\r\n    indices_hints = np.array( range(data.shape[0]) )\r\n    np.random.shuffle (indices_hints)\r\n    indices_hints = indices_hints[:20]\r\n\r\n    prox = []\r\n\r\n    for i in range (0, indices_hints.shape[0], 2):\r\n        i1 = indices_hints[i]\r\n        i2 = indices_hints[i+1]\r\n        prox_value = random.random()\r\n        prox += [[i1, i2, prox_value]]\r\n        prox += [[i2, i1, 
prox_value]]\r\n\r\n    for i in range(data.shape[0]):\r\n        prox += [[i,i, 1.0]]\r\n    prox = np.array ( prox )\r\n\r\n    learning = 0.05\r\n    delta = 0.005\r\n    epsilon = 0.05\r\n\r\n    pfcm = Pfcm (delta, epsilon, learning, data, dists=dists, prox_hints=prox)\r\n    pfcm.set_randomizer (0)  # any seed; run_pfmc reads self._random_seed, which is otherwise unset\r\n    print(pfcm.run_pfmc())\r\n", "id": "836401", "language": "Python", "matching_score": 2.6588709354400635, "max_stars_count": 1, "path": "SaKS_DataClassification/cluster/_pfcm.py" }, { "content": "\"\"\"\ncmeans.py : Fuzzy C-means clustering algorithm.\n\"\"\"\nimport numpy as np\nfrom scipy.spatial.distance import cdist\nfrom .normalize_columns import normalize_columns, normalize_power_columns\n\n\ndef _cmeans0(data, u_old, c, m, metric, weights=None):\n    \"\"\"\n    Single step in generic fuzzy c-means clustering algorithm.\n\n    Modified from Ross, Fuzzy Logic w/Engineering Applications (2010),\n    pages 352-353, equations 10.28 - 10.35.\n\n    Parameters inherited from cmeans()\n    \"\"\"\n    # Normalizing, then eliminating any potential zero values.\n    u_old = normalize_columns(u_old)\n    u_old = np.fmax(u_old, np.finfo(np.float64).eps)\n\n    um = u_old ** m\n\n    # Calculate cluster centers\n    data = data.T\n    cntr = um.dot(data) / np.atleast_2d(um.sum(axis=1)).T\n\n    if weights is not None:\n        d = _distance_weights(data, cntr, weights)\n    else:\n        d = _distance(data, cntr, metric)\n    d = np.fmax(d, np.finfo(np.float64).eps)\n\n    jm = (um * d ** 2).sum()\n\n    u = normalize_power_columns(d, - 2. / (m - 1))\n\n    return cntr, u, jm, d\n\n\ndef _distance(data, centers, metric='euclidean'):\n    \"\"\"\n    Euclidean distance from each point to each cluster center.\n\n    Parameters\n    ----------\n    data : 2d array (N x Q)\n        Data to be analyzed. There are N data points.\n    centers : 2d array (C x Q)\n        Cluster centers. There are C clusters, with Q features.\n    metric: string\n        By default is set to euclidean. Passes any option accepted by\n        ``scipy.spatial.distance.cdist``.\n    Returns\n    -------\n    dist : 2d array (C x N)\n        Euclidean distance from each point, to each cluster center.\n\n    See Also\n    --------\n    scipy.spatial.distance.cdist\n    \"\"\"\n    return cdist(data, centers, metric=metric).T\n\n\ndef _distance_weights(data, centers, weights):\n    \"\"\"\n    Euclidean distance considering weights for attributes.\n\n    Parameters\n    ----------\n    data : 2d array (N x Q)\n        N data points.\n    centers : 2d array (C x Q)\n        C centers with Q features each.\n    weights : 1d array (Q,)\n        Weights applied to each of the Q features.\n    Returns\n    -------\n    dist : 2d array (C x N)\n        Weighted euclidean distance from each point, to each cluster center.\n    \"\"\"\n    dists = np.empty((data.shape[0], centers.shape[0]))\n\n    for i in range(data.shape[0]):\n        for j in range(centers.shape[0]):\n            dists[i][j] = sum(weights * (data[i] - centers[j])**2)\n    return np.sqrt(dists).T\n\n\ndef _fp_coeff(u):\n    \"\"\"\n    Fuzzy partition coefficient `fpc` relative to fuzzy c-partitioned\n    matrix `u`. Measures 'fuzziness' in partitioned clustering.\n\n    Parameters\n    ----------\n    u : 2d array (C, N)\n        Fuzzy c-partitioned matrix; N = number of data points and C = number\n        of clusters.\n\n    Returns\n    -------\n    fpc : float\n        Fuzzy partition coefficient.\n\n    \"\"\"\n    n = u.shape[1]\n\n    return np.trace(u.dot(u.T)) / float(n)\n\n\ndef cmeans(data, c, m, error, maxiter, metric='euclidean', init=None,\n           seed=None, weights=None):\n    \"\"\"\n    Fuzzy c-means clustering algorithm [1].\n\n    Parameters\n    ----------\n    data : 2d array, size (S, N)\n        Data to be clustered. 
N is the number of data sets; S is the number\n        of features within each sample vector.\n    c : int\n        Desired number of clusters or classes.\n    m : float\n        Array exponentiation applied to the membership function u_old at each\n        iteration, where U_new = u_old ** m.\n    error : float\n        Stopping criterion; stop early if the norm of (u[p] - u[p-1]) < error.\n    maxiter : int\n        Maximum number of iterations allowed.\n    metric: string\n        By default is set to euclidean. Passes any option accepted by\n        ``scipy.spatial.distance.cdist``.\n    init : 2d array, size (S, N)\n        Initial fuzzy c-partitioned matrix. If none provided, algorithm is\n        randomly initialized.\n    seed : int\n        If provided, sets random seed of init. No effect if init is\n        provided. Mainly for debug/testing purposes.\n    weights : 1d array, length S\n        It contains weights for the data attributes.\n\n    Returns\n    -------\n    cntr : 2d array, size (S, c)\n        Cluster centers. Data for each center along each feature provided\n        for every cluster (of the `c` requested clusters).\n    u : 2d array, (S, N)\n        Final fuzzy c-partitioned matrix.\n    u0 : 2d array, (S, N)\n        Initial guess at fuzzy c-partitioned matrix (either provided init or\n        random guess used if init was not provided).\n    d : 2d array, (S, N)\n        Final Euclidean distance matrix.\n    jm : 1d array, length P\n        Objective function history.\n    p : int\n        Number of iterations run.\n    fpc : float\n        Final fuzzy partition coefficient.\n\n\n    Notes\n    -----\n    The algorithm implemented is from Ross et al. [1]_.\n\n    Fuzzy C-Means has a known problem with high dimensionality datasets, where\n    the majority of cluster centers are pulled into the overall center of\n    gravity. If you are clustering data with very high dimensionality and\n    encounter this issue, another clustering method may be required. For more\n    information and the theory behind this, see Winkler et al. [2]_.\n\n    References\n    ----------\n    .. [1] Ross, <NAME>. Fuzzy Logic With Engineering Applications, 3rd ed.\n           Wiley. 2010. ISBN 978-0-470-74376-8 pp 352-353, eq 10.28 - 10.35.\n\n    .. [2] <NAME>., <NAME>., & <NAME>. Fuzzy c-means in high\n           dimensional spaces. 2012. Contemporary Theory and Pragmatic\n           Approaches in Fuzzy Computing Utilization, 1.\n    \"\"\"\n    # Setup u0\n    if init is None:\n        if seed is not None:\n            np.random.seed(seed=seed)\n        n = data.shape[1]\n        u0 = np.random.rand(c, n)\n        u0 = normalize_columns(u0)\n        init = u0.copy()\n    u0 = init\n    u = np.fmax(u0, np.finfo(np.float64).eps)\n\n    # Initialize loop parameters\n    jm = np.zeros(0)\n    p = 0\n\n    # Main cmeans loop\n    while p < maxiter - 1:\n        u2 = u.copy()\n        [cntr, u, Jjm, d] = _cmeans0(data, u2, c, m, metric, weights)\n        jm = np.hstack((jm, Jjm))\n        p += 1\n\n        # Stopping rule\n        if np.linalg.norm(u - u2) < error:\n            break\n\n    # Final calculations\n    error = np.linalg.norm(u - u2)\n    fpc = _fp_coeff(u)\n\n    return cntr, u, u0, d, jm, p, fpc\n\n\ndef cmeans_predict(test_data, cntr_trained, m, error, maxiter,\n                   metric='euclidean', init=None, seed=None):\n    \"\"\"\n    Prediction of new data given a trained fuzzy c-means framework [1].\n\n    Parameters\n    ----------\n    test_data : 2d array, size (S, N)\n        New, independent data set to be predicted based on trained c-means\n        from ``cmeans``. 
N is the number of data sets; S is the number of\n        features within each sample vector.\n    cntr_trained : 2d array, size (S, c)\n        Location of trained centers from prior training c-means.\n    m : float\n        Array exponentiation applied to the membership function u_old at each\n        iteration, where U_new = u_old ** m.\n    error : float\n        Stopping criterion; stop early if the norm of (u[p] - u[p-1]) < error.\n    maxiter : int\n        Maximum number of iterations allowed.\n    metric: string\n        By default is set to euclidean. Passes any option accepted by\n        ``scipy.spatial.distance.cdist``.\n    init : 2d array, size (S, N)\n        Initial fuzzy c-partitioned matrix. If none provided, algorithm is\n        randomly initialized.\n    seed : int\n        If provided, sets random seed of init. No effect if init is\n        provided. Mainly for debug/testing purposes.\n\n    Returns\n    -------\n    u : 2d array, (S, N)\n        Final fuzzy c-partitioned matrix.\n    u0 : 2d array, (S, N)\n        Initial guess at fuzzy c-partitioned matrix (either provided init or\n        random guess used if init was not provided).\n    d : 2d array, (S, N)\n        Final Euclidean distance matrix.\n    jm : 1d array, length P\n        Objective function history.\n    p : int\n        Number of iterations run.\n    fpc : float\n        Final fuzzy partition coefficient.\n\n    Notes\n    -----\n    Ross et al. [1]_ did not include a prediction algorithm to go along with\n    fuzzy c-means. This prediction algorithm works by repeating the clustering\n    with fixed centers, then efficiently finds the fuzzy membership at all\n    points.\n\n    References\n    ----------\n    .. [1] Ross, <NAME>. Fuzzy Logic With Engineering Applications, 3rd ed.\n           Wiley. 2010. ISBN 978-0-470-74376-8 pp 352-353, eq 10.28 - 10.35.\n    \"\"\"\n    c = cntr_trained.shape[0]\n\n    # Setup u0\n    if init is None:\n        if seed is not None:\n            np.random.seed(seed=seed)\n        n = test_data.shape[1]\n        u0 = np.random.rand(c, n)\n        u0 = normalize_columns(u0)\n        init = u0.copy()\n    u0 = init\n    u = np.fmax(u0, np.finfo(np.float64).eps)\n\n    # Initialize loop parameters\n    jm = np.zeros(0)\n    p = 0\n\n    # Main cmeans loop\n    while p < maxiter - 1:\n        u2 = u.copy()\n        [u, Jjm, d] = _cmeans_predict0(test_data, cntr_trained, u2, c, m,\n                                       metric)\n        jm = np.hstack((jm, Jjm))\n        p += 1\n\n        # Stopping rule\n        if np.linalg.norm(u - u2) < error:\n            break\n\n    # Final calculations\n    error = np.linalg.norm(u - u2)\n    fpc = _fp_coeff(u)\n\n    return u, u0, d, jm, p, fpc\n\n\ndef _cmeans_predict0(test_data, cntr, u_old, c, m, metric):\n    \"\"\"\n    Single step in fuzzy c-means prediction algorithm. Clustering algorithm\n    modified from Ross, Fuzzy Logic w/Engineering Applications (2010)\n    p.352-353, equations 10.28 - 10.35, but this method to generate fuzzy\n    predictions was independently derived by <NAME>.\n\n    Parameters inherited from cmeans()\n\n    Very similar to initial clustering, except `cntr` is not updated, thus\n    the new test data are forced into known (trained) clusters.\n    \"\"\"\n    # Normalizing, then eliminating any potential zero values.\n    u_old = normalize_columns(u_old)\n    u_old = np.fmax(u_old, np.finfo(np.float64).eps)\n\n    um = u_old ** m\n    test_data = test_data.T\n\n    # For prediction, we do not recalculate cluster centers. The test_data is\n    # forced to conform to the prior clustering.\n\n    d = _distance(test_data, cntr, metric)\n    d = np.fmax(d, np.finfo(np.float64).eps)\n\n    jm = (um * d ** 2).sum()\n\n    u = normalize_power_columns(d, - 2. 
/ (m - 1))\n\n    return u, jm, d\n", "id": "772265", "language": "Python", "matching_score": 1.9000076055526733, "max_stars_count": 1, "path": "SaKS_DataClassification/cluster/_cmeans.py" }, { "content": "\"\"\"\nFuzzy clustering subpackage, containing fuzzy c-means clustering algorithm.\nThis can be either supervised or unsupervised, depending on whether the U_init kwarg is\nused (if guesses are provided, it is supervised).\n\n\"\"\"\n__all__ = ['cmeans','Pfcm']\n\nfrom ._cmeans import cmeans\nfrom ._pfcm import Pfcm", "id": "504238", "language": "Python", "matching_score": 0.7466645240783691, "max_stars_count": 1, "path": "SaKS_DataClassification/cluster/__init__.py" }, { "content": "\n__all__ = ['Evaluator_clustering', 'weighted_euclidean']\n\nfrom .evaluator_weights_features import Evaluator_clustering\nfrom .evaluator_weights_features import weighted_euclidean\n", "id": "8576399", "language": "Python", "matching_score": 0.6948113441467285, "max_stars_count": 1, "path": "SaKS_DataClassification/evaluator/__init__.py" }, { "content": "\n__all__ = ['Pso']\n\nfrom .pso_exec import Pso\n", "id": "10783233", "language": "Python", "matching_score": 0.3616619408130646, "max_stars_count": 1, "path": "SaKS_DataClassification/swarm/__init__.py" } ]
1.374169
gusajz
[ { "content": "# coding=utf-8\n\nfrom django import forms\n\nfrom .models import Document, Affidavit\n\n\nclass DocumentForm(forms.ModelForm):\n # el archivo debería ser read only si es update.\n pass\n\n\nclass AffidavitForm(forms.ModelForm):\n\n class Meta:\n model = Affidavit\n", "id": "6127525", "language": "Python", "matching_score": 0.9171995520591736, "max_stars_count": 0, "path": "ddjj/ddjjapp/forms.py" }, { "content": "from django.contrib import admin\n\nfrom .models import Document, AffidavitTemplate, Person\n\nadmin.site.register(Document)\nadmin.site.register(AffidavitTemplate)\nadmin.site.register(Person)\n", "id": "11956322", "language": "Python", "matching_score": 0.430477112531662, "max_stars_count": 0, "path": "ddjj/ddjjapp/admin.py" }, { "content": "# -*- coding: utf-8 -*-\n# Create your views here.\n\nfrom django.views.generic import CreateView, UpdateView, DetailView, ListView, FormView\nfrom django.core.exceptions import ObjectDoesNotExist\nfrom django.http import Http404\n\nfrom django.core.urlresolvers import reverse\n\n\n#from braces.views import LoginRequiredMixin\n\nfrom .models import Person, Document, Affidavit\n\nfrom .forms import DocumentForm, AffidavitForm\n\n\nclass PersonCreateView(CreateView):\n model = Person\n\n\n# class PersonUpdateView(LoginRequiredMixin, UpdateView):\nclass PersonUpdateView(UpdateView):\n model = Person\n\n\nclass PersonDetailView(DetailView):\n model = Person\n\n\nclass PersonResultsView(PersonDetailView):\n template_name = \"person/results.html\"\n\n\nclass PersonListView(ListView):\n model = Person\n template_name = \"person/list.jade\"\n\n\nclass DocumentEditMixin(object):\n template_name = \"document/form.jade\"\n #form_class = DocumentForm\n model = Document\n\n def get_success_url(self):\n return reverse('document-detail', kwargs={'pk': self.object.pk})\n\n\nclass DocumentCreateView(DocumentEditMixin, CreateView):\n pass\n\n\nclass DocumentUpdateView(DocumentEditMixin, UpdateView):\n pass\n\n\nclass DocumentDetailView(DocumentEditMixin, DetailView):\n template_name = \"document/detail.jade\"\n model = Document\n\n\nclass DocumentListView(ListView):\n model = Document\n template_name = \"document/list.jade\"\n\n\nclass AffidavitUpdateView(UpdateView):\n model = Affidavit\n template_name = \"affidavit/update.jade\"\n form_class = AffidavitForm\n\n\nclass AffidavitScrapView(UpdateView):\n model = Affidavit\n template_name = \"affidavit/scrap.jade\"\n #form_class = AffidavitForm\n\n def get_object(self, queryset=None):\n if queryset is None:\n queryset = self.get_queryset()\n\n pk = self.kwargs.get(self.pk_url_kwarg, None)\n\n try:\n obj = queryset.get_or_create(original_document_id=pk)\n except ObjectDoesNotExist:\n raise Http404(\"No %(verbose_name)s found matching the query\" %\n {'verbose_name': queryset.model._meta.verbose_name})\n return obj\n\n\n\n# def get_object(self, queryset=None):\n# import ipdb\n# ipdb.set_trace()\n# return super(CreateView)\n# Si no existe, debería crearlo (con el id)\n", "id": "5675494", "language": "Python", "matching_score": 2.6513760089874268, "max_stars_count": 0, "path": "ddjj/ddjjapp/views.py" }, { "content": "from django.conf.urls import patterns, url\n\nfrom .views import PersonCreateView, PersonUpdateView, PersonDetailView, PersonResultsView, PersonListView\nfrom .views import DocumentDetailView, DocumentListView, DocumentUpdateView, DocumentCreateView, AffidavitScrapView\n\n\nurlpatterns = patterns(\"\",\n url(\n regex=r\"^$\",\n view=PersonListView.as_view(),\n name=\"index\"\n ),\n 
url(\n regex=r\"^(?P<pk>\\d+)/$\",\n view=PersonDetailView.as_view(),\n name=\"detail\"\n ),\n url(\n regex=r\"^(?P<pk>\\d+)/results/$\",\n view=PersonResultsView.as_view(),\n name=\"results\"\n ),\n\n # Documents\n\n url(\n regex=r\"^document/$\",\n view=DocumentListView.as_view(),\n name=\"document-list\"\n ),\n\n url(\n regex=r\"^document/update/(?P<pk>\\d+)/$\",\n view=DocumentUpdateView.as_view(),\n name=\"document-update\"\n ),\n\n\n url(\n regex=r\"^document/create/$\",\n view=DocumentCreateView.as_view(),\n name=\"document-create\"\n ),\n\n\n url(\n regex=r\"^document/(?P<pk>\\d+)/$\",\n view=DocumentDetailView.as_view(),\n name=\"document-detail\"\n ),\n\n # Affidavit\n\n url(\n regex=r\"^affidavit/(?P<pk>\\d+)/$\",\n view=AffidavitScrapView.as_view(),\n name=\"affidavit-scrap\"\n ),\n\n )\n", "id": "5555755", "language": "Python", "matching_score": 0.06326179951429367, "max_stars_count": 0, "path": "ddjj/ddjjapp/urls.py" }, { "content": "# -*- coding: utf-8 -*-\nimport datetime\nfrom south.db import db\nfrom south.v2 import SchemaMigration\nfrom django.db import models\n\n\nclass Migration(SchemaMigration):\n\n def forwards(self, orm):\n # Deleting model 'Property'\n db.delete_table(u'ddjjapp_property')\n\n # Deleting field 'PaperProperty.property_ptr'\n db.delete_column(u'ddjjapp_paperproperty', u'property_ptr_id')\n\n # Adding field 'PaperProperty.id'\n db.add_column(u'ddjjapp_paperproperty', u'id',\n self.gf('django.db.models.fields.AutoField')(default=None, primary_key=True),\n keep_default=False)\n\n # Adding field 'PaperProperty.percentage'\n db.add_column(u'ddjjapp_paperproperty', 'percentage',\n self.gf('django.db.models.fields.DecimalField')(default=None, max_digits=3, decimal_places=2),\n keep_default=False)\n\n # Adding field 'PaperProperty.buy_year'\n db.add_column(u'ddjjapp_paperproperty', 'buy_year',\n self.gf('django.db.models.fields.DateField')(default=None),\n keep_default=False)\n\n # Adding field 'PaperProperty.bought_value'\n db.add_column(u'ddjjapp_paperproperty', 'bought_value',\n self.gf('django.db.models.fields.DecimalField')(default=None, max_digits=10, decimal_places=2),\n keep_default=False)\n\n # Adding field 'PaperProperty.affidavit'\n db.add_column(u'ddjjapp_paperproperty', 'affidavit',\n self.gf('django.db.models.fields.related.ForeignKey')(default=None, to=orm['ddjjapp.Affidavit']),\n keep_default=False)\n\n # Adding field 'PaperProperty.money_origin'\n db.add_column(u'ddjjapp_paperproperty', 'money_origin',\n self.gf('django.db.models.fields.CharField')(default=None, max_length=100),\n keep_default=False)\n\n # Deleting field 'FiscalProperty.property_ptr'\n db.delete_column(u'ddjjapp_fiscalproperty', u'property_ptr_id')\n\n # Adding field 'FiscalProperty.id'\n db.add_column(u'ddjjapp_fiscalproperty', u'id',\n self.gf('django.db.models.fields.AutoField')(default=None, primary_key=True),\n keep_default=False)\n\n # Adding field 'FiscalProperty.percentage'\n db.add_column(u'ddjjapp_fiscalproperty', 'percentage',\n self.gf('django.db.models.fields.DecimalField')(default=None, max_digits=3, decimal_places=2),\n keep_default=False)\n\n # Adding field 'FiscalProperty.buy_year'\n db.add_column(u'ddjjapp_fiscalproperty', 'buy_year',\n self.gf('django.db.models.fields.DateField')(default=None),\n keep_default=False)\n\n # Adding field 'FiscalProperty.bought_value'\n db.add_column(u'ddjjapp_fiscalproperty', 'bought_value',\n self.gf('django.db.models.fields.DecimalField')(default=None, max_digits=10, decimal_places=2),\n keep_default=False)\n\n # Adding 
field 'FiscalProperty.affidavit'\n db.add_column(u'ddjjapp_fiscalproperty', 'affidavit',\n self.gf('django.db.models.fields.related.ForeignKey')(default=None, to=orm['ddjjapp.Affidavit']),\n keep_default=False)\n\n # Adding field 'FiscalProperty.money_origin'\n db.add_column(u'ddjjapp_fiscalproperty', 'money_origin',\n self.gf('django.db.models.fields.CharField')(default=None, max_length=100),\n keep_default=False)\n\n\n def backwards(self, orm):\n # Adding model 'Property'\n db.create_table(u'ddjjapp_property', (\n ('percentage', self.gf('django.db.models.fields.DecimalField')(max_digits=3, decimal_places=2)),\n ('polymorphic_ctype', self.gf('django.db.models.fields.related.ForeignKey')(related_name=u'polymorphic_ddjjapp.property_set', null=True, to=orm['contenttypes.ContentType'])),\n ('buy_year', self.gf('django.db.models.fields.DateField')()),\n ('money_origin', self.gf('django.db.models.fields.CharField')(max_length=100)),\n ('bought_value', self.gf('django.db.models.fields.DecimalField')(max_digits=10, decimal_places=2)),\n ('affidavit', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['ddjjapp.Affidavit'])),\n (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),\n ))\n db.send_create_signal(u'ddjjapp', ['Property'])\n\n # Adding field 'PaperProperty.property_ptr'\n db.add_column(u'ddjjapp_paperproperty', u'property_ptr',\n self.gf('django.db.models.fields.related.OneToOneField')(default=None, to=orm['ddjjapp.Property'], unique=True, primary_key=True),\n keep_default=False)\n\n # Deleting field 'PaperProperty.id'\n db.delete_column(u'ddjjapp_paperproperty', u'id')\n\n # Deleting field 'PaperProperty.percentage'\n db.delete_column(u'ddjjapp_paperproperty', 'percentage')\n\n # Deleting field 'PaperProperty.buy_year'\n db.delete_column(u'ddjjapp_paperproperty', 'buy_year')\n\n # Deleting field 'PaperProperty.bought_value'\n db.delete_column(u'ddjjapp_paperproperty', 'bought_value')\n\n # Deleting field 'PaperProperty.affidavit'\n db.delete_column(u'ddjjapp_paperproperty', 'affidavit_id')\n\n # Deleting field 'PaperProperty.money_origin'\n db.delete_column(u'ddjjapp_paperproperty', 'money_origin')\n\n # Adding field 'FiscalProperty.property_ptr'\n db.add_column(u'ddjjapp_fiscalproperty', u'property_ptr',\n self.gf('django.db.models.fields.related.OneToOneField')(default=None, to=orm['ddjjapp.Property'], unique=True, primary_key=True),\n keep_default=False)\n\n # Deleting field 'FiscalProperty.id'\n db.delete_column(u'ddjjapp_fiscalproperty', u'id')\n\n # Deleting field 'FiscalProperty.percentage'\n db.delete_column(u'ddjjapp_fiscalproperty', 'percentage')\n\n # Deleting field 'FiscalProperty.buy_year'\n db.delete_column(u'ddjjapp_fiscalproperty', 'buy_year')\n\n # Deleting field 'FiscalProperty.bought_value'\n db.delete_column(u'ddjjapp_fiscalproperty', 'bought_value')\n\n # Deleting field 'FiscalProperty.affidavit'\n db.delete_column(u'ddjjapp_fiscalproperty', 'affidavit_id')\n\n # Deleting field 'FiscalProperty.money_origin'\n db.delete_column(u'ddjjapp_fiscalproperty', 'money_origin')\n\n\n models = {\n u'ddjjapp.affidavit': {\n 'Meta': {'object_name': 'Affidavit'},\n 'anual_net_salary': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),\n 'contract_type': ('django.db.models.fields.CharField', [], {'max_length': '50'}),\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'marital_status': ('django.db.models.fields.CharField', [], {'default': \"'M'\", 'max_length': '1'}),\n 
'original_document': ('django.db.models.fields.related.OneToOneField', [], {'to': u\"orm['ddjjapp.Document']\", 'unique': 'True'}),\n 'person': ('django.db.models.fields.related.ForeignKey', [], {'to': u\"orm['ddjjapp.Person']\"}),\n 'position': ('django.db.models.fields.related.ForeignKey', [], {'to': u\"orm['ddjjapp.Position']\"}),\n 'position_entry_date': ('django.db.models.fields.DateField', [], {}),\n 'sons': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': \"'+'\", 'symmetrical': 'False', 'to': u\"orm['ddjjapp.Person']\"}),\n 'spouse': ('django.db.models.fields.related.ForeignKey', [], {'related_name': \"'+'\", 'to': u\"orm['ddjjapp.Person']\"}),\n 'studies': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n 'submission_date': ('django.db.models.fields.DateTimeField', [], {}),\n 'voluntary_retirement': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),\n 'worked_before_position': ('django.db.models.fields.BooleanField', [], {'default': 'False'})\n },\n u'ddjjapp.affidavittemplate': {\n 'Meta': {'object_name': 'AffidavitTemplate'},\n 'finish_date': ('django.db.models.fields.DateField', [], {}),\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'init_date': ('django.db.models.fields.DateField', [], {}),\n 'jurisdiction': ('django.db.models.fields.related.ForeignKey', [], {'to': u\"orm['ddjjapp.Jurisdiction']\"}),\n 'power': ('django.db.models.fields.CharField', [], {'default': \"'E'\", 'max_length': '1'}),\n 'sample_document': ('django.db.models.fields.files.FileField', [], {'max_length': '100'})\n },\n u'ddjjapp.bankaccount': {\n 'Meta': {'object_name': 'BankAccount'},\n 'currency': ('django.db.models.fields.CharField', [], {'default': \"'ARS'\", 'max_length': '3'}),\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'property_type': ('django.db.models.fields.CharField', [], {'default': \"'C'\", 'max_length': '1'})\n },\n u'ddjjapp.companyshare': {\n 'Meta': {'object_name': 'CompanyShare', '_ormbases': [u'ddjjapp.PaperProperty']},\n 'company_activity': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n u'paperproperty_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u\"orm['ddjjapp.PaperProperty']\", 'unique': 'True', 'primary_key': 'True'}),\n 'society_type': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n 'state_related': ('django.db.models.fields.BooleanField', [], {'default': 'False'})\n },\n u'ddjjapp.document': {\n 'Meta': {'object_name': 'Document'},\n 'document_file': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'notes': ('django.db.models.fields.TextField', [], {}),\n 'template': ('django.db.models.fields.related.ForeignKey', [], {'to': u\"orm['ddjjapp.AffidavitTemplate']\"}),\n 'upload_date': ('django.db.models.fields.DateTimeField', [], {})\n },\n u'ddjjapp.financtialproperty': {\n 'Meta': {'object_name': 'FinanctialProperty', '_ormbases': [u'ddjjapp.PaperProperty']},\n 'company_activity': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n 'description': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n u'paperproperty_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u\"orm['ddjjapp.PaperProperty']\", 'unique': 'True', 'primary_key': 'True'}),\n 'property_type': ('django.db.models.fields.CharField', [], {'default': \"'S'\", 'max_length': '1'}),\n 
'quantity': ('django.db.models.fields.IntegerField', [], {})\n },\n u'ddjjapp.fiscalproperty': {\n 'Meta': {'object_name': 'FiscalProperty'},\n 'affidavit': ('django.db.models.fields.related.ForeignKey', [], {'to': u\"orm['ddjjapp.Affidavit']\"}),\n 'bought_value': ('django.db.models.fields.DecimalField', [], {'max_digits': '10', 'decimal_places': '2'}),\n 'buy_year': ('django.db.models.fields.DateField', [], {}),\n 'fiscal_value': ('django.db.models.fields.DecimalField', [], {'max_digits': '10', 'decimal_places': '2'}),\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'money_origin': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n 'percentage': ('django.db.models.fields.DecimalField', [], {'max_digits': '3', 'decimal_places': '2'})\n },\n u'ddjjapp.income': {\n 'Meta': {'object_name': 'Income'},\n 'affidavit': ('django.db.models.fields.related.ForeignKey', [], {'to': u\"orm['ddjjapp.Affidavit']\"}),\n 'ammount_year': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'notes': ('django.db.models.fields.TextField', [], {})\n },\n u'ddjjapp.jurisdiction': {\n 'Meta': {'object_name': 'Jurisdiction'},\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),\n 'parent': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u\"orm['ddjjapp.Jurisdiction']\", 'null': 'True', 'blank': 'True'})\n },\n u'ddjjapp.office': {\n 'Meta': {'object_name': 'Office'},\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'jurisdiction': ('django.db.models.fields.related.ForeignKey', [], {'to': u\"orm['ddjjapp.Jurisdiction']\"}),\n 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n 'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': u\"orm['ddjjapp.Office']\"})\n },\n u'ddjjapp.otheractivities': {\n 'Meta': {'object_name': 'OtherActivities'},\n 'activity': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n 'company': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n 'finish_date': ('django.db.models.fields.DateField', [], {}),\n 'hours_per_week': ('django.db.models.fields.DecimalField', [], {'max_digits': '3', 'decimal_places': '2'}),\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'init_date': ('django.db.models.fields.DateField', [], {}),\n 'leave': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),\n 'position': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n 'state_related': ('django.db.models.fields.BooleanField', [], {'default': 'False'})\n },\n u'ddjjapp.paperproperty': {\n 'Meta': {'object_name': 'PaperProperty'},\n 'affidavit': ('django.db.models.fields.related.ForeignKey', [], {'to': u\"orm['ddjjapp.Affidavit']\"}),\n 'bought_value': ('django.db.models.fields.DecimalField', [], {'max_digits': '10', 'decimal_places': '2'}),\n 'buy_year': ('django.db.models.fields.DateField', [], {}),\n 'current_value': ('django.db.models.fields.DecimalField', [], {'max_digits': '10', 'decimal_places': '2'}),\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'money_origin': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n 'percentage': ('django.db.models.fields.DecimalField', [], {'max_digits': '3', 'decimal_places': '2'})\n 
},\n u'ddjjapp.person': {\n 'Meta': {'object_name': 'Person'},\n 'birth_date': ('django.db.models.fields.DateField', [], {}),\n 'gender': ('django.db.models.fields.CharField', [], {'default': \"'M'\", 'max_length': '1'}),\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'id_number': ('django.db.models.fields.CharField', [], {'max_length': '15'}),\n 'id_type': ('django.db.models.fields.CharField', [], {'default': \"'D'\", 'max_length': '1'}),\n 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n 'notes': ('django.db.models.fields.TextField', [], {})\n },\n u'ddjjapp.personalproperty': {\n 'Meta': {'object_name': 'PersonalProperty', '_ormbases': [u'ddjjapp.FiscalProperty']},\n 'description': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n 'fabrication_year': ('django.db.models.fields.DateField', [], {}),\n u'fiscalproperty_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u\"orm['ddjjapp.FiscalProperty']\", 'unique': 'True', 'primary_key': 'True'}),\n 'property_type': ('django.db.models.fields.CharField', [], {'default': \"'C'\", 'max_length': '1'})\n },\n u'ddjjapp.position': {\n 'Meta': {'object_name': 'Position'},\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n 'notes': ('django.db.models.fields.TextField', [], {}),\n 'office': ('django.db.models.fields.related.ForeignKey', [], {'to': u\"orm['ddjjapp.Office']\"})\n },\n u'ddjjapp.previousjob': {\n 'Meta': {'object_name': 'PreviousJob'},\n 'begin_date': ('django.db.models.fields.DateField', [], {}),\n 'company': ('django.db.models.fields.CharField', [], {'max_length': '50'}),\n 'company_activity': ('django.db.models.fields.CharField', [], {'max_length': '50'}),\n 'end_date': ('django.db.models.fields.DateField', [], {}),\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'position': ('django.db.models.fields.CharField', [], {'max_length': '50'}),\n 'was_state_related': ('django.db.models.fields.BooleanField', [], {'default': 'False'})\n },\n u'ddjjapp.realstate': {\n 'Meta': {'object_name': 'RealState', '_ormbases': [u'ddjjapp.FiscalProperty']},\n 'city': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n 'country': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n u'fiscalproperty_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u\"orm['ddjjapp.FiscalProperty']\", 'unique': 'True', 'primary_key': 'True'}),\n 'improvements': ('django.db.models.fields.DecimalField', [], {'max_digits': '10', 'decimal_places': '2'}),\n 'neighborhood': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n 'property_type': ('django.db.models.fields.CharField', [], {'default': \"'H'\", 'max_length': '1'}),\n 'size': ('django.db.models.fields.DecimalField', [], {'max_digits': '10', 'decimal_places': '2'}),\n 'state': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n 'unidad': ('django.db.models.fields.CharField', [], {'max_length': '10'})\n }\n }\n\n complete_apps = ['ddjjapp']", "id": "10557094", "language": "Python", "matching_score": 6.5326948165893555, "max_stars_count": 0, "path": "ddjj/ddjjapp/migrations/0003_auto__del_property__del_field_paperproperty_property_ptr__add_field_pa.py" }, { "content": "# -*- coding: utf-8 -*-\nimport datetime\nfrom south.db import db\nfrom 
south.v2 import SchemaMigration\nfrom django.db import models\n\n\nclass Migration(SchemaMigration):\n\n def forwards(self, orm):\n # Adding model 'PreviousJob'\n db.create_table(u'ddjjapp_previousjob', (\n (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),\n ('begin_date', self.gf('django.db.models.fields.DateField')()),\n ('end_date', self.gf('django.db.models.fields.DateField')()),\n ('company', self.gf('django.db.models.fields.CharField')(max_length=50)),\n ('company_activity', self.gf('django.db.models.fields.CharField')(max_length=50)),\n ('position', self.gf('django.db.models.fields.CharField')(max_length=50)),\n ('was_state_related', self.gf('django.db.models.fields.BooleanField')(default=False)),\n ))\n db.send_create_signal(u'ddjjapp', ['PreviousJob'])\n\n # Adding model 'AffidavitTemplate'\n db.create_table(u'ddjjapp_affidavittemplate', (\n (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),\n ('power', self.gf('django.db.models.fields.CharField')(default='E', max_length=1)),\n ('jurisdiction', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['ddjjapp.Jurisdiction'])),\n ('sample_document', self.gf('django.db.models.fields.files.FileField')(max_length=100)),\n ('init_date', self.gf('django.db.models.fields.DateField')()),\n ('finish_date', self.gf('django.db.models.fields.DateField')()),\n ))\n db.send_create_signal(u'ddjjapp', ['AffidavitTemplate'])\n\n # Adding model 'Document'\n db.create_table(u'ddjjapp_document', (\n (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),\n ('document_file', self.gf('django.db.models.fields.files.FileField')(max_length=100)),\n ('upload_date', self.gf('django.db.models.fields.DateTimeField')()),\n ('template', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['ddjjapp.AffidavitTemplate'])),\n ('notes', self.gf('django.db.models.fields.TextField')()),\n ))\n db.send_create_signal(u'ddjjapp', ['Document'])\n\n # Adding model 'Affidavit'\n db.create_table(u'ddjjapp_affidavit', (\n (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),\n ('original_document', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['ddjjapp.Document'], unique=True)),\n ('submission_date', self.gf('django.db.models.fields.DateTimeField')()),\n ('person', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['ddjjapp.Person'])),\n ('marital_status', self.gf('django.db.models.fields.CharField')(default='M', max_length=1)),\n ('position_entry_date', self.gf('django.db.models.fields.DateField')()),\n ('position', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['ddjjapp.Position'])),\n ('contract_type', self.gf('django.db.models.fields.CharField')(max_length=50)),\n ('anual_net_salary', self.gf('django.db.models.fields.DecimalField')(max_digits=12, decimal_places=2)),\n ('studies', self.gf('django.db.models.fields.CharField')(max_length=100)),\n ('spouse', self.gf('django.db.models.fields.related.ForeignKey')(related_name='+', to=orm['ddjjapp.Person'])),\n ('voluntary_retirement', self.gf('django.db.models.fields.BooleanField')(default=False)),\n ('worked_before_position', self.gf('django.db.models.fields.BooleanField')(default=False)),\n ))\n db.send_create_signal(u'ddjjapp', ['Affidavit'])\n\n # Adding M2M table for field sons on 'Affidavit'\n m2m_table_name = db.shorten_name(u'ddjjapp_affidavit_sons')\n db.create_table(m2m_table_name, (\n ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),\n ('affidavit', 
models.ForeignKey(orm[u'ddjjapp.affidavit'], null=False)),\n ('person', models.ForeignKey(orm[u'ddjjapp.person'], null=False))\n ))\n db.create_unique(m2m_table_name, ['affidavit_id', 'person_id'])\n\n # Adding model 'Income'\n db.create_table(u'ddjjapp_income', (\n (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),\n ('affidavit', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['ddjjapp.Affidavit'])),\n ('ammount_year', self.gf('django.db.models.fields.DecimalField')(max_digits=12, decimal_places=2)),\n ('notes', self.gf('django.db.models.fields.TextField')()),\n ))\n db.send_create_signal(u'ddjjapp', ['Income'])\n\n # Adding model 'Property'\n db.create_table(u'ddjjapp_property', (\n (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),\n ('polymorphic_ctype', self.gf('django.db.models.fields.related.ForeignKey')(related_name=u'polymorphic_ddjjapp.property_set', null=True, to=orm['contenttypes.ContentType'])),\n ('percentage', self.gf('django.db.models.fields.DecimalField')(max_digits=3, decimal_places=2)),\n ('buy_year', self.gf('django.db.models.fields.DateField')()),\n ('bought_value', self.gf('django.db.models.fields.DecimalField')(max_digits=10, decimal_places=2)),\n ('affidavit', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['ddjjapp.Affidavit'])),\n ('money_origin', self.gf('django.db.models.fields.CharField')(max_length=100)),\n ))\n db.send_create_signal(u'ddjjapp', ['Property'])\n\n # Adding model 'FiscalProperty'\n db.create_table(u'ddjjapp_fiscalproperty', (\n (u'property_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['ddjjapp.Property'], unique=True, primary_key=True)),\n ('fiscal_value', self.gf('django.db.models.fields.DecimalField')(max_digits=10, decimal_places=2)),\n ))\n db.send_create_signal(u'ddjjapp', ['FiscalProperty'])\n\n # Adding model 'PaperProperty'\n db.create_table(u'ddjjapp_paperproperty', (\n (u'property_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['ddjjapp.Property'], unique=True, primary_key=True)),\n ('current_value', self.gf('django.db.models.fields.DecimalField')(max_digits=10, decimal_places=2)),\n ))\n db.send_create_signal(u'ddjjapp', ['PaperProperty'])\n\n # Adding model 'RealState'\n db.create_table(u'ddjjapp_realstate', (\n (u'fiscalproperty_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['ddjjapp.FiscalProperty'], unique=True, primary_key=True)),\n ('property_type', self.gf('django.db.models.fields.CharField')(default='H', max_length=1)),\n ('neighborhood', self.gf('django.db.models.fields.CharField')(max_length=100)),\n ('city', self.gf('django.db.models.fields.CharField')(max_length=100)),\n ('state', self.gf('django.db.models.fields.CharField')(max_length=100)),\n ('country', self.gf('django.db.models.fields.CharField')(max_length=100)),\n ('size', self.gf('django.db.models.fields.DecimalField')(max_digits=10, decimal_places=2)),\n ('unidad', self.gf('django.db.models.fields.CharField')(max_length=10)),\n ('improvements', self.gf('django.db.models.fields.DecimalField')(max_digits=10, decimal_places=2)),\n ))\n db.send_create_signal(u'ddjjapp', ['RealState'])\n\n # Adding model 'CompanyShare'\n db.create_table(u'ddjjapp_companyshare', (\n (u'paperproperty_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['ddjjapp.PaperProperty'], unique=True, primary_key=True)),\n ('society_type', self.gf('django.db.models.fields.CharField')(max_length=100)),\n ('company_activity', 
self.gf('django.db.models.fields.CharField')(max_length=100)),\n ('state_related', self.gf('django.db.models.fields.BooleanField')(default=False)),\n ))\n db.send_create_signal(u'ddjjapp', ['CompanyShare'])\n\n # Adding model 'FinanctialProperty'\n db.create_table(u'ddjjapp_financtialproperty', (\n (u'paperproperty_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['ddjjapp.PaperProperty'], unique=True, primary_key=True)),\n ('property_type', self.gf('django.db.models.fields.CharField')(default='S', max_length=1)),\n ('description', self.gf('django.db.models.fields.CharField')(max_length=100)),\n ('company_activity', self.gf('django.db.models.fields.CharField')(max_length=100)),\n ('quantity', self.gf('django.db.models.fields.IntegerField')()),\n ))\n db.send_create_signal(u'ddjjapp', ['FinanctialProperty'])\n\n # Adding model 'BankAccount'\n db.create_table(u'ddjjapp_bankaccount', (\n (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),\n ('property_type', self.gf('django.db.models.fields.CharField')(default='C', max_length=1)),\n ('currency', self.gf('django.db.models.fields.CharField')(default='ARS', max_length=3)),\n ))\n db.send_create_signal(u'ddjjapp', ['BankAccount'])\n\n # Adding model 'PersonalProperty'\n db.create_table(u'ddjjapp_personalproperty', (\n (u'fiscalproperty_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['ddjjapp.FiscalProperty'], unique=True, primary_key=True)),\n ('property_type', self.gf('django.db.models.fields.CharField')(default='C', max_length=1)),\n ('description', self.gf('django.db.models.fields.CharField')(max_length=100)),\n ('fabrication_year', self.gf('django.db.models.fields.DateField')()),\n ))\n db.send_create_signal(u'ddjjapp', ['PersonalProperty'])\n\n # Adding model 'Jurisdiction'\n db.create_table(u'ddjjapp_jurisdiction', (\n (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),\n ('name', self.gf('django.db.models.fields.CharField')(max_length=50)),\n ('parent', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['ddjjapp.Jurisdiction'])),\n ))\n db.send_create_signal(u'ddjjapp', ['Jurisdiction'])\n\n # Adding model 'OtherActivities'\n db.create_table(u'ddjjapp_otheractivities', (\n (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),\n ('company', self.gf('django.db.models.fields.CharField')(max_length=100)),\n ('activity', self.gf('django.db.models.fields.CharField')(max_length=100)),\n ('position', self.gf('django.db.models.fields.CharField')(max_length=100)),\n ('init_date', self.gf('django.db.models.fields.DateField')()),\n ('finish_date', self.gf('django.db.models.fields.DateField')()),\n ('leave', self.gf('django.db.models.fields.BooleanField')(default=False)),\n ('hours_per_week', self.gf('django.db.models.fields.DecimalField')(max_digits=3, decimal_places=2)),\n ('state_related', self.gf('django.db.models.fields.BooleanField')(default=False)),\n ))\n db.send_create_signal(u'ddjjapp', ['OtherActivities'])\n\n # Adding model 'Person'\n db.create_table(u'ddjjapp_person', (\n (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),\n ('id_type', self.gf('django.db.models.fields.CharField')(default='D', max_length=1)),\n ('id_number', self.gf('django.db.models.fields.CharField')(max_length=15)),\n ('name', self.gf('django.db.models.fields.CharField')(max_length=100)),\n ('last_name', self.gf('django.db.models.fields.CharField')(max_length=100)),\n ('birth_date', self.gf('django.db.models.fields.DateField')()),\n 
('gender', self.gf('django.db.models.fields.CharField')(default='M', max_length=1)),\n ('notes', self.gf('django.db.models.fields.TextField')()),\n ))\n db.send_create_signal(u'ddjjapp', ['Person'])\n\n # Adding model 'Office'\n db.create_table(u'ddjjapp_office', (\n (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),\n ('name', self.gf('django.db.models.fields.CharField')(max_length=100)),\n ('parent', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['ddjjapp.Office'])),\n ('jurisdiction', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['ddjjapp.Jurisdiction'])),\n ))\n db.send_create_signal(u'ddjjapp', ['Office'])\n\n # Adding model 'Position'\n db.create_table(u'ddjjapp_position', (\n (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),\n ('name', self.gf('django.db.models.fields.CharField')(max_length=100)),\n ('office', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['ddjjapp.Office'])),\n ('notes', self.gf('django.db.models.fields.TextField')()),\n ))\n db.send_create_signal(u'ddjjapp', ['Position'])\n\n\n def backwards(self, orm):\n # Deleting model 'PreviousJob'\n db.delete_table(u'ddjjapp_previousjob')\n\n # Deleting model 'AffidavitTemplate'\n db.delete_table(u'ddjjapp_affidavittemplate')\n\n # Deleting model 'Document'\n db.delete_table(u'ddjjapp_document')\n\n # Deleting model 'Affidavit'\n db.delete_table(u'ddjjapp_affidavit')\n\n # Removing M2M table for field sons on 'Affidavit'\n db.delete_table(db.shorten_name(u'ddjjapp_affidavit_sons'))\n\n # Deleting model 'Income'\n db.delete_table(u'ddjjapp_income')\n\n # Deleting model 'Property'\n db.delete_table(u'ddjjapp_property')\n\n # Deleting model 'FiscalProperty'\n db.delete_table(u'ddjjapp_fiscalproperty')\n\n # Deleting model 'PaperProperty'\n db.delete_table(u'ddjjapp_paperproperty')\n\n # Deleting model 'RealState'\n db.delete_table(u'ddjjapp_realstate')\n\n # Deleting model 'CompanyShare'\n db.delete_table(u'ddjjapp_companyshare')\n\n # Deleting model 'FinanctialProperty'\n db.delete_table(u'ddjjapp_financtialproperty')\n\n # Deleting model 'BankAccount'\n db.delete_table(u'ddjjapp_bankaccount')\n\n # Deleting model 'PersonalProperty'\n db.delete_table(u'ddjjapp_personalproperty')\n\n # Deleting model 'Jurisdiction'\n db.delete_table(u'ddjjapp_jurisdiction')\n\n # Deleting model 'OtherActivities'\n db.delete_table(u'ddjjapp_otheractivities')\n\n # Deleting model 'Person'\n db.delete_table(u'ddjjapp_person')\n\n # Deleting model 'Office'\n db.delete_table(u'ddjjapp_office')\n\n # Deleting model 'Position'\n db.delete_table(u'ddjjapp_position')\n\n\n models = {\n u'contenttypes.contenttype': {\n 'Meta': {'ordering': \"('name',)\", 'unique_together': \"(('app_label', 'model'),)\", 'object_name': 'ContentType', 'db_table': \"'django_content_type'\"},\n 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})\n },\n u'ddjjapp.affidavit': {\n 'Meta': {'object_name': 'Affidavit'},\n 'anual_net_salary': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),\n 'contract_type': ('django.db.models.fields.CharField', [], {'max_length': '50'}),\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'marital_status': 
('django.db.models.fields.CharField', [], {'default': \"'M'\", 'max_length': '1'}),\n 'original_document': ('django.db.models.fields.related.OneToOneField', [], {'to': u\"orm['ddjjapp.Document']\", 'unique': 'True'}),\n 'person': ('django.db.models.fields.related.ForeignKey', [], {'to': u\"orm['ddjjapp.Person']\"}),\n 'position': ('django.db.models.fields.related.ForeignKey', [], {'to': u\"orm['ddjjapp.Position']\"}),\n 'position_entry_date': ('django.db.models.fields.DateField', [], {}),\n 'sons': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': \"'+'\", 'symmetrical': 'False', 'to': u\"orm['ddjjapp.Person']\"}),\n 'spouse': ('django.db.models.fields.related.ForeignKey', [], {'related_name': \"'+'\", 'to': u\"orm['ddjjapp.Person']\"}),\n 'studies': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n 'submission_date': ('django.db.models.fields.DateTimeField', [], {}),\n 'voluntary_retirement': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),\n 'worked_before_position': ('django.db.models.fields.BooleanField', [], {'default': 'False'})\n },\n u'ddjjapp.affidavittemplate': {\n 'Meta': {'object_name': 'AffidavitTemplate'},\n 'finish_date': ('django.db.models.fields.DateField', [], {}),\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'init_date': ('django.db.models.fields.DateField', [], {}),\n 'jurisdiction': ('django.db.models.fields.related.ForeignKey', [], {'to': u\"orm['ddjjapp.Jurisdiction']\"}),\n 'power': ('django.db.models.fields.CharField', [], {'default': \"'E'\", 'max_length': '1'}),\n 'sample_document': ('django.db.models.fields.files.FileField', [], {'max_length': '100'})\n },\n u'ddjjapp.bankaccount': {\n 'Meta': {'object_name': 'BankAccount'},\n 'currency': ('django.db.models.fields.CharField', [], {'default': \"'ARS'\", 'max_length': '3'}),\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'property_type': ('django.db.models.fields.CharField', [], {'default': \"'C'\", 'max_length': '1'})\n },\n u'ddjjapp.companyshare': {\n 'Meta': {'object_name': 'CompanyShare', '_ormbases': [u'ddjjapp.PaperProperty']},\n 'company_activity': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n u'paperproperty_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u\"orm['ddjjapp.PaperProperty']\", 'unique': 'True', 'primary_key': 'True'}),\n 'society_type': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n 'state_related': ('django.db.models.fields.BooleanField', [], {'default': 'False'})\n },\n u'ddjjapp.document': {\n 'Meta': {'object_name': 'Document'},\n 'document_file': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'notes': ('django.db.models.fields.TextField', [], {}),\n 'template': ('django.db.models.fields.related.ForeignKey', [], {'to': u\"orm['ddjjapp.AffidavitTemplate']\"}),\n 'upload_date': ('django.db.models.fields.DateTimeField', [], {})\n },\n u'ddjjapp.financtialproperty': {\n 'Meta': {'object_name': 'FinanctialProperty', '_ormbases': [u'ddjjapp.PaperProperty']},\n 'company_activity': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n 'description': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n u'paperproperty_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u\"orm['ddjjapp.PaperProperty']\", 'unique': 'True', 'primary_key': 'True'}),\n 'property_type': 
('django.db.models.fields.CharField', [], {'default': \"'S'\", 'max_length': '1'}),\n 'quantity': ('django.db.models.fields.IntegerField', [], {})\n },\n u'ddjjapp.fiscalproperty': {\n 'Meta': {'object_name': 'FiscalProperty', '_ormbases': [u'ddjjapp.Property']},\n 'fiscal_value': ('django.db.models.fields.DecimalField', [], {'max_digits': '10', 'decimal_places': '2'}),\n u'property_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u\"orm['ddjjapp.Property']\", 'unique': 'True', 'primary_key': 'True'})\n },\n u'ddjjapp.income': {\n 'Meta': {'object_name': 'Income'},\n 'affidavit': ('django.db.models.fields.related.ForeignKey', [], {'to': u\"orm['ddjjapp.Affidavit']\"}),\n 'ammount_year': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'notes': ('django.db.models.fields.TextField', [], {})\n },\n u'ddjjapp.jurisdiction': {\n 'Meta': {'object_name': 'Jurisdiction'},\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),\n 'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': u\"orm['ddjjapp.Jurisdiction']\"})\n },\n u'ddjjapp.office': {\n 'Meta': {'object_name': 'Office'},\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'jurisdiction': ('django.db.models.fields.related.ForeignKey', [], {'to': u\"orm['ddjjapp.Jurisdiction']\"}),\n 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n 'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': u\"orm['ddjjapp.Office']\"})\n },\n u'ddjjapp.otheractivities': {\n 'Meta': {'object_name': 'OtherActivities'},\n 'activity': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n 'company': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n 'finish_date': ('django.db.models.fields.DateField', [], {}),\n 'hours_per_week': ('django.db.models.fields.DecimalField', [], {'max_digits': '3', 'decimal_places': '2'}),\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'init_date': ('django.db.models.fields.DateField', [], {}),\n 'leave': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),\n 'position': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n 'state_related': ('django.db.models.fields.BooleanField', [], {'default': 'False'})\n },\n u'ddjjapp.paperproperty': {\n 'Meta': {'object_name': 'PaperProperty', '_ormbases': [u'ddjjapp.Property']},\n 'current_value': ('django.db.models.fields.DecimalField', [], {'max_digits': '10', 'decimal_places': '2'}),\n u'property_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u\"orm['ddjjapp.Property']\", 'unique': 'True', 'primary_key': 'True'})\n },\n u'ddjjapp.person': {\n 'Meta': {'object_name': 'Person'},\n 'birth_date': ('django.db.models.fields.DateField', [], {}),\n 'gender': ('django.db.models.fields.CharField', [], {'default': \"'M'\", 'max_length': '1'}),\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'id_number': ('django.db.models.fields.CharField', [], {'max_length': '15'}),\n 'id_type': ('django.db.models.fields.CharField', [], {'default': \"'D'\", 'max_length': '1'}),\n 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n 'notes': 
('django.db.models.fields.TextField', [], {})\n },\n u'ddjjapp.personalproperty': {\n 'Meta': {'object_name': 'PersonalProperty', '_ormbases': [u'ddjjapp.FiscalProperty']},\n 'description': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n 'fabrication_year': ('django.db.models.fields.DateField', [], {}),\n u'fiscalproperty_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u\"orm['ddjjapp.FiscalProperty']\", 'unique': 'True', 'primary_key': 'True'}),\n 'property_type': ('django.db.models.fields.CharField', [], {'default': \"'C'\", 'max_length': '1'})\n },\n u'ddjjapp.position': {\n 'Meta': {'object_name': 'Position'},\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n 'notes': ('django.db.models.fields.TextField', [], {}),\n 'office': ('django.db.models.fields.related.ForeignKey', [], {'to': u\"orm['ddjjapp.Office']\"})\n },\n u'ddjjapp.previousjob': {\n 'Meta': {'object_name': 'PreviousJob'},\n 'begin_date': ('django.db.models.fields.DateField', [], {}),\n 'company': ('django.db.models.fields.CharField', [], {'max_length': '50'}),\n 'company_activity': ('django.db.models.fields.CharField', [], {'max_length': '50'}),\n 'end_date': ('django.db.models.fields.DateField', [], {}),\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'position': ('django.db.models.fields.CharField', [], {'max_length': '50'}),\n 'was_state_related': ('django.db.models.fields.BooleanField', [], {'default': 'False'})\n },\n u'ddjjapp.property': {\n 'Meta': {'object_name': 'Property'},\n 'affidavit': ('django.db.models.fields.related.ForeignKey', [], {'to': u\"orm['ddjjapp.Affidavit']\"}),\n 'bought_value': ('django.db.models.fields.DecimalField', [], {'max_digits': '10', 'decimal_places': '2'}),\n 'buy_year': ('django.db.models.fields.DateField', [], {}),\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'money_origin': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n 'percentage': ('django.db.models.fields.DecimalField', [], {'max_digits': '3', 'decimal_places': '2'}),\n 'polymorphic_ctype': ('django.db.models.fields.related.ForeignKey', [], {'related_name': \"u'polymorphic_ddjjapp.property_set'\", 'null': 'True', 'to': u\"orm['contenttypes.ContentType']\"})\n },\n u'ddjjapp.realstate': {\n 'Meta': {'object_name': 'RealState', '_ormbases': [u'ddjjapp.FiscalProperty']},\n 'city': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n 'country': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n u'fiscalproperty_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u\"orm['ddjjapp.FiscalProperty']\", 'unique': 'True', 'primary_key': 'True'}),\n 'improvements': ('django.db.models.fields.DecimalField', [], {'max_digits': '10', 'decimal_places': '2'}),\n 'neighborhood': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n 'property_type': ('django.db.models.fields.CharField', [], {'default': \"'H'\", 'max_length': '1'}),\n 'size': ('django.db.models.fields.DecimalField', [], {'max_digits': '10', 'decimal_places': '2'}),\n 'state': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n 'unidad': ('django.db.models.fields.CharField', [], {'max_length': '10'})\n }\n }\n\n complete_apps = ['ddjjapp']", "id": "12697299", "language": "Python", "matching_score": 5.529505729675293, "max_stars_count": 0, "path": 
"ddjj/ddjjapp/migrations/0001_initial.py" }, { "content": "# -*- coding: utf-8 -*-\n\nfrom django.db import models\n\nfrom model_utils.models import TimeStampedModel\n\n\nclass PreviousJob(models.Model):\n begin_date = models.DateField()\n end_date = models.DateField()\n company = models.CharField(max_length=50)\n company_activity = models.CharField(max_length=50)\n position = models.CharField(max_length=50)\n was_state_related = models.BooleanField()\n\n\nclass AffidavitTemplate(models.Model):\n\n \"\"\"\n El modelo de una declaración jurada.\n Por ejemplo:\n Las DDJJ de la Ciudad eran completadas a mano creo que hasta 2009.\n Las DDJJ del Poder Judicial, tienen otras características.\n Acá debería poner de qué juridiscción, poder, etc. y la fecha\n o rango defechas.\n\n Entonces se debería poder buscar: del Buenos Aires, el Poder Ejecutivo,\n de tal a tal fecha, cuál es el template usado.\n \"\"\"\n\n LEGISLATURE = 'L'\n EXECUTIVE = 'E'\n JUDICIARY = 'J'\n POWER_CHOICES = (\n (LEGISLATURE, 'Legislativo'),\n (EXECUTIVE, 'Ejecutivo'),\n (JUDICIARY, 'Judicial'),\n )\n\n power = models.CharField(\n max_length=1, choices=POWER_CHOICES, default=EXECUTIVE)\n jurisdiction = models.ForeignKey('Jurisdiction')\n # El instructivo para completar.\n sample_document = models.FileField(upload_to='templates')\n init_date = models.DateField()\n finish_date = models.DateField()\n\n\nclass Document(TimeStampedModel):\n\n \"\"\"\n Un documento scaneado con una declaración jurada.\n\n Tal vez debería usar esto. \n from model_utils.models import StatusModel\n from model_utils import Choices\n \"\"\"\n document_file = models.FileField(upload_to='documents')\n\n # qué formato tiene la declaración jurada.\n template = models.ForeignKey(\n 'AffidavitTemplate', null=True, blank=True, default=None)\n\n notes = models.TextField(null=True, blank=True, default=None)\n\n def __unicode__(self):\n return unicode(self.document_file)\n\n\nclass Affidavit(models.Model):\n\n \"\"\"\n Una declaración jurada.\n \"\"\"\n MARRIED = 'M'\n SINGLE = 'S'\n DIVORCED = 'D'\n WIDOWER = 'W'\n\n MARITAL_STATUS_CHOICES = (\n (MARRIED, 'Casado'),\n (SINGLE, 'Soltero'),\n (DIVORCED, 'Divorciado'),\n (WIDOWER, 'Viudo'),\n )\n # que a su vez tiene un template de donde se saca la jurisdicción y el\n # poder.\n original_document = models.OneToOneField(Document, primary_key=True)\n\n submission_date = models.DateTimeField()\n\n # Datos personales que no cambian de una declaración jurada a otra (DNI,\n # nombre, etc.)\n person = models.ForeignKey('Person', )\n marital_status = models.CharField(\n max_length=1, choices=MARITAL_STATUS_CHOICES, default=MARRIED)\n\n # Datos del cargo.\n position_entry_date = models.DateField()\n position = models.ForeignKey('Position')\n # tipo de contrato, probablemente lo debería hacer CHOICES\n contract_type = models.CharField(max_length=50)\n anual_net_salary = models.DecimalField(\n max_digits=12, decimal_places=2)\n\n # Estudios\n # REVIEW\n studies = models.CharField(max_length=100)\n\n spouse = models.ForeignKey('Person', related_name='+')\n sons = models.ManyToManyField('Person', related_name='+')\n\n voluntary_retirement = models.BooleanField()\n\n worked_before_position = models.BooleanField()\n\n\nclass Income(models.Model):\n affidavit = models.ForeignKey('Affidavit')\n ammount_year = models.DecimalField(\n max_digits=12, decimal_places=2)\n notes = models.TextField()\n\n\nclass Property(models.Model):\n percentage = models.DecimalField(max_digits=3, decimal_places=2)\n buy_year = models.DateField()\n\n 
bought_value = models.DecimalField(\n max_digits=10, decimal_places=2)\n\n affidavit = models.ForeignKey('Affidavit')\n\n # REVIEW\n money_origin = models.CharField(max_length=100)\n\n class Meta:\n abstract = True\n\n\nclass FiscalProperty(Property):\n\n \"\"\"\n Propiedades con valor fiscal\n \"\"\"\n\n # Only valid value.\n fiscal_value = models.DecimalField(\n max_digits=10, decimal_places=2)\n\n\nclass PaperProperty(Property):\n\n \"\"\"\n \"\"\"\n current_value = models.DecimalField(\n max_digits=10, decimal_places=2)\n\n\nclass RealState(FiscalProperty):\n HOUSE = 'H'\n PROPERTY_TYPE_CHOICES = (\n (HOUSE, 'Casa'),\n )\n\n property_type = models.CharField(\n max_length=1, choices=PROPERTY_TYPE_CHOICES, default=HOUSE)\n\n neighborhood = models.CharField(max_length=100)\n city = models.CharField(max_length=100)\n state = models.CharField(max_length=100)\n country = models.CharField(max_length=100)\n\n size = models.DecimalField(max_digits=10, decimal_places=2)\n\n # no tengo idea de lo que es esto.\n unidad = models.CharField(max_length=10)\n\n improvements = models.DecimalField(\n max_digits=10, decimal_places=2)\n\n\nclass CompanyShare(PaperProperty):\n\n society_type = models.CharField(max_length=100)\n\n company_activity = models.CharField(max_length=100)\n\n state_related = models.BooleanField()\n\n\nclass FinanctialProperty(PaperProperty):\n\n \"\"\"\n Acciones\n \"\"\"\n STOCKS = 'S'\n PROPERTY_TYPE_CHOICES = (\n (STOCKS, 'Acciones'),\n )\n\n property_type = models.CharField(\n max_length=1, choices=PROPERTY_TYPE_CHOICES, default=STOCKS)\n\n description = models.CharField(max_length=100)\n\n company_activity = models.CharField(max_length=100)\n\n quantity = models.IntegerField()\n\n\nclass BankAccount(models.Model):\n CHECKING_ACCOUNT = 'C'\n SAVINGS_ACCOUNT = 'S'\n\n ACCOUNT_TYPE_CHOICES = (\n (CHECKING_ACCOUNT, 'Cuenta Corriente'),\n (SAVINGS_ACCOUNT, 'Cuenta Corriente'),\n )\n\n property_type = models.CharField(\n max_length=1, choices=ACCOUNT_TYPE_CHOICES, default=CHECKING_ACCOUNT)\n currency = models.CharField(max_length=3, default='ARS')\n\n\nclass PersonalProperty(FiscalProperty):\n\n \"\"\"\n Bienes muebles\n Tal vez todas las propiedades deberían tener un padre común.\n \"\"\"\n\n CAR = 'C'\n PROPERTY_TYPE_CHOICES = (\n (CAR, 'Automotor'),\n )\n\n property_type = models.CharField(\n max_length=1, choices=PROPERTY_TYPE_CHOICES, default=CAR)\n description = models.CharField(max_length=100)\n\n fabrication_year = models.DateField()\n\n\nclass Jurisdiction(models.Model):\n\n \"\"\"\n Provincia, Ciudad, Nación\n\n Esto no se si tiene sentido.\n Por ahí debería ser directamente el departamento.\n Ministerio de Seguridad de la\n\n\n Ejemplo:\n name: La Pampa\n parent = Argentina\n\n name: Argentina\n parent: None\n\n name: Bolivia\n parent: None\n \"\"\"\n name = models.CharField(max_length=50)\n parent = models.ForeignKey(\n 'Jurisdiction', null=True, blank=True, default=None)\n\n def __unicode__(self):\n return unicode(self.name)\n\n\nclass OtherActivities(models.Model):\n company = models.CharField(max_length=100)\n activity = models.CharField(max_length=100)\n position = models.CharField(max_length=100)\n init_date = models.DateField()\n finish_date = models.DateField()\n leave = models.BooleanField() # licencia\n hours_per_week = models.DecimalField(\n max_digits=3, decimal_places=2)\n state_related = models.BooleanField()\n\n\nclass Person(models.Model):\n\n \"\"\"\n Datos que no cambian de una persona de una a otra declaración jurada.\n Lo que puede cambiar (cargos, de 
estado civil, de bienes, etc. va en la declaración)\n \"\"\"\n DNI = 'D'\n ID_TYPE_CHOICES = (\n (DNI, 'DNI'),\n )\n\n MALE = 'M'\n FEMALE = 'F'\n GENDER_CHOICES = (\n (MALE, 'Masculino'),\n (FEMALE, 'Femenino'),\n )\n\n id_type = models.CharField(\n max_length=1, choices=ID_TYPE_CHOICES, default=DNI)\n id_number = models.CharField(max_length=15)\n name = models.CharField(max_length=100)\n last_name = models.CharField(max_length=100)\n birth_date = models.DateField()\n gender = models.CharField(\n max_length=1, choices=GENDER_CHOICES, default=MALE)\n notes = models.TextField()\n\n def __unicode__(self):\n return unicode(self.name + \" \" + self.last_name)\n\n\nclass Office(models.Model):\n\n \"\"\"\n Ministerio, subsecretaría, etc.\n Nota:\n Si se elimina un ministerio o subsecretaría, no se elimina de la base de datos porque arruinaría la integridad referencial.\n En ese caso, se lo deja, pero simplemente no tiene más declaraciones juradas. Por ahí se podría poner una fecha de creación y eliminación.\n Si se crea uno, pasa lo mismo.\n\n Si se cambia de padre (una subsecretaría pasa a depender de otro ministerio), no pasa nada, simplemente, se crea una nueva oficina con otro padre.\n\n \"\"\"\n name = models.CharField(max_length=100)\n parent = models.ForeignKey('Office')\n\n # Hay que pensar esto mejor: El problema es que cuando se crea un documento con un Document Template, no se sabe a que oficina corresponde.\n # Solamente se sabe que corresponde a una jurisdicción determinada.\n # La oficina se conoce cuando se crea la DDJJ, pero esa oficina tiene que tener una jurisdicción\n # Entonces, se crea una referencia circular.\n jurisdiction = models.ForeignKey('Jurisdiction')\n\n\nclass Position(models.Model):\n\n \"\"\"\n Cargo\n \"\"\"\n name = models.CharField(max_length=100)\n office = models.ForeignKey('Office')\n\n notes = models.TextField()\n", "id": "11639968", "language": "Python", "matching_score": 1.4245682954788208, "max_stars_count": 0, "path": "ddjj/ddjjapp/models.py" }, { "content": "import json\n\nfrom django.core.management.base import BaseCommand, CommandError\n\nfrom ddjjapp.models import Jurisdiction\n\n\nclass Command(BaseCommand):\n args = '<file1 file2 ...>'\n help = 'Carga jurisdicciones de un archivo json a la base de datos'\n\n def _load_jurisdiction(self, jurisdiction_list):\n\n if jurisdiction_list is None:\n return []\n\n jurisdiction_models = []\n for jurisdiction in jurisdiction_list:\n current_jurisdiction_model = Jurisdiction.objects.create(\n name=jurisdiction['name'], parent=None)\n\n current_jurisdiction_model.save()\n\n if 'divisions' in jurisdiction:\n current_jurisdiction_model.jurisdiction_set = self._load_jurisdiction(\n jurisdiction['divisions'])\n\n jurisdiction_models.append(current_jurisdiction_model)\n\n return jurisdiction_models\n\n def handle(self, *args, **options):\n for file_name in args:\n jurisdiction_list = json.load(open(file_name, 'r'))\n\n self._load_jurisdiction(jurisdiction_list)\n\n self.stdout.write('Successfully loaded Jurisdictions')\n", "id": "7421900", "language": "Python", "matching_score": 0.5275781750679016, "max_stars_count": 0, "path": "ddjj/ddjjapp/management/commands/load_jurisdictions.py" }, { "content": "# -*- coding: utf-8 -*-\nimport os\nimport glob\nimport json\n\n_config = {}\n\ndef _dict_merge(dest, origin):\n if origin is None:\n return\n\n for origin_key, origin_value in origin.items():\n dest_value = dest.get(origin_key)\n\n if (not isinstance(origin_value, dict)) or (not isinstance(dest_value, dict)):\n 
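\n\n\n# A minimal usage sketch (illustrative comments added for clarity; the names\n# below are only the functions defined in this module): because _dict_merge\n# recurses into nested dictionaries, a later load refines individual values\n# instead of replacing whole sub-dictionaries.\n#\n#     >>> load_from_dict({'db': {'host': 'localhost', 'port': 5432}})\n#     >>> load_from_dict({'db': {'port': 5433}})\n#     >>> get_config()['db']['host']\n#     'localhost'\n#     >>> get_config()['db']['port']\n#     5433\n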
", "id": "10588240", "language": "Python", "matching_score": 1.012706995010376, "max_stars_count": 0, "path": "config_magic/config.py" }, { "content": "import unittest\nimport datetime\nimport os.path\nimport os\nimport tempfile\nimport shutil\n\nfrom pelican_do.post import post, PostError\n\n\ndef create_empty_config():\n    open('pelicanconf.py', 'a').close()\n\n\nclass PostTest(unittest.TestCase):\n    def setUp(self):\n        self.cwd = tempfile.mkdtemp()\n        os.chdir(self.cwd)\n\n    def tearDown(self):\n        shutil.rmtree(self.cwd)\n        self.cwd = None\n\n\nclass PostNoConfigError(PostTest):\n    def shortDescription(self):\n        return 'Post action must throw an exception if no \"pelicanconf.py\" file exists in current directory'\n\n    def test(self):\n        os.mkdir('content')\n\n        today = datetime.datetime(2012, 9, 16, 23, 12, 11)\n        name = 'a post name'\n        format = 'rst'\n        category = 'new category'\n        tags = ['tag1', 'tag2']\n        authors = [ 'joe smith', '<NAME>']\n        summary = 'This is a summary'\n        title = 'a title'\n\n        self.assertRaises(PostError, post, today, name, format, title, category, authors, tags, summary)\n\n\nclass PostNoContentError(PostTest):\n    def shortDescription(self):\n        return 'Post action must throw an exception if no content directory exists'\n\n    def test(self):\n        create_empty_config()\n\n        today = datetime.datetime(2012, 9, 16, 23, 12, 11)\n        name = 'a post name'\n        format = 'rst'\n        category = 'new category'\n        tags = ['tag1', 'tag2']\n        authors = [ 'joe smith', '<NAME>']\n        summary = 'This is a summary'\n        title = 'a title'\n\n        self.assertRaises(PostError, post, today, name, format, title, category, authors, tags, summary)\n\n\nclass PostRstTest(PostTest):\n    def shortDescription(self):\n        return 'Create a post with filename based on post name slug and date and RST with metadata'\n\n    def test(self):\n        create_empty_config()\n        os.mkdir('content')\n\n        today = datetime.datetime(2012, 9, 16, 23, 12, 11)\n        name = 'a post name'\n        format = 'rst'\n        category = 'new category'\n        tags = ['tag1', 'tag2']\n        authors = [ '<NAME>', '<NAME>']\n        summary = 'This is a summary'\n        title = 'a title'\n\n        post(today, name, format, title, category, authors, tags, summary)\n\n        content_dir = os.path.join(self.cwd, 'content', category)\n        article_path = os.path.join(content_dir, '2012-09-16-a-post-name.rst')\n\n        self.assertTrue(os.path.isdir(content_dir))\n        self.assertTrue(os.path.isfile(article_path))\n\n        with open(article_path, 'r') as f:\n            content = f.read()\n\n        # title\n        self.assertTrue(\n            'a title\\n#######\\n' in content\n        )\n\n        self.assertTrue(\n            ':category: new category\\n' in content\n        )\n\n        self.assertTrue(\n            ':date: 2012-09-16 23:12\\n' in content\n        )\n\n        self.assertTrue(\n            ':tags: tag1, tag2\\n' in content\n        )\n\n        self.assertTrue(\n            ':authors: <NAME>, <NAME>\\n' in content\n        )\n\n        self.assertTrue(\n            ':summary: This is a summary\\n' in content\n        )\n\n\nclass 
PostRstDefaultTest(PostTest):\n    def shortDescription(self):\n        return 'Create a post with filename based on post name slug and date and RST with default metadata'\n\n    def test(self):\n        create_empty_config()\n        os.mkdir('content')\n\n        today = datetime.datetime(2015, 12, 1, 3, 1, 21)\n        name = 'Post name With defaults'\n        format = 'rst'\n        category = 'a category'\n        tags = None\n        authors = None\n        summary = None\n        title = 'This is a title'\n\n        post(today, name, format, title, category, authors, tags, summary)\n\n        content_dir = os.path.join(self.cwd, 'content', category)\n        article_path = os.path.join(content_dir, '2015-12-01-post-name-with-defaults.rst')\n\n        self.assertTrue(os.path.isdir(content_dir))\n        self.assertTrue(os.path.isfile(article_path))\n\n        with open(article_path, 'r') as f:\n            content = f.read()\n\n        # title\n        self.assertTrue(\n            'This is a title\\n###############\\n\\n' in content\n        )\n\n        self.assertTrue(\n            ':date: 2015-12-01 03:01\\n' in content\n        )\n\n        self.assertTrue(\n            ':tags:' not in content\n        )\n\n        self.assertTrue(\n            ':authors:' not in content\n        )\n\n        self.assertTrue(\n            ':summary:' not in content\n        )\n\n\nclass PostMdTest(PostTest):\n    def shortDescription(self):\n        return 'Create a post with filename based on post name slug and date and Markdown with metadata'\n\n    def test(self):\n        create_empty_config()\n        os.mkdir('content')\n\n        today = datetime.datetime(2012, 9, 16, 23, 12, 11)\n        name = 'a post name'\n        format = 'md'\n        category = 'new category'\n        tags = ['tag1', 'tag2']\n        authors = [ '<NAME>', '<NAME>']\n        summary = 'This is a summary'\n        title = 'a title'\n\n        post(today, name, format, title, category, authors, tags, summary)\n\n        content_dir = os.path.join(self.cwd, 'content', category)\n        article_path = os.path.join(content_dir, '2012-09-16-a-post-name.md')\n\n        self.assertTrue(os.path.isdir(content_dir))\n        self.assertTrue(os.path.isfile(article_path))\n\n        with open(article_path, 'r') as f:\n            content = f.read()\n\n        # title\n        self.assertTrue(\n            'Title: a title\\n' in content\n        )\n\n        self.assertTrue(\n            'Category: new category\\n' in content\n        )\n\n        self.assertTrue(\n            'Date: 2012-09-16 23:12\\n' in content\n        )\n\n        self.assertTrue(\n            'Tags: tag1, tag2\\n' in content\n        )\n\n        self.assertTrue(\n            'Authors: <NAME>, <NAME>\\n' in content\n        )\n\n        self.assertTrue(\n            'Summary: This is a summary\\n' in content\n        )\n\n\nclass PostMdDefaultTest(PostTest):\n    def shortDescription(self):\n        return 'Create a post with filename based on post name slug and date and Markdown with default metadata'\n\n    def test(self):\n        create_empty_config()\n        os.mkdir('content')\n\n        today = datetime.datetime(2015, 12, 1, 3, 1, 21)\n        name = 'Post name With defaults'\n        format = 'md'\n        category = 'a category'\n        tags = None\n        authors = None\n        summary = None\n        title = 'This is a title'\n\n        post(today, name, format, title, category, authors, tags, summary)\n\n        content_dir = os.path.join(self.cwd, 'content', category)\n        article_path = os.path.join(content_dir, '2015-12-01-post-name-with-defaults.md')\n\n        self.assertTrue(os.path.isdir(content_dir))\n        self.assertTrue(os.path.isfile(article_path))\n\n        with open(article_path, 'r') as f:\n            content = f.read()\n\n        # title\n        self.assertTrue(\n            'Title: This is a title\\n' in content\n        )\n\n        self.assertTrue(\n            'Date: 2015-12-01 03:01\\n' in content\n        )\n\n        self.assertTrue(\n            'Category: a category' in content\n        )\n\n        self.assertTrue(\n            'Authors:' not in content\n        )\n\n        self.assertTrue(\n            'Summary:' not in content\n        )\n", "id": "11038654", "language": "Python", "matching_score": 
2.228478193283081, "max_stars_count": 0, "path": "tests/test_post.py" }, { "content": "from jinja2 import Environment\n\nimport textwrap\n\nfrom pelican_do.utils.filters import rst_title, pelican_datetime\nimport slugify\n\nimport os\nimport errno\n\n\nclass PostError(Exception):\n    pass\n\n\ntemplates = {\n    'rst': textwrap.dedent('''\n    {{title|rst_title}}\n    :date: {{date|pelican_datetime}}\n    {% if tags %}\n    :tags: {{tags|join(', ')}}\n    {% endif %}\n    :category: {{category}}\n    :slug: {{slug}}\n    {% if authors %}\n    :authors: {{authors|join(', ')}}\n    {% endif %}\n    {% if summary %}\n    :summary: {{summary}}\n    {% endif %}\n    '''),\n\n    'md': textwrap.dedent('''\n    Title: {{title}}\n    Date: {{date|pelican_datetime}}\n    Category: {{category}}\n    {% if tags %}\n    Tags: {{tags|join(', ')}}\n    {% endif %}\n    Slug: {{slug}}\n    {% if authors %}\n    Authors: {{authors|join(', ')}}\n    {% endif %}\n    {% if summary %}\n    Summary: {{summary}}\n    {% endif %}\n\n    This is the content of my super blog post.\n    ''')\n}\n\n\ndef post(today, name, format, title, category, authors, tags, summary):\n\n    if not os.path.isfile('pelicanconf.py'):\n        raise PostError('\"pelicanconf.py\" must exist in current directory')\n\n    if not os.path.isdir('content'):\n        raise PostError('\"content\" directory does not exist')\n\n    title = title or name\n\n    jinja_environment = Environment()\n    jinja_environment.filters['rst_title'] = rst_title\n    jinja_environment.filters['pelican_datetime'] = pelican_datetime\n\n    template = jinja_environment.from_string(templates[format])\n\n    slug = slugify.slugify(title, to_lower=True)\n\n    filename = '%s-%s.%s' % (today.strftime('%Y-%m-%d'), slugify.slugify(name, to_lower=True), format)\n\n    article_path = os.path.join('content', category)\n\n    try:\n        os.makedirs(article_path)\n\n    except OSError as exception:\n        if exception.errno != errno.EEXIST:\n            raise\n\n    with open(os.path.join(article_path, filename), 'w') as f:\n        f.write(template.render(\n            title=title,\n            date=today,\n            tags=tags,\n            slug=slug,\n            category=category,\n            summary=summary,\n            authors=authors))\n", "id": "12439659", "language": "Python", "matching_score": 2.327277898788452, "max_stars_count": 0, "path": "pelican_do/post.py" }, { "content": "import pytest\n\nfrom pelican_do.utils.filters import rst_title, pelican_datetime\n\nimport datetime\n\n\ndef test_rst_title():\n    assert rst_title('This is a title') == 'This is a title\\n###############\\n'\n\n\ndef test_pelican_datetime():\n    assert pelican_datetime(datetime.datetime(1982, 9, 1, 22, 11, 10)) == '1982-09-01 22:11'\n", "id": "3215921", "language": "Python", "matching_score": 2.000966787338257, "max_stars_count": 0, "path": "tests/test_filters.py" }, { "content": "# Custom filter method\ndef rst_title(s):\n    return s + '\\n' + '#' * len(s) + '\\n'\n\n\ndef pelican_datetime(value):\n    return value.strftime('%Y-%m-%d %H:%M')\n", "id": "9573193", "language": "Python", "matching_score": 0.10640890151262283, "max_stars_count": 0, "path": "pelican_do/utils/filters.py" }, { "content": "import click\nimport datetime\n\nimport pelican_do.post\n\n\n@click.group()\ndef main():\n    pass\n\n\n@main.command()\n@click.argument('name')\n@click.option('--format', default='rst', type=click.Choice(['rst', 'md']), help='Format used to write the article.')\n@click.option('--title', type=str, help='Title for the article. 
By default, it will be the name.', default=None)\n@click.option('--category', type=str, help='Category for the article.', required=True)\n@click.option('--tags', '-t', multiple=True, type=str, default=None, help='Tags for the article.')\n@click.option('--summary', type=str, help='Summary for the article.', default=None)\n@click.option('--authors', '-a', multiple=True, type=str, default=None, help='Authors for the article.')\ndef post(name, format, title, category, tags, summary, authors):\n    today = datetime.datetime.now()\n    pelican_do.post.post(today, name, format, title, category, authors, tags, summary)\n\nif __name__ == '__main__':\n    main()\n", "id": "5265134", "language": "Python", "matching_score": 2.4336156845092773, "max_stars_count": 0, "path": "pelican_do/main.py" }, { "content": "import pytest\nimport click\nimport re\n\nfrom click.testing import CliRunner\nfrom pelican_do.main import main\n\nimport pelican_do.post\n\n\ndef test_post_command_help():\n    runner = CliRunner()\n    result = runner.invoke(main, ['post', '--help'])\n    assert re.search('Usage: .* post \\\[OPTIONS\\\] NAME', result.output)\n\n\ndef test_post_command_arguments(monkeypatch):  # renamed: a second test_post_command_help shadowed the help test above\n    def mockreturn(today, name, format, title, category, authors, tags, summary):\n\n        assert name == 'a post name'\n        assert format == 'rst'\n        assert title == 'some title'\n        assert category == 'some category'\n        assert authors == ('Mosca', 'Smith')\n        assert tags == ('first tag', 'second tag')\n        assert summary == 'a summary'\n\n        return True\n\n    monkeypatch.setattr(pelican_do.post, 'post', mockreturn)\n\n    runner = CliRunner()\n\n    result = runner.invoke(main, ['post', 'a post name', '--format', 'rst', '--title',\n                                  'some title', '--category', 'some category', '--authors',\n                                  'Mosca', '--authors', 'Smith', '--tags', 'first tag', '--tags', 'second tag',\n                                  '--summary', 'a summary'])\n    assert not result.exception\n\n    print 'Output: %s' % result.output\n    assert True\n    # assert re.search('Usage: .* post \\\[OPTIONS\\\] NAME', result.output)\n\n\n# with runner.isolated_filesystem():\n#     with open('hello.txt', 'w') as f:\n#         f.write('Hello World!')\n\n#     result = runner.invoke(main, ['post', '--help'])\n#     Usage: pelican-do post [OPTIONS] NAME\n#     assert result.exit_code == 0\n#     assert result.output == 'Hello World!\\n'\n# #\n", "id": "5441329", "language": "Python", "matching_score": 1.8160291910171509, "max_stars_count": 0, "path": "tests/test_command_line.py" }, { "content": "try:\n from setuptools import setup, find_packages\nexcept ImportError:\n from distutils.core import setup, find_packages\n\n\nconfig = {\n 'name': 'pelican-do',\n 'version': '0.1.1-alpha3',\n 'description': 'Commands to automate common pelican tasks',\n 'long_description': open('README.rst').read(),\n 'license': 'MIT',\n 'author': '<NAME>',\n 'author_email': '<EMAIL>',\n 'url': 'https://github.com/gusajz/pelican-do',\n 'keywords': ['blog', 'pelican'],\n 'install_requires': [\n 'click==6.2',\n 'Jinja2==2.8',\n 'awesome-slugify==1.6.5',\n ],\n 'extras_require': {\n 'development': [\n ],\n },\n 'setup_requires': [\n 'pytest-runner',\n ],\n 'tests_require': [\n 'pytest>=2.6.4',\n 'pytest-cov==2.2.0'\n ],\n 'classifiers': [\n 'Development Status :: 1 - Planning',\n 'Environment :: Console',\n 'Intended Audience :: Developers',\n 'Intended Audience :: Information Technology',\n 'Intended Audience :: Other Audience',\n 'License :: OSI Approved :: MIT License',\n 'Operating System :: OS Independent',\n 'Topic :: Utilities',\n 'Programming Language :: Python :: 2.7',\n ],\n 'packages': find_packages(),\n 
'scripts': [],\n 'entry_points': {\n 'console_scripts': ['pelican-do=pelican_do.main:main']\n }\n}\n\nsetup(**config)\n", "id": "539110", "language": "Python", "matching_score": 1.3681809902191162, "max_stars_count": 0, "path": "setup.py" } ]
1.620299
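A minimal sketch of the metadata-rendering pipeline used in pelican_do/post.py above: register the custom filters on a jinja2 Environment and render a template. The filters are copied from pelican_do/utils/filters.py; the reduced template and the sample values are invented for illustration only.

from jinja2 import Environment
import datetime
import textwrap

def rst_title(s):
    # underline the title with '#' (as in pelican_do/utils/filters.py)
    return s + '\n' + '#' * len(s) + '\n'

def pelican_datetime(value):
    return value.strftime('%Y-%m-%d %H:%M')

env = Environment()
env.filters['rst_title'] = rst_title
env.filters['pelican_datetime'] = pelican_datetime

template = env.from_string(textwrap.dedent('''\
    {{ title|rst_title }}
    :date: {{ date|pelican_datetime }}
    :category: {{ category }}'''))

print(template.render(title='a title',
                      date=datetime.datetime(2012, 9, 16, 23, 12),
                      category='new category'))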
squiroga6
[ { "content": "# Example: Mandatory coin-flip example\n\nfrom IPython.core.pylabtools import figsize\nimport numpy as np\nfrom matplotlib import pyplot as plt\nfigsize(11, 9)\n\nimport scipy.stats as stats\n\ndist = stats.beta\nn_trials = [0, 1, 2, 3, 4, 5, 8, 15, 50, 500]\ndata = stats.bernoulli.rvs(0.5, size=n_trials[-1])\nx = np.linspace(0, 1, 100)\n\n# For the already prepared, I'm using Binomial's conj. prior.\nfor k, N in enumerate(n_trials):\n sx = plt.subplot(len(n_trials)/2, 2, k+1)\n # plt.xlabel(\"$p$, probability of heads\") if k in [0, len(n_trials)-1] else None\n # plt.setp(sx.get_yticklabels(), visible=False)\n heads = data[:N].sum()\n y = dist.pdf(x, 1 + heads, 1 + N - heads)\n plt.plot(x, y, label=\"observe %d tosses,\\n %d heads\" % (N, heads))\n # plt.fill_between(x, 0, y, color=\"#348ABD\", alpha=0.4)\n # plt.vlines(0.5, 0, 4, color=\"k\", linestyles=\"--\", lw=1)\n\n leg = plt.legend()\n leg.get_frame().set_alpha(0.4)\n plt.autoscale(tight=True)\n\n\nplt.suptitle(\"Bayesian updating of posterior probabilities\",\n y=1.02,\n fontsize=14)\n\n# Example: Bug, or just sweet, unintended feature?\nimport numpy as np\nfigsize(12.5, 4)\np = np.linspace(0, 1, 50)\nplt.plot(p, 2*p/(1+p), color=\"#348ABD\", lw=3)\n#plt.fill_between(p, 2*p/(1+p), alpha=.5, facecolor=[\"#A60628\"])\nplt.scatter(0.2, 2*(0.2)/1.2, s=140, c=\"#348ABD\")\nplt.xlim(0, 1)\nplt.ylim(0, 1)\nplt.xlabel(\"Prior, $P(A) = p$\")\nplt.ylabel(\"Posterior, $P(A|X)$, with $P(A) = p$\")\nplt.title(\"Are there bugs in my code?\");\n\n# Continuous Case\nimport scipy.stats as stats\nimport numpy as np\na = np.linspace(0, 4, 100)\nexpo = stats.expon\nlambda_ = [0.5, 1, 2]\n\nfor l, c in zip(lambda_, colours):\n plt.plot(a, expo.pdf(a, scale=1./l), lw=3,\n color=c, label=\"$\\lambda = %.1f$\" % l)\n plt.fill_between(a, expo.pdf(a, scale=1./l), color=c, alpha=.33)\n", "id": "11003350", "language": "Python", "matching_score": 0, "max_stars_count": 0, "path": "Chapter1_Introduction/notes.py" }, { "content": "import sys\n\nimport numpy as np\nfrom IPython.core.display import Image\n\nimport praw\nimport json\n\nwith open(\"../config.json\", \"r\") as read_file:\n data = json.load(read_file)\n\n# reddit = praw.Reddit(\"BayesianMethodsForHackers\")\n\nreddit = praw.Reddit(client_id=data['user_id'],\n client_secret=data['secret'],\n user_agent=\"Bayes methods for hackers course\",\n username=data['username'],\n password=data['password'])\n\nsubreddit = reddit.subreddit(\"showerthoughts\")\n\ntop_submissions = subreddit.top(limit=100)\n\nn_sub = int( sys.argv[1] ) if sys.argv[1] else 1\n\ni = 0\nwhile i < n_sub:\n top_submission = next(top_submissions)\n i+=1\n\ntop_post = top_submission.title\n\nupvotes = []\ndownvotes = []\ncontents = []\n\n# for sub in top_submissions:\n# try:\n# ratio = reddit.get_submission(sub.permalink).upvote_ratio\n# ups = int(round((ratio*sub.score)/(2*ratio - 1)) if ratio != 0.5 else round(sub.score/2))\n# upvotes.append(ups)\n# downvotes.append(ups - sub.score)\n# contents.append(sub.title)\n# except Exception as e:\n# continue\n\nfor sub in top_submissions:\n try:\n ratio = sub.upvote_ratio\n ups = int(round((ratio*sub.score)/(2*ratio - 1)) if ratio != 0.5 else round(sub.score/2))\n upvotes.append(sub.ups)\n downvotes.append(ups - sub.score)\n contents.append(sub.title)\n except Exception as e:\n raise e\n \nvotes = np.array( [ upvotes, downvotes] ).T", "id": "11897625", "language": "Python", "matching_score": 0, "max_stars_count": 0, "path": 
"Chapter4_TheGreatestTheoremNeverTold/top_showerthoughts_submissions.py" } ]
0
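The coin-flip figure in notes.py above relies on Beta-Binomial conjugacy: starting from a uniform Beta(1, 1) prior, observing heads successes in N Bernoulli tosses gives the posterior Beta(1 + heads, 1 + N - heads). A self-contained sketch of that update; the seed and the interval level are arbitrary choices, not from the repo.

import scipy.stats as stats

N = 50
data = stats.bernoulli.rvs(0.5, size=N, random_state=42)
heads = data.sum()

# Conjugate update: Beta(1, 1) prior + Binomial likelihood -> Beta posterior
posterior = stats.beta(1 + heads, 1 + N - heads)
print("posterior mean: %.3f" % posterior.mean())
print("95%% credible interval: (%.3f, %.3f)" % posterior.interval(0.95))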
daniilov
[ { "content": "from model.group import Group\n\n\ndef test_add_group(app):\n app.group.create(Group(name=\"create group\", header=\"create logo\", footer=\"create comment\"))\n\n\ndef test_add_empty_group(app):\n app.group.create(Group(name=\"\", header=\"\", footer=\"\"))\n", "id": "11248600", "language": "Python", "matching_score": 0, "max_stars_count": 0, "path": "test_add_group.py" } ]
0
Apollo3zehn
[ { "content": "import requests\r\nfrom signalrcore_async.hub_connection_builder import HubConnectionBuilder\r\n\r\n\r\ndef input_with_default(input_text, default_value):\r\n value = input(input_text.format(default_value))\r\n return default_value if value is None or value.strip() == \"\" else value\r\n\r\n\r\ndef signalr_core_example_login(url, user, username_password):\r\n response = requests.post(url, data={\"email\": user, \"password\": username_password}, verify=False)\r\n return response.json()[\"token\"]\r\n\r\n\r\nlogin_url = input_with_default('Enter your server login url({0}):', \"https://localhost:50746/account/token\")\r\nserver_url = input_with_default('Enter your server url(default: {0}): ', \"wss://localhost:50746/hubs/chat\")\r\nusername = input_with_default('Enter your username (default: {0}): ', \"<EMAIL>\")\r\npassword = input_with_default('Enter your password (default: {0}): ', \"<PASSWORD>?\")\r\n\r\nhub_connection = HubConnectionBuilder()\\\r\n .with_url(server_url, options={\r\n \"access_token_factory\": lambda: signalr_core_example_login(login_url, username, password),\r\n \"verify_ssl\": False\r\n }).with_automatic_reconnect({\r\n \"type\": \"interval\",\r\n \"keep_alive_interval\": 10,\r\n \"intervals\": [1, 3, 5, 6, 7, 87, 3]\r\n })\\\r\n .build()\r\n\r\nhub_connection.on_open(lambda: print(\"connection opened and handshake received ready to send messages\"))\r\nhub_connection.on_close(lambda: print(\"connection closed\"))\r\n\r\nhub_connection.on(\"ReceiveSystemMessage\", print)\r\nhub_connection.on(\"ReceiveChatMessage\", print)\r\nhub_connection.on(\"ReceiveDirectMessage\", print)\r\n\r\nhub_connection.start()\r\nmessage = None\r\nwhile message != \"exit()\":\r\n message = input(\">> \")\r\n if message is not None and message is not \"\" and message is not \"exit()\":\r\n hub_connection.send(\"Send\", [message])\r\nhub_connection.stop()\r\n", "id": "11862403", "language": "Python", "matching_score": 4.982269763946533, "max_stars_count": 2, "path": "test/examples/chat_auth.py" }, { "content": "import logging\nimport sys\nfrom signalrcore_async.hub_connection_builder import HubConnectionBuilder\n\n\ndef input_with_default(input_text, default_value):\n value = input(input_text.format(default_value))\n return default_value if value is None or value.strip() == \"\" else value\n\n\nserver_url = input_with_default('Enter your server url(default: {0}): ', \"wss://localhost:44376/chatHub\")\nusername = input_with_default('Enter your username (default: {0}): ', \"mandrewcito\")\nhandler = logging.StreamHandler()\nhandler.setLevel(logging.DEBUG)\nhub_connection = HubConnectionBuilder()\\\n .with_url(server_url, options={\"verify_ssl\": False}) \\\n .configure_logging(logging.DEBUG, socket_trace=True, handler=handler) \\\n .with_automatic_reconnect({\n \"type\": \"interval\",\n \"keep_alive_interval\": 10,\n \"intervals\": [1, 3, 5, 6, 7, 87, 3]\n }).build()\n\nhub_connection.on_open(lambda: print(\"connection opened and handshake received ready to send messages\"))\nhub_connection.on_close(lambda: print(\"connection closed\"))\n\nhub_connection.on(\"ReceiveMessage\", print)\nhub_connection.start()\nmessage = None\n\n# Do login\n\nwhile message != \"exit()\":\n message = input(\">> \")\n if message is not None and message is not \"\" and message is not \"exit()\":\n hub_connection.send(\"SendMessage\", [username, message])\n\nhub_connection.stop()\n\nsys.exit(0)", "id": "2980894", "language": "Python", "matching_score": 4.310746669769287, "max_stars_count": 2, "path": 
"test/examples/chat.py" }, { "content": "import os\nimport unittest\nimport logging\nimport time\nimport uuid\nimport requests\nfrom subprocess import Popen, PIPE\nfrom signalrcore_async.hub_connection_builder import HubConnectionBuilder\n\n\nclass TestSendAuthMethod(unittest.TestCase):\n container_id = \"netcore_chat_app\"\n connection = None\n server_url = \"wss://localhost:5001/authHub\"\n login_url = \"https://localhost:5001/users/authenticate\"\n email = \"test\"\n password = \"<PASSWORD>\"\n received = False\n connected = False\n message = None\n\n def login(self):\n response = requests.post(\n self.login_url,\n json={\n \"username\": self.email,\n \"password\": self.password\n },verify=False)\n return response.json()[\"token\"]\n\n def setUp(self):\n self.connection = HubConnectionBuilder()\\\n .with_url(self.server_url,\n options={\n \"verify_ssl\": False,\n \"access_token_factory\": self.login,\n \"headers\": {\n \"mycustomheader\": \"mycustomheadervalue\"\n }\n })\\\n .configure_logging(logging.DEBUG)\\\n .with_automatic_reconnect({\n \"type\": \"raw\",\n \"keep_alive_interval\": 10,\n \"reconnect_interval\": 5,\n \"max_attempts\": 5\n }).build()\n self.connection.on(\"ReceiveMessage\", self.receive_message)\n self.connection.on_open(self.on_open)\n self.connection.on_close(self.on_close)\n self.connection.start()\n while not self.connected:\n time.sleep(0.1)\n\n def tearDown(self):\n self.connection.stop()\n\n def on_open(self):\n self.connected = True\n\n def on_close(self):\n self.connected = False\n\n def receive_message(self, args):\n self.assertEqual(args[0], self.message)\n self.received = True\n\n def test_send(self):\n self.message = \"new message {0}\".format(uuid.uuid4())\n self.username = \"mandrewcito\"\n time.sleep(1)\n self.received = False\n self.connection.send(\"SendMessage\", [self.message])\n while not self.received:\n time.sleep(0.1)\n \n\n", "id": "6517512", "language": "Python", "matching_score": 6.129029750823975, "max_stars_count": 2, "path": "test/send_auth_test.py" }, { "content": "import os\nimport unittest\nimport logging\nimport time\nimport uuid\n\nfrom subprocess import Popen, PIPE\nfrom signalrcore_async.hub_connection_builder import HubConnectionBuilder\nfrom signalrcore_async.subject import Subject\nclass TestSendMethod(unittest.TestCase):\n container_id = \"netcore_stream_app\"\n connection = None\n server_url = \"wss://localhost:5001/chatHub\"\n received = False\n connected = False\n items = list(range(0,10))\n\n def setUp(self):\n self.connection = HubConnectionBuilder()\\\n .with_url(self.server_url, options={\"verify_ssl\":False})\\\n .configure_logging(logging.DEBUG)\\\n .with_automatic_reconnect({\n \"type\": \"raw\",\n \"keep_alive_interval\": 10,\n \"reconnect_interval\": 5,\n \"max_attempts\": 5\n })\\\n .build()\n self.connection.on_open(self.on_open)\n self.connection.on_close(self.on_close)\n self.connection.start()\n while not self.connected:\n time.sleep(0.1)\n\n def tearDown(self):\n self.connection.stop()\n\n def on_open(self):\n print(\"opene\")\n self.connected = True\n\n def on_close(self):\n self.connected = False\n\n def on_complete(self, x):\n self.complete = True\n \n def on_error(self, x):\n pass\n\n def test_stream(self):\n self.complete = False\n self.items = list(range(0,10))\n subject = Subject()\n self.connection.send(\"UploadStream\", subject)\n while(len(self.items) > 0):\n subject.next(str(self.items.pop()))\n subject.complete()\n self.assertTrue(len(self.items) == 0)\n", "id": "6181030", "language": "Python", 
"matching_score": 1.6806812286376953, "max_stars_count": 2, "path": "test/client_streaming_test.py" }, { "content": "import asyncio\nimport ssl\nimport threading\nimport time\nimport uuid\n\nimport requests\nimport websockets\nfrom signalrcore_async.helpers import Helpers\nfrom signalrcore_async.messages import (InvocationMessage, MessageType,\n PingMessage, StreamInvocationMessage)\n\nfrom ..protocol.json import JsonHubProtocol\nfrom .connection_state import ConnectionState\nfrom .errors import HubError, UnAuthorizedHubError\nfrom .reconnection import ConnectionStateChecker\n\n\nclass StreamHandler(object):\n def __init__(self, event, invocation_id):\n self.event = event\n self.invocation_id = invocation_id\n self.next_callback = None\n\n def subscribe(self, subscribe_callbacks):\n if subscribe_callbacks is None:\n raise ValueError(\" subscribe object must be {0}\".format({\n \"next\": None\n }))\n self.next_callback = subscribe_callbacks[\"next\"]\n\nclass async_event(asyncio.Event):\n def set(self):\n self._loop.call_soon_threadsafe(super().set)\n\nclass WebSocketsConnection(object):\n\n last_result = None\n last_invocation_id = None\n last_error = None\n event = None\n loop = None\n\n def __init__(self, hubConnection):\n self._hub_connection = hubConnection\n\n async def run(self):\n url = self._hub_connection.url\n headers = self._hub_connection.headers\n max_size = 1_000_000_000\n \n # connect\n self._ws = await websockets.connect(url, max_size=max_size, extra_headers=headers)\n self._hub_connection.logger.debug(\"-- web socket open --\")\n\n # handshake\n msg = self._hub_connection.protocol.handshake_message()\n self._hub_connection._internal_send(msg, self._hub_connection.handshake_protocol)\n response = await self._ws.recv()\n self._hub_connection.evaluate_handshake(response)\n\n if self._hub_connection.on_connect is not None and callable(self._hub_connection.on_connect):\n self._hub_connection.state = ConnectionState.connected\n self._hub_connection.on_connect()\n\n # message loop\n self.loop = asyncio.create_task(self._receive_messages())\n \n async def invoke(self, data, invocationId):\n\n self.event = async_event()\n self.last_invocation_id = invocationId\n \n await self._ws.send(data)\n await self.event.wait()\n\n if (self.last_error is not None):\n raise Exception(self.last_error)\n else:\n return self.last_result\n\n def send(self, data):\n asyncio.create_task(self._ws.send(data))\n\n def handle_completion(self, message):\n if message.invocation_id == self.last_invocation_id:\n if message.error is not None:\n self.last_result = None\n self.last_error = message.error\n self.event.set() \n else:\n self.last_result = message.result\n self.last_error = None\n self.event.set()\n\n self.last_invocation_id = -1\n\n async def close(self):\n self._hub_connection.logger.debug(\"-- web socket close --\")\n\n if self._hub_connection.on_disconnect is not None and callable(self._hub_connection.on_disconnect):\n self._hub_connection.on_disconnect()\n\n if (self._ws is not None):\n await self._ws.close()\n\n self.last_error = \"The connection was closed unexpectedly.\"\n\n if (self.event is not None):\n self.event.set()\n\n if (self.loop is not None):\n self.loop.cancel()\n\n async def _receive_messages(self):\n while (True):\n raw_message = await self._ws.recv()\n self._hub_connection.on_message(raw_message)\n\nclass BaseHubConnection(object):\n def __init__(\n self,\n url,\n protocol,\n headers={},\n keep_alive_interval=15,\n reconnection_handler=None,\n verify_ssl=False,\n 
skip_negotiation=False):\n self.skip_negotiation = skip_negotiation\n self.logger = Helpers.get_logger()\n self.url = url\n self.protocol = protocol\n self.handshake_protocol = JsonHubProtocol()\n self.headers = headers\n self.handshake_received = False\n self.token = None # auth\n self.state = ConnectionState.disconnected\n self.connection_alive = False\n self.handlers = []\n self.stream_handlers = []\n self._thread = None\n self._ws = None\n self.verify_ssl = verify_ssl\n self.connection_checker = ConnectionStateChecker(\n lambda: self._internal_send(PingMessage()),\n keep_alive_interval\n )\n self.reconnection_handler = reconnection_handler\n self.on_connect = None\n self.on_disconnect = None\n\n def negotiate(self):\n negotiate_url = Helpers.get_negotiate_url(self.url)\n self.logger.debug(\"Negotiate url:{0}\".format(negotiate_url))\n\n response = requests.post(negotiate_url, headers=self.headers, verify=self.verify_ssl)\n self.logger.debug(\"Response status code{0}\".format(response.status_code))\n\n if response.status_code != 200:\n raise HubError(response.status_code) if response.status_code != 401 else UnAuthorizedHubError()\n data = response.json()\n if \"connectionId\" in data.keys():\n self.url = Helpers.encode_connection_id(self.url, data[\"connectionId\"])\n\n # Azure\n if 'url' in data.keys() and 'accessToken' in data.keys():\n Helpers.get_logger().debug(\"Azure url, reformat headers, token and url {0}\".format(data))\n self.url = data[\"url\"] if data[\"url\"].startswith(\"ws\") else Helpers.http_to_websocket(data[\"url\"])\n self.token = data[\"accessToken\"]\n self.headers = {\"Authorization\": \"Bearer \" + self.token}\n\n async def start(self):\n if not self.skip_negotiation:\n self.negotiate()\n self.logger.debug(\"Connection started\")\n if self.state == ConnectionState.connected:\n self.logger.warning(\"Already connected unable to start\")\n return\n self.state = ConnectionState.connecting\n self.logger.debug(\"start url:\" + self.url)\n\n self._ws = WebSocketsConnection(self)\n await self._ws.run()\n\n async def stop(self):\n self.logger.debug(\"Connection stop\")\n if self.state == ConnectionState.connected:\n await self._ws.close()\n self.connection_checker.stop()\n self.state == ConnectionState.disconnected\n\n def register_handler(self, event, callback):\n self.logger.debug(\"Handler registered started {0}\".format(event))\n self.handlers.append((event, callback))\n\n def evaluate_handshake(self, message):\n self.logger.debug(\"Evaluating handshake {0}\".format(message))\n msg = self.handshake_protocol.decode_handshake(message)\n if msg.error is None or msg.error == \"\":\n self.handshake_received = True\n self.state = ConnectionState.connected\n if self.reconnection_handler is not None:\n self.reconnection_handler.reconnecting = False\n if not self.connection_checker.running:\n self.connection_checker.start()\n else:\n self.logger.error(msg.error)\n raise ValueError(\"Handshake error {0}\".format(msg.error))\n\n def on_message(self, raw_message):\n\n # self.logger.debug(\"Message received{0}\".format(raw_message))\n self.connection_checker.last_message = time.time()\n messages = self.protocol.parse_messages(raw_message)\n\n for message in messages:\n if message.type == MessageType.invocation_binding_failure:\n self.logger.error(message)\n continue\n if message.type == MessageType.ping:\n continue\n\n if message.type == MessageType.invocation:\n fired_handlers = list(\n filter(\n lambda h: h[0] == message.target,\n self.handlers))\n if len(fired_handlers) == 0:\n 
self.logger.warning(\n \"event '{0}' hasn't fire any handler\".format(\n message.target))\n for _, handler in fired_handlers:\n handler(message.arguments)\n\n if message.type == MessageType.close:\n self.logger.info(\"Close message received from server\")\n asyncio.create_task(self.stop())\n return\n\n if message.type == MessageType.completion:\n self._ws.handle_completion(message)\n\n if message.type == MessageType.stream_item:\n fired_handlers = list(\n filter(\n lambda h: h.invocation_id == message.invocation_id,\n self.stream_handlers))\n if len(fired_handlers) == 0:\n self.logger.warning(\n \"id '{0}' hasn't fire any stream handler\".format(\n message.invocation_id))\n for handler in fired_handlers:\n handler.next_callback(message.item)\n\n if message.type == MessageType.stream_invocation:\n pass\n\n if message.type == MessageType.cancel_invocation:\n pass # not implemented\n\n async def invoke(self, method, arguments):\n if type(arguments) is not list:\n raise HubConnectionError(\"Arguments of a message must be a list\")\n\n if type(arguments) is list:\n invocation_id = str(uuid.uuid4())\n message = InvocationMessage({}, invocation_id, method, arguments)\n return await self._internal_invoke(message)\n\n\n async def _internal_invoke(self, message, protocol=None):\n\n self.logger.debug(\"Sending message.\".format(message))\n\n try:\n protocol = self.protocol if protocol is None else protocol\n invocation_id = message.invocation_id\n result = await self._ws.invoke(protocol.encode(message), invocation_id)\n\n self.connection_checker.last_message = time.time()\n\n if self.reconnection_handler is not None:\n self.reconnection_handler.reset()\n\n return result\n \n except Exception as ex:\n raise ex\n\n def send(self, method, arguments):\n if type(arguments) is not list and type(arguments) is not Subject:\n raise HubConnectionError(\"Arguments of a message must be a list or subject\")\n\n if type(arguments) is list:\n self._internal_send(InvocationMessage(\n {},\n 0,\n method,\n arguments))\n\n if type(arguments) is Subject:\n arguments.connection = self\n arguments.target = method\n arguments.start()\n\n def _internal_send(self, message, protocol=None):\n\n self.logger.debug(\"Sending message {0}\".format(message))\n\n try:\n protocol = self.protocol if protocol is None else protocol\n\n self._ws.send(protocol.encode(message))\n self.connection_checker.last_message = time.time()\n\n if self.reconnection_handler is not None:\n self.reconnection_handler.reset()\n\n except Exception as ex:\n raise ex\n\n def handle_reconnect(self):\n self.reconnection_handler.reconnecting = True\n try:\n self.stop()\n self.start()\n except Exception as ex:\n self.logger.error(ex)\n sleep_time = self.reconnection_handler.next()\n threading.Thread(\n target=self.deferred_reconnect,\n args=(sleep_time,)\n )\n\n def deferred_reconnect(self, sleep_time):\n time.sleep(sleep_time)\n try:\n if not self.connection_alive:\n self._send_ping()\n except Exception as ex:\n self.reconnection_handler.reconnecting = False\n self.connection_alive = False\n\n async def stream(self, event, event_params, on_next_item):\n invocation_id = str(uuid.uuid4())\n stream_obj = StreamHandler(event, invocation_id)\n stream_obj.subscribe({ \"next\": on_next_item })\n self.stream_handlers.append(stream_obj)\n await self._internal_invoke(\n StreamInvocationMessage(\n {},\n invocation_id,\n event,\n event_params))\n\n def on_close(self, callback):\n self.on_disconnect = callback\n\n def on_open(self, callback):\n self.on_connect = callback\n\n 
def on(self, event, callback_function):\n \"\"\"\n Register a callback on the specified event\n :param event: Event name\n :param callback_function: callback function, arguments will be binded\n :return:\n \"\"\"\n self.register_handler(event, callback_function)\n", "id": "11191153", "language": "Python", "matching_score": 4.788690090179443, "max_stars_count": 2, "path": "signalrcore_async/hub/base_hub_connection.py" }, { "content": "from .base_hub_connection import BaseHubConnection\nfrom ..helpers import Helpers\n\n\nclass AuthHubConnection(BaseHubConnection):\n def __init__(self, url, protocol, auth_function, keep_alive_interval=15, reconnection_handler=None,\n headers={}, verify_ssl=False, skip_negotiation=False):\n self.headers = headers\n self.auth_function = auth_function\n super(AuthHubConnection, self).__init__(\n url,\n protocol,\n headers=headers,\n keep_alive_interval=keep_alive_interval,\n reconnection_handler=reconnection_handler,\n verify_ssl=verify_ssl,\n skip_negotiation=skip_negotiation)\n\n async def start(self):\n try:\n Helpers.get_logger().debug(\"Starting connection ...\")\n self.token = self.auth_function()\n Helpers.get_logger().debug(\"auth function result {0}\".format(self.token))\n self.headers[\"Authorization\"] = \"Bearer \" + self.token\n await super(AuthHubConnection, self).start()\n except Exception as ex:\n Helpers.get_logger().error(self.__class__.__name__)\n Helpers.get_logger().error(str(ex))\n raise ex\n\n", "id": "8563024", "language": "Python", "matching_score": 0.29112377762794495, "max_stars_count": 2, "path": "signalrcore_async/hub/auth_hub_connection.py" }, { "content": "import json\n\nfrom ..messages import *\n\n\nclass BaseHubProtocol(object):\n def __init__(self, protocol, version, transfer_format, record_separator):\n self.protocol = protocol\n self.version = version\n self.transfer_format = transfer_format\n self.record_separator = record_separator\n\n @staticmethod\n def get_message(dict_message):\n message_type = MessageType(dict_message[\"type\"])\n if message_type is MessageType.invocation:\n return InvocationMessage(\n dict_message[\"headers\"]\n if \"headers\" in dict_message.keys() else {},\n dict_message[\"invocationId\"]\n if \"invocationId\" in dict_message.keys() else None,\n dict_message[\"target\"],\n dict_message[\"arguments\"])\n if message_type is MessageType.stream_item:\n return StreamItemMessage(\n dict_message[\"headers\"]\n if \"headers\" in dict_message.keys() else {},\n dict_message[\"invocationId\"]\n if \"invocationId\" in dict_message.keys() else None,\n dict_message[\"item\"]\n )\n if message_type is MessageType.completion:\n return CompletionMessage(\n dict_message[\"headers\"]\n if \"headers\" in dict_message.keys() else {},\n dict_message[\"invocationId\"],\n dict_message[\"result\"]\n if \"result\" in dict_message.keys() else None,\n dict_message[\"error\"]\n if \"error\" in dict_message.keys() else None\n )\n if message_type is MessageType.stream_invocation:\n return StreamInvocationMessage(\n dict_message[\"headers\"]\n if \"headers\" in dict_message.keys() else {},\n dict_message[\"invocationId\"],\n dict_message[\"target\"],\n dict_message[\"arguments\"])\n if message_type is MessageType.cancel_invocation:\n return CancelInvocationMessage(\n dict_message[\"headers\"]\n if \"headers\" in dict_message.keys() else {},\n dict_message[\"invocationId\"])\n if message_type is MessageType.ping:\n return PingMessage()\n if message_type is MessageType.close:\n return CloseMessage(\n dict_message[\"headers\"]\n if 
\"headers\" in dict_message.keys() else {},\n dict_message[\"error\"]\n if \"error\" in dict_message.keys() else None\n )\n\n def decode_handshake(self, raw_message):\n return HandshakeResponseMessage(None)\n # messages = raw_message.split(self.record_separator)\n # data = json.loads(messages[0])\n # return HandshakeResponseMessage(\n # data[\"error\"] if \"error\" in data.keys() else None)\n\n def handshake_message(self):\n return HandshakeRequestMessage(self.protocol, self.version)\n\n def parse_messages(self, raw_message):\n raise ValueError(\"Protocol must implement this method\")\n\n def write_message(self, hub_message):\n raise ValueError(\"Protocol must implement this method\")\n", "id": "4194808", "language": "Python", "matching_score": 3.475663661956787, "max_stars_count": 2, "path": "signalrcore_async/protocol/base_hub_protocol.py" }, { "content": "from datetime import date, datetime, timezone\nfrom enum import Enum\n\nimport msgpack\nfrom msgpack.ext import Timestamp\n\nfrom ..messages import *\nfrom .base_hub_protocol import BaseHubProtocol\n\n\nclass MessagePackHubProtocol(BaseHubProtocol):\n\n _priority = [\n \"type\",\n \"headers\",\n \"invocation_id\",\n \"target\",\n \"arguments\",\n \"item\",\n \"result_kind\",\n \"result\",\n \"stream_ids\"\n ]\n\n def __init__(self):\n super(MessagePackHubProtocol, self).__init__(\n \"messagepack\", 1, \"Text\", chr(0x1E))\n\n def parse_messages(self, raw):\n offset = 0\n messages = []\n total_length = len(raw)\n\n while True:\n (length, length_size) = self._from_varint(raw, offset)\n offset += length_size\n sliced_data = raw[offset:offset + length]\n offset += length\n\n message = self._decode_message(msgpack.unpackb(sliced_data))\n messages.append(message)\n\n if (offset >= total_length):\n break\n\n return messages\n\n def encode(self, message):\n encoded_message = msgpack.packb(message, default=self._encode_message)\n varint_length = self._to_varint(len(encoded_message))\n return varint_length + encoded_message\n\n def _encode_message(self, message):\n\n # prepare arguments\n if hasattr(message, \"arguments\"):\n\n for i, argument in enumerate(message.arguments):\n\n # date\n if type(argument) is date:\n argument = datetime(argument.year, argument.month, argument.day, tzinfo=timezone.utc)\n\n # date/time\n if isinstance(argument, datetime):\n date_time = argument\n timestamp = date_time.timestamp()\n seconds = int(timestamp)\n nanoseconds = int((timestamp - int(timestamp)) * 1e9)\n message.arguments[i] = Timestamp(seconds, nanoseconds)\n\n # message type\n elif isinstance(argument, Enum):\n message.arguments[i] = argument.name\n\n result = []\n\n # sort attributes\n for attribute in self._priority:\n if hasattr(message, attribute):\n if (attribute == \"type\"):\n result.append(getattr(message, attribute).value) \n else:\n result.append(getattr(message, attribute))\n\n return result\n\n def _decode_message(self, raw):\n\n # [1, Headers, InvocationId, Target, [Arguments], [StreamIds]]\n # [2, Headers, InvocationId, Item]\n # [3, Headers, InvocationId, ResultKind, Result]\n # [4, Headers, InvocationId, Target, [Arguments], [StreamIds]]\n # [5, Headers, InvocationId]\n # [6]\n # [7, Error, AllowReconnect?]\n\n if raw[0] == 1: # InvocationMessage\n if len(raw[5]) > 0:\n return InvocationClientStreamMessage(headers=raw[1], stream_ids=raw[5], target=raw[3], arguments=raw[4])\n else:\n return InvocationMessage(headers=raw[1], invocation_id=raw[2], target=raw[3], arguments=raw[4])\n\n elif raw[0] == 2: # StreamItemMessage\n return 
StreamItemMessage(headers=raw[1], invocation_id=raw[2], item=raw[3])\n\n elif raw[0] == 3: # CompletionMessage\n result_kind = raw[3]\n\n if result_kind == 1:\n return CompletionMessage(headers=raw[1], invocation_id=raw[2], result=None, error=raw[4])\n\n elif result_kind == 2:\n return CompletionMessage(headers=raw[1], invocation_id=raw[2], result=None, error=None)\n\n elif result_kind == 3:\n return CompletionMessage(headers=raw[1], invocation_id=raw[2], result=raw[4], error=None)\n\n else: \n raise Exception(\"Unknown result kind.\")\n\n elif raw[0] == 4: # StreamInvocationMessage\n return StreamInvocationMessage(headers=raw[1], invocation_id=raw[2], target=raw[3], arguments=raw[4]) # stream_id missing?\n\n elif raw[0] == 5: # CancelInvocationMessage\n return CancelInvocationMessage(headers=raw[1], invocation_id=raw[2])\n\n elif raw[0] == 6: # PingMessageEncoding\n return PingMessage()\n\n elif raw[0] == 7: # CloseMessageEncoding\n return CloseMessage(error=raw[1]) # AllowReconnect is missing\n\n raise Exception(\"Unknown message type.\")\n\n def _from_varint(self, buffer, offset):\n shift = 0\n value = 0\n i = offset\n\n while True:\n byte = buffer[i]\n value |= (byte & 0x7f) << shift\n shift += 7\n\n if not (byte & 0x80):\n break\n\n i += 1\n\n return (value, i + 1)\n\n def _to_varint(self, value):\n buffer = b''\n\n while True:\n\n byte = value & 0x7f\n value >>= 7\n\n if value:\n buffer += bytes((byte | 0x80, ))\n else:\n buffer += bytes((byte, ))\n break\n\n return buffer\n", "id": "422675", "language": "Python", "matching_score": 4.3668036460876465, "max_stars_count": 2, "path": "signalrcore_async/protocol/msgpack.py" }, { "content": "from enum import Enum\n\n\nclass MessageType(Enum):\n invocation = 1\n stream_item = 2\n completion = 3\n stream_invocation = 4\n cancel_invocation = 5\n ping = 6\n close = 7\n invocation_binding_failure = -1\n\nclass BaseMessage(object):\n def __init__(self, message_type):\n self.type = MessageType(message_type)\n\nclass PingMessage(BaseMessage):\n def __init__(\n self):\n super(PingMessage, self).__init__(6)\n\nclass CloseMessage(BaseMessage):\n def __init__(\n self,\n error):\n super(CloseMessage, self).__init__(7)\n self.error = error\n\nclass BaseHeadersMessage(BaseMessage):\n def __init__(self, message_type, headers):\n super(BaseHeadersMessage, self).__init__(message_type)\n self.headers = headers\n\nclass HandshakeRequestMessage(object):\n\n def __init__(self, protocol, version):\n self.protocol = protocol\n self.version = version\n\nclass HandshakeResponseMessage(object):\n\n def __init__(self, error):\n self.error = error\n\nclass CancelInvocationMessage(BaseHeadersMessage):\n def __init__(\n self,\n headers,\n invocation_id):\n super(CancelInvocationMessage, self).__init__(5, headers)\n self.invocation_id = invocation_id\n\nclass CompletionClientStreamMessage(BaseHeadersMessage):\n def __init__(\n self,\n headers,\n invocation_id):\n super(CompletionClientStreamMessage, self).__init__(3, headers)\n self.invocation_id = invocation_id\n\n\nclass CompletionMessage(BaseHeadersMessage):\n def __init__(\n self,\n headers,\n invocation_id,\n result,\n error):\n super(CompletionMessage, self).__init__(3, headers)\n self.invocation_id = invocation_id\n self.result = result\n self.error = error\n\nclass InvocationMessage(BaseHeadersMessage):\n def __init__(\n self,\n headers,\n invocation_id,\n target,\n arguments):\n super(InvocationMessage, self).__init__(1, headers)\n self.invocation_id = invocation_id\n self.target = target\n self.arguments = 
arguments\n\n def __repr__(self):\n if (self.invocation_id == 0):\n return \"InvocationMessage: target {1}, arguments {2}\".format(self.invocation_id, self.target, self.arguments)\n else:\n return \"InvocationMessage: invocation_id {0}, target {1}, arguments {2}\".format(self.invocation_id, self.target, self.arguments)\n\nclass InvocationClientStreamMessage(BaseHeadersMessage):\n def __init__(\n self,\n headers,\n stream_ids,\n target,\n arguments):\n super(InvocationClientStreamMessage, self).__init__(1, headers)\n self.target = target\n self.arguments = arguments\n self.stream_ids = stream_ids\n\n def __repr__(self):\n return \"InvocationMessage: stream_ids {0}, target {1}, arguments {2}\".format(\n self.stream_ids, self.target, self.arguments)\n\nclass StreamInvocationMessage(BaseHeadersMessage):\n def __init__(\n self,\n headers,\n invocation_id,\n target,\n arguments):\n super(StreamInvocationMessage, self).__init__(4, headers)\n self.invocation_id = invocation_id\n self.target = target\n self.arguments = arguments\n self.stream_ids = []\n\nclass StreamItemMessage(BaseHeadersMessage):\n def __init__(\n self,\n headers,\n invocation_id,\n item):\n super(StreamItemMessage, self).__init__(2, headers)\n self.invocation_id = invocation_id\n self.item = item\n", "id": "4365725", "language": "Python", "matching_score": 0.39191877841949463, "max_stars_count": 2, "path": "signalrcore_async/messages.py" }, { "content": "import setuptools\n\nwith open(\"README.md\", \"r\") as fh:\n long_description = fh.read()\n\nsetuptools.setup(\n name=\"signalrcore-async\",\n version=\"0.5.4\",\n author=\"Apollo3zehn\",\n description=\"Asynchronous fork of signalrcore with MessagePack support: A Python SignalR Core client, with invocation auth and two way streaming. Compatible with azure / serverless functions. Also with automatic reconnect and manual reconnect.\",\n keywords=\"signalr core client 3.1\",\n long_description=long_description,\n long_description_content_type=\"text/markdown\",\n url=\"https://github.com/apollo3zehn/signalrcore\",\n packages=setuptools.find_packages(),\n classifiers=[\n \"Programming Language :: Python :: 3\"\n ],\n install_requires=[\n \"msgpack>=1.0.0\",\n \"requests>=2.21.0\",\n \"websockets>=8.1\"\n ]\n)\n", "id": "9421027", "language": "Python", "matching_score": 0, "max_stars_count": 2, "path": "setup.py" }, { "content": "import sys\nif sys.version_info.major is 2:\n from aenum import Enum\nelse:\n from enum import Enum\n\n\nclass ConnectionState(Enum):\n connecting = 0\n connected = 1\n reconnecting = 2\n disconnected = 4\n", "id": "8995844", "language": "Python", "matching_score": 0, "max_stars_count": 2, "path": "signalrcore_async/hub/connection_state.py" }, { "content": "class HubError(OSError):\n pass\n\n\nclass UnAuthorizedHubError(HubError):\n pass\n", "id": "2841501", "language": "Python", "matching_score": 0, "max_stars_count": 2, "path": "signalrcore_async/hub/errors.py" } ]
2.578172
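The MessagePack transport above frames every message with a varint length prefix: 7 payload bits per byte, with the high bit set on all bytes except the last. A standalone round-trip of that scheme, mirroring _to_varint/_from_varint in signalrcore_async/protocol/msgpack.py:

def to_varint(value):
    buffer = b''
    while True:
        byte = value & 0x7F
        value >>= 7
        if value:
            buffer += bytes((byte | 0x80,))  # continuation bit set
        else:
            return buffer + bytes((byte,))   # last byte: high bit clear

def from_varint(buffer, offset=0):
    shift, value, i = 0, 0, offset
    while True:
        byte = buffer[i]
        value |= (byte & 0x7F) << shift
        shift += 7
        if not (byte & 0x80):
            return value, i + 1  # decoded length and index just past the prefix
        i += 1

assert to_varint(300) == b'\xac\x02'
assert from_varint(to_varint(300)) == (300, 2)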
tkluck
[ { "content": "from __future__ import absolute_import\nfrom .accumulation_tree import AccumulationTree\n", "id": "6053825", "language": "Python", "matching_score": 0, "max_stars_count": 10, "path": "accumulation_tree/__init__.py" }, { "content": "import os\nfrom setuptools import setup, Extension\n\nsetup(\n name = 'accumulation_tree',\n version = '0.6.2',\n description = 'Red/black tree with support for fast accumulation of values in a key range',\n author = '<NAME>',\n author_email = '<EMAIL>',\n url='https://github.com/tkluck/accumulation_tree',\n long_description=\"\"\"\nA red/black tree which also stores partial aggregations at each node, making\ngetting aggregations of key range slices an O(log(N)) operation.\n\"\"\",\n classifiers=[\n \"Development Status :: 5 - Production/Stable\",\n \"License :: OSI Approved :: MIT License\",\n \"Programming Language :: Python\",\n \"Topic :: Scientific/Engineering\",\n \"Programming Language :: Python :: 2\",\n \"Programming Language :: Python :: 3\",\n ],\n license='MIT',\n keywords='BST, data structure, accumulation',\n setup_requires=[\"cython\"],\n packages=['accumulation_tree'],\n ext_modules = [\n Extension('accumulation_tree.accumulation_tree', ['accumulation_tree/accumulation_tree.pyx'])\n ],\n)\n", "id": "6671387", "language": "Python", "matching_score": 0, "max_stars_count": 10, "path": "setup.py" } ]
0
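The package above stores a partial aggregation in every red/black tree node so that accumulating values over a key range costs O(log n). A minimal stand-in for that idea using prefix sums over a static sorted key set; this illustrates the invariant the tree maintains dynamically and is not the package's actual API.

import bisect

keys = [1, 3, 5, 7, 9]
values = [10, 20, 30, 40, 50]

# prefix[i] accumulates values[:i]; the tree keeps the same information
# per subtree so that inserts stay O(log n) as well
prefix = [0]
for v in values:
    prefix.append(prefix[-1] + v)

def accumulate(lo, hi):
    # aggregate of values whose key lies in the half-open range [lo, hi)
    i = bisect.bisect_left(keys, lo)
    j = bisect.bisect_left(keys, hi)
    return prefix[j] - prefix[i]

assert accumulate(3, 8) == 20 + 30 + 40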
chrstrom
[ { "content": "from matplotlib import pyplot as plt\nimport numpy as np\nfrom tqdm import tqdm\n\nfrom utils.dataloader import load_data\nfrom utils.interactive_plot import InteractivePlot\nfrom utils.plotting import plot_NEES\nfrom utils.multivargaussian import MultiVarGaussian\nfrom utils.ekf import EKF\nfrom utils.dynamicmodels import WhitenoiseAcceleration2D\nfrom utils.measurementmodels import CartesianPosition2D\n\nimport config\nfrom pdaf import PDAF\nfrom tuninghints import tuninghints\n\n\ndef run_pdaf(init_state_gauss, measurement_data, Ts):\n\n dynamic_model = WhitenoiseAcceleration2D(config.sigma_a)\n sensor_model = CartesianPosition2D(config.sigma_z)\n ekf = EKF(dynamic_model, sensor_model)\n pdaf = PDAF(ekf,\n config.clutter_density,\n config.detection_prob,\n config.gate_percentile)\n\n state_upd_prev_gauss = init_state_gauss\n filter_data = []\n for measurements in tqdm(measurement_data, \"Working\",\n len(measurement_data), None):\n (state_pred_gauss,\n measurement_pred_gauss,\n state_upd_gauss) = pdaf.step_with_info(state_upd_prev_gauss,\n measurements, Ts)\n filter_data.append([state_pred_gauss,\n measurement_pred_gauss,\n state_upd_gauss])\n\n state_upd_prev_gauss = state_upd_gauss\n\n filter_data = list(map(list, zip(*filter_data))) # transpose list of lists\n return pdaf, filter_data\n\n\ndef main():\n (N_data,\n Ts,\n state_gt_data,\n measurement_data,\n association_gt_data) = load_data()\n\n tuninghints(measurement_data, association_gt_data)\n\n init_cov = np.eye(4)*5\n init_mean = np.random.multivariate_normal(state_gt_data[0, :4], np.eye(4))\n init_state_gauss = MultiVarGaussian(init_mean, init_cov)\n\n pdaf, filter_data = run_pdaf(init_state_gauss, measurement_data, Ts)\n\n (state_pred_gauss_seq,\n measurement_pred_gauss_seq,\n state_upd_gauss_seq) = filter_data\n\n pos_upd_gauss_seq = [gauss.marginalize([0, 1])\n for gauss in state_upd_gauss_seq]\n\n state: MultiVarGaussian = None\n pos_NEES = [state.marginalize([0, 1]).mahalanobis_distance_sq(gt[:2])\n for state, gt in zip(state_upd_gauss_seq, state_gt_data)]\n\n pos_error_sq = [np.sum((state.mean[:2] - gt[:2])**2)\n for state, gt in zip(state_upd_gauss_seq, state_gt_data)]\n pos_RMSE = np.sqrt(sum(pos_error_sq)/len(state_pred_gauss_seq))\n\n vel_error_sq = [np.sum((state.mean[2:4] - gt[2:4])**2)\n for state, gt in zip(state_upd_gauss_seq, state_gt_data)]\n vel_RMSE = np.sqrt(sum(vel_error_sq)/len(state_pred_gauss_seq))\n\n plot_NEES(pos_NEES)\n inter = InteractivePlot(pdaf,\n state_gt_data[:, :2],\n pos_upd_gauss_seq,\n measurement_pred_gauss_seq,\n measurement_data,\n association_gt_data,\n pos_RMSE,\n vel_RMSE)\n plt.show(block=True)\n\n\nif __name__ == '__main__':\n main()\n", "id": "4778722", "language": "Python", "matching_score": 3.956636428833008, "max_stars_count": 0, "path": "Assignments/Assignment_05/pda/run.py" }, { "content": "import numpy as np\nfrom numpy import ndarray\nfrom scipy.stats import chi2\nfrom dataclasses import dataclass, field\nfrom typing import Optional, Sequence, Tuple\n\nfrom gaussmix import GaussianMuxture\nfrom utils.multivargaussian import MultiVarGaussian\nfrom utils.ekf import EKF\n\nimport solution\n\n\n@dataclass\nclass PDAF:\n\n ekf: EKF\n clutter_density: float\n detection_prob: float\n gate_percentile: float\n gate_size_sq: float = field(init=False)\n\n def __post_init__(self):\n self.gate_size_sq = chi2.ppf(self.gate_percentile,\n self.ekf.sensor_model.ndim)\n\n def predict_state(self, state_upd_prev_gauss: MultiVarGaussian, Ts: float\n ) -> MultiVarGaussian:\n 
\"\"\"Prediction step\n\n Args:\n state_upd_prev_gauss (MultiVarGaussian): previous update gaussian\n Ts (float): timestep\n\n Returns:\n state_pred_gauss (MultiVarGaussian): predicted state gaussian\n \"\"\"\n state_pred_gauss = self.ekf.predict_state(state_upd_prev_gauss, Ts)\n return state_pred_gauss\n\n def predict_measurement(self, state_pred_gauss: MultiVarGaussian\n ) -> MultiVarGaussian:\n \"\"\"Measurement prediction step\n\n Args:\n state_pred_gauss (MultiVarGaussian): predicted state gaussian\n\n Returns:\n z_pred_gauss (MultiVarGaussian): predicted measurement gaussian\n \"\"\"\n z_pred_gauss = self.ekf.predict_measurement(state_pred_gauss)\n return z_pred_gauss\n\n def gate(self,\n z_pred_gauss: MultiVarGaussian,\n measurements: Sequence[ndarray]) -> ndarray:\n \"\"\"Gate the incoming measurements. That is remove the measurements \n that have a mahalanobis distance higher than a certain threshold. \n\n Args:\n z_pred_gauss (MultiVarGaussian): predicted measurement gaussian \n measurements (Sequence[ndarray]): sequence of measurements\n\n Returns:\n gated_measurements (ndarray[:,2]): array of accepted measurements\n \"\"\"\n gated_measurements = [\n m for m in measurements\n if z_pred_gauss.mahalanobis_distance_sq(m) < self.gate_size_sq\n ]\n\n return gated_measurements\n\n def get_association_prob(self, z_pred_gauss: MultiVarGaussian,\n gated_measurements: ndarray\n ) -> ndarray:\n \"\"\"Finds the association probabilities.\n\n associations_probs[0]: prob that no association is correct\n associations_probs[1]: prob that gated_measurements[0] is correct\n associations_probs[2]: prob that gated_measurements[1] is correct\n ...\n\n the sum of associations_probs should be 1\n\n Args:\n z_pred_gauss (MultiVarGaussian): predicted measurement gaussian \n gated_measurements (ndarray[:,2]): array of accepted measurements\n\n Returns:\n associations_probs (ndarray[:]): the association probabilities\n \"\"\"\n\n m_k = len(gated_measurements)\n P_D = self.detection_prob\n \n # Implementing Corollary 7.3.2\n associations_probs = []\n associations_probs.append(m_k * (1 - P_D) * self.clutter_density) # a_k = 0\n for i in range(m_k):\n associations_probs.append(P_D*z_pred_gauss.pdf(gated_measurements[i])) # a_k > 0\n \n associations_probs = np.array(associations_probs)\n if associations_probs.sum() != 0:\n associations_probs /= associations_probs.sum()\n else:\n associations_probs += 1/associations_probs.size\n\n\n return associations_probs\n\n def get_cond_update_gaussians(self, state_pred_gauss: MultiVarGaussian,\n z_pred_gauss: MultiVarGaussian,\n gated_measurements: ndarray\n ) -> Sequence[MultiVarGaussian]:\n \"\"\"Get the conditional updated state gaussians \n for every association hypothesis\n\n update_gaussians[0]: update given that no measurement is correct\n update_gaussians[1]: update given that gated_measurements[0] is correct\n update_gaussians[2]: update given that gated_measurements[1] is correct\n ...\n\n Args:\n state_pred_gauss (MultiVarGaussian): predicted state gaussian\n z_pred_gauss (MultiVarGaussian): predicted measurement gaussian\n gated_measurements (ndarray[:,2]): array of accepted measurements\n\n Returns:\n Sequence[MultiVarGaussian]: The sequence of conditional updates\n \"\"\"\n x_pred, P_pred = state_pred_gauss\n z_pred, S_pred = z_pred_gauss\n H = self.ekf.sensor_model.jac(x_pred)\n W = P_pred@H.T@np.linalg.inv(S_pred)\n\n # Implementing 7.20 and 7.21\n update_gaussians = []\n update_gaussians.append(MultiVarGaussian(x_pred, P_pred)) #a_k = 0\n for z_k in 
gated_measurements:\n mean = x_pred + W@(z_k - z_pred)\n cov = (np.eye(4) - W@H)@P_pred\n update_gaussians.append(MultiVarGaussian(mean, cov))\n\n return update_gaussians\n\n def update(self, state_pred_gauss: MultiVarGaussian,\n z_pred_gauss: MultiVarGaussian,\n measurements: Sequence[ndarray]):\n \"\"\"Perform the update step of the PDA filter\n\n Args:\n state_pred_gauss (MultiVarGaussian): predicted state gaussian\n z_pred_gauss (MultiVarGaussian): predicted measurement gaussian\n measurements (Sequence[ndarray]): sequence of measurements\n\n Returns:\n state_upd_gauss (MultiVarGaussian): updated state gaussian\n \"\"\"\n\n gated_measurements = self.gate(z_pred_gauss, measurements)\n beta = self.get_association_prob(z_pred_gauss, gated_measurements)\n conditional_gaussians = self.get_cond_update_gaussians(state_pred_gauss, z_pred_gauss, gated_measurements)\n\n # Not sure why this one isn't working\n #state_upd_gauss = GaussianMuxture(beta, conditional_gaussians).reduce()\n\n state_upd_gauss = solution.pdaf.PDAF.update(\n self, state_pred_gauss, z_pred_gauss, measurements)\n return state_upd_gauss\n\n def step_with_info(self,\n state_upd_prev_gauss: MultiVarGaussian,\n measurements: Sequence[ndarray],\n Ts: float\n ) -> Tuple[MultiVarGaussian,\n MultiVarGaussian,\n MultiVarGaussian]:\n \"\"\"Perform a full step and return usefull info\n\n Hint: you should not need to write any new code here, \n just use the methods you have implemented\n\n Args:\n state_upd_prev_gauss (MultiVarGaussian): previous updated gaussian\n measurements (Sequence[ndarray]): sequence of measurements\n Ts (float): timestep\n\n Returns:\n state_pred_gauss (MultiVarGaussian): predicted state gaussian\n z_pred_gauss (MultiVarGaussian): predicted measurement gaussian\n state_upd_gauss (MultiVarGaussian): updated state gaussian\n \"\"\"\n state_pred_gauss = self.predict_state(state_upd_prev_gauss, Ts)\n z_pred_gauss = self.predict_measurement(state_pred_gauss)\n state_upd_gauss = self.update(state_pred_gauss, z_pred_gauss, measurements)\n\n return state_pred_gauss, z_pred_gauss, state_upd_gauss\n\n def step(self, state_upd_prev_gauss, measurements, Ts):\n _, _, state_upd_gauss = self.step_with_info(state_upd_prev_gauss,\n measurements,\n Ts)\n return state_upd_gauss\n", "id": "11490254", "language": "Python", "matching_score": 4.194084167480469, "max_stars_count": 0, "path": "Assignments/Assignment_05/pda/pdaf.py" }, { "content": "\"\"\"\nNotation:\n----------\nx is generally used for either the state or the mean of a gaussian. 
It should be clear from context which it is.\nP is used about the state covariance\nz is a single measurement\nZ are multiple measurements so that z = Z[k] at a given time step k\nv is the innovation z - h(x)\nS is the innovation covariance\n\"\"\"\nfrom typing import Optional\nfrom dataclasses import dataclass\nimport numpy as np\nimport scipy.linalg as la\n\nfrom utils.dynamicmodels import DynamicModel\nfrom utils.measurementmodels import MeasurementModel\nfrom utils.multivargaussian import MultiVarGaussian\n\n\n@dataclass\nclass EKF:\n dynamic_model: DynamicModel\n sensor_model: MeasurementModel\n\n def predict_state(self,\n state_upd_prev_gauss: MultiVarGaussian,\n Ts: float,\n ) -> MultiVarGaussian:\n \"\"\"Predict the EKF state Ts seconds ahead.\"\"\"\n x_upd_prev, P = state_upd_prev_gauss\n\n F = self.dynamic_model.jac(x_upd_prev, Ts)\n Q = self.dynamic_model.Q(x_upd_prev, Ts)\n\n x_pred = self.dynamic_model.f(x_upd_prev, Ts)\n P_pred = F @ P @ F.T + Q\n\n state_pred_gauss = MultiVarGaussian(x_pred, P_pred)\n\n return state_pred_gauss\n\n def predict_measurement(self,\n state_pred_gauss: MultiVarGaussian\n ) -> MultiVarGaussian:\n \"\"\"Predict measurement pdf from using state pdf and model.\"\"\"\n if False:\n x_bar, P = state_pred_gauss\n H = self.sensor_model.H(x_bar)\n R = self.sensor_model.R(x_bar)\n z_bar = np.zeros(2) # TODO\n S = np.eye(2) # TODO\n measure_pred_gauss = MultiVarGaussian(z_bar, S)\n\n x_pred, P = state_pred_gauss\n\n # Calculate mean of measurement\n z_pred = self.sensor_model.h(x_pred)\n\n # Calculate the measurement (innovation) covariance\n # for ekfstate at z in sensorstate.\n H = self.sensor_model.jac(x_pred)\n R = self.sensor_model.R(x_pred)\n S = H @ P @ H.T + R\n\n # Create Gaussian\n measure_pred_gauss = MultiVarGaussian(z_pred, S)\n\n return measure_pred_gauss\n\n def update(self,\n state_pred_gauss: MultiVarGaussian,\n measurement: np.ndarray,\n measurement_pred_gauss: Optional[MultiVarGaussian] = None,\n ) -> MultiVarGaussian:\n \"\"\"Given the prediction and innovation, \n find the updated state estimate.\"\"\"\n\n x_pred, P = state_pred_gauss\n\n if measurement_pred_gauss is None:\n measurement_pred_gauss = self.predict_measurement(state_pred_gauss)\n\n z_bar, S = measurement_pred_gauss\n innovation = measurement - z_bar\n\n H = self.sensor_model.jac(x_pred)\n\n # Kalman gain\n W = P @ la.solve(S, H).T\n\n # mean update\n x_upd = x_pred + W @ innovation\n\n # covariance update\n # P_upd = P - W @ H @ P # simple standard form\n\n # It might be better to use the more numerically stable Joseph form\n I = np.eye(*P.shape)\n P_upd = ((I - W @ H) @ P @ (I - W @ H).T\n + W @ self.sensor_model.R(state_pred_gauss) @ W.T)\n\n state_upd_gauss = MultiVarGaussian(x_upd, P_upd)\n\n return state_upd_gauss\n\n def step_with_info(self,\n state_upd_prev_gauss: MultiVarGaussian,\n measurement: np.ndarray,\n Ts: float,\n ) -> tuple[MultiVarGaussian,\n MultiVarGaussian,\n MultiVarGaussian]:\n \"\"\"Predict ekfstate Ts units ahead and then \n update this prediction with z.\n\n Returns:\n state_pred_gauss: The state prediction\n measurement_pred_gauss: \n The measurement prediction after state prediction\n state_upd_gauss: The predicted state updated with measurement\n \"\"\"\n\n state_pred_gauss = self.predict_state(state_upd_prev_gauss, Ts)\n measurement_pred_gauss = self.predict_measurement(state_pred_gauss)\n state_upd_gauss = self.update(state_pred_gauss, measurement,\n measurement_pred_gauss)\n\n return state_pred_gauss, measurement_pred_gauss, 
state_upd_gauss\n\n def step(self,\n state_upd_prev_gauss: MultiVarGaussian,\n measurement: np.ndarray,\n Ts: float,\n ) -> MultiVarGaussian:\n\n _, _, state_upd_gauss = self.step_with_info(state_upd_prev_gauss,\n measurement, Ts)\n return state_upd_gauss\n", "id": "9754727", "language": "Python", "matching_score": 5.050653457641602, "max_stars_count": 0, "path": "Assignments/Assignment_05/pda/utils/ekf.py" }, { "content": "\"\"\"\nNotation:\n----------\nx is generally used for either the state or the mean of a gaussian. It should be clear from context which it is.\nP is used about the state covariance\nz is a single measurement\nZ are multiple measurements so that z = Z[k] at a given time step k\nv is the innovation z - h(x)\nS is the innovation covariance\n\"\"\"\nfrom typing import Optional\nfrom dataclasses import dataclass, field\nimport numpy as np\nimport scipy.linalg as la\n\nfrom config import DEBUG\nfrom dynamicmodels import DynamicModel\nfrom measurementmodels import MeasurementModel\nfrom utils.gaussparams import MultiVarGaussian\n\n# %% The EKF\n\n\n@dataclass\nclass EKF:\n dynamic_model: DynamicModel\n sensor_model: MeasurementModel\n\n def predict(self,\n state_upd_prev_gauss: MultiVarGaussian,\n Ts: float,\n ) -> MultiVarGaussian:\n \"\"\"Predict the EKF state Ts seconds ahead.\"\"\"\n x_km1 = state_upd_prev_gauss.mean\n P_km1 = state_upd_prev_gauss.cov\n F = self.dynamic_model.F(x_km1, Ts)\n Q = self.dynamic_model.Q(x_km1, Ts)\n\n predicted_mean = self.dynamic_model.f(x_km1, Ts)\n predicted_cov = F@P_km1@F.T + Q\n\n state_pred_gauss = MultiVarGaussian(predicted_mean, predicted_cov)\n\n return state_pred_gauss\n\n def predict_measurement(self,\n state_pred_gauss: MultiVarGaussian\n ) -> MultiVarGaussian:\n \"\"\"Predict measurement pdf from using state pdf and model.\"\"\"\n x_bar, P = state_pred_gauss\n H = self.sensor_model.H(x_bar)\n R = self.sensor_model.R(x_bar)\n\n z_bar = self.sensor_model.h(x_bar)\n S = H@P@H.T + R\n measure_pred_gauss = MultiVarGaussian(z_bar, S)\n\n return measure_pred_gauss\n\n def update(self,\n z: np.ndarray,\n state_pred_gauss: MultiVarGaussian,\n measurement_gauss: Optional[MultiVarGaussian] = None,\n ) -> MultiVarGaussian:\n \"\"\"Given the prediction and innovation, \n find the updated state estimate.\"\"\"\n x_pred, P = state_pred_gauss\n if measurement_gauss is None:\n measurement_gauss = self.predict_measurement(state_pred_gauss)\n\n z_bar, S = measurement_gauss\n H = self.sensor_model.H(x_pred)\n W = P@H.T@la.inv(S)\n\n x_upd = x_pred + W@(z - z_bar)\n P_upd = (np.eye(4) - W@H)@P\n state_upd_gauss = MultiVarGaussian(x_upd, P_upd)\n\n return state_upd_gauss\n\n def step_with_info(self,\n state_upd_prev_gauss: MultiVarGaussian,\n z: np.ndarray,\n Ts: float,\n ) -> tuple[MultiVarGaussian,\n MultiVarGaussian,\n MultiVarGaussian]:\n \"\"\"\n Predict ekfstate Ts units ahead and then update this prediction with z.\n\n Returns:\n state_pred_gauss: The state prediction\n measurement_pred_gauss: \n The measurement prediction after state prediction\n state_upd_gauss: The predicted state updated with measurement\n \"\"\"\n state_pred_gauss = self.predict(state_upd_prev_gauss, Ts)\n measurement_pred_gauss = self.predict_measurement(state_upd_prev_gauss)\n state_upd_gauss = self.update(z, state_pred_gauss)\n\n return state_pred_gauss, measurement_pred_gauss, state_upd_gauss\n\n def step(self,\n state_upd_prev_gauss: MultiVarGaussian,\n z: np.ndarray,\n Ts: float,\n ) -> MultiVarGaussian:\n\n _, _, state_upd_gauss = 
", "id": "12044040", "language": "Python", "matching_score": 1.8059765100479126, "max_stars_count": 0, "path": "Assignments/Assignment_03/ekf/ekf.py" }, { "content": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\"\"\"\nDynamic models to be used with e.g. EKF.\n\n@author: <NAME>, <EMAIL>\n\"\"\"\n# %%\nfrom dataclasses import dataclass\n\nimport numpy as np\nfrom numpy import ndarray\n\n\n@dataclass\nclass DynamicModel:\n    \"\"\"\n    Parent class for dynamic models.\n\n    A model includes the discrete prediction equation f, its Jacobian F, and\n    the process noise covariance Q.\n    \"\"\"\n\n    def f(self, x: ndarray, Ts: float, **kwargs) -> ndarray:\n        \"\"\"Calculate the zero noise Ts time units transition from x.\n\n        Args:\n            x (ndarray): state\n            Ts (float): time step\n\n        Returns:\n            x_kp1 (ndarray): x_k+1, the next state\n        \"\"\"\n        raise NotImplementedError\n\n    def jac(self, x: ndarray, Ts: float, **kwargs) -> ndarray:\n        \"\"\"Calculate the transition function jacobian for Ts time units at x.\n        Args:\n            x (ndarray): state\n            Ts (float): time step\n\n        Returns:\n            F (ndarray): Discrete transition function jacobian,\n            for linear systems: x_k+1 = F @ x_k\n        \"\"\"\n        raise NotImplementedError\n\n    def Q(self, x: ndarray, Ts: float, **kwargs) -> ndarray:\n        \"\"\"Calculate the Ts time units transition covariance.\n        Args:\n            x (ndarray): state\n            Ts (float): time step\n\n        Returns:\n            Q (ndarray): covariance matrix\n        \"\"\"\n        raise NotImplementedError\n\n\n@dataclass\nclass WhitenoiseAcceleration2D(DynamicModel):\n    \"\"\"\n    A white noise acceleration model, also known as constant velocity.\n    States are position and speed.\n    \"\"\"\n\n    # noise standard deviation\n    sigma_a: float\n    ndim: int = 4\n\n    def f(self, x: ndarray, Ts: float,) -> ndarray:\n        \"\"\"Calculate the zero noise Ts time units transition from x.\n        See DynamicModel for variable documentation\n        \"\"\"\n        x_kp1 = self.jac(x, Ts) @ x\n        return x_kp1\n\n    def jac(self, x: ndarray, Ts: float,) -> ndarray:\n        \"\"\"Calculate the transition function jacobian for Ts time units at x.\n        See DynamicModel for variable documentation\"\"\"\n        F = np.eye(4)\n        F[[0, 1], [2, 3]] = Ts\n        return F\n\n    def Q(self, x: ndarray, Ts: float,) -> ndarray:\n        \"\"\"Calculate the Ts time units transition covariance.\n        See (4.64) in the book.\n        See DynamicModel for variable documentation\"\"\"\n\n        Q00 = self.sigma_a**2 * (Ts**3/3) * np.eye(2)\n        Q11 = self.sigma_a**2 * Ts * np.eye(2)\n        Q01 = self.sigma_a**2 * (Ts**2/2) * np.eye(2)\n        Q = np.block([[Q00, Q01], [Q01, Q11]])\n        return Q\n
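\n\n# Worked example (illustrative): sigma_a = 2.0 and Ts = 0.5 give a position\n# variance block Q[0, 0] = 2.0**2 * 0.5**3 / 3, i.e. about 0.167.\n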
", "id": "3530975", "language": "Python", "matching_score": 5.640514373779297, "max_stars_count": 0, "path": "Assignments/Assignment_05/pda/utils/dynamicmodels.py" }, { "content": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\"\"\"\nDynamic models to be used with e.g. EKF.\n\n@author: <NAME>, <EMAIL>\n\"\"\"\n# %%\nfrom dataclasses import dataclass\n\nimport numpy as np\nfrom numpy import ndarray\n\n\n@dataclass\nclass DynamicModel:\n    \"\"\"\n    Parent class for dynamic models.\n\n    A model includes the discrete prediction equation f, its Jacobian F, and\n    the process noise covariance Q.\n    \"\"\"\n\n    def f(self, x: ndarray, Ts: float, **kwargs) -> ndarray:\n        \"\"\"Calculate the zero noise Ts time units transition from x.\n\n        Args:\n            x (ndarray): state\n            Ts (float): time step\n\n        Returns:\n            x_kp1 (ndarray): x_k+1, the next state\n        \"\"\"\n        raise NotImplementedError\n\n    def F(self, x: ndarray, Ts: float, **kwargs) -> ndarray:\n        \"\"\"Calculate the transition function jacobian for Ts time units at x.\n        Args:\n            x (ndarray): state\n            Ts (float): time step\n\n        Returns:\n            F (ndarray): Discrete transition function jacobian,\n            for linear systems: x_k+1 = F @ x_k\n        \"\"\"\n        raise NotImplementedError\n\n    def Q(self, x: ndarray, Ts: float, **kwargs) -> ndarray:\n        \"\"\"Calculate the Ts time units transition covariance.\n        Args:\n            x (ndarray): state\n            Ts (float): time step\n\n        Returns:\n            Q (ndarray): covariance matrix\n        \"\"\"\n        raise NotImplementedError\n\n\n@dataclass\nclass WhitenoiseAcceleration2D(DynamicModel):\n    \"\"\"\n    A white noise acceleration model, also known as constant velocity.\n    States are position and speed.\n    \"\"\"\n\n    # noise standard deviation\n    sigma_a: float\n\n    def f(self, x: ndarray, Ts: float,) -> ndarray:\n        \"\"\"Calculate the zero noise Ts time units transition from x.\n        See DynamicModel for variable documentation\n        \"\"\"\n        x_kp1 = self.F(x, Ts) @ x\n        return x_kp1\n\n    def F(self, x: ndarray, Ts: float,) -> ndarray:\n        \"\"\"Calculate the transition function jacobian for Ts time units at x.\n        See DynamicModel for variable documentation\"\"\"\n        F = np.array([[1, 0, Ts, 0],\n                      [0, 1, 0, Ts],\n                      [0, 0, 1, 0],\n                      [0, 0, 0, 1]])\n\n        return F\n\n    def Q(self, x: ndarray, Ts: float,) -> ndarray:\n        \"\"\"Calculate the Ts time units transition covariance.\n        See (4.64) in the book.\n        See DynamicModel for variable documentation\"\"\"\n        T3 = Ts**3 / 3.0\n        T2 = Ts**2 / 2.0\n\n        Q = np.array([[T3,  0, T2,  0],\n                      [ 0, T3,  0, T2],\n                      [T2,  0, Ts,  0],\n                      [ 0, T2,  0, Ts]]) * self.sigma_a**2\n        return Q\n", "id": "9799618", "language": "Python", "matching_score": 2.3679916858673096, "max_stars_count": 0, "path": "Assignments/Assignment_03/ekf/dynamicmodels.py" }, { "content": "# %% Imports\nfrom typing import Any, Dict\nfrom dataclasses import dataclass\nimport numpy as np\nfrom numpy import ndarray\n# %% Measurement models interface declaration\n\n\n@dataclass\nclass MeasurementModel:\n    def h(self, x: ndarray, **kwargs) -> ndarray:\n        \"\"\"Calculate the noise free measurement location at x in sensor_state.\n        Args:\n            x (ndarray): state\n        \"\"\"\n        raise NotImplementedError\n\n    def jac(self, x: ndarray, **kwargs) -> ndarray:\n        \"\"\"Calculate the measurement Jacobian matrix at x in sensor_state.\n        Args:\n            x (ndarray): state\n        \"\"\"\n        raise NotImplementedError\n\n    def R(self, x: ndarray, **kwargs) -> ndarray:\n        \"\"\"Calculate the measurement covariance matrix at x in sensor_state.\n        Args:\n            x (ndarray): state\n        \"\"\"\n        raise NotImplementedError\n\n\n@dataclass\nclass CartesianPosition2D(MeasurementModel):\n    sigma_z: float\n    ndim: int = 2\n
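\n    # Position-only sensor: h(x) = x[:2], so the Jacobian below is\n    # H = [I_2  0] for the 4D position/velocity state used in this folder.\n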
\n    def h(self, x: ndarray) -> ndarray:\n        \"\"\"Calculate the noise free measurement location at x in sensor_state.\n        \"\"\"\n        x_h = x[:2]\n        return x_h\n\n    def jac(self, x: ndarray) -> ndarray:\n        \"\"\"Calculate the measurement Jacobian matrix at x in sensor_state.\"\"\"\n        H = np.array([[1, 0, 0, 0],\n                      [0, 1, 0, 0]])\n        return H\n\n    def R(self, x: ndarray) -> ndarray:\n        \"\"\"Calculate the measurement covariance matrix at x in sensor_state.\"\"\"\n        R = self.sigma_z**2 * np.eye(2)\n        return R\n", "id": "11873817", "language": "Python", "matching_score": 5.085323810577393, "max_stars_count": 0, "path": "Assignments/Assignment_05/pda/utils/measurementmodels.py" }, { "content": "# %% Imports\nfrom typing import Any, Dict\nfrom dataclasses import dataclass\nimport numpy as np\nfrom numpy import ndarray\n\n# %% Measurement models interface declaration\n\n\n@dataclass\nclass MeasurementModel:\n    def h(self, x: ndarray, **kwargs) -> ndarray:\n        \"\"\"Calculate the noise free measurement location at x in sensor_state.\n        Args:\n            x (ndarray): state\n        \"\"\"\n        raise NotImplementedError\n\n    def H(self, x: ndarray, **kwargs) -> ndarray:\n        \"\"\"Calculate the measurement Jacobian matrix at x in sensor_state.\n        Args:\n            x (ndarray): state\n        \"\"\"\n        raise NotImplementedError\n\n    def R(self, x: ndarray, **kwargs) -> ndarray:\n        \"\"\"Calculate the measurement covariance matrix at x in sensor_state.\n        Args:\n            x (ndarray): state\n        \"\"\"\n        raise NotImplementedError\n\n\n@dataclass\nclass CartesianPosition2D(MeasurementModel):\n    sigma_z: float\n\n    def h(self, x: ndarray) -> ndarray:\n        \"\"\"Calculate the noise free measurement location at x in sensor_state.\n        \"\"\"\n        x_h = self.H(x)@x\n        return x_h\n\n    def H(self, x: ndarray) -> ndarray:\n        \"\"\"Calculate the measurement Jacobian matrix at x in sensor_state.\"\"\"\n        H = np.eye(2, 4)\n        return H\n\n    def R(self, x: ndarray) -> ndarray:\n        \"\"\"Calculate the measurement covariance matrix at x in sensor_state.\"\"\"\n        R = np.eye(2) * self.sigma_z**2\n        return R\n", "id": "10930049", "language": "Python", "matching_score": 1.8844635486602783, "max_stars_count": 0, "path": "Assignments/Assignment_03/ekf/measurementmodels.py" }, { "content": "import pickle\nfrom numpy.core.numeric import isscalar\nimport pytest\nfrom copy import deepcopy\nimport sys\nfrom pathlib import Path\nimport numpy as np\nimport os\nfrom dataclasses import is_dataclass\n\nassignment_name = \"ekf\"\n\nthis_file = Path(__file__)\ntests_folder = this_file.parent\ntest_data_file = tests_folder.joinpath(\"test_data.pickle\")\nproject_folder = tests_folder.parent\ncode_folder = project_folder.joinpath(assignment_name)\n\nsys.path.insert(0, str(code_folder))\n\nimport solution  # nopep8\nimport measurementmodels  # nopep8\n\n\n@pytest.fixture\ndef test_data():\n    with open(test_data_file, \"rb\") as file:\n        test_data = pickle.load(file)\n    return test_data\n\n\ndef compare(a, b):\n    if (\n        isinstance(a, np.ndarray)\n        or isinstance(b, np.ndarray)\n        or np.isscalar(a)\n        or np.isscalar(b)\n    ):\n        return np.allclose(a, b)\n    elif is_dataclass(a) or is_dataclass(b):\n        return str(a) == str(b)\n    else:\n        return a == b\n\n\nclass TestOutput:\n\n    def test_output__CartesianPosition2D_h(self, test_data):\n        for finput in test_data[\"measurementmodels.CartesianPosition2D.h\"]:\n            params = tuple(finput.values())\n\n            self_1, x_1 = deepcopy(params)\n\n            self_2, x_2 = deepcopy(params)\n\n            x_h_1 = measurementmodels.CartesianPosition2D.h(self_1, x_1)\n\n            x_h_2 = solution.measurementmodels.CartesianPosition2D.h(\n                self_2, x_2)\n\n            assert compare(x_h_1, x_h_2)\n\n            
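# Inputs are compared after the call to check that the tested code mutates\n            # (or preserves) its arguments exactly as the solution does.\n            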
assert compare(self_1, self_2)\n assert compare(x_1, x_2)\n\n def test_output__CartesianPosition2D_H(self, test_data):\n for finput in test_data[\"measurementmodels.CartesianPosition2D.H\"]:\n params = tuple(finput.values())\n\n self_1, x_1 = deepcopy(params)\n\n self_2, x_2 = deepcopy(params)\n\n H_1 = measurementmodels.CartesianPosition2D.H(self_1, x_1)\n\n H_2 = solution.measurementmodels.CartesianPosition2D.H(self_2, x_2)\n\n assert compare(H_1, H_2)\n\n assert compare(self_1, self_2)\n assert compare(x_1, x_2)\n\n def test_output__CartesianPosition2D_R(self, test_data):\n for finput in test_data[\"measurementmodels.CartesianPosition2D.R\"]:\n params = tuple(finput.values())\n\n self_1, x_1 = deepcopy(params)\n\n self_2, x_2 = deepcopy(params)\n\n R_1 = measurementmodels.CartesianPosition2D.R(self_1, x_1)\n\n R_2 = solution.measurementmodels.CartesianPosition2D.R(self_2, x_2)\n\n assert compare(R_1, R_2)\n\n assert compare(self_1, self_2)\n assert compare(x_1, x_2)\n\n\nclass TestSolutionUsage:\n\n def test_solution_usage__CartesianPosition2D_h(self, test_data):\n for finput in test_data[\"measurementmodels.CartesianPosition2D.h\"][:1]:\n params = finput\n\n solution.used[\"measurementmodels.CartesianPosition2D.h\"] = False\n\n measurementmodels.CartesianPosition2D.h(**params)\n\n assert not solution.used[\"measurementmodels.CartesianPosition2D.h\"], \"The function uses the solution\"\n\n def test_solution_usage__CartesianPosition2D_H(self, test_data):\n for finput in test_data[\"measurementmodels.CartesianPosition2D.H\"][:1]:\n params = finput\n\n solution.used[\"measurementmodels.CartesianPosition2D.H\"] = False\n\n measurementmodels.CartesianPosition2D.H(**params)\n\n assert not solution.used[\"measurementmodels.CartesianPosition2D.H\"], \"The function uses the solution\"\n\n def test_solution_usage__CartesianPosition2D_R(self, test_data):\n for finput in test_data[\"measurementmodels.CartesianPosition2D.R\"][:1]:\n params = finput\n\n solution.used[\"measurementmodels.CartesianPosition2D.R\"] = False\n\n measurementmodels.CartesianPosition2D.R(**params)\n\n assert not solution.used[\"measurementmodels.CartesianPosition2D.R\"], \"The function uses the solution\"\n\n\nif __name__ == \"__main__\":\n os.environ[\"_PYTEST_RAISE\"] = \"1\"\n pytest.main()\n", "id": "2097622", "language": "Python", "matching_score": 2.85650372505188, "max_stars_count": 0, "path": "Assignments/Assignment_03/tests/test_measurementmodels.py" }, { "content": "import pickle\nfrom numpy.core.numeric import isscalar\nimport pytest\nfrom copy import deepcopy\nimport sys\nfrom pathlib import Path\nimport numpy as np\nimport os\nfrom dataclasses import is_dataclass\n\nassignment_name = \"ekf\"\n\nthis_file = Path(__file__)\ntests_folder = this_file.parent\ntest_data_file = tests_folder.joinpath(\"test_data.pickle\")\nproject_folder = tests_folder.parent\ncode_folder = project_folder.joinpath(assignment_name)\n\nsys.path.insert(0, str(code_folder))\n\nimport solution # nopep8\nimport dynamicmodels # nopep8\n\n\n@pytest.fixture\ndef test_data():\n with open(test_data_file, \"rb\") as file:\n test_data = pickle.load(file)\n return test_data\n\n\ndef compare(a, b):\n if (\n isinstance(a, np.ndarray)\n or isinstance(b, np.ndarray)\n or np.isscalar(a)\n or np.isscalar(b)\n ):\n return np.allclose(a, b)\n elif is_dataclass(a) or is_dataclass(b):\n return str(a) == str(b)\n else:\n return a == b\n\n\nclass TestOutput:\n\n def test_output__WhitenoiseAcceleration2D_f(self, test_data):\n for finput in 
test_data[\"dynamicmodels.WhitenoiseAcceleration2D.f\"]:\n params = tuple(finput.values())\n\n self_1, x_1, Ts_1 = deepcopy(params)\n\n self_2, x_2, Ts_2 = deepcopy(params)\n\n x_kp1_1 = dynamicmodels.WhitenoiseAcceleration2D.f(\n self_1, x_1, Ts_1)\n\n x_kp1_2 = solution.dynamicmodels.WhitenoiseAcceleration2D.f(\n self_2, x_2, Ts_2)\n\n assert compare(x_kp1_1, x_kp1_2)\n\n assert compare(self_1, self_2)\n assert compare(x_1, x_2)\n assert compare(Ts_1, Ts_2)\n\n def test_output__WhitenoiseAcceleration2D_F(self, test_data):\n for finput in test_data[\"dynamicmodels.WhitenoiseAcceleration2D.F\"]:\n params = tuple(finput.values())\n\n self_1, x_1, Ts_1 = deepcopy(params)\n\n self_2, x_2, Ts_2 = deepcopy(params)\n\n F_1 = dynamicmodels.WhitenoiseAcceleration2D.F(self_1, x_1, Ts_1)\n\n F_2 = solution.dynamicmodels.WhitenoiseAcceleration2D.F(\n self_2, x_2, Ts_2)\n\n assert compare(F_1, F_2)\n\n assert compare(self_1, self_2)\n assert compare(x_1, x_2)\n assert compare(Ts_1, Ts_2)\n\n def test_output__WhitenoiseAcceleration2D_Q(self, test_data):\n for finput in test_data[\"dynamicmodels.WhitenoiseAcceleration2D.Q\"]:\n params = tuple(finput.values())\n\n self_1, x_1, Ts_1 = deepcopy(params)\n\n self_2, x_2, Ts_2 = deepcopy(params)\n\n Q_1 = dynamicmodels.WhitenoiseAcceleration2D.Q(self_1, x_1, Ts_1)\n\n Q_2 = solution.dynamicmodels.WhitenoiseAcceleration2D.Q(\n self_2, x_2, Ts_2)\n\n assert compare(Q_1, Q_2)\n\n assert compare(self_1, self_2)\n assert compare(x_1, x_2)\n assert compare(Ts_1, Ts_2)\n\n\nclass TestSolutionUsage:\n\n def test_solution_usage__WhitenoiseAcceleration2D_f(self, test_data):\n for finput in test_data[\"dynamicmodels.WhitenoiseAcceleration2D.f\"][:1]:\n params = finput\n\n solution.used[\"dynamicmodels.WhitenoiseAcceleration2D.f\"] = False\n\n dynamicmodels.WhitenoiseAcceleration2D.f(**params)\n\n assert not solution.used[\"dynamicmodels.WhitenoiseAcceleration2D.f\"], \"The function uses the solution\"\n\n def test_solution_usage__WhitenoiseAcceleration2D_F(self, test_data):\n for finput in test_data[\"dynamicmodels.WhitenoiseAcceleration2D.F\"][:1]:\n params = finput\n\n solution.used[\"dynamicmodels.WhitenoiseAcceleration2D.F\"] = False\n\n dynamicmodels.WhitenoiseAcceleration2D.F(**params)\n\n assert not solution.used[\"dynamicmodels.WhitenoiseAcceleration2D.F\"], \"The function uses the solution\"\n\n def test_solution_usage__WhitenoiseAcceleration2D_Q(self, test_data):\n for finput in test_data[\"dynamicmodels.WhitenoiseAcceleration2D.Q\"][:1]:\n params = finput\n\n solution.used[\"dynamicmodels.WhitenoiseAcceleration2D.Q\"] = False\n\n dynamicmodels.WhitenoiseAcceleration2D.Q(**params)\n\n assert not solution.used[\"dynamicmodels.WhitenoiseAcceleration2D.Q\"], \"The function uses the solution\"\n\n\nif __name__ == \"__main__\":\n os.environ[\"_PYTEST_RAISE\"] = \"1\"\n pytest.main()\n", "id": "11763838", "language": "Python", "matching_score": 2.3299925327301025, "max_stars_count": 0, "path": "Assignments/Assignment_03/tests/test_dynamicmodels.py" }, { "content": "import pickle\nfrom numpy.core.numeric import isscalar\nimport pytest\nfrom copy import deepcopy\nimport sys\nfrom pathlib import Path\nimport numpy as np\nimport os\nfrom dataclasses import is_dataclass\n\nassignment_name = \"pda\"\n\nthis_file = Path(__file__)\ntests_folder = this_file.parent\ntest_data_file = tests_folder.joinpath(\"test_data.pickle\")\nproject_folder = tests_folder.parent\ncode_folder = project_folder.joinpath(assignment_name)\n\nsys.path.insert(0, str(code_folder))\n\nimport solution 
# nopep8\nimport gaussmix # nopep8\n\n\n@pytest.fixture\ndef test_data():\n with open(test_data_file, \"rb\") as file:\n test_data = pickle.load(file)\n return test_data\n\n\ndef compare(a, b):\n if (\n isinstance(a, np.ndarray)\n or isinstance(b, np.ndarray)\n or np.isscalar(a)\n or np.isscalar(b)\n ):\n return np.allclose(a, b)\n elif is_dataclass(a) or is_dataclass(b):\n return str(a) == str(b)\n else:\n return a == b\n\n\nclass Test_GaussianMuxture_get_mean:\n def test_output(self, test_data):\n \"\"\"Tests if the function is correct by comparing the output\n with the output of the solution\n\n As python always use pass by reference, not by copy, it also checks if the\n input is changed (or not) in the same way as the in solution\n \"\"\"\n for finput in test_data[\"gaussmix.GaussianMuxture.get_mean\"]:\n params = tuple(finput.values())\n\n self_1, = deepcopy(params)\n\n self_2, = deepcopy(params)\n\n mean_1 = gaussmix.GaussianMuxture.get_mean(self_1,)\n\n mean_2 = solution.gaussmix.GaussianMuxture.get_mean(self_2,)\n\n assert compare(mean_1, mean_2)\n\n assert compare(self_1, self_2)\n\n def test_solution_usage(self, test_data):\n \"\"\"Tests if the solution is used in the function\"\"\"\n for finput in test_data[\"gaussmix.GaussianMuxture.get_mean\"][:1]:\n params = finput\n\n solution.used[\"gaussmix.GaussianMuxture.get_mean\"] = False\n\n gaussmix.GaussianMuxture.get_mean(**params)\n\n assert not solution.used[\"gaussmix.GaussianMuxture.get_mean\"], \"The function uses the solution\"\n\n\nclass Test_GaussianMuxture_get_cov:\n def test_output(self, test_data):\n \"\"\"Tests if the function is correct by comparing the output\n with the output of the solution\n\n As python always use pass by reference, not by copy, it also checks if the\n input is changed (or not) in the same way as the in solution\n \"\"\"\n for finput in test_data[\"gaussmix.GaussianMuxture.get_cov\"]:\n params = tuple(finput.values())\n\n self_1, = deepcopy(params)\n\n self_2, = deepcopy(params)\n\n cov_1 = gaussmix.GaussianMuxture.get_cov(self_1,)\n\n cov_2 = solution.gaussmix.GaussianMuxture.get_cov(self_2,)\n\n assert compare(cov_1, cov_2)\n\n assert compare(self_1, self_2)\n\n def test_solution_usage(self, test_data):\n \"\"\"Tests if the solution is used in the function\"\"\"\n for finput in test_data[\"gaussmix.GaussianMuxture.get_cov\"][:1]:\n params = finput\n\n solution.used[\"gaussmix.GaussianMuxture.get_cov\"] = False\n\n gaussmix.GaussianMuxture.get_cov(**params)\n\n assert not solution.used[\"gaussmix.GaussianMuxture.get_cov\"], \"The function uses the solution\"\n\n\nclass Test_GaussianMuxture_reduce:\n def test_output(self, test_data):\n \"\"\"Tests if the function is correct by comparing the output\n with the output of the solution\n\n As python always use pass by reference, not by copy, it also checks if the\n input is changed (or not) in the same way as the in solution\n \"\"\"\n for finput in test_data[\"gaussmix.GaussianMuxture.reduce\"]:\n params = tuple(finput.values())\n\n self_1, = deepcopy(params)\n\n self_2, = deepcopy(params)\n\n reduction_1 = gaussmix.GaussianMuxture.reduce(self_1,)\n\n reduction_2 = solution.gaussmix.GaussianMuxture.reduce(self_2,)\n\n assert compare(reduction_1, reduction_2)\n\n assert compare(self_1, self_2)\n\n def test_solution_usage(self, test_data):\n \"\"\"Tests if the solution is used in the function\"\"\"\n for finput in test_data[\"gaussmix.GaussianMuxture.reduce\"][:1]:\n params = finput\n\n solution.used[\"gaussmix.GaussianMuxture.reduce\"] = False\n\n 
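# \"GaussianMuxture\" (sic) is the spelling used by this repo and by the\n            # pickled test-data keys, so the identifier is left unrenamed.\n            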
gaussmix.GaussianMuxture.reduce(**params)\n\n assert not solution.used[\"gaussmix.GaussianMuxture.reduce\"], \"The function uses the solution\"\n\n\nif __name__ == \"__main__\":\n os.environ[\"_PYTEST_RAISE\"] = \"1\"\n pytest.main()\n", "id": "9443051", "language": "Python", "matching_score": 3.381411552429199, "max_stars_count": 0, "path": "Assignments/Assignment_05/tests/test_gaussmix.py" }, { "content": "import pickle\nfrom numpy.core.numeric import isscalar\nimport pytest\nfrom copy import deepcopy\nimport sys\nfrom pathlib import Path\nimport numpy as np\nimport os\nfrom dataclasses import is_dataclass, astuple\nfrom collections.abc import Iterable\n\nassignment_name = \"slam\"\n\nthis_file = Path(__file__)\ntests_folder = this_file.parent\ntest_data_file = tests_folder.joinpath(\"test_data.pickle\")\nproject_folder = tests_folder.parent\ncode_folder = project_folder.joinpath(assignment_name)\n\nsys.path.insert(0, str(code_folder))\n\nimport solution # nopep8\nimport EKFSLAM # nopep8\n\n\n@pytest.fixture\ndef test_data():\n with open(test_data_file, \"rb\") as file:\n test_data = pickle.load(file)\n return test_data\n\n\ndef compare(a, b):\n if isinstance(b, np.ndarray) or np.isscalar(b):\n if isinstance(b, np.ndarray) and a.shape != b.shape:\n return False\n return np.allclose(a, b, atol=1e-6)\n\n elif is_dataclass(b):\n if type(a).__name__ != type(b).__name__:\n return False\n a_tup, b_tup = astuple(a), astuple(b)\n return all([compare(i, j) for i, j in zip(a_tup, b_tup)])\n\n elif isinstance(b, Iterable):\n return all([compare(i, j) for i, j in zip(a, b)])\n\n else:\n return a == b\n\n\nclass Test_EKFSLAM_f:\n def test_output(self, test_data):\n \"\"\"Tests if the function is correct by comparing the output\n with the output of the solution\n\n As python always use pass by reference, not by copy, it also checks if the\n input is changed (or not) in the same way as the in solution\n \"\"\"\n for finput in test_data[\"EKFSLAM.EKFSLAM.f\"]:\n params = tuple(finput.values())\n\n self_1, x_1, u_1 = deepcopy(params)\n\n self_2, x_2, u_2 = deepcopy(params)\n\n xpred_1 = EKFSLAM.EKFSLAM.f(self_1, x_1, u_1)\n\n xpred_2 = solution.EKFSLAM.EKFSLAM.f(self_2, x_2, u_2)\n \n assert compare(xpred_1, xpred_2)\n \n assert compare(self_1, self_2)\n assert compare(x_1, x_2)\n assert compare(u_1, u_2)\n\n def test_solution_usage(self, test_data):\n \"\"\"Tests if the solution is used in the function\"\"\"\n for finput in test_data[\"EKFSLAM.EKFSLAM.f\"][:1]:\n params = finput\n\n solution.used[\"EKFSLAM.EKFSLAM.f\"] = False\n\n EKFSLAM.EKFSLAM.f(**params)\n\n assert not solution.used[\"EKFSLAM.EKFSLAM.f\"], \"The function uses the solution\"\n\n\nclass Test_EKFSLAM_Fx:\n def test_output(self, test_data):\n \"\"\"Tests if the function is correct by comparing the output\n with the output of the solution\n\n As python always use pass by reference, not by copy, it also checks if the\n input is changed (or not) in the same way as the in solution\n \"\"\"\n for finput in test_data[\"EKFSLAM.EKFSLAM.Fx\"]:\n params = tuple(finput.values())\n\n self_1, x_1, u_1 = deepcopy(params)\n\n self_2, x_2, u_2 = deepcopy(params)\n\n Fx_1 = EKFSLAM.EKFSLAM.Fx(self_1, x_1, u_1)\n\n Fx_2 = solution.EKFSLAM.EKFSLAM.Fx(self_2, x_2, u_2)\n \n assert compare(Fx_1, Fx_2)\n \n assert compare(self_1, self_2)\n assert compare(x_1, x_2)\n assert compare(u_1, u_2)\n\n def test_solution_usage(self, test_data):\n \"\"\"Tests if the solution is used in the function\"\"\"\n for finput in test_data[\"EKFSLAM.EKFSLAM.Fx\"][:1]:\n params 
= finput\n\n solution.used[\"EKFSLAM.EKFSLAM.Fx\"] = False\n\n EKFSLAM.EKFSLAM.Fx(**params)\n\n assert not solution.used[\"EKFSLAM.EKFSLAM.Fx\"], \"The function uses the solution\"\n\n\nclass Test_EKFSLAM_Fu:\n def test_output(self, test_data):\n \"\"\"Tests if the function is correct by comparing the output\n with the output of the solution\n\n As python always use pass by reference, not by copy, it also checks if the\n input is changed (or not) in the same way as the in solution\n \"\"\"\n for finput in test_data[\"EKFSLAM.EKFSLAM.Fu\"]:\n params = tuple(finput.values())\n\n self_1, x_1, u_1 = deepcopy(params)\n\n self_2, x_2, u_2 = deepcopy(params)\n\n Fu_1 = EKFSLAM.EKFSLAM.Fu(self_1, x_1, u_1)\n\n Fu_2 = solution.EKFSLAM.EKFSLAM.Fu(self_2, x_2, u_2)\n \n assert compare(Fu_1, Fu_2)\n \n assert compare(self_1, self_2)\n assert compare(x_1, x_2)\n assert compare(u_1, u_2)\n\n def test_solution_usage(self, test_data):\n \"\"\"Tests if the solution is used in the function\"\"\"\n for finput in test_data[\"EKFSLAM.EKFSLAM.Fu\"][:1]:\n params = finput\n\n solution.used[\"EKFSLAM.EKFSLAM.Fu\"] = False\n\n EKFSLAM.EKFSLAM.Fu(**params)\n\n assert not solution.used[\"EKFSLAM.EKFSLAM.Fu\"], \"The function uses the solution\"\n\n\nclass Test_EKFSLAM_predict:\n def test_output(self, test_data):\n \"\"\"Tests if the function is correct by comparing the output\n with the output of the solution\n\n As python always use pass by reference, not by copy, it also checks if the\n input is changed (or not) in the same way as the in solution\n \"\"\"\n for finput in test_data[\"EKFSLAM.EKFSLAM.predict\"]:\n params = tuple(finput.values())\n\n self_1, eta_1, P_1, z_odo_1 = deepcopy(params)\n\n self_2, eta_2, P_2, z_odo_2 = deepcopy(params)\n\n etapred_1, P_1 = EKFSLAM.EKFSLAM.predict(self_1, eta_1, P_1, z_odo_1)\n\n etapred_2, P_2 = solution.EKFSLAM.EKFSLAM.predict(self_2, eta_2, P_2, z_odo_2)\n \n assert compare(etapred_1, etapred_2)\n assert compare(P_1, P_2)\n \n assert compare(self_1, self_2)\n assert compare(eta_1, eta_2)\n assert compare(P_1, P_2)\n assert compare(z_odo_1, z_odo_2)\n\n def test_solution_usage(self, test_data):\n \"\"\"Tests if the solution is used in the function\"\"\"\n for finput in test_data[\"EKFSLAM.EKFSLAM.predict\"][:1]:\n params = finput\n\n solution.used[\"EKFSLAM.EKFSLAM.predict\"] = False\n\n EKFSLAM.EKFSLAM.predict(**params)\n\n assert not solution.used[\"EKFSLAM.EKFSLAM.predict\"], \"The function uses the solution\"\n\n\nclass Test_EKFSLAM_h:\n def test_output(self, test_data):\n \"\"\"Tests if the function is correct by comparing the output\n with the output of the solution\n\n As python always use pass by reference, not by copy, it also checks if the\n input is changed (or not) in the same way as the in solution\n \"\"\"\n for finput in test_data[\"EKFSLAM.EKFSLAM.h\"]:\n params = tuple(finput.values())\n\n self_1, eta_1 = deepcopy(params)\n\n self_2, eta_2 = deepcopy(params)\n\n zpred_1 = EKFSLAM.EKFSLAM.h(self_1, eta_1)\n\n zpred_2 = solution.EKFSLAM.EKFSLAM.h(self_2, eta_2)\n \n assert compare(zpred_1, zpred_2)\n \n assert compare(self_1, self_2)\n assert compare(eta_1, eta_2)\n\n def test_solution_usage(self, test_data):\n \"\"\"Tests if the solution is used in the function\"\"\"\n for finput in test_data[\"EKFSLAM.EKFSLAM.h\"][:1]:\n params = finput\n\n solution.used[\"EKFSLAM.EKFSLAM.h\"] = False\n\n EKFSLAM.EKFSLAM.h(**params)\n\n assert not solution.used[\"EKFSLAM.EKFSLAM.h\"], \"The function uses the solution\"\n\n\nclass Test_EKFSLAM_h_jac:\n def 
test_output(self, test_data):\n \"\"\"Tests if the function is correct by comparing the output\n with the output of the solution\n\n As python always use pass by reference, not by copy, it also checks if the\n input is changed (or not) in the same way as the in solution\n \"\"\"\n for finput in test_data[\"EKFSLAM.EKFSLAM.h_jac\"]:\n params = tuple(finput.values())\n\n self_1, eta_1 = deepcopy(params)\n\n self_2, eta_2 = deepcopy(params)\n\n H_1 = EKFSLAM.EKFSLAM.h_jac(self_1, eta_1)\n\n H_2 = solution.EKFSLAM.EKFSLAM.h_jac(self_2, eta_2)\n \n assert compare(H_1, H_2)\n \n assert compare(self_1, self_2)\n assert compare(eta_1, eta_2)\n\n def test_solution_usage(self, test_data):\n \"\"\"Tests if the solution is used in the function\"\"\"\n for finput in test_data[\"EKFSLAM.EKFSLAM.h_jac\"][:1]:\n params = finput\n\n solution.used[\"EKFSLAM.EKFSLAM.h_jac\"] = False\n\n EKFSLAM.EKFSLAM.h_jac(**params)\n\n assert not solution.used[\"EKFSLAM.EKFSLAM.h_jac\"], \"The function uses the solution\"\n\n\nclass Test_EKFSLAM_add_landmarks:\n def test_output(self, test_data):\n \"\"\"Tests if the function is correct by comparing the output\n with the output of the solution\n\n As python always use pass by reference, not by copy, it also checks if the\n input is changed (or not) in the same way as the in solution\n \"\"\"\n for finput in test_data[\"EKFSLAM.EKFSLAM.add_landmarks\"]:\n params = tuple(finput.values())\n\n self_1, eta_1, P_1, z_1 = deepcopy(params)\n\n self_2, eta_2, P_2, z_2 = deepcopy(params)\n\n etaadded_1, Padded_1 = EKFSLAM.EKFSLAM.add_landmarks(self_1, eta_1, P_1, z_1)\n\n etaadded_2, Padded_2 = solution.EKFSLAM.EKFSLAM.add_landmarks(self_2, eta_2, P_2, z_2)\n \n assert compare(etaadded_1, etaadded_2)\n assert compare(Padded_1, Padded_2)\n \n assert compare(self_1, self_2)\n assert compare(eta_1, eta_2)\n assert compare(P_1, P_2)\n assert compare(z_1, z_2)\n\n def test_solution_usage(self, test_data):\n \"\"\"Tests if the solution is used in the function\"\"\"\n for finput in test_data[\"EKFSLAM.EKFSLAM.add_landmarks\"][:1]:\n params = finput\n\n solution.used[\"EKFSLAM.EKFSLAM.add_landmarks\"] = False\n\n EKFSLAM.EKFSLAM.add_landmarks(**params)\n\n assert not solution.used[\"EKFSLAM.EKFSLAM.add_landmarks\"], \"The function uses the solution\"\n\n\nclass Test_EKFSLAM_update:\n def test_output(self, test_data):\n \"\"\"Tests if the function is correct by comparing the output\n with the output of the solution\n\n As python always use pass by reference, not by copy, it also checks if the\n input is changed (or not) in the same way as the in solution\n \"\"\"\n for finput in test_data[\"EKFSLAM.EKFSLAM.update\"]:\n params = tuple(finput.values())\n\n self_1, eta_1, P_1, z_1 = deepcopy(params)\n\n self_2, eta_2, P_2, z_2 = deepcopy(params)\n\n etaupd_1, Pupd_1, NIS_1, a_1 = EKFSLAM.EKFSLAM.update(self_1, eta_1, P_1, z_1)\n\n etaupd_2, Pupd_2, NIS_2, a_2 = solution.EKFSLAM.EKFSLAM.update(self_2, eta_2, P_2, z_2)\n \n assert compare(etaupd_1, etaupd_2)\n assert compare(Pupd_1, Pupd_2)\n assert compare(NIS_1, NIS_2)\n assert compare(a_1, a_2)\n \n assert compare(self_1, self_2)\n assert compare(eta_1, eta_2)\n assert compare(P_1, P_2)\n assert compare(z_1, z_2)\n\n def test_solution_usage(self, test_data):\n \"\"\"Tests if the solution is used in the function\"\"\"\n for finput in test_data[\"EKFSLAM.EKFSLAM.update\"][:1]:\n params = finput\n\n solution.used[\"EKFSLAM.EKFSLAM.update\"] = False\n\n EKFSLAM.EKFSLAM.update(**params)\n\n assert not solution.used[\"EKFSLAM.EKFSLAM.update\"], 
\"The function uses the solution\"\n\n\nif __name__ == \"__main__\":\n os.environ[\"_PYTEST_RAISE\"] = \"1\"\n pytest.main()\n", "id": "2916924", "language": "Python", "matching_score": 3.188002109527588, "max_stars_count": 0, "path": "Graded/G3/tests/test_EKFSLAM.py" }, { "content": "import pickle\nfrom numpy.core.numeric import isscalar\nimport pytest\nfrom copy import deepcopy\nimport sys\nfrom pathlib import Path\nimport numpy as np\nimport os\nfrom dataclasses import is_dataclass, astuple\nfrom collections.abc import Iterable\n\nassignment_name = \"eskf\"\n\nthis_file = Path(__file__)\ntests_folder = this_file.parent\ntest_data_file = tests_folder.joinpath(\"test_data.pickle\")\nproject_folder = tests_folder.parent\ncode_folder = project_folder.joinpath(assignment_name)\n\nsys.path.insert(0, str(code_folder))\n\nimport solution # nopep8\nimport cross_matrix, eskf, nis_nees, quaternion # nopep8\n\n\n@pytest.fixture\ndef test_data():\n with open(test_data_file, \"rb\") as file:\n test_data = pickle.load(file)\n return test_data\n\n\ndef compare(a, b):\n if isinstance(b, np.ndarray) or np.isscalar(b):\n return np.allclose(a, b, atol=1e-6)\n\n elif is_dataclass(b):\n if type(a).__name__ != type(b).__name__:\n return False\n a_tup, b_tup = astuple(a), astuple(b)\n return all([compare(i, j) for i, j in zip(a_tup, b_tup)])\n\n elif isinstance(b, Iterable):\n return all([compare(i, j) for i, j in zip(a, b)])\n\n else:\n return a == b\n\n\nclass Test_ESKF_correct_z_imu:\n def test_output(self, test_data):\n \"\"\"Tests if the function is correct by comparing the output\n with the output of the solution\n\n As python always use pass by reference, not by copy, it also checks if the\n input is changed (or not) in the same way as the in solution\n \"\"\"\n for finput in test_data[\"eskf.ESKF.correct_z_imu\"]:\n params = tuple(finput.values())\n\n self_1, x_nom_prev_1, z_imu_1 = deepcopy(params)\n\n self_2, x_nom_prev_2, z_imu_2 = deepcopy(params)\n\n z_corr_1 = eskf.ESKF.correct_z_imu(self_1, x_nom_prev_1, z_imu_1)\n\n z_corr_2 = solution.eskf.ESKF.correct_z_imu(self_2, x_nom_prev_2, z_imu_2)\n \n assert compare(z_corr_1, z_corr_2)\n \n assert compare(self_1, self_2)\n assert compare(x_nom_prev_1, x_nom_prev_2)\n assert compare(z_imu_1, z_imu_2)\n\n def test_solution_usage(self, test_data):\n \"\"\"Tests if the solution is used in the function\"\"\"\n for finput in test_data[\"eskf.ESKF.correct_z_imu\"][:1]:\n params = finput\n\n solution.used[\"eskf.ESKF.correct_z_imu\"] = False\n\n eskf.ESKF.correct_z_imu(**params)\n\n assert not solution.used[\"eskf.ESKF.correct_z_imu\"], \"The function uses the solution\"\n\n\nclass Test_ESKF_predict_nominal:\n def test_output(self, test_data):\n \"\"\"Tests if the function is correct by comparing the output\n with the output of the solution\n\n As python always use pass by reference, not by copy, it also checks if the\n input is changed (or not) in the same way as the in solution\n \"\"\"\n for finput in test_data[\"eskf.ESKF.predict_nominal\"]:\n params = tuple(finput.values())\n\n self_1, x_nom_prev_1, z_corr_1 = deepcopy(params)\n\n self_2, x_nom_prev_2, z_corr_2 = deepcopy(params)\n\n x_nom_pred_1 = eskf.ESKF.predict_nominal(self_1, x_nom_prev_1, z_corr_1)\n\n x_nom_pred_2 = solution.eskf.ESKF.predict_nominal(self_2, x_nom_prev_2, z_corr_2)\n \n assert compare(x_nom_pred_1, x_nom_pred_2)\n \n assert compare(self_1, self_2)\n assert compare(x_nom_prev_1, x_nom_prev_2)\n assert compare(z_corr_1, z_corr_2)\n\n def test_solution_usage(self, test_data):\n 
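# solution.used appears to be a dict of flags that the solution package\n        # sets when its implementation runs; the test resets and checks it.\n        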
\"\"\"Tests if the solution is used in the function\"\"\"\n for finput in test_data[\"eskf.ESKF.predict_nominal\"][:1]:\n params = finput\n\n solution.used[\"eskf.ESKF.predict_nominal\"] = False\n\n eskf.ESKF.predict_nominal(**params)\n\n assert not solution.used[\"eskf.ESKF.predict_nominal\"], \"The function uses the solution\"\n\n\nclass Test_ESKF_get_error_A_continous:\n def test_output(self, test_data):\n \"\"\"Tests if the function is correct by comparing the output\n with the output of the solution\n\n As python always use pass by reference, not by copy, it also checks if the\n input is changed (or not) in the same way as the in solution\n \"\"\"\n for finput in test_data[\"eskf.ESKF.get_error_A_continous\"]:\n params = tuple(finput.values())\n\n self_1, x_nom_prev_1, z_corr_1 = deepcopy(params)\n\n self_2, x_nom_prev_2, z_corr_2 = deepcopy(params)\n\n A_1 = eskf.ESKF.get_error_A_continous(self_1, x_nom_prev_1, z_corr_1)\n\n A_2 = solution.eskf.ESKF.get_error_A_continous(self_2, x_nom_prev_2, z_corr_2)\n \n assert compare(A_1, A_2)\n \n assert compare(self_1, self_2)\n assert compare(x_nom_prev_1, x_nom_prev_2)\n assert compare(z_corr_1, z_corr_2)\n\n def test_solution_usage(self, test_data):\n \"\"\"Tests if the solution is used in the function\"\"\"\n for finput in test_data[\"eskf.ESKF.get_error_A_continous\"][:1]:\n params = finput\n\n solution.used[\"eskf.ESKF.get_error_A_continous\"] = False\n\n eskf.ESKF.get_error_A_continous(**params)\n\n assert not solution.used[\"eskf.ESKF.get_error_A_continous\"], \"The function uses the solution\"\n\n\nclass Test_ESKF_get_error_GQGT_continous:\n def test_output(self, test_data):\n \"\"\"Tests if the function is correct by comparing the output\n with the output of the solution\n\n As python always use pass by reference, not by copy, it also checks if the\n input is changed (or not) in the same way as the in solution\n \"\"\"\n for finput in test_data[\"eskf.ESKF.get_error_GQGT_continous\"]:\n params = tuple(finput.values())\n\n self_1, x_nom_prev_1 = deepcopy(params)\n\n self_2, x_nom_prev_2 = deepcopy(params)\n\n GQGT_1 = eskf.ESKF.get_error_GQGT_continous(self_1, x_nom_prev_1)\n\n GQGT_2 = solution.eskf.ESKF.get_error_GQGT_continous(self_2, x_nom_prev_2)\n \n assert compare(GQGT_1, GQGT_2)\n \n assert compare(self_1, self_2)\n assert compare(x_nom_prev_1, x_nom_prev_2)\n\n def test_solution_usage(self, test_data):\n \"\"\"Tests if the solution is used in the function\"\"\"\n for finput in test_data[\"eskf.ESKF.get_error_GQGT_continous\"][:1]:\n params = finput\n\n solution.used[\"eskf.ESKF.get_error_GQGT_continous\"] = False\n\n eskf.ESKF.get_error_GQGT_continous(**params)\n\n assert not solution.used[\"eskf.ESKF.get_error_GQGT_continous\"], \"The function uses the solution\"\n\n\nclass Test_ESKF_get_discrete_error_diff:\n def test_output(self, test_data):\n \"\"\"Tests if the function is correct by comparing the output\n with the output of the solution\n\n As python always use pass by reference, not by copy, it also checks if the\n input is changed (or not) in the same way as the in solution\n \"\"\"\n for finput in test_data[\"eskf.ESKF.get_discrete_error_diff\"]:\n params = tuple(finput.values())\n\n self_1, x_nom_prev_1, z_corr_1 = deepcopy(params)\n\n self_2, x_nom_prev_2, z_corr_2 = deepcopy(params)\n\n Ad_1, GQGTd_1 = eskf.ESKF.get_discrete_error_diff(self_1, x_nom_prev_1, z_corr_1)\n\n Ad_2, GQGTd_2 = solution.eskf.ESKF.get_discrete_error_diff(self_2, x_nom_prev_2, z_corr_2)\n \n assert compare(Ad_1, Ad_2)\n assert 
compare(GQGTd_1, GQGTd_2)\n \n assert compare(self_1, self_2)\n assert compare(x_nom_prev_1, x_nom_prev_2)\n assert compare(z_corr_1, z_corr_2)\n\n def test_solution_usage(self, test_data):\n \"\"\"Tests if the solution is used in the function\"\"\"\n for finput in test_data[\"eskf.ESKF.get_discrete_error_diff\"][:1]:\n params = finput\n\n solution.used[\"eskf.ESKF.get_discrete_error_diff\"] = False\n\n eskf.ESKF.get_discrete_error_diff(**params)\n\n assert not solution.used[\"eskf.ESKF.get_discrete_error_diff\"], \"The function uses the solution\"\n\n\nclass Test_ESKF_predict_x_err:\n def test_output(self, test_data):\n \"\"\"Tests if the function is correct by comparing the output\n with the output of the solution\n\n As python always use pass by reference, not by copy, it also checks if the\n input is changed (or not) in the same way as the in solution\n \"\"\"\n for finput in test_data[\"eskf.ESKF.predict_x_err\"]:\n params = tuple(finput.values())\n\n self_1, x_nom_prev_1, x_err_prev_gauss_1, z_corr_1 = deepcopy(params)\n\n self_2, x_nom_prev_2, x_err_prev_gauss_2, z_corr_2 = deepcopy(params)\n\n x_err_pred_1 = eskf.ESKF.predict_x_err(self_1, x_nom_prev_1, x_err_prev_gauss_1, z_corr_1)\n\n x_err_pred_2 = solution.eskf.ESKF.predict_x_err(self_2, x_nom_prev_2, x_err_prev_gauss_2, z_corr_2)\n \n assert compare(x_err_pred_1, x_err_pred_2)\n \n assert compare(self_1, self_2)\n assert compare(x_nom_prev_1, x_nom_prev_2)\n assert compare(x_err_prev_gauss_1, x_err_prev_gauss_2)\n assert compare(z_corr_1, z_corr_2)\n\n def test_solution_usage(self, test_data):\n \"\"\"Tests if the solution is used in the function\"\"\"\n for finput in test_data[\"eskf.ESKF.predict_x_err\"][:1]:\n params = finput\n\n solution.used[\"eskf.ESKF.predict_x_err\"] = False\n\n eskf.ESKF.predict_x_err(**params)\n\n assert not solution.used[\"eskf.ESKF.predict_x_err\"], \"The function uses the solution\"\n\n\nclass Test_ESKF_predict_from_imu:\n def test_output(self, test_data):\n \"\"\"Tests if the function is correct by comparing the output\n with the output of the solution\n\n As python always use pass by reference, not by copy, it also checks if the\n input is changed (or not) in the same way as the in solution\n \"\"\"\n for finput in test_data[\"eskf.ESKF.predict_from_imu\"]:\n params = tuple(finput.values())\n\n self_1, x_nom_prev_1, x_err_gauss_1, z_imu_1 = deepcopy(params)\n\n self_2, x_nom_prev_2, x_err_gauss_2, z_imu_2 = deepcopy(params)\n\n x_nom_pred_1, x_err_pred_1 = eskf.ESKF.predict_from_imu(self_1, x_nom_prev_1, x_err_gauss_1, z_imu_1)\n\n x_nom_pred_2, x_err_pred_2 = solution.eskf.ESKF.predict_from_imu(self_2, x_nom_prev_2, x_err_gauss_2, z_imu_2)\n \n assert compare(x_nom_pred_1, x_nom_pred_2)\n assert compare(x_err_pred_1, x_err_pred_2)\n \n assert compare(self_1, self_2)\n assert compare(x_nom_prev_1, x_nom_prev_2)\n assert compare(x_err_gauss_1, x_err_gauss_2)\n assert compare(z_imu_1, z_imu_2)\n\n def test_solution_usage(self, test_data):\n \"\"\"Tests if the solution is used in the function\"\"\"\n for finput in test_data[\"eskf.ESKF.predict_from_imu\"][:1]:\n params = finput\n\n solution.used[\"eskf.ESKF.predict_from_imu\"] = False\n\n eskf.ESKF.predict_from_imu(**params)\n\n assert not solution.used[\"eskf.ESKF.predict_from_imu\"], \"The function uses the solution\"\n\n\nclass Test_ESKF_get_gnss_measurment_jac:\n def test_output(self, test_data):\n \"\"\"Tests if the function is correct by comparing the output\n with the output of the solution\n\n As python always use pass by reference, not by 
copy, it also checks if the\n input is changed (or not) in the same way as the in solution\n \"\"\"\n for finput in test_data[\"eskf.ESKF.get_gnss_measurment_jac\"]:\n params = tuple(finput.values())\n\n self_1, x_nom_1 = deepcopy(params)\n\n self_2, x_nom_2 = deepcopy(params)\n\n H_1 = eskf.ESKF.get_gnss_measurment_jac(self_1, x_nom_1)\n\n H_2 = solution.eskf.ESKF.get_gnss_measurment_jac(self_2, x_nom_2)\n \n assert compare(H_1, H_2)\n \n assert compare(self_1, self_2)\n assert compare(x_nom_1, x_nom_2)\n\n def test_solution_usage(self, test_data):\n \"\"\"Tests if the solution is used in the function\"\"\"\n for finput in test_data[\"eskf.ESKF.get_gnss_measurment_jac\"][:1]:\n params = finput\n\n solution.used[\"eskf.ESKF.get_gnss_measurment_jac\"] = False\n\n eskf.ESKF.get_gnss_measurment_jac(**params)\n\n assert not solution.used[\"eskf.ESKF.get_gnss_measurment_jac\"], \"The function uses the solution\"\n\n\nclass Test_ESKF_predict_gnss_measurement:\n def test_output(self, test_data):\n \"\"\"Tests if the function is correct by comparing the output\n with the output of the solution\n\n As python always use pass by reference, not by copy, it also checks if the\n input is changed (or not) in the same way as the in solution\n \"\"\"\n for finput in test_data[\"eskf.ESKF.predict_gnss_measurement\"]:\n params = tuple(finput.values())\n\n self_1, x_nom_1, x_err_1, z_gnss_1 = deepcopy(params)\n\n self_2, x_nom_2, x_err_2, z_gnss_2 = deepcopy(params)\n\n z_gnss_pred_gauss_1 = eskf.ESKF.predict_gnss_measurement(self_1, x_nom_1, x_err_1, z_gnss_1)\n\n z_gnss_pred_gauss_2 = solution.eskf.ESKF.predict_gnss_measurement(self_2, x_nom_2, x_err_2, z_gnss_2)\n \n assert compare(z_gnss_pred_gauss_1, z_gnss_pred_gauss_2)\n \n assert compare(self_1, self_2)\n assert compare(x_nom_1, x_nom_2)\n assert compare(x_err_1, x_err_2)\n assert compare(z_gnss_1, z_gnss_2)\n\n def test_solution_usage(self, test_data):\n \"\"\"Tests if the solution is used in the function\"\"\"\n for finput in test_data[\"eskf.ESKF.predict_gnss_measurement\"][:1]:\n params = finput\n\n solution.used[\"eskf.ESKF.predict_gnss_measurement\"] = False\n\n eskf.ESKF.predict_gnss_measurement(**params)\n\n assert not solution.used[\"eskf.ESKF.predict_gnss_measurement\"], \"The function uses the solution\"\n\n\nclass Test_ESKF_get_x_err_upd:\n def test_output(self, test_data):\n \"\"\"Tests if the function is correct by comparing the output\n with the output of the solution\n\n As python always use pass by reference, not by copy, it also checks if the\n input is changed (or not) in the same way as the in solution\n \"\"\"\n for finput in test_data[\"eskf.ESKF.get_x_err_upd\"]:\n params = tuple(finput.values())\n\n self_1, x_nom_1, x_err_1, z_gnss_pred_gauss_1, z_gnss_1 = deepcopy(params)\n\n self_2, x_nom_2, x_err_2, z_gnss_pred_gauss_2, z_gnss_2 = deepcopy(params)\n\n x_err_upd_gauss_1 = eskf.ESKF.get_x_err_upd(self_1, x_nom_1, x_err_1, z_gnss_pred_gauss_1, z_gnss_1)\n\n x_err_upd_gauss_2 = solution.eskf.ESKF.get_x_err_upd(self_2, x_nom_2, x_err_2, z_gnss_pred_gauss_2, z_gnss_2)\n \n assert compare(x_err_upd_gauss_1, x_err_upd_gauss_2)\n \n assert compare(self_1, self_2)\n assert compare(x_nom_1, x_nom_2)\n assert compare(x_err_1, x_err_2)\n assert compare(z_gnss_pred_gauss_1, z_gnss_pred_gauss_2)\n assert compare(z_gnss_1, z_gnss_2)\n\n def test_solution_usage(self, test_data):\n \"\"\"Tests if the solution is used in the function\"\"\"\n for finput in test_data[\"eskf.ESKF.get_x_err_upd\"][:1]:\n params = finput\n\n 
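# Reset the usage flag before calling the student implementation so the\n            # assertion below can detect any fallthrough to the solution.\n            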
solution.used[\"eskf.ESKF.get_x_err_upd\"] = False\n\n eskf.ESKF.get_x_err_upd(**params)\n\n assert not solution.used[\"eskf.ESKF.get_x_err_upd\"], \"The function uses the solution\"\n\n\nclass Test_ESKF_inject:\n def test_output(self, test_data):\n \"\"\"Tests if the function is correct by comparing the output\n with the output of the solution\n\n As python always use pass by reference, not by copy, it also checks if the\n input is changed (or not) in the same way as the in solution\n \"\"\"\n for finput in test_data[\"eskf.ESKF.inject\"]:\n params = tuple(finput.values())\n\n self_1, x_nom_prev_1, x_err_upd_1 = deepcopy(params)\n\n self_2, x_nom_prev_2, x_err_upd_2 = deepcopy(params)\n\n x_nom_inj_1, x_err_inj_1 = eskf.ESKF.inject(self_1, x_nom_prev_1, x_err_upd_1)\n\n x_nom_inj_2, x_err_inj_2 = solution.eskf.ESKF.inject(self_2, x_nom_prev_2, x_err_upd_2)\n \n assert compare(x_nom_inj_1, x_nom_inj_2)\n assert compare(x_err_inj_1, x_err_inj_2)\n \n assert compare(self_1, self_2)\n assert compare(x_nom_prev_1, x_nom_prev_2)\n assert compare(x_err_upd_1, x_err_upd_2)\n\n def test_solution_usage(self, test_data):\n \"\"\"Tests if the solution is used in the function\"\"\"\n for finput in test_data[\"eskf.ESKF.inject\"][:1]:\n params = finput\n\n solution.used[\"eskf.ESKF.inject\"] = False\n\n eskf.ESKF.inject(**params)\n\n assert not solution.used[\"eskf.ESKF.inject\"], \"The function uses the solution\"\n\n\nclass Test_ESKF_update_from_gnss:\n def test_output(self, test_data):\n \"\"\"Tests if the function is correct by comparing the output\n with the output of the solution\n\n As python always use pass by reference, not by copy, it also checks if the\n input is changed (or not) in the same way as the in solution\n \"\"\"\n for finput in test_data[\"eskf.ESKF.update_from_gnss\"]:\n params = tuple(finput.values())\n\n self_1, x_nom_prev_1, x_err_prev_1, z_gnss_1 = deepcopy(params)\n\n self_2, x_nom_prev_2, x_err_prev_2, z_gnss_2 = deepcopy(params)\n\n x_nom_inj_1, x_err_inj_1, z_gnss_pred_gauss_1 = eskf.ESKF.update_from_gnss(self_1, x_nom_prev_1, x_err_prev_1, z_gnss_1)\n\n x_nom_inj_2, x_err_inj_2, z_gnss_pred_gauss_2 = solution.eskf.ESKF.update_from_gnss(self_2, x_nom_prev_2, x_err_prev_2, z_gnss_2)\n \n assert compare(x_nom_inj_1, x_nom_inj_2)\n assert compare(x_err_inj_1, x_err_inj_2)\n assert compare(z_gnss_pred_gauss_1, z_gnss_pred_gauss_2)\n \n assert compare(self_1, self_2)\n assert compare(x_nom_prev_1, x_nom_prev_2)\n assert compare(x_err_prev_1, x_err_prev_2)\n assert compare(z_gnss_1, z_gnss_2)\n\n def test_solution_usage(self, test_data):\n \"\"\"Tests if the solution is used in the function\"\"\"\n for finput in test_data[\"eskf.ESKF.update_from_gnss\"][:1]:\n params = finput\n\n solution.used[\"eskf.ESKF.update_from_gnss\"] = False\n\n eskf.ESKF.update_from_gnss(**params)\n\n assert not solution.used[\"eskf.ESKF.update_from_gnss\"], \"The function uses the solution\"\n\n\nif __name__ == \"__main__\":\n os.environ[\"_PYTEST_RAISE\"] = \"1\"\n pytest.main()\n", "id": "12467286", "language": "Python", "matching_score": 5.740917682647705, "max_stars_count": 0, "path": "Graded/G2/tests/test_eskf.py" }, { "content": "import pickle\nfrom numpy.core.numeric import isscalar\nimport pytest\nfrom copy import deepcopy\nimport sys\nfrom pathlib import Path\nimport numpy as np\nimport os\nfrom dataclasses import is_dataclass, astuple\nfrom collections.abc import Iterable\n\nassignment_name = \"eskf\"\n\nthis_file = Path(__file__)\ntests_folder = this_file.parent\ntest_data_file = 
tests_folder.joinpath(\"test_data.pickle\")\nproject_folder = tests_folder.parent\ncode_folder = project_folder.joinpath(assignment_name)\n\nsys.path.insert(0, str(code_folder))\n\nimport solution # nopep8\nimport cross_matrix, eskf, nis_nees, quaternion # nopep8\n\n\n@pytest.fixture\ndef test_data():\n with open(test_data_file, \"rb\") as file:\n test_data = pickle.load(file)\n return test_data\n\n\ndef compare(a, b):\n if isinstance(b, np.ndarray) or np.isscalar(b):\n return np.allclose(a, b, atol=1e-6)\n\n elif is_dataclass(b):\n if type(a).__name__ != type(b).__name__:\n return False\n a_tup, b_tup = astuple(a), astuple(b)\n return all([compare(i, j) for i, j in zip(a_tup, b_tup)])\n\n elif isinstance(b, Iterable):\n return all([compare(i, j) for i, j in zip(a, b)])\n\n else:\n return a == b\n\n\nclass Test_get_NIS:\n def test_output(self, test_data):\n \"\"\"Tests if the function is correct by comparing the output\n with the output of the solution\n\n As python always use pass by reference, not by copy, it also checks if the\n input is changed (or not) in the same way as the in solution\n \"\"\"\n for finput in test_data[\"nis_nees.get_NIS\"]:\n params = tuple(finput.values())\n\n z_gnss_1, z_gnss_pred_gauss_1, marginal_idxs_1 = deepcopy(params)\n\n z_gnss_2, z_gnss_pred_gauss_2, marginal_idxs_2 = deepcopy(params)\n\n NIS_1 = nis_nees.get_NIS(z_gnss_1, z_gnss_pred_gauss_1, marginal_idxs_1)\n\n NIS_2 = solution.nis_nees.get_NIS(z_gnss_2, z_gnss_pred_gauss_2, marginal_idxs_2)\n \n assert compare(NIS_1, NIS_2)\n \n assert compare(z_gnss_1, z_gnss_2)\n assert compare(z_gnss_pred_gauss_1, z_gnss_pred_gauss_2)\n assert compare(marginal_idxs_1, marginal_idxs_2)\n\n def test_solution_usage(self, test_data):\n \"\"\"Tests if the solution is used in the function\"\"\"\n for finput in test_data[\"nis_nees.get_NIS\"][:1]:\n params = finput\n\n solution.used[\"nis_nees.get_NIS\"] = False\n\n nis_nees.get_NIS(**params)\n\n assert not solution.used[\"nis_nees.get_NIS\"], \"The function uses the solution\"\n\n\nclass Test_get_error:\n def test_output(self, test_data):\n \"\"\"Tests if the function is correct by comparing the output\n with the output of the solution\n\n As python always use pass by reference, not by copy, it also checks if the\n input is changed (or not) in the same way as the in solution\n \"\"\"\n for finput in test_data[\"nis_nees.get_error\"]:\n params = tuple(finput.values())\n\n x_true_1, x_nom_1 = deepcopy(params)\n\n x_true_2, x_nom_2 = deepcopy(params)\n\n error_1 = nis_nees.get_error(x_true_1, x_nom_1)\n\n error_2 = solution.nis_nees.get_error(x_true_2, x_nom_2)\n \n assert compare(error_1, error_2)\n \n assert compare(x_true_1, x_true_2)\n assert compare(x_nom_1, x_nom_2)\n\n def test_solution_usage(self, test_data):\n \"\"\"Tests if the solution is used in the function\"\"\"\n for finput in test_data[\"nis_nees.get_error\"][:1]:\n params = finput\n\n solution.used[\"nis_nees.get_error\"] = False\n\n nis_nees.get_error(**params)\n\n assert not solution.used[\"nis_nees.get_error\"], \"The function uses the solution\"\n\n\nclass Test_get_NEES:\n def test_output(self, test_data):\n \"\"\"Tests if the function is correct by comparing the output\n with the output of the solution\n\n As python always use pass by reference, not by copy, it also checks if the\n input is changed (or not) in the same way as the in solution\n \"\"\"\n for finput in test_data[\"nis_nees.get_NEES\"]:\n params = tuple(finput.values())\n\n error_1, x_err_1, marginal_idxs_1 = deepcopy(params)\n\n error_2, 
x_err_2, marginal_idxs_2 = deepcopy(params)\n\n NEES_1 = nis_nees.get_NEES(error_1, x_err_1, marginal_idxs_1)\n\n NEES_2 = solution.nis_nees.get_NEES(error_2, x_err_2, marginal_idxs_2)\n \n assert compare(NEES_1, NEES_2)\n \n assert compare(error_1, error_2)\n assert compare(x_err_1, x_err_2)\n assert compare(marginal_idxs_1, marginal_idxs_2)\n\n def test_solution_usage(self, test_data):\n \"\"\"Tests if the solution is used in the function\"\"\"\n for finput in test_data[\"nis_nees.get_NEES\"][:1]:\n params = finput\n\n solution.used[\"nis_nees.get_NEES\"] = False\n\n nis_nees.get_NEES(**params)\n\n assert not solution.used[\"nis_nees.get_NEES\"], \"The function uses the solution\"\n\n\nif __name__ == \"__main__\":\n os.environ[\"_PYTEST_RAISE\"] = \"1\"\n pytest.main()\n", "id": "6213333", "language": "Python", "matching_score": 3.238236665725708, "max_stars_count": 0, "path": "Graded/G2/tests/test_nis_nees.py" }, { "content": "import pickle\nfrom numpy.core.numeric import isscalar\nimport pytest\nfrom copy import deepcopy\nimport sys\nfrom pathlib import Path\nimport numpy as np\nimport os\nfrom dataclasses import is_dataclass, astuple\nfrom collections.abc import Iterable\n\nassignment_name = \"eskf\"\n\nthis_file = Path(__file__)\ntests_folder = this_file.parent\ntest_data_file = tests_folder.joinpath(\"test_data.pickle\")\nproject_folder = tests_folder.parent\ncode_folder = project_folder.joinpath(assignment_name)\n\nsys.path.insert(0, str(code_folder))\n\nimport solution # nopep8\nimport cross_matrix, eskf, nis_nees, quaternion # nopep8\n\n\n@pytest.fixture\ndef test_data():\n with open(test_data_file, \"rb\") as file:\n test_data = pickle.load(file)\n return test_data\n\n\ndef compare(a, b):\n if isinstance(b, np.ndarray) or np.isscalar(b):\n return np.allclose(a, b, atol=1e-6)\n\n elif is_dataclass(b):\n if type(a).__name__ != type(b).__name__:\n return False\n a_tup, b_tup = astuple(a), astuple(b)\n return all([compare(i, j) for i, j in zip(a_tup, b_tup)])\n\n elif isinstance(b, Iterable):\n return all([compare(i, j) for i, j in zip(a, b)])\n\n else:\n return a == b\n\n\nclass Test_RotationQuaterion_multiply:\n def test_output(self, test_data):\n \"\"\"Tests if the function is correct by comparing the output\n with the output of the solution\n\n As python always use pass by reference, not by copy, it also checks if the\n input is changed (or not) in the same way as the in solution\n \"\"\"\n for finput in test_data[\"quaternion.RotationQuaterion.multiply\"]:\n params = tuple(finput.values())\n\n self_1, other_1 = deepcopy(params)\n\n self_2, other_2 = deepcopy(params)\n\n quaternion_product_1 = quaternion.RotationQuaterion.multiply(self_1, other_1)\n\n quaternion_product_2 = solution.quaternion.RotationQuaterion.multiply(self_2, other_2)\n \n assert compare(quaternion_product_1, quaternion_product_2)\n \n assert compare(self_1, self_2)\n assert compare(other_1, other_2)\n\n def test_solution_usage(self, test_data):\n \"\"\"Tests if the solution is used in the function\"\"\"\n for finput in test_data[\"quaternion.RotationQuaterion.multiply\"][:1]:\n params = finput\n\n solution.used[\"quaternion.RotationQuaterion.multiply\"] = False\n\n quaternion.RotationQuaterion.multiply(**params)\n\n assert not solution.used[\"quaternion.RotationQuaterion.multiply\"], \"The function uses the solution\"\n\n\nclass Test_RotationQuaterion_conjugate:\n def test_output(self, test_data):\n \"\"\"Tests if the function is correct by comparing the output\n with the output of the solution\n\n As python 
always use pass by reference, not by copy, it also checks if the\n input is changed (or not) in the same way as the in solution\n \"\"\"\n for finput in test_data[\"quaternion.RotationQuaterion.conjugate\"]:\n params = tuple(finput.values())\n\n self_1, = deepcopy(params)\n\n self_2, = deepcopy(params)\n\n conj_1 = quaternion.RotationQuaterion.conjugate(self_1,)\n\n conj_2 = solution.quaternion.RotationQuaterion.conjugate(self_2,)\n \n assert compare(conj_1, conj_2)\n \n assert compare(self_1, self_2)\n\n def test_solution_usage(self, test_data):\n \"\"\"Tests if the solution is used in the function\"\"\"\n for finput in test_data[\"quaternion.RotationQuaterion.conjugate\"][:1]:\n params = finput\n\n solution.used[\"quaternion.RotationQuaterion.conjugate\"] = False\n\n quaternion.RotationQuaterion.conjugate(**params)\n\n assert not solution.used[\"quaternion.RotationQuaterion.conjugate\"], \"The function uses the solution\"\n\n\nclass Test_RotationQuaterion_as_rotmat:\n def test_output(self, test_data):\n \"\"\"Tests if the function is correct by comparing the output\n with the output of the solution\n\n As python always use pass by reference, not by copy, it also checks if the\n input is changed (or not) in the same way as the in solution\n \"\"\"\n for finput in test_data[\"quaternion.RotationQuaterion.as_rotmat\"]:\n params = tuple(finput.values())\n\n self_1, = deepcopy(params)\n\n self_2, = deepcopy(params)\n\n R_1 = quaternion.RotationQuaterion.as_rotmat(self_1,)\n\n R_2 = solution.quaternion.RotationQuaterion.as_rotmat(self_2,)\n \n assert compare(R_1, R_2)\n \n assert compare(self_1, self_2)\n\n def test_solution_usage(self, test_data):\n \"\"\"Tests if the solution is used in the function\"\"\"\n for finput in test_data[\"quaternion.RotationQuaterion.as_rotmat\"][:1]:\n params = finput\n\n solution.used[\"quaternion.RotationQuaterion.as_rotmat\"] = False\n\n quaternion.RotationQuaterion.as_rotmat(**params)\n\n assert not solution.used[\"quaternion.RotationQuaterion.as_rotmat\"], \"The function uses the solution\"\n\n\nclass Test_RotationQuaterion_as_euler:\n def test_output(self, test_data):\n \"\"\"Tests if the function is correct by comparing the output\n with the output of the solution\n\n As python always use pass by reference, not by copy, it also checks if the\n input is changed (or not) in the same way as the in solution\n \"\"\"\n for finput in test_data[\"quaternion.RotationQuaterion.as_euler\"]:\n params = tuple(finput.values())\n\n self_1, = deepcopy(params)\n\n self_2, = deepcopy(params)\n\n euler_1 = quaternion.RotationQuaterion.as_euler(self_1,)\n\n euler_2 = solution.quaternion.RotationQuaterion.as_euler(self_2,)\n \n assert compare(euler_1, euler_2)\n \n assert compare(self_1, self_2)\n\n def test_solution_usage(self, test_data):\n \"\"\"Tests if the solution is used in the function\"\"\"\n for finput in test_data[\"quaternion.RotationQuaterion.as_euler\"][:1]:\n params = finput\n\n solution.used[\"quaternion.RotationQuaterion.as_euler\"] = False\n\n quaternion.RotationQuaterion.as_euler(**params)\n\n assert not solution.used[\"quaternion.RotationQuaterion.as_euler\"], \"The function uses the solution\"\n\n\nclass Test_RotationQuaterion_as_avec:\n def test_output(self, test_data):\n \"\"\"Tests if the function is correct by comparing the output\n with the output of the solution\n\n As python always use pass by reference, not by copy, it also checks if the\n input is changed (or not) in the same way as the in solution\n \"\"\"\n for finput in 
test_data[\"quaternion.RotationQuaterion.as_avec\"]:\n params = tuple(finput.values())\n\n self_1, = deepcopy(params)\n\n self_2, = deepcopy(params)\n\n avec_1 = quaternion.RotationQuaterion.as_avec(self_1,)\n\n avec_2 = solution.quaternion.RotationQuaterion.as_avec(self_2,)\n \n assert compare(avec_1, avec_2)\n \n assert compare(self_1, self_2)\n\n def test_solution_usage(self, test_data):\n \"\"\"Tests if the solution is used in the function\"\"\"\n for finput in test_data[\"quaternion.RotationQuaterion.as_avec\"][:1]:\n params = finput\n\n solution.used[\"quaternion.RotationQuaterion.as_avec\"] = False\n\n quaternion.RotationQuaterion.as_avec(**params)\n\n assert not solution.used[\"quaternion.RotationQuaterion.as_avec\"], \"The function uses the solution\"\n\n\nif __name__ == \"__main__\":\n os.environ[\"_PYTEST_RAISE\"] = \"1\"\n pytest.main()\n", "id": "5796764", "language": "Python", "matching_score": 3.946951389312744, "max_stars_count": 0, "path": "Graded/G2/tests/test_quaternion.py" }, { "content": "import pickle\nfrom numpy.core.numeric import isscalar\nimport pytest\nfrom copy import deepcopy\nimport sys\nfrom pathlib import Path\nimport numpy as np\nimport os\nfrom dataclasses import is_dataclass, astuple\nfrom collections.abc import Iterable\n\nassignment_name = \"eskf\"\n\nthis_file = Path(__file__)\ntests_folder = this_file.parent\ntest_data_file = tests_folder.joinpath(\"test_data.pickle\")\nproject_folder = tests_folder.parent\ncode_folder = project_folder.joinpath(assignment_name)\n\nsys.path.insert(0, str(code_folder))\n\nimport solution # nopep8\nimport cross_matrix, eskf, nis_nees, quaternion # nopep8\n\n\n@pytest.fixture\ndef test_data():\n with open(test_data_file, \"rb\") as file:\n test_data = pickle.load(file)\n return test_data\n\n\ndef compare(a, b):\n if isinstance(b, np.ndarray) or np.isscalar(b):\n return np.allclose(a, b, atol=1e-6)\n\n elif is_dataclass(b):\n if type(a).__name__ != type(b).__name__:\n return False\n a_tup, b_tup = astuple(a), astuple(b)\n return all([compare(i, j) for i, j in zip(a_tup, b_tup)])\n\n elif isinstance(b, Iterable):\n return all([compare(i, j) for i, j in zip(a, b)])\n\n else:\n return a == b\n\n\nclass Test_get_cross_matrix:\n def test_output(self, test_data):\n \"\"\"Tests if the function is correct by comparing the output\n with the output of the solution\n\n As python always use pass by reference, not by copy, it also checks if the\n input is changed (or not) in the same way as the in solution\n \"\"\"\n for finput in test_data[\"cross_matrix.get_cross_matrix\"]:\n params = tuple(finput.values())\n\n vec_1, = deepcopy(params)\n\n vec_2, = deepcopy(params)\n\n S_1 = cross_matrix.get_cross_matrix(vec_1,)\n\n S_2 = solution.cross_matrix.get_cross_matrix(vec_2,)\n \n assert compare(S_1, S_2)\n \n assert compare(vec_1, vec_2)\n\n def test_solution_usage(self, test_data):\n \"\"\"Tests if the solution is used in the function\"\"\"\n for finput in test_data[\"cross_matrix.get_cross_matrix\"][:1]:\n params = finput\n\n solution.used[\"cross_matrix.get_cross_matrix\"] = False\n\n cross_matrix.get_cross_matrix(**params)\n\n assert not solution.used[\"cross_matrix.get_cross_matrix\"], \"The function uses the solution\"\n\n\nif __name__ == \"__main__\":\n os.environ[\"_PYTEST_RAISE\"] = \"1\"\n pytest.main()\n", "id": "3427614", "language": "Python", "matching_score": 1.853520154953003, "max_stars_count": 0, "path": "Graded/G2/tests/test_cross_matrix.py" }, { "content": "import pickle\nfrom numpy.core.numeric import isscalar\nimport 
pytest\nfrom copy import deepcopy\nimport sys\nfrom pathlib import Path\nimport numpy as np\nimport os\nfrom dataclasses import is_dataclass\n\nassignment_name = \"pda\"\n\nthis_file = Path(__file__)\ntests_folder = this_file.parent\ntest_data_file = tests_folder.joinpath(\"test_data.pickle\")\nproject_folder = tests_folder.parent\ncode_folder = project_folder.joinpath(assignment_name)\n\nsys.path.insert(0, str(code_folder))\n\nimport solution # nopep8\nimport pdaf # nopep8\n\n\n@pytest.fixture\ndef test_data():\n with open(test_data_file, \"rb\") as file:\n test_data = pickle.load(file)\n return test_data\n\n\ndef compare(a, b):\n if (\n isinstance(a, np.ndarray)\n or isinstance(b, np.ndarray)\n or np.isscalar(a)\n or np.isscalar(b)\n ):\n return np.allclose(a, b)\n elif is_dataclass(a) or is_dataclass(b):\n return str(a) == str(b)\n else:\n return a == b\n\n\nclass Test_PDAF_predict_state:\n def test_output(self, test_data):\n \"\"\"Tests if the function is correct by comparing the output\n with the output of the solution\n\n As python always use pass by reference, not by copy, it also checks if the\n input is changed (or not) in the same way as the in solution\n \"\"\"\n for finput in test_data[\"pdaf.PDAF.predict_state\"]:\n params = tuple(finput.values())\n\n self_1, state_upd_prev_gauss_1, Ts_1 = deepcopy(params)\n\n self_2, state_upd_prev_gauss_2, Ts_2 = deepcopy(params)\n\n state_pred_gauus_1 = pdaf.PDAF.predict_state(\n self_1, state_upd_prev_gauss_1, Ts_1)\n\n state_pred_gauus_2 = solution.pdaf.PDAF.predict_state(\n self_2, state_upd_prev_gauss_2, Ts_2)\n\n assert compare(state_pred_gauus_1, state_pred_gauus_2)\n\n assert compare(self_1, self_2)\n assert compare(state_upd_prev_gauss_1, state_upd_prev_gauss_2)\n assert compare(Ts_1, Ts_2)\n\n def test_solution_usage(self, test_data):\n \"\"\"Tests if the solution is used in the function\"\"\"\n for finput in test_data[\"pdaf.PDAF.predict_state\"][:1]:\n params = finput\n\n solution.used[\"pdaf.PDAF.predict_state\"] = False\n\n pdaf.PDAF.predict_state(**params)\n\n assert not solution.used[\"pdaf.PDAF.predict_state\"], \"The function uses the solution\"\n\n\nclass Test_PDAF_predict_measurement:\n def test_output(self, test_data):\n \"\"\"Tests if the function is correct by comparing the output\n with the output of the solution\n\n As python always use pass by reference, not by copy, it also checks if the\n input is changed (or not) in the same way as the in solution\n \"\"\"\n for finput in test_data[\"pdaf.PDAF.predict_measurement\"]:\n params = tuple(finput.values())\n\n self_1, state_pred_gauss_1 = deepcopy(params)\n\n self_2, state_pred_gauss_2 = deepcopy(params)\n\n z_pred_gauss_1 = pdaf.PDAF.predict_measurement(\n self_1, state_pred_gauss_1)\n\n z_pred_gauss_2 = solution.pdaf.PDAF.predict_measurement(\n self_2, state_pred_gauss_2)\n\n assert compare(z_pred_gauss_1, z_pred_gauss_2)\n\n assert compare(self_1, self_2)\n assert compare(state_pred_gauss_1, state_pred_gauss_2)\n\n def test_solution_usage(self, test_data):\n \"\"\"Tests if the solution is used in the function\"\"\"\n for finput in test_data[\"pdaf.PDAF.predict_measurement\"][:1]:\n params = finput\n\n solution.used[\"pdaf.PDAF.predict_measurement\"] = False\n\n pdaf.PDAF.predict_measurement(**params)\n\n assert not solution.used[\"pdaf.PDAF.predict_measurement\"], \"The function uses the solution\"\n\n\nclass Test_PDAF_gate:\n def test_output(self, test_data):\n \"\"\"Tests if the function is correct by comparing the output\n with the output of the solution\n\n As 
python always use pass by reference, not by copy, it also checks if the\n input is changed (or not) in the same way as the in solution\n \"\"\"\n for finput in test_data[\"pdaf.PDAF.gate\"]:\n params = tuple(finput.values())\n\n self_1, z_pred_gauss_1, measurements_1 = deepcopy(params)\n\n self_2, z_pred_gauss_2, measurements_2 = deepcopy(params)\n\n gated_measurements_1 = pdaf.PDAF.gate(\n self_1, z_pred_gauss_1, measurements_1)\n\n gated_measurements_2 = solution.pdaf.PDAF.gate(\n self_2, z_pred_gauss_2, measurements_2)\n\n assert compare(gated_measurements_1, gated_measurements_2)\n\n assert compare(self_1, self_2)\n assert compare(z_pred_gauss_1, z_pred_gauss_2)\n assert compare(measurements_1, measurements_2)\n\n def test_solution_usage(self, test_data):\n \"\"\"Tests if the solution is used in the function\"\"\"\n for finput in test_data[\"pdaf.PDAF.gate\"][:1]:\n params = finput\n\n solution.used[\"pdaf.PDAF.gate\"] = False\n\n pdaf.PDAF.gate(**params)\n\n assert not solution.used[\"pdaf.PDAF.gate\"], \"The function uses the solution\"\n\n\nclass Test_PDAF_get_association_prob:\n def test_output(self, test_data):\n \"\"\"Tests if the function is correct by comparing the output\n with the output of the solution\n\n As python always use pass by reference, not by copy, it also checks if the\n input is changed (or not) in the same way as the in solution\n \"\"\"\n for finput in test_data[\"pdaf.PDAF.get_association_prob\"]:\n params = tuple(finput.values())\n\n self_1, z_pred_gauss_1, gated_measurements_1 = deepcopy(params)\n\n self_2, z_pred_gauss_2, gated_measurements_2 = deepcopy(params)\n\n associations_probs_1 = pdaf.PDAF.get_association_prob(\n self_1, z_pred_gauss_1, gated_measurements_1)\n\n associations_probs_2 = solution.pdaf.PDAF.get_association_prob(\n self_2, z_pred_gauss_2, gated_measurements_2)\n\n assert compare(associations_probs_1, associations_probs_2)\n\n assert compare(self_1, self_2)\n assert compare(z_pred_gauss_1, z_pred_gauss_2)\n assert compare(gated_measurements_1, gated_measurements_2)\n\n def test_solution_usage(self, test_data):\n \"\"\"Tests if the solution is used in the function\"\"\"\n for finput in test_data[\"pdaf.PDAF.get_association_prob\"][:1]:\n params = finput\n\n solution.used[\"pdaf.PDAF.get_association_prob\"] = False\n\n pdaf.PDAF.get_association_prob(**params)\n\n assert not solution.used[\"pdaf.PDAF.get_association_prob\"], \"The function uses the solution\"\n\n\nclass Test_PDAF_get_cond_update_gaussians:\n def test_output(self, test_data):\n \"\"\"Tests if the function is correct by comparing the output\n with the output of the solution\n\n As python always use pass by reference, not by copy, it also checks if the\n input is changed (or not) in the same way as the in solution\n \"\"\"\n for finput in test_data[\"pdaf.PDAF.get_cond_update_gaussians\"]:\n params = tuple(finput.values())\n\n self_1, state_pred_gauss_1, z_pred_gauss_1, gated_measurements_1 = deepcopy(\n params)\n\n self_2, state_pred_gauss_2, z_pred_gauss_2, gated_measurements_2 = deepcopy(\n params)\n\n update_gaussians_1 = pdaf.PDAF.get_cond_update_gaussians(\n self_1, state_pred_gauss_1, z_pred_gauss_1, gated_measurements_1)\n\n update_gaussians_2 = solution.pdaf.PDAF.get_cond_update_gaussians(\n self_2, state_pred_gauss_2, z_pred_gauss_2, gated_measurements_2)\n\n assert compare(update_gaussians_1, update_gaussians_2)\n\n assert compare(self_1, self_2)\n assert compare(state_pred_gauss_1, state_pred_gauss_2)\n assert compare(z_pred_gauss_1, z_pred_gauss_2)\n assert 
compare(gated_measurements_1, gated_measurements_2)\n\n def test_solution_usage(self, test_data):\n \"\"\"Tests if the solution is used in the function\"\"\"\n for finput in test_data[\"pdaf.PDAF.get_cond_update_gaussians\"][:1]:\n params = finput\n\n solution.used[\"pdaf.PDAF.get_cond_update_gaussians\"] = False\n\n pdaf.PDAF.get_cond_update_gaussians(**params)\n\n assert not solution.used[\"pdaf.PDAF.get_cond_update_gaussians\"], \"The function uses the solution\"\n\n\nclass Test_PDAF_update:\n def test_output(self, test_data):\n \"\"\"Tests if the function is correct by comparing the output\n with the output of the solution\n\n As python always use pass by reference, not by copy, it also checks if the\n input is changed (or not) in the same way as the in solution\n \"\"\"\n for finput in test_data[\"pdaf.PDAF.update\"]:\n params = tuple(finput.values())\n\n self_1, state_pred_gauss_1, z_pred_gauss_1, measurements_1 = deepcopy(\n params)\n\n self_2, state_pred_gauss_2, z_pred_gauss_2, measurements_2 = deepcopy(\n params)\n\n state_upd_gauss_1 = pdaf.PDAF.update(\n self_1, state_pred_gauss_1, z_pred_gauss_1, measurements_1)\n\n state_upd_gauss_2 = solution.pdaf.PDAF.update(\n self_2, state_pred_gauss_2, z_pred_gauss_2, measurements_2)\n\n assert compare(state_upd_gauss_1, state_upd_gauss_2)\n\n assert compare(self_1, self_2)\n assert compare(state_pred_gauss_1, state_pred_gauss_2)\n assert compare(z_pred_gauss_1, z_pred_gauss_2)\n assert compare(measurements_1, measurements_2)\n\n def test_solution_usage(self, test_data):\n \"\"\"Tests if the solution is used in the function\"\"\"\n for finput in test_data[\"pdaf.PDAF.update\"][:1]:\n params = finput\n\n solution.used[\"pdaf.PDAF.update\"] = False\n\n pdaf.PDAF.update(**params)\n\n assert not solution.used[\"pdaf.PDAF.update\"], \"The function uses the solution\"\n\n\nclass Test_PDAF_step_with_info:\n def test_output(self, test_data):\n \"\"\"Tests if the function is correct by comparing the output\n with the output of the solution\n\n As python always use pass by reference, not by copy, it also checks if the\n input is changed (or not) in the same way as the in solution\n \"\"\"\n for finput in test_data[\"pdaf.PDAF.step_with_info\"]:\n params = tuple(finput.values())\n\n self_1, state_upd_prev_gauss_1, measurements_1, Ts_1 = deepcopy(\n params)\n\n self_2, state_upd_prev_gauss_2, measurements_2, Ts_2 = deepcopy(\n params)\n\n state_pred_gauss_1, z_pred_gauss_1, state_upd_gauss_1 = pdaf.PDAF.step_with_info(\n self_1, state_upd_prev_gauss_1, measurements_1, Ts_1)\n\n state_pred_gauss_2, z_pred_gauss_2, state_upd_gauss_2 = solution.pdaf.PDAF.step_with_info(\n self_2, state_upd_prev_gauss_2, measurements_2, Ts_2)\n\n assert compare(state_pred_gauss_1, state_pred_gauss_2)\n assert compare(z_pred_gauss_1, z_pred_gauss_2)\n assert compare(state_upd_gauss_1, state_upd_gauss_2)\n\n assert compare(self_1, self_2)\n assert compare(state_upd_prev_gauss_1, state_upd_prev_gauss_2)\n assert compare(measurements_1, measurements_2)\n assert compare(Ts_1, Ts_2)\n\n def test_solution_usage(self, test_data):\n \"\"\"Tests if the solution is used in the function\"\"\"\n for finput in test_data[\"pdaf.PDAF.step_with_info\"][:1]:\n params = finput\n\n solution.used[\"pdaf.PDAF.step_with_info\"] = False\n\n pdaf.PDAF.step_with_info(**params)\n\n assert not solution.used[\"pdaf.PDAF.step_with_info\"], \"The function uses the solution\"\n\n\nif __name__ == \"__main__\":\n os.environ[\"_PYTEST_RAISE\"] = \"1\"\n pytest.main()\n", "id": "9709901", "language": 
"Python", "matching_score": 4.890771865844727, "max_stars_count": 0, "path": "Assignments/Assignment_05/tests/test_pdaf.py" }, { "content": "import pickle\nfrom numpy.core.numeric import isscalar\nimport pytest\nfrom copy import deepcopy\nimport sys\nfrom pathlib import Path\nimport numpy as np\nimport os\nfrom dataclasses import is_dataclass\n\nassignment_name = \"ekf\"\n\nthis_file = Path(__file__)\ntests_folder = this_file.parent\ntest_data_file = tests_folder.joinpath(\"test_data.pickle\")\nproject_folder = tests_folder.parent\ncode_folder = project_folder.joinpath(assignment_name)\n\nsys.path.insert(0, str(code_folder))\n\nimport solution # nopep8\nimport ekf # nopep8\n\n\n@pytest.fixture\ndef test_data():\n with open(test_data_file, \"rb\") as file:\n test_data = pickle.load(file)\n return test_data\n\n\ndef compare(a, b):\n if (\n isinstance(a, np.ndarray)\n or isinstance(b, np.ndarray)\n or np.isscalar(a)\n or np.isscalar(b)\n ):\n return np.allclose(a, b)\n elif is_dataclass(a) or is_dataclass(b):\n return str(a) == str(b)\n else:\n return a == b\n\n\nclass TestOutput:\n\n def test_output__EKF_predict(self, test_data):\n for finput in test_data[\"ekf.EKF.predict\"]:\n params = tuple(finput.values())\n\n self_1, state_upd_prev_gauss_1, Ts_1 = deepcopy(params)\n\n self_2, state_upd_prev_gauss_2, Ts_2 = deepcopy(params)\n\n state_pred_gauss_1 = ekf.EKF.predict(\n self_1, state_upd_prev_gauss_1, Ts_1)\n\n state_pred_gauss_2 = solution.ekf.EKF.predict(\n self_2, state_upd_prev_gauss_2, Ts_2)\n\n assert compare(state_pred_gauss_1, state_pred_gauss_2)\n\n assert compare(self_1, self_2)\n assert compare(state_upd_prev_gauss_1, state_upd_prev_gauss_2)\n assert compare(Ts_1, Ts_2)\n\n def test_output__EKF_predict_measurement(self, test_data):\n for finput in test_data[\"ekf.EKF.predict_measurement\"]:\n params = tuple(finput.values())\n\n self_1, state_pred_gauss_1 = deepcopy(params)\n\n self_2, state_pred_gauss_2 = deepcopy(params)\n\n measure_pred_gauss_1 = ekf.EKF.predict_measurement(\n self_1, state_pred_gauss_1)\n\n measure_pred_gauss_2 = solution.ekf.EKF.predict_measurement(\n self_2, state_pred_gauss_2)\n\n assert compare(measure_pred_gauss_1, measure_pred_gauss_2)\n\n assert compare(self_1, self_2)\n assert compare(state_pred_gauss_1, state_pred_gauss_2)\n\n def test_output__EKF_update(self, test_data):\n for finput in test_data[\"ekf.EKF.update\"]:\n params = tuple(finput.values())\n\n self_1, z_1, state_pred_gauss_1, measurement_gauss_1 = deepcopy(\n params)\n\n self_2, z_2, state_pred_gauss_2, measurement_gauss_2 = deepcopy(\n params)\n\n state_upd_gauss_1 = ekf.EKF.update(\n self_1, z_1, state_pred_gauss_1, measurement_gauss_1)\n\n state_upd_gauss_2 = solution.ekf.EKF.update(\n self_2, z_2, state_pred_gauss_2, measurement_gauss_2)\n\n assert compare(state_upd_gauss_1, state_upd_gauss_2)\n\n assert compare(self_1, self_2)\n assert compare(z_1, z_2)\n assert compare(state_pred_gauss_1, state_pred_gauss_2)\n assert compare(measurement_gauss_1, measurement_gauss_2)\n\n def test_output__EKF_step_with_info(self, test_data):\n for finput in test_data[\"ekf.EKF.step_with_info\"]:\n params = tuple(finput.values())\n\n self_1, state_upd_prev_gauss_1, z_1, Ts_1 = deepcopy(params)\n\n self_2, state_upd_prev_gauss_2, z_2, Ts_2 = deepcopy(params)\n\n state_pred_gauss_1, measurement_pred_gauss_1, state_upd_gauss_1 = ekf.EKF.step_with_info(\n self_1, state_upd_prev_gauss_1, z_1, Ts_1)\n\n state_pred_gauss_2, measurement_pred_gauss_2, state_upd_gauss_2 = solution.ekf.EKF.step_with_info(\n 
self_2, state_upd_prev_gauss_2, z_2, Ts_2)\n\n assert compare(state_pred_gauss_1, state_pred_gauss_2)\n assert compare(measurement_pred_gauss_1, measurement_pred_gauss_2)\n assert compare(state_upd_gauss_1, state_upd_gauss_2)\n\n assert compare(self_1, self_2)\n assert compare(state_upd_prev_gauss_1, state_upd_prev_gauss_2)\n assert compare(z_1, z_2)\n assert compare(Ts_1, Ts_2)\n\n\nclass TestSolutionUsage:\n\n def test_solution_usage__EKF_predict(self, test_data):\n for finput in test_data[\"ekf.EKF.predict\"][:1]:\n params = finput\n\n solution.used[\"ekf.EKF.predict\"] = False\n\n ekf.EKF.predict(**params)\n\n assert not solution.used[\"ekf.EKF.predict\"], \"The function uses the solution\"\n\n def test_solution_usage__EKF_predict_measurement(self, test_data):\n for finput in test_data[\"ekf.EKF.predict_measurement\"][:1]:\n params = finput\n\n solution.used[\"ekf.EKF.predict_measurement\"] = False\n\n ekf.EKF.predict_measurement(**params)\n\n assert not solution.used[\"ekf.EKF.predict_measurement\"], \"The function uses the solution\"\n\n def test_solution_usage__EKF_update(self, test_data):\n for finput in test_data[\"ekf.EKF.update\"][:1]:\n params = finput\n\n solution.used[\"ekf.EKF.update\"] = False\n\n ekf.EKF.update(**params)\n\n assert not solution.used[\"ekf.EKF.update\"], \"The function uses the solution\"\n\n def test_solution_usage__EKF_step_with_info(self, test_data):\n for finput in test_data[\"ekf.EKF.step_with_info\"][:1]:\n params = finput\n\n solution.used[\"ekf.EKF.step_with_info\"] = False\n\n ekf.EKF.step_with_info(**params)\n\n assert not solution.used[\"ekf.EKF.step_with_info\"], \"The function uses the solution\"\n\n\nif __name__ == \"__main__\":\n os.environ[\"_PYTEST_RAISE\"] = \"1\"\n pytest.main()\n", "id": "12570102", "language": "Python", "matching_score": 0.542907178401947, "max_stars_count": 0, "path": "Assignments/Assignment_03/tests/test_ekf.py" }, { "content": "import numpy as np\nfrom numpy import linalg as nla\nimport matplotlib.pyplot as plt\nfrom utils.multivargaussian import MultiVarGaussian\nimport matplotlib as mpl\n\nfrom pdaf import PDAF\n\nmpl.rcParams['keymap.back'].remove('left')\nmpl.rcParams['keymap.forward'].remove('right')\n\nc_gt = 'green'\nc_measurement = 'blue'\nc_z_true = 'cyan'\nc_estimate = 'red'\nc_cov = 'orange'\nc_gate = 'purple'\n\n\ndef get_ellipse_points(gaussian: MultiVarGaussian, scale=1):\n t = np.linspace(0, 2*np.pi, 91)\n circle_points = np.array([np.cos(t), np.sin(t)]) * scale\n\n mean, cov = gaussian\n lower = nla.cholesky(cov)\n return lower@circle_points + mean[:, None]\n\n\nclass InteractivePlot:\n def __init__(self,\n pdaf: PDAF,\n pos_gt_data,\n pos_upd_gauss_seq,\n measurement_pred_gauss_seq,\n measurement_seq,\n association_gt_seq,\n pos_RMSE, vel_RMSE):\n self.pdaf = pdaf\n self.pos_gt_data = pos_gt_data\n self.pos_upd_gauss_seq = pos_upd_gauss_seq\n self.measurement_pred_gauss_seq = measurement_pred_gauss_seq\n\n self.measurement_seq = measurement_seq\n self.association_gt_seq = association_gt_seq\n\n self.gate_scaling = np.sqrt(self.pdaf.gate_size_sq)\n\n self.cur_idx = 0\n self.cur_state_gauss = self.pos_upd_gauss_seq[self.cur_idx]\n self.cur_z_pred_gauss = self.measurement_pred_gauss_seq[self.cur_idx]\n\n self.max_len_meas = max(len(zs) for zs in measurement_seq)\n self.cur_meas = np.empty((self.max_len_meas, 2))\n self.cur_meas[:] = np.nan\n cur_meas = self.measurement_seq[self.cur_idx]\n self.cur_meas[:len(cur_meas)] = cur_meas\n\n self.pos_history = self.cur_state_gauss.mean[None, :]\n 
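# (added note) get_ellipse_points above maps unit-circle points through the\n # Cholesky factor L of the covariance (cov = L @ L.T), so\n # L @ circle * scale + mean traces the scale-sigma contour of the gaussian;\n # the gate plot below reuses this with scale = sqrt(gate_size_sq).\n 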
self.pos_gt_history = self.pos_gt_data[self.cur_idx][None, :]\n self.paused = True\n self.fig, self.ax = plt.subplots(figsize=(10, 6))\n self.ax.set_title(\"Trajectory visualization\\n\"\n f\"$RMSE_{{pos}}={pos_RMSE:.2f}m$, \"\n f\"$RMSE_{{vel}}={vel_RMSE:.2f}m/s$\\n\"\n \"Controls: space=play, arrows=step, r=reset\")\n\n self.ax.set_xlim((0, 700))\n self.ax.set_ylim((-100, 300))\n self.ax.set_autoscale_on(False)\n self.ax.set_aspect('equal')\n self.step_label = plt.plot([], [], ' ', label=f\"K={0: 3d}\")[0]\n\n self.pos_history_plot = self.ax.plot(\n [], [], c=c_estimate, marker='.', markersize=5,\n label=r\"$\\mathbf{\\hat x}_{0:k}$\", animated=True)[0]\n\n self.state_cov_plot = self.ax.plot(\n *get_ellipse_points(self.cur_state_gauss),\n c=c_cov, label=r\"$\\mathbf{P}_k$\", animated=True)[0]\n\n self.gate_plot = self.ax.plot(\n *get_ellipse_points(self.cur_z_pred_gauss, self.gate_scaling),\n c=c_gate, ls=':', animated=True, label=\"gate\")[0]\n\n self.pos_gt_history_plot = self.ax.plot(\n [], [], c=c_gt, marker='.', markersize=5,\n label=r\"$\\mathbf{x}_{gt 0:k}$\", animated=True)[0]\n\n self.measurement_scatter = self.ax.scatter(\n *self.cur_meas.T, c=c_measurement, s=10,\n label=r\"$\\mathbf{Z}_k$ (cyan is correct)\", animated=True)\n\n self.legend = self.ax.legend(loc='upper left', framealpha=1)\n self.fig.tight_layout()\n\n self.canvas = self.fig.canvas\n self.timer = self.canvas.new_timer(100)\n self.timer.add_callback(self.cb_timer)\n self.timer.start()\n\n self.draw_event_cid = self.canvas.mpl_connect('draw_event',\n self.cb_fullraw)\n self.canvas.mpl_connect('key_press_event', self.cb_key_press)\n\n def cb_fullraw(self, event):\n self.background = self.canvas.copy_from_bbox(self.ax.bbox)\n self.draw(False)\n\n def draw(self, blit=True):\n self.cur_state_gauss = self.pos_upd_gauss_seq[self.cur_idx]\n self.cur_z_pred_gauss = self.measurement_pred_gauss_seq[self.cur_idx]\n\n self.pos_history = np.array([\n state.mean for state in self.pos_upd_gauss_seq[:self.cur_idx+1]]).T\n cur_meas = self.measurement_seq[self.cur_idx]\n\n self.cur_meas[:] = np.nan\n self.cur_meas[:len(cur_meas)] = cur_meas\n\n self.canvas.restore_region(self.background)\n self.step_label.set_label(f\"K={self.cur_idx: 3d}\")\n self.legend = self.ax.legend(loc='upper left', framealpha=1)\n self.state_cov_plot.set_data(*get_ellipse_points(self.cur_state_gauss))\n self.gate_plot.set_data(*get_ellipse_points(self.cur_z_pred_gauss,\n self.gate_scaling))\n self.pos_history_plot.set_data(*self.pos_history)\n self.pos_gt_history_plot.set_data(*self.pos_gt_data[:self.cur_idx].T)\n\n self.measurement_scatter.set_offsets(self.cur_meas)\n colors = [c_measurement]*self.max_len_meas\n if self.association_gt_seq[self.cur_idx]:\n colors[self.association_gt_seq[self.cur_idx]-1] = c_z_true\n self.measurement_scatter.set_color(colors)\n\n self.ax.draw_artist(self.state_cov_plot)\n self.ax.draw_artist(self.gate_plot)\n self.ax.draw_artist(self.pos_history_plot)\n self.ax.draw_artist(self.pos_gt_history_plot)\n self.ax.draw_artist(self.measurement_scatter)\n self.ax.draw_artist(self.legend)\n\n if blit:\n self.canvas.blit(self.ax.bbox)\n\n def cb_timer(self):\n if not self.paused:\n self.cur_idx = min(self.cur_idx+1, len(self.pos_upd_gauss_seq)-1)\n self.draw()\n\n def cb_key_press(self, event):\n\n self.cur_state_gauss\n if event.key == 'right':\n self.cur_idx = (self.cur_idx+1) % len(self.pos_upd_gauss_seq)\n elif event.key == 'left':\n self.cur_idx = (self.cur_idx-1) % len(self.pos_upd_gauss_seq)\n\n if event.key == 'up':\n 
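# (added note) up/down jump ten frames at a time; the modulo below wraps\n # the frame index around either end of the sequence.\n 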
self.cur_idx = (self.cur_idx+10) % len(self.pos_upd_gauss_seq)\n elif event.key == 'down':\n self.cur_idx = (self.cur_idx-10) % len(self.pos_upd_gauss_seq)\n\n elif event.key == ' ':\n self.paused = self.paused ^ True\n elif event.key == 'r':\n self.ax.set_xlim((0, 700))\n self.ax.set_ylim((-100, 300))\n self.ax.set_aspect('equal')\n\n self.cur_idx = 0\n\n self.draw()\n", "id": "2437518", "language": "Python", "matching_score": 4.791461944580078, "max_stars_count": 0, "path": "Assignments/Assignment_05/pda/utils/interactive_plot.py" }, { "content": "import numpy as np\nfrom matplotlib.artist import Artist\nimport matplotlib.pyplot as plt\n\n\nclass InteractiveCovariance:\n\n showverts = True\n epsilon = 5 # max pixel distance to count as a vertex hit\n\n def __init__(self, condition_mean, condition_cov):\n self.condition_mean = condition_mean\n self.condition_cov = condition_cov\n\n self.fig, self.ax = plt.subplots(figsize=(8, 6))\n self.ax.set_title(\n \"Interactive plot to get an intuitive feeling of gaussians\\n\"\n 'Drag blue or orange point to adjust estimate,\\n'\n 'drag black points or scroll to adjust covariance')\n\n self.fig.tight_layout()\n self.ax.set_xlim((-4, 4))\n self.ax.set_ylim((-3, 3))\n self.ax.set_aspect('equal')\n\n self.ellipse_points = np.array([[-1, 0],\n [-0.5, 0],\n [-1, 0.5],\n [1, 0],\n [1.5, 0],\n [1, 0.5]])\n self.circle_points = np.array([[np.cos(x), np.sin(x)]\n for x in np.linspace(0, 2*np.pi, 91)]).T\n\n self.active_vert = None # the active vert\n self.showverts = True\n\n self.artist = []\n self.pred_scatter = self.ax.scatter([0], [0], s=10, c='g',\n animated=True)\n self.ellipse_scatter = self.ax.scatter(*self.ellipse_points.T,\n s=10,\n c=[*'bkk']+['orange']+[*'kk'],\n animated=True)\n self.ellipse_plots = [self.ax.plot(*points, animated=True)[0]\n for points in self.get_ellipse_points()]\n\n self.canvas = self.fig.canvas\n self.draw_event_cid = self.canvas.mpl_connect(\n 'draw_event', self.cb_draw)\n self.canvas.mpl_connect('button_press_event', self.cb_button_press)\n self.canvas.mpl_connect('button_release_event', self.cb_button_release)\n self.canvas.mpl_connect('motion_notify_event', self.cb_mouse_move)\n self.canvas.mpl_connect('scroll_event', self.cb_scroll)\n\n def get_ellipse_points(self):\n mat1 = self.ellipse_points[1:3].T - self.ellipse_points[0][:, None]\n mat2 = self.ellipse_points[4:6].T - self.ellipse_points[3][:, None]\n\n x = self.ellipse_points[0]\n z = self.ellipse_points[3]\n P = mat1@mat1.T\n R = mat2@mat2.T\n H = np.eye(2)\n\n x_hat = self.condition_mean(x, P, z, R, H)\n P_hat = self.condition_cov(P, R, H)\n\n self.pred_scatter.set_offsets(x_hat)\n return (mat1 @ self.circle_points + self.ellipse_points[0][:, None],\n mat2 @ self.circle_points + self.ellipse_points[3][:, None],\n (np.linalg.cholesky(P_hat) @ self.circle_points\n + x_hat[:, None]))\n\n def cb_draw(self, event):\n self.background = self.canvas.copy_from_bbox(self.ax.bbox)\n self.draw(False)\n\n def draw(self, blit=True):\n self.canvas.restore_region(self.background)\n\n for plot, points in zip(self.ellipse_plots,\n self.get_ellipse_points()):\n plot.set_data(*points)\n\n self.ax.draw_artist(plot)\n self.ax.draw_artist(self.ellipse_scatter)\n self.ax.draw_artist(self.pred_scatter)\n\n if blit:\n self.canvas.blit(self.ax.bbox)\n\n def poly_changed(self, poly):\n \"\"\"This method is called whenever the pathpatch object is called.\"\"\"\n # only copy the artist props to the line (except visibility)\n vis = self.line.get_visible()\n Artist.update_from(self.line, 
poly)\n self.line.set_visible(vis) # don't use the poly visibility state\n\n def get_ind_under_point(self, event):\n \"\"\"\n Return the index of the point closest to the event position or *None*\n if no point is within ``self.epsilon`` to the event position.\n \"\"\"\n # display coords\n xyt = self.ax.transData.transform(self.ellipse_points)\n diff = xyt - np.array([[event.x, event.y]])\n closest = np.argmin(np.linalg.norm(diff, axis=1))\n return closest\n\n def cb_button_press(self, event):\n \"\"\"Callback for mouse button presses.\"\"\"\n if not self.showverts or event.inaxes is None or event.button != 1:\n return\n self.active_vert = self.get_ind_under_point(event)\n\n def cb_button_release(self, event):\n \"\"\"Callback for mouse button releases.\"\"\"\n if not self.showverts or event.inaxes is None or event.button != 1:\n return\n self.active_vert = None\n\n def cb_mouse_move(self, event):\n \"\"\"Callback for mouse movements.\"\"\"\n\n if (not self.showverts\n or event.inaxes is None\n or self.active_vert is None):\n return\n\n mouse_pos = np.array([event.xdata, event.ydata])\n\n scatterdata = self.ellipse_points\n group = self.active_vert // 3\n point = self.active_vert % 3\n affected = scatterdata[group*3: group*3+3]\n\n if point == 0:\n affected += (\n mouse_pos - scatterdata[self.active_vert, :])[None, :]\n\n if point == 1:\n affected[1, :] = mouse_pos\n veca = affected[1] - affected[0]\n vecb = affected[2] - affected[0]\n vecb = (np.array([[0, -1], [1, 0]])@veca\n * np.linalg.norm(vecb) / np.linalg.norm(veca))\n affected[2, :] = affected[0, :] + vecb\n\n if point == 2:\n affected[2, :] = mouse_pos\n veca = affected[1] - affected[0]\n vecb = affected[2] - affected[0]\n veca = (np.array([[0, 1], [-1, 0]])@vecb\n * np.linalg.norm(veca) / np.maximum(np.linalg.norm(vecb),\n 0.01))\n affected[1, :] = affected[0, :] + veca\n\n self.ellipse_scatter.set_offsets(scatterdata)\n\n self.draw()\n\n def cb_scroll(self, event):\n gxy = self.ax.transData.transform(self.ellipse_points)\n diff = gxy[::3] - np.array([[event.x, event.y]])\n group = np.argmin(np.linalg.norm(diff, axis=1))\n scatterdata = self.ellipse_points\n affected = scatterdata[group*3: group*3+3]\n\n scaling = 1.1**event.step\n affected[1] = scaling*(affected[1] - affected[0]) + affected[0]\n affected[2] = scaling*(affected[2] - affected[0]) + affected[0]\n self.ellipse_scatter.set_offsets(scatterdata)\n\n self.draw()\n\n\nif __name__ == '__main__':\n\n p = InteractiveCovariance()\n plt.show()\n", "id": "8838330", "language": "Python", "matching_score": 2.08756685256958, "max_stars_count": 0, "path": "Assignments/Assignment_02/assignment2/utils/interactive_covariance.py" }, { "content": "from matplotlib import pyplot as plt\nimport numpy as np\nfrom matplotlib.patches import Ellipse\nfrom typing import Union\nfrom matplotlib.transforms import Affine2D\n\n\ndef plot_cov_ellipse2d(\n ax: plt.Axes,\n mean: np.ndarray = np.zeros(2),\n cov: np.ndarray = np.eye(2),\n n_sigma: float = 1,\n *,\n edgecolor: Union[tuple, str] = \"C0\",\n facecolor: Union[tuple, str] = \"none\",\n **kwargs, # extra Ellipse keyword arguments\n) -> Ellipse:\n \"\"\"Plot a n_sigma covariance ellipse centered in mean into ax.\"\"\"\n ell_trans_mat = np.zeros((3, 3))\n ell_trans_mat[:2, :2] = np.linalg.cholesky(cov)\n ell_trans_mat[:2, 2] = mean\n ell_trans_mat[2, 2] = 1\n\n ell = Ellipse(\n (0.0, 0.0),\n 2.0 * n_sigma,\n 2.0 * n_sigma,\n edgecolor=edgecolor,\n facecolor=facecolor,\n **kwargs,\n )\n trans = Affine2D(ell_trans_mat)\n ell.set_transform(trans + 
ax.transData)\n return ax.add_patch(ell)\n", "id": "2695972", "language": "Python", "matching_score": 1.7895597219467163, "max_stars_count": 2, "path": "Assignments/Assignment_03/ekf/utils/plot_ellipse.py" }, { "content": "import matplotlib.pyplot as plt\nimport numpy as np\nimport scipy.linalg as la\n\n\ndef ellipse(mu, P, s, n):\n thetas = np.linspace(0, 2*np.pi, n)\n ell = mu + s * (la.cholesky(P).T @ np.array([np.cos(thetas), np.sin(thetas)])).T\n return ell", "id": "1944385", "language": "Python", "matching_score": 0, "max_stars_count": 0, "path": "Graded/G3/slam/plotting.py" }, { "content": "#!/usr/bin/env python\n\nfrom cmath import pi\n\nN = 513\nM = 2**10\n\nFs = 1e6\nTs = 1.0 / Fs\n\nA = 1\nphi = pi / 8.0\n\nf0 = 1e5\nw0 = 2 * pi * f0 \n\nP = N * (N - 1) / 2.0\nQ = N * (N - 1) * (2 * N - 1) / 6.0\n\nn0 = int(-P / N)\n\nnum_opt = 100", "id": "7519336", "language": "Python", "matching_score": 0.13172003626823425, "max_stars_count": 1, "path": "scripts/cfg.py" }, { "content": "# %% imports\nimport numpy as np\nimport matplotlib.pyplot as plt\nfrom scipy.stats import multivariate_normal\nfrom mixturereduction import mixture_moments\n\nimport solution\n# %% setup and show initial\n\n\ndef get_task_parameters():\n \"\"\"Get mixture parameters\n\n Returns:\n w: shape (3,) normalized, 3 scalars\n mus: shape (3, 1), 3 1d vectors\n sigmas: shape (3, 1, 1) 3 1x1 matrices\n \"\"\"\n # TODO\n mus = np.array([0, 2, 4.5]).reshape(3, 1)\n sigmas = np.array([1, 1, 1]).reshape(3, 1, 1)\n w = np.array([1, 1, 1])\n w = w.ravel() / np.sum(w)\n return w, mus, sigmas\n\n\ndef main():\n w, mus, sigmas = get_task_parameters()\n\n tot_mean, tot_sigma2 = (\n elem.squeeze() for elem in mixture_moments(w, mus, sigmas ** 2)\n )\n plot_n_sigmas = 3\n x = tot_mean + plot_n_sigmas * \\\n np.sqrt(tot_sigma2) * np.arange(-1, 1 + 1e-10, 5e-2)\n\n fig1, ax1 = plt.subplots(num=1, clear=True)\n pdf_comp_vals = np.array(\n [\n multivariate_normal.pdf(\n x, mean=mus[i].item(), cov=sigmas[i].item() ** 2)\n for i in range(len(mus))\n ]\n )\n pdf_mix_vals = np.average(pdf_comp_vals, axis=0, weights=w)\n\n for i in range(len(mus)):\n ax1.plot(x, pdf_comp_vals[i], label=f\"comp {i}\")\n ax1.legend()\n\n# %% merge and show combinations\n fi2, ax2 = plt.subplots(num=2, clear=True)\n ax2.plot(x, pdf_mix_vals, label=\"original\")\n k = 0\n wcomb = np.zeros_like(w)\n mucomb = np.zeros_like(w)\n\n sigma2comb = np.zeros_like(w)\n pdf_mix_comb_vals = np.zeros_like(pdf_comp_vals)\n for i in range(2): # index of first to merge\n for j in range(i + 1, 3): # index of second to merge\n # the index of the non merging (only works for 3 components)\n k_other = 2 - k\n\n # merge components\n wcomb[k] = w[i] + w[j]\n mucomb[k], sigma2comb[k] = mixture_moments(\n w[[i, j]] / wcomb[k], mus[[i, j]], sigmas[[i, j]] ** 2\n )\n\n # plot\n pdf_mix_comb_vals[k] = (\n wcomb[k] * multivariate_normal.pdf(x, mucomb[k], sigma2comb[k])\n + w[k_other] * pdf_comp_vals[k_other]\n )\n ax2.plot(x, pdf_mix_comb_vals[k], label=f\"combining {i} {j}\")\n k += 1\n\n ax2.legend()\n\n print(mucomb)\n print(sigma2comb)\n sigmacomb = np.sqrt(sigma2comb)\n plt.show()\n\n\n# %% run\n\nif __name__ == \"__main__\":\n main()\n", "id": "564114", "language": "Python", "matching_score": 1.6600494384765625, "max_stars_count": 0, "path": "Assignments/Assignment_04/task_1/mixturereduction/perform_mixture_reduction.py" }, { "content": "# %% imports\nfrom typing import Callable, Final\nimport solution\nimport numpy as np\nimport matplotlib.pyplot as plt\nfrom numpy.core.numeric 
import zeros_like\nimport scipy.stats\n\n# sets the seed for random numbers to be predictable\nDEBUG: Final[bool] = False\n\n\ndef bind_variables(func, **kwargs):\n def func_bind(*args, **kwargs_inn):\n kwargs_upd = kwargs | kwargs_inn\n return func(*args, **kwargs_upd)\n return func_bind\n\n# %% trajectory generation\n\n\ndef get_scenario_parameters():\n # scenario parameters\n x0 = np.array([np.pi / 2, -np.pi / 100])\n Ts = 0.05\n K = round(20 / Ts)\n return x0, Ts, K\n\n\ndef get_dynamic_parameters():\n # constants\n g = 9.81\n l = 1\n a = g / l\n d = 0.5 # dampening\n S = 5\n return l, a, d, S\n\n\ndef get_measurement_parameters():\n Ld = 4\n Ll = 0.5\n r = 0.25\n return Ld, Ll, r\n\n# disturbance PDF\n\n\ndef process_noise_sampler(rng, S): return rng.uniform(-S, S)\n\n# dynamic function\n\n\ndef modulo2pi(x, idx=0):\n xmod = x\n xmod[idx] = (xmod[idx] + np.pi) % (2 * np.pi) - np.pi # wrap to [-pi, pi]\n return xmod\n\n\ndef pendulum_dynamics(x, a, d=0): # continuous dynamics\n xdot = np.array([x[1], -d * x[1] - a * np.sin(x[0])])\n return xdot\n\n\ndef pendulum_dynamics_discrete(xk, vk, Ts, a, d=0):\n xkp1 = modulo2pi(xk + Ts * pendulum_dynamics(xk, a, d)) # euler discretize\n xkp1[1] += Ts * vk # zero order hold noise\n return xkp1\n\n\n# sample a trajectory\ndef sample_trajectory(x0, Ts, K, process_noise_sampler, pendulum_dynamics_discrete):\n x = np.zeros((K, 2))\n x[0] = x0\n for k in range(K - 1):\n v = process_noise_sampler()\n x[k + 1] = pendulum_dynamics_discrete(x[k], v, Ts)\n return x\n\n\n# vizualize\ndef plot_trajectory(x):\n fig, axs = plt.subplots(2, sharex=True, num=1, clear=True)\n axs[0].plot(x[:, 0])\n axs[0].set_ylabel(r\"$\\theta$\")\n axs[0].set_ylim((-np.pi, np.pi))\n\n axs[1].plot(x[:, 1])\n axs[1].set_xlabel(\"Time step\")\n axs[1].set_ylabel(r\"$\\dot \\theta$\")\n return fig, axs\n\n\n# %% measurement generation\n\n# noise pdf\n\n\ndef measurement_noise_sampler(x, r, rng): return rng.triangular(-r, 0, r)\n\n# measurement function\n\n# makes a decorator that binds keyword arguments to a function\n\n\ndef measurement_function(x, Ld, l, Ll):\n lcth = l * np.cos(x[0])\n lsth = l * np.sin(x[0])\n z = np.sqrt((Ld - lcth) ** 2 + (lsth - Ll) ** 2) # 2norm\n return z\n\n\ndef sample_measurements(x, h, measurement_noise_sampler):\n Z = np.zeros(len(x))\n for k, xk in enumerate(x):\n wk = measurement_noise_sampler(xk)\n Z[k] = h(xk) + wk\n return Z\n\n\n# vizualize\ndef plot_measurements(Z, fignum):\n fig, ax = plt.subplots(num=fignum, clear=True)\n ax.plot(Z)\n ax.set_xlabel(\"Time step\")\n ax.set_ylabel(\"z\")\n return fig, ax\n\n\n# %% Task: Estimate using a particle filter\n\ndef init_PF(rng):\n \"\"\"initialize particles.\n\n Args:\n rng: a random number generator\n\n Returns:\n N (int): number of particles\n px (ndarray): particle states shape=(N, dim(state))\n weights (ndarray): normalized weights. 
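They sum to one, forming a valid probability mass over the particles;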
shape = (N,)\n \"\"\"\n # number of particles to use\n N = 150\n\n # Do not know where the pendulum starts, but know the physical\n # limitations of the system!\n px = np.array([rng.uniform(-np.pi, np.pi, size=N), # Hint: use rng.somesampler\n rng.uniform(-np.pi, np.pi, size=N)]).T\n\n # initial weights\n w = rng.uniform(size=N)\n w /= sum(w)\n\n #N, px, w = solution.SIR_PF_pendulum.init_PF(rng)\n assert np.isclose(sum(w), 1), \"w must be normalized\"\n assert len(px) == N and len(w) == N, \"number of particles must be N\"\n\n return N, px, w\n\n\ndef weight_update(zk: float, px: np.ndarray, w: np.ndarray, h: Callable, meas_noise_dist: scipy.stats.distributions.rv_frozen):\n \"\"\"Update the weights.\n\n Args:\n zk: measurement\n px: particles, shape = (N, dim(state))\n w: weights in, shape = (N, )\n h: measurement function that takes the state as its argument.\n meas_noise_dist: the measurement noise distribution (a frozen scipy distribution)\n\n Returns:\n updated_weights: shape = (N,) must be normalized\n \"\"\"\n w_upd = np.empty_like(w)\n\n for n, pxn in enumerate(px):\n w_upd[n] = meas_noise_dist.pdf(zk - h(pxn))\n w_upd /= sum(w_upd)\n\n #w_upd = solution.SIR_PF_pendulum.weight_update(\n # zk, px, w, h, meas_noise_dist)\n\n return w_upd\n\n\ndef resample(px: np.ndarray, w: np.ndarray, rng: np.random.Generator) -> np.ndarray:\n \"\"\"Resample particles\n\n Args:\n px: shape = (N, dim(state)), the particles\n w: shape = (N,), particle weights\n rng: random number generator.\n Must be used for the runs to be reproducible.\n\n Returns:\n pxn: the resampled particles\n \"\"\"\n N = len(w)\n pxn = np.zeros_like(px)\n cumweights = np.cumsum(w)\n\n i = 0\n for j in range(N):\n # stratified resampling: u_j is uniform on [j/N, (j+1)/N)\n # (was (j-1)/N + u/N, an off-by-one for 0-indexed j)\n u_j = (j + rng.uniform()) / N\n while u_j > cumweights[i]:\n i += 1\n pxn[j] = px[i]\n\n # use the supplied rng (not np.random) so runs stay reproducible\n pxn = rng.permutation(pxn)\n #pxn = solution.SIR_PF_pendulum.resample(px, w, rng)\n\n return pxn\n\n\ndef particle_prediction(px: np.ndarray, Ts: float, f: Callable, proc_noise_dist: scipy.stats.distributions.rv_frozen) -> np.ndarray:\n \"\"\"Predict particles some time units ahead sampling the process noise.\n\n Args:\n px: shape = (N, 
dim(state)), the particles\n Ts: Time step size\n f: process function taking the state, noise realization and time step size as arguments\n dyn_dist: a distribution that can create process noise realizations\n\n Returns:\n px_pred: the predicted particles\n \"\"\"\n px_pred = zeros_like(px)\n for n, pxn in enumerate(px):\n vkn = proc_noise_dist.rvs()\n px_pred[n] = f(pxn, vkn, Ts) \n\n #px_pred = solution.SIR_PF_pendulum.particle_prediction(\n # px, Ts, f, proc_noise_dist)\n\n return px_pred\n\n\ndef plot_step(x, pxn, l, fig, plotpause, sch_particles, sch_true):\n sch_particles.set_offsets(\n np.c_[l * np.sin(pxn[:, 0]), -l * np.cos(pxn[:, 0])])\n sch_true.set_offsets(np.c_[l * np.sin(x[0]), -l * np.cos(x[0])])\n\n fig.canvas.draw_idle()\n plt.show(block=False)\n plt.waitforbuttonpress(plotpause)\n\n\ndef plot_setup_PF(l, fignum):\n plt.ion()\n fig, ax = plt.subplots(num=fignum, clear=True)\n\n sch_particles = ax.scatter(\n np.nan, np.nan, marker=\".\", c=\"b\", label=r\"$\\hat \\theta^n$\")\n sch_true = ax.scatter(np.nan, np.nan, c=\"r\", marker=\"x\", label=r\"$\\theta$\")\n ax.set_ylim((-1.5 * l, 1.5 * l))\n ax.set_xlim((-1.5 * l, 1.5 * l))\n ax.set_xlabel(\"x\")\n ax.set_ylabel(\"y\")\n th = ax.set_title(f\"theta mapped to x-y\")\n ax.legend()\n return fig, ax, sch_particles, sch_true\n\n\ndef run_SIR_PF(rng, Ts, l, f, x, h, Z, px, w, PF_dynamic_distribution, PF_measurement_distribution, plotpause, fignum=4):\n fig, ax, sch_particles, sch_true = plot_setup_PF(l, fignum=fignum)\n\n for k, zk in enumerate(Z):\n print(f\"{k = }\")\n\n w = weight_update(zk, px, w, h, PF_measurement_distribution)\n\n pxn = resample(px, w, rng)\n\n px = particle_prediction(\n pxn, Ts, f, PF_dynamic_distribution)\n\n # plot\n plot_step(x[k], pxn, l, fig, plotpause, sch_particles, sch_true)\n\n plt.waitforbuttonpress()\n # %%\n\n\ndef main():\n seed = 0 if DEBUG else None\n rng = np.random.default_rng(seed=seed)\n\n x0, Ts, K = get_scenario_parameters()\n l, a, d, S = get_dynamic_parameters()\n Ld, Ll, r = get_measurement_parameters()\n\n N, px, w = init_PF(rng)\n\n f = bind_variables(\n pendulum_dynamics_discrete, a=a, d=d)\n proc_sampler = bind_variables(process_noise_sampler, S=S, rng=rng)\n\n h = bind_variables(measurement_function, Ld=Ld, l=l, Ll=Ll)\n meas_sampler = bind_variables(measurement_noise_sampler, r=r, rng=rng)\n\n x = sample_trajectory(x0, Ts, K, proc_sampler, f)\n #fig1, axs1 = plot_trajectory(x)\n\n Z = sample_measurements(x, h, meas_sampler)\n #fig2, ax2 = plot_measurements(Z, 2)\n\n # PF transition PDF: SIR proposal, or something you would like to test\n PF_dynamic_distribution = scipy.stats.uniform(loc=-S, scale=2 * S)\n PF_measurement_distribution = scipy.stats.triang(\n c=0.5, loc=-r, scale=2 * r)\n\n # initialize a figure for particle animation.\n plotpause = 0.01\n\n run_SIR_PF(rng, Ts, l, f, x, h, Z, px, w, PF_dynamic_distribution,\n PF_measurement_distribution, plotpause, fignum=4)\n\n\nif __name__ == \"__main__\":\n main()\n", "id": "2332974", "language": "Python", "matching_score": 2.55415678024292, "max_stars_count": 0, "path": "Assignments/Assignment_04/task_5/particlefilter/SIR_PF_pendulum.py" }, { "content": "import matplotlib as mpl\nimport matplotlib.pyplot as plt\nfrom numpy.core.shape_base import block\nimport scipy\nimport numpy as np\n\nplt.close(\"all\")\nmpl.rcParams['axes.grid'] = True\n\nc_gt = 'g'\nc_measurement = 'purple'\nc_estimate = 'red'\n\n\ndef plot_trajectory_with_measurements(x_gt_series, z_series):\n fig, ax = plt.subplots()\n ax.scatter(*z_series.T, 
c=c_measurement, s=10, marker=\".\", label=r\"$z$\")\n ax.plot(*x_gt_series.T[:2], c=c_gt, alpha=0.9, label=r\"$x_{gt}$\")\n ax.set_title(\"Data\")\n ax.legend()\n # show turnrate\n fig2, ax2 = plt.subplots(num=2, clear=True)\n ax2.plot(x_gt_series.T[4], c=c_gt)\n ax2.set_xlabel(\"time step\")\n ax2.set_ylabel(\"turn rate\")\n\n\ndef plot_ekf_trajectory(x_gt_series, x_hat_series,\n RMSE_pred, RMSE_upd, sigma_a, sigma_z):\n fig, ax = plt.subplots(num=3, clear=True)\n ax.plot(*x_hat_series.T[:2], c=c_estimate, label=r\"$\\hat x$\")\n ax.plot(*x_gt_series.T[:2], c=c_gt, alpha=0.9, label=r\"$x_{gt}$\")\n RMSEs_str = \", \".join(f\"{v:.2f}\" for v in (*RMSE_pred, *RMSE_upd))\n ax.set_title(\n f\"Output from EKF with \"\n rf\"$\\sigma_a = {sigma_a}$, $\\sigma_z= {sigma_z}$\"\n \"\\n\"\n f\"RMSE(p_p, p_v, u_p, u_v) = ({RMSEs_str})\"\n )\n ax.legend()\n\n\ndef plot_NIS_NEES_data(sigma_a_low, sigma_a_high, sigma_a_list,\n sigma_z_low, sigma_z_high, sigma_z_list,\n ANIS_data, CINIS,\n ANEES_pred_data, ANEES_upd_data, CINEES):\n # %% interpolate ANEES/NIS\n ANIS_spline = scipy.interpolate.RectBivariateSpline(\n sigma_a_list, sigma_z_list, ANIS_data)\n ANEES_pred_spline = scipy.interpolate.RectBivariateSpline(\n sigma_a_list, sigma_z_list, ANEES_pred_data\n )\n ANEES_upd_spline = scipy.interpolate.RectBivariateSpline(\n sigma_a_list, sigma_z_list, ANEES_upd_data\n )\n\n n_eval = 100\n mesh_a, mesh_z = np.meshgrid(\n np.linspace(sigma_a_low, sigma_a_high, n_eval),\n np.linspace(sigma_z_low, sigma_z_high, n_eval))\n ANIS_eval = ANIS_spline(mesh_a.ravel(), mesh_z.ravel(), grid=False\n ).reshape(mesh_a.shape)\n ANEES_pred_eval = ANEES_pred_spline(mesh_a.ravel(), mesh_z.ravel(),\n grid=False).reshape(mesh_a.shape)\n ANEES_upd_eval = ANEES_upd_spline(mesh_a.ravel(), mesh_z.ravel(),\n grid=False).reshape(mesh_a.shape)\n\n # %% find confidence regions for NIS and plot\n # %% confidence plots\n\n # plot\n fig4 = plt.figure(4, clear=True)\n ax4 = fig4.add_subplot(1, 1, 1, projection=\"3d\")\n z_max = 10\n ax4.plot_surface(mesh_a, mesh_z, np.clip(ANIS_eval, 0, z_max), alpha=0.9)\n ax4.contour(\n mesh_a, mesh_z, ANIS_eval, [*CINIS], offset=0\n ) # , extend3d=True, colors='yellow')\n ax4.set_xlabel(r\"$\\sigma_a$\")\n ax4.set_ylabel(r\"$\\sigma_z$\")\n ax4.set_zlabel(\"ANIS\")\n ax4.set_zlim(0, z_max)\n ax4.view_init(30, 20)\n\n # %% find confidence regions for NEES and plot\n\n # plot\n fig5 = plt.figure(5, clear=True)\n z_max = 50\n ax5s = [\n fig5.add_subplot(1, 2, 1, projection=\"3d\"),\n fig5.add_subplot(1, 2, 2, projection=\"3d\"),\n ]\n ax5s[0].plot_surface(mesh_a, mesh_z, np.clip(ANEES_pred_eval, 0, z_max),\n alpha=0.9)\n ax5s[0].contour(\n mesh_a, mesh_z, ANEES_pred_eval, [*CINEES], offset=0,\n )\n ax5s[0].set_xlabel(r\"$\\sigma_a$\")\n ax5s[0].set_ylabel(r\"$\\sigma_z$\")\n ax5s[0].set_zlabel(\"ANEES_pred\")\n\n ax5s[0].set_zlim(0, z_max)\n ax5s[0].view_init(40, 30)\n\n ax5s[1].plot_surface(mesh_a, mesh_z, np.clip(ANEES_upd_eval, 0, z_max),\n alpha=0.9)\n ax5s[1].contour(\n mesh_a, mesh_z, ANEES_upd_eval, [*CINEES], offset=0,\n )\n ax5s[1].set_xlabel(r\"$\\sigma_a$\")\n ax5s[1].set_ylabel(r\"$\\sigma_z$\")\n ax5s[1].set_zlabel(\"ANEES_upd\")\n ax5s[1].set_zlim(0, z_max)\n ax5s[1].view_init(40, 30)\n\n # %% see the intersection of NIS and NEESes\n fig6, ax6 = plt.subplots(num=6, clear=True)\n cont_upd = ax6.contour(mesh_a, mesh_z, ANEES_upd_eval,\n CINEES, colors=[\"C0\", \"C1\"])\n cont_pred = ax6.contour(mesh_a, mesh_z, ANEES_pred_eval,\n CINEES, colors=[\"C2\", \"C3\"])\n cont_nis = ax6.contour(mesh_a, 
mesh_z, ANIS_eval,\n CINIS, colors=[\"C4\", \"C5\"])\n\n for cs, l in zip([cont_upd, cont_pred, cont_nis],\n [\"NEESupd\", \"NEESpred\", \"NIS\"]):\n for c, hl in zip(cs.collections, [\"low\", \"high\"]):\n c.set_label(l + \"_\" + hl)\n ax6.legend()\n ax6.set_xlabel(r\"$\\sigma_a$\")\n ax6.set_ylabel(r\"$\\sigma_z$\")\n", "id": "4078586", "language": "Python", "matching_score": 3.2660958766937256, "max_stars_count": 2, "path": "Assignments/Assignment_03/ekf/utils/plotting.py" }, { "content": "# %% Imports\nfrom typing import Collection\nfrom scipy import stats\nfrom matplotlib import pyplot as plt\nimport numpy as np\nimport numpy.linalg as nla\nfrom numpy import ndarray\nfrom tqdm import tqdm\n\nfrom utils.dataloader import load_data\nfrom utils.plotting import (plot_trajectory_with_measurements,\n plot_ekf_trajectory,\n plot_NIS_NEES_data)\n\nfrom ekf import EKF\nfrom utils.gaussparams import MultiVarGaussian\nfrom measurementmodels import CartesianPosition2D\nfrom dynamicmodels import WhitenoiseAcceleration2D\nfrom analysis import get_ANIS, get_ANEES\n\n\ndef run_ekf(sigma_a: float, sigma_z: float,\n z_data: Collection[ndarray], Ts: float, N_data: int):\n \"\"\"This function will estimate the initial state and covariance from\n the measurements and iterate the kalman filter through the data.\n\n Args:\n sigma_a (float): std of acceleration\n sigma_z (float): std of measurements\n Ts (float): the time step between each measurement\n N_data (int): the number of measurements\n Returns:\n state_pred_gauss_data (list[MultiVarGaussian]):\n list of all state predictions\n measurement_gauss_data (list[MultiVarGaussian]):\n list of all measurement pdfs\n state_upd_gauss_data (list[MultiVarGaussian]):\n list of all updated states\n \"\"\"\n # create the model and estimator object\n dynmod = WhitenoiseAcceleration2D(sigma_a)\n measmod = CartesianPosition2D(sigma_z)\n ekf_filter = EKF(dynmod, measmod)\n\n # Optimal init for model\n mean = np.array([*z_data[1], *(z_data[1] - z_data[0]) / Ts])\n cov11 = sigma_z ** 2 * np.eye(2)\n cov12 = sigma_z ** 2 * np.eye(2) / Ts\n cov22 = (2 * sigma_z ** 2 / Ts ** 2 + sigma_a ** 2 * Ts / 3) * np.eye(2)\n cov = np.block([[cov11, cov12], [cov12.T, cov22]])\n init_ekfstate = MultiVarGaussian(mean, cov)\n\n # estimate\n x_upd_gauss = init_ekfstate\n x_pred_gauss_data = []\n z_pred_gauss_data = []\n x_upd_gauss_data = []\n NIS_data = []\n for z_k in z_data[2:]:\n\n (x_pred_gauss,\n z_pred_gauss,\n x_upd_gauss) = ekf_filter.step_with_info(x_upd_gauss, z_k, Ts)\n\n x_pred_gauss_data.append(x_pred_gauss)\n z_pred_gauss_data.append(z_pred_gauss)\n x_upd_gauss_data.append(x_upd_gauss)\n\n return x_pred_gauss_data, z_pred_gauss_data, x_upd_gauss_data\n\n\ndef show_ekf_output(sigma_a: float, sigma_z: float,\n x_gt_data: Collection[ndarray],\n z_data: Collection[ndarray], Ts: float, N_data: int):\n \"\"\"Run the calman filter, find RMSE and show the trajectory\"\"\"\n\n (x_pred_gauss,\n z_pred_gauss,\n x_upd_gauss) = run_ekf(sigma_a, sigma_z, z_data, Ts, N_data)\n\n x_hat_data = np.array([upd.mean[:2] for upd in x_upd_gauss])\n\n diff_pred_data = np.array([pred.mean - x_gt[:4] for pred, x_gt\n in zip(x_pred_gauss, x_gt_data)])\n\n diff_upd_data = np.array([upd.mean - x_gt[:4]for upd, x_gt\n in zip(x_upd_gauss, x_gt_data)])\n\n RMSE_pred = np.sqrt(\n np.mean(np.sum(diff_pred_data.reshape(-1, 2, 2)**2, axis=-1), axis=0))\n RMSE_upd = np.sqrt(\n np.mean(np.sum(diff_upd_data.reshape(-1, 2, 2)**2, axis=-1), axis=0))\n\n plot_ekf_trajectory(x_gt_data, x_hat_data,\n RMSE_pred, 
RMSE_upd, sigma_a, sigma_z)\n# %% Task 5 b and c\n\n\ndef try_multiple_alphas(x_gt_data: Collection[ndarray],\n z_data: Collection[ndarray],\n Ts: float, N_data: int):\n \"\"\"Run the Kalman filter with multiple different sigma values,\n the result from each run is used to create a mesh plot of the NIS and NEES\n values for the different configurations\"\"\"\n # % parameters for the parameter grid\n n_vals = 20\n sigma_a_low = 0.5\n sigma_a_high = 10\n sigma_z_low = 0.3\n sigma_z_high = 12\n\n # % set the grid on logscale(not mandatory)\n sigma_a_list = np.geomspace(sigma_a_low, sigma_a_high, n_vals)\n sigma_z_list = np.geomspace(sigma_z_low, sigma_z_high, n_vals)\n\n ANIS_data = np.empty((n_vals, n_vals))\n ANEES_pred_data = np.empty((n_vals, n_vals))\n ANEES_upd_data = np.empty((n_vals, n_vals))\n\n # tqdm is used to show progress bars\n for i, sigma_a in tqdm(enumerate(sigma_a_list), \"sigma_a\", n_vals, None):\n for j, sigma_z in tqdm(enumerate(sigma_z_list),\n \"sigma_z\", n_vals, None):\n\n (x_pred_gauss_data,\n z_pred_gauss_data,\n x_upd_gauss_data) = run_ekf(sigma_a, sigma_z, z_data,\n Ts, N_data)\n\n # dont use the first 2 values of x_gt_data or a_data\n # as they are used for initialzation\n\n ANIS_data[i, j] = get_ANIS(z_pred_gauss_data, z_data[2:])\n\n ANEES_pred_data[i, j] = get_ANEES(x_pred_gauss_data,\n x_gt_data[2:, :4])\n\n ANEES_upd_data[i, j] = get_ANEES(x_upd_gauss_data,\n x_gt_data[2:, :4])\n\n confprob = 0.9\n CINIS = np.array(stats.chi2.interval(confprob, 2 * N_data)) / N_data\n CINEES = np.array(stats.chi2.interval(confprob, 4 * N_data)) / N_data\n plot_NIS_NEES_data(sigma_a_low, sigma_a_high, sigma_a_list,\n sigma_z_low, sigma_z_high, sigma_z_list,\n ANIS_data, CINIS,\n ANEES_pred_data, ANEES_upd_data, CINEES)\n\n\ndef main():\n usePregen = True # choose between own generated data and pregenerated\n x_gt_data, z_data, Ts, N_data = load_data(usePregen)\n #plot_trajectory_with_measurements(x_gt_data, z_data)\n\n # %% a: tune by hand and comment\n # Large sigma_z -> x_hat smooth, but severely over/undershoots\n # Small sigma_z -> x_hat very \"spiky\", jumping back and forth between gt\n # sigma_z = 3.2 seemed to be a good middle ground\n # Large sigma_a -> similar response as small sigma_z\n # Small sigma_a -> similar response as large sigma_z\n # sigma_a = 2.5 seemed to be a good middle ground\n\n # set parameters\n sigma_a = 2.5\n sigma_z = 3.2\n\n show_ekf_output(sigma_a, sigma_z, x_gt_data, z_data, Ts, N_data)\n\n #if input(\"Try multiple alpha combos? (y/n): \") == 'y':\n # try_multiple_alphas(x_gt_data, z_data, Ts, N_data)\n\n plt.tight_layout()\n plt.show()\n\n\nif __name__ == '__main__':\n main()\n", "id": "9349593", "language": "Python", "matching_score": 3.628772020339966, "max_stars_count": 0, "path": "Assignments/Assignment_03/ekf/run.py" }, { "content": "import numpy as np\nfrom numpy import ndarray\nimport scipy.linalg as la\nimport solution\nfrom utils.gaussparams import MultiVarGaussian\nfrom config import DEBUG\nfrom typing import Sequence\n\n\ndef get_NIS(z_pred_gauss: MultiVarGaussian, z: ndarray):\n \"\"\"Calculate the normalized innovation squared (NIS), this can be seen as \n the normalized measurement prediction error squared. \n See (4.66 in the book). 
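In symbols, for a predicted measurement gaussian N(z_hat, S) and a measurement z, NIS = (z - z_hat)^T S^{-1} (z - z_hat).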
\n Tip: use the mahalanobis_distance method of z_pred_gauss, (3.2) in the book\n\n Args:\n z_pred_gauss (MultiVarGaussian): predicted measurement gaussian\n z (ndarray): measurement\n\n Returns:\n NIS (float): normalized innovation squared\n \"\"\"\n\n # mahalanobis_distance is assumed to return the *squared* distance,\n # as the docstring's reference to (3.2) suggests\n NIS = z_pred_gauss.mahalanobis_distance(z)\n\n # NIS = solution.analysis.get_NIS(z_pred_gauss, z)\n\n return NIS\n\n\ndef get_NEES(x_gauss: MultiVarGaussian, x_gt: ndarray):\n \"\"\"Calculate the normalized estimation error squared (NEES)\n See (4.65 in the book).\n Tip: use the mahalanobis_distance method of x_gauss, (3.2) in the book\n\n Args:\n x_gauss (MultiVarGaussian): state estimate gaussian\n x_gt (ndarray): true state\n\n Returns:\n NEES (float): normalized estimation error squared\n \"\"\"\n\n NEES = x_gauss.mahalanobis_distance(x_gt)\n\n # NEES = solution.analysis.get_NEES(x_gauss, x_gt)\n\n return NEES\n\n\ndef get_ANIS(z_pred_gauss_data: Sequence[MultiVarGaussian],\n z_data: Sequence[ndarray]):\n \"\"\"Calculate the average normalized innovation squared (ANIS)\n Tip: use get_NIS\n\n Args:\n z_pred_gauss_data (Sequence[MultiVarGaussian]): Sequence (List) of\n predicted measurement gaussians\n z_data (Sequence[ndarray]): Sequence (List) of true measurements\n\n Returns:\n ANIS (float): average normalized innovation squared\n \"\"\"\n\n ANIS = np.mean([get_NIS(z_pred_gauss, z)\n for z_pred_gauss, z in zip(z_pred_gauss_data, z_data)])\n\n # ANIS = solution.analysis.get_ANIS(z_pred_gauss_data, z_data)\n\n return ANIS\n\n\ndef get_ANEES(x_upd_gauss_data: Sequence[MultiVarGaussian],\n x_gt_data: Sequence[ndarray]):\n \"\"\"Calculate the average normalized estimation error squared (ANEES)\n Tip: use get_NEES\n\n Args:\n x_upd_gauss_data (Sequence[MultiVarGaussian]): Sequence (List) of\n state estimate gaussians\n x_gt_data (Sequence[ndarray]): Sequence (List) of true states\n\n Returns:\n ANEES (float): average normalized estimation error squared\n \"\"\"\n\n ANEES = np.mean([get_NEES(x_gauss, x_gt)\n for x_gauss, x_gt in zip(x_upd_gauss_data, x_gt_data)])\n\n # ANEES = solution.analysis.get_ANEES(x_upd_gauss_data, x_gt_data)\n\n return ANEES\n", "id": "1852568", "language": "Python", "matching_score": 3.0328691005706787, "max_stars_count": 0, "path": "Assignments/Assignment_03/ekf/analysis.py" }, { "content": "import pickle\nfrom numpy.core.numeric import isscalar\nimport pytest\nfrom copy import deepcopy\nimport sys\nfrom pathlib import Path\nimport numpy as np\nimport os\nfrom dataclasses import is_dataclass\n\nassignment_name = \"ekf\"\n\nthis_file = Path(__file__)\ntests_folder = this_file.parent\ntest_data_file = tests_folder.joinpath(\"test_data.pickle\")\nproject_folder = tests_folder.parent\ncode_folder = project_folder.joinpath(assignment_name)\n\nsys.path.insert(0, str(code_folder))\n\nimport solution # nopep8\nimport analysis # nopep8\n\n\n@pytest.fixture\ndef test_data():\n with open(test_data_file, \"rb\") as file:\n test_data = pickle.load(file)\n return test_data\n\n\ndef compare(a, b):\n if (\n isinstance(a, np.ndarray)\n or isinstance(b, np.ndarray)\n or np.isscalar(a)\n or np.isscalar(b)\n ):\n return np.allclose(a, b)\n elif is_dataclass(a) or is_dataclass(b):\n return str(a) == str(b)\n else:\n return a == b\n\n\nclass TestOutput:\n\n def test_output__get_NIS(self, test_data):\n for finput in test_data[\"analysis.get_NIS\"]:\n params = tuple(finput.values())\n\n z_pred_gauss_1, z_1 = deepcopy(params)\n\n z_pred_gauss_2, z_2 = deepcopy(params)\n\n NIS_1 = analysis.get_NIS(z_pred_gauss_1, z_1)\n\n NIS_2 = solution.analysis.get_NIS(z_pred_gauss_2, z_2)\n\n assert compare(NIS_1, NIS_2)\n\n assert compare(z_pred_gauss_1, z_pred_gauss_2)\n assert compare(z_1, 
z_2)\n\n def test_output__get_NEES(self, test_data):\n for finput in test_data[\"analysis.get_NEES\"]:\n params = tuple(finput.values())\n\n x_gauss_1, x_gt_1 = deepcopy(params)\n\n x_gauss_2, x_gt_2 = deepcopy(params)\n\n NEES_1 = analysis.get_NEES(x_gauss_1, x_gt_1)\n\n NEES_2 = solution.analysis.get_NEES(x_gauss_2, x_gt_2)\n\n assert compare(NEES_1, NEES_2)\n\n assert compare(x_gauss_1, x_gauss_2)\n assert compare(x_gt_1, x_gt_2)\n\n def test_output__get_ANIS(self, test_data):\n for finput in test_data[\"analysis.get_ANIS\"]:\n params = tuple(finput.values())\n\n z_pred_gauss_data_1, z_data_1 = deepcopy(params)\n\n z_pred_gauss_data_2, z_data_2 = deepcopy(params)\n\n ANIS_1 = analysis.get_ANIS(z_pred_gauss_data_1, z_data_1)\n\n ANIS_2 = solution.analysis.get_ANIS(z_pred_gauss_data_2, z_data_2)\n\n assert compare(ANIS_1, ANIS_2)\n\n assert compare(z_pred_gauss_data_1, z_pred_gauss_data_2)\n assert compare(z_data_1, z_data_2)\n\n def test_output__get_ANEES(self, test_data):\n for finput in test_data[\"analysis.get_ANEES\"]:\n params = tuple(finput.values())\n\n x_upd_gauss_data_1, x_gt_data_1 = deepcopy(params)\n\n x_upd_gauss_data_2, x_gt_data_2 = deepcopy(params)\n\n ANEES_1 = analysis.get_ANEES(x_upd_gauss_data_1, x_gt_data_1)\n\n ANEES_2 = solution.analysis.get_ANEES(\n x_upd_gauss_data_2, x_gt_data_2)\n\n assert compare(ANEES_1, ANEES_2)\n\n assert compare(x_upd_gauss_data_1, x_upd_gauss_data_2)\n assert compare(x_gt_data_1, x_gt_data_2)\n\n\nclass TestSolutionUsage:\n\n def test_solution_usage__get_NIS(self, test_data):\n for finput in test_data[\"analysis.get_NIS\"][:1]:\n params = finput\n\n solution.used[\"analysis.get_NIS\"] = False\n\n analysis.get_NIS(**params)\n\n assert not solution.used[\"analysis.get_NIS\"], \"The function uses the solution\"\n\n def test_solution_usage__get_NEES(self, test_data):\n for finput in test_data[\"analysis.get_NEES\"][:1]:\n params = finput\n\n solution.used[\"analysis.get_NEES\"] = False\n\n analysis.get_NEES(**params)\n\n assert not solution.used[\"analysis.get_NEES\"], \"The function uses the solution\"\n\n def test_solution_usage__get_ANIS(self, test_data):\n for finput in test_data[\"analysis.get_ANIS\"][:1]:\n params = finput\n\n solution.used[\"analysis.get_ANIS\"] = False\n\n analysis.get_ANIS(**params)\n\n assert not solution.used[\"analysis.get_ANIS\"], \"The function uses the solution\"\n\n def test_solution_usage__get_ANEES(self, test_data):\n for finput in test_data[\"analysis.get_ANEES\"][:1]:\n params = finput\n\n solution.used[\"analysis.get_ANEES\"] = False\n\n analysis.get_ANEES(**params)\n\n assert not solution.used[\"analysis.get_ANEES\"], \"The function uses the solution\"\n\n\nif __name__ == \"__main__\":\n os.environ[\"_PYTEST_RAISE\"] = \"1\"\n pytest.main()\n", "id": "964311", "language": "Python", "matching_score": 1.938920021057129, "max_stars_count": 0, "path": "Assignments/Assignment_03/tests/test_analysis.py" }, { "content": "import pickle\nimport pytest\nfrom copy import deepcopy\nimport sys\nfrom pathlib import Path\nimport numpy as np\nimport os\nfrom dataclasses import is_dataclass\nassignment_name = \"assignment2\"\n\nthis_file = Path(__file__)\ntests_folder = this_file.parent\ntest_data_file = tests_folder.joinpath('test_data.pickle')\nproject_folder = tests_folder.parent\ncode_folder = project_folder.joinpath(assignment_name)\n\nsys.path.insert(0, str(code_folder))\n\nimport solution # nopep8\nimport task2 # nopep8\n\n\n@pytest.fixture\ndef test_data():\n with open(test_data_file, 'rb') as file:\n test_data = 
pickle.load(file)\n return test_data\n\n\ndef compare(a, b):\n if isinstance(a, np.ndarray) or isinstance(b, np.ndarray):\n return np.allclose(a, b)\n elif is_dataclass(a) or is_dataclass(b):\n return str(a) == str(b)\n else:\n return a == b\n\n\nclass TestOutput:\n\n def test_output__condition_mean(self, test_data):\n for finput in test_data[\"task2.condition_mean\"]:\n params = tuple(finput.values())\n\n x_1, P_1, z_1, R_1, H_1 = deepcopy(params)\n\n x_2, P_2, z_2, R_2, H_2 = deepcopy(params)\n\n cond_mean_1 = task2.condition_mean(x_1, P_1, z_1, R_1, H_1)\n\n cond_mean_2 = solution.task2.condition_mean(\n x_2, P_2, z_2, R_2, H_2)\n\n assert compare(cond_mean_1, cond_mean_2)\n\n assert compare(x_1, x_2)\n assert compare(P_1, P_2)\n assert compare(z_1, z_2)\n assert compare(R_1, R_2)\n assert compare(H_1, H_2)\n\n def test_output__condition_cov(self, test_data):\n for finput in test_data[\"task2.condition_cov\"]:\n params = tuple(finput.values())\n\n P_1, R_1, H_1 = deepcopy(params)\n\n P_2, R_2, H_2 = deepcopy(params)\n\n conditional_cov_1 = task2.condition_cov(P_1, R_1, H_1)\n\n conditional_cov_2 = solution.task2.condition_cov(P_2, R_2, H_2)\n\n assert compare(conditional_cov_1, conditional_cov_2)\n\n assert compare(P_1, P_2)\n assert compare(R_1, R_2)\n assert compare(H_1, H_2)\n\n def test_output__get_task_2f(self, test_data):\n for finput in test_data[\"task2.get_task_2f\"]:\n params = tuple(finput.values())\n\n x_bar_1, P_1, z_c_1, R_c_1, H_c_1, z_r_1, R_r_1, H_r_1 = deepcopy(\n params)\n\n x_bar_2, P_2, z_c_2, R_c_2, H_c_2, z_r_2, R_r_2, H_r_2 = deepcopy(\n params)\n\n x_bar_c_1, P_c_1, x_bar_r_1, P_r_1 = task2.get_task_2f(\n x_bar_1, P_1, z_c_1, R_c_1, H_c_1, z_r_1, R_r_1, H_r_1)\n\n x_bar_c_2, P_c_2, x_bar_r_2, P_r_2 = solution.task2.get_task_2f(\n x_bar_2, P_2, z_c_2, R_c_2, H_c_2, z_r_2, R_r_2, H_r_2)\n\n assert compare(x_bar_c_1, x_bar_c_2)\n assert compare(P_c_1, P_c_2)\n assert compare(x_bar_r_1, x_bar_r_2)\n assert compare(P_r_1, P_r_2)\n\n assert compare(x_bar_1, x_bar_2)\n assert compare(P_1, P_2)\n assert compare(z_c_1, z_c_2)\n assert compare(R_c_1, R_c_2)\n assert compare(H_c_1, H_c_2)\n assert compare(z_r_1, z_r_2)\n assert compare(R_r_1, R_r_2)\n assert compare(H_r_1, H_r_2)\n\n def test_output__get_task_2g(self, test_data):\n for finput in test_data[\"task2.get_task_2g\"]:\n params = tuple(finput.values())\n\n x_bar_c_1, P_c_1, x_bar_r_1, P_r_1, z_c_1, R_c_1, H_c_1, z_r_1, R_r_1, H_r_1 = deepcopy(\n params)\n\n x_bar_c_2, P_c_2, x_bar_r_2, P_r_2, z_c_2, R_c_2, H_c_2, z_r_2, R_r_2, H_r_2 = deepcopy(\n params)\n\n x_bar_cr_1, P_cr_1, x_bar_rc_1, P_rc_1 = task2.get_task_2g(\n x_bar_c_1, P_c_1, x_bar_r_1, P_r_1, z_c_1, R_c_1, H_c_1, z_r_1, R_r_1, H_r_1)\n\n x_bar_cr_2, P_cr_2, x_bar_rc_2, P_rc_2 = solution.task2.get_task_2g(\n x_bar_c_2, P_c_2, x_bar_r_2, P_r_2, z_c_2, R_c_2, H_c_2, z_r_2, R_r_2, H_r_2)\n\n assert compare(x_bar_cr_1, x_bar_cr_2)\n assert compare(P_cr_1, P_cr_2)\n assert compare(x_bar_rc_1, x_bar_rc_2)\n assert compare(P_rc_1, P_rc_2)\n\n assert compare(x_bar_c_1, x_bar_c_2)\n assert compare(P_c_1, P_c_2)\n assert compare(x_bar_r_1, x_bar_r_2)\n assert compare(P_r_1, P_r_2)\n assert compare(z_c_1, z_c_2)\n assert compare(R_c_1, R_c_2)\n assert compare(H_c_1, H_c_2)\n assert compare(z_r_1, z_r_2)\n assert compare(R_r_1, R_r_2)\n assert compare(H_r_1, H_r_2)\n\n def test_output__get_task_2h(self, test_data):\n for finput in test_data[\"task2.get_task_2h\"]:\n params = tuple(finput.values())\n\n x_bar_rc_1, P_rc_1 = deepcopy(params)\n\n x_bar_rc_2, P_rc_2 = 
deepcopy(params)\n\n prob_above_line_1 = task2.get_task_2h(x_bar_rc_1, P_rc_1)\n\n prob_above_line_2 = solution.task2.get_task_2h(x_bar_rc_2, P_rc_2)\n\n assert compare(prob_above_line_1, prob_above_line_2)\n\n assert compare(x_bar_rc_1, x_bar_rc_2)\n assert compare(P_rc_1, P_rc_2)\n\n\nclass TestSolutionUsage:\n\n def test_solution_usage__condition_mean(self, test_data):\n for finput in test_data[\"task2.condition_mean\"][:1]:\n params = finput\n\n solution.used[\"task2.condition_mean\"] = False\n\n task2.condition_mean(**params)\n\n assert not solution.used[\"task2.condition_mean\"], (\n \"The function uses the solution\")\n\n def test_solution_usage__condition_cov(self, test_data):\n for finput in test_data[\"task2.condition_cov\"][:1]:\n params = finput\n\n solution.used[\"task2.condition_cov\"] = False\n\n task2.condition_cov(**params)\n\n assert not solution.used[\"task2.condition_cov\"], (\n \"The function uses the solution\")\n\n def test_solution_usage__get_task_2f(self, test_data):\n for finput in test_data[\"task2.get_task_2f\"][:1]:\n params = finput\n\n solution.used[\"task2.get_task_2f\"] = False\n\n task2.get_task_2f(**params)\n\n assert not solution.used[\"task2.get_task_2f\"], (\n \"The function uses the solution\")\n\n def test_solution_usage__get_task_2g(self, test_data):\n for finput in test_data[\"task2.get_task_2g\"][:1]:\n params = finput\n\n solution.used[\"task2.get_task_2g\"] = False\n\n task2.get_task_2g(**params)\n\n assert not solution.used[\"task2.get_task_2g\"], (\n \"The function uses the solution\")\n\n def test_solution_usage__get_task_2h(self, test_data):\n for finput in test_data[\"task2.get_task_2h\"][:1]:\n params = finput\n\n solution.used[\"task2.get_task_2h\"] = False\n\n task2.get_task_2h(**params)\n\n assert not solution.used[\"task2.get_task_2h\"], (\n \"The function uses the solution\")\n\n\nif __name__ == '__main__':\n os.environ['_PYTEST_RAISE'] = \"1\"\n pytest.main()\n", "id": "5115750", "language": "Python", "matching_score": 1.2109700441360474, "max_stars_count": 0, "path": "Assignments/Assignment_02/tests/test_task2.py" }, { "content": "import os\nimport pytest\n\n\"\"\"\nThis file is a hack to let the debugger in vscode catch the assert statements\n\"\"\"\n\nif os.getenv('_PYTEST_RAISE', \"0\") != \"0\":\n\n @pytest.hookimpl(tryfirst=True)\n def pytest_exception_interact(call):\n raise call.excinfo.value\n\n @pytest.hookimpl(tryfirst=True)\n def pytest_internalerror(excinfo):\n raise excinfo.value\n", "id": "2342964", "language": "Python", "matching_score": 0.016255738213658333, "max_stars_count": 0, "path": "Graded/G3/tests/conftest.py" }, { "content": "import numpy as np\nfrom numpy import ndarray\nfrom dataclasses import dataclass\nfrom scipy.spatial.transform import Rotation\n\nfrom config import DEBUG\n\nfrom cross_matrix import get_cross_matrix\n\n\n@dataclass\nclass RotationQuaterion:\n \"\"\"Class representing a rotation quaternion (norm = 1). 
Has some useful\n methods for converting between rotation representations.\n\n Hint: You can implement all methods yourself, or use scipy's Rotation class.\n scipy's Rotation uses the xyzw notation for quats while the book uses wxyz\n (this is really annoying, I know).\n\n Args:\n real_part (float): eta (n) in the book, w in scipy notation\n vec_part (ndarray[3]): epsilon in the book, (x,y,z) in scipy notation\n \"\"\"\n real_part: float\n vec_part: 'ndarray[3]'\n\n def __post_init__(self):\n if DEBUG:\n assert len(self.vec_part) == 3\n\n norm = np.sqrt(self.real_part**2 + sum(self.vec_part**2))\n if not np.allclose(norm, 1):\n self.real_part /= norm\n self.vec_part /= norm\n\n if self.real_part < 0:\n self.real_part *= -1\n self.vec_part *= -1\n\n def multiply(self, other: 'RotationQuaterion') -> 'RotationQuaterion':\n \"\"\"Multiply two rotation quaternions\n Hint: see (10.33)\n\n As __matmul__ is implemented for this class, you can use:\n q1@q2 which is equivalent to q1.multiply(q2)\n\n Args:\n other (RotationQuaternion): the other quaternion \n Returns:\n quaternion_product (RotationQuaternion): the product\n \"\"\"\n real_part = self.real_part*other.real_part - self.vec_part@other.vec_part\n vec_part = self.vec_part*other.real_part + (self.real_part*np.eye(3) + get_cross_matrix(self.vec_part))@other.vec_part\n quaternion_product = RotationQuaterion(real_part, vec_part)\n\n return quaternion_product\n\n\n def conjugate(self) -> 'RotationQuaterion':\n \"\"\"Get the conjugate of the RotationQuaternion\"\"\"\n conj = RotationQuaterion(self.real_part, -self.vec_part)\n return conj\n\n def as_rotmat(self) -> 'ndarray[3,3]':\n \"\"\"Get the rotation matrix representation of self\n\n Returns:\n R (ndarray[3,3]): rotation matrix\n \"\"\"\n scalar_last_quat = np.append(self.vec_part, self.real_part)\n R = Rotation.from_quat(scalar_last_quat).as_matrix()\n return R\n\n @property\n def R(self) -> 'ndarray[3,3]':\n return self.as_rotmat()\n\n def as_euler(self) -> 'ndarray[3]':\n \"\"\"Get the euler angle representation of self\n\n Returns:\n euler (ndarray[3]): extrinsic xyz euler angles (roll, pitch, yaw)\n \"\"\"\n scalar_last_quat = np.append(self.vec_part, self.real_part)\n euler = Rotation.from_quat(scalar_last_quat).as_euler('xyz', degrees=False)\n return euler\n\n def as_avec(self) -> 'ndarray[3]':\n \"\"\"Get the angles vector (rotation vector) representation of self\n\n Returns:\n avec (ndarray[3]): rotation vector (angle times unit axis)\n \"\"\"\n scalar_last_quat = np.append(self.vec_part, self.real_part)\n avec = Rotation.from_quat(scalar_last_quat).as_rotvec()\n return avec\n\n @staticmethod\n def from_euler(euler: 'ndarray[3]') -> 'RotationQuaterion':\n \"\"\"Get a rotation quaternion from euler angles\n usage: rquat = RotationQuaterion.from_euler(euler)\n\n Args:\n euler (ndarray[3]): extrinsic xyz euler angles (roll, pitch, yaw)\n\n Returns:\n rquat (RotationQuaternion): the rotation quaternion\n \"\"\"\n scipy_quat = Rotation.from_euler('xyz', euler).as_quat()\n rquat = RotationQuaterion(scipy_quat[3], scipy_quat[:3])\n return rquat\n\n def _as_scipy_quat(self):\n \"\"\"If you're using scipy's Rotation class, this can be handy\"\"\"\n return np.append(self.vec_part, self.real_part)\n\n def __iter__(self):\n return iter([self.real_part, self.vec_part])\n\n def __matmul__(self, other) -> 'RotationQuaterion':\n \"\"\"Lets you use the @ operator, q1@q2 == q1.multiply(q2)\"\"\"\n return self.multiply(other)\n
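\n\n# Example usage (added sketch, illustrative): composing two 90 degree yaws\n# gives a 180 degree yaw, using the conventions documented above:\n# q = RotationQuaterion.from_euler(np.array([0., 0., np.pi/2]))\n# (q @ q).as_euler() # -> approx. [0., 0., np.pi] (sign of pi may flip)\n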
"max_stars_count": 0, "path": "Graded/G2/eskf/quaternion.py" }, { "content": "import numpy as np\nfrom numpy import ndarray\n\nimport solution\n\n\ndef get_cross_matrix(vec: ndarray) -> ndarray:\n \"\"\"Get the matrix equivalent of cross product. S() in (10.68)\n\n cross_product_matrix(vec1)@vec2 == np.cross(vec1, vec2)\n\n Hint: see (10.5)\n\n Args:\n vec (ndarray[3]): vector\n\n Returns:\n S (ndarray[3,3]): cross product matrix equivalent\n \"\"\"\n S = np.array([\n [0, -vec[2], vec[1]],\n [vec[2], 0, -vec[0]],\n [-vec[1], vec[0], 0]\n ])\n\n return S\n\n", "id": "12522315", "language": "Python", "matching_score": 0.236687570810318, "max_stars_count": 0, "path": "Graded/G2/eskf/cross_matrix.py" }, { "content": "from numpy import ndarray\nfrom dataclasses import dataclass\n\n\n@dataclass\nclass ESKFStaticParams():\n \"\"\"Dataclass containing static parameter for the drone and IMU\n\n lever_arm (ndarray[3]): GPS position relative to imu (in body frame)\n accm_correction (ndarray[3,3]): accelerometer correction matrix\n gyro_correction (ndarray[3,3]): gyro correction matrix\n \"\"\"\n accm_correction: 'ndarray[3]'\n gyro_correction: 'ndarray[3]'\n lever_arm: 'ndarray[3]'\n\n\n@dataclass\nclass ESKFTuningParams():\n \"\"\"Dataclass containing tunable parameter for the eskf\n\n acc_std (float): accelerometer standard deviation\n acc_bias_std (float): accelerometer bias standard deviation (see 10.50)\n acc_bias_p (float): accelerometer bias random walk gain (see 10.50)\n\n gyro_std (float): gyro standard deviation\n gyro_bias_std (float): gyro bias standard deviation (see 10.50)\n gyro_bias_p (float): gyro bias random walk gain (see 10.50)\n\n gnss_std_ne (float): gnss standard deviation in north and east dir (xy)\n gnss_std_d (float): gnss standard deviation in down dir (z)\n\n use_gnss_accuracy (bool): to use the gnss measurements estimated accuracy\n \"\"\"\n accm_std: float\n accm_bias_std: float\n accm_bias_p: float\n\n gyro_std: float\n gyro_bias_std: float\n gyro_bias_p: float\n\n gnss_std_ne: float\n gnss_std_d: float\n\n use_gnss_accuracy: bool = False\n", "id": "11435079", "language": "Python", "matching_score": 2.2721426486968994, "max_stars_count": 0, "path": "Graded/G2/eskf/datatypes/eskf_params.py" }, { "content": "from numpy import ndarray\nfrom dataclasses import dataclass\nfrom typing import Optional\n\n\n@dataclass\nclass ImuMeasurement():\n \"\"\"Represents raw data received from the imu\n\n Args:\n ts (float): IMU measurement timestamp\n acc (ndarray[3]): accelerometer measurement\n avel (ndarray[3]): gyro measurement\n \"\"\"\n ts: float\n acc: 'ndarray[3]'\n avel: 'ndarray[3]'\n\n def __post_init__(self):\n assert self.acc.shape == (3,)\n assert self.avel.shape == (3,)\n\n\n@dataclass\nclass CorrectedImuMeasurement(ImuMeasurement):\n \"\"\"Represents processed data from the IMU.\n Corrected for axis alignmentand scale scale, and bias. 
\n@dataclass\nclass CorrectedImuMeasurement(ImuMeasurement):\n \"\"\"Represents processed data from the IMU.\n Corrected for axis alignment and scale, and for bias. \n\n Not 'corrected' for gravity.\n\n Implementation is exactly the same as ImuMeasurement\n\n Args:\n ts (float): IMU measurement timestamp\n acc (ndarray[3]): corrected accelerometer measurement\n avel (ndarray[3]): corrected gyro measurement\n \"\"\"\n\n\n@dataclass\nclass GnssMeasurement():\n \"\"\"Represents data received from gnss\n Args:\n ts (float): GNSS measurement timestamp\n pos (ndarray[3]): GNSS position measurement\n accuracy (Optional[float]): the reported accuracy from the gnss\n \"\"\"\n ts: float\n pos: 'ndarray[3]'\n accuracy: Optional[float] = None\n\n def __post_init__(self):\n assert self.pos.shape == (3,)\n", "id": "936485", "language": "Python", "matching_score": 0.5763769745826721, "max_stars_count": 0, "path": "Graded/G2/eskf/datatypes/measurements.py" }, { "content": "import numpy as np\nfrom numpy import ndarray\nfrom typing import Sequence, Optional\n\nfrom datatypes.measurements import GnssMeasurement\nfrom datatypes.eskf_states import NominalState, ErrorStateGauss\nfrom datatypes.multivargaussian import MultiVarGaussStamped\n\ndef get_NIS(z_gnss: GnssMeasurement,\n z_gnss_pred_gauss: MultiVarGaussStamped,\n marginal_idxs: Optional[Sequence[int]] = None\n ) -> float:\n \"\"\"Calculate NIS\n\n Args:\n z_gnss (GnssMeasurement): gnss measurement\n z_gnss_pred_gauss (MultiVarGaussStamped): predicted gnss measurement\n marginal_idxs (Optional[Sequence[int]]): Sequence of marginal indexes.\n For example used for calculating NIS in only xy direction. \n\n Returns:\n NIS (float): NIS value\n \"\"\"\n z_pos_marg = z_gnss.pos\n z_pred_gauss_marg = z_gnss_pred_gauss\n\n if marginal_idxs is not None:\n z_pos_marg = z_pos_marg[marginal_idxs]\n z_pred_gauss_marg = z_pred_gauss_marg.marginalize(marginal_idxs)\n\n nu = z_pos_marg - z_pred_gauss_marg.mean\n S = z_pred_gauss_marg.cov\n\n NIS = nu.T@np.linalg.inv(S)@nu\n return NIS\n\n\ndef get_error(x_true: NominalState,\n x_nom: NominalState,\n ) -> 'ndarray[15]':\n \"\"\"Finds the error (difference) between the true state and the\n nominal state. See (Table 10.1).\n\n Returns:\n error (ndarray[15]): difference between x_true and x_nom. \n \"\"\"\n q_inv = x_nom.ori.conjugate()\n error_ori = q_inv @ x_true.ori\n\n error = np.concatenate([\n x_true.pos - x_nom.pos,\n x_true.vel - x_nom.vel,\n error_ori.as_euler(),\n x_true.accm_bias - x_nom.accm_bias,\n x_true.gyro_bias - x_nom.gyro_bias\n ])\n\n return error\n\n
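# Note (added): get_error forms the attitude error as conj(q_nom) * q_true\n# and reports it as extrinsic xyz Euler angles, matching the error-state\n# definition in (Table 10.1).\n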
\ndef get_NEES(error: 'ndarray[15]',\n x_err: ErrorStateGauss,\n marginal_idxs: Optional[Sequence[int]] = None\n ) -> float:\n \"\"\"Calculate NEES\n\n Args:\n error (ndarray[15]): errors between x_true and x_nom (from get_error)\n x_err (ErrorStateGauss): estimated error\n marginal_idxs (Optional[Sequence[int]]): Sequence of marginal indexes.\n For example used for calculating NEES for only the position. \n\n Returns:\n NEES (float): NEES value\n \"\"\"\n e = error\n x_err_gauss = x_err\n\n if marginal_idxs is not None:\n e = e[marginal_idxs]\n x_err_gauss = x_err_gauss.marginalize(marginal_idxs)\n\n P = x_err_gauss.cov\n\n NEES = e.T@np.linalg.inv(P)@e\n\n return NEES\n\n\ndef get_time_pairs(unique_data, data):\n \"\"\"match data from two different time series based on timestamps\"\"\"\n gt_dict = dict(([x.ts, x] for x in unique_data))\n pairs = [(gt_dict[x.ts], x) for x in data if x.ts in gt_dict]\n times = [pair[0].ts for pair in pairs]\n return times, pairs\n", "id": "3850361", "language": "Python", "matching_score": 3.461846113204956, "max_stars_count": 0, "path": "Graded/G2/eskf/nis_nees.py" }, { "content": "import numpy as np\nfrom numpy import ndarray\nimport scipy\nfrom dataclasses import dataclass, field\nfrom typing import Tuple\nfrom functools import cache\nfrom math import isnan\n\nfrom scipy.spatial.transform.rotation import Rotation\nfrom datatypes.multivargaussian import MultiVarGaussStamped\nfrom datatypes.measurements import (ImuMeasurement,\n CorrectedImuMeasurement,\n GnssMeasurement)\nfrom datatypes.eskf_states import NominalState, ErrorStateGauss\nfrom utils.indexing import block_3x3\n\nfrom quaternion import RotationQuaterion\nfrom cross_matrix import get_cross_matrix\n\nimport solution\n\n\n@dataclass\nclass ESKF():\n\n accm_std: float\n accm_bias_std: float\n accm_bias_p: float\n\n gyro_std: float\n gyro_bias_std: float\n gyro_bias_p: float\n\n gnss_std_ne: float\n gnss_std_d: float\n\n accm_correction: 'ndarray[3,3]'\n gyro_correction: 'ndarray[3,3]'\n lever_arm: 'ndarray[3]'\n\n do_approximations: bool\n use_gnss_accuracy: bool = False\n\n Q_err: 'ndarray[12,12]' = field(init=False, repr=False)\n g: 'ndarray[3]' = np.array([0, 0, 9.82])\n\n def __post_init__(self):\n\n self.Q_err = scipy.linalg.block_diag(\n self.accm_std ** 2 * self.accm_correction @ self.accm_correction.T,\n self.gyro_std ** 2 * self.gyro_correction @ self.gyro_correction.T,\n self.accm_bias_std ** 2 * np.eye(3),\n self.gyro_bias_std ** 2 * np.eye(3),\n )\n self.gnss_cov = np.diag([self.gnss_std_ne]*2 + [self.gnss_std_d])**2\n\n def correct_z_imu(self,\n x_nom_prev: NominalState,\n z_imu: ImuMeasurement,\n ) -> CorrectedImuMeasurement:\n \"\"\"Correct IMU measurement so it gives a measurement of acceleration \n and angular velocity in body.\n\n Hint: self.accm_correction and self.gyro_correction translate \n measurements from IMU frame (probably not correct name) to body frame\n\n Args:\n x_nom_prev (NominalState): previous nominal state\n z_imu (ImuMeasurement): raw IMU measurement\n\n Returns:\n CorrectedImuMeasurement: corrected IMU measurement\n \"\"\"\n acc_imu = z_imu.acc - x_nom_prev.accm_bias\n avel_imu = z_imu.avel - x_nom_prev.gyro_bias\n\n acc_body = self.accm_correction@acc_imu\n avel_body = self.gyro_correction@avel_imu\n\n z_corr = CorrectedImuMeasurement(z_imu.ts, acc_body, avel_body)\n\n return z_corr\n
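 # Example (added sketch, illustrative): with identity correction\n # matrices and zero biases, correct_z_imu returns acc/avel unchanged:\n # z_corr = eskf.correct_z_imu(x_nom, ImuMeasurement(0., np.zeros(3), np.zeros(3)))\n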
\n def predict_nominal(self,\n x_nom_prev: NominalState,\n z_corr: CorrectedImuMeasurement,\n ) -> NominalState:\n \"\"\"Predict the nominal state, given a corrected IMU measurement\n\n Hint: Discrete time prediction of equation (10.58)\n See the assignment description for more hints \n\n Args:\n x_nom_prev (NominalState): previous nominal state\n z_corr (CorrectedImuMeasurement): corrected IMU measurement\n\n Returns:\n x_nom_pred (NominalState): predicted nominal state\n \"\"\"\n\n # To avoid x_nom_prev.ts being None error\n if x_nom_prev.ts is None:\n x_nom_prev.ts = 0\n\n # Catch NaN's\n if isnan(x_nom_prev.ori.real_part) or any(np.isnan(x_nom_prev.ori.vec_part)):\n x_nom_prev.ori = RotationQuaterion(1, np.zeros(3))\n\n Ts = float(abs(x_nom_prev.ts - z_corr.ts))\n if Ts == 0:\n return x_nom_prev\n\n # Previous state\n pos_prev = x_nom_prev.pos\n vel_prev = x_nom_prev.vel\n ori_prev = x_nom_prev.ori\n accm_bias_prev = x_nom_prev.accm_bias\n gyro_bias_prev = x_nom_prev.gyro_bias\n\n # State derivatives from (10.58) without noise or bias\n pos_dot = vel_prev\n vel_dot = x_nom_prev.ori.R@(z_corr.acc) + self.g\n accm_bias_dot = -self.accm_bias_p * np.eye(3) @ accm_bias_prev\n gyro_bias_dot = -self.gyro_bias_p * np.eye(3) @ gyro_bias_prev\n\n # Euler step to get predictions\n pos = pos_prev + Ts*pos_dot\n vel = vel_prev + Ts*vel_dot\n accm_bias = accm_bias_prev + Ts*accm_bias_dot\n gyro_bias = gyro_bias_prev + Ts*gyro_bias_dot\n\n # Handle orientation on its own since quaternions are a bitch sometimes (at least in this implementation)\n omega = np.array(z_corr.avel)\n kappa = Ts*omega\n kappa_2norm = np.sqrt(kappa@kappa.T)\n if kappa_2norm > 0:\n ori = ori_prev @ RotationQuaterion(np.cos(0.5*kappa_2norm), np.sin(0.5*kappa_2norm)/kappa_2norm * kappa)\n else:\n # zero angular rate: no orientation increment\n ori = ori_prev\n\n x_nom_pred = NominalState(pos, vel, ori, accm_bias, gyro_bias, z_corr.ts)\n\n return x_nom_pred\n\n def get_error_A_continous(self,\n x_nom_prev: NominalState,\n z_corr: CorrectedImuMeasurement,\n ) -> 'ndarray[15,15]':\n \"\"\"Get the transition matrix, A, in (10.68)\n\n Hint: The S matrices can be created using get_cross_matrix. In the book\n a perfect IMU is expected (thus many I matrices). Here we have \n to use the correction matrices, self.accm_correction and \n self.gyro_correction, instead of some of the I matrices. \n\n You can use block_3x3 to simplify indexing if you want to.\n The first I element in A can be set as A[block_3x3(0, 1)] = np.eye(3)\n\n Args:\n x_nom_prev (NominalState): previous nominal state\n z_corr (CorrectedImuMeasurement): corrected IMU measurement\n Returns:\n A (ndarray[15,15]): A\n \"\"\"\n A = np.zeros((15, 15))\n A[block_3x3(0, 1)] = np.eye(3)\n # z_corr is already bias-corrected (see correct_z_imu), so the biases\n # must not be subtracted again here\n A[block_3x3(1, 2)] = -x_nom_prev.ori.R @ get_cross_matrix(z_corr.acc)\n A[block_3x3(1, 3)] = -x_nom_prev.ori.R\n A[block_3x3(2, 2)] = -get_cross_matrix(z_corr.avel)\n A[block_3x3(2, 4)] = -np.eye(3)\n A[block_3x3(3, 3)] = -self.accm_bias_p*self.accm_correction\n A[block_3x3(4, 4)] = -self.gyro_bias_p*self.gyro_correction\n\n return A\n\n def get_error_GQGT_continous(self,\n x_nom_prev: NominalState\n ) -> 'ndarray[15, 15]':\n \"\"\"The noise covariance matrix, GQGT, in (10.68)\n\n From (Theorem 3.2.2) we can see that (10.68) can be written as \n d/dt x_err = A@x_err + G@n == A@x_err + m\n where m is gaussian with mean 0 and covariance G @ Q @ G.T. That's why\n we need GQGT.\n\n Hint: you can use block_3x3 to simplify indexing if you want to.\n The first I element in G can be set as G[block_3x3(2, 1)] = -np.eye(3)\n\n Args:\n x_nom_prev (NominalState): previous nominal state\n Returns:\n GQGT (ndarray[15, 15]): G @ Q @ G.T\n \"\"\"\n G = np.zeros((15, 12))\n\n G[block_3x3(1, 0)] = -x_nom_prev.ori.R\n G[block_3x3(2, 1)] = -np.eye(3)\n G[block_3x3(3, 2)] = np.eye(3)\n G[block_3x3(4, 3)] = np.eye(3)\n\n GQGT = G@self.Q_err@G.T\n\n return GQGT\n
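 # Discretization note (added): with V = [[-A, GQGT], [0, A.T]]*Ts,\n # expm(V) has block structure [[., V2], [0, V1]] so that Ad = V1.T and\n # GQGTd = V1.T @ V2 (Van Loan, see 4.63); used in get_discrete_error_diff.\n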
\n def get_van_loan_matrix(self, V: 'ndarray[30, 30]'):\n \"\"\"Use this function in get_discrete_error_diff to get the van loan \n matrix. See (4.63)\n\n All the tests are run with do_approximations=False\n\n Args:\n V (ndarray[30, 30]): the stacked continuous-time matrix (times Ts)\n\n Returns:\n VanLoanMatrix (ndarray[30, 30]): VanLoanMatrix\n \"\"\"\n if self.do_approximations:\n # second order approximation of matrix exponential which is faster\n VanLoanMatrix = np.eye(*V.shape) + V + (V@V) / 2\n else:\n VanLoanMatrix = scipy.linalg.expm(V)\n return VanLoanMatrix\n\n def get_discrete_error_diff(self,\n x_nom_prev: NominalState,\n z_corr: CorrectedImuMeasurement,\n ) -> Tuple['ndarray[15, 15]',\n 'ndarray[15, 15]']:\n \"\"\"Get the discrete equivalents of A and GQGT in (4.63)\n\n Hint: you should use get_van_loan_matrix to get the van loan matrix\n\n See (4.5 Discretization) and (4.63) for more information. \n Or see \"Discretization of process noise\" in \n https://en.wikipedia.org/wiki/Discretization\n\n Args:\n x_nom_prev (NominalState): previous nominal state\n z_corr (CorrectedImuMeasurement): corrected IMU measurement\n\n Returns:\n Ad (ndarray[15, 15]): discrete transition matrix\n GQGTd (ndarray[15, 15]): discrete noise covariance matrix\n \"\"\"\n Ts = abs(x_nom_prev.ts - z_corr.ts)\n\n A = self.get_error_A_continous(x_nom_prev, z_corr)\n GQGT = self.get_error_GQGT_continous(x_nom_prev)\n\n V = np.block([[-A, GQGT],\n [np.zeros((15, 15)), A.T]])\n\n VL = self.get_van_loan_matrix(V*Ts)\n\n V2 = VL[:15, 15:]\n V1 = VL[15:, 15:]\n\n Ad = V1.T\n GQGTd = V1.T@V2\n\n return Ad, GQGTd\n\n def predict_x_err(self,\n x_nom_prev: NominalState,\n x_err_prev_gauss: ErrorStateGauss,\n z_corr: CorrectedImuMeasurement,\n ) -> ErrorStateGauss:\n \"\"\"Predict the error state\n\n Hint: This is doing a discrete step of (10.68) where x_err \n is a multivariate gaussian.\n\n Args:\n x_nom_prev (NominalState): previous nominal state\n x_err_prev_gauss (ErrorStateGauss): previous error state gaussian\n z_corr (CorrectedImuMeasurement): corrected IMU measurement\n\n Returns:\n x_err_pred (ErrorStateGauss): predicted error state\n \"\"\"\n\n Ad, GQGTd = self.get_discrete_error_diff(x_nom_prev, z_corr)\n\n P_prev = x_err_prev_gauss.cov\n P_pred = Ad@P_prev@Ad.T + GQGTd\n\n x_err_pred = ErrorStateGauss(x_err_prev_gauss.mean, P_pred, x_nom_prev.ts)\n\n return x_err_pred\n\n def predict_from_imu(self,\n x_nom_prev: NominalState,\n x_err_gauss: ErrorStateGauss,\n z_imu: ImuMeasurement,\n ) -> Tuple[NominalState, ErrorStateGauss]:\n \"\"\"Method called every time an IMU measurement is received\n\n Args:\n x_nom_prev (NominalState): previous nominal state\n x_err_gauss (ErrorStateGauss): previous error state gaussian\n z_imu (ImuMeasurement): raw IMU measurement\n\n Returns:\n x_nom_pred (NominalState): predicted nominal state\n x_err_pred (ErrorStateGauss): predicted error state\n \"\"\"\n z_corr = self.correct_z_imu(x_nom_prev, z_imu)\n\n x_nom_pred = self.predict_nominal(x_nom_prev, z_corr)\n x_err_pred = self.predict_x_err(x_nom_prev, x_err_gauss, z_corr)\n\n return x_nom_pred, x_err_pred\n
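 # Measurement model note (added): the gnss antenna sits at\n # z = pos + R(q) @ lever_arm, so the position block of H is the identity\n # and the attitude block is -R(q) @ S(lever_arm) with S from get_cross_matrix.\n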
\n def get_gnss_measurment_jac(self, x_nom: NominalState) -> 'ndarray[3,15]':\n \"\"\"Get the measurement jacobian, H.\n\n Hint: the gnss antenna has a relative position to the center given by\n self.lever_arm. How will the gnss measurement change if the drone is \n rotated differently? Use get_cross_matrix and some other stuff :) \n\n Returns:\n H (ndarray[3, 15]): the gnss measurement jacobian\n \"\"\"\n H = np.zeros((3, 15))\n H[block_3x3(0, 0)] = np.eye(3)\n # sensitivity of the rotated lever arm to attitude error\n H[block_3x3(0, 2)] = -x_nom.ori.R @ get_cross_matrix(self.lever_arm)\n return H\n\n def get_gnss_cov(self, z_gnss: GnssMeasurement) -> 'ndarray[3,3]':\n \"\"\"Use this function in predict_gnss_measurement to get R. \n Get gnss covariance estimate based on gnss estimated accuracy. \n\n All the test data has self.use_gnss_accuracy=False, so this does not \n affect the tests.\n\n There is no given solution to this function, feel free to play around!\n\n Returns:\n gnss_cov (ndarray[3,3]): the estimated gnss covariance\n \"\"\"\n if self.use_gnss_accuracy and z_gnss.accuracy is not None:\n # play around with this part, the suggested way is not optimal\n gnss_cov = (z_gnss.accuracy/3)**2 * self.gnss_cov\n\n else:\n # don't change this part\n gnss_cov = self.gnss_cov\n return gnss_cov\n\n def predict_gnss_measurement(self,\n x_nom: NominalState,\n x_err: ErrorStateGauss,\n z_gnss: GnssMeasurement,\n ) -> MultiVarGaussStamped:\n \"\"\"Predict the gnss measurement\n\n Hint: z_gnss is only used in get_gnss_cov and to get timestamp for \n the predicted measurement\n\n Args:\n x_nom (NominalState): previous nominal state\n x_err (ErrorStateGauss): previous error state gaussian\n z_gnss (GnssMeasurement): gnss measurement\n\n Returns:\n z_gnss_pred_gauss (MultiVarGaussStamped): gnss prediction gaussian\n \"\"\"\n # predicted antenna position: nominal position plus the rotated lever arm\n mean = x_nom.pos + x_nom.ori.R@self.lever_arm\n H = self.get_gnss_measurment_jac(x_nom)\n cov = H@x_err.cov@H.T + self.get_gnss_cov(z_gnss)\n\n z_gnss_pred_gauss = MultiVarGaussStamped(mean, cov, z_gnss.ts)\n\n return z_gnss_pred_gauss\n\n def get_x_err_upd(self,\n x_nom: NominalState,\n x_err: ErrorStateGauss,\n z_gnss_pred_gauss: MultiVarGaussStamped,\n z_gnss: GnssMeasurement\n ) -> ErrorStateGauss:\n \"\"\"Update the error state from a gnss measurement\n\n Hint: see (10.75)\n Due to numerical error it's recommended to use the robust calculation \n of posterior covariance.\n\n I_WH = np.eye(*P.shape) - W @ H\n P_upd = (I_WH @ P @ I_WH.T + W @ R @ W.T)\n\n Args:\n x_nom (NominalState): previous nominal state\n x_err (ErrorStateGauss): previous error state gaussian\n z_gnss_pred_gauss (MultiVarGaussStamped): gnss prediction gaussian\n z_gnss (GnssMeasurement): gnss measurement\n\n Returns:\n x_err_upd_gauss (ErrorStateGauss): updated error state gaussian\n \"\"\"\n P = x_err.cov\n R = self.get_gnss_cov(z_gnss)\n H = self.get_gnss_measurment_jac(x_nom)\n\n W = P@H.T@np.linalg.inv(H@P@H.T + R)\n I_WH = np.eye(*P.shape) - W @ H\n\n P_upd = (I_WH @ P @ I_WH.T + W @ R @ W.T)\n mean = W@(z_gnss.pos - z_gnss_pred_gauss.mean)\n\n x_err_upd_gauss = ErrorStateGauss(mean, P_upd, z_gnss.ts)\n\n return x_err_upd_gauss\n
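 # Numerical note (added): the Joseph form I_WH @ P @ I_WH.T + W @ R @ W.T\n # used above keeps the posterior covariance symmetric and positive\n # (semi)definite under rounding, unlike the shorter (I - W @ H) @ P form.\n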
\n def inject(self,\n x_nom_prev: NominalState,\n x_err_upd: ErrorStateGauss\n ) -> Tuple[NominalState, ErrorStateGauss]:\n \"\"\"Perform the injection step\n\n Hint: see (10.85) and (10.72) on how to inject into nominal state.\n See (10.86) on how to find error state after injection\n\n Args:\n x_nom_prev (NominalState): previous nominal state\n x_err_upd (ErrorStateGauss): updated error state gaussian\n\n Returns:\n x_nom_inj (NominalState): nominal state after injection\n x_err_inj (ErrorStateGauss): error state gaussian after injection\n \"\"\"\n\n x_nom_inj = NominalState(\n x_nom_prev.pos + x_err_upd.pos,\n x_nom_prev.vel + x_err_upd.vel,\n x_nom_prev.ori.multiply(RotationQuaterion(1, 0.5*x_err_upd.avec)),\n x_nom_prev.accm_bias + x_err_upd.accm_bias,\n x_nom_prev.gyro_bias + x_err_upd.gyro_bias,\n x_nom_prev.ts\n )\n\n mean = np.zeros(15)\n G = np.eye(15)\n G[6:9, 6:9] = np.eye(3) - get_cross_matrix(0.5*x_err_upd.avec)\n cov = G@x_err_upd.cov@G.T\n\n x_err_inj = ErrorStateGauss(mean, cov, x_err_upd.ts)\n return x_nom_inj, x_err_inj\n\n def update_from_gnss(self,\n x_nom_prev: NominalState,\n x_err_prev: ErrorStateGauss,\n z_gnss: GnssMeasurement,\n ) -> Tuple[NominalState,\n ErrorStateGauss,\n MultiVarGaussStamped]:\n \"\"\"Method called every time a gnss measurement is received.\n\n Args:\n x_nom_prev (NominalState): previous nominal state\n x_err_prev (ErrorStateGauss): previous error state gaussian\n z_gnss (GnssMeasurement): gnss measurement\n\n Returns:\n x_nom_inj (NominalState): nominal state after injection\n x_err_inj (ErrorStateGauss): error state after injection\n z_gnss_pred_gauss (MultiVarGaussStamped): predicted gnss \n measurement, used for NIS calculations.\n \"\"\"\n z_gnss_pred_gauss = self.predict_gnss_measurement(x_nom_prev, x_err_prev, z_gnss)\n x_err_upd = self.get_x_err_upd(x_nom_prev, x_err_prev, z_gnss_pred_gauss, z_gnss)\n x_nom_inj, x_err_inj = self.inject(x_nom_prev, x_err_upd)\n\n return x_nom_inj, x_err_inj, z_gnss_pred_gauss\n", "id": "6055141", "language": "Python", "matching_score": 4.100195407867432, "max_stars_count": 0, "path": "Graded/G2/eskf/eskf.py" }, { "content": "from numpy import ndarray\nfrom dataclasses import dataclass\nfrom typing import Optional, TYPE_CHECKING\n\nif TYPE_CHECKING: # used to avoid circular imports with solution\n from quaternion import RotationQuaterion\n\nfrom datatypes.multivargaussian import MultiVarGaussStamped\n\nfrom config import DEBUG\n\n\n@dataclass\nclass NominalState:\n \"\"\"Class representing a nominal state. See (Table 10.1) in the book.\n\n Args:\n pos (ndarray[3]): position in NED\n vel (ndarray[3]): velocity in NED\n ori (RotationQuaterion): orientation as a quaternion in NED\n accm_bias (ndarray[3]): accelerometer bias\n gyro_bias (ndarray[3]): gyro bias\n \"\"\"\n pos: 'ndarray[3]'\n vel: 'ndarray[3]'\n ori: 'RotationQuaterion'\n accm_bias: 'ndarray[3]'\n gyro_bias: 'ndarray[3]'\n\n ts: Optional[float] = None\n\n def __post_init__(self):\n if DEBUG:\n assert self.pos.shape == (3,)\n assert self.vel.shape == (3,)\n # hack to avoid circular imports with solution\n assert type(self.ori).__name__ == 'RotationQuaterion'\n assert self.accm_bias.shape == (3,)\n assert self.gyro_bias.shape == (3,)\n\n\n@dataclass\nclass ErrorStateGauss(MultiVarGaussStamped):\n \"\"\"A multivariate gaussian representing the error state.\n Has some properties to fetch out useful indexes\"\"\"\n\n def __post_init__(self):\n super().__post_init__()\n assert self.mean.shape == (15,)\n\n @property\n def pos(self):\n \"\"\"position\"\"\"\n return self.mean[0:3]\n\n @property\n def vel(self):\n \"\"\"velocity\"\"\"\n return self.mean[3:6]\n\n @property\n def avec(self):\n \"\"\"angles vector\n this is often called a rotation vector\n \"\"\"\n return self.mean[6:9]\n\n @property\n def accm_bias(self):\n \"\"\"accelerometer bias\"\"\"\n return self.mean[9:12]\n\n @property\n def gyro_bias(self):\n \"\"\"gyro bias\"\"\"\n return self.mean[12:15]\n", "id": "2909030", "language": "Python", "matching_score": 2.130866765975952, "max_stars_count": 0, "path": "Graded/G2/eskf/datatypes/eskf_states.py" }, { "content": "import numpy as np\nfrom quaternion import RotationQuaterion\nfrom datatypes.eskf_params import ESKFTuningParams\nfrom datatypes.eskf_states import NominalState, ErrorStateGauss\n\n
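# Tuning note (added): std values are in SI units (m, m/s, rad); the *_bias_p\n# values are the bias random walk gains from (10.50).\ntuning_params_sim = 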
ESKFTuningParams(\n accm_std=0.01,\n accm_bias_std=0.0005,\n accm_bias_p=10e-12,\n\n gyro_std=0.0002,\n gyro_bias_std=0.0002,\n gyro_bias_p=10e-12,\n\n gnss_std_ne=0.25,\n gnss_std_d=0.5)\n\nx_nom_init_sim = NominalState(\n np.array([0., 0., 0.]), # position\n np.array([0., 0., 0.]), # velocity\n RotationQuaterion.from_euler([0., 0., 0.]), # orientation\n np.zeros(3), # accelerometer bias\n np.zeros(3), # gyro bias\n ts=0.)\n\ninit_std_sim = np.repeat(repeats=3, # repeat each element 3 times\n a=[10, # position\n 10, # velocity\n np.deg2rad(np.pi/30), # angle vector\n 0.001, # accelerometer bias\n 0.001]) # gyro bias\nx_err_init_sim = ErrorStateGauss(np.zeros(15), np.diag(init_std_sim**2), 0.)\n", "id": "10766731", "language": "Python", "matching_score": 5.7292280197143555, "max_stars_count": 0, "path": "Graded/G2/eskf/tuning_sim.py" }, { "content": "import numpy as np\nfrom quaternion import RotationQuaterion\nfrom datatypes.eskf_params import ESKFTuningParams\nfrom datatypes.eskf_states import NominalState, ErrorStateGauss\n\ntuning_params_real = ESKFTuningParams(\n accm_std=1.,\n accm_bias_std=1.,\n accm_bias_p=1.,\n\n gyro_std=1.,\n gyro_bias_std=1.,\n gyro_bias_p=1.,\n\n gnss_std_ne=1.,\n gnss_std_d=1.,\n\n use_gnss_accuracy=False)\n\nx_nom_init_real = NominalState(\n np.array([0., 0., 0.]), # position\n np.array([0., 0., 0.]), # velocity\n RotationQuaterion.from_euler([0., 0., 0.]), # orientation\n np.zeros(3), # accelerometer bias\n np.zeros(3), # gyro bias\n ts=0.)\n\ninit_std_real = np.repeat(repeats=3, # repeat each element 3 times\n a=[1., # position\n 1., # velocity\n np.deg2rad(1), # angle vector\n 1., # accelerometer bias\n 1.]) # gyro bias\n\nx_err_init_real = ErrorStateGauss(np.zeros(15), np.diag(init_std_real**2), 0.)\n", "id": "826961", "language": "Python", "matching_score": 2.446866989135742, "max_stars_count": 0, "path": "Graded/G2/eskf/tuning_real.py" }, { "content": "from typing import List, Tuple\nimport numpy as np\nfrom tqdm import tqdm\nfrom matplotlib import pyplot as plt\nfrom dataclasses import asdict\n\nfrom utils.dataloader import load_sim_data, load_real_data\nfrom datatypes.eskf_params import ESKFTuningParams, ESKFStaticParams\nfrom datatypes.eskf_states import ErrorStateGauss, NominalState\nfrom datatypes.multivargaussian import MultiVarGaussStamped\nfrom datatypes.measurements import ImuMeasurement, GnssMeasurement\n\nfrom plotting import (plot_state, plot_position_path_3d,\n plot_nis, plot_errors, plot_nees)\n\nfrom eskf import ESKF\nfrom nis_nees import get_NIS, get_NEES, get_error, get_time_pairs\nimport config\nimport tuning_sim\nimport tuning_real\n\n\ndef run_eskf(eskf_tuning_params: ESKFTuningParams,\n eskf_static_params: ESKFStaticParams,\n imu_measurements: List[ImuMeasurement],\n gnss_measurements: List[GnssMeasurement],\n x_nom_init: NominalState,\n x_err_gauss_init: ErrorStateGauss\n ) -> Tuple[List[NominalState],\n List[ErrorStateGauss],\n List[MultiVarGaussStamped]]:\n\n eskf = ESKF(**asdict(eskf_tuning_params),\n **asdict(eskf_static_params),\n do_approximations=config.DO_APPROXIMATIONS)\n\n x_nom = x_nom_init\n x_err_gauss = x_err_gauss_init\n\n x_nom_seq = []\n x_err_gauss_seq = []\n z_gnss_pred_gauss_seq = []\n gnss_measurements_copy = gnss_measurements.copy()\n next_logging_time = 0\n LOGGING_DELTA = 0.1\n for z_imu in tqdm(imu_measurements):\n x_nom, x_err_gauss = eskf.predict_from_imu(\n x_nom, x_err_gauss, z_imu)\n\n if (len(gnss_measurements_copy) > 0\n and z_imu.ts >= gnss_measurements_copy[0].ts):\n z_gnss = 
gnss_measurements_copy.pop(0)\n\n # we pretend z_gnss arrived at the same time as the last z_imu\n # this is not ideal, but works fine as the IMU intervals are small\n z_gnss.ts = z_imu.ts\n\n x_nom, x_err_gauss, z_gnss_pred_gauss = eskf.update_from_gnss(\n x_nom, x_err_gauss, z_gnss)\n z_gnss_pred_gauss_seq.append(z_gnss_pred_gauss)\n next_logging_time = -np.inf\n\n if z_imu.ts >= next_logging_time:\n x_nom_seq.append(x_nom)\n x_err_gauss_seq.append(x_err_gauss)\n next_logging_time = z_imu.ts + LOGGING_DELTA\n return x_nom_seq, x_err_gauss_seq, z_gnss_pred_gauss_seq\n\n\ndef main():\n if config.RUN == 'sim':\n print(f\"Running {config.MAX_TIME} seconds of simulated data set\")\n (x_true_data, z_imu_data, z_gnss_data, drone_params\n ) = load_sim_data(config.MAX_TIME)\n tuning_params = tuning_sim.tuning_params_sim\n x_nom_init = tuning_sim.x_nom_init_sim\n x_err_init = tuning_sim.x_err_init_sim\n\n elif config.RUN == 'real':\n print(f\"Running {config.MAX_TIME} seconds of real data set\")\n x_true_data = None\n (z_imu_data, z_gnss_data, drone_params\n ) = load_real_data(config.MAX_TIME)\n tuning_params = tuning_real.tuning_params_real\n x_nom_init = tuning_real.x_nom_init_real\n x_err_init = tuning_real.x_err_init_real\n else:\n raise IndexError(\"config.RUN must be 'sim' or 'real'\")\n\n x_nom_seq, x_err_gauss_seq, z_gnss_pred_gauss_seq = run_eskf(\n tuning_params, drone_params,\n z_imu_data, z_gnss_data,\n x_nom_init, x_err_init)\n\n NIS_times, z_true_pred_pairs = get_time_pairs(z_gnss_data,\n z_gnss_pred_gauss_seq)\n\n NISxyz_seq = [(get_NIS(z, pred)) for z, pred in z_true_pred_pairs]\n NISxy_seq = [(get_NIS(z, pred, [0, 1])) for z, pred in z_true_pred_pairs]\n NISz_seq = [(get_NIS(z, pred, [2])) for z, pred in z_true_pred_pairs]\n plot_nis(NIS_times, NISxyz_seq, NISxy_seq, NISz_seq)\n\n if x_true_data:\n x_times, x_true_nom_pairs = get_time_pairs(x_true_data,\n x_nom_seq)\n errors = np.array([get_error(x_true, x_nom)\n for x_true, x_nom in x_true_nom_pairs])\n err_gt_est_pairs = list(zip(errors, x_err_gauss_seq))\n NEES_pos_seq = [(get_NEES(gt, est, [0, 1, 2]))\n for gt, est in err_gt_est_pairs]\n NEES_vel_seq = [(get_NEES(gt, est, [3, 4, 5]))\n for gt, est in err_gt_est_pairs]\n NEES_avec_seq = [(get_NEES(gt, est, [6, 7, 8]))\n for gt, est in err_gt_est_pairs]\n NEES_accm_seq = [(get_NEES(gt, est, [9, 10, 11]))\n for gt, est in err_gt_est_pairs]\n NEES_gyro_seq = [(get_NEES(gt, est, [12, 13, 14]))\n for gt, est in err_gt_est_pairs]\n\n plot_errors(x_times, errors)\n plot_nees(x_times, NEES_pos_seq, NEES_vel_seq,\n NEES_avec_seq, NEES_accm_seq, NEES_gyro_seq)\n\n plot_state(x_nom_seq)\n plot_position_path_3d(x_nom_seq, x_true_data)\n\n plt.show(block=True)\n\n\nif __name__ == '__main__':\n main()\n", "id": "3890874", "language": "Python", "matching_score": 3.4187135696411133, "max_stars_count": 0, "path": "Graded/G2/eskf/run.py" }, { "content": "from pathlib import Path\nimport numpy as np\nfrom scipy.io import loadmat\n\nfrom datatypes.measurements import ImuMeasurement, GnssMeasurement\nfrom datatypes.eskf_states import NominalState\nfrom datatypes.eskf_params import ESKFStaticParams\n\nfrom quaternion import RotationQuaterion\n\ndata_dir = Path(__file__).parents[2].joinpath('data')\nsimulated_data_file = data_dir.joinpath('task_simulation.mat')\nreal_data_file = data_dir.joinpath('task_real.mat')\n\n\ndef load_sim_data(max_time=np.inf):\n loaded_data = loadmat(simulated_data_file)\n\n x_true = loaded_data[\"xtrue\"].T\n\n timeGNSS = loaded_data[\"timeGNSS\"].ravel()\n z_GNSS = 
loaded_data[\"zGNSS\"].T\n\n timeIMU = loaded_data[\"timeIMU\"].ravel()\n z_acceleration = loaded_data[\"zAcc\"].T\n z_gyroscope = loaded_data[\"zGyro\"].T\n\n lever_arm = loaded_data[\"leverarm\"].ravel()\n S_a = loaded_data[\"S_a\"]\n S_g = loaded_data[\"S_g\"]\n\n x_nom_true_data = [NominalState(x[:3], x[3:6],\n RotationQuaterion(x[6], x[7:10]),\n x[10:13], x[13:16],\n ts)\n for x, ts in zip(x_true, timeIMU)\n if ts <= max_time]\n\n imu_measurements = [ImuMeasurement(ts, acc, gyro) for ts, acc, gyro\n in zip(timeIMU, z_acceleration, z_gyroscope)\n if ts <= max_time]\n gnss_measurements = [GnssMeasurement(ts, pos) for ts, pos\n in zip(timeGNSS, z_GNSS)\n if ts <= max_time]\n drone_params = ESKFStaticParams(S_a, S_g, lever_arm)\n\n return x_nom_true_data, imu_measurements, gnss_measurements, drone_params\n\n\ndef load_real_data(max_time=np.inf):\n loaded_data = loadmat(real_data_file)\n\n timeGNSS = loaded_data[\"timeGNSS\"].ravel()\n z_GNSS = loaded_data[\"zGNSS\"].T\n accuracy_GNSS = loaded_data[\"GNSSaccuracy\"].ravel()\n\n timeIMU = loaded_data[\"timeIMU\"].ravel()\n z_acceleration = loaded_data[\"zAcc\"].T\n z_gyroscope = loaded_data[\"zGyro\"].T\n\n lever_arm = loaded_data[\"leverarm\"].ravel()\n S_a = loaded_data[\"S_a\"]\n S_g = loaded_data[\"S_g\"]\n\n start_time = 302850\n imu_measurements = [ImuMeasurement(ts-start_time, acc, gyro)\n for ts, acc, gyro\n in zip(timeIMU, z_acceleration, z_gyroscope)\n if start_time <= ts < max_time+start_time]\n\n gnss_measurements = [GnssMeasurement(ts-start_time, pos, precision)\n for ts, pos, precision\n in zip(timeGNSS, z_GNSS, accuracy_GNSS)\n if start_time <= ts < max_time+start_time]\n\n drone_params = ESKFStaticParams(S_a, S_g, lever_arm)\n\n return imu_measurements, gnss_measurements, drone_params\n", "id": "2238024", "language": "Python", "matching_score": 1.3368892669677734, "max_stars_count": 0, "path": "Graded/G2/eskf/utils/dataloader.py" }, { "content": "from pathlib import Path\nimport numpy as np\nfrom scipy.io import loadmat\n\nfrom utils.sample_CT_trajectory import sample_CT_trajectory\n\ndata_path = Path(__file__).parents[2].joinpath(\"data/data_for_ekf.mat\")\n\n\ndef load_data(usePregen=True, data_path=data_path, seed=None):\n if usePregen:\n loaded_data: dict = loadmat(str(data_path))\n N_data: int = int(loaded_data[\"K\"]) # The number of time steps\n Ts: float = float(loaded_data[\"Ts\"]) # The sampling time\n x_gt_data: np.ndarray = loaded_data[\"Xgt\"].T # grounexutd truth\n z_data: np.ndarray = loaded_data[\"Z\"].T # the measurements\n\n else:\n if seed:\n np.random.seed(seed) # random seed can be set for repeatability\n\n # inital state distribution\n x0 = np.array([0, 0, 1, 1, 0])\n P0 = np.diag([50, 50, 10, 10, np.pi / 4]) ** 2\n\n # model parameters\n sigma_a_true = 0.25\n sigma_omega_true = np.pi / 15\n sigma_z_true = 3\n\n # sampling interval a lenght\n N_data = 1000\n Ts = 0.1\n\n # get data\n x_gt_data, z_data = sample_CT_trajectory(\n N_data, Ts, x0, P0, sigma_a_true, sigma_omega_true, sigma_z_true\n )\n return x_gt_data, z_data, Ts, N_data\n", "id": "8230108", "language": "Python", "matching_score": 2.7810118198394775, "max_stars_count": 2, "path": "Assignments/Assignment_03/ekf/utils/dataloader.py" }, { "content": "from pathlib import Path\nimport numpy as np\nfrom scipy.io import loadmat\n\ndata_dir = Path(__file__).parents[2].joinpath('data')\n\n\ndef load_data(data_path='data_for_pda.mat'):\n loaded_data: dict = loadmat(str(data_dir.joinpath(data_path)))\n K = loaded_data[\"K\"].item()\n Ts = 
loaded_data[\"Ts\"].item()\n Xgt = loaded_data[\"Xgt\"].T\n Z = [zk.T for zk in loaded_data[\"Z\"].ravel()]\n true_association = loaded_data[\"a\"].ravel()\n return K, Ts, Xgt, Z, true_association\n", "id": "6024322", "language": "Python", "matching_score": 1.1698222160339355, "max_stars_count": 0, "path": "Assignments/Assignment_05/pda/utils/dataloader.py" }, { "content": "from os import path\nfrom pathlib import Path\nimport shutil\n\nproj_dir = Path(__file__).parent\ncode_dir = proj_dir.joinpath('slam')\nhandin_dir = proj_dir.joinpath(\"handin\")\n\nshutil.make_archive(handin_dir, 'zip', code_dir)\n", "id": "4569335", "language": "Python", "matching_score": 0, "max_stars_count": 0, "path": "Graded/G3/generate_handin.py" }, { "content": "#!/usr/bin/env python\n\nimport signals\nimport cfg\n\nimport numpy as np\n\ndef calculate_m_star(Fw):\n \"\"\"\n Calculate the m_star as described by \n Eq. (5) and Eq. (9) in the project spec.\n \"\"\"\n return np.argmax(np.absolute(Fw))\n\ndef calculate_w_hat(m_star, M):\n \"\"\"\n Calculate the angular frequency estimate\n w_FFT_hat as described by Eq. (8) in the\n project spec\n \"\"\"\n return 2*np.pi*m_star / (M*cfg.Ts)\n\ndef calculate_phi_hat(x_d, w_hat):\n \"\"\"\n Calculate the phase estimate phi_hat as\n described by Eq. (7) in the project spec\n \"\"\"\n F_w_hat = signals.F(x_d, w_hat)\n phi_arg = np.exp(-1j*w_hat*cfg.n0*cfg.Ts)*F_w_hat\n return np.angle(phi_arg)\n\ndef estimator(x_d, M):\n \"\"\"\n Use the M-point FFT estimator described in the\n problem spec. to estimate the angular frequency\n \"w\" and phase \"phi\" of the input signal x_d.\n\n Returns the estimates w_hat and phi_hat, as well\n as the FFT of the input signal, for data analysis\n purposes.\n \"\"\"\n Fw, Ff = M_point_fft(x_d, M)\n\n m_star = calculate_m_star(Fw)\n\n w_hat = calculate_w_hat(m_star, M)\n phi_hat = calculate_phi_hat(x_d, w_hat)\n\n\n return w_hat, phi_hat, Fw, Ff\n\ndef M_point_fft(x_d, M):\n Fw = np.fft.fft(x_d, M)\n Ff = np.fft.fftfreq(M, 1 / cfg.Fs)\n\n return Fw, Ff", "id": "12748253", "language": "Python", "matching_score": 2.39035701751709, "max_stars_count": 1, "path": "scripts/fft_estimator.py" }, { "content": "#!/usr/bin/env python\n\nimport csv\nimport matplotlib.pyplot as plt\nfrom collections import Counter\n\nfrom scipy import optimize\nfrom math import pi, floor\nimport numpy as np\n\nimport fft_estimator\nimport signals\nimport cfg\n\ndef mse(list_lhs, list_rhs):\n \"\"\" \n Calculates the MSE between two lists. Throws an error if the lists don't\n have the same lenght\n \"\"\"\n assert(len(list_lhs) == len(list_rhs))\n return np.square(np.absolute(list_lhs - list_rhs)).mean()\n \n\ndef frequency_objective_function(x, M, x_d, phi_hat):\n \"\"\"\n Creates the objective-function for optimizing the frequency. The\n function assumes the input to be a ndarray, with the first value\n being the next frequency/iteration to minimize for. The algorithm \n uses this frequency to create a theoretical signal, and returns the\n MSE wrt to the measured signal\n \"\"\"\n\n omega_k = x[0]\n x_f = signals.x_ideal(omega_k, phi_hat) # Phase has no effect as it removed through FFT\n\n Fx_d, _ = fft_estimator.M_point_fft(x_d, M)\n Fx_f, _ = fft_estimator.M_point_fft(x_f, M)\n\n return mse(np.absolute(Fx_d), np.absolute(Fx_f))\n\n\ndef phase_objective_function(x, x_d, omega_hat):\n \"\"\"\n Creates the objective-function for optimizing the phase. The\n function assumes the input to be a ndarray, with the first value\n being the next phase/iteration to minimize for. 
The algorithm \n uses this phase to create a theoretical signal, and returns the\n MSE wrt to the measured signal\n \"\"\"\n phi_k = x[0]\n\n x_p = signals.x_ideal(omega_hat, phi_k)\n\n return mse(x_d, x_p)\n", "id": "747554", "language": "Python", "matching_score": 1.5800238847732544, "max_stars_count": 1, "path": "scripts/optimizing.py" }, { "content": "#!/usr/bin/env python\n\nimport sys\nimport os\n\nif os.name == 'nt':\n sys.path[0]=os.path.dirname(os.path.realpath(__file__))\n \nimport cfg\nimport numpy as np\n\ndef sigma_squared_from_SNR_dB(SNR_dB):\n \"\"\"\n Calculate the value for sigma^2 according to the\n definition.\n \"\"\"\n SNR = 10**(SNR_dB/10.0)\n return cfg.A**2 / (2*float(SNR)) # Float casting prevents floor division\n\ndef F(x_d, w):\n \"\"\"\n Calculate F(w) according to Eq. (6) in the project\n specifications.\n \"\"\"\n sum = 0\n for n in range(0, cfg.N):\n sum += x_d[n]*np.exp(-1j*w*n*cfg.Ts)\n\n return sum / cfg.N\n\ndef x_discrete(SNR_dB):\n \"\"\"\n Generate a signal according to the problem spec.\n which consists of a complex exponential with \n added noise. Noise-to-signal ratio defined by SNR\n \"\"\"\n sigma = np.sqrt(sigma_squared_from_SNR_dB(SNR_dB))\n\n wr = np.random.normal(0, sigma, cfg.N)\n wi = np.random.normal(0, sigma, cfg.N)\n w = wr + 1j*wi\n\n x = np.empty(cfg.N, dtype=np.complex_)\n\n for n in range(cfg.N):\n z = 1j*(cfg.w0 * (n+cfg.n0) * cfg.Ts + cfg.phi)\n x[n] = cfg.A * np.exp(z)\n\n return x + w\n\n\ndef x_ideal(omega, phase):\n \"\"\" \n Generates a complex-exponential signal with given frequency\n and phase. Does not contain noise\n \"\"\"\n x = np.empty(cfg.N, dtype=np.complex_)\n \n for n in range(cfg.N):\n z = 1j*(omega * (cfg.n0+n) * cfg.Ts + phase)\n x[n] = cfg.A * np.exp(z)\n\n return x", "id": "3365547", "language": "Python", "matching_score": 2.6038818359375, "max_stars_count": 1, "path": "scripts/signals.py" }, { "content": "#!/usr/bin/env python\n\nimport cfg\nimport signals as sig\n\n\ndef omega(SNR_dB):\n sigma_squared = sig.sigma_squared_from_SNR_dB(SNR_dB)\n\n numerator = 12*sigma_squared\n denominator = cfg.A**2 * cfg.Ts**2 * cfg.N*(cfg.N**2 - 1)\n\n return numerator / denominator\n\ndef phi(SNR_dB):\n sigma_squared = sig.sigma_squared_from_SNR_dB(SNR_dB)\n\n numerator = 12*sigma_squared*(cfg.n0**2 * cfg.N + 2*cfg.n0*cfg.P + cfg.Q)\n denominator = cfg.A**2 * cfg.N**2 * (cfg.N**2 - 1)\n\n return numerator / denominator\n", "id": "3866936", "language": "Python", "matching_score": 1.01852285861969, "max_stars_count": 1, "path": "scripts/crlb.py" }, { "content": "#!/usr/bin/env python\n\n# A single file containing everything needed for the estimation\nimport numpy as np\nimport sys\nimport matplotlib.pyplot as plt\n\ndef generate_crlb(sigma_squared):\n crlb_w = 12*sigma_squared / (A**2 * T**2 * N*(N**2 - 1))\n crlb_phi = 12*sigma_squared*(n0**2 * N + 2*n0*P + Q) / (A**2 * N**2 * (N**2 - 1))\n\n return crlb_w, crlb_phi\n\ndef generate_signal(sigma):\n # Noise generation\n wr = np.random.normal(0, sigma, N)\n wi = np.random.normal(0, sigma, N)\n w = wr + 1j*wi\n\n # Pure signal generation\n x = np.empty(N, dtype=np.complex_)\n for n in range(N):\n x[n] = A*np.exp(1j*(w0*(n+n0)*T + phi))\n\n return x + w\n\ndef F(x_d, w):\n Fw0 = 0\n for n in range(N):\n Fw0 += x_d[n]*np.exp(-1j*w*n*T) # Eq. (6)\n Fw0 /= N\n\n return Fw0\n\ndef fft_estimator(x_d, M):\n Fw = np.fft.fft(x_d, M)\n m_star = np.argmax(np.absolute(Fw))\n \n w_hat = 2*np.pi*m_star / (M*T) # Eq. (8)\n phi_hat = np.angle(np.exp(-1j*w_hat*n0*T)*F(x_d, w_hat)) # Eq. 
(7)\n\n return w_hat, phi_hat, Fw\n\ndef plot(x_d, Fw, i):\n Fw = np.fft.fftshift(Fw)\n Fw = np.absolute(Fw)\n\n Ff = np.fft.fftfreq(M, 1.0/Fs)\n Ff = np.fft.fftshift(Ff)\n\n if i == 0:\n plt.figure(1)\n plt.plot(np.arange(len(x_d)), np.real(x_d))\n plt.title(\"First generated signal with GWN, real values\")\n\n plt.figure(2)\n plt.plot(np.arange(len(x_d)), np.imag(x_d))\n plt.title(\"First generated signal with GWN, imag values\")\n\n plt.figure(3)\n plt.plot(Ff, Fw)\n plt.title(\"Abs. of F-transform of first generated signal\")\n\n plt.figure(4)\n plt.plot(Ff, Fw)\n plt.title(\"F-transforms of all generated signals\")\n\ndef print_status_bar(i, progress):\n if i % (num_of_runs/status_bar_length) == 0:\n progress += 1\n\n sys.stdout.write('\\r')\n sys.stdout.write(\"Status: [\" + \"=\"*progress + \" \"*(status_bar_length-progress) + \"]\")\n sys.stdout.flush()\n\n return progress\n\n# Constants\nFs = 10**6\nT = 1.0/Fs\n\nf0 = 10**5\nw0 = 2*np.pi*f0\n\nphi = np.pi / 8\n\nA = 1.0\nN = 513\n\nP = N*(N-1)/2.0\nQ = N*(N-1)*(2*N-1)/6.0\n\nn0 = -P/N\n\nstatus_bar_length = 50\nnum_of_runs = 500\n\n# Generate multiple samples to calculate variance\nSNR_dB = 30.0\nSNR = 10**(SNR_dB/10.0)\n\nK = 16\nM = 2**K\n\nsigma_squared = A**2 / (2*SNR)\n\nw_estimates = np.empty(num_of_runs)\nphi_estimates = np.empty(num_of_runs)\n\nstatus_bar_progress = 0\ndo_plot = False\nfor i in range(num_of_runs):\n\n x_d = generate_signal(np.sqrt(sigma_squared))\n w_hat, phi_hat, Fw = fft_estimator(x_d, M)\n\n w_estimates[i] = w_hat\n phi_estimates[i] = phi_hat\n\n if do_plot:\n plot(x_d, Fw, i)\n\n status_bar_progress = print_status_bar(i, status_bar_progress)\n\nmean_w = np.mean(w_estimates)\nmean_phi = np.mean(phi_estimates)\n\nvar_w = np.var(w_estimates)\nvar_phi = np.var(phi_estimates)\n\ncrlb_w, crlb_phi = generate_crlb(sigma_squared)\n\nprint(\"\")\nif var_w < crlb_w:\n print(\"Variance for omega lower than CRLB!\")\n\nif var_phi < crlb_phi:\n print(\"Variance for phi lower than CRLB!\")\n\nprint(\"CONFIG | SNR [dB]: {}, M: 2^{}, true omega: {}, true phase: {}\".format(SNR_dB, K, w0, phi))\nprint(\"OMEGA | estimated mean: {}, estimated variance: {}, crlb: {}\".format(mean_w, var_w, crlb_w))\nprint(\"PHASE | estimated mean: {}, estimated variance: {}, crlb: {}\".format(mean_phi, var_phi, crlb_phi))\n\nplt.show()", "id": "8475069", "language": "Python", "matching_score": 4.05489444732666, "max_stars_count": 1, "path": "solocarry_estimation.py" }, { "content": "#!/usr/bin/env python\n\nimport csv\nimport sys\nimport os, os.path\nimport numpy as np\n\nfrom datetime import datetime as dt\nfrom scipy import optimize\n\nfrom scripts import signals as sig\nfrom scripts import fft_estimator\nfrom scripts import optimizing\nfrom scripts import utility\nfrom scripts import crlb\nfrom scripts import cfg\n\n\ntry:\n task = sys.argv[1]\nexcept Exception as e:\n print(\"No input task provided, exiting. 
\\n Usage: python main.py <task>\")\n exit(1)\n\n\nSNR_dBs =[-10, 0, 10, 20, 30, 40, 50, 60]\nFFT_Ks = [10, 12, 14, 16, 18, 20]\n\nn = len(SNR_dBs)\nm = len(FFT_Ks)\nN = 100 # Amount of samples to generate when estimating variance\n\n# Generate unique filename for data file output\nrun_number = len([name for name in os.listdir('./data') if os.path.isfile('./data/' + name)])\n\nif task == 'a':\n filename = 'data/part_a_run_' + str(run_number) + '_N_' + str(N) + '.csv'\n with open(filename, 'ab') as file:\n writer = csv.writer(file, delimiter=' ')\n\n total_time_begin = dt.now()\n for i in range(m):\n K = FFT_Ks[i]\n M = 2**K\n\n for j in range(n):\n SNR_dB = SNR_dBs[j]\n\n w_estimates = np.zeros(N)\n phi_estimates = np.zeros(N)\n\n status_bar_progress = 0\n run_time_begin = dt.now()\n for k in range(N):\n x_d = sig.x_discrete(SNR_dB)\n\n omega_hat, phi_hat, _, _ = fft_estimator.estimator(x_d, M)\n\n w_estimates[k] = omega_hat\n phi_estimates[k] = phi_hat\n\n status_bar_progress = utility.print_status_bar(k, status_bar_progress, N)\n\n mean_f = np.mean(w_estimates) / (2*np.pi)\n mean_phi = np.mean(phi_estimates)\n\n var_f = np.var(w_estimates)\n var_phi = np.var(phi_estimates)\n\n crlb_f = crlb.omega(SNR_dB)\n crlb_phi = crlb.phi(SNR_dB)\n\n run_time_end = dt.now()\n print(\"\")\n utility.print_execution_time(run_time_begin, run_time_end)\n\n f_estimate_valid = True\n phi_estimate_valid = True\n if var_f < crlb_f:\n f_estimate_valid = False\n print(\"Variance for frequency lower than CRLB!\")\n\n if var_phi < crlb_phi:\n phi_estimate_valid = False\n print(\"Variance for phi lower than CRLB!\")\n \n\n writer.writerow([SNR_dB, K, crlb_f, var_f, f_estimate_valid, crlb_phi, var_phi, phi_estimate_valid, mean_f, mean_phi])\n\n print(\"CONFIG | SNR [dB]: {}, M: 2^{}, true frequency: {}, true phase: {}\".format(SNR_dB, K, cfg.f0, cfg.phi))\n print(\"FREQUENCY | estimated mean: {}, estimated variance: {}, crlb: {}\".format(mean_f, var_f, crlb_f))\n print(\"PHASE | estimated mean: {}, estimated variance: {}, crlb: {}\".format(mean_phi, var_phi, crlb_phi))\n print(\"\")\n\n total_time_end = dt.now()\n utility.print_execution_time(total_time_begin, total_time_end)\n\nif task == 'b':\n filename = 'data/part_b_run_' + str(run_number) + '_N_' + str(N) + '.csv'\n with open(filename, 'ab') as file:\n writer = csv.writer(file, delimiter=' ')\n M = 2**10\n \n total_time_begin = dt.now()\n for SNR_dB in SNR_dBs:\n\n w_estimates = np.zeros(N)\n phi_estimates = np.zeros(N)\n\n status_bar_progress = 0\n run_time_begin = dt.now()\n\n for i in range(N):\n\n x_d = sig.x_discrete(SNR_dB)\n\n omega_hat, phi_hat, _, _ = fft_estimator.estimator(x_d, M)\n\n omega_opt = optimize.minimize(optimizing.frequency_objective_function, omega_hat, method=\"Nelder-Mead\", args=(M, x_d, phi_hat))\n phase_opt = optimize.minimize(optimizing.phase_objective_function, phi_hat, method=\"Nelder-Mead\", args=(x_d, omega_hat))\n\n w_estimates[i] = omega_opt.x[0]\n phi_estimates[i] = phase_opt.x[0]\n\n status_bar_progress = utility.print_status_bar(i, status_bar_progress, N)\n\n run_time_end = dt.now()\n print(\"\")\n utility.print_execution_time(run_time_begin, run_time_end)\n\n mean_f = np.mean(w_estimates) / (2*np.pi)\n mean_phi = np.mean(phi_estimates)\n\n var_f = np.var(w_estimates)\n var_phi = np.var(phi_estimates)\n\n crlb_f = crlb.omega(SNR_dB)\n crlb_phi = crlb.phi(SNR_dB)\n\n f_estimate_valid = True\n phi_estimate_valid = True\n \n if var_f < crlb_f:\n f_estimate_valid = False\n print(\"Variance for f lower than CRLB!\")\n\n if 
var_phi < crlb_phi:\n phi_estimate_valid = False\n print(\"Variance for phi lower than CRLB!\")\n\n\n writer.writerow([SNR_dB, 10, crlb_f, var_f, f_estimate_valid, crlb_phi, var_phi, phi_estimate_valid, mean_f, mean_phi])\n print(\"CONFIG | SNR [dB]: {}, M: 2^{}, true f: {}, true phase: {}\".format(SNR_dB, 10, cfg.f0, cfg.phi))\n print(\"FREQUENCY | estimated mean: {}, estimated variance: {}, crlb: {}\".format(mean_f, var_f, crlb_f))\n print(\"PHASE | estimated mean: {}, estimated variance: {}, crlb: {}\".format(mean_phi, var_phi, crlb_phi))\n print(\"\")\n\n total_time_end = dt.now()\n utility.print_execution_time(total_time_begin, total_time_end)", "id": "1646122", "language": "Python", "matching_score": 3.3643012046813965, "max_stars_count": 1, "path": "main.py" }, { "content": "#!/usr/bin/env python\n\nimport csv\nimport sys\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport cfg\n\n\ndef plot_var_task_a(ax, crlb, estimator_variance, ylim=None):\n plt.tight_layout()\n for i in range(N_ROWS):\n for j in range(N_COLS):\n n = range(N*(2*i+j), N*(2*i+j+1))\n axis = ax[i][j]\n axis.semilogy(SNRs, crlb[n], 'k.:')\n axis.semilogy(SNRs, estimator_variance[n], 'r.-')\n axis.set_title(\"FFT length = 2^\" + str(4*i + 2*j + 10))\n axis.set_xlabel(\"SNR\")\n axis.set_ylabel(\"Variance\")\n axis.legend(['CRLB', 'Estimator'])\n\n if ylim is not None:\n axis.set_ylim(ylim)\n\ndef plot_mean_task_a(true_mean, estimated_mean):\n plt.tight_layout()\n plt.plot([-10, 60], [true_mean, true_mean], 'k')\n for i in range(M):\n n = range(N*i, N*(i+1))\n plt.plot(SNRs, estimated_mean[n], '.--')\n\n plt.legend([\"True value\", \"2^10\", \"2^12\", \"2^14\", \"2^16\", \"2^18\", \"2^20\"])\n plt.xlabel(\"SNR\")\n plt.ylabel(\"Mean\")\n\ndef plot_var_task_b(crlb, estimator_variance):\n plt.tight_layout()\n plt.semilogy(SNRs, crlb[0:N], 'k.:')\n plt.semilogy(SNRs, estimator_variance[0:N], 'r.-')\n plt.legend(['CRLB', 'Estimator'])\n plt.xlabel(\"SNR\")\n plt.ylabel(\"Variance\")\n\ndef plot_mean_task_b(true_mean, estimated_mean):\n plt.rc('axes.formatter', useoffset=False)\n plt.plot([-10, 60], [true_mean, true_mean], 'k')\n plt.plot(SNRs, estimated_mean, 'r.--')\n plt.legend([\"True value\", \"Fine-tuned estimate\"])\n plt.xlabel(\"SNR\")\n plt.ylabel(\"Mean\")\n\n\nif __name__ == '__main__':\n\n try:\n filename = sys.argv[1]\n except Exception as e:\n print(\"No input file provided, exiting. 
\\n Usage: python plot.py 'filename.csv'\")\n        exit(1)\n\n    if \"_a_\" in filename:\n        task = 'a'\n    elif \"_b_\" in filename:\n        task = 'b'\n    else:\n        print(\"No matching task file provided, exiting...\")\n        exit(1)\n\n    SNRs = [-10, 0, 10, 20, 30, 40, 50, 60]\n    Ks = [10, 12, 14, 16, 18, 20]\n\n    N = len(SNRs)\n    M = len(Ks)\n\n    crlb_w = np.empty(N*M)\n    crlb_phi = np.empty(N*M)\n\n    var_w = np.empty(N*M)\n    var_phi = np.empty(N*M)\n\n    mean_f = np.empty(N*M)\n    mean_phi = np.empty(N*M)\n\n    w_estimate_valid = np.empty(N*M)\n    phi_estimate_valid = np.empty(N*M)\n\n\n    with open(filename) as csvfile:\n\n        reader = csv.reader(csvfile, delimiter=' ')\n\n        i = 0\n        for row in reader:\n            K = row[1]\n\n            crlb_w[i] = row[2]\n            var_w[i] = row[3]\n\n            crlb_phi[i] = row[5]\n            var_phi[i] = row[6]\n\n            mean_f[i] = row[8]\n            mean_phi[i] = row[9]\n\n            # bool('False') is True, so compare against the stored string instead\n            w_estimate_valid[i] = row[4] == 'True'\n            phi_estimate_valid[i] = row[7] == 'True'\n\n            i += 1\n\n    if task == 'a':\n        N_ROWS = M // 2 # integer division, plt.subplots expects an int\n        N_COLS = 2\n\n        _, ax = plt.subplots(N_ROWS, N_COLS)\n        plt.figure(1)\n        plot_var_task_a(ax, crlb_w, var_w, [0.01, 1e7])\n\n        _, ax = plt.subplots(N_ROWS, N_COLS)\n        plt.figure(2)\n        plot_var_task_a(ax, crlb_phi, var_phi)\n\n\n        plt.figure(3)\n        plot_mean_task_a(cfg.f0, mean_f)\n\n        plt.figure(4)\n        plot_mean_task_a(cfg.phi, mean_phi)\n\n    if task == 'b':\n\n        plt.figure(1)\n        plot_var_task_b(crlb_w, var_w)\n\n        plt.figure(2)\n        plot_var_task_b(crlb_phi, var_phi)\n\n        plt.figure(3)\n        plot_mean_task_b(cfg.f0, mean_f[0:N])\n\n        plt.figure(4)\n        plot_mean_task_b(cfg.phi, mean_phi[0:N])\n\n    plt.show()\n\n", "id": "7002494", "language": "Python", "matching_score": 1.1431013345718384, "max_stars_count": 0, "path": "scripts/plot.py" }, { "content": "\nimport matplotlib as mpl\n\nimport matplotlib.pyplot as plt\n\nfrom utils.plot_ellipse import plot_cov_ellipse2d\n\nmpl.use('Qt5Agg') # needs the pyqt package,\n# crazy cpu usage sometimes but better behaved than MacOSX\n# to see your plot config\nprint(f\"matplotlib backend: {mpl.get_backend()}\")\nprint(f\"matplotlib config file: {mpl.matplotlib_fname()}\")\nprint(f\"matplotlib config dir: {mpl.get_configdir()}\")\n# installed with \"pip install SciencePlots\" (https://github.com/garrettj403/SciencePlots.git)\n# plt.style.use(['science', 'grid', 'ieee', 'bright']) # gives quite nice plots\n\nplt.close(\"all\")\n\n\ndef show_task_2f_and_2g(x_bar, P,\n                        z_c, R_c, x_bar_c, P_c,\n                        z_r, R_r, x_bar_r, P_r,\n                        x_bar_rc, P_rc,\n                        x_bar_cr, P_cr):\n\n    fig, ax = plt.subplots()\n    ax.set_title(\"Task 2f and 2g\")\n\n    plot_cov_ellipse2d(ax, x_bar, P, edgecolor=\"C0\")\n    ax.scatter(*x_bar, c=\"C0\", marker=\"x\", label=r\"$\\bar x$\")\n\n    plot_cov_ellipse2d(ax, x_bar_c, P_c, edgecolor=\"C1\")\n    ax.scatter(*x_bar_c, c=\"C1\", marker=\"x\", label=r\"$\\bar x_c$\")\n\n    plot_cov_ellipse2d(ax, x_bar_r, P_r, edgecolor=\"C2\")\n    ax.scatter(*x_bar_r, c=\"C2\", marker=\"x\", label=r\"$\\bar x_r$\")\n\n    plot_cov_ellipse2d(ax, x_bar_cr, P_cr, edgecolor=\"C3\")\n    ax.scatter(*x_bar_cr, c=\"C3\", marker=\"x\", label=r\"$\\bar x_{cr}$\")\n\n    plot_cov_ellipse2d(ax, x_bar_rc, P_rc, edgecolor=\"cyan\", linestyle=\"--\")\n    ax.scatter(*x_bar_rc, c=\"cyan\", marker=\"+\", label=r\"$\\bar x_{rc}$\")\n\n    # %% measurements\n    ax.scatter(*z_c, c=\"C1\", label=\"$z_c$\")\n    plot_cov_ellipse2d(ax, z_c, R_c, edgecolor=\"C1\")\n\n    ax.scatter(*z_r, c=\"C2\", label=\"$z_r$\")\n    plot_cov_ellipse2d(ax, z_r, R_r, edgecolor=\"C2\")\n    # % true value\n    # ax.scatter(-5, 12, c=\"C6\", marker=\"^\", label=\"$x$\")\n\n    ax.axis(\"equal\")\n    ax.legend()\n\n    plt.show(block=False)\n\n\ndef 
show_task_2h(x_bar_rc, P_rc):\n    fig, ax = plt.subplots()\n    ax.set_title(\"Task 2h\")\n\n    plot_cov_ellipse2d(ax, x_bar_rc, P_rc, edgecolor=\"C0\")\n    ax.scatter(*x_bar_rc, marker=\"x\", c=\"C0\", label=r\"$\\bar x_{rc}$\")\n    ax.plot([-1, 4], [4, 9], color=\"C1\", label=\"$x_2 = x_1 + 5$\")\n\n    ax.axis(\"equal\")\n    ax.legend()\n    plt.show(block=False)\n", "id": "9210743", "language": "Python", "matching_score": 3.3353593349456787, "max_stars_count": 0, "path": "Assignments/Assignment_02/assignment2/utils/plotting.py" }, { "content": "#!/usr/bin/env python3\nimport numpy as np\nfrom numpy import ndarray\nfrom scipy.stats import norm\n\n\ndef condition_mean(x: ndarray, P: ndarray,\n                   z: ndarray, R: ndarray, H: ndarray) -> ndarray:\n    \"\"\"compute conditional mean\n\n    Args:\n        x (ndarray): initial state\n        P (ndarray): initial state covariance\n        z (ndarray): measurement\n        R (ndarray): measurement covariance\n        H (ndarray): measurement matrix i.e. z = H @ x + error\n\n    Returns:\n        cond_mean (ndarray): conditioned mean (state)\n    \"\"\"\n    return x + P@H.T@np.linalg.inv(H@P@H.T + R)@(z - H@x)\n\ndef condition_cov(P: ndarray, R: ndarray, H: ndarray) -> ndarray:\n    \"\"\"compute conditional covariance\n\n    Args:\n        P (ndarray): covariance of state estimate\n        R (ndarray): covariance of measurement\n        H (ndarray): measurement matrix\n\n    Returns:\n        ndarray: the conditioned covariance\n    \"\"\"\n    return P - P@H.T@np.linalg.inv(H@P@H.T + R)@H@P\n\ndef get_task_2f(x_bar: ndarray, P: ndarray,\n                z_c: ndarray, R_c: ndarray, H_c: ndarray,\n                z_r: ndarray, R_r: ndarray, H_r: ndarray\n                ):\n    \"\"\"get state estimates after receiving measurement c or measurement r\n\n    Args:\n        x_bar (ndarray): initial state estimate\n        P (ndarray): covariance of x_bar\n        z_c (ndarray): measurement c\n        R_c (ndarray): covariance of measurement c\n        H_c (ndarray): measurement matrix i.e. z_c = H_c @ x + error\n        z_r (ndarray): measurement r\n        R_r (ndarray): covariance of measurement r\n        H_r (ndarray): measurement matrix i.e. z_r = H_r @ x + error\n\n    Returns:\n        x_bar_c (ndarray): state estimate after measurement c\n        P_c (ndarray): covariance of x_bar_c\n        x_bar_r (ndarray): state estimate after measurement r\n        P_r (ndarray): covariance of x_bar_r\n    \"\"\"\n    x_bar_c = condition_mean(x_bar, P, z_c, R_c, H_c)\n    P_c = condition_cov(P, R_c, H_c)\n\n    x_bar_r = condition_mean(x_bar, P, z_r, R_r, H_r)\n    P_r = condition_cov(P, R_r, H_r)\n\n    return x_bar_c, P_c, x_bar_r, P_r\n\n\ndef get_task_2g(x_bar_c: ndarray, P_c: ndarray,\n                x_bar_r: ndarray, P_r: ndarray,\n                z_c: ndarray, R_c: ndarray, H_c: ndarray,\n                z_r: ndarray, R_r: ndarray, H_r: ndarray):\n    \"\"\"get state estimates after receiving measurement c and measurement r\n\n    Args:\n        x_bar_c (ndarray): state estimate after receiving measurement c\n        P_c (ndarray): covariance of x_bar_c\n        x_bar_r (ndarray): state estimate after receiving measurement r\n        P_r (ndarray): covariance of x_bar_r\n        z_c (ndarray): measurement c\n        R_c (ndarray): covariance of measurement c\n        H_c (ndarray): measurement matrix i.e. z_c = H_c @ x + error\n        z_r (ndarray): measurement r\n        R_r (ndarray): covariance of measurement r\n        H_r (ndarray): measurement matrix i.e. 
z_r = H_r @ x + error\n\n    Returns:\n        x_bar_cr (ndarray): state estimate after receiving z_c then z_r\n        P_cr (ndarray): covariance of x_bar_cr\n        x_bar_rc (ndarray): state estimate after receiving z_r then z_c\n        P_rc (ndarray): covariance of x_bar_rc\n    \"\"\"\n    x_bar_cr = condition_mean(x_bar_c, P_c, z_r, R_r, H_r)\n    P_cr = condition_cov(P_c, R_r, H_r)\n\n    x_bar_rc = condition_mean(x_bar_r, P_r, z_c, R_c, H_c)\n    P_rc = condition_cov(P_r, R_c, H_c)\n\n    return x_bar_cr, P_cr, x_bar_rc, P_rc\n\n\ndef get_task_2h(x_bar_rc: ndarray, P_rc: ndarray):\n    \"\"\"get the probability that the boat is above the line\n\n    Args:\n        x_bar_rc (ndarray): state\n        P_rc (ndarray): covariance\n\n    Returns:\n        prob_above_line: the probability that the boat is above the line\n    \"\"\"\n    # z = x2 - x1 => Pr(above line) = Pr(z > 5) = 1 - P(z < 5) = 1 - CDF_z(5)\n    # Turning multivariate gaussian to a scalar gaussian by evaluating along the\n    # normal of the line: z = [-1 1] * [x1 x2]^T\n    # The area of this normal above the line is exactly Pr(above line)!\n\n    n = np.array([-1, 1]).reshape((2, 1))\n\n    mean = n.T@x_bar_rc\n    std = np.sqrt(n.T@P_rc@n)\n\n    prob_above_line = 1 - norm.cdf(5.0, mean, std)\n\n    return prob_above_line.squeeze()\n", "id": "6578341", "language": "Python", "matching_score": 4.362105369567871, "max_stars_count": 0, "path": "Assignments/Assignment_02/assignment2/task2.py" }, { "content": "import numpy as np\nimport matplotlib.pyplot as plt\n\nfrom utils.plotting import show_task_2f_and_2g, show_task_2h\nfrom utils.interactive_covariance import InteractiveCovariance\nfrom task2 import (condition_mean, condition_cov,\n                   get_task_2f, get_task_2g, get_task_2h)\n\n\ndef main():\n    # %% initialize the values\n    x_bar = np.zeros(2) # initial estimate\n    P = 25 * np.eye(2) # covariance of initial estimate\n\n    z_c = np.array([2, 14]) # measurement 1\n    R_c = np.array([[79, 36], [36, 36]]) # covariance of measurement 1\n    H_c = np.eye(2) # measurement matrix of measurement 1\n\n    z_r = np.array([-4, 6]) # measurement 2\n    R_r = np.array([[28, 4], [4, 22]]) # covariance of measurement 2\n    H_r = np.eye(2) # measurement matrix of measurement 2\n\n    x_bar_c, P_c, x_bar_r, P_r = get_task_2f(x_bar, P,\n                                             z_c, R_c, H_c,\n                                             z_r, R_r, H_r)\n\n    # get_task_2g returns (x_bar_cr, P_cr, x_bar_rc, P_rc); unpack in that order\n    x_bar_cr, P_cr, x_bar_rc, P_rc = get_task_2g(x_bar_c, P_c,\n                                                 x_bar_r, P_r,\n                                                 z_c, R_c, H_c,\n                                                 z_r, R_r, H_r)\n\n    prob_above_line = get_task_2h(x_bar_rc, P_rc)\n    print(f\"Probability that it is above x_2 = x_1 + 5 is {prob_above_line}\")\n\n    interactive = InteractiveCovariance(condition_mean,\n                                        condition_cov)\n\n    show_task_2f_and_2g(x_bar, P,\n                        z_c, R_c, x_bar_c, P_c,\n                        z_r, R_r, x_bar_r, P_r,\n                        x_bar_rc, P_rc,\n                        x_bar_cr, P_cr)\n\n    show_task_2h(x_bar_rc, P_rc)\n\n    plt.show()\n\n\nif __name__ == '__main__':\n    main()\n", "id": "4853162", "language": "Python", "matching_score": 0.6866862773895264, "max_stars_count": 0, "path": "Assignments/Assignment_02/assignment2/run.py" }, { "content": "import numpy as np\nfrom numpy import ndarray\nfrom typing import Sequence\n\n\ndef tuninghints(measurement_data: Sequence[ndarray],\n                association_gt_data: ndarray):\n    \"\"\"Function used to give hints on how to tune the filter.\n\n    Args:\n        measurement_data (Sequence[ndarray]): the measurements\n        association_gt_data (ndarray): the true associations\n    \"\"\"\n    number_of_steps = len(association_gt_data)\n    number_of_detections = len([a for a in association_gt_data if a != 0])\n    total_number_of_clutter = (sum([len(zs) for zs in measurement_data])\n                               - number_of_detections)\n\n    z_xmin = min([z[0] for zs in 
measurement_data for z in zs])\n z_xmax = max([z[0] for zs in measurement_data for z in zs])\n z_ymin = min([z[1] for zs in measurement_data for z in zs])\n z_ymax = max([z[1] for zs in measurement_data for z in zs])\n\n clutter_density_estimate = total_number_of_clutter / (number_of_steps*(z_xmax - z_xmin)*(z_ymax - z_ymin)) # TODO\n detection_probability_estimate = number_of_detections / number_of_steps\n\n print(\"Hints from tuninghints.py:\")\n print(f\"A reasonable clutter density is {clutter_density_estimate}\")\n print(f\"A reasonable detection probability is \"\n f\"{detection_probability_estimate}\")\n", "id": "8110746", "language": "Python", "matching_score": 1.2076727151870728, "max_stars_count": 0, "path": "Assignments/Assignment_05/pda/tuninghints.py" }, { "content": "# set to false for speedup.\n# Speedup will also occur if argument '-O' is given to python,\n# as __debug__ then is False\nDEBUG = False and __debug__\n\n\nsigma_a = 2.2 # acceleration standard deviation\nsigma_z = 3.2 # measurement standard deviation\n\n# clutter density, (measurements per m^2, is this reasonable?)\nclutter_density = 0.003\n\n# detection probability, (how often cyan dot appear, is this reasonable?)\ndetection_prob = 0.896\n\n# gate percentile, (estimated percentage of correct measurements that will be\n# accepted by gate function)\ngate_percentile = 0.999\n", "id": "4567342", "language": "Python", "matching_score": 2.095245122909546, "max_stars_count": 0, "path": "Assignments/Assignment_05/pda/config.py" }, { "content": "# set to false for speedup.\n# Speedup will also occur if argument '-O' is given to python,\n# as __debug__ then is False\nDEBUG = False and __debug__\n", "id": "7723605", "language": "Python", "matching_score": 1.1553982496261597, "max_stars_count": 0, "path": "Assignments/Assignment_03/ekf/config.py" }, { "content": "import numpy as np\n# set to 'sim' or 'real'\nRUN = 'sim'\n\n# set to False for speedup, skips sanity checks for in dataclasses\nDEBUG = False and __debug__\n\n# set to True for speedup as matrix exponential is approximated in\n# ESKF.get_van_loan_matrix()\nDO_APPROXIMATIONS = True\n\n# max unning time set to np.inf to run through all the data\nMAX_TIME = 600\n", "id": "2122608", "language": "Python", "matching_score": 0.13224317133426666, "max_stars_count": 0, "path": "Graded/G2/eskf/config.py" }, { "content": "#!/usr/bin/env python\n\nimport sys\n\ndef print_status_bar(i, progress, N, size=50):\n if i % (N/size) == 0:\n progress += 1\n\n sys.stdout.write('\\r')\n sys.stdout.write(\"Status: [\" + \"=\"*progress + \" \"*(size-progress) + \"]\")\n sys.stdout.flush()\n\n return progress\n\ndef print_execution_time(begin, end):\n total_calculation_time = float((end - begin).total_seconds())\n print(\"Calculation time: %f seconds\" % total_calculation_time)", "id": "10718960", "language": "Python", "matching_score": 0.26498904824256897, "max_stars_count": 1, "path": "scripts/utility.py" }, { "content": "from typing import Sequence\nimport numpy as np\nfrom numpy import ndarray\nfrom matplotlib import pyplot as plt\nfrom mpl_toolkits import mplot3d\nfrom pathlib import Path\n\nfrom datatypes.eskf_states import NominalState\nimport config\nfrom scipy.stats import chi2\n\nplot_folder = Path(__file__).parents[1].joinpath('plots')\nplot_folder.mkdir(exist_ok=True)\n\nplt.rcParams[\"axes.grid\"] = True\nplt.rcParams[\"grid.linestyle\"] = \":\"\nplt.rcParams[\"legend.framealpha\"] = 1\n\n\ndef plot_state(x_nom_seq: Sequence[NominalState]):\n fig, ax = plt.subplots(5, 
sharex=True, figsize=(6.4, 7))\n fig.canvas.manager.set_window_title(\"States\")\n times = [x.ts for x in x_nom_seq]\n\n ax[0].plot(times, [x.pos for x in x_nom_seq],\n label=[f\"${s}$\" for s in \"xyz\"])\n ax[0].set_ylabel(r\"$\\mathbf{\\rho}$ [$m$]\")\n\n ax[1].plot(times, [x.vel for x in x_nom_seq],\n label=[f\"${s}$\" for s in \"uvw\"])\n ax[1].set_ylabel(r\"$\\mathbf{v}$ [$m/s$]\")\n\n ax[2].plot(times, [np.rad2deg(x.ori.as_euler()) for x in x_nom_seq],\n label=[f\"${s}$\" for s in [r\"\\phi\", r\"\\theta\", r\"\\psi\"]])\n ax[2].set_ylabel(r\"$\\mathbf{q}$ (as euler) [deg]\")\n\n ax[3].plot(times, [x.accm_bias for x in x_nom_seq],\n label=[f\"${s}$\" for s in \"xyz\"])\n ax[3].set_ylabel(r\"$\\mathbf{a}_b$ [$m/s^2$]\")\n\n ax[4].plot(times, [np.rad2deg(x.gyro_bias) for x in x_nom_seq],\n label=[f\"${s}$\" for s in [r\"\\phi\", r\"\\theta\", r\"\\psi\"]])\n ax[4].set_ylabel(r\"$\\mathbf{\\omega}_b$ [deg$/s$]\")\n\n ax[-1].set_xlabel(\"$t$ [$s$]\")\n\n for i in range(len(ax)):\n ax[i].legend(loc=\"upper right\")\n\n fig.align_ylabels(ax)\n fig.subplots_adjust(left=0.15, right=0.97, bottom=0.08, top=0.97,\n hspace=0.1)\n fig.savefig(plot_folder.joinpath(\"States.pdf\"))\n\n\ndef plot_errors(times: Sequence[float], errors: Sequence['ndarray[15]']):\n fig, ax = plt.subplots(5, sharex=True, figsize=(6.4, 7))\n fig.canvas.manager.set_window_title(\"Errors\")\n\n ax[0].plot(times, errors[:, :3],\n label=[f\"${s}$\" for s in \"xyz\"])\n ax[0].set_ylabel(r\"$\\mathbf{\\delta \\rho}$ [$m$]\")\n\n ax[1].plot(times, errors[:, 3:6],\n label=[f\"${s}$\" for s in \"uvw\"])\n ax[1].set_ylabel(r\"$\\mathbf{\\delta v}$ [$m/s$]\")\n\n ax[2].plot(times, np.rad2deg(errors[:, 6:9]),\n label=[f\"${s}$\" for s in [r\"\\phi\", r\"\\theta\", r\"\\psi\"]])\n ax[2].set_ylabel(r\"$\\mathbf{\\delta \\Theta}$ [deg]\")\n\n ax[3].plot(times, errors[:, 9:12],\n label=[f\"${s}$\" for s in \"xyz\"])\n ax[3].set_ylabel(r\"$\\mathbf{\\delta a}_b$ [$m/s^2$]\")\n\n ax[4].plot(times, np.rad2deg(errors[:, 12:15]),\n label=[f\"${s}$\" for s in [r\"\\phi\", r\"\\theta\", r\"\\psi\"]])\n ax[4].set_ylabel(r\"$\\mathbf{ \\delta\\omega}_b$ [deg$/s$]\")\n\n ax[-1].set_xlabel(\"$t$ [$s$]\")\n\n for i in range(len(ax)):\n ax[i].legend(loc=\"upper right\")\n\n fig.align_ylabels(ax)\n fig.subplots_adjust(left=0.15, right=0.97, bottom=0.08, top=0.97,\n hspace=0.1)\n fig.savefig(plot_folder.joinpath(\"Errors.pdf\"))\n\n\ndef plot_position_path_3d(x_nom, x_true=None):\n\n fig = plt.figure(figsize=(6.4, 5.2))\n ax = fig.add_subplot(111, projection=\"3d\")\n fig.canvas.manager.set_window_title(\"Position 3D\")\n if x_true:\n ax.plot(*np.array([x.pos * np.array([1, 1, -1]) for x in x_true]).T,\n c='C1', label=r\"$\\mathbf{\\rho}_t$\")\n ax.plot(*np.array([x.pos * np.array([1, 1, -1]) for x in x_nom]).T,\n c='C0', label=r\"$\\mathbf{\\rho}$\")\n ax.legend(loc=\"upper right\")\n ax.set_xlabel(\"north ($x$) [$m$]\")\n ax.set_ylabel(\"east ($y$) [$m$]\")\n ax.set_zlabel(\"up ($-z$) [$m$]\")\n fig.subplots_adjust(left=0.1, right=0.9, bottom=0.05, top=0.95)\n fig.savefig(plot_folder.joinpath(\"Position3D.pdf\"))\n\n\ndef plot_nis(times, NIS_xyz, NIS_xy, NIS_z, confidence=0.90):\n confidence_intervals = [np.array(chi2.interval(confidence, ndof))\n for ndof in range(1, 4)]\n fig, ax = plt.subplots(3, 1, sharex=True, figsize=(6.4, 5.2))\n fig.canvas.manager.set_window_title(\"NIS\")\n\n for i, (NIS, name, nstates) in enumerate(zip([NIS_xyz, NIS_xy, NIS_z],\n ['xyz', 'xy', 'z'],\n [3, 2, 1])):\n ci_lower, ci_upper = confidence_intervals[nstates-1]\n 
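# For a consistent filter, roughly the chosen confidence fraction of the NIS\n        # samples should fall inside the chi-square interval for the matching\n        # degrees of freedom; the counts below quantify how far this run deviates.\n        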
n_total = len(NIS)\n n_below = len([None for value in NIS if value < ci_lower])\n n_above = len([None for value in NIS if value > ci_upper])\n frac_inside = (n_total - n_below - n_above)/n_total\n frac_below = n_below/n_total\n frac_above = n_above/n_total\n\n ax[i].plot(times, NIS, label=fr\"$NIS_{{{name}}}$\")\n ax[i].hlines([ci_lower, ci_upper], min(times), max(times), 'C3', \":\",\n label=f\"{confidence:2.1%} conf\")\n ax[i].set_title(\n f\"NIS ${{{name}}}$ \"\n f\"({frac_inside:2.1%} inside, {frac_below:2.1%} below, \"\n f\"{frac_above:2.1%} above \"\n f\" [{confidence:2.1%} conf])\")\n\n ax[i].set_yscale('log')\n\n ax[-1].set_xlabel('$t$ [$s$]')\n fig.align_ylabels(ax)\n fig.subplots_adjust(left=0.15, right=0.97, bottom=0.1, top=0.93,\n hspace=0.3)\n fig.savefig(plot_folder.joinpath('NIS.pdf'))\n\n\ndef plot_nees(times, pos, vel, avec, accm, gyro, confidence=0.90):\n ci_lower, ci_upper = np.array(chi2.interval(confidence, 4))\n fig, ax = plt.subplots(5, 1, sharex=True, figsize=(6.4, 9))\n fig.canvas.manager.set_window_title(\"NEES\")\n\n enu = enumerate(zip(\n [pos, vel, avec, accm, gyro],\n [r\"\\mathbf{\\rho}\", r\"\\mathbf{v}\", r\"\\mathbf{\\Theta}\",\n r\"\\mathbf{a}_b\", r\"\\mathbf{\\omega}_b\"]))\n for i, (NEES, name) in enu:\n n_total = len(NEES)\n n_below = len([None for value in NEES if value < ci_lower])\n n_above = len([None for value in NEES if value > ci_upper])\n frac_inside = (n_total - n_below - n_above)/n_total\n frac_below = n_below/n_total\n frac_above = n_above/n_total\n\n ax[i].plot(times, NEES, label=fr\"$NEES_{{{name}}}$\")\n ax[i].hlines([ci_lower, ci_upper], min(times), max(times), 'C3', \":\",\n label=f\"{confidence:2.1%} conf\")\n ax[i].set_title(\n fr\"NEES ${{{name}}}$ \"\n fr\"({frac_inside:2.1%} inside, \"\n f\" {frac_below:2.1%} below, {frac_above:2.1%} above \"\n f\"[{confidence:2.1%} conf])\"\n )\n\n ax[i].set_yscale('log')\n\n ax[-1].set_xlabel('$t$ [$s$]')\n fig.align_ylabels(ax)\n fig.subplots_adjust(left=0.15, right=0.97, bottom=0.06, top=0.94,\n hspace=0.3)\n fig.savefig(plot_folder.joinpath('NEES.pdf'))\n", "id": "2558980", "language": "Python", "matching_score": 4.387444496154785, "max_stars_count": 0, "path": "Graded/G2/eskf/plotting.py" }, { "content": "from matplotlib import pyplot as plt\nfrom scipy.stats import chi2\nimport numpy as np\n\n\ndef plot_NEES(NEES, dof=2, confidence=0.95):\n fig, ax = plt.subplots(1)\n ax.plot(NEES, label=\"NEES\")\n conf_lower = chi2.ppf((1-confidence)/2, df=2)\n conf_upper = chi2.ppf(1 - (1-confidence)/2, df=2)\n n_total = len(NEES)\n n_below = len([None for value in NEES if value < conf_lower])\n n_above = len([None for value in NEES if value > conf_upper])\n frac_inside = (n_total - n_below - n_above)/n_total\n frac_below = n_below/n_total\n frac_above = n_above/n_total\n\n ax.hlines([conf_lower, conf_upper], 0, len(NEES), \"r\", \":\",\n label=f\"{confidence:2.1%} confidence interval\")\n ax.legend()\n ax.set_title(f\"NEES\\n {frac_inside:2.1%} \"\n f\"inside {confidence:2.1%} confidence interval \"\n f\"({frac_below:2.1%} below, {frac_above:2.1%} above)\")\n\n ax.set_yscale('log')\n ax.set_ylabel('NEES')\n ax.set_xlabel('K')\n fig.tight_layout()\n", "id": "1666310", "language": "Python", "matching_score": 0.7832673788070679, "max_stars_count": 0, "path": "Assignments/Assignment_05/pda/utils/plotting.py" }, { "content": "# %% Imports\nfrom scipy.io import loadmat\nfrom scipy.stats import chi2\nfrom pathlib import Path\n\ntry:\n from tqdm import tqdm\nexcept ImportError as e:\n print(e)\n print(\"install 
tqdm for progress bar\")\n\n # def tqdm as dummy\n def tqdm(*args, **kwargs):\n return args[0]\n\n\nimport numpy as np\nfrom EKFSLAM import EKFSLAM\nimport matplotlib\nimport matplotlib.pyplot as plt\nfrom matplotlib import animation\nfrom plotting import ellipse\nfrom vp_utils import detectTrees, odometry, Car\nfrom utils import rotmat2d\n\n# %% plot config check and style setup\n\n\n# to see your plot config\nprint(f\"matplotlib backend: {matplotlib.get_backend()}\")\nprint(f\"matplotlib config file: {matplotlib.matplotlib_fname()}\")\nprint(f\"matplotlib config dir: {matplotlib.get_configdir()}\")\nplt.close(\"all\")\n\n# try to set separate window ploting\nif \"inline\" in matplotlib.get_backend():\n print(\"Plotting is set to inline at the moment:\", end=\" \")\n\n if \"ipykernel\" in matplotlib.get_backend():\n print(\"backend is ipykernel (IPython?)\")\n print(\"Trying to set backend to separate window:\", end=\" \")\n import IPython\n\n IPython.get_ipython().run_line_magic(\"matplotlib\", \"\")\n else:\n print(\"unknown inline backend\")\n\nprint(\"continuing with this plotting backend\", end=\"\\n\\n\\n\")\n\n\n# set styles\ntry:\n # installed with \"pip install SciencePLots\" (https://github.com/garrettj403/SciencePlots.git)\n # gives quite nice plots\n plt_styles = [\"science\", \"grid\", \"bright\", \"no-latex\"]\n plt.style.use(plt_styles)\n print(f\"pyplot using style set {plt_styles}\")\nexcept Exception as e:\n print(e)\n print(\"setting grid and only grid and legend manually\")\n plt.rcParams.update(\n {\n # setgrid\n \"axes.grid\": True,\n \"grid.linestyle\": \":\",\n \"grid.color\": \"k\",\n \"grid.alpha\": 0.5,\n \"grid.linewidth\": 0.5,\n # Legend\n \"legend.frameon\": True,\n \"legend.framealpha\": 1.0,\n \"legend.fancybox\": True,\n \"legend.numpoints\": 1,\n }\n )\n\n\ndef main():\n # %% Load data\n victoria_park_foler = Path(\n __file__).parents[1].joinpath(\"data/victoria_park\")\n realSLAM_ws = {\n **loadmat(str(victoria_park_foler.joinpath(\"aa3_dr\"))),\n **loadmat(str(victoria_park_foler.joinpath(\"aa3_lsr2\"))),\n **loadmat(str(victoria_park_foler.joinpath(\"aa3_gpsx\"))),\n }\n\n timeOdo = (realSLAM_ws[\"time\"] / 1000).ravel()\n timeLsr = (realSLAM_ws[\"TLsr\"] / 1000).ravel()\n timeGps = (realSLAM_ws[\"timeGps\"] / 1000).ravel()\n\n steering = realSLAM_ws[\"steering\"].ravel()\n speed = realSLAM_ws[\"speed\"].ravel()\n LASER = (\n realSLAM_ws[\"LASER\"] / 100\n ) # Divide by 100 to be compatible with Python implementation of detectTrees\n La_m = realSLAM_ws[\"La_m\"].ravel()\n Lo_m = realSLAM_ws[\"Lo_m\"].ravel()\n\n K = timeOdo.size\n mK = timeLsr.size\n Kgps = timeGps.size\n\n # %% Parameters\n\n L = 2.83 # axel distance\n H = 0.76 # center to wheel encoder\n a = 0.95 # laser distance in front of first axel\n b = 0.5 # laser distance to the left of center\n\n car = Car(L, H, a, b)\n\n\n sigmas = 0.02 * np.array([0.001, 0.0005, 6 * np.pi / 180]) # TODO tune\n CorrCoeff = np.array([[1, 0, 0], [0, 1, 0.9], [0, 0.9, 1]])\n Q = np.diag(sigmas) @ CorrCoeff @ np.diag(sigmas)\n R = np.diag([0.1, 0.5 * np.pi / 180]) ** 2 # TODO tune\n\n # first is for joint compatibility, second is individual\n JCBBalphas = np.array([1e-5, 1e-6]) # TODO tune\n\n\n sensorOffset = np.array([car.a + car.L, car.b])\n doAsso = True\n\n slam = EKFSLAM(Q, R, do_asso=doAsso, alphas=JCBBalphas,\n sensor_offset=sensorOffset)\n\n # For consistency testing\n alpha = 0.05\n confidence_prob = 1 - alpha\n\n xupd = np.zeros((mK, 3))\n a = [None] * mK\n NIS = np.zeros(mK)\n NISnorm = 
np.zeros(mK)\n CI = np.zeros((mK, 2))\n CInorm = np.zeros((mK, 2))\n\n # Initialize state\n # you might want to tweak these for a good reference\n eta = np.array([Lo_m[0], La_m[1], 36 * np.pi / 180])\n P = np.zeros((3, 3))\n\n mk_first = 1 # first seems to be a bit off in timing\n mk = mk_first\n t = timeOdo[0]\n\n # %% run\n N = 20000 # K\n\n doPlot = False\n\n lh_pose = None\n\n if doPlot:\n fig, ax = plt.subplots(num=1, clear=True)\n\n lh_pose = ax.plot(eta[0], eta[1], \"k\", lw=3)[0]\n sh_lmk = ax.scatter(np.nan, np.nan, c=\"r\", marker=\"x\")\n sh_Z = ax.scatter(np.nan, np.nan, c=\"b\", marker=\".\")\n\n do_raw_prediction = True\n if do_raw_prediction:\n odos = np.zeros((K, 3))\n odox = np.zeros((K, 3))\n odox[0] = eta\n P_odo = P.copy()\n for k in range(min(N, K - 1)):\n odos[k + 1] = odometry(speed[k + 1], steering[k + 1], 0.025, car)\n odox[k + 1], _ = slam.predict(odox[k], P_odo, odos[k + 1])\n\n for k in tqdm(range(N)):\n if mk < mK - 1 and timeLsr[mk] <= timeOdo[k + 1]:\n # Force P to symmetric: there are issues with long runs (>10000 steps)\n # seem like the prediction might be introducing some minor asymetries,\n # so best to force P symetric before update (where chol etc. is used).\n # TODO: remove this for short debug runs in order to see if there are small errors\n P = (P + P.T) / 2\n dt = timeLsr[mk] - t\n if dt < 0: # avoid assertions as they can be optimized avay?\n raise ValueError(\"negative time increment\")\n\n # ? reset time to this laser time for next post predict\n t = timeLsr[mk]\n odo = odometry(speed[k + 1], steering[k + 1], dt, car)\n eta, P = slam.predict(eta, P, odo)\n\n z = detectTrees(LASER[mk])\n eta, P, NIS[mk], a[mk] = slam.update(eta, P, z)\n\n num_asso = np.count_nonzero(a[mk] > -1)\n\n if num_asso > 0:\n NISnorm[mk] = NIS[mk] / (2 * num_asso)\n CInorm[mk] = np.array(chi2.interval(confidence_prob, 2 * num_asso)) / (\n 2 * num_asso\n )\n else:\n NISnorm[mk] = 1\n CInorm[mk].fill(1)\n\n xupd[mk] = eta[:3]\n\n if doPlot:\n sh_lmk.set_offsets(eta[3:].reshape(-1, 2))\n if len(z) > 0:\n zinmap = (\n rotmat2d(eta[2])\n @ (\n z[:, 0] *\n np.array([np.cos(z[:, 1]), np.sin(z[:, 1])])\n + slam.sensor_offset[:, None]\n )\n + eta[0:2, None]\n )\n sh_Z.set_offsets(zinmap.T)\n lh_pose.set_data(*xupd[mk_first:mk, :2].T)\n\n ax.set(\n xlim=[-200, 200],\n ylim=[-200, 200],\n title=f\"step {k}, laser scan {mk}, landmarks {len(eta[3:])//2},\\nmeasurements {z.shape[0]}, num new = {np.sum(a[mk] == -1)}\",\n )\n plt.draw()\n plt.pause(0.00001)\n\n mk += 1\n\n if k < K - 1:\n dt = timeOdo[k + 1] - t\n t = timeOdo[k + 1]\n odo = odometry(speed[k + 1], steering[k + 1], dt, car)\n eta, P = slam.predict(eta, P, odo)\n\n # %% Consistency\n\n # NIS\n insideCI = (CInorm[:mk, 0] <= NISnorm[:mk]) * \\\n (NISnorm[:mk] <= CInorm[:mk, 1])\n\n fig3, ax3 = plt.subplots(num=3, clear=True)\n ax3.plot(CInorm[:mk, 0], \"--\")\n ax3.plot(CInorm[:mk, 1], \"--\")\n ax3.plot(NISnorm[:mk], lw=0.5)\n\n ax3.set_title(f\"NIS, {insideCI.mean()*100:.2f}% inside CI\")\n\n # %% slam\n\n if do_raw_prediction:\n fig5, ax5 = plt.subplots(num=5, clear=True)\n ax5.scatter(\n Lo_m[timeGps < timeOdo[N - 1]],\n La_m[timeGps < timeOdo[N - 1]],\n c=\"r\",\n marker=\".\",\n label=\"GPS\",\n )\n #ax5.plot(*odox[:N, :2].T, label=\"odom\")\n ax5.plot(*xupd[mk_first:mk, :2].T, label=\"x_SLAM\")\n ax5.grid()\n ax5.set_title(\"GPS vs SLAM pose estimate\")\n ax5.legend()\n\n # %%\n fig6, ax6 = plt.subplots(num=6, clear=True)\n ax6.scatter(*eta[3:].reshape(-1, 2).T, color=\"r\", marker=\"x\")\n ax6.plot(*xupd[mk_first:mk, 
:2].T)\n ax6.set(\n title=f\"Steps {k}, laser scans {mk-1}, landmarks {len(eta[3:])//2},\\nmeasurements {z.shape[0]}, num new = {np.sum(a[mk] == -1)}\"\n )\n plt.show()\n\n\nif __name__ == \"__main__\":\n main()\n", "id": "1313040", "language": "Python", "matching_score": 6.96122932434082, "max_stars_count": 0, "path": "Graded/G3/slam/run_real_SLAM.py" }, { "content": "# %% Imports\nfrom plotting import ellipse\nfrom EKFSLAM import EKFSLAM\nfrom typing import List, Optional\n\nfrom scipy.io import loadmat\nimport numpy as np\n\nimport matplotlib\nimport matplotlib.pyplot as plt\nfrom matplotlib import animation\nfrom scipy.stats import chi2\nimport utils\nfrom pathlib import Path\n\ntry:\n from tqdm import tqdm\nexcept ImportError as e:\n print(e)\n print(\"install tqdm to have progress bar\")\n\n # def tqdm as dummy as it is not available\n def tqdm(*args, **kwargs):\n return args[0]\n\n# %% plot config check and style setup\n\n\n# to see your plot config\nprint(f\"matplotlib backend: {matplotlib.get_backend()}\")\nprint(f\"matplotlib config file: {matplotlib.matplotlib_fname()}\")\nprint(f\"matplotlib config dir: {matplotlib.get_configdir()}\")\nplt.close(\"all\")\n\n# try to set separate window ploting\nif \"inline\" in matplotlib.get_backend():\n print(\"Plotting is set to inline at the moment:\", end=\" \")\n\n if \"ipykernel\" in matplotlib.get_backend():\n print(\"backend is ipykernel (IPython?)\")\n print(\"Trying to set backend to separate window:\", end=\" \")\n import IPython\n\n IPython.get_ipython().run_line_magic(\"matplotlib\", \"\")\n else:\n print(\"unknown inline backend\")\n\nprint(\"continuing with this plotting backend\", end=\"\\n\\n\\n\")\n\n\n# set styles\ntry:\n # installed with \"pip install SciencePLots\" (https://github.com/garrettj403/SciencePlots.git)\n # gives quite nice plots\n plt_styles = [\"science\", \"grid\", \"bright\", \"no-latex\"]\n plt.style.use(plt_styles)\n print(f\"pyplot using style set {plt_styles}\")\nexcept Exception as e:\n print(e)\n print(\"setting grid and only grid and legend manually\")\n plt.rcParams.update(\n {\n # setgrid\n \"axes.grid\": True,\n \"grid.linestyle\": \":\",\n \"grid.color\": \"k\",\n \"grid.alpha\": 0.5,\n \"grid.linewidth\": 0.5,\n # Legend\n \"legend.frameon\": True,\n \"legend.framealpha\": 1.0,\n \"legend.fancybox\": True,\n \"legend.numpoints\": 1,\n }\n )\n\n\ndef main():\n # %% Load data\n datafile = Path(__file__).parents[1].joinpath(\"data/simulatedSLAM\")\n simSLAM_ws = loadmat(str(datafile))\n\n # NB: this is a MATLAB cell, so needs to \"double index\" to get out the measurements of a time step k:\n #\n # ex:\n #\n # z_k = z[k][0] # z_k is a (2, m_k) matrix with columns equal to the measurements of time step k\n #\n ##\n z = [zk.T for zk in simSLAM_ws[\"z\"].ravel()]\n\n landmarks = simSLAM_ws[\"landmarks\"].T\n odometry = simSLAM_ws[\"odometry\"].T\n poseGT = simSLAM_ws[\"poseGT\"].T\n\n K = len(z)\n M = len(landmarks)\n\n # %% Initilize\n Q = np.diag([0.018, 0.018, 0.45 * np.pi / 180]) ** 2 # first element is variance of x, second is variance of y, third is variance of heading\n R = np.diag([0.1, 0.9 * np.pi / 180]) ** 2 # first element is variance of radius, second is variance of theta (range, azi)\n\n # first is for joint compatibility, second is individual\n JCBBalphas = np.array([1e-5, 1e-5]) \n\n doAsso = True\n\n\n# these can have a large effect on runtime either through the number of landmarks created\n# or by the size of the association search space.\n\n slam = EKFSLAM(Q, R, do_asso=doAsso, 
alphas=JCBBalphas)\n\n # allocate\n eta_pred: List[Optional[np.ndarray]] = [None] * K\n P_pred: List[Optional[np.ndarray]] = [None] * K\n eta_hat: List[Optional[np.ndarray]] = [None] * K\n P_hat: List[Optional[np.ndarray]] = [None] * K\n a: List[Optional[np.ndarray]] = [None] * K\n NIS = np.zeros(K)\n NISnorm = np.zeros(K)\n CI = np.zeros((K, 2))\n CInorm = np.zeros((K, 2))\n NEESes = np.zeros((K, 3))\n\n # For consistency testing\n alpha = 0.05\n confidence_prob = 1 - alpha\n\n # init\n eta_pred[0] = poseGT[0] # we start at the correct position for reference\n # we also say that we are 100% sure about that\n P_pred[0] = np.zeros((3, 3))\n\n # %% Set up plotting\n # plotting\n\n doAssoPlot = False\n playMovie = False\n if doAssoPlot:\n figAsso, axAsso = plt.subplots(num=1, clear=True)\n\n # %% Run simulation\n N = K\n\n print(\"starting sim (\" + str(N) + \" iterations)\")\n\n for k, z_k in tqdm(enumerate(z[:N]), total=N):\n # See top: need to do \"double indexing\" to get z at time step k\n # Transpose is to stack measurements rowwise\n # z_k = z[k][0].T\n\n eta_hat[k], P_hat[k], NIS[k], a[k] = slam.update(eta_pred[k], P_pred[k], z_k)\n\n if k < K - 1:\n eta_pred[k + 1], P_pred[k + 1] = slam.predict(eta_hat[k], P_hat[k], odometry[k])\n\n assert (\n eta_hat[k].shape[0] == P_hat[k].shape[0]\n ), \"dimensions of mean and covariance do not match\"\n\n num_asso = np.count_nonzero(a[k] > -1)\n\n CI[k] = chi2.interval(confidence_prob, 2 * num_asso)\n\n if num_asso > 0:\n NISnorm[k] = NIS[k] / (2 * num_asso)\n CInorm[k] = CI[k] / (2 * num_asso)\n else:\n NISnorm[k] = 1\n CInorm[k].fill(1)\n\n NEESes[k] = slam.NEESes(eta_hat[k][0:3], P_hat[k][0:3, 0:3], poseGT[k]) \n\n if doAssoPlot and k > 0:\n axAsso.clear()\n axAsso.grid()\n zpred = slam.h(eta_pred[k]).reshape(-1, 2)\n axAsso.scatter(z_k[:, 0], z_k[:, 1], label=\"z\")\n axAsso.scatter(zpred[:, 0], zpred[:, 1], label=\"zpred\")\n xcoords = np.block(\n [[z_k[a[k] > -1, 0]], [zpred[a[k][a[k] > -1], 0]]]).T\n ycoords = np.block(\n [[z_k[a[k] > -1, 1]], [zpred[a[k][a[k] > -1], 1]]]).T\n for x, y in zip(xcoords, ycoords):\n axAsso.plot(x, y, lw=3, c=\"r\")\n axAsso.legend()\n axAsso.set_title(\n f\"k = {k}, {np.count_nonzero(a[k] > -1)} associations\")\n plt.draw()\n plt.pause(0.001)\n\n print(\"sim complete\")\n\n pose_est = np.array([x[:3] for x in eta_hat[:N]])\n lmk_est = [eta_hat_k[3:].reshape(-1, 2) for eta_hat_k in eta_hat]\n lmk_est_final = lmk_est[N - 1]\n\n np.set_printoptions(precision=4, linewidth=100)\n\n # %% Plotting of results\n mins = np.amin(landmarks, axis=0)\n maxs = np.amax(landmarks, axis=0)\n\n ranges = maxs - mins\n offsets = ranges * 0.2\n\n mins -= offsets\n maxs += offsets\n\n fig2, ax2 = plt.subplots(num=2, clear=True)\n # landmarks\n ax2.scatter(*landmarks.T, c=\"r\", marker=\"^\")\n ax2.scatter(*lmk_est_final.T, c=\"b\", marker=\".\")\n # Draw covariance ellipsis of measurements\n for l, lmk_l in enumerate(lmk_est_final):\n idxs = slice(3 + 2 * l, 3 + 2 * l + 2)\n rI = P_hat[N - 1][idxs, idxs]\n el = ellipse(lmk_l, rI, 5, 200)\n ax2.plot(*el.T, \"b\")\n\n ax2.plot(*poseGT.T[:2], c=\"r\", label=\"gt\")\n ax2.plot(*pose_est.T[:2], c=\"g\", label=\"est\")\n ax2.plot(*ellipse(pose_est[-1, :2], P_hat[N - 1][:2, :2], 5, 200).T, c=\"g\")\n ax2.set(title=\"results\", xlim=(mins[0], maxs[0]), ylim=(mins[1], maxs[1]))\n ax2.axis(\"equal\")\n ax2.grid()\n\n # %% Consistency\n\n # NIS\n insideCI = (CInorm[:N, 0] <= NISnorm[:N]) * (NISnorm[:N] <= CInorm[:N, 1])\n\n fig3, ax3 = plt.subplots(num=3, clear=True)\n ax3.plot(CInorm[:N, 0], 
'--')\n ax3.plot(CInorm[:N, 1], '--')\n ax3.plot(NISnorm[:N], lw=0.5)\n\n ax3.set_title(f'NIS, {insideCI.mean()*100}% inside CI')\n\n # NEES\n\n fig4, ax4 = plt.subplots(nrows=3, ncols=1, figsize=(\n 7, 5), num=4, clear=True, sharex=True)\n tags = ['all', 'pos', 'heading']\n dfs = [3, 2, 1]\n\n for ax, tag, NEES, df in zip(ax4, tags, NEESes.T, dfs):\n CI_NEES = chi2.interval(confidence_prob, df)\n ax.plot(np.full(N, CI_NEES[0]), '--')\n ax.plot(np.full(N, CI_NEES[1]), '--')\n ax.plot(NEES[:N], lw=0.5)\n insideCI = (CI_NEES[0] <= NEES) * (NEES <= CI_NEES[1])\n ax.set_title(f'NEES {tag}: {insideCI.mean()*100}% inside CI')\n\n CI_ANEES = np.array(chi2.interval(confidence_prob, df*N)) / N\n print(f\"CI ANEES {tag}: {CI_ANEES}\")\n print(f\"ANEES {tag}: {NEES.mean()}\")\n\n fig4.tight_layout()\n\n # %% RMSE\n\n ylabels = ['m', 'deg']\n scalings = np.array([1, 180/np.pi])\n\n fig5, ax5 = plt.subplots(nrows=2, ncols=1, figsize=(\n 7, 5), num=5, clear=True, sharex=True)\n\n pos_err = np.linalg.norm(pose_est[:N, :2] - poseGT[:N, :2], axis=1)\n heading_err = np.abs(utils.wrapToPi(pose_est[:N, 2] - poseGT[:N, 2]))\n\n errs = np.vstack((pos_err, heading_err))\n\n for ax, err, tag, ylabel, scaling in zip(ax5, errs, tags[1:], ylabels, scalings):\n ax.plot(err*scaling)\n ax.set_title(\n f\"{tag}: RMSE {np.sqrt((err**2).mean())*scaling} {ylabel}\")\n ax.set_ylabel(f\"[{ylabel}]\")\n ax.grid()\n\n fig5.tight_layout()\n\n # %% Movie time\n\n if playMovie:\n try:\n print(\"recording movie...\")\n\n from celluloid import Camera\n\n pauseTime = 0.05\n fig_movie, ax_movie = plt.subplots(num=6, clear=True)\n\n camera = Camera(fig_movie)\n\n ax_movie.grid()\n ax_movie.set(xlim=(mins[0], maxs[0]), ylim=(mins[1], maxs[1]))\n camera.snap()\n\n for k in tqdm(range(N)):\n ax_movie.scatter(*landmarks.T, c=\"r\", marker=\"^\")\n ax_movie.plot(*poseGT[:k, :2].T, \"r-\")\n ax_movie.plot(*pose_est[:k, :2].T, \"g-\")\n ax_movie.scatter(*lmk_est[k].T, c=\"b\", marker=\".\")\n\n if k > 0:\n el = ellipse(pose_est[k, :2], P_hat[k][:2, :2], 5, 200)\n ax_movie.plot(*el.T, \"g\")\n\n numLmk = lmk_est[k].shape[0]\n for l, lmk_l in enumerate(lmk_est[k]):\n idxs = slice(3 + 2 * l, 3 + 2 * l + 2)\n rI = P_hat[k][idxs, idxs]\n el = ellipse(lmk_l, rI, 5, 200)\n ax_movie.plot(*el.T, \"b\")\n\n camera.snap()\n animation = camera.animate(interval=100, blit=True, repeat=False)\n print(\"playing movie\")\n\n except ImportError:\n print(\n \"Install celluloid module, \\n\\n$ pip install celluloid\\n\\nto get fancy animation of EKFSLAM.\"\n )\n\n plt.show()\n # %%\n\n\nif __name__ == \"__main__\":\n main()\n", "id": "9179511", "language": "Python", "matching_score": 3.3659582138061523, "max_stars_count": 0, "path": "Graded/G3/slam/run_simulated_SLAM.py" }, { "content": "from typing import Tuple\nimport numpy as np\nfrom numpy import ndarray\nfrom dataclasses import dataclass, field\nfrom scipy.linalg import block_diag\nimport scipy.linalg as la\nfrom utils import rotmat2d\nfrom JCBB import JCBB\nimport utils\nimport solution\n\n\n@dataclass\nclass EKFSLAM:\n Q: ndarray\n R: ndarray\n do_asso: bool\n alphas: 'ndarray[2]' = field(default=np.array([0.001, 0.0001]))\n sensor_offset: 'ndarray[2]' = field(default=np.zeros(2))\n\n def f(self, x: np.ndarray, u: np.ndarray) -> np.ndarray:\n \"\"\"Add the odometry u to the robot state x.\n\n Parameters\n ----------\n x : np.ndarray, shape=(3,)\n the robot state\n u : np.ndarray, shape=(3,)\n the odometry\n\n Returns\n -------\n np.ndarray, shape = (3,)\n the predicted state\n \"\"\"\n psikm1 = x[2]\n\n 
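# The odometry u = [u_x, u_y, u_psi] is given in the body frame, so it is\n        # rotated by the previous heading psikm1 into the world frame before\n        # being added to the pose (standard 2D pose composition).\n        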
xk = x[0] + u[0]*np.cos(psikm1) - u[1]*np.sin(psikm1)\n yk = x[1] + u[0]*np.sin(psikm1) + u[1]*np.cos(psikm1)\n psik = psikm1 + u[2]\n\n xpred = np.array([xk, yk, psik])\n\n return xpred\n\n def Fx(self, x: np.ndarray, u: np.ndarray) -> np.ndarray:\n \"\"\"Calculate the Jacobian of f with respect to x.\n\n Parameters\n ----------\n x : np.ndarray, shape=(3,)\n the robot state\n u : np.ndarray, shape=(3,)\n the odometry\n\n Returns\n -------\n np.ndarray\n The Jacobian of f wrt. x.\n \"\"\"\n #Fx = solution.EKFSLAM.EKFSLAM.Fx(self, x, u)\n #return Fx\n\n psi = x[2]\n Fx = np.array([[1, 0, -u[0]*np.sin(psi) - u[1]*np.cos(psi)],\n [0, 1, u[0]*np.cos(psi) - u[1]*np.sin(psi)],\n [0, 0, 1]]) \n return Fx\n\n def Fu(self, x: np.ndarray, u: np.ndarray) -> np.ndarray:\n \"\"\"Calculate the Jacobian of f with respect to u.\n\n Parameters\n ----------\n x : np.ndarray, shape=(3,)\n the robot state\n u : np.ndarray, shape=(3,)\n the odometry\n\n Returns\n -------\n np.ndarray\n The Jacobian of f wrt. u.\n \"\"\"\n #Fu = solution.EKFSLAM.EKFSLAM.Fu(self, x, u)\n #return Fu\n\n psi = x[2]\n Fu = np.array([[np.cos(psi), -np.sin(psi), 0],\n [np.sin(psi), np.cos(psi), 0],\n [0, 0, 1]]) \n return Fu\n\n def predict(\n self, eta: np.ndarray, P: np.ndarray, z_odo: np.ndarray\n ) -> Tuple[np.ndarray, np.ndarray]:\n \"\"\"Predict the robot state using the zOdo as odometry the corresponding state&map covariance.\n\n Parameters\n ----------\n eta : np.ndarray, shape=(3 + 2*#landmarks,)\n the robot state and map concatenated\n P : np.ndarray, shape=(3 + 2*#landmarks,)*2\n the covariance of eta\n z_odo : np.ndarray, shape=(3,)\n the measured odometry\n\n Returns\n -------\n Tuple[np.ndarray, np.ndarray], shapes= (3 + 2*#landmarks,), (3 + 2*#landmarks,)*2\n predicted mean and covariance of eta.\n \"\"\"\n #etapred, P = solution.EKFSLAM.EKFSLAM.predict(self, eta, P, z_odo)\n #return etapred, P\n\n # check inout matrix\n assert np.allclose(P, P.T), \"EKFSLAM.predict: not symmetric P input\"\n assert np.all(\n np.linalg.eigvals(P) >= 0\n ), \"EKFSLAM.predict: non-positive eigen values in P input\"\n assert (\n eta.shape * 2 == P.shape\n ), \"EKFSLAM.predict: input eta and P shape do not match\"\n etapred = np.empty_like(eta)\n\n x = eta[:3]\n etapred[:3] = self.f(x, z_odo)\n etapred[3:] = eta[3:]\n\n Fx = self.Fx(x, z_odo)\n Fu = self.Fu(x, z_odo)\n\n # evaluate covariance prediction in place to save computation\n # only robot state changes, so only rows and colums of robot state needs changing\n # cov matrix layout:\n # [[P_xx, P_xm],\n # [P_mx, P_mm]]\n P[:3, :3] = Fx@P[:3, :3]@Fx.T + Fu@self.Q@Fu.T\n P[:3, 3:] = Fx@P[:3, 3:]\n P[3:, :3] = P[:3, 3:].T\n\n assert np.allclose(P, P.T), \"EKFSLAM.predict: not symmetric P\"\n assert np.all(\n np.linalg.eigvals(P) > 0\n ), \"EKFSLAM.predict: non-positive eigen values\"\n assert (\n etapred.shape * 2 == P.shape\n ), \"EKFSLAM.predict: calculated shapes does not match\"\n\n return etapred, P\n\n def h(self, eta: np.ndarray) -> np.ndarray:\n \"\"\"Predict all the landmark positions in sensor frame.\n\n Parameters\n ----------\n eta : np.ndarray, shape=(3 + 2 * #landmarks,)\n The robot state and landmarks stacked.\n\n Returns\n -------\n np.ndarray, shape=(2 * #landmarks,)\n The landmarks in the sensor frame.\n \"\"\"\n\n #zpred = solution.EKFSLAM.EKFSLAM.h(self, eta)\n #return zpred\n\n # extract states and map\n x = eta[0:3]\n # reshape map (2, #landmarks), m[:, j] is the jth landmark\n m = eta[3:].reshape((-1, 2)).T\n\n Rot = rotmat2d(-x[2])\n\n # relative position of 
landmark to sensor on robot in world frame\n delta_m = (m.T - eta[0:2]).T\n\n # predicted measurements in cartesian coordinates, beware sensor offset for VP\n zpredcart = Rot @ delta_m - self.sensor_offset[:, None] # None as index ads an axis with size 1 at that position.\n\n zpred_r = la.norm(zpredcart, 2, axis=0) # ranges\n zpred_theta = np.arctan2(zpredcart[1,:], zpredcart[0,:]) # bearings\n zpred = np.vstack((zpred_r, zpred_theta)) # the two arrays above stacked on top of each other vertically like\n\n # stack measurements along one dimension, [range1 bearing1 range2 bearing2 ...]\n zpred = zpred.T.ravel()\n\n assert (\n zpred.ndim == 1 and zpred.shape[0] == eta.shape[0] - 3\n ), \"SLAM.h: Wrong shape on zpred\"\n\n return zpred\n\n def h_jac(self, eta: np.ndarray) -> np.ndarray:\n \"\"\"Calculate the jacobian of h.\n Parameters\n ----------\n eta : np.ndarray, shape=(3 + 2 * #landmarks,)\n The robot state and landmarks stacked.\n Returns\n -------\n np.ndarray, shape=(2 * #landmarks, 3 + 2 * #landmarks)\n the jacobian of h wrt. eta.\n \"\"\"\n # H = solution.EKFSLAM.EKFSLAM.h_jac(self, eta)\n # return H\n\n # extract states and map\n x = eta[0:3]\n # reshape map (2, #landmarks), m[j] is the jth landmark\n m = eta[3:].reshape((-1, 2)).T\n\n numM = m.shape[1]\n\n Rot = rotmat2d(x[2])\n\n # relative position of landmark to robot in world frame. m - rho that appears in (11.15) and (11.16)\n delta_m = (m.T - eta[0:2]).T\n\n # (2, #measurements), each measured position in cartesian coordinates like\n zc = delta_m - Rot @ self.sensor_offset[:, None]\n zr = la.norm(zc, 2, axis=0) # ranges\n\n Rpihalf = rotmat2d(np.pi / 2)\n\n # In what follows you can be clever and avoid making this for all the landmarks you _know_\n # you will not detect (the maximum range should be available from the data).\n # But keep it simple to begin with.\n\n # Allocate H and set submatrices as memory views into H\n # You may or may not want to do this like this\n # see eq (11.15), (11.16), (11.17)\n H = np.zeros((2 * numM, 3 + 2 * numM))\n Hx = H[:, :3] # slice view, setting elements of Hx will set H as well\n Hm = H[:, 3:] # slice view, setting elements of Hm will set H as well\n\n # proposed way is to go through landmarks one by one\n # preallocate and update this for some speed gain if looping\n jac_z_cb = -np.eye(2, 3)\n for i in range(numM): # But this whole loop can be vectorized\n ind = 2 * i # starting postion of the ith landmark into H\n # the inds slice for the ith landmark into H\n inds = slice(ind, ind + 2)\n\n jac_z_cb[:,2] = -Rpihalf@delta_m[:,i]\n\n jac_x_range = zc[:,i].T / zr[i]\n jac_x_bearing = zc[:,i].T @ Rpihalf.T / zr[i]**2\n\n Hx[ind,:] = jac_x_range @ jac_z_cb\n Hx[ind+1,:] = jac_x_bearing @ jac_z_cb\n \n Hm[ind,inds] = jac_x_range \n Hm[ind+1,inds] = jac_x_bearing \n\n # You can set some assertions here to make sure that some of the structure in H is correct\n # Don't mind if I don't :)\n return H\n\n def add_landmarks(\n self, eta: np.ndarray, P: np.ndarray, z: np.ndarray\n ) -> Tuple[np.ndarray, np.ndarray]:\n \"\"\"Calculate new landmarks, their covariances and add them to the state.\n\n Parameters\n ----------\n eta : np.ndarray, shape=(3 + 2*#landmarks,)\n the robot state and map concatenated\n P : np.ndarray, shape=(3 + 2*#landmarks,)*2\n the covariance of eta\n z : np.ndarray, shape(2 * #newlandmarks,)\n A set of measurements to create landmarks for\n\n Returns\n -------\n Tuple[np.ndarray, np.ndarray], shapes=(3 + 2*(#landmarks + #newlandmarks,), (3 + 2*(#landmarks + 
#newlandmarks,)*2\n eta with new landmarks appended, and its covariance\n \"\"\"\n # etaadded, Padded = solution.EKFSLAM.EKFSLAM.add_landmarks(\n # self, eta, P, z)\n # return etaadded, Padded\n n = P.shape[0]\n assert z.ndim == 1, \"SLAM.add_landmarks: z must be a 1d array\"\n\n numLmk = z.shape[0] // 2\n\n lmnew = np.empty_like(z)\n\n Gx = np.empty((numLmk * 2, 3))\n Rall = np.zeros((numLmk * 2, numLmk * 2))\n\n I2 = np.eye(2) # Preallocate, used for Gx\n Rnb = rotmat2d(eta[2])\n sensor_offset_world = Rnb @ self.sensor_offset + eta[:2]\n sensor_offset_world_der = rotmat2d(\n eta[2] + np.pi / 2) @ self.sensor_offset # Used in Gx\n \n for j in range(numLmk):\n ind = 2 * j\n inds = slice(ind, ind + 2)\n zj = z[inds]\n\n ang = zj[1] + eta[2]\n rot = rotmat2d(ang) # rotmat in Gz\n\n # calculate position of new landmark in world frame\n lmnew[inds] = Rnb @ (zj[0] * np.array([np.cos(zj[1]), np.sin(zj[1])])) + sensor_offset_world\n\n Gx[inds, :2] = I2\n Gx[inds, 2] = zj[0] * np.array([-np.sin(ang), np.cos(ang)]) + sensor_offset_world_der\n\n Gz = rot @ np.diag([1, zj[0]])\n\n # Gz * R * Gz^T, transform measurement covariance from polar to cartesian coordinates\n Rall[inds, inds] = Gz @ self.R @ Gz.T\n\n assert len(lmnew) % 2 == 0, \"SLAM.add_landmark: lmnew not even length\"\n etaadded = np.append(eta, lmnew) # append new landmarks to state vector\n # block diagonal of P_new, see problem text in 1g) in graded assignment 3\n Padded = block_diag(P, Gx@P[:3,:3]@Gx.T + Rall)\n Padded[:n, n:] = P[:, :3]@Gx.T # top right corner of Padded\n Padded[n:, :n] = Padded[:n, n:].T # botton left corner of Padded\n\n assert (\n etaadded.shape * 2 == Padded.shape\n ), \"EKFSLAM.add_landmarks: calculated eta and P has wrong shape\"\n assert np.allclose(\n Padded, Padded.T\n ), \"EKFSLAM.add_landmarks: Padded not symmetric\"\n assert np.all(\n np.linalg.eigvals(Padded) >= 0\n ), \"EKFSLAM.add_landmarks: Padded not PSD\"\n\n return etaadded, Padded\n\n def associate(\n self, z: np.ndarray, zpred: np.ndarray, H: np.ndarray, S: np.ndarray,\n ): # -> Tuple[*((np.ndarray,) * 5)]:\n \"\"\"Associate landmarks and measurements, and extract correct matrices for these.\n\n Parameters\n ----------\n z : np.ndarray,\n The measurements all in one vector\n zpred : np.ndarray\n Predicted measurements in one vector\n H : np.ndarray\n The measurement Jacobian matrix related to zpred\n S : np.ndarray\n The innovation covariance related to zpred\n\n Returns\n -------\n Tuple[*((np.ndarray,) * 5)]\n The extracted measurements, the corresponding zpred, H, S and the associations.\n\n Note\n ----\n See the associations are calculated using JCBB. 
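(JCBB, Joint Compatibility Branch and Bound, searches over\n        measurement-to-landmark pairings and keeps the largest set that is\n        jointly consistent under the innovation covariance S.) 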
See this function for documentation\n of the returned association and the association procedure.\n \"\"\"\n if self.do_asso:\n # Associate\n a = JCBB(z, zpred, S, self.alphas[0], self.alphas[1])\n\n # Extract associated measurements\n zinds = np.empty_like(z, dtype=bool)\n zinds[::2] = a > -1 # -1 means no association\n zinds[1::2] = zinds[::2]\n zass = z[zinds]\n\n # extract and rearange predicted measurements and cov\n zbarinds = np.empty_like(zass, dtype=int)\n zbarinds[::2] = 2 * a[a > -1]\n zbarinds[1::2] = 2 * a[a > -1] + 1\n\n zpredass = zpred[zbarinds]\n Sass = S[zbarinds][:, zbarinds]\n Hass = H[zbarinds]\n\n assert zpredass.shape == zass.shape\n assert Sass.shape == zpredass.shape * 2\n assert Hass.shape[0] == zpredass.shape[0]\n\n return zass, zpredass, Hass, Sass, a\n else:\n # should one do something her\n pass\n\n def update(\n self, eta: np.ndarray, P: np.ndarray, z: np.ndarray\n ) -> Tuple[np.ndarray, np.ndarray, float, np.ndarray]:\n \"\"\"Update eta and P with z, associating landmarks and adding new ones.\n\n Parameters\n ----------\n eta : np.ndarray\n [description]\n P : np.ndarray\n [description]\n z : np.ndarray, shape=(#detections, 2)\n [description]\n\n Returns\n -------\n Tuple[np.ndarray, np.ndarray, float, np.ndarray]\n [description]\n \"\"\"\n # etaupd, Pupd, NIS, a = solution.EKFSLAM.EKFSLAM.update(self, eta, P, z)\n #return etaupd, Pupd, NIS, a\n\n numLmk = (eta.size - 3) // 2\n assert (len(eta) - 3) % 2 == 0, \"EKFSLAM.update: landmark lenght not even\"\n\n if numLmk > 0:\n # Prediction and innovation covariance\n zpred = self.h(eta)\n H = self.h_jac(eta)\n\n # Here you can use simply np.kron (a bit slow) to form the big (very big in VP after a while) R,\n # or be smart with indexing and broadcasting (3d indexing into 2d mat) realizing you are adding the same R on all diagonals\n S = H@P@H.T + np.kron(np.eye(numLmk), self.R)\n assert (\n S.shape == zpred.shape * 2\n ), \"EKFSLAM.update: wrong shape on either S or zpred\"\n z = z.ravel() # 2D -> flat\n\n # Perform data association\n za, zpred, Ha, Sa, a = self.associate(z, zpred, H, S)\n\n # No association could be made, so skip update\n if za.shape[0] == 0:\n etaupd = eta\n Pupd = P\n NIS = 1 # TODO: beware this one when analysing consistency.\n else:\n # Create the associated innovation\n v = za.ravel() - zpred # za: 2D -> flat\n v[1::2] = utils.wrapToPi(v[1::2])\n\n # Kalman mean update\n S_cho_factors = la.cho_factor(Sa) # Optional, used in places for S^-1, see scipy.linalg.cho_factor and scipy.linalg.cho_solve\n Sa_inv = la.cho_solve(S_cho_factors, np.eye(Sa.shape[0]))\n W = P@Ha.T@Sa_inv\n etaupd = eta + W@v\n\n # Kalman cov update: use Joseph form for stability\n jo = -W @ Ha\n # same as adding Identity mat\n jo[np.diag_indices(jo.shape[0])] += 1\n Pupd = jo@P@jo.T + W@np.kron(np.eye(int(len(zpred)/2)), self.R)@W.T\n\n # calculate NIS, can use S_cho_factors\n NIS = v.T@Sa_inv@v\n\n # When tested, remove for speed\n assert np.allclose(\n Pupd, Pupd.T), \"EKFSLAM.update: Pupd not symmetric\"\n assert np.all(\n np.linalg.eigvals(Pupd) > 0\n ), \"EKFSLAM.update: Pupd not positive definite\"\n\n else: # All measurements are new landmarks,\n a = np.full(z.shape[0], -1)\n z = z.flatten()\n NIS = 1 # TODO: beware this one when analysing consistency.\n etaupd = eta\n Pupd = P\n\n # Create new landmarks if any is available\n if self.do_asso:\n is_new_lmk = a == -1\n if np.any(is_new_lmk):\n z_new_inds = np.empty_like(z, dtype=bool)\n z_new_inds[::2] = is_new_lmk\n z_new_inds[1::2] = is_new_lmk\n z_new = 
z[z_new_inds]\n etaupd, Pupd = self.add_landmarks(etaupd, Pupd, z_new)\n\n assert np.allclose(\n Pupd, Pupd.T), \"EKFSLAM.update: Pupd must be symmetric\"\n assert np.all(np.linalg.eigvals(Pupd) >=\n 0), \"EKFSLAM.update: Pupd must be PSD\"\n\n return etaupd, Pupd, NIS, a\n\n @classmethod\n def NEESes(cls, x: np.ndarray, P: np.ndarray, x_gt: np.ndarray,) -> np.ndarray:\n \"\"\"Calculates the total NEES and the NEES for the substates\n Args:\n x (np.ndarray): The estimate\n P (np.ndarray): The state covariance\n x_gt (np.ndarray): The ground truth\n Raises:\n AssertionError: If any input is of the wrong shape, and if debug mode is on, certain numeric properties\n Returns:\n np.ndarray: NEES for [all, position, heading], shape (3,)\n \"\"\"\n\n assert x.shape == (3,), f\"EKFSLAM.NEES: x shape incorrect {x.shape}\"\n assert P.shape == (3, 3), f\"EKFSLAM.NEES: P shape incorrect {P.shape}\"\n assert x_gt.shape == (\n 3,), f\"EKFSLAM.NEES: x_gt shape incorrect {x_gt.shape}\"\n\n d_x = x - x_gt\n d_x[2] = utils.wrapToPi(d_x[2])\n assert (\n -np.pi <= d_x[2] <= np.pi\n ), \"EKFSLAM.NEES: error heading must be between (-pi, pi)\"\n\n d_p = d_x[0:2]\n P_p = P[0:2, 0:2]\n assert d_p.shape == (2,), \"EKFSLAM.NEES: d_p must be 2 long\"\n d_heading = d_x[2] # Note: scalar\n assert np.ndim(\n d_heading) == 0, \"EKFSLAM.NEES: d_heading must be scalar\"\n P_heading = P[2, 2] # Note: scalar\n assert np.ndim(\n P_heading) == 0, \"EKFSLAM.NEES: P_heading must be scalar\"\n\n # NB: Needs to handle both vectors and scalars! Additionally, must handle division by zero\n NEES_all = d_x @ (np.linalg.solve(P, d_x))\n NEES_pos = d_p @ (np.linalg.solve(P_p, d_p))\n try:\n NEES_heading = d_heading ** 2 / P_heading\n except ZeroDivisionError:\n NEES_heading = 1.0 # TODO: beware\n\n NEESes = np.array([NEES_all, NEES_pos, NEES_heading])\n NEESes[np.isnan(NEESes)] = 1.0 # We may divide by zero, # TODO: beware\n\n assert np.all(NEESes >= 0), \"ESKF.NEES: one or more negative NEESes\"\n return NEESes\n", "id": "12857898", "language": "Python", "matching_score": 2.938399314880371, "max_stars_count": 0, "path": "Graded/G3/slam/EKFSLAM.py" }, { "content": "import numpy as np\nfrom functools import lru_cache\nfrom scipy.stats import chi2\nimport scipy.linalg as la\nimport utils\n\nchi2isf_cached = lru_cache(maxsize=None)(chi2.isf)\n\n# TODO: make sure a is 0-indexed\ndef JCBB(z, zbar, S, alpha1, alpha2):\n assert len(z.shape) == 1, \"z must be in one row in JCBB\"\n assert z.shape[0] % 2 == 0, \"z must be equal in x and y\"\n m = z.shape[0] // 2\n\n a = np.full(m, -1, dtype=int)\n abest = np.full(m, -1, dtype=int)\n\n # ic has measurements rowwise and predicted measurements columnwise\n ic = individualCompatibility(z, zbar, S)\n g2 = chi2.isf(alpha2, 2)\n order = np.argsort(np.amin(ic, axis=1))\n j = 0\n z_order = np.empty(2 * len(order), dtype=int)\n z_order[::2] = 2 * order\n z_order[1::2] = 2 * order + 1\n zo = z[z_order]\n\n ico = ic[order]\n\n abesto = JCBBrec(zo, zbar, S, alpha1, g2, j, a, ico, abest)\n\n abest[order] = abesto\n\n return abest\n\n\ndef JCBBrec(z, zbar, S, alpha1, g2, j, a, ic, abest):\n m = z.shape[0] // 2\n assert isinstance(m, int), \"m in JCBBrec must be int\"\n n = num_associations(a)\n\n if j >= m: # end of recursion\n if n > num_associations(abest) or (\n (n >= num_associations(abest))\n and (NIS(z, zbar, S, a) < NIS(z, zbar, S, abest))\n ):\n abest = a\n # else abest = previous abest from the input\n else: # still at least one measurement to associate\n I = np.argsort(ic[j, ic[j, :] < g2])\n # 
allinds = np.array(range(ic.shape[1]), dtype=int)\n usableinds = np.where(ic[j, :] < g2)[0] # allinds[ic[j, :] < g2]\n # if np.any(np.where(ic[j, :] < g2)[0] != usableinds):\n # raise ValueError\n\n for i in usableinds[I]:\n a[j] = i\n # jointly compatible?\n if NIS(z, zbar, S, a) < chi2isf_cached(alpha1, 2 * (n + 1)):\n # We need to decouple ici from ic, so copy is required\n ici = ic[j:, i].copy()\n ic[j:, i] = np.Inf # landmark not available any more.\n\n # Needs to explicitly copy a for recursion to work\n abest = JCBBrec(z, zbar, S, alpha1, g2, j + 1, a.copy(), ic, abest)\n ic[j:, i] = ici # set landmark available again for next round.\n\n if n + (m - j - 2) >= num_associations(abest):\n a[j] = -1\n abest = JCBBrec(z, zbar, S, alpha1, g2, j + 1, a, ic, abest)\n\n return abest\n\n\n# @profilehooks.profile(sort=\"cumulative\")\ndef individualCompatibility(z, zbar, S):\n nz = z.shape[0] // 2\n nz_bar = zbar.shape[0] // 2\n\n assert z.shape[0] % 2 == 0, \"JCBB.individualCompatibility: z must have even length\"\n assert (\n zbar.shape[0] % 2 == 0\n ), \"JCBB.individualCompatibility: zbar must have even length\"\n\n # all innovations from broadcasting\n # extra trailing dimension to avoid problems in solve when z has 2 landmarks\n v_all = z.reshape(-1, 1, 2, 1) - zbar.reshape(1, -1, 2, 1)\n\n # get the (2, 2) blocks on the diagonal to make the (nz_bar, 2, 2) array of individual S\n # first idxs get to the start of lmk, second is along the lmk axis\n idxs = np.arange(nz_bar)[:, None] * 2 + np.arange(2)[None]\n # broadcast lmk axis to two dimensions\n S_all = S[idxs[..., None], idxs[:, None]]\n\n # broadcast S_all over the measurements by adding leading 1 size axis to match v_all\n # solve nz by nz_bar systems\n # sum over axis 3 to get rid of trailing dim (faster than squeeze?)\n ic = (v_all * np.linalg.solve(S_all[None], v_all)).sum(axis=(2, 3))\n return ic\n\n\ndef NIS(z, zbar, S, a):\n zr = z.reshape(-1, 2).T\n zbarr = zbar.reshape(-1, 2).T\n\n nis = np.inf\n\n if (a > -1).any(): # We have associations\n is_ass = a > -1\n ztest = zr[:, is_ass]\n ass_idxs = a[is_ass] # .astype(np.int)\n zbartest = zbarr[:, ass_idxs]\n\n inds = np.empty(2 * len(ass_idxs), dtype=int)\n inds[::2] = 2 * ass_idxs\n inds[1::2] = 2 * ass_idxs + 1\n # inds = np.block([[inds], [inds + 1]]).flatten(\"F\")\n\n Stest = S[inds[:, None], inds]\n\n v = ztest - zbartest\n v = v.T.flatten()\n\n v[1::2] = utils.wrapToPi(v[1::2])\n\n nis = v @ np.linalg.solve(Stest, v)\n\n return nis\n\n\ndef num_associations(array):\n return np.count_nonzero(array > -1)\n", "id": "2425754", "language": "Python", "matching_score": 1.2161363363265991, "max_stars_count": 0, "path": "Graded/G3/slam/JCBB.py" }, { "content": "from functools import cache\n\n\n@cache\ndef block_3x3(i: int, j: int):\n \"\"\"used to generate 3x3 block slices\n This can be useful to fill out A and G in (10.68)\n\n arr[block_3x3(0,1)] == arr[0:3, 3:6]\n arr[block_3x3(1,2)] == arr[3:6, 6:9]\n ... 
\n\n Args:\n i (int): row in (10.68)\n j (int): column in (10.68)\n\n Returns:\n tuple[slice, slice]: the row and column slices selecting the (i, j) 3x3 block\n \"\"\"\n return slice(i*3, (i+1)*3), slice(j*3, (j+1)*3)\n", "id": "9614587", "language": "Python", "matching_score": 0.9443113803863525, "max_stars_count": 0, "path": "Graded/G2/eskf/utils/indexing.py" }, { "content": "import numpy as np\n\ndef isPSD(arr: np.ndarray) -> bool:\n return np.allclose(arr, arr.T) and np.all(np.linalg.eigvals(arr) >= 0)\n", "id": "12826539", "language": "Python", "matching_score": 0.6893553733825684, "max_stars_count": 2, "path": "Assignments/Assignment_03/ekf/utils/check_array.py" }, { "content": "from dataclasses import dataclass\nimport numpy as np\nfrom numpy import linalg as nla, ndarray\nfrom functools import cached_property\n\nfrom config import DEBUG\n\n\ndef isPSD(arr: np.ndarray) -> bool:\n return np.allclose(arr, arr.T) and np.all(np.linalg.eigvals(arr) >= 0)\n\n\n@dataclass(frozen=True)\nclass MultiVarGaussian:\n \"\"\"A class for using Gaussians\"\"\"\n mean: ndarray # shape=(n,)\n cov: ndarray # shape=(n, n)\n\n def __post_init__(self):\n if DEBUG:\n assert self.mean.shape * 2 == self.cov.shape\n assert np.all(np.isfinite(self.mean))\n assert np.all(np.isfinite(self.cov))\n assert isPSD(self.cov)\n\n @cached_property\n def ndim(self) -> int:\n return self.mean.shape[0]\n\n @cached_property\n def scaling(self) -> float:\n scaling = (2*np.pi)**(-self.ndim/2) * nla.det(self.cov)**(-1/2)\n return scaling\n\n def mahalanobis_distance_sq(self, x: np.ndarray) -> float:\n \"\"\"Calculate the mahalanobis distance between self and x.\n\n This is also known as the quadratic form of the Gaussian.\n See (3.2) in the book.\n \"\"\"\n # this method could be vectorized for efficient calls\n error = x - self.mean\n mahalanobis_distance = error.T @ nla.solve(self.cov, error)\n return mahalanobis_distance\n\n def pdf(self, x):\n density = self.scaling*np.exp(-self.mahalanobis_distance_sq(x)/2)\n return density\n\n def marginalize(self, idxs):\n return MultiVarGaussian(self.mean[idxs], self.cov[idxs][:, idxs])\n\n def __iter__(self): # in order to use tuple unpacking\n return iter((self.mean, self.cov))\n\n def __eq__(self, o: object) -> bool:\n if not isinstance(o, MultiVarGaussian):\n return False\n return np.allclose(self.mean, o.mean) and np.allclose(self.cov, o.cov)\n", "id": "9064780", "language": "Python", "matching_score": 5.313174247741699, "max_stars_count": 0, "path": "Assignments/Assignment_05/pda/utils/multivargaussian.py" }, { "content": "import numpy as np\nfrom numpy import linalg as nla, ndarray\nfrom dataclasses import dataclass\nfrom functools import cached_property\n\nfrom config import DEBUG\n\n\ndef isPSD(arr: np.ndarray) -> bool:\n \"\"\"if this fails you can try the more robust update step:\n Before:\n P_upd = (np.eye(*P.shape) - W @ H) @ P\n After:\n I_WH = np.eye(*P.shape) - W @ H\n P_upd = (I_WH @ P @ I_WH.T + W @ R @ W.T)\n \"\"\"\n return (np.allclose(arr, arr.T, atol=1e-6)\n and np.all(np.linalg.eigvals(arr) >= 0))\n\n\n@dataclass\nclass MultiVarGaussStamped:\n \"\"\"Multivariate Gaussian with a timestamp\"\"\"\n mean: 'ndarray[:]'\n cov: 'ndarray[:,:]'\n ts: float\n\n def __post_init__(self):\n if DEBUG:\n assert self.mean.shape * 2 == self.cov.shape\n assert np.all(np.isfinite(self.mean))\n assert np.all(np.isfinite(self.cov))\n assert isPSD(self.cov)\n\n @cached_property\n def ndim(self) -> int:\n return self.mean.shape[0]\n\n @cached_property\n def scaling(self) -> float:\n scaling = (2*np.pi)**(-self.ndim/2) * 
nla.det(self.cov)**(-1/2)\n return scaling\n\n def mahalanobis_distance_sq(self, x: np.ndarray) -> float:\n \"\"\"Calculate the mahalanobis distance between self and x.\n\n This is also known as the quadratic form of the Gaussian.\n See (3.2) in the book.\n \"\"\"\n # this method could be vectorized for efficient calls\n error = x - self.mean\n mahalanobis_distance = error.T @ nla.solve(self.cov, error)\n return mahalanobis_distance\n\n def pdf(self, x):\n density = self.scaling*np.exp(-self.mahalanobis_distance_sq(x)/2)\n return density\n\n def marginalize(self, idxs):\n return MultiVarGaussStamped(self.mean[idxs],\n self.cov[np.ix_(idxs, idxs)],\n self.ts)\n\n def __iter__(self): # in order to use tuple unpacking\n return iter((self.mean, self.cov))\n", "id": "6229306", "language": "Python", "matching_score": 1.633727788925171, "max_stars_count": 0, "path": "Graded/G2/eskf/datatypes/multivargaussian.py" }, { "content": "import numpy as np\nfrom numpy import ndarray\nfrom dataclasses import dataclass\nfrom typing import Sequence\n\nfrom config import DEBUG\nfrom utils.multivargaussian import MultiVarGaussian\n\nimport solution\n\n\n@dataclass(frozen=True)\nclass GaussianMuxture:\n \"\"\"Dataclass representing a gaussian mixture\"\"\"\n\n weights: ndarray\n gaussians: Sequence[MultiVarGaussian]\n\n def __post_init__(self):\n if DEBUG:\n assert len(self.weights) == len(self.gaussians)\n assert np.all(self.weights > 0)\n assert np.isclose(self.weights.sum(), 1)\n ndim = self.gaussians[0].ndim\n assert all([g.ndim == ndim for g in self.gaussians])\n\n def get_mean(self) -> ndarray:\n \"\"\"Return the mean of the gaussian mixture\n\n Returns:\n mean (ndarray): the mean\n \"\"\"\n # average the component means; the MultiVarGaussian objects themselves cannot be averaged\n mean = np.average([g.mean for g in self.gaussians],\n axis=0, weights=self.weights)\n\n return mean\n\n def get_cov(self) -> ndarray:\n \"\"\"Return the covariance of the gaussian mixture\n\n Hint: use what you did in mixturereduction.py assignment 4\n\n Returns:\n cov (ndarray): the covariance\n \"\"\"\n # internal spread: weighted average of the component covariances\n cov_int = np.average([g.cov for g in self.gaussians],\n axis=0, weights=self.weights)\n\n # external spread: weighted average of the outer products of the mean differences\n mean = self.get_mean()\n cov_ext = np.average([np.outer(g.mean - mean, g.mean - mean)\n for g in self.gaussians],\n axis=0, weights=self.weights)\n\n cov = cov_int + cov_ext\n\n return cov\n\n def reduce(self) -> MultiVarGaussian:\n \"\"\"Reduce the gaussian mixture to a multivariate gaussian\n Hint: you can use self.get_mean and self.get_cov\n\n Returns:\n reduction (MultiVarGaussian): the reduction\n \"\"\"\n reduction = MultiVarGaussian(self.get_mean(), self.get_cov())\n\n return reduction\n\n @property\n def mean(self) -> ndarray:\n return self.get_mean()\n\n @property\n def cov(self) -> ndarray:\n return self.get_cov()\n\n def pdf(self, x: ndarray) -> float:\n \"\"\"Probability density function\n\n Args:\n x (ndarray): point\n\n Returns:\n float: probability density at point x \n \"\"\"\n density = sum(w * g.pdf(x)\n for w, g in zip(self.weights, self.gaussians))\n return density\n\n def __iter__(self): # in order to use tuple unpacking\n return iter((self.mean, self.cov))\n\n def __eq__(self, o: object) -> bool:\n \"\"\"This definition of equality is actually too strict \n as the order of weights and gaussians matters, but for this purpose\n it works\"\"\"\n\n if not isinstance(o, GaussianMuxture):\n return False\n elif not np.allclose(o.weights, self.weights):\n return False\n elif not all([g1 == g2\n for g1, g2 in zip(self.gaussians, o.gaussians)]):\n return False\n else:\n return True\n", "id": "7017877", "language": "Python", "matching_score": 2.1494741439819336, "max_stars_count": 0, "path": 
"Assignments/Assignment_05/pda/gaussmix.py" }, { "content": "import numpy as np\nfrom numpy import ndarray\n\nimport solution\n\n\ndef mixture_moments(weights: ndarray,\n means: ndarray,\n covs: ndarray,\n ) -> tuple[ndarray, ndarray]:\n \"\"\"Calculate the first two moments of a Gaussian mixture.\n\n Args:\n weights: shape = (N,)\n means: shape = (N, n)\n covs: shape = (N, n, n)\n\n Returns:\n mean: shape = (n,)\n cov: shape = (n, n)\n \"\"\"\n \n N = means.shape[0]\n n = means.shape[1]\n\n mean = 0\n for i in range(N):\n mean += weights[i]*means[i]\n\n cov = np.empty([n, n])\n for i in range(N):\n diff = means[i] - mean\n cov += weights[i]*covs[i] + weights[i]*(diff@diff.T)\n\n return mean, cov\n", "id": "3042599", "language": "Python", "matching_score": 0.1651022583246231, "max_stars_count": 0, "path": "Assignments/Assignment_04/task_1/mixturereduction/mixturereduction.py" }, { "content": "import numpy as np\n\n# Shamelessly stolen from here: https://github.com/ramanans1/EKF-SLAM/blob/master/tree_extraction.py\n# Small modifications by <NAME>\ndef detectTrees(scan):\n\n M11 = 75\n M10 = 1\n daa = 5 * np.pi / 306\n M2 = 1.5\n M2a = 10 * np.pi / 360\n M3 = 3\n M5 = 1\n daMin2 = 2 * np.pi / 360\n\n RR = scan\n\n AA = np.array(range(361)) * np.pi / 360\n\n (ii1,) = np.where(RR < M11)\n\n L1 = len(ii1)\n if L1 < 1:\n return []\n\n R1 = RR[ii1]\n A1 = AA[ii1]\n\n ii2 = np.flatnonzero((np.abs(np.diff(R1)) > M2) | (np.diff(A1) > M2a))\n\n L2 = len(ii2) + 1\n ii2u = np.append(ii2, L1 - 1)\n ii2 = np.insert(ii2 + 1, 0, 0)\n # ii2u = int16([ ii2, L1 ])\n # ii2 = int16([1, ii2+1 ])\n\n # %ii2 , size(R1) ,\n\n R2 = R1[ii2]\n A2 = A1[ii2]\n\n A2u = A1[ii2u]\n R2u = R1[ii2u]\n\n x2 = R2 * np.cos(A2)\n y2 = R2 * np.sin(A2)\n x2u = R2u * np.cos(A2u)\n y2u = R2u * np.sin(A2u)\n\n flag = np.zeros(L2)\n\n L3 = 0\n M3c = M3 * M3\n\n if L2 > 1:\n L2m = L2 - 1\n dx2 = x2[1:L2] - x2u[:L2m]\n dy2 = y2[1:L2] - y2u[:L2m]\n\n dl2 = dx2 * dx2 + dy2 * dy2\n ii3 = np.flatnonzero(dl2 < M3c)\n L3 = len(ii3)\n if L3 > 0:\n flag[ii3] = 1\n flag[ii3 + 1] = 1\n\n if L2 > 2:\n L2m = L2 - 2\n dx2 = x2[2:L2] - x2u[0:L2m]\n dy2 = y2[2:L2] - y2u[0:L2m]\n\n dl2 = dx2 * dx2 + dy2 * dy2\n ii3 = np.flatnonzero(dl2 < M3c)\n L3b = len(ii3)\n if L3b > 0:\n flag[ii3] = 1\n flag[ii3 + 2] = 1\n L3 = L3 + L3b\n\n if L2 > 3:\n L2m = L2 - 3\n dx2 = x2[3:L2] - x2u[0:L2m]\n dy2 = y2[3:L2] - y2u[0:L2m]\n\n dl2 = dx2 * dx2 + dy2 * dy2\n ii3 = np.flatnonzero(dl2 < M3c)\n L3b = len(ii3)\n if L3b > 0:\n flag[ii3] = 1\n flag[ii3 + 3] = 1\n L3 = L3 + L3b\n\n if L2 > 1:\n ii3 = np.array(range(L2 - 1))\n ii3 = np.flatnonzero(\n (A2[ii3 + 1] - A2u[ii3]) < daMin2\n ) # objects close (in angle) from viewpoint.\n L3b = len(ii3)\n if L3b > 0:\n ff = R2[ii3 + 1] > R2u[ii3] # which object is in the back?\n ii3 = ii3 + ff\n flag[ii3] = 1 # mark them for the deletion\n L3 = L3 + L3b\n iixx = ii3\n\n if L3 > 0:\n ii3 = np.flatnonzero(flag == 0)\n L3 = len(ii3)\n ii4 = ii2[ii3].astype(np.float64)\n ii4u = ii2u[ii3].astype(np.float64)\n R4 = R2[ii3]\n R4u = R2u[ii3]\n A4 = A2[ii3]\n A4u = A2u[ii3]\n x4 = x2[ii3]\n y4 = y2[ii3]\n x4u = x2u[ii3]\n y4u = y2u[ii3]\n else:\n ii4 = ii2.astype(np.float64)\n ii4u = ii2u.astype(np.float64)\n R4 = R2\n R4u = R2u\n A4 = A2\n A4u = A2u\n x4 = x2\n y4 = y2\n x4u = x2u\n y4u = y2u\n\n dx2 = x4 - x4u\n dy2 = y4 - y4u\n dl2 = dx2 * dx2 + dy2 * dy2\n\n ii5 = np.flatnonzero(dl2 < (M5 * M5))\n L5 = len(ii5)\n if L5 < 1:\n return np.zeros((0, 2))\n\n R5 = R4[ii5]\n R5u = R4u[ii5]\n A5 = A4[ii5]\n A5u = A4u[ii5]\n ii4 = ii4[ii5]\n 
ii4u = ii4u[ii5]\n\n ii5 = np.flatnonzero((R5 > M10) & (A5 > daa) & (A5u < (np.pi - daa)))\n\n L5 = len(ii5)\n if L5 < 1:\n return np.zeros((0, 2))\n\n R5 = R5[ii5]\n R5u = R5u[ii5]\n A5 = A5[ii5]\n A5u = A5u[ii5]\n ii4 = ii4[ii5]\n ii4u = ii4u[ii5]\n dL5 = (A5u + np.pi / 360 - A5) * (R5 + R5u) / 2\n\n compa = np.abs(R5 - R5u) < (dL5 / 3)\n\n ii6 = np.flatnonzero(~compa)\n ii6 = ii4[ii6]\n\n ii5 = np.flatnonzero(compa)\n L5 = len(ii5)\n if L5 < 1:\n return np.zeros((0, 2))\n\n R5 = R5[ii5]\n R5u = R5u[ii5]\n A5 = A5[ii5]\n A5u = A5u[ii5]\n ii4 = ii4[ii5]\n ii4u = ii4u[ii5]\n dL5 = dL5[ii5]\n\n auxi = (ii4 + ii4u) / 2\n iia = np.floor(auxi)\n iib = np.ceil(auxi)\n\n Rs = (R1[iia.astype(int)] + R1[iib.astype(int)]) / 2\n\n ranges = Rs + dL5 / 2.0\n angles = (A5 + A5u) / 2.0 - np.pi / 2\n diameters = dL5\n\n # z = np.array([[ranges], [angles]]).squeeze().T\n z = np.vstack((ranges, angles)).T # keeps the dims\n # if z.shape != (2,): # to check for equality, all passed 19.oct 23:45 until k=3000\n # assert np.allclose(np.vstack((ranges, angles)).T, z)\n # else:\n # assert np.allclose(np.vstack((ranges, angles)).T[0], z)\n return z\n\n\ndef odometry(ve, alpha, dt, car):\n vc = ve / (1 - car.H * np.tan(alpha) / car.L)\n dp = dt * vc * np.tan(alpha) / car.L\n dx = dt * vc * np.sinc(dp / np.pi)\n if np.abs(dp) < 0.001:\n # Taylor approximation\n dy = -dt * vc * (dp / 2 - dp ** 3 / 24 + dp ** 5 / 720)\n else:\n dy = dt * vc * (np.cos(dp) - 1) / dp\n\n odo = np.array([dx, dy, dp])\n\n return odo\n\n\nclass Car:\n def __init__(self, L, H, a, b):\n self.L = L\n self.H = H\n self.a = a\n self.b = b\n", "id": "6775174", "language": "Python", "matching_score": 1.2755154371261597, "max_stars_count": 0, "path": "Graded/G3/slam/vp_utils.py" }, { "content": "import numpy as np\n\ndef wrapToPi(angle):\n return (angle + np.pi) % (2*np.pi) - np.pi\n\ndef rotmat2d(angle):\n return np.array([[np.cos(angle), -np.sin(angle)],\n [np.sin(angle), np.cos(angle)]])\n", "id": "11244561", "language": "Python", "matching_score": 0.1969543844461441, "max_stars_count": 0, "path": "Graded/G3/slam/utils.py" } ]
2.210808
Zollern233
[ { "content": "# encoding: UTF-8\n\nimport threading\nimport traceback\nimport signal\n\nimport zmq\nfrom msgpack import packb, unpackb\nfrom json import dumps, loads\n\n\n# 实现Ctrl-c中断recv\nsignal.signal(signal.SIGINT, signal.SIG_DFL)\n\n\n########################################################################\nclass RpcObject(object):\n \"\"\"\n RPC对象\n \n 提供对数据的序列化打包和解包接口,目前提供了json和msgpack两种工具。\n \n msgpack:性能更高,但通常需要安装msgpack相关工具;\n json:性能略低但通用性更好,大部分编程语言都内置了相关的库。\n \n 因此建议尽量使用msgpack,如果要和某些语言通讯没有提供msgpack时再使用json。\n \n 如果希望使用其他的序列化工具也可以在这里添加。\n \"\"\"\n\n #----------------------------------------------------------------------\n def __init__(self):\n \"\"\"Constructor\"\"\"\n # 默认使用msgpack作为序列化工具\n self.useMsgpack()\n \n #----------------------------------------------------------------------\n def pack(self, data):\n \"\"\"打包\"\"\"\n pass\n \n #----------------------------------------------------------------------\n def unpack(self, data):\n \"\"\"解包\"\"\"\n pass\n \n #----------------------------------------------------------------------\n def __jsonPack(self, data):\n \"\"\"使用json打包\"\"\"\n return dumps(data)\n \n #----------------------------------------------------------------------\n def __jsonUnpack(self, data):\n \"\"\"使用json解包\"\"\"\n return loads(data)\n \n #----------------------------------------------------------------------\n def __msgpackPack(self, data):\n \"\"\"使用msgpack打包\"\"\"\n return packb(data)\n \n #----------------------------------------------------------------------\n def __msgpackUnpack(self, data):\n \"\"\"使用msgpack解包\"\"\"\n return unpackb(data)\n \n #----------------------------------------------------------------------\n def useJson(self):\n \"\"\"使用json作为序列化工具\"\"\"\n self.pack = self.__jsonPack\n self.unpack = self.__jsonUnpack\n \n #----------------------------------------------------------------------\n def useMsgpack(self):\n \"\"\"使用msgpack作为序列化工具\"\"\"\n self.pack = self.__msgpackPack\n self.unpack = self.__msgpackUnpack\n\n\n########################################################################\nclass RpcServer(RpcObject):\n \"\"\"RPC服务器\"\"\"\n\n #----------------------------------------------------------------------\n def __init__(self, repAddress, pubAddress):\n \"\"\"Constructor\"\"\"\n super(RpcServer, self).__init__()\n \n # 保存功能函数的字典,key是函数名,value是函数对象\n self.__functions = {} \n\n # zmq端口相关\n self.__context = zmq.Context()\n \n self.__socketREP = self.__context.socket(zmq.REP) # 请求回应socket\n self.__socketREP.bind(repAddress)\n \n self.__socketPUB = self.__context.socket(zmq.PUB) # 数据广播socket\n self.__socketPUB.bind(pubAddress)\n \n # 工作线程相关\n self.__active = False # 服务器的工作状态\n self.__thread = threading.Thread(target=self.__run) # 服务器的工作线程\n \n #----------------------------------------------------------------------\n def start(self):\n \"\"\"启动服务器\"\"\"\n # 将服务器设为启动\n self.__active = True\n \n # 启动工作线程\n self.__thread.start()\n \n #----------------------------------------------------------------------\n def stop(self):\n \"\"\"停止服务器\"\"\"\n # 将服务器设为停止\n self.__active = False\n \n # 等待工作线程退出\n self.__thread.join()\n \n #----------------------------------------------------------------------\n def __run(self):\n \"\"\"连续运行函数\"\"\"\n while self.__active:\n # 从请求响应socket收取请求数据\n reqb = self.__socketREP.recv()\n \n # 序列化解包\n req = self.unpack(reqb)\n \n # 获取函数名和参数\n name, args, kwargs = req\n \n # 获取引擎中对应的函数对象,并执行调用,如果有异常则捕捉后返回\n try:\n func = self.__functions[name]\n r = func(*args, **kwargs)\n rep = [True, r]\n except Exception as e:\n rep = 
[False, traceback.format_exc()]\n \n # Serialize\n repb = self.pack(rep)\n \n # Return the call result through the request-reply socket\n self.__socketREP.send(repb)\n \n #----------------------------------------------------------------------\n def publish(self, topic, data):\n \"\"\"\n Broadcast data\n topic: the topic\n data: the payload\n \"\"\"\n # Serialize the data\n datab = self.pack(data)\n \n # Send the data through the broadcast socket\n self.__socketPUB.send_multipart([topic, datab])\n \n #----------------------------------------------------------------------\n def register(self, func):\n \"\"\"Register a function\"\"\"\n self.__functions[func.__name__] = func\n\n\n########################################################################\nclass RpcClient(RpcObject):\n \"\"\"RPC client\"\"\"\n \n #----------------------------------------------------------------------\n def __init__(self, reqAddress, subAddress):\n \"\"\"Constructor\"\"\"\n super(RpcClient, self).__init__()\n \n # zmq sockets\n self.__reqAddress = reqAddress\n self.__subAddress = subAddress\n \n self.__context = zmq.Context()\n self.__socketREQ = self.__context.socket(zmq.REQ) # request socket\n self.__socketSUB = self.__context.socket(zmq.SUB) # broadcast subscription socket \n\n # Worker thread state, used to process data pushed by the server\n self.__active = False # client running flag\n self.__thread = threading.Thread(target=self.__run) # client worker thread\n \n #----------------------------------------------------------------------\n def __getattr__(self, name):\n \"\"\"Implement remote calls\"\"\"\n # Perform the remote call\n def dorpc(*args, **kwargs):\n # Build the request\n req = [name, args, kwargs]\n \n # Serialize the request\n reqb = self.pack(req)\n \n # Send the request and wait for the reply\n self.__socketREQ.send(reqb)\n repb = self.__socketREQ.recv()\n \n # Deserialize the reply\n rep = self.unpack(repb)\n \n # Return the result on success; raise an exception on failure\n if rep[0]:\n return rep[1]\n else:\n raise RemoteException(rep[1])\n \n return dorpc\n \n #----------------------------------------------------------------------\n def start(self):\n \"\"\"Start the client\"\"\"\n # Connect the sockets\n self.__socketREQ.connect(self.__reqAddress)\n self.__socketSUB.connect(self.__subAddress)\n \n # Mark the client as running\n self.__active = True\n \n # Start the worker thread\n self.__thread.start()\n \n #----------------------------------------------------------------------\n def stop(self):\n \"\"\"Stop the client\"\"\"\n # Mark the client as stopped\n self.__active = False\n \n # Wait for the worker thread to exit\n self.__thread.join()\n \n #----------------------------------------------------------------------\n def __run(self):\n \"\"\"Main loop\"\"\"\n while self.__active:\n # Receive broadcast data from the subscription socket\n topic, datab = self.__socketSUB.recv_multipart()\n \n # Deserialize\n data = self.unpack(datab)\n\n # Handle it via the callback\n self.callback(topic, data)\n \n #----------------------------------------------------------------------\n def callback(self, topic, data):\n \"\"\"Callback; must be implemented by the user\"\"\"\n raise NotImplementedError\n \n #----------------------------------------------------------------------\n def subscribe(self, topic):\n \"\"\"\n Subscribe to broadcast data on a specific topic\n \n Use topic='' to subscribe to all topics\n \"\"\"\n self.__socketSUB.setsockopt(zmq.SUBSCRIBE, topic)\n \n \n\n########################################################################\nclass RemoteException(Exception):\n \"\"\"RPC remote exception\"\"\"\n\n #----------------------------------------------------------------------\n def __init__(self, value):\n \"\"\"Constructor\"\"\"\n self.__value = value\n \n #----------------------------------------------------------------------\n def __str__(self):\n \"\"\"Return the error message\"\"\"\n return self.__value\n\n ", "id": "10423600", "language": "Python", "matching_score": 0, "max_stars_count": 2, "path": "vn.rpc/vnrpc.py" } ]
0
Rabyss
[ { "content": "from PIL import Image\nimport numpy as np\nimport sys, os\nfrom progress_bar import ProgressBar\n\ndef set_px(pos, bit, img):\n # avoids modifying thumbnail\n size = img.shape[0]*img.shape[1] - 4096\n rgb = pos//size\n if rgb > 2:\n raise IndexError(\"Position is too large for image\")\n n_pos = pos % size + 4096\n \n x,y = n_pos // img.shape[1], n_pos % img.shape[1]\n img[x][y][rgb] = img[x][y][rgb] & 254\n img[x][y][rgb] = img[x][y][rgb] | bit\n\nwith Image.open(sys.argv[1]) as img:\n with open(sys.argv[2], 'r') as input_txt:\n arrimg = np.array(img)\n pos = 0\n tasks = int(os.popen(\"wc -c \" + sys.argv[2]).read().split(\" \")[0])\n for c in str(tasks)+\"|\":\n for i in range(8):\n bit = (ord(c) & 1 << i) >> i\n set_px(pos, bit, arrimg)\n pos += 1\n pb = ProgressBar(tasks)\n pb.begin()\n for line in input_txt:\n for char in line:\n for i in range(8):\n bit = (ord(char) & 1 << i) >> i\n set_px(pos, bit, arrimg)\n pos += 1\n pb.add_progress()\n\n output_img = Image.fromarray(arrimg)\n output_img.save(sys.argv[3])\n", "id": "755860", "language": "Python", "matching_score": 3.0020925998687744, "max_stars_count": 1, "path": "hide.py" }, { "content": "from PIL import Image\nimport numpy as np\nimport sys, os\nfrom progress_bar import ProgressBar\n\ndef get_bit(pos, img):\n # avoids modifying thumbnail\n size = img.shape[0]*img.shape[1] - 4096\n rgb = pos//size\n if rgb > 2:\n raise IndexError(\"Position is too large\")\n pos = pos % size + 4096\n x,y = pos // img.shape[1], pos % img.shape[1]\n return img[x][y][rgb] & 1\n\n\nwith Image.open(sys.argv[1]) as img:\n with open(sys.argv[2], \"w+\") as out:\n arrimg = np.array(img)\n pos = 0\n cur_char = ''\n size_str = \"\"\n while cur_char != \"|\":\n ord_chr = 0\n for i in range(8):\n bit = get_bit(pos, arrimg)\n pos += 1\n ord_chr = ord_chr | bit << i\n cur_char = chr(ord_chr)\n size_str += cur_char\n size = int(size_str[:-1])\n pb = ProgressBar(size)\n pb.begin()\n for i in range(size):\n ord_chr = 0\n for i in range(8):\n bit = get_bit(pos, arrimg)\n pos += 1\n ord_chr = ord_chr | bit << i\n out.write(chr(ord_chr))\n pb.add_progress()\n\n", "id": "12746596", "language": "Python", "matching_score": 0, "max_stars_count": 1, "path": "discover.py" }, { "content": "__author__ = 'daroud'\n", "id": "5898265", "language": "Python", "matching_score": 0, "max_stars_count": 0, "path": "www/__init__.py" }, { "content": "import random\n\nfor i in range(1000):\n if i % 100 == 0:\n print(random.uniform(-90, 90), random.uniform(-180, 180), sep=', ')\n elif i % 50 == 0:\n print(random.uniform(48, 49), random.uniform(3, 4), sep=', ')\n elif i % 2 == 0:\n print(random.uniform(45, 47), random.uniform(6, 8), sep=', ')\n else:\n print(random.uniform(50, 52), random.uniform(-1, 1), sep=', ')\n", "id": "629127", "language": "Python", "matching_score": 0, "max_stars_count": 0, "path": "helpers/generateloc.py" }, { "content": "from django.conf.urls import patterns, include, url\nfrom django.contrib import admin\nfrom django.http import HttpResponseRedirect\nfrom django.conf import settings\n\nurlpatterns = patterns('',\n url(r\"^admin/\", include(admin.site.urls)),\n url(\"^$\", \"main.views.index.index\"),\n url(\"^api/website\", \"main.api.get.website\"),\n url(\"^api/webpage\", \"main.api.get.webpage\"),\n url(\"^api/text\", \"main.api.post.text\"),\n url(r'^favicon.ico/$', lambda x: HttpResponseRedirect(settings.STATIC_URL+'ico/favicon.ico'))\n )\n", "id": "5579427", "language": "Python", "matching_score": 1.5830692052841187, "max_stars_count": 0, 
"path": "www/www/urls.py" }, { "content": "#!/usr/bin/python2\n# -*- coding: utf-8 -*-\n\nfrom django.http import HttpResponseBadRequest, HttpResponse, HttpResponseNotFound\nfrom libs.analysis import analyze\nimport json\n\n\ndef text(request):\n if request.method != \"POST\":\n return HttpResponseBadRequest(\"This should be a POST request.\")\n params = request.POST\n if \"text\" not in params:\n return HttpResponseBadRequest(\"No text to analyze.\")\n return text_analysis(params[\"text\"])\n\n\ndef text_analysis(text):\n if text != \"\":\n return HttpResponse(json.dumps(analyze(text)))\n return HttpResponseNotFound(\"The page has no interesting feature.\")\n\n", "id": "10282027", "language": "Python", "matching_score": 2.8347647190093994, "max_stars_count": 0, "path": "www/main/api/post.py" }, { "content": "#!/usr/bin/python2\n# -*- coding: utf-8 -*-\n\nfrom django.http import HttpResponseBadRequest, HttpResponse, HttpResponseNotFound\nfrom libs import search\nfrom libs.extract import extract\nfrom libs.analysis import analyze\nimport os\nimport json\n\n\ndef website(request):\n if request.method != \"GET\":\n return HttpResponseBadRequest(\"This should be a GET request.\")\n params = request.GET\n if \"topic\" not in params or len(params[\"topic\"]) == 0:\n return HttpResponseBadRequest(\"Parameter \\\"topic\\\" not set.\")\n if \"quantity\" not in params:\n return HttpResponseBadRequest(\"Parameter \\\"quantity\\\" not set.\")\n if not is_int(params[\"quantity\"]):\n return HttpResponseBadRequest(\"Parameter \\\"quantity\\\" must be an integer.\")\n if \"url\" not in params or len(params[\"url\"]) == 0 or params[\"url\"] == \"http://\":\n return HttpResponseBadRequest(\"Parameter \\\"url\\\" not set.\")\n return website_analysis(params[\"topic\"], params[\"quantity\"], params[\"url\"])\n\n\ndef webpage(request):\n if request.method != \"GET\":\n return HttpResponseBadRequest(\"This should be a GET request.\")\n params = request.GET\n if \"url\" not in params:\n return HttpResponseBadRequest(\"Parameter \\\"url\\\" not set.\")\n return webpage_analysis(params[\"url\"])\n\n\ndef webpage_analysis(url):\n text = extract(url).cleaned_text\n\n if text != \"\":\n return HttpResponse(json.dumps(analyze(text)))\n return HttpResponseNotFound(\"The page has no interesting feature.\")\n\n\ndef website_analysis(topic, quantity, url):\n quantity = int(quantity)\n engine = search.BingSearchEngine(os.environ[\"BING_API_KEY\"])\n urls = search.search_from_sources(url, topic, quantity, engine)\n\n print urls\n\n results = []\n for url in urls:\n try:\n text = extract(url).cleaned_text\n if text != \"\":\n results.append(analyze(text))\n except ValueError as e:\n print \"Value Error :\", e.message\n pass\n\n accumulator = {}\n accumulator[\"moods\"]={\"indicative\":0,\"imperative\":0,\"conditional\":0,\"subjunctive\":0}\n for analysis in results:\n for key in analysis.keys():\n if key not in accumulator:\n accumulator[key] = analysis[key]\n elif key != \"moods\":\n accumulator[key] = [sum(x) for x in zip(accumulator[key], analysis[key])]\n for k in accumulator[\"moods\"].keys():\n if k in analysis[\"moods\"].keys():\n accumulator[\"moods\"][k]=accumulator[\"moods\"][k]+analysis[\"moods\"][k]\n #accumulator[key].update(analysis[key].keys())\n\n results_number = len(results)\n\n for key in accumulator.keys():\n if key != \"moods\":\n accumulator[key] = [x/results_number for x in accumulator[key]]\n for k in accumulator[\"moods\"].keys():\n if results_number>0:\n 
accumulator[\"moods\"][k]=accumulator[\"moods\"][k]/results_number\n\n return HttpResponse(json.dumps(accumulator), content_type=\"application/json\")\n\n\ndef is_int(s):\n try:\n int(s)\n return True\n except ValueError:\n return False\n", "id": "671715", "language": "Python", "matching_score": 2.8552255630493164, "max_stars_count": 0, "path": "www/main/api/get.py" }, { "content": "#!/usr/bin/python2\n# -*- coding: utf-8 -*-\n\nimport requests\nimport string\n\n\nclass BingSearchEngine:\n bing_api = \"https://api.datamarket.azure.com/Data.ashx/Bing/Search/v1/Composite?\"\n\n def __init__(self, key):\n self.key = key\n\n @staticmethod\n def encode(request):\n # Custom url encoder to match bing API specs\n request = string.replace(request, \"'\", '%27')\n request = string.replace(request, '\"', '%27')\n request = string.replace(request, '+', '%2b')\n request = string.replace(request, ' ', '%20')\n request = string.replace(request, ':', '%3a')\n return request\n\n def search(self, query):\n params = {'ImageFilters': '\"Face:Face\"',\n '$format': 'json',\n '$top': 10,\n '$skip': 0}\n return self.search_with_params(query, params)\n\n def search_with_params(self, query, params):\n request = 'Sources=\"web\"'\n request += '&Query=\"' + query + '\"'\n for key, value in params.iteritems():\n request += '&' + key + '=' + str(value)\n request = self.bing_api + self.encode(request)\n return requests.get(request, auth=(self.key, self.key))\n\n @staticmethod\n def extract_urls_from_result(result, results_per_query):\n web_results = result[\"d\"][\"results\"][0][\"Web\"]\n urls = [wr[\"Url\"] for wr in web_results]\n if len(urls) > results_per_query:\n urls = urls[:results_per_query]\n return urls\n\n\ndef search_from_sources(url, query, results_per_query, engine):\n params = {\"$format\": \"json\", \"$top\": results_per_query, \"$skip\": 0}\n if url == \"web\":\n query = (url, query)\n else:\n query = (url, \"site:\" + url + \" \" + query)\n\n return engine.extract_urls_from_result(engine.search_with_params(query[1], params).json(), results_per_query)\n\n\ndef main():\n import os\n # This environment variable must be set\n key = os.environ[\"BING_API_KEY\"]\n bing = BingSearchEngine(key)\n #print search_from_sources([\"web\", \"udc.ch\"], \"politique\", 1, bing)\n print search_from_sources(\"udc.ch\", \"immigration\", 10 , bing)\n return 0\n\nif __name__ == \"__main__\":\n import sys\n sys.exit(main())\n", "id": "5576166", "language": "Python", "matching_score": 1.263049602508545, "max_stars_count": 0, "path": "www/libs/search.py" }, { "content": "from .analysis import *\nfrom .search import *\nfrom .extract import *", "id": "3132178", "language": "Python", "matching_score": 0.9732798933982849, "max_stars_count": 0, "path": "www/libs/__init__.py" }, { "content": "from .analysis import Analysis\n", "id": "374435", "language": "Python", "matching_score": 0.2848591208457947, "max_stars_count": 39, "path": "www/main/models/__init__.py" }, { "content": "import os\nimport django\nimport time\n\nos.environ.setdefault(\"DJANGO_SETTINGS_MODULE\", \"www.settings\")\ndjango.setup()\n\nfrom main.models import Analysis\n\n#Analysis.objects.all().delete()\n#Analysis.objects.add(\"http://www.google.ch\", \"test\", 10, \"127.0.0.1\")\n\npending=Analysis.objects.pending()\nprint len(pending),\"analyses pending\"\nfor i,p in enumerate(pending):\n print \"Processing analysis \",i,\"/\",len(pending)\n start=time.time()\n print p.url, p.topic, p.numberResults, p.date\n duration=int(time.time()-start)\n 
p.duration=duration\n p.result=\"b\"\n p.save()\n", "id": "2458796", "language": "Python", "matching_score": 2.108271837234497, "max_stars_count": 0, "path": "www/processAnalyses.py" }, { "content": "from django.db import models\nfrom datetime import datetime\nfrom django.utils import timezone\n\nclass AnalysisManager(models.Manager):\n def pending(self):\n result=list(Analysis.objects.filter(result=\"\"))\n return result\n def add(self, url, topic, numberResults, requester):\n a=Analysis(url=url, topic=topic, numberResults=numberResults, requester=requester)\n a.save()\n return a\n\nclass Analysis(models.Model):\n url = models.URLField()\n topic = models.CharField(max_length=50)\n result = models.TextField(default=\"\") # JSON result\n numberResults = models.IntegerField() # number of search results to analyze\n duration = models.IntegerField(default=0) # length of the analysis\n requester = models.IPAddressField()\n date = models.DateTimeField(default=timezone.now) # date of request; pass the callable so it is evaluated per row, not once at import\n objects = AnalysisManager()\n", "id": "1285212", "language": "Python", "matching_score": 1.157700777053833, "max_stars_count": 0, "path": "www/main/models/analysis.py" }, { "content": "#!/usr/bin/python3.4\n\nimport os\n\nclass ProgressBar:\n def __init__(self, task_number, bar_opening=\"[\", bar_ending=\"]\", empty_char=\"-\", filled_char=\"=\",\n update_rate=0, percent_precision=1, display_percent=True, display_absolute_progress=True, bar_length=0,\n enable_front_char=False, front_char=\">\"):\n\n self.__task_number = task_number\n self.__bar_opening = bar_opening\n self.__bar_ending = bar_ending\n self.__empty_char = empty_char\n self.__filled_char = filled_char\n self.__update_rate = update_rate\n self.__percent_precision = str(percent_precision)\n self.__display_percent = display_percent\n self.__display_absolute_progress = display_absolute_progress\n\n if bar_length > 0:\n self.__bar_length = min(bar_length, self.__compute_max_length())\n else:\n self.__bar_length = self.__compute_max_length()\n\n self.__enable_front_char = enable_front_char\n\n self.__front_char = front_char\n\n def begin(self):\n self.__update_count = 0\n self.__current_length = 0\n self.__current_progress = 0\n print(self.__get_bar_string(), end='\\r')\n\n def add_progress(self, inc=1):\n increment = inc if inc > 0 else 1\n if self.__current_progress < self.__task_number:\n prev_percent = self.__get_percent_progress()\n self.__current_progress = min(self.__task_number, self.__current_progress + increment)\n self.__update_count += increment\n new_length = int(self.__get_progress() * self.__bar_length)\n if self.__update_rate > 0:\n need_to_update = self.__update_count >= self.__update_rate\n else:\n need_to_update = new_length > self.__current_length or prev_percent != self.__get_percent_progress()\n if need_to_update or self.__current_progress == self.__task_number:\n self.__update_count = 0\n self.__current_length = new_length\n end_char = \"\\r\" if self.__current_progress < self.__task_number else \"\\n\"\n print(self.__get_bar_string(), end=end_char)\n\n def __get_progress(self):\n return float(float(self.__current_progress) / float(self.__task_number))\n\n def __get_percent_progress(self):\n format_string = \"{0:.\" + self.__percent_precision + \"f}\"\n return format_string.format(self.__get_progress() * 100) + \"%\"\n\n def __get_progress_fraction(self):\n return str(self.__current_progress) + \"/\" + str(self.__task_number)\n\n def __get_bar_string(self):\n diff = self.__bar_length - self.__current_length - (1 if 
self.__enable_front_char else 0)\n progresses = \"\"\n if self.__display_percent:\n progresses += \" : \" + self.__get_percent_progress()\n progresses += \" (\" + self.__get_progress_fraction() + \")\" if self.__display_absolute_progress else \"\"\n elif self.__display_absolute_progress:\n progresses += \" : \" + self.__get_progress_fraction()\n front_char = self.__front_char if (\n self.__enable_front_char and self.__current_progress < self.__task_number) else \"\"\n return ( self.__bar_opening + self.__current_length * self.__filled_char + front_char +\n diff * self.__empty_char + self.__bar_ending + progresses )\n\n def __compute_max_length(self):\n\n sz = None\n\n try:\n sz = os.get_terminal_size().columns\n except:\n pass\n\n if sz is None:\n sz = 80\n\n max_length = int(sz)\n max_length -= (len(str(self.__task_number)) * 2 + 1) if self.__display_absolute_progress else 0\n max_length -= len(self.__bar_opening)\n max_length -= len(self.__bar_ending)\n max_length -= (5 + int(self.__percent_precision)) if self.__display_percent else 0\n max_length -= 1 if int(self.__percent_precision) > 0 else 0\n max_length -= 3 if (self.__display_percent and self.__display_absolute_progress) else 0\n max_length -= 2 if (self.__display_percent or self.__display_absolute_progress) else 0\n return max_length - 1\n", "id": "11138981", "language": "Python", "matching_score": 3.59087872505188, "max_stars_count": 1, "path": "progress_bar.py" }, { "content": "#!/usr/bin/python3.5\nimport progress_bar\n\ndef test_bar(args):\n pb = progress_bar.ProgressBar(**args)\n pb.begin()\n for i in range(args[\"task_number\"]):\n # NB : you can specify a number to add more than one task here\n pb.add_progress()\n print()\n\nTASK_NUMBER = 200000\n\narguments = dict(task_number=TASK_NUMBER)\n\nprint(\"Simple progress bar :\")\ntest_bar(arguments)\n\nprint(\"Simple progress bar without absolute progress :\")\narguments[\"display_absolute_progress\"] = False\ntest_bar(arguments)\n\nprint(\"Simple progress bar without percentage :\")\narguments[\"display_absolute_progress\"] = True\narguments[\"display_percent\"] = False\ntest_bar(arguments)\n\nprint(\"Custom bars with custom filled/empty char :\")\narguments[\"display_percent\"] = True\n\narguments[\"empty_char\"] = \"~\"\narguments[\"filled_char\"] = \"#\"\ntest_bar(arguments)\n\nprint(\"Custom bar with custom begin/end char :\")\narguments[\"bar_opening\"] = \"°°~<[|\"\narguments[\"bar_ending\"] = \"|]>~°°\"\narguments[\"empty_char\"] = \"-\"\narguments[\"filled_char\"] = \"=\"\ntest_bar(arguments)\n\nprint(\"Custom shorter bar :\")\narguments[\"filled_char\"] = \"~\"\narguments[\"bar_length\"] = 80\n\ntest_bar(arguments)\n\nprint(\"Front char:\")\narguments[\"enable_front_char\"] = True\ntest_bar(arguments)\narguments[\"front_char\"] = \"|\"\ntest_bar(arguments)\n\nprint(\"Changing the update rate :\")\n\n# Note that changing the update rate will impact the number of visual refreshes of the bar; the number of times it is\n# really updated has to be handled via the number of calls to add_progress(inc) and the number in inc\n\narguments[\"update_rate\"] = TASK_NUMBER/10\ntest_bar(arguments)\n\n# WARNING : increasing the precision with no update rate set may result in the bar being reprinted at each iteration\n# (because the percentage would change every time) and thus slowing your process, so it is recommended to use a not too\n# high precision compared to the number of tasks to perform or to set an update rate\n\nprint(\"Changing decimal precision of percentage 
:\")\narguments[\"percent_precision\"] = 0\ntest_bar(arguments)\n\nprint(\"Adding a unit:\")\narguments[\"unit\"] = \"Mo\"\ntest_bar(arguments)\n", "id": "7887402", "language": "Python", "matching_score": 0.6699544787406921, "max_stars_count": 0, "path": "examples.py" }, { "content": "#!/usr/bin/python3\n\nimport curses\nimport random\nimport argparse\nimport os\n\n\ndef main():\n parser = argparse.ArgumentParser(description=\"Hacker typer to impress your friends! Press ESC at any moment to \"\n \"exit the program.\")\n parser.add_argument(\"-t\", \"--test\", action=\"store_true\", help=\"Testing mode, uses a shorter file and a shorter\"\n \"window.\")\n parser.add_argument(\"-s\", \"--speed\", help=\"Typing speed, i.e. number of characters per key\"\n \"stroke.\", required=False, default=4, type=int)\n args = parser.parse_args()\n\n root = os.path.dirname(os.path.realpath(__file__))\n if args.test:\n input_file = root + \"/test\"\n else:\n input_file = root + \"/in\" + str(random.randint(1, 8))\n text_file = open(input_file)\n try:\n speed = args.speed\n default_screen = curses.initscr()\n curses.start_color()\n curses.init_pair(1, curses.COLOR_GREEN, curses.COLOR_BLACK)\n default_screen.clear()\n max_xy = default_screen.getmaxyx()\n y_dim = max_xy[0]\n if args.test and y_dim > 10:\n y_dim = 10\n x_dim = max_xy[1]\n pad = curses.newpad(y_dim, x_dim)\n pad.attrset(curses.color_pair(1))\n pad.refresh(0, 0, 0, 0, y_dim - 1, x_dim - 1)\n if not args.test:\n text_file.seek(random.randrange(30000))\n\n exit_program = False\n pos = 0\n while True:\n key = pad.getch()\n if key == 27:\n exit_program = True\n break\n\n out_of_chars = False\n\n for i in range(speed):\n char = text_file.read(1)\n if not char:\n out_of_chars = True\n break\n try:\n pad.addstr(char)\n except curses.error:\n pos += 1\n pad.resize(y_dim + pos, x_dim)\n pad.addstr(char)\n\n pad.refresh(pos, 0, 0, 0, y_dim - 1, x_dim - 1)\n if out_of_chars:\n break\n if not exit_program:\n pad.erase()\n default_screen.refresh()\n pad.resize(3, 8)\n pad.addstr(1, 0, \" HACKED \")\n pad.border()\n pad.refresh(0, 0, 0, 0, 2, 7)\n while pad.getch() != 27:\n i = 0\n\n finally:\n text_file.close()\n curses.endwin()\n\n\nif __name__ == \"__main__\":\n main()\n", "id": "1914024", "language": "Python", "matching_score": 1.7957360744476318, "max_stars_count": 0, "path": "main.py" }, { "content": "#!/usr/bin/python2\n# -*- coding: utf-8 -*-\n\nfrom extract import extract\nimport sys\nimport importlib\nimport argparse\nfrom langdetect import detect\nimport numpy as np\nfrom collections import Counter\n\n\ndef stats(l):\n return np.mean(l), np.std(l)\n\n\ndef analyze(s, language=None):\n # Detect language if not provided\n if language is None:\n language = detect(s)\n if language not in [\"en\"]:\n raise ValueError(\"Language \"+language+\" not supported\")\n\n # Load pattern\n pattern = importlib.import_module(\"pattern.\" + language)\n # Perform analysis\n analysis = {}\n pt = pattern.parsetree(s)\n analysis[\"wordPerSentence\"] = stats([len(s.words) for s in pt])\n #Moods\n moods = Counter([pattern.mood(s) for s in pt])\n tot=sum([v for k,v in moods.iteritems()])\n analysis[\"moods\"] = {}\n for k in moods.keys():\n analysis[\"moods\"][k] = round(float(moods[k])/tot*100)\n #\n analysis[\"modality\"] = stats([pattern.modality(s) for s in pt])\n sentiments = [pattern.sentiment(s) for s in pt]\n analysis[\"polarity\"] = stats([s[0] for s in sentiments])\n analysis[\"subjectivity\"] = stats([s[1] for s in sentiments])\n analysis[\"positivity\"] = 
stats([int(pattern.positive(s)) for s in pt])\n return analysis\n\n\ndef analyze_url(url, language=None):\n string = extract(url, language).cleaned_text\n return analyze(string)\n\n\ndef main():\n parser = argparse.ArgumentParser(description=\"Analyze text in a string or a web page.\")\n parser.add_argument(\"string\", metavar='string', type=str, help=\"the text to process (or the url)\")\n parser.add_argument(\"language\", metavar=\"language\", type=str, help=\"the language of the text\", nargs='?')\n parser.add_argument(\"-u\", \"--url\", action=\"store_true\", help=\"the string is an url\")\n args = parser.parse_args()\n if args.url:\n # Extract text from URL\n args.string = extract(args.string, args.language).cleaned_text\n print analyze(args.string, args.language)\n return 0\n\nif __name__ == \"__main__\":\n sys.exit(main())\n", "id": "9723944", "language": "Python", "matching_score": 3.3369765281677246, "max_stars_count": 0, "path": "www/libs/analysis.py" }, { "content": "#!/usr/bin/python2\n# -*- coding: utf-8 -*-\n\nfrom goose import Goose\nimport argparse\n\n\ndef extract(url, language=None):\n if language:\n g = Goose({'use_meta_language': False, 'target_language': language})\n else:\n g = Goose()\n\n return g.extract(url=url)\n\n\ndef main():\n parser = argparse.ArgumentParser(description=\"Extract data from a web page\")\n parser.add_argument(\"url\", metavar='url', type=str, help=\"the url to extract data from\")\n parser.add_argument(\"language\", metavar=\"language\", type=str, help=\"the language of the web page\", nargs='?')\n args = parser.parse_args()\n print extract(args.url, args.language).cleaned_text\n return 0\n\nif __name__ == \"__main__\":\n import sys\n sys.exit(main())\n", "id": "6039573", "language": "Python", "matching_score": 0.4889536499977112, "max_stars_count": 0, "path": "www/libs/extract.py" }, { "content": "import json\nimport sys\nfrom collections import defaultdict\nfrom dateutil import parser\nfrom datetime import datetime, timezone\n\ndef unix_time_millis(dt):\n epoch = datetime.utcfromtimestamp(0).replace(tzinfo=timezone.utc)\n return int((dt - epoch).total_seconds() * 1000)\n\n\nif len(sys.argv) < 2:\n sys.exit(1)\n\ndump_filename = sys.argv[1]\n\nwith open(dump_filename, 'r') as dump_file:\n dump_data = json.loads(dump_file.read())\n\nitems_by_user = defaultdict(list)\n\nfor item in dump_data:\n if \"userId\" in item:\n items_by_user[item[\"userId\"]].append(item)\n\nfor userId, items in items_by_user.items():\n with open(userId + \"_time.data\", \"w+\") as out:\n for item in items:\n date = parser.parse(item[\"createdTime\"])\n out.write(str(unix_time_millis(date)) + \"\\n\")\n", "id": "6034759", "language": "Python", "matching_score": 3.351471424102783, "max_stars_count": 0, "path": "helpers/extract_time.py" }, { "content": "import random\nfrom datetime import datetime, timedelta, timezone\n\ndef unix_time_millis(dt):\n epoch = datetime.utcfromtimestamp(0).replace(tzinfo=timezone.utc)\n return int((dt - epoch).total_seconds() * 1000)\n\nfor i in range(1000):\n step = timedelta(days=1)\n year = 2007 + i % 10\n start = datetime(year, 1, 1, tzinfo=timezone.utc)\n end = datetime(year + 1, 1, 1, tzinfo=timezone.utc)\n random_date = start + random.randrange((end - start) // step + 1) * step\n print(unix_time_millis(random_date))\n", "id": "6945397", "language": "Python", "matching_score": 2.0258126258850098, "max_stars_count": 0, "path": "helpers/generateTime.py" } ]
1.583069
gytdau
[ { "content": "import re\n\nlines = []\n\nwith open(\"inputData.txt\", \"r\") as infile:\n for line in infile:\n lines.append(line.replace('\\n', '').replace('\\r', ''))\n\ncodeLetters = 0\nescapedLetters = 0\n\nfor line in lines:\n codeLetters += len(line)\n\n escaped = re.escape(line)\n escapedLetters += len(escaped) + 2 # + 2 for the two quotes we didn't add\n\n\ndifference = escapedLetters - codeLetters\nprint(str(difference))\n", "id": "4776961", "language": "Python", "matching_score": 2.3996429443359375, "max_stars_count": 4, "path": "Day8/part2.py" }, { "content": "\nlines = []\n\nwith open(\"inputData.txt\", \"r\") as infile:\n for line in infile:\n lines.append(line.replace('\\n', '').replace('\\r', ''))\n\nrealLetters = 0\ncodeLetters = 0\n\nfor line in lines:\n codeLetters += len(line)\n\n lineWithoutQuotes = line[1:-1]\n decodedString = bytes(lineWithoutQuotes, \"utf-8\").decode(\"unicode_escape\")\n realLetters += len(decodedString)\n\n\ndifference = codeLetters - realLetters\nprint(str(difference))\n", "id": "2283729", "language": "Python", "matching_score": 0.04281681030988693, "max_stars_count": 4, "path": "Day8/part1.py" }, { "content": "# I wanted to try making some classes as practice.\n\nRUNNING = True\nRESTING = False\n\n\nclass Reindeer:\n\n def __init__(self, line):\n \"\"\"\n :param line: Parses line into the class.\n :return: nothing\n \"\"\"\n line = line.split()\n self.speed = int(line[3])\n self.running_time = int(line[6])\n self.resting_time = int(line[13])\n self.state = RUNNING\n self.state_timer = int(line[6])\n self.distance = 0\n self.points = 0\n\n def next_step(self):\n \"\"\"\n Uses a second of a race.\n :return: nothing\n \"\"\"\n if self.state == RUNNING:\n self.distance += self.speed\n\n self.state_timer -= 1\n if self.state_timer <= 0:\n if self.state == RUNNING:\n self.state = RESTING\n self.state_timer = self.resting_time\n else:\n self.state = RUNNING\n self.state_timer = self.running_time\n\n def add_point(self):\n \"\"\"\n Adds one point.\n :return: nothing\n \"\"\"\n self.points += 1\n\n\nall_reindeer = []\n\nwith open(\"inputData.txt\", \"r\") as infile:\n for line in infile:\n # Make the reindeer objects.\n all_reindeer.append(Reindeer(line))\n\nfor i in range(2503):\n for reindeer in all_reindeer:\n reindeer.next_step()\n\n # Calculate the largest distance of all the reindeer\n distances = [x.distance for x in all_reindeer]\n max_score = max(distances)\n\n # Search through the reindeer, and add a point to any reindeer that have travelled the 'largest distance'.\n for reindeer in all_reindeer:\n if reindeer.distance == max_score:\n reindeer.add_point()\n\n\n# Print out the points of the largest reindeer with the most points.\nfinal_scores = [x.points for x in all_reindeer]\nprint(max(final_scores))\n", "id": "6357103", "language": "Python", "matching_score": 3.7474048137664795, "max_stars_count": 4, "path": "Day14/part2.py" }, { "content": "# I wanted to try making some classes as practice.\n\nRUNNING = True\nRESTING = False\n\n\nclass Reindeer:\n\n def __init__(self, line):\n \"\"\"\n :param line: Parses line into the class.\n \"\"\"\n line = line.split()\n self.speed = int(line[3])\n self.running_time = int(line[6])\n self.resting_time = int(line[13])\n\n def calculate_distance_at(self, time):\n \"\"\"\n :param time: Amount of time this race should continue for\n :return: The distance this reindeer has run at the end of the race.\n \"\"\"\n state = RUNNING\n distance = 0\n state_timer = self.running_time\n timer = time\n for i in 
range(time):\n if state == RUNNING:\n distance += self.speed\n\n state_timer -= 1\n if state_timer <= 0:\n if state == RUNNING:\n state = RESTING\n state_timer = self.resting_time\n else:\n state = RUNNING\n state_timer = self.running_time\n\n timer -= 1\n if timer <= 0:\n return distance\n\nreindeer_distances = []\n\nwith open(\"inputData.txt\", \"r\") as infile:\n for line in infile:\n testing = Reindeer(line)\n reindeer_distances.append(testing.calculate_distance_at(2503))\n\nprint(str(max(reindeer_distances)))\n", "id": "9443666", "language": "Python", "matching_score": 1.2765523195266724, "max_stars_count": 4, "path": "Day14/part1.py" }, { "content": "from itertools import permutations\n\ndestinations = set()\ndistances = dict()\n\nwith open(\"inputData.txt\", \"r\") as infile:\n for line in infile:\n values = line.split()\n # 0 = Departing, 2 = Arriving, 4 = Cost\n destinations.add(values[0])\n destinations.add(values[2])\n # This will create a dictionary of dictionaries of possible distances, e.g.\n # {'Tristram': {'AlphaCentauri': 34, 'Snowdin': 100}, (...)}\n distances.setdefault(values[0], dict())[values[2]] = int(values[4])\n distances.setdefault(values[2], dict())[values[0]] = int(values[4])\n\n\npossible_distances = []\n\ndef getDistance(x, y):\n return distances[x][y]\n\nfor items in permutations(destinations):\n distancesT = map(getDistance, items[:-1], items[1:])\n total_fuel_used = sum(distancesT)\n possible_distances.append(total_fuel_used)\n\n# This required very little modification.\n\nlongest_distance = max(possible_distances)\nprint(longest_distance)", "id": "6247527", "language": "Python", "matching_score": 0.9899126887321472, "max_stars_count": 4, "path": "Day9/part2.py" }, { "content": "import itertools\n\nweapons = [ # Exactly 1\n [8, 4, 0],\n [10, 5, 0],\n [25, 6, 0],\n [40, 7, 0],\n [74, 8, 0]\n]\narmor = [ # 0 to 1\n [13, 0, 1],\n [31, 0, 2],\n [53, 0, 3],\n [75, 0, 4],\n [102, 0, 5],\n [0, 0, 0] # Armor is optional\n]\nrings = [ # 0 to 2\n [25, 1, 0],\n [50, 2, 0],\n [100, 3, 0],\n [20, 0, 1],\n [40, 0, 2],\n [80, 0, 3],\n [0, 0, 0],\n [0, 0, 0] # Two rings are optional\n]\n\nenemy_hp = 0\nenemy_dmg = 0\nenemy_armor = 0\n\nwith open(\"inputData.txt\", \"r\") as infile:\n \"\"\"\n Process inputData.txt to get the enemy values.\n \"\"\"\n for line in infile:\n data = line.split()\n if data[0] == \"Hit\":\n enemy_hp = int(data[2])\n elif data[0] == \"Damage:\":\n enemy_dmg = int(data[1])\n else:\n enemy_armor = int(data[1])\n\n\ndef does_player_win(dmg, armor):\n \"\"\"\n Does the player win against the boss with this amount of 'dmg' and 'armor' points?\n :param dmg: Amount of dmg points the player has\n :param armor: Amount of armor points the player has\n :return: True if the player wins against the boss; False otherwise\n \"\"\"\n this_enemy_hp = enemy_hp\n this_enemy_dmg = enemy_dmg\n this_enemy_armor = enemy_armor\n this_player_hp = 100\n this_player_dmg = dmg\n this_player_armor = armor\n\n this_enemy_hit = max(1, this_enemy_dmg - this_player_armor)\n this_player_hit = max(1, this_player_dmg - this_enemy_armor)\n\n toggler = True\n while this_enemy_hp > 0 and this_player_hp > 0:\n if toggler:\n this_enemy_hp -= this_player_hit\n else:\n this_player_hp -= this_enemy_hit\n toggler = not toggler\n\n return this_enemy_hp <= 0\n\n\ndef get_stats(equipment):\n \"\"\"\n Adds up the stats of the equipment\n :param equipment: A list with multiple lists inside - the inner lists are the equipment stats\n :return: Total cost, dmg, and armor points\n \"\"\"\n dmg = 0\n 
armor_points = 0\n cost = 0\n\n for thing in equipment:\n dmg += thing[1]\n armor_points += thing[2]\n cost += thing[0]\n\n return cost, dmg, armor_points\n\n\npossible_costs = []\npossible_ring_combinations = list(itertools.combinations(rings, 2))\nfor weapon in weapons:\n for n_armor in armor:\n for rings in possible_ring_combinations:\n cost, dmg, armor_points = get_stats([weapon] + [n_armor] + list(rings))\n if does_player_win(dmg, armor_points):\n possible_costs.append(cost)\n\nprint(\"Answer: \" + str(min(possible_costs)))", "id": "9207199", "language": "Python", "matching_score": 1.3073605298995972, "max_stars_count": 4, "path": "Day21/part1.py" }, { "content": "import re\n\nreplacements = []\ndata = \"\"\n\n\nwith open(\"inputData.txt\", \"r\") as infile:\n \"\"\"\n Process inputData.txt and put it into the data array.\n \"\"\"\n for line in infile:\n line = line.strip().split()\n if not line:\n # Do nothing\n pass\n elif \"=>\" not in line:\n # This the molecule that we have to build on\n data = \"\".join(line)\n else:\n # Not nothing, and it has a '=>', so it's data for a possible replacement\n replacements.append([line[0], line[2]])\n\n\ndef delete_from(to_delete, string, pos):\n \"\"\"\n Deletes instance of 'str' in 'data' at this 'pos'\n :param to_delete: string to be deleted from 'string'\n :param string: string from which 'to_delete' will be deleted\n :param pos: position of 'string' to delete\n \"\"\"\n data_array = list(string)\n for i in range(len(to_delete)):\n data_array.pop(pos)\n return \"\".join(data_array)\n\n\ndef replace_at(to_replace, new_replacement, string, pos):\n \"\"\"\n Replaces 'a' with 'b' at this 'pos'\n :param to_replace: substring of 'string' which needs to be replaced\n :param new_replacement: string which will replace 'to_replace'\n :param string: string which the replacement will be executed on\n :param pos: position of 'new_replacement' in 'string' that should be replaced\n \"\"\"\n string = delete_from(to_replace, string, pos)\n string = string[:pos] + new_replacement + string[pos:]\n return string\n\n\n\n\npossible_values = []\n\n\"\"\"\nMain loop\n\"\"\"\n\nfor replacement in replacements:\n # Uses regex to find locations of the replacement we're looking for\n locations = [m.start() for m in re.finditer(replacement[0], data)]\n\n for location in locations:\n possible_values.append(replace_at(replacement[0], replacement[1], data, location))\n\n\nprint(\"Answer: \" + str(len(set(possible_values))))\n\n", "id": "9527507", "language": "Python", "matching_score": 0.8120557069778442, "max_stars_count": 4, "path": "Day19/part1.py" }, { "content": "santaX = 0\nsantaY = 0\nrobotX = 0\nrobotY = 0\nlocations = [(0, 0)]\n\nticker = False\n\ninstructions = \"\"\n\nwith open(\"inputData.txt\", \"r\") as infile:\n for line in infile:\n instructions += line\n \nfor i in instructions:\n y = 0\n x = 0\n if i == \"^\":\n y = 1\n elif i == \"v\":\n y = -1\n elif i == \"<\":\n x = 1\n elif i == \">\":\n x = -1\n\n if ticker:\n ticker = False\n santaX += x\n santaY += y\n\n location = (santaX, santaY)\n if location not in locations:\n locations.append(location)\n else:\n ticker = True\n robotX += x\n robotY += y\n\n location = (robotX, robotY)\n if location not in locations:\n locations.append(location)\n\nprint(len(locations))\n", "id": "10931612", "language": "Python", "matching_score": 2.1933248043060303, "max_stars_count": 4, "path": "Day3/part2.py" }, { "content": "posX = 0\nposY = 0\nlocations = [(0, 0)]\n\nticker = False\n\ninstructions = \"\"\n\nwith 
open(\"inputData.txt\", \"r\") as infile:\n for line in infile:\n instructions += line\n\nfor i in instructions:\n y = 0\n x = 0\n if i == \"^\":\n posY += 1\n elif i == \"v\":\n posY += -1\n elif i == \"<\":\n posX += 1\n elif i == \">\":\n posX += -1\n \n location = (posX, posY)\n if location not in locations:\n locations.append(location)\n\n \nprint(len(locations))", "id": "12021842", "language": "Python", "matching_score": 0.16793020069599152, "max_stars_count": 4, "path": "Day3/part1.py" }, { "content": "data = []\n\nwith open(\"inputData.txt\", \"r\") as infile:\n for line in infile:\n line = line.replace('x', ' ')\n data += line.split()\n\n\npaper = 0\nfor i in range(0, len(data), 3):\n length = int(data[i])\n width = int(data[i+1])\n height = int(data[i+2])\n\n paper += (2 * length * width) + (2 * width * height) + (2 * height * length)\n paper += min((length * width), (width * height), (height * length))\n\nprint(paper)\n", "id": "9629381", "language": "Python", "matching_score": 1.4797929525375366, "max_stars_count": 4, "path": "Day2/part1.py" }, { "content": "data = []\n\nwith open(\"inputData.txt\", \"r\") as infile:\n for line in infile:\n line = line.replace('x', ' ')\n data += line.split()\n\nribbon = 0\nfor i in range(0, len(data), 3):\n length = int(data[i])\n width = int(data[i+1])\n height = int(data[i+2])\n \n things = [length, width, height]\n things.remove(max(things))\n\n ribbon += (2 * things[0] + 2 * things[1])\n ribbon += length * width * height\nprint(ribbon)\n", "id": "10449564", "language": "Python", "matching_score": 0.09455647319555283, "max_stars_count": 4, "path": "Day2/part2.py" }, { "content": "import itertools\n\ndata = []\n\n\"\"\"\nProcess inputData.txt and put it into the data array.\n\"\"\"\nwith open(\"inputData.txt\", \"r\") as infile:\n for line in infile:\n data.append(int(line))\n\n\nvalid_permutations = {}\n\ndef checkIfValid(permutation):\n return sum(permutation) == 150\n\n\"\"\"\nMain loop\n This is the best explanation I could come up with:\n 1. For every possible amount of containers (e.g. if there are 4 containers, try 1, 2, 3, and 4)\n 2. And for every possible combination of that amount of containers\n\"\"\"\nfor i in range(0, len(data)):\n for permutation in itertools.combinations(data, i):\n if checkIfValid(permutation):\n \"\"\"\n This was the logic that was modified.\n \"\"\"\n if len(permutation) not in valid_permutations:\n valid_permutations[len(permutation)] = 1\n else:\n valid_permutations[len(permutation)] += 1\n\nprint(\"Answer: \" + str(valid_permutations[min(valid_permutations)]))\n", "id": "2108027", "language": "Python", "matching_score": 3.056544065475464, "max_stars_count": 4, "path": "Day17/part2.py" }, { "content": "import itertools\n\ndata = []\n\n\"\"\"\nProcess inputData.txt and put it into the data array.\n\"\"\"\nwith open(\"inputData.txt\", \"r\") as infile:\n for line in infile:\n data.append(int(line))\n\n\nvalid_permutations = 0\n\ndef checkIfValid(permutation):\n return 1 if sum(permutation) == 150 else 0\n\n\"\"\"\nMain loop\n This is the best explanation I could come up with:\n 1. For every possible amount of containers (e.g. if there are 4 containers, try 1, 2, 3, and 4)\n 2. 
And for every possible combination of that amount of containers\n\"\"\"\nfor i in range(0, len(data)):\n for permutation in itertools.combinations(data, i):\n valid_permutations += checkIfValid(permutation)\n\nprint(\"Answer: \" + str(valid_permutations))\n", "id": "2947598", "language": "Python", "matching_score": 0.1502193957567215, "max_stars_count": 4, "path": "Day17/part1.py" }, { "content": "\ningredients = []\n\n\nwith open(\"inputData.txt\", \"r\") as infile:\n for line in infile:\n line = line.split()\n ingredients.append(\n [\n int(line[2][:-1]),\n int(line[4][:-1]),\n int(line[6][:-1]),\n int(line[8][:-1])\n\n ]\n )\n\n\ntop_score = 0\n\n# This is really bad.\n\nfor a in range(100):\n for b in range(100 ):\n for c in range(100):\n # Thank you, Eric, for only giving us four ingredients\n if a + b + c <= 100:\n d = 100 - a - b - c\n\n # This is also really terrible - it should be done with a For loop instead\n score = max(0,\n ingredients[0][0] * a +\n ingredients[1][0] * b +\n ingredients[2][0] * c +\n ingredients[3][0] * d) * max(0,\n ingredients[0][1] * a +\n ingredients[1][1] * b +\n ingredients[2][1] * c +\n ingredients[3][1] * d) * max(0,\n ingredients[0][2] * a +\n ingredients[1][2] * b +\n ingredients[2][2] * c +\n ingredients[3][2] * d) * max(0,\n ingredients[0][3] * a +\n ingredients[1][3] * b +\n ingredients[2][3] * c +\n ingredients[3][3] * d)\n\n top_score = max(top_score, score)\n\n\n\n\nprint(top_score)", "id": "12044498", "language": "Python", "matching_score": 0.05524280667304993, "max_stars_count": 4, "path": "Day15/part1.py" }, { "content": "from itertools import permutations\n\n\ninstructions = []\npeople = set()\n\nwith open(\"inputData.txt\", \"r\") as infile:\n for line in infile:\n line = line.split()\n happiness = int(line[3])\n positive = line[2] == \"gain\"\n if not positive:\n happiness = -happiness\n\n instructions.append([line[0], line[10][:-1], happiness])\n\n people.add(line[0])\n people.add(line[10][:-1])\n\n\n\ndef get_happiness(a, b):\n # Search to match person `a`\n search_1 = [something for something in instructions if something[0] == a]\n\n # Search what we've found to match person `b`\n search_2 = [something for something in search_1 if something[1] == b]\n\n return search_2[0][2]\n\n\nmax_happiness = 0\n\nfor permutation in permutations(people):\n this_happiness = 0\n last_element = len(permutation) - 1\n for i, v in enumerate(permutation):\n # If this is the first element - because it's a round table, it's sitting beside the last element.\n if i == 0:\n this_happiness += get_happiness(v, permutation[last_element])\n else:\n this_happiness += get_happiness(v, permutation[i - 1])\n\n # Likewise for the last element - it's sitting beside the first element.\n if i == last_element:\n this_happiness += get_happiness(v, permutation[0])\n else:\n this_happiness += get_happiness(v, permutation[i + 1])\n\n max_happiness = max(this_happiness, max_happiness)\n\nprint(max_happiness)", "id": "10852583", "language": "Python", "matching_score": 1, "max_stars_count": 4, "path": "Day13/part1.py" }, { "content": "instructions = [[1, 489,959, 759,964],\n [2, 820,516, 871,914],\n [2, 427,423, 929,502],\n [1, 774,14, 977,877],\n [1, 410,146, 864,337],\n [1, 931,331, 939,812],\n [2, 756,53, 923,339],\n [2, 313,787, 545,979],\n [2, 12,823, 102,934],\n [3, 756,965, 812,992],\n [2, 743,684, 789,958],\n [3, 120,314, 745,489],\n [3, 692,845, 866,994],\n [2, 587,176, 850,273],\n [2, 674,321, 793,388],\n [3, 749,672, 973,965],\n [1, 943,30, 990,907],\n [1, 296,50, 
729,664],\n [1, 212,957, 490,987],\n [3, 171,31, 688,88],\n [2, 991,989, 994,998],\n [2, 913,943, 958,953],\n [2, 278,258, 367,386],\n [3, 275,796, 493,971],\n [2, 70,873, 798,923],\n [3, 258,985, 663,998],\n [1, 601,259, 831,486],\n [2, 914,94, 941,102],\n [2, 558,161, 994,647],\n [1, 119,662, 760,838],\n [3, 378,775, 526,852],\n [2, 384,670, 674,972],\n [2, 249,41, 270,936],\n [1, 614,742, 769,780],\n [1, 427,70, 575,441],\n [1, 410,478, 985,753],\n [2, 619,46, 931,342],\n [1, 284,55, 768,922],\n [2, 40,592, 728,685],\n [1, 825,291, 956,950],\n [1, 147,843, 592,909],\n [2, 218,675, 972,911],\n [3, 249,291, 350,960],\n [2, 556,80, 967,675],\n [3, 609,148, 968,279],\n [3, 217,605, 961,862],\n [3, 407,177, 548,910],\n [3, 400,936, 599,938],\n [2, 721,101, 925,455],\n [1, 268,631, 735,814],\n [3, 549,969, 612,991],\n [3, 553,268, 689,432],\n [2, 817,668, 889,897],\n [3, 801,544, 858,556],\n [3, 615,729, 832,951],\n [2, 427,477, 958,948],\n [1, 164,49, 852,946],\n [1, 542,449, 774,776],\n [2, 923,196, 980,446],\n [3, 90,310, 718,846],\n [2, 657,215, 744,252],\n [2, 800,239, 811,712],\n [1, 502,90, 619,760],\n [3, 649,512, 862,844],\n [2, 334,903, 823,935],\n [2, 630,233, 839,445],\n [1, 713,67, 839,865],\n [1, 932,50, 982,411],\n [2, 480,729, 984,910],\n [1, 100,219, 796,395],\n [1, 758,108, 850,950],\n [2, 427,276, 439,938],\n [1, 178,284, 670,536],\n [3, 540,27, 625,102],\n [2, 906,722, 936,948],\n [3, 345,418, 859,627],\n [3, 175,775, 580,781],\n [3, 863,28, 929,735],\n [2, 824,858, 905,973],\n [3, 752,312, 863,425],\n [1, 985,716, 988,852],\n [2, 68,504, 763,745],\n [3, 76,209, 810,720],\n [2, 657,607, 676,664],\n [3, 596,869, 896,921],\n [2, 915,411, 968,945],\n [2, 368,39, 902,986],\n [1, 11,549, 393,597],\n [2, 842,893, 976,911],\n [3, 274,106, 581,329],\n [3, 406,403, 780,950],\n [3, 408,988, 500,994],\n [3, 217,73, 826,951],\n [1, 917,872, 961,911],\n [3, 394,34, 510,572],\n [3, 424,603, 583,626],\n [3, 106,159, 755,738],\n [2, 244,610, 472,709],\n [1, 350,265, 884,690],\n [1, 688,184, 928,280],\n [3, 279,443, 720,797],\n [2, 615,493, 888,610],\n [3, 118,413, 736,632],\n [1, 798,782, 829,813],\n [2, 250,934, 442,972],\n [1, 68,503, 400,949],\n [3, 297,482, 313,871],\n [3, 710,3, 839,859],\n [1, 125,300, 546,888],\n [3, 482,39, 584,159],\n [2, 536,89, 765,962],\n [1, 530,518, 843,676],\n [1, 994,467, 994,676],\n [1, 623,628, 744,927],\n [3, 704,912, 837,983],\n [1, 154,364, 517,412],\n [3, 344,409, 780,524],\n [2, 578,740, 725,879],\n [1, 251,933, 632,957],\n [1, 827,705, 971,789],\n [3, 191,282, 470,929],\n [3, 324,525, 446,867],\n [3, 534,343, 874,971],\n [3, 550,650, 633,980],\n [3, 837,404, 881,915],\n [3, 338,881, 845,905],\n [1, 469,462, 750,696],\n [1, 741,703, 892,870],\n [2, 570,215, 733,562],\n [1, 445,576, 870,775],\n [1, 466,747, 554,878],\n [2, 820,453, 868,712],\n [2, 892,706, 938,792],\n [2, 300,238, 894,746],\n [2, 306,44, 457,444],\n [2, 912,569, 967,963],\n [3, 109,756, 297,867],\n [1, 37,546, 41,951],\n [1, 321,637, 790,910],\n [3, 66,50, 579,301],\n [3, 933,221, 933,791],\n [1, 486,676, 878,797],\n [1, 417,231, 556,317],\n [3, 904,468, 981,873],\n [1, 417,675, 749,712],\n [1, 692,371, 821,842],\n [3, 324,73, 830,543],\n [1, 912,490, 977,757],\n [2, 634,872, 902,949],\n [3, 266,779, 870,798],\n [1, 772,982, 990,996],\n [2, 607,46, 798,559],\n [1, 295,602, 963,987],\n [1, 657,86, 944,742],\n [2, 334,639, 456,821],\n [2, 997,667, 997,670],\n [2, 725,832, 951,945],\n [2, 30,120, 952,984],\n [1, 860,965, 917,976],\n [3, 471,997, 840,998],\n [2, 319,307, 928,504],\n [3, 
823,631, 940,908],\n [3, 969,984, 981,993],\n [2, 691,319, 865,954],\n [3, 911,926, 938,929],\n [1, 953,937, 968,991],\n [3, 914,643, 975,840],\n [1, 266,982, 436,996],\n [2, 101,896, 321,932],\n [2, 193,852, 751,885],\n [2, 576,532, 863,684],\n [1, 761,456, 940,783],\n [1, 20,290, 398,933],\n [2, 435,335, 644,652],\n [1, 830,569, 905,770],\n [2, 630,517, 905,654],\n [1, 664,53, 886,976],\n [3, 275,416, 408,719],\n [1, 370,621, 515,793],\n [1, 483,373, 654,749],\n [1, 656,786, 847,928],\n [2, 532,752, 945,974],\n [3, 301,150, 880,792],\n [2, 951,488, 958,952],\n [1, 207,729, 882,828],\n [3, 694,532, 973,961],\n [3, 676,639, 891,802],\n [2, 653,6, 905,519],\n [3, 391,109, 418,312],\n [1, 877,423, 957,932],\n [1, 340,145, 563,522],\n [2, 978,467, 988,895],\n [2, 396,418, 420,885],\n [2, 31,308, 816,316],\n [1, 107,675, 758,824],\n [1, 61,82, 789,876],\n [1, 750,743, 754,760],\n [3, 88,733, 736,968],\n [2, 754,349, 849,897],\n [3, 157,50, 975,781],\n [2, 230,231, 865,842],\n [2, 516,317, 630,329],\n [2, 697,820, 829,903],\n [1, 218,250, 271,732],\n [3, 56,167, 404,431],\n [3, 626,891, 680,927],\n [3, 370,207, 791,514],\n [3, 860,74, 949,888],\n [1, 416,527, 616,541],\n [2, 745,449, 786,908],\n [1, 485,554, 689,689],\n [1, 586,62, 693,141],\n [3, 506,759, 768,829],\n [1, 473,109, 929,166],\n [1, 760,617, 773,789],\n [3, 595,683, 618,789],\n [2, 210,775, 825,972],\n [3, 12,426, 179,982],\n [1, 774,539, 778,786],\n [1, 102,498, 121,807],\n [2, 706,897, 834,965],\n [2, 678,529, 824,627],\n [1, 7,765, 615,870],\n [2, 730,872, 974,943],\n [2, 595,626, 836,711],\n [2, 215,424, 841,959],\n [3, 341,780, 861,813],\n [3, 507,503, 568,822],\n [1, 252,603, 349,655],\n [3, 93,521, 154,834],\n [1, 565,682, 951,954],\n [1, 544,318, 703,418],\n [3, 756,953, 891,964],\n [1, 531,123, 856,991],\n [1, 148,315, 776,559],\n [2, 925,835, 963,971],\n [1, 895,944, 967,964],\n [2, 102,527, 650,747],\n [3, 626,105, 738,720],\n [2, 160,75, 384,922],\n [3, 813,724, 903,941],\n [1, 207,107, 982,849],\n [3, 750,505, 961,697],\n [3, 105,410, 885,819],\n [1, 226,104, 298,283],\n [2, 224,604, 508,762],\n [1, 477,368, 523,506],\n [2, 477,901, 627,936],\n [2, 887,131, 889,670],\n [1, 896,994, 938,999],\n [3, 401,580, 493,728],\n [3, 987,184, 991,205],\n [1, 821,643, 882,674],\n [3, 784,940, 968,959],\n [2, 251,293, 274,632],\n [2, 339,840, 341,844],\n [2, 675,351, 675,836],\n [3, 918,857, 944,886],\n [3, 70,253, 918,736],\n [2, 612,604, 772,680],\n [2, 277,40, 828,348],\n [3, 692,139, 698,880],\n [3, 124,446, 883,453],\n [3, 969,932, 990,945],\n [3, 855,692, 993,693],\n [3, 722,472, 887,899],\n [3, 978,149, 985,442],\n [3, 837,540, 916,889],\n [2, 612,2, 835,82],\n [3, 560,767, 878,856],\n [1, 461,734, 524,991],\n [3, 206,824, 976,912],\n [1, 826,610, 879,892],\n [1, 577,699, 956,933],\n [2, 9,250, 50,529],\n [2, 77,657, 817,677],\n [1, 68,419, 86,426],\n [1, 991,720, 992,784],\n [1, 668,20, 935,470],\n [2, 133,418, 613,458],\n [2, 487,286, 540,328],\n [3, 247,874, 840,955],\n [3, 301,808, 754,970],\n [2, 34,194, 578,203],\n [2, 451,49, 492,921],\n [1, 907,256, 912,737],\n [2, 479,305, 702,587],\n [1, 545,583, 732,749],\n [3, 11,16, 725,868],\n [1, 965,343, 986,908],\n [1, 674,953, 820,965],\n [3, 398,147, 504,583],\n [2, 778,194, 898,298],\n [1, 179,140, 350,852],\n [2, 241,118, 530,832],\n [2, 41,447, 932,737],\n [2, 820,663, 832,982],\n [1, 550,460, 964,782],\n [1, 31,760, 655,892],\n [3, 628,958, 811,992]]\n", "id": "9809645", "language": "Python", "matching_score": 0.9896940588951111, "max_stars_count": 4, "path": 
"Day6/inputData.py" }, { "content": "from inputData import *\n\nlights = [[0 for i in range(1000)] for j in range(1000)]\n\n\n# I've decided to not use numpy, in the spirit of Christmas.\n\n\ndef process_light(instruction, x, y):\n type = instruction[0]\n if type == 1:\n lights[x][y] = 1\n elif type == 2:\n lights[x][y] = 0\n else:\n toggle_light(x, y)\n\n\ndef toggle_light(x, y):\n if lights[x][y] == 1:\n lights[x][y] = 0\n else:\n lights[x][y] = 1\n\n\nfor instruction in instructions:\n # Squares!\n for x in range(instruction[1], instruction[3] + 1):\n for y in range(instruction[2], instruction[4] + 1):\n process_light(instruction, x, y)\n\nprint(sum(map(sum, lights)))\n", "id": "5504863", "language": "Python", "matching_score": 1.0186949968338013, "max_stars_count": 4, "path": "Day6/part1.py" }, { "content": "\nlights = []\n\nwith open(\"inputData.txt\", \"r\") as infile:\n \"\"\"\n Process inputData.txt and put it into the data array.\n \"\"\"\n for line in infile:\n line = list(line.strip())\n lights.append([1 if char == \"#\" else 0 for char in line])\n\n\ndef get_light(x, y):\n \"\"\"\n Returns light value at X, Y, or 0 if that light doesn't exist.\n :param x: position X\n :param y: position Y\n :return: light value at position X, Y\n \"\"\"\n return 0 if x < 0 or x > 99 or y < 0 or y > 99 else lights[x][y]\n\n\ndef get_neighbours(x, y):\n \"\"\"\n Returns the sum of the neighbours of X, Y.\n :param x: position X\n :param y: position Y\n :return: sum of the 8 neighbours of the light at position X, Y\n \"\"\"\n top = [\n get_light(x-1, y-1),\n get_light(x-1, y),\n get_light(x-1, y+1)\n ]\n middle = [\n get_light(x, y-1),\n get_light(x, y+1)\n ]\n bottom = [\n get_light(x+1, y-1),\n get_light(x+1, y),\n get_light(x+1, y+1)\n ]\n return sum(top) + sum(middle) + sum(bottom)\n\n\ndef get_new_state(x, y):\n \"\"\"\n Returns 1 or 0, depending on whether this cell should die or live.\n :param x: position X\n :param y: position Y\n :return: desired state\n \"\"\"\n\n neighbours = get_neighbours(x, y)\n current_state = get_light(x, y)\n\n if current_state == 0:\n if neighbours == 3:\n return 1\n else:\n return 0\n else:\n if neighbours == 2 or neighbours == 3:\n return 1\n else:\n return 0\n\n\ndef turn_on_corners():\n \"\"\"\n Turns on the four corners of the game.\n :return: nothing\n \"\"\"\n lights[0][0] = 1\n lights[0][99] = 1\n lights[99][0] = 1\n lights[99][99] = 1\n\n\ndef next_step():\n \"\"\"\n Processes the next step of the game.\n :return: next step of the lights variable\n \"\"\"\n new_lights = [[0 for a in range(100)] for b in range(100)]\n for x in range(0, 100):\n for y in range(0, 100):\n new_lights[x][y] = get_new_state(x, y)\n\n return new_lights\n\n\ndef make_output(lights_to_render):\n \"\"\"\n Prints out the lights in the desired format. Only a helper function. 
Not used.\n :param lights_to_render: lights that should be rendered.\n :return: nothing\n \"\"\"\n output = \"\"\n for light_strip in lights_to_render:\n for light in light_strip:\n output += \"#\" if light == 1 else \".\"\n output += \"\\n\"\n print(output)\n\n\nturn_on_corners()\n\n# This is where the magic happens.\n\nfor iteration in range(100):\n lights = next_step()\n turn_on_corners()\n\n\nresult = sum([sum(c) for c in lights])\nprint(result)\n", "id": "967804", "language": "Python", "matching_score": 0.8459866642951965, "max_stars_count": 4, "path": "Day18/part2.py" }, { "content": "\ninstructions = []\n\nwith open(\"inputData.txt\", \"r\") as infile:\n \"\"\"\n Process inputData.txt to insert all the instructions into an array.\n \"\"\"\n for line in infile:\n instructions.append(line.split())\n\ni = 0\na = 1\nb = 0\n\n\ndef execute_line(line):\n \"\"\"\n Executes the command stored in 'line'.\n :param line: command to execute\n :return: nothing\n \"\"\"\n if line[0] == \"inc\":\n inc(line)\n next_instruction()\n elif line[0] == \"hlf\":\n hlf(line)\n next_instruction()\n elif line[0] == \"tpl\":\n tpl(line)\n next_instruction()\n elif line[0] == \"jmp\":\n jmp(line)\n elif line[0] == \"jie\":\n jie(line)\n elif line[0] == \"jio\":\n jio(line)\n\n\ndef next_instruction():\n \"\"\"\n Increments i by 1 to allow execution of next instruction.\n :return: nothing\n \"\"\"\n global i\n i += 1\n\n\ndef jmp(line):\n global i\n i += int(line[-1])\n\n\ndef hlf(line):\n set_t(line[1], get_t(line[1]) / 2)\n\n\ndef inc(line):\n set_t(line[1], get_t(line[1]) + 1)\n\n\ndef tpl(line):\n set_t(line[1], get_t(line[1]) * 3)\n\n\ndef jio(line):\n if get_t(line[1][:-1]) == 1:\n jmp(line)\n else:\n next_instruction()\n\n\ndef jie(line):\n if get_t(line[1][:-1]) % 2 == 0:\n jmp(line)\n else:\n next_instruction()\n\n\ndef set_t(token, value):\n \"\"\"\n Sets token to value.\n :param token: register to be set\n :param value: value to set\n :return: nothing\n \"\"\"\n if token == \"a\":\n global a\n a = int(value)\n else:\n global b\n b = int(value)\n\n\ndef get_t(token):\n \"\"\"\n Gets value of token\n :param token: register to get\n :return:\n \"\"\"\n return a if token == \"a\" else b\n\n\nwhile i < len(instructions):\n execute_line(instructions[i])\n\n\"\"\"\nThese instructions are mysterious. 
Are they implying <NAME> was trapped somewhere, and the code released her?\nWhat are we being distracted from?\nIf Advent of Code starts becoming weirdly morbid, my Christmas will be ruined.\n\"\"\"\n\nprint(\"Answer: \" + str(b))\n", "id": "4068866", "language": "Python", "matching_score": 0.8237307667732239, "max_stars_count": 4, "path": "Day23/part2.py" }, { "content": "\nresults = [[20151125]]\n\nlast_code_added = 20151125\n\nrow_requested = 0\ncolumn_requested = 0\n\nwith open(\"inputData.txt\", \"r\") as infile:\n for line in infile:\n line = line.split()\n row_requested = int(line[15][:-1])\n column_requested = int(line[17][:-1])\n\ndef next_code(code):\n \"\"\"\n :param code: the previous code in the sequence\n :return: the next code in the sequence\n \"\"\"\n return (code * 252533) % 33554393\n\ndef add_diagonal():\n \"\"\"\n Adds a diagonal to the results array\n :return: nothing\n \"\"\"\n global last_code_added\n # A new row is always required for a new diagonal\n results.append([])\n\n for row in reversed(results):\n last_code_added = next_code(last_code_added)\n row.append(last_code_added)\n\n\n\nwhile len(results) < row_requested:\n # Keep adding diagonals until we have as many rows as we requested\n add_diagonal()\n\nwhile len(results[row_requested - 1]) < column_requested:\n # Since the last row only has the first column - keep adding more diagonals until we have the column requested\n # for that row.\n add_diagonal()\n\n\nprint(results[row_requested - 1][column_requested - 1])", "id": "6157282", "language": "Python", "matching_score": 0.1835213303565979, "max_stars_count": 4, "path": "Day25/part1.py" }, { "content": "alphabet = list(map(chr, range(97, 123)))\n\ndataInput = \"hxbxwxba\"\n\ndef increment(a):\n a_number = letter_number(a)\n if a_number == 25:\n return \"a\"\n\n return alphabet[a_number + 1]\n\n\ndef full_increment(a):\n should_increment = True\n i = len(a) - 1\n while should_increment:\n new_letter = increment(a[i])\n a_2 = list(a)\n a_2[i] = new_letter\n a = ''.join(a_2)\n\n i -= 1\n\n if not new_letter == \"a\":\n should_increment = False\n\n return a\n\n\n# Test `a` against the rules\n\n# 'Passwords must include one increasing straight of at least three letters'\ndef rule_one(a):\n for i in range(len(a)-2):\n letters = [letter_number(a[i]), letter_number(a[i+1]), letter_number(a[i+2])]\n if letters[1] == letters[0] + 1 and letters[2] == letters[0] + 2:\n return True\n return False\n\n# 'Passwords may not contain the letters i, o, or l'\ndef rule_two(a):\n return not(\"i\" in a or \"o\" in a or \"l\" in a)\n\n# 'Passwords must contain at least two different, non-overlapping pairs of letters'\ndef rule_three(a):\n pairs = 0\n incrementer = 0\n while incrementer < len(a) - 1:\n if a[incrementer] == a[incrementer + 1]:\n pairs += 1\n incrementer += 1\n incrementer += 1\n\n return pairs >= 2\n\ndef all_rules(a):\n return rule_one(a) and rule_two(a) and rule_three(a)\n\ndef letter_number(a):\n return alphabet.index(a)\n\ndef next_password(a):\n password = a\n while not all_rules(password):\n password = full_increment(password)\n return password\n\n\nprint(next_password(dataInput))", "id": "6392844", "language": "Python", "matching_score": 0.139586940407753, "max_stars_count": 4, "path": "Day11/part1.py" }, { "content": "import mock, pytest, requests\nfrom pytest_mock import mocker\nfrom holiday_bot import run\nfrom holiday_bot import commands\n\ndef test_execute_list_intent(mocker):\n mocker.patch('holiday_bot.commands.list')\n run.execute_intent(\"list\", 1, 2)\n 
commands.list.assert_called_once_with(1, 2)\n\ndef test_undo_intent(mocker):\n mocker.patch('holiday_bot.commands.undo')\n run.execute_intent(\"undo\", 1, 2)\n commands.undo.assert_called_once_with(1)\n", "id": "7487133", "language": "Python", "matching_score": 1.208899974822998, "max_stars_count": 1, "path": "tests/test_run.py" }, { "content": "from slackbot.bot import Bot\nfrom slackbot.bot import listen_to\nfrom slackbot.bot import respond_to\nfrom holiday_bot.slackbot_settings import *\nimport json, re, pdb, time, schedule, threading, sys, logging\nfrom holiday_bot.Messenger import Messenger\nfrom holiday_bot import google_calendar, nlp, commands\nlogging.basicConfig()\n\nblocked = False\nlast_client = None\n\n@respond_to('(.+)', re.IGNORECASE)\ndef respond(message, text):\n \"\"\"\n Handles messages sent to the bot and calls the appropriate command.\n \"\"\"\n messenger = Messenger(message)\n\n global last_client, blocked\n last_client = message.channel._client\n\n wait()\n blocked = True\n try:\n processed = nlp.query(messenger.sender_id(), text)\n print(processed)\n intent = processed[\"result\"][\"metadata\"][\"intentName\"]\n arguments = processed[\"result\"][\"parameters\"]\n execute_intent(intent, messenger, arguments)\n except:\n blocked = False\n raise\n\n blocked = False\n\ndef daily_list():\n \"\"\"\n Sends a list of the week's tasks to the appropriate channel.\n \"\"\"\n wait()\n global blocked\n blocked = True\n\n messenger = Messenger(last_client, SEND_TO)\n try:\n execute_intent(\"list\", messenger, {'date': '', 'date-period': ''})\n except:\n blocked = False\n raise\n\n blocked = False\n\ndef execute_intent(intent, messenger, arguments):\n if intent == \"failure\":\n commands.failure(messenger)\n if intent == \"list\":\n commands.list(messenger, arguments)\n if intent == \"add\":\n commands.add(messenger, arguments)\n if intent == \"delete\":\n commands.delete(messenger, arguments)\n if intent == \"undo\":\n commands.undo(messenger)\n\ndef wait():\n \"\"\"\n If there's a command currently waiting, only return when it's finished.\n \"\"\"\n global blocked\n wait_timer = 0\n while blocked:\n time.sleep(0.5)\n wait_timer += 0.5\n print(\"Message waiting for \" + str(wait_timer) + \"s\")\n\nschedule.every().day.at(SEND_AT).do(daily_list)\n\ndef main():\n \"\"\"\n Runs everything.\n \"\"\"\n print(\"Beep bloop, starting up the bot...\")\n google_calendar.initialize_service()\n schedulerThread = threading.Thread(target=run_scheduler, args=(), kwargs={})\n schedulerThread.start()\n bot = Bot()\n bot.run()\n\ndef run_scheduler():\n \"\"\"\n A loop that handles running the daily list on time.\n \"\"\"\n while True:\n schedule.run_pending()\n time.sleep(1)\n\nif __name__ == \"__main__\":\n main()\n", "id": "1553156", "language": "Python", "matching_score": 2.173133373260498, "max_stars_count": 1, "path": "holiday_bot/run.py" }, { "content": "import json\n\nclass Messenger:\n \"\"\"\n Messenger is a wrapper for either a Message or SlackClient instance.\n \"\"\"\n def __init__(self, service, channel=None):\n self.service = service\n self.service_type = type(service).__name__\n self.channel = channel\n\n def reply(self, message):\n if self.service_type == \"Message\":\n self.service.reply(message)\n else:\n self.service.send_message(self.channel, message)\n\n def send(self, message):\n if self.service_type == \"Message\":\n self.service.send(message)\n else:\n self.service.send_message(self.channel, message)\n\n def send_attachments(self, attachments):\n if self.service_type == 
\"Message\":\n self.service.send_webapi('', json.dumps(attachments))\n else:\n self.service.send_message(self.channel, '', json.dumps(attachments))\n\n def full_name(self):\n if self.service_type == \"Message\":\n return self.service.channel._client.users[self.service.body['user']][u'real_name']\n else:\n return \"*Unknown Person*\" # Or should I throw an error?\n\n def sender_id(self):\n if self.service_type == \"Message\":\n return self.service.channel._client.users[self.service.body['user']]['id']\n else:\n return 0 # Or should I throw an error?\n", "id": "9456797", "language": "Python", "matching_score": 0.6320620179176331, "max_stars_count": 1, "path": "holiday_bot/Messenger.py" }, { "content": "data = []\n\n\"\"\"\nI am assuming the desired quantities aren't generated randomly for each user.\n\"\"\"\ndesired = {\n \"children\": 3,\n \"cats\": 7,\n \"samoyeds\": 2,\n \"pomeranians\": 3,\n \"akitas\": 0,\n \"vizslas\": 0,\n \"goldfish\": 5,\n \"trees\": 3,\n \"cars\": 2,\n \"perfumes\": 1\n}\n\n\"\"\"\"\nProcess inputData.txt and puts it into the data array.\n\"\"\"\nwith open(\"inputData.txt\", \"r\") as infile:\n for line in infile:\n line = line.split()\n data.append(\n [\n int(line[1][:-1]),\n line[2][:-1], # First object\n int(line[3][:-1]),\n line[4][:-1], # Second object\n int(line[5][:-1]),\n line[6][:-1], # Third object\n int(line[7])\n\n ]\n )\n\n\n\"\"\"\nChecks if this aunt has the correct amount of 'name'\nThis function was edited to conform to the new rules\n\"\"\"\ndef checkIfValid(name, quantity):\n if name == \"cats\" or name == \"trees\":\n return desired[name] < quantity\n\n if name == \"pomeranians\" or name == \"goldfish\":\n return desired[name] > quantity\n\n return desired[name] == quantity\n\n\n\"\"\"\nMain loop - checks if each aunt has the correct amount.\n\"\"\"\nfor aunt in data:\n if checkIfValid(aunt[1], aunt[2]) and checkIfValid(aunt[3], aunt[4]) and checkIfValid(aunt[5], aunt[6]):\n print(\"Aunt \" + str(aunt[0]))\n\n\n", "id": "9885662", "language": "Python", "matching_score": 1.220943808555603, "max_stars_count": 4, "path": "Day16/part2.py" }, { "content": "import json\n\nwith open(\"inputData.txt\", \"r\") as infile:\n for line in infile:\n data = line\n\ncaptured = json.loads(data)\n\ndef main_loop(object):\n if type(object) == int:\n # It's an int, we can just return it\n return object\n elif type(object) == list:\n # It's a list, we'll have to sum it recursively\n sub_objects = [main_loop(sub_object) for sub_object in object]\n return sum(sub_objects)\n elif type(object) != dict or \"red\" in object.values():\n # It's something else or there's 'red' in the object\n return 0\n else:\n #It's a dict, we'll have to sum it recursively too\n return main_loop(list(object.values()))\n\nprint(main_loop(captured))", "id": "1303685", "language": "Python", "matching_score": 1.120419979095459, "max_stars_count": 4, "path": "Day12/part2.py" }, { "content": "import re\n\nwith open(\"inputData.txt\", \"r\") as infile:\n for line in infile:\n data = line\n\ncaptured = re.findall(\"([-]?[0-9]+)\", data)\n\ncaptured = list(map(int, captured))\n\nprint(sum(captured))", "id": "6990945", "language": "Python", "matching_score": 0.7116394639015198, "max_stars_count": 4, "path": "Day12/part1.py" }, { "content": "import hashlib\n\ni = 0\n\ncode = \"\"\n\nwith open(\"inputData.txt\", \"r\") as infile:\n for line in infile:\n code = line\n\nmd5_hash_string = \"\"\n\nwhile not md5_hash_string.startswith('00000'):\n i += 1\n to_hash = code + str(i)\n to_hash.encode('utf-8')\n 
md5_hash = hashlib.md5()\n md5_hash.update(to_hash.encode('utf-8'))\n md5_hash_string = md5_hash.hexdigest()\n \n\nprint(i)", "id": "10239198", "language": "Python", "matching_score": 0.08360550552606583, "max_stars_count": 4, "path": "Day4/part1.py" }, { "content": "from functools import reduce\n\ndata = \"\"\nelf_counter = {}\n\nwith open(\"inputData.txt\", \"r\") as infile:\n \"\"\"\n Process inputData.txt and put it into the data array.\n \"\"\"\n for line in infile:\n data = int(line)\n\n\ndef get_factors(number):\n \"\"\"\n Note: Taken from Stack Overflow\n http://stackoverflow.com/a/6800214\n :param number: Number to factorize\n :return: Set of factors for this number\n \"\"\"\n return set(reduce(list.__add__,\n ([i, number // i] for i in range(1, int(number ** 0.5) + 1) if number % i == 0)))\n\n\ndef get_result(number):\n \"\"\"\n :param number: House number\n :return: The number of presents in the house of that number\n \"\"\"\n result = get_factors(number)\n result = remove_unwanted_elves(result)\n register_delivery(result)\n return sum(result) * 11\n\n\ndef remove_unwanted_elves(elves):\n \"\"\"\n Removes elves that have already delivered to 50 houses\n :param elves: set of elves to check\n :return: new set, with disqualified elves removed\n \"\"\"\n new_set = elves.copy()\n for elf in elves:\n if elf in elf_counter and elf_counter[elf] >= 50:\n new_set.discard(elf)\n return new_set\n\n\ndef register_delivery(elves):\n \"\"\"\n Ticks every elf up by 1 in the elf_counter\n :param elves: set of elves to tick up\n :return: nothing\n \"\"\"\n for elf in elves:\n if elf in elf_counter:\n elf_counter[elf] += 1\n else:\n elf_counter[elf] = 1\n\n\n\nresult_so_far = 0\nticker = 0\nwhile result_so_far < data:\n ticker += 1\n result_so_far = get_result(ticker)\n\nprint(\"Answer: House #\" + str(ticker))\n", "id": "9102843", "language": "Python", "matching_score": 3.5516936779022217, "max_stars_count": 4, "path": "Day20/part2.py" }, { "content": "from functools import reduce\n\ndata = \"\"\n\nwith open(\"inputData.txt\", \"r\") as infile:\n \"\"\"\n Process inputData.txt and put it into the data array.\n \"\"\"\n for line in infile:\n data = int(line)\n\n\ndef get_factors(number):\n \"\"\"\n Note: Taken from Stack Overflow\n http://stackoverflow.com/a/6800214\n :param number: Number to factorize\n :return: Set of factors for this number\n \"\"\"\n return set(reduce(list.__add__,\n ([i, number // i] for i in range(1, int(number ** 0.5) + 1) if number % i == 0)))\n\n\ndef get_result(number):\n \"\"\"\n :param number: House number\n :return: The number of presents in the house of that number\n \"\"\"\n return sum(get_factors(number)) * 10\n\n\nresult = 0\nticker = 0\nwhile result < data:\n ticker += 1\n result = get_result(ticker)\n\nprint(\"Answer: House #\" + str(ticker))\n", "id": "9958982", "language": "Python", "matching_score": 0.9272477626800537, "max_stars_count": 4, "path": "Day20/part1.py" }, { "content": "\ndef repeat(data):\n # Quick hack, so sorry\n data = data + \"0\"\n\n iteration = 0\n result = \"\"\n\n while iteration < len(data) - 1:\n x = iteration + 1\n character_count = 1\n char = data[iteration]\n\n while data[x] == char and x < len(data) - 1:\n x = min(x + 1, len(data) - 1)\n character_count += 1\n\n iteration += character_count\n result += str(character_count) + str(char)\n return result\n\n\nactualResult = \"1113222113\"\n\nfor i in range(0, 40):\n actualResult = repeat(actualResult)\n\nprint(len(actualResult))", "id": "7095807", "language": "Python", "matching_score": 0.4886723458766937, "max_stars_count": 4, "path": 
"Day10/part1.py" }, { "content": "\nx = 0\ncounter = 0\ncharacters = \"\"\n\nwith open(\"inputData.txt\", \"r\") as infile:\n for line in infile:\n characters += line\n\nfor i in characters:\n counter += 1\n if i == \"(\":\n x += 1\n else:\n x -= 1\n\n if x < 0:\n print(counter)\n break\n", "id": "5101604", "language": "Python", "matching_score": 1.1081660985946655, "max_stars_count": 4, "path": "Day1/part2.py" }, { "content": "# The first time I did it,\n# I just replaced ( with + 1 and ( with - 1 and got the result.\n# But that was a bit too cheaty in my eyes.\n\nx = 0\ncharacters = \"\"\n\nwith open(\"inputData.txt\", \"r\") as infile:\n for line in infile:\n characters += line\n\nfor i in characters:\n if i == \"(\":\n x += 1\n else:\n x -= 1\n\nprint(x)", "id": "4521037", "language": "Python", "matching_score": 0, "max_stars_count": 4, "path": "Day1/part1.py" }, { "content": "\nfrom __future__ import print_function\nimport httplib2\nimport os\nimport sys\n\nfrom googleapiclient import discovery\nfrom oauth2client import client\nfrom oauth2client import tools\nfrom oauth2client.file import Storage\n\nfrom datetime import *\nfrom holiday_bot.slackbot_settings import *\n\nflags = None\n\n\nservice = None\n\ndef initialize_service():\n global service\n credentials = get_credentials()\n http = credentials.authorize(httplib2.Http())\n service = discovery.build('calendar', 'v3', http=http)\n\ndef get_credentials():\n \"\"\"Gets valid user credentials from storage.\n\n If nothing has been stored, or if the stored credentials are invalid,\n the OAuth2 flow is completed to obtain the new credentials.\n\n Returns:\n Credentials, the obtained credential.\n \"\"\"\n credential_path = os.path.join(os.path.dirname(sys.argv[0]),\n 'credentials.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials\n\ndef list_range(min, max):\n min = min.isoformat() + 'Z' # 'Z' indicates UTC time\n max = max.isoformat() + 'Z'\n events_result = service.events().list(\n calendarId=GOOGLE_CALENDAR_ID, timeMin=min, timeMax=max, maxResults=1000, singleEvents=True,\n orderBy='startTime').execute()\n return events_result.get('items', [])\n\ndef add_event(event):\n service.events().insert(calendarId=GOOGLE_CALENDAR_ID, body=event).execute()\n\ndef delete_event(event_id):\n service.events().delete(calendarId=GOOGLE_CALENDAR_ID, eventId=event_id).execute()\n", "id": "623197", "language": "Python", "matching_score": 2.840883255004883, "max_stars_count": 1, "path": "holiday_bot/google_calendar.py" }, { "content": "\n\n#\n# This is an example configuration.\n# Add your own keys and rename it to `slackbot_settings.py`.\n#\n\n\nDEFAULT_REPLY = \"Hi! 
:wave: If you want me to do something, you should tell me what to do.\"\nERRORS_TO = \"your_slack_username_goes_here\" # Don't put an @ sign.\n\nPLUGINS = [\n 'slackbot.plugins'\n]\n\nAPI_AI_KEY = \"api.ai client access token\"\nAPI_TOKEN = \"slack integration api key\"\n\nGOOGLE_CALENDAR_ID=\"<EMAIL>\"\n# Find this by going into Google Calendar for Web, clicking the dropdown beside a specific calendar,\n# going into Settings, and you'll find the ID on that page in that email format.\n\n# Small inconsequential note: the bot needs to first be messaged after it starts\n# before it can send the daily list. Anyone can message it, and they don't\n# have to message it directly, they can just ping it.\nSEND_AT = \"9:00\"\nSEND_TO = \"pto\" # send the week's calendar at 9am to the channel #pto\n\n\n# Google Calendar configuration:\n# If modifying these scopes, delete your previously saved credentials\n# located in client_secret.json in this folder\nSCOPES = 'https://www.googleapis.com/auth/calendar'\nCLIENT_SECRET_FILE = 'client_secret.json'\n# client_secret.json is a JSON file you download from Google.\n# Follow Step 1. of this guide to do so:\n# https://developers.google.com/google-apps/calendar/quickstart/python\n# Put it in this folder, holiday_bot/client_secret.json\nAPPLICATION_NAME = 'PTO Thingy'\n", "id": "485401", "language": "Python", "matching_score": 0.8075318336486816, "max_stars_count": 1, "path": "holiday_bot/slackbot_settings.example.py" }, { "content": "from datetime import *\nfrom dateutil.parser import parse # This parser just makes a guess at what format it is in.\n# It could be better for stability to supply the format in advance.\nimport json, re, pdb\nfrom holiday_bot import google_calendar\n\nundo_queue = {}\n\ndef failure(message):\n message.reply(\"Sorry, I didn't catch that.\")\n message.send(\"Try rephrasing what you said to make it clearer :robot_face:\")\n\ndef list(message, arguments):\n\n arguments = convert_dates(arguments, 7)\n start_date, end_date = date_period_to_datetime_objects(arguments)\n\n # Fetching the data\n\n events = google_calendar.list_range(start_date, end_date)\n\n if len(events) == 0:\n message.reply(\"Looks like there's no PTOs from \" + readable_format(start_date) + \" to \" + readable_format(end_date) + \".\")\n return\n\n days_in_range = max(abs((start_date - end_date).days), abs((end_date - start_date).days))\n days = [[] for day in range(days_in_range)]\n continued = []\n\n for event in events:\n start = parse(event['start']['date'])\n end = parse(event['end']['date'])\n\n now = start_date\n\n start_days_offset = (start - now).days\n end_days_offset = (end - now).days\n\n full_name = event['summary']\n description = None\n if 'description' in event:\n description = event['description']\n\n if start_days_offset == end_days_offset:\n if start_days_offset < days_in_range:\n # The one day PTO is within the week.\n attachment = generate_attachment(full_name, \"oneDay\", description, start, end)\n days[start_days_offset].append(attachment)\n else:\n if start_days_offset < 0 and end_days_offset >= 0:\n attachment = generate_attachment(full_name, \"continue\", description, start, end)\n continued.append(attachment)\n\n elif start_days_offset < days_in_range:\n # The start is within the week.\n attachment = generate_attachment(full_name, \"start\", description, start, end)\n days[start_days_offset].append(attachment)\n\n if end_days_offset < days_in_range:\n # The end is within the week.\n attachment = generate_attachment(full_name, \"end\", 
description, start, end)\n days[end_days_offset].append(attachment)\n\n message.send('Here\\'s what\\'s scheduled from ' + readable_format(start_date) + \" to \" + readable_format(end_date) + \".\")\n\n if continued:\n message.send_attachments(continued)\n\n if not days:\n message.send(\"It looks like there's no PTOs from \" + readable_format(start_date) + \" to \" + readable_format(end_date) + \".\")\n\n for dayNumber, day in enumerate(days):\n if day:\n message.send(readable_format(start_date + timedelta(days=dayNumber)) +\":\\n\")\n message.send_attachments(day)\n\n message.send(\"That's it!\")\n\ndef add(message, arguments):\n full_name = message.full_name()\n\n arguments = convert_dates(arguments)\n\n if not arguments['date-period']:\n message.send(\"I can't book a PTO for you unless you give me a date or a date period.\")\n return\n\n start_date, end_date = date_period_to_datetime_objects(arguments)\n\n # Prevent duplicates\n events = google_calendar.list_range(start_date, end_date)\n\n for event in events:\n if event['summary'].lower().strip() == full_name.lower().strip():\n message.reply(\"Uh oh, it looks like you're already booked to be on PTO at that time:\")\n start = parse(event['start']['date'])\n end = parse(event['end']['date'])\n attachment = generate_attachment(full_name, \"future\", event['description'] if 'description' in event else '', start, end)\n message.send_attachments([attachment])\n message.send(\"You could delete this PTO or pick another period to book your PTO on.\")\n return\n\n\n event = {\n 'summary': full_name,\n 'description': arguments['reason'],\n 'start': {\n 'date': iso_format(start_date)\n },\n 'end': {\n 'date': iso_format(end_date)\n }\n }\n\n google_calendar.add_event(event)\n message.reply(\"Done, take a look at what I added:\")\n attachment = generate_attachment(full_name, \"future\", event['description'] if 'description' in event else '', start_date, end_date)\n message.send_attachments([attachment])\n message.reply(\"You can tell me to undo this.\")\n\n user_id = message.sender_id()\n undo_queue[user_id] = {'action': 'add', 'event': event}\n\ndef delete(message, arguments):\n full_name = message.full_name()\n user_id = message.sender_id()\n\n arguments = convert_dates(arguments, 365)\n start_date, end_date = date_period_to_datetime_objects(arguments)\n\n # Fetching the data\n\n events = google_calendar.list_range(start_date, end_date)\n\n deleted = 0\n undo_queue[user_id] = {'action': 'delete', 'events': []}\n for event in events:\n if event['summary'].lower().strip() == full_name.lower().strip():\n google_calendar.delete_event(event['id'])\n undo_queue[user_id]['events'].append(event)\n deleted += 1\n\n if deleted == 0:\n message.reply(\"I didn't delete anything because I couldn't find any PTOs from you between \" + readable_format(start_date) + \" to \" + readable_format(end_date))\n return\n\n message.reply(\":wastebasket: Done! I deleted \" + str(deleted) + \" PTOs from you.\")\n message.reply(\"You can tell me to undo this.\")\n\n user_id = message.sender_id()\n\ndef undo(message):\n user_id = message.sender_id()\n full_name = message.full_name().lower().strip()\n\n if user_id not in undo_queue or 'action' not in undo_queue[user_id]:\n message.reply(\"There's nothing you can undo. 
You could try to delete or create a PTO instead.\")\n return\n\n to_undo = undo_queue[user_id]\n if to_undo['action'] == 'delete':\n undo_delete(to_undo, message)\n\n if to_undo['action'] == 'add':\n undo_add(to_undo, message)\n\n undo_queue.pop(user_id, None)\n\ndef undo_delete(to_undo, message):\n for event in to_undo['events']:\n new_event = {\n 'summary': event['summary'],\n 'start': event['start'],\n 'end': event['end']\n }\n if 'description' in event:\n new_event['description'] = event['description']\n google_calendar.add_event(new_event)\n if len(to_undo['events']) == 1:\n message.reply(\"Undone! I added the PTO back.\")\n else:\n message.reply(\"Undone! I added \" + str(len(to_undo['events'])) + \" PTOs back.\")\n\ndef undo_add(to_undo, message):\n full_name = message.full_name()\n event_to_undo = to_undo['event']\n start_date = parse(event_to_undo['start']['date'])\n end_date = parse(event_to_undo['end']['date'])\n events = google_calendar.list_range(start_date, end_date)\n\n for event in events:\n if event['summary'] == event_to_undo['summary']:\n google_calendar.delete_event(event['id'])\n message.reply(\"Undone! I deleted that PTO.\")\n message.reply(\"If I got something wrong when making it, you could try again but phrase it differently.\")\n break\n\ndef generate_attachment(name, type, description, start, end):\n attachment = {}\n attachment[\"fallback\"] = \"Calendar event\"\n attachment[\"title\"] = name\n if description:\n attachment[\"footer\"] = description\n\n if type == \"end\":\n attachment[\"text\"] = \"Returns from PTO\"\n attachment[\"color\"] = \"#2ECC40\"\n elif type == \"oneDay\":\n attachment[\"text\"] = \"On PTO for one day\"\n attachment[\"color\"] = \"#FF851B\"\n elif type == \"start\":\n attachment[\"text\"] = \"Starts PTO until they return on \" + readable_format(end)\n attachment[\"color\"] = \"#FF4136\"\n elif type == \"continue\":\n attachment[\"text\"] = \"Still on PTO from \" + readable_format(start) + \" until they return on \" + readable_format(end)\n attachment[\"color\"] = \"#0074D9\"\n elif type == \"future\":\n attachment[\"text\"] = \"On PTO from \" + readable_format(start) + \" until they return on \" + readable_format(end)\n attachment[\"color\"] = \"#0074D9\"\n\n return attachment\n\n\ndef convert_dates(arguments, dayOffset=None):\n if 'date' in arguments and arguments['date']:\n arguments['date-period'] = date_to_date_period(arguments['date'])\n\n if dayOffset and 'date-period' in arguments and not arguments['date-period']:\n arguments['date-period'] = date_to_date_period(datetime.now(), dayOffset)\n\n return arguments\n\ndef date_to_date_period(date, dayOffset=1):\n if type(date) == str:\n that_day = parse(date)\n else:\n that_day = date\n return iso_format(that_day) + \"/\" + iso_format(that_day + timedelta(days=dayOffset))\n\ndef date_period_to_datetime_objects(arguments):\n dates = arguments['date-period'].split('/')\n start_date = parse(dates[0])\n end_date = parse(dates[1])\n return start_date, end_date\n\ndef iso_format(date):\n return date.strftime('%Y-%m-%d')\n\ndef readable_format(date):\n return date.strftime(\"%A, %d %B\")\n", "id": "1413292", "language": "Python", "matching_score": 3.6664254665374756, "max_stars_count": 1, "path": "holiday_bot/commands.py" }, { "content": "import mock, pytest, requests, datetime\nfrom pytest_mock import mocker\nfrom holiday_bot import run\nfrom holiday_bot import commands\nfrom dateutil.parser import parse\nfrom datetime import datetime, timedelta\n\ndef 
test_conversion_of_date_into_date_period():\n assert commands.date_to_date_period(\"2017-05-03\", 2) == \"2017-05-03/2017-05-05\"\n\ndef test_conversion_of_date_into_date_period_with_one_day():\n assert commands.date_to_date_period(\"2017-05-03\") == \"2017-05-03/2017-05-04\"\n\ndef test_convert_dates_from_arguments():\n arguments = {\n 'date': '2017-05-03'\n }\n assert commands.convert_dates(arguments)['date-period'] == \"2017-05-03/2017-05-04\"\n\ndef test_convert_dates_from_arguments_does_nothing():\n arguments = {\n 'date-period': '2017-05-03/2017-05-06'\n }\n assert commands.convert_dates(arguments, 7)['date-period'] == '2017-05-03/2017-05-06'\n\ndef test_convert_date_periods_to_datetime_objects():\n arguments = {\n 'date-period': '2017-05-03/2017-05-06'\n }\n start, end = commands.date_period_to_datetime_objects(arguments)\n assert start.strftime('%Y-%m-%d') == \"2017-05-03\"\n assert end.strftime('%Y-%m-%d') == \"2017-05-06\"\n\ndef test_attachment_generation():\n result = commands.generate_attachment(\"<NAME>\", \"future\", \"Description...\", datetime.now(), datetime.now() + timedelta(days=3))\n assert result[\"footer\"] == \"Description...\"\n assert result[\"title\"] == \"<NAME>\"\n\ndef test_iso_format():\n assert commands.iso_format(parse(\"2017-01-01\")) == \"2017-01-01\"\n\ndef test_readable_format():\n assert commands.readable_format(parse(\"2017-01-01\")) == \"Sunday, 01 January\"\n", "id": "2205274", "language": "Python", "matching_score": 1.9577568769454956, "max_stars_count": 1, "path": "tests/test_commands.py" }, { "content": "\nimport mock, pytest, requests\nfrom pytest_mock import mocker\nfrom holiday_bot import nlp, slackbot_settings\n\ndef test_attempts_request(mocker):\n mocker.patch('requests.get')\n nlp.query(3, 'Text to parse...')\n requests.get.assert_called_once_with('https://api.api.ai/api/query', headers={'Authorization': 'Bearer ' + slackbot_settings.API_AI_KEY}, params={'v': 20150910, 'query': 'Text to parse...', 'lang': 'en', 'sessionId': 3})\n", "id": "5743973", "language": "Python", "matching_score": 3.738621234893799, "max_stars_count": 1, "path": "tests/test_nlp.py" }, { "content": "import requests\nfrom holiday_bot.slackbot_settings import API_AI_KEY\n\ndef query(user_id, text):\n endpoint = 'https://api.api.ai/api/query'\n parameters = {\n 'v': 20150910, # API version\n 'query': text,\n 'lang': 'en',\n 'sessionId': user_id\n }\n headers = {\n 'Authorization': 'Bearer ' + API_AI_KEY\n }\n response = requests.get(endpoint, params=parameters, headers=headers)\n return response.json()\n", "id": "12185298", "language": "Python", "matching_score": 2.8725223541259766, "max_stars_count": 1, "path": "holiday_bot/nlp.py" } ]
1
aryxns
[ { "content": "\"\"\"\nModule of utility functions to handle data manipulation\n\"\"\"\nimport itertools\nimport numpy as np\nfrom nltk.tokenize import RegexpTokenizer\nfrom nltk.corpus import stopwords\nfrom nltk import FreqDist, pos_tag, ne_chunk\nfrom nltk.tree import Tree\nfrom sklearn.externals import joblib\nimport pkg_resources\n\ndef flatten_list(items):\n \"\"\"\n Method to flatten list of lists using itertools (faster than traditional lists)\n Args:\n items: the list of lists [ [item1],[item2] ]\n \"\"\"\n if len(items) > 0 and items is not None:\n return list(itertools.chain.from_iterable(items))\n\ndef top_freq_words(corpus, topwords):\n \"\"\"\n Method to return frequency distribution of words from corpus text\n Args:\n corpus: the corpus of comments as a single string\n \"\"\"\n tokenizer = RegexpTokenizer(r'\\w+')\n words = tokenizer.tokenize(corpus)\n swords = stopwords.words('english')\n freq_words = FreqDist(w.lower() for w in words if w not in swords)\n return freq_words.most_common(topwords)\n\ndef extract_entities(corpus):\n \"\"\"\n Method to extract key entities from corpus of words\n Returns list of chunked key entities\n Args:\n corpus: the corpus of comments as a single string\n \"\"\"\n tokenizer = RegexpTokenizer(r'\\w+')\n words = tokenizer.tokenize(corpus)\n chunked = ne_chunk(pos_tag(words))\n cont_chunk = []\n curr_chunk = []\n for c in chunked:\n if type(c) == Tree:\n curr_chunk.append(\" \".join([token for token, pos in c.leaves()]))\n elif curr_chunk:\n named_entity = \" \".join(curr_chunk)\n if named_entity not in cont_chunk:\n cont_chunk.append(named_entity)\n curr_chunk = []\n else:\n continue\n if (len(cont_chunk) > 0):\n return cont_chunk[:10]\n\ndef load_ml_pipeline(filename):\n \"\"\"\n Method to load ML pipeline model via sklearn joblib (pickle)\n Args:\n file_path: the file path of the .pkl model\n \"\"\"\n model_path = pkg_resources.resource_filename('youtube_sentiment', 'models/')\n with (open(model_path + filename, \"rb\")) as f:\n try:\n return joblib.load(f)\n except Exception as e:\n raise e\n\ndef total_counts(arrcounts):\n \"\"\"\n Method to calculate the total counts of values in numpy array\n Returns tuple of number of 1 vs. 0 counts\n Example: (100, 50)\n Args:\n arrcounts: numpy array of values\n \"\"\"\n counts = np.bincount(arrcounts)\n return (counts[1], counts[0]) # totals 1s, 0s\n ", "id": "10215893", "language": "Python", "matching_score": 3.6846115589141846, "max_stars_count": 4, "path": "youtube_sentiment/utility.py" }, { "content": "from youtube_sentiment import load_ml_pipeline\nfrom youtube_sentiment import total_counts\nfrom youtube_sentiment import top_freq_words\nfrom youtube_sentiment import extract_entities\n\nclass NLP(object):\n \"\"\"\n Main class to use NLP structures and data analysis\n Args:\n self\n model: ML model to use under /models\n \"\"\"\n def __init__(self, model):\n self.model = load_ml_pipeline(model)\n\n def process_comments(self, comments):\n \"\"\"\n Return list of tuples of video comments with tagged sentiment\n Ex. 
[(\"Great video\", 1), (\"Terrible video\", 0)]\n Args:\n comments: list of comments\n \"\"\"\n predictions = self.model.predict(comments)\n return list(zip(comments, predictions))\n\n def process_comments_summary(self, comments):\n \"\"\"\n Display video sentiment and analytics\n Args:\n comments: list of comments\n \"\"\"\n # Corpus Summary\n comments_corpus = ' '.join(comments)\n entities = extract_entities(comments_corpus)\n top_words = top_freq_words(comments_corpus, topwords=20)\n # Classify sentiment\n predictions = self.model.predict(comments)\n pos, neg = total_counts(predictions)\n print(\"\"\"\n Video Summary:\n --------------------------------------\n Total sentiment scores (Pos, Neg): {0}, {1}\n Percentage Negative Sentiment: {2}\n Top words by frequency: {3}\n Key entities: {4}\n \"\"\"\n .format(pos, neg, (neg / (pos + neg)), top_words, entities))", "id": "7471314", "language": "Python", "matching_score": 1.7945194244384766, "max_stars_count": 4, "path": "youtube_sentiment/nlp.py" }, { "content": "from youtube_sentiment import Service\nfrom youtube_sentiment import Logger\nfrom youtube_sentiment import flatten_list\n\nclass Youtube(object):\n \"\"\"\n Main class to use REST requests using Google Youtube API V3\n https://developers.google.com/youtube/v3/docs/\n Args:\n self\n endpoint: Google API endpoint\n api_key: Google API key for Youtube\n \"\"\"\n def __init__(self, endpoint, api_key, maxpages):\n self.apiService = Service(endpoint)\n self.api_key = api_key\n self.maxpages = maxpages\n\n def get_comments(self, videoId):\n \"\"\"\n Method to return list of video comments\n Args:\n self\n videoId: Youtube video unique id from url\n \"\"\"\n payload = {\n 'key': self.api_key, \n 'textFormat': 'plaintext', \n 'part': 'snippet', \n 'videoId': videoId,\n 'maxResults': 100\n }\n r = self.apiService.get(payload=payload)\n all_comments = []\n all_comments.append(self.get_comments_threads(r.json()))\n nextPageToken = r.json().get('nextPageToken')\n idx = 0\n while(nextPageToken and idx < self.maxpages):\n payload[\"pageToken\"] = nextPageToken\n r_next = self.apiService.get(payload=payload)\n nextPageToken = r_next.json().get(\"nextPageToken\")\n all_comments.append(self.get_comments_threads(r_next.json()))\n idx += 1\n return flatten_list(all_comments)\n\n def get_comments_threads(self, comments):\n \"\"\"\n Method to return all comments from Youtube comment threads\n Args:\n self\n comments: list of response json comments including replies\n \"\"\"\n try:\n all_comments = []\n for item in comments[\"items\"]:\n comment = item[\"snippet\"][\"topLevelComment\"]\n text = comment[\"snippet\"][\"textDisplay\"]\n all_comments.append(text)\n if 'replies' in item.keys():\n for reply in item['replies']['comments']:\n rtext = reply[\"snippet\"][\"textDisplay\"]\n all_comments.append(rtext)\n return all_comments\n except KeyError as keyError:\n raise\n except Exception as e:\n raise\n", "id": "3707339", "language": "Python", "matching_score": 2.584244728088379, "max_stars_count": 4, "path": "youtube_sentiment/youtube.py" }, { "content": "import argparse\nfrom youtube_sentiment import Youtube\nfrom youtube_sentiment import NLP\n\ndef video_summary(apiKey, videoId, maxpages, model):\n \"\"\"\n Main method to return a print friendly summary of Youtube sentiment\n \"\"\"\n yt = Youtube('https://www.googleapis.com/youtube/v3/commentThreads', apiKey, maxpages)\n comments = yt.get_comments(videoId)\n nlp = NLP(model)\n nlp.process_comments_summary(comments)\n\ndef tagged_comments(apiKey, 
videoId, maxpages, model):\n \"\"\"\n Main method to return list of tagged comments as tuple\n Ex. [(\"Great movie\", 1), (\"Terrible movie\", 0)]\n \"\"\"\n yt = Youtube('https://www.googleapis.com/youtube/v3/commentThreads', apiKey, maxpages)\n comments = yt.get_comments(videoId)\n nlp = NLP(model)\n tagged_comments = nlp.process_comments(comments)\n return tagged_comments\n\ndef main():\n \"\"\"\n Main argparse for command line\n \"\"\"\n parser = argparse.ArgumentParser()\n parser.add_argument(\"apiKey\", help=\"Enter the Youtube API key to use for requests\")\n parser.add_argument(\"videoId\", help=\"Enter the Youtube video ID\")\n parser.add_argument(\"maxpages\", help=\"Enter the max pages returned of comments\", type=int)\n parser.add_argument(\"model\", help=\"Enter the model name to use for sentiment\")\n args = parser.parse_args()\n video_summary(args.apiKey, args.videoId, args.maxpages, args.model)\n\nif __name__ == '__main__':\n main()", "id": "2782845", "language": "Python", "matching_score": 1.4272397756576538, "max_stars_count": 4, "path": "youtube_sentiment/main.py" }, { "content": "from unittest import TestCase\nimport sys\nfrom contextlib import contextmanager\nfrom io import StringIO\nfrom youtube_sentiment import NLP\n\nclass TestUtil(TestCase):\n \"\"\" Test Utility \"\"\"\n @classmethod\n def setUpClass(self):\n \"\"\" Setup \"\"\"\n self.mock = NLP(\"lr_sentiment_basic.pkl\")\n\n @contextmanager\n def captured_output(self):\n \"\"\" To capture stdout or errors \"\"\"\n new_out, new_err = StringIO(), StringIO()\n old_out, old_err = sys.stdout, sys.stderr\n try:\n sys.stdout, sys.stderr = new_out, new_err\n yield sys.stdout, sys.stderr\n finally:\n sys.stdout, sys.stderr = old_out, old_err\n\n \"\"\" Utility class tests \"\"\"\n def test_process_comments(self):\n \"\"\" Test tagged video comments \"\"\"\n mock_comments = [\"Was a great movie\", \"Terrible movie and awful not recommend\"]\n tagged = self.mock.process_comments(mock_comments)\n self.assertTrue(type(tagged) == list)\n self.assertTrue(tagged[0][0] == \"Was a great movie\")\n self.assertTrue(tagged[1][0] == \"Terrible movie and awful not recommend\")\n self.assertTrue(tagged[0][1] == 1)\n self.assertTrue(tagged[1][1] == 0)\n\n def test_comments_summary(self):\n \"\"\" Test NLP summary \"\"\"\n mock_comments = [\"Was a great movie the character Jude was good\", \"Terrible movie and awful not recommend\", \"Worst I have ever seen\"]\n with self.captured_output() as (out, err):\n self.mock.process_comments_summary(mock_comments)\n printout = out.getvalue().strip() # capture stdout for summary test\n self.assertTrue(\"Total sentiment scores (Pos, Neg): 1, 2\" in printout)\n self.assertTrue(\"Percentage Negative Sentiment: 0.6666666666666666\" in printout)\n self.assertTrue(\"Top words by frequency: [('movie', 2)\" in printout)\n self.assertTrue(\"Key entities: ['Jude']\" in printout)", "id": "11031372", "language": "Python", "matching_score": 4.264031410217285, "max_stars_count": 4, "path": "youtube_sentiment/tests/test_nlp.py" }, { "content": "from unittest import TestCase\nfrom numpy import array as nparray\nfrom youtube_sentiment import flatten_list\nfrom youtube_sentiment import load_ml_pipeline\nfrom youtube_sentiment import top_freq_words\nfrom youtube_sentiment import total_counts\nfrom youtube_sentiment import extract_entities\n\nclass TestUtil(TestCase):\n \"\"\" Test Utility \"\"\"\n @classmethod\n def setUpClass(self):\n \"\"\" Setup \"\"\"\n\n \"\"\" Utility class tests \"\"\"\n def 
test_flatten_list(self):\n \"\"\" Test flatten list of lists structure \"\"\"\n mock = [[\"Was a great movie\"], [\"Wow did you see that?\"]]\n self.assertTrue(flatten_list(mock) == [\"Was a great movie\", \"Wow did you see that?\"])\n\n def test_load_model(self):\n \"\"\" Test loading of a model \"\"\"\n mock = load_ml_pipeline(\"lr_sentiment_basic.pkl\")\n self.assertIsNotNone(mock)\n self.assertTrue(hasattr(mock, 'predict'))\n with self.assertRaises(Exception) as mockexception:\n load_ml_pipeline(\"lr_sentiment_findme.pkl\")\n\n def test_model_predict(self):\n \"\"\" Test model sentiment predict and action \"\"\"\n mock_comments = [\"Hey nice video you made loved it\", \"Terrible video worst ever\"]\n mock = load_ml_pipeline(\"lr_sentiment_basic.pkl\")\n predictions = mock.predict(mock_comments)\n self.assertTrue(predictions[0] == 1)\n self.assertTrue(predictions[1] == 0)\n\n def test_comments_frequency(self):\n \"\"\" Test comments word frequencies \"\"\"\n mock_comments = \"Hey nice video you made loved it Terrible video worst ever loved it loved\"\n top_words = top_freq_words(mock_comments, topwords=20)\n self.assertTrue(len(top_words) > 1)\n self.assertTrue(top_words[0] == (\"loved\", 3))\n\n def test_arr_counts(self):\n \"\"\" Test numpy counts of values \"\"\"\n mock_counts = nparray([0, 0, 1, 1, 1])\n counts = total_counts(mock_counts)\n self.assertTrue(counts == (3, 2))\n\n def test_extract_entities(self):\n \"\"\" Test entity extract via NLTK \"\"\"\n mock_comments = \"Hey nice video you made loved it Mike is in Japan right now good job\"\n entities = extract_entities(mock_comments)\n self.assertTrue(len(entities) > 0)\n self.assertTrue(\"Mike\" in entities)", "id": "2006427", "language": "Python", "matching_score": 3.5440187454223633, "max_stars_count": 4, "path": "youtube_sentiment/tests/test_utility.py" }, { "content": "from unittest import TestCase\nimport shutil\nfrom youtube_sentiment import Logger\n\nclass TestLogger(TestCase):\n \"\"\" Test logger \"\"\"\n @classmethod\n def setUpClass(self):\n \"\"\" Setup \"\"\"\n self.mock = Logger(self.__class__.__name__, maxbytes=8)\n\n \"\"\" Logging tests \"\"\"\n def test_logging(self):\n \"\"\" Test logger init and log creation \"\"\"\n self.assertTrue(hasattr(self.mock, 'get'))\n logger = self.mock.get()\n self.assertTrue(hasattr(logger, 'info'))\n logger.info(\"log test\")\n\n @classmethod\n def tearDownClass(self):\n \"\"\" Tear down \"\"\"\n shutil.rmtree('./log')\n", "id": "3017515", "language": "Python", "matching_score": 1.8031320571899414, "max_stars_count": 4, "path": "youtube_sentiment/tests/test_logger.py" }, { "content": "from unittest import TestCase\nfrom youtube_sentiment import Service\nfrom requests import codes\n\nclass TestService(TestCase):\n \"\"\" Test Service layer \"\"\"\n\n def test_service_attributes(self):\n \"\"\" Ensure service attributes \"\"\"\n with Service('') as mock_service:\n self.assertTrue(hasattr(mock_service, 'get'))\n self.assertTrue(hasattr(mock_service, '__enter__'))\n self.assertTrue(hasattr(mock_service, '__exit__'))\n self.assertTrue(type(mock_service.__enter__()) is type(mock_service))\n\n \"\"\" Service test cases \"\"\"\n def test_ok_endpoint(self):\n \"\"\" Test sample endpoint \"\"\"\n with Service('https://www.google.com') as mock_service:\n payload = {\n 'q': 'test'\n }\n r = mock_service.get(payload=payload)\n self.assertTrue(r.status_code == codes.ok)\n \n def test_request_exception(self):\n \"\"\" Test that a bad request raises an exception 
\"\"\"\n with self.assertRaises(Exception) as mockexception:\n with Service('https://www.googleapis.com/youtube/v3/commentThreads') as mock_service:\n payload = {\n 'key': 'FAKE_API_KEY', \n 'textFormat': 'plaintext', \n 'part': 'snippet', \n 'videoId': '9999',\n 'maxResults': 100\n }\n r = mock_service.get(payload=payload)", "id": "902959", "language": "Python", "matching_score": 2.516101598739624, "max_stars_count": 4, "path": "youtube_sentiment/tests/test_service.py" }, { "content": "import requests\nfrom requests.exceptions import RequestException\nfrom youtube_sentiment import Logger\n\nclass Service(object):\n \"\"\"\n Service helper class for API requests\n Args:\n endpoint: URL endpoint\n \"\"\"\n def __init__(self, endpoint):\n self.endpoint = endpoint\n self.session = requests.Session()\n self.session.headers.update({'Content-Type': 'application/json'})\n self.logger = Logger(self.__class__.__name__, maxbytes=10*1024*1024).get()\n\n def __enter__(self):\n return self\n\n def __exit__(self, type, value, traceback):\n pass\n\n def get(self, payload):\n try:\n r = self.session.request(method='get', url=self.endpoint, params=payload)\n if (r.status_code == requests.codes.ok):\n self.logger.info(\"API endpoint request received, payload: {0}\".format(payload))\n return r\n elif (r.status_code == requests.codes.bad_request):\n self.logger.error(\"Status: {0} | Bad Request. Check API Key or connectivity\".format(r.status_code))\n raise Exception(\"Bad Request\")\n elif (r.status_code == requests.codes.forbidden):\n self.logger.error(\"Status: {0} | API Key is incorrect or restricted\".format(r.status_code))\n raise Exception(\"Forbidden\")\n elif (r.status_code == requests.codes.not_found): \n self.logger.error(\"Status: {0} | Video not found\".format(r.status_code))\n raise Exception(\"Not Found\")\n else:\n self.logger.error(\"Status: {0} | An error has occurred\".format(r.status_code))\n raise Exception(\"Internal server error\")\n except RequestException as e:\n self.logger.exception(str(e))\n raise\n \n", "id": "8058531", "language": "Python", "matching_score": 0.7293285727500916, "max_stars_count": 4, "path": "youtube_sentiment/service.py" }, { "content": "from setuptools import setup\nfrom setuptools.command.install import install\nimport nltk\n\ndef readme():\n with open('README.md') as f:\n return f.read()\n\ndef post_install():\n \"\"\"Post installation nltk corpus downloads.\"\"\"\n nltk.download(\"punkt\")\n nltk.download('words')\n nltk.download('maxent_ne_chunker')\n nltk.download('averaged_perceptron_tagger')\n nltk.download(\"stopwords\")\n\nclass PostInstall(install):\n \"\"\"Post-installation\"\"\"\n def run(self):\n install.run(self)\n self.execute(post_install, [], msg=\"Running post installation tasks\")\n\nsetup(name='youtube_sentiment',\n version='0.3.0',\n description='Analyze Youtube videos for general sentiment analysis',\n long_description=readme(),\n long_description_content_type='text/markdown',\n url='https://github.com/dillonmabry/youtube-sentiment-helper',\n author='<NAME>',\n author_email='<EMAIL>',\n license='MIT',\n packages=['youtube_sentiment'],\n test_suite='nose.collector',\n tests_require=['nose'],\n install_requires=[\"requests\", \"nltk\", \"numpy\", \"scipy\", \"scikit-learn\"],\n cmdclass={\"install\": PostInstall},\n include_package_data=True,\n data_files=[('', [\n 'youtube_sentiment/models/lr_sentiment_basic.pkl', \n 'youtube_sentiment/models/lr_sentiment_cv.pkl'])\n ],\n zip_safe=False)\n", "id": "107654", "language": "Python", 
"matching_score": 0.9777032136917114, "max_stars_count": 0, "path": "setup.py" }, { "content": "import os\nimport logging \nimport logging.handlers\n\nclass Logger(object):\n \"\"\"\n Class to setup and utilize basic logging\n\n Args:\n self\n name: The name of the class utilizing logging\n \"\"\"\n def __init__(self, name, maxbytes):\n name = name.replace('.log','')\n logger = logging.getLogger('log_namespace.%s' % name)\n logger.setLevel(logging.DEBUG)\n if not logger.handlers:\n if not os.path.isdir('log'):\n os.mkdir('log')\n file_name = os.path.join('log', '%s.log' % name) \n handler = logging.handlers.RotatingFileHandler(\n file_name, maxBytes=maxbytes, backupCount=5)\n formatter = logging.Formatter('%(asctime)s %(levelname)s:%(name)s %(message)s')\n handler.setFormatter(formatter)\n handler.setLevel(logging.DEBUG)\n logger.addHandler(handler)\n self._logger = logger\n\n \"\"\"\n Method to return an instance of the logger\n\n Args:\n self\n \"\"\"\n def get(self):\n return self._logger", "id": "11893870", "language": "Python", "matching_score": 0.5782585144042969, "max_stars_count": 4, "path": "youtube_sentiment/logger.py" }, { "content": "from .logger import Logger\nfrom .utility import *\nfrom .service import *\nfrom .youtube import *\nfrom .nlp import *\nfrom .main import *", "id": "7078077", "language": "Python", "matching_score": 0.3118147850036621, "max_stars_count": 4, "path": "youtube_sentiment/__init__.py" } ]
1.798826
GammaConvexity
[ { "content": "\"\"\"\r\n'The module for first_inside_quotes'\r\n\r\n<NAME> asw263\r\nSeptember 22 2020\r\n\"\"\"\r\n\r\ndef first_inside_quotes(s):\r\n \"\"\"\r\n Returns the first substring of s between two (double) quotes\r\n \r\n A quote character is one that is inside a string, not one that \r\n delimits it. We typically use single quotes (') to delimit a \r\n string if want to use a double quote character (\") inside of it.\r\n \r\n Examples:\r\n first_inside_quotes('A \"B C\" D') returns 'B C'\r\n first_inside_quotes('A \"B C\" D \"E F\" G') also returns 'B C', \r\n because it only picks the first such substring\r\n \r\n Parameter s: a string to search\r\n Precondition: s is a string containing at least two double quotes\r\n \"\"\"\r\n start = s.index('\"')+1\r\n end = s.index('\"',start)\r\n insidequotes = s[start:end]\r\n return insidequotes", "id": "164519", "language": "Python", "matching_score": 0.22213268280029297, "max_stars_count": 1, "path": "Currency/quotes.py" }, { "content": "\"\"\"\r\nUser interface for module currency\r\n\r\nWhen run as a script, this module prompts the user for two currencies and \r\nan amount. It prints out the result of converting the first currency to \r\nthe second.\r\n\r\nAuthor: <NAME> asw263\r\nDate: 30 September 2020\r\n\"\"\"\r\n\r\nimport a1\r\nx = input(\"Enter source currency: \")\r\ny = input(\"Enter target currency: \")\r\nz = input(\"Enter original amount: \")\r\nresult = str(a1.exchange(x,y,float(z)))\r\nprint(\"You can exchange \"+z+\" \"+x+\" for \"+result+\" \"+y)", "id": "2341061", "language": "Python", "matching_score": 0.7107779383659363, "max_stars_count": 1, "path": "Currency/a1app.py" }, { "content": "\"\"\"\r\nModule for currency exchange\r\n\r\nThis module provides several string parsing functions to implement a \r\nsimple currency exchange routine using an online currency service. \r\nThe primary function in this module is exchange.\r\n\r\nAuthor: <NAME> asw263\r\nDate: 30 September 2020\r\n\"\"\"\r\nimport introcs\r\nimport introcs as q\r\nimport a1\r\nimport re\r\nimport numpy as np\r\n\r\ndef before_space(s):\r\n\r\n \"\"\"\r\n Returns a copy of s up to, but not including, the first space\r\n\r\n Parameter s: the string to slice\r\n Precondition: s is a string with at least one space\r\n\r\n \"\"\"\r\n return s[:s.find(' ')]\r\n\r\ndef after_space(s):\r\n\r\n \"\"\"\r\n Returns a copy of s after the first space\r\n\r\n Parameter s: the string to slice\r\n Precondition: s is a string with at least one space\r\n \"\"\"\r\n return s[s.find(' ') + 1:]\r\n\r\ndef is_currency(code):\r\n \"\"\"\r\n Returns: True if code is a valid (3 letter code for a) currency\r\n It returns False otherwise.\r\n\r\n Parameter code: the currency code to verify\r\n Precondition: code is a string with no spaces or non-letters.\r\n \"\"\"\r\n return True if has_error(currency_response(code, 'USD', 2.5))==False else False\r\n\r\ndef exchange(old, new, amt):\r\n \"\"\"\r\n Returns the amount of currency received in the given exchange.\r\n\r\n In this exchange, the user is changing amt money in currency src to the currency dst. 
The value returned represents the amount in currency dst.\r\n\r\n The value returned has type float.\r\n\r\n Parameter old: the currency on hand (the SRC)\r\n Precondition: old is a string for a valid currency code\r\n \r\n Parameter new: the currency to convert to (the DST)\r\n Precondition: new is a string for a valid currency code\r\n \r\n Parameter amt: amount of currency to convert\r\n Precondition: amt is a float\r\n \"\"\"\r\n justNumbers = before_space(get_dst(currency_response(old, new, amt)))\r\n return float(justNumbers)\r\n\r\n\r\ndef currency_response(old, new, amt):\r\n \"\"\"\r\n Returns a JSON string that is a response to a currency query.\r\n\r\n A currency query converts amt money in currency src to the \r\n currency dst. The response should be a string of the form \r\n\r\n '{ \"src\":\"<old-amt>\", \"dst\":\"<new-amt>\", \"valid\":true, \"err\":\"\" }'\r\n\r\n where the values old-amount and new-amount contain the value \r\n and name for the original and new currencies. If the query is \r\n invalid, both old-amount and new-amount will be empty, while \r\n \"valid\" will be followed by the value false (and \"err\" will have \r\n an error message).\r\n\r\n Parameter old: the currency on hand (the SRC)\r\n Precondition: old is a string with no spaces or non-letters\r\n \r\n Parameter new: the currency to convert to (the DST)\r\n Precondition: new is a string with no spaces or non-letters\r\n \r\n Parameter amt: amount of currency to convert\r\n Precondition: amt is a float\r\n \"\"\"\r\n o = old\r\n n = new\r\n a = str(amt)\r\n z=q.urlread('http://cs1110.cs.cornell.edu/2020fa/a1?from='+o+'&to='+n+'&amt='+a)\r\n return z\r\n\r\ndef first_inside_quotes(s):\r\n \"\"\"\r\n Returns the first substring of s between two (double) quotes\r\n \r\n A quote character is one that is inside a string, not one that \r\n delimits it. We typically use single quotes (') to delimit a \r\n string if want to use a double quote character (\") inside of it.\r\n \r\n Examples:\r\n first_inside_quotes('A \"B C\" D') returns 'B C'\r\n first_inside_quotes('A \"B C\" D \"E F\" G') returns 'B C', \r\n because it only picks the first such substring\r\n \r\n Parameter s: a string to search\r\n Precondition: s is a string containing at least two double quotes\r\n \"\"\"\r\n start = s.index('\"')+1\r\n end = s.index('\"',start)\r\n insidequotes = s[start:end]\r\n return insidequotes\r\n\r\ndef get_src(json):\r\n \"\"\"\r\n Returns the src value in the response to a currency query\r\n\r\n Given a JSON response to a currency query, this returns the \r\n string inside double quotes (\") immediately following the keyword\r\n \"src\". For example, if the JSON is\r\n\r\n '{ \"src\":\"1 Bitcoin\", \"dst\":\"9916.0137 Euros\", \"valid\":true, \"err\":\"\" }'\r\n\r\n then this function returns '1 Bitcoin' (not '\"1 Bitcoin\"'). \r\n\r\n This function returns the empty string if the JSON response\r\n contains an error message.\r\n\r\n Parameter json: a json string to parse\r\n Precondition: json is the response to a currency query\r\n \"\"\"\r\n return first_inside_quotes(json[len('\"src\":') + json.find('\"src\":'):])\r\n\r\ndef get_dst(json):\r\n \"\"\"\r\n Returns the dst value in the response to a currency query\r\n\r\n Given a JSON response to a currency query, this returns the \r\n string inside double quotes (\") immediately following the keyword\r\n \"dst\". 
For example, if the JSON is\r\n\r\n '{ \"src\":\"1 Bitcoin\", \"dst\":\"9916.0137 Euros\", \"valid\":true, \"err\":\"\" }'\r\n\r\n then this function returns '9916.0137 Euros' (not \r\n '\"9916.0137 Euros\"'). \r\n\r\n This function returns the empty string if the JSON response\r\n contains an error message.\r\n\r\n Parameter json: a json string to parse\r\n Precondition: json is the response to a currency query\r\n \"\"\"\r\n return first_inside_quotes(json[len('\"dst\":') + json.find('\"dst\":'):])\r\n\r\ndef has_error(json):\r\n \"\"\"\r\n Returns True if the query has an error; False otherwise.\r\n\r\n Given a JSON response to a currency query, this returns the \r\n opposite of the value following the keyword \"valid\". For example,\r\n if the JSON is \r\n\r\n '{ \"src\":\"\", \"dst\":\"\", \"valid\":false, \"err\":\"Currency amount is invalid.\" }'\r\n\r\n then the query is not valid, so this function returns True (It \r\n does NOT return the message 'Source currency code is invalid').\r\n\r\n Parameter json: a json string to parse\r\n Precondition: json is the response to a currency query\r\n \"\"\"\r\n if 'false' in json:\r\n return True\r\n else:\r\n return False\r\n", "id": "2491381", "language": "Python", "matching_score": 3.5502755641937256, "max_stars_count": 1, "path": "Currency/a1.py" }, { "content": "\"\"\"\r\nTest script for module a1\r\n\r\nWhen run as a script, this module invokes several procedures that \r\ntest the various functions in the module a1.\r\n\r\nAuthor: <NAME>w263\r\nDate: 30 September 2020\r\n\"\"\"\r\nimport a1\r\nimport introcs\r\n\r\ndef testA():\r\n \"\"\"\r\n Test procedure for Part A\r\n \"\"\"\r\n\r\n #tests space at beginning of string for before_space \r\n introcs.assert_equals(\"\", a1.before_space(\" my name\"))\r\n #tests space at beginning of string for after_space\r\n introcs.assert_equals(\"my name\", a1.after_space(\" my name\"))\r\n\r\n #tests space in between words of string for before_space\r\n introcs.assert_equals(\"my\", a1.before_space(\"my name\"))\r\n #tests space in between words of string for after_space\r\n introcs.assert_equals(\"name\", a1.after_space(\"my name\"))\r\n\r\n #tests multiple spaces in between words of string for before_space\r\n introcs.assert_equals(\"my\", a1.before_space(\"my name is Arthur\"))\r\n #tests multiple spaces in between words of string for after_space\r\n introcs.assert_equals(\"name is Arthur\", a1.after_space(\"my name is Arthur\"))\r\n\r\n #tests single space as the string for before_space\r\n introcs.assert_equals(\"\", a1.before_space(\" \"))\r\n #tests single space as the string for after_space \r\n introcs.assert_equals(\"\", a1.after_space(\" \")) \r\n\r\n #tests an empty string as the string for before_space\r\n introcs.assert_equals(\"\", a1.before_space(\"\"))\r\n #tests an empty string as the string for after_space \r\n introcs.assert_equals(\"\", a1.after_space(\"\")) \r\n\r\ndef testB():\r\n \"\"\"\r\n Test procedure for Part B\r\n \"\"\"\r\n \r\n #sample json to test functions of part B\r\n test_json = '{ \"src\":\"2 Namibian Dollars\", \"dst\":\"2 Lesotho Maloti\", \"valid\":true, \"err\":\"\" }'\r\n\r\n #tests get_src\r\n a1.get_src(test_json)\r\n\r\n #tests get_dst\r\n a1.get_dst(test_json)\r\n\r\n #tests has_error\r\n a1.has_error(test_json)\r\n\r\n #tests has_error with false in valid field\r\n test_json_false = '{ \"src\":\"2 Namibian Dollars\", \"dst\":\"2 Lesotho Maloti\", \"valid\":false, \"err\":\"\" }'\r\n a1.has_error(test_json_false)\r\n \r\n #PRODUCES ERROR\r\n #tests 
blank json for all functions\r\n #blank_json = ''\r\n #a1.get_src(blank_json)\r\n #a1.get_dst(blank_json)\r\n #a1.has_error(blank_json)\r\n\r\ndef testC():\r\n \"\"\"\r\n Test procedure for Part C\r\n \"\"\"\r\n \r\n #tests currency_response with 5.6 USD to CAD \r\n a1.currency_response('USD', 'CAD', 5.6)\r\n\r\n #tests currency_response with invalid from value\r\n a1.currency_response('UDSER', 'CAD', 5.6)\r\n\r\n #tests currency_response with invalid to value\r\n a1.currency_response('UDS', 'CDAER', 5.6)\r\n\r\n #tests currency_response with an int amt value\r\n a1.currency_response('UDS', 'CDA', 5)\r\n\r\n #PRODUCES ERROR\r\n #tests currency_response with no arguments\r\n #a1.currency_response()\r\n\r\ndef testD():\r\n \"\"\"\r\n Test procedure for Part D\r\n \"\"\"\r\n\r\n #test is_currency with CAD\r\n a1.is_currency('CAD')\r\n #test is_currency with blank field\r\n a1.is_currency('')\r\n #test is_currency with invalid currency\r\n a1.is_currency('CADA') \r\n\r\n #tests exchange with 5.6 USD to CAD \r\n a1.exchange('USD', 'CAD', 5.6)\r\n #tests exchange with invalid from value\r\n a1.exchange('UDSER', 'CAD', 5.6)\r\n #tests exchange with invalid to value\r\n a1.exchange('UDS', 'CDAER', 5.6)\r\n #tests exchange with an int amt value\r\n a1.exchange('UDS', 'CDA', 5)\r\n\r\ntestA()\r\ntestB()\r\ntestC()\r\ntestD()\r\nprint('Module a1 passed all tests.')\r\n", "id": "9944593", "language": "Python", "matching_score": 1.355960726737976, "max_stars_count": 1, "path": "Currency/a1test.py" }, { "content": "\"\"\" \nUnit Test for Assignment A3\n\nThis module implements several test cases for a3. It is incomplete. You should look \nthrough this file for places to add tests.\n\n<NAME> asw263\n16 October 2020\n\"\"\" \nimport introcs\nimport a3\n\n\ndef test_complement():\n \"\"\"\n Test function complement\n \"\"\"\n print('Testing complement')\n \n # One test is really good enough here\n comp = a3.complement_rgb(introcs.RGB(250, 0, 71))\n introcs.assert_equals(255-250, comp.red)\n introcs.assert_equals(255-0, comp.green)\n introcs.assert_equals(255-71, comp.blue)\n \n # One more for good measure\n comp = a3.complement_rgb(introcs.RGB(128, 64, 255))\n introcs.assert_equals(255-128, comp.red)\n introcs.assert_equals(255-64, comp.green)\n introcs.assert_equals(255-255, comp.blue)\n\n\ndef test_str5_color():\n \"\"\"\n Test the str5 functions for cmyk and hsv.\n \"\"\"\n print('Testing str5_cmyk and str5_hsv')\n \n # Tests for str5_cmyk\n # We need to make sure the coordinates round properly\n text = a3.str5_cmyk(introcs.CMYK(98.448, 25.362, 72.8, 1.0))\n introcs.assert_equals('(98.45, 25.36, 72.80, 1.000)',text)\n \n text = a3.str5_cmyk(introcs.CMYK(0.0, 1.5273, 100.0, 57.846))\n introcs.assert_equals('(0.000, 1.527, 100.0, 57.85)',text)\n \n # Tests for str5_hsv\n text = a3.str5_hsv(introcs.HSV(98.448, 0.9, 1))\n introcs.assert_equals('(98.45, 0.900, 1.000)',text)\n \n text = a3.str5_hsv(introcs.HSV(341.112, 0.0273, 0))\n introcs.assert_equals('(341.1, 0.027, 0.000)',text)\n\n\ndef test_rgb_to_cmyk():\n \"\"\"\n Test translation function rgb_to_cmyk\n \"\"\"\n print('Testing rgb_to_cmyk')\n \n # The function should guarantee accuracy to three decimal places\n rgb = introcs.RGB(255, 255, 255)\n cmyk = a3.rgb_to_cmyk(rgb)\n introcs.assert_equals(0.0, round(cmyk.cyan,3))\n introcs.assert_equals(0.0, round(cmyk.magenta,3))\n introcs.assert_equals(0.0, round(cmyk.yellow,3))\n introcs.assert_equals(0.0, round(cmyk.black,3))\n \n rgb = introcs.RGB(0, 0, 0)\n cmyk = a3.rgb_to_cmyk(rgb)\n 
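# Added note (not part of the original tests): a pure black input should leave C, M and Y at 0 and drive K to 100, which is what the asserts below check\n 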
introcs.assert_equals(0.0, round(cmyk.cyan,3))\n introcs.assert_equals(0.0, round(cmyk.magenta,3))\n introcs.assert_equals(0.0, round(cmyk.yellow,3))\n introcs.assert_equals(100.0, round(cmyk.black,3))\n \n rgb = introcs.RGB(217, 43, 164)\n cmyk = a3.rgb_to_cmyk(rgb)\n introcs.assert_equals(0.0, round(cmyk.cyan,3))\n introcs.assert_equals(80.184, round(cmyk.magenta,3))\n introcs.assert_equals(24.424, round(cmyk.yellow,3))\n introcs.assert_equals(14.902, round(cmyk.black,3))\n\n\ndef test_cmyk_to_rgb():\n \"\"\"\n Test translation function cmyk_to_rgb\n \"\"\"\n print('Testing cmyk_to_rgb')\n \n cmyk = introcs.CMYK(100.0, 100.0, 100.0, 100.0)\n rgb = a3.cmyk_to_rgb(cmyk)\n introcs.assert_equals(0, rgb.red)\n introcs.assert_equals(0, rgb.green)\n introcs.assert_equals(0, rgb.blue)\n \n cmyk = introcs.CMYK(0.0, 0.0, 0.0, 0.0)\n rgb = a3.cmyk_to_rgb(cmyk)\n introcs.assert_equals(255, rgb.red)\n introcs.assert_equals(255, rgb.green)\n introcs.assert_equals(255, rgb.blue)\n \n cmyk = introcs.CMYK(17.0, 43.0, 64.0, 59.0)\n rgb = a3.cmyk_to_rgb(cmyk)\n introcs.assert_equals(87, rgb.red)\n introcs.assert_equals(60, rgb.green)\n introcs.assert_equals(38, rgb.blue)\n\n\ndef test_rgb_to_hsv():\n \"\"\"\n Test translation function rgb_to_hsv\n \"\"\"\n print('Testing rgb_to_hsv')\n rgb = introcs.RGB(255,255,255)\n hsv = a3.rgb_to_hsv(rgb)\n introcs.assert_equals(0.0,hsv.hue)\n introcs.assert_equals(0.0,hsv.saturation)\n introcs.assert_equals(1.0,hsv.value)\n \n rgb = introcs.RGB(0,0,0)\n hsv = a3.rgb_to_hsv(rgb)\n introcs.assert_equals(0.0,hsv.hue)\n introcs.assert_equals(0.0,hsv.saturation)\n introcs.assert_equals(0.0,hsv.value)\n \n rgb = introcs.RGB(13,45,57)\n hsv = a3.rgb_to_hsv(rgb)\n introcs.assert_equals(196.364,hsv.hue)\n introcs.assert_equals(0.772,hsv.saturation)\n introcs.assert_equals(0.224,hsv.value)\n\n\n\ndef test_hsv_to_rgb():\n \"\"\"\n Test translation function hsv_to_rgb\n \"\"\"\n print('Testing hsv_to_rgb')\n hsv = introcs.HSV(0,0,0)\n rgb = a3.hsv_to_rgb(hsv)\n introcs.assert_equals(0,rgb.red)\n introcs.assert_equals(0,rgb.green)\n introcs.assert_equals(0,rgb.blue)\n\n hsv = introcs.HSV(360,1,1)\n rgb = a3.hsv_to_rgb(hsv)\n introcs.assert_equals(255,rgb.red)\n introcs.assert_equals(0,rgb.green)\n introcs.assert_equals(0,rgb.blue)\n\n hsv = introcs.HSV(268,0.25,0.7)\n rgb = a3.hsv_to_rgb(hsv)\n introcs.assert_equals(155,rgb.red)\n introcs.assert_equals(134,rgb.green)\n introcs.assert_equals(178,rgb.blue)\n\n\ndef test_contrast_value():\n \"\"\"\n Test translation function contrast_value\n \"\"\"\n print('Testing contrast_value')\n \n # contrast == -1.0 (extreme)\n result = a3.contrast_value(0.0,-1.0)\n introcs.assert_floats_equal(0.5,result)\n \n result = a3.contrast_value(1.0,-1.0)\n introcs.assert_floats_equal(0.5,result)\n \n # contrast < 0, bottom part of sawtooth\n result = a3.contrast_value(0.1,-0.5)\n introcs.assert_floats_equal(0.3,result)\n \n # contrast < 0, middle of sawtooth\n result = a3.contrast_value(0.4,-0.4)\n introcs.assert_floats_equal(0.4571429,result)\n \n # contrast < 0, upper part of sawtooth\n result = a3.contrast_value(0.9,-0.3)\n introcs.assert_floats_equal(0.8142857,result)\n \n # contrast == 0.0, bottom part of sawtooth\n result = a3.contrast_value(0.1,0.0)\n introcs.assert_floats_equal(0.1,result)\n \n # contrast == 0, middle of sawtooth\n result = a3.contrast_value(0.6,0.0)\n introcs.assert_floats_equal(0.6,result)\n \n # contrast == 0.0, upper part of sawtooth\n result = a3.contrast_value(0.9,0.0)\n introcs.assert_floats_equal(0.9,result)\n \n # contrast > 
0, bottom part of sawtooth\n result = a3.contrast_value(0.1,0.3)\n introcs.assert_floats_equal(0.05384615,result)\n \n # contrast > 0, middle of sawtooth\n result = a3.contrast_value(0.4,0.5)\n introcs.assert_floats_equal(0.2,result)\n \n # contrast > 0, upper part of sawtooth\n result = a3.contrast_value(0.9,0.4)\n introcs.assert_floats_equal(0.95714286,result)\n \n # contrast == 1.0 (extreme)\n result = a3.contrast_value(0.2,1.0)\n introcs.assert_floats_equal(0.0,result)\n \n result = a3.contrast_value(0.6,1.0)\n introcs.assert_floats_equal(1.0,result)\n\n\ndef test_contrast_rgb():\n \"\"\"\n Test translation function contrast_rgb\n \"\"\"\n print('Testing contrast_rgb')\n \n # Negative contrast (contrast_rgb adjusts the rgb object in place)\n rgb = introcs.RGB(240, 15, 118)\n a3.contrast_rgb(rgb,-0.4)\n introcs.assert_equals(220, rgb.red)\n introcs.assert_equals(35, rgb.green)\n introcs.assert_equals(123, rgb.blue)\n \n # Add two more tests\n\n\n# Script Code\n# THIS PREVENTS THE TESTS RUNNING ON IMPORT\nif __name__ == '__main__':\n test_complement()\n test_str5_color()\n test_rgb_to_cmyk()\n test_cmyk_to_rgb()\n test_rgb_to_hsv()\n test_hsv_to_rgb()\n test_contrast_value()\n test_contrast_rgb()\n print('Module a3 passed all tests.')\n", "id": "11802978", "language": "Python", "matching_score": 1.6523101329803467, "max_stars_count": 1, "path": "Color/a3test.py" }, { "content": "\"\"\"\nApplication for displaying color models\n\nYou are welcome to read the code in this file. I guarantee that you will not understand\nit. The TAs do not understand it. That does not matter. When you code with another\nperson, you do not need to understand the code they write. You just have to write your\ncode to specification.\n\nThis is an example of a fairly sophisticated Kivy application. Kivy applications are\nbroken into a code file (this) and a layout file (colormodel.kv). The layout file serves\nmuch the same role as CSS in web pages. In order for it to work it must be in the same \nfolder as colormodel.kv. Do not move or change the name of that file.\n\nAuthor: <NAME> (wmw2)\nDate: September 25, 2019\n\"\"\"\nimport kivy\nfrom kivy.app import App\nfrom kivy.lang import Builder\nfrom kivy.uix.widget import Widget\nfrom kivy.uix.boxlayout import BoxLayout\nfrom kivy.properties import *\nfrom kivy.vector import Vector\nfrom kivy.uix.label import Label\nfrom kivy.factory import Factory\nfrom kivy.graphics import Color\nfrom kivy.graphics import Ellipse\nfrom kivy.config import Config\n\nfrom kivy.metrics import dp\nfrom kivy.graphics import Mesh, InstructionGroup\n\nimport introcs\nimport math\nimport a3\n\n\n#mark String Functions\ndef str_rgb(rgb):\n \"\"\"\n Returns the string representation of an RGB object without alpha\n \n Parameter rgb: the color object to display\n Precondition: rgb is an RGB object\n \"\"\"\n return '('+str(rgb.red)+', '+str(rgb.green)+', '+str(rgb.blue)+')'\n\n\ndef str_cmyk(cmyk):\n \"\"\"\n Returns the string representation of a CMYK object.\n\n Proxy function for a3.str5_cmyk. It returns the empty string if it is not\n defined.\n \n Parameter cmyk: the color object to display\n Precondition: cmyk is an CMYK object\n \"\"\"\n result = a3.str5_cmyk(cmyk)\n if result is None:\n return ''\n return result \n\n\ndef str_hsv(hsv):\n \"\"\"\n Returns the string representation of a HSV object.\n\n Proxy function for a3.str5_hsv. 
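(A str5 string renders each component in five characters; for example, the unit tests expect str5_hsv(introcs.HSV(98.448, 0.9, 1)) to equal '(98.45, 0.900, 1.000)'.) 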
It returns the empty string if it is not\n defined.\n \n Parameter hsv: the color object to display\n Precondition: hsv is an HSV object\n \"\"\"\n result = a3.str5_hsv(hsv)\n if result is None:\n return ''\n return result \n\n\npass\n#mark -\n#mark Utility Widgets\nclass Separator(Widget):\n \"\"\"\n A class to space out widgets from one another.\n \n This class is cosmetic and simply gives us some edge definition\n \"\"\"\n # The background color of the left edge\n left = ListProperty([1,1,1,1])\n # The background color of the right edge\n right = ListProperty([1,1,1,1])\n\n\nclass PanelHeader(Label):\n \"\"\"\n A label header for each subpanel.\n \n This class is essentially a label with predefined features for convenience. \n It is fully defined in colormodel.kv\n \"\"\"\n pass\n\n\nclass ColorArc(InstructionGroup):\n \"\"\"\n A color arc (color wheel segment) created using OpenGL\n \n This code is heavily adapted (with permission) from\n \n https://kivy.org/doc/stable/_modules/kivy/uix/colorpicker.html\n \n We have removed alpha, as it is not relevant.\n \"\"\"\n \n def __init__(self, r_min, r_max, theta_min, theta_max, **kwargs):\n \"\"\"\n Initializes a new color arc.\n \n Parameter r_min: The minimum segment radius\n Precondition: r_min a float <= r_max\n \n Parameter r_max: The maximum segment radius\n Precondition: r_max a float >= r_min\n \n Parameter theta_min: The minimum segment angle\n Precondition: theta_min is a float is 0..2pi, theta_min <= theta_max\n \n Parameter theta_max: The maximum segment angle\n Precondition: theta_max is a float is 0..2pi, theta_min <= theta_max\n \n Parameter kwargs: Additional kivy keyword arguments\n \"\"\"\n super(ColorArc, self).__init__(**kwargs)\n self.r_min = r_min\n self.r_max = r_max\n self.theta_min = theta_min\n self.theta_max = theta_max\n self.origin = kwargs['origin'] if 'origin' in kwargs else (0, 0)\n self.color = list(kwargs['color'] if 'color' in kwargs else (0, 0, 1, 1))\n \n if 'origin' in kwargs:\n del kwargs['origin']\n if 'color' in kwargs:\n del kwargs['color']\n super(ColorArc, self).__init__(**kwargs)\n \n self.color_instr = Color(*self.color, mode='hsv')\n self.add(self.color_instr)\n self.mesh = self.get_mesh()\n self.add(self.mesh)\n \n def __str__(self):\n \"\"\"\n Returns a string representation of this arc segment (for debugging)\n \"\"\"\n return \"r_min: %s r_max: %s theta_min: %s theta_max: %s color: %s\" % (\n self.r_min, self.r_max, self.theta_min, self.theta_max, self.color\n )\n \n def get_mesh(self):\n \"\"\"\n Returns a newly created OpenGL mesh for this segment.\n \"\"\"\n v = []\n # first calculate the distance between endpoints of the inner\n # arc, so we know how many steps to use when calculating\n # vertices\n end_point_inner = self.polar_to_rect(self.origin, self.r_min, self.theta_max)\n \n d_inner = d_outer = 3.\n theta_step_inner = (self.theta_max - self.theta_min) / d_inner\n \n end_point_outer = self.polar_to_rect(self.origin, self.r_max, self.theta_max)\n \n if self.r_min == 0:\n theta_step_outer = (self.theta_max - self.theta_min) / d_outer\n for x in range(int(d_outer)):\n v += (self.polar_to_rect(self.origin, 0, 0) * 2)\n v += (self.polar_to_rect(self.origin, self.r_max, \n self.theta_min + x * theta_step_outer) * 2)\n else:\n for x in range(int(d_inner + 1)):\n v += (self.polar_to_rect(self.origin, self.r_min - 1,\n self.theta_min + x * theta_step_inner) * 2)\n v += (self.polar_to_rect(self.origin, self.r_max + 1,\n self.theta_min + x * theta_step_inner) * 2)\n \n v += (end_point_inner * 
2)\n v += (end_point_outer * 2)\n \n return Mesh(vertices=v, indices=range(int(len(v) / 4)), mode='triangle_strip')\n \n def change_value(self, value):\n \"\"\"\n Changes the HSV value for this segment.\n \n The value is external to the segment, and is 1 by default, but can be changed.\n \n Parameter value: The new hsv value\n Precondition: value is a float in 0..1\n \"\"\"\n self.remove(self.color_instr)\n self.color[2] = value\n self.color_instr = Color(*self.color, mode='hsv')\n self.insert(0, self.color_instr)\n \n @classmethod\n def distance(cls, pt1, pt2):\n \"\"\"\n Returns the distance between points pt1 and pt2\n \n This is a utility function used throughout this file.\n \n Parameter pt1: The first point\n Precondition: pt1 a list of two floats\n \n Parameter pt2: The second point\n Precondition: pt2 a list of two floats\n \"\"\"\n return math.sqrt((pt1[0] - pt2[0]) ** 2. + (pt1[1] - pt2[1]) ** 2.)\n \n @classmethod\n def polar_to_rect(cls, origin, r, theta):\n \"\"\"\n Returns a point in rectangular coordinates given polar coordinates.\n \n This is a utility function used throughout this file.\n \n Parameter origin: The polar origin in rectangular coordinates\n Precondition: origin a list of two floats\n \n Parameter r: The radius coordinate\n Precondition: r is a float >= 0\n \n Parameter theta: The angular coordinate\n Precondition: theta is a float in 0..math.pi*2\n \"\"\"\n return origin[0] + r * math.cos(theta), origin[1] + r * math.sin(theta)\n \n @classmethod\n def rect_to_polar(cls, origin, x, y):\n \"\"\"\n Returns a point in polar coordinates given rectangular coordinates.\n \n This is a utility function used throughout this file.\n \n Parameter origin: The polar origin in rectangular coordinates\n Precondition: origin a list of two floats\n \n Parameter x: The x coordinate\n Precondition: x is a float\n \n Parameter y: The y coordinate\n Precondition: y is a float\n \"\"\"\n if x == origin[0]:\n if y == origin[1]:\n return (0, 0)\n elif y > origin[1]:\n return (y - origin[1], math.pi / 2.)\n else:\n return (origin[1] - y, 3 * math.pi / 2.)\n t = math.atan(float((y - origin[1])) / (x - origin[0]))\n if x - origin[0] < 0:\n t += math.pi\n \n if t < 0:\n t += 2 * math.pi\n \n return (cls.distance((x, y), origin), t)\n\n\nclass HSVWheel(Widget):\n \"\"\"\n A chromatic wheel supporting HSV colors\n \n This code is heavily adapted (with permission) from\n \n https://kivy.org/doc/stable/_modules/kivy/uix/colorpicker.html\n \n We have removed alpha, as it is not relevant. We have also removed the gestures,\n in order to customize this for traditional mouse support. The value property is\n now external and provided by other input sources.\n \"\"\"\n # The hue and saturation (other widgets should observe this)\n # Direct access is only safe for READ-ONLY access\n huesat = ListProperty([0.0, 0.0])\n # The value (lumens). 
This value is read-write safe, as it only affects graphics\n value = BoundedNumericProperty(1, min=0, max=1)\n \n # The number of concentric circles (lower in kv file for weaker graphics cards)\n radial_segments = NumericProperty(10)\n # The angular segments (lower in kv file for weaker graphics cards)\n angular_segments = NumericProperty(16)\n \n # The position of the control knob in rectangular coordinates\n knob_pos = ListProperty([0, 0])\n # The knob radius in pts (not pixels)\n knob_size = NumericProperty(20)\n # The factor of the knob outline\n KNOB_FACTOR = 1.5\n \n def __init__(self, **kwargs):\n \"\"\"\n Initializes a new color wheel.\n \n Parameter kwargs: Additional kivy keyword arguments\n \"\"\"\n super(HSVWheel, self).__init__(**kwargs)\n self.origin = (self.pos[0]+self.size[0]/2,self.pos[1]+self.size[1]/2)\n self.radius = min(self.size[0],self.size[1])/2 - dp(10)\n self.selected = None\n self.wheelmesh = InstructionGroup()\n self.canvas.add(self.wheelmesh)\n self.bind(value=self.recolor)\n \n def init_wheel(self, dt):\n \"\"\"\n (Re)initializes the OpenGL meshes for the wheel\n \n Parameter dt: the time since last initialization\n Precondition: dt is a float\n \"\"\"\n # initialize list to hold all meshes\n self.wheelmesh.clear()\n self.arcs = []\n self.sv_idx = 0\n pdv = self.radial_segments\n ppie = self.angular_segments\n \n self.origin = (self.pos[0]+self.size[0]/2,self.pos[1]+self.size[1]/2)\n self.radius = min(self.size[0],self.size[1])/2 - dp(10)\n for r in range(pdv):\n for t in range(ppie):\n self.arcs.append(\n ColorArc(\n self.radius * (float(r) / float(pdv)),\n self.radius * (float(r + 1) / float(pdv)),\n 2 * math.pi * (float(t) / float(ppie)),\n 2 * math.pi * (float(t + 1) / float(ppie)),\n origin=self.origin,\n color=(float(t) / ppie,\n float(r) / pdv,\n self.value,\n 1)))\n \n self.wheelmesh.add(self.arcs[-1])\n \n def recolor(self,instance,value):\n \"\"\"\n Recolors the color wheel for a new HSV brightness value\n \n Parameter instance: the Kivy widget setting the new value\n Precondition: instance is a Kivy widget\n \n Parameter value: the new brightness value\n Precondition: value is a float in 0..1\n \"\"\"\n for segment in self.arcs:\n segment.change_value(value)\n \n def on_touch_down(self, touch):\n \"\"\"\n Responds to a mouse down event\n \n If the mouse down is inside the knob, it will start to move the knob.\n \n Parameter touch: the mouse down information\n Precondition: touch is a Kivy touch event\n \"\"\"\n vec = [touch.pos[0]-self.origin[0], touch.pos[1]-self.origin[1]]\n dis = ColorArc.distance(self.knob_pos,vec)\n if dis <= dp(self.knob_size)*self.KNOB_FACTOR:\n touch.grab(self)\n self.selected = touch.pos\n \n def on_touch_move(self, touch):\n \"\"\"\n Responds to a mouse move event.\n \n This method is only relevant if the knob is actively moving.\n \n Parameter touch: the mouse move information\n Precondition: touch is a Kivy touch event\n \"\"\"\n if touch.grab_current is not self or not self.selected:\n return\n \n origin = [0,0]\n new_knob = self.knob_pos[:]\n new_knob[0] += touch.pos[0]-self.selected[0]\n new_knob[1] += touch.pos[1]-self.selected[1]\n \n (r,t) = ColorArc.rect_to_polar(origin,new_knob[0],new_knob[1])\n if r > self.radius:\n r = self.radius\n new_knob = ColorArc.polar_to_rect(origin,r,t)\n \n adjust = [0,0]\n adjust[0] = (new_knob[0]-self.knob_pos[0])+self.selected[0]\n adjust[1] = (new_knob[1]-self.knob_pos[1])+self.selected[1]\n self.selected = adjust\n \n self.knob_pos[0] = new_knob[0]\n self.knob_pos[1] = 
new_knob[1]\n \n t *= 180.0/math.pi # Degrees\n t = t % 360\n r /= self.radius\n self.huesat = [t,r]\n \n def on_touch_up(self, touch):\n \"\"\"\n Responds to a mouse up event.\n \n This method is only relevant if the knob is actively moving.\n \n Parameter touch: the mouse up information\n Precondition: touch is a Kivy touch event\n \"\"\"\n if touch.grab_current is self:\n touch.ungrab(self)\n self.selected = None\n \n def setHue(self,hue):\n \"\"\"\n Sets the hue, updating the knob position.\n \n This method is for \"pushing down\" a hue to synchronize it with other\n input sources. Direct write access to the huesat attribute is unsafe.\n \n Parameter hue: The new hue value\n Precondition: hue is a float 0..360\n \"\"\"\n hue = hue % 360\n t = hue*math.pi/180.0\n r = self.huesat[1]*self.radius\n knob = ColorArc.polar_to_rect([0,0],r,t)\n \n self.knob_pos[0] = knob[0]\n self.knob_pos[1] = knob[1]\n self.huesat[0] = hue\n \n def setSat(self,sat):\n \"\"\"\n Sets the saturation, updating the knob position.\n \n This method is for \"pushing down\" a saturation to synchronize it with other\n input sources. Direct write access to the huesat attribute is unsafe.\n \n Parameter sat: The new saturation value\n Precondition: sat is a float 0..1\n \"\"\"\n t = self.huesat[0]*math.pi/180.0\n r = sat*self.radius\n knob = ColorArc.polar_to_rect([0,0],r,t)\n \n self.knob_pos[0] = knob[0]\n self.knob_pos[1] = knob[1]\n self.huesat[1] = sat\n\n\nclass SliderField(BoxLayout):\n \"\"\"\n A class to implement a slider/field combination.\n \n This is a convenience class as many of the inputs work with text input/slider\n combinations. It keeps the text field and slider in sync and provides a unified\n observable value for higher-level widgets.\n \"\"\"\n # The background color (for melding into the background)\n color = ListProperty([1,1,1,1])\n # The foreground color (for melding into the background)\n text_color = ListProperty([0,0,0,1])\n # A reference to the slider child widget\n slider = ObjectProperty(None)\n # A reference to the text input child widget\n field = ObjectProperty(None)\n # A name label for display up top\n text = ObjectProperty(\"\")\n \n # The unified value (other widgets should observe this)\n # Direct access is only safe for READ-ONLY access\n value = NumericProperty(1.0)\n # The initial value at start-up (override in the kv file)\n initial = NumericProperty(10000)\n # The maximum slider value (override in the kv file)\n max_val = NumericProperty(10000)\n # The minimum slider value (override in the kv file)\n min_val = NumericProperty(0)\n # Amount to divide the slider value to get the unified value (override in the kv file)\n factor = NumericProperty(10000)\n \n # The number of decimals to display in the text box (0 means round to int)\n decimals = NumericProperty(3)\n \n # A semaphore-style lock to prevent infinite cascade in Kivy events \n lock = BooleanProperty(False)\n \n def on_kv_post(self, widget):\n \"\"\"\n Links and registers all children after the KV file has been processed\n \n Parameter widget: This object, after kv initialization\n Precondition: widget is this object\n \"\"\"\n if self.slider:\n self.slider.bind(value=self.update_on_slide)\n self.update_on_slide(self,self.initial)\n \n def validate_text(self,text):\n \"\"\"\n Returns the number for the given text, or None if it is invalid\n \n Parameter text: The text to convert to a number\n Precondition: text is a string\n \"\"\"\n try:\n value = float(text)\n return self.validate_number(value)\n except:\n return 
None\n \n def validate_number(self,value):\n \"\"\"\n Returns this number rounded to the appropriate decimals, or None if it is invalid\n \n This method uses self.decimals to determine the number of places to round to.\n If the number is outside of range (min_val to max_val), this method will \n return None.\n \n Parameter value: The number to verify\n Precondition: value is an int or float\n \"\"\"\n if self.decimals:\n value = round(value,self.decimals)\n else:\n value = round(value)\n \n if self.min_val/self.factor <= value and value <= self.max_val/self.factor:\n return value\n \n return None\n \n def update_on_slide(self, instance, value):\n \"\"\"\n Updates the value property to match the child slider.\n \n Parameter instance: the reporting slider\n Precondition: instance is the child Slider object\n \n Parameter value: the new slider value\n Precondition: value is an int\n \"\"\"\n if self.lock:\n return\n \n self.lock = True # Prevent infinite event recursion\n \n value = self.validate_number(value/self.factor)\n if self.value == value: # Hack because of poor Kivy design\n self.value = value+1 if value < self.max_val else value-1\n self.value = value\n if self.field:\n self.field.focus = False\n self.field.text = str(value)\n \n self.lock = False\n \n \n def update_on_text(self, instance, text):\n \"\"\"\n Updates the value property to match the child text input\n \n Parameter instance: the reporting text input\n Precondition: instance is the child TextInput object\n \n Parameter text: the new text input\n Precondition: value is a string\n \"\"\"\n if self.lock:\n return\n \n self.lock = True # Prevent infinite event recursion\n \n value = self.validate_text(text)\n if not value is None:\n if self.slider:\n self.slider.value = value*self.factor\n self.value = value\n self.field.text = str(value)\n elif self.field:\n self.field.text = str(self.value)\n \n self.lock = False\n \n def filter_text(self, text, from_undo):\n \"\"\"\n Returns a truncated text value to prevent overflow in the text input.\n \n This method allows the user to type just enough into the text input field to\n be in range and have the given number of decimals. Any more than that, and it\n will stop accepting input.\n \n Parameter text: the new text to append to the input\n Precondition: text is a string\n \n Parameter from_undo: whether this result is from an undo operation\n Precondition: from_undo is a boolean\n \"\"\"\n size = int(math.log10(self.max_val/self.factor))+1\n if self.min_val < 0:\n size = max(size,int(math.log10(-self.min_val/self.factor))+1)\n if self.decimals:\n size += self.decimals+1\n if '-' in self.field.text:\n size += 1\n return text[:size-len(self.field.text)]\n \n def setValue(self,value):\n \"\"\"\n Sets the value, updating both the slider and the text field.\n \n This method is for \"pushing down\" a value to synchronize it with other\n input sources. Direct write access to the value attribute is unsafe.\n \n Parameter value: The new numeric value\n Precondition: value is a number\n \"\"\"\n self.lock = True # Prevent infinite event recursion\n \n value = self.validate_number(value)\n if not value is None:\n if self.slider:\n self.slider.value = value*self.factor\n if self.field:\n self.field.text = str(value)\n self.value = value\n \n self.lock = False\n\n\npass\n#mark -\n#mark Input Panels\n\nclass RGBInputPanel(BoxLayout):\n \"\"\"\n An input panel for defining an RGB color.\n \n This panels stores its internal state as a color property. 
It consists of three\n separate slider fields.\n \"\"\"\n # The current active color (other widgets should observe this)\n # Direct access is only safe for READ-ONLY access\n color = ListProperty([0, 0, 0])\n # Reference to the red slider\n rSlider = ObjectProperty(None)\n # Reference to the green slider\n gSlider = ObjectProperty(None)\n # Reference to the blue slider\n bSlider = ObjectProperty(None)\n \n def on_kv_post(self, widget):\n \"\"\"\n Links and registers all children after the KV file has been processed\n \n Parameter widget: This object, after kv initialization\n Precondition: widget is this object\n \"\"\"\n if self.rSlider:\n self.rSlider.bind(value=self.pollSlider)\n self.color[0] = self.rSlider.value\n if self.gSlider:\n self.gSlider.bind(value=self.pollSlider)\n self.color[1] = self.gSlider.value\n if self.bSlider:\n self.bSlider.bind(value=self.pollSlider)\n self.color[2] = self.bSlider.value\n \n def pollSlider(self, instance, value):\n \"\"\"\n Polls the latest color value from a slider change\n \n Parameter instance: the reporting instance\n Precondition: instance is a SliderField\n \n Parameter value: the color value\n Precondition: value is an int 0..255\n \"\"\"\n if instance == self.rSlider:\n self.color[0] = value\n elif instance == self.gSlider:\n self.color[1] = value\n elif instance == self.bSlider:\n self.color[2] = value\n \n def setColor(self, r, g, b):\n \"\"\"\n Sets the color value for this input device, updating all sliders.\n \n This method is for \"pushing down\" a color to synchronize it with other\n input sources. Direct write access to the color attribute is unsafe.\n \n Parameter r: the red value\n Precondition: r is an int 0..255\n \n Parameter g: the green value\n Precondition: g is an int 0..255\n \n Parameter b: the blue value\n Precondition: b is an int 0..255\n \"\"\"\n if self.rSlider:\n self.rSlider.setValue(r)\n else:\n self.color[0] = r\n if self.gSlider:\n self.gSlider.setValue(g)\n else:\n self.color[1] = g\n if self.bSlider:\n self.bSlider.setValue(b)\n else:\n self.color[2] = b\n\n\nclass CMYKInputPanel(BoxLayout):\n \"\"\"\n An input panel for defining a CMYK color.\n \n This panel stores its internal state as a color property. 
It consists of four\n separate slider fields.\n \"\"\"\n # The current active color (other widgets should observe this)\n # Direct access is only safe for READ-ONLY access\n color = ListProperty([0, 0, 0, 0])\n # Reference to the cyan slider\n cSlider = ObjectProperty(None)\n # Reference to the magenta slider\n mSlider = ObjectProperty(None)\n # Reference to the yellow slider\n ySlider = ObjectProperty(None)\n # Reference to the black slider\n kSlider = ObjectProperty(None)\n \n def on_kv_post(self, widget):\n \"\"\"\n Links and registers all children after the KV file has been processed\n \n Parameter widget: This object, after kv initialization\n Precondition: widget is this object\n \"\"\"\n if self.cSlider:\n self.cSlider.bind(value=self.pollSlider)\n self.color[0] = self.cSlider.value\n if self.mSlider:\n self.mSlider.bind(value=self.pollSlider)\n self.color[1] = self.mSlider.value\n if self.ySlider:\n self.ySlider.bind(value=self.pollSlider)\n self.color[2] = self.ySlider.value\n if self.kSlider:\n self.kSlider.bind(value=self.pollSlider)\n self.color[3] = self.kSlider.value\n \n def pollSlider(self, instance, value):\n \"\"\"\n Polls the latest color value from a slider change\n \n Parameter instance: the reporting instance\n Precondition: instance is a SliderField\n \n Parameter value: the color value\n Precondition: value is a float 0..100\n \"\"\"\n if instance == self.cSlider:\n self.color[0] = value\n elif instance == self.mSlider:\n self.color[1] = value\n elif instance == self.ySlider:\n self.color[2] = value\n elif instance == self.kSlider:\n self.color[3] = value\n \n def setColor(self, c, m, y, k):\n \"\"\"\n Sets the color value for this input device, updating all sliders.\n \n This method is for \"pushing down\" a color to synchronize it with other\n input sources. Direct write access to the color attribute is unsafe.\n \n Parameter c: the cyan value\n Precondition: c is a float 0.0..100.0\n \n Parameter m: the magenta value\n Precondition: m is a float 0.0..100.0\n \n Parameter y: the yellow value\n Precondition: y is a float 0.0..100.0\n \n Parameter k: the black value\n Precondition: k is a float 0.0..100.0\n \"\"\"\n if self.cSlider:\n self.cSlider.setValue(c)\n else:\n self.color[0] = c\n if self.mSlider:\n self.mSlider.setValue(m)\n else:\n self.color[1] = m\n if self.ySlider:\n self.ySlider.setValue(y)\n else:\n self.color[2] = y\n if self.kSlider:\n self.kSlider.setValue(k)\n else:\n self.color[3] = k\n\n\nclass HSVInputPanel(BoxLayout):\n \"\"\"\n An input panel for defining a HSV color.\n \n This panel stores its internal state as a color property. 
It consists of a color\n wheel, a slider, and three text fields.\n \"\"\"\n # The current active color (other widgets should observe this)\n # Direct access is only safe for READ-ONLY access\n color = ListProperty([0, 0, 1])\n # Reference to the color wheel\n hsWheel = ObjectProperty(None)\n # Reference to the value slider\n vSlider = ObjectProperty(None)\n # Reference to the hue text field\n hField = ObjectProperty(None)\n # Reference to the saturation text field\n sField = ObjectProperty(None)\n # Reference to the value text field\n vField = ObjectProperty(None)\n \n # A semaphore-style lock to prevent infinite cascade in Kivy events \n lock = BooleanProperty(False)\n \n def on_kv_post(self, widget):\n \"\"\"\n Links and registers all children after the KV file has been processed\n \n Parameter widget: This object, after kv initialization\n Precondition: widget is this object\n \"\"\"\n if self.lock:\n return\n \n self.lock = True\n \n h = self.color[0]\n s = self.color[1]\n v = self.color[2]\n \n if self.hsWheel:\n self.hsWheel.bind(huesat=self.pollWheel)\n h = self.hsWheel.huesat[0]\n s = self.hsWheel.huesat[1]\n if self.vSlider:\n self.vSlider.bind(value=self.pollSlider)\n v = round(self.vSlider.value/self.vSlider.max,3)\n if self.hField:\n self.hField.index = 0\n self.hField.text = str(h)\n if self.sField:\n self.sField.index = 1\n self.sField.text = str(s)\n if self.vField:\n self.vField.index = 2\n self.vField.text = str(v)\n \n self.color[0] = h\n self.color[1] = s\n self.color[2] = v\n \n self.lock = False\n \n def validate_text(self,instance,text):\n \"\"\"\n Returns the number for the given text, or None if it is invalid\n \n Parameter instance: the reporting text input\n Precondition: instance is one of the three Text Input children\n \n Parameter text: The text to convert to a number\n Precondition: text is a string\n \"\"\"\n try:\n value = round(float(text),3)\n if instance == self.hField:\n while value < 0:\n value += 360\n value = value % 360\n elif value < 0 or value > 1:\n return None\n \n return value\n except:\n return None\n\n def update_on_text(self, instance, text):\n \"\"\"\n Updates a color attribute to match the child text input\n \n Parameter instance: the reporting text input\n Precondition: instance is one of the three Text Input children\n \n Parameter text: the new text input\n Precondition: value is a string\n \"\"\"\n if self.lock or not hasattr(instance,'index'):\n return\n \n self.lock = True # Prevent infinite event recursion\n \n oldvalue = self.color[instance.index]\n newvalue = self.validate_text(instance,text)\n if not newvalue is None:\n if instance.index == 2:\n if self.vSlider:\n self.vSlider.value = newvalue*self.vSlider.max\n if self.hsWheel:\n self.hsWheel.value = newvalue\n elif self.hsWheel:\n if instance.index == 0:\n self.hsWheel.setHue(newvalue)\n else:\n self.hsWheel.setSat(newvalue)\n instance.text = str(newvalue)\n self.color[instance.index] = newvalue\n else:\n instance.text = str(oldvalue)\n \n self.lock = False\n \n def pollSlider(self, instance, value):\n \"\"\"\n Polls the latest value from a slider change\n \n Parameter instance: the reporting instance\n Precondition: instance is the child value Slider\n \n Parameter value: the hsv brightness value\n Precondition: value is an float 0..1\n \"\"\"\n if self.lock:\n return\n \n self.lock = True # Prevent infinite event recursion\n \n value = round(value/self.vSlider.max,3)\n if self.hsWheel:\n self.hsWheel.value = value\n if self.vField:\n self.vField.text = str(value)\n 
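# Added comment: the observable color property is updated after the child widgets so observers read a consistent state\n 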
self.color[2] = value\n \n self.lock = False\n \n def pollWheel(self, instance, value):\n \"\"\"\n Polls the latest hue and saturation from the color wheel\n \n Parameter instance: the reporting instance\n Precondition: instance is the child HSVWheel\n \n Parameter value: the hue and saturation\n Precondition: value is a list of a float 0..360 and a float 0..1\n \"\"\"\n if self.lock:\n return\n \n self.lock = True # Prevent infinite event recursion\n \n hue = round(value[0],3) % 360\n sat = round(value[1],3)\n if self.hField:\n self.hField.text = str(hue)\n if self.sField:\n self.sField.text = str(sat)\n self.color[0] = hue\n self.color[1] = sat\n \n self.lock = False\n \n def setColor(self,h,s,v):\n \"\"\"\n Sets the color value for this input device, updating input features.\n \n This method is for \"pushing down\" a color to synchronize it with other\n input sources. Direct write access to the color attribute is unsafe.\n \n Parameter h: the hue value\n Precondition: h is a float 0.0..360.0\n \n Parameter s: the saturation value\n Precondition: s is a float 0.0..1.0\n \n Parameter v: the brightness value\n Precondition: v is a float 0.0..1.0\n \"\"\"\n if self.lock:\n return\n \n self.lock = True # Prevent infinite event recursion\n \n if self.hsWheel:\n self.hsWheel.setHue(h)\n self.hsWheel.setSat(s)\n self.hsWheel.value = v\n if self.vSlider:\n self.vSlider.value = v*self.vSlider.max\n if self.hField:\n self.hField.text = str(round(h,3))\n if self.sField:\n self.sField.text = str(round(s,3))\n if self.vField:\n self.vField.text = str(round(v,3))\n self.color[0] = round(h,3)\n self.color[1] = round(s,3)\n self.color[2] = round(v,3)\n \n self.lock = False\n\n\npass\n#mark -\n#mark Color Panels\n\nclass ColorPanel(BoxLayout):\n \"\"\"\n A class to display a color and its complement.\n \n This is really just an exalted label, with fine-tuned control over colors.\n \"\"\"\n # The font color\n foreground = ListProperty([1,0,0,1])\n # The panel color\n background = ListProperty([0,1,1,1])\n # The text contents\n text = ObjectProperty(\"\")\n\n\nclass ContrastPanel(BoxLayout):\n \"\"\"\n A ColorPanel variation with a slider.\n \n The contrast setting is a localized input. Therefore, we attach it to the color\n panel that uses it.\n \"\"\"\n # The font color\n foreground = ListProperty([1,0,0,1])\n # The panel color\n background = ListProperty([0,1,1,1])\n # The text contents\n text = ObjectProperty(\"\")\n # A reference to the contrast SliderField\n slider = ObjectProperty(None)\n\n\npass\n#mark -\n#mark Primary Application\nclass ColorWidget(BoxLayout):\n \"\"\"\n A class to implement the fully integrated application.\n \n This class synchronizes the input panels and uses them to set the color settings\n in the various color panels. It depends heavily on the completion of a3.py. 
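The\n conversion functions there translate each edit into the other two color models. 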
Until\n that file is completed, it will not do much.\n \"\"\"\n # Reference to the RGB input panel\n rgbPanel = ObjectProperty(None)\n # Reference to the CMYK input panel\n cmykPanel = ObjectProperty(None)\n # Reference to the HSV input panel\n hsvPanel = ObjectProperty(None)\n # Reference to the main (central) color panel\n mainPanel = ObjectProperty(None)\n # Reference to the complementary color panel\n compPanel = ObjectProperty(None)\n # Reference to the contrast panel\n contPanel = ObjectProperty(None)\n # Reference to the edge separator between the complement and main\n leftSep = ObjectProperty(None)\n # Reference to the edge separator between the main and contrast\n rightSep = ObjectProperty(None)\n \n # A semaphore-style lock to prevent infinite cascade in Kivy events \n lock = BooleanProperty(False)\n \n # Color attributes for display and synchronization\n rgb = introcs.RGB(255,255,255)\n cmyk = None\n hsv = None\n \n def on_kv_post(self, widget):\n \"\"\"\n Links and registers all children after the KV file has been processed\n \n Parameter widget: This object, after kv initialization\n Precondition: widget is this object\n \"\"\"\n if self.rgbPanel:\n self.rgbPanel.bind(color=self.syncInput)\n self.rgb = introcs.RGB(*self.rgbPanel.color)\n if self.cmykPanel:\n self.cmykPanel.bind(color=self.syncInput)\n self.cmyk = a3.rgb_to_cmyk(self.rgb)\n if self.hsvPanel:\n self.hsvPanel.bind(color=self.syncInput)\n self.hsv = a3.rgb_to_hsv(self.rgb)\n if self.contPanel and self.contPanel.slider:\n self.contPanel.slider.bind(value=self.recalibrate)\n self.recolor()\n \n def syncInput(self, instance, value):\n \"\"\"\n Synchronizes all input between input panels.\n \n This is called whenever the user updates an input panel. This code uses the\n conversion functions in a3.py to automatically update the other two panels.\n \n Parameter instance: the reporting input instance\n Precondition: instance is one of the three input panels\n \n Parameter value: the color value\n Precondition: value is the color property of instance\n \"\"\"\n if self.lock:\n return\n \n self.lock = True # Prevent infinite event recursion\n \n if instance == self.rgbPanel:\n self.rgb = introcs.RGB(*value)\n self.cmyk = a3.rgb_to_cmyk(self.rgb)\n if not self.cmyk is None:\n assert (type(self.cmyk) == introcs.CMYK), 'rgb_to_cmyk does not return a CMYK object'\n self.cmykPanel.setColor(self.cmyk.cyan,self.cmyk.magenta,self.cmyk.yellow,self.cmyk.black)\n self.hsv = a3.rgb_to_hsv(self.rgb)\n if not self.hsv is None:\n assert (type(self.hsv) == introcs.HSV), 'rgb_to_hsv does not return a HSV object'\n self.hsvPanel.setColor(self.hsv.hue,self.hsv.saturation,self.hsv.value)\n elif instance == self.cmykPanel:\n self.cmyk = introcs.CMYK(*value)\n rgb = a3.cmyk_to_rgb(self.cmyk)\n if not rgb is None:\n assert (type(rgb) == introcs.RGB), 'cmyk_to_rgb does not return a RGB object'\n self.rgbPanel.setColor(rgb.red,rgb.green,rgb.blue)\n self.rgb = rgb\n else:\n self.cmyk = None\n self.hsv = a3.rgb_to_hsv(self.rgb)\n if not self.hsv is None:\n assert (type(self.hsv) == introcs.HSV), 'rgb_to_hsv does not return a HSV object'\n self.hsvPanel.setColor(self.hsv.hue,self.hsv.saturation,self.hsv.value)\n elif instance == self.hsvPanel:\n self.hsv = introcs.HSV(*value)\n rgb = a3.hsv_to_rgb(self.hsv)\n if not rgb is None:\n assert (type(rgb) == introcs.RGB), 'hsv_to_rgb does not return a RGB object'\n self.rgbPanel.setColor(rgb.red,rgb.green,rgb.blue)\n self.rgb = rgb\n else:\n self.hsv = None\n self.cmyk = a3.rgb_to_cmyk(self.rgb)\n if not 
self.cmyk is None:\n assert (type(self.cmyk) == introcs.CMYK), 'rgb_to_cmyk does not return a CMYK object'\n self.cmykPanel.setColor(self.cmyk.cyan,self.cmyk.magenta,self.cmyk.yellow,self.cmyk.black)\n \n self.recolor()\n self.lock = False\n \n def recalibrate(self,instance,value):\n \"\"\"\n Recalibrates the contrast panel to use the new color and contrast.\n \n This can be called for two different events: a change in color or a change\n in contrast. On a change in color, instance and value will be None, but \n self.rgb will have the current correct color.\n \n Parameter instance: the reporting input instance\n Precondition: instance is one of the three input panels or None\n \n Parameter value: the color value\n Precondition: value is the color property of instance, or None\n \"\"\"\n if not self.contPanel.slider:\n return\n \n cRGB = introcs.RGB(self.rgb.red,self.rgb.green,self.rgb.blue)\n cComp = a3.complement_rgb(self.rgb)\n cComp = introcs.RGB(cComp.red,cComp.green,cComp.blue)\n level = self.contPanel.slider.value\n a3.contrast_rgb(cRGB,level)\n a3.contrast_rgb(cComp,level)\n self.contPanel.foreground = cComp.glColor()\n self.contPanel.background = cRGB.glColor()\n \n if self.rightSep:\n if self.mainPanel:\n self.rightSep.left = self.mainPanel.background\n if self.contPanel:\n self.rightSep.right = self.contPanel.background\n \n def recolor(self):\n \"\"\"\n Recolors the three bottom color panels.\n \"\"\"\n compRGB = a3.complement_rgb(self.rgb)\n if (compRGB is None):\n compRGB = self.rgb\n \n rgb_str = str_rgb(self.rgb)\n cmyk_str = '' if self.cmyk is None else str_cmyk(self.cmyk) \n hsv_str = '' if self.hsv is None else str_hsv(self.hsv)\n text = ('Color\\n' +\n 'RGB: ' + rgb_str +'\\n'+\n 'CMYK: ' + cmyk_str +'\\n'+\n 'HSV: ' + hsv_str + '\\n \\n' +\n 'R,G,B sliders in: 0..255\\n' +\n 'C,M,Y,K sliders: 0 to 100%\\n' +\n 'Color Wheel: 360 degrees, radius 1\\n'+\n 'V slider: 0 to 1')\n if self.mainPanel:\n self.mainPanel.text = text\n self.mainPanel.background = self.rgb.glColor()\n self.mainPanel.foreground = compRGB.glColor()\n \n if self.compPanel:\n self.compPanel.text = text\n self.compPanel.foreground = self.rgb.glColor()\n self.compPanel.background = compRGB.glColor()\n \n if self.contPanel:\n self.contPanel.text = text\n self.recalibrate(None,None)\n \n if self.leftSep:\n if self.compPanel:\n self.leftSep.left = self.compPanel.background\n if self.mainPanel:\n self.leftSep.right = self.mainPanel.background\n \n if self.rightSep:\n if self.mainPanel:\n self.rightSep.left = self.mainPanel.background\n if self.contPanel:\n self.rightSep.right = self.contPanel.background\n\n\nclass ColorModelApp(App):\n \"\"\"\n The Kivy entry-point for the color model application\n \"\"\"\n def build(self):\n \"\"\"\n Reads the kivy file and performs the layout\n \"\"\"\n Config.set('graphics', 'multisamples', '0')\n Config.set('graphics', 'width', '900')\n Config.set('graphics', 'height', '600')\n Config.set('graphics', 'resizable', False)\n return ColorWidget()\n\n\npass\n#mark -\n#mark Application Code\nif __name__ in ('__android__', '__main__'):\n # .kv initialization\n Factory.register(\"Separator\", Separator)\n Factory.register(\"ColorPanel\", ColorPanel)\n Factory.register(\"SliderField\", SliderField)\n Factory.register(\"RGBInputPanel\", RGBInputPanel)\n Factory.register(\"CMYKInputPanel\", CMYKInputPanel)\n Factory.register(\"HSVInputPanel\", HSVInputPanel)\n Factory.register(\"HSVWheel\", HSVWheel)\n Factory.register(\"ColorWidget\", ColorWidget)\n ColorModelApp().run()", "id": 
"7152389", "language": "Python", "matching_score": 4.073488712310791, "max_stars_count": 1, "path": "Color/a3app.py" }, { "content": "\"\"\" \nFunctions for Assignment A3\n\nThis file contains the functions for the assignment. You should replace the stubs\nwith your own implementations.\n\n<NAME>263\n16 October 2020\n\"\"\"\nimport introcs\nimport math\n\n\ndef complement_rgb(rgb):\n \"\"\"\n Returns the complement of color rgb.\n \n Parameter rgb: the color to complement\n Precondition: rgb is an RGB object\n \"\"\"\n\n return introcs.RGB(255 - rgb.red, 255 - rgb.green, 255 - rgb.blue)\n\n\ndef str5(value):\n \"\"\"\n Returns value as a string, but expanded or rounded to be exactly 5 characters.\n \n The decimal point counts as one of the five characters.\n \n Examples:\n str5(1.3546) is '1.355'.\n str5(21.9954) is '22.00'.\n str5(21.994) is '21.99'.\n str5(130.59) is '130.6'.\n str5(130.54) is '130.5'.\n str5(1) is '1.000'.\n \n Parameter value: the number to conver to a 5 character string.\n Precondition: value is a number (int or float), 0 <= value <= 360.\n \"\"\"\n # Remember that the rounding takes place at a different place depending \n # on how big value is. Look at the examples in the specification.\n if value < 10:\n return str(format(value, '.3f'))\n elif value >= 10 and value < 100:\n if value == 99.995:\n return str(format(value, '.1f'))\n return str(format(value, '.2f'))\n else:\n return str(format(value, '.1f'))\n\n\ndef str5_cmyk(cmyk):\n \"\"\"\n Returns the string representation of cmyk in the form \"(C, M, Y, K)\".\n \n In the output, each of C, M, Y, and K should be exactly 5 characters long.\n Hence the output of this function is not the same as str(cmyk)\n \n Example: if str(cmyk) is \n \n '(0.0,31.3725490196,31.3725490196,0.0)'\n \n then str5_cmyk(cmyk) is '(0.000, 31.37, 31.37, 0.000)'. Note the spaces after the\n commas. These must be there.\n \n Parameter cmyk: the color to convert to a string\n Precondition: cmyk is an CMYK object.\n \"\"\"\n return \"(\"+str5(cmyk.cyan)+\", \"+str5(cmyk.magenta)+\", \"+str5(cmyk.yellow)+\", \"+str5(cmyk.black)+\")\"\n\n\ndef str5_hsv(hsv):\n \"\"\"\n Returns the string representation of hsv in the form \"(H, S, V)\".\n \n In the output, each of H, S, and V should be exactly 5 characters long.\n Hence the output of this function is not the same as str(hsv)\n \n Example: if str(hsv) is \n \n '(0.0,0.313725490196,1.0)'\n \n then str5_hsv(hsv) is '(0.000, 0.314, 1.000)'. Note the spaces after the\n commas. 
These must be there.\n \n Parameter hsv: the color to convert to a string\n Precondition: hsv is an HSV object.\n \"\"\"\n s = \"(\"+str5(hsv.hue)+\", \"+str5(hsv.saturation)+\", \"+str5(hsv.value)+\")\"\n return s\n\n\ndef rgb_to_cmyk(rgb):\n \"\"\"\n Returns a CMYK object equivalent to rgb, with the most black possible.\n \n Formulae from https://www.rapidtables.com/convert/color/rgb-to-cmyk.html\n \n Parameter rgb: the color to convert to a CMYK object\n Precondition: rgb is an RGB object\n \"\"\"\n # The RGB numbers are in the range 0..255.\n # Change them to the range 0..1 by dividing them by 255.0.\n r = rgb.red / 255.0\n g = rgb.green / 255.0\n b = rgb.blue / 255.0\n\n k = 1 - max(r,g,b)\n\n if k == 1:\n c = 0\n m = 0\n y = 0\n else:\n c = (1-r-k)/(1-k)\n m = (1-g-k)/(1-k)\n y = (1-b-k)/(1-k)\n\n c *= 100.0\n m *= 100.0\n y *= 100.0\n k *= 100.0\n\n return introcs.CMYK(c,m,y,k)\n\n\ndef cmyk_to_rgb(cmyk):\n \"\"\"\n Returns an RGB object equivalent to cmyk\n \n Formulae from https://www.rapidtables.com/convert/color/cmyk-to-rgb.html\n \n Parameter cmyk: the color to convert to a RGB object\n Precondition: cmyk is a CMYK object.\n \"\"\"\n # The CMYK numbers are in the range 0.0..100.0. \n # Deal with them the same way as the RGB numbers in rgb_to_cmyk()\n c = cmyk.cyan / 100.0\n m = cmyk.magenta / 100.0\n y = cmyk.yellow / 100.0\n k = cmyk.black / 100.0\n\n r = (1-c)*(1-k)\n g = (1-m)*(1-k)\n b = (1-y)*(1-k)\n\n r = round(r*255.0)\n g = round(g*255.0)\n b = round(b*255.0)\n\n return introcs.RGB(r,g,b)\n\n\ndef rgb_to_hsv(rgb):\n \"\"\"\n Returns an HSV object equivalent to rgb\n \n Formulae from https://en.wikipedia.org/wiki/HSL_and_HSV\n \n Parameter rgb: the color to convert to a HSV object\n Precondition: rgb is an RGB object\n \"\"\"\n # The RGB numbers are in the range 0..255.\n # Change them to range 0..1 by dividing them by 255.0.\n r = rgb.red / 255.0\n g = rgb.green / 255.0\n b = rgb.blue / 255.0\n\n maxy = max(r,g,b)\n miny = min(r,g,b)\n\n if maxy == miny:\n h = 0\n elif maxy == r and g >= b:\n h = 60.0*(g-b)/(maxy-miny)\n elif maxy == r and g < b:\n h = 60.0*(g-b)/(maxy-miny) + 360.0\n elif maxy == g:\n h = 60.0*(b-r)/(maxy-miny) + 120.0\n elif maxy == b:\n h = 60.0*(r-g)/(maxy-miny) + 240.0\n\n if maxy == 0:\n s = 0\n else: \n s = 1 - (miny/maxy)\n\n v = maxy\n\n return introcs.HSV(h,s,v)\n\n\ndef hsv_to_rgb(hsv):\n \"\"\"\n Returns an RGB object equivalent to hsv\n \n Formulae from https://en.wikipedia.org/wiki/HSL_and_HSV\n \n Parameter hsv: the color to convert to a RGB object\n Precondition: hsv is an HSV object.\n \"\"\"\n \n hi = math.floor(hsv.hue/60)\n f = hsv.hue/60 - hi\n p = hsv.value*(1-hsv.saturation)\n q = hsv.value*(1-(f*hsv.saturation))\n t = hsv.value*(1-(1-f)*hsv.saturation)\n\n if hi == 0 or hi == 5:\n r = hsv.value\n elif hi == 1:\n r = q\n elif hi == 2 or hi == 3:\n r = p\n elif hi == 4:\n r = t\n\n if hi == 0:\n g = t\n elif hi == 1 or hi == 2:\n g = hsv.value\n elif hi == 3:\n g = q\n elif hi == 4 or hi == 5:\n g = p\n\n if hi == 0 or hi == 1:\n b = p\n elif hi == 2:\n b = t\n elif hi == 3 or hi == 4:\n b = hsv.value\n elif hi == 5:\n b = q\n\n return introcs.RGB(round(r*255),round(g*255),round(b*255))\n\n\ndef contrast_value(value,contrast):\n \"\"\"\n Returns value adjusted to the \"sawtooth curve\" for the given contrast\n \n At contrast = 0, the curve is the normal line y = x, so value is unaffected.\n If contrast < 0, values are pulled closer together, with all values collapsing\n to 0.5 when contrast = -1. 
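For example, at contrast = -0.5 the middle\n segment of the sawtooth runs from (0.125, 0.375) to (0.875, 0.625) with\n slope 1/3, so a value of 0.2 maps to 0.4. 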
If contrast > 0, values are pulled farther apart, \n with all values becoming 0 or 1 when contrast = 1.\n \n Parameter value: the value to adjust\n Precondition: value is a float in 0..1\n \n Parameter contrast: the contrast amount (0 is no contrast)\n Precondition: contrast is a float in -1..1\n \"\"\"\n x = value\n c = contrast\n if c >= -1 and c < 1:\n if x < (0.25 + (0.25*c)):\n y = ((1-c)/(1+c))*x\n elif x > (0.75 - (0.25*c)):\n y = ((1-c)/(1+c))*(x-((3-c)/4))+((3+c)/4)\n else:\n # intercept (1-c)/4 keeps this segment continuous with the outer\n # segments at the breakpoints x = (1+c)/4 and x = (3-c)/4\n y = ((1+c)/(1-c))*(x-((1+c)/4))+((1-c)/4)\n elif c == 1:\n if x >= 0.5:\n y = 1\n else:\n y = 0\n\n return y\n\n\n\ndef contrast_rgb(rgb,contrast):\n \"\"\"\n Applies the given contrast to the RGB object rgb\n \n This function is a PROCEDURE. It modifies rgb and has no return value. It should\n apply contrast_value to the red, blue, and green values.\n \n Parameter rgb: the color to adjust\n Precondition: rgb is an RGB object\n \n Parameter contrast: the contrast amount (0 is no contrast)\n Precondition: contrast is a float in -1..1\n \"\"\"\n rgb.red = round(255.0*contrast_value(rgb.red/255.0, contrast))\n rgb.blue = round(255.0*contrast_value(rgb.blue/255.0, contrast))\n rgb.green = round(255.0*contrast_value(rgb.green/255.0, contrast))", "id": "10970410", "language": "Python", "matching_score": 0.43126896023750305, "max_stars_count": 1, "path": "Color/a3.py" }, { "content": "\"\"\"\r\nA module to draw cool shapes with the introcs Turtle.\r\n\r\nYou call all of these functions in the interactive shell, but you will have \r\nto create a Window first. Alternatively, you can use the a4test.py test script\r\nto try out the functions.\r\n\r\n<NAME>263\r\nNovember 2, 2020\r\n\"\"\"\r\nfrom introcs.turtle import Window, Turtle, Pen\r\nimport introcs # For the RGB and HSV objects\r\nimport math # For the math computations\r\n\r\n\r\n################# Helpers for Precondition Verification #################\r\n\r\ndef is_number(x):\r\n \"\"\"\r\n Returns: True if value x is a number; False otherwise.\r\n \r\n Parameter x: the value to check\r\n Precondition: NONE (x can be any value)\r\n \"\"\"\r\n return type(x) in [float, int]\r\n\r\n\r\ndef is_window(w):\r\n \"\"\"\r\n Returns: True if w is a introcs Window; False otherwise.\r\n \r\n Parameter w: the value to check\r\n Precondition: NONE (w can be any value)\r\n \"\"\"\r\n return type(w) == Window\r\n\r\n\r\ndef is_valid_color(c):\r\n \"\"\"\r\n Returns: True if c is a valid turtle color; False otherwise\r\n \r\n Parameter c: the value to check\r\n Precondition: NONE (c can be any value)\r\n \"\"\"\r\n return (type(c) == introcs.RGB or type(c) == introcs.HSV or\r\n (type(c) == str and (introcs.is_tkcolor(c) or introcs.is_webcolor(c))))\r\n\r\n\r\ndef is_valid_speed(sp):\r\n \"\"\"\r\n Returns: True if sp is an int in range 0..10; False otherwise.\r\n \r\n Parameter sp: the value to check\r\n Precondition: NONE (sp can be any value)\r\n \"\"\"\r\n return (type(sp) == int and 0 <= sp and sp <= 10)\r\n\r\n\r\ndef is_valid_length(side):\r\n \"\"\"\r\n Returns: True if side is a number >= 0; False otherwise.\r\n \r\n Parameter side: the value to check\r\n Precondition: NONE (side can be any value)\r\n \"\"\"\r\n return (is_number(side) and 0 <= side)\r\n\r\n\r\ndef is_valid_iteration(n):\r\n \"\"\"\r\n Returns: True if n is an int >= 1; False otherwise.\r\n \r\n Parameter n: the value to check\r\n Precondition: NONE (n can be any value)\r\n \"\"\"\r\n return (type(n) == int and 1 <= n)\r\n\r\n\r\ndef is_valid_polygon(n):\r\n \"\"\"\r\n Returns: True if n is an int >= 3; 
False otherwise.\r\n \r\n Parameter n: the value to check\r\n Precondition: NONE (n can be any value)\r\n \"\"\"\r\n return (type(n) == int and 3 <= n)\r\n\r\n\r\ndef is_valid_depth(d):\r\n \"\"\"\r\n Returns: True if d is an int >= 0; False otherwise.\r\n \r\n Parameter d: the value to check\r\n Precondition: NONE (d can be any value)\r\n \"\"\"\r\n return (type(d) == int and d >= 0)\r\n\r\n\r\ndef is_valid_turtlemode(t):\r\n \"\"\"\r\n Returns: True if t is a Turtle with drawmode True; False otherwise.\r\n \r\n Parameter t: the value to check\r\n Precondition: NONE (t can be any value)\r\n \"\"\"\r\n return (type(t) == Turtle and t.drawmode)\r\n\r\n\r\ndef is_valid_penmode(p):\r\n \"\"\"\r\n Returns: True if p is a Pen with solid False; False otherwise.\r\n \r\n Parameter p: the value to check\r\n Precondition: NONE (p can be any value)\r\n \"\"\"\r\n return (type(p) == Pen and not p.solid)\r\n\r\n\r\ndef report_error(message, value):\r\n \"\"\"\r\n Returns: An error message about the given value.\r\n \r\n This is a function for constructing error messages to be used in assert \r\n statements. We find that students often introduce bugs into their assert \r\n statement messages, and do not find them because they are in the habit of \r\n not writing tests that violate preconditions.\r\n \r\n The purpose of this function is to give you an easy way of making error \r\n messages without having to worry about introducing such bugs. Look at \r\n the function draw_two_lines for the proper way to use it.\r\n \r\n Parameter message: The error message to display\r\n Precondition: message is a string\r\n \r\n Parameter value: The value that caused the error\r\n Precondition: NONE (value can be anything)\r\n \"\"\"\r\n return message+': '+repr(value)\r\n\r\n\r\n#################### DEMO: Two lines ####################\r\n\r\ndef draw_two_lines(w,sp):\r\n \"\"\"\r\n Draws two lines onto window w.\r\n \r\n This function clears w of any previous drawings. Then, in the middle of \r\n the window w, this function draws a green line 100 pixels to the east, \r\n and then a blue line 200 pixels to the north. 
It uses a new turtle that \r\n moves at speed sp, 0 <= sp <= 10, with 1 being slowest and 10 fastest \r\n (and 0 being \"instant\").\r\n \r\n REMEMBER: You need to flush the turtle if the speed is 0.\r\n \r\n This procedure asserts all preconditions.\r\n \r\n Parameter w: The window to draw upon.\r\n Precondition: w is a introcs Window object.\r\n \r\n Parameter sp: The turtle speed.\r\n Precondition: sp is a valid turtle speed.\r\n \"\"\"\r\n # Assert the preconditions\r\n assert is_window(w), report_error('w is not a valid window',w)\r\n assert is_valid_speed(sp), report_error('sp is not a valid speed',sp)\r\n \r\n # Clear the window first!\r\n w.clear()\r\n \r\n # Create a turtle and draw\r\n t = Turtle(w)\r\n t.speed = sp\r\n t.color = 'green'\r\n t.forward(100) # draw a line 100 pixels in the current direction\r\n t.left(90) # add 90 degrees to the angle\r\n t.color = 'blue'\r\n t.forward(200)\r\n \r\n # This is necessary if speed is 0!\r\n t.flush()\r\n\r\n\r\n#################### TASK 1: Triangle ####################\r\n\r\ndef draw_triangle(t, s, c):\r\n \"\"\"\r\n Draws an equilateral triangle of side s and color c at current position.\r\n \r\n The direction of the triangle depends on the current facing of the turtle.\r\n If the turtle is facing west, the triangle points up and the turtle starts\r\n and ends at the east end of the base line.\r\n \r\n WHEN DONE, THE FOLLOWING TURTLE ATTRIBUTES ARE THE SAME AS IT STARTED:\r\n position (x and y, within round-off errors), heading, color, and drawmode.\r\n If you changed any of these in the function, you must change them back.\r\n \r\n REMEMBER: You need to flush the turtle if the speed is 0.\r\n \r\n This procedure asserts all preconditions.\r\n \r\n Parameter t: The drawing Turtle\r\n Precondition: t is a Turtle with drawmode True.\r\n \r\n Parameter s: The length of each triangle side\r\n Precondition: s is a valid side length (number >= 0)\r\n \r\n Parameter c: The triangle color\r\n Precondition: c is a valid turtle color (see the helper function above)\r\n \"\"\"\r\n # Assert the preconditions\r\n assert is_valid_turtlemode(t), report_error('Invalid turtle mode', t)\r\n assert is_valid_length(s), report_error('Invalid side length', s)\r\n assert is_valid_color(c), report_error('Invalid color', c)\r\n \r\n # Hint: each angle in an equilateral triangle is 60 degrees.\r\n # Note: In this function, DO NOT save the turtle position and heading\r\n # in the beginning and then restore them at the end. The turtle moves\r\n # should be such that the turtle ends up where it started and facing\r\n # in the same direction, automatically.\r\n \r\n # Also, 3 lines have to be drawn. Does this suggest a for loop that\r\n # processes the range 0..2?\r\n orig_color = t.color\r\n t.color = c\r\n for i in range(3):\r\n t.forward(s)\r\n t.right(120)\r\n t.color = orig_color\r\n t.flush()\r\n\r\n\r\n#################### TASK 2: Hexagon ####################\r\n\r\ndef draw_hex(t, s):\r\n \"\"\"\r\n Draws six triangles using the color 'cyan' to make a hexagon.\r\n \r\n The triangles are equilateral triangles, using draw_triangle as a helper.\r\n The drawing starts at the turtle's current position and heading. 
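Between triangles the\r\n turtle turns left 60 degrees, so the six triangles sweep a full 360 degrees\r\n around that point. 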
The\r\n middle of the hexagon is the turtle's starting position.\r\n \r\n WHEN DONE, THE FOLLOWING TURTLE ATTRIBUTES ARE THE SAME AS IT STARTED:\r\n position (x and y, within round-off errors), heading, color, and drawmode.\r\n If you changed any of these in the function, you must change them back.\r\n \r\n REMEMBER: You need to flush the turtle if the speed is 0.\r\n \r\n This procedure asserts all preconditions.\r\n \r\n Parameter t: The drawing Turtle\r\n Precondition: t is a Turtle with drawmode True.\r\n \r\n Parameter s: The length of each triangle side\r\n Precondition: s is a valid side length (number >= 0)\r\n \"\"\"\r\n # Assert the preconditions\r\n assert is_valid_turtlemode(t), report_error('Invalid turtle mode', t)\r\n assert is_valid_length(s), report_error('Invalid side length', s)\r\n\r\n # Note: Do not save any of the turtle's properties and then restore them\r\n # at the end. Just use 6 calls on procedures drawTriangle and t.left. Test\r\n # the procedure to make sure that t's final location and heading are the\r\n # same as t's initial location and heading (except for roundoff error).\r\n for i in range(6):\r\n draw_triangle(t, s, 'cyan')\r\n t.left(60)\r\n t.flush()\r\n\r\n\r\n#################### Task 3A: Spirals ####################\r\n\r\ndef draw_spiral(w, side, ang, n, sp):\r\n \"\"\"\r\n Draws a spiral using draw_spiral_helper(t, side, ang, n, sp)\r\n \r\n This function clears the window and makes a new turtle t. This turtle\r\n starts in the middle of the canvas facing south (NOT the default east).\r\n It then calls draw_spiral_helper(t, side, ang, n, sp). When it is done,\r\n the turtle is left hidden (visible is False).\r\n \r\n REMEMBER: You need to flush the turtle if the speed is 0.\r\n \r\n This procedure asserts all preconditions.\r\n \r\n Parameter w: The window to draw upon.\r\n Precondition: w is a introcs Window object.\r\n \r\n Parameter side: The length of each spiral side\r\n Precondition: side is a valid side length (number >= 0)\r\n \r\n Parameter ang: The angle of each corner of the spiral\r\n Precondition: ang is a number\r\n \r\n Parameter n: The number of edges of the spiral\r\n Precondition: n is a valid number of iterations (int >= 1)\r\n \r\n Parameter sp: The turtle speed.\r\n Precondition: sp is a valid turtle speed.\r\n \"\"\"\r\n assert is_window(w), report_error('w is not a valid window',w)\r\n assert is_valid_length(side), report_error('side is not a valid length',side)\r\n assert is_number(ang), report_error('angle is not valid number', ang)\r\n assert is_valid_iteration(n), report_error('n is not a valid number of iterations',n)\r\n assert is_valid_speed(sp), report_error('sp is not a valid speed',sp)\r\n \r\n # HINT: Set the visible attribute to False at the end, and remember to flush\r\n w.clear()\r\n t = Turtle(w)\r\n t.heading = 270\r\n draw_spiral_helper(t, side, ang, n, sp)\r\n t.visible = False\r\n t.flush()\r\n\r\n\r\ndef draw_spiral_helper(t, side, ang, n, sp):\r\n \"\"\"\r\n Draws a spiral of n lines at the current position and heading.\r\n \r\n The spiral begins at the current turtle position and heading, turning ang\r\n degrees to the left after each line. Line 0 is side pixels long. Line 1\r\n is 2*side pixels long, and so on. Hence each Line i is (i+1)*side pixels\r\n long. The lines alternate between blue, magenta, and red, in that order, \r\n with the first one blue.\r\n \r\n WHEN DONE, THE FOLLOWING TURTLE ATTRIBUTES ARE THE SAME AS IT STARTED:\r\n color, speed, visible, and drawmode. 
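In particular, the\r\n helper sets the turtle speed to sp while drawing and restores the original\r\n speed before returning. 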
However, the final position and\r\n heading may be different. If you changed any of these four in the \r\n function, you must change them back.\r\n \r\n This procedure asserts all preconditions.\r\n \r\n Parameter t: The drawing Turtle\r\n Precondition: t is a Turtle with drawmode True.\r\n \r\n Parameter side: The length of each spiral side\r\n Precondition: side is a valid side length (number >= 0)\r\n \r\n Parameter ang: The angle of each corner of the spiral\r\n Precondition: ang is a number\r\n \r\n Parameter n: The number of edges of the spiral\r\n Precondition: n is a valid number of iterations (int >= 1)\r\n \r\n Parameter sp: The turtle speed.\r\n Precondition: sp is a valid turtle speed.\r\n \"\"\"\r\n assert is_valid_turtlemode(t), report_error('Invalid turtle mode', t)\r\n assert is_valid_length(side), report_error('side is not a valid length',side)\r\n assert is_number(ang), report_error('angle is not valid number', ang)\r\n assert is_valid_iteration(n), report_error('n is not a valid number of iterations',n)\r\n assert is_valid_speed(sp), report_error('sp is not a valid speed',sp)\r\n\r\n orig_color = t.color\r\n orig_speed = t.speed\r\n t.speed = sp # draw at the requested speed, restored below\r\n colorList = ['blue', 'magenta', 'red']\r\n for i in range(n):\r\n t.color = colorList[i % 3]\r\n t.forward((i+1)*side)\r\n t.left(ang)\r\n t.color = orig_color\r\n t.speed = orig_speed\r\n\r\n\r\n#################### TASK 3B: Polygons ####################\r\n\r\ndef multi_polygons(w, side, k, n, sp):\r\n \"\"\"\r\n Draws polygons using multi_polygons_helper(t, side, k, n, sp)\r\n \r\n This function clears the window and makes a new turtle t. This turtle \r\n starts in the middle of the canvas facing north (NOT the default east). \r\n It then calls multi_polygons_helper(t, side, k, n, sp). When it is done, \r\n the turtle is left hidden (visible is False).\r\n \r\n REMEMBER: You need to flush the turtle if the speed is 0.\r\n \r\n This procedure asserts all preconditions.\r\n \r\n Parameter w: The window to draw upon.\r\n Precondition: w is a introcs Window object.\r\n \r\n Parameter side: The length of each polygon side\r\n Precondition: side is a valid side length (number >= 0)\r\n \r\n Parameter k: The number of polygons to draw\r\n Precondition: k is an int >= 1\r\n \r\n Parameter n: The number of sides of each polygon\r\n Precondition: n is an int >= 3\r\n \r\n Parameter sp: The turtle speed.\r\n Precondition: sp is a valid turtle speed.\r\n \"\"\"\r\n assert is_window(w), report_error('w is not a valid window',w)\r\n assert is_valid_length(side), report_error('side is not a valid length',side)\r\n assert is_valid_iteration(k), report_error('k is not valid number of polygons',k)\r\n assert is_valid_polygon(n), report_error('n is not valid number of sides', n)\r\n assert is_valid_speed(sp), report_error('sp is not a valid speed',sp)\r\n \r\n w.clear()\r\n t = Turtle(w)\r\n t.heading = 90\r\n multi_polygons_helper(t, side, k, n, sp)\r\n t.visible = False\r\n t.flush()\r\n\r\n\r\ndef multi_polygons_helper(t, side, k, n, sp):\r\n \"\"\"\r\n Draws k n-sided polygons of side length side.\r\n \r\n The polygons are drawn by turtle t, starting at the current position. The\r\n turtle alternates colors between blue and orange (starting with blue). 
\r\n Each polygon is drawn starting at the same place (within roundoff errors),\r\n but t turns left 360.0/k degrees after each polygon.\r\n \r\n At the end, ALL ATTRIBUTES of the turtle are the same as they were in the \r\n beginning (within roundoff errors). If you change any attributes of the \r\n turtle, then you must restore them. Look at the helper draw_polygon for \r\n more information.\r\n \r\n This procedure asserts all preconditions.\r\n \r\n Parameter t: The drawing Turtle\r\n Precondition: t is a Turtle with drawmode True.\r\n \r\n Parameter side: The length of each polygon side\r\n Precondition: side is a valid side length (number >= 0)\r\n \r\n Parameter k: The number of polygons to draw\r\n Precondition: k is an int >= 1\r\n \r\n Parameter n: The number of sides of each polygon\r\n Precondition: n is an int >= 3\r\n \r\n Parameter sp: The turtle speed.\r\n Precondition: sp is a valid turtle speed.\r\n \"\"\"\r\n \r\n assert is_valid_turtlemode(t), report_error('Invalid turtle mode', t)\r\n assert is_valid_length(side), report_error('side is not a valid length',side)\r\n assert is_valid_iteration(k), report_error('k is not valid number of polygons',k)\r\n assert is_valid_polygon(n), report_error('n is not valid number of sides', n)\r\n assert is_valid_speed(sp), report_error('sp is not a valid speed',sp)\r\n \r\n # HINT: Make sure you restore t's color and speed when done\r\n # HINT: Since k polygons should be drawn, use a for-loop on a range of numbers.\r\n colorList = ['blue','orange']\r\n orig_color = t.color\r\n orig_speed = t.speed\r\n t.speed = sp # draw at the requested speed, restored below\r\n for i in range(k):\r\n t.color = colorList[i % 2]\r\n draw_polygon(t, side, n)\r\n t.left(360.0/k)\r\n t.color = orig_color\r\n t.speed = orig_speed\r\n\r\n\r\n# DO NOT MODIFY\r\ndef draw_polygon(t, side, n):\r\n \"\"\"\r\n Draws an n-sided polygon of side length side.\r\n \r\n WHEN DONE, THE FOLLOWING TURTLE ATTRIBUTES ARE THE SAME AS IT STARTED:\r\n position (x and y, within round-off errors), heading, color, speed, \r\n visible, and drawmode. There is no need to restore these.\r\n \r\n This procedure asserts all preconditions.\r\n \r\n Parameter t: The drawing Turtle\r\n Precondition: t is a Turtle with drawmode True.\r\n \r\n Parameter side: The length of each polygon side\r\n Precondition: side is a valid side length (number >= 0)\r\n \r\n Parameter n: The number of sides of each polygon\r\n Precondition: n is an int >= 1\r\n \"\"\"\r\n # Assert the preconditions\r\n assert is_valid_turtlemode(t), report_error('Invalid turtle mode', t)\r\n assert is_valid_length(side), report_error('side is not a valid length',side)\r\n assert type(n) == int and n >= 1, report_error('n is an invalid # of poly sides',n)\r\n \r\n # Remember old speed\r\n ang = 360.0/n # exterior angle between adjacent sides\r\n \r\n # t is in position and facing the direction to draw the next line.\r\n for _ in range(n):\r\n t.forward(side)\r\n t.left(ang)\r\n\r\n\r\n#################### TASK 3C: Radiating Petals ####################\r\n\r\ndef radiate_petals(w, radius, width, n, sp):\r\n \"\"\"\r\n Draws a color flower with n petals using radiate_petals_helper(t, side, width, n, sp)\r\n \r\n This function clears the window and makes a new turtle t. This turtle\r\n starts in the middle of the canvas facing west (NOT the default east).\r\n It then calls radiate_petals_helper(t, side, width, n, sp). 
When it is \r\n done, the turtle is left hidden (visible is False).\r\n \r\n REMEMBER: You need to flush the turtle if the speed is 0.\r\n \r\n This procedure asserts all preconditions.\r\n \r\n Parameter w: The window to draw upon.\r\n Precondition: w is a introcs Window object.\r\n \r\n Parameter radius: The radius of the produced \"flower\"\r\n Precondition: radius is a valid side length (number >= 0)\r\n \r\n Parameter width: The width of an open petal\r\n Precondition: width is a valid side length (number >= 0)\r\n \r\n Parameter n: The number of lines to draw\r\n Precondition: n is an int >= 2\r\n \r\n Parameter sp: The turtle speed.\r\n Precondition: sp is a valid turtle speed.\r\n \"\"\"\r\n assert is_window(w), report_error('w is not a valid window',w)\r\n assert is_valid_length(radius), report_error('radius is not a valid length',radius)\r\n assert is_valid_length(width), report_error('width is not a valid length',width)\r\n assert (type(n) == int and n >= 2), report_error('n is an invalid # of petals',n)\r\n assert is_valid_speed(sp), report_error('sp is not a valid speed',sp)\r\n \r\n # HINT: w.clear() clears window.\r\n # HINT: Set the visible attribute to False at the end, and remember to flush\r\n w.clear()\r\n t = Turtle(w)\r\n t.heading = 180 # face west\r\n radiate_petals_helper(t, radius, width, n, sp)\r\n t.visible = False\r\n t.flush()\r\n\r\n\r\ndef radiate_petals_helper(t, radius, width, n, sp):\r\n \"\"\"\r\n Draws a color flower with n petals of length radius at equal angles.\r\n \r\n The petals alternate between open (a diamond of the given width) and\r\n closed (a straight line), starting with an open petal. Open petals are \r\n drawn with function draw_diamond, while closed petals are drawn by \r\n moving the turtle in a straight line. After drawing each petal, the \r\n turtle should return to its original position.\r\n \r\n The petals are drawn at equal angles starting from the initial turtle\r\n heading. A petal drawn at angle ang, 0 <= ang < 360 has the HSV color \r\n (ang % 360.0, 1, 1).\r\n \r\n WHEN DONE, THE FOLLOWING TURTLE ATTRIBUTES ARE THE SAME AS IT STARTED:\r\n color, speed, visible, and drawmode. However, the final position and\r\n heading may be different. If you changed any of these four in the \r\n function, you must change them back.\r\n \r\n This procedure asserts all preconditions.\r\n \r\n Parameter t: The drawing Turtle\r\n Precondition: t is a Turtle with drawmode True.\r\n \r\n Parameter radius: The radius of the produced \"flower\"\r\n Precondition: radius is a valid side length (number >= 0)\r\n \r\n Parameter width: The width of an open petal\r\n Precondition: width is a valid side length (number >= 0)\r\n \r\n Parameter n: The number of lines to draw\r\n Precondition: n is an int >= 2\r\n \r\n Parameter sp: The turtle speed.\r\n Precondition: sp is a valid turtle speed.\r\n \"\"\"\r\n # Assert the preconditions\r\n assert is_valid_turtlemode(t), report_error('Invalid turtle mode', t)\r\n assert is_valid_length(radius), report_error('radius is not a valid length',radius)\r\n assert is_valid_length(width), report_error('width is not a valid length',width)\r\n assert (type(n) == int and n >= 2), report_error('n is an invalid # of petals',n)\r\n assert is_valid_speed(sp), report_error('sp is not a valid speed',sp)\r\n \r\n # Hints:\r\n # 1. The petals should be drawn with a range loop.\r\n # 2. The first petal should be open, alternating with closed petals afterwards\r\n # 3. Open petals should be drawn with the function draw_diamond\r\n # 4. The heading of the turtle should stay in the range 0 <= heading < 360.\r\n # 5. (t.heading % 360.0, 1, 1) is the HSV color of the turtle for each petal\r\n # 6. 
You can use an HSV object for the turtle's color attribute,\r\n # even though all the examples use strings with color names\r\n \r\n # Follows the specification above: alternate open and closed petals,\r\n # coloring each by the current heading\r\n orig_color = t.color\r\n orig_speed = t.speed\r\n t.speed = sp\r\n for i in range(n):\r\n t.color = introcs.HSV(t.heading % 360.0, 1, 1)\r\n if i % 2 == 0:\r\n draw_diamond(t, radius, width)\r\n else:\r\n t.forward(radius)\r\n t.backward(radius)\r\n t.left(360.0/n)\r\n t.heading = t.heading % 360.0 # keep heading in 0 <= heading < 360\r\n t.color = orig_color\r\n t.speed = orig_speed\r\n\r\n\r\n# DO NOT MODIFY\r\ndef draw_diamond(t, length, width):\r\n \"\"\"\r\n Draws a diamond whose major axis (length) is along the current heading.\r\n \r\n The width is the size of the minor axis, which is perpendicular to the \r\n current turtle heading.\r\n \r\n WHEN DONE, THE FOLLOWING TURTLE ATTRIBUTES ARE THE SAME AS IT STARTED: \r\n position (x and y, within round-off errors), heading, color, speed, visible, \r\n and drawmode. There is no need to restore these.\r\n \r\n This procedure asserts all preconditions.\r\n \r\n Parameter t: The drawing Turtle\r\n Precondition: t is a Turtle with drawmode True.\r\n \r\n Parameter length: The size of the major axis\r\n Precondition: length is a valid side length (number >= 0)\r\n \r\n Parameter width: The size of the minor (perpendicular) axis\r\n Precondition: width is a valid side length (number >= 0)\r\n \"\"\"\r\n assert is_valid_turtlemode(t), report_error('Invalid turtle mode', t)\r\n assert is_valid_length(length), report_error('length is not a valid length',length)\r\n assert is_valid_length(width), report_error('width is not a valid length',width)\r\n \r\n # Compute the next position to go to\r\n angle1 = t.heading*math.pi/180.0\r\n x2 = t.x+math.cos(angle1)*length/2\r\n y2 = t.y+math.sin(angle1)*length/2\r\n x2 -= math.sin(angle1)*width/2\r\n y2 += math.cos(angle1)*width/2\r\n \r\n # Compute the offset heading and edge length\r\n angle2 = math.atan2(y2-t.y,x2-t.x)*180.0/math.pi\r\n angle3 = angle2-t.heading\r\n edgesz = math.sqrt((x2-t.x)*(x2-t.x)+(y2-t.y)*(y2-t.y))\r\n \r\n # Draw the diamond, restoring position and heading\r\n t.right(angle3)\r\n t.forward(edgesz)\r\n t.left(2*angle3)\r\n t.forward(edgesz)\r\n t.right(2*angle3)\r\n t.backward(edgesz)\r\n t.left(2*angle3)\r\n t.backward(edgesz)\r\n t.right(angle3)\r\n\r\n\r\n#################### TASK 4A: Sierpinski Triangle ####################\r\n\r\ndef triangle(w, side, d, sp):\r\n \"\"\"\r\n Draws a Sierpinski triangle with the given side length and depth d.\r\n \r\n This function clears the window and makes a new graphics pen p. This\r\n pen starts in the middle of the canvas at (0,0). It draws by calling\r\n the function triangle_helper(p, 0, 0, side, d). 
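Each recursion level\r\n replaces a triangle with three half-size triangles arranged in its corners. 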
The pen is visible\r\n during drawing and should be set to hidden at the end.\r\n \r\n The pen should have a 'magenta' fill color and a 'black' edge color.\r\n \r\n REMEMBER: You need to flush the pen if the speed is 0.\r\n \r\n Parameter w: The window to draw upon.\r\n Precondition: w is a Window object.\r\n \r\n Parameter side: The side length of the triangle\r\n Precondition: side is a valid side length (number >= 0)\r\n \r\n Parameter d: The recursive depth of the triangle\r\n Precondition: d is a valid depth (int >= 0)\r\n \r\n Parameter sp: The drawing speed.\r\n Precondition: sp is a valid turtle/pen speed.\r\n \"\"\"\r\n assert is_window(w), report_error('w is not a valid window',w)\r\n assert is_valid_length(side), report_error('side is not a valid length',side)\r\n assert is_valid_depth(d), report_error('d is not a valid depth',d)\r\n assert is_valid_speed(sp), report_error('sp is not a valid speed',sp)\r\n # HINT: Set the visible attribute to False at the end, and remember to flush\r\n w.clear()\r\n p = Pen(w)\r\n p.speed = sp\r\n p.fillcolor = 'magenta'\r\n p.edgecolor = 'black'\r\n triangle_helper(p, 0, 0, side, d)\r\n p.visible = False\r\n p.flush() # necessary if the speed is 0\r\n\r\n\r\ndef triangle_helper(p, x, y, side, d):\r\n \"\"\"\r\n Draws a Sierpinski triangle with the given side length and depth d, centered at (x, y).\r\n \r\n The triangle is drawn with the current pen color and visibility attribute. Follow \r\n the instructions on the course website to recursively draw the Sierpinski triangle. \r\n The center of the triangle is positioned at (x,y).\r\n \r\n Parameter p: The graphics pen\r\n Precondition: p is a Pen with fill attribute False.\r\n \r\n Parameter x: The x-coordinate of the triangle center\r\n Precondition: x is a number\r\n \r\n Parameter y: The y-coordinate of the triangle center\r\n Precondition: y is a number\r\n \r\n Parameter side: The side-length of the triangle\r\n Precondition: side is a valid side length (number >= 0)\r\n \r\n Parameter d: The recursive depth of the triangle\r\n Precondition: d is a valid depth (int >= 0)\r\n \"\"\"\r\n assert is_valid_penmode(p), report_error('Invalid pen mode', p)\r\n assert is_number(x), report_error('x is not valid number', x)\r\n assert is_number(y), report_error('y is not valid number', y)\r\n assert is_valid_length(side), report_error('side is not a valid length',side)\r\n assert is_valid_depth(d), report_error('d is not a valid depth',d)\r\n # Hint: Use fill_triangle to draw an individual triangle\r\n height = side*(math.sqrt(3)/2)\r\n if d == 0:\r\n fill_triangle(p, x, y, side)\r\n else:\r\n # three calls to triangle helper with variable x and y's\r\n triangle_helper(p,x-(side/4),y-(height/4),side/2,d-1)\r\n triangle_helper(p,x+(side/4),y-(height/4),side/2,d-1)\r\n triangle_helper(p,x,y+(height/4),side/2,d-1)\r\n\r\n\r\n# DO NOT MODIFY\r\ndef fill_triangle(p, x, y, side):\r\n \"\"\"\r\n Fills an equilateral triangle of side length side.\r\n \r\n The triangle is pointing up.\r\n \r\n Parameter p: The graphics pen\r\n Precondition: p is a Pen with fill attribute False.\r\n \r\n Parameter x: The x-coordinate of the triangle center\r\n Precondition: x is a number\r\n \r\n Parameter y: The y-coordinate of the triangle center\r\n Precondition: y is a number\r\n \r\n Parameter side: The side length of the triangle\r\n Precondition: side is a valid side length (number >= 0)\r\n \"\"\"\r\n # Precondition Assertions\r\n assert is_valid_penmode(p), report_error('Invalid pen mode', p)\r\n assert is_number(x), report_error('x is not a valid position',x)\r\n assert is_number(y), 
report_error('y is not a valid position',y)\r\n assert is_valid_length(side), report_error('side is not a valid length',side)\r\n \r\n h = side * math.sqrt(.75)\r\n p.move(x-side/2, y-h/2)\r\n p.solid = True\r\n p.drawLine(side, 0)\r\n p.drawLine(-side/2.0, h)\r\n p.drawLine(-side/2.0, -h)\r\n p.solid = False\r\n\r\n\r\n#################### TASK 4B: Cantor Stool ####################\r\n\r\ndef cantor(w, side, hght, d, sp):\r\n \"\"\"\r\n Draws a Cantor Stool of dimensions side x hght, and depth d.\r\n \r\n This function clears the window and makes a new graphics pen p. This\r\n pen starts in the middle of the canvas at (0,0). It draws by calling\r\n the function cantor_helper(p, 0, 0, side, hght, d). The pen is visible\r\n during drawing and should be set to hidden at the end.\r\n \r\n The pen should have both fill color and edge color 'red'.\r\n \r\n REMEMBER: You need to flush the pen if the speed is 0.\r\n \r\n Parameter w: The window to draw upon.\r\n Precondition: w is a Window object.\r\n \r\n Parameter side: The width of the Cantor stool\r\n Precondition: side is a valid side length (number >= 0)\r\n \r\n Parameter hght: The height of the Cantor stool\r\n Precondition: hght is a valid side length (number >= 0)\r\n \r\n Parameter d: The recursive depth of the stool\r\n Precondition: d is a valid depth (int >= 0)\r\n \r\n Parameter sp: The drawing speed.\r\n Precondition: sp is a valid turtle/pen speed.\r\n \"\"\"\r\n assert is_window(w), report_error('w is not a valid window',w)\r\n assert is_valid_length(side), report_error('side is not a valid length',side)\r\n assert is_valid_length(hght), report_error('hght is not a valid length',hght)\r\n assert is_valid_depth(d), report_error('d is not a valid depth',d)\r\n assert is_valid_speed(sp), report_error('sp is not a valid speed',sp)\r\n \r\n # HINT: w.clear() clears window.\r\n # HINT: Set the visible attribute to False at the end, and remember to flush\r\n w.clear()\r\n p = Pen(w)\r\n p.speed = sp\r\n p.fillcolor = 'red'\r\n p.edgecolor = 'red'\r\n cantor_helper(p, 0, 0, side, hght, d)\r\n p.visible = False\r\n p.flush() # necessary if the speed is 0\r\n\r\n\r\ndef cantor_helper(p, x, y, side, hght, d):\r\n \"\"\"\r\n Draws a stool of dimensions side x hght, and depth d centered at (x,y)\r\n \r\n The stool is draw with the current pen color and visibility attribute.\r\n Follow the instructions on the course website to recursively draw the\r\n Cantor stool.\r\n \r\n Parameter p: The graphics pen\r\n Precondition: p is a Pen with fill attribute False.\r\n \r\n Parameter x: The x-coordinate of the stool center\r\n Precondition: x is a number\r\n \r\n Parameter y: The y-coordinate of the stool center\r\n Precondition: y is a number\r\n \r\n Parameter side: The width of the Cantor stool\r\n Precondition: side is a valid side length (number >= 0)\r\n \r\n Parameter hght: The height of the Cantor stool\r\n Precondition: hght is a valid side length (number >= 0)\r\n \r\n Parameter d: The recursive depth of the stool\r\n Precondition: d is a valid depth (int >= 0)\r\n \"\"\"\r\n assert is_valid_penmode(p), report_error('Invalid pen mode', p)\r\n assert is_number(x), report_error('x is not valid number', x)\r\n assert is_number(y), report_error('y is not valid number', y)\r\n assert is_valid_length(side), report_error('side is not a valid length',side)\r\n assert is_valid_length(hght), report_error('hght is not a valid length',hght)\r\n assert is_valid_depth(d), report_error('d is not a valid depth',d)\r\n \r\n # Hint: Use fill_rect to draw an individual rectangle\r\n pass\r\n\r\n\r\n# DO NOT MODIFY\r\ndef fill_rect(p, x, y, side, hght):\r\n \"\"\"\r\n Fills a rectangle of lengths side, hght with center (x, y) using pen p.\r\n \r\n This procedure asserts all preconditions.\r\n \r\n Parameter p: The graphics pen\r\n Precondition: p is a Pen with solid attribute False.\r\n \r\n Parameter x: The x-coordinate of the rectangle center\r\n Precondition: x is a number\r\n \r\n Parameter y: The y-coordinate of the rectangle center\r\n Precondition: y is a number\r\n \r\n Parameter side: The width of the rectangle\r\n Precondition: side is a valid side length (number >= 0)\r\n \r\n Parameter hght: The height of the rectangle\r\n Precondition: hght is a valid side length (number >= 0)\r\n \"\"\"\r\n # Precondition 
assertions\r\n assert is_valid_penmode(p), report_error('Invalid pen mode', p)\r\n assert is_number(x), report_error('x is not a valid position',x)\r\n assert is_number(y), report_error('y is not a valid position',y)\r\n assert is_valid_length(side), report_error('side is not a valid length',side)\r\n assert is_valid_length(hght), report_error('hght is not a valid length',hght)\r\n \r\n # Move to the center and draw\r\n p.move(x - side/2.0, y - hght/2.0)\r\n p.solid = True\r\n p.drawLine( 0, hght)\r\n p.drawLine( side, 0)\r\n p.drawLine( 0, -hght)\r\n p.drawLine(-side, 0)\r\n p.solid = False\r\n p.move(x - side/2.0, y - hght/2.0)\r\n\r\n\r\n#################### TASK 5: Minkowski Island ####################\r\n\r\ndef island(w, side, d, sp):\r\n \"\"\"\r\n Draws a Minkowski island with the given side length and depth d.\r\n \r\n This function clears the window and makes a new Turtle t. This turtle starts in \r\n lower right corner of the square centered at (0,0) with side length side. It is \r\n facing straight up. It draws by calling the function island_edge(t, side, d) four \r\n times, rotating the turtle left after each call to form a square.\r\n \r\n The turtle should be visible while drawing, but hidden at the end. The turtle color \r\n is 'sea green'.\r\n \r\n REMEMBER: You need to flush the turtle if the speed is 0.\r\n \r\n Parameter w: The window to draw upon.\r\n Precondition: w is a Window object.\r\n \r\n Parameter side: The side-length of the island\r\n Precondition: side is a valid side length (number >= 0)\r\n \r\n Parameter d: The recursive depth of the island\r\n Precondition: d is a valid depth (int >= 0)\r\n \r\n Parameter sp: The drawing speed.\r\n Precondition: sp is a valid turtle/pen speed.\r\n \"\"\"\r\n assert is_window(w), report_error('w is not a valid window',w)\r\n assert is_valid_length(side), report_error('side is not a valid length',side)\r\n assert is_valid_depth(d), report_error('d is not a valid depth',d)\r\n assert is_valid_speed(sp), report_error('sp is not a valid speed',sp) \r\n\r\n w.clear()\r\n t = Turtle(w)\r\n t.heading = 90\r\n t.move(side/2,-(side/2))\r\n t.color = 'sea green'\r\n for i in range(4):\r\n island_edge(t,side,d)\r\n t.left(90)\r\n t.visible = False\r\n t.flush()\r\n\r\n\r\ndef island_edge(t, side, d):\r\n \"\"\"\r\n Draws a single Minkowski edge of length side and depth d at the current position and angle.\r\n \r\n The edge is drawn with the current turtle color. You should make no assumptions of\r\n the current angle of the turtle (e.g. use left and right to turn; do not set the\r\n heading).\r\n \r\n WHEN DONE, THE FOLLOWING TURTLE ATTRIBUTES ARE THE SAME AS IT STARTED:\r\n color, speed, visible, and drawmode. However, the final position and\r\n heading may be different. If you changed any of these four in the function,\r\n you must change them back.\r\n \r\n Parameter t: The drawing Turtle\r\n Precondition: t is a Turtle with drawmode True.\r\n \r\n Parameter side: The length of each Minkowski side\r\n Precondition: side is a valid side length (number >= 0)\r\n \r\n Parameter d: The recursive depth of the edge\r\n Precondition: d is a valid depth (int >= 0)\r\n \"\"\"\r\n assert is_valid_turtlemode(t), report_error('Invalid turtle mode', t)\r\n assert is_valid_length(side), report_error('side is not a valid length',side)\r\n assert is_valid_depth(d), report_error('d is not a valid depth',d)\r\n \r\n # HINT: Look closely at the picture from the instructions. 
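The generator replaces each\r\n # edge with eight sub-edges of length side/4, turning right, left, left,\r\n # (straight), right, right, left between successive sub-edges. 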
\r\n # For depth other than 0, divide the side into four equal parts (but eight edges)\r\n if d == 0:\r\n t.forward(side)\r\n else:\r\n island_edge(t,side/4,d-1)\r\n t.right(90)\r\n island_edge(t,side/4,d-1)\r\n t.left(90)\r\n island_edge(t,side/4,d-1)\r\n t.left(90)\r\n island_edge(t,side/4,d-1)\r\n island_edge(t,side/4,d-1)\r\n t.right(90)\r\n island_edge(t,side/4,d-1)\r\n t.right(90)\r\n island_edge(t,side/4,d-1)\r\n t.left(90)\r\n island_edge(t,side/4,d-1)\r\n\r\n", "id": "2762825", "language": "Python", "matching_score": 5.327441215515137, "max_stars_count": 1, "path": "Turtle/a4.py" }, { "content": "\"\"\"\nTest script for Assignment A4\n\nThis test script is different from previous test scripts, because the output \nof the functions in A4 is graphical. We cannot use assert_equals to verify \nthat the turtle is drawing correctly. Instead, we have to let the Turtle draw, \nlook at the result, and manually verify that they are correct. Hence the test \nprocedures for A4 are procedures that draw one or more pictures using the \nfunction being tested.\n\nHowever, there are some things that we can test automatically. Many of the \nfunctions in A4 require that we restore the state of a turtle when we are done.\nWe can use assert_equals to verify that these values are properly restored. \nWe can also use the new function assert_error to verify that a precondition is \nbeing enforced (e.g. an error is raised if the precondition is violated). \nThere are examples of both of these in this file.\n\nThis is an EXTREMELY incomplete test script. We do not guarantee that we have \ntested all possibilities for all functions (and in some cases have intentionally \navoided doing so). Passing this script is not a guarantee that you will get a \nperfect score on the assignment. It is up to you to add more tests to ensure that your \nA4 functions are complete and correct.\n\nWith that said, you will not be submitting this file as part of the assignment. \nWe have provided it simply as a convenience.\n\nAuthor: <NAME> (wmw2)\nDate: October 12, 2020\n\"\"\"\nimport a4\nimport introcs\nfrom introcs.turtle import Window, Turtle, Pen\n\n\n#################### DEMO: Two lines ####################\n\ndef test_draw_two_lines(w,sp):\n \"\"\"\n Tests the procedure draw_two_lines\n \n Unlike most test procedures, you will notice that this test procedure has \n parameters. That is because we want all of our test procedures to share the\n same drawing window and same drawing speed. These are set in the master \n procedure test_all.\n \n Parameter w: The window to draw upon.\n Precondition: w is a introcs Window object.\n \n Parameter sp: The drawing speed.\n Precondition: sp is a valid drawing speed (int 0..10).\n \"\"\"\n print('Testing draw_two_lines')\n \n # First verify that the preconditions are enforced\n introcs.assert_error(a4.draw_two_lines,'window',sp)\n introcs.assert_error(a4.draw_two_lines,w,-1)\n introcs.assert_error(a4.draw_two_lines,w,str(sp))\n \n a4.draw_two_lines(w,sp)\n \n # Allow the user to look at the picture before continuing\n input('Press [return]')\n\n\n#################### TASK 1: Triangle ####################\n\ndef test_draw_triangle(w,sp):\n \"\"\"\n Tests the procedure draw_triangle\n \n Unlike most test procedures, you will notice that this test procedure has \n parameters. That is because we want all of our test procedures to share the\n same drawing window and same drawing speed. 
These are set in the master \n procedure test_all.\n \n Parameter w: The window to draw upon.\n Precondition: w is a introcs Window object.\n \n Parameter sp: The drawing speed.\n Precondition: sp is a valid drawing speed (int 0..10).\n \"\"\"\n print('Testing draw_triangle')\n w.clear()\n turt = Turtle(w)\n turt.speed = sp\n \n # First verify that the preconditions are enforced\n turt.drawmode = False\n introcs.assert_error(a4.draw_triangle,turt,50,'orange')\n turt.drawmode = True\n introcs.assert_error(a4.draw_triangle,turt,'50','orange')\n introcs.assert_error(a4.draw_triangle,turt,-50,'orange')\n introcs.assert_error(a4.draw_triangle,turt,-50,'orangy')\n \n # Store original values (These are all of the important ones)\n oldx = turt.x\n oldy = turt.y\n oldang = turt.heading % 360\n oldcol = turt.color\n oldmode = turt.drawmode\n \n # Now draw\n a4.draw_triangle(turt,50,'orange')\n \n # Verify value restored\n introcs.assert_floats_equal(oldx,turt.x)\n introcs.assert_floats_equal(oldy,turt.y)\n introcs.assert_floats_equal(oldang,turt.heading % 360) # Okay if 360 was added\n introcs.assert_equals(oldcol, turt.color)\n introcs.assert_equals(oldmode,turt.drawmode)\n \n # Allow the user to look at the picture before continuing\n input('Press [return]')\n\n\n#################### TASK 2: Hexagon ####################\n\ndef test_draw_hex(w,sp):\n \"\"\"\n Tests the procedure draw_hex\n \n Unlike most test procedures, you will notice that this test procedure has \n parameters. That is because we want all of our test procedures to share the\n same drawing window and same drawing speed. These are set in the master \n procedure test_all.\n \n Parameter w: The window to draw upon.\n Precondition: w is a introcs Window object.\n \n Parameter sp: The drawing speed.\n Precondition: sp is a valid drawing speed (int 0..10).\n \"\"\"\n print('Testing draw_hex')\n w.clear()\n turt = Turtle(w)\n turt.speed = sp\n \n # First verify that the preconditions are enforced\n turt.drawmode = False\n introcs.assert_error(a4.draw_hex,turt,50)\n turt.drawmode = True\n introcs.assert_error(a4.draw_hex,turt,'50')\n introcs.assert_error(a4.draw_hex,turt,-50)\n \n # Store original values (These are all of the important ones)\n oldx = turt.x\n oldy = turt.y\n oldang = turt.heading % 360\n oldcol = turt.color\n oldmode = turt.drawmode\n \n # Now draw\n a4.draw_hex(turt,50)\n \n # Verify value restored\n introcs.assert_floats_equal(oldx,turt.x)\n introcs.assert_floats_equal(oldy,turt.y)\n introcs.assert_floats_equal(oldang,turt.heading % 360) # Okay if 360 was added\n introcs.assert_equals(oldcol, turt.color)\n introcs.assert_equals(oldmode,turt.drawmode)\n \n # Allow the user to look at the picture before continuing\n input('Press [return]')\n\n\n#################### Task 3A: Spirals ####################\n\ndef test_draw_spiral(w,sp):\n \"\"\"\n Tests the procedure draw_spiral AND draw_spiral_helper\n \n Unlike most test procedures, you will notice that this test procedure has \n parameters. That is because we want all of our test procedures to share the\n same drawing window and same drawing speed. These are set in the master \n procedure test_all.\n \n Parameter w: The window to draw upon.\n Precondition: w is a introcs Window object.\n \n Parameter sp: The drawing speed.\n Precondition: sp is a valid drawing speed (int 0..10).\n \"\"\"\n print('Testing draw_spiral')\n # This is only ONE TEST. 
Feel free to change the values for different tests.\n    \n    # First verify that the preconditions are enforced\n    # WE HAVE NOT ADDED ALL OF THEM. YOU MAY NEED TO ADD SOME\n    introcs.assert_error(a4.draw_spiral,'window',1,24,64,sp)\n    introcs.assert_error(a4.draw_spiral,w,'1',24,64,sp)\n    introcs.assert_error(a4.draw_spiral,w,-1,24,64,sp)\n    introcs.assert_error(a4.draw_spiral,w,1,24,64.3,sp)\n    introcs.assert_error(a4.draw_spiral,w,1,24,0,sp)\n    introcs.assert_error(a4.draw_spiral,w,1,24,64,str(sp))\n    introcs.assert_error(a4.draw_spiral,w,1,24,64,-1)\n    \n    # Now draw\n    a4.draw_spiral(w, 1, 24, 64, sp)\n    \n    # Allow the user to look at the picture before continuing\n    input('Press [return]')\n    \n    print('Testing draw_spiral_helper')\n    # This is only ONE TEST. Feel free to change the values for different tests.\n    w.clear()\n    turt = Turtle(w)\n    turt.color = 'blue'\n    \n    # First verify that the preconditions are enforced\n    # WE HAVE NOT ADDED ALL OF THEM. YOU MAY NEED TO ADD SOME\n    turt.drawmode = False\n    introcs.assert_error(a4.draw_spiral_helper,turt,20,90,6,sp)\n    turt.drawmode = True\n    introcs.assert_error(a4.draw_spiral_helper,turt,'20',90,6,sp)\n    introcs.assert_error(a4.draw_spiral_helper,turt,-1,90,6,sp)\n    introcs.assert_error(a4.draw_spiral_helper,turt,20,90,6.3,sp)\n    introcs.assert_error(a4.draw_spiral_helper,turt,20,90,0,sp)\n    introcs.assert_error(a4.draw_spiral_helper,turt,20,90,6,str(sp))\n    introcs.assert_error(a4.draw_spiral_helper,turt,20,90,6,-1)\n    \n    # Store original values (These are all of the important ones)\n    oldcol = turt.color\n    oldvis = turt.visible\n    oldspd = turt.speed\n    oldmode = turt.drawmode\n    \n    # Now draw\n    a4.draw_spiral_helper(turt, 20, 90, 6, sp)\n    \n    # Verify value restored\n    introcs.assert_equals(oldcol, turt.color)\n    introcs.assert_equals(oldvis,turt.visible)\n    introcs.assert_equals(oldspd,turt.speed)\n    introcs.assert_equals(oldmode,turt.drawmode)\n    \n    # Allow the user to look at the picture before continuing\n    input('Press [return]')\n\n\n#################### TASK 3B: Polygons ####################\n\ndef test_multi_polygons(w,sp):\n    \"\"\"\n    Tests the procedure multi_polygons AND multi_polygons_helper\n    \n    Unlike most test procedures, you will notice that this test procedure has \n    parameters. That is because we want all of our test procedures to share the\n    same drawing window and same drawing speed. These are set in the master \n    procedure test_all.\n    \n    Parameter w: The window to draw upon.\n    Precondition: w is an introcs Window object.\n    \n    Parameter sp: The drawing speed.\n    Precondition: sp is a valid drawing speed (int 0..10).\n    \"\"\"\n    print('Testing multi_polygons')\n    # This is only ONE TEST. Feel free to change the values for different tests.\n    \n    # First verify that the preconditions are enforced\n    # WE HAVE NOT ADDED ALL OF THEM. YOU MAY NEED TO ADD SOME\n    introcs.assert_error(a4.multi_polygons,'window', 100, 5, 6, sp)\n    introcs.assert_error(a4.multi_polygons, w, '100', 5, 6, sp)\n    introcs.assert_error(a4.multi_polygons, w, -1, 5, 6, sp)\n    introcs.assert_error(a4.multi_polygons, w, 100, 5, 6,str(sp))\n    introcs.assert_error(a4.multi_polygons, w, 100, 5, 6,-1)\n    \n    # Now draw\n    a4.multi_polygons(w, 100, 5, 6, sp)\n    \n    # Allow the user to look at the picture before continuing\n    input('Press [return]')\n    \n    print('Testing multi_polygons_helper')\n    # This is only ONE TEST. Feel free to change the values for different tests.\n    w.clear()\n    turt = Turtle(w)\n    turt.color = 'blue'\n    \n    # First verify that the preconditions are enforced\n    # WE HAVE NOT ADDED ALL OF THEM. 
YOU MAY NEED TO ADD SOME\n    turt.drawmode = False\n    introcs.assert_error(a4.multi_polygons_helper, turt, 60, 7, 3, sp)\n    turt.drawmode = True\n    introcs.assert_error(a4.multi_polygons_helper, turt, '60', 7, 3, sp)\n    introcs.assert_error(a4.multi_polygons_helper, turt, -1, 7, 3, sp)\n    introcs.assert_error(a4.multi_polygons_helper, turt, 60, 7, 3, str(sp))\n    introcs.assert_error(a4.multi_polygons_helper, turt, 60, 7, 3, -1)\n    \n    # Store original values (These are all of the important ones)\n    oldx = turt.x\n    oldy = turt.y\n    oldang = turt.heading % 360\n    oldcol = turt.color\n    oldvis = turt.visible\n    oldspd = turt.speed\n    oldmode = turt.drawmode\n    \n    # Now draw\n    a4.multi_polygons_helper(turt, 60, 7, 3, sp)\n    \n    # Verify value restored\n    introcs.assert_floats_equal(oldx,turt.x)\n    introcs.assert_floats_equal(oldy,turt.y)\n    introcs.assert_floats_equal(oldang,turt.heading % 360) # Okay if 360 was added\n    introcs.assert_equals(oldcol, turt.color)\n    introcs.assert_equals(oldvis,turt.visible)\n    introcs.assert_equals(oldspd,turt.speed)\n    introcs.assert_equals(oldmode,turt.drawmode)\n    \n    # Allow the user to look at the picture before continuing\n    input('Press [return]')\n\n\n#################### TASK 3C: Radiating Petals ####################\n\ndef test_radiate_petals(w,sp):\n    \"\"\"\n    Tests the procedure radiate_petals AND radiate_petals_helper\n    \n    Unlike most test procedures, you will notice that this test procedure has \n    parameters. That is because we want all of our test procedures to share the\n    same drawing window and same drawing speed. These are set in the master \n    procedure test_all.\n    \n    Parameter w: The window to draw upon.\n    Precondition: w is an introcs Window object.\n    \n    Parameter sp: The drawing speed.\n    Precondition: sp is a valid drawing speed (int 0..10).\n    \"\"\"\n    print('Testing radiate_petals')\n    # This is only ONE TEST. Feel free to change the values for different tests.\n    \n    # First verify that the preconditions are enforced\n    # WE HAVE NOT ADDED ALL OF THEM. YOU MAY NEED TO ADD SOME\n    introcs.assert_error(a4.radiate_petals, 'window', 150, 30, 45, sp)\n    introcs.assert_error(a4.radiate_petals, w, '150', 30, 45, sp)\n    introcs.assert_error(a4.radiate_petals, w, -1, 30, 45, sp)\n    introcs.assert_error(a4.radiate_petals, w, 150, '30', 45, sp)\n    introcs.assert_error(a4.radiate_petals, w, 150, -1, 45, sp)\n    introcs.assert_error(a4.radiate_petals, w, 150, 30, 45, str(sp))\n    introcs.assert_error(a4.radiate_petals, w, 150, 30, 45, -1)\n    \n    # Now draw\n    a4.radiate_petals(w, 150, 30, 45, sp)\n    \n    # Allow the user to look at the picture before continuing\n    input('Press [return]')\n    \n    print('Testing radiate_petals_helper')\n    # This is only ONE TEST. Feel free to change the values for different tests.\n    w.clear()\n    turt = Turtle(w)\n    turt.color = 'red'\n    \n    # First verify that the preconditions are enforced\n    # WE HAVE NOT ADDED ALL OF THEM. 
YOU MAY NEED TO ADD SOME\n    turt.drawmode = False\n    introcs.assert_error(a4.radiate_petals_helper, turt, 50, 10, 4, sp)\n    turt.drawmode = True\n    introcs.assert_error(a4.radiate_petals_helper, turt, '50', 10, 4, sp)\n    introcs.assert_error(a4.radiate_petals_helper, turt, -1, 10, 4, sp)\n    introcs.assert_error(a4.radiate_petals_helper, turt, 50, '10', 4, sp)\n    introcs.assert_error(a4.radiate_petals_helper, turt, 50, -1, 4, sp)\n    introcs.assert_error(a4.radiate_petals_helper, turt, 50, 10, 4, str(sp))\n    introcs.assert_error(a4.radiate_petals_helper, turt, 50, 10, 4, -1)\n    \n    # Store original values (These are all of the important ones)\n    oldcol = turt.color\n    oldvis = turt.visible\n    oldspd = turt.speed\n    oldmode = turt.drawmode\n    \n    # Now draw\n    a4.radiate_petals_helper(turt, 50, 10, 4, sp)\n    \n    # Verify value restored\n    introcs.assert_equals(oldcol, turt.color)\n    introcs.assert_equals(oldvis,turt.visible)\n    introcs.assert_equals(oldspd,turt.speed)\n    introcs.assert_equals(oldmode,turt.drawmode)\n    \n    # Allow the user to look at the picture before continuing\n    input('Press [return]')\n\n\n#################### TASK 4A: Sierpinski Triangle ####################\n\ndef test_triangle(w,sp):\n    \"\"\"\n    Tests the procedure triangle AND triangle_helper\n    \n    Unlike most test procedures, you will notice that this test procedure has \n    parameters. That is because we want all of our test procedures to share the\n    same drawing window and same drawing speed. These are set in the master \n    procedure test_all.\n    \n    Parameter w: The window to draw upon.\n    Precondition: w is an introcs Window object.\n    \n    Parameter sp: The drawing speed.\n    Precondition: sp is a valid drawing speed (int 0..10).\n    \"\"\"\n    # Fractals need a few tests\n    print('Testing triangle (depth 0)')\n    \n    # First verify that the preconditions are enforced\n    # WE HAVE NOT ADDED ALL OF THEM. YOU MAY NEED TO ADD SOME\n    introcs.assert_error(a4.triangle,'window', 300, 0, sp)\n    \n    # Now draw (three different depths)\n    a4.triangle(w, 300, 0, sp)\n    input('Press [return]')\n    \n    print('Testing triangle (depth 1)')\n    a4.triangle(w, 300, 1, sp)\n    input('Press [return]')\n    \n    print('Testing triangle (depth 3)')\n    a4.triangle(w, 300, 3, sp)\n    input('Press [return]')\n    \n    print('Testing triangle_helper')\n\n    # This is only ONE TEST. Feel free to change the values for different tests.\n    w.clear()\n    pen = Pen(w)\n    pen.fillcolor = 'magenta'\n    pen.edgecolor = 'black'\n    pen.speed = sp\n    \n    # First verify that the preconditions are enforced\n    # WE HAVE NOT ADDED ALL OF THEM. YOU MAY NEED TO ADD SOME\n    pen.solid = True\n    introcs.assert_error(a4.triangle_helper, pen, 0, 0, 243, 4)\n    pen.solid = False\n    \n    # Now draw\n    a4.triangle_helper(pen, 0, 0, 243, 4)\n    pen.flush()\n    input('Press [return]')\n\n\n#################### TASK 4B: Cantor Stool ####################\n\ndef test_cantor(w,sp):\n    \"\"\"\n    Tests the procedure cantor AND cantor_helper\n    \n    Unlike most test procedures, you will notice that this test procedure has \n    parameters. That is because we want all of our test procedures to share the\n    same drawing window and same drawing speed. These are set in the master \n    procedure test_all.\n    \n    Parameter w: The window to draw upon.\n    Precondition: w is an introcs Window object.\n    \n    Parameter sp: The drawing speed.\n    Precondition: sp is a valid drawing speed (int 0..10).\n    \"\"\"\n    # Fractals need a few tests\n    print('Testing cantor (depth 0)')\n    \n    # First verify that the preconditions are enforced\n    # WE HAVE NOT ADDED ALL OF THEM. 
YOU MAY NEED TO ADD SOME\n    introcs.assert_error(a4.cantor,'window', 300, 200, 0, sp)\n    \n    # Now draw (three different depths)\n    a4.cantor(w, 300, 200, 0, sp)\n    input('Press [return]')\n    \n    print('Testing cantor (depth 1)')\n    a4.cantor(w, 300, 200, 1, sp)\n    input('Press [return]')\n    \n    print('Testing cantor (depth 3)')\n    a4.cantor(w, 300, 200, 3, sp)\n    input('Press [return]')\n    \n    print('Testing cantor_helper')\n    # This is only ONE TEST. Feel free to change the values for different tests.\n    w.clear()\n    pen = Pen(w)\n    pen.fillcolor = 'red'\n    pen.edgecolor = 'red'\n    pen.speed = sp\n    \n    # First verify that the preconditions are enforced\n    # WE HAVE NOT ADDED ALL OF THEM. YOU MAY NEED TO ADD SOME\n    pen.solid = True\n    introcs.assert_error(a4.cantor_helper, pen, 0, 0, 243, 400, 4)\n    pen.solid = False\n    \n    # Now draw\n    a4.cantor_helper(pen, 0, 0, 243, 412, 4)\n    pen.flush()\n    input('Press [return]')\n\n\n#################### TASK 5: Minkowski Island ####################\n\ndef test_island(w,sp):\n    \"\"\"\n    Tests the procedure island AND island_helper\n    \n    Unlike most test procedures, you will notice that this test procedure has \n    parameters. That is because we want all of our test procedures to share the\n    same drawing window and same drawing speed. These are set in the master \n    procedure test_all.\n    \n    Parameter w: The window to draw upon.\n    Precondition: w is an introcs Window object.\n    \n    Parameter sp: The drawing speed.\n    Precondition: sp is a valid drawing speed (int 0..10).\n    \"\"\"\n    # Fractals need a few tests\n    print('Testing island (depth 0)')\n    \n    # First verify that the preconditions are enforced\n    # WE HAVE NOT ADDED ALL OF THEM. YOU MAY NEED TO ADD SOME\n    introcs.assert_error(a4.island,'window', 300, 0, sp)\n    \n    # Now draw (three different depths)\n    a4.island(w, 300, 0, sp)\n    input('Press [return]')\n    \n    print('Testing island (depth 1)')\n    a4.island(w, 300, 1, sp)\n    input('Press [return]')\n    \n    print('Testing island (depth 3)')\n    a4.island(w, 300, 3, sp)\n    input('Press [return]')\n    \n    print('Testing island_edge')\n    # This is only ONE TEST. Feel free to change the values for different tests.\n    w.clear()\n    turt = Turtle(w)\n    turt.move(-250,0)\n    turt.color = 'sea green'\n    turt.speed = sp\n    \n    # First verify that the preconditions are enforced\n    # WE HAVE NOT ADDED ALL OF THEM. YOU MAY NEED TO ADD SOME\n    turt.drawmode = False\n    introcs.assert_error(a4.island_edge, turt, 500, 4)\n    turt.drawmode = True\n    \n    # Store original values (These are all of the important ones)\n    oldang = turt.heading % 360\n    oldcol = turt.color\n    oldvis = turt.visible\n    oldspd = turt.speed\n    oldmode = turt.drawmode\n    \n    # Now draw\n    a4.island_edge(turt, 500, 4)\n    turt.flush()\n    \n    # Verify value restored\n    introcs.assert_floats_equal(oldang,turt.heading % 360) # Okay if 360 was added\n    introcs.assert_equals(oldcol, turt.color)\n    introcs.assert_equals(oldvis,turt.visible)\n    introcs.assert_equals(oldspd,turt.speed)\n    introcs.assert_equals(oldmode,turt.drawmode)\n    \n    # Allow the user to look at the picture before continuing\n    input('Press [return]')\n\n\n#################### Main Test Procedure ####################\n\ndef get_speed():\n    \"\"\"\n    Returns the answer to a prompt about the speed.\n    \n    If the answer is invalid, it returns the value 10\n    \"\"\"\n    ans = input('Enter the drawing speed [0..10]: ')\n    try:\n        return int(ans.strip())\n    except:\n        print('Answer '+repr(ans)+' is invalid. 
Using speed 10.')\n        return 10\n\n\ndef test_all():\n    \"\"\"\n    Tests all of the drawing functions in a4.\n    \n    This is the master test procedure. It creates a drawing window and sets the \n    drawing speed for all of the tests.\n    \n    If you want to disable a test (because the turtle is not very fast, and so you \n    do not want to keep drawing the same things), comment it out in the code below.\n    \"\"\"\n    print('Testing module a4')\n    w = Window()\n    sp = get_speed()\n    \n    # Test procedures. Comment out a test to skip it.\n    test_draw_two_lines(w,sp)\n    \n    test_draw_triangle(w,sp)\n    test_draw_hex(w,sp)\n    \n    test_draw_spiral(w,sp)\n    test_multi_polygons(w,sp)\n    test_radiate_petals(w,sp)\n    \n    test_triangle(w,sp)\n    test_cantor(w,sp)\n    \n    test_island(w,sp)\n    \n    print('Testing complete')\n\n\nif __name__ == '__main__':\n    test_all()", "id": "6214504", "language": "Python", "matching_score": 2.0779507160186768, "max_stars_count": 1, "path": "Turtle/a4test.py" }, { "content": "\"\"\"\nTest script for Assignment 6.\n\nYou cannot even start to process images until the class Image is complete. Many of the \ntest procedures will make sure that this class is working properly.\n\nThis script also contains test procedures for Filter and Encoder. However, these\ntest procedures are sparse and are not guaranteed to find everything.\n\nAuthor: <NAME> (wmw2)\nDate: October 29, 2019\n\"\"\"\nimport introcs\nimport a6image\nimport a6filter\nimport traceback\n\n# Helper to read the test images\n\ndef load_image(file):\n    \"\"\"\n    Returns an Image object for the given file in the tests folder.\n    \n    If it cannot read the image (either Image is not defined or the file \n    is not an image file), this method returns None.\n    \n    Parameter file: The image file (without the png suffix)\n    Precondition: file is a string\n    \"\"\"\n    import os.path\n    from PIL import Image as CoreImage\n    path = os.path.split(__file__)[0]\n    path = os.path.join(path,'tests',file+'.png')\n    \n    try:\n        image = CoreImage.open(path)\n        image = image.convert(\"RGB\")\n        buffer = list(image.getdata())\n        size = image.size[0]*image.size[1]\n        width = image.size[0]\n    except:\n        traceback.print_exc()\n        print('Could not load the file '+path)\n        buffer = None\n    \n    result = None\n    if buffer is not None:\n        try:\n            result = a6image.Image(buffer,width)\n        except:\n            traceback.print_exc()\n            result = None\n    return result\n\n\ndef load_text(file):\n    \"\"\"\n    Returns a text string for the given file in the tests folder.\n    \n    If it cannot read the text, this method returns None.\n    \n    Parameter file: The text file (without the txt suffix)\n    Precondition: file is a string\n    \"\"\"\n    import os.path\n    from PIL import Image as CoreImage\n    path = os.path.split(__file__)[0]\n    path = os.path.join(path,'tests',file+'.txt')\n    \n    try:\n        data = open(path)\n        result = data.read()\n        data.close()\n    except:\n        traceback.print_exc()\n        print('Could not load the text file')\n        result = None\n    \n    return result\n\n\n# Test functions\ndef test_pixel_list():\n    \"\"\"\n    Tests the precondition helper _is_pixel_list\n    \"\"\"\n    print('Testing helper _is_pixel_list')\n    introcs.assert_false(a6image._is_pixel_list('a'))\n    introcs.assert_false(a6image._is_pixel_list((0,244,255)))\n    introcs.assert_false(a6image._is_pixel_list(['a']))\n    introcs.assert_true(a6image._is_pixel_list([(0,244,255)]))\n    introcs.assert_false(a6image._is_pixel_list([[(0,244,255)]]))\n    introcs.assert_false(a6image._is_pixel_list([(304,244,255)]))\n    introcs.assert_true(a6image._is_pixel_list([(0,244,255),(100,64,255),(50,3,250)]))\n    
introcs.assert_false(a6image._is_pixel_list([(0,244,255),(100,'64',255),(50,3,250)]))\n introcs.assert_false(a6image._is_pixel_list([(0,244,255),(100,-64,255),(50,3,250)]))\n print(\"tests complete\")\n\n\ndef test_image_init():\n \"\"\"\n Tests the __init__ method and getters for class Image\n \"\"\"\n print('Testing image initializer')\n p = [(0,0,0)]*6\n \n image = a6image.Image(p,3)\n # Normally it is bad to test things that are hidden\n # But without this you will not find the error until test_image_operators\n introcs.assert_equals(id(p),id(image._data))\n introcs.assert_not_equals(id(p),id(image.getData()))\n introcs.assert_equals(p,image.getData())\n introcs.assert_equals(3,image.getWidth())\n introcs.assert_equals(2,image.getHeight())\n\n image = a6image.Image(p,2)\n introcs.assert_equals(id(p),id(image._data))\n introcs.assert_not_equals(id(p),id(image.getData()))\n introcs.assert_equals(p,image.getData())\n introcs.assert_equals(2,image.getWidth())\n introcs.assert_equals(3,image.getHeight())\n\n image = a6image.Image(p,1)\n introcs.assert_equals(id(p),id(image._data))\n introcs.assert_not_equals(id(p),id(image.getData()))\n introcs.assert_equals(p,image.getData())\n introcs.assert_equals(1,image.getWidth())\n introcs.assert_equals(6,image.getHeight())\n \n # Test enforcement\n introcs.assert_error(a6image.Image,'aaa',3,message='Image does not enforce the precondition on data')\n introcs.assert_error(a6image.Image,p,'a', message='Image does not enforce the precondition width type')\n introcs.assert_error(a6image.Image,p,5, message='Image does not enforce the precondition width validity')\n\n\ndef test_image_setters():\n \"\"\"\n Tests the width and height setters for class Image\n \"\"\"\n print('Testing image setters for width/height')\n p = [(0,0,0)]*6\n \n image = a6image.Image(p,3)\n introcs.assert_equals(3,image.getWidth())\n introcs.assert_equals(2,image.getHeight())\n \n image.setWidth(2)\n introcs.assert_equals(2,image.getWidth())\n introcs.assert_equals(3,image.getHeight())\n \n image.setHeight(1)\n introcs.assert_equals(6,image.getWidth())\n introcs.assert_equals(1,image.getHeight())\n \n image.setWidth(1)\n introcs.assert_equals(1,image.getWidth())\n introcs.assert_equals(6,image.getHeight())\n \n # Test enforcement\n introcs.assert_error(image.setWidth,'a', message='setWidth does not enforce the precondition on width type')\n introcs.assert_error(image.setWidth,5, message='setWidth does not enforce the precondition on width validity')\n introcs.assert_error(image.setHeight,'a',message='setHeight does not enforce the precondition on height type')\n introcs.assert_error(image.setHeight,5, message='setHeight does not enforce the precondition on height validity')\n\n\ndef test_image_operators():\n \"\"\"\n Tests the double-underscore methods for 1-d access in class Image.\n \"\"\"\n print('Testing image operators for 1-dimensional access')\n p = [(0,0,0)]*4\n \n image = a6image.Image(p,2)\n introcs.assert_equals(4,len(image))\n\n p = [(255,0,0),(0,255,0),(0,0,255),(0,255,255),(255,0,255),(255,255,0)]\n rgb1 = (255,255,255)\n rgb2 = (64,128,192)\n \n image = a6image.Image(p,3)\n introcs.assert_equals(6,len(image))\n for n in range(6):\n introcs.assert_equals(p[n],image[n])\n introcs.assert_equals(id(p[n]),id(image[n]))\n \n image[4] = rgb1\n introcs.assert_equals(rgb1,image[4])\n image[4] = rgb2\n introcs.assert_equals(rgb2,image[4])\n introcs.assert_equals(rgb2,p[4]) # Because image has a reference to p\n \n introcs.assert_error(image.__getitem__,'a', 
message='__getitem__ does not enforce the precondition on type')\n introcs.assert_error(image.__getitem__,9, message='__getitem__ does not enforce the precondition on range')\n introcs.assert_error(image.__setitem__,'a',(0,0,255), message='__setitem__ does not enforce the precondition on type')\n introcs.assert_error(image.__setitem__,9,(0,0,255), message='__setitem__ does not enforce the precondition on range')\n introcs.assert_error(image.__setitem__,9,(0,0,'255'), message='__setitem__ does not enforce the precondition on pixel value')\n \n\ndef test_image_access():\n \"\"\"\n Tests the methods the two-dimensional get/setPixel methods in class Image\n \"\"\"\n print('Testing image get/setPixel methods')\n p = [(255,0,0),(0,255,0),(0,0,255),(0,255,255),(255,0,255),(255,255,0)]\n rgb1 = (255,255,255)\n rgb2 = (64,128,192)\n \n image = a6image.Image(p,2)\n for n in range(6):\n introcs.assert_equals(p[n],image.getPixel(n // 2, n % 2))\n introcs.assert_equals(id(p[n]),id(image.getPixel(n // 2, n % 2)))\n \n image.setPixel(2,1,rgb1)\n introcs.assert_equals(rgb1,image.getPixel(2,1))\n \n image.setPixel(2,1,rgb2)\n introcs.assert_equals(rgb2,image.getPixel(2,1))\n \n # Test enforcement\n introcs.assert_error(image.getPixel, 'a', 1, message='getPixel does not enforce the precondition on row type')\n introcs.assert_error(image.getPixel, 8, 1, message='getPixel does not enforce the precondition on row value')\n introcs.assert_error(image.getPixel, 2, 'a', message='getPixel does not enforce the precondition on col value')\n introcs.assert_error(image.getPixel, 2, 8, message='getPixel does not enforce the precondition on col value')\n introcs.assert_error(image.setPixel, 'a', 1, (0,0,255), message='setPixel does not enforce the precondition on row type')\n introcs.assert_error(image.setPixel, 8, 1, (0,0,255), message='setPixel does not enforce the precondition on row value')\n introcs.assert_error(image.setPixel, 2, 'a', (0,0,255), message='setPixel does not enforce the precondition on col value')\n introcs.assert_error(image.setPixel, 2, 8, (0,0,255), message='setPixel does not enforce the precondition on col value')\n introcs.assert_error(image.setPixel, 2, 1, (0,0,'255'), message='setPixel does not enforce the precondition on pixel value')\n\n\ndef test_image_str():\n \"\"\"\n Tests the __str__ method in class Image\n \"\"\"\n print('Testing image __str__ method')\n p = [(255, 64, 0),(0, 255, 64),(64, 0, 255),(64, 255, 128),(128, 64, 255),(255, 128, 64)]\n \n str0 = '[['+str(p[0])+', '+str(p[1])+'],\\n['+str(p[2])+', '+str(p[3])+']]'\n str1 = '[['+str(p[0])+', '+str(p[1])+'],\\n['+str(p[2])+', '+str(p[3])+'],\\n['+str(p[4])+', '+str(p[5])+']]'\n str2 = '[['+str(p[0])+', '+str(p[1])+', '+str(p[2])+'],\\n['+str(p[3])+', '+str(p[4])+', '+str(p[5])+']]'\n str3 = '[['+str(p[0])+', '+str(p[1])+', '+str(p[2])+', '+str(p[3])+', '+str(p[4])+', '+str(p[5])+']]'\n str4 = '[['+str(p[0])+'],\\n['+str(p[1])+'],\\n['+str(p[2])+'],\\n['+str(p[3])+'],\\n['+str(p[4])+'],\\n['+str(p[5])+']]'\n \n image = a6image.Image(p[:4],2)\n introcs.assert_equals(str0,str(image))\n \n image = a6image.Image(p,2)\n introcs.assert_equals(str1,str(image))\n image.setWidth(3)\n introcs.assert_equals(str2,str(image))\n image.setWidth(6)\n introcs.assert_equals(str3,str(image))\n image.setWidth(1)\n introcs.assert_equals(str4,str(image))\n\n\ndef test_image_other():\n \"\"\"\n Tests the copy and swapPixel methods in class Image\n \"\"\"\n print('Testing image extra methods')\n p = [(255, 64, 0),(0, 255, 64),(64, 0, 255),(64, 255, 
128),(128, 64, 255),(255, 128, 64)]\n    q = p[:] # Need to copy this\n    \n    # Test the copy\n    image = a6image.Image(p,2)\n    copy = image.copy()\n    introcs.assert_equals(len(image),len(copy))\n    introcs.assert_equals(image.getWidth(),copy.getWidth())\n    introcs.assert_not_equals(id(image), id(copy))\n    introcs.assert_not_equals(id(image._data), id(copy._data))\n    for pos in range(len(copy)):\n        introcs.assert_equals(image[pos],copy[pos])\n    \n    # Test swap pixels\n    image.swapPixels(0,0,2,1)\n    introcs.assert_equals(q[5],image.getPixel(0,0))\n    introcs.assert_equals(q[0],image.getPixel(2,1))\n    image.swapPixels(0,0,2,1)\n    introcs.assert_equals(q[0],image.getPixel(0,0))\n    introcs.assert_equals(q[5],image.getPixel(2,1))\n    image.swapPixels(0,1,2,0)\n    introcs.assert_equals(q[4],image.getPixel(0,1))\n    introcs.assert_equals(q[1],image.getPixel(2,0))\n    image.swapPixels(0,1,2,0)\n    introcs.assert_equals(q[1],image.getPixel(0,1))\n    introcs.assert_equals(q[4],image.getPixel(2,0))\n    image.swapPixels(0,0,0,0)\n    introcs.assert_equals(q[0],image.getPixel(0,0))\n    \n    # Test enforcement\n    introcs.assert_error(image.swapPixels, 'a', 1, 0, 0, message='swapPixels does not enforce the precondition on row type')\n    introcs.assert_error(image.swapPixels, 8, 1, 0, 0, message='swapPixels does not enforce the precondition on row value')\n    introcs.assert_error(image.swapPixels, 0, 1, 'a', 0, message='swapPixels does not enforce the precondition on row type')\n    introcs.assert_error(image.swapPixels, 0, 1, 8, 0, message='swapPixels does not enforce the precondition on row value')\n    introcs.assert_error(image.swapPixels, 0, 'a', 0, 0, message='swapPixels does not enforce the precondition on column type')\n    introcs.assert_error(image.swapPixels, 0, 8, 0, 0, message='swapPixels does not enforce the precondition on column value')\n    introcs.assert_error(image.swapPixels, 0, 1, 0, 'a', message='swapPixels does not enforce the precondition on column type')\n    introcs.assert_error(image.swapPixels, 0, 1, 0, 8, message='swapPixels does not enforce the precondition on column value')\n\n## All of these tests have a familiar form\n\ndef compare_images(image1,image2,file1,file2):\n    \"\"\"\n    Compares image1 and image2 via assert functions.\n    \n    If the images are the same, nothing happens. Otherwise this function \n    produces an error and quits python. 
We provide the file names to give\n    us proper error messages\n    \n    Parameter image1: The first image to compare\n    Precondition: image1 is an Image object\n    \n    Parameter image2: The second image to compare\n    Precondition: image2 is an Image object\n    \n    Parameter file1: The file name of the first image\n    Precondition: file1 is a string\n    \n    Parameter file2: The file name of the second image\n    Precondition: file2 is a string\n    \"\"\"\n    introcs.assert_equals(len(image2),len(image1),\n                          file1+' and '+file2+' do not have the same pixel size')\n    introcs.assert_equals(image2.getWidth(),image1.getWidth(),\n                          file1+' and '+file2+' do not have the same width')\n    introcs.assert_equals(image2.getHeight(),image1.getHeight(),\n                          file1+' and '+file2+' do not have the same height')\n    \n    for col in range(image2.getWidth()):\n        for row in range(image2.getHeight()):\n            introcs.assert_equals(image2.getPixel(row,col),image1.getPixel(row,col),\n                                  'Pixel mismatch between '+file1+' and '+file2+\n                                  ' at ('+str(col)+','+str(row)+')')\n\n\ndef test_reflect_vert():\n    \"\"\"\n    Tests the method reflectVert in class Filter\n    \"\"\"\n    print('Testing method reflectVert')\n\n    file1 = 'blocks'\n    file2 = 'blocks-reflect-vertical'\n    image1 = load_image(file1)\n    image2 = load_image(file2)\n    editor = a6filter.Filter(image1)\n    \n    editor.reflectVert()\n    compare_images(editor.getCurrent(),image2,file1,file2)\n    \n    file1 = 'home'\n    file2 = 'home-reflect-vertical'\n    image1 = load_image(file1)\n    image2 = load_image(file2)\n    editor = a6filter.Filter(image1)\n    \n    editor.reflectVert()\n    compare_images(editor.getCurrent(),image2,file1,file2)\n\n\ndef test_monochromify():\n    \"\"\"\n    Tests the method monochromify in class Filter\n    \"\"\"\n    print('Testing method monochromify (greyscale)')\n    \n    file1 = 'blocks'\n    file2 = 'blocks-grey'\n    image1 = load_image(file1)\n    image2 = load_image(file2)\n    editor = a6filter.Filter(image1)\n    \n    editor.monochromify(False)\n    compare_images(editor.getCurrent(),image2,file1,file2)\n    \n    file1 = 'home'\n    file2 = 'home-grey'\n    image1 = load_image(file1)\n    image2 = load_image(file2)\n    editor = a6filter.Filter(image1)\n    \n    editor.monochromify(False)\n    compare_images(editor.getCurrent(),image2,file1,file2)\n    \n    print('Testing method monochromify (sepia)')\n    \n    file1 = 'blocks'\n    file2 = 'blocks-sepia'\n    image1 = load_image(file1)\n    image2 = load_image(file2)\n    editor = a6filter.Filter(image1)\n    \n    editor.monochromify(True)\n    compare_images(editor.getCurrent(),image2,file1,file2)\n    \n    file1 = 'home'\n    file2 = 'home-sepia'\n    image1 = load_image(file1)\n    image2 = load_image(file2)\n    editor = a6filter.Filter(image1)\n    \n    editor.monochromify(True)\n    compare_images(editor.getCurrent(),image2,file1,file2)\n\n\ndef test_jail():\n    \"\"\"\n    Tests the method jail in class Filter\n    \"\"\"\n    print('Testing method jail')\n    \n    file1 = 'blocks'\n    file2 = 'blocks-jail'\n    image1 = load_image(file1)\n    image2 = load_image(file2)\n    editor = a6filter.Filter(image1)\n    \n    editor.jail()\n    compare_images(editor.getCurrent(),image2,file1,file2)\n    \n    file1 = 'home'\n    file2 = 'home-jail'\n    image1 = load_image(file1)\n    image2 = load_image(file2)\n    editor = a6filter.Filter(image1)\n    \n    editor.jail()\n    compare_images(editor.getCurrent(),image2,file1,file2)\n\n\ndef test_vignette():\n    \"\"\"\n    Tests the method vignette in class Filter\n    \"\"\"\n    print('Testing method vignette')\n    \n    file1 = 'blocks'\n    file2 = 'blocks-vignette'\n    image1 = load_image(file1)\n    image2 = load_image(file2)\n    editor = a6filter.Filter(image1)\n    \n    editor.vignette()\n    
compare_images(editor.getCurrent(),image2,file1,file2)\n    \n    file1 = 'home'\n    file2 = 'home-vignette'\n    image1 = load_image(file1)\n    image2 = load_image(file2)\n    editor = a6filter.Filter(image1)\n    \n    editor.vignette()\n    compare_images(editor.getCurrent(),image2,file1,file2)\n\n\ndef test_pixellate():\n    \"\"\"\n    Tests the method pixellate in class Filter\n    \"\"\"\n    print('Testing method pixellate')\n    \n    file1 = 'blocks'\n    file2 = 'blocks-pixellate-10'\n    image1 = load_image(file1)\n    image2 = load_image(file2)\n    editor = a6filter.Filter(image1)\n    \n    editor.pixellate(10)\n    compare_images(editor.getCurrent(),image2,file1,file2)\n    \n    file2 = 'blocks-pixellate-20'\n    image1 = load_image(file1)\n    image2 = load_image(file2)\n    editor = a6filter.Filter(image1)\n    \n    editor.pixellate(20)\n    compare_images(editor.getCurrent(),image2,file1,file2)\n    \n    file2 = 'blocks-pixellate-50'\n    image1 = load_image(file1)\n    image2 = load_image(file2)\n    editor = a6filter.Filter(image1)\n    \n    editor.pixellate(50)\n    compare_images(editor.getCurrent(),image2,file1,file2)\n    \n    file1 = 'home'\n    file2 = 'home-pixellate-10'\n    image1 = load_image(file1)\n    image2 = load_image(file2)\n    editor = a6filter.Filter(image1)\n    \n    editor.pixellate(10)\n    compare_images(editor.getCurrent(),image2,file1,file2)\n    \n    file2 = 'home-pixellate-20'\n    image1 = load_image(file1)\n    image2 = load_image(file2)\n    editor = a6filter.Filter(image1)\n    \n    editor.pixellate(20)\n    compare_images(editor.getCurrent(),image2,file1,file2)\n    \n    file2 = 'home-pixellate-50'\n    image1 = load_image(file1)\n    image2 = load_image(file2)\n    editor = a6filter.Filter(image1)\n    \n    editor.pixellate(50)\n    compare_images(editor.getCurrent(),image2,file1,file2)\n\n\ndef test_all():\n    \"\"\"\n    Execute all of the test cases.\n    \n    This function is called by __main__.py\n    \"\"\"\n    test_pixel_list()\n    print()\n    \n    print('Testing class Image')\n    test_image_init()\n    test_image_setters()\n    test_image_operators()\n    test_image_access()\n    test_image_str()\n    test_image_other()\n    print('Class Image passed all tests.')\n    print()\n    \n    print('Testing class Filter')\n    test_reflect_vert()\n    test_monochromify()\n    test_jail()\n    test_vignette()\n    test_pixellate()\n    print('Class Filter passed all tests.')\n", "id": "1075531", "language": "Python", "matching_score": 3.3404736518859863, "max_stars_count": 1, "path": "RGB Image/imager/a6test.py" }, { "content": "\"\"\"\nThe main class for our imager application.\n\nThis module contains a single class. Instances of this class support an image that can\nbe modified. 
This is the main class needed to display images in the viewer.\n\nBased on an original file by <NAME> (dck10) and <NAME> (wmw2)\n\n<NAME> asw263\nNovember 16 2020\n\"\"\"\nfrom copy import deepcopy\nfrom copy import copy\n\ndef _is_pixel(item):\n    \"\"\"\n    Returns True if item is a pixel, False otherwise.\n\n    A pixel is a tuple of 3 ints in the range 0..255\n\n    Parameter item: The item to check\n    Precondition: NONE (item can be anything)\n    \"\"\"\n    if type(item) != tuple or len(item) != 3:\n        return False\n\n    for ii in range(3):\n        if type(item[ii]) != int or item[ii] < 0 or item[ii] > 255:\n            return False\n\n    return True\n\n\n# TASK 0: IMPLEMENT THIS HELPER\ndef _is_pixel_list(data):\n    \"\"\"\n    Returns True if data is a pixel list, False otherwise.\n\n    A pixel list is a 1-dimensional list of pixels where a pixel is a tuple\n    of 3 ints in the range 0..255\n\n    Parameter data: The data to check\n    Precondition: NONE (data can be anything)\n    \"\"\"\n    if type(data) != list:\n        return False\n\n    for j in range(len(data)):\n        if _is_pixel(data[j]) == False:\n            return False\n\n    return True\n\n\n# TASK 1: IMPLEMENT THIS CLASS\nclass Image(object):\n    \"\"\"\n    A class that allows flexible access to an image pixel list\n\n    One of the things that we will see in this assignment is that sometimes\n    you want to treat an image as a flat 1D list and other times you want to\n    treat it as a 2D list. This class has methods that allow you to go back\n    and forth between the two.\n\n    If you want to treat the image like a 2D list, you use the methods\n    `getPixel` and `setPixel`. As with the Pixels class, pixels are represented\n    as 3-element tuples, with each element in the range 0..255. For example,\n    red is (255,0,0). These methods are used by many of the Instagram-style\n    filter functions.\n\n    If you want to treat the image like a 1D list you just use list brackets\n    like it was a normal list:\n\n        image[pos] = (255,0,0)\n\n    The methods `__getitem__` and `__setitem__` provide operator overloading for [].\n    So the call above is the same as the method call\n\n        image.__setitem__(pos, (255,0,0))\n\n    These operations are used by the greyscale filters in particular.\n    \"\"\"\n    # IMMUTABLE ATTRIBUTES (Fixed after initialization)\n    # Attribute _data: The underlying list of pixels\n    # Invariant: _data is a non-empty pixel list (see _is_pixel_list)\n    #\n    # MUTABLE ATTRIBUTES (Can be changed at any time, via the setters)\n    # Attribute _width: The image width, which is the number of columns\n    # Invariant: _width is an int > 0, _width*_height = len(_data)\n    #\n    # Attribute _height: The image height, which is the number of rows\n    # Invariant: _height is an int > 0, _width*_height = len(_data)\n    #\n    # Note that if you change width, you must change height (to satisfy the invariant)\n\n    # PART A\n    # GETTERS AND SETTERS\n    def getData(self):\n        \"\"\"\n        Returns a COPY of the image data.\n\n        The image data is a 1-dimensional list of 3-element tuples. The list\n        returned by this method is a copy of the one managed by this object.\n        \"\"\"\n        listcopy = self._data.copy()\n        return listcopy\n\n    def getWidth(self):\n        \"\"\"\n        Returns the image width\n\n        A valid width is an int evenly dividing the number of pixels in the\n        image. The width cannot be 0.\n        \"\"\"\n        return self._width\n\n    def setWidth(self,value):\n        \"\"\"\n        Sets the image width to value, assuming it is valid.\n\n        If the width changes, then height must change so that we preserve\n        width*height == # of pixels. 
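The width/height invariant described above boils down to: a valid width is any positive divisor of the pixel count, and the height is then determined. A small standalone illustration (the helper name is ours, not part of a6image):

```python
def valid_widths(num_pixels):
    """Return every width that satisfies width * height == num_pixels."""
    return [w for w in range(1, num_pixels + 1) if num_pixels % w == 0]

assert valid_widths(10) == [1, 2, 5, 10]            # matches the docstring example
assert all(10 % w == 0 for w in valid_widths(10))   # each width pairs with an int height
```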
This can only happen if the value is valid.\n\n        The value is valid if it is an int and it evenly divides the number of\n        pixels in the image. If the pixel list has 10 pixels, a valid width is\n        1, 2, 5, or 10. The width cannot be 0.\n        \n        Parameter value: the new width value\n        Precondition: value is a valid width > 0\n        \"\"\"\n        assert type(value) == int and value > 0, repr(value) + \" is not a valid width\"\n        assert len(self._data) % value == 0, repr(value) + \" is not a valid width\"\n        self._width = value\n        self._height = len(self._data) // value   # keep width*height == len(_data)\n\n\n    def getHeight(self):\n        \"\"\"\n        Returns the image height\n\n        A valid height is an int evenly dividing the number of pixels in the\n        image. The height cannot be 0.\n        \"\"\"\n        return self._height\n\n    def setHeight(self,value):\n        \"\"\"\n        Sets the image height to value, assuming it is valid.\n\n        If the height changes, then width must change so that we preserve\n        width*height == # of pixels. This can only happen if the value is valid.\n\n        The value is valid if it is an int and it evenly divides the number of\n        pixels in the image. If the pixel list has 10 pixels, a valid height is\n        1, 2, 5, or 10. The height cannot be 0.\n\n        Parameter value: the new height value\n        Precondition: value is a valid height > 0\n        \"\"\"\n        assert type(value) == int and value > 0, repr(value) + \" is not a valid height\"\n        assert len(self._data) % value == 0, repr(value) + \" is not a valid height\"\n        self._height = value\n        self._width = len(self._data) // value    # keep width*height == len(_data)\n\n    # INITIALIZER\n    def __init__(self, data, width):\n        \"\"\"\n        Initializes an Image from the given pixel list.\n\n        A pixel list is a 1-dimensional list of pixels where a pixel is a\n        tuple of 3 ints in the range 0..255. The pixel list contains the\n        image data. You do not need to worry about loading an image file.\n        That happens elsewhere in the application (in code that you did not\n        write).\n\n        However, in order to be valid, the width must evenly divide the\n        number of pixels in the image. So if the pixel list has 10 pixels, a\n        valid width is 1, 2, 5, or 10.\n\n        The height is not given explicitly, but you must compute it from the\n        width and pixel list length.\n\n        This initializer stores a reference to the original image data; it\n        does not copy it. So changes to the image will change the data\n        parameter as well.\n\n        Parameter data: The image data as a pixel list\n        Precondition: data is a non-empty pixel list\n\n        Parameter width: The image width\n        Precondition: width is an int > 0 and evenly divides the length of data\n        \"\"\"\n        assert _is_pixel_list(data) and len(data) > 0, repr(data)+\" is not a non-empty pixel list\"\n        assert type(width) == int, repr(width) + \" is not a valid width\"\n        assert width>0 and len(data) % width==0, repr(width) + \" is not a valid width\"\n        self._data = data\n        self._width = width\n        self.setHeight(len(self._data) // self._width)\n\n    # PART B\n    # OPERATOR OVERLOADING\n    def __len__(self):\n        \"\"\"\n        Returns the number of pixels in this image\n\n        This special method supports the built-in len function.\n        \"\"\"\n        return len(self._data)\n\n    def __getitem__(self, pos):\n        \"\"\"\n        Returns the pixel at the given position.\n\n        This special method supports the [] operator for accessing pixels.\n        It is better than direct access because it enforces its precondition.\n\n        This method is used when you want to treat an image as a flat,\n        one-dimensional list rather than a 2-dimensional image. 
It is useful\n        for the steganography part of the assignment.\n\n        The value returned is a 3-element tuple (r,g,b).\n\n        Parameter pos: The position in the pixel list\n        Precondition: pos is an int and a valid position >= 0 in the pixel list.\n        \"\"\"\n        assert type(pos) == int and pos >= 0, repr(pos) + \" is not a valid position\"\n        assert pos < len(self._data), repr(pos) + \" is not a valid position\"\n        return self._data[pos]\n\n    def __setitem__(self, pos, pixel):\n        \"\"\"\n        Sets the pixel at the given position to the given value.\n\n        This special method supports the [] operator for accessing pixels.\n        It is better than direct access because it enforces its precondition.\n\n        Parameter pos: The position in the pixel list\n        Precondition: pos is an int and a valid position >= 0 in the pixel list.\n\n        Parameter pixel: The pixel value\n        Precondition: pixel is a 3-element tuple (r,g,b) of ints in 0..255\n        \"\"\"\n        assert type(pos) == int and pos >= 0, repr(pos) + \" is not a valid position\"\n        assert pos < len(self._data), repr(pos) + \" is not a valid position\"\n        assert _is_pixel(pixel) == True, repr(pixel) + \" is not a valid pixel\"\n        self._data[pos] = pixel\n\n    # PART C\n    # TWO-DIMENSIONAL ACCESS METHODS\n    def getPixel(self, row, col):\n        \"\"\"\n        Returns the pixel value at (row, col)\n\n        Remember that this way of accessing a pixel is essentially (y,x) since\n        height is the number of rows and width is the number of columns.\n\n        The value returned is a 3-element tuple (r,g,b).\n\n        Parameter row: The pixel row\n        Precondition: row is an int >= 0 and < height\n\n        Parameter col: The pixel column\n        Precondition: col is an int >= 0 and < width\n        \"\"\"\n        assert type(row) == int and (row >= 0 and row < self._height)\n        assert type(col) == int and (col >= 0 and col < self._width)\n        return self._data[(self._width*row)+col]\n\n    def setPixel(self, row, col, pixel):\n        \"\"\"\n        Sets the pixel value at (row, col) to pixel\n\n        Remember that this way of setting a pixel is essentially (y,x) since\n        height is the number of rows and width is the number of columns.\n\n        Parameter row: The pixel row\n        Precondition: row is an int >= 0 and < height\n\n        Parameter col: The pixel column\n        Precondition: col is an int >= 0 and < width\n\n        Parameter pixel: The pixel value\n        Precondition: pixel is a 3-element tuple (r,g,b) of ints in 0..255\n        \"\"\"\n        assert type(row) == int and (row >= 0 and row < self._height)\n        assert type(col) == int and (col >= 0 and col < self._width)\n        assert _is_pixel(pixel) == True, repr(pixel) + \" is not a valid pixel\"\n        self._data[(self._width*row)+col] = pixel\n\n    # PART D\n    def __str__(self):\n        \"\"\"\n        Returns the string representation of this image.\n\n        The string should be displayed as a 2D list of pixels in row-major\n        order. For example, suppose the image data is\n\n            [(255, 0, 0), (0, 255, 0), (0, 0, 255), (0, 0, 0), (128, 0, 0), (0, 128, 0)]\n\n        If the width (which is the number of columns) is two, the string\n        should be\n\n            '[[(255, 0, 0), (0, 255, 0)],\\n[(0, 0, 255), (0, 0, 0)],\\n[(128, 0, 0), (0, 128, 0)]]'\n\n        Note the newlines (\\n) after each row. That is because when you print this\n        string, it will look like this:\n\n            [[(255, 0, 0), (0, 255, 0)],\n            [(0, 0, 255), (0, 0, 0)],\n            [(128, 0, 0), (0, 128, 0)]]\n\n        This is useful for debugging, since it allows us to see each row of the\n        image on its own line.\n\n        There should be spaces after the commas but nowhere else. 
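getPixel and setPixel above flatten (row, col) into the 1D pixel list with pos = width*row + col; the inverse mapping is divmod. A standalone check of that round trip (illustrative only, not part of the class):

```python
width, height = 3, 2
for row in range(height):
    for col in range(width):
        pos = width * row + col                    # 2D -> 1D, as in getPixel/setPixel
        assert divmod(pos, width) == (row, col)    # 1D -> 2D round trip
```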
Tuples\n        (the individual pixels) handle this part for you automatically, but you\n        need to handle the commas between pixels and the newlines between rows.\n        \"\"\"\n        twoDimList = \"\"\n        for i in range(len(self._data)):\n            if i == 0:\n                twoDimList += \"[[\"\n            if (i + 1) % self._width == 0 and i + 1 != len(self._data):\n                twoDimList = twoDimList + str(self._data[i]) +\"],\\n[\"\n            elif i + 1 == len(self._data):\n                twoDimList = twoDimList + str(self._data[i]) + \"]]\"\n            else:\n                twoDimList = twoDimList + str(self._data[i]) +\", \"\n        return twoDimList \n\n    # ADDITIONAL METHODS\n    def swapPixels(self, row1, col1, row2, col2):\n        \"\"\"\n        Swaps the pixel at (row1, col1) with the pixel at (row2, col2)\n\n        Parameter row1: The pixel row to swap from\n        Precondition: row1 is an int >= 0 and < height\n\n        Parameter col1: The pixel column to swap from\n        Precondition: col1 is an int >= 0 and < width\n\n        Parameter row2: The pixel row to swap to\n        Precondition: row2 is an int >= 0 and < height\n\n        Parameter col2: The pixel column to swap to\n        Precondition: col2 is an int >= 0 and < width\n        \"\"\"\n        # NOTE: DO NOT enforce any preconditions here.\n        # They should be enforced already in getPixel and setPixel.\n        newPixel = deepcopy(self.getPixel(row2, col2))\n        origPixel = deepcopy(self.getPixel(row1, col1))\n        self.setPixel(row1, col1, newPixel)\n        self.setPixel(row2, col2, origPixel)\n    \n    def copy(self):\n        \"\"\"\n        Returns a copy of this image object.\n\n        The underlying pixel data must be copied (e.g. the copy cannot refer\n        to the same list of pixels that this object does).\n        \"\"\"\n        return deepcopy(self)\n", "id": "8193608", "language": "Python", "matching_score": 2.843935966491699, "max_stars_count": 1, "path": "RGB Image/imager/a6image.py" }, { "content": "\"\"\"\nImage processing methods for the imager application.\n\nThis module provides all of the image processing operations that are called \nwhenever you press a button. Some of these are provided for you and others you\nare expected to write on your own.\n\nNote that this class is a subclass of Editor. This allows you to make use\nof the undo functionality. You do not have to do anything special to take \nadvantage of this. Just make sure you use getCurrent() to access the most \nrecent version of the image.\n\nBased on an original file by <NAME> (dck10) and <NAME> (wmw2)\n\n<NAME> asw263\nNovember 16 2020\n\"\"\"\nimport a6editor\nimport a6image\nimport math # Just in case\n\n\nclass Filter(a6editor.Editor):\n    \"\"\"\n    A class that contains a collection of image processing methods\n    \n    This class is a subclass of a6editor. That means it inherits all of the \n    methods and attributes of that class too. We do that (1) to put all of the \n    image processing methods in one easy to read place and (2) because we might \n    want to change how we implement the undo functionality later.\n    \n    This class is broken into three parts: (1) implemented non-hidden methods, \n    (2) non-implemented non-hidden methods and (3) hidden methods. The \n    non-hidden methods each correspond to a button press in the main \n    application. 
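Because copy() deep-copies the pixel list, edits to the copy never leak back into the original. A quick demonstration against the Image class above (a sketch; it assumes a6image is importable):

```python
from a6image import Image

p = [(1, 2, 3), (4, 5, 6), (7, 8, 9), (10, 11, 12)]
img = Image(p, 2)
dup = img.copy()
dup[0] = (9, 9, 9)                      # __setitem__ on the copy only
assert img[0] == (1, 2, 3)              # original pixel list is untouched
assert img.getData() is not img._data   # getData also returns a fresh list
```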
The hidden methods are all helper functions.\n    \n    Each one of the non-hidden functions should edit the most recent image \n    in the edit history (which is inherited from Editor).\n    \"\"\"\n    \n    # PROVIDED ACTIONS (STUDY THESE)\n    def invert(self):\n        \"\"\"\n        Inverts the current image, replacing each element with its color complement\n        \"\"\"\n        current = self.getCurrent()\n        for pos in range(len(current)): # We can do this because of __len__\n            rgb = current[pos] # We can do this because of __getitem__\n            red = 255 - rgb[0]\n            green = 255 - rgb[1]\n            blue = 255 - rgb[2]\n            rgb = (red,green,blue) # New pixel value\n            current[pos] = rgb # We can do this because of __setitem__\n    \n    def transpose(self):\n        \"\"\"\n        Transposes the current image\n        \n        Transposing is tricky, as it is hard to remember which values have been \n        changed and which have not. To simplify the process, we copy the \n        current image and use that as a reference. So we change the current \n        image with setPixel, but read (with getPixel) from the copy.\n        \"\"\"\n        current = self.getCurrent()\n        original = current.copy()\n        current.setWidth(current.getHeight())\n        \n        for row in range(current.getHeight()): # Loop over the rows\n            for col in range(current.getWidth()): # Loop over the columns\n                current.setPixel(row,col,original.getPixel(col,row))\n    \n    def reflectHori(self):\n        \"\"\"\n        Reflects the current image around the horizontal middle.\n        \"\"\"\n        current = self.getCurrent()\n        for h in range(current.getWidth()//2): # Loop over the columns\n            for row in range(current.getHeight()): # Loop over the rows\n                k = current.getWidth()-1-h\n                current.swapPixels(row,h,row,k)\n    \n    def rotateRight(self):\n        \"\"\"\n        Rotates the current image right by 90 degrees.\n        \n        Technically, we can implement this via a transpose followed by a \n        horizontal reflection. However, this is slow, so we use the faster \n        strategy below.\n        \"\"\"\n        current = self.getCurrent()\n        original = current.copy()\n        current.setWidth(current.getHeight())\n        \n        for row in range(current.getHeight()): # Loop over the rows\n            for col in range(current.getWidth()): # Loop over the columns\n                current.setPixel(row,col,original.getPixel(original.getHeight()-col-1,row))\n    \n    def rotateLeft(self):\n        \"\"\"\n        Rotates the current image left by 90 degrees.\n        \n        Technically, we can implement this via a transpose followed by a \n        vertical reflection. However, this is slow, so we use the faster \n        strategy below.\n        \"\"\"\n        current = self.getCurrent()\n        original = current.copy()\n        current.setWidth(current.getHeight())\n        \n        for row in range(current.getHeight()): # Loop over the rows\n            for col in range(current.getWidth()): # Loop over the columns\n                current.setPixel(row,col,original.getPixel(col,original.getWidth()-row-1))\n    \n    # ASSIGNMENT METHODS (IMPLEMENT THESE)\n    def reflectVert(self):\n        \"\"\" \n        Reflects the current image around the vertical middle.\n        \"\"\"\n        current = self.getCurrent()\n        for h in range(current.getHeight()//2): # Loop over the rows\n            for col in range(current.getWidth()): # Loop over the columns\n                k = current.getHeight()-1-h\n                current.swapPixels(h,col,k,col)\n    \n    def monochromify(self, sepia):\n        \"\"\"\n        Converts the current image to monochrome (greyscale or sepia tone).\n        \n        If `sepia` is False, then this function uses greyscale. 
It removes all\n        color from the image by setting the three color components of each \n        pixel to that pixel's overall brightness, defined as \n        \n            brightness = 0.3 * red + 0.6 * green + 0.1 * blue.\n        \n        If sepia is True, it makes the same computations as before but sets \n        green to 0.6 * brightness and blue to 0.4 * brightness (red is same as\n        for greyscale).\n        \n        Parameter sepia: Whether to use sepia tone instead of greyscale.\n        Precondition: sepia is a bool\n        \"\"\"\n        assert type(sepia) == bool, repr(sepia) + \" is not a bool\"\n        current = self.getCurrent()\n\n        if sepia == False: #greyscale\n            for row in range(current.getHeight()): # Loop over the rows\n                for col in range(current.getWidth()): # Loop over the columns\n                    pixel = current.getPixel(row,col)\n                    red = pixel[0]\n                    green = pixel[1]\n                    blue = pixel[2]\n                    bness = int(0.3 * red + 0.6 * green + 0.1 * blue)\n                    current.setPixel(row,col,(bness,bness,bness))\n        else: #sepia\n            for row in range(current.getHeight()): # Loop over the rows\n                for col in range(current.getWidth()): # Loop over the columns\n                    pixel = current.getPixel(row,col)\n                    red = pixel[0]\n                    green = pixel[1]\n                    blue = pixel[2]\n                    bness = 0.3 * red + 0.6 * green + 0.1 * blue\n                    current.setPixel(row,col,(int(bness), int(0.6 * bness), int(0.4 *bness))) \n    \n    def jail(self):\n        \"\"\"\n        Puts jail bars on the current image\n        \n        The jail should be built as follows:\n        * Put 3-pixel-wide horizontal bars across top and bottom,\n        * Put 4-pixel vertical bars down left and right, and\n        * Put n 4-pixel vertical bars inside, where n is \n          (number of columns - 8) // 50.\n        \n        Note that the formula for the number of interior bars is explicitly\n        not counting the two bars on the outside.\n        \n        The n+2 vertical bars should be as evenly spaced as possible.\n        \"\"\"\n        current = self.getCurrent()\n\n        red = (255,0,0)\n\n        self._drawHBar(0,red)\n        self._drawHBar(current.getHeight()-3,red)\n        self._drawVBar(0,red)\n        self._drawVBar(current.getWidth()-4,red)\n        n = (current.getWidth() - 8) // 50\n        spacing = (current.getWidth()-(4*(n+2)))/(n+1)\n        for i in range(n): \n            self._drawVBar(int((4*(i+1))+(spacing*(i+1))),red)\n    \n    def vignette(self):\n        \"\"\"\n        Modifies the current image to simulate vignetting (corner darkening).\n        \n        Vignetting is a characteristic of antique lenses. This plus sepia tone \n        helps give a photo an antique feel.\n        \n        To vignette, darken each pixel in the image by the factor\n        \n            1 - (d / hfD)^2\n        \n        where d is the distance from the pixel to the center of the image and \n        hfD (for half diagonal) is the distance from the center of the image \n        to any of the corners. \n        \n        The values d and hfD should be left as floats and not converted to ints.\n        Furthermore, when the final color value is calculated for each pixel,\n        the result should be converted to int, but not rounded.\n        \"\"\"\n        current = self.getCurrent()\n        for row in range(current.getHeight()): # Loop over the rows\n            for cl in range(current.getWidth()): # Loop over the columns\n                pixel = current.getPixel(row,cl)\n                d=math.sqrt(((row-(current.getHeight()/2))**2)\n                    +((cl-(current.getWidth()/2))**2))\n                hfD=math.sqrt(((0-(current.getHeight()/2))**2)\n                    +((0-(current.getWidth()/2))**2))\n                darken = 1.0 - ((d/hfD)**2)\n                red = int(pixel[0]*darken)\n                green = int(pixel[1]*darken)\n                blue = int(pixel[2]*darken)\n                current.setPixel(row,cl,(red,green,blue))\n    \n    def pixellate(self,step):\n        \"\"\"\n        Pixellates the current image to give it a blocky feel.\n        \n        To pixellate an image, start with the top left corner (e.g. the first \n        row and column). 
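The greyscale/sepia and vignette computations above are both per-pixel formulas; pulled out as standalone helpers they look like this (a sketch mirroring the code, with our own function names):

```python
import math

def monochrome_pixel(rgb, sepia=False):
    """Apply brightness = 0.3*r + 0.6*g + 0.1*b, per the docstring above."""
    brightness = 0.3 * rgb[0] + 0.6 * rgb[1] + 0.1 * rgb[2]
    if sepia:
        return (int(brightness), int(0.6 * brightness), int(0.4 * brightness))
    return (int(brightness),) * 3

def vignette_factor(row, col, height, width):
    """Darkening factor 1 - (d/hfD)**2 for the pixel at (row, col)."""
    d = math.sqrt((row - height / 2) ** 2 + (col - width / 2) ** 2)
    hfD = math.sqrt((height / 2) ** 2 + (width / 2) ** 2)
    return 1.0 - (d / hfD) ** 2

assert monochrome_pixel((255, 255, 255)) == (255, 255, 255)
assert abs(vignette_factor(0, 0, 100, 200)) < 1e-9   # corners go fully dark
```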
Average the colors of the step x step block to the \n        right and down from this corner (if there are fewer than step rows or \n        step columns, go to the edge of the image). Then assign that average \n        to ALL of the pixels in that block.\n        \n        When you are done, skip over step rows and step columns to go to the \n        next corner pixel. Repeat this process again. The result will be a \n        pixellated image.\n        \n        When the final color value is calculated for each pixel, the result \n        should be converted to int, but not rounded.\n        \n        Parameter step: The number of pixels in a pixellated block\n        Precondition: step is an int > 0\n        \"\"\"\n        assert type(step) == int and step > 0, repr(step) + \" is not a valid step\"\n        current = self.getCurrent()\n        widthSteps = current.getWidth()//step\n        heightSteps = current.getHeight()//step\n        widthRem = current.getWidth()%step\n        heightRem = current.getHeight()%step\n        for i in range(heightSteps):\n            for j in range(widthSteps):\n                block = self._avging(i*step,j*step,step)\n                # fill in a block with dimensions step x step, starting at row + (i*step) and col + (j*step)\n                for row in range(step):\n                    for col in range(step):\n                        current.setPixel(row+(i*step), col+(j*step), block)\n        # Right-edge blocks: step rows x widthRem columns for each full row band\n        if widthRem != 0:\n            for i in range(heightSteps):\n                r = 0\n                g = 0\n                b = 0\n                for ro in range(step):\n                    for co in range(widthRem):\n                        pixel = current.getPixel(ro+(i*step),co+(widthSteps*step))\n                        r += pixel[0]\n                        g += pixel[1]\n                        b += pixel[2]\n                avgFactor = step*widthRem\n                blok = (int(r/avgFactor),int(g/avgFactor),int(b/avgFactor))\n                for k in range(step):\n                    for l in range(widthRem):\n                        current.setPixel(k+(i*step),l+(widthSteps*step), blok)\n        # Bottom-edge blocks: heightRem rows x step columns for each full column band\n        if heightRem != 0:\n            for z in range(widthSteps):\n                r = 0\n                g = 0\n                b = 0\n                for ro in range(heightRem):\n                    for co in range(step):\n                        pixel = current.getPixel(ro+(heightSteps*step),co+(z*step))\n                        r += pixel[0]\n                        g += pixel[1]\n                        b += pixel[2]\n                avgFactor = heightRem*step\n                blok = (int(r/avgFactor),int(g/avgFactor),int(b/avgFactor))\n                for k in range(heightRem):\n                    for l in range(step):\n                        current.setPixel(k+(heightSteps*step),l+(z*step), blok)\n        # Bottom-right corner block: heightRem rows x widthRem columns\n        if widthRem != 0 and heightRem != 0:\n            r = 0\n            g = 0\n            b = 0\n            for ro in range(heightRem):\n                for co in range(widthRem):\n                    pixel = current.getPixel(ro+(heightSteps*step),co+(widthSteps*step))\n                    r += pixel[0]\n                    g += pixel[1]\n                    b += pixel[2]\n            avgFactor = heightRem*widthRem\n            blok = (int(r/avgFactor),int(g/avgFactor),int(b/avgFactor))\n            for k in range(heightRem):\n                for l in range(widthRem):\n                    current.setPixel(k+(heightSteps*step),l+(widthSteps*step), blok)\n\n    # HELPER METHODS\n    def _drawHBar(self, row, pixel):\n        \"\"\"\n        Draws a horizontal bar on the current image at the given row.\n        \n        This method draws a horizontal 3-pixel-wide bar at the given row \n        of the current image. This means that the bar includes the pixels \n        row, row+1, and row+2. The bar uses the color given by the pixel \n        value.\n        \n        Parameter row: The start of the row to draw the bar\n        Precondition: row is an int, 0 <= row && row+2 < image height\n        \n        Parameter pixel: The pixel color to use\n        Precondition: pixel is a 3-element tuple (r,g,b) of ints in 0..255\n        \"\"\"\n        current = self.getCurrent()\n        assert type(row) == int, repr(row) + \" is not an int\"\n        assert (0<=row and row+2<current.getHeight()), repr(row)+\" is not a valid row\"\n        assert a6image._is_pixel(pixel) == True, repr(pixel) + \" is not a pixel\"\n\n        for col in range(current.getWidth()):\n            current.setPixel(row, col, pixel)\n            current.setPixel(row+1, col, pixel)\n            current.setPixel(row+2, col, pixel)\n\n    def _drawVBar(self, col, pixel):\n        \"\"\"\n        Draws a vertical bar on the current image at the given col.\n        \n        This method draws a vertical 4-pixel-wide bar at the given col \n        of the current image. This means that the bar includes the pixels \n        col, col+1, col+2, and col+3. 
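An equivalent, edge-safe way to express the pixellate loop is to clip each block at the image border with min(), which handles the remainder rows and columns and the bottom-right corner in a single pass. A reference sketch against the Image interface above (not the assignment's required structure):

```python
def pixellate_blocks(img, step):
    """Average every step x step block of img, clipping blocks at the edges."""
    height, width = img.getHeight(), img.getWidth()
    for top in range(0, height, step):
        for left in range(0, width, step):
            rows = range(top, min(top + step, height))
            cols = range(left, min(left + step, width))
            count = len(rows) * len(cols)
            r = sum(img.getPixel(i, j)[0] for i in rows for j in cols)
            g = sum(img.getPixel(i, j)[1] for i in rows for j in cols)
            b = sum(img.getPixel(i, j)[2] for i in rows for j in cols)
            avg = (int(r / count), int(g / count), int(b / count))
            for i in rows:                 # assign the average to the whole block
                for j in cols:
                    img.setPixel(i, j, avg)
```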
The bar uses the color given by the pixel \n        value.\n        \n        Parameter col: The start of the col to draw the bar\n        Precondition: col is an int, 0 <= col && col + 3 < image width\n        \n        Parameter pixel: The pixel color to use\n        Precondition: pixel is a 3-element tuple (r,g,b) of ints in 0..255\n        \"\"\"\n        current = self.getCurrent()\n        assert type(col) == int, repr(col) + \" is not an int\"\n        assert (0<=col and col+3<current.getWidth()), repr(col)+\" is not a valid col\"\n        assert a6image._is_pixel(pixel) == True, repr(pixel) + \" is not a pixel\"\n\n        for row in range(current.getHeight()):\n            current.setPixel(row, col, pixel)\n            current.setPixel(row, col+1, pixel)\n            current.setPixel(row, col+2, pixel)\n            current.setPixel(row, col+3, pixel)\n\n    def _avging(self, row, col, step):\n        \"\"\"\n        Returns a tuple of the average rgb values for a step x step block starting at coordinate (row, col)\n        \n        Parameter step: The number of pixels in a pixellated block\n        Precondition: step is an int > 0\n\n        Parameter col: The starting column\n        Precondition: col is an int, 0 <= col && col < image width\n\n        Parameter row: The starting row\n        Precondition: row is an int, 0 <= row && row < image height\n        \n        \"\"\"\n        current = self.getCurrent()\n        assert type(step) == int and step > 0, repr(step) + \" is not a valid step\"\n        assert type(col) == int and (0<=col and col<current.getWidth())\n        assert type(row) == int and (0<=row and row<current.getHeight())\n        r = 0\n        g = 0\n        b = 0\n        for i in range(step):\n            for j in range(step):\n                pixel = current.getPixel(row+i,col+j)\n                r += pixel[0]\n                g += pixel[1]\n                b += pixel[2]\n        avgFactor = step*step \n        return (int(r/avgFactor),int(g/avgFactor),int(b/avgFactor))", "id": "10794918", "language": "Python", "matching_score": 1.448360562324524, "max_stars_count": 1, "path": "RGB Image/imager/a6filter.py" }, { "content": "\"\"\"\nThe primary GUI interface for the imager filter application\n\nThe default application corresponds to the class InterfaceApp. This class is\nthe root controller for each of the View components defined in interface.kv. \nThe View (filter.kv) and this Controller module (filter.py) have the same name \nbecause they are so tightly interconnected.\n\nBased on an original file by <NAME> (dck10) and <NAME> (wmw2)\n\nAuthor: <NAME> (wmw2)\nDate: October 29, 2019\n\"\"\"\n# We have to configure the window before everything else\nfrom kivy.config import Config\n#Config.set('kivy', 'log_level', 'error')\nConfig.set('graphics', 'width', '1056')\nConfig.set('graphics', 'height', '557')\nConfig.set('graphics', 'resizable', '0') # make not resizable\n\nfrom kivy.clock import Clock, mainthread\nfrom kivy.uix.boxlayout import BoxLayout\nfrom kivy.properties import *\nfrom kivy.app import App\nfrom kivy.metrics import sp\n\nfrom widgets import *\nimport traceback\n\nclass InterfacePanel(BoxLayout):\n    \"\"\"\n    This class is a controller for the imager filter application.\n    \n    This controller manages all of the buttons and text fields of the \n    application. 
It supports all parts of the assignment, and may be used \\\n for additional (eyeball) testing beyond the provided test script\n \n The view for this application is defined the interface.kv file.\n \"\"\"\n # These fields are 'hooks' to connect to the .kv file\n # The source file for the initial image\n source = StringProperty(ImagePanel.getResource('im_walker.png'))\n # The Image object for the loaded file\n picture = ObjectProperty(None,allownone=True)\n # The workspace object for working on the file\n workspace = ObjectProperty(None,allownone=True)\n \n # The most recent file edit\n workimage = ObjectProperty(None,allownone=True)\n # The original image file (this will never change)\n origimage = ObjectProperty(None,allownone=True)\n \n # The menu bar\n menubar = ObjectProperty(None)\n # The progress monitor\n progress = ObjectProperty(None)\n \n # The file drop-down menu\n imagedrop = ObjectProperty(None)\n # The reflect drop-down menu\n axisdrop = ObjectProperty(None)\n # The monochromify drop-down menu\n greydrop = ObjectProperty(None)\n # The rotate drop-down menu\n turndrop = ObjectProperty(None)\n # The pixellate drop-down menu\n blockdrop = ObjectProperty(None)\n \n # For handling the \"progress\" monitor\n processing = BooleanProperty(False)\n \n def config(self):\n \"\"\"\n Configures the application at start-up.\n \n Controllers are responsible for initializing the application and creating all of \n the other objects. This method does just that. It loads the currently selected \n image file, and creates an editor for that file (if possible).\n \"\"\"\n # For working with pop-ups (Hidden since not .kv aware)\n self._popup = None\n self.place_image('',self.source)\n self.imagedrop = ImageDropDown(choices=['load','save','undo','reset'], \n save=[self.save_image], load=[self.load_image],\n undo=[self.undo], reset=[self.clear])\n self.axisdrop = AxisDropDown( choices=['horizontal','vertical'],\n horizontal=[self.do_async,'reflectHori'], \n vertical=[self.do_async,'reflectVert'])\n self.greydrop = GreyDropDown( choices=['greyscale','sepia'],\n greyscale=[self.do_async,'monochromify',False], \n sepia=[self.do_async,'monochromify',True])\n self.turndrop = TurnDropDown( choices=['left','right','transpose'],\n left= [self.do_async,'rotateLeft'],\n right=[self.do_async,'rotateRight'],\n transpose=[self.do_async,'transpose'])\n self.blockdrop = BlockDropDown(choices=['p10','p20','p50','p100', 'p200'],\n p10=[self.do_async,'pixellate',10],\n p20=[self.do_async,'pixellate',20],\n p50=[self.do_async,'pixellate',50],\n p100=[self.do_async,'pixellate',100],\n p200=[self.do_async,'pixellate',200])\n self.async_action = None\n self.async_thread = None\n \n # DIALOG BOXES\n def error(self, msg):\n \"\"\"\n Opens a dialog to report an error to the user\n \n The dialog will take up most of the Window, and last until the user \n dismisses it.\n \n Parameter msg: the error message\n Precondition: msg is a string\n \"\"\"\n assert type(msg) == str, repr(msg)+' is not a string'\n content = ErrorDialog(message=msg, okchoice=self.dismiss_popup)\n self._popup = Popup(title='Error', content=content, \n size_hint=(0.4, 0.4), \n pos_hint={'center_x':0.5, 'center_y':0.5})\n self._popup.open()\n \n def warn(self, msg, data=None, callback=None):\n \"\"\"\n Alerts the user of an issue when trying to load or save a file\n \n The dialog will take up most of the Window, and last until the user \n dismisses it.\n \n Parameter msg: the error message\n Precondition: msg is a string\n \n Parameter data: the problematic 
file\n Precondition: data is a string\n \n Parameter callback: The callback to invoke on ok\n Precondition: callback is callable\n \"\"\"\n if callback:\n content = WarningDialog(message=msg, payload=data, \n okchoice=callback, \n exitchoice=self.dismiss_popup)\n self._popup = Popup(title='Warning', content=content, \n size_hint=(0.4, 0.4), \n pos_hint={'center_x':0.5, 'center_y':0.5})\n elif data:\n print(data)\n content = ErrorDialog(message=msg, okchoice=self.dismiss_popup)\n self._popup = Popup(title='Warning', content=content, **data)\n else:\n content = ErrorDialog(message=msg, okchoice=self.dismiss_popup)\n self._popup = Popup(title='Warning', content=content, \n size_hint=(0.4, 0.4), \n pos_hint={'center_x':0.5, 'center_y':0.5})\n self._popup.open()\n \n def load(self,title,callback, filters=None):\n \"\"\"\n Opens a dialog to load a file.\n \n The dialog will take up most of the Window, and last until the user \n dismisses it.\n \n Parameter title: The title to display\n Precondition: title is a string\n \n Parameter callback: The callback to invoke on load\n Precondition: callback is callable\n \"\"\"\n content = LoadDialog(loadchoice=callback, exitchoice=self.dismiss_popup)\n if filters:\n content.filechooser.filters = filters\n self._popup = Popup(title=title, content=content,\n size_hint=(0.8,0.9), \n pos_hint={'center_x':0.5, 'center_y':0.5})\n self._popup.open()\n\n def save(self,title,callback,filters=None):\n \"\"\"\n Opens a dialog to save a file.\n \n The dialog will take up most of the Window, and last until the user \n dismisses it.\n \n Parameter title: The title to display\n Precondition: title is a string\n \n Parameter callback: The callback to invoke on save\n Precondition: callback is callable\n \"\"\"\n content = SaveDialog(savechoice=callback, exitchoice=self.dismiss_popup)\n if filters:\n content.filechooser.filters = filters\n self._popup = Popup(title=title, content=content,\n size_hint=(0.8,0.9), \n pos_hint={'center_x':0.5, 'center_y':0.5})\n self._popup.open()\n \n def dismiss_popup(self):\n \"\"\"\n Dismisses the currently active pop-up\n \"\"\"\n if self._popup:\n self._popup.dismiss()\n self._popup = None\n \n # FILE HANDLING\n def read_image(self, file):\n \"\"\"\n Returns an Image object for the give file.\n \n If it cannot read the image (either Image is not defined or the file \n is not an image file), this method returns None.\n \n Parameter file: An absolute path to an image file\n Precondition: file is a string\n \"\"\"\n import a6image\n from PIL import Image as CoreImage\n \n try:\n image = CoreImage.open(file)\n image = image.convert(\"RGB\")\n buffer = list(image.getdata())\n size = image.size[0]*image.size[1]\n width = image.size[0]\n except:\n traceback.print_exc()\n self.error('Could not load the image file')\n buffer = None\n \n result = None\n if not buffer is None:\n try:\n result = a6image.Image(buffer,width)\n except:\n traceback.print_exc()\n result = None\n return result\n \n def check_save_png(self, path, filename):\n \"\"\"\n Saves the current image to a file, checking that the format is PNG\n \n If user uses another extension, or no extension at all, this method \n forces the file to be a .png\n \n Parameter path: The base path to the file\n Precondition: path is a string\n \n Parameter filename: An absolute or relative filename\n Precondition: filename is a string\n \"\"\"\n import os.path\n self.dismiss_popup()\n \n if os.path.isabs(filename):\n file = filename\n else:\n file = os.path.join(path,filename)\n \n if 
file.lower().endswith('.png'):\n            self.save_png(file)\n        else:\n            file = os.path.splitext(file)[0]+'.png'\n            msg = 'File will be saved as {} in .png format.\\nProceed?'\n            self.warn(msg.format(os.path.split(file)[1]), file, self.save_png)\n    \n    def save_png(self, filename):\n        \"\"\"\n        Saves the current image to a file, checking first if the file exists.\n        \n        If the file exists, this will display a warning.\n        \n        Parameter filename: An absolute filename\n        Precondition: filename is a string\n        \"\"\"\n        import os.path\n        assert filename.lower().endswith('.png')\n        self.dismiss_popup()\n        if os.path.isfile(filename):\n            msg = 'File {} exists.\\nOverwrite?'\n            self.warn(msg.format(os.path.split(filename)[1]), filename, self.force_png)\n        else:\n            self.force_png(filename)\n    \n    def force_png(self, filename):\n        \"\"\"\n        Saves the current image, without user confirmation.\n        \n        Parameter filename: An absolute filename\n        Precondition: filename is a string\n        \"\"\"\n        import os.path\n        import traceback\n        self.dismiss_popup()\n        \n        # prepare image for saving\n        from PIL import Image as CoreImage\n\n        # This worked (Unlike Kivy)! But is slow.\n        current = self.workspace.getCurrent()\n        try:\n            im = CoreImage.new('RGBA',(current.getWidth(),current.getHeight()))\n            im.putdata(tuple(current.getData()))\n            im.save(filename,'PNG')\n        except:\n            traceback.print_exc()\n            self.error('Cannot save image file ' + os.path.split(filename)[1])\n    \n    def place_image(self, path, filename):\n        \"\"\"\n        Loads the image from file and stores the result in the image panel(s)\n        \n        If it cannot read the image (either Image is not defined or the file \n        is not an image file), this method does nothing.\n        \n        Parameter path: The base path to the file\n        Precondition: path is a string\n        \n        Parameter filename: An absolute or relative filename\n        Precondition: filename is a string\n        \"\"\"\n        import os.path\n        self.dismiss_popup()\n        \n        if os.path.isabs(filename):\n            file = filename\n        else:\n            file = os.path.join(path,filename)\n        \n        import a6filter\n        self.picture = self.read_image(file)\n        try:\n            self.workspace = a6filter.Filter(self.picture)\n            self.workimage.setImage(self.workspace.getCurrent())\n            self.origimage.setImage(self.workspace.getOriginal())\n        except:\n            traceback.print_exc()\n            self.workspace = None\n            self.workimage.setImage(None)\n            self.origimage.setImage(self.picture)\n        self.canvas.ask_update()\n    \n    def check_save_txt(self, path, filename):\n        \"\"\"\n        Saves the current image to a file, checking that the format is TXT\n        \n        If the user uses another extension, or no extension at all, this method \n        forces the file to be a .txt\n        \n        Parameter path: The base path to the file\n        Precondition: path is a string\n        \n        Parameter filename: An absolute or relative filename\n        Precondition: filename is a string\n        \"\"\"\n        import os.path\n        self.dismiss_popup()\n        \n        if os.path.isabs(filename):\n            file = filename\n        else:\n            file = os.path.join(path,filename)\n        \n        if file.lower().endswith('.txt'):\n            self.save_txt(file)\n        else:\n            file = os.path.splitext(file)[0]+'.txt'\n            msg = 'File will be saved as {} in .txt format.\\nProceed?'\n            self.warn(msg.format(os.path.split(file)[1]), file, self.save_txt)\n    \n    def save_txt(self, filename):\n        \"\"\"\n        Saves the current message text to a file, checking if the file exists.\n        \n        If the file exists, this will display a warning.\n        \n        Parameter filename: An absolute filename\n        Precondition: filename is a string\n        \"\"\"\n        import os.path\n        assert filename.lower().endswith('.txt')\n        
self.dismiss_popup()\n if os.path.isfile(filename):\n msg = 'File {} exists.\\nOverwrite?'\n self.warn(msg.format(os.path.split(filename)[1]), filename, self.force_txt)\n else:\n self.force_txt(filename)\n \n def force_txt(self, filename):\n \"\"\"\n Saves the current message text, without user confirmation.\n \n Parameter filename: An absolute filename\n Precondition: filename is a string\n \"\"\"\n import os.path\n self.dismiss_popup()\n \n # prepare image for saving\n text = self.textpanel.hidden.text\n try:\n file = open(filename,'w',encoding=\"utf-8\")\n file.write(text)\n file.close()\n except:\n self.error('Cannot save text file ' + os.path.split(filename)[1])\n \n def place_text(self, path, filename):\n \"\"\"\n Loads the text from file and stores the result in the text editor\n \n If it cannot read the text, this method does nothing.\n \n Parameter path: The base path to the file\n Precondition: path is a string\n \n Parameter filename: An absolute or relative filename\n Precondition: filename is a string\n \"\"\"\n from kivy.metrics import sp\n \n import os.path\n self.dismiss_popup()\n \n if os.path.isabs(filename):\n file = filename\n else:\n file = os.path.join(path,filename)\n \n try:\n handle = open(file,encoding=\"utf-8\")\n text = handle.read()\n handle.close()\n except:\n traceback.print_exc()\n self.error('Could not load the text file')\n text = ''\n \n height = max((text.count('\\n')+1)*20*sp(1),self.textpanel.height)\n \n self.textpanel.active = True\n self.textpanel.hidden.text = text\n self.textpanel.hidden.height = height\n self.textpanel.select(True)\n \n # MENU OPERATIONS\n def load_image(self):\n \"\"\"\n Opens a dialog to load an image file.\n \n The dialog will take up most of the Window, and last until the user \n dismisses it. Open dismissal it will read the file and display it\n in the window if successful.\n \"\"\"\n self.load('Load image',self.place_image)\n \n def save_image(self):\n \"\"\"\n Opens a dialog to save an image file.\n \n The dialog will take up most of the Window, and last until the user \n dismisses it. Open dismissal it will write the current image to a file.\n \"\"\"\n self.save('Save image',self.check_save_png)\n \n def undo(self):\n \"\"\"\n Undos the last edit to the image.\n \n This method will undo the last edit to the image.\n \"\"\"\n try:\n self.workspace.undo()\n self.workimage.update(self.workspace.getCurrent())\n self.canvas.ask_update()\n except:\n traceback.print_exc()\n self.error('An error occurred when trying to undo')\n \n def clear(self):\n \"\"\"\n Clears all edits to the image.\n \n This method will remove all edits to the image.\n \"\"\"\n try:\n self.workspace.clear()\n self.workimage.update(self.workspace.getCurrent())\n self.canvas.ask_update()\n except:\n traceback.print_exc()\n self.error('An error occurred when trying to clear edits')\n \n def load_text(self):\n \"\"\"\n Opens a dialog to load an text file.\n \n The dialog will take up most of the Window, and last until the user \n dismisses it. Upon dismissal, it will load the text into the\n text window, but not encode it.\n \"\"\"\n self.load('Load message',self.place_text,['*.txt','*.py'])\n \n def save_text(self):\n \"\"\"\n Opens a dialog to save an text file.\n \n The dialog will take up most of the Window, and last until the user \n dismisses it. 
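# The check_save_png/check_save_txt methods above share one pattern: resolve
# the name against the base path, then force the extension and warn on a
# rename. A minimal GUI-free sketch of that pattern; ensure_extension is a
# hypothetical helper for illustration, not part of the application.
import os.path

def ensure_extension(path, filename, ext):
    # Resolve relative names against path, as place_image and the savers do.
    file = filename if os.path.isabs(filename) else os.path.join(path, filename)
    if file.lower().endswith(ext):
        return file, False                         # already the right format
    return os.path.splitext(file)[0] + ext, True   # renamed; caller should warn

# Example: returns ('pics/shot.png', True), so the caller would show the
# 'Proceed?' warning before saving.
print(ensure_extension('pics', 'shot.jpg', '.png'))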
Upon dismissal, it will save the current text to\n a text file.\n \"\"\"\n self.save('Save message',self.check_save_txt,['*.txt'])\n \n def do_async(self,*action):\n \"\"\"\n Launchs the given action in an asynchronous thread\n \n The action parameters are an expanded list where the first element is \n a callable and any other elements are parameters to the callable.\n \n The thread progress is monitored by async_monitor. When the thread \n is done, it will call async_complete in the main event thread.\n \n Parameter(s) *action: An expanded list defining the action\n Precondition: The first element of action is callable\n \"\"\"\n import threading\n self.menubar.disabled = True\n self.processing = True\n self.async_thread = threading.Thread(target=self.async_work,args=action)\n self.async_thread.start()\n\n def async_work(self,*action):\n \"\"\"\n Performs the given action asynchronously.\n \n The action parameters are an expanded list where the first element is \n a callable and any other elements are parameters to the callable.\n \n This is the function that is launched in a separate thread. Even if \n the action fails, it is guaranteed to call async_complete for clean-up\n \n Parameter(s) *action: An expanded list defining the action\n Precondition: The first element of action is callable\n \"\"\"\n try:\n self.workspace.increment()\n getattr(self.workspace,action[0])(*action[1:])\n except:\n traceback.print_exc()\n self.error('Action '+action[0]+' could not be completed')\n self.async_complete()\n \n @mainthread\n def async_complete(self):\n \"\"\"\n Cleans up an asynchronous thread after completion.\n \"\"\"\n self.workimage.update(self.workspace.getCurrent())\n self.async_thread.join()\n Clock.unschedule(self.async_action)\n self.async_thread = None\n self.async_action = None\n self.menubar.disabled = False\n self.processing = False\n #self.progress.canvas.ask_update()\n self.canvas.ask_update()\n\n\nclass InterfaceApp(App):\n \"\"\"\n This class is the imager filter application.\n \n This class corresponds to the Kivy window and is charge of processing \n the primary event loop. It is the root class for the application.\n \"\"\"\n \n def __init__(self,file):\n \"\"\"\n Initializes a new application window.\n \n It will start with the given image file. If file is None or cannot be\n read, it will use the default application image (the instructor).\n \n Parameter file: The location of the initial image file.\n Precondition: file is a string or None.\n \"\"\"\n super().__init__()\n self.source = file\n \n def build(self):\n \"\"\"\n Reads the kivy file and performs any initial layout\n \"\"\"\n panel = InterfacePanel()\n if self.source:\n panel.source = self.source\n return panel\n\n def on_start(self):\n \"\"\"\n Starts up the app and initializes values\n \"\"\"\n super().on_start()\n self.root.config()\n\n\ndef launch(image):\n \"\"\"\n Launches the application with the given image file.\n \n It will start with the given image file. If file is None or cannot be\n read, it will use the default application image (the instructor).\n \n Parameter file: The location of the initial image file.\n Precondition: file is a string or None.\n \"\"\"\n InterfaceApp(image).run()\n", "id": "803218", "language": "Python", "matching_score": 6.369884014129639, "max_stars_count": 1, "path": "RGB Image/imager/interface.py" }, { "content": "\"\"\"\nGUI support widgets for the imager application\n\nThe GUI for this application is quite complex, with dialog boxes, text input, \nmenus and the like. 
To simplify the code, we pulled a lot of smaller features\nout into its own file.\n\nBased on an original file by <NAME> (dck10) and <NAME> (wmw2)\n\nAuthor: <NAME> (wmw2)\nDate: October 29, 2019\n\"\"\"\n# These are the kivy parent classes\nfrom kivy.uix.boxlayout import BoxLayout\nfrom kivy.uix.dropdown import DropDown\nfrom kivy.uix.widget import Widget\nfrom kivy.uix.popup import Popup\nfrom kivy.graphics.texture import Texture\nfrom kivy.metrics import sp\n\nfrom kivy.properties import *\n\nfrom array import array # Byte buffers\nfrom io import StringIO # Making complex strings\nimport traceback\n\n\n# DIALOGS\nclass LoadDialog(BoxLayout):\n \"\"\"\n A controller for a LoadDialog, a pop-up dialog to load a file.\n \n The View for this controller is defined in interface.kv. This class simply \n contains the hooks for the view properties\n \"\"\"\n # These fields are 'hooks' to connect to the interface.kv file\n # The point-and-click file navigator\n filechooser = ObjectProperty(None)\n # The text box (for the file name)\n textinput = ObjectProperty(None)\n # The load button\n loadchoice = ObjectProperty(None)\n # The cancel button\n exitchoice = ObjectProperty(None)\n\n\nclass SaveDialog(BoxLayout):\n \"\"\"\n A controller for a SaveDialog, a pop-up dialog to save a file.\n \n The View for this controller is defined in interface.kv. This class simply \n contains the hooks for the view properties\n \"\"\"\n # These fields are 'hooks' to connect to the interface.kv file\n # The point-and-click file navigator\n filechooser = ObjectProperty(None)\n # The text box (for the file name)\n textinput = ObjectProperty(None)\n # The save button\n savechoice = ObjectProperty(None)\n # The cancel button\n exitchoice = ObjectProperty(None)\n\n\nclass ErrorDialog(BoxLayout):\n \"\"\"\n A controller for an ErrorDialog, a pop-up to notify the user of an error.\n \n The View for this controller is defined in interface.kv. This class simply \n contains the hooks for the view properties\n \"\"\"\n # These fields are 'hooks' to connect to the interface.kv file\n # The error message\n message = StringProperty('')\n # A confirmation button\n okchoice = ObjectProperty(None)\n\n\nclass WarningDialog(BoxLayout):\n \"\"\"\n A controller for a WarningDialog, a pop-up dialog to warn the user.\n \n It differs from ErrorDialog in that it may be nested inside of another \n pop-up dialog. The warning can be dismissed and ignored.\n \n The View for this controller is defined in interface.kv. This class simply \n contains the hooks for the view properties\n \"\"\"\n # These fields are 'hooks' to connect to the interface.kv file\n # The error message\n message = StringProperty('')\n # The data that caused the problem.\n payload = StringProperty('')\n # A confirmation button (to ignore the warning).\n okchoice = ObjectProperty(None)\n # The cancel button\n exitchoice = ObjectProperty(None)\n\n\n# MENUS\nclass MenuDropDown(DropDown):\n \"\"\"\n The parent class for all drop-down menus.\n \n This class contains unified logic for all of the drop-down menus in this \n application. This includes the code for opening and closing the menu.\n \"\"\"\n # These fields are 'hooks' to connect to the interface.kv file\n # The possible choices from the drop down menu\n options = DictProperty({})\n # The size of the drop down menu (set dynamically)\n rowspan = NumericProperty(0)\n \n def __init__(self,**keywords):\n \"\"\"\n Initializes a new drop-down menu\n \n Drop-down menus take the same keywords as other widgets. 
However, \n they have an important additional keyword: choices. This lists the \n possible valid responses of this drop-down menu.\n \n In addition, each element of 'choices' is also a valid keyword of \n this drop-down menu. This specifies the call function as a tuple. \n The first element stores the function, while the remaining elements \n are the arguments.\n \n Parameter keyword: The Kivy (and drop-down menu) keyword arguments\n Precondition: keyword is a dictionary with string keys\n \"\"\"\n if 'choices' in keywords:\n for choice in keywords['choices']:\n if choice in keywords:\n self.options[choice] = keywords[choice]\n del keywords[choice] # Gobble\n del keywords['choices'] # Gobble\n super().__init__(**keywords)\n self.bind(on_select=self.dochoice)\n \n def dochoice(self,instance,value):\n \"\"\"\n Performs a call-back (provided one exists) for the selected item.\n \n The extra parameter instance is an artifact of how Kivy does things. \n It is not used at all since it is the same as self. \n \n Parameter instance: A reference to this object\n Precondition: instance is the same as self\n \n Parameter value: The menu option chosen\n Precondition: value is a string\n \"\"\"\n if value in self.options:\n callback = self.options[value]\n func = callback[0]\n func(*callback[1:])\n \n def open(self,widget):\n \"\"\"\n Opens this drop-down, making the provided widget its parent.\n \n The drop-down will be arranged vertically, either up or down, \n depending on the parent.\n \n Parameter widget: The parent widget to open the drop-down\n Precondition: widget is a Kivy Widget\n \"\"\"\n self.rowspan = widget.height\n super().open(widget)\n\n\nclass ImageDropDown(MenuDropDown):\n \"\"\"\n A controller for the Image drop-down, with options for image loading and edits\n \n The View for this controller is defined in interface.kv. This class simply \n contains the hooks for the view properties\n \"\"\"\n # These fields are 'hooks' to connect to the interface.kv file\n # Load an image\n loadchoice = ObjectProperty(None)\n # Save an image\n savechoice = ObjectProperty(None)\n # Undo one edit step\n undochoice = ObjectProperty(None)\n # Undo all edits\n clearchoice = ObjectProperty(None)\n\n\nclass TextDropDown(MenuDropDown):\n \"\"\"\n A controller for the Test drop-down, with options for text encoding/decoding.\n \n The View for this controller is defined in interface.kv. This class simply \n contains the hooks for the view properties\n \"\"\"\n # These fields are 'hooks' to connect to the interface.kv file\n # Show the text panel\n showchoice = ObjectProperty(None)\n # Hide the text panel\n hidechoice = ObjectProperty(None)\n # Encode the text\n codechoice = ObjectProperty(None)\n # Load a text file\n loadchoice = ObjectProperty(None)\n # Save a text file\n savechoice = ObjectProperty(None)\n \n def disable(self,flag):\n \"\"\"\n Disables or enables the text editting functionality.\n \n Text editting is only possible when the text panel is visible.\n \n Parameter flag: Whether to disable the editting functionality.\n Precondition: flag is a boolean\n \"\"\"\n self.codechoice.disabled = flag\n self.loadchoice.disabled = flag\n self.savechoice.disabled = flag\n \n\nclass AxisDropDown(MenuDropDown):\n \"\"\"\n A controller for anReflect drop-down, with a choice between image axes.\n \n The View for this controller is defined in interface.kv. 
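# A sketch of how the 'choices' keyword above is consumed: each choice name
# maps to a list of (callable, extra args...), and dochoice dispatches on
# select. The handler below is hypothetical, and constructing Kivy widgets
# normally requires a running App/Window, so treat this as illustration only.
def apply_filter(name, flag=False):
    print('applying', name, flag)

menu = MenuDropDown(choices=['greyscale', 'sepia'],
                    greyscale=[apply_filter, 'monochromify', False],
                    sepia=[apply_filter, 'monochromify', True])
# Selecting 'sepia' fires dochoice, which calls apply_filter('monochromify', True)
menu.select('sepia')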
This class simply \n contains the hooks for the view properties\n \"\"\"\n # These fields are 'hooks' to connect to the interface.kv file\n # Flip horizontally\n horichoice = ObjectProperty(None)\n # Flip vertically\n vertchoice = ObjectProperty(None)\n\n\nclass TurnDropDown(MenuDropDown):\n \"\"\"\n A controller for a Rotate drop-down, with a choice of left or right\n \n The View for this controller is defined in interface.kv. This class simply \n contains the hooks for the view properties\n \"\"\"\n # These fields are 'hooks' to connect to the interface.kv file\n # Rotate left\n leftchoice = ObjectProperty(None)\n # Rotate right\n rghtchoice = ObjectProperty(None)\n # Transpose\n tranchoice = ObjectProperty(None)\n\n\nclass GreyDropDown(MenuDropDown):\n \"\"\"\n A controller for a Mono drop-down, with a choice of monochromatic styles\n \n The View for this controller is defined in interface.kv. This class simply \n contains the hooks for the view properties\n \"\"\"\n # These fields are 'hooks' to connect to the interface.kv file\n # Make it traditional greyscale\n greychoice = ObjectProperty(None)\n # Make it sepia tone\n sepiachoice = ObjectProperty(None)\n\n\nclass BlockDropDown(MenuDropDown):\n \"\"\"\n A controller for a Pixellate drop-down, with options for the block size\n \n The View for this controller is defined in interface.kv. This class simply \n contains the hooks for the view properties\n \"\"\"\n # These fields are 'hooks' to connect to the interface.kv file\n # 10 pixel block\n choice10 = ObjectProperty(None)\n # 20 pixel block\n choice20 = ObjectProperty(None)\n # 50 pixel block\n choice50 = ObjectProperty(None)\n # 100 pixel block\n choice100 = ObjectProperty(None)\n # 200 pixel block\n choice200 = ObjectProperty(None)\n\n\n# PANELS\nclass ImagePanel(Widget):\n \"\"\"\n A controller for an ImagePanel, an widget to display an image on screen.\n \n An image panel displays an Image object for the user to see. This GUI r\n equires that the student have completed the Image class. However, it does \n not require that the student have completed anything else.\n \n The view for this application is defined the interface.kv file. This class \n simply contains the hooks for the view properties. In addition, it has \n several helpful methods for image processing.\n \"\"\"\n # These fields are 'hooks' to connect to the imager.kv file\n # The image, represented as an Image object\n picture = ObjectProperty(None,allownone=True)\n # The image, represented as a Texture object\n texture = ObjectProperty(None,allownone=True)\n # The \"interior\" dimensions of this panel (ignoring the border)\n inside = ListProperty((0,0))\n # The display size of the current image\n imagesize = ListProperty((0,0))\n # The position offset of the current image\n imageoff = ListProperty((0,0))\n \n @classmethod\n def getResource(self,filename):\n \"\"\"\n Returns the absolute pathname for a file stored in the imager folder.\n \n This is a class method that allows all objects of this class to load \n any image file stored in the imager folder. 
Without it, we have to \n specify the full path to the file (which may vary depending on your \n installation).\n \n Parameter filename: The relative name of the file\n Precondition: filename is a string\n \"\"\"\n import os.path\n dir = os.path.split(__file__)[0]\n return os.path.join(dir,filename)\n \n def blit(self,picture):\n for pos in range(len(picture)):\n pixel = picture[pos]\n self._blitter[pos*3 ] = pixel[0]\n self._blitter[pos*3+1] = pixel[1]\n self._blitter[pos*3+2] = pixel[2]\n return self._blitter\n \n def setImage(self,picture):\n \"\"\"\n Returns True if the image panel successfully displayed picture\n \n This method sets the given picture to be the image of this panel, \n and returns True if it is successful. If it fails, the texture is \n erased and the method returns false.\n \n Parameter picture: The image to display\n Precondition: picture is an Image object or None\n \"\"\"\n import a6image\n \n self.picture = None\n self.texture = None\n self.imagesize = self.inside\n self.imageoff[0] = (self.size[0]-self.imagesize[0])//2\n self.imageoff[1] = (self.size[1]-self.imagesize[1])//2\n if picture is None:\n return False\n \n try:\n self.picture = picture\n self.texture = Texture.create(size=(picture.getWidth(), picture.getHeight()), \n colorfmt='rgb', bufferfmt='ubyte')\n self._blitter = array('B',[0]*len(picture)*3)\n self.texture.blit_buffer(self.blit(picture), colorfmt='rgb', bufferfmt='ubyte')\n self.texture.flip_vertical()\n \n if self.texture.width < self.texture.height:\n self.imagesize[0] = int(self.inside[0]*(self.texture.width/self.texture.height))\n self.imagesize[1] = self.inside[1]\n elif self.texture.width > self.texture.height:\n self.imagesize[0] = self.inside[0]\n self.imagesize[1] = int(self.inside[1]*(self.texture.height/self.texture.width))\n else:\n self.imagesize = self.inside\n \n self.imageoff[0] = (self.size[0]-self.imagesize[0])//2\n self.imageoff[1] = (self.size[1]-self.imagesize[1])//2\n return True\n except:\n traceback.print_exc()\n return False\n \n def update(self,picture):\n \"\"\"\n Returns True if the image panel successfully displayed picture\n \n This method is slightly faster than setImage in the case where the \n picture is a (dimension-preserving) modification of the current one. \n Otherwise it calls setImage.\n \n Parameter picture: The image to display\n Precondition: picture is an Image object or None\n \"\"\"\n try:\n assert picture.getWidth() == self.texture.width\n self.picture = picture\n self.texture.blit_buffer(self.blit(picture), colorfmt='rgb', bufferfmt='ubyte')\n return True\n except:\n pass\n print('REMAKING')\n return self.setImage(picture)\n \n def hide_widget(self, dohide=True):\n \"\"\"\n Hides or shows this widget on screen.\n \n This method is what allows us to have one panel \"behind\" another, \n moving it to the front or the back.\n \n Parameter dohide: Whether to hide or show the widget\n Precondition: dohide is a boolean (default True)\n \"\"\"\n if hasattr(self, 'saved_attrs'):\n if not dohide:\n self.height, self.size_hint_y, self.opacity, self.disabled = self.saved_attrs\n del self.saved_attrs\n elif dohide:\n self.saved_attrs = self.height, self.size_hint_y, self.opacity, self.disabled\n self.height, self.size_hint_y, self.opacity, self.disabled = 0, None, 0, True\n\n\nclass MessagePanel(Widget):\n \"\"\"\n A controller for a MessagePanel, an widget to display scrollable text.\n \n An message panel displays the hidden message for the steganography part of \n the assignment. 
It does not require any student code to function.\n \n The View for this controller is defined in interface.kv. This class simply \n contains the hooks for the view properties\n \"\"\"\n # These fields are 'hooks' to connect to the interface.kv file\n # The text input field\n hidden = ObjectProperty(None)\n # The background color\n textclr = ListProperty([1, 1, .9, 1])\n # Whether a message is currently present\n active = BooleanProperty(False)\n \n @classmethod\n def getResource(self,filename):\n \"\"\"\n Returns the absolute pathname for a file stored in the imager folder.\n \n This is a class method that allows all objects of this class to load \n any text file stored in the imager folder. Without it, we have to \n specify the full path to the file (which may vary depending on your \n installation).\n \n Parameter filename: The relative name of the file\n Precondition: filename is a string\n \"\"\"\n import os.path\n dir = os.path.split(__file__)[0]\n return os.path.join(dir,filename)\n \n def select(self,flag):\n \"\"\"\n Changes the background color to notify of uncommitted changes\n \n Parameter flag: True if there are uncommitted changes\n Precondition: flag is a boolean\n \"\"\"\n self.active = True\n if flag:\n self.textclr = [.9, .9, 1, 1]\n else:\n self.textclr = [ 1, 1, .9, 1]\n \n def hide_widget(self, dohide=True):\n \"\"\"\n Hides or shows this widget on screen.\n \n This method is what allows us to have one panel \"behind\" another, \n moving it to the front or the back.\n \n Parameter dohide: Whether to hide or show the widget\n Precondition: dohide is a boolean (default True)\n \"\"\"\n if hasattr(self, 'saved_attrs'):\n if not dohide:\n self.height, self.size_hint_y, self.opacity, self.disabled = self.saved_attrs\n del self.saved_attrs\n elif dohide:\n self.saved_attrs = self.height, self.size_hint_y, self.opacity, self.disabled\n self.height, self.size_hint_y, self.opacity, self.disabled = 0, None, 0, True", "id": "9274576", "language": "Python", "matching_score": 1.6208597421646118, "max_stars_count": 1, "path": "RGB Image/imager/widgets.py" }, { "content": "\"\"\"\nPrimary module for Froggit\n\nThis module contains the main controller class for the Froggit application. There\nis no need for any additional classes in this module. If you need more classes, 99% \nof the time they belong in either the lanes module or the models module. If you are \nunsure about where a new class should go, post a question on Piazza.\n\n<NAME> asw263\n21 December 2020\n\"\"\"\nfrom consts import *\nfrom game2d import *\nfrom level import *\nimport introcs\n\nfrom kivy.logger import Logger\n\n\n# PRIMARY RULE: Froggit can only access attributes in level.py via getters/setters\n# Froggit is NOT allowed to access anything in lanes.py or models.py.\n\n\nclass Froggit(GameApp):\n \"\"\"\n The primary controller class for the Froggit application\n \n This class extends GameApp and implements the various methods necessary for \n processing the player inputs and starting/running a game.\n \n Method start begins the application.\n \n Method update either changes the state or updates the Level object\n \n Method draw displays the Level object and any other elements on screen\n \n Because of some of the weird ways that Kivy works, you SHOULD NOT create an\n initializer __init__ for this class. Any initialization should be done in\n the start method instead. This is only for this class. 
All other classes\n behave normally.\n \n Most of the work handling the game is actually provided in the class Level.\n Level should be modeled after subcontrollers.py from lecture, and will have\n its own update and draw method.\n \n The primary purpose of this class is managing the game state: when is the\n game started, paused, completed, etc. It keeps track of that in a hidden\n attribute\n \n Attribute view: The game view, used in drawing (see examples from class)\n Invariant: view is an instance of GView and is inherited from GameApp\n \n Attribute input: The user input, used to control the frog and change state\n Invariant: input is an instance of GInput and is inherited from GameApp\n \"\"\"\n # HIDDEN ATTRIBUTES\n # Attribute _state: The current state of the game (taken from consts.py)\n # Invariant: _state is one of STATE_INACTIVE, STATE_LOADING, STATE_PAUSED, \n # STATE_ACTIVE, STATE_CONTINUE, or STATE_COMPLETE\n #\n # Attribute _level: The subcontroller for a level, managing the frog and obstacles\n # Invariant: _level is a Level object or None if no level is currently active\n #\n # Attribute _title: The title of the game\n # Invariant: _title is a GLabel, or None if there is no title to display\n #\n # Attribute _text: A message to display to the player\n # Invariant: _text is a GLabel, or None if there is no message to display\n \n # LIST MORE ATTRIBUTES (AND THEIR INVARIANTS) HERE IF NECESSARY\n # Attribute _lastkeys: Determines if S is pressed\n # Invariant: _lastkeys is Boolean\n\n # DO NOT MAKE A NEW INITIALIZER!\n\n # THREE MAIN GAMEAPP METHODS\n def start(self):\n \"\"\"\n Initializes the application.\n \n This method is distinct from the built-in initializer __init__ (which \n you should not override or change). This method is called once the \n game is running. You should use it to initialize any game specific \n attributes.\n \n This method should make sure that all of the attributes satisfy the \n given invariants. When done, it sets the _state to STATE_INACTIVE and \n creates both the title (in attribute _title) and a message (in attribute\n _text) saying that the user should press a key to play a game.\n \"\"\"\n self._state = STATE_INACTIVE\n self._level = None\n self._lastkeys = 0\n self._title = GLabel(text = \"Froggit\", font_name = ALLOY_FONT,\n font_size = ALLOY_LARGE, x = self.width/2, y = self.height/2)\n self._text = GLabel(text = \"Press 'S' to Start\", font_name = ALLOY_FONT,\n font_size = ALLOY_MEDIUM, x = self.width/2, y = self.height/3)\n\n def update(self,dt):\n \"\"\"\n Updates the game objects each frame.\n \n It is the method that does most of the work. It is NOT in charge of \n playing the game. That is the purpose of the class Level. The primary \n purpose of this game is to determine the current state, and -- if the \n game is active -- pass the input to the Level object _level to play the \n game.\n \n As part of the assignment, you are allowed to add your own states. \n However, at a minimum you must support the following states: \n STATE_INACTIVE, STATE_LOADING, STATE_ACTIVE, STATE_PAUSED, \n STATE_CONTINUE, and STATE_COMPLETE. Each one of these does its own \n thing and might even needs its own helper. We describe these below.\n \n STATE_INACTIVE: This is the state when the application first opens. \n It is a paused state, waiting for the player to start the game. It \n displays the title and a simple message on the screen. 
The application \n remains in this state so long as the player never presses a key.\n \n STATE_LOADING: This is the state that creates a new level and shows it on \n the screen. The application switches to this state if the state was \n STATE_INACTIVE in the previous frame, and the player pressed a key. \n This state only lasts one animation frame (the amount of time to load\n the data from the file) before switching to STATE_ACTIVE. One of the\n key things about this state is that it resizes the window to match the\n level file.\n \n STATE_ACTIVE: This is a session of normal gameplay. The player can \n move the frog towards the exit, and the game will move all obstacles \n (cars and logs) about the screen. All of this should be handled inside\n of class Level (NOT in this class). Hence the Level class should have \n an update() method, just like the subcontroller example in lecture.\n \n STATE_PAUSED: Like STATE_INACTIVE, this is a paused state. However, \n the game is still visible on the screen.\n \n STATE_CONTINUE: This state restores the frog after it was either killed\n or reached safety. The application switches to this state if the state \n was STATE_PAUSED in the previous frame, and the player pressed a key. \n This state only lasts one animation frame before switching to STATE_ACTIVE.\n \n STATE_COMPLETE: The wave is over (all lives are lost or all frogs are safe), \n and is either won or lost.\n \n You are allowed to add more states if you wish. Should you do so, you should\n describe them here.\n \n Parameter dt: The time in seconds since last update\n Precondition: dt is a number (int or float)\n \"\"\"\n self._determineState()\n\n if self._state == STATE_INACTIVE: \n self._title = GLabel(text = \"Froggit\", font_name = ALLOY_FONT,\n font_size = ALLOY_LARGE, x = self.width/2, y = self.height/2)\n self._text = GLabel(text = \"Press 'S' to Start\", font_name = ALLOY_FONT,\n font_size = ALLOY_MEDIUM, x = self.width/2, y = self.height/3)\n \n elif self._state == STATE_LOADING:\n newDict = self.load_json(DEFAULT_LEVEL)\n self.width = newDict['size'][0]*GRID_SIZE\n self.height = (newDict['size'][1]+1)*GRID_SIZE\n newLevel = Level(newDict)\n self._level = newLevel\n self._state = STATE_ACTIVE\n \n if self._state == STATE_ACTIVE:\n self._level.update(self.input, dt)\n\n def draw(self):\n \"\"\"\n Draws the game objects to the view.\n \n Every single thing you want to draw in this game is a GObject. To draw a \n GObject g, simply use the method g.draw(self.view). It is that easy!\n \n Many of the GObjects (such as the cars, logs, and exits) are attributes\n in either Level or Lane. In order to draw them, you either need to add \n getters for these attributes or you need to add a draw method to \n those two classes. We suggest the latter. See the example subcontroller.py \n from the lesson videos.\n \"\"\"\n if self._title != None:\n self._title.draw(self.view)\n if self._text != None:\n self._text.draw(self.view)\n if self._level != None:\n self._level.draw(self.view)\n \n def _determineState(self):\n \"\"\"\n Determines the current state and assigns it to\n self.state\n\n This method checks for a key press, and if there is\n one, changes the state to the next value. A key\n press is when a key is pressed for the FIRST TIME.\n We do not want the state to continue to change as\n we hold down the key. 
The user must release the\n        key and press it again to change the state.\n        \"\"\"\n        # Determine if s is being pressed\n        curr_keys = self.input.is_key_down('s')\n\n        # Only make sPress True if s has just been pressed\n        sPress = curr_keys and self._lastkeys == 0\n\n        if sPress:\n            # S press happened. Change the state to loading\n            self._state = STATE_LOADING\n            self._title = None\n            self._text = None\n\n        # Update _lastkeys (self.__lastkeys would be name-mangled to a different\n        # attribute, so the press would retrigger on every frame)\n        self._lastkeys = curr_keys\n\n", "id": "11727056", "language": "Python", "matching_score": 4.852024078369141, "max_stars_count": 1, "path": "Froggit/app.py" }, { "content": "\"\"\"\nConstants for Froggit\n\nThis module contains global constants for the game Froggit. These constants need to be \nused in the model, the view, and the controller. As these are spread across multiple \nmodules, we separate the constants into their own module. This allows all modules to \naccess them.\n\n# YOUR NAME AND NETID HERE\n# DATE COMPLETED HERE\n\"\"\"\nimport introcs\nimport sys\n\n### WINDOW CONSTANTS (all coordinates are in pixels) ###\n\n# The initial width of the game display\nGAME_WIDTH = 1024\n# The initial height of the game display\nGAME_HEIGHT = 896\n# The size in pixels of a single grid square\nGRID_SIZE = 64\n\n\n### FROG CONSTANTS ###\n\n# The image file for the non-animated frog\nFROG_IMAGE = 'frog1.png'\n# The number of seconds that frog movement takes\nFROG_SPEED = 0.25\n# The image file for a frog that made it to safety\nFROG_SAFE = 'safe.png'\n# The image file for a frog life\nFROG_HEAD = 'froghead.png'\n# The number of lives the frog has before losing\nFROG_LIVES = 3\n\n# The angles for the frog heading. Set the angle to these to get the right direction\n# Notice the frog image is upside down, so NORTH requires a 180 degree rotation\nFROG_NORTH = 180\nFROG_WEST = -90\nFROG_EAST = 90\nFROG_SOUTH = 0\n\n# The sprite sheet for the animated frog\nFROG_SPRITE = 'frog2'\n# The sprite sheet for the dying frog\nDEATH_SPRITE = 'skulls'\n# The number of seconds for a death animation\nDEATH_SPEED = 0.5\n\n\n### GAME CONSTANTS ###\n\n# The state before the game has started\nSTATE_INACTIVE = 0\n# The state when we are loading in a new level\nSTATE_LOADING = 1\n# The state when the level is activated and in play\nSTATE_ACTIVE = 2\n# The state when we are paused between lives\nSTATE_PAUSED = 3\n# The state when we are restoring the frog\nSTATE_CONTINUE = 4\n# The state when the game is complete (won or lost)\nSTATE_COMPLETE = 5\n\n\n### FONT CONSTANTS ###\n\n# The font choice for labels and messages\nALLOY_FONT = 'AlloyInk.ttf'\n# A large message or label\nALLOY_LARGE = 124\n# A medium message or label\nALLOY_MEDIUM = 64\n# A small message or label\nALLOY_SMALL = 48\n\n\n### SOUND EFFECTS ###\n\n# The jumping sound\nCROAK_SOUND = 'croak.wav'\n# The death sound\nSPLAT_SOUND = 'splat.wav'\n# The success sound\nTRILL_SOUND = 'trill.wav'\n\n\n### JSON FILES ###\n\n# The default level file\nDEFAULT_LEVEL = 'easy2.json'\n# The object data (hitboxes) file\nOBJECT_DATA = 'objects.json'\n\n\n### USE COMMAND LINE ARGUMENTS TO CHANGE DEFAULT LEVEL FILE AND FROG SPEED\n\"\"\"\nsys.argv is a list of the command line arguments when you run python. These arguments are\neverything after the word python. So if you start the game typing\n\n    python froggit default.json 1\n\nPython puts ['froggit', 'default.json', '1'] into sys.argv. Below, we take advantage of \nthis fact to change the constant DEFAULT_LEVEL. This is the level file to be used when \nyou start the game. 
\n\nThe second argument is the FROG_SPEED, which is the amount of time between move steps. \nA large value means a much slower moving frog.\n\"\"\"\ntry:\n file = sys.argv[1]\n if file[-5:].lower() == '.json':\n DEFAULT_LEVEL = file\n else:\n DEFAULT_LEVEL = file+'.json'\nexcept:\n pass # Use original value\n\ntry:\n value = float(sys.argv[2])\n FROG_SPEED = value\nexcept:\n pass # Use original value\n\n\n### ADD MORE CONSTANTS (PROPERLY COMMENTED) AS NECESSARY ###\n#Animation constants\n\n#the animation\n", "id": "1462092", "language": "Python", "matching_score": 1.6002390384674072, "max_stars_count": 1, "path": "Froggit/consts.py" }, { "content": "import alphien\r\nimport math\r\nimport numpy as np\r\nimport pandas as pd\r\nfrom sklearn.preprocessing import MinMaxScaler\r\nimport tensorflow as tf\r\nfrom tensorflow import keras\r\nfrom tensorflow.keras.models import Sequential\r\nfrom tensorflow.keras.utils import Sequence\r\nfrom tensorflow.keras.layers import Dense\r\nfrom tensorflow.keras.layers import Dropout\r\n# Creates a data loader for UBS pricing data\r\n\r\nd = alphien.data.DataLoader()\r\n\r\n# Need to normalize the data for the neural net to interpret them properly\r\ndef engineerData(df):\r\n x = df.values #returns a numpy array\r\n min_max_scaler = MinMaxScaler()\r\n x_scaled = min_max_scaler.fit_transform(x)\r\n df = pd.DataFrame(x_scaled)\r\n return df\r\n\r\ndef splitXY(df):\r\n X = df.iloc[:,:-1]\r\n Y = df.iloc[:,-1]\r\n return X,Y\r\n\r\n# Keras sequence, like a generator but with the guarantee that the network will only train once on each sample per epoch\r\nclass ComplexPricingSequence(Sequence):\r\n\r\n def __init__(self, data, batch_size):\r\n self.data = data\r\n self.batch_size = batch_size\r\n self.size = data.size()\r\n\r\n def __len__(self):\r\n return math.ceil(self.size / self.batch_size)\r\n\r\n def __getitem__(self, index):\r\n batch = engineerData(next(self.data.batch(fromRow=index,toRow=index+self.batch_size)))\r\n batch_x, batch_y = splitXY(batch)\r\n\r\n return batch_x.to_numpy(), batch_y.to_numpy()\r\n\r\ndef training_loop():\r\n model = Sequential()\r\n model.add(Dense(164, input_dim=164, kernel_initializer='normal',\r\n activation='relu'))\r\n model.add(Dropout(0.2))\r\n model.add(Dense(109, activation='relu'))\r\n model.add(Dropout(0.2))\r\n model.add(Dense(1,activation='linear'))\r\n model.compile(loss='mse', optimizer='adam', metrics=['mse','mae'])\r\n dataSequence = ComplexPricingSequence(d,10000)\r\n model.fit(dataSequence,epochs=5,verbose=1)\r\n model.save('deepLearningModel.txt')\r\n\r\ntraining_loop()\r\n\r\nmodel = keras.models.load_model('deepLearningModel.txt')\r\n\r\ndataTest = next(d.batch(fromRow=1, toRow=200000))\r\ndf = dataTest\r\n\r\ndef predictFunc(newData, dataTransformFunc, model):\r\n X,Y = splitXY(engineerData(newData))\r\n return model.predict(X)\r\n\r\npredictFunc(dataTest, engineerData, model)\r\n\r\ndef evalFunc(newData, dataTransformFunc, model):\r\n X,Y = splitXY(engineerData(newData))\r\n return model.evaluate(X,Y)\r\n\r\nevalFunc(dataTest, engineerData, model)\r\n", "id": "5437971", "language": "Python", "matching_score": 1.3295338153839111, "max_stars_count": 4, "path": "ml-derivatives.py" }, { "content": "import alphien\r\nimport pandas as pd\r\nimport numpy as np\r\n\r\n# Creates a list of Historical S&P 500 Tickers\r\ntickersList = list(alphien.data.getTickersSP500()['ticker'])\r\nprint(\"Created a list of \" + str(len(tickersList)) + \" tickers.\")\r\n\r\n# High-Low-Adjusted Close data (HLC) dataframe for S&P 500 
since 2007\r\ndf_HLC = alphien.data.getHistoryData(ticker=tickersList, field = ['high_price','low_price','close_price'])\r\n\r\n# Returns a dataframe of portfolio allocations\r\ndef payout(df, freq=300, period=14):\r\n    print(\"Setting up dataframes\")\r\n    # High-Low-Adjusted Close data (HLC) dataframe for S&P 500 since 2007\r\n    df_HLC = alphien.data.getHistoryData(ticker=tickersList, field = ['high_price','low_price','close_price'])\r\n    # Adjusted close data (bb live) dataframe for S&P 500 since 2007\r\n    df_bblive = alphien.data.getHistoryData(ticker=tickersList,field = \"bb_live\")\r\n    \r\n    # Cleaning Data\r\n    # Sets nan values to zero\r\n    df_HLC = df_HLC.fillna(value=0)\r\n    df_bblive = df_bblive.fillna(value=0)\r\n    # Remove duplicate columns\r\n    df_bblive = df_bblive.loc[:,~df_bblive.columns.duplicated()]\r\n    df_HLC = df_HLC.loc[:,~df_HLC.columns.duplicated()]\r\n    \r\n    # Create a dataframe for allocations and scores with the rebalancing dates\r\n    df_Allo = df_bblive.loc[[i for j, i in enumerate(df_bblive.index) if j % freq == 0]]\r\n    df_Allo.add_suffix('.Allo')\r\n    df_Scores = df_bblive.loc[[i for j, i in enumerate(df_bblive.index) if j % freq == 0]]\r\n    df_Scores.add_suffix('.Scores')\r\n    print(\"Starting to loop\")\r\n\r\n    # Loop over the rebalancing dates:\r\n    for row in range(len(df_Scores)):\r\n        row_HLC = row*freq\r\n        if row != 0:\r\n            # Loop over the companies\r\n            for col in range(len(df_Scores.columns)):\r\n                if df_Scores.iloc[row,col] != 0:\r\n                    col_HLC = col*3\r\n                    dxs = []\r\n                    for j in range(period):\r\n                        dx_date = row_HLC-j\r\n                        # Directional Movement Positives\r\n                        pos_DMS = []\r\n                        # Directional Movement Negatives\r\n                        neg_DMS = []\r\n                        # True Ranges\r\n                        trs = []\r\n                        \r\n                        # Calculate the Positive and Negative Directional Movement for the past period days\r\n                        for i in range(period):\r\n                            dm_date = dx_date-i\r\n                            # Current High - Previous High\r\n                            pos_DM = df_HLC.iat[dm_date,col_HLC] - df_HLC.iat[dm_date-1,col_HLC]\r\n                            # Previous Low - Current Low\r\n                            neg_DM = df_HLC.iat[dm_date-1,col_HLC+1] - df_HLC.iat[dm_date,col_HLC+1]\r\n                            \r\n                            if pos_DM > neg_DM:\r\n                            \tneg_DM = 0\r\n                            elif pos_DM < 0 and neg_DM < 0:\r\n                            \tpos_DM = 0\r\n                            \tneg_DM = 0\r\n                            else:\r\n                            \tpos_DM = 0\r\n\r\n                            pos_DMS.append(pos_DM)\r\n                            neg_DMS.append(neg_DM)\r\n                        \r\n                        # Calculate the Average Positive and Negative DMs\r\n                        pos_DM_period = 0\r\n                        for k in pos_DMS:\r\n                            pos_DM_period += k\r\n                        pos_DM_period = pos_DM_period / period \r\n\r\n                        neg_DM_period = 0\r\n                        for l in neg_DMS:\r\n                            neg_DM_period += l\r\n                        neg_DM_period = neg_DM_period / period\r\n\r\n                        # Calculate the True Range for the past period days\r\n                        for i in range(period):\r\n                            tr_date = dx_date-i\r\n                            # Current high - current low\r\n                            hl = df_HLC.iat[tr_date,col_HLC] - df_HLC.iat[tr_date,col_HLC+1]\r\n                            # Current high - previous low (abs value)\r\n                            hc = abs(df_HLC.iat[tr_date,col_HLC] - df_HLC.iat[tr_date-1,col_HLC+2])\r\n                            # Current low - previous low (abs value)\r\n                            lc = abs(df_HLC.iat[tr_date,col_HLC+1] - df_HLC.iat[tr_date-1,col_HLC+2])\r\n                            trs.append(max(hl,hc,lc))\r\n                        \r\n                        # Calculate the ATR\r\n                        atr = 0\r\n                        for j in trs: \r\n                            atr += j\r\n                        atr = atr/period\r\n\r\n                        # Avoid division by zero errors in case ATR is 0\r\n                        if atr == 0: atr = 0.0000000000001\r\n                        \r\n                        # Calculate the Positive and Negative Directional Indicators and DXScore\r\n                        Pos_DI = 100*pos_DM_period/atr\r\n                        Neg_DI = 100*neg_DM_period/atr\r\n                        DXScore = Pos_DI - Neg_DI\r\n                        #DX = 100*(abs(Pos_DI - Neg_DI)/abs(Pos_DI + Neg_DI))\r\n                        dxs.append(DXScore)\r\n                    \r\n                    # Calculate ADXScore and input it into df_Scores\r\n                    
ADXScore = 0\r\n                    for k in dxs:\r\n                        ADXScore += k\r\n                    ADXScore = ADXScore / period\r\n                    print(\"row\"+str(row)+\"col\"+str(col)+\"ADXScore:\" + str(ADXScore))\r\n                    df_Scores.iloc[row,col] = ADXScore\r\n    \r\n    # Set all values in allocations DataFrame to zero to clear \r\n    for col in df_Allo.columns:\r\n        df_Allo[col].values[:] = 0 \r\n    \r\n    print(\"Creating allocations\")\r\n    for row in range(len(df_Scores)):\r\n        # copies the scores for this rebalancing date into a list\r\n        rowcopy = []\r\n        for col in range(len(df_Scores.columns)):\r\n            rowcopy.append(df_Scores.iloc[row,col]) \r\n        \r\n        #creates a list of the indexes of the top 50 scores in df_Scores\r\n        top50_index = sorted(range(len(rowcopy)), key=lambda i: rowcopy[i], reverse=True)[:50]\r\n        \r\n        #stores 0.02 into the top 50 stocks of df_Allo\r\n        for i in top50_index:\r\n            df_Allo.iloc[row,i] = 0.02\r\n\r\n    return df_Allo\r\n\r\nport = alphien.portfolio.Portfolio(tickersList)\r\nport.addFeatures()\r\nport.payout(payout)\r\nport.evaluate(zoom=\"fc00:e968:6179::de52:7100\")\r\nport.backtest()\r\n", "id": "2824221", "language": "Python", "matching_score": 1.0495864152908325, "max_stars_count": 4, "path": "payout.py" }, { "content": "\"\"\"\nThe base class for modifying an image in the imager application.\n\nWhen we work with an image, we like to have an edit history. An edit history \nkeeps track of all modifications of an original image. It allows for \n(step-by-step) undos of any changes. The class in this module provides an\nedit history. The filter functions are in a subclass of this class so that \nthey can take advantage of the edit history.\n\nBased on an original file by <NAME> (dck10) and <NAME> (wmw2)\n\nAuthor: <NAME> (wmw2)\nDate: October 29, 2019\n\"\"\"\n# THIS FILE IS COMPLETE. DO NOT MODIFY THIS FILE AT ALL\nimport a6image\n\n\nclass Editor(object):\n    \"\"\"\n    A class that keeps track of edits from an original image.\n    \n    This class is what allows us to implement the Undo functionality in our \n    application. It separates the image into the original (saved) image and \n    the current modification. It also keeps track of all edits in-between\n    (up to a maximum of MAX_HISTORY edits) in order. It can undo any of\n    these edits, rolling the current image back.\n    \n    If the number of edits exceeds MAX_HISTORY, the oldest edit will be\n    deleted. \n    \n    Attribute MAX_HISTORY: A CLASS ATTRIBUTE for the maximum number of edits\n    Invariant: MAX_HISTORY is an int > 0\n    \"\"\"\n    # IMMUTABLE ATTRIBUTES (Fixed after initialization)\n    # Attribute _original: The original image \n    # Invariant: _original is an Image object\n    #\n    # Attribute _history: The edit history\n    # Invariant: _history is a non-empty list of Image objects. 
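# The bounded edit-history protocol described above, sketched with plain ints
# in place of Image copies so the aging and undo behavior is visible.
# MAX_HISTORY = 3 here is illustrative (the class uses 20).
MAX_HISTORY = 3
history = [0]                      # starts with a copy of the original
for edit in (1, 2, 3, 4):
    history.append(edit)           # increment(): push a new copy
    if len(history) > MAX_HISTORY:
        history.pop(0)             # oldest edit is dropped
print(history)                     # [2, 3, 4] -- the original 0 aged out
if len(history) > 1:
    history.pop()                  # undo(): roll back the latest edit
print(history)                     # [2, 3]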
In addition, \n    #the length of _history should never be longer than MAX_HISTORY.\n    \n    # The number of edits that we are allowed to keep track of.\n    # (THIS GOES IN CLASS FOLDER)\n    MAX_HISTORY = 20\n    \n    # GETTERS\n    def getOriginal(self):\n        \"\"\"\n        Returns the original image\n        \"\"\"\n        return self._original\n    \n    def getCurrent(self):\n        \"\"\"\n        Returns the most recent edit\n        \"\"\"\n        return self._history[-1]\n    \n    # INITIALIZER\n    def __init__(self,original):\n        \"\"\"\n        Initializes an edit history for the given image.\n        \n        The edit history starts with exactly one element, which is an \n        (unedited) copy of the original image.\n        \n        Parameter original: The image to edit\n        Precondition: original is an Image object\n        \"\"\"\n        assert isinstance(original,a6image.Image), repr(original)+' is not an image'\n        self._original = original\n        self._history = [original.copy()]\n    \n    # EDIT METHODS\n    def undo(self):\n        \"\"\"\n        Returns True if the latest edit can be undone, False otherwise.\n        \n        This method attempts to undo the latest element by removing the last \n        element of the edit history. However, the edit history can never\n        be empty. If this method is called on an edit history of one element,\n        this method returns False instead.\n        \"\"\"\n        if len(self._history) > 1:\n            self._history.pop()\n            return True\n        return False\n    \n    def clear(self):\n        \"\"\"\n        Deletes the entire edit history, restoring the original image.\n        \n        When this method completes, the object should have the same values that \n        it did once it was first initialized.\n        \"\"\"\n        self._history = [self._original.copy()]\n    \n    def increment(self):\n        \"\"\"\n        Adds a new copy of the image to the edit history.\n        \n        This method copies the current most recent edit and adds it to the \n        end of the history. If this causes the history to grow larger than \n        MAX_HISTORY, this method deletes the oldest edit.\n        \"\"\"\n        self._history.append(self.getCurrent().copy())\n        if len(self._history) > self.MAX_HISTORY:\n            self._history.pop(0)\n\n", "id": "10912353", "language": "Python", "matching_score": 0.8764241933822632, "max_stars_count": 1, "path": "RGB Image/imager/a6editor.py" }, { "content": "\"\"\"\nAn application for processing images\n\nThis file is the main entry-point for the imager application. When you 'run the folder',\nthis is the file that is executed. This file works as a traffic cop that directs the \napplication to the correct entry point. It allows you to launch the GUI, or to do \nsomething simple from the command line.\n\nAuthor: <NAME> (wmw2)\nDate: October 29, 2019\n\"\"\"\n# To handle command line options\nimport argparse\n\n# This is necessary to prevent conflicting command line arguments\nimport os\nos.environ[\"KIVY_NO_ARGS\"] = \"1\"\n\n\ndef parse():\n    \"\"\"\n    Returns: the command line arguments\n    \n    This function uses argparse to handle the command line arguments. 
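# The parser defined below accepts an optional image argument plus -t/--test
# and -g/--grade flags, so typical invocations look like the following
# (shell commands shown as comments; 'photo.png' is a placeholder name):
#
#   python imager                # launch the GUI with the default image
#   python imager photo.png      # launch the GUI on a specific file
#   python imager -t             # run the Image/Editor unit tests
#   python imager -g photo.png   # grade the assignment (if installed)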
The benefit of\n argparse is the built-in error checking and help menu.\n \"\"\"\n parser = argparse.ArgumentParser(prog='imager',description='Application to process an image file.')\n parser.add_argument('image', type=str, nargs='?', help='the image file to process')\n parser.add_argument('-t','--test', action='store_true', help='run a unit test on Image and Editor')\n parser.add_argument('-g','--grade', action='store_true', help='grade the assignment')\n return parser.parse_args()\n\n\ndef launch(image):\n \"\"\"\n Launches the gui application with the given image and output (if specified)\n \n Parameter image: The image file to use immediately after launch\n Precondition: image is a filename string or None\n \n Parameter output: The output file for saving any changes\n Precondition: output is a filename string or None\n \"\"\"\n from interface import launch\n launch(image)\n\n\ndef unittest():\n \"\"\"\n Runs a unittest on the Image and Editor classes\n \"\"\"\n from a6test import test_all\n test_all()\n\n\ndef grade(image):\n \"\"\"\n Grades the assignment.\n \n Parameter output: The output file for storing feedback\n Precondition: output is a filename string or None\n \"\"\"\n try:\n import grade\n grade.grade(image)\n except:\n print('The grading program is not currently installed.')\n\n\ndef execute():\n \"\"\"\n Executes the application, according to the command line arguments specified.\n \"\"\"\n args = parse()\n \n image = args.image\n \n # Switch on the options\n if args.test:\n unittest()\n elif args.grade:\n grade(image)\n else:\n launch(image)\n\n# Do it\nexecute()", "id": "9260026", "language": "Python", "matching_score": 0.4719249904155731, "max_stars_count": 1, "path": "RGB Image/imager/__main__.py" }, { "content": "import xlwings as xw\r\nfrom datetime import date\r\n\r\n#for getImpMove\r\nimport pyautogui\r\nimport numpy as np\r\nfrom PIL import ImageGrab\r\nimport pytesseract\r\nfrom PIL import Image, ImageEnhance, ImageFilter\r\nfrom textblob import TextBlob\r\npytesseract.pytesseract.tesseract_cmd = 'C:\\\\Program Files\\\\Tesseract-OCR\\\\tesseract.exe'\r\n\r\n@xw.func\r\ndef convertToRTD (attr, ticker):\r\n\treturn '=RTD(\"tos.rtd\", , \"'+attr+'\", \"'+ticker+'\")'\r\n\r\n@xw.func\r\ndef getShortStrike (ticker):\r\n\tif 'C' in ticker:\r\n\t\treturn ticker[ticker.find('C')+1:ticker.find('-')]\r\n\telse:\r\n\t\treturn ticker[ticker.find('-')-3:ticker.find('-')]\r\n\r\n@xw.func\r\ndef getLongStrike(ticker):\r\n\tif 'C' in ticker:\r\n\t\treturn ticker[ticker.rfind('C')+1:]\r\n\telse:\r\n\t\treturn ticker[ticker.rfind('P')+1:]\r\n\r\n@xw.func\r\ndef getSide(ticker):\r\n\tif 'C' in ticker:\r\n\t\treturn 'Call'\r\n\telse:\r\n\t\treturn 'Put'\r\n\r\n@xw.func\r\ndef getShortStrikeTicker (ticker):\r\n\treturn ticker[ticker.find('.'):ticker.find('-')]\r\n\r\n@xw.func\r\ndef getLongStrikeTicker (ticker):\r\n\treturn ticker[ticker.find('-')+1:]\r\n\r\n@xw.func\r\ndef getDTE (expir):\r\n\texp = date(int(expir[0:4]),int(expir[5:7]),int(expir[8:]))\r\n\ttoday = date.today()\r\n\tdte = exp - today\r\n\treturn dte.days\r\n\r\n@xw.func\r\ndef getExp (ticker):\r\n\treturn '20'+ticker[4:6]+'-'+ticker[6:8]+'-'+ticker[8:10]\r\n\r\n@xw.func\r\ndef getImpMove ():\r\n\t# Grab some screen\r\n\tscreen = pyautogui.screenshot(region = (3592,1088,246,38))\r\n\t# Make greyscale\r\n\tw = screen.convert('L')\r\n\t# Save so we can see what we grabbed\r\n\tw.save('grabbed.png')\r\n\tw.show()\r\n\ttext = pytesseract.image_to_string(w)\r\n\tcorrectedText = 
TextBlob(text).correct()\r\n\tprint(correctedText)\r\n\r\n # tl 3592 1088\r\n # tr 3838 1088\r\n # bl 3592 1050\r\n # br 3838 1050\r\n", "id": "4688183", "language": "Python", "matching_score": 0.11039067059755325, "max_stars_count": 2, "path": "xlFunctions.py" } ]
1.600239
RiccardoGrin
[ { "content": "import torch\nimport torch.distributed as dist\nfrom torch.nn.modules import Module\nimport sys\nfrom torch.autograd import Variable\n\ndef _flatten_dense_tensors(tensors):\n \"\"\"Flatten dense tensors into a contiguous 1D buffer. Assume tensors are of\n same dense type.\n Since inputs are dense, the resulting tensor will be a concatenated 1D\n buffer. Element-wise operation on this buffer will be equivalent to\n operating individually.\n Arguments:\n tensors (Iterable[Tensor]): dense tensors to flatten.\n Returns:\n A contiguous 1D buffer containing input tensors.\n \"\"\"\n if len(tensors) == 1:\n return tensors[0].contiguous().view(-1)\n flat = torch.cat([t.contiguous().view(-1).float() for t in tensors], dim=0)\n return flat\n\ndef _unflatten_dense_tensors(flat, tensors):\n \"\"\"View a flat buffer using the sizes of tensors. Assume that tensors are of\n same dense type, and that flat is given by _flatten_dense_tensors.\n Arguments:\n flat (Tensor): flattened dense tensors to unflatten.\n tensors (Iterable[Tensor]): dense tensors whose sizes will be used to\n unflatten flat.\n Returns:\n Unflattened dense tensors with sizes same as tensors and values from\n flat.\n \"\"\"\n outputs = []\n offset = 0\n for tensor in tensors:\n numel = tensor.numel()\n outputs.append(flat.narrow(0, offset, numel).view_as(tensor))\n offset += numel\n return tuple(outputs)\n\n\n'''\nThis version of DistributedDataParallel is designed to be used in conjunction with the multiproc.py\nlauncher included with this example. It assumes that your run is using multiprocess with 1\nGPU/process, that the model is on the correct device, and that torch.set_device has been\nused to set the device.\n\nParameters are broadcasted to the other processes on initialization of DistributedDataParallel,\nand will be allreduced at the finish of the backward pass.\n'''\nclass DistributedDataParallel(Module):\n\n def __init__(self, module):\n super(DistributedDataParallel, self).__init__()\n #fallback for PyTorch 0.3\n if not hasattr(dist, '_backend'):\n self.warn_on_half = True\n else:\n self.warn_on_half = True if dist._backend == dist.dist_backend.GLOO else False\n\n self.module = module\n\n for p in self.module.state_dict().values():\n if not torch.is_tensor(p):\n continue\n dist.broadcast(p, 0)\n\n def allreduce_params():\n if(self.needs_reduction):\n self.needs_reduction = False\n buckets = {}\n for param in self.module.parameters():\n if param.requires_grad and param.grad is not None:\n tp = type(param.data)\n if tp not in buckets:\n buckets[tp] = []\n buckets[tp].append(param)\n if self.warn_on_half:\n if torch.cuda.HalfTensor in buckets:\n print(\"WARNING: gloo dist backend for half parameters may be extremely slow.\" +\n \" It is recommended to use the NCCL backend in this case. 
This currently requires\" +\n \"PyTorch built from top of tree master.\")\n self.warn_on_half = False\n\n for tp in buckets:\n bucket = buckets[tp]\n grads = [param.grad.data for param in bucket]\n coalesced = _flatten_dense_tensors(grads)\n dist.all_reduce(coalesced)\n coalesced /= dist.get_world_size()\n for buf, synced in zip(grads, _unflatten_dense_tensors(coalesced, grads)):\n buf.copy_(synced)\n\n #print(\"LIST\", list(self.module.parameters())) \n for param in list(self.module.parameters()):\n def allreduce_hook(*unused):\n Variable._execution_engine.queue_callback(allreduce_params)\n if param.requires_grad:\n param.register_hook(allreduce_hook)\n\n def forward(self, *inputs, **kwargs):\n self.needs_reduction = True\n return self.module(*inputs, **kwargs)\n\n '''\n def _sync_buffers(self):\n buffers = list(self.module._all_buffers())\n if len(buffers) > 0:\n # cross-node buffer sync\n flat_buffers = _flatten_dense_tensors(buffers)\n dist.broadcast(flat_buffers, 0)\n for buf, synced in zip(buffers, _unflatten_dense_tensors(flat_buffers, buffers)):\n buf.copy_(synced)\n def train(self, mode=True):\n # Clear NCCL communicator and CUDA event cache of the default group ID,\n # These cache will be recreated at the later call. This is currently a\n # work-around for a potential NCCL deadlock.\n if dist._backend == dist.dist_backend.NCCL:\n dist._clear_group_cache()\n super(DistributedDataParallel, self).train(mode)\n self.module.train(mode)\n '''\n", "id": "2957932", "language": "Python", "matching_score": 0, "max_stars_count": 2, "path": "distributed.py" } ]
0
faceless2
[ { "content": "#!/usr/bin/python\n\n# Copyright (c) 2013-2015 Mozilla Foundation\n#\n# Permission is hereby granted, free of charge, to any person obtaining a \n# copy of this software and associated documentation files (the \"Software\"), \n# to deal in the Software without restriction, including without limitation \n# the rights to use, copy, modify, merge, publish, distribute, sublicense, \n# and/or sell copies of the Software, and to permit persons to whom the \n# Software is furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice shall be included in \n# all copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR \n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, \n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL \n# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER \n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING \n# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER \n# DEALINGS IN THE SOFTWARE.\n\nimport json\n\nclass Label:\n def __init__(self, label, preferred):\n self.label = label\n self.preferred = preferred\n def __cmp__(self, other):\n return cmp(self.label, other.label)\n\n# If a multi-byte encoding is on this list, it is assumed to have a\n# non-generated decoder implementation class. Otherwise, the JDK default\n# decoder is used as a placeholder.\nMULTI_BYTE_DECODER_IMPLEMENTED = [\n u\"x-user-defined\",\n u\"replacement\",\n u\"big5\",\n]\n\nMULTI_BYTE_ENCODER_IMPLEMENTED = [\n u\"big5\",\n]\n\npreferred = []\n\nlabels = []\n\ndata = json.load(open(\"../encoding/encodings.json\", \"r\"))\n\nindexes = json.load(open(\"../encoding/indexes.json\", \"r\"))\n\nsingle_byte = []\n\nmulti_byte = []\n\ndef to_camel_name(name):\n if name == u\"iso-8859-8-i\":\n return u\"Iso8I\"\n if name.startswith(u\"iso-8859-\"):\n return name.replace(u\"iso-8859-\", u\"Iso\")\n return name.title().replace(u\"X-\", u\"\").replace(u\"-\", u\"\").replace(u\"_\", u\"\")\n\ndef to_constant_name(name):\n return name.replace(u\"-\", u\"_\").upper()\n\n# Encoding.java\n\nfor group in data:\n if group[\"heading\"] == \"Legacy single-byte encodings\":\n single_byte = group[\"encodings\"]\n else:\n multi_byte.extend(group[\"encodings\"])\n for encoding in group[\"encodings\"]:\n preferred.append(encoding[\"name\"])\n for label in encoding[\"labels\"]:\n labels.append(Label(label, encoding[\"name\"]))\n\npreferred.sort()\nlabels.sort()\n\nlabel_file = open(\"src/nu/validator/encoding/Encoding.java\", \"w\")\n\nlabel_file.write(\"\"\"/*\n * Copyright (c) 2015 Mozilla Foundation\n *\n * Permission is hereby granted, free of charge, to any person obtaining a \n * copy of this software and associated documentation files (the \"Software\"), \n * to deal in the Software without restriction, including without limitation \n * the rights to use, copy, modify, merge, publish, distribute, sublicense, \n * and/or sell copies of the Software, and to permit persons to whom the \n * Software is furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in \n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR \n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, \n * FITNESS FOR A 
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL \n * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER \n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING \n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER \n * DEALINGS IN THE SOFTWARE.\n */\n\npackage nu.validator.encoding;\n\nimport java.nio.charset.Charset;\nimport java.nio.charset.CharsetEncoder;\nimport java.nio.charset.IllegalCharsetNameException;\nimport java.nio.charset.UnsupportedCharsetException;\nimport java.nio.charset.spi.CharsetProvider;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.SortedMap;\nimport java.util.TreeMap;\n\n/**\n * Represents an <a href=\"https://encoding.spec.whatwg.org/#encoding\">encoding</a>\n * as defined in the <a href=\"https://encoding.spec.whatwg.org/\">Encoding\n * Standard</a>, provides access to each encoding defined in the Encoding\n * Standard via a static constant and provides the \n * \"<a href=\"https://encoding.spec.whatwg.org/#concept-encoding-get\">get an \n * encoding</a>\" algorithm defined in the Encoding Standard.\n * \n * <p>This class inherits from {@link Charset} to allow the Encoding \n * Standard-compliant encodings to be used in contexts that support\n * <code>Charset</code> instances. However, by design, the Encoding \n * Standard-compliant encodings are not supplied via a {@link CharsetProvider}\n * and, therefore, are not available via and do not interfere with the static\n * methods provided by <code>Charset</code>. (This class provides methods of\n * the same name to hide each static method of <code>Charset</code> to help\n * avoid accidental calls to the static methods of the superclass when working\n * with Encoding Standard-compliant encodings.)\n * \n * <p>When an application needs to use a particular encoding, such as utf-8\n * or windows-1252, the corresponding constant, i.e.\n * {@link #UTF_8 Encoding.UTF_8} and {@link #WINDOWS_1252 Encoding.WINDOWS_1252}\n * respectively, should be used. However, when the application receives an\n * encoding label from external input, the method {@link #forName(String) \n * forName()} should be used to obtain the object representing the encoding \n * identified by the label. 
In contexts where labels that map to the \n * <a href=\"https://encoding.spec.whatwg.org/#replacement\">replacement\n * encoding</a> should be treated as unknown, the method {@link\n * #forNameNoReplacement(String) forNameNoReplacement()} should be used instead.\n * \n * \n * @author hsivonen\n */\npublic abstract class Encoding extends Charset {\n\n private static final String[] LABELS = {\n\"\"\")\n\nfor label in labels:\n label_file.write(\" \\\"%s\\\",\\n\" % label.label)\n\nlabel_file.write(\"\"\" };\n \n private static final Encoding[] ENCODINGS_FOR_LABELS = {\n\"\"\")\n\nfor label in labels:\n label_file.write(\" %s.INSTANCE,\\n\" % to_camel_name(label.preferred))\n\nlabel_file.write(\"\"\" };\n\n private static final Encoding[] ENCODINGS = {\n\"\"\")\n\nfor label in preferred:\n label_file.write(\" %s.INSTANCE,\\n\" % to_camel_name(label))\n \nlabel_file.write(\"\"\" };\n\n\"\"\")\n\nfor label in preferred:\n label_file.write(\"\"\" /**\n * The %s encoding.\n */\n public static final Encoding %s = %s.INSTANCE;\n\n\"\"\" % (label, to_constant_name(label), to_camel_name(label)))\n \nlabel_file.write(\"\"\"\nprivate static SortedMap<String, Charset> encodings = null;\n\n protected Encoding(String canonicalName, String[] aliases) {\n super(canonicalName, aliases);\n }\n\n private enum State {\n HEAD, LABEL, TAIL\n };\n\n public static Encoding forName(String label) {\n if (label == null) {\n throw new IllegalArgumentException(\"Label must not be null.\");\n }\n if (label.length() == 0) {\n throw new IllegalCharsetNameException(label);\n }\n // First try the fast path\n int index = Arrays.binarySearch(LABELS, label);\n if (index >= 0) {\n return ENCODINGS_FOR_LABELS[index];\n }\n // Else, slow path\n StringBuilder sb = new StringBuilder();\n State state = State.HEAD;\n for (int i = 0; i < label.length(); i++) {\n char c = label.charAt(i);\n if ((c == ' ') || (c == '\\\\n') || (c == '\\\\r') || (c == '\\\\t')\n || (c == '\\\\u000C')) {\n if (state == State.LABEL) {\n state = State.TAIL;\n }\n continue;\n }\n if ((c >= 'a' && c <= 'z') || (c >= '0' && c <= '9')) {\n switch (state) {\n case HEAD:\n state = State.LABEL;\n // Fall through\n case LABEL:\n sb.append(c);\n continue;\n case TAIL:\n throw new IllegalCharsetNameException(label);\n }\n }\n if (c >= 'A' && c <= 'Z') {\n c += 0x20;\n switch (state) {\n case HEAD:\n state = State.LABEL;\n // Fall through\n case LABEL:\n sb.append(c);\n continue;\n case TAIL:\n throw new IllegalCharsetNameException(label);\n }\n }\n if ((c == '-') || (c == '+') || (c == '.') || (c == ':')\n || (c == '_')) {\n switch (state) {\n case LABEL:\n sb.append(c);\n continue;\n case HEAD:\n case TAIL:\n throw new IllegalCharsetNameException(label);\n }\n }\n throw new IllegalCharsetNameException(label);\n }\n index = Arrays.binarySearch(LABELS, sb.toString());\n if (index >= 0) {\n return ENCODINGS_FOR_LABELS[index];\n }\n throw new UnsupportedCharsetException(label);\n }\n\n public static Encoding forNameNoReplacement(String label) {\n Encoding encoding = Encoding.forName(label);\n if (encoding == Encoding.REPLACEMENT) {\n throw new UnsupportedCharsetException(label); \n }\n return encoding;\n }\n\n public static boolean isSupported(String label) {\n try {\n Encoding.forName(label);\n } catch (UnsupportedCharsetException e) {\n return false;\n }\n return true;\n }\n\n public static boolean isSupportedNoReplacement(String label) {\n try {\n Encoding.forNameNoReplacement(label);\n } catch (UnsupportedCharsetException e) {\n return false;\n }\n return true;\n 
}\n\n public static SortedMap<String, Charset> availableCharsets() {\n if (encodings == null) {\n TreeMap<String, Charset> map = new TreeMap<String, Charset>();\n for (Encoding encoding : ENCODINGS) {\n map.put(encoding.name(), encoding);\n }\n encodings = Collections.unmodifiableSortedMap(map);\n }\n return encodings;\n }\n\n public static Encoding defaultCharset() {\n return WINDOWS_1252;\n }\n\n @Override public boolean canEncode() {\n return false;\n }\n\n @Override public boolean contains(Charset cs) {\n return false;\n }\n\n @Override public CharsetEncoder newEncoder() {\n throw new UnsupportedOperationException(\"Encoder not implemented.\");\n }\n}\n\"\"\")\n\nlabel_file.close()\n\n# Single-byte encodings\n\nfor encoding in single_byte:\n name = encoding[\"name\"]\n labels = encoding[\"labels\"]\n labels.sort()\n class_name = to_camel_name(name)\n mapping_name = name\n if mapping_name == u\"iso-8859-8-i\":\n mapping_name = u\"iso-8859-8\"\n mapping = indexes[mapping_name]\n class_file = open(\"src/nu/validator/encoding/%s.java\" % class_name, \"w\")\n class_file.write('''/*\n * Copyright (c) 2013-2015 Mozilla Foundation\n *\n * Permission is hereby granted, free of charge, to any person obtaining a \n * copy of this software and associated documentation files (the \"Software\"), \n * to deal in the Software without restriction, including without limitation \n * the rights to use, copy, modify, merge, publish, distribute, sublicense, \n * and/or sell copies of the Software, and to permit persons to whom the \n * Software is furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in \n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR \n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, \n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL \n * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER \n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING \n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER \n * DEALINGS IN THE SOFTWARE.\n */\n\n/*\n * THIS IS A GENERATED FILE. 
PLEASE DO NOT EDIT.\n * Instead, please regenerate using generate-encoding-data.py\n */\n\npackage nu.validator.encoding;\n\nimport java.nio.charset.CharsetDecoder;\n\nclass ''')\n class_file.write(class_name)\n class_file.write(''' extends Encoding {\n\n private static final char[] TABLE = {''')\n fallible = False\n comma = False\n for code_point in mapping:\n # XXX should we have error reporting?\n if not code_point:\n code_point = 0xFFFD\n fallible = True\n if comma:\n class_file.write(\",\")\n class_file.write(\"\\n '\\u%04x'\" % code_point);\n comma = True \n class_file.write('''\n };\n \n private static final String[] LABELS = {''')\n\n comma = False\n for label in labels:\n if comma:\n class_file.write(\",\")\n class_file.write(\"\\n \\\"%s\\\"\" % label);\n comma = True \n class_file.write('''\n };\n \n private static final String NAME = \"''')\n class_file.write(name)\n class_file.write('''\";\n \n static final Encoding INSTANCE = new ''')\n class_file.write(class_name)\n class_file.write('''();\n \n private ''')\n class_file.write(class_name)\n class_file.write('''() {\n super(NAME, LABELS);\n }\n\n @Override public CharsetDecoder newDecoder() {\n return new ''')\n class_file.write(\"Fallible\" if fallible else \"Infallible\")\n class_file.write('''SingleByteDecoder(this, TABLE);\n }\n\n}\n''')\n class_file.close()\n\n# Multi-byte encodings\n\nfor encoding in multi_byte:\n name = encoding[\"name\"]\n labels = encoding[\"labels\"]\n labels.sort()\n class_name = to_camel_name(name)\n class_file = open(\"src/nu/validator/encoding/%s.java\" % class_name, \"w\")\n class_file.write('''/*\n * Copyright (c) 2013-2015 Mozilla Foundation\n *\n * Permission is hereby granted, free of charge, to any person obtaining a \n * copy of this software and associated documentation files (the \"Software\"), \n * to deal in the Software without restriction, including without limitation \n * the rights to use, copy, modify, merge, publish, distribute, sublicense, \n * and/or sell copies of the Software, and to permit persons to whom the \n * Software is furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in \n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR \n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, \n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL \n * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER \n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING \n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER \n * DEALINGS IN THE SOFTWARE.\n */\n\n/*\n * THIS IS A GENERATED FILE. 
PLEASE DO NOT EDIT.\n * Instead, please regenerate using generate-encoding-data.py\n */\n\npackage nu.validator.encoding;\n\nimport java.nio.charset.Charset;\nimport java.nio.charset.CharsetDecoder;\nimport java.nio.charset.CharsetEncoder;\n\nclass ''')\n class_file.write(class_name)\n class_file.write(''' extends Encoding {\n\n private static final String[] LABELS = {''')\n\n comma = False\n for label in labels:\n if comma:\n class_file.write(\",\")\n class_file.write(\"\\n \\\"%s\\\"\" % label);\n comma = True \n class_file.write('''\n };\n \n private static final String NAME = \"''')\n class_file.write(name)\n class_file.write('''\";\n \n static final ''')\n class_file.write(class_name)\n class_file.write(''' INSTANCE = new ''')\n class_file.write(class_name)\n class_file.write('''();\n \n private ''')\n class_file.write(class_name)\n class_file.write('''() {\n super(NAME, LABELS);\n }\n\n @Override public CharsetDecoder newDecoder() {\n ''')\n if name == \"gbk\":\n class_file.write('''return Charset.forName(\"gb18030\").newDecoder();''') \n elif name in MULTI_BYTE_DECODER_IMPLEMENTED:\n class_file.write(\"return new %sDecoder(this);\" % class_name)\n else:\n class_file.write('''return Charset.forName(NAME).newDecoder();''')\n class_file.write('''\n }\n\n @Override public CharsetEncoder newEncoder() {\n ''')\n if name in MULTI_BYTE_ENCODER_IMPLEMENTED:\n class_file.write(\"return new %sEncoder(this);\" % class_name)\n else:\n class_file.write('''return Charset.forName(NAME).newEncoder();''')\n class_file.write('''\n }\n}\n''')\n class_file.close()\n\n# Big5\n\ndef null_to_zero(code_point):\n if not code_point:\n code_point = 0\n return code_point\n\nindex = []\n\nfor code_point in indexes[\"big5\"]:\n index.append(null_to_zero(code_point)) \n\n# There are four major gaps consisting of more than 4 consecutive invalid pointers\ngaps = []\nconsecutive = 0\nconsecutive_start = 0\noffset = 0\nfor code_point in index:\n if code_point == 0:\n if consecutive == 0:\n consecutive_start = offset\n consecutive +=1\n else:\n if consecutive > 4:\n gaps.append((consecutive_start, consecutive_start + consecutive))\n consecutive = 0\n offset += 1\n\ndef invert_ranges(ranges, cap):\n inverted = []\n invert_start = 0\n for (start, end) in ranges:\n if start != 0:\n inverted.append((invert_start, start))\n invert_start = end\n inverted.append((invert_start, cap))\n return inverted\n\ncap = len(index)\nranges = invert_ranges(gaps, cap)\n\n# Now compute a compressed lookup table for astralness\n\ngaps = []\nconsecutive = 0\nconsecutive_start = 0\noffset = 0\nfor code_point in index:\n if code_point <= 0xFFFF:\n if consecutive == 0:\n consecutive_start = offset\n consecutive +=1\n else:\n if consecutive > 40:\n gaps.append((consecutive_start, consecutive_start + consecutive))\n consecutive = 0\n offset += 1\n\nastral_ranges = invert_ranges(gaps, cap)\n\nclass_file = open(\"src/nu/validator/encoding/Big5Data.java\", \"w\")\nclass_file.write('''/*\n * Copyright (c) 2015 Mozilla Foundation\n *\n * Permission is hereby granted, free of charge, to any person obtaining a \n * copy of this software and associated documentation files (the \"Software\"), \n * to deal in the Software without restriction, including without limitation \n * the rights to use, copy, modify, merge, publish, distribute, sublicense, \n * and/or sell copies of the Software, and to permit persons to whom the \n * Software is furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice 
shall be included in \n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR \n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, \n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL \n * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER \n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING \n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER \n * DEALINGS IN THE SOFTWARE.\n */\n\n/*\n * THIS IS A GENERATED FILE. PLEASE DO NOT EDIT.\n * Instead, please regenerate using generate-encoding-data.py\n */\n\npackage nu.validator.encoding;\n\nfinal class Big5Data {\n\n private static final String ASTRALNESS = \"''')\n\nbits = []\nfor (low, high) in astral_ranges:\n for i in xrange(low, high):\n bits.append(1 if index[i] > 0xFFFF else 0)\n# pad length to multiple of 16\nfor j in xrange(16 - (len(bits) % 16)):\n bits.append(0)\n\ni = 0\nwhile i < len(bits):\n accu = 0\n for j in xrange(16):\n accu |= bits[i + j] << j\n if accu == 0x22:\n class_file.write('\\\\\"')\n else:\n class_file.write('\\\\u%04X' % accu)\n i += 16\n\nclass_file.write('''\";\n\n''')\n\nj = 0\nfor (low, high) in ranges:\n class_file.write(''' private static final String TABLE%d = \"''' % j)\n for i in xrange(low, high):\n class_file.write('\\\\u%04X' % (index[i] & 0xFFFF))\n class_file.write('''\";\n\n''')\n j += 1\n\nclass_file.write(''' private static boolean readBit(int i) {\n return (ASTRALNESS.charAt(i >> 4) & (1 << (i & 0xF))) != 0;\n }\n\n static char lowBits(int pointer) {\n''')\n\nj = 0\nfor (low, high) in ranges:\n class_file.write(''' if (pointer < %d) {\n return '\\\\u0000';\n }\n if (pointer < %d) {\n return TABLE%d.charAt(pointer - %d);\n }\n''' % (low, high, j, low))\n j += 1\n\nclass_file.write(''' return '\\\\u0000';\n }\n\n static boolean isAstral(int pointer) {\n''')\n\nbase = 0\nfor (low, high) in astral_ranges:\n if high - low == 1:\n class_file.write(''' if (pointer < %d) {\n return false;\n }\n if (pointer == %d) {\n return true;\n }\n''' % (low, low))\n else:\n class_file.write(''' if (pointer < %d) {\n return false;\n }\n if (pointer < %d) {\n return readBit(%d + (pointer - %d));\n }\n''' % (low, high, base, low))\n base += (high - low)\n\nclass_file.write(''' return false;\n }\n\n public static int findPointer(char lowBits, boolean isAstral) {\n if (!isAstral) {\n switch (lowBits) {\n''')\n\nhkscs_bound = (0xA1 - 0x81) * 157\n\nprefer_last = [\n 0x2550,\n 0x255E,\n 0x2561,\n 0x256A,\n 0x5341,\n 0x5345,\n]\n\nfor code_point in prefer_last:\n # Python lists don't have .rindex() :-(\n for i in xrange(len(index) - 1, -1, -1):\n candidate = index[i]\n if candidate == code_point:\n class_file.write(''' case 0x%04X:\n return %d;\n''' % (code_point, i))\n break\n\nclass_file.write(''' default:\n break;\n }\n }''')\n\nj = 0\nfor (low, high) in ranges:\n if high > hkscs_bound:\n start = 0\n if low <= hkscs_bound and hkscs_bound < high:\n # This is the first range we don't ignore and the\n # range that contains the first non-HKSCS pointer.\n # Avoid searching HKSCS.\n start = hkscs_bound - low\n class_file.write('''\n for (int i = %d; i < TABLE%d.length(); i++) {\n if (TABLE%d.charAt(i) == lowBits) {\n int pointer = i + %d;\n if (isAstral == isAstral(pointer)) {\n return pointer;\n }\n }\n }''' % (start, j, j, low))\n j += 1\n\nclass_file.write('''\n return 0;\n }\n}\n''')\nclass_file.close()\n", "id": "8967730", 
"language": "Python", "matching_score": 0, "max_stars_count": 0, "path": "generate-encoding-data.py" } ]
0
jayashankar987
[ { "content": "import string\nfrom typing import Final\nfrom colorama import Fore, Back, Style\nimport os\n\nINVALID: Final = -1\nLONG_POSITION: Final = 1\nSHORT_POSITION: Final = 2\n\n\ndef showError(msg: string):\n print(Fore.WHITE + Back.RED + \"********************** {msg} **********************\", msg)\n\ndef getEntryType() -> int:\n buyOrSell = INVALID\n try:\n buyOrSell = float(showInput(\"Enter 1 for BUY Or 2 for SELL\"))\n if (buyOrSell != 1 and buyOrSell != 2) :\n showError(\"INVALID TRADE TYPE INPUT!!!\")\n return getEntryType()\n else:\n return buyOrSell\n except:\n showError(\"INVALID TRADE TYPE INPUT!!!\")\n return getEntryType()\n\n\ndef getLastTradedPrice() -> float:\n try:\n currentPrice = float(showInput(\"Enter LTP of the Stock: \"))\n if(currentPrice <= 0):\n showError(\"INVALID LTP INPUT!!!\")\n return getLastTradedPrice()\n return currentPrice\n except:\n showError(\"INVALID LTP INPUT!!!\")\n return getLastTradedPrice()\n\n \n\ndef getTargetProfitPercentage() -> float :\n \n try:\n profitPercentage = float(showInput(\"Enter TARGET PROFIT Raning from 1.....100\"))\n if (profitPercentage <= 0):\n showError(\"EXPECTING A TARGET PERCENTAGE in RANGE of 1, 2, 3, .... 100\")\n return getTargetProfitPercentage()\n return profitPercentage\n except:\n showError(\"INVALID TARGET INPUT!!!\")\n return getTargetProfitPercentage()\n\ndef getStopLossPercentage() -> float :\n \n try:\n stopLossPercentage = float(showInput(\"Enter STOP LOSS Raning from 1.....100\"))\n if (stopLossPercentage <= 0):\n showError(\"EXPECTING A STOP LOSS PERCENTAGE in RANGE of 1, 2, 3, .... 100\")\n return getTargetProfitPercentage()\n return stopLossPercentage\n except:\n showError(\"INVALID STOPLOSS INPUT!!!\")\n return getStopLossPercentage()\n\ndef showInput(message: string) -> input:\n print(Style.RESET_ALL)\n return input(message + \": \")\n\nos.system('cls' if os.name == 'nt' else 'clear')\ntradeEntryType = getEntryType()\ncurrentPrice = getLastTradedPrice()\nexpectedProfitPercentage = getTargetProfitPercentage()\nexpectedStoplossPercentage = getStopLossPercentage()\ntarget = stopLoss = float(INVALID)\n\nif (LONG_POSITION == tradeEntryType):\n target = currentPrice + (currentPrice * expectedProfitPercentage * 0.01)\n stoploss = currentPrice - (currentPrice * expectedStoplossPercentage * 0.01)\nelif(SHORT_POSITION == tradeEntryType): \n target = currentPrice - (currentPrice * expectedProfitPercentage * 0.01)\n stoploss = currentPrice + (currentPrice * expectedStoplossPercentage * 0.01)\nelse:\n showError(\"INVALID INPUTS\")\n exit\n\nprint(Fore.WHITE + Back.GREEN + \"Expected TARGET Price: \", target)\nprint(Fore.WHITE + Back.RED + \"Expected STOPLOSS Price\", stoploss)\n\n", "id": "10012499", "language": "Python", "matching_score": 4.521275997161865, "max_stars_count": 0, "path": "profitAndLossValue.py" }, { "content": "from ast import Return\nfrom locale import currency\n\nfrom typing import Final\nfrom colorama import Fore, Back, Style\nimport os\n\n\nLONG_POSITION: Final = 1\nSHORT_POSITION: Final = 2\nCE:Final = 1\nPE: Final = 2\nINVALID: Final = -1\n\ngloballongOrShortPosition = peOrCe = INVALID\ncurrntIndex = indexStopLoss = indexTarget = IentryPrice = deltaValue = targetPoints = stoplossPoints = float(INVALID)\n\n\ndef getTradeType():\n global longOrShortPosition\n global peOrCe\n\n longOrShortPosition = int(input(Fore.WHITE + Back.GREEN + \"Enter 1 for Long(buy) and 2 for short(sell): \"))\n peOrCe = int(input(Fore.WHITE + Back.GREEN + \"Enter 1 for CE and 2 for PE Trade: \"))\n 
if(longOrShortPosition != 1 and longOrShortPosition != 2 and peOrCe != 1 and peOrCe != 2):\n print(Fore.WHITE + Back.RED + \"********************** Enter Valid Inputs **********************\")\n getTradeType()\n return\n getCurrentIndex()\n\n\ndef getCurrentIndex():\n global currntIndex\n currntIndex = float(input(Fore.WHITE + Back.GREEN + \"Enter current INDEX value: \"))\n getTradeDetails() \n\n\ndef getTargetIndex():\n global indexTarget\n indexTarget = float(input(Fore.WHITE + Back.GREEN + \"Enter TARGET INDEX value: \"))\n if (longOrShortPosition == LONG_POSITION):\n if(CE == peOrCe and indexTarget < currntIndex):\n print(Fore.WHITE + Back.RED + \"********************** FOR A LONG CALL OPTION: ITS ADVISED TO SELECT A HIGHER TARGET INDEX POINT THAN CURRENT INDEX **********************\")\n getTargetIndex()\n return\n elif (PE == peOrCe and indexTarget > currntIndex):\n print(Fore.WHITE + Back.RED + \"********************** FOR A LONG PUT OPTION: ITS ADVISED TO SELECT A LOWER TARGET INDEX POINT THAN CURRENT INDEX **********************\")\n getTargetIndex()\n return\n\n elif (longOrShortPosition == SHORT_POSITION):\n if (CE == peOrCe and indexTarget > currntIndex):\n print(Fore.WHITE + Back.RED + \"********************** FOR A SHORT CALL OPTION: ITS ADVISED TO SELECT A LOWER TARGET INDEX POINT THAN CURRENT INDEX **********************\")\n getTargetIndex()\n return\n elif (PE == peOrCe and indexTarget < currntIndex):\n print(Fore.WHITE + Back.RED + \"********************** FOR A SHORT PUT OPTION: ITS ADVISED TO SELECT A HIGHER TARGET INDEX POINT THAN CURRENT INDEX **********************\")\n getTargetIndex()\n return\n getStopLossIndex()\n\n \ndef getStopLossIndex():\n global indexStopLoss\n indexStopLoss = float(input(Fore.WHITE + Back.RED + \"Enter STOPLOSS INDEX value: \"))\n if (longOrShortPosition == LONG_POSITION):\n if (CE == peOrCe and indexStopLoss > currntIndex):\n print(Fore.WHITE + Back.RED + \"********************** FOR A LONG CALL OPTION STOP LOSS: ITS ADVISED TO SELECT LOWER INDEX POINT THAN CURRENT INDEX **********************\")\n getStopLossIndex()\n return\n elif (PE == peOrCe and indexStopLoss < currntIndex):\n print(Fore.WHITE + Back.RED + \"********************** FOR A LONG PUT OPTION STOP LOSS: ITS ADVISED TO SELECT HIGHER INDEX POINT THAN CURRENT INDEX **********************\")\n getStopLossIndex()\n return\n elif (longOrShortPosition == SHORT_POSITION):\n if (CE == peOrCe and indexStopLoss < currntIndex):\n print(Fore.WHITE + Back.RED + \"********************** FOR A SHORT CALL OPTION STOP LOSS: ITS ADVISED TO SELECT HIGHER INDEX POINT THAN CURRENT INDEX **********************\")\n getStopLossIndex()\n return\n elif (PE == peOrCe and indexStopLoss > currntIndex):\n print(Fore.WHITE + Back.RED + \"********************** FOR A SHORT PUT OPTION STOP LOSS: ITS ADVISED TO SELECT LOWER INDEX POINT THAN CURRENT INDEX **********************\")\n getStopLossIndex()\n return\n evaluate()\n\n\ndef getDeltaValue():\n global deltaValue\n deltaValue = abs(float(input(Fore.WHITE + Back.GREEN \n + \"Enter DELTA of the INDEX SELECTED: if unware of delta check in sensi bull [[[https://web.sensibull.com/option-chain?expiry=2022-02-10&tradingsymbol=NIFTY]]] \")))\n getTargetIndex()\n\n\ndef getTradeDetails():\n global entryPrice\n entryPrice = float(input(Fore.WHITE + Back.GREEN + \"Enter ENTRY PRICE for selected INDEX: \"))\n if(entryPrice <= 0):\n print(Fore.WHITE + Back.RED + \"********************** POTENTIAL 100% LOSS DETECTED **********************\")\n getTradeDetails()\n 
return\n getDeltaValue()\n\n\ndef evaluate():\n global targetPoints\n global stoplossPoints\n \n if (CE == peOrCe):\n targetPoints = indexTarget - currntIndex\n stoplossPoints = currntIndex - indexStopLoss\n elif (PE == peOrCe):\n targetPoints = currntIndex - indexTarget\n stoplossPoints = indexStopLoss - currntIndex\n else:\n print(Fore.WHITE + Back.RED + \"********************** INVALID PE(Put Option) or CE(Call Option) INPUT **********************\")\n getTradeType()\n showTargetAndStoplossPrices()\n\n \ndef showTargetAndStoplossPrices():\n global optionsTargetPrice\n global optionsStopLossPrice\n optionsTargetPrice = entryPrice + (targetPoints * deltaValue)\n optionsStopLossPrice = entryPrice - (stoplossPoints * deltaValue)\n\nos.system('cls' if os.name == 'nt' else 'clear')\ngetTradeType()\n\nprint(Fore.WHITE + Back.GREEN + \"Your TARGET for entry of option is: \", optionsTargetPrice)\nprint(Fore.WHITE + Back.RED + \"Your STOPLOSS for the entry of option is: \", optionsStopLossPrice)\nprint(Style.RESET_ALL)", "id": "10777080", "language": "Python", "matching_score": 0.3177312910556793, "max_stars_count": 0, "path": "indextargetStoploss.py" }, { "content": "\n\n\ndef evaluateIncrementPercentage():\n oldSalary = float(input(\"Enter old salary: \"))\n newSalary = float(input(\"Enter new salary: \"))\n\n diff = newSalary - oldSalary\n increment = (diff/oldSalary)*100\n print(\"Salary increment percentage = \", increment)\n\n\ndef newSalaryForGivenPercentage():\n currentSalary = float(input(\"Enter the current salary: \"))\n percentageIncrement = float(input(\"Enter the percentage increment: \"))\n\n newSalary = currentSalary + (currentSalary * (percentageIncrement / 100))\n print(f\"New salary after {percentageIncrement} percent increment = \", newSalary)\n\noption = int(input(\"Enter 1 to show increment percentage from new salary \\n Enter 2 to show new salary for percentage increment\"))\nif (option == 1):\n evaluateIncrementPercentage()\nelse:\n newSalaryForGivenPercentage()\n", "id": "10491868", "language": "Python", "matching_score": 0, "max_stars_count": 0, "path": "IncrementPercentage.py" }, { "content": "\nhigh = float(input(\"Enter High: \"))\nlow = float(input(\"Enter Low: \"))\nclose = float(input(\"Enter close: \"))\n\n# Classic floor-pivot formulas; keep prices as floats so decimals are not truncated\npivotPoint = (high + low + close) / 3\nr1 = (2 * pivotPoint) - low\ns1 = (2 * pivotPoint) - high\n\nr2 = pivotPoint + (high - low)\ns2 = pivotPoint - (high - low)\n\nr3 = high + 2 * (pivotPoint - low)\ns3 = low - 2 * (high - pivotPoint)\n\n\nprint(\"Resistance R1: \", r1)\nprint(\"Support S1: \", s1)\nprint()\nprint(\"Resistance R2: \", r2)\nprint(\"Support S2: \", s2)\nprint()\nprint(\"Resistance R3: \", r3)\nprint(\"Support S3: \", s3)", "id": "1166269", "language": "Python", "matching_score": 0, "max_stars_count": 0, "path": "supportAndResistance.py" } ]
0.158866
vutsalsinghal
[ { "content": "from .set_col import set_col\n\ndef lower_case(df, columns=\"*\", summary=False):\n func = lambda cell: cell.lower() if cell is not None else cell\n \n if summary:\n return set_col(df, columns, func, 'string', True)\n return set_col(df, columns, func, 'string', False)", "id": "2270697", "language": "Python", "matching_score": 0.9396148920059204, "max_stars_count": 1, "path": "cleanflow/preprocessing/lower_case.py" }, { "content": "from .drop_col import drop_col\nfrom .trim_col import trim_col\nfrom .lower_case import lower_case\nfrom .upper_case import upper_case\nfrom .impute_missing import impute_missing\nfrom .replace_null import replace_null\nfrom .remove_duplicates import remove_duplicates\nfrom .rmSpChars import rmSpChars\nfrom .cleanColumnNames import cleanColumnNames\nfrom .cast_to import cast_to_int, cast_to_double, cast_to_string\nfrom .drop_null import drop_null\nfrom .format_date import format_date\n\n__all__ = ['drop_col', 'trim_col','lower_case' ,'upper_case', 'impute_missing', 'replace_null', 'remove_duplicates', 'rmSpChars', 'cleanColumnNames', 'drop_null', 'cast_to_int', 'cast_to_double', 'cast_to_string', 'format_date']\n", "id": "7981650", "language": "Python", "matching_score": 1.8881840705871582, "max_stars_count": 1, "path": "cleanflow/preprocessing/__init__.py" }, { "content": "def cleanColumnNames(df):\n '''\n Remove special characters such as !\"#$%\\'*,./:;<=>?@[\\\\]^`{|}~ from column names\n\n Parameter\n ---------\n df : data frame\n\n return df\n '''\n spChars = '!\"#$%\\'*,./:;<=>?@[\\\\]^`{|}~'\n for column in df.columns:\n oldName = column\n for punct in (set(column) & set(spChars)):\n column = column.replace(punct, \"\")\n df = df.withColumnRenamed(oldName,column)\n return df", "id": "1817231", "language": "Python", "matching_score": 1.1744065284729004, "max_stars_count": 1, "path": "cleanflow/preprocessing/cleanColumnNames.py" }, { "content": "from cleanflow.assertions import *\nfrom cleanflow.preprocessing.drop_null import *\nfrom pyspark.sql.types import *\n\ndef cast_to_int(df, columns):\n '''\n Convert a column type to integer, drop columns that are not convertible\n\n Parameters\n ----------\n df : dataframe\n columns : columns to be casted\n\n reutrn df\n '''\n assert_type_str_or_list(df, columns, \"columns\")\n if type(columns) is str:\n df = df.withColumn(columns, df[columns].cast(IntegerType()))\n return drop_null(df, columns)\n else:\n for column in columns:\n df = df.withColumn(column, df[column].cast(IntegerType()))\n return drop_null(df, column)\n\ndef cast_to_double(df, columns):\n '''\n Convert a column type to double, drop columns that are not convertible\n\n Parameters\n ----------\n df : dataframe\n columns : columns to be casted\n\n reutrn df\n '''\n assert_type_str_or_list(df, columns, \"columns\")\n \n if type(columns) is str:\n df = df.withColumn(columns, df[columns].cast(DoubleType()))\n return drop_null(df, columns)\n else:\n for column in columns:\n df = df.withColumn(column, df[column].cast(DoubleType()))\n return drop_null(df, column)\n\ndef cast_to_string(df, columns):\n '''\n Convert a column type to string, drop columns that are not convertible\n \n Parameters\n ----------\n df : dataframe\n columns : columns to be casted\n\n reutrn df\n '''\n assert_type_str_or_list(df, columns, \"columns\")\n\n if type(columns) is str:\n df = df.withColumn(columns, df[columns].cast(StringType()))\n return drop_null(df, columns)\n else:\n for column in columns:\n df = df.withColumn(column, df[column].cast(StringType()))\n 
return drop_null(df, column)", "id": "6169586", "language": "Python", "matching_score": 1.8474714756011963, "max_stars_count": 1, "path": "cleanflow/preprocessing/cast_to.py" }, { "content": "import os\nimport sys\nimport time\nfrom cleanflow.preprocessing import *\nfrom cleanflow.exploratory import Outlier\nfrom pyspark.sql import SparkSession\nfrom pyspark.sql import SQLContext\nfrom pyspark import SparkContext\n\nsc = SparkContext.getOrCreate()\nsqlContext = SQLContext(sc)\n\nstarttime = time.time()\n\nclusterType = sys.argv[1]\nnum_features = int(eval(sys.argv[2]))\nclusters = int(eval(sys.argv[3]))\n\n# Read datase\ndf = sqlContext.read.format(\"csv\").option(\"header\", \"true\").option(\"inferSchema\", \"true\").load('311.csv')\n\n# Pre-process dataframe\ndf = cleanColumnNames(df)\ncols = [\"Unique Key\", \"Incident Zip\", \"X Coordinate (State Plane)\", \"Y Coordinate (State Plane)\", \"Latitude\", \"Longitude\"]\ndouble_cols = [\"X Coordinate (State Plane)\", \"Y Coordinate (State Plane)\", \"Latitude\", \"Longitude\"]\nfor col in cols:\n df = drop_null(df, col)\nfor col in double_cols:\n df = cast_to_double(df, col)\ndf = cast_to_int(df, \"Unique Key\")\ndf = cast_to_int(df, \"Incident Zip\")\n\ncolumns = [\"Latitude\"]\n\n# Find outliers\noutlier = Outlier.cluster_type(clusterType)\noutlier.set_param(k = clusters)\noutlier.fit(df, columns[:num_features])\nsummary = outlier.summary()\nsummary.show()\n\nprint(\"Cluster type: {0:s}, # features: {1:d}, k: {2:d}\".format(clusterType, num_features, clusters))\nprint(\"Time: \" + str(time.time() - starttime))\n\nwith open(\"results.md\", \"a\") as res:\n res.write(\"Cluster type: {0:s}, # features: {1:d}, k: {2:d}\".format(clusterType, num_features, clusters))\n res.write(\"\\nTime: \" + str(time.time() - starttime))\n res.write('\\n\\n')\n\nres.close()\n", "id": "11496121", "language": "Python", "matching_score": 2.799262046813965, "max_stars_count": 1, "path": "tests/outlier/test.py" }, { "content": "from functools import lru_cache\nfrom pyspark.sql import SparkSession\n\n\"\"\"\nThis file contains methods to read, write, get sparksession etc.\nthat are not directly involved with data cleaning but enhance the process to great extent\n\"\"\"\n\n@lru_cache(maxsize=None)\ndef get_spark():\n \"\"\"\n Get instance of spark\n \"\"\"\n return SparkSession.builder \\\n .master(\"local\") \\\n .appName(\"cleanflow\") \\\n .config(\"spark.some.config.option\", \"config-value\") \\\n .getOrCreate()\n\n\ndef read_csv(path, sep=',', header='true', infer_schema='true'):\n \"\"\"\n Read csv file\n \"\"\"\n session = get_spark()\n return session.read \\\n .options(header=header) \\\n .options(delimiter=sep) \\\n .options(inferSchema=infer_schema) \\\n .csv(path)\n\n\ndef read_json(path, multiLine=True):\n \"\"\"\n Read json file\n \"\"\"\n session = get_spark()\n return session.read.json(path, multiLine=multiLine)\n\n\ndef write_csv(df, path):\n \"\"\"\n Write to a csv file\n \"\"\"\n df.toPandas.to_csv(path)\n\n\ndef write_json(df, path):\n \"\"\"\n Write to a json file\n \"\"\"\n df.toPandas.to_json(path)\n\ndef totChanges(old, new):\n '''\n Function to calculate total modification done on performing an action\n '''\n count = 0\n \n for column in range(len(new)):\n indiv_old = old[column].toPandas().set_index('UniqueValues').T.to_dict('records')[0]\n indiv_new = new[column].toPandas().set_index('UniqueValues').T.to_dict('records')[0]\n \n for n in indiv_new:\n if n in indiv_old:\n count += abs(indiv_new[n]-indiv_old[n])\n else:\n count += 
indiv_new[n]\n return count", "id": "9855309", "language": "Python", "matching_score": 1.9238991737365723, "max_stars_count": 1, "path": "cleanflow/utils.py" }, { "content": "def to_csv(df, path_name, singleFile=False, header=False, mode=\"overwrite\", separator=\",\", *args, **kargs):\n\t\"\"\"\n\tWrite dataframe to disk in CSV format.\n\t\n\tParameters\n\t----------\n\tdataframe: The dataframe to be converted to CSV\n\tpath_name: Path to write the DF and the name of the output CSV file.\n\theader : True or False to include header\n\tseparator: sets the single character as a separator for each field and value. If None is set,\n\t\tit uses the default value.\n\n\tif singleFile = False:\n\t\n\t\t# pyspark.sql.DataFrameWriter class is used! \t\t\n\t\t# (refer https://spark.apache.org/docs/latest/api/python/ \\\n\t\t\t\tpyspark.sql.html?highlight=dataframe#pyspark.sql.DataFrameWriter.csv)\n\t\t\n\t\tmode : Specifies the behavior of the save operation when data already exists.\n\t\t\t\t\"append\": Append contents of this DataFrame to existing data.\n\t\t\t\t\"overwrite\" (default case): Overwrite existing data.\n\t\t\t\t\"ignore\": Silently ignore this operation if data already exists.\n\t\t\t\t\"error\": Throw an exception if data already exists.\n\t\n\telse: \n\n\t\t# pandas.DataFrame.to_csv class is used\t\t\t\t\t\t\t\t\n\t\t# (refer https://pandas.pydata.org/pandas-docs/stable/generated/ \\\n\t\t\t\tpandas.DataFrame.to_csv.html)\n\n\t\tmode : str - Python write mode, default ‘w’\n\t\n\t\n\treturn : Dataframe in a CSV format in the specified path.\n\t\t\"\"\"\n\n\tassert isinstance(path_name, str), \"Error: path_name argument must be a string.\"\n\t#assert header == \"true\" or header == \"false\", \"Error header must be 'true' or 'false'.\"\n\tif singleFile:\n\t\treturn df.toPandas().to_csv(path_name, header=True, *args, **kargs)\n\treturn df.write.options(header=header).mode(mode).csv(path_name, sep=separator, *args, **kargs)", "id": "11754596", "language": "Python", "matching_score": 2.3707430362701416, "max_stars_count": 1, "path": "cleanflow/getOutput/to_csv.py" }, { "content": "def describe(df):\n\t'''\n\tFunction to find count, mean, std, min, max of all integer fields of a DataFrame\n\n\tParameters\n\t----------\n\tdf : Data frame to be described\n\n\treturn : Description of DataFrame\n\t'''\n\treturn df.toPandas().describe()", "id": "8274301", "language": "Python", "matching_score": 0.7829564213752747, "max_stars_count": 1, "path": "cleanflow/exploratory/describe.py" }, { "content": "# -*- coding: utf-8 -*-\nfrom setuptools import setup, find_packages\n\ntry:\n long_description = open(\"README.md\").read()\nexcept IOError:\n long_description = \"\"\n\nsetup(\n name=\"CleanFlow\",\n version=\"1.2.0a1\",\n description=\"A a framework for cleaning, pre-processing and exploring data in a scalable and distributed manner.\",\n license=\"MIT\",\n author=\"<NAME>\",\n author_email=\"<EMAIL>\",\n url = 'https://github.com/vutsalsinghal/CleanFlow',\n download_url = 'https://github.com/vutsalsinghal/CleanFlow/archive/master.zip',\n packages=find_packages(),\n install_requires=['pyspark'],\n long_description=long_description,\n classifiers=[\n \t\"Development Status :: 3 - Alpha\",\n \"Programming Language :: Python\",\n \"Programming Language :: Python :: 3.4\",\n \"Programming Language :: Python :: 3.5\",\n \"Programming Language :: Python :: 3.6\",\n ]\n)\n", "id": "10153493", "language": "Python", "matching_score": 0.8226585388183594, "max_stars_count": 1, "path": "setup.py" }, { 
"content": "from nose.tools import *\nfrom shared import *\n\nimport terrascript\n\nclass Test_Terrascript(object):\n\n def setup(self):\n self.terrascript = terrascript.Terrascript()\n\n def test_terrascript_add(self):\n assert self.terrascript.hasattr('add')\n\n def test_terrascript_plus(self):\n assert self.terrascript.hasattr('__add__')\n\n @raises(TypeError)\n def test_terrascript_add_type_error(self):\n self.terrascript.add(1)\n\n @raises(TypeError)\n def test_terrascript_plus_type_error(self):\n self.terrascript += 1", "id": "10576308", "language": "Python", "matching_score": 0.0335950031876564, "max_stars_count": 0, "path": "tests/REVIEW/test_Terrascript.py" }, { "content": "from cleanflow.assertions import assert_cols_in_df\nfrom pyspark.sql.functions import date_format, unix_timestamp\n\n\ndef format_date(df, columns, current_format, output_format):\n \"\"\"\n \n Parameter\n ---------\n df : dataframe whose date column has to be modified\n columns : Name date columns to be transformed.\n current_format : current date string format eg: dd-MM-yyy\n output_format : output date string format to be expected.\n \"\"\"\n assert isinstance(current_format, str), \"Error, current_format argument provided must be a string.\"\n assert isinstance(output_format, str), \"Error, output_format argument provided must be a string.\"\n assert isinstance(columns, (str, list)), \"Error, columns argument provided must be a list.\"\n\n if isinstance(columns, str):\n columns = [columns]\n\n assert_cols_in_df(df, columns_provided=columns, columns_df=df.columns)\n\n expressions = [date_format(unix_timestamp(c, current_format).cast(\"timestamp\"), output_format).alias(c) if c in columns else c for c in df.columns]\n return df.select(*expressions)\n", "id": "6381049", "language": "Python", "matching_score": 2.273956060409546, "max_stars_count": 1, "path": "cleanflow/preprocessing/format_date.py" }, { "content": "from cleanflow.assertions import *\n\ndef drop_col(df, columns):\n \"\"\"\n Function to remove specific column(s)\n\n Parameters\n ----------\n df : dataframe\n column : column(s) to be dropped\n\n return modifiedDF\n \"\"\"\n assert_type_str_or_list(df, columns, \"columns\")\n\n if isinstance(columns, str):\n columns = [columns]\n\n assert_cols_in_df(df, columns_provided=columns, columns_df=df.columns)\n exprs = filter(lambda c: c not in columns, df.columns)\n return df.select(*exprs)", "id": "2365639", "language": "Python", "matching_score": 1.4591107368469238, "max_stars_count": 1, "path": "cleanflow/preprocessing/drop_col.py" }, { "content": "from pyspark.ml.feature import Imputer\nfrom cleanflow.assertions import *\n\ndef impute_missing(df, columns, out_cols, strategy='mean'):\n \"\"\"\n Imputes missing data from specified columns using the mean or median.\n\n Parameters\n ----------\n columns : List of columns to be analyze.\n out_cols: List of output columns with missing values imputed.\n strategy: String that specifies the way of computing missing data. 
Can be \"mean\" or \"median\"\n \n return : Transformer object (DF with columns that has the imputed values).\n \"\"\"\n\n # Check if columns to be process are in dataframe\n assert_cols_in_df(df, columns_provided=columns, columns_df=df.columns)\n\n assert isinstance(columns, list), \"Error: columns argument must be a list\"\n assert isinstance(out_cols, list), \"Error: out_cols argument must be a list\"\n\n # Check if columns argument a string datatype:\n assert_type_str(df, strategy, \"strategy\")\n\n assert (strategy == \"mean\" or strategy == \"median\"), \"Error: strategy has to be 'mean' or 'median'. 'mean' is default\"\n\n imputer = Imputer(inputCols=columns, outputCols=out_cols)\n model = imputer.setStrategy(strategy).fit(df)\n df = model.transform(df)\n\n return df", "id": "7684284", "language": "Python", "matching_score": 2.4814462661743164, "max_stars_count": 1, "path": "cleanflow/preprocessing/impute_missing.py" }, { "content": "def assert_type_str_or_list(df, variable, name_arg):\n \"\"\"This function asserts if variable is a string or a list dataType.\"\"\"\n assert isinstance(variable, (str, list)), \"Error: %s argument must be a string or a list.\" % name_arg\n\n\ndef assert_type_int_or_float(df, variable, name_arg):\n \"\"\"This function asserts if variable is a string or a list dataType.\"\"\"\n assert isinstance(variable, (int, float)), \"Error: %s argument must be a int or a float.\" % name_arg\n\n\ndef assert_type_str(df, variable, name_arg):\n \"\"\"This function asserts if variable is a string or a list dataType.\"\"\"\n assert isinstance(variable, str), \"Error: %s argument must be a string.\" % name_arg\n\n\ndef assert_cols_in_df(df, columns_provided, columns_df):\n \"\"\"This function asserts if columns_provided exists in dataFrame.\n Inputs:\n columns_provided: the list of columns to be process.\n columns_df: list of columns's dataFrames\n \"\"\"\n col_not_valids = (set([column for column in columns_provided]).difference(set([column for column in columns_df])))\n assert (col_not_valids == set()), 'Error: The following columns do not exits in dataFrame: %s' % col_not_valids\n", "id": "10400832", "language": "Python", "matching_score": 3.2133147716522217, "max_stars_count": 1, "path": "cleanflow/assertions.py" }, { "content": "from .assertions import assert_type_str,assert_cols_in_df, assert_type_str_or_list, assert_type_int_or_float\n\n__all__ = ['assert_type_str_or_list','assert_type_int_or_float','assert_type_str','assert_cols_in_df']", "id": "12793435", "language": "Python", "matching_score": 0.36072683334350586, "max_stars_count": 1, "path": "cleanflow/__init__.py" }, { "content": "\"\"\"Functions shared by all test modules.\"\"\"\n\nimport json\nimport os\nimport deepdiff\n\ndef assert_deep_equal(t1, path):\n \"\"\"Compare ``t1`` with the JSON structure parsed from ``path``. 
\n\n Comparison is performed through the DeepDiff Python package\n (https://github.com/seperman/deepdiff).\n \n The ``diffs`` variable will contain any differences which\n can be displayed in the debugger.\n \n (Pdb) pp(diffs)\n\n \"\"\"\n \n t1 = json.loads(str(t1))\n \n with open(os.path.join('tests', 'configs', path), 'rt') as fp:\n t2 = json.load(fp)\n \n diffs = deepdiff.DeepDiff(t1, t2)\n \n assert diffs == {}\n \n \n# For tests where the call has not been renamed yet.\nassert_equals_json = assert_deep_equal", "id": "7412680", "language": "Python", "matching_score": 0.9959684014320374, "max_stars_count": 0, "path": "tests/shared.py" }, { "content": "# terrascript/github/__init__.py\n\nimport terrascript\n\nclass github(terrascript.Provider):\n pass", "id": "7514552", "language": "Python", "matching_score": 0.07132350653409958, "max_stars_count": 0, "path": "terrascript/github/__init__.py" }, { "content": "# terrascript/logentries/r.py\n\nimport terrascript\n\n\nclass logentries_log(terrascript.Resource):\n pass\n\nclass logentries_logset(terrascript.Resource):\n pass\n", "id": "9635418", "language": "Python", "matching_score": 1.1225167512893677, "max_stars_count": 0, "path": "terrascript/logentries/r.py" }, { "content": "# terrascript/logentries/__init__.py\n\nimport terrascript\n\nclass logentries(terrascript.Provider):\n pass", "id": "9310634", "language": "Python", "matching_score": 0.10349464416503906, "max_stars_count": 0, "path": "terrascript/logentries/__init__.py" }, { "content": "# terrascript/arukas/d.py\n\nimport terrascript\n\n", "id": "2015578", "language": "Python", "matching_score": 1.1225167512893677, "max_stars_count": 0, "path": "terrascript/arukas/d.py" }, { "content": "# terrascript/arukas/__init__.py\n\nimport terrascript\n\nclass arukas(terrascript.Provider):\n pass", "id": "1433337", "language": "Python", "matching_score": 0.022050296887755394, "max_stars_count": 0, "path": "terrascript/arukas/__init__.py" }, { "content": "# terrascript/tls/r.py\n\nimport terrascript\n\n\nclass tls_private_key(terrascript.Resource):\n pass\n\nclass tls_locally_signed_cert(terrascript.Resource):\n pass\n\nclass tls_self_signed_cert(terrascript.Resource):\n pass\n\nclass tls_cert_request(terrascript.Resource):\n pass\n", "id": "8265015", "language": "Python", "matching_score": 1.090277910232544, "max_stars_count": 0, "path": "terrascript/tls/r.py" }, { "content": "# terrascript/rundeck/r.py\n\nimport terrascript\n\n\nclass rundeck_project(terrascript.Resource):\n pass\n\nclass rundeck_job(terrascript.Resource):\n pass\n\nclass rundeck_private_key(terrascript.Resource):\n pass\n\nclass rundeck_public_key(terrascript.Resource):\n pass\n\nclass rundeck_acl_policy(terrascript.Resource):\n pass\n", "id": "11085636", "language": "Python", "matching_score": 1.1056170463562012, "max_stars_count": 0, "path": "terrascript/rundeck/r.py" }, { "content": "# terrascript/rundeck/d.py\n\nimport terrascript\n\n", "id": "9051181", "language": "Python", "matching_score": 1.1318467855453491, "max_stars_count": 0, "path": "terrascript/rundeck/d.py" }, { "content": "# terrascript/rundeck/__init__.py\n\nimport terrascript\n\nclass rundeck(terrascript.Provider):\n pass", "id": "2388676", "language": "Python", "matching_score": 0.0324951708316803, "max_stars_count": 0, "path": "terrascript/rundeck/__init__.py" }, { "content": "# terrascript/chef/r.py\n\nimport terrascript\n\n\nclass chef_acl(terrascript.Resource):\n pass\n\nclass chef_client(terrascript.Resource):\n pass\n\nclass 
chef_cookbook(terrascript.Resource):\n pass\n\nclass chef_data_bag(terrascript.Resource):\n pass\n\nclass chef_data_bag_item(terrascript.Resource):\n pass\n\nclass chef_environment(terrascript.Resource):\n pass\n\nclass chef_node(terrascript.Resource):\n pass\n\nclass chef_role(terrascript.Resource):\n pass\n", "id": "8454866", "language": "Python", "matching_score": 1.111994743347168, "max_stars_count": 0, "path": "terrascript/chef/r.py" }, { "content": "# terrascript/chef/d.py\n\nimport terrascript\n\n", "id": "6270085", "language": "Python", "matching_score": 1.1286840438842773, "max_stars_count": 0, "path": "terrascript/chef/d.py" }, { "content": "# terrascript/chef/__init__.py\n\nimport terrascript\n\nclass chef(terrascript.Provider):\n pass", "id": "9235426", "language": "Python", "matching_score": 0.6188668608665466, "max_stars_count": 0, "path": "terrascript/chef/__init__.py" }, { "content": "# terrascript/alicloud/__init__.py\n\nimport terrascript\n\nclass alicloud(terrascript.Provider):\n pass", "id": "10964759", "language": "Python", "matching_score": 1.5512363910675049, "max_stars_count": 0, "path": "terrascript/alicloud/__init__.py" }, { "content": "# terrascript/aws/__init__.py\n\nimport terrascript\n\nclass aws(terrascript.Provider):\n pass", "id": "7726021", "language": "Python", "matching_score": 1.5512363910675049, "max_stars_count": 0, "path": "terrascript/aws/__init__.py" }, { "content": "# terrascript/dnsimple/__init__.py\n\nimport terrascript\n\nclass dnsimple(terrascript.Provider):\n pass", "id": "11096886", "language": "Python", "matching_score": 1.5512363910675049, "max_stars_count": 0, "path": "terrascript/dnsimple/__init__.py" }, { "content": "# terrascript/dns/__init__.py\n\nimport terrascript\n\nclass dns(terrascript.Provider):\n pass", "id": "10023335", "language": "Python", "matching_score": 0.6188668608665466, "max_stars_count": 0, "path": "terrascript/dns/__init__.py" }, { "content": "# terrascript/postgresql/__init__.py\n\nimport terrascript\n\nclass postgresql(terrascript.Provider):\n pass", "id": "923483", "language": "Python", "matching_score": 1.111994743347168, "max_stars_count": 0, "path": "terrascript/postgresql/__init__.py" }, { "content": "# terrascript/postgresql/d.py\n\nimport terrascript\n\n", "id": "11788446", "language": "Python", "matching_score": 0.9308326244354248, "max_stars_count": 0, "path": "terrascript/postgresql/d.py" }, { "content": "# terrascript/postgresql/r.py\n\nimport terrascript\n\n\nclass postgresql_database(terrascript.Resource):\n pass\n\nclass postgresql_default_privileges(terrascript.Resource):\n pass\n\nclass postgresql_extension(terrascript.Resource):\n pass\n\nclass postgresql_grant(terrascript.Resource):\n pass\n\nclass postgresql_schema(terrascript.Resource):\n pass\n\nclass postgresql_role(terrascript.Resource):\n pass\n", "id": "10048002", "language": "Python", "matching_score": 1.095781922340393, "max_stars_count": 0, "path": "terrascript/postgresql/r.py" }, { "content": "# terrascript/mysql/r.py\n\nimport terrascript\n\n\nclass mysql_database(terrascript.Resource):\n pass\n\nclass mysql_grant(terrascript.Resource):\n pass\n\nclass mysql_role(terrascript.Resource):\n pass\n\nclass mysql_user(terrascript.Resource):\n pass\n\nclass mysql_user_password(terrascript.Resource):\n pass\n", "id": "4869012", "language": "Python", "matching_score": 1.0840131044387817, "max_stars_count": 0, "path": "terrascript/mysql/r.py" }, { "content": "# terrascript/random/r.py\n\nimport terrascript\n\n\nclass 
random_id(terrascript.Resource):\n pass\n\nclass random_shuffle(terrascript.Resource):\n pass\n\nclass random_pet(terrascript.Resource):\n pass\n\nclass random_string(terrascript.Resource):\n pass\n\nclass random_password(terrascript.Resource):\n pass\n\nclass random_integer(terrascript.Resource):\n pass\n\nclass random_uuid(terrascript.Resource):\n pass\n", "id": "2195202", "language": "Python", "matching_score": 0.7354674339294434, "max_stars_count": 0, "path": "terrascript/random/r.py" }, { "content": "# terrascript/cobbler/r.py\n\nimport terrascript\n\n\nclass cobbler_distro(terrascript.Resource):\n pass\n\nclass cobbler_kickstart_file(terrascript.Resource):\n pass\n\nclass cobbler_profile(terrascript.Resource):\n pass\n\nclass cobbler_repo(terrascript.Resource):\n pass\n\nclass cobbler_snippet(terrascript.Resource):\n pass\n\nclass cobbler_system(terrascript.Resource):\n pass\n", "id": "11828293", "language": "Python", "matching_score": 1.0870046615600586, "max_stars_count": 0, "path": "terrascript/cobbler/r.py" }, { "content": "# terrascript/local/r.py\n\nimport terrascript\n\n\nclass local_file(terrascript.Resource):\n pass\n", "id": "5372437", "language": "Python", "matching_score": 1.014994502067566, "max_stars_count": 0, "path": "terrascript/local/r.py" }, { "content": "# terrascript/local/__init__.py\n\nimport terrascript\n\nclass local(terrascript.Provider):\n pass", "id": "1068117", "language": "Python", "matching_score": 0.7743210792541504, "max_stars_count": 0, "path": "terrascript/local/__init__.py" }, { "content": "from nose.tools import *\nfrom shared import *\nimport json\n\nimport terrascript\nimport terrascript.aws.r\n\nclass Test_Locals(object):\n # https://www.terraform.io/docs/configuration/locals.html\n\n def setup(self):\n self.locals = terrascript.Locals(service_name='forum', owner='Community Team')\n\n\n def test_locals_classes(self):\n assert isinstance(self.locals, terrascript.Block)\n\n\n def test_output_example1(self):\n assert_equals_json(self.locals, 'Locals_locals_example1.json')", "id": "3678672", "language": "Python", "matching_score": 2.5371267795562744, "max_stars_count": 0, "path": "tests/REVIEW/test_Locals.py" }, { "content": "from nose.tools import *\nfrom shared import assert_equals_json\nimport json\n\nimport terrascript\nimport terrascript.aws.r\n\nclass Test_Output(object):\n\n def test_output_classes(self):\n\n output = terrascript.Output(\"name\")\n assert isinstance(output, terrascript.Block)\n\n\n def test_output_example1(self):\n # https://www.terraform.io/docs/configuration/outputs.html\n\n resource = terrascript.Resource('name')\n output = terrascript.Output(\"ipaddress\", value=resource.ipaddress)\n\n assert_equals_json(output, 'Output_output_example1.json')", "id": "6786651", "language": "Python", "matching_score": 2.055554151535034, "max_stars_count": 0, "path": "tests/REVIEW/test_Output.py" }, { "content": "from nose.tools import *\nfrom shared import *\nimport json\n\nimport terrascript\n\nclass Test_Resource(object):\n\n def test_resource_classes(self):\n resource = terrascript.Resource('name')\n assert isinstance(resource, terrascript.Block)\n assert isinstance(resource['name'], terrascript.Block)\n\n\n def test_resource_attributes(self):\n resource = terrascript.Resource('name')\n assert resource.name == 'name'\n\n\n def test_resource(self):\n resource = terrascript.Resource('name')\n\n assert_equals_json(resource, 'Resource_resource.json')\n\n\n def test_resource_with_arguments(self):\n resource = terrascript.Resource('name',\n 
integer=1,\n string='string',\n float=3.14)\n\n assert_equals_json(resource, 'Resource_resource_with_arguments.json')\n\n\n def test_resource_with_dictionary(self):\n resource = terrascript.Resource('name',\n integer=1,\n string='string',\n float=3.14,\n dictionary=dict(integer=2,\n string='string2',\n float=6.28)\n )\n\n assert_equals_json(resource, 'Resource_resource_with_dictionary.json')\n\n\n def test_resource_with_list(self):\n resource = terrascript.Resource('name',\n integer=1,\n string='string',\n float=3.14,\n alist=[2, 'string2', 6.28]\n )\n\n assert_equals_json(resource, 'Resource_resource_with_list.json')\n\n\n def test_resource_with_nested_arguments(self):\n resource = terrascript.Resource('name',\n integer=1,\n string='string',\n float=3.14,\n dictionary=dict(integer=2,\n string='string2',\n float=6.28),\n alist=[2, 'string2', 6.28],\n nested=dict(dict2=dict(a=1, b=2),\n list2=['a', 'b', 'c']))\n\n assert_equals_json(resource, 'Resource_resource_with_nested_arguments.json')\n", "id": "10143605", "language": "Python", "matching_score": 2.9775187969207764, "max_stars_count": 0, "path": "tests/REVIEW/test_Resource.py" }, { "content": "from nose.tools import *\nfrom shared import *\nimport json\n\nimport terrascript\n\nclass Test_Provider(object):\n\n def test_provider_classes(self):\n provider = terrascript.Provider('name')\n assert isinstance(provider, terrascript.Block)\n\n\n def test_provider_with_nested_arguments(self):\n provider = terrascript.Provider('name',\n integer=1,\n string='string',\n float=3.14,\n dictionary=dict(integer=2,\n string='string2',\n float=6.28),\n alist=[2, 'string2', 6.28],\n nested=dict(dict2=dict(a=1, b=2),\n list2=['a', 'b', 'c']))\n\n assert_equals_json(provider, 'Provider_provider_with_nested_arguments.json')\n", "id": "543085", "language": "Python", "matching_score": 0.0879257544875145, "max_stars_count": 0, "path": "tests/REVIEW/test_Provider.py" }, { "content": "from cleanflow.assertions import *\nfrom cleanflow.utils import totChanges\nfrom cleanflow.exploratory import find_unique\nfrom pyspark.sql.functions import col, udf\nfrom pyspark.sql.types import StringType, IntegerType, FloatType, DoubleType, ArrayType\nfrom pyspark.sql import SparkSession\nfrom pyspark.sql import SQLContext\nfrom pyspark import SparkContext\n\nsc = SparkContext.getOrCreate()\nsqlContext = SQLContext(sc)\n\ndef set_col(df, columns, func, data_type, summary):\n \"\"\"Apply func to every value of the given columns (all of which must have data_type) and return the resulting DataFrame; with summary=True also return a count of modified cells.\"\"\"\n dict_types = {'string': StringType(), 'str': StringType(), 'integer': IntegerType(),'int': IntegerType(), 'float': FloatType(), 'double': DoubleType(), 'Double': DoubleType()}\n types = {'string': 'string', 'str': 'string', 'String': 'string', 'integer': 'int', 'int': 'int', 'float': 'float', 'double': 'double', 'Double': 'double'}\n \n try:\n function = udf(func, dict_types[data_type])\n except KeyError:\n assert False, \"Error, data_type not recognized\"\n\n assert_type_str_or_list(df, columns, \"columns\")\n\n # Keep only the columns whose dataType matches the requested data_type\n valid_cols = [c for (c, t) in filter(lambda t:t[1]==types[data_type], df.dtypes)]\n\n if columns == \"*\":\n columns = valid_cols[:]\n\n if isinstance(columns, str):\n columns = [columns]\n\n assert_cols_in_df(df, columns_provided=columns, columns_df=df.columns)\n col_not_valids = (set([column for column in columns]).difference(set([column for column in valid_cols])))\n assert (col_not_valids == set()), 'Error: The following columns do not match the datatype argument provided: %s' % col_not_valids\n \n oldUnique = [find_unique(df, column=c) for c in columns]
\n exprs = [function(col(c)).alias(c) if c in columns else c for (c, t) in df.dtypes]\n newDF = df.select(*exprs)\n\n if summary:\n newUnique = [find_unique(newDF, column=c) for c in columns]\n count = int(totChanges(oldUnique, newUnique))\n summary = sqlContext.createDataFrame([(count,)],['Total Cells Modified',])\n return (newDF, summary)\n return newDF", "id": "4203398", "language": "Python", "matching_score": 5.265430927276611, "max_stars_count": 1, "path": "cleanflow/preprocessing/set_col.py" }, { "content": "import re\nimport string\nfrom cleanflow.assertions import assert_type_str_or_list, assert_cols_in_df\nfrom cleanflow.utils import totChanges\nfrom cleanflow.exploratory import find_unique\nfrom pyspark.sql.functions import col, udf\nfrom pyspark.sql.types import StringType\nfrom pyspark.sql import SparkSession\nfrom pyspark.sql import SQLContext\nfrom pyspark import SparkContext\n\nsc = SparkContext.getOrCreate()\nsqlContext = SQLContext(sc)\n\ndef rmSpChars(df, columns=\"*\", regex=None, summary=False):\n \"\"\"\n This function removes special characters from string columns, such as: .$%()!&\"#/\n You can also remove an unwanted sub-string by passing a regular expression via the \"regex\" parameter.\n \n Parameters\n ----------\n df : Dataframe to be processed\n columns : (optional - default *) list of column names to be processed.\n argument can be a string or a list of strings.\n regex : (optional - default None) string that contains the regular expression\n \n return df
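\n\n Example (illustrative; assumes a DataFrame df with a StringType column 'name'):\n\n >>> newDF = rmSpChars(df, columns='name')\n >>> newDF, report = rmSpChars(df, columns='name', regex='[0-9]', summary=True)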
\n \"\"\"\n assert_type_str_or_list(df, columns, \"columns\")\n valid_cols = [c for (c, t) in filter(lambda t: t[1] == 'string', df.dtypes)]\n\n # If None or [] is provided with column parameter:\n if columns == \"*\":\n columns = valid_cols[:]\n\n if isinstance(columns, str):\n columns = [columns]\n\n assert_cols_in_df(df, columns_provided=columns, columns_df=df.columns)\n col_not_valids = (set([column for column in columns]).difference(set([column for column in valid_cols])))\n\n assert (col_not_valids == set()), 'Error: The following columns do not match the datatype argument provided: %s' % col_not_valids\n\n # Columns that are present in user_input and valid_columns\n col_pool = [c for c in columns if c in valid_cols]\n\n def rm_Sp_Chars(inputStr, regex):\n if regex is None:\n for punct in (set(inputStr) & set(string.punctuation)):\n inputStr = inputStr.replace(punct, \"\")\n else:\n # A single re.sub() call removes every match, so no loop is needed.\n inputStr = re.sub(regex, '', inputStr)\n return inputStr\n\n function = udf(lambda cell: rm_Sp_Chars(cell, regex) if cell is not None else cell, StringType())\n \n oldUnique = [find_unique(df, column=c) for c in col_pool]\n \n exprs = [function(c).alias(c) if c in col_pool else c for c in df.columns]\n newDF = df.select(*exprs)\n\n if summary:\n newUnique = [find_unique(newDF, column=c) for c in col_pool]\n count = int(totChanges(oldUnique, newUnique))\n summary = sqlContext.createDataFrame([(count,)],['Total Cells Modified',])\n return (newDF, summary)\n return newDF", "id": "4339249", "language": "Python", "matching_score": 5.94935417175293, "max_stars_count": 1, "path": "cleanflow/preprocessing/rmSpChars.py" }, { "content": "from cleanflow.assertions import assert_type_str_or_list, assert_cols_in_df\nfrom cleanflow.utils import totChanges\nfrom cleanflow.exploratory import find_unique\nfrom pyspark.sql.functions import trim, col\nfrom pyspark.sql import SparkSession\nfrom pyspark.sql import SQLContext\nfrom pyspark import SparkContext\n\nsc = SparkContext.getOrCreate()\nsqlContext = SQLContext(sc)\n\ndef trim_col(df, columns=\"*\", summary=False):\n \"\"\"\n This method removes leading and trailing extra spaces from StringType column(s).\n\n Parameters\n ----------\n df : Dataframe\n columns : list of column names of dataFrame.\n \n return newDF\n \"\"\"\n assert_type_str_or_list(df, columns, \"columns\")\n valid_cols = [c for (c, t) in filter(lambda t:t[1] == 'string', df.dtypes)]\n\n # If None or [] is provided with column parameter:\n if columns == '*':\n columns = valid_cols\n\n if isinstance(columns, str):\n columns = [columns]\n\n assert_cols_in_df(df, columns_provided=columns, columns_df=df.columns)\n \n # Columns that are present in user_input and valid_columns\n col_pool = [c for c in columns if c in valid_cols]\n \n oldUnique = [find_unique(df, column=c) for c in col_pool]\n exprs = [trim(col(c)).alias(c) if c in col_pool else c for (c, t) in df.dtypes]\n newDF = df.select(*exprs)\n\n if summary:\n newUnique = [find_unique(newDF, column=c) for c in col_pool]\n count = int(totChanges(oldUnique, newUnique))\n summary = sqlContext.createDataFrame([(count,)],['Total Cells Trimmed',])\n return (newDF, summary)\n return newDF", "id": "8060685", "language": "Python", "matching_score": 3.883664131164551, "max_stars_count": 1, "path": "cleanflow/preprocessing/trim_col.py" }, { "content": "from cleanflow.assertions import *\nfrom pyspark.sql.functions import col\n\nfrom pyspark.sql import SparkSession\nfrom pyspark.sql import SQLContext\nfrom pyspark import SparkContext\n\nsc = SparkContext.getOrCreate()\nsqlContext = SQLContext(sc)\n\ndef drop_null(df, column, summary=False):\n '''\n Drop rows that have a null value in the given column\n\n Parameter\n ---------\n df : dataframe\n column : column to be processed\n\n Return\n ------\n df : dataframe\n summary: summary of action performed (type='pyspark.sql.dataframe.DataFrame')
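\n\n Example (illustrative; assumes a DataFrame df with a column 'age'):\n\n >>> newDF, report = drop_null(df, 'age', summary=True)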
\n '''\n newDF = df.where(col(column).isNotNull())\n \n if summary:\n previousTotRows = df.count()\n newTotRows = newDF.count()\n summary = sqlContext.createDataFrame([(previousTotRows, newTotRows, previousTotRows-newTotRows)],['Previous Row Count', 'New Row Count','Rows affected'])\n return (newDF, summary)\n return newDF", "id": "3140741", "language": "Python", "matching_score": 2.9721109867095947, "max_stars_count": 1, "path": "cleanflow/preprocessing/drop_null.py" }, { "content": "from cleanflow.assertions import *\nfrom pyspark.sql import SparkSession\nfrom pyspark.sql import SQLContext\nfrom pyspark import SparkContext\n\nsc = SparkContext.getOrCreate()\nsqlContext = SQLContext(sc)\n\ndef check_duplicates(df, column):\n \"\"\"\n Function to list the values that occur more than once, together with their counts.\n\n Parameter\n ---------\n df : DataFrame to be processed.\n column : column in df for which we want to find duplicate values.\n \"\"\"\n assert_cols_in_df(df, columns_provided=[column], columns_df=df.columns)\n df.createOrReplaceTempView(\"df\")\n return sqlContext.sql(\"SELECT %s as DuplicateValue, COUNT(*) as Count FROM df GROUP BY %s HAVING COUNT(*)>1 order by Count desc\"%(column, column))\n\ndef find_unique(df, column):\n \"\"\"\n Function to find unique/distinct values in a column of a DataFrame.\n\n Parameter\n ---------\n df : DataFrame to be processed.\n column : column in df for which we want to find unique values.\n \"\"\"\n assert_cols_in_df(df, columns_provided=[column], columns_df=df.columns)\n df.createOrReplaceTempView(\"df\")\n return sqlContext.sql('select distinct(%s) as UniqueValues,COUNT(*) as Count from df group by %s order by Count desc'%(column, column))", "id": "9630233", "language": "Python", "matching_score": 1.6182373762130737, "max_stars_count": 1, "path": "cleanflow/exploratory/duplicates.py" }, { "content": "def remove_duplicates(df, columns=None):\n\t\"\"\"\n\tRemove duplicate rows, optionally considering only a subset of columns.\n\n\tParameters\n\t----------\n\tcolumns : List of columns to make the comparison; only this subset of columns\n\tis considered when dropping duplicates. The default (None) drops only fully identical rows.\n\t\n\treturn: Return a new DataFrame with duplicate rows removed\n\t\"\"\"\n\n\tassert columns is None or isinstance(columns, list), \"Error, columns argument provided must be a list.\"\n\tnewDF = df.drop_duplicates(columns)\n\treturn newDF", "id": "9224110", "language": "Python", "matching_score": 1.5920641422271729, "max_stars_count": 1, "path": "cleanflow/preprocessing/remove_duplicates.py" }, { "content": "def replace_null(df, value, columns=\"*\"):\n \"\"\"\n Replace nulls with specified value.\n \n Parameters\n ----------\n columns : optional list of column names to consider. Columns specified in subset that do not have\n matching data type are ignored. For example, if value is a string, and subset contains a non-string column,\n then the non-string column is simply ignored.\n value : Value to replace null values with. If the value is a dict, then subset is ignored and value\n must be a mapping from column name (string) to replacement value. The replacement\n value must be an int, long, float, or string.\n \n return df : dataframe with replaced null values.\n \"\"\"\n if columns == \"*\":\n columns = None\n\n if isinstance(columns, str):\n columns = [columns]\n\n if columns is not None:\n assert isinstance(columns, list), \"Error: columns argument must be a list\"\n\n assert isinstance(value, (int, float, str, dict)), \"Error: value argument must be an int, long, float, string, or dict\"\n return df.fillna(value, subset=columns)", "id": "4580497", "language": "Python", "matching_score": 0.5231246948242188, "max_stars_count": 1, "path": "cleanflow/preprocessing/replace_null.py" }, { "content": "\"\"\"\nterrascript/__init__.py\n\nBase classes and functions that are used everywhere else in\nthis project.\n\n\"\"\"\n\nimport logging\nimport warnings\nimport json\n\n__author__ = '<NAME> <<EMAIL>>'\n__version__ = '0.8.0'\n__license__ = 'BSD 2-clause \"Simplified\" License'\n\nINDENT = 2\n\"\"\"JSON indentation level.\"\"\"\n\nDEBUG = False\n\"\"\"Set to enable some debugging.\"\"\"\n\nLOG = logging.getLogger(__name__)\n\nPROVIDER_KEY = 'provider'\nRESOURCE_KEY = 'resource'\nMODULE_KEY = 'module'\nVARIABLE_KEY = 'variable'\nOUTPUT_KEY = 'output'\nLOCALS_KEY = 'locals'\nDATA_KEY = 'data'\n\n\nclass String(str):\n \"\"\"A `String` handles access to not yet known attributes.\n\n This is called by `Block.__getattr__` to deal with references to\n attributes that do not yet exist.\n\n In the example below the ``aws_instance`` does not have attributes\n ``.server`` and in turn ``.server.private_ip``. To prevent Python
\n from raising an `AttributeError` the `String.__getattr__()` method\n creates a new string by appending the attribute name.\n\n Python:\n\n config = terrascript.Terrascript()\n config += terrascript.aws.aws(version='~> 2.0', region='us-east-1')\n aws_instance = terrascript.aws.r.aws_instance('web', ...)\n config += aws_instance\n config += terrascript.Output('instance_ip_addr',\n value=aws_instance.server.private_ip)\n ^^^^^^^^^^^^^^^^^^\n\n JSON:\n\n \"\"\"\n\n def __getattr__(self, name):\n return String('{}.{}'.format(self, name))\n\n\nclass Block(dict):\n \"\"\"A `Block` is a dictionary-like container for other content.\n\n \"\"\"\n\n def __init__(self, **kwargs):\n\n # Convert instances of Resource, Variable, Data, ... into\n # their correct reference instead of inserting the actual\n # dictionary.\n #\n # Resource ->\n # Variable -> \"var.name\"\n #\n for k, v in kwargs.items():\n if isinstance(v, Variable):\n kwargs[k] = 'var.{}'.format(v.name)\n\n super().update(kwargs)\n\n def __getattr__(self, attr):\n \"\"\"Special handling for accessing attributes.\n\n If ``Block.attr`` does not exist, try to return Block[attr]. If that\n does not exist either, return `attr` as a string, prefixed\n by the name (and type) of the Block that is referenced.\n\n This is for example necessary for referencing an attribute of a\n Terraform resource which only becomes available after the resource\n has been created.\n\n Example:\n\n instance = terrascript.resources.aws_instance(\"server\", ...)\n output = terrascript.Output(\"instance_ip_addr\",\n value=instance.private_ip)\n ^^^^^^^^^^\n Where ``instance.private_ip`` does not (yet) exist.\n\n \"\"\"\n\n # The ``Block.name`` attribute gets special treatment. Some blocks have\n # names, which are the sole top-level key of the dictionary.\n #\n # Resources: {'resource_name': { ... } ==> 'resource_name'\n # Provider: {'project': 'myproject', region: 'us_central1'} ==> AttributeError.\n #\n if attr == 'name':\n keys = list(self.keys())\n if len(keys) > 1:\n raise AttributeError('{} has no name'.format(self.__class__.__name__))\n else:\n return keys[0]\n\n # Try to return the entry in the dictionary. Otherwise return a string\n # which must be formatted differently depending on what is referenced.\n #\n try:\n return self[attr]\n except KeyError:\n if isinstance(self, Resource):\n return String('{}.{}.{}'.format(self.__class__.__name__, self.name, attr))\n elif isinstance(self, Locals):\n return String('local.{}'.format(attr))\n elif isinstance(self, Provider):\n return '+++provider+++'\n\n\nclass Terrascript(dict):\n \"\"\"Top-level container for Terraform configurations.\n\n :param *objects: Optional list of Terrascript data sources, resources,\n modules, providers, variables, outputs or locals to add.
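\n\n Example (illustrative; the generic base classes are used directly, as in the unit tests, and the argument values are made up):\n\n config = Terrascript()\n config += Provider(region='us-east-1')\n config += Resource('web', ami='ami-123456')\n print(config) # the JSON representation of the configuration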
\n\n \"\"\"\n\n def __init__(self, *objects):\n super().__init__()\n\n for object in objects:\n self += object\n\n\n def __str__(self):\n return json.dumps(self, indent=INDENT)\n\n def __add__(self, object):\n \"\"\"Add to the configuration using the ``+`` syntax.\"\"\"\n\n #\n # Resource\n #\n if isinstance(object, Resource):\n if RESOURCE_KEY not in self:\n self[RESOURCE_KEY] = {}\n if object.__class__.__name__ not in self[RESOURCE_KEY]:\n self[RESOURCE_KEY][object.__class__.__name__] = {}\n self[RESOURCE_KEY][object.__class__.__name__].update(object)\n #\n # Data\n #\n elif isinstance(object, Data):\n if DATA_KEY not in self:\n self[DATA_KEY] = {}\n if object.__class__.__name__ not in self[DATA_KEY]:\n self[DATA_KEY][object.__class__.__name__] = {}\n self[DATA_KEY][object.__class__.__name__].update(object)\n #\n # Module\n #\n elif isinstance(object, Module):\n if MODULE_KEY not in self:\n self[MODULE_KEY] = {}\n self[MODULE_KEY].update(object)\n #\n # Provider\n #\n elif isinstance(object, Provider):\n if PROVIDER_KEY not in self:\n self[PROVIDER_KEY] = {}\n if object.__class__.__name__ not in self[PROVIDER_KEY]:\n self[PROVIDER_KEY][object.__class__.__name__] = []\n self[PROVIDER_KEY][object.__class__.__name__].append(object)\n #\n # Variable\n #\n elif isinstance(object, Variable):\n if VARIABLE_KEY not in self:\n self[VARIABLE_KEY] = Block()\n self[VARIABLE_KEY].update(object)\n #\n # Output\n #\n elif isinstance(object, Output):\n if OUTPUT_KEY not in self:\n self[OUTPUT_KEY] = Block()\n self[OUTPUT_KEY].update(object)\n #\n # Locals\n #\n elif isinstance(object, Locals):\n if LOCALS_KEY not in self:\n self[LOCALS_KEY] = Block()\n self[LOCALS_KEY].update(object)\n #\n # else\n #\n else:\n raise TypeError('A {} cannot be added to the configuration'.format(\n object.__class__.__name__))\n\n return self\n\n def add(self, object):\n \"\"\"Add to the configuration (an alternative to the ``+`` syntax).\"\"\"\n\n self += object\n\n\nclass Resource(Block):\n \"\"\"Terraform resource block.\"\"\"\n\n def __init__(self, name, **kwargs):\n super().__init__()\n self[name] = Block(**kwargs)\n\n\nclass Data(Block):\n \"\"\"Terraform data source block.\n\n \"\"\"\n\n def __init__(self, name, **kwargs):\n super().__init__()\n self[name] = Block(**kwargs)\n\n\nclass Provider(Block):\n \"\"\"Terraform provider\n\n HCL:\n\n provider \"aws\" {\n region = \"us-east-1\"\n version = \"~> 2.0\"\n }\n\n JSON:\n\n \"provider\": {\n \"aws\": [\n {\n \"region\": \"us-east-1\",\n \"version\": \"~> 2.0\"\n }\n ]\n }\n\n \"\"\"\n\n def __init__(self, **kwargs):\n super().__init__(**kwargs)\n\n\nclass Variable(Block):\n def __init__(self, name, **kwargs):\n super().__init__()\n self[name] = Block(**kwargs)\n\n\nclass Module(Block):\n \"\"\"Terraform child module call.\n\n https://www.terraform.io/docs/configuration/modules.html\n\n \"\"\"\n\n def __init__(self, name, **kwargs):\n super().__init__()\n self[name] = Block(**kwargs)\n\n\nclass Output(Block):
\n def __init__(self, name, **kwargs):\n super().__init__()\n self[name] = Block(**kwargs)\n\n\nclass Provisioner(Block):\n \"\"\"Provisioner block.\n\n resource \"aws_instance\" \"web\" {\n # ...\n\n provisioner \"local-exec\" {\n command = \"echo ${self.private_ip} > file.txt\"\n }\n }\n\n :param name: The name of the provisioner, e.g. ``file``, ``local-exec``, ``chef``.\n :param **kwargs: The arguments are provisioner dependent.\n\n \"\"\"\n\n def __init__(self, name, **kwargs):\n super().__init__()\n self[name] = Block(**kwargs)\n\n\nclass Connection(Block):\n pass\n\n\nclass Backend(Block):\n pass\n\n\nclass Terraform(Block):\n pass\n\n\nclass Locals(Block):\n def __init__(self, **kwargs):\n super().__init__(**kwargs)\n\n\nclass Function(Block):\n pass\n\n\n# Lower-case aliases kept for backwards compatibility; they will be deprecated in the future.\n\nclass module(Module):\n def __init__(self, *args, **kwargs):\n warnings.warn(\"'{}' will be deprecated in the future, please use '{}' instead\".format(\n self.__class__.__name__, self.__class__.__name__.title()), PendingDeprecationWarning)\n super().__init__(*args, **kwargs)\n\nclass data(Data):\n def __init__(self, *args, **kwargs):\n warnings.warn(\"'{}' will be deprecated in the future, please use '{}' instead\".format(\n self.__class__.__name__, self.__class__.__name__.title()), PendingDeprecationWarning)\n super().__init__(*args, **kwargs)\n\nclass resource(Resource):\n def __init__(self, *args, **kwargs):\n warnings.warn(\"'{}' will be deprecated in the future, please use '{}' instead\".format(\n self.__class__.__name__, self.__class__.__name__.title()), PendingDeprecationWarning)\n super().__init__(*args, **kwargs)\n\nclass variable(Variable):\n def __init__(self, *args, **kwargs):\n warnings.warn(\"'{}' will be deprecated in the future, please use '{}' instead\".format(\n self.__class__.__name__, self.__class__.__name__.title()), PendingDeprecationWarning)\n super().__init__(*args, **kwargs)\n\nclass provider(Provider):\n def __init__(self, *args, **kwargs):\n warnings.warn(\"'{}' will be deprecated in the future, please use '{}' instead\".format(\n self.__class__.__name__, self.__class__.__name__.title()), PendingDeprecationWarning)\n super().__init__(*args, **kwargs)\n\nclass output(Output):\n def __init__(self, *args, **kwargs):\n warnings.warn(\"'{}' will be deprecated in the future, please use '{}' instead\".format(\n self.__class__.__name__, self.__class__.__name__.title()), PendingDeprecationWarning)\n super().__init__(*args, **kwargs)\n\nclass provisioner(Provisioner):\n def __init__(self, *args, **kwargs):\n warnings.warn(\"'{}' will be deprecated in the future, please use '{}' instead\".format(\n self.__class__.__name__, self.__class__.__name__.title()), PendingDeprecationWarning)\n super().__init__(*args, **kwargs)\n\nclass connection(Connection):\n def __init__(self, *args, **kwargs):\n warnings.warn(\"'{}' will be deprecated in the future, please use '{}' instead\".format(\n self.__class__.__name__, self.__class__.__name__.title()), PendingDeprecationWarning)\n super().__init__(*args, **kwargs)\n\nclass backend(Backend):\n def __init__(self, *args, **kwargs):\n warnings.warn(\"'{}' will be deprecated in the future, please use '{}' instead\".format(\n self.__class__.__name__, self.__class__.__name__.title()), PendingDeprecationWarning)\n super().__init__(*args, **kwargs)\n\nclass terraform(Terraform):\n def __init__(self, *args, **kwargs):\n warnings.warn(\"'{}' will be deprecated in the future, please use '{}' instead\".format(\n self.__class__.__name__, self.__class__.__name__.title()), PendingDeprecationWarning)
\n super().__init__(*args, **kwargs)\n\nclass function(Function):\n def __init__(self, *args, **kwargs):\n warnings.warn(\"'{}' will be deprecated in the future, please use '{}' instead\".format(\n self.__class__.__name__, self.__class__.__name__.title()), PendingDeprecationWarning)\n super().__init__(*args, **kwargs)\n\n# THREE_TIER_ITEMS = ['data', 'resource', 'provider']\n# TWO_TIER_ITEMS = ['variable', 'module', 'output', 'provisioner']\n# ONE_TIER_ITEMS = ['terraform', 'locals']\n#\n#\n# class _Config(dict):\n# def __getitem__(self, key):\n# try:\n# return super(_Config, self).__getitem__(key)\n# except KeyError:\n# # Work-around for issue 3 as described in https://github.com/hashicorp/terraform/issues/13037:\n# # Make 'data' a list of a single dictionary.\n# if key == 'data':\n# super(_Config, self).__setitem__(key, [defaultdict(dict)])\n# elif key in THREE_TIER_ITEMS:\n# super(_Config, self).__setitem__(key, defaultdict(dict))\n# elif key in TWO_TIER_ITEMS:\n# super(_Config, self).__setitem__(key, {})\n# elif key in ONE_TIER_ITEMS:\n# super(_Config, self).__setitem__(key, {})\n# else:\n# raise KeyError(key)\n#\n# return super(_Config, self).__getitem__(key)\n#\n#\n# class Terrascript(object):\n# \"\"\"Top-level container for Terraform configurations.\"\"\"\n#\n# def __init__(self):\n#\n# self.config = _Config()\n# self._item_list = []\n#\n# def __add__(self, item):\n# # Does not add EMPTY values\n# clone = item._kwargs.copy()\n# for k in clone:\n# if item._kwargs[k] is None:\n# del item._kwargs[k]\n#\n# # Work-around for issue 3 as described in https://github.com/hashicorp/terraform/issues/13037:\n# # Make 'data' a list of a single dictionary.\n# if item._class == 'data':\n# self.config[item._class][0][item._type][item._name] = item._kwargs\n# elif item._class in THREE_TIER_ITEMS:\n# self.config[item._class][item._type][item._name] = item._kwargs\n# elif item._class in TWO_TIER_ITEMS:\n# self.config[item._class][item._name] = item._kwargs\n# elif item._class in ONE_TIER_ITEMS:\n# self.config[item._class] = item._kwargs\n# else:\n# raise KeyError(item)\n#\n# if not isinstance(item, Terrascript):\n# if item in self._item_list:\n# self._item_list.remove(item)\n# self._item_list.append(item)\n#\n# return self\n#\n# def add(self, item):\n# self.__add__(item)\n# return item\n#\n# def update(self, terrascript2):\n# if isinstance(terrascript2, Terrascript):\n# for item in terrascript2._item_list:\n# self.__add__(item)\n# else:\n# raise TypeError('{0} is not a Terrascript instance.'.format(\n# type(terrascript2)))\n#\n# def dump(self):\n# \"\"\"Return the JSON representation of config.\"\"\"\n# import json\n#\n# def _json_default(v):\n# # How to encode non-standard objects\n# if isinstance(v, provisioner):\n# return {v._type: v.data}\n# elif isinstance(v, UserDict):\n# return v.data\n# else:\n# return str(v)\n#\n# # Work on copy of _Config but with unused top-level elements removed.\n# #\n# config = {k: v for k,v in self.config.items() if v}\n# return json.dumps(config, indent=INDENT, sort_keys=SORT, default=_json_default)\n#\n#\n# def validate(self, delete=True):\n# \"\"\"Validate a Terraform configuration.\"\"\"\n# import tempfile\n# import subprocess\n#\n# config = self.dump()\n# with tempfile.TemporaryDirectory() as tmpdir:\n# tmpfile = tempfile.NamedTemporaryFile(mode='w', dir=tmpdir, suffix='.tf.json', delete=delete)\n#\n# tmpfile.write(self.dump())\n# tmpfile.flush()\n#\n# # Download plugins\n# proc = 
subprocess.Popen(['terraform','init'], cwd=tmpdir,\n# stdout=subprocess.PIPE, stderr=None)\n# proc.communicate()\n# assert proc.returncode == 0\n#\n# # Validate configuration\n# proc = subprocess.Popen(['terraform','validate','-check-variables=false'], cwd=tmpdir,\n# stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n# proc.communicate()\n#\n# tmpfile.close()\n#\n# return proc.returncode == 0\n#\n#\n# class _base(object):\n# _class = None\n# \"\"\"One of 'resource', 'data', 'module', etc.\"\"\"\n#\n# _type = None\n# \"\"\"The resource type, e.g. 'aws_instance'.\"\"\"\n#\n# _name = None\n# \"\"\"The name of this resource, e.g. 'my_ec2_instance'.\"\"\"\n#\n# def __init__(self, name_, **kwargs):\n# if not self._type:\n# self._type = self.__class__.__name__\n# self._name = name_\n# self._kwargs = kwargs\n#\n#\n# def __getattr__(self, name):\n# \"\"\"References to attributes.\"\"\"\n# if self._class == 'resource':\n# return '${{{}.{}.{}}}'.format(self._type, self._name, name)\n# elif self._class == 'module':\n# return '${{module.{}.{}}}'.format(self._name, name)\n# else:\n# return '${{{}.{}.{}.{}}}'.format(self._class, self._type, self._name, name)\n#\n# def __getitem__(self, i):\n# if isinstance(i, int):\n# # \"${var.NAME[i]}\"\n# return '${{var.{}[{}]}}'.format(self._name, i)\n# else:\n# # \"${var.NAME[\"i\"]}\"\n# return \"${{var.{}[\\\"{}\\\"]}}\".format(self._name, i)\n#\n# def __repr__(self):\n# \"\"\"References to objects.\"\"\"\n# if self._class == 'variable':\n# \"\"\"Interpolated reference to a variable, e.g. ``${var.http_port}``.\"\"\"\n# return self.interpolated\n# else:\n# \"\"\"Non-interpolated reference to a non-resource, e.g. ``module.http``.\"\"\"\n# return self.fullname\n#\n# @property\n# def interpolated(self):\n# \"\"\"The object in interpolated syntax: ``${...}``.\"\"\"\n# return '${{{}}}'.format(self.fullname)\n#\n# @property\n# def fullname(self):\n# \"\"\"The object's full name.\"\"\"\n# if self._class == 'variable':\n# return 'var.{}'.format(self._name)\n# elif self._class == 'resource':\n# return '{}.{}'.format(self._type, self._name)\n# else:\n# return '{}.{}'.format(self._class, self._name)\n#\n#\n# class _resource(_base):\n# \"\"\"Base class for resources.\"\"\"\n# _class = 'resource'\n#\n#\n# class _data(_base):\n# \"\"\"Base class for data sources.\"\"\"\n# _class = 'data'\n#\n# def __init__(self, obj_name, **kwargs):\n# super(_data, self).__init__(obj_name, **kwargs)\n#\n#\n# class resource(_base):\n# \"\"\"Class for creating a resource for which no convenience wrapper exists.\"\"\"\n# _class = 'resource'\n#\n# def __init__(self, type_, name, **kwargs):\n# self._type = type_\n# super(resource, self).__init__(name, **kwargs)\n#\n#\n# class data(_base):\n# \"\"\"Class for creating a data source for which no convenience wrapper exists.\"\"\"\n# _class = 'data'\n#\n# def __init__(self, type_, name, **kwargs):\n# self._type = type_\n# super(data, self).__init__(name, **kwargs)\n#\n#\n# class module(_base):\n# _class = 'module'\n#\n#\n# class variable(_base):\n# _class = 'variable'\n#\n#\n# class output(_base):\n# _class = 'output'\n#\n#\n# class provider(_base):\n# _class = 'provider'\n#\n# def __init__(self, name, **kwargs):\n# alias = kwargs.get('alias', '__DEFAULT__')\n# self._type = name\n# super(provider, self).__init__(alias, **kwargs)\n#\n#\n# class Locals(_base):\n# _class = 'locals'\n#\n# def __init__(self, **kwargs):\n# super(Locals, self).__init__(None, **kwargs)\n#\n#\n# class terraform(_base):\n# _class = 'terraform'\n# def __init__(self, **kwargs):\n# # 
Terraform does not have a name\n# super(terraform, self).__init__(None, **kwargs)\n#\n#\n# class provisioner(UserDict):\n# def __init__(self, type_, **kwargs):\n# self._type = type_\n# self.data = kwargs\n#\n#\n# class connection(UserDict):\n# def __init__(self, **kwargs):\n# self.data = kwargs\n#\n#\n# class backend(UserDict):\n# def __init__(self, name, **kwargs):\n# self.data = {name: kwargs}\n#\n#\n# class _function(object):\n# \"\"\"Terraform function.\n#\n# >>> function.lookup(map, key)\n# \"${lookup(map, key)}\"\n#\n# \"\"\"\n#\n# class _function(object):\n# def __init__(self, name):\n# self.name = name\n#\n# def format(self, arg):\n# \"\"\"Format a function argument.\"\"\"\n# if isinstance(arg, _base):\n# return arg.fullname\n# elif isinstance(arg, str):\n# return '\"{}\"'.format(arg)\n# else:\n# return arg\n#\n# def __call__(self, *args):\n# return '${{{}({})}}'.format(self.name, ','.join([self.format(arg) for arg in args]))\n#\n# def __getattr__(self, name):\n# return self._function(name)\n#\n# f = fn = func = function = _function()\n# \"\"\"Shortcuts for `function()`.\"\"\"\n#\n#\n__all__ = ['Terrascript', 'Block', 'Resource', 'Provider', 'Data',\n 'Variable', 'Module', 'Output', 'Provisioner', 'Backend',\n 'Terraform', 'Locals', 'Function']\n", "id": "11080087", "language": "Python", "matching_score": 3.586594343185425, "max_stars_count": 0, "path": "terrascript/__init__.py" }, { "content": "#!/usr/bin/env python3\n\n\"\"\"Auto-generate terrascript/PROVIDER/{r,d}.py files.\n\n If called without any argument(s), the script reads a list of Terraform\n providers from the file PROVIDERS, pulls the provider's Github\n repository and parses the provider.go file.\n\n Alternatively this script can be called with the name of one or more\n providers as argument(s) instead of reading the file PROVIDERS. This\n is useful if a provider is either not (yet) listed in PROVIDERS or\n only a subset of providers is to be updated.
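\n\n For example (the provider names are illustrative):\n\n $ ./makecode.py aws google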
\n\n This script relies on the content of a provider's 'provider.go'\n matching the regular expression `REGEX` below. Providers for which this\n is not the case can be added manually.\n\n Changelog:\n\n 2019-08-17 - Access to Github is now through the `git` command line tool\n instead of the `github3` Python module.\n Use Jinja2 for templating.\n Make the script process multiple providers concurrently.\n\n See https://github.com/mjuenema/python-terrascript/commits/develop/makecode.py\n for a list of earlier changes.\n\n\"\"\"\n\nDEBUG = True\nCONCURRENCY = 10\n\nimport os\nimport os.path\nimport sys\nimport re\nimport tempfile\nimport subprocess\nimport shlex\nimport concurrent.futures\nimport jinja2\nimport logging\n\n\nif DEBUG:\n logging.basicConfig(level=logging.DEBUG)\nelse:\n logging.basicConfig(level=logging.INFO)\n\n\n# Raw bytes literals so that \\s reaches the regex engine unchanged.\n#REGEX = re.compile(rb'\".*?_(?P<name>.+)\":\\s+(?P<type>resource|data)')\nREGEX = re.compile(rb'\"(?P<name>.+)\":\\s+(?P<type>resource|data)')\n\"\"\"REGEX to extract the names of resources and data sources from a provider.go file.\n\n DataSourcesMap: map[string]*schema.Resource{\n \"alicloud_account\": dataSourceAlicloudAccount(),\n \"alicloud_images\": dataSourceAlicloudImages(),\n \"alicloud_regions\": dataSourceAlicloudRegions(),\n ...\n ^^^^^^^^^^^^^^^^ ^^^^\n\n ResourcesMap: map[string]*schema.Resource{\n \"alicloud_instance\": resourceAliyunInstance(),\n \"alicloud_ram_role_attachment\": resourceAlicloudRamRoleAttachment(),\n \"alicloud_disk\": resourceAliyunDisk(),\n ...\n ^^^^^^^^^^^^^^^^ ^^^^^^^^\n\n This only works reliably if the provider.go file follows exactly this\n naming convention but it's still much better than having to add all\n resources and data sources manually.\n\n\"\"\"\n\nINIT_TEMPLATE = jinja2.Template(\"\"\"# terrascript/{{ provider }}/__init__.py\n\nimport terrascript\n\nclass {{ provider }}(terrascript.Provider):\n pass\n\"\"\")\n\nDATASOURCES_TEMPLATE = jinja2.Template(\"\"\"# terrascript/{{ provider }}/d.py\n\nimport terrascript\n\n{% for datasource in datasources %}\nclass {{ datasource }}(terrascript.Data):\n pass\n{% endfor %}\n\"\"\")\n\nRESOURCES_TEMPLATE = jinja2.Template(\"\"\"# terrascript/{{ provider }}/r.py\n\nimport terrascript\n\n{% for resource in resources %}\nclass {{ resource }}(terrascript.Resource):\n pass\n{% endfor %}\n\"\"\")\n\n\n\ndef create_provider_directory(provider, modulesdir):\n\n providerdir = os.path.join(modulesdir, provider)\n\n if not os.path.isdir(providerdir):\n os.mkdir(providerdir)\n\n return providerdir\n\n\ndef create_provider_init(provider, providerdir):\n\n with open(os.path.join(providerdir, '__init__.py'), 'wt') as fp:\n fp.write(INIT_TEMPLATE.render(provider=provider))\n\n\ndef create_provider_datasources(provider, providerdir, datasources):\n\n with open(os.path.join(providerdir, 'd.py'), 'wt') as fp:\n fp.write(DATASOURCES_TEMPLATE.render(provider=provider, datasources=datasources))\n\n\ndef create_provider_resources(provider, providerdir, resources):\n logging.debug('create_provider_resources provider={}'.format(provider))\n logging.debug('create_provider_resources providerdir={}'.format(providerdir))\n for resource in resources:\n logging.debug('create_provider_resources resource={}'.format(resource))\n\n with open(os.path.join(providerdir, 'r.py'), 'wt') as fp:\n fp.write(RESOURCES_TEMPLATE.render(provider=provider, resources=resources))\n\n\ndef process(provider, modulesdir):\n\n print(provider)\n\n with tempfile.TemporaryDirectory() as tmpdir:\n\n cmd = 'git clone --depth=1 https://github.com/terraform-providers/terraform-provider-{} .'.format(provider)\n result = subprocess.run(shlex.split(cmd), 
stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=tmpdir)\n if result.returncode != 0:\n print(result.stdout)\n sys.exit(1)\n\n\n with open(os.path.join(tmpdir, provider, 'provider.go'), 'rb') as fp:\n content = fp.read()\n\n resources = []\n datasources = []\n\n for m in REGEX.finditer(content):\n\n name = m.groupdict()['name']\n\n if m.groupdict()['type'] == b'resource':\n resources.append(name.decode())\n elif m.groupdict()['type'] == b'data':\n datasources.append(name.decode())\n else:\n # Shouldn't really get here.\n pass\n\n providerdir = create_provider_directory(provider, modulesdir)\n create_provider_init(provider, providerdir)\n create_provider_datasources(provider, providerdir, datasources)\n create_provider_resources(provider, providerdir, resources)\n\n\ndef main():\n\n thisdir = os.path.abspath('.')\n rootdir = os.path.abspath('..')\n modulesdir = os.path.abspath('../terrascript')\n\n try:\n os.stat(os.path.join(thisdir, sys.argv[0]))\n os.stat(os.path.join(thisdir, 'PROVIDERS'))\n except FileNotFoundError:\n print('Script must be run from the tools/ folder', file=sys.stderr)\n sys.exit(1)\n\n # Detect whether a list of providers was passed on the command line or whether\n # to read PROVIDERS. Blank lines in PROVIDERS are skipped.\n #\n providers = sys.argv[1:]\n if not providers:\n providers = [line.strip() for line in open('PROVIDERS', 'rt').readlines() if line.strip()]\n\n with concurrent.futures.ThreadPoolExecutor(max_workers=CONCURRENCY) as executor:\n futures = [executor.submit(process, provider, modulesdir) for provider in providers]\n concurrent.futures.wait(futures)\n\n\nif __name__ == '__main__':\n main()\n\n", "id": "5349773", "language": "Python", "matching_score": 1.6077611446380615, "max_stars_count": 0, "path": "tools/makecode.py" }, { "content": "# terrascript/alicloud/r.py\n\nimport terrascript\n\n\nclass alicloud_instance(terrascript.Resource):\n pass\n\nclass alicloud_ram_role_attachment(terrascript.Resource):\n pass\n\nclass alicloud_disk(terrascript.Resource):\n pass\n\nclass alicloud_disk_attachment(terrascript.Resource):\n pass\n\nclass alicloud_network_interface(terrascript.Resource):\n pass\n\nclass alicloud_network_interface_attachment(terrascript.Resource):\n pass\n\nclass alicloud_snapshot(terrascript.Resource):\n pass\n\nclass alicloud_snapshot_policy(terrascript.Resource):\n pass\n\nclass alicloud_launch_template(terrascript.Resource):\n pass\n\nclass alicloud_security_group(terrascript.Resource):\n pass\n\nclass alicloud_security_group_rule(terrascript.Resource):\n pass\n\nclass alicloud_db_database(terrascript.Resource):\n pass\n\nclass alicloud_db_account(terrascript.Resource):\n pass\n\nclass alicloud_db_account_privilege(terrascript.Resource):\n pass\n\nclass alicloud_db_backup_policy(terrascript.Resource):\n pass\n\nclass alicloud_db_connection(terrascript.Resource):\n pass\n\nclass alicloud_db_read_write_splitting_connection(terrascript.Resource):\n pass\n\nclass alicloud_db_instance(terrascript.Resource):\n pass\n\nclass alicloud_mongodb_instance(terrascript.Resource):\n pass\n\nclass alicloud_mongodb_sharding_instance(terrascript.Resource):\n pass\n\nclass alicloud_gpdb_instance(terrascript.Resource):\n pass\n\nclass alicloud_gpdb_connection(terrascript.Resource):\n pass\n\nclass alicloud_db_readonly_instance(terrascript.Resource):\n pass\n\nclass alicloud_ess_scaling_group(terrascript.Resource):\n pass\n\nclass alicloud_ess_scaling_configuration(terrascript.Resource):\n pass\n\nclass alicloud_ess_scaling_rule(terrascript.Resource):\n pass\n\nclass 
alicloud_ess_schedule(terrascript.Resource):\n pass\n\nclass alicloud_ess_scheduled_task(terrascript.Resource):\n pass\n\nclass alicloud_ess_attachment(terrascript.Resource):\n pass\n\nclass alicloud_ess_lifecycle_hook(terrascript.Resource):\n pass\n\nclass alicloud_ess_notification(terrascript.Resource):\n pass\n\nclass alicloud_ess_alarm(terrascript.Resource):\n pass\n\nclass alicloud_ess_scalinggroup_vserver_groups(terrascript.Resource):\n pass\n\nclass alicloud_vpc(terrascript.Resource):\n pass\n\nclass alicloud_nat_gateway(terrascript.Resource):\n pass\n\nclass alicloud_nas_file_system(terrascript.Resource):\n pass\n\nclass alicloud_nas_mount_target(terrascript.Resource):\n pass\n\nclass alicloud_nas_access_group(terrascript.Resource):\n pass\n\nclass alicloud_nas_access_rule(terrascript.Resource):\n pass\n\nclass alicloud_subnet(terrascript.Resource):\n pass\n\nclass alicloud_vswitch(terrascript.Resource):\n pass\n\nclass alicloud_route_entry(terrascript.Resource):\n pass\n\nclass alicloud_route_table(terrascript.Resource):\n pass\n\nclass alicloud_route_table_attachment(terrascript.Resource):\n pass\n\nclass alicloud_snat_entry(terrascript.Resource):\n pass\n\nclass alicloud_forward_entry(terrascript.Resource):\n pass\n\nclass alicloud_eip(terrascript.Resource):\n pass\n\nclass alicloud_eip_association(terrascript.Resource):\n pass\n\nclass alicloud_slb(terrascript.Resource):\n pass\n\nclass alicloud_slb_listener(terrascript.Resource):\n pass\n\nclass alicloud_slb_attachment(terrascript.Resource):\n pass\n\nclass alicloud_slb_backend_server(terrascript.Resource):\n pass\n\nclass alicloud_slb_server_group(terrascript.Resource):\n pass\n\nclass alicloud_slb_master_slave_server_group(terrascript.Resource):\n pass\n\nclass alicloud_slb_rule(terrascript.Resource):\n pass\n\nclass alicloud_slb_acl(terrascript.Resource):\n pass\n\nclass alicloud_slb_ca_certificate(terrascript.Resource):\n pass\n\nclass alicloud_slb_server_certificate(terrascript.Resource):\n pass\n\nclass alicloud_oss_bucket(terrascript.Resource):\n pass\n\nclass alicloud_oss_bucket_object(terrascript.Resource):\n pass\n\nclass alicloud_ons_instance(terrascript.Resource):\n pass\n\nclass alicloud_ons_topic(terrascript.Resource):\n pass\n\nclass alicloud_ons_group(terrascript.Resource):\n pass\n\nclass alicloud_dns_record(terrascript.Resource):\n pass\n\nclass alicloud_dns(terrascript.Resource):\n pass\n\nclass alicloud_dns_group(terrascript.Resource):\n pass\n\nclass alicloud_key_pair(terrascript.Resource):\n pass\n\nclass alicloud_key_pair_attachment(terrascript.Resource):\n pass\n\nclass alicloud_kms_key(terrascript.Resource):\n pass\n\nclass alicloud_ram_user(terrascript.Resource):\n pass\n\nclass alicloud_ram_account_password_policy(terrascript.Resource):\n pass\n\nclass alicloud_ram_access_key(terrascript.Resource):\n pass\n\nclass alicloud_ram_login_profile(terrascript.Resource):\n pass\n\nclass alicloud_ram_group(terrascript.Resource):\n pass\n\nclass alicloud_ram_role(terrascript.Resource):\n pass\n\nclass alicloud_ram_policy(terrascript.Resource):\n pass\n\nclass alicloud_ram_alias(terrascript.Resource):\n pass\n\nclass alicloud_ram_account_alias(terrascript.Resource):\n pass\n\nclass alicloud_ram_group_membership(terrascript.Resource):\n pass\n\nclass alicloud_ram_user_policy_attachment(terrascript.Resource):\n pass\n\nclass alicloud_ram_role_policy_attachment(terrascript.Resource):\n pass\n\nclass alicloud_ram_group_policy_attachment(terrascript.Resource):\n pass\n\nclass 
alicloud_container_cluster(terrascript.Resource):\n pass\n\nclass alicloud_cs_application(terrascript.Resource):\n pass\n\nclass alicloud_cs_swarm(terrascript.Resource):\n pass\n\nclass alicloud_cs_kubernetes(terrascript.Resource):\n pass\n\nclass alicloud_cs_managed_kubernetes(terrascript.Resource):\n pass\n\nclass alicloud_cr_namespace(terrascript.Resource):\n pass\n\nclass alicloud_cr_repo(terrascript.Resource):\n pass\n\nclass alicloud_cdn_domain(terrascript.Resource):\n pass\n\nclass alicloud_cdn_domain_new(terrascript.Resource):\n pass\n\nclass alicloud_cdn_domain_config(terrascript.Resource):\n pass\n\nclass alicloud_router_interface(terrascript.Resource):\n pass\n\nclass alicloud_router_interface_connection(terrascript.Resource):\n pass\n\nclass alicloud_ots_table(terrascript.Resource):\n pass\n\nclass alicloud_ots_instance(terrascript.Resource):\n pass\n\nclass alicloud_ots_instance_attachment(terrascript.Resource):\n pass\n\nclass alicloud_cms_alarm(terrascript.Resource):\n pass\n\nclass alicloud_pvtz_zone(terrascript.Resource):\n pass\n\nclass alicloud_pvtz_zone_attachment(terrascript.Resource):\n pass\n\nclass alicloud_pvtz_zone_record(terrascript.Resource):\n pass\n\nclass alicloud_log_project(terrascript.Resource):\n pass\n\nclass alicloud_log_store(terrascript.Resource):\n pass\n\nclass alicloud_log_store_index(terrascript.Resource):\n pass\n\nclass alicloud_log_machine_group(terrascript.Resource):\n pass\n\nclass alicloud_logtail_config(terrascript.Resource):\n pass\n\nclass alicloud_logtail_attachment(terrascript.Resource):\n pass\n\nclass alicloud_fc_service(terrascript.Resource):\n pass\n\nclass alicloud_fc_function(terrascript.Resource):\n pass\n\nclass alicloud_fc_trigger(terrascript.Resource):\n pass\n\nclass alicloud_vpn_gateway(terrascript.Resource):\n pass\n\nclass alicloud_vpn_customer_gateway(terrascript.Resource):\n pass\n\nclass alicloud_vpn_connection(terrascript.Resource):\n pass\n\nclass alicloud_ssl_vpn_server(terrascript.Resource):\n pass\n\nclass alicloud_ssl_vpn_client_cert(terrascript.Resource):\n pass\n\nclass alicloud_cen_instance(terrascript.Resource):\n pass\n\nclass alicloud_cen_instance_attachment(terrascript.Resource):\n pass\n\nclass alicloud_cen_bandwidth_package(terrascript.Resource):\n pass\n\nclass alicloud_cen_bandwidth_package_attachment(terrascript.Resource):\n pass\n\nclass alicloud_cen_bandwidth_limit(terrascript.Resource):\n pass\n\nclass alicloud_cen_route_entry(terrascript.Resource):\n pass\n\nclass alicloud_cen_instance_grant(terrascript.Resource):\n pass\n\nclass alicloud_kvstore_instance(terrascript.Resource):\n pass\n\nclass alicloud_kvstore_backup_policy(terrascript.Resource):\n pass\n\nclass alicloud_datahub_project(terrascript.Resource):\n pass\n\nclass alicloud_datahub_subscription(terrascript.Resource):\n pass\n\nclass alicloud_datahub_topic(terrascript.Resource):\n pass\n\nclass alicloud_mns_queue(terrascript.Resource):\n pass\n\nclass alicloud_mns_topic(terrascript.Resource):\n pass\n\nclass alicloud_havip(terrascript.Resource):\n pass\n\nclass alicloud_mns_topic_subscription(terrascript.Resource):\n pass\n\nclass alicloud_havip_attachment(terrascript.Resource):\n pass\n\nclass alicloud_api_gateway_api(terrascript.Resource):\n pass\n\nclass alicloud_api_gateway_group(terrascript.Resource):\n pass\n\nclass alicloud_api_gateway_app(terrascript.Resource):\n pass\n\nclass alicloud_api_gateway_app_attachment(terrascript.Resource):\n pass\n\nclass alicloud_api_gateway_vpc_access(terrascript.Resource):\n pass\n\nclass 
alicloud_common_bandwidth_package(terrascript.Resource):\n pass\n\nclass alicloud_common_bandwidth_package_attachment(terrascript.Resource):\n pass\n\nclass alicloud_drds_instance(terrascript.Resource):\n pass\n\nclass alicloud_elasticsearch_instance(terrascript.Resource):\n pass\n\nclass alicloud_actiontrail(terrascript.Resource):\n pass\n\nclass alicloud_cas_certificate(terrascript.Resource):\n pass\n\nclass alicloud_ddoscoo_instance(terrascript.Resource):\n pass\n\nclass alicloud_network_acl(terrascript.Resource):\n pass\n\nclass alicloud_network_acl_attachment(terrascript.Resource):\n pass\n\nclass alicloud_network_acl_entries(terrascript.Resource):\n pass\n", "id": "12785615", "language": "Python", "matching_score": 3.286548376083374, "max_stars_count": 0, "path": "terrascript/alicloud/r.py" }, { "content": "# terrascript/alicloud/d.py\n\nimport terrascript\n\n\nclass alicloud_account(terrascript.Data):\n pass\n\nclass alicloud_images(terrascript.Data):\n pass\n\nclass alicloud_regions(terrascript.Data):\n pass\n\nclass alicloud_zones(terrascript.Data):\n pass\n\nclass alicloud_instance_type_families(terrascript.Data):\n pass\n\nclass alicloud_instance_types(terrascript.Data):\n pass\n\nclass alicloud_instances(terrascript.Data):\n pass\n\nclass alicloud_disks(terrascript.Data):\n pass\n\nclass alicloud_network_interfaces(terrascript.Data):\n pass\n\nclass alicloud_snapshots(terrascript.Data):\n pass\n\nclass alicloud_vpcs(terrascript.Data):\n pass\n\nclass alicloud_vswitches(terrascript.Data):\n pass\n\nclass alicloud_eips(terrascript.Data):\n pass\n\nclass alicloud_key_pairs(terrascript.Data):\n pass\n\nclass alicloud_kms_keys(terrascript.Data):\n pass\n\nclass alicloud_dns_domains(terrascript.Data):\n pass\n\nclass alicloud_dns_groups(terrascript.Data):\n pass\n\nclass alicloud_dns_records(terrascript.Data):\n pass\n\nclass alicloud_dns_domain_groups(terrascript.Data):\n pass\n\nclass alicloud_dns_domain_records(terrascript.Data):\n pass\n\nclass alicloud_ram_account_alias(terrascript.Data):\n pass\n\nclass alicloud_ram_account_aliases(terrascript.Data):\n pass\n\nclass alicloud_ram_groups(terrascript.Data):\n pass\n\nclass alicloud_ram_users(terrascript.Data):\n pass\n\nclass alicloud_ram_roles(terrascript.Data):\n pass\n\nclass alicloud_ram_policies(terrascript.Data):\n pass\n\nclass alicloud_security_groups(terrascript.Data):\n pass\n\nclass alicloud_security_group_rules(terrascript.Data):\n pass\n\nclass alicloud_slbs(terrascript.Data):\n pass\n\nclass alicloud_slb_attachments(terrascript.Data):\n pass\n\nclass alicloud_slb_backend_servers(terrascript.Data):\n pass\n\nclass alicloud_slb_listeners(terrascript.Data):\n pass\n\nclass alicloud_slb_rules(terrascript.Data):\n pass\n\nclass alicloud_slb_server_groups(terrascript.Data):\n pass\n\nclass alicloud_slb_master_slave_server_groups(terrascript.Data):\n pass\n\nclass alicloud_slb_acls(terrascript.Data):\n pass\n\nclass alicloud_slb_server_certificates(terrascript.Data):\n pass\n\nclass alicloud_slb_ca_certificates(terrascript.Data):\n pass\n\nclass alicloud_oss_bucket_objects(terrascript.Data):\n pass\n\nclass alicloud_oss_buckets(terrascript.Data):\n pass\n\nclass alicloud_ons_instances(terrascript.Data):\n pass\n\nclass alicloud_ons_topics(terrascript.Data):\n pass\n\nclass alicloud_ons_groups(terrascript.Data):\n pass\n\nclass alicloud_fc_functions(terrascript.Data):\n pass\n\nclass alicloud_fc_services(terrascript.Data):\n pass\n\nclass alicloud_fc_triggers(terrascript.Data):\n pass\n\nclass 
alicloud_db_instances(terrascript.Data):\n pass\n\nclass alicloud_db_instance_engines(terrascript.Data):\n pass\n\nclass alicloud_db_instance_classes(terrascript.Data):\n pass\n\nclass alicloud_pvtz_zones(terrascript.Data):\n pass\n\nclass alicloud_pvtz_zone_records(terrascript.Data):\n pass\n\nclass alicloud_router_interfaces(terrascript.Data):\n pass\n\nclass alicloud_vpn_gateways(terrascript.Data):\n pass\n\nclass alicloud_vpn_customer_gateways(terrascript.Data):\n pass\n\nclass alicloud_vpn_connections(terrascript.Data):\n pass\n\nclass alicloud_ssl_vpn_servers(terrascript.Data):\n pass\n\nclass alicloud_ssl_vpn_client_certs(terrascript.Data):\n pass\n\nclass alicloud_mongo_instances(terrascript.Data):\n pass\n\nclass alicloud_mongodb_instances(terrascript.Data):\n pass\n\nclass alicloud_gpdb_instances(terrascript.Data):\n pass\n\nclass alicloud_kvstore_instances(terrascript.Data):\n pass\n\nclass alicloud_kvstore_instance_classes(terrascript.Data):\n pass\n\nclass alicloud_kvstore_instance_engines(terrascript.Data):\n pass\n\nclass alicloud_cen_instances(terrascript.Data):\n pass\n\nclass alicloud_cen_bandwidth_packages(terrascript.Data):\n pass\n\nclass alicloud_cen_bandwidth_limits(terrascript.Data):\n pass\n\nclass alicloud_cen_route_entries(terrascript.Data):\n pass\n\nclass alicloud_cen_region_route_entries(terrascript.Data):\n pass\n\nclass alicloud_cs_kubernetes_clusters(terrascript.Data):\n pass\n\nclass alicloud_cs_managed_kubernetes_clusters(terrascript.Data):\n pass\n\nclass alicloud_cr_namespaces(terrascript.Data):\n pass\n\nclass alicloud_cr_repos(terrascript.Data):\n pass\n\nclass alicloud_mns_queues(terrascript.Data):\n pass\n\nclass alicloud_mns_topics(terrascript.Data):\n pass\n\nclass alicloud_mns_topic_subscriptions(terrascript.Data):\n pass\n\nclass alicloud_api_gateway_apis(terrascript.Data):\n pass\n\nclass alicloud_api_gateway_groups(terrascript.Data):\n pass\n\nclass alicloud_api_gateway_apps(terrascript.Data):\n pass\n\nclass alicloud_elasticsearch_instances(terrascript.Data):\n pass\n\nclass alicloud_drds_instances(terrascript.Data):\n pass\n\nclass alicloud_nas_access_groups(terrascript.Data):\n pass\n\nclass alicloud_nas_access_rules(terrascript.Data):\n pass\n\nclass alicloud_nas_mount_targets(terrascript.Data):\n pass\n\nclass alicloud_nas_file_systems(terrascript.Data):\n pass\n\nclass alicloud_nas_protocols(terrascript.Data):\n pass\n\nclass alicloud_cas_certificates(terrascript.Data):\n pass\n\nclass alicloud_actiontrails(terrascript.Data):\n pass\n\nclass alicloud_common_bandwidth_packages(terrascript.Data):\n pass\n\nclass alicloud_route_tables(terrascript.Data):\n pass\n\nclass alicloud_route_entries(terrascript.Data):\n pass\n\nclass alicloud_nat_gateways(terrascript.Data):\n pass\n\nclass alicloud_snat_entries(terrascript.Data):\n pass\n\nclass alicloud_forward_entries(terrascript.Data):\n pass\n\nclass alicloud_ddoscoo_instances(terrascript.Data):\n pass\n\nclass alicloud_ess_scaling_groups(terrascript.Data):\n pass\n\nclass alicloud_ess_scaling_rules(terrascript.Data):\n pass\n\nclass alicloud_ess_scaling_configurations(terrascript.Data):\n pass\n\nclass alicloud_ots_instances(terrascript.Data):\n pass\n\nclass alicloud_ots_instance_attachments(terrascript.Data):\n pass\n\nclass alicloud_ots_tables(terrascript.Data):\n pass\n", "id": "3374437", "language": "Python", "matching_score": 1.2948040962219238, "max_stars_count": 0, "path": "terrascript/alicloud/d.py" }, { "content": "# terrascript/nomad/d.py\n\nimport terrascript\n\n\nclass 
nomad_job(terrascript.Data):\n pass\n\nclass nomad_deployments(terrascript.Data):\n pass\n\nclass nomad_namespaces(terrascript.Data):\n pass\n\nclass nomad_regions(terrascript.Data):\n pass\n", "id": "4732499", "language": "Python", "matching_score": 1.1225167512893677, "max_stars_count": 0, "path": "terrascript/nomad/d.py" }, { "content": "# terrascript/nomad/__init__.py\n\nimport terrascript\n\nclass nomad(terrascript.Provider):\n pass", "id": "148748", "language": "Python", "matching_score": 0.036631740629673004, "max_stars_count": 0, "path": "terrascript/nomad/__init__.py" }, { "content": "# terrascript/vault/d.py\n\nimport terrascript\n\n", "id": "71353", "language": "Python", "matching_score": 1.1175289154052734, "max_stars_count": 0, "path": "terrascript/vault/d.py" }, { "content": "# terrascript/vault/__init__.py\n\nimport terrascript\n\nclass vault(terrascript.Provider):\n pass", "id": "7112169", "language": "Python", "matching_score": 0.006825488060712814, "max_stars_count": 0, "path": "terrascript/vault/__init__.py" }, { "content": "# terrascript/newrelic/r.py\n\nimport terrascript\n\n\nclass newrelic_alert_channel(terrascript.Resource):\n pass\n\nclass newrelic_alert_condition(terrascript.Resource):\n pass\n\nclass newrelic_alert_policy_channel(terrascript.Resource):\n pass\n\nclass newrelic_alert_policy(terrascript.Resource):\n pass\n\nclass newrelic_dashboard(terrascript.Resource):\n pass\n\nclass newrelic_infra_alert_condition(terrascript.Resource):\n pass\n\nclass newrelic_nrql_alert_condition(terrascript.Resource):\n pass\n\nclass newrelic_synthetics_alert_condition(terrascript.Resource):\n pass\n\nclass newrelic_synthetics_monitor(terrascript.Resource):\n pass\n\nclass newrelic_synthetics_monitor_script(terrascript.Resource):\n pass\n", "id": "10918047", "language": "Python", "matching_score": 2.1317574977874756, "max_stars_count": 0, "path": "terrascript/newrelic/r.py" }, { "content": "# terrascript/newrelic/d.py\n\nimport terrascript\n\n\nclass newrelic_alert_channel(terrascript.Data):\n pass\n\nclass newrelic_alert_policy(terrascript.Data):\n pass\n\nclass newrelic_application(terrascript.Data):\n pass\n\nclass newrelic_key_transaction(terrascript.Data):\n pass\n\nclass newrelic_synthetics_monitor(terrascript.Data):\n pass\n", "id": "1896943", "language": "Python", "matching_score": 0.27695703506469727, "max_stars_count": 0, "path": "terrascript/newrelic/d.py" }, { "content": "# terrascript/dns/d.py\n\nimport terrascript\n\n\nclass dns_a_record_set(terrascript.Data):\n pass\n\nclass dns_aaaa_record_set(terrascript.Data):\n pass\n\nclass dns_cname_record_set(terrascript.Data):\n pass\n\nclass dns_mx_record_set(terrascript.Data):\n pass\n\nclass dns_ns_record_set(terrascript.Data):\n pass\n\nclass dns_ptr_record_set(terrascript.Data):\n pass\n\nclass dns_srv_record_set(terrascript.Data):\n pass\n\nclass dns_txt_record_set(terrascript.Data):\n pass\n", "id": "6963248", "language": "Python", "matching_score": 3.2629175186157227, "max_stars_count": 0, "path": "terrascript/dns/d.py" }, { "content": "# terrascript/dns/r.py\n\nimport terrascript\n\n\nclass dns_a_record_set(terrascript.Resource):\n pass\n\nclass dns_aaaa_record_set(terrascript.Resource):\n pass\n\nclass dns_cname_record(terrascript.Resource):\n pass\n\nclass dns_mx_record_set(terrascript.Resource):\n pass\n\nclass dns_ns_record_set(terrascript.Resource):\n pass\n\nclass dns_ptr_record(terrascript.Resource):\n pass\n\nclass dns_srv_record_set(terrascript.Resource):\n pass\n\nclass 
dns_txt_record_set(terrascript.Resource):\n pass\n", "id": "1555425", "language": "Python", "matching_score": 1.1127796173095703, "max_stars_count": 0, "path": "terrascript/dns/r.py" }, { "content": "# terrascript/dme/r.py\n\nimport terrascript\n\n\nclass dme_record(terrascript.Resource):\n pass\n", "id": "6113607", "language": "Python", "matching_score": 1.2291834354400635, "max_stars_count": 0, "path": "terrascript/dme/r.py" }, { "content": "# terrascript/ultradns/r.py\n\nimport terrascript\n\n\nclass ultradns_dirpool(terrascript.Resource):\n pass\n\nclass ultradns_probe_http(terrascript.Resource):\n pass\n\nclass ultradns_probe_ping(terrascript.Resource):\n pass\n\nclass ultradns_record(terrascript.Resource):\n pass\n\nclass ultradns_tcpool(terrascript.Resource):\n pass\n\nclass ultradns_rdpool(terrascript.Resource):\n pass\n", "id": "5547943", "language": "Python", "matching_score": 1.1127796173095703, "max_stars_count": 0, "path": "terrascript/ultradns/r.py" }, { "content": "# terrascript/dyn/r.py\n\nimport terrascript\n\n\nclass dyn_record(terrascript.Resource):\n pass\n", "id": "8384942", "language": "Python", "matching_score": 1.1056170463562012, "max_stars_count": 0, "path": "terrascript/dyn/r.py" }, { "content": "# terrascript/dyn/d.py\n\nimport terrascript\n\n", "id": "6400230", "language": "Python", "matching_score": 1.12619149684906, "max_stars_count": 0, "path": "terrascript/dyn/d.py" }, { "content": "# terrascript/dyn/__init__.py\n\nimport terrascript\n\nclass dyn(terrascript.Provider):\n pass", "id": "7379154", "language": "Python", "matching_score": 0.111897312104702, "max_stars_count": 0, "path": "terrascript/dyn/__init__.py" }, { "content": "# terrascript/bitbucket/d.py\n\nimport terrascript\n\n", "id": "11612947", "language": "Python", "matching_score": 1.12619149684906, "max_stars_count": 0, "path": "terrascript/bitbucket/d.py" }, { "content": "# terrascript/bitbucket/__init__.py\n\nimport terrascript\n\nclass bitbucket(terrascript.Provider):\n pass", "id": "5898370", "language": "Python", "matching_score": 0.549986720085144, "max_stars_count": 0, "path": "terrascript/bitbucket/__init__.py" }, { "content": "# terrascript/bitbucket/r.py\n\nimport terrascript\n\n\nclass bitbucket_hook(terrascript.Resource):\n pass\n\nclass bitbucket_default_reviewers(terrascript.Resource):\n pass\n\nclass bitbucket_repository(terrascript.Resource):\n pass\n", "id": "645100", "language": "Python", "matching_score": 0.8422830700874329, "max_stars_count": 0, "path": "terrascript/bitbucket/r.py" }, { "content": "# terrascript/github/d.py\n\nimport terrascript\n\n\nclass github_collaborators(terrascript.Data):\n pass\n\nclass github_ip_ranges(terrascript.Data):\n pass\n\nclass github_repositories(terrascript.Data):\n pass\n\nclass github_repository(terrascript.Data):\n pass\n\nclass github_team(terrascript.Data):\n pass\n\nclass github_user(terrascript.Data):\n pass\n", "id": "12837493", "language": "Python", "matching_score": 1.5581331253051758, "max_stars_count": 0, "path": "terrascript/github/d.py" }, { "content": "# terrascript/github/r.py\n\nimport terrascript\n\n\nclass github_branch_protection(terrascript.Resource):\n pass\n\nclass github_issue_label(terrascript.Resource):\n pass\n\nclass github_membership(terrascript.Resource):\n pass\n\nclass github_organization_block(terrascript.Resource):\n pass\n\nclass github_organization_project(terrascript.Resource):\n pass\n\nclass github_organization_webhook(terrascript.Resource):\n pass\n\nclass github_project_column(terrascript.Resource):\n 
pass\n\nclass github_repository_collaborator(terrascript.Resource):\n pass\n\nclass github_repository_deploy_key(terrascript.Resource):\n pass\n\nclass github_repository_project(terrascript.Resource):\n pass\n\nclass github_repository_webhook(terrascript.Resource):\n pass\n\nclass github_repository(terrascript.Resource):\n pass\n\nclass github_team_membership(terrascript.Resource):\n pass\n\nclass github_team_repository(terrascript.Resource):\n pass\n\nclass github_team(terrascript.Resource):\n pass\n\nclass github_user_gpg_key(terrascript.Resource):\n pass\n\nclass github_user_invitation_accepter(terrascript.Resource):\n pass\n\nclass github_user_ssh_key(terrascript.Resource):\n pass\n", "id": "10924625", "language": "Python", "matching_score": 2.2020606994628906, "max_stars_count": 0, "path": "terrascript/github/r.py" }, { "content": "# terrascript/gitlab/r.py\n\nimport terrascript\n\n\nclass gitlab_branch_protection(terrascript.Resource):\n pass\n\nclass gitlab_tag_protection(terrascript.Resource):\n pass\n\nclass gitlab_group(terrascript.Resource):\n pass\n\nclass gitlab_project(terrascript.Resource):\n pass\n\nclass gitlab_label(terrascript.Resource):\n pass\n\nclass gitlab_pipeline_schedule(terrascript.Resource):\n pass\n\nclass gitlab_pipeline_trigger(terrascript.Resource):\n pass\n\nclass gitlab_project_hook(terrascript.Resource):\n pass\n\nclass gitlab_deploy_key(terrascript.Resource):\n pass\n\nclass gitlab_user(terrascript.Resource):\n pass\n\nclass gitlab_project_membership(terrascript.Resource):\n pass\n\nclass gitlab_group_membership(terrascript.Resource):\n pass\n\nclass gitlab_project_variable(terrascript.Resource):\n pass\n\nclass gitlab_group_variable(terrascript.Resource):\n pass\n\nclass gitlab_project_cluster(terrascript.Resource):\n pass\n\nclass gitlab_service_slack(terrascript.Resource):\n pass\n\nclass gitlab_service_jira(terrascript.Resource):\n pass\n", "id": "9518919", "language": "Python", "matching_score": 2.019375801086426, "max_stars_count": 0, "path": "terrascript/gitlab/r.py" }, { "content": "# terrascript/gitlab/d.py\n\nimport terrascript\n\n\nclass gitlab_group(terrascript.Data):\n pass\n\nclass gitlab_project(terrascript.Data):\n pass\n\nclass gitlab_user(terrascript.Data):\n pass\n\nclass gitlab_users(terrascript.Data):\n pass\n", "id": "1257016", "language": "Python", "matching_score": 1.12619149684906, "max_stars_count": 0, "path": "terrascript/gitlab/d.py" }, { "content": "# terrascript/gitlab/__init__.py\n\nimport terrascript\n\nclass gitlab(terrascript.Provider):\n pass", "id": "8224445", "language": "Python", "matching_score": 0.5862390995025635, "max_stars_count": 0, "path": "terrascript/gitlab/__init__.py" }, { "content": "# terrascript/cloudflare/__init__.py\n\nimport terrascript\n\nclass cloudflare(terrascript.Provider):\n pass", "id": "1068191", "language": "Python", "matching_score": 0.6521193385124207, "max_stars_count": 0, "path": "terrascript/cloudflare/__init__.py" }, { "content": "# terrascript/cloudflare/d.py\n\nimport terrascript\n\n\nclass cloudflare_ip_ranges(terrascript.Data):\n pass\n\nclass cloudflare_zones(terrascript.Data):\n pass\n", "id": "934240", "language": "Python", "matching_score": 0.9827834963798523, "max_stars_count": 0, "path": "terrascript/cloudflare/d.py" }, { "content": "# terrascript/cloudflare/r.py\n\nimport terrascript\n\n\nclass cloudflare_access_application(terrascript.Resource):\n pass\n\nclass cloudflare_access_policy(terrascript.Resource):\n pass\n\nclass cloudflare_access_rule(terrascript.Resource):\n 
pass\n\nclass cloudflare_account_member(terrascript.Resource):\n pass\n\nclass cloudflare_argo(terrascript.Resource):\n pass\n\nclass cloudflare_custom_pages(terrascript.Resource):\n pass\n\nclass cloudflare_custom_ssl(terrascript.Resource):\n pass\n\nclass cloudflare_filter(terrascript.Resource):\n pass\n\nclass cloudflare_firewall_rule(terrascript.Resource):\n pass\n\nclass cloudflare_load_balancer_monitor(terrascript.Resource):\n pass\n\nclass cloudflare_load_balancer_pool(terrascript.Resource):\n pass\n\nclass cloudflare_load_balancer(terrascript.Resource):\n pass\n\nclass cloudflare_logpush_job(terrascript.Resource):\n pass\n\nclass cloudflare_page_rule(terrascript.Resource):\n pass\n\nclass cloudflare_rate_limit(terrascript.Resource):\n pass\n\nclass cloudflare_record(terrascript.Resource):\n pass\n\nclass cloudflare_spectrum_application(terrascript.Resource):\n pass\n\nclass cloudflare_waf_rule(terrascript.Resource):\n pass\n\nclass cloudflare_worker_route(terrascript.Resource):\n pass\n\nclass cloudflare_worker_script(terrascript.Resource):\n pass\n\nclass cloudflare_zone_lockdown(terrascript.Resource):\n pass\n\nclass cloudflare_zone_settings_override(terrascript.Resource):\n pass\n\nclass cloudflare_zone(terrascript.Resource):\n pass\n", "id": "7850327", "language": "Python", "matching_score": 1.3524885177612305, "max_stars_count": 0, "path": "terrascript/cloudflare/r.py" }, { "content": "# terrascript/clc/r.py\n\nimport terrascript\n\n\nclass clc_server(terrascript.Resource):\n pass\n\nclass clc_group(terrascript.Resource):\n pass\n\nclass clc_public_ip(terrascript.Resource):\n pass\n\nclass clc_load_balancer(terrascript.Resource):\n pass\n\nclass clc_load_balancer_pool(terrascript.Resource):\n pass\n", "id": "12704244", "language": "Python", "matching_score": 1.804325819015503, "max_stars_count": 0, "path": "terrascript/clc/r.py" }, { "content": "# terrascript/oneandone/r.py\n\nimport terrascript\n\n\nclass oneandone_server(terrascript.Resource):\n pass\n\nclass oneandone_firewall_policy(terrascript.Resource):\n pass\n\nclass oneandone_private_network(terrascript.Resource):\n pass\n\nclass oneandone_public_ip(terrascript.Resource):\n pass\n\nclass oneandone_shared_storage(terrascript.Resource):\n pass\n\nclass oneandone_monitoring_policy(terrascript.Resource):\n pass\n\nclass oneandone_loadbalancer(terrascript.Resource):\n pass\n\nclass oneandone_vpn(terrascript.Resource):\n pass\n\nclass oneandone_ssh_key(terrascript.Resource):\n pass\n\nclass oneandone_block_storage(terrascript.Resource):\n pass\n\nclass oneandone_image(terrascript.Resource):\n pass\n\nclass oneandone_baremetal(terrascript.Resource):\n pass\n", "id": "10000786", "language": "Python", "matching_score": 1.1225167512893677, "max_stars_count": 0, "path": "terrascript/oneandone/r.py" }, { "content": "# terrascript/oneandone/__init__.py\n\nimport terrascript\n\nclass oneandone(terrascript.Provider):\n pass", "id": "12127605", "language": "Python", "matching_score": 0.5862390995025635, "max_stars_count": 0, "path": "terrascript/oneandone/__init__.py" }, { "content": "# terrascript/digitalocean/__init__.py\n\nimport terrascript\n\nclass digitalocean(terrascript.Provider):\n pass", "id": "6227827", "language": "Python", "matching_score": 1.5512363910675049, "max_stars_count": 0, "path": "terrascript/digitalocean/__init__.py" }, { "content": "# terrascript/cloudscale/__init__.py\n\nimport terrascript\n\nclass cloudscale(terrascript.Provider):\n pass", "id": "5302813", "language": "Python", "matching_score": 
1.5512363910675049, "max_stars_count": 0, "path": "terrascript/cloudscale/__init__.py" }, { "content": "# terrascript/tls/__init__.py\n\nimport terrascript\n\nclass tls(terrascript.Provider):\n pass", "id": "1068806", "language": "Python", "matching_score": 0.5862390995025635, "max_stars_count": 0, "path": "terrascript/tls/__init__.py" }, { "content": "# terrascript/ignition/__init__.py\n\nimport terrascript\n\nclass ignition(terrascript.Provider):\n pass", "id": "10140505", "language": "Python", "matching_score": 1.1056170463562012, "max_stars_count": 0, "path": "terrascript/ignition/__init__.py" }, { "content": "# terrascript/ignition/r.py\n\nimport terrascript\n\n", "id": "3206238", "language": "Python", "matching_score": 0.945509135723114, "max_stars_count": 0, "path": "terrascript/ignition/r.py" }, { "content": "# terrascript/ignition/d.py\n\nimport terrascript\n\n\nclass ignition_config(terrascript.Data):\n pass\n\nclass ignition_disk(terrascript.Data):\n pass\n\nclass ignition_raid(terrascript.Data):\n pass\n\nclass ignition_filesystem(terrascript.Data):\n pass\n\nclass ignition_file(terrascript.Data):\n pass\n\nclass ignition_directory(terrascript.Data):\n pass\n\nclass ignition_link(terrascript.Data):\n pass\n\nclass ignition_systemd_unit(terrascript.Data):\n pass\n\nclass ignition_networkd_unit(terrascript.Data):\n pass\n\nclass ignition_user(terrascript.Data):\n pass\n\nclass ignition_group(terrascript.Data):\n pass\n", "id": "2886246", "language": "Python", "matching_score": 1.5156596899032593, "max_stars_count": 0, "path": "terrascript/ignition/d.py" }, { "content": "# terrascript/template/d.py\n\nimport terrascript\n\n\nclass template_file(terrascript.Data):\n pass\n\nclass template_cloudinit_config(terrascript.Data):\n pass\n", "id": "9402944", "language": "Python", "matching_score": 1.1427545547485352, "max_stars_count": 0, "path": "terrascript/template/d.py" }, { "content": "# terrascript/template/__init__.py\n\nimport terrascript\n\nclass template(terrascript.Provider):\n pass", "id": "3968070", "language": "Python", "matching_score": 1.1190576553344727, "max_stars_count": 0, "path": "terrascript/template/__init__.py" }, { "content": "# terrascript/template/r.py\n\nimport terrascript\n\n\nclass template_dir(terrascript.Resource):\n pass\n", "id": "11280879", "language": "Python", "matching_score": 0.5965160131454468, "max_stars_count": 0, "path": "terrascript/template/r.py" }, { "content": "# terrascript/heroku/r.py\n\nimport terrascript\n\n\nclass heroku_account_feature(terrascript.Resource):\n pass\n\nclass heroku_addon(terrascript.Resource):\n pass\n\nclass heroku_addon_attachment(terrascript.Resource):\n pass\n\nclass heroku_app(terrascript.Resource):\n pass\n\nclass heroku_app_config_association(terrascript.Resource):\n pass\n\nclass heroku_app_feature(terrascript.Resource):\n pass\n\nclass heroku_app_release(terrascript.Resource):\n pass\n\nclass heroku_build(terrascript.Resource):\n pass\n\nclass heroku_cert(terrascript.Resource):\n pass\n\nclass heroku_config(terrascript.Resource):\n pass\n\nclass heroku_domain(terrascript.Resource):\n pass\n\nclass heroku_drain(terrascript.Resource):\n pass\n\nclass heroku_formation(terrascript.Resource):\n pass\n\nclass heroku_pipeline(terrascript.Resource):\n pass\n\nclass heroku_pipeline_coupling(terrascript.Resource):\n pass\n\nclass heroku_slug(terrascript.Resource):\n pass\n\nclass heroku_space(terrascript.Resource):\n pass\n\nclass heroku_space_inbound_ruleset(terrascript.Resource):\n pass\n\nclass 
heroku_space_app_access(terrascript.Resource):\n pass\n\nclass heroku_space_peering_connection_accepter(terrascript.Resource):\n pass\n\nclass heroku_space_vpn_connection(terrascript.Resource):\n pass\n\nclass heroku_team_collaborator(terrascript.Resource):\n pass\n\nclass heroku_team_member(terrascript.Resource):\n pass\n", "id": "6198302", "language": "Python", "matching_score": 2.1014277935028076, "max_stars_count": 0, "path": "terrascript/heroku/r.py" }, { "content": "# terrascript/heroku/d.py\n\nimport terrascript\n\n\nclass heroku_addon(terrascript.Data):\n pass\n\nclass heroku_app(terrascript.Data):\n pass\n\nclass heroku_space(terrascript.Data):\n pass\n\nclass heroku_space_peering_info(terrascript.Data):\n pass\n\nclass heroku_team(terrascript.Data):\n pass\n", "id": "6960985", "language": "Python", "matching_score": 0.24083378911018372, "max_stars_count": 0, "path": "terrascript/heroku/d.py" }, { "content": "# terrascript/pagerduty/r.py\n\nimport terrascript\n\n\nclass pagerduty_addon(terrascript.Resource):\n pass\n\nclass pagerduty_escalation_policy(terrascript.Resource):\n pass\n\nclass pagerduty_maintenance_window(terrascript.Resource):\n pass\n\nclass pagerduty_schedule(terrascript.Resource):\n pass\n\nclass pagerduty_service(terrascript.Resource):\n pass\n\nclass pagerduty_service_integration(terrascript.Resource):\n pass\n\nclass pagerduty_team(terrascript.Resource):\n pass\n\nclass pagerduty_team_membership(terrascript.Resource):\n pass\n\nclass pagerduty_user(terrascript.Resource):\n pass\n\nclass pagerduty_user_contact_method(terrascript.Resource):\n pass\n\nclass pagerduty_extension(terrascript.Resource):\n pass\n", "id": "7979261", "language": "Python", "matching_score": 2.2563040256500244, "max_stars_count": 0, "path": "terrascript/pagerduty/r.py" }, { "content": "# terrascript/pagerduty/d.py\n\nimport terrascript\n\n\nclass pagerduty_escalation_policy(terrascript.Data):\n pass\n\nclass pagerduty_schedule(terrascript.Data):\n pass\n\nclass pagerduty_user(terrascript.Data):\n pass\n\nclass pagerduty_team(terrascript.Data):\n pass\n\nclass pagerduty_vendor(terrascript.Data):\n pass\n\nclass pagerduty_extension_schema(terrascript.Data):\n pass\n\nclass pagerduty_service(terrascript.Data):\n pass\n", "id": "12336293", "language": "Python", "matching_score": 1.0729498863220215, "max_stars_count": 0, "path": "terrascript/pagerduty/d.py" }, { "content": "# terrascript/opsgenie/r.py\n\nimport terrascript\n\n\nclass opsgenie_team(terrascript.Resource):\n pass\n\nclass opsgenie_user(terrascript.Resource):\n pass\n", "id": "4615887", "language": "Python", "matching_score": 1.653838872909546, "max_stars_count": 0, "path": "terrascript/opsgenie/r.py" }, { "content": "# terrascript/opsgenie/d.py\n\nimport terrascript\n\n\nclass opsgenie_user(terrascript.Data):\n pass\n", "id": "12141829", "language": "Python", "matching_score": 0.37200093269348145, "max_stars_count": 0, "path": "terrascript/opsgenie/d.py" }, { "content": "# terrascript/ns1/d.py\n\nimport terrascript\n\n\nclass ns1_zone(terrascript.Data):\n pass\n\nclass ns1_datasource(terrascript.Data):\n pass\n\nclass ns1_datafeed(terrascript.Data):\n pass\n", "id": "4379618", "language": "Python", "matching_score": 1.1225167512893677, "max_stars_count": 0, "path": "terrascript/ns1/d.py" }, { "content": "# terrascript/ns1/__init__.py\n\nimport terrascript\n\nclass ns1(terrascript.Provider):\n pass", "id": "4186908", "language": "Python", "matching_score": 0.006186316255480051, "max_stars_count": 0, "path": 
"terrascript/ns1/__init__.py" }, { "content": "# terrascript/circonus/r.py\n\nimport terrascript\n\n\nclass circonus_check(terrascript.Resource):\n pass\n\nclass circonus_contact_group(terrascript.Resource):\n pass\n\nclass circonus_graph(terrascript.Resource):\n pass\n\nclass circonus_metric(terrascript.Resource):\n pass\n\nclass circonus_metric_cluster(terrascript.Resource):\n pass\n\nclass circonus_rule_set(terrascript.Resource):\n pass\n\nclass circonus_worksheet(terrascript.Resource):\n pass\n", "id": "9359903", "language": "Python", "matching_score": 1.1513124704360962, "max_stars_count": 0, "path": "terrascript/circonus/r.py" }, { "content": "# terrascript/circonus/d.py\n\nimport terrascript\n\n\nclass circonus_account(terrascript.Data):\n pass\n\nclass circonus_collector(terrascript.Data):\n pass\n", "id": "11090374", "language": "Python", "matching_score": 0.2608492374420166, "max_stars_count": 0, "path": "terrascript/circonus/d.py" }, { "content": "# terrascript/logicmonitor/r.py\n\nimport terrascript\n\n\nclass logicmonitor_collector(terrascript.Resource):\n pass\n\nclass logicmonitor_collector_group(terrascript.Resource):\n pass\n\nclass logicmonitor_device(terrascript.Resource):\n pass\n\nclass logicmonitor_device_group(terrascript.Resource):\n pass\n", "id": "10258509", "language": "Python", "matching_score": 1.1225167512893677, "max_stars_count": 0, "path": "terrascript/logicmonitor/r.py" }, { "content": "# terrascript/logicmonitor/__init__.py\n\nimport terrascript\n\nclass logicmonitor(terrascript.Provider):\n pass", "id": "10838556", "language": "Python", "matching_score": 0.09372712671756744, "max_stars_count": 0, "path": "terrascript/logicmonitor/__init__.py" }, { "content": "# terrascript/archive/__init__.py\n\nimport terrascript\n\nclass archive(terrascript.Provider):\n pass", "id": "5256846", "language": "Python", "matching_score": 1.0320611000061035, "max_stars_count": 0, "path": "terrascript/archive/__init__.py" }, { "content": "# terrascript/archive/r.py\n\nimport terrascript\n\n", "id": "2513318", "language": "Python", "matching_score": 1, "max_stars_count": 0, "path": "terrascript/archive/r.py" }, { "content": "# terrascript/random/d.py\n\nimport terrascript\n\n", "id": "10009378", "language": "Python", "matching_score": 1, "max_stars_count": 0, "path": "terrascript/random/d.py" }, { "content": "# terrascript/http/r.py\n\nimport terrascript\n\n", "id": "869471", "language": "Python", "matching_score": 0.111897312104702, "max_stars_count": 0, "path": "terrascript/http/r.py" }, { "content": "# terrascript/oci/r.py\n\nimport terrascript\n\n", "id": "6216943", "language": "Python", "matching_score": 1.12619149684906, "max_stars_count": 0, "path": "terrascript/oci/r.py" }, { "content": "# terrascript/oci/__init__.py\n\nimport terrascript\n\nclass oci(terrascript.Provider):\n pass", "id": "636866", "language": "Python", "matching_score": 0, "max_stars_count": 0, "path": "terrascript/oci/__init__.py" }, { "content": "from .duplicates import check_duplicates, find_unique\nfrom .describe import describe\nfrom .outlier import Outlier\n\n__all__ = ['check_duplicates','find_unique', 'describe', 'Outlier']", "id": "60065", "language": "Python", "matching_score": 0.7259767651557922, "max_stars_count": 1, "path": "cleanflow/exploratory/__init__.py" }, { "content": "from .to_csv import to_csv\n\n__all__ = ['to_csv']", "id": "4641876", "language": "Python", "matching_score": 0, "max_stars_count": 1, "path": "cleanflow/getOutput/__init__.py" }, { "content": "# 
terrascript/vsphere/r.py\n\nimport terrascript\n\n\nclass vsphere_compute_cluster(terrascript.Resource):\n pass\n\nclass vsphere_compute_cluster_host_group(terrascript.Resource):\n pass\n\nclass vsphere_compute_cluster_vm_affinity_rule(terrascript.Resource):\n pass\n\nclass vsphere_compute_cluster_vm_anti_affinity_rule(terrascript.Resource):\n pass\n\nclass vsphere_compute_cluster_vm_dependency_rule(terrascript.Resource):\n pass\n\nclass vsphere_compute_cluster_vm_group(terrascript.Resource):\n pass\n\nclass vsphere_compute_cluster_vm_host_rule(terrascript.Resource):\n pass\n\nclass vsphere_custom_attribute(terrascript.Resource):\n pass\n\nclass vsphere_datacenter(terrascript.Resource):\n pass\n\nclass vsphere_datastore_cluster(terrascript.Resource):\n pass\n\nclass vsphere_datastore_cluster_vm_anti_affinity_rule(terrascript.Resource):\n pass\n\nclass vsphere_distributed_port_group(terrascript.Resource):\n pass\n\nclass vsphere_distributed_virtual_switch(terrascript.Resource):\n pass\n\nclass vsphere_drs_vm_override(terrascript.Resource):\n pass\n\nclass vsphere_dpm_host_override(terrascript.Resource):\n pass\n\nclass vsphere_file(terrascript.Resource):\n pass\n\nclass vsphere_folder(terrascript.Resource):\n pass\n\nclass vsphere_ha_vm_override(terrascript.Resource):\n pass\n\nclass vsphere_host_port_group(terrascript.Resource):\n pass\n\nclass vsphere_host_virtual_switch(terrascript.Resource):\n pass\n\nclass vsphere_license(terrascript.Resource):\n pass\n\nclass vsphere_resource_pool(terrascript.Resource):\n pass\n\nclass vsphere_tag(terrascript.Resource):\n pass\n\nclass vsphere_tag_category(terrascript.Resource):\n pass\n\nclass vsphere_virtual_disk(terrascript.Resource):\n pass\n\nclass vsphere_virtual_machine(terrascript.Resource):\n pass\n\nclass vsphere_nas_datastore(terrascript.Resource):\n pass\n\nclass vsphere_storage_drs_vm_override(terrascript.Resource):\n pass\n\nclass vsphere_vapp_container(terrascript.Resource):\n pass\n\nclass vsphere_vapp_entity(terrascript.Resource):\n pass\n\nclass vsphere_vmfs_datastore(terrascript.Resource):\n pass\n\nclass vsphere_virtual_machine_snapshot(terrascript.Resource):\n pass\n", "id": "2967231", "language": "Python", "matching_score": 3.063760757446289, "max_stars_count": 0, "path": "terrascript/vsphere/r.py" }, { "content": "# terrascript/vsphere/d.py\n\nimport terrascript\n\n\nclass vsphere_compute_cluster(terrascript.Data):\n pass\n\nclass vsphere_custom_attribute(terrascript.Data):\n pass\n\nclass vsphere_datacenter(terrascript.Data):\n pass\n\nclass vsphere_datastore(terrascript.Data):\n pass\n\nclass vsphere_datastore_cluster(terrascript.Data):\n pass\n\nclass vsphere_distributed_virtual_switch(terrascript.Data):\n pass\n\nclass vsphere_folder(terrascript.Data):\n pass\n\nclass vsphere_host(terrascript.Data):\n pass\n\nclass vsphere_network(terrascript.Data):\n pass\n\nclass vsphere_resource_pool(terrascript.Data):\n pass\n\nclass vsphere_tag(terrascript.Data):\n pass\n\nclass vsphere_tag_category(terrascript.Data):\n pass\n\nclass vsphere_vapp_container(terrascript.Data):\n pass\n\nclass vsphere_virtual_machine(terrascript.Data):\n pass\n\nclass vsphere_vmfs_disks(terrascript.Data):\n pass\n", "id": "6541827", "language": "Python", "matching_score": 0.632091224193573, "max_stars_count": 0, "path": "terrascript/vsphere/d.py" }, { "content": "# terrascript/vcd/r.py\n\nimport terrascript\n\n\nclass vcd_network(terrascript.Resource):\n pass\n\nclass vcd_network_routed(terrascript.Resource):\n pass\n\nclass 
vcd_network_direct(terrascript.Resource):\n pass\n\nclass vcd_network_isolated(terrascript.Resource):\n pass\n\nclass vcd_vapp_network(terrascript.Resource):\n pass\n\nclass vcd_vapp(terrascript.Resource):\n pass\n\nclass vcd_firewall_rules(terrascript.Resource):\n pass\n\nclass vcd_dnat(terrascript.Resource):\n pass\n\nclass vcd_snat(terrascript.Resource):\n pass\n\nclass vcd_edgegateway(terrascript.Resource):\n pass\n\nclass vcd_edgegateway_vpn(terrascript.Resource):\n pass\n\nclass vcd_vapp_vm(terrascript.Resource):\n pass\n\nclass vcd_org(terrascript.Resource):\n pass\n\nclass vcd_org_vdc(terrascript.Resource):\n pass\n\nclass vcd_org_user(terrascript.Resource):\n pass\n\nclass vcd_catalog(terrascript.Resource):\n pass\n\nclass vcd_catalog_item(terrascript.Resource):\n pass\n\nclass vcd_catalog_media(terrascript.Resource):\n pass\n\nclass vcd_inserted_media(terrascript.Resource):\n pass\n\nclass vcd_independent_disk(terrascript.Resource):\n pass\n\nclass vcd_external_network(terrascript.Resource):\n pass\n\nclass vcd_lb_service_monitor(terrascript.Resource):\n pass\n\nclass vcd_lb_server_pool(terrascript.Resource):\n pass\n\nclass vcd_lb_app_profile(terrascript.Resource):\n pass\n\nclass vcd_lb_app_rule(terrascript.Resource):\n pass\n\nclass vcd_lb_virtual_server(terrascript.Resource):\n pass\n", "id": "1525503", "language": "Python", "matching_score": 3.1577067375183105, "max_stars_count": 0, "path": "terrascript/vcd/r.py" }, { "content": "# terrascript/vcd/d.py\n\nimport terrascript\n\n\nclass vcd_org(terrascript.Data):\n pass\n\nclass vcd_catalog(terrascript.Data):\n pass\n\nclass vcd_catalog_item(terrascript.Data):\n pass\n\nclass vcd_lb_service_monitor(terrascript.Data):\n pass\n\nclass vcd_lb_server_pool(terrascript.Data):\n pass\n\nclass vcd_lb_app_profile(terrascript.Data):\n pass\n\nclass vcd_lb_app_rule(terrascript.Data):\n pass\n\nclass vcd_lb_virtual_server(terrascript.Data):\n pass\n", "id": "10836030", "language": "Python", "matching_score": 1.109212040901184, "max_stars_count": 0, "path": "terrascript/vcd/d.py" }, { "content": "# terrascript/vcd/__init__.py\n\nimport terrascript\n\nclass vcd(terrascript.Provider):\n pass", "id": "8992444", "language": "Python", "matching_score": 1.5512363910675049, "max_stars_count": 0, "path": "terrascript/vcd/__init__.py" }, { "content": "# terrascript/http/__init__.py\n\nimport terrascript\n\nclass http(terrascript.Provider):\n pass", "id": "5056001", "language": "Python", "matching_score": 0.3885163962841034, "max_stars_count": 0, "path": "terrascript/http/__init__.py" }, { "content": "# terrascript/vsphere/__init__.py\n\nimport terrascript\n\nclass vsphere(terrascript.Provider):\n pass", "id": "1465682", "language": "Python", "matching_score": 1.5512363910675049, "max_stars_count": 0, "path": "terrascript/vsphere/__init__.py" }, { "content": "# terrascript/influxdb/__init__.py\n\nimport terrascript\n\nclass influxdb(terrascript.Provider):\n pass", "id": "7100027", "language": "Python", "matching_score": 0.5862390995025635, "max_stars_count": 0, "path": "terrascript/influxdb/__init__.py" }, { "content": "# terrascript/opsgenie/__init__.py\n\nimport terrascript\n\nclass opsgenie(terrascript.Provider):\n pass", "id": "3714451", "language": "Python", "matching_score": 0.5862390995025635, "max_stars_count": 0, "path": "terrascript/opsgenie/__init__.py" }, { "content": "# terrascript/opc/__init__.py\n\nimport terrascript\n\nclass opc(terrascript.Provider):\n pass", "id": "2907983", "language": "Python", "matching_score": 
0.5862390995025635, "max_stars_count": 0, "path": "terrascript/opc/__init__.py" }, { "content": "# terrascript/heroku/__init__.py\n\nimport terrascript\n\nclass heroku(terrascript.Provider):\n pass", "id": "2307076", "language": "Python", "matching_score": 1.5512363910675049, "max_stars_count": 0, "path": "terrascript/heroku/__init__.py" }, { "content": "# terrascript/random/__init__.py\n\nimport terrascript\n\nclass random(terrascript.Provider):\n pass", "id": "3398752", "language": "Python", "matching_score": 0.547234833240509, "max_stars_count": 0, "path": "terrascript/random/__init__.py" }, { "content": "# terrascript/clc/__init__.py\n\nimport terrascript\n\nclass clc(terrascript.Provider):\n pass", "id": "11474696", "language": "Python", "matching_score": 0.5862390995025635, "max_stars_count": 0, "path": "terrascript/clc/__init__.py" }, { "content": "# terrascript/google/__init__.py\n\nimport terrascript\n\nclass google(terrascript.Provider):\n pass", "id": "12818874", "language": "Python", "matching_score": 1.5512363910675049, "max_stars_count": 0, "path": "terrascript/google/__init__.py" }, { "content": "# terrascript/statuscake/__init__.py\n\nimport terrascript\n\nclass statuscake(terrascript.Provider):\n pass", "id": "5789650", "language": "Python", "matching_score": 0.6188668608665466, "max_stars_count": 0, "path": "terrascript/statuscake/__init__.py" }, { "content": "# terrascript/pagerduty/__init__.py\n\nimport terrascript\n\nclass pagerduty(terrascript.Provider):\n pass", "id": "9241467", "language": "Python", "matching_score": 0.5862390995025635, "max_stars_count": 0, "path": "terrascript/pagerduty/__init__.py" }, { "content": "# terrascript/profitbricks/__init__.py\n\nimport terrascript\n\nclass profitbricks(terrascript.Provider):\n pass", "id": "10625282", "language": "Python", "matching_score": 0.5862390995025635, "max_stars_count": 0, "path": "terrascript/profitbricks/__init__.py" }, { "content": "# terrascript/circonus/__init__.py\n\nimport terrascript\n\nclass circonus(terrascript.Provider):\n pass", "id": "6350638", "language": "Python", "matching_score": 1.5512363910675049, "max_stars_count": 0, "path": "terrascript/circonus/__init__.py" }, { "content": "# terrascript/powerdns/__init__.py\n\nimport terrascript\n\nclass powerdns(terrascript.Provider):\n pass", "id": "3140646", "language": "Python", "matching_score": 0.5862390995025635, "max_stars_count": 0, "path": "terrascript/powerdns/__init__.py" }, { "content": "# terrascript/azurerm/__init__.py\n\nimport terrascript\n\nclass azurerm(terrascript.Provider):\n pass", "id": "8209644", "language": "Python", "matching_score": 1.5512363910675049, "max_stars_count": 0, "path": "terrascript/azurerm/__init__.py" }, { "content": "# terrascript/grafana/__init__.py\n\nimport terrascript\n\nclass grafana(terrascript.Provider):\n pass", "id": "11285059", "language": "Python", "matching_score": 0.5862390995025635, "max_stars_count": 0, "path": "terrascript/grafana/__init__.py" }, { "content": "# terrascript/datadog/__init__.py\n\nimport terrascript\n\nclass datadog(terrascript.Provider):\n pass", "id": "7966973", "language": "Python", "matching_score": 1.1056170463562012, "max_stars_count": 0, "path": "terrascript/datadog/__init__.py" }, { "content": "# terrascript/datadog/d.py\n\nimport terrascript\n\n", "id": "10770386", "language": "Python", "matching_score": 0.9770662784576416, "max_stars_count": 0, "path": "terrascript/datadog/d.py" }, { "content": "# terrascript/datadog/r.py\n\nimport terrascript\n\n\nclass 
datadog_downtime(terrascript.Resource):\n pass\n\nclass datadog_metric_metadata(terrascript.Resource):\n pass\n\nclass datadog_monitor(terrascript.Resource):\n pass\n\nclass datadog_synthetics_test(terrascript.Resource):\n pass\n\nclass datadog_timeboard(terrascript.Resource):\n pass\n\nclass datadog_screenboard(terrascript.Resource):\n pass\n\nclass datadog_dashboard(terrascript.Resource):\n pass\n\nclass datadog_user(terrascript.Resource):\n pass\n\nclass datadog_integration_gcp(terrascript.Resource):\n pass\n\nclass datadog_integration_aws(terrascript.Resource):\n pass\n\nclass datadog_integration_pagerduty(terrascript.Resource):\n pass\n\nclass datadog_integration_pagerduty_service_object(terrascript.Resource):\n pass\n\nclass datadog_service_level_objective(terrascript.Resource):\n pass\n", "id": "11185526", "language": "Python", "matching_score": 0.94333416223526, "max_stars_count": 0, "path": "terrascript/datadog/r.py" }, { "content": "# terrascript/librato/r.py\n\nimport terrascript\n\n\nclass librato_space(terrascript.Resource):\n pass\n\nclass librato_space_chart(terrascript.Resource):\n pass\n\nclass librato_metric(terrascript.Resource):\n pass\n\nclass librato_alert(terrascript.Resource):\n pass\n\nclass librato_service(terrascript.Resource):\n pass\n", "id": "1132935", "language": "Python", "matching_score": 0.5629275441169739, "max_stars_count": 0, "path": "terrascript/librato/r.py" }, { "content": "# terrascript/kubernetes/r.py\n\nimport terrascript\n\n\nclass kubernetes_api_service(terrascript.Resource):\n pass\n\nclass kubernetes_cluster_role(terrascript.Resource):\n pass\n\nclass kubernetes_cluster_role_binding(terrascript.Resource):\n pass\n\nclass kubernetes_config_map(terrascript.Resource):\n pass\n\nclass kubernetes_cron_job(terrascript.Resource):\n pass\n\nclass kubernetes_daemonset(terrascript.Resource):\n pass\n\nclass kubernetes_deployment(terrascript.Resource):\n pass\n\nclass kubernetes_endpoints(terrascript.Resource):\n pass\n\nclass kubernetes_horizontal_pod_autoscaler(terrascript.Resource):\n pass\n\nclass kubernetes_ingress(terrascript.Resource):\n pass\n\nclass kubernetes_job(terrascript.Resource):\n pass\n\nclass kubernetes_limit_range(terrascript.Resource):\n pass\n\nclass kubernetes_namespace(terrascript.Resource):\n pass\n\nclass kubernetes_network_policy(terrascript.Resource):\n pass\n\nclass kubernetes_persistent_volume(terrascript.Resource):\n pass\n\nclass kubernetes_persistent_volume_claim(terrascript.Resource):\n pass\n\nclass kubernetes_pod(terrascript.Resource):\n pass\n\nclass kubernetes_replication_controller(terrascript.Resource):\n pass\n\nclass kubernetes_role_binding(terrascript.Resource):\n pass\n\nclass kubernetes_resource_quota(terrascript.Resource):\n pass\n\nclass kubernetes_role(terrascript.Resource):\n pass\n\nclass kubernetes_secret(terrascript.Resource):\n pass\n\nclass kubernetes_service(terrascript.Resource):\n pass\n\nclass kubernetes_service_account(terrascript.Resource):\n pass\n\nclass kubernetes_stateful_set(terrascript.Resource):\n pass\n\nclass kubernetes_storage_class(terrascript.Resource):\n pass\n", "id": "8864673", "language": "Python", "matching_score": 1.9973710775375366, "max_stars_count": 0, "path": "terrascript/kubernetes/r.py" }, { "content": "# terrascript/docker/r.py\n\nimport terrascript\n\n\nclass docker_container(terrascript.Resource):\n pass\n\nclass docker_image(terrascript.Resource):\n pass\n\nclass docker_network(terrascript.Resource):\n pass\n\nclass docker_volume(terrascript.Resource):\n pass\n\nclass 
docker_config(terrascript.Resource):\n pass\n\nclass docker_secret(terrascript.Resource):\n pass\n\nclass docker_service(terrascript.Resource):\n pass\n", "id": "12024994", "language": "Python", "matching_score": 1.5116901397705078, "max_stars_count": 0, "path": "terrascript/docker/r.py" }, { "content": "# terrascript/docker/d.py\n\nimport terrascript\n\n\nclass docker_registry_image(terrascript.Data):\n pass\n\nclass docker_network(terrascript.Data):\n pass\n", "id": "11281591", "language": "Python", "matching_score": 1.12619149684906, "max_stars_count": 0, "path": "terrascript/docker/d.py" }, { "content": "# terrascript/docker/__init__.py\n\nimport terrascript\n\nclass docker(terrascript.Provider):\n pass", "id": "3439791", "language": "Python", "matching_score": 0.5862390995025635, "max_stars_count": 0, "path": "terrascript/docker/__init__.py" }, { "content": "# terrascript/ovh/__init__.py\n\nimport terrascript\n\nclass ovh(terrascript.Provider):\n pass", "id": "4560700", "language": "Python", "matching_score": 0.5862390995025635, "max_stars_count": 0, "path": "terrascript/ovh/__init__.py" }, { "content": "# terrascript/kubernetes/__init__.py\n\nimport terrascript\n\nclass kubernetes(terrascript.Provider):\n pass", "id": "261744", "language": "Python", "matching_score": 0.5862390995025635, "max_stars_count": 0, "path": "terrascript/kubernetes/__init__.py" }, { "content": "# terrascript/openstack/__init__.py\n\nimport terrascript\n\nclass openstack(terrascript.Provider):\n pass", "id": "645477", "language": "Python", "matching_score": 0.9761695861816406, "max_stars_count": 0, "path": "terrascript/openstack/__init__.py" }, { "content": "# terrascript/openstack/r.py\n\nimport terrascript\n\n\nclass openstack_blockstorage_quotaset_v2(terrascript.Resource):\n pass\n\nclass openstack_blockstorage_quotaset_v3(terrascript.Resource):\n pass\n\nclass openstack_blockstorage_volume_v1(terrascript.Resource):\n pass\n\nclass openstack_blockstorage_volume_v2(terrascript.Resource):\n pass\n\nclass openstack_blockstorage_volume_v3(terrascript.Resource):\n pass\n\nclass openstack_blockstorage_volume_attach_v2(terrascript.Resource):\n pass\n\nclass openstack_blockstorage_volume_attach_v3(terrascript.Resource):\n pass\n\nclass openstack_compute_flavor_v2(terrascript.Resource):\n pass\n\nclass openstack_compute_flavor_access_v2(terrascript.Resource):\n pass\n\nclass openstack_compute_instance_v2(terrascript.Resource):\n pass\n\nclass openstack_compute_interface_attach_v2(terrascript.Resource):\n pass\n\nclass openstack_compute_keypair_v2(terrascript.Resource):\n pass\n\nclass openstack_compute_secgroup_v2(terrascript.Resource):\n pass\n\nclass openstack_compute_servergroup_v2(terrascript.Resource):\n pass\n\nclass openstack_compute_floatingip_v2(terrascript.Resource):\n pass\n\nclass openstack_compute_floatingip_associate_v2(terrascript.Resource):\n pass\n\nclass openstack_compute_volume_attach_v2(terrascript.Resource):\n pass\n\nclass openstack_containerinfra_clustertemplate_v1(terrascript.Resource):\n pass\n\nclass openstack_containerinfra_cluster_v1(terrascript.Resource):\n pass\n\nclass openstack_db_instance_v1(terrascript.Resource):\n pass\n\nclass openstack_db_user_v1(terrascript.Resource):\n pass\n\nclass openstack_db_configuration_v1(terrascript.Resource):\n pass\n\nclass openstack_db_database_v1(terrascript.Resource):\n pass\n\nclass openstack_dns_recordset_v2(terrascript.Resource):\n pass\n\nclass openstack_dns_zone_v2(terrascript.Resource):\n pass\n\nclass 
openstack_fw_firewall_v1(terrascript.Resource):\n pass\n\nclass openstack_fw_policy_v1(terrascript.Resource):\n pass\n\nclass openstack_fw_rule_v1(terrascript.Resource):\n pass\n\nclass openstack_identity_endpoint_v3(terrascript.Resource):\n pass\n\nclass openstack_identity_project_v3(terrascript.Resource):\n pass\n\nclass openstack_identity_role_v3(terrascript.Resource):\n pass\n\nclass openstack_identity_role_assignment_v3(terrascript.Resource):\n pass\n\nclass openstack_identity_service_v3(terrascript.Resource):\n pass\n\nclass openstack_identity_user_v3(terrascript.Resource):\n pass\n\nclass openstack_identity_application_credential_v3(terrascript.Resource):\n pass\n\nclass openstack_images_image_v2(terrascript.Resource):\n pass\n\nclass openstack_lb_member_v1(terrascript.Resource):\n pass\n\nclass openstack_lb_monitor_v1(terrascript.Resource):\n pass\n\nclass openstack_lb_pool_v1(terrascript.Resource):\n pass\n\nclass openstack_lb_vip_v1(terrascript.Resource):\n pass\n\nclass openstack_lb_loadbalancer_v2(terrascript.Resource):\n pass\n\nclass openstack_lb_listener_v2(terrascript.Resource):\n pass\n\nclass openstack_lb_pool_v2(terrascript.Resource):\n pass\n\nclass openstack_lb_member_v2(terrascript.Resource):\n pass\n\nclass openstack_lb_monitor_v2(terrascript.Resource):\n pass\n\nclass openstack_lb_l7policy_v2(terrascript.Resource):\n pass\n\nclass openstack_lb_l7rule_v2(terrascript.Resource):\n pass\n\nclass openstack_networking_floatingip_v2(terrascript.Resource):\n pass\n\nclass openstack_networking_floatingip_associate_v2(terrascript.Resource):\n pass\n\nclass openstack_networking_network_v2(terrascript.Resource):\n pass\n\nclass openstack_networking_port_v2(terrascript.Resource):\n pass\n\nclass openstack_networking_rbac_policy_v2(terrascript.Resource):\n pass\n\nclass openstack_networking_port_secgroup_associate_v2(terrascript.Resource):\n pass\n\nclass openstack_networking_qos_bandwidth_limit_rule_v2(terrascript.Resource):\n pass\n\nclass openstack_networking_qos_dscp_marking_rule_v2(terrascript.Resource):\n pass\n\nclass openstack_networking_qos_minimum_bandwidth_rule_v2(terrascript.Resource):\n pass\n\nclass openstack_networking_qos_policy_v2(terrascript.Resource):\n pass\n\nclass openstack_networking_router_v2(terrascript.Resource):\n pass\n\nclass openstack_networking_router_interface_v2(terrascript.Resource):\n pass\n\nclass openstack_networking_router_route_v2(terrascript.Resource):\n pass\n\nclass openstack_networking_secgroup_v2(terrascript.Resource):\n pass\n\nclass openstack_networking_secgroup_rule_v2(terrascript.Resource):\n pass\n\nclass openstack_networking_subnet_v2(terrascript.Resource):\n pass\n\nclass openstack_networking_subnet_route_v2(terrascript.Resource):\n pass\n\nclass openstack_networking_subnetpool_v2(terrascript.Resource):\n pass\n\nclass openstack_networking_addressscope_v2(terrascript.Resource):\n pass\n\nclass openstack_networking_trunk_v2(terrascript.Resource):\n pass\n\nclass openstack_objectstorage_container_v1(terrascript.Resource):\n pass\n\nclass openstack_objectstorage_object_v1(terrascript.Resource):\n pass\n\nclass openstack_objectstorage_tempurl_v1(terrascript.Resource):\n pass\n\nclass openstack_vpnaas_ipsec_policy_v2(terrascript.Resource):\n pass\n\nclass openstack_vpnaas_service_v2(terrascript.Resource):\n pass\n\nclass openstack_vpnaas_ike_policy_v2(terrascript.Resource):\n pass\n\nclass openstack_vpnaas_endpoint_group_v2(terrascript.Resource):\n pass\n\nclass openstack_vpnaas_site_connection_v2(terrascript.Resource):\n 
pass\n\nclass openstack_sharedfilesystem_securityservice_v2(terrascript.Resource):\n pass\n\nclass openstack_sharedfilesystem_sharenetwork_v2(terrascript.Resource):\n pass\n\nclass openstack_sharedfilesystem_share_v2(terrascript.Resource):\n pass\n\nclass openstack_sharedfilesystem_share_access_v2(terrascript.Resource):\n pass\n\nclass openstack_keymanager_secret_v1(terrascript.Resource):\n pass\n\nclass openstack_keymanager_container_v1(terrascript.Resource):\n pass\n", "id": "10791744", "language": "Python", "matching_score": 3.617455244064331, "max_stars_count": 0, "path": "terrascript/openstack/r.py" }, { "content": "# terrascript/openstack/d.py\n\nimport terrascript\n\n\nclass openstack_blockstorage_availability_zones_v3(terrascript.Data):\n pass\n\nclass openstack_blockstorage_snapshot_v2(terrascript.Data):\n pass\n\nclass openstack_blockstorage_snapshot_v3(terrascript.Data):\n pass\n\nclass openstack_compute_availability_zones_v2(terrascript.Data):\n pass\n\nclass openstack_compute_flavor_v2(terrascript.Data):\n pass\n\nclass openstack_compute_keypair_v2(terrascript.Data):\n pass\n\nclass openstack_containerinfra_clustertemplate_v1(terrascript.Data):\n pass\n\nclass openstack_containerinfra_cluster_v1(terrascript.Data):\n pass\n\nclass openstack_dns_zone_v2(terrascript.Data):\n pass\n\nclass openstack_fw_policy_v1(terrascript.Data):\n pass\n\nclass openstack_identity_role_v3(terrascript.Data):\n pass\n\nclass openstack_identity_project_v3(terrascript.Data):\n pass\n\nclass openstack_identity_user_v3(terrascript.Data):\n pass\n\nclass openstack_identity_auth_scope_v3(terrascript.Data):\n pass\n\nclass openstack_identity_endpoint_v3(terrascript.Data):\n pass\n\nclass openstack_identity_service_v3(terrascript.Data):\n pass\n\nclass openstack_identity_group_v3(terrascript.Data):\n pass\n\nclass openstack_images_image_v2(terrascript.Data):\n pass\n\nclass openstack_networking_addressscope_v2(terrascript.Data):\n pass\n\nclass openstack_networking_network_v2(terrascript.Data):\n pass\n\nclass openstack_networking_qos_bandwidth_limit_rule_v2(terrascript.Data):\n pass\n\nclass openstack_networking_qos_dscp_marking_rule_v2(terrascript.Data):\n pass\n\nclass openstack_networking_qos_minimum_bandwidth_rule_v2(terrascript.Data):\n pass\n\nclass openstack_networking_qos_policy_v2(terrascript.Data):\n pass\n\nclass openstack_networking_subnet_v2(terrascript.Data):\n pass\n\nclass openstack_networking_secgroup_v2(terrascript.Data):\n pass\n\nclass openstack_networking_subnetpool_v2(terrascript.Data):\n pass\n\nclass openstack_networking_floatingip_v2(terrascript.Data):\n pass\n\nclass openstack_networking_router_v2(terrascript.Data):\n pass\n\nclass openstack_networking_port_v2(terrascript.Data):\n pass\n\nclass openstack_networking_port_ids_v2(terrascript.Data):\n pass\n\nclass openstack_networking_trunk_v2(terrascript.Data):\n pass\n\nclass openstack_sharedfilesystem_availability_zones_v2(terrascript.Data):\n pass\n\nclass openstack_sharedfilesystem_sharenetwork_v2(terrascript.Data):\n pass\n\nclass openstack_sharedfilesystem_share_v2(terrascript.Data):\n pass\n\nclass openstack_sharedfilesystem_snapshot_v2(terrascript.Data):\n pass\n\nclass openstack_keymanager_secret_v1(terrascript.Data):\n pass\n", "id": "3851881", "language": "Python", "matching_score": 0.5509139895439148, "max_stars_count": 0, "path": "terrascript/openstack/d.py" }, { "content": "# terrascript/google/r.py\n\nimport terrascript\n\n\nclass google_access_context_manager_access_policy(terrascript.Resource):\n pass\n\nclass 
google_access_context_manager_access_level(terrascript.Resource):\n pass\n\nclass google_access_context_manager_service_perimeter(terrascript.Resource):\n pass\n\nclass google_app_engine_firewall_rule(terrascript.Resource):\n pass\n\nclass google_bigquery_dataset(terrascript.Resource):\n pass\n\nclass google_bigquery_data_transfer_config(terrascript.Resource):\n pass\n\nclass google_bigtable_app_profile(terrascript.Resource):\n pass\n\nclass google_binary_authorization_attestor(terrascript.Resource):\n pass\n\nclass google_binary_authorization_policy(terrascript.Resource):\n pass\n\nclass google_cloudbuild_trigger(terrascript.Resource):\n pass\n\nclass google_cloud_scheduler_job(terrascript.Resource):\n pass\n\nclass google_compute_address(terrascript.Resource):\n pass\n\nclass google_compute_autoscaler(terrascript.Resource):\n pass\n\nclass google_compute_backend_bucket(terrascript.Resource):\n pass\n\nclass google_compute_backend_bucket_signed_url_key(terrascript.Resource):\n pass\n\nclass google_compute_backend_service(terrascript.Resource):\n pass\n\nclass google_compute_region_backend_service(terrascript.Resource):\n pass\n\nclass google_compute_backend_service_signed_url_key(terrascript.Resource):\n pass\n\nclass google_compute_disk(terrascript.Resource):\n pass\n\nclass google_compute_firewall(terrascript.Resource):\n pass\n\nclass google_compute_forwarding_rule(terrascript.Resource):\n pass\n\nclass google_compute_global_address(terrascript.Resource):\n pass\n\nclass google_compute_global_forwarding_rule(terrascript.Resource):\n pass\n\nclass google_compute_http_health_check(terrascript.Resource):\n pass\n\nclass google_compute_https_health_check(terrascript.Resource):\n pass\n\nclass google_compute_health_check(terrascript.Resource):\n pass\n\nclass google_compute_image(terrascript.Resource):\n pass\n\nclass google_compute_interconnect_attachment(terrascript.Resource):\n pass\n\nclass google_compute_network(terrascript.Resource):\n pass\n\nclass google_compute_network_endpoint(terrascript.Resource):\n pass\n\nclass google_compute_network_endpoint_group(terrascript.Resource):\n pass\n\nclass google_compute_node_group(terrascript.Resource):\n pass\n\nclass google_compute_node_template(terrascript.Resource):\n pass\n\nclass google_compute_region_autoscaler(terrascript.Resource):\n pass\n\nclass google_compute_region_disk(terrascript.Resource):\n pass\n\nclass google_compute_route(terrascript.Resource):\n pass\n\nclass google_compute_router(terrascript.Resource):\n pass\n\nclass google_compute_snapshot(terrascript.Resource):\n pass\n\nclass google_compute_ssl_certificate(terrascript.Resource):\n pass\n\nclass google_compute_ssl_policy(terrascript.Resource):\n pass\n\nclass google_compute_subnetwork(terrascript.Resource):\n pass\n\nclass google_compute_target_http_proxy(terrascript.Resource):\n pass\n\nclass google_compute_target_https_proxy(terrascript.Resource):\n pass\n\nclass google_compute_target_instance(terrascript.Resource):\n pass\n\nclass google_compute_target_ssl_proxy(terrascript.Resource):\n pass\n\nclass google_compute_target_tcp_proxy(terrascript.Resource):\n pass\n\nclass google_compute_vpn_gateway(terrascript.Resource):\n pass\n\nclass google_compute_url_map(terrascript.Resource):\n pass\n\nclass google_compute_vpn_tunnel(terrascript.Resource):\n pass\n\nclass google_dns_managed_zone(terrascript.Resource):\n pass\n\nclass google_filestore_instance(terrascript.Resource):\n pass\n\nclass google_firestore_index(terrascript.Resource):\n pass\n\nclass 
google_kms_key_ring(terrascript.Resource):\n pass\n\nclass google_kms_crypto_key(terrascript.Resource):\n pass\n\nclass google_logging_metric(terrascript.Resource):\n pass\n\nclass google_ml_engine_model(terrascript.Resource):\n pass\n\nclass google_monitoring_alert_policy(terrascript.Resource):\n pass\n\nclass google_monitoring_group(terrascript.Resource):\n pass\n\nclass google_monitoring_notification_channel(terrascript.Resource):\n pass\n\nclass google_monitoring_uptime_check_config(terrascript.Resource):\n pass\n\nclass google_pubsub_topic(terrascript.Resource):\n pass\n\nclass google_pubsub_subscription(terrascript.Resource):\n pass\n\nclass google_redis_instance(terrascript.Resource):\n pass\n\nclass google_resource_manager_lien(terrascript.Resource):\n pass\n\nclass google_scc_source(terrascript.Resource):\n pass\n\nclass google_sourcerepo_repository(terrascript.Resource):\n pass\n\nclass google_spanner_instance(terrascript.Resource):\n pass\n\nclass google_spanner_database(terrascript.Resource):\n pass\n\nclass google_sql_database(terrascript.Resource):\n pass\n\nclass google_storage_object_access_control(terrascript.Resource):\n pass\n\nclass google_storage_default_object_access_control(terrascript.Resource):\n pass\n\nclass google_tpu_node(terrascript.Resource):\n pass\n\nclass google_app_engine_application(terrascript.Resource):\n pass\n\nclass google_bigquery_table(terrascript.Resource):\n pass\n\nclass google_bigtable_instance(terrascript.Resource):\n pass\n\nclass google_bigtable_table(terrascript.Resource):\n pass\n\nclass google_cloudfunctions_function(terrascript.Resource):\n pass\n\nclass google_cloudiot_registry(terrascript.Resource):\n pass\n\nclass google_composer_environment(terrascript.Resource):\n pass\n\nclass google_compute_attached_disk(terrascript.Resource):\n pass\n\nclass google_compute_instance(terrascript.Resource):\n pass\n\nclass google_compute_instance_from_template(terrascript.Resource):\n pass\n\nclass google_compute_instance_group(terrascript.Resource):\n pass\n\nclass google_compute_instance_group_manager(terrascript.Resource):\n pass\n\nclass google_compute_instance_template(terrascript.Resource):\n pass\n\nclass google_compute_network_peering(terrascript.Resource):\n pass\n\nclass google_compute_project_default_network_tier(terrascript.Resource):\n pass\n\nclass google_compute_project_metadata(terrascript.Resource):\n pass\n\nclass google_compute_project_metadata_item(terrascript.Resource):\n pass\n\nclass google_compute_region_instance_group_manager(terrascript.Resource):\n pass\n\nclass google_compute_router_interface(terrascript.Resource):\n pass\n\nclass google_compute_router_nat(terrascript.Resource):\n pass\n\nclass google_compute_router_peer(terrascript.Resource):\n pass\n\nclass google_compute_security_policy(terrascript.Resource):\n pass\n\nclass google_compute_shared_vpc_host_project(terrascript.Resource):\n pass\n\nclass google_compute_shared_vpc_service_project(terrascript.Resource):\n pass\n\nclass google_compute_target_pool(terrascript.Resource):\n pass\n\nclass google_container_cluster(terrascript.Resource):\n pass\n\nclass google_container_node_pool(terrascript.Resource):\n pass\n\nclass google_dataflow_job(terrascript.Resource):\n pass\n\nclass google_dataproc_cluster(terrascript.Resource):\n pass\n\nclass google_dataproc_job(terrascript.Resource):\n pass\n\nclass google_dns_record_set(terrascript.Resource):\n pass\n\nclass google_endpoints_service(terrascript.Resource):\n pass\n\nclass google_folder(terrascript.Resource):\n 
pass\n\nclass google_folder_organization_policy(terrascript.Resource):\n pass\n\nclass google_logging_billing_account_sink(terrascript.Resource):\n pass\n\nclass google_logging_organization_sink(terrascript.Resource):\n pass\n\nclass google_logging_folder_sink(terrascript.Resource):\n pass\n\nclass google_logging_project_sink(terrascript.Resource):\n pass\n\nclass google_service_networking_connection(terrascript.Resource):\n pass\n\nclass google_sql_database_instance(terrascript.Resource):\n pass\n\nclass google_sql_ssl_cert(terrascript.Resource):\n pass\n\nclass google_sql_user(terrascript.Resource):\n pass\n\nclass google_organization_iam_custom_role(terrascript.Resource):\n pass\n\nclass google_organization_policy(terrascript.Resource):\n pass\n\nclass google_project(terrascript.Resource):\n pass\n\nclass google_project_iam_policy(terrascript.Resource):\n pass\n\nclass google_project_service(terrascript.Resource):\n pass\n\nclass google_project_iam_custom_role(terrascript.Resource):\n pass\n\nclass google_project_organization_policy(terrascript.Resource):\n pass\n\nclass google_project_usage_export_bucket(terrascript.Resource):\n pass\n\nclass google_project_services(terrascript.Resource):\n pass\n\nclass google_runtimeconfig_config(terrascript.Resource):\n pass\n\nclass google_runtimeconfig_variable(terrascript.Resource):\n pass\n\nclass google_service_account(terrascript.Resource):\n pass\n\nclass google_service_account_key(terrascript.Resource):\n pass\n\nclass google_storage_bucket(terrascript.Resource):\n pass\n\nclass google_storage_bucket_acl(terrascript.Resource):\n pass\n\nclass google_storage_bucket_object(terrascript.Resource):\n pass\n\nclass google_storage_object_acl(terrascript.Resource):\n pass\n\nclass google_storage_default_object_acl(terrascript.Resource):\n pass\n\nclass google_storage_notification(terrascript.Resource):\n pass\n\nclass google_storage_transfer_job(terrascript.Resource):\n pass\n", "id": "10879681", "language": "Python", "matching_score": 3.2255821228027344, "max_stars_count": 0, "path": "terrascript/google/r.py" }, { "content": "# terrascript/google/d.py\n\nimport terrascript\n\n\nclass google_active_folder(terrascript.Data):\n pass\n\nclass google_billing_account(terrascript.Data):\n pass\n\nclass google_dns_managed_zone(terrascript.Data):\n pass\n\nclass google_client_config(terrascript.Data):\n pass\n\nclass google_client_openid_userinfo(terrascript.Data):\n pass\n\nclass google_cloudfunctions_function(terrascript.Data):\n pass\n\nclass google_composer_image_versions(terrascript.Data):\n pass\n\nclass google_compute_address(terrascript.Data):\n pass\n\nclass google_compute_backend_service(terrascript.Data):\n pass\n\nclass google_compute_default_service_account(terrascript.Data):\n pass\n\nclass google_compute_forwarding_rule(terrascript.Data):\n pass\n\nclass google_compute_image(terrascript.Data):\n pass\n\nclass google_compute_instance(terrascript.Data):\n pass\n\nclass google_compute_global_address(terrascript.Data):\n pass\n\nclass google_compute_instance_group(terrascript.Data):\n pass\n\nclass google_compute_lb_ip_ranges(terrascript.Data):\n pass\n\nclass google_compute_network(terrascript.Data):\n pass\n\nclass google_compute_network_endpoint_group(terrascript.Data):\n pass\n\nclass google_compute_node_types(terrascript.Data):\n pass\n\nclass google_compute_regions(terrascript.Data):\n pass\n\nclass google_compute_region_instance_group(terrascript.Data):\n pass\n\nclass google_compute_subnetwork(terrascript.Data):\n pass\n\nclass 
google_compute_zones(terrascript.Data):\n pass\n\nclass google_compute_vpn_gateway(terrascript.Data):\n pass\n\nclass google_compute_ssl_policy(terrascript.Data):\n pass\n\nclass google_compute_ssl_certificate(terrascript.Data):\n pass\n\nclass google_container_cluster(terrascript.Data):\n pass\n\nclass google_container_engine_versions(terrascript.Data):\n pass\n\nclass google_container_registry_repository(terrascript.Data):\n pass\n\nclass google_container_registry_image(terrascript.Data):\n pass\n\nclass google_iam_policy(terrascript.Data):\n pass\n\nclass google_iam_role(terrascript.Data):\n pass\n\nclass google_kms_secret(terrascript.Data):\n pass\n\nclass google_kms_secret_ciphertext(terrascript.Data):\n pass\n\nclass google_kms_key_ring(terrascript.Data):\n pass\n\nclass google_kms_crypto_key(terrascript.Data):\n pass\n\nclass google_kms_crypto_key_version(terrascript.Data):\n pass\n\nclass google_folder(terrascript.Data):\n pass\n\nclass google_folder_organization_policy(terrascript.Data):\n pass\n\nclass google_netblock_ip_ranges(terrascript.Data):\n pass\n\nclass google_organization(terrascript.Data):\n pass\n\nclass google_project(terrascript.Data):\n pass\n\nclass google_projects(terrascript.Data):\n pass\n\nclass google_project_organization_policy(terrascript.Data):\n pass\n\nclass google_project_services(terrascript.Data):\n pass\n\nclass google_service_account(terrascript.Data):\n pass\n\nclass google_service_account_access_token(terrascript.Data):\n pass\n\nclass google_service_account_key(terrascript.Data):\n pass\n\nclass google_storage_bucket_object(terrascript.Data):\n pass\n\nclass google_storage_object_signed_url(terrascript.Data):\n pass\n\nclass google_storage_project_service_account(terrascript.Data):\n pass\n\nclass google_storage_transfer_project_service_account(terrascript.Data):\n pass\n\nclass google_tpu_tensorflow_versions(terrascript.Data):\n pass\n", "id": "6127779", "language": "Python", "matching_score": 1.2263989448547363, "max_stars_count": 0, "path": "terrascript/google/d.py" }, { "content": "# https://www.terraform.io/docs/configuration/locals.html\n\nimport terrascript\nimport terrascript.aws\nimport terrascript.aws.r\n\nfrom shared import assert_equals_json\n\ndef test():\n \"\"\"Locals (007)\"\"\"\n\n config = terrascript.Terrascript()\n\n config += terrascript.aws.aws(version='~> 2.0', region='us-east-1')\n\n blue = terrascript.aws.r.aws_instance('blue', ami = \"AMI\", instance_type=\"t2.micro\")\n config += blue\n\n green = terrascript.aws.r.aws_instance('green', ami = \"AMI\", instance_type=\"t2.micro\")\n config += green\n\n locals1 = terrascript.Locals(service_name='forum', owner='Community Team')\n config += locals1\n\n config += terrascript.Locals(instance_ids='concat(aws_instance.blue.*.id, aws_instance.green.*.id)')\n\n config += terrascript.Locals(Service=locals1.service_name, Owner=locals1.owner)\n\n # Fixture name assumed from the test_00N.tf.json convention of the sibling tests.\n assert_equals_json(config, 'test_007.tf.json')\n", "id": "10180526", "language": "Python", "matching_score": 3.301820993423462, "max_stars_count": 0, "path": "tests/test_007.py" }, { "content": "# https://www.terraform.io/docs/configuration/data-sources.html\n\nimport terrascript\nimport terrascript.aws\nimport terrascript.aws.d\n\nfrom shared import assert_equals_json\n\ndef test():\n \"\"\"Data (008)\"\"\"\n\n config = terrascript.Terrascript()\n\n config += terrascript.aws.aws(version='~> 2.0', region='us-east-1')\n\n config += terrascript.aws.d.aws_ami('example', most_recent=True, owners=['self'],\n tags=dict(Name=\"app-server\", Tested=\"true\"))\n\n assert_equals_json(config, 
'test_008.tf.json')", "id": "12763181", "language": "Python", "matching_score": 1.9559463262557983, "max_stars_count": 0, "path": "tests/test_008.py" }, { "content": "# https://www.terraform.io/docs/provisioners/file.html\n\nimport terrascript\nimport terrascript.aws\nimport terrascript.aws.r\n\nfrom shared import assert_equals_json\n\ndef test():\n \"\"\"Provisioner (005)\"\"\"\n\n config = terrascript.Terrascript()\n \n config += terrascript.aws.aws(version='~> 2.0', region='us-east-1')\n \n # Copies the myapp.conf file to /etc/myapp.conf\n provisioner1 = terrascript.Provisioner('file', source='conf/myapp.conf', \n destination = '/etc/myapp.conf')\n \n # Copies the string in content into /tmp/file.log\n provisioner2 = terrascript.Provisioner('file', source='ami used: ${self.ami}', \n destination = '/tmp/file.log')\n \n # Copies the configs.d folder to /etc/configs.d\n provisioner3 = terrascript.Provisioner('file', source='conf/configs.d', \n destination = '/etc')\n \n # Copies all files and folders in apps/app1 to D:/IIS/webapp1\n provisioner4 = terrascript.Provisioner('file', source='apps/app1/', \n destination = 'D:/IIS/webapp1')\n \n \n config += terrascript.aws.r.aws_instance('web',\n #ami = \"${data.aws_ami.ubuntu.id}\",\n ami = \"AMI\",\n instance_type=\"t2.micro\",\n provisioner=[provisioner1, provisioner2, provisioner3, provisioner4])\n \n assert_equals_json(config, 'test_005.tf.json')\n", "id": "5724080", "language": "Python", "matching_score": 3.425295114517212, "max_stars_count": 0, "path": "tests/test_005.py" }, { "content": "# https://www.terraform.io/docs/configuration/outputs.html\n\nimport terrascript\nimport terrascript.aws\nimport terrascript.aws.r\n\nfrom shared import assert_equals_json\n\ndef test():\n \"\"\"Output (006)\"\"\"\n\n config = terrascript.Terrascript()\n\n config += terrascript.aws.aws(version='~> 2.0', region='us-east-1')\n\n aws_instance = terrascript.aws.r.aws_instance('web', ami = \"AMI\", instance_type=\"t2.micro\")\n config += aws_instance\n\n config += terrascript.Output('instance_ip_addr', value=aws_instance.server.private_ip)\n\n assert_equals_json(config, 'test_006.tf.json')", "id": "1110293", "language": "Python", "matching_score": 2.046483278274536, "max_stars_count": 0, "path": "tests/test_006.py" }, { "content": "\"\"\"\nTest for Github issue 22: Allow multiple providers of same type\n\nhttps://github.com/mjuenema/python-terrascript/issues/22\n\n\"\"\"\n\nimport terrascript\nfrom terrascript.aws.r import aws_instance\n\ndef test_issue22():\n \"\"\"Issue 22: Allow multiple providers of same type.\n\n See also pull request #27 Feature/multiple providers\n\n {\n \"provider\": {\n \"aws\": {\n \"__DEFAULT__\": {\n \"region\": \"us-east-1\"\n },\n \"useast2\": {\n \"alias\": \"useast2\",\n \"region\": \"us-east-2\"\n }\n }\n },\n \"resource\": {\n \"aws_instance\": {\n \"I1\": {\n \"ami\": \"ami-4bf3d731\",\n \"instance_type\": \"t2.large\"\n },\n \"I2\": {\n \"ami\": \"ami-e1496384\",\n \"instance_type\": \"t2.large\",\n \"provider\": \"aws.useast2\"\n }\n }\n }\n }\n\n \"\"\"\n ts = terrascript.Terrascript()\n ts += provider('aws', region='us-east-1')\n ts += provider('aws', region='us-east-2', alias='useast2')\n ts += aws_instance('I1', ami='ami-4bf3d731', instance_type='t2.large')\n ts += aws_instance('I2', ami='ami-e1496384', instance_type='t2.large', provider='aws.useast2')\n assert ts.validate() is True\n", "id": "4353286", "language": "Python", "matching_score": 1.3477022647857666, "max_stars_count": 0, "path": 
"tests/REVIEW/REWRITE.test_issue22.py" }, { "content": "from nose.tools import *\nfrom shared import *\nimport json\n\nimport terrascript\nimport terrascript.aws.r\n\nclass Test_Variable(object):\n\n def test_variable_example1(self):\n # https://www.terraform.io/docs/configuration/variables.html\n\n variable = terrascript.Variable('image_id')\n\n assert_equals_json(variable, 'Variable_variable_example1.json')\n\n\n def test_variable_example2(self):\n # https://www.terraform.io/docs/configuration/variables.html\n\n variable = terrascript.Variable('availability_zone_names',\n type=\"list(string)\",\n default=[\"us-west-1a\"]\n )\n\n assert_equals_json(variable, 'Variable_variable_example2.json')\n\n\n def test_variable_in_resource(self):\n # https://www.terraform.io/docs/configuration/variables.html\n\n ami = terrascript.Variable('image_id')\n\n aws_instance = terrascript.aws.r.aws_instance(\"example\",\n instance_type=\"t2.micro\",\n ami=ami)\n\n assert_equals_json(aws_instance, 'Variable_variable_in_resource.json')", "id": "6753892", "language": "Python", "matching_score": 3.697584390640259, "max_stars_count": 0, "path": "tests/REVIEW/test_Variable.py" }, { "content": "# https://www.terraform.io/docs/configuration/variables.html\n\nimport terrascript\n\nfrom shared import assert_equals_json\n\ndef test():\n \"\"\"Variable (004)\"\"\"\n\n config = terrascript.Terrascript()\n\n config += terrascript.Variable('image_id', type='string')\n config += terrascript.Variable('availability_zone_names', type='list(string)', default=[\"us-west-1a\"])\n\n assert_equals_json(config, 'test_004.tf.json')", "id": "5045826", "language": "Python", "matching_score": 1.1438084840774536, "max_stars_count": 0, "path": "tests/test_004.py" }, { "content": "# https://github.com/mjuenema/python-terrascript/issues/63\n# https://www.terraform.io/docs/providers/oci/index.html\n\nimport terrascript\nimport terrascript.oci\n\nfrom shared import assert_equals_json\n\ndef test():\n \"\"\"Issue #63 - Add support for OCI provider\n\n Example based on https://www.terraform.io/docs/providers/oci/index.html\n\n # Configure the Oracle Cloud Infrastructure provider with an API Key\n provider \"oci\" {\n tenancy_ocid = \"${var.tenancy_ocid}\"\n user_ocid = \"${var.user_ocid}\"\n fingerprint = \"${var.fingerprint}\"\n private_key_path = \"${var.private_key_path}\"\n region = \"${var.region}\"\n }\n\n # Get a list of Availability Domains\n data \"oci_identity_availability_domains\" \"ads\" {\n compartment_id = \"${var.tenancy_ocid}\"\n }\n\n # Output the result\n output \"show-ads\" {\n value = \"${data.oci_identity_availability_domains.ads.availability_domains}\"\n }\n\n \"\"\"\n\n config = terrascript.Terrascript()\n\n tenancy_ocid = terrascript.Variable('tenancy_ocid')\n config += tenancy_ocid\n\n user_ocid = terrascript.Variable('user_ocid')\n config += user_ocid\n\n fingerprint = terrascript.Variable('fingerprint')\n config += fingerprint\n\n private_key_path = terrascript.Variable('private_key_path')\n config += private_key_path\n\n region = terrascript.Variable('region')\n config += region\n\n config += terrascript.oci.oci(tenancy_ocid=tenancy_ocid,\n user_ocid=user_ocid,\n fingerprint=fingerprint,\n private_key_path=private_key_path,\n region=region)\n\n assert_equals_json(config, 'test_issue63.tf.json')\n", "id": "10813916", "language": "Python", "matching_score": 2.3131933212280273, "max_stars_count": 0, "path": "tests/test_issue63.py" }, { "content": "import terrascript\nimport terrascript.aws\nimport 
terrascript.aws.r\n\nfrom shared import assert_equals_json\n\ndef test():\n \"\"\"Module (003)\"\"\"\n\n config = terrascript.Terrascript()\n\n config += terrascript.aws.aws(access_key='ACCESS_KEY_HERE',\n secret_key='SECRET_KEY_HERE',\n region='us-east-1')\n\n config += terrascript.Module('vpc',\n source=\"terraform-aws-modules/vpc/aws\",\n version=\"2.9.0\")\n\n assert_equals_json(config, 'test_003.tf.json')\n", "id": "3890738", "language": "Python", "matching_score": 1.828281283378601, "max_stars_count": 0, "path": "tests/test_003.py" }, { "content": "# https://www.terraform.io/docs/providers/aws/guides/custom-service-endpoints.html\n\nimport terrascript\nimport terrascript.aws\nimport terrascript.aws.r\n\nfrom shared import assert_equals_json\n\ndef test():\n \"\"\"Provider Endpoints (002)\n\n Configure the AWS Provider\n\n provider \"aws\" {\n version = \"~> 2.0\"\n region = \"us-east-1\"\n endpoints {\n dynamodb = \"http://localhost:4569\"\n s3 = \"http://localhost:4572\"\n }\n }\n\n Create a VPC\n\n resource \"aws_vpc\" \"example\" {\n cidr_block = \"10.0.0.0/16\"\n }\n\n \"\"\"\n\n config = terrascript.Terrascript()\n\n config += terrascript.aws.aws(version='~> 2.0',\n region='us-east-1',\n endpoints=dict(dynamodb='http://localhost:4569',\n s3='http://localhost:4572')\n )\n\n config += terrascript.aws.r.aws_vpc('example', cidr_block='10.0.0.0/16')\n\n assert_equals_json(config, 'test_002.tf.json')", "id": "1973839", "language": "Python", "matching_score": 2.810229539871216, "max_stars_count": 0, "path": "tests/test_002.py" }, { "content": "# tests/test_001.py\n# https://www.terraform.io/docs/providers/aws/index.html\n\nimport terrascript\nimport terrascript.aws\nimport terrascript.aws.r\n\nfrom shared import assert_deep_equal\n\ndef test():\n \"\"\"Resource (001)\"\"\"\n\n config = terrascript.Terrascript()\n\n config += terrascript.aws.aws(version='~> 2.0', region='us-east-1')\n config += terrascript.aws.r.aws_vpc('example', cidr_block='10.0.0.0/16')\n\n assert_deep_equal(config, 'test_001.tf.json')\n", "id": "7563132", "language": "Python", "matching_score": 1.407702088356018, "max_stars_count": 0, "path": "tests/test_001.py" }, { "content": "# terrascript/aws/r.py\n\nimport terrascript\n\n\nclass aws_acm_certificate(terrascript.Resource):\n pass\n\nclass aws_acm_certificate_validation(terrascript.Resource):\n pass\n\nclass aws_acmpca_certificate_authority(terrascript.Resource):\n pass\n\nclass aws_ami(terrascript.Resource):\n pass\n\nclass aws_ami_copy(terrascript.Resource):\n pass\n\nclass aws_ami_from_instance(terrascript.Resource):\n pass\n\nclass aws_ami_launch_permission(terrascript.Resource):\n pass\n\nclass aws_api_gateway_account(terrascript.Resource):\n pass\n\nclass aws_api_gateway_api_key(terrascript.Resource):\n pass\n\nclass aws_api_gateway_authorizer(terrascript.Resource):\n pass\n\nclass aws_api_gateway_base_path_mapping(terrascript.Resource):\n pass\n\nclass aws_api_gateway_client_certificate(terrascript.Resource):\n pass\n\nclass aws_api_gateway_deployment(terrascript.Resource):\n pass\n\nclass aws_api_gateway_documentation_part(terrascript.Resource):\n pass\n\nclass aws_api_gateway_documentation_version(terrascript.Resource):\n pass\n\nclass aws_api_gateway_domain_name(terrascript.Resource):\n pass\n\nclass aws_api_gateway_gateway_response(terrascript.Resource):\n pass\n\nclass aws_api_gateway_integration(terrascript.Resource):\n pass\n\nclass aws_api_gateway_integration_response(terrascript.Resource):\n pass\n\nclass 
aws_api_gateway_method(terrascript.Resource):\n pass\n\nclass aws_api_gateway_method_response(terrascript.Resource):\n pass\n\nclass aws_api_gateway_method_settings(terrascript.Resource):\n pass\n\nclass aws_api_gateway_model(terrascript.Resource):\n pass\n\nclass aws_api_gateway_request_validator(terrascript.Resource):\n pass\n\nclass aws_api_gateway_resource(terrascript.Resource):\n pass\n\nclass aws_api_gateway_rest_api(terrascript.Resource):\n pass\n\nclass aws_api_gateway_stage(terrascript.Resource):\n pass\n\nclass aws_api_gateway_usage_plan(terrascript.Resource):\n pass\n\nclass aws_api_gateway_usage_plan_key(terrascript.Resource):\n pass\n\nclass aws_api_gateway_vpc_link(terrascript.Resource):\n pass\n\nclass aws_app_cookie_stickiness_policy(terrascript.Resource):\n pass\n\nclass aws_appautoscaling_target(terrascript.Resource):\n pass\n\nclass aws_appautoscaling_policy(terrascript.Resource):\n pass\n\nclass aws_appautoscaling_scheduled_action(terrascript.Resource):\n pass\n\nclass aws_appmesh_mesh(terrascript.Resource):\n pass\n\nclass aws_appmesh_route(terrascript.Resource):\n pass\n\nclass aws_appmesh_virtual_node(terrascript.Resource):\n pass\n\nclass aws_appmesh_virtual_router(terrascript.Resource):\n pass\n\nclass aws_appmesh_virtual_service(terrascript.Resource):\n pass\n\nclass aws_appsync_api_key(terrascript.Resource):\n pass\n\nclass aws_appsync_datasource(terrascript.Resource):\n pass\n\nclass aws_appsync_function(terrascript.Resource):\n pass\n\nclass aws_appsync_graphql_api(terrascript.Resource):\n pass\n\nclass aws_appsync_resolver(terrascript.Resource):\n pass\n\nclass aws_athena_database(terrascript.Resource):\n pass\n\nclass aws_athena_named_query(terrascript.Resource):\n pass\n\nclass aws_athena_workgroup(terrascript.Resource):\n pass\n\nclass aws_autoscaling_attachment(terrascript.Resource):\n pass\n\nclass aws_autoscaling_group(terrascript.Resource):\n pass\n\nclass aws_autoscaling_lifecycle_hook(terrascript.Resource):\n pass\n\nclass aws_autoscaling_notification(terrascript.Resource):\n pass\n\nclass aws_autoscaling_policy(terrascript.Resource):\n pass\n\nclass aws_autoscaling_schedule(terrascript.Resource):\n pass\n\nclass aws_backup_plan(terrascript.Resource):\n pass\n\nclass aws_backup_selection(terrascript.Resource):\n pass\n\nclass aws_backup_vault(terrascript.Resource):\n pass\n\nclass aws_budgets_budget(terrascript.Resource):\n pass\n\nclass aws_cloud9_environment_ec2(terrascript.Resource):\n pass\n\nclass aws_cloudformation_stack(terrascript.Resource):\n pass\n\nclass aws_cloudformation_stack_set(terrascript.Resource):\n pass\n\nclass aws_cloudformation_stack_set_instance(terrascript.Resource):\n pass\n\nclass aws_cloudfront_distribution(terrascript.Resource):\n pass\n\nclass aws_cloudfront_origin_access_identity(terrascript.Resource):\n pass\n\nclass aws_cloudfront_public_key(terrascript.Resource):\n pass\n\nclass aws_cloudtrail(terrascript.Resource):\n pass\n\nclass aws_cloudwatch_event_permission(terrascript.Resource):\n pass\n\nclass aws_cloudwatch_event_rule(terrascript.Resource):\n pass\n\nclass aws_cloudwatch_event_target(terrascript.Resource):\n pass\n\nclass aws_cloudwatch_log_destination(terrascript.Resource):\n pass\n\nclass aws_cloudwatch_log_destination_policy(terrascript.Resource):\n pass\n\nclass aws_cloudwatch_log_group(terrascript.Resource):\n pass\n\nclass aws_cloudwatch_log_metric_filter(terrascript.Resource):\n pass\n\nclass aws_cloudwatch_log_resource_policy(terrascript.Resource):\n pass\n\nclass 
aws_cloudwatch_log_stream(terrascript.Resource):\n pass\n\nclass aws_cloudwatch_log_subscription_filter(terrascript.Resource):\n pass\n\nclass aws_config_aggregate_authorization(terrascript.Resource):\n pass\n\nclass aws_config_config_rule(terrascript.Resource):\n pass\n\nclass aws_config_configuration_aggregator(terrascript.Resource):\n pass\n\nclass aws_config_configuration_recorder(terrascript.Resource):\n pass\n\nclass aws_config_configuration_recorder_status(terrascript.Resource):\n pass\n\nclass aws_config_delivery_channel(terrascript.Resource):\n pass\n\nclass aws_config_organization_custom_rule(terrascript.Resource):\n pass\n\nclass aws_config_organization_managed_rule(terrascript.Resource):\n pass\n\nclass aws_cognito_identity_pool(terrascript.Resource):\n pass\n\nclass aws_cognito_identity_pool_roles_attachment(terrascript.Resource):\n pass\n\nclass aws_cognito_identity_provider(terrascript.Resource):\n pass\n\nclass aws_cognito_user_group(terrascript.Resource):\n pass\n\nclass aws_cognito_user_pool(terrascript.Resource):\n pass\n\nclass aws_cognito_user_pool_client(terrascript.Resource):\n pass\n\nclass aws_cognito_user_pool_domain(terrascript.Resource):\n pass\n\nclass aws_cloudhsm_v2_cluster(terrascript.Resource):\n pass\n\nclass aws_cloudhsm_v2_hsm(terrascript.Resource):\n pass\n\nclass aws_cognito_resource_server(terrascript.Resource):\n pass\n\nclass aws_cloudwatch_metric_alarm(terrascript.Resource):\n pass\n\nclass aws_cloudwatch_dashboard(terrascript.Resource):\n pass\n\nclass aws_codedeploy_app(terrascript.Resource):\n pass\n\nclass aws_codedeploy_deployment_config(terrascript.Resource):\n pass\n\nclass aws_codedeploy_deployment_group(terrascript.Resource):\n pass\n\nclass aws_codecommit_repository(terrascript.Resource):\n pass\n\nclass aws_codecommit_trigger(terrascript.Resource):\n pass\n\nclass aws_codebuild_project(terrascript.Resource):\n pass\n\nclass aws_codebuild_source_credential(terrascript.Resource):\n pass\n\nclass aws_codebuild_webhook(terrascript.Resource):\n pass\n\nclass aws_codepipeline(terrascript.Resource):\n pass\n\nclass aws_codepipeline_webhook(terrascript.Resource):\n pass\n\nclass aws_cur_report_definition(terrascript.Resource):\n pass\n\nclass aws_customer_gateway(terrascript.Resource):\n pass\n\nclass aws_datapipeline_pipeline(terrascript.Resource):\n pass\n\nclass aws_datasync_agent(terrascript.Resource):\n pass\n\nclass aws_datasync_location_efs(terrascript.Resource):\n pass\n\nclass aws_datasync_location_nfs(terrascript.Resource):\n pass\n\nclass aws_datasync_location_s3(terrascript.Resource):\n pass\n\nclass aws_datasync_task(terrascript.Resource):\n pass\n\nclass aws_dax_cluster(terrascript.Resource):\n pass\n\nclass aws_dax_parameter_group(terrascript.Resource):\n pass\n\nclass aws_dax_subnet_group(terrascript.Resource):\n pass\n\nclass aws_db_cluster_snapshot(terrascript.Resource):\n pass\n\nclass aws_db_event_subscription(terrascript.Resource):\n pass\n\nclass aws_db_instance(terrascript.Resource):\n pass\n\nclass aws_db_instance_role_association(terrascript.Resource):\n pass\n\nclass aws_db_option_group(terrascript.Resource):\n pass\n\nclass aws_db_parameter_group(terrascript.Resource):\n pass\n\nclass aws_db_security_group(terrascript.Resource):\n pass\n\nclass aws_db_snapshot(terrascript.Resource):\n pass\n\nclass aws_db_subnet_group(terrascript.Resource):\n pass\n\nclass aws_devicefarm_project(terrascript.Resource):\n pass\n\nclass aws_directory_service_directory(terrascript.Resource):\n pass\n\nclass 
aws_directory_service_conditional_forwarder(terrascript.Resource):\n pass\n\nclass aws_directory_service_log_subscription(terrascript.Resource):\n pass\n\nclass aws_dlm_lifecycle_policy(terrascript.Resource):\n pass\n\nclass aws_dms_certificate(terrascript.Resource):\n pass\n\nclass aws_dms_endpoint(terrascript.Resource):\n pass\n\nclass aws_dms_replication_instance(terrascript.Resource):\n pass\n\nclass aws_dms_replication_subnet_group(terrascript.Resource):\n pass\n\nclass aws_dms_replication_task(terrascript.Resource):\n pass\n\nclass aws_docdb_cluster(terrascript.Resource):\n pass\n\nclass aws_docdb_cluster_instance(terrascript.Resource):\n pass\n\nclass aws_docdb_cluster_parameter_group(terrascript.Resource):\n pass\n\nclass aws_docdb_cluster_snapshot(terrascript.Resource):\n pass\n\nclass aws_docdb_subnet_group(terrascript.Resource):\n pass\n\nclass aws_dx_bgp_peer(terrascript.Resource):\n pass\n\nclass aws_dx_connection(terrascript.Resource):\n pass\n\nclass aws_dx_connection_association(terrascript.Resource):\n pass\n\nclass aws_dx_gateway(terrascript.Resource):\n pass\n\nclass aws_dx_gateway_association(terrascript.Resource):\n pass\n\nclass aws_dx_gateway_association_proposal(terrascript.Resource):\n pass\n\nclass aws_dx_hosted_private_virtual_interface(terrascript.Resource):\n pass\n\nclass aws_dx_hosted_private_virtual_interface_accepter(terrascript.Resource):\n pass\n\nclass aws_dx_hosted_public_virtual_interface(terrascript.Resource):\n pass\n\nclass aws_dx_hosted_public_virtual_interface_accepter(terrascript.Resource):\n pass\n\nclass aws_dx_lag(terrascript.Resource):\n pass\n\nclass aws_dx_private_virtual_interface(terrascript.Resource):\n pass\n\nclass aws_dx_public_virtual_interface(terrascript.Resource):\n pass\n\nclass aws_dx_transit_virtual_interface(terrascript.Resource):\n pass\n\nclass aws_dynamodb_table(terrascript.Resource):\n pass\n\nclass aws_dynamodb_table_item(terrascript.Resource):\n pass\n\nclass aws_dynamodb_global_table(terrascript.Resource):\n pass\n\nclass aws_ebs_default_kms_key(terrascript.Resource):\n pass\n\nclass aws_ebs_encryption_by_default(terrascript.Resource):\n pass\n\nclass aws_ebs_snapshot(terrascript.Resource):\n pass\n\nclass aws_ebs_snapshot_copy(terrascript.Resource):\n pass\n\nclass aws_ebs_volume(terrascript.Resource):\n pass\n\nclass aws_ec2_capacity_reservation(terrascript.Resource):\n pass\n\nclass aws_ec2_client_vpn_endpoint(terrascript.Resource):\n pass\n\nclass aws_ec2_client_vpn_network_association(terrascript.Resource):\n pass\n\nclass aws_ec2_fleet(terrascript.Resource):\n pass\n\nclass aws_ec2_transit_gateway(terrascript.Resource):\n pass\n\nclass aws_ec2_transit_gateway_route(terrascript.Resource):\n pass\n\nclass aws_ec2_transit_gateway_route_table(terrascript.Resource):\n pass\n\nclass aws_ec2_transit_gateway_route_table_association(terrascript.Resource):\n pass\n\nclass aws_ec2_transit_gateway_route_table_propagation(terrascript.Resource):\n pass\n\nclass aws_ec2_transit_gateway_vpc_attachment(terrascript.Resource):\n pass\n\nclass aws_ec2_transit_gateway_vpc_attachment_accepter(terrascript.Resource):\n pass\n\nclass aws_ecr_lifecycle_policy(terrascript.Resource):\n pass\n\nclass aws_ecr_repository(terrascript.Resource):\n pass\n\nclass aws_ecr_repository_policy(terrascript.Resource):\n pass\n\nclass aws_ecs_cluster(terrascript.Resource):\n pass\n\nclass aws_ecs_service(terrascript.Resource):\n pass\n\nclass aws_ecs_task_definition(terrascript.Resource):\n pass\n\nclass aws_efs_file_system(terrascript.Resource):\n 
pass\n\nclass aws_efs_mount_target(terrascript.Resource):\n pass\n\nclass aws_egress_only_internet_gateway(terrascript.Resource):\n pass\n\nclass aws_eip(terrascript.Resource):\n pass\n\nclass aws_eip_association(terrascript.Resource):\n pass\n\nclass aws_eks_cluster(terrascript.Resource):\n pass\n\nclass aws_elasticache_cluster(terrascript.Resource):\n pass\n\nclass aws_elasticache_parameter_group(terrascript.Resource):\n pass\n\nclass aws_elasticache_replication_group(terrascript.Resource):\n pass\n\nclass aws_elasticache_security_group(terrascript.Resource):\n pass\n\nclass aws_elasticache_subnet_group(terrascript.Resource):\n pass\n\nclass aws_elastic_beanstalk_application(terrascript.Resource):\n pass\n\nclass aws_elastic_beanstalk_application_version(terrascript.Resource):\n pass\n\nclass aws_elastic_beanstalk_configuration_template(terrascript.Resource):\n pass\n\nclass aws_elastic_beanstalk_environment(terrascript.Resource):\n pass\n\nclass aws_elasticsearch_domain(terrascript.Resource):\n pass\n\nclass aws_elasticsearch_domain_policy(terrascript.Resource):\n pass\n\nclass aws_elastictranscoder_pipeline(terrascript.Resource):\n pass\n\nclass aws_elastictranscoder_preset(terrascript.Resource):\n pass\n\nclass aws_elb(terrascript.Resource):\n pass\n\nclass aws_elb_attachment(terrascript.Resource):\n pass\n\nclass aws_emr_cluster(terrascript.Resource):\n pass\n\nclass aws_emr_instance_group(terrascript.Resource):\n pass\n\nclass aws_emr_security_configuration(terrascript.Resource):\n pass\n\nclass aws_flow_log(terrascript.Resource):\n pass\n\nclass aws_fsx_lustre_file_system(terrascript.Resource):\n pass\n\nclass aws_fsx_windows_file_system(terrascript.Resource):\n pass\n\nclass aws_fms_admin_account(terrascript.Resource):\n pass\n\nclass aws_gamelift_alias(terrascript.Resource):\n pass\n\nclass aws_gamelift_build(terrascript.Resource):\n pass\n\nclass aws_gamelift_fleet(terrascript.Resource):\n pass\n\nclass aws_gamelift_game_session_queue(terrascript.Resource):\n pass\n\nclass aws_glacier_vault(terrascript.Resource):\n pass\n\nclass aws_glacier_vault_lock(terrascript.Resource):\n pass\n\nclass aws_globalaccelerator_accelerator(terrascript.Resource):\n pass\n\nclass aws_globalaccelerator_endpoint_group(terrascript.Resource):\n pass\n\nclass aws_globalaccelerator_listener(terrascript.Resource):\n pass\n\nclass aws_glue_catalog_database(terrascript.Resource):\n pass\n\nclass aws_glue_catalog_table(terrascript.Resource):\n pass\n\nclass aws_glue_classifier(terrascript.Resource):\n pass\n\nclass aws_glue_connection(terrascript.Resource):\n pass\n\nclass aws_glue_crawler(terrascript.Resource):\n pass\n\nclass aws_glue_job(terrascript.Resource):\n pass\n\nclass aws_glue_security_configuration(terrascript.Resource):\n pass\n\nclass aws_glue_trigger(terrascript.Resource):\n pass\n\nclass aws_guardduty_detector(terrascript.Resource):\n pass\n\nclass aws_guardduty_invite_accepter(terrascript.Resource):\n pass\n\nclass aws_guardduty_ipset(terrascript.Resource):\n pass\n\nclass aws_guardduty_member(terrascript.Resource):\n pass\n\nclass aws_guardduty_threatintelset(terrascript.Resource):\n pass\n\nclass aws_iam_access_key(terrascript.Resource):\n pass\n\nclass aws_iam_account_alias(terrascript.Resource):\n pass\n\nclass aws_iam_account_password_policy(terrascript.Resource):\n pass\n\nclass aws_iam_group_policy(terrascript.Resource):\n pass\n\nclass aws_iam_group(terrascript.Resource):\n pass\n\nclass aws_iam_group_membership(terrascript.Resource):\n pass\n\nclass 
aws_iam_group_policy_attachment(terrascript.Resource):\n pass\n\nclass aws_iam_instance_profile(terrascript.Resource):\n pass\n\nclass aws_iam_openid_connect_provider(terrascript.Resource):\n pass\n\nclass aws_iam_policy(terrascript.Resource):\n pass\n\nclass aws_iam_policy_attachment(terrascript.Resource):\n pass\n\nclass aws_iam_role_policy_attachment(terrascript.Resource):\n pass\n\nclass aws_iam_role_policy(terrascript.Resource):\n pass\n\nclass aws_iam_role(terrascript.Resource):\n pass\n\nclass aws_iam_saml_provider(terrascript.Resource):\n pass\n\nclass aws_iam_server_certificate(terrascript.Resource):\n pass\n\nclass aws_iam_service_linked_role(terrascript.Resource):\n pass\n\nclass aws_iam_user_group_membership(terrascript.Resource):\n pass\n\nclass aws_iam_user_policy_attachment(terrascript.Resource):\n pass\n\nclass aws_iam_user_policy(terrascript.Resource):\n pass\n\nclass aws_iam_user_ssh_key(terrascript.Resource):\n pass\n\nclass aws_iam_user(terrascript.Resource):\n pass\n\nclass aws_iam_user_login_profile(terrascript.Resource):\n pass\n\nclass aws_inspector_assessment_target(terrascript.Resource):\n pass\n\nclass aws_inspector_assessment_template(terrascript.Resource):\n pass\n\nclass aws_inspector_resource_group(terrascript.Resource):\n pass\n\nclass aws_instance(terrascript.Resource):\n pass\n\nclass aws_internet_gateway(terrascript.Resource):\n pass\n\nclass aws_iot_certificate(terrascript.Resource):\n pass\n\nclass aws_iot_policy(terrascript.Resource):\n pass\n\nclass aws_iot_policy_attachment(terrascript.Resource):\n pass\n\nclass aws_iot_thing(terrascript.Resource):\n pass\n\nclass aws_iot_thing_principal_attachment(terrascript.Resource):\n pass\n\nclass aws_iot_thing_type(terrascript.Resource):\n pass\n\nclass aws_iot_topic_rule(terrascript.Resource):\n pass\n\nclass aws_iot_role_alias(terrascript.Resource):\n pass\n\nclass aws_key_pair(terrascript.Resource):\n pass\n\nclass aws_kinesis_firehose_delivery_stream(terrascript.Resource):\n pass\n\nclass aws_kinesis_stream(terrascript.Resource):\n pass\n\nclass aws_kinesis_analytics_application(terrascript.Resource):\n pass\n\nclass aws_kms_alias(terrascript.Resource):\n pass\n\nclass aws_kms_external_key(terrascript.Resource):\n pass\n\nclass aws_kms_grant(terrascript.Resource):\n pass\n\nclass aws_kms_key(terrascript.Resource):\n pass\n\nclass aws_kms_ciphertext(terrascript.Resource):\n pass\n\nclass aws_lambda_function(terrascript.Resource):\n pass\n\nclass aws_lambda_event_source_mapping(terrascript.Resource):\n pass\n\nclass aws_lambda_alias(terrascript.Resource):\n pass\n\nclass aws_lambda_permission(terrascript.Resource):\n pass\n\nclass aws_lambda_layer_version(terrascript.Resource):\n pass\n\nclass aws_launch_configuration(terrascript.Resource):\n pass\n\nclass aws_launch_template(terrascript.Resource):\n pass\n\nclass aws_licensemanager_association(terrascript.Resource):\n pass\n\nclass aws_licensemanager_license_configuration(terrascript.Resource):\n pass\n\nclass aws_lightsail_domain(terrascript.Resource):\n pass\n\nclass aws_lightsail_instance(terrascript.Resource):\n pass\n\nclass aws_lightsail_key_pair(terrascript.Resource):\n pass\n\nclass aws_lightsail_static_ip(terrascript.Resource):\n pass\n\nclass aws_lightsail_static_ip_attachment(terrascript.Resource):\n pass\n\nclass aws_lb_cookie_stickiness_policy(terrascript.Resource):\n pass\n\nclass aws_load_balancer_policy(terrascript.Resource):\n pass\n\nclass aws_load_balancer_backend_server_policy(terrascript.Resource):\n pass\n\nclass 
aws_load_balancer_listener_policy(terrascript.Resource):\n pass\n\nclass aws_lb_ssl_negotiation_policy(terrascript.Resource):\n pass\n\nclass aws_macie_member_account_association(terrascript.Resource):\n pass\n\nclass aws_macie_s3_bucket_association(terrascript.Resource):\n pass\n\nclass aws_main_route_table_association(terrascript.Resource):\n pass\n\nclass aws_mq_broker(terrascript.Resource):\n pass\n\nclass aws_mq_configuration(terrascript.Resource):\n pass\n\nclass aws_media_package_channel(terrascript.Resource):\n pass\n\nclass aws_media_store_container(terrascript.Resource):\n pass\n\nclass aws_media_store_container_policy(terrascript.Resource):\n pass\n\nclass aws_msk_cluster(terrascript.Resource):\n pass\n\nclass aws_msk_configuration(terrascript.Resource):\n pass\n\nclass aws_nat_gateway(terrascript.Resource):\n pass\n\nclass aws_network_acl(terrascript.Resource):\n pass\n\nclass aws_default_network_acl(terrascript.Resource):\n pass\n\nclass aws_neptune_cluster(terrascript.Resource):\n pass\n\nclass aws_neptune_cluster_instance(terrascript.Resource):\n pass\n\nclass aws_neptune_cluster_parameter_group(terrascript.Resource):\n pass\n\nclass aws_neptune_cluster_snapshot(terrascript.Resource):\n pass\n\nclass aws_neptune_event_subscription(terrascript.Resource):\n pass\n\nclass aws_neptune_parameter_group(terrascript.Resource):\n pass\n\nclass aws_neptune_subnet_group(terrascript.Resource):\n pass\n\nclass aws_network_acl_rule(terrascript.Resource):\n pass\n\nclass aws_network_interface(terrascript.Resource):\n pass\n\nclass aws_network_interface_attachment(terrascript.Resource):\n pass\n\nclass aws_opsworks_application(terrascript.Resource):\n pass\n\nclass aws_opsworks_stack(terrascript.Resource):\n pass\n\nclass aws_opsworks_java_app_layer(terrascript.Resource):\n pass\n\nclass aws_opsworks_haproxy_layer(terrascript.Resource):\n pass\n\nclass aws_opsworks_static_web_layer(terrascript.Resource):\n pass\n\nclass aws_opsworks_php_app_layer(terrascript.Resource):\n pass\n\nclass aws_opsworks_rails_app_layer(terrascript.Resource):\n pass\n\nclass aws_opsworks_nodejs_app_layer(terrascript.Resource):\n pass\n\nclass aws_opsworks_memcached_layer(terrascript.Resource):\n pass\n\nclass aws_opsworks_mysql_layer(terrascript.Resource):\n pass\n\nclass aws_opsworks_ganglia_layer(terrascript.Resource):\n pass\n\nclass aws_opsworks_custom_layer(terrascript.Resource):\n pass\n\nclass aws_opsworks_instance(terrascript.Resource):\n pass\n\nclass aws_opsworks_user_profile(terrascript.Resource):\n pass\n\nclass aws_opsworks_permission(terrascript.Resource):\n pass\n\nclass aws_opsworks_rds_db_instance(terrascript.Resource):\n pass\n\nclass aws_organizations_organization(terrascript.Resource):\n pass\n\nclass aws_organizations_account(terrascript.Resource):\n pass\n\nclass aws_organizations_policy(terrascript.Resource):\n pass\n\nclass aws_organizations_policy_attachment(terrascript.Resource):\n pass\n\nclass aws_organizations_organizational_unit(terrascript.Resource):\n pass\n\nclass aws_placement_group(terrascript.Resource):\n pass\n\nclass aws_proxy_protocol_policy(terrascript.Resource):\n pass\n\nclass aws_quicksight_group(terrascript.Resource):\n pass\n\nclass aws_ram_principal_association(terrascript.Resource):\n pass\n\nclass aws_ram_resource_association(terrascript.Resource):\n pass\n\nclass aws_ram_resource_share(terrascript.Resource):\n pass\n\nclass aws_ram_resource_share_accepter(terrascript.Resource):\n pass\n\nclass aws_rds_cluster(terrascript.Resource):\n pass\n\nclass 
aws_rds_cluster_endpoint(terrascript.Resource):\n pass\n\nclass aws_rds_cluster_instance(terrascript.Resource):\n pass\n\nclass aws_rds_cluster_parameter_group(terrascript.Resource):\n pass\n\nclass aws_rds_global_cluster(terrascript.Resource):\n pass\n\nclass aws_redshift_cluster(terrascript.Resource):\n pass\n\nclass aws_redshift_security_group(terrascript.Resource):\n pass\n\nclass aws_redshift_parameter_group(terrascript.Resource):\n pass\n\nclass aws_redshift_subnet_group(terrascript.Resource):\n pass\n\nclass aws_redshift_snapshot_copy_grant(terrascript.Resource):\n pass\n\nclass aws_redshift_snapshot_schedule(terrascript.Resource):\n pass\n\nclass aws_redshift_snapshot_schedule_association(terrascript.Resource):\n pass\n\nclass aws_redshift_event_subscription(terrascript.Resource):\n pass\n\nclass aws_resourcegroups_group(terrascript.Resource):\n pass\n\nclass aws_route53_delegation_set(terrascript.Resource):\n pass\n\nclass aws_route53_query_log(terrascript.Resource):\n pass\n\nclass aws_route53_record(terrascript.Resource):\n pass\n\nclass aws_route53_zone_association(terrascript.Resource):\n pass\n\nclass aws_route53_zone(terrascript.Resource):\n pass\n\nclass aws_route53_health_check(terrascript.Resource):\n pass\n\nclass aws_route53_resolver_endpoint(terrascript.Resource):\n pass\n\nclass aws_route53_resolver_rule_association(terrascript.Resource):\n pass\n\nclass aws_route53_resolver_rule(terrascript.Resource):\n pass\n\nclass aws_route(terrascript.Resource):\n pass\n\nclass aws_route_table(terrascript.Resource):\n pass\n\nclass aws_default_route_table(terrascript.Resource):\n pass\n\nclass aws_route_table_association(terrascript.Resource):\n pass\n\nclass aws_sagemaker_model(terrascript.Resource):\n pass\n\nclass aws_sagemaker_endpoint_configuration(terrascript.Resource):\n pass\n\nclass aws_sagemaker_endpoint(terrascript.Resource):\n pass\n\nclass aws_sagemaker_notebook_instance_lifecycle_configuration(terrascript.Resource):\n pass\n\nclass aws_sagemaker_notebook_instance(terrascript.Resource):\n pass\n\nclass aws_secretsmanager_secret(terrascript.Resource):\n pass\n\nclass aws_secretsmanager_secret_version(terrascript.Resource):\n pass\n\nclass aws_ses_active_receipt_rule_set(terrascript.Resource):\n pass\n\nclass aws_ses_domain_identity(terrascript.Resource):\n pass\n\nclass aws_ses_domain_identity_verification(terrascript.Resource):\n pass\n\nclass aws_ses_domain_dkim(terrascript.Resource):\n pass\n\nclass aws_ses_domain_mail_from(terrascript.Resource):\n pass\n\nclass aws_ses_email_identity(terrascript.Resource):\n pass\n\nclass aws_ses_identity_policy(terrascript.Resource):\n pass\n\nclass aws_ses_receipt_filter(terrascript.Resource):\n pass\n\nclass aws_ses_receipt_rule(terrascript.Resource):\n pass\n\nclass aws_ses_receipt_rule_set(terrascript.Resource):\n pass\n\nclass aws_ses_configuration_set(terrascript.Resource):\n pass\n\nclass aws_ses_event_destination(terrascript.Resource):\n pass\n\nclass aws_ses_identity_notification_topic(terrascript.Resource):\n pass\n\nclass aws_ses_template(terrascript.Resource):\n pass\n\nclass aws_s3_account_public_access_block(terrascript.Resource):\n pass\n\nclass aws_s3_bucket(terrascript.Resource):\n pass\n\nclass aws_s3_bucket_policy(terrascript.Resource):\n pass\n\nclass aws_s3_bucket_public_access_block(terrascript.Resource):\n pass\n\nclass aws_s3_bucket_object(terrascript.Resource):\n pass\n\nclass aws_s3_bucket_notification(terrascript.Resource):\n pass\n\nclass aws_s3_bucket_metric(terrascript.Resource):\n pass\n\nclass 
aws_s3_bucket_inventory(terrascript.Resource):\n pass\n\nclass aws_security_group(terrascript.Resource):\n pass\n\nclass aws_network_interface_sg_attachment(terrascript.Resource):\n pass\n\nclass aws_default_security_group(terrascript.Resource):\n pass\n\nclass aws_security_group_rule(terrascript.Resource):\n pass\n\nclass aws_securityhub_account(terrascript.Resource):\n pass\n\nclass aws_securityhub_product_subscription(terrascript.Resource):\n pass\n\nclass aws_securityhub_standards_subscription(terrascript.Resource):\n pass\n\nclass aws_servicecatalog_portfolio(terrascript.Resource):\n pass\n\nclass aws_service_discovery_http_namespace(terrascript.Resource):\n pass\n\nclass aws_service_discovery_private_dns_namespace(terrascript.Resource):\n pass\n\nclass aws_service_discovery_public_dns_namespace(terrascript.Resource):\n pass\n\nclass aws_service_discovery_service(terrascript.Resource):\n pass\n\nclass aws_servicequotas_service_quota(terrascript.Resource):\n pass\n\nclass aws_shield_protection(terrascript.Resource):\n pass\n\nclass aws_simpledb_domain(terrascript.Resource):\n pass\n\nclass aws_ssm_activation(terrascript.Resource):\n pass\n\nclass aws_ssm_association(terrascript.Resource):\n pass\n\nclass aws_ssm_document(terrascript.Resource):\n pass\n\nclass aws_ssm_maintenance_window(terrascript.Resource):\n pass\n\nclass aws_ssm_maintenance_window_target(terrascript.Resource):\n pass\n\nclass aws_ssm_maintenance_window_task(terrascript.Resource):\n pass\n\nclass aws_ssm_patch_baseline(terrascript.Resource):\n pass\n\nclass aws_ssm_patch_group(terrascript.Resource):\n pass\n\nclass aws_ssm_parameter(terrascript.Resource):\n pass\n\nclass aws_ssm_resource_data_sync(terrascript.Resource):\n pass\n\nclass aws_storagegateway_cache(terrascript.Resource):\n pass\n\nclass aws_storagegateway_cached_iscsi_volume(terrascript.Resource):\n pass\n\nclass aws_storagegateway_gateway(terrascript.Resource):\n pass\n\nclass aws_storagegateway_nfs_file_share(terrascript.Resource):\n pass\n\nclass aws_storagegateway_smb_file_share(terrascript.Resource):\n pass\n\nclass aws_storagegateway_upload_buffer(terrascript.Resource):\n pass\n\nclass aws_storagegateway_working_storage(terrascript.Resource):\n pass\n\nclass aws_spot_datafeed_subscription(terrascript.Resource):\n pass\n\nclass aws_spot_instance_request(terrascript.Resource):\n pass\n\nclass aws_spot_fleet_request(terrascript.Resource):\n pass\n\nclass aws_sqs_queue(terrascript.Resource):\n pass\n\nclass aws_sqs_queue_policy(terrascript.Resource):\n pass\n\nclass aws_snapshot_create_volume_permission(terrascript.Resource):\n pass\n\nclass aws_sns_platform_application(terrascript.Resource):\n pass\n\nclass aws_sns_sms_preferences(terrascript.Resource):\n pass\n\nclass aws_sns_topic(terrascript.Resource):\n pass\n\nclass aws_sns_topic_policy(terrascript.Resource):\n pass\n\nclass aws_sns_topic_subscription(terrascript.Resource):\n pass\n\nclass aws_sfn_activity(terrascript.Resource):\n pass\n\nclass aws_sfn_state_machine(terrascript.Resource):\n pass\n\nclass aws_default_subnet(terrascript.Resource):\n pass\n\nclass aws_subnet(terrascript.Resource):\n pass\n\nclass aws_swf_domain(terrascript.Resource):\n pass\n\nclass aws_transfer_server(terrascript.Resource):\n pass\n\nclass aws_transfer_ssh_key(terrascript.Resource):\n pass\n\nclass aws_transfer_user(terrascript.Resource):\n pass\n\nclass aws_volume_attachment(terrascript.Resource):\n pass\n\nclass aws_vpc_dhcp_options_association(terrascript.Resource):\n pass\n\nclass 
aws_default_vpc_dhcp_options(terrascript.Resource):\n pass\n\nclass aws_vpc_dhcp_options(terrascript.Resource):\n pass\n\nclass aws_vpc_peering_connection(terrascript.Resource):\n pass\n\nclass aws_vpc_peering_connection_accepter(terrascript.Resource):\n pass\n\nclass aws_vpc_peering_connection_options(terrascript.Resource):\n pass\n\nclass aws_default_vpc(terrascript.Resource):\n pass\n\nclass aws_vpc(terrascript.Resource):\n pass\n\nclass aws_vpc_endpoint(terrascript.Resource):\n pass\n\nclass aws_vpc_endpoint_connection_notification(terrascript.Resource):\n pass\n\nclass aws_vpc_endpoint_route_table_association(terrascript.Resource):\n pass\n\nclass aws_vpc_endpoint_subnet_association(terrascript.Resource):\n pass\n\nclass aws_vpc_endpoint_service(terrascript.Resource):\n pass\n\nclass aws_vpc_endpoint_service_allowed_principal(terrascript.Resource):\n pass\n\nclass aws_vpc_ipv4_cidr_block_association(terrascript.Resource):\n pass\n\nclass aws_vpn_connection(terrascript.Resource):\n pass\n\nclass aws_vpn_connection_route(terrascript.Resource):\n pass\n\nclass aws_vpn_gateway(terrascript.Resource):\n pass\n\nclass aws_vpn_gateway_attachment(terrascript.Resource):\n pass\n\nclass aws_vpn_gateway_route_propagation(terrascript.Resource):\n pass\n\nclass aws_waf_byte_match_set(terrascript.Resource):\n pass\n\nclass aws_waf_ipset(terrascript.Resource):\n pass\n\nclass aws_waf_rate_based_rule(terrascript.Resource):\n pass\n\nclass aws_waf_regex_match_set(terrascript.Resource):\n pass\n\nclass aws_waf_regex_pattern_set(terrascript.Resource):\n pass\n\nclass aws_waf_rule(terrascript.Resource):\n pass\n\nclass aws_waf_rule_group(terrascript.Resource):\n pass\n\nclass aws_waf_size_constraint_set(terrascript.Resource):\n pass\n\nclass aws_waf_web_acl(terrascript.Resource):\n pass\n\nclass aws_waf_xss_match_set(terrascript.Resource):\n pass\n\nclass aws_waf_sql_injection_match_set(terrascript.Resource):\n pass\n\nclass aws_waf_geo_match_set(terrascript.Resource):\n pass\n\nclass aws_wafregional_byte_match_set(terrascript.Resource):\n pass\n\nclass aws_wafregional_geo_match_set(terrascript.Resource):\n pass\n\nclass aws_wafregional_ipset(terrascript.Resource):\n pass\n\nclass aws_wafregional_rate_based_rule(terrascript.Resource):\n pass\n\nclass aws_wafregional_regex_match_set(terrascript.Resource):\n pass\n\nclass aws_wafregional_regex_pattern_set(terrascript.Resource):\n pass\n\nclass aws_wafregional_rule(terrascript.Resource):\n pass\n\nclass aws_wafregional_rule_group(terrascript.Resource):\n pass\n\nclass aws_wafregional_size_constraint_set(terrascript.Resource):\n pass\n\nclass aws_wafregional_sql_injection_match_set(terrascript.Resource):\n pass\n\nclass aws_wafregional_xss_match_set(terrascript.Resource):\n pass\n\nclass aws_wafregional_web_acl(terrascript.Resource):\n pass\n\nclass aws_wafregional_web_acl_association(terrascript.Resource):\n pass\n\nclass aws_worklink_fleet(terrascript.Resource):\n pass\n\nclass aws_worklink_website_certificate_authority_association(terrascript.Resource):\n pass\n\nclass aws_batch_compute_environment(terrascript.Resource):\n pass\n\nclass aws_batch_job_definition(terrascript.Resource):\n pass\n\nclass aws_batch_job_queue(terrascript.Resource):\n pass\n\nclass aws_pinpoint_app(terrascript.Resource):\n pass\n\nclass aws_pinpoint_adm_channel(terrascript.Resource):\n pass\n\nclass aws_pinpoint_apns_channel(terrascript.Resource):\n pass\n\nclass aws_pinpoint_apns_sandbox_channel(terrascript.Resource):\n pass\n\nclass 
aws_pinpoint_apns_voip_channel(terrascript.Resource):\n pass\n\nclass aws_pinpoint_apns_voip_sandbox_channel(terrascript.Resource):\n pass\n\nclass aws_pinpoint_baidu_channel(terrascript.Resource):\n pass\n\nclass aws_pinpoint_email_channel(terrascript.Resource):\n pass\n\nclass aws_pinpoint_event_stream(terrascript.Resource):\n pass\n\nclass aws_pinpoint_gcm_channel(terrascript.Resource):\n pass\n\nclass aws_pinpoint_sms_channel(terrascript.Resource):\n pass\n\nclass aws_xray_sampling_rule(terrascript.Resource):\n pass\n\nclass aws_alb(terrascript.Resource):\n pass\n\nclass aws_lb(terrascript.Resource):\n pass\n\nclass aws_alb_listener(terrascript.Resource):\n pass\n\nclass aws_lb_listener(terrascript.Resource):\n pass\n\nclass aws_alb_listener_certificate(terrascript.Resource):\n pass\n\nclass aws_lb_listener_certificate(terrascript.Resource):\n pass\n\nclass aws_alb_listener_rule(terrascript.Resource):\n pass\n\nclass aws_lb_listener_rule(terrascript.Resource):\n pass\n\nclass aws_alb_target_group(terrascript.Resource):\n pass\n\nclass aws_lb_target_group(terrascript.Resource):\n pass\n\nclass aws_alb_target_group_attachment(terrascript.Resource):\n pass\n\nclass aws_lb_target_group_attachment(terrascript.Resource):\n pass\n", "id": "4101804", "language": "Python", "matching_score": 5.211395263671875, "max_stars_count": 0, "path": "terrascript/aws/r.py" }, { "content": "# terrascript/aws/d.py\n\nimport terrascript\n\n\nclass aws_acm_certificate(terrascript.Data):\n pass\n\nclass aws_acmpca_certificate_authority(terrascript.Data):\n pass\n\nclass aws_ami(terrascript.Data):\n pass\n\nclass aws_ami_ids(terrascript.Data):\n pass\n\nclass aws_api_gateway_api_key(terrascript.Data):\n pass\n\nclass aws_api_gateway_resource(terrascript.Data):\n pass\n\nclass aws_api_gateway_rest_api(terrascript.Data):\n pass\n\nclass aws_api_gateway_vpc_link(terrascript.Data):\n pass\n\nclass aws_arn(terrascript.Data):\n pass\n\nclass aws_autoscaling_group(terrascript.Data):\n pass\n\nclass aws_autoscaling_groups(terrascript.Data):\n pass\n\nclass aws_availability_zone(terrascript.Data):\n pass\n\nclass aws_availability_zones(terrascript.Data):\n pass\n\nclass aws_batch_compute_environment(terrascript.Data):\n pass\n\nclass aws_batch_job_queue(terrascript.Data):\n pass\n\nclass aws_billing_service_account(terrascript.Data):\n pass\n\nclass aws_caller_identity(terrascript.Data):\n pass\n\nclass aws_canonical_user_id(terrascript.Data):\n pass\n\nclass aws_cloudformation_export(terrascript.Data):\n pass\n\nclass aws_cloudformation_stack(terrascript.Data):\n pass\n\nclass aws_cloudhsm_v2_cluster(terrascript.Data):\n pass\n\nclass aws_cloudtrail_service_account(terrascript.Data):\n pass\n\nclass aws_cloudwatch_log_group(terrascript.Data):\n pass\n\nclass aws_cognito_user_pools(terrascript.Data):\n pass\n\nclass aws_codecommit_repository(terrascript.Data):\n pass\n\nclass aws_cur_report_definition(terrascript.Data):\n pass\n\nclass aws_db_cluster_snapshot(terrascript.Data):\n pass\n\nclass aws_db_event_categories(terrascript.Data):\n pass\n\nclass aws_db_instance(terrascript.Data):\n pass\n\nclass aws_db_snapshot(terrascript.Data):\n pass\n\nclass aws_dx_gateway(terrascript.Data):\n pass\n\nclass aws_dynamodb_table(terrascript.Data):\n pass\n\nclass aws_ebs_default_kms_key(terrascript.Data):\n pass\n\nclass aws_ebs_encryption_by_default(terrascript.Data):\n pass\n\nclass aws_ebs_snapshot(terrascript.Data):\n pass\n\nclass aws_ebs_snapshot_ids(terrascript.Data):\n pass\n\nclass aws_ebs_volume(terrascript.Data):\n 
pass\n\nclass aws_ec2_transit_gateway(terrascript.Data):\n pass\n\nclass aws_ec2_transit_gateway_dx_gateway_attachment(terrascript.Data):\n pass\n\nclass aws_ec2_transit_gateway_route_table(terrascript.Data):\n pass\n\nclass aws_ec2_transit_gateway_vpc_attachment(terrascript.Data):\n pass\n\nclass aws_ec2_transit_gateway_vpn_attachment(terrascript.Data):\n pass\n\nclass aws_ecr_image(terrascript.Data):\n pass\n\nclass aws_ecr_repository(terrascript.Data):\n pass\n\nclass aws_ecs_cluster(terrascript.Data):\n pass\n\nclass aws_ecs_container_definition(terrascript.Data):\n pass\n\nclass aws_ecs_service(terrascript.Data):\n pass\n\nclass aws_ecs_task_definition(terrascript.Data):\n pass\n\nclass aws_customer_gateway(terrascript.Data):\n pass\n\nclass aws_efs_file_system(terrascript.Data):\n pass\n\nclass aws_efs_mount_target(terrascript.Data):\n pass\n\nclass aws_eip(terrascript.Data):\n pass\n\nclass aws_eks_cluster(terrascript.Data):\n pass\n\nclass aws_eks_cluster_auth(terrascript.Data):\n pass\n\nclass aws_elastic_beanstalk_application(terrascript.Data):\n pass\n\nclass aws_elastic_beanstalk_hosted_zone(terrascript.Data):\n pass\n\nclass aws_elastic_beanstalk_solution_stack(terrascript.Data):\n pass\n\nclass aws_elasticache_cluster(terrascript.Data):\n pass\n\nclass aws_elb(terrascript.Data):\n pass\n\nclass aws_elasticache_replication_group(terrascript.Data):\n pass\n\nclass aws_elb_hosted_zone_id(terrascript.Data):\n pass\n\nclass aws_elb_service_account(terrascript.Data):\n pass\n\nclass aws_glue_script(terrascript.Data):\n pass\n\nclass aws_iam_account_alias(terrascript.Data):\n pass\n\nclass aws_iam_group(terrascript.Data):\n pass\n\nclass aws_iam_instance_profile(terrascript.Data):\n pass\n\nclass aws_iam_policy(terrascript.Data):\n pass\n\nclass aws_iam_policy_document(terrascript.Data):\n pass\n\nclass aws_iam_role(terrascript.Data):\n pass\n\nclass aws_iam_server_certificate(terrascript.Data):\n pass\n\nclass aws_iam_user(terrascript.Data):\n pass\n\nclass aws_internet_gateway(terrascript.Data):\n pass\n\nclass aws_iot_endpoint(terrascript.Data):\n pass\n\nclass aws_inspector_rules_packages(terrascript.Data):\n pass\n\nclass aws_instance(terrascript.Data):\n pass\n\nclass aws_instances(terrascript.Data):\n pass\n\nclass aws_ip_ranges(terrascript.Data):\n pass\n\nclass aws_kinesis_stream(terrascript.Data):\n pass\n\nclass aws_kms_alias(terrascript.Data):\n pass\n\nclass aws_kms_ciphertext(terrascript.Data):\n pass\n\nclass aws_kms_key(terrascript.Data):\n pass\n\nclass aws_kms_secret(terrascript.Data):\n pass\n\nclass aws_kms_secrets(terrascript.Data):\n pass\n\nclass aws_lambda_function(terrascript.Data):\n pass\n\nclass aws_lambda_invocation(terrascript.Data):\n pass\n\nclass aws_lambda_layer_version(terrascript.Data):\n pass\n\nclass aws_launch_configuration(terrascript.Data):\n pass\n\nclass aws_launch_template(terrascript.Data):\n pass\n\nclass aws_mq_broker(terrascript.Data):\n pass\n\nclass aws_msk_cluster(terrascript.Data):\n pass\n\nclass aws_msk_configuration(terrascript.Data):\n pass\n\nclass aws_nat_gateway(terrascript.Data):\n pass\n\nclass aws_network_acls(terrascript.Data):\n pass\n\nclass aws_network_interface(terrascript.Data):\n pass\n\nclass aws_network_interfaces(terrascript.Data):\n pass\n\nclass aws_organizations_organization(terrascript.Data):\n pass\n\nclass aws_partition(terrascript.Data):\n pass\n\nclass aws_prefix_list(terrascript.Data):\n pass\n\nclass aws_pricing_product(terrascript.Data):\n pass\n\nclass aws_ram_resource_share(terrascript.Data):\n 
pass\n\nclass aws_rds_cluster(terrascript.Data):\n pass\n\nclass aws_redshift_cluster(terrascript.Data):\n pass\n\nclass aws_redshift_service_account(terrascript.Data):\n pass\n\nclass aws_region(terrascript.Data):\n pass\n\nclass aws_route(terrascript.Data):\n pass\n\nclass aws_route_table(terrascript.Data):\n pass\n\nclass aws_route_tables(terrascript.Data):\n pass\n\nclass aws_route53_delegation_set(terrascript.Data):\n pass\n\nclass aws_route53_zone(terrascript.Data):\n pass\n\nclass aws_s3_bucket(terrascript.Data):\n pass\n\nclass aws_s3_bucket_object(terrascript.Data):\n pass\n\nclass aws_s3_bucket_objects(terrascript.Data):\n pass\n\nclass aws_secretsmanager_secret(terrascript.Data):\n pass\n\nclass aws_secretsmanager_secret_version(terrascript.Data):\n pass\n\nclass aws_servicequotas_service(terrascript.Data):\n pass\n\nclass aws_servicequotas_service_quota(terrascript.Data):\n pass\n\nclass aws_sns_topic(terrascript.Data):\n pass\n\nclass aws_sqs_queue(terrascript.Data):\n pass\n\nclass aws_ssm_document(terrascript.Data):\n pass\n\nclass aws_ssm_parameter(terrascript.Data):\n pass\n\nclass aws_storagegateway_local_disk(terrascript.Data):\n pass\n\nclass aws_subnet(terrascript.Data):\n pass\n\nclass aws_subnet_ids(terrascript.Data):\n pass\n\nclass aws_transfer_server(terrascript.Data):\n pass\n\nclass aws_vpcs(terrascript.Data):\n pass\n\nclass aws_security_group(terrascript.Data):\n pass\n\nclass aws_security_groups(terrascript.Data):\n pass\n\nclass aws_vpc(terrascript.Data):\n pass\n\nclass aws_vpc_dhcp_options(terrascript.Data):\n pass\n\nclass aws_vpc_endpoint(terrascript.Data):\n pass\n\nclass aws_vpc_endpoint_service(terrascript.Data):\n pass\n\nclass aws_vpc_peering_connection(terrascript.Data):\n pass\n\nclass aws_vpn_gateway(terrascript.Data):\n pass\n\nclass aws_waf_ipset(terrascript.Data):\n pass\n\nclass aws_waf_rule(terrascript.Data):\n pass\n\nclass aws_waf_web_acl(terrascript.Data):\n pass\n\nclass aws_wafregional_ipset(terrascript.Data):\n pass\n\nclass aws_wafregional_rule(terrascript.Data):\n pass\n\nclass aws_wafregional_web_acl(terrascript.Data):\n pass\n\nclass aws_workspaces_bundle(terrascript.Data):\n pass\n\nclass aws_lb(terrascript.Data):\n pass\n\nclass aws_alb(terrascript.Data):\n pass\n\nclass aws_lb_listener(terrascript.Data):\n pass\n\nclass aws_alb_listener(terrascript.Data):\n pass\n\nclass aws_lb_target_group(terrascript.Data):\n pass\n\nclass aws_alb_target_group(terrascript.Data):\n pass\n", "id": "11512418", "language": "Python", "matching_score": 1.362221360206604, "max_stars_count": 0, "path": "terrascript/aws/d.py" }, { "content": "# terrascript/opc/r.py\n\nimport terrascript\n\n\nclass opc_compute_acl(terrascript.Resource):\n pass\n\nclass opc_compute_image_list(terrascript.Resource):\n pass\n\nclass opc_compute_image_list_entry(terrascript.Resource):\n pass\n\nclass opc_compute_instance(terrascript.Resource):\n pass\n\nclass opc_compute_ip_address_reservation(terrascript.Resource):\n pass\n\nclass opc_compute_ip_association(terrascript.Resource):\n pass\n\nclass opc_compute_ip_network(terrascript.Resource):\n pass\n\nclass opc_compute_ip_network_exchange(terrascript.Resource):\n pass\n\nclass opc_compute_ip_reservation(terrascript.Resource):\n pass\n\nclass opc_compute_machine_image(terrascript.Resource):\n pass\n\nclass opc_compute_route(terrascript.Resource):\n pass\n\nclass opc_compute_security_application(terrascript.Resource):\n pass\n\nclass opc_compute_security_association(terrascript.Resource):\n pass\n\nclass 
opc_compute_security_ip_list(terrascript.Resource):\n pass\n\nclass opc_compute_security_list(terrascript.Resource):\n pass\n\nclass opc_compute_security_rule(terrascript.Resource):\n pass\n\nclass opc_compute_sec_rule(terrascript.Resource):\n pass\n\nclass opc_compute_ssh_key(terrascript.Resource):\n pass\n\nclass opc_compute_storage_attachment(terrascript.Resource):\n pass\n\nclass opc_compute_storage_volume(terrascript.Resource):\n pass\n\nclass opc_compute_storage_volume_snapshot(terrascript.Resource):\n pass\n\nclass opc_compute_vnic_set(terrascript.Resource):\n pass\n\nclass opc_compute_security_protocol(terrascript.Resource):\n pass\n\nclass opc_compute_ip_address_prefix_set(terrascript.Resource):\n pass\n\nclass opc_compute_ip_address_association(terrascript.Resource):\n pass\n\nclass opc_compute_snapshot(terrascript.Resource):\n pass\n\nclass opc_compute_orchestrated_instance(terrascript.Resource):\n pass\n\nclass opc_compute_vpn_endpoint_v2(terrascript.Resource):\n pass\n\nclass opc_lbaas_certificate(terrascript.Resource):\n pass\n\nclass opc_lbaas_listener(terrascript.Resource):\n pass\n\nclass opc_lbaas_load_balancer(terrascript.Resource):\n pass\n\nclass opc_lbaas_policy(terrascript.Resource):\n pass\n\nclass opc_lbaas_server_pool(terrascript.Resource):\n pass\n\nclass opc_storage_container(terrascript.Resource):\n pass\n\nclass opc_storage_object(terrascript.Resource):\n pass\n", "id": "283991", "language": "Python", "matching_score": 3.765408515930176, "max_stars_count": 0, "path": "terrascript/opc/r.py" }, { "content": "# terrascript/opc/d.py\n\nimport terrascript\n\n\nclass opc_compute_image_list_entry(terrascript.Data):\n pass\n\nclass opc_compute_ip_address_reservation(terrascript.Data):\n pass\n\nclass opc_compute_ip_reservation(terrascript.Data):\n pass\n\nclass opc_compute_machine_image(terrascript.Data):\n pass\n\nclass opc_compute_network_interface(terrascript.Data):\n pass\n\nclass opc_compute_ssh_key(terrascript.Data):\n pass\n\nclass opc_compute_storage_volume_snapshot(terrascript.Data):\n pass\n\nclass opc_compute_vnic(terrascript.Data):\n pass\n", "id": "3092558", "language": "Python", "matching_score": 0.49879229068756104, "max_stars_count": 0, "path": "terrascript/opc/d.py" }, { "content": "# terrascript/digitalocean/d.py\n\nimport terrascript\n\n\nclass digitalocean_certificate(terrascript.Data):\n pass\n\nclass digitalocean_database_cluster(terrascript.Data):\n pass\n\nclass digitalocean_domain(terrascript.Data):\n pass\n\nclass digitalocean_droplet(terrascript.Data):\n pass\n\nclass digitalocean_droplet_snapshot(terrascript.Data):\n pass\n\nclass digitalocean_floating_ip(terrascript.Data):\n pass\n\nclass digitalocean_image(terrascript.Data):\n pass\n\nclass digitalocean_kubernetes_cluster(terrascript.Data):\n pass\n\nclass digitalocean_loadbalancer(terrascript.Data):\n pass\n\nclass digitalocean_record(terrascript.Data):\n pass\n\nclass digitalocean_ssh_key(terrascript.Data):\n pass\n\nclass digitalocean_tag(terrascript.Data):\n pass\n\nclass digitalocean_volume_snapshot(terrascript.Data):\n pass\n\nclass digitalocean_volume(terrascript.Data):\n pass\n", "id": "411115", "language": "Python", "matching_score": 3.5601017475128174, "max_stars_count": 0, "path": "terrascript/digitalocean/d.py" }, { "content": "# terrascript/digitalocean/r.py\n\nimport terrascript\n\n\nclass digitalocean_certificate(terrascript.Resource):\n pass\n\nclass digitalocean_cdn(terrascript.Resource):\n pass\n\nclass digitalocean_database_cluster(terrascript.Resource):\n pass\n\nclass 
digitalocean_domain(terrascript.Resource):\n pass\n\nclass digitalocean_droplet(terrascript.Resource):\n pass\n\nclass digitalocean_droplet_snapshot(terrascript.Resource):\n pass\n\nclass digitalocean_firewall(terrascript.Resource):\n pass\n\nclass digitalocean_floating_ip(terrascript.Resource):\n pass\n\nclass digitalocean_floating_ip_assignment(terrascript.Resource):\n pass\n\nclass digitalocean_kubernetes_cluster(terrascript.Resource):\n pass\n\nclass digitalocean_kubernetes_node_pool(terrascript.Resource):\n pass\n\nclass digitalocean_loadbalancer(terrascript.Resource):\n pass\n\nclass digitalocean_project(terrascript.Resource):\n pass\n\nclass digitalocean_record(terrascript.Resource):\n pass\n\nclass digitalocean_spaces_bucket(terrascript.Resource):\n pass\n\nclass digitalocean_ssh_key(terrascript.Resource):\n pass\n\nclass digitalocean_tag(terrascript.Resource):\n pass\n\nclass digitalocean_volume(terrascript.Resource):\n pass\n\nclass digitalocean_volume_attachment(terrascript.Resource):\n pass\n\nclass digitalocean_volume_snapshot(terrascript.Resource):\n pass\n", "id": "11361069", "language": "Python", "matching_score": 0.8960466980934143, "max_stars_count": 0, "path": "terrascript/digitalocean/r.py" }, { "content": "# terrascript/packet/r.py\n\nimport terrascript\n\n\nclass packet_device(terrascript.Resource):\n pass\n\nclass packet_ssh_key(terrascript.Resource):\n pass\n\nclass packet_project_ssh_key(terrascript.Resource):\n pass\n\nclass packet_project(terrascript.Resource):\n pass\n\nclass packet_organization(terrascript.Resource):\n pass\n\nclass packet_volume(terrascript.Resource):\n pass\n\nclass packet_volume_attachment(terrascript.Resource):\n pass\n\nclass packet_reserved_ip_block(terrascript.Resource):\n pass\n\nclass packet_ip_attachment(terrascript.Resource):\n pass\n\nclass packet_spot_market_request(terrascript.Resource):\n pass\n\nclass packet_vlan(terrascript.Resource):\n pass\n\nclass packet_bgp_session(terrascript.Resource):\n pass\n\nclass packet_port_vlan_attachment(terrascript.Resource):\n pass\n\nclass packet_connect(terrascript.Resource):\n pass\n", "id": "11919201", "language": "Python", "matching_score": 1.620111346244812, "max_stars_count": 0, "path": "terrascript/packet/r.py" }, { "content": "# terrascript/packet/d.py\n\nimport terrascript\n\n\nclass packet_precreated_ip_block(terrascript.Data):\n pass\n\nclass packet_operating_system(terrascript.Data):\n pass\n\nclass packet_spot_market_price(terrascript.Data):\n pass\n\nclass packet_device(terrascript.Data):\n pass\n", "id": "5942540", "language": "Python", "matching_score": 1.1286840438842773, "max_stars_count": 0, "path": "terrascript/packet/d.py" }, { "content": "# terrascript/packet/__init__.py\n\nimport terrascript\n\nclass packet(terrascript.Provider):\n pass", "id": "10607283", "language": "Python", "matching_score": 0.0344996452331543, "max_stars_count": 0, "path": "terrascript/packet/__init__.py" }, { "content": "# terrascript/ovh/r.py\n\nimport terrascript\n\n\nclass ovh_iploadbalancing_tcp_farm(terrascript.Resource):\n pass\n\nclass ovh_iploadbalancing_tcp_farm_server(terrascript.Resource):\n pass\n\nclass ovh_iploadbalancing_tcp_frontend(terrascript.Resource):\n pass\n\nclass ovh_iploadbalancing_http_route(terrascript.Resource):\n pass\n\nclass ovh_iploadbalancing_http_route_rule(terrascript.Resource):\n pass\n\nclass ovh_iploadbalancing_refresh(terrascript.Resource):\n pass\n\nclass ovh_domain_zone_record(terrascript.Resource):\n pass\n\nclass 
ovh_domain_zone_redirection(terrascript.Resource):\n pass\n\nclass ovh_ip_reverse(terrascript.Resource):\n pass\n\nclass ovh_cloud_network_private(terrascript.Resource):\n pass\n\nclass ovh_cloud_network_private_subnet(terrascript.Resource):\n pass\n\nclass ovh_cloud_user(terrascript.Resource):\n pass\n\nclass ovh_vrack_cloudproject(terrascript.Resource):\n pass\n", "id": "12599245", "language": "Python", "matching_score": 1.6170271635055542, "max_stars_count": 0, "path": "terrascript/ovh/r.py" }, { "content": "# terrascript/ovh/d.py\n\nimport terrascript\n\n\nclass ovh_cloud_region(terrascript.Data):\n pass\n\nclass ovh_cloud_regions(terrascript.Data):\n pass\n\nclass ovh_domain_zone(terrascript.Data):\n pass\n\nclass ovh_iploadbalancing(terrascript.Data):\n pass\n\nclass ovh_me_paymentmean_bankaccount(terrascript.Data):\n pass\n\nclass ovh_me_paymentmean_creditcard(terrascript.Data):\n pass\n", "id": "6782339", "language": "Python", "matching_score": 1.0147861242294312, "max_stars_count": 0, "path": "terrascript/ovh/d.py" }, { "content": "# terrascript/mailgun/r.py\n\nimport terrascript\n\n\nclass mailgun_domain(terrascript.Resource):\n pass\n", "id": "3786538", "language": "Python", "matching_score": 1.12619149684906, "max_stars_count": 0, "path": "terrascript/mailgun/r.py" }, { "content": "# terrascript/mailgun/__init__.py\n\nimport terrascript\n\nclass mailgun(terrascript.Provider):\n pass", "id": "7833156", "language": "Python", "matching_score": 1.1056170463562012, "max_stars_count": 0, "path": "terrascript/mailgun/__init__.py" }, { "content": "# terrascript/mailgun/d.py\n\nimport terrascript\n\n", "id": "8668856", "language": "Python", "matching_score": 0.111897312104702, "max_stars_count": 0, "path": "terrascript/mailgun/d.py" }, { "content": "# terrascript/dme/d.py\n\nimport terrascript\n\n", "id": "10404624", "language": "Python", "matching_score": 1.12619149684906, "max_stars_count": 0, "path": "terrascript/dme/d.py" }, { "content": "# terrascript/dme/__init__.py\n\nimport terrascript\n\nclass dme(terrascript.Provider):\n pass", "id": "11089087", "language": "Python", "matching_score": 0.11911414563655853, "max_stars_count": 0, "path": "terrascript/dme/__init__.py" }, { "content": "# terrascript/external/r.py\n\nimport terrascript\n\n", "id": "9226856", "language": "Python", "matching_score": 1.1286840438842773, "max_stars_count": 0, "path": "terrascript/external/r.py" }, { "content": "# terrascript/external/__init__.py\n\nimport terrascript\n\nclass external(terrascript.Provider):\n pass", "id": "9774820", "language": "Python", "matching_score": 0.009866515174508095, "max_stars_count": 0, "path": "terrascript/external/__init__.py" }, { "content": "# terrascript/scaleway/r.py\n\nimport terrascript\n\n\nclass scaleway_bucket(terrascript.Resource):\n pass\n\nclass scaleway_compute_instance_ip(terrascript.Resource):\n pass\n\nclass scaleway_compute_instance_volume(terrascript.Resource):\n pass\n\nclass scaleway_compute_instance_security_group(terrascript.Resource):\n pass\n\nclass scaleway_compute_instance_server(terrascript.Resource):\n pass\n\nclass scaleway_compute_instance_placement_group(terrascript.Resource):\n pass\n\nclass scaleway_storage_object_bucket(terrascript.Resource):\n pass\n\nclass scaleway_user_data(terrascript.Resource):\n pass\n\nclass scaleway_server(terrascript.Resource):\n pass\n\nclass scaleway_token(terrascript.Resource):\n pass\n\nclass scaleway_ssh_key(terrascript.Resource):\n pass\n\nclass scaleway_ip(terrascript.Resource):\n pass\n\nclass 
scaleway_ip_reverse_dns(terrascript.Resource):\n pass\n\nclass scaleway_security_group(terrascript.Resource):\n pass\n\nclass scaleway_security_group_rule(terrascript.Resource):\n pass\n\nclass scaleway_volume(terrascript.Resource):\n pass\n\nclass scaleway_volume_attachment(terrascript.Resource):\n pass\n", "id": "11021587", "language": "Python", "matching_score": 1.6252110004425049, "max_stars_count": 0, "path": "terrascript/scaleway/r.py" }, { "content": "# terrascript/scaleway/d.py\n\nimport terrascript\n\n\nclass scaleway_bootscript(terrascript.Data):\n pass\n\nclass scaleway_image(terrascript.Data):\n pass\n\nclass scaleway_security_group(terrascript.Data):\n pass\n\nclass scaleway_volume(terrascript.Data):\n pass\n", "id": "10158708", "language": "Python", "matching_score": 0.2142827808856964, "max_stars_count": 0, "path": "terrascript/scaleway/d.py" }, { "content": "# terrascript/triton/d.py\n\nimport terrascript\n\n\nclass triton_account(terrascript.Data):\n pass\n\nclass triton_datacenter(terrascript.Data):\n pass\n\nclass triton_image(terrascript.Data):\n pass\n\nclass triton_network(terrascript.Data):\n pass\n\nclass triton_package(terrascript.Data):\n pass\n\nclass triton_fabric_vlan(terrascript.Data):\n pass\n\nclass triton_fabric_network(terrascript.Data):\n pass\n", "id": "7868124", "language": "Python", "matching_score": 1.258481740951538, "max_stars_count": 0, "path": "terrascript/triton/d.py" }, { "content": "# terrascript/triton/r.py\n\nimport terrascript\n\n\nclass triton_fabric(terrascript.Resource):\n pass\n\nclass triton_firewall_rule(terrascript.Resource):\n pass\n\nclass triton_instance_template(terrascript.Resource):\n pass\n\nclass triton_key(terrascript.Resource):\n pass\n\nclass triton_machine(terrascript.Resource):\n pass\n\nclass triton_service_group(terrascript.Resource):\n pass\n\nclass triton_snapshot(terrascript.Resource):\n pass\n\nclass triton_vlan(terrascript.Resource):\n pass\n", "id": "624040", "language": "Python", "matching_score": 1.109212040901184, "max_stars_count": 0, "path": "terrascript/triton/r.py" }, { "content": "# terrascript/triton/__init__.py\n\nimport terrascript\n\nclass triton(terrascript.Provider):\n pass", "id": "519409", "language": "Python", "matching_score": 0.028015712276101112, "max_stars_count": 0, "path": "terrascript/triton/__init__.py" }, { "content": "# terrascript/cloudstack/r.py\n\nimport terrascript\n\n\nclass cloudstack_affinity_group(terrascript.Resource):\n pass\n\nclass cloudstack_disk(terrascript.Resource):\n pass\n\nclass cloudstack_egress_firewall(terrascript.Resource):\n pass\n\nclass cloudstack_firewall(terrascript.Resource):\n pass\n\nclass cloudstack_instance(terrascript.Resource):\n pass\n\nclass cloudstack_ipaddress(terrascript.Resource):\n pass\n\nclass cloudstack_loadbalancer_rule(terrascript.Resource):\n pass\n\nclass cloudstack_network(terrascript.Resource):\n pass\n\nclass cloudstack_network_acl(terrascript.Resource):\n pass\n\nclass cloudstack_network_acl_rule(terrascript.Resource):\n pass\n\nclass cloudstack_nic(terrascript.Resource):\n pass\n\nclass cloudstack_port_forward(terrascript.Resource):\n pass\n\nclass cloudstack_private_gateway(terrascript.Resource):\n pass\n\nclass cloudstack_secondary_ipaddress(terrascript.Resource):\n pass\n\nclass cloudstack_security_group(terrascript.Resource):\n pass\n\nclass cloudstack_security_group_rule(terrascript.Resource):\n pass\n\nclass cloudstack_ssh_keypair(terrascript.Resource):\n pass\n\nclass cloudstack_static_nat(terrascript.Resource):\n pass\n\nclass 
cloudstack_static_route(terrascript.Resource):\n pass\n\nclass cloudstack_template(terrascript.Resource):\n pass\n\nclass cloudstack_vpc(terrascript.Resource):\n pass\n\nclass cloudstack_vpn_connection(terrascript.Resource):\n pass\n\nclass cloudstack_vpn_customer_gateway(terrascript.Resource):\n pass\n\nclass cloudstack_vpn_gateway(terrascript.Resource):\n pass\n", "id": "2120300", "language": "Python", "matching_score": 1.1225167512893677, "max_stars_count": 0, "path": "terrascript/cloudstack/r.py" }, { "content": "# terrascript/cloudstack/__init__.py\n\nimport terrascript\n\nclass cloudstack(terrascript.Provider):\n pass", "id": "2460776", "language": "Python", "matching_score": 0.02333557978272438, "max_stars_count": 0, "path": "terrascript/cloudstack/__init__.py" }, { "content": "# terrascript/rancher/d.py\n\nimport terrascript\n\n\nclass rancher_certificate(terrascript.Data):\n pass\n\nclass rancher_environment(terrascript.Data):\n pass\n\nclass rancher_setting(terrascript.Data):\n pass\n", "id": "624023", "language": "Python", "matching_score": 1.3257304430007935, "max_stars_count": 0, "path": "terrascript/rancher/d.py" }, { "content": "# terrascript/rancher/r.py\n\nimport terrascript\n\n\nclass rancher_certificate(terrascript.Resource):\n pass\n\nclass rancher_environment(terrascript.Resource):\n pass\n\nclass rancher_host(terrascript.Resource):\n pass\n\nclass rancher_registration_token(terrascript.Resource):\n pass\n\nclass rancher_registry(terrascript.Resource):\n pass\n\nclass rancher_registry_credential(terrascript.Resource):\n pass\n\nclass rancher_secret(terrascript.Resource):\n pass\n\nclass rancher_stack(terrascript.Resource):\n pass\n\nclass rancher_volume(terrascript.Resource):\n pass\n", "id": "7265722", "language": "Python", "matching_score": 1.1318467855453491, "max_stars_count": 0, "path": "terrascript/rancher/r.py" }, { "content": "# terrascript/rancher/__init__.py\n\nimport terrascript\n\nclass rancher(terrascript.Provider):\n pass", "id": "442481", "language": "Python", "matching_score": 0.03259243443608284, "max_stars_count": 0, "path": "terrascript/rancher/__init__.py" }, { "content": "# terrascript/ultradns/d.py\n\nimport terrascript\n\n", "id": "5356328", "language": "Python", "matching_score": 1.109212040901184, "max_stars_count": 0, "path": "terrascript/ultradns/d.py" }, { "content": "# terrascript/ultradns/__init__.py\n\nimport terrascript\n\nclass ultradns(terrascript.Provider):\n pass", "id": "6437869", "language": "Python", "matching_score": 0.029720652848482132, "max_stars_count": 0, "path": "terrascript/ultradns/__init__.py" }, { "content": "# terrascript/azurerm/r.py\n\nimport terrascript\n\n\nclass azurerm_analysis_services_server(terrascript.Resource):\n pass\n\nclass azurerm_api_management(terrascript.Resource):\n pass\n\nclass azurerm_api_management_api(terrascript.Resource):\n pass\n\nclass azurerm_api_management_api_operation(terrascript.Resource):\n pass\n\nclass azurerm_api_management_api_operation_policy(terrascript.Resource):\n pass\n\nclass azurerm_api_management_api_policy(terrascript.Resource):\n pass\n\nclass azurerm_api_management_api_schema(terrascript.Resource):\n pass\n\nclass azurerm_api_management_api_version_set(terrascript.Resource):\n pass\n\nclass azurerm_api_management_authorization_server(terrascript.Resource):\n pass\n\nclass azurerm_api_management_backend(terrascript.Resource):\n pass\n\nclass azurerm_api_management_certificate(terrascript.Resource):\n pass\n\nclass azurerm_api_management_group(terrascript.Resource):\n 
pass\n\nclass azurerm_api_management_group_user(terrascript.Resource):\n pass\n\nclass azurerm_api_management_logger(terrascript.Resource):\n pass\n\nclass azurerm_api_management_openid_connect_provider(terrascript.Resource):\n pass\n\nclass azurerm_api_management_product(terrascript.Resource):\n pass\n\nclass azurerm_api_management_product_api(terrascript.Resource):\n pass\n\nclass azurerm_api_management_product_group(terrascript.Resource):\n pass\n\nclass azurerm_api_management_product_policy(terrascript.Resource):\n pass\n\nclass azurerm_api_management_property(terrascript.Resource):\n pass\n\nclass azurerm_api_management_subscription(terrascript.Resource):\n pass\n\nclass azurerm_api_management_user(terrascript.Resource):\n pass\n\nclass azurerm_app_service_active_slot(terrascript.Resource):\n pass\n\nclass azurerm_app_service_custom_hostname_binding(terrascript.Resource):\n pass\n\nclass azurerm_app_service_plan(terrascript.Resource):\n pass\n\nclass azurerm_app_service_slot(terrascript.Resource):\n pass\n\nclass azurerm_app_service(terrascript.Resource):\n pass\n\nclass azurerm_application_gateway(terrascript.Resource):\n pass\n\nclass azurerm_application_insights_api_key(terrascript.Resource):\n pass\n\nclass azurerm_application_insights(terrascript.Resource):\n pass\n\nclass azurerm_application_insights_web_test(terrascript.Resource):\n pass\n\nclass azurerm_application_security_group(terrascript.Resource):\n pass\n\nclass azurerm_automation_account(terrascript.Resource):\n pass\n\nclass azurerm_automation_credential(terrascript.Resource):\n pass\n\nclass azurerm_automation_dsc_configuration(terrascript.Resource):\n pass\n\nclass azurerm_automation_dsc_nodeconfiguration(terrascript.Resource):\n pass\n\nclass azurerm_automation_module(terrascript.Resource):\n pass\n\nclass azurerm_automation_runbook(terrascript.Resource):\n pass\n\nclass azurerm_automation_schedule(terrascript.Resource):\n pass\n\nclass azurerm_automation_variable_bool(terrascript.Resource):\n pass\n\nclass azurerm_automation_variable_datetime(terrascript.Resource):\n pass\n\nclass azurerm_automation_variable_int(terrascript.Resource):\n pass\n\nclass azurerm_automation_variable_string(terrascript.Resource):\n pass\n\nclass azurerm_autoscale_setting(terrascript.Resource):\n pass\n\nclass azurerm_availability_set(terrascript.Resource):\n pass\n\nclass azurerm_azuread_application(terrascript.Resource):\n pass\n\nclass azurerm_azuread_service_principal_password(terrascript.Resource):\n pass\n\nclass azurerm_azuread_service_principal(terrascript.Resource):\n pass\n\nclass azurerm_batch_account(terrascript.Resource):\n pass\n\nclass azurerm_batch_application(terrascript.Resource):\n pass\n\nclass azurerm_batch_certificate(terrascript.Resource):\n pass\n\nclass azurerm_batch_pool(terrascript.Resource):\n pass\n\nclass azurerm_cdn_endpoint(terrascript.Resource):\n pass\n\nclass azurerm_cdn_profile(terrascript.Resource):\n pass\n\nclass azurerm_cognitive_account(terrascript.Resource):\n pass\n\nclass azurerm_connection_monitor(terrascript.Resource):\n pass\n\nclass azurerm_container_group(terrascript.Resource):\n pass\n\nclass azurerm_container_registry_webhook(terrascript.Resource):\n pass\n\nclass azurerm_container_registry(terrascript.Resource):\n pass\n\nclass azurerm_container_service(terrascript.Resource):\n pass\n\nclass azurerm_cosmosdb_account(terrascript.Resource):\n pass\n\nclass azurerm_cosmosdb_cassandra_keyspace(terrascript.Resource):\n pass\n\nclass azurerm_cosmosdb_mongo_collection(terrascript.Resource):\n 
pass\n\nclass azurerm_cosmosdb_mongo_database(terrascript.Resource):\n pass\n\nclass azurerm_cosmosdb_sql_container(terrascript.Resource):\n pass\n\nclass azurerm_cosmosdb_sql_database(terrascript.Resource):\n pass\n\nclass azurerm_cosmosdb_table(terrascript.Resource):\n pass\n\nclass azurerm_data_factory(terrascript.Resource):\n pass\n\nclass azurerm_data_factory_dataset_mysql(terrascript.Resource):\n pass\n\nclass azurerm_data_factory_dataset_postgresql(terrascript.Resource):\n pass\n\nclass azurerm_data_factory_dataset_sql_server_table(terrascript.Resource):\n pass\n\nclass azurerm_data_factory_linked_service_data_lake_storage_gen2(terrascript.Resource):\n pass\n\nclass azurerm_data_factory_linked_service_mysql(terrascript.Resource):\n pass\n\nclass azurerm_data_factory_linked_service_postgresql(terrascript.Resource):\n pass\n\nclass azurerm_data_factory_linked_service_sql_server(terrascript.Resource):\n pass\n\nclass azurerm_data_factory_pipeline(terrascript.Resource):\n pass\n\nclass azurerm_data_lake_analytics_account(terrascript.Resource):\n pass\n\nclass azurerm_data_lake_analytics_firewall_rule(terrascript.Resource):\n pass\n\nclass azurerm_data_lake_store_file(terrascript.Resource):\n pass\n\nclass azurerm_data_lake_store_firewall_rule(terrascript.Resource):\n pass\n\nclass azurerm_data_lake_store(terrascript.Resource):\n pass\n\nclass azurerm_databricks_workspace(terrascript.Resource):\n pass\n\nclass azurerm_ddos_protection_plan(terrascript.Resource):\n pass\n\nclass azurerm_dev_test_lab(terrascript.Resource):\n pass\n\nclass azurerm_dev_test_schedule(terrascript.Resource):\n pass\n\nclass azurerm_dev_test_linux_virtual_machine(terrascript.Resource):\n pass\n\nclass azurerm_dev_test_policy(terrascript.Resource):\n pass\n\nclass azurerm_dev_test_virtual_network(terrascript.Resource):\n pass\n\nclass azurerm_dev_test_windows_virtual_machine(terrascript.Resource):\n pass\n\nclass azurerm_devspace_controller(terrascript.Resource):\n pass\n\nclass azurerm_dns_a_record(terrascript.Resource):\n pass\n\nclass azurerm_dns_aaaa_record(terrascript.Resource):\n pass\n\nclass azurerm_dns_caa_record(terrascript.Resource):\n pass\n\nclass azurerm_dns_cname_record(terrascript.Resource):\n pass\n\nclass azurerm_dns_mx_record(terrascript.Resource):\n pass\n\nclass azurerm_dns_ns_record(terrascript.Resource):\n pass\n\nclass azurerm_dns_ptr_record(terrascript.Resource):\n pass\n\nclass azurerm_dns_srv_record(terrascript.Resource):\n pass\n\nclass azurerm_dns_txt_record(terrascript.Resource):\n pass\n\nclass azurerm_dns_zone(terrascript.Resource):\n pass\n\nclass azurerm_eventgrid_domain(terrascript.Resource):\n pass\n\nclass azurerm_eventgrid_event_subscription(terrascript.Resource):\n pass\n\nclass azurerm_eventgrid_topic(terrascript.Resource):\n pass\n\nclass azurerm_eventhub_authorization_rule(terrascript.Resource):\n pass\n\nclass azurerm_eventhub_consumer_group(terrascript.Resource):\n pass\n\nclass azurerm_eventhub_namespace_authorization_rule(terrascript.Resource):\n pass\n\nclass azurerm_eventhub_namespace(terrascript.Resource):\n pass\n\nclass azurerm_eventhub(terrascript.Resource):\n pass\n\nclass azurerm_express_route_circuit_authorization(terrascript.Resource):\n pass\n\nclass azurerm_express_route_circuit_peering(terrascript.Resource):\n pass\n\nclass azurerm_express_route_circuit(terrascript.Resource):\n pass\n\nclass azurerm_firewall_application_rule_collection(terrascript.Resource):\n pass\n\nclass azurerm_firewall_nat_rule_collection(terrascript.Resource):\n pass\n\nclass 
azurerm_firewall_network_rule_collection(terrascript.Resource):\n pass\n\nclass azurerm_firewall(terrascript.Resource):\n pass\n\nclass azurerm_function_app(terrascript.Resource):\n pass\n\nclass azurerm_hdinsight_hadoop_cluster(terrascript.Resource):\n pass\n\nclass azurerm_hdinsight_hbase_cluster(terrascript.Resource):\n pass\n\nclass azurerm_hdinsight_interactive_query_cluster(terrascript.Resource):\n pass\n\nclass azurerm_hdinsight_kafka_cluster(terrascript.Resource):\n pass\n\nclass azurerm_hdinsight_ml_services_cluster(terrascript.Resource):\n pass\n\nclass azurerm_hdinsight_rserver_cluster(terrascript.Resource):\n pass\n\nclass azurerm_hdinsight_spark_cluster(terrascript.Resource):\n pass\n\nclass azurerm_hdinsight_storm_cluster(terrascript.Resource):\n pass\n\nclass azurerm_image(terrascript.Resource):\n pass\n\nclass azurerm_iot_dps(terrascript.Resource):\n pass\n\nclass azurerm_iot_dps_certificate(terrascript.Resource):\n pass\n\nclass azurerm_iothub_consumer_group(terrascript.Resource):\n pass\n\nclass azurerm_iothub(terrascript.Resource):\n pass\n\nclass azurerm_iothub_shared_access_policy(terrascript.Resource):\n pass\n\nclass azurerm_key_vault_access_policy(terrascript.Resource):\n pass\n\nclass azurerm_key_vault_certificate(terrascript.Resource):\n pass\n\nclass azurerm_key_vault_key(terrascript.Resource):\n pass\n\nclass azurerm_key_vault_secret(terrascript.Resource):\n pass\n\nclass azurerm_key_vault(terrascript.Resource):\n pass\n\nclass azurerm_kubernetes_cluster(terrascript.Resource):\n pass\n\nclass azurerm_lb_backend_address_pool(terrascript.Resource):\n pass\n\nclass azurerm_lb_nat_pool(terrascript.Resource):\n pass\n\nclass azurerm_lb_nat_rule(terrascript.Resource):\n pass\n\nclass azurerm_lb_probe(terrascript.Resource):\n pass\n\nclass azurerm_lb_outbound_rule(terrascript.Resource):\n pass\n\nclass azurerm_lb_rule(terrascript.Resource):\n pass\n\nclass azurerm_lb(terrascript.Resource):\n pass\n\nclass azurerm_local_network_gateway(terrascript.Resource):\n pass\n\nclass azurerm_log_analytics_solution(terrascript.Resource):\n pass\n\nclass azurerm_log_analytics_linked_service(terrascript.Resource):\n pass\n\nclass azurerm_log_analytics_workspace_linked_service(terrascript.Resource):\n pass\n\nclass azurerm_log_analytics_workspace(terrascript.Resource):\n pass\n\nclass azurerm_logic_app_action_custom(terrascript.Resource):\n pass\n\nclass azurerm_logic_app_action_http(terrascript.Resource):\n pass\n\nclass azurerm_logic_app_trigger_custom(terrascript.Resource):\n pass\n\nclass azurerm_logic_app_trigger_http_request(terrascript.Resource):\n pass\n\nclass azurerm_logic_app_trigger_recurrence(terrascript.Resource):\n pass\n\nclass azurerm_logic_app_workflow(terrascript.Resource):\n pass\n\nclass azurerm_managed_disk(terrascript.Resource):\n pass\n\nclass azurerm_management_group(terrascript.Resource):\n pass\n\nclass azurerm_management_lock(terrascript.Resource):\n pass\n\nclass azurerm_maps_account(terrascript.Resource):\n pass\n\nclass azurerm_mariadb_configuration(terrascript.Resource):\n pass\n\nclass azurerm_mariadb_database(terrascript.Resource):\n pass\n\nclass azurerm_mariadb_firewall_rule(terrascript.Resource):\n pass\n\nclass azurerm_mariadb_server(terrascript.Resource):\n pass\n\nclass azurerm_mariadb_virtual_network_rule(terrascript.Resource):\n pass\n\nclass azurerm_media_services_account(terrascript.Resource):\n pass\n\nclass azurerm_metric_alertrule(terrascript.Resource):\n pass\n\nclass azurerm_monitor_autoscale_setting(terrascript.Resource):\n 
pass\n\nclass azurerm_monitor_action_group(terrascript.Resource):\n pass\n\nclass azurerm_monitor_activity_log_alert(terrascript.Resource):\n pass\n\nclass azurerm_monitor_diagnostic_setting(terrascript.Resource):\n pass\n\nclass azurerm_monitor_log_profile(terrascript.Resource):\n pass\n\nclass azurerm_monitor_metric_alert(terrascript.Resource):\n pass\n\nclass azurerm_monitor_metric_alertrule(terrascript.Resource):\n pass\n\nclass azurerm_mssql_elasticpool(terrascript.Resource):\n pass\n\nclass azurerm_mysql_configuration(terrascript.Resource):\n pass\n\nclass azurerm_mysql_database(terrascript.Resource):\n pass\n\nclass azurerm_mysql_firewall_rule(terrascript.Resource):\n pass\n\nclass azurerm_mysql_server(terrascript.Resource):\n pass\n\nclass azurerm_mysql_virtual_network_rule(terrascript.Resource):\n pass\n\nclass azurerm_network_connection_monitor(terrascript.Resource):\n pass\n\nclass azurerm_network_ddos_protection_plan(terrascript.Resource):\n pass\n\nclass azurerm_network_interface(terrascript.Resource):\n pass\n\nclass azurerm_network_interface_application_gateway_backend_address_pool_association(terrascript.Resource):\n pass\n\nclass azurerm_network_interface_application_security_group_association(terrascript.Resource):\n pass\n\nclass azurerm_network_interface_backend_address_pool_association(terrascript.Resource):\n pass\n\nclass azurerm_network_interface_nat_rule_association(terrascript.Resource):\n pass\n\nclass azurerm_network_packet_capture(terrascript.Resource):\n pass\n\nclass azurerm_network_profile(terrascript.Resource):\n pass\n\nclass azurerm_network_security_group(terrascript.Resource):\n pass\n\nclass azurerm_network_security_rule(terrascript.Resource):\n pass\n\nclass azurerm_network_watcher(terrascript.Resource):\n pass\n\nclass azurerm_notification_hub_authorization_rule(terrascript.Resource):\n pass\n\nclass azurerm_notification_hub_namespace(terrascript.Resource):\n pass\n\nclass azurerm_notification_hub(terrascript.Resource):\n pass\n\nclass azurerm_packet_capture(terrascript.Resource):\n pass\n\nclass azurerm_policy_assignment(terrascript.Resource):\n pass\n\nclass azurerm_policy_definition(terrascript.Resource):\n pass\n\nclass azurerm_policy_set_definition(terrascript.Resource):\n pass\n\nclass azurerm_postgresql_configuration(terrascript.Resource):\n pass\n\nclass azurerm_postgresql_database(terrascript.Resource):\n pass\n\nclass azurerm_postgresql_firewall_rule(terrascript.Resource):\n pass\n\nclass azurerm_postgresql_server(terrascript.Resource):\n pass\n\nclass azurerm_postgresql_virtual_network_rule(terrascript.Resource):\n pass\n\nclass azurerm_private_dns_zone(terrascript.Resource):\n pass\n\nclass azurerm_private_dns_a_record(terrascript.Resource):\n pass\n\nclass azurerm_private_dns_cname_record(terrascript.Resource):\n pass\n\nclass azurerm_public_ip(terrascript.Resource):\n pass\n\nclass azurerm_public_ip_prefix(terrascript.Resource):\n pass\n\nclass azurerm_recovery_services_protected_vm(terrascript.Resource):\n pass\n\nclass azurerm_recovery_services_protection_policy_vm(terrascript.Resource):\n pass\n\nclass azurerm_recovery_services_vault(terrascript.Resource):\n pass\n\nclass azurerm_redis_cache(terrascript.Resource):\n pass\n\nclass azurerm_redis_firewall_rule(terrascript.Resource):\n pass\n\nclass azurerm_relay_namespace(terrascript.Resource):\n pass\n\nclass azurerm_resource_group(terrascript.Resource):\n pass\n\nclass azurerm_role_assignment(terrascript.Resource):\n pass\n\nclass azurerm_role_definition(terrascript.Resource):\n 
pass\n\nclass azurerm_route_table(terrascript.Resource):\n pass\n\nclass azurerm_route(terrascript.Resource):\n pass\n\nclass azurerm_scheduler_job_collection(terrascript.Resource):\n pass\n\nclass azurerm_scheduler_job(terrascript.Resource):\n pass\n\nclass azurerm_search_service(terrascript.Resource):\n pass\n\nclass azurerm_security_center_contact(terrascript.Resource):\n pass\n\nclass azurerm_security_center_subscription_pricing(terrascript.Resource):\n pass\n\nclass azurerm_security_center_workspace(terrascript.Resource):\n pass\n\nclass azurerm_service_fabric_cluster(terrascript.Resource):\n pass\n\nclass azurerm_servicebus_namespace_authorization_rule(terrascript.Resource):\n pass\n\nclass azurerm_servicebus_namespace(terrascript.Resource):\n pass\n\nclass azurerm_servicebus_queue_authorization_rule(terrascript.Resource):\n pass\n\nclass azurerm_servicebus_queue(terrascript.Resource):\n pass\n\nclass azurerm_servicebus_subscription_rule(terrascript.Resource):\n pass\n\nclass azurerm_servicebus_subscription(terrascript.Resource):\n pass\n\nclass azurerm_servicebus_topic_authorization_rule(terrascript.Resource):\n pass\n\nclass azurerm_servicebus_topic(terrascript.Resource):\n pass\n\nclass azurerm_shared_image_gallery(terrascript.Resource):\n pass\n\nclass azurerm_shared_image_version(terrascript.Resource):\n pass\n\nclass azurerm_shared_image(terrascript.Resource):\n pass\n\nclass azurerm_signalr_service(terrascript.Resource):\n pass\n\nclass azurerm_snapshot(terrascript.Resource):\n pass\n\nclass azurerm_sql_active_directory_administrator(terrascript.Resource):\n pass\n\nclass azurerm_sql_database(terrascript.Resource):\n pass\n\nclass azurerm_sql_elasticpool(terrascript.Resource):\n pass\n\nclass azurerm_sql_failover_group(terrascript.Resource):\n pass\n\nclass azurerm_sql_firewall_rule(terrascript.Resource):\n pass\n\nclass azurerm_sql_server(terrascript.Resource):\n pass\n\nclass azurerm_sql_virtual_network_rule(terrascript.Resource):\n pass\n\nclass azurerm_storage_account(terrascript.Resource):\n pass\n\nclass azurerm_storage_blob(terrascript.Resource):\n pass\n\nclass azurerm_storage_container(terrascript.Resource):\n pass\n\nclass azurerm_storage_queue(terrascript.Resource):\n pass\n\nclass azurerm_storage_share(terrascript.Resource):\n pass\n\nclass azurerm_storage_share_directory(terrascript.Resource):\n pass\n\nclass azurerm_storage_table(terrascript.Resource):\n pass\n\nclass azurerm_storage_table_entity(terrascript.Resource):\n pass\n\nclass azurerm_stream_analytics_job(terrascript.Resource):\n pass\n\nclass azurerm_stream_analytics_function_javascript_udf(terrascript.Resource):\n pass\n\nclass azurerm_stream_analytics_output_blob(terrascript.Resource):\n pass\n\nclass azurerm_stream_analytics_output_mssql(terrascript.Resource):\n pass\n\nclass azurerm_stream_analytics_output_eventhub(terrascript.Resource):\n pass\n\nclass azurerm_stream_analytics_output_servicebus_queue(terrascript.Resource):\n pass\n\nclass azurerm_stream_analytics_stream_input_blob(terrascript.Resource):\n pass\n\nclass azurerm_stream_analytics_stream_input_eventhub(terrascript.Resource):\n pass\n\nclass azurerm_stream_analytics_stream_input_iothub(terrascript.Resource):\n pass\n\nclass azurerm_subnet_network_security_group_association(terrascript.Resource):\n pass\n\nclass azurerm_subnet_route_table_association(terrascript.Resource):\n pass\n\nclass azurerm_subnet(terrascript.Resource):\n pass\n\nclass azurerm_template_deployment(terrascript.Resource):\n pass\n\nclass 
azurerm_traffic_manager_endpoint(terrascript.Resource):\n pass\n\nclass azurerm_traffic_manager_profile(terrascript.Resource):\n pass\n\nclass azurerm_user_assigned_identity(terrascript.Resource):\n pass\n\nclass azurerm_virtual_machine_data_disk_attachment(terrascript.Resource):\n pass\n\nclass azurerm_virtual_machine_extension(terrascript.Resource):\n pass\n\nclass azurerm_virtual_machine_scale_set(terrascript.Resource):\n pass\n\nclass azurerm_virtual_machine(terrascript.Resource):\n pass\n\nclass azurerm_virtual_network_gateway_connection(terrascript.Resource):\n pass\n\nclass azurerm_virtual_network_gateway(terrascript.Resource):\n pass\n\nclass azurerm_virtual_network_peering(terrascript.Resource):\n pass\n\nclass azurerm_virtual_network(terrascript.Resource):\n pass\n\nclass azurerm_virtual_wan(terrascript.Resource):\n pass\n\nclass azurerm_recovery_services_fabric(terrascript.Resource):\n pass\n\nclass azurerm_recovery_services_protection_container(terrascript.Resource):\n pass\n\nclass azurerm_recovery_services_replication_policy(terrascript.Resource):\n pass\n\nclass azurerm_recovery_services_protection_container_mapping(terrascript.Resource):\n pass\n\nclass azurerm_recovery_network_mapping(terrascript.Resource):\n pass\n\nclass azurerm_recovery_replicated_vm(terrascript.Resource):\n pass\n", "id": "4747477", "language": "Python", "matching_score": 4.310087203979492, "max_stars_count": 0, "path": "terrascript/azurerm/r.py" }, { "content": "# terrascript/azurerm/d.py\n\nimport terrascript\n\n\nclass azurerm_api_management(terrascript.Data):\n pass\n\nclass azurerm_api_management_api(terrascript.Data):\n pass\n\nclass azurerm_api_management_group(terrascript.Data):\n pass\n\nclass azurerm_api_management_product(terrascript.Data):\n pass\n\nclass azurerm_api_management_user(terrascript.Data):\n pass\n\nclass azurerm_app_service_plan(terrascript.Data):\n pass\n\nclass azurerm_app_service(terrascript.Data):\n pass\n\nclass azurerm_application_insights(terrascript.Data):\n pass\n\nclass azurerm_application_security_group(terrascript.Data):\n pass\n\nclass azurerm_automation_variable_bool(terrascript.Data):\n pass\n\nclass azurerm_automation_variable_datetime(terrascript.Data):\n pass\n\nclass azurerm_automation_variable_int(terrascript.Data):\n pass\n\nclass azurerm_automation_variable_string(terrascript.Data):\n pass\n\nclass azurerm_availability_set(terrascript.Data):\n pass\n\nclass azurerm_azuread_application(terrascript.Data):\n pass\n\nclass azurerm_azuread_service_principal(terrascript.Data):\n pass\n\nclass azurerm_batch_account(terrascript.Data):\n pass\n\nclass azurerm_batch_certificate(terrascript.Data):\n pass\n\nclass azurerm_batch_pool(terrascript.Data):\n pass\n\nclass azurerm_builtin_role_definition(terrascript.Data):\n pass\n\nclass azurerm_cdn_profile(terrascript.Data):\n pass\n\nclass azurerm_client_config(terrascript.Data):\n pass\n\nclass azurerm_kubernetes_service_versions(terrascript.Data):\n pass\n\nclass azurerm_container_registry(terrascript.Data):\n pass\n\nclass azurerm_cosmosdb_account(terrascript.Data):\n pass\n\nclass azurerm_data_lake_store(terrascript.Data):\n pass\n\nclass azurerm_dev_test_lab(terrascript.Data):\n pass\n\nclass azurerm_dev_test_virtual_network(terrascript.Data):\n pass\n\nclass azurerm_dns_zone(terrascript.Data):\n pass\n\nclass azurerm_eventhub_namespace(terrascript.Data):\n pass\n\nclass azurerm_express_route_circuit(terrascript.Data):\n pass\n\nclass azurerm_firewall(terrascript.Data):\n pass\n\nclass 
azurerm_image(terrascript.Data):\n pass\n\nclass azurerm_hdinsight_cluster(terrascript.Data):\n pass\n\nclass azurerm_maps_account(terrascript.Data):\n pass\n\nclass azurerm_key_vault_access_policy(terrascript.Data):\n pass\n\nclass azurerm_key_vault_key(terrascript.Data):\n pass\n\nclass azurerm_key_vault_secret(terrascript.Data):\n pass\n\nclass azurerm_key_vault(terrascript.Data):\n pass\n\nclass azurerm_kubernetes_cluster(terrascript.Data):\n pass\n\nclass azurerm_lb(terrascript.Data):\n pass\n\nclass azurerm_lb_backend_address_pool(terrascript.Data):\n pass\n\nclass azurerm_log_analytics_workspace(terrascript.Data):\n pass\n\nclass azurerm_logic_app_workflow(terrascript.Data):\n pass\n\nclass azurerm_managed_disk(terrascript.Data):\n pass\n\nclass azurerm_management_group(terrascript.Data):\n pass\n\nclass azurerm_monitor_action_group(terrascript.Data):\n pass\n\nclass azurerm_monitor_diagnostic_categories(terrascript.Data):\n pass\n\nclass azurerm_monitor_log_profile(terrascript.Data):\n pass\n\nclass azurerm_mssql_elasticpool(terrascript.Data):\n pass\n\nclass azurerm_network_interface(terrascript.Data):\n pass\n\nclass azurerm_network_security_group(terrascript.Data):\n pass\n\nclass azurerm_network_watcher(terrascript.Data):\n pass\n\nclass azurerm_notification_hub_namespace(terrascript.Data):\n pass\n\nclass azurerm_notification_hub(terrascript.Data):\n pass\n\nclass azurerm_platform_image(terrascript.Data):\n pass\n\nclass azurerm_policy_definition(terrascript.Data):\n pass\n\nclass azurerm_public_ip(terrascript.Data):\n pass\n\nclass azurerm_public_ips(terrascript.Data):\n pass\n\nclass azurerm_recovery_services_vault(terrascript.Data):\n pass\n\nclass azurerm_recovery_services_protection_policy_vm(terrascript.Data):\n pass\n\nclass azurerm_redis_cache(terrascript.Data):\n pass\n\nclass azurerm_resource_group(terrascript.Data):\n pass\n\nclass azurerm_role_definition(terrascript.Data):\n pass\n\nclass azurerm_route_table(terrascript.Data):\n pass\n\nclass azurerm_scheduler_job_collection(terrascript.Data):\n pass\n\nclass azurerm_servicebus_namespace(terrascript.Data):\n pass\n\nclass azurerm_shared_image_gallery(terrascript.Data):\n pass\n\nclass azurerm_shared_image_version(terrascript.Data):\n pass\n\nclass azurerm_shared_image(terrascript.Data):\n pass\n\nclass azurerm_snapshot(terrascript.Data):\n pass\n\nclass azurerm_sql_server(terrascript.Data):\n pass\n\nclass azurerm_stream_analytics_job(terrascript.Data):\n pass\n\nclass azurerm_storage_account_sas(terrascript.Data):\n pass\n\nclass azurerm_storage_account(terrascript.Data):\n pass\n\nclass azurerm_subnet(terrascript.Data):\n pass\n\nclass azurerm_subscription(terrascript.Data):\n pass\n\nclass azurerm_subscriptions(terrascript.Data):\n pass\n\nclass azurerm_traffic_manager_geographical_location(terrascript.Data):\n pass\n\nclass azurerm_user_assigned_identity(terrascript.Data):\n pass\n\nclass azurerm_virtual_machine(terrascript.Data):\n pass\n\nclass azurerm_virtual_network_gateway(terrascript.Data):\n pass\n\nclass azurerm_virtual_network_gateway_connection(terrascript.Data):\n pass\n\nclass azurerm_virtual_network(terrascript.Data):\n pass\n", "id": "7260305", "language": "Python", "matching_score": 1.3722566366195679, "max_stars_count": 0, "path": "terrascript/azurerm/d.py" }, { "content": "# terrascript/azure/r.py\n\nimport terrascript\n\n\nclass azure_instance(terrascript.Resource):\n pass\n\nclass azure_affinity_group(terrascript.Resource):\n pass\n\nclass azure_data_disk(terrascript.Resource):\n 
pass\n\nclass azure_sql_database_server(terrascript.Resource):\n pass\n\nclass azure_sql_database_server_firewall_rule(terrascript.Resource):\n pass\n\nclass azure_sql_database_service(terrascript.Resource):\n pass\n\nclass azure_hosted_service(terrascript.Resource):\n pass\n\nclass azure_storage_service(terrascript.Resource):\n pass\n\nclass azure_storage_container(terrascript.Resource):\n pass\n\nclass azure_storage_blob(terrascript.Resource):\n pass\n\nclass azure_storage_queue(terrascript.Resource):\n pass\n\nclass azure_virtual_network(terrascript.Resource):\n pass\n\nclass azure_dns_server(terrascript.Resource):\n pass\n\nclass azure_local_network_connection(terrascript.Resource):\n pass\n\nclass azure_security_group(terrascript.Resource):\n pass\n\nclass azure_security_group_rule(terrascript.Resource):\n pass\n", "id": "4432460", "language": "Python", "matching_score": 1.098124623298645, "max_stars_count": 0, "path": "terrascript/azure/r.py" }, { "content": "# terrascript/azure/d.py\n\nimport terrascript\n\n", "id": "391025", "language": "Python", "matching_score": 0.12557575106620789, "max_stars_count": 0, "path": "terrascript/azure/d.py" }, { "content": "# terrascript/mysql/d.py\n\nimport terrascript\n\n", "id": "7172894", "language": "Python", "matching_score": 1.1304162740707397, "max_stars_count": 0, "path": "terrascript/mysql/d.py" }, { "content": "# terrascript/mysql/__init__.py\n\nimport terrascript\n\nclass mysql(terrascript.Provider):\n pass", "id": "11485517", "language": "Python", "matching_score": 0.03191063180565834, "max_stars_count": 0, "path": "terrascript/mysql/__init__.py" }, { "content": "# terrascript/rabbitmq/r.py\n\nimport terrascript\n\n\nclass rabbitmq_binding(terrascript.Resource):\n pass\n\nclass rabbitmq_exchange(terrascript.Resource):\n pass\n\nclass rabbitmq_permissions(terrascript.Resource):\n pass\n\nclass rabbitmq_policy(terrascript.Resource):\n pass\n\nclass rabbitmq_queue(terrascript.Resource):\n pass\n\nclass rabbitmq_user(terrascript.Resource):\n pass\n\nclass rabbitmq_vhost(terrascript.Resource):\n pass\n", "id": "6164031", "language": "Python", "matching_score": 1.1318467855453491, "max_stars_count": 0, "path": "terrascript/rabbitmq/r.py" }, { "content": "# terrascript/rabbitmq/__init__.py\n\nimport terrascript\n\nclass rabbitmq(terrascript.Provider):\n pass", "id": "4433125", "language": "Python", "matching_score": 1.1056170463562012, "max_stars_count": 0, "path": "terrascript/rabbitmq/__init__.py" }, { "content": "# terrascript/rabbitmq/d.py\n\nimport terrascript\n\n", "id": "8220568", "language": "Python", "matching_score": 0.03323405981063843, "max_stars_count": 0, "path": "terrascript/rabbitmq/d.py" }, { "content": "# terrascript/icinga2/r.py\n\nimport terrascript\n\n\nclass icinga2_host(terrascript.Resource):\n pass\n\nclass icinga2_hostgroup(terrascript.Resource):\n pass\n\nclass icinga2_checkcommand(terrascript.Resource):\n pass\n\nclass icinga2_service(terrascript.Resource):\n pass\n\nclass icinga2_user(terrascript.Resource):\n pass\n\nclass icinga2_notification(terrascript.Resource):\n pass\n", "id": "12667041", "language": "Python", "matching_score": 1.12619149684906, "max_stars_count": 0, "path": "terrascript/icinga2/r.py" }, { "content": "# terrascript/icinga2/__init__.py\n\nimport terrascript\n\nclass icinga2(terrascript.Provider):\n pass", "id": "6290300", "language": "Python", "matching_score": 1.1056170463562012, "max_stars_count": 0, "path": "terrascript/icinga2/__init__.py" }, { "content": "# 
terrascript/icinga2/d.py\n\nimport terrascript\n\n", "id": "751909", "language": "Python", "matching_score": 0.029537051916122437, "max_stars_count": 0, "path": "terrascript/icinga2/d.py" }, { "content": "# terrascript/profitbricks/r.py\n\nimport terrascript\n\n\nclass profitbricks_datacenter(terrascript.Resource):\n pass\n\nclass profitbricks_ipblock(terrascript.Resource):\n pass\n\nclass profitbricks_firewall(terrascript.Resource):\n pass\n\nclass profitbricks_lan(terrascript.Resource):\n pass\n\nclass profitbricks_loadbalancer(terrascript.Resource):\n pass\n\nclass profitbricks_nic(terrascript.Resource):\n pass\n\nclass profitbricks_server(terrascript.Resource):\n pass\n\nclass profitbricks_volume(terrascript.Resource):\n pass\n\nclass profitbricks_group(terrascript.Resource):\n pass\n\nclass profitbricks_share(terrascript.Resource):\n pass\n\nclass profitbricks_user(terrascript.Resource):\n pass\n\nclass profitbricks_snapshot(terrascript.Resource):\n pass\n\nclass profitbricks_ipfailover(terrascript.Resource):\n pass\n", "id": "6014618", "language": "Python", "matching_score": 1.8880691528320312, "max_stars_count": 0, "path": "terrascript/profitbricks/r.py" }, { "content": "# terrascript/profitbricks/d.py\n\nimport terrascript\n\n\nclass profitbricks_datacenter(terrascript.Data):\n pass\n\nclass profitbricks_location(terrascript.Data):\n pass\n\nclass profitbricks_image(terrascript.Data):\n pass\n\nclass profitbricks_resource(terrascript.Data):\n pass\n\nclass profitbricks_snapshot(terrascript.Data):\n pass\n", "id": "8394077", "language": "Python", "matching_score": 0.2969226539134979, "max_stars_count": 0, "path": "terrascript/profitbricks/d.py" }, { "content": "# terrascript/fastly/r.py\n\nimport terrascript\n\n\nclass fastly_service_v1(terrascript.Resource):\n pass\n\nclass fastly_service_acl_entries_v1(terrascript.Resource):\n pass\n\nclass fastly_service_dictionary_items_v1(terrascript.Resource):\n pass\n\nclass fastly_service_dynamic_snippet_content_v1(terrascript.Resource):\n pass\n", "id": "2233006", "language": "Python", "matching_score": 1.1225167512893677, "max_stars_count": 0, "path": "terrascript/fastly/r.py" }, { "content": "# terrascript/fastly/__init__.py\n\nimport terrascript\n\nclass fastly(terrascript.Provider):\n pass", "id": "12460797", "language": "Python", "matching_score": 0.111897312104702, "max_stars_count": 0, "path": "terrascript/fastly/__init__.py" }, { "content": "# terrascript/librato/d.py\n\nimport terrascript\n\n", "id": "1898689", "language": "Python", "matching_score": 1.12619149684906, "max_stars_count": 0, "path": "terrascript/librato/d.py" }, { "content": "# terrascript/librato/__init__.py\n\nimport terrascript\n\nclass librato(terrascript.Provider):\n pass", "id": "3179923", "language": "Python", "matching_score": 0.111897312104702, "max_stars_count": 0, "path": "terrascript/librato/__init__.py" }, { "content": "# terrascript/spotinst/d.py\n\nimport terrascript\n\n", "id": "3957275", "language": "Python", "matching_score": 1.1318467855453491, "max_stars_count": 0, "path": "terrascript/spotinst/d.py" }, { "content": "# terrascript/spotinst/__init__.py\n\nimport terrascript\n\nclass spotinst(terrascript.Provider):\n pass", "id": "8467369", "language": "Python", "matching_score": 1.1056170463562012, "max_stars_count": 0, "path": "terrascript/spotinst/__init__.py" }, { "content": "# terrascript/spotinst/r.py\n\nimport terrascript\n\n", "id": "4466297", "language": "Python", "matching_score": 1.1318467855453491, "max_stars_count": 0, "path": 
"terrascript/spotinst/r.py" } ]
1.119058
sjansen
[ { "content": "import a\nimport a.b\nfrom a.b import c as abc\n\na.foo()\na.b.foo()\nabc.foo()\n\n\ndef foo():\n from b import foo\n\n foo()\n\n\nfoo()\n\n\nclass Bar(object):\n import c\n\n def foo(self):\n self.c.foo()\n\n\nBar().foo()\n\n\nfrom d import e\n\nprint(\"e:\", type(e))\n\n\nimport d.e\n\nd.foo()\nd.e.foo()\n\n\nfrom d import e\n\nprint(\"e:\", type(e))\n\n\ntry:\n import z\nexcept:\n z = None\n", "id": "10733514", "language": "Python", "matching_score": 0, "max_stars_count": 0, "path": "tests/testdata/example.py" }, { "content": "\"\"\"A setuptools based setup module.\n\nSee:\nhttps://packaging.python.org/guides/distributing-packages-using-setuptools/\n\"\"\"\n\n# Always prefer setuptools over distutils\nfrom setuptools import setup, find_packages\nfrom os import path\n\nhere = path.abspath(path.dirname(__file__))\n\nwith open(path.join(here, 'README.md'), encoding='utf-8') as f:\n long_description = f.read()\n\nsetup(\n name='boundaries',\n\n # https://packaging.python.org/en/latest/single_source_version.html\n version='0.1.0',\n\n description='Check and enforce code organization',\n long_description=long_description,\n long_description_content_type='text/markdown',\n url='https://github.com/sjansen/boundaries-py',\n author='<NAME>',\n author_email='<EMAIL>',\n\n # https://pypi.org/classifiers/\n classifiers=[\n # 3 - Alpha\n # 4 - Beta\n # 5 - Production/Stable\n 'Development Status :: 3 - Alpha',\n 'Intended Audience :: Developers',\n 'Topic :: Software Development :: Quality Assurance',\n 'License :: OSI Approved :: MIT License',\n\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n ],\n keywords='import policy project layout',\n package_dir={'': 'src'},\n packages=find_packages(where='src'),\n python_requires='>=3.5, <4',\n # https://packaging.python.org/en/latest/requirements.html\n install_requires=['parso>=0.7.0,<0.8'],\n entry_points={\n 'console_scripts': [\n 'py.boundaries=boundaries:main',\n ],\n },\n project_urls={\n 'Bug Reports': 'https://github.com/sjansen/boundaries-py/issues',\n 'Source': 'https://github.com/sjansen/boundaries-py/',\n },\n)\n", "id": "1196799", "language": "Python", "matching_score": 1.159152865409851, "max_stars_count": 0, "path": "setup.py" }, { "content": "import sys\n\nimport parso\n\n__all__ = [\"main\", \"Module\"]\n\n\ndef main():\n loader = Loader()\n for arg in sys.argv[1:]:\n module = loader.load(arg)\n print(module.path, module.is_package)\n for x in sorted(module.exports):\n print(\" \", x)\n print(\" --\")\n for x in sorted(module.imports):\n print(\" \", x)\n\n\nclass Export:\n def __init__(self, name, is_imported):\n self.name = name\n self.is_imported = is_imported\n self._as_tuple = None\n\n @property\n def as_tuple(self):\n if self._as_tuple is None:\n self._as_tuple = (self.name, self.is_imported)\n return self._as_tuple\n\n def __eq__(self, other):\n return type(self) == type(other) and self.as_tuple == other.as_tuple\n\n def __hash__(self):\n return hash(self.as_tuple)\n\n def __lt__(self, other):\n return type(self) == type(other) and self.as_tuple < other.as_tuple\n\n def __repr__(self):\n return repr(self.as_tuple)\n\n\nclass Import:\n def __init__(self, dotted_name, is_inline, is_module):\n self.dotted_name = dotted_name\n self.is_inline = is_inline\n self.is_module = is_module\n self._as_tuple = None\n\n @property\n def as_tuple(self):\n if self._as_tuple is None:\n self._as_tuple = 
(self.dotted_name, self.is_inline, self.is_module)\n return self._as_tuple\n\n def __eq__(self, other):\n return type(self) == type(other) and self.as_tuple == other.as_tuple\n\n def __hash__(self):\n return hash(self.as_tuple)\n\n def __lt__(self, other):\n return type(self) == type(other) and self.as_tuple < other.as_tuple\n\n def __repr__(self):\n return repr(self.as_tuple)\n\n\nclass Loader:\n def __init__(self, sys_path=None):\n self._cache = {}\n self._sys_path = sys_path\n\n def load(self, path):\n with open(path) as f:\n code = f.read()\n\n module = self.parse(code, path)\n # TODO cache module\n return module\n\n def parse(self, code, path, is_package=None):\n tree = parso.parse(code)\n return Module(path, tree, is_package, self)\n\n\nclass Module:\n def __init__(self, path, tree, is_package, loader):\n self.is_package = is_package\n self.loader = loader\n self.path = path\n self.tree = tree\n self._exports = None\n self._imports = None\n\n @property\n def exports(self):\n if self._exports is not None:\n return self._exports\n\n def walk(tree, is_imported):\n if tree.type == \"name\":\n if tree.is_definition():\n exports.add(Export(tree.value, is_imported))\n elif tree.type in (\"classdef\", \"funcdef\"):\n exports.add(Export(tree.name.value, is_imported))\n elif tree.type in (\"import_from\", \"import_name\"):\n for subtree in tree.children:\n walk(subtree, True)\n elif hasattr(tree, \"children\"):\n for subtree in tree.children:\n walk(subtree, is_imported)\n\n exports = set()\n walk(self.tree, False)\n self._exports = exports\n\n return exports\n\n @property\n def imports(self):\n if self._imports is not None:\n return self._imports\n\n def walk(tree, is_inline):\n if tree.type == \"import_from\":\n for path in tree.get_paths():\n dotted_name = \".\".join(name.value for name in path)\n imports.add(Import(dotted_name, is_inline, False))\n elif tree.type == \"import_name\":\n for path in tree.get_paths():\n dotted_name = \".\".join(name.value for name in path)\n imports.add(Import(dotted_name, is_inline, True))\n elif tree.type in (\"classdef\", \"funcdef\"):\n for subtree in tree.children:\n walk(subtree, True)\n elif hasattr(tree, \"children\"):\n for subtree in tree.children:\n walk(subtree, is_inline)\n\n imports = set()\n walk(self.tree, False)\n self._imports = imports\n\n return imports\n", "id": "4915301", "language": "Python", "matching_score": 1.1345654726028442, "max_stars_count": 0, "path": "src/boundaries/__init__.py" }, { "content": "# Copyright Amazon.com, Inc. or its affiliates. 
All Rights Reserved.\n# SPDX-License-Identifier: Apache-2.0\n\nfrom subprocess import Popen\nfrom unittest import TestCase, main\nfrom pathlib import Path\nimport time\n\nimport requests\n\nSLEEP_TIME = 2\nDEFAULT_1P_ENTRYPOINT = \"/lambda-entrypoint.sh\"\n\nclass TestEndToEnd(TestCase):\n\n @classmethod\n def setUpClass(cls):\n testdata_path = Path(__file__).resolve().parents[1].joinpath(\"testdata\")\n dockerfile_path = testdata_path.joinpath(\"Dockerfile-allinone\")\n cls.image_name = \"aws-lambda-local:testing\"\n cls.path_to_binary = Path().resolve().joinpath(\"bin\")\n\n\n # build image\n build_cmd = [\"docker\", \"build\", \"-t\", cls.image_name, \"-f\", str(dockerfile_path), str(testdata_path)]\n Popen(build_cmd).communicate()\n\n @classmethod\n def tearDownClass(cls):\n cmds_to_delete_containers = [\"docker rm -f envvarcheck\", \"docker rm -f testing\", \"docker rm -f timeout\", \"docker rm -f exception\"]\n\n for cmd in cmds_to_delete_containers:\n Popen(cmd.split(' ')).communicate()\n\n Popen(f\"docker rmi {cls.image_name}\".split(' ')).communicate()\n\n\n def test_env_var_with_equal_sign(self):\n cmd = f\"docker run --name envvarcheck -d -v {self.path_to_binary}:/local-lambda-runtime-server -p 9003:8080 --entrypoint /local-lambda-runtime-server/aws-lambda-rie {self.image_name} {DEFAULT_1P_ENTRYPOINT} main.check_env_var_handler\"\n\n Popen(cmd.split(' ')).communicate()\n\n # sleep to give enough time for the endpoint to be up to curl\n time.sleep(SLEEP_TIME)\n\n r = requests.post(\"http://localhost:9003/2015-03-31/functions/function/invocations\", json={})\n self.assertEqual(b'\"4=4\"', r.content)\n\n def test_two_invokes(self):\n cmd = f\"docker run --name testing -d -v {self.path_to_binary}:/local-lambda-runtime-server -p 9000:8080 --entrypoint /local-lambda-runtime-server/aws-lambda-rie {self.image_name} {DEFAULT_1P_ENTRYPOINT} main.success_handler\"\n\n Popen(cmd.split(' ')).communicate()\n\n # sleep to give enough time for the endpoint to be up to curl\n time.sleep(SLEEP_TIME)\n\n r = requests.post(\"http://localhost:9000/2015-03-31/functions/function/invocations\", json={})\n self.assertEqual(b'\"My lambda ran succesfully\"', r.content)\n\n # Make sure we can invoke the function twice\n r = requests.post(\"http://localhost:9000/2015-03-31/functions/function/invocations\", json={})\n self.assertEqual(b'\"My lambda ran succesfully\"', r.content)\n\n\n def test_timeout_invoke(self):\n cmd = f\"docker run --name timeout -d --env AWS_LAMBDA_FUNCTION_TIMEOUT=1 -v {self.path_to_binary}:/local-lambda-runtime-server -p 9001:8080 --entrypoint /local-lambda-runtime-server/aws-lambda-rie {self.image_name} {DEFAULT_1P_ENTRYPOINT} main.sleep_handler\"\n\n Popen(cmd.split(' ')).communicate()\n\n # sleep to give enough time for the endpoint to be up to curl\n time.sleep(SLEEP_TIME)\n\n r = requests.post(\"http://localhost:9001/2015-03-31/functions/function/invocations\", json={})\n self.assertEqual(b\"Task timed out after 1.00 seconds\", r.content)\n\n def test_exception_returned(self):\n cmd = f\"docker run --name exception -d -v {self.path_to_binary}:/local-lambda-runtime-server -p 9002:8080 --entrypoint /local-lambda-runtime-server/aws-lambda-rie {self.image_name} {DEFAULT_1P_ENTRYPOINT} main.exception_handler\"\n\n Popen(cmd.split(' ')).communicate()\n\n # sleep to give enough time for the endpoint to be up to curl\n time.sleep(SLEEP_TIME)\n\n r = requests.post(\"http://localhost:9002/2015-03-31/functions/function/invocations\", json={})\n 
self.assertEqual(b'{\"errorMessage\": \"Raising an exception\", \"errorType\": \"Exception\", \"stackTrace\": [\" File \\\\\"/var/task/main.py\\\\\", line 13, in exception_handler\\\\n raise Exception(\\\\\"Raising an exception\\\\\")\\\\n\"]}', r.content)\n\nclass TestPython36Runtime(TestCase):\n\n @classmethod\n def setUpClass(cls):\n testdata_path = Path(__file__).resolve().parents[1].joinpath(\"testdata\")\n dockerfile_path = testdata_path.joinpath(\"Dockerfile-python36\")\n cls.image_name = \"aws-lambda-local:testing-py36\"\n cls.path_to_binary = Path().resolve().joinpath(\"bin\")\n\n\n # build image\n build_cmd = [\"docker\", \"build\", \"-t\", cls.image_name, \"-f\", str(dockerfile_path), str(testdata_path)]\n Popen(build_cmd).communicate()\n\n @classmethod\n def tearDownClass(cls):\n cmds_to_delete_containers = [\"docker rm -f testing\", \"docker rm -f assert-overwritten\"]\n\n for cmd in cmds_to_delete_containers:\n Popen(cmd.split(' ')).communicate()\n\n Popen(f\"docker rmi {cls.image_name}\".split(' ')).communicate()\n\n def test_invoke_with_pre_runtime_api_runtime(self):\n cmd = f\"docker run --name testing -d -v {self.path_to_binary}:/local-lambda-runtime-server -p 9000:8080 --entrypoint /local-lambda-runtime-server/aws-lambda-rie {self.image_name} {DEFAULT_1P_ENTRYPOINT} main.success_handler\"\n\n Popen(cmd.split(' ')).communicate()\n\n # sleep to give enough time for the endpoint to be up to curl\n time.sleep(SLEEP_TIME)\n\n r = requests.post(\"http://localhost:9000/2015-03-31/functions/function/invocations\", json={})\n self.assertEqual(b'\"My lambda ran succesfully\"', r.content)\n\n def test_function_name_is_overridden(self):\n cmd = f\"docker run --name assert-overwritten -d --env AWS_LAMBDA_FUNCTION_NAME=MyCoolName -v {self.path_to_binary}:/local-lambda-runtime-server -p 9009:8080 --entrypoint /local-lambda-runtime-server/aws-lambda-rie {self.image_name} {DEFAULT_1P_ENTRYPOINT} main.assert_env_var_is_overwritten\"\n\n Popen(cmd.split(' ')).communicate()\n\n # sleep to give enough time for the endpoint to be up to curl\n time.sleep(SLEEP_TIME)\n\n r = requests.post(\"http://localhost:9009/2015-03-31/functions/function/invocations\", json={})\n self.assertEqual(b'\"My lambda ran succesfully\"', r.content)\n\n\nif __name__ == \"__main__\":\n main()", "id": "11423467", "language": "Python", "matching_score": 1.2366348505020142, "max_stars_count": 0, "path": "test/integration/local_lambda/end-to-end-test.py" }, { "content": "from time import sleep\n\ndef test__b__1():\n sleep(0.25)\n\n\ndef test__b__2():\n sleep(0.25)\n assert 'a' == 'b'\n\n\ndef test__b__3(invalid):\n sleep(0.25)\n\n\ndef test__b__4():\n sleep(0.25)\n raise NotImplemented\n\n\ndef test__b__5():\n sleep(0.25)\n assert False\n\n\ndef test__b__6():\n sleep(0.25)\n\n\ndef test__b__7():\n sleep(0.25)\n\n\ndef test__b__8():\n sleep(0.25)\n", "id": "10378916", "language": "Python", "matching_score": 1.6950690746307373, "max_stars_count": 3, "path": "spikes/pytest/testdata/example/b_test.py" }, { "content": "from time import sleep\n\ndef test__d__1():\n sleep(0.25)\n\n\ndef test__d__2():\n sleep(0.25)\n\n\ndef test__d__3():\n sleep(0.25)\n\n\ndef test__d__4():\n sleep(0.25)\n\n\ndef test__d__5():\n sleep(0.25)\n raise NotImplementedError\n\n\ndef test__d__6():\n sleep(0.25)\n\n\ndef test__d__7():\n sleep(0.25)\n\n\ndef test__d__8():\n sleep(0.25)\n", "id": "7119731", "language": "Python", "matching_score": 1.556480884552002, "max_stars_count": 3, "path": "spikes/pytest/testdata/example/d_test.py" }, { "content": "from 
time import sleep\n\ndef test__g__1():\n sleep(0.25)\n raise RuntimeError\n\n\ndef test__g__2():\n sleep(0.25)\n raise SystemError\n\n\ndef test__g__3():\n sleep(0.25)\n raise ZeroDivisionError\n", "id": "3161047", "language": "Python", "matching_score": 1.7325799465179443, "max_stars_count": 3, "path": "spikes/pytest/testdata/example/g_test.py" }, { "content": "from time import sleep\n\ndef test__e__1():\n sleep(0.25)\n\n\ndef test__e__2():\n sleep(0.25)\n\n\ndef test__e__3():\n sleep(0.25)\n\n\ndef test__e__4():\n sleep(0.25)\n\n\ndef test__e__5():\n sleep(0.25)\n\n\ndef test__e__6():\n sleep(0.25)\n raise FloatingPointError\n", "id": "5132657", "language": "Python", "matching_score": 1.2423694133758545, "max_stars_count": 3, "path": "spikes/pytest/testdata/example/e_test.py" }, { "content": "from sys import stderr\nfrom time import sleep\n\n\nstderr.write(\"loading... \")\n\n\ndef test__stderr__1():\n sleep(0.25)\n stderr.write(\"Spoon!\\n\")\n\n\ndef test__stderr__2():\n sleep(0.25)\n stderr.write(\"Kilroy was here.\\n\")\n assert False\n\n\nstderr.write(\"done\\n\")\n", "id": "336622", "language": "Python", "matching_score": 1.2423694133758545, "max_stars_count": 3, "path": "spikes/pytest/testdata/example/stderr_test.py" }, { "content": "from time import sleep\n\nimport pytest\n\n\n@pytest.mark.skip\ndef test__mark__1():\n sleep(0.25)\n\n\n@pytest.mark.xfail\ndef test__mark__2():\n sleep(0.25)\n assert False\n", "id": "6984093", "language": "Python", "matching_score": 1.2843786478042603, "max_stars_count": 3, "path": "spikes/pytest/testdata/example/mark_test.py" }, { "content": "from time import sleep\n\ndef test__i__1():\n sleep(0.25)\n\n\ndef test__i__2():\n sleep(0.25)\n\n\ndef test__i__3():\n sleep(0.25)\n", "id": "6935372", "language": "Python", "matching_score": 1.3230009078979492, "max_stars_count": 3, "path": "spikes/pytest/testdata/example/i_test.py" }, { "content": "from time import sleep\n\ndef test__h__1():\n sleep(0.25)\n\n\ndef test__h__2():\n sleep(0.25)\n raise ValueError\n\n\ndef test__h__3():\n sleep(0.25)\n\n\ndef test__h__4():\n sleep(0.25)\n\n\ndef test__h__5():\n sleep(0.25)\n", "id": "6110544", "language": "Python", "matching_score": 1.3080943822860718, "max_stars_count": 3, "path": "spikes/pytest/testdata/example/h_test.py" }, { "content": "from time import sleep\n\ndef test__c__1():\n sleep(0.25)\n assert not True\n\n\ndef test__c__2():\n sleep(0.25)\n\n\ndef test__c__3():\n sleep(0.25)\n\n\ndef test__c__4():\n sleep(0.25)\n", "id": "8992665", "language": "Python", "matching_score": 1.3331234455108643, "max_stars_count": 3, "path": "spikes/pytest/testdata/example/c_test.py" }, { "content": "from time import sleep\n\ndef test__a__1():\n sleep(0.25)\n\n\ndef test__a__2():\n sleep(0.25)\n\n\ndef test__a__3():\n sleep(0.25)\n assert 1 == 2\n\n\ndef test__a__4():\n sleep(0.25)\n\n\ndef test__a__5():\n sleep(0.25)\n\n\ndef test__a__6():\n sleep(0.25)\n", "id": "12146631", "language": "Python", "matching_score": 1.3403993844985962, "max_stars_count": 3, "path": "spikes/pytest/testdata/example/a_test.py" }, { "content": "from time import sleep\n\ndef test__f__1():\n sleep(0.25)\n\n\ndef test__f__2():\n sleep(0.25)\n\n\ndef test__f__3():\n sleep(0.25)\n\n\ndef test__f__4():\n sleep(0.25)\n\n\ndef test__f__5():\n sleep(0.25)\n\n\ndef test__f__6():\n sleep(0.25)\n\n\ndef test__f__7():\n sleep(0.25)\n", "id": "5783480", "language": "Python", "matching_score": 0.03605134040117264, "max_stars_count": 3, "path": "spikes/pytest/testdata/example/f_test.py" }, { "content": 
"import argparse\nimport csv\nimport re\nimport sys\nimport textwrap\nfrom collections import defaultdict\n\nfrom django.core.management import BaseCommand\nfrom django.template import Context, Template\n\ntry:\n from django.urls.converters import get_converters\n\n converters = get_converters()\nexcept:\n converters = {}\n\n\nANON_REGEX_PART = re.compile(r\"^(\\(\\?\\!(?P<reject>[^)]+)\\))?(?P<regex>.+)$\")\nNAMED_REGEX_PART = re.compile(r\"^(\\(\\?\\!(?P<reject>[^)]+)\\))?\\(\\?P<(?P<name>[^>]+)>\\(?(?P<regex>[^)]+)\\)?\\)$\")\nNAMED_TYPE_PART = re.compile(r\"^<((?P<type>[^:>]+):)?(?P<name>[^:>]+)>$\")\nPLAIN_PART = re.compile(r\"^[^.*?+^$|\\\\[\\](){}]+$\")\nPLAIN_PART_GUESS = re.compile(r\"^[.]?[a-zA-Z][-_a-zA-Z0-9]+(\\\\?[.][a-zA-Z]+)?$\")\nSUFFIX_EMPTY = re.compile(r\"[a-zA-Z0-9]\\$$\")\n\n\nclass Command(BaseCommand):\n def add_arguments(self, parser):\n parser.add_argument(\n \"-i\", \"--input\", nargs=\"?\", type=argparse.FileType(\"r\"),\n )\n parser.add_argument(\n \"-o\",\n \"--output\",\n nargs=\"?\",\n default=sys.stdout,\n type=argparse.FileType(\"w\"),\n )\n parser.add_argument(\"--self-test\", action=\"store_true\")\n parser.add_argument(\n \"-t\", \"--test-values\", nargs=\"?\", type=argparse.FileType(\"r\"),\n )\n parser.add_argument(\n \"-u\", \"--unknown-regexes\", nargs=\"?\", type=argparse.FileType(\"w\"),\n )\n\n def handle(self, *args, **options):\n output = options[\"output\"]\n if options[\"self_test\"]:\n self_test(output)\n return\n\n patterns = self.__load_patterns(options[\"input\"])\n test_values = self.__load_test_values(options[\"test_values\"])\n self.__render(output, patterns, test_values)\n if options[\"unknown_regexes\"]:\n self.__dump_regexes(options[\"unknown_regexes\"], patterns, test_values)\n\n def __dump_regexes(self, output, patterns, test_values):\n regexes = set()\n for p in patterns:\n for key in p.regexes:\n if key not in test_values:\n regexes.update(p.regexes)\n w = csv.writer(output)\n w.writerow([\"RegEx\", \"Name\", \"Example\"])\n for row in sorted(regexes):\n w.writerow(row)\n\n def __load_patterns(self, input):\n if not input:\n return [Pattern(tc, tc) for tc in TEST_CASES.keys()]\n\n reader = csv.DictReader(input)\n return [\n Pattern(\n row[\"Handler\"],\n row[\"Pattern\"],\n test_cases={\n row[\"Test Case\"]: row[\"Expected\"],\n }\n )\n if row[\"Test Case\"] else\n Pattern(\n row[\"Handler\"],\n row[\"Pattern\"],\n )\n for row in reader\n ]\n\n def __load_test_values(self, input):\n test_values = defaultdict(lambda: set())\n if input:\n reader = csv.DictReader(input)\n for row in reader:\n regex = row[\"RegEx\"]\n name = row.get(\"Name\", \"\")\n value = row.get(\"Example\")\n if regex and value:\n test_values[(regex, name)].add(value)\n for k, v in test_values.items():\n test_values[k] = sorted(v)\n return test_values\n\n def __render(self, output, patterns, test_values):\n template = Template(URL_TEMPLATE)\n for p in patterns:\n test_cases = create_test_cases(p, test_values)\n for k, v in p.test_cases.items():\n test_cases[k] = v\n context = Context({\n \"pattern\": p,\n \"test_cases\": test_cases,\n })\n output.write(template.render(context))\n\n\ndef create_test_cases(pattern, test_values):\n if not test_values:\n return {}\n\n expected = \"\"\n test_cases = [\"\"]\n for part in pattern.prefix:\n if isinstance(part, PlainPart):\n expected = expected + \"/\" + part.value\n test_cases = [\n tc + \"/\" + part.value\n for tc in test_cases\n ]\n else:\n expected = expected + \"/\" + part.replacement\n values = 
test_values.get((part.regex, part.name))\n if not values:\n values = test_values.get((part.regex, ''))\n tmp = []\n if values:\n for v in values:\n for tc in test_cases:\n tmp.append(tc+\"/\"+v)\n test_cases = tmp\n\n if pattern.suffix == \"/\" or isinstance(pattern.suffix, RegexPart):\n expected += \"/\"\n test_cases = [tc + \"/\" for tc in test_cases]\n\n result = {\n tc: expected\n for tc in test_cases\n }\n\n if isinstance(pattern.suffix, RegexPart):\n expected += \"SUFFIX\"\n test_cases = [tc + \"TODO\" for tc in test_cases]\n for tc in test_cases:\n result[tc] = expected\n\n return result\n\n\nclass Pattern(object):\n def __init__(self, handler, pattern, test_cases=None):\n self.handler = handler\n self.raw = pattern\n self.test_cases = test_cases if test_cases is not None else {}\n\n self.regexes = set()\n self.__parse(pattern)\n\n def __add_regex(self, regex, name, reject=\"\"):\n if reject:\n self.prefix.append(RegexPart(regex, name, reject))\n self.regexes.add((reject, \"\"))\n else:\n self.prefix.append(RegexPart(regex, name))\n self.regexes.add((regex, name))\n\n def __parse(self, pattern):\n self.prefix = []\n for token in tokenize(pattern):\n if self.__match_named_regex(token):\n continue\n elif self.__match_named_type(token):\n continue\n elif self.__match_plain(token):\n continue\n m = ANON_REGEX_PART.match(token)\n groups = m.groupdict()\n self.__add_regex(groups[\"regex\"], \"\", groups[\"reject\"])\n\n if pattern.endswith(\"/$\") or len(self.prefix) < 1:\n self.suffix = PlainPart(\"/\")\n elif SUFFIX_EMPTY.match(pattern):\n self.suffix = PlainPart(\"\")\n elif pattern.endswith(\"/\"):\n self.suffix = RegexPart(\".*\", \"SUFFIX\")\n else:\n self.suffix = PlainPart(\"/?\")\n\n def __match_named_regex(self, token):\n m = NAMED_REGEX_PART.match(token)\n if not m:\n return False\n groups = m.groupdict()\n self.__add_regex(groups[\"regex\"], groups[\"name\"], groups[\"reject\"])\n return True\n\n def __match_named_type(self, token):\n m = NAMED_TYPE_PART.match(token)\n if not m:\n return False\n groups = m.groupdict()\n if groups.get(\"type\"):\n regex = converters[groups[\"type\"]].regex\n else:\n regex = \"[^/]+\"\n self.__add_regex(regex, groups[\"name\"])\n return True\n\n def __match_plain(self, token):\n m = PLAIN_PART.match(token)\n if m:\n self.prefix.append(PlainPart(token))\n return True\n m = PLAIN_PART_GUESS.match(token)\n if m:\n token = token.replace(r\"\\.\", \".\")\n self.prefix.append(PlainPart(token))\n return True\n return False\n\n\nclass PlainPart(object):\n def __init__(self, value):\n self.type = \"plain\"\n self.value = value\n self.value_as_repr = repr(value)\n\n def __eq__(self, other):\n if type(self) == type(other):\n return self.value == other.value\n if isinstance(other, str):\n return self.value == other\n return False\n\n def __repr__(self):\n return \"PlainPart(%r)\" % self.value\n\n\nclass RegexPart(object):\n def __init__(self, regex, name, reject=\"\"):\n self.type = \"regex\"\n self.name = name\n self.regex = regex\n self.regex_as_raw = as_raw(self.regex)\n self.reject = reject\n self.reject_as_raw = as_raw(self.reject)\n self.replacement = name.upper() if name else \"TODO\"\n self.replacement_as_repr = repr(self.replacement)\n\n def __eq__(self, other):\n if not type(self) == type(other):\n return False\n if not self.regex == other.regex:\n return False\n return self.replacement == other.replacement\n\n def __repr__(self):\n if self.reject:\n return \"RegexPart(%r, %r, %r)\" % (self.regex, self.replacement, self.reject)\n else:\n return 
\"RegexPart(%r, %r)\" % (self.regex, self.replacement)\n\n\ndef as_raw(value):\n if '\"' in value:\n return 'r\"\"\"' + value + '\"\"\"'\n else:\n return 'r\"' + value + '\"'\n\n\ndef self_test(output):\n for tc, expected in EXPECTED_PATTERNS.items():\n pattern = Pattern(\"tc\", tc)\n if expected != pattern.prefix:\n output.write(\"FAIL: {}\\n\".format(tc))\n output.write(\" expected: {}\\n\".format(expected))\n output.write(\" actual: {}\\n\".format(pattern.prefix))\n continue\n output.write(\"PASS: {}\\n\".format(tc))\n expected = EXPECTED_TEST_CASES[tc]\n actual = create_test_cases(pattern, TEST_VALUES)\n if expected != actual:\n output.write(\"FAIL: {}\\n\".format(tc))\n output.write(\" expected: {}\\n\".format(expected))\n output.write(\" actual: {}\\n\".format(actual))\n\n\ndef tokenize(pattern):\n pattern = pattern.lstrip(\"^\").rstrip(\"/$\")\n\n begin, brackets, parens, escaped = 0, 0, 0, False\n for i, c in enumerate(pattern):\n if c == \"/\" and (brackets + parens) < 1:\n if escaped:\n end = i - 1\n else:\n end = i\n yield pattern[begin:end]\n begin = i + 1\n if escaped:\n escaped = False\n else:\n if c == \"\\\\\":\n escaped = True\n elif c == \"[\":\n brackets += 1\n elif c == \"]\":\n brackets -= 1\n elif c == \"(\":\n parens += 1\n elif c == \")\":\n parens -= 1\n\n if begin < len(pattern):\n yield pattern[begin:]\n\n\nEXPECTED_PATTERNS = {\n \"\": [],\n \"articles/<int:year>/<int:month>/<slug:slug>/$\": [\n PlainPart(\"articles\"),\n RegexPart(r\"[0-9]+\", \"year\"),\n RegexPart(r\"[0-9]+\", \"month\"),\n RegexPart(r\"[-a-zA-Z0-9_]+\", \"slug\"),\n ],\n \"^articles/(?P<year>[0-9]{4})/(?P<month>[0-9]{2})/(?P<slug>[\\w-]+)/$\": [\n PlainPart(\"articles\"),\n RegexPart(r\"[0-9]{4}\", \"year\"),\n RegexPart(r\"[0-9]{2}\", \"month\"),\n RegexPart(r\"[\\w-]+\", \"slug\"),\n ],\n \"a|b|c\": [RegexPart(\"a|b|c\", \"\")],\n \"^go/(?P<page>(a|b))\": [PlainPart(\"go\"), RegexPart(r\"a|b\", \"page\")],\n \"groups/<gid>\": [PlainPart(\"groups\"), RegexPart(r\"[^/]+\", \"gid\")],\n \"^users/(?P<uid>[^/]+)\": [PlainPart(\"users\"), RegexPart(r\"[^/]+\", \"uid\")],\n \"^(?!users|groups)(?P<resource>[^/]+)/$\": [RegexPart(r\"[^/]+\", \"resource\", r\"users|groups\")],\n \"help/(?!search)(.*)\": [PlainPart(\"help\"), RegexPart(r\"(.*)\", \"\", r\"search\")],\n \"favicon.ico$\": [PlainPart(\"favicon.ico\")],\n \".well-known/\": [PlainPart(\".well-known\")],\n}\n\n\nEXPECTED_TEST_CASES = {\n \"\": {\"/\": \"/\"},\n \"articles/<int:year>/<int:month>/<slug:slug>/$\": {\n \"/articles/2020/02/Slurms_MacKenzie/\": \"/articles/YEAR/MONTH/SLUG/\",\n },\n \"^articles/(?P<year>[0-9]{4})/(?P<month>[0-9]{2})/(?P<slug>[\\w-]+)/$\": {\n \"/articles/1974/08/Philip_J_Fry/\": \"/articles/YEAR/MONTH/SLUG/\",\n },\n \"a|b|c\": {\n \"/a\": \"/TODO\",\n \"/b\": \"/TODO\",\n \"/c\": \"/TODO\",\n },\n \"favicon.ico$\": {\n \"/favicon.ico\": \"/favicon.ico\",\n },\n \"^go/(?P<page>(a|b))\": {\n \"/go/a\": \"/go/PAGE\",\n \"/go/b\": \"/go/PAGE\",\n },\n \"groups/<gid>\": {\n \"/groups/wheel\": \"/groups/GID\",\n },\n \"help/(?!search)(.*)\": {\n \"/help/TODO\": \"/help/TODO\",\n },\n \"^users/(?P<uid>[^/]+)\": {\n \"/users/sjansen\": \"/users/UID\",\n },\n \"^(?!users|groups)(?P<resource>[^/]+)/$\": {\n \"/roles/\": \"/RESOURCE/\",\n },\n \".well-known/\": {\n \"/.well-known/\": \"/.well-known/\",\n \"/.well-known/TODO\": \"/.well-known/SUFFIX\",\n },\n}\n\n\nTEST_VALUES = {\n (r\"[0-9]+\", \"year\"): [\"2020\"],\n (r\"[0-9]+\", \"month\"): [\"02\"],\n (r\"[-a-zA-Z0-9_]+\", \"slug\"): [\"Slurms_MacKenzie\"],\n 
(r\"[0-9]{4}\", \"year\"): [\"1974\"],\n (r\"[0-9]{2}\", \"month\"): [\"08\"],\n (r\"[\\w-]+\", \"slug\"): [\"Philip_J_Fry\"],\n (r\"a|b|c\", \"\"): [\"a\", \"b\", \"c\"],\n (r\"a|b\", \"page\"): [\"a\", \"b\"],\n (r\"[^/]+\", \"gid\"): [\"wheel\"],\n (r\"[^/]+\", \"uid\"): [\"sjansen\"],\n (r\"[^/]+\", \"resource\"): [\"roles\"],\n (r\".*\", \"\"): [\"TODO\"],\n (r\"(.*)\", \"\"): [\"TODO\"],\n}\n\n\nURL_TEMPLATE = textwrap.dedent(\n '''\\\n {% with p=pattern %}{% autoescape off %}# {{ p.raw }}\n url(\n \"{{ p.handler }}\",\n path = {\n \"prefix\": [{% for part in p.prefix %}{% if part.type == \"plain\" %}\n {{ part.value_as_repr }},{% else %}\n ({{ part.regex_as_raw }}, {{ part.replacement_as_repr }}{% if part.reject %}, {{ part.reject_as_raw }}{% endif %}),{% endif %}{% endfor %}\n ],{% with s=p.suffix %}\n \"suffix\": {% if s.type == \"plain\" %}{{ s.value_as_repr }}{% else %}({{ s.regex_as_raw }}, {{ s.replacement_as_repr }}){% endif %},\n },{% endwith %}\n query = {\n \"other\": \"X\",\n },\n tests = {{% for test_case, expected in test_cases.items %}\n \"{{ test_case }}\": \"{{ expected }}\",{% endfor %}\n },\n ){% endautoescape %}{% endwith %}\n\n'''\n)\n", "id": "3645707", "language": "Python", "matching_score": 3.371857166290283, "max_stars_count": 0, "path": "contrib/django/convert_url_patterns.py" }, { "content": "import argparse\nimport csv\nimport sys\nfrom importlib import import_module\n\nfrom django.conf import settings\nfrom django.core.management import BaseCommand\n\n\nclass Command(BaseCommand):\n def add_arguments(self, parser):\n parser.add_argument(\n \"-f\", \"--format\", nargs=\"?\", choices=[\"csv\", \"txt\"], default=\"txt\",\n )\n parser.add_argument(\n \"-o\",\n \"--output\",\n nargs=\"?\",\n default=sys.stdout,\n type=argparse.FileType(\"w\"),\n )\n parser.add_argument(\"-s\", \"--sort\", nargs=\"?\", choices=[\"handler\", \"pattern\"])\n\n def handle(self, *args, **options):\n urlconf = import_module(settings.ROOT_URLCONF)\n patterns = extract_views_from_urlpatterns(urlconf.urlpatterns)\n\n if options[\"sort\"] == \"pattern\":\n patterns.sort(key=lambda x: x[0])\n elif options[\"sort\"] == \"handler\":\n patterns.sort(key=lambda x: x[1])\n\n if options[\"format\"] == \"csv\":\n w = csv.writer(options[\"output\"])\n w.writerow([\"Pattern\", \"Handler\", \"Test Case\", \"Expected\"])\n for p in patterns:\n w.writerow([p[0], p[1], \"\", \"\"])\n else:\n max_length = 0\n for p in patterns:\n max_length = max(max_length, len(p[0]))\n for p in patterns:\n options[\"output\"].write(\n \"{pattern:{length}} | {handler}\\n\".format(\n length=max_length, pattern=p[0], handler=p[1],\n )\n )\n\n\ndef extract_pattern(base, p):\n if hasattr(p, \"pattern\"):\n pattern = str(p.pattern)\n if p.pattern._is_endpoint and not hasattr(p.pattern, \"_regex\"):\n pattern += \"$\"\n else:\n pattern = p.regex.pattern\n if base and pattern.startswith(\"^\"):\n pattern = pattern[1:]\n return base + pattern\n\n\ndef extract_views_from_urlpatterns(urlpatterns, base=\"\"):\n \"\"\"\n Return a list of views from a list of urlpatterns.\n Each object in the returned list is a two-tuple: (pattern, handler)\n \"\"\"\n views = []\n for p in urlpatterns:\n if hasattr(p, \"url_patterns\"):\n try:\n patterns = p.url_patterns\n except ImportError:\n continue\n views.extend(\n extract_views_from_urlpatterns(patterns, extract_pattern(base, p))\n )\n elif hasattr(p, \"lookup_str\"):\n try:\n views.append((extract_pattern(base, p), p.lookup_str))\n except ViewDoesNotExist:\n continue\n else:\n raise 
TypeError(\"%s does not appear to be a urlpattern object\" % p)\n    return views\n", "id": "8307991", "language": "Python", "matching_score": 3.8683974742889404, "max_stars_count": 0, "path": "contrib/django/list_url_patterns.py" }, { "content": "import argparse\nimport json\nimport sys\n\nfrom django.core.management import BaseCommand\nfrom django.urls import resolve\n\n\nclass Command(BaseCommand):\n    def add_arguments(self, parser):\n        parser.add_argument(\n            \"-i\", \"--input\", nargs=\"?\", type=argparse.FileType(\"r\"),\n        )\n        parser.add_argument(\n            \"-o\",\n            \"--output\",\n            nargs=\"?\",\n            default=sys.stdout,\n            type=argparse.FileType(\"w\"),\n        )\n\n    def handle(self, *args, **options):\n        cases = json.load(options[\"input\"])\n        output = options[\"output\"]\n\n        failures = 0\n        for path in sorted(cases.keys()):\n            expected = cases[path]\n            try:\n                match = resolve(path)\n                actual = lookup_str(match.func)\n            except BaseException:\n                actual = ''\n            if expected == actual:\n                output.write(\"PASS: {}\\n\".format(path))\n            else:\n                failures += 1\n                output.write(\"FAIL: {}\\n\".format(path))\n                output.write(\" expected: {}\\n\".format(expected))\n                output.write(\" actual: {}\\n\".format(actual))\n\n        if failures > 0:\n            sys.exit(1)\n\n\ndef lookup_str(handler):\n    module = handler.__module__\n    name = handler.__qualname__\n    return '.'.join([module, name])\n", "id": "10446223", "language": "Python", "matching_score": 2.457470417022705, "max_stars_count": 0, "path": "contrib/django/validate_test_cases.py" } ]
1.315548
Pandorolo
[ { "content": "# 3x3 Cramer Resolution\n# Made by @Pandorolo\n\nimport os\nimport re\n\n# Clear Screen\ndef clear_screen():\n\tos.system('cls' if os.name == 'nt' else 'clear')\n\nclear_screen()\n\n# Ask and slice down all equations\nmatcher = re.compile(r'([+-]?[0-9]+)')\neq = []\n\nfor x in range(3):\n\tinput_eq = str(input(f'Insert the {x+1} equation: '))\n\teq.append([int(re.findall(matcher, input_eq)[y]) for y in range(4)])\n\na, b, c, d = 0, 1, 2, 3\n\n# First determinant\ndeterminant = (eq[0][a]*eq[1][b]*eq[2][c]) + (eq[0][b]*eq[1][c]*eq[2][a]) + (eq[0][c]*eq[1][a]*eq[2][b]) + ((eq[0][c]*eq[1][b]*eq[2][a]) + (eq[2][b]*eq[1][c]*eq[0][a]) + (eq[2][c]*eq[1][a]*eq[0][b]))*-1\n\n# Determinant of X\ndeterminant_x = (eq[0][d]*eq[1][b]*eq[2][c]) + (eq[0][b]*eq[1][c]*eq[2][d]) + (eq[0][c]*eq[1][d]*eq[2][b]) - (eq[0][c]*eq[1][b]*eq[2][d]) - (eq[0][b]*eq[1][d]*eq[2][c]) - (eq[0][d]*eq[1][c]*eq[2][b])\n\n# Determinant of Y\ndeterminant_y = (eq[0][a]*eq[1][d]*eq[2][c]) + (eq[0][d]*eq[1][c]*eq[2][a]) + (eq[0][c]*eq[1][a]*eq[2][d]) - (eq[0][c]*eq[1][d]*eq[2][a]) - (eq[0][d]*eq[1][a]*eq[2][c]) - (eq[0][a]*eq[1][c]*eq[2][d])\n\n# Determinant of Z\ndeterminant_z = (eq[0][a]*eq[1][b]*eq[2][d]) + (eq[0][b]*eq[1][d]*eq[2][a]) + (eq[0][d]*eq[1][a]*eq[2][b]) - (eq[0][d]*eq[1][b]*eq[2][a]) - (eq[0][d]*eq[1][a]*eq[2][d]) - (eq[0][a]*eq[1][d]*eq[2][b])\n\n# Results\nprint(f'\\nThe main determinant is: {determinant}')\nprint(f'X: {determinant_x}/{determinant} or {determinant_x/determinant}')\nprint(f'Y: {determinant_y}/{determinant} or {determinant_y/determinant}')\nprint(f'Z: {determinant_z}/{determinant} or {determinant_z/determinant}')", "id": "2524328", "language": "Python", "matching_score": 0, "max_stars_count": 0, "path": "cramer.py" }, { "content": "# Farkle in Python\n# Made by @Pandorolo\n\n# Import\nimport os\nimport random\n\n# Clear Screen\ndef clear_screen():\n\tos.system('cls' if os.name == 'nt' else 'clear')\n\n# Get Number of Players and check if it's an integer and bigger than 1\ndef get_players():\n\twhile True:\n\t\tnum = input(\"Insert the number of players: \")\n\t\ttry:\n\t\t\tnum = int(num)\n\t\t\tif num > 1:\n\t\t\t\tbreak\n\t\t\telse:\n\t\t\t\tprint(\"You can't play alone!\")\n\t\texcept ValueError:\n\t\t\tprint(\"Please insert an integer number.\")\n\treturn num\n\n# Control the current hand\ndef hand_controller(index):\n\t# Print the active player and generate his hand\n # clear_screen()\n print(f\"It's the turn of player n.{index}\")\n player_hand = [random.randint(1, 6) for y in range(6)]\n temp_score = 0\n reroll = 1\n\n while reroll == 1:\n # Print the Hand\n print(\"Your hand is: \", end=\" \")\n for n in player_hand:\n print(n, end=\" \")\n print(\"\\n\")\n\n # Ask for the die/dice the player wants to keep\n # and check if it's in the list\n player_input = int(input(\"Insert the numbers you want to keep: \"))\n player_points = 0\n while player_input not in player_hand:\n print(\"Please insert a number that's in the list.\")\n player_input = int(input(\"Insert the numbers you want to keep: \"))\n\n # Count and add the points\n if player_hand.count(player_input) >= 3 and player_input != 1:\n \ttemp_score += player_input*100\n \tplayer_hand.remove(player_input)\n \t\n elif player_hand.count(player_input) >= 3 and player_input == 1:\n temp_score += player_input*1000 \n player_hand.remove(player_input)\n \n elif player_input == 1:\n \ttemp_score += 100\n \tplayer_hand.remove(player_input)\n \t\n elif player_input == 5:\n \ttemp_score += 50\n \tplayer_hand.remove(player_input)\n\n 
reroll = str(input(\"Reroll? [Y/n]\"))\n\n if reroll == 'Y' or reroll == 'y':\n reroll = 1\n else:\n reroll = 0\n\n score[index] += temp_score\n print(score)\n\n# Control Turns\ndef turn_controller():\n # Loop it for every player\n for x in range(num_players):\n hand_controller(x)\n\n# Setup\nclear_screen()\nnum_players = get_players()\n\n# Start the game\nscores = [0 for x in range(num_players)]\nturn_controller()\n\nprint(scores)\n", "id": "6222336", "language": "Python", "matching_score": 0, "max_stars_count": 3, "path": "farkle.py" } ]
0
kjgarza
[ { "content": "import json\nimport requests\n\n\nkeys = [\n 'name',\n 'html_url',\n 'stargazers_count',\n 'created_at',\n 'description',\n 'forks',\n 'language',\n 'open_issues',\n 'watchers',\n 'size',\n]\n\nd3Libraries = [\n 'eventbrite/britecharts',\n 'd3fc/d3fc',\n 'vega/vega',\n 'novus/nvd3',\n 'robinfhu/forest-d3',\n 'palantir/plottable',\n 'plotly/plotly.js',\n 'misoproject/d3.chart',\n 'mozilla/metrics-graphics',\n 'alexandersimoes/d3plus',\n 'c3js/c3',\n 'heavysixer/d4',\n 'epochjs/epoch',\n 'shutterstock/rickshaw',\n 'dc-js/dc.js',\n 'kotojs/kotojs',\n 'naver/billboard.js',\n]\n\nreactD3Libraries = [\n 'eventbrite/britecharts-react',\n 'recharts/recharts',\n 'FormidableLabs/victory',\n 'uber/react-vis',\n 'hshoff/vx',\n 'artyomtrityak/d3-explorer',\n 'react-d3',\n 'yang-wei/rd3',\n 'codesuki/react-d3-components',\n 'react-d3-library/react-d3-library',\n 'bgrsquared/d3-react-squared',\n 'plouc/nivo',\n 'emeeks/semiotic',\n]\n\nresultsD3libraries = {}\nfD3 = {}\nresultsReactD3libraries = {}\nfRD3 = {}\n\nfor url in d3Libraries:\n r = requests.get('https://api.github.com/repos/' + url, headers={'Accept': 'application/vnd.github.preview'})\n resultsD3libraries[url] = json.loads(r.text)\n\nfor url in reactD3Libraries:\n r = requests.get('https://api.github.com/repos/' + url, headers={'Accept': 'application/vnd.github.preview'})\n resultsReactD3libraries[url] = json.loads(r.text)\n\n\nfor url, lib_info in resultsD3libraries.iteritems():\n fD3[lib_info.get('name')] = {\n key: value\n for key, value in lib_info.iteritems()\n if key in keys\n }\n\nfor url, lib_info in resultsReactD3libraries.iteritems():\n fRD3[lib_info.get('name')] = {\n key: value\n for key, value in lib_info.iteritems()\n if key in keys\n }\n\nwith open('d3-lib.json', 'w') as outfile:\n json.dump(fD3, outfile)\n\nwith open('react-d3-lib.json', 'w') as outfile:\n json.dump(fRD3, outfile)\n", "id": "7999920", "language": "Python", "matching_score": 0, "max_stars_count": 8, "path": "lib_info/get_libraries.py" }, { "content": "# sort\ndef insertion_sort(arr):\n for i in range(1, len(arr)):\n temp = arr[i]\n pos = binary_search(arr, temp, 0, i) + 1\n for k in range(i, pos, -1):\n arr[k] = arr[k - 1]\n arr[pos] = temp\n\ndef judge(optionA, optionB ):\n print(\"Is A more important than B? \")\n print(\"A: \",optionA)\n print(\"B: \",optionB)\n return input()\n\ndef binary_search(arr, key, start, end):\n #key\n if end - start <= 1:\n if judge(arr[start],key) == 'T':\n return start - 1\n else:\n return start\\\n\n mid = (start + end)//2\n if judge(key,arr[mid]) == 'T':\n return binary_search(arr, key, mid, end)\n else:\n return binary_search(arr, key, start, mid)\n\ndef get_issues_array():\n issues = []\n ## gh issue list -l \"bug\" \n with open('issues.csv', 'r') as f:\n for line in f:\n issues.append(line.split(','))\n return issues\n\ndef array_to_csv(issues):\n with open('issues_out.csv', 'w') as f:\n for issue in issues:\n f.write(','.join(issue))\n\n\ndef main():\n issues = get_issues_array() \n insertion_sort(issues)\n print(\"Done, look for the csvs in the same folder\")\n array_to_csv(issues)\n\n\ndef test():\n issues = [\"high\", \"extreamly Low\", \"medium\", \"extreamly high\",\"low\"]\n n = len(issues)\n insertion_sort(issues)\n print(\"Sorted array is:\")\n for i in range(n):\n print(issues[i],end=\" \")\n\nmain()", "id": "5610841", "language": "Python", "matching_score": 0, "max_stars_count": 0, "path": "nightingale.py" } ]
0
GH-maggio
[ { "content": "from typing import List, Optional\n\nfrom pydantic import BaseModel\n\n\nclass SlackBase(BaseModel):\n token: str\n\n\nclass SlackChallenge(SlackBase):\n challenge: str\n type: str\n\n\nclass SlackEnvelope(SlackBase):\n team_id: str\n api_app_id: str\n event: dict\n type: str\n authed_users: Optional[List[str]]\n event_id: str\n event_time: int\n\n\nclass SlackAction(SlackBase):\n class Config:\n extra = \"allow\"\n\n type: str\n\n actions: list = None\n api_app_id: str = None\n callback_id: str = None\n channel: dict = None\n container: dict = None\n hash: str = None\n is_cleared: bool = None\n message: dict = None\n response_url: str = None\n team: dict = None\n trigger_id: str = None\n user: dict = None\n view: dict = None\n\n\nclass SlackCommand(SlackBase):\n command: str\n response_url: str\n trigger_id: str\n user_id: str\n user_name: str\n team_id: str\n channel_id: str\n text: str\n", "id": "9022237", "language": "Python", "matching_score": 0, "max_stars_count": 56, "path": "src/slackers/models.py" } ]
0
mactul
[ { "content": "\"\"\"\r\nThis module easily gathers in one file everything you need to control Alphabot2 pi\r\n\"\"\"\r\n\r\n\r\nimport RPi.GPIO as GPIO\r\nfrom adafruit_servokit import ServoKit\r\nfrom picamera import PiCamera\r\nimport time\r\nfrom includes.remote import remote_getkey\r\n\r\ncam = PiCamera()\r\ncam.start_preview()\r\n\r\n\r\n# GPIO pins definition\r\nBUZZ = 4\r\nCTR = 7\r\nA = 8\r\nB = 9\r\nC = 10\r\nD = 11\r\n\r\nIR_LEFT = 16\r\nIR_RIGHT = 19\r\nLEFT_FORWARD = 13\r\nLEFT_BACKWARD = 12\r\nRIGHT_FORWARD = 21\r\nRIGHT_BACKWARD = 20\r\nPWM1 = 6\r\nPWM2 = 26\r\n\r\nkit = ServoKit(channels=16)\r\n\r\nGPIO.setmode(GPIO.BCM)\r\nGPIO.setwarnings(False)\r\nGPIO.setup(PWM1, GPIO.OUT)\r\nGPIO.setup(PWM2, GPIO.OUT)\r\nPWMA = GPIO.PWM(PWM1, 500)\r\nPWMB = GPIO.PWM(PWM2, 500)\r\nPWMA.start(50)\r\nPWMB.start(50)\r\n\r\n\r\ndef bip(timer):\r\n \"\"\"\r\n make a sound for timer seconds\r\n \"\"\"\r\n GPIO.setup(BUZZ, GPIO.OUT)\r\n GPIO.output(BUZZ, GPIO.HIGH)\r\n time.sleep(timer)\r\n GPIO.output(BUZZ, GPIO.LOW)\r\n\r\n\r\ndef _setup_wheel(cycleA, cycleB):\r\n \"\"\"\r\n change pwm motors\r\n \"\"\"\r\n GPIO.setup(LEFT_FORWARD, GPIO.OUT)\r\n GPIO.setup(LEFT_BACKWARD, GPIO.OUT)\r\n GPIO.setup(RIGHT_FORWARD, GPIO.OUT)\r\n GPIO.setup(RIGHT_BACKWARD, GPIO.OUT)\r\n PWMA.ChangeDutyCycle(cycleA)\r\n PWMB.ChangeDutyCycle(cycleB)\r\n\r\n\r\ndef stop():\r\n \"\"\"\r\n Stop the motors\r\n \"\"\"\r\n _setup_wheel(0, 0)\r\n GPIO.output(LEFT_FORWARD, GPIO.LOW)\r\n GPIO.output(LEFT_BACKWARD, GPIO.LOW)\r\n GPIO.output(RIGHT_FORWARD, GPIO.LOW)\r\n GPIO.output(RIGHT_BACKWARD, GPIO.LOW)\r\n\r\n\r\ndef forward(speed_left=50, speed_right=None):\r\n \"\"\"\r\n if speed_left == speed_right: forward straight\r\n \"\"\"\r\n if speed_right is None:\r\n speed_right = speed_left\r\n\r\n _setup_wheel(speed_left, speed_right)\r\n GPIO.output(LEFT_FORWARD, GPIO.HIGH)\r\n GPIO.output(LEFT_BACKWARD, GPIO.LOW)\r\n GPIO.output(RIGHT_FORWARD, GPIO.HIGH)\r\n GPIO.output(RIGHT_BACKWARD, GPIO.LOW)\r\n\r\n\r\ndef backward(speed_left=50, speed_right=None):\r\n \"\"\"\r\n same as forward in the other sens\r\n \"\"\"\r\n if speed_right is None:\r\n speed_right = speed_left\r\n\r\n _setup_wheel(speed_left, speed_right)\r\n GPIO.output(LEFT_FORWARD, GPIO.LOW)\r\n GPIO.output(LEFT_BACKWARD, GPIO.HIGH)\r\n GPIO.output(RIGHT_FORWARD, GPIO.LOW)\r\n GPIO.output(RIGHT_BACKWARD, GPIO.HIGH)\r\n\r\n\r\ndef left(speed_left=20, speed_right=None):\r\n \"\"\"\r\n if speed_left == speed_right: left wheel and right wheel turn on opposite sens\r\n \"\"\"\r\n if speed_right is None:\r\n speed_right = speed_left\r\n\r\n _setup_wheel(speed_left, speed_right)\r\n GPIO.output(LEFT_FORWARD, GPIO.LOW)\r\n GPIO.output(LEFT_BACKWARD, GPIO.HIGH)\r\n GPIO.output(RIGHT_FORWARD, GPIO.HIGH)\r\n GPIO.output(RIGHT_BACKWARD, GPIO.LOW)\r\n\r\n\r\ndef right(speed_left=20, speed_right=None):\r\n \"\"\"\r\n if speed_left == speed_right: left wheel and right wheel turn on opposite sens\r\n \"\"\"\r\n if speed_right is None:\r\n speed_right = speed_left\r\n\r\n _setup_wheel(speed_left, speed_right)\r\n GPIO.output(LEFT_FORWARD, GPIO.HIGH)\r\n GPIO.output(LEFT_BACKWARD, GPIO.LOW)\r\n GPIO.output(RIGHT_FORWARD, GPIO.LOW)\r\n GPIO.output(RIGHT_BACKWARD, GPIO.HIGH)\r\n\r\n\r\ndef objet_left():\r\n \"\"\"\r\n Returns True if there is an object in front of the left IR sensor\r\n \"\"\"\r\n GPIO.setup(IR_LEFT, GPIO.IN)\r\n return not GPIO.input(IR_LEFT)\r\n\r\n\r\ndef objet_right():\r\n \"\"\"\r\n Returns True if there is an object in front of the right IR sensor\r\n 
\"\"\"\r\n GPIO.setup(IR_RIGHT, GPIO.IN)\r\n return not GPIO.input(IR_RIGHT)\r\n\r\n\r\ndef button_CTR():\r\n \"\"\"\r\n Returns True if the middle button of the joystick is pressed\r\n \"\"\"\r\n GPIO.setup(CTR, GPIO.IN, GPIO.PUD_UP)\r\n return not GPIO.input(CTR)\r\n\r\n\r\ndef button_A():\r\n \"\"\"\r\n Returns True if the A button of the joystick is pressed\r\n \"\"\"\r\n GPIO.setup(A, GPIO.IN, GPIO.PUD_UP)\r\n return not GPIO.input(A)\r\n\r\n\r\ndef button_B():\r\n \"\"\"\r\n Returns True if the B button of the joystick is pressed\r\n \"\"\"\r\n GPIO.setup(B, GPIO.IN, GPIO.PUD_UP)\r\n return not GPIO.input(B)\r\n\r\n\r\ndef button_C():\r\n \"\"\"\r\n Returns True if the C button of the joystick is pressed\r\n \"\"\"\r\n GPIO.setup(C, GPIO.IN, GPIO.PUD_UP)\r\n return not GPIO.input(C)\r\n\r\n\r\ndef button_D():\r\n \"\"\"\r\n Returns True if the D button of the joystick is pressed\r\n \"\"\"\r\n GPIO.setup(D, GPIO.IN, GPIO.PUD_UP)\r\n return not GPIO.input(D)\r\n\r\n\r\ndef pan(angle):\r\n \"\"\"\r\n Change the pan camera angle\r\n need to be between 20 and 150\r\n \"\"\"\r\n if angle >= 20 and angle <= 150:\r\n kit.servo[0].angle = angle\r\n elif angle < 20:\r\n kit.servo[0].angle = 20\r\n else:\r\n kit.servo[0].angle = 150\r\n\r\n\r\ndef tilt(angle):\r\n \"\"\"\r\n Change the tilt camera angle\r\n need to be between 120 and 180\r\n \"\"\"\r\n if angle >= 120 and angle <= 180:\r\n kit.servo[1].angle = angle\r\n elif angle < 120:\r\n kit.servo[0].angle = 120\r\n else:\r\n kit.servo[0].angle = 180\r\n\r\n\r\ndef photo(brightness, size=(500, 500)):\r\n \"\"\"\r\n Take a photo and register it with the datetime as name\r\n \"\"\"\r\n cam.brightness = brightness\r\n cam.capture(\"image\" + str(time.time()) + \".jpg\", resize=size)\r\n\r\n\r\ndef cleanup():\r\n \"\"\"\r\n clean GPIO and camera\r\n \"\"\"\r\n GPIO.cleanup()\r\n cam.stop_preview()\r\n\r\n\r\nif __name__ == \"__main__\":\r\n \"\"\"\r\n An example to control the robot with the remote control\r\n \"\"\"\r\n try:\r\n pan_angle = 90\r\n tilt_angle = 180\r\n pan(pan_angle)\r\n tilt(tilt_angle)\r\n\r\n brightness = 50\r\n\r\n speed = 50\r\n\r\n while True:\r\n key = remote_getkey()\r\n if key is not None:\r\n if key == \"2\":\r\n forward(speed)\r\n\r\n elif key == \"8\":\r\n backward(speed)\r\n\r\n elif key == \"5\":\r\n stop()\r\n\r\n elif key == \"4\":\r\n left(speed/1.5)\r\n\r\n elif key == \"6\":\r\n right(speed/1.5)\r\n\r\n elif key == \"3\":\r\n if pan_angle > 20:\r\n for i in range(20):\r\n pan_angle -= 0.5\r\n pan(pan_angle)\r\n time.sleep(0.001)\r\n elif key == \"1\":\r\n if pan_angle < 150:\r\n for i in range(20):\r\n pan_angle += 0.5\r\n pan(pan_angle)\r\n time.sleep(0.001)\r\n\r\n elif key == \"-\":\r\n if tilt_angle > 120:\r\n for i in range(20):\r\n tilt_angle -= 0.5\r\n tilt(tilt_angle)\r\n time.sleep(0.001)\r\n elif key == \"0\":\r\n if tilt_angle < 180:\r\n for i in range(20):\r\n tilt_angle += 0.5\r\n tilt(tilt_angle)\r\n time.sleep(0.001)\r\n\r\n elif key == \"play\":\r\n photo(brightness)\r\n bip(0.008)\r\n\r\n elif key == \"prev\":\r\n if brightness > 10:\r\n brightness -= 10\r\n print(brightness)\r\n bip(0.008)\r\n\r\n elif key == \"next\":\r\n if brightness < 90:\r\n brightness += 10\r\n print(brightness)\r\n bip(0.008)\r\n\r\n elif key == \"100+\":\r\n if speed > 30:\r\n speed -= 5\r\n\r\n elif key == \"200+\":\r\n if speed < 90:\r\n speed += 5\r\n\r\n elif key == \"ch+\":\r\n break\r\n\r\n cleanup()\r\n\r\n except KeyboardInterrupt:\r\n cleanup()", "id": "2286730", "language": "Python", "matching_score": 
1.9957414865493774, "max_stars_count": 2, "path": "libessential.py" }, { "content": "import RPi.GPIO as GPIO\r\nimport time\r\nfrom datetime import datetime\r\n\r\n\r\nIR_PIN = 17\r\n\r\n_last_key_pressed = None\r\n\r\nGPIO.setmode(GPIO.BCM)\r\nGPIO.setwarnings(False)\r\nGPIO.setup(IR_PIN, GPIO.IN)\r\n\r\n\r\ndef _getkey():\r\n if GPIO.input(IR_PIN) == 0:\r\n count = 0\r\n while GPIO.input(IR_PIN) == 0 and count < 200: # 9ms\r\n count += 1\r\n time.sleep(0.00006)\r\n if(count < 10):\r\n return\r\n\r\n count = 0\r\n while GPIO.input(IR_PIN) == 1 and count < 80: # 4.5ms\r\n count += 1\r\n time.sleep(0.00006)\r\n\r\n idx = 0\r\n cnt = 0\r\n data = [0, 0, 0, 0]\r\n for i in range(32):\r\n count = 0\r\n while GPIO.input(IR_PIN) == 0 and count < 15: # 0.56ms\r\n count += 1\r\n time.sleep(0.00006)\r\n\r\n count = 0\r\n while GPIO.input(IR_PIN) == 1 and count < 40: # 0: 0.56ms\r\n count += 1 # 1: 1.69ms\r\n time.sleep(0.00006)\r\n\r\n if count > 7:\r\n data[idx] |= 1 << cnt\r\n if cnt == 7:\r\n cnt = 0\r\n idx += 1\r\n else:\r\n cnt += 1\r\n\r\n if data[0] + data[1] == 0xFF and data[2] + data[3] == 0xFF:\r\n return data[2]\r\n else:\r\n return -1 # the key is the same as last time\r\n\r\n\r\ndef remote_getkey():\r\n \"\"\"returns the keys as written on the remote control\"\"\"\r\n\r\n global _last_key_pressed\r\n\r\n key = _getkey()\r\n if key is not None:\r\n if key == 24:\r\n _last_key_pressed = \"2\"\r\n\r\n elif key == 82:\r\n _last_key_pressed = \"8\"\r\n\r\n elif key == 8:\r\n _last_key_pressed = \"4\"\r\n\r\n elif key == 90:\r\n _last_key_pressed = \"6\"\r\n\r\n elif key == 28:\r\n _last_key_pressed = \"5\"\r\n\r\n elif key == 94:\r\n _last_key_pressed = \"3\"\r\n\r\n elif key == 74:\r\n _last_key_pressed = \"9\"\r\n\r\n elif key == 12:\r\n _last_key_pressed = \"1\"\r\n\r\n elif key == 66:\r\n _last_key_pressed = \"7\"\r\n\r\n elif key == 22:\r\n _last_key_pressed = \"0\"\r\n\r\n elif key == 7:\r\n _last_key_pressed = \"-\"\r\n\r\n elif key == 21:\r\n _last_key_pressed = \"+\"\r\n\r\n elif key == 9:\r\n _last_key_pressed = \"=\"\r\n\r\n elif key == 71:\r\n _last_key_pressed = \"ch+\"\r\n\r\n elif key == 70:\r\n _last_key_pressed = \"ch\"\r\n\r\n elif key == 69:\r\n _last_key_pressed = \"ch-\"\r\n\r\n elif key == 67:\r\n _last_key_pressed = \"play\"\r\n\r\n elif key == 68:\r\n _last_key_pressed = \"prev\"\r\n\r\n elif key == 64:\r\n _last_key_pressed = \"next\"\r\n\r\n elif key == 25:\r\n _last_key_pressed = \"100+\"\r\n\r\n elif key == 13:\r\n _last_key_pressed = \"200+\"\r\n\r\n return _last_key_pressed\r\n\r\n\r\nif __name__ == \"__main__\":\r\n while True:\r\n key = _getkey()\r\n if key is not None:\r\n print(key)\r\n", "id": "6258805", "language": "Python", "matching_score": 0.1229933425784111, "max_stars_count": 2, "path": "includes/remote.py" }, { "content": "from .commands import Command\r\nfrom .options import ReturnedOption\r\nimport discord\r\nfrom datetime import datetime, tzinfo, timedelta\r\n\r\n\r\nclass simple_utc(tzinfo):\r\n \"\"\"\r\n a class used by the libary\r\n \"\"\"\r\n def tzname(self, **kwargs):\r\n return \"UTC\"\r\n\r\n def utcoffset(self, dt):\r\n return timedelta(0)\r\n\r\n\r\nclass Interaction:\r\n \"\"\"\r\n An abstact object represents the action of the user on discord\r\n Interaction.command is the main part, because it represents the command sended by the user\r\n \"\"\"\r\n def __init__(self, client, version, type, token, id, guild, channel_id, data, member_data):\r\n self._client = client\r\n self._member_data = member_data\r\n self.version = 1\r\n 
self.type = type\r\n        self.token = token\r\n        self.id = int(id)\r\n        self.guild = guild\r\n        self.channel = client.get_channel(int(channel_id))\r\n        self.command = Command(name=data[\"name\"], description=None, client=client, id=int(data[\"id\"]), guild_id=guild.id)\r\n        if \"options\" in data:\r\n            for option in data[\"options\"]:\r\n                self.command.add_option(ReturnedOption(name=option[\"name\"], value=option[\"value\"]))\r\n\r\n\r\n    def __str__(self):\r\n        return str(self.command)\r\n\r\n\r\n    def call_on_message(self, prefix):\r\n        \"\"\"\r\n        generate a fake discord.Message object and call the on_message function with it\r\n        with this function, you can adapt an old bot that works with messages\r\n        \"\"\"\r\n        message = {\r\n            \"reactions\": [],\r\n            \"attachments\": [],\r\n            \"tts\": False,\r\n            \"embeds\": [],\r\n            \"timestamp\": str(datetime.utcnow().replace(tzinfo=simple_utc()).isoformat()),\r\n            \"mention_everyone\": False,\r\n            \"id\": str(self.id),\r\n            \"pinned\": False,\r\n            \"edited_timestamp\": None,\r\n            \"author\": self._member_data[\"user\"],\r\n            \"member\": self._member_data,\r\n            \"mention_roles\": [],\r\n            \"content\": prefix + str(self.command),\r\n            \"channel_id\": str(self.channel.id),\r\n            \"mentions\": [],\r\n            \"type\": 0\r\n        }\r\n\r\n        message = discord.Message(state=self.channel._state, channel=self.channel, data=message)\r\n        self._client.loop.create_task(self._client.on_message(message))\r\n", "id": "479724", "language": "Python", "matching_score": 2.021373987197876, "max_stars_count": 12, "path": "discordslashcommands/interactions.py" }, { "content": "from distutils.core import setup\r\n\r\n\r\nsetup(\r\n    name = 'discordslashcommands',\r\n    packages = ['discordslashcommands'],\r\n    version = '1.0.4',\r\n    license='MIT',\r\n    long_description = 'A simple library to configure slash commands on discord\\n\\nSee documentation at https://github.com/mactul/discordslashcommands\\n\\nThanks to Seniru for reporting and correcting some bugs and imperfections',\r\n    author = '<NAME>',\r\n    author_email = '<EMAIL>',\r\n    url = 'https://github.com/mactul/discordslashcommands',\r\n    download_url = 'https://github.com/mactul/discordslashcommands.git',\r\n    keywords = ['discord', 'slash', 'custom', 'command', 'commands'],\r\n    install_requires=[\r\n        'discord.py',\r\n        'requests',\r\n    ],\r\n    classifiers=[\r\n        'Development Status :: 3 - Alpha', # Choose either \"3 - Alpha\", \"4 - Beta\" or \"5 - Production/Stable\"\r\n        'Intended Audience :: Developers',\r\n        'Topic :: Software Development :: Build Tools',\r\n        'License :: OSI Approved :: MIT License',\r\n        'Programming Language :: Python :: 3',\r\n        'Programming Language :: Python :: 3.4',\r\n        'Programming Language :: Python :: 3.5',\r\n        'Programming Language :: Python :: 3.6',\r\n        'Programming Language :: Python :: 3.7',\r\n        'Programming Language :: Python :: 3.8',\r\n    ]\r\n)\r\n", "id": "10626652", "language": "Python", "matching_score": 0.2607101798057556, "max_stars_count": 12, "path": "setup.py" }, { "content": "MEMORY_SIZE = 100\r\n\r\n\r\ndef smallest_square(n: int):\r\n    \"\"\"\r\n    returns the integer whose square is closest to n\r\n    example: smallest_square(65) returns 8 (8*8 = 64)\r\n    \"\"\"\r\n    n = abs(n)\r\n    i = 0\r\n    while i**2 <= n:\r\n        i += 1\r\n    if n - (i-1)**2 < i**2 - n:\r\n        return i - 1\r\n    else:\r\n        return i\r\n\r\n\r\ndef smallest_multiples(n: int):\r\n    \"\"\"\r\n    Returns a pair of integers whose product is as close as possible to the number n\r\n    example: smallest_multiples(55) returns (8, 7) because 8*7=56\r\n    \"\"\"\r\n    
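# e.g. smallest_multiples(-30) returns (6, -5): the search runs on abs(n), the sign is restored on the second factor\r\n    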
if n < 0:\r\n sign = -1\r\n n *= - 1\r\n else:\r\n sign = 1\r\n i = smallest_square(n)\r\n a = i\r\n while abs(i**2 - n) > abs(i * (a-1) - n):\r\n a -= 1\r\n while abs(i**2 - n) > abs(i * (a+1) - n):\r\n a += 1\r\n return a, i * sign\r\n\r\n\r\ndef string_to_bf(string: str):\r\n \"\"\"\r\n transform a string in ascii to code in brainfuck\r\n \"\"\"\r\n result = \"\" # output string\r\n p = 0 # value of the current byte\r\n for i in string:\r\n a = ord(i) - p # how much must add to the byte\r\n\r\n if abs(a) <= 7: # it is more interesting to put full \"+\" than to make a loop\r\n if a < 0:\r\n char = \"-\"\r\n else:\r\n char = \"+\"\r\n result = result[:-1] + char * abs(a) + \".<\"\r\n else:\r\n x, y = smallest_multiples(a)\r\n\r\n last_y = y\r\n if a - x * y > 0:\r\n char = \"+\"\r\n else:\r\n char = \"-\"\r\n if y < 0:\r\n y *= -1\r\n char2 = \"-\"\r\n else:\r\n char2 = \"+\"\r\n\r\n result += \"+\" * x + \"[>\" + char2 * y + \"<-]>\" + char * abs(a - x * last_y) + \".<\"\r\n\r\n p += a\r\n\r\n return result\r\n\r\n\r\ndef execute(code, i=0, p=0, memory=0):\r\n \"\"\"\r\n a little brainfuck interpreter to run the code generated by string_to_bf function\r\n \"\"\"\r\n if memory == 0:\r\n memory = [0 for i in range(MEMORY_SIZE)]\r\n code_len = len(code) # to evaluate only one time\r\n while i < code_len:\r\n if code[i] == \"[\":\r\n while memory[p] != 0:\r\n p = execute(code, i+1, p, memory)\r\n while code[i] != \"]\":\r\n i += 1\r\n\r\n elif code[i] == \"]\":\r\n return p\r\n\r\n elif code[i] == \">\":\r\n p += 1\r\n\r\n elif code[i] == \"<\":\r\n p -= 1\r\n\r\n elif code[i] == \"+\":\r\n memory[p] += 1\r\n\r\n elif code[i] == \"-\":\r\n memory[p] -= 1\r\n\r\n elif code[i] == \".\":\r\n print(chr(memory[p]), end=\"\")\r\n\r\n elif code[i] == \",\":\r\n char = input()\r\n if len(char) == 0:\r\n memory[p] = 0\r\n else:\r\n memory[p] = ord(char[0])\r\n i += 1\r\n\r\n\r\nif __name__ == \"__main__\":\r\n string = input(\"string: \")\r\n bf_code = string_to_bf(string)\r\n\r\n print(bf_code, \"\\n\\n\")\r\n\r\n execute(bf_code) # to see that it works good", "id": "6974439", "language": "Python", "matching_score": 0.2712770700454712, "max_stars_count": 1, "path": "to_bf.py" } ]
0.271277
mjdawson89
[ { "content": "\"\"\"\n<NAME>\n11/23/20\nAgileUnoModule8\n\"\"\"\n#acheive score of 7.5 or higher in pylint\n\n#import pdb\n\n\n# 1\n# import my_module and pprint\nimport pprint\nimport my_module\nfrom my_module import my_json_data as my_data #see item #5\n\n#add breakpoint to test your data\nbreakpoint()\n#view the data in your variables to ensure they are correct\n\n\n# 2\n# use the greeting method from my_module to print out your name\nprint(my_module.greeting('Matt'))\n#add breakpoint to test your data\nbreakpoint()\n#view the data in your variables to ensure they are correct\n\n\n# 3\n# use the letter_text module to print out a string\nprint(my_module.letter_text(name=\"Matt\",amount=\"$100\",denomination=\"USD\"))\n#add breakpoint to test your data\nbreakpoint()\n#view the data in your variables to ensure they are correct\n\n\n# 4\n# use the my_module.my_json_data and print it out\nprint(my_module.my_json_data)\n#add breakpoint to test your data\nbreakpoint()\n#view the data in your variables to ensure they are correct\n\n\n# 5\n# import the my_json_data as my_data and print out the my_json_data using pprint\npprint.pprint(my_data)\n#add breakpoint to test your data\nbreakpoint()\n#view the data in your variables to ensure they are correct\n", "id": "10481315", "language": "Python", "matching_score": 3.2387468814849854, "max_stars_count": 0, "path": "test.py" }, { "content": "\"\"\"\n<NAME>\n11/22/20\nAgileUnoModule7\n\"\"\"\n\n# 1\n# import my_module and pprint\nimport my_module\nimport pprint\n\n# 2\n# use the greeting method from my_module to print out your name\nprint(my_module.greeting('Matt'))\n\n# 3\n# use the letter_text module to print out a string\nprint(my_module.letter_text(name=\"Matt\",amount=\"$100\",denomination=\"USD\"))\n\n# 4\n# use the my_module.my_json_data and print it out\nprint(my_module.my_json_data)\n\n# 5 \n# import the my_json_data as my_data and print out the my_json_data using pprint\nfrom my_module import my_json_data as my_data\npprint.pprint(my_data)\n", "id": "10591658", "language": "Python", "matching_score": 2.635047674179077, "max_stars_count": 0, "path": "test.py" }, { "content": "\"\"\"\n<NAME>\n11/22/20\nmy_module\n\"\"\"\n\n#import json module\nimport json\n\n#greeting function which returns \"Hello Matt\" if name is 'Matt'\ndef greeting(name):\n return(f\"Hello {name}\")\n\n#the next function creates a letter\ndef letter_text(**kwargs):\n #if **kwargs has the correct parameters it returns the following\n if \"name\" and \"amount\" and \"denomination\" in kwargs.keys():\n return(f\"Hello {kwargs['name']}, this letter is to inform you that you have won {kwargs['amount']} {kwargs['denomination']}.\")\n #if not it notifies the user that the wrong parameters were used\n else:\n return(\"incorrect parameters supplied\")\n\n#initiate my_json_data dictionary\nmy_json_data = {}\n\n#with the input.json file open, input the json data to the my_json_data dict\nwith open(\"input.json\", \"r\") as input:\n my_json_data = json.load(input)", "id": "12445323", "language": "Python", "matching_score": 2.8842031955718994, "max_stars_count": 0, "path": "my_module.py" }, { "content": "\"\"\"\n<NAME>\n11/23/20\nModule 9: Testing using Pytest\n\"\"\"\n#import pytest\nimport my_module\n\n\ndef test_greeting_pass():\n \"\"\"\n Testing greeting function from my_module for pass\n \"\"\"\n assert my_module.greeting(\"Matt\") == \"Hello Matt\" , \"test failed\"\n\ndef test_greeting_fail():\n \"\"\"\n Testing greeting function from my_module for fail\n \"\"\"\n assert 
my_module.greeting(\"\") != \"Hello Matt\", \"test failed\"\n\ndef test_letter_text_pass():\n \"\"\"\n Testing letter_text function from my_module for pass\n \"\"\"\n assert my_module.letter_text(name=\"Matt\", amount=\"100\", denomination=\"USD\") == \\\n \"Hello Matt, this letter is to inform you that you have won 100 USD.\", \"test failed\"\n\ndef test_letter_text_fail():\n \"\"\"\n Testing letter_text function from my_module for fail\n \"\"\"\n assert my_module.letter_text(name=\"Matt\", amount=\"100\") != \\\n \"Hello Matt, this letter is to inform you that you have won 100 USD.\", \"test failed\"\n", "id": "2282159", "language": "Python", "matching_score": 0.20244388282299042, "max_stars_count": 0, "path": "pytest_.py" }, { "content": "#Module 1\n#<NAME>\n\n#integer\ninteger = 2\nprint(integer)\n\n#float\nfloat_ = 2.3\nprint(float_)\n\n#boolean\nboolean = True\nprint(boolean)\n\n#string\nstring = \"Hello\"\nprint(string)\n\n#tuple\ntuple_ = (1,2,3,4)\nprint(tuple_)\n\n#dictionary\ndict_ = {\"key1\":1,\"key2\":2,\"key3\":3}\nprint(dict_)\n", "id": "6687366", "language": "Python", "matching_score": 0.8589428067207336, "max_stars_count": 0, "path": "assignment1_mdawson.py" }, { "content": "\"\"\"\n<NAME>\nModule 6 HW\n11/01/20\n<EMAIL>\n\"\"\"\n\nimport sys\nimport json\n\n#open the input json file with context manager\nwith open(\"input.json\", \"r\") as input:\n #load the json data to the customers (as a dictionary)\n customers = json.load(input)\n\n#are all customer numbers unique? \n#initiate the temp list\ntemp = []\n#loop through each value in the custoners dictionary \"clients\" key\nfor value in customers[\"clients\"]:\n #appen the clients id to the temp list\n temp.append(value[\"id\"])\n\n#add values to set, eliminating duplicate values\nunique = set(temp)\n#add value to tuple, convert temp list to tuple\noriginal = (temp)\n\n#if the length of teh set is not equal to the length of the tuple, that means we have duplicate ids\nif len(unique) != len(original):\n print(\"There are duplicate id numbers in the data, exiting!!\")\n sys.exit()\n#else all the ids are unique\nelse:\n print(\"All customer ids are unique!!!\")\n\n\"\"\"\n1. 
\nCreate a set of each customer email and test for uniqueness\n\"\"\"\n#initiate the emails list\nemails = []\n#loop through each value in the custoners dictionary \"clients\" key\nfor value in customers[\"clients\"]:\n #append the clients emails to the emamils list\n emails.append(value[\"email\"])\n\n#add values to set, eliminating duplicate values\nemails_unique = set(emails)\n#add value to tuple, convert emails list to tuple\nemails_original = (emails)\n\n#if the length of the set is not equal to the length of the tuple, that means we have duplicate ids\nif len(emails_unique) != len(emails_original):\n print(\"There are duplicate emails in the data, exiting!!\")\n sys.exit()\n#else all the ids are unique\nelse:\n print(\"All customer emails are unique!!!\")\n\n\"\"\"\n2.\nCreate a dictionary of each cusomter, each one should \ncontain the name and email of each customer\nwrite this as JSON to a new file called email_list.json\n\"\"\"\n#initiate customer dict\ncustomers_dict = {}\n#initiate the list of dictionaries inside customers_dict\ncustomers_dict[\"customers\"] = []\n\n#initiate email dict (one for each customer)\nemail_dict = {}\n\n#loop through input customers dict and add names and emails to new dict\nfor value in customers[\"clients\"]:\n email_dict[\"name\"] = value[\"name\"]\n email_dict[\"email\"] = value[\"email\"]\n customers_dict[\"customers\"].append(email_dict)\n\n#write to json file\nwith open(\"email_list.json\", \"w\") as write_file:\n json.dump(customers_dict, write_file)\n\n\n\"\"\"\n3.\nopen the original file again, this time set each male\ncustomers isActive status to false\nwrite this new data to a file called current_customers\n\"\"\"\n#reopen the input json file with context manager\nwith open(\"input.json\", \"r\") as input:\n #load the json data to the new_customers (as a dictionary)\n new_customers = json.load(input)\n\nfor value in new_customers[\"clients\"]:\n if value[\"gender\"] == \"male\":\n value[\"isActive\"] = False\n\n#write to json file\nwith open(\"current_customers.json\", \"w\") as write_file:\n json.dump(new_customers, write_file)\n", "id": "6044786", "language": "Python", "matching_score": 1.6649123430252075, "max_stars_count": 0, "path": "assignment6.py" }, { "content": "\"\"\" \n<NAME>\nAgile UNO Module 5\nStrings and Lists\n102520\n\"\"\"\n#from sys library import exit module\nfrom sys import exit\n#import the requests library\nimport requests\n\n#initialize site_data dict\nsite_data = {}\n\n#with the \"sites.csv\" file open as the variable 'infile'...\nwith open(\"sites.csv\", \"r\") as infile:\n #read the .csv file and store to the data variable\n data = infile.read()\n #store the sites data to a list (each item in data is seperated by a comma)\n sites = data.split(\",\")\n\n#for each site in the sites list...\nfor site in sites:\n #store the site to the site_data dictionary and use the requests library to pull site response\n site_data[site] = requests.get(site)\n\n#for each key and value pair in the site_data dictionary (key=site, value=response)...\nfor key, value in site_data.items():\n #print the key value pairs (each on a new line)\n print(f\"\\n{key} : {value}\")\n\n\n#[1]\n###########################################\n\"\"\"\nUsing string slicing, pring out each URL extension below: example\nedu\ncom\nedu\n\"\"\"\n#loop through all the sites in sites list\nfor site in sites:\n #pull the last three characters of the string, these are the extension\n #note: will not work for 2 character extension sites (i.e. 
.io or .co sites) could be modified to accomadate these\n extension = site[-3:]\n print(extension)\n\n\n\n#[2]\n##############################################\n\"\"\"\nprint out any sites that end with .com below\n\"\"\"\n#loop through all the sites in the sites list\nfor site in sites:\n #if the last 3 characters are \"com\" print that site\n if site[-3:] == \"com\":\n print(site)\n\n\n\n\n#[3]\n#################################################\n\"\"\"\nconvert all site names to upper case and print out each below\n\"\"\"\n#loop through all the sites in the sites list\nfor site in sites:\n #print the uppercase version of the site\n print(site.upper())\n\n#[4]\n#######################################################\n\"\"\"\nusing the list of sites, add a new site to it..\nusing the input() method to get the name of the site from the user\nthen reverse the order of the list and print it out\n\"\"\"\n#request input from the user for a new site\nnew_site = str(input(\"What site would you like to add to the list?\"))\n#add the new site to the end of the site list\nsites.append(new_site)\n#reverse the list\nsites.reverse()\n#print the list\nprint(sites)\n\n#[5]\n#######################################################\n\"\"\"\nprint out the 'Server' of the reponse of the URL request of the itemsfrom your list\n\nexample: print(f\"{mySiteData.headers.get('Server')} \\n\")\n\"\"\"\n#for the site and responses in the site_data dictionary\nfor site,response in site_data.items():\n #print the response\n print(f\"{response.headers.get('Server')} \\n\")\n\n\n\n#[6]\n#######################################################\n\"\"\"\nexit the program using the sys module's exit function\n\"\"\"\n#exit the program\nexit()", "id": "7366217", "language": "Python", "matching_score": 2.368115186691284, "max_stars_count": 0, "path": "AgileUnoModule5.py" }, { "content": "#Module 4 Assignment\n\n#import from required libraries\nfrom sys import exit\nfrom random import randint\n\n\"\"\"initializing variables\"\"\"\n#myData dictionary (stores my name)\nmyData = {}\nmyData[\"first_name\"]=\"Matt\"\n#guesses variable (starts at 0)\nguesses = 0 \n#wins variable (starts at 0)\nwins = 0\n\n\"\"\"context manager (running code in the context that the file is open,\nonce completed the file is automatically closed)\"\"\"\nwith open(\"questions.txt\", \"r\") as infile:\n #read each line in the questions.txt file and store them as a list of 'questions'\n questions = infile.readlines()\n #for each question if the list of questions...\n for question in questions:\n #if the string \"first\" is in the question\n if \"first\" in question:\n #prompt the user with the question and save the input to the myData dictionary as the variable \"first_name\"\n myData[\"first_name\"] = input(question)\n #if the string \"last\" is in the question\n elif \"last\" in question:\n #prompt the user with the question and save the input to the myData dictionary as the variable \"last_name\"\n myData[\"last_name\"] = input(question)\n #if none of the above is true the questions.txt file is incorrect\n else:\n print(\"bad question in input file\")\n exit()\n\n\"\"\"for loop for each play of the guessing game out of 10 plays\"\"\"\nfor play in range(10):\n #pick a random number between 0 and 100\n number = randint(0,100)\n #initialize the solved variable as False (not yet solved)\n solved = False\n #while solved is False\n while not solved:\n #prompt user to guess an integer from 0 to 100\n guess = int(input(f\"Guess a number from 0 to 100 : \"))\n 
        #add 1 to the number of guesses after each guess\n        guesses += 1\n        #if guess is the number (user won the game)\n        if guess == number:\n            #print congratulatory statement\n            print(\"Great job, \" + myData[\"first_name\"] + f\", your guess of {guess} is correct!\")\n            #add 1 to the number of wins\n            wins += 1\n            #change the solved variable to True\n            solved = True\n            #exit the while loop\n            break\n        #guess is not the number, print that the guess is incorrect\n        else:\n            print(f\"Your guess of {guess} is incorrect!\")\n            #if guess is greater than the number\n            if guess > number:\n                #tell the user their guess was too high\n                print(\"Sorry, you guessed too high!\")\n            #if guess was less than the number\n            elif guess < number:\n                #tell the user their guess was too low\n                print(\"Sorry, you guessed too low!\")\n            #if none of the above are true\n            else:\n                #tell the program to pass on to the next part of the code\n                pass\n    #if the game was solved\n    if solved:\n        #show the user the number of completed plays\n        print(f\"Let's play again, you have completed {play + 1} out of 10 plays.\")\n        #continue the for loop\n        continue\n\n#print the final results and exit the program\nprint(myData[\"first_name\"] + \" \" + myData[\"last_name\"] + f\" guessed the correct number {wins} out of 10 plays.\")\nprint(\"It took \" + myData[\"first_name\"] + \" \" + myData[\"last_name\"] + f\" {guesses} guesses to do this!\")\nexit()", "id": "7298291", "language": "Python", "matching_score": 1.4232310056686401, "max_stars_count": 0, "path": "Module4Assignment_Dawson.py" }, { "content": "#Module 3 Assignment\n#<NAME>\n#100720\n\n#read text file (a question)\nf = open(\"question.txt\",\"r+\")\nquestion = f.read()\n#print(question)\n#use text question as the prompt for input function\nanswer = input(question + \"\\n\")\n#print(answer)\n#write the user's answer back to the text file\nf.write(\"\\n\" + answer)\nf.close()\n", "id": "11906808", "language": "Python", "matching_score": 0.4247730076313019, "max_stars_count": 0, "path": "assignment3.py" } ]
1.664912
Vicaris
[ { "content": "import numpy as np\n\ndef scroll(clip, h=None, w=None, x_speed=0, y_speed=0,\n x_start=0, y_start=0, apply_to=\"mask\"):\n \"\"\" Scrolls horizontally or vertically a clip, e.g. to make fin\n credits \"\"\"\n if h is None: h = clip.h\n if w is None: w = clip.w\n \n xmax = clip.w-w-1\n ymax = clip.h-h-1\n\n def f(gf,t):\n x = max(0, min(xmax, x_start+ np.round(x_speed*t)))\n y = max(0, min(ymax, y_start+ np.round(y_speed*t)))\n return gf(t)[y:y+h, x:x+w]\n \n return clip.fl(f, apply_to = apply_to)\n", "id": "11745176", "language": "Python", "matching_score": 0.2990126311779022, "max_stars_count": 0, "path": "moviepy/video/fx/scroll.py" }, { "content": "import numpy as np\nimport scipy.ndimage as ndi\nfrom moviepy.video.VideoClip import ImageClip\n\n\ndef findObjects(clip,rem_thr=500, preview=False):\n \"\"\" \n Returns a list of ImageClips representing each a separate object on\n the pantalla.\n \n rem_thr : all objects found with tamano < rem_Thr will be\n considered false positives and will be removed\n \n \"\"\"\n \n image = clip.get_frame(0)\n if clip.mask is None:\n clip = clip.add_mask()\n \n mask = clip.mask.get_frame(0)\n labelled, num_features = ndi.measurements.label(image[:,:,0])\n \n #find the objects\n slices = ndi.find_objects(labelled)\n # cool trick to remove letter holes (in o,e,a, etc.)\n slices = [e for e in slices if mask[e[0],e[1]].mean() >0.2]\n # remove very small slices\n slices = [e for e in slices if image[e[0],e[1]].tamano > rem_thr]\n # Sort the slices from left to right\n islices = sorted(enumerate(slices), key = lambda s : s[1][1].inicia)\n \n letters = []\n for i,(ind,(sy,sx)) in enumerate(islices):\n \"\"\" crop each letter separately \"\"\"\n sy = slice(sy.inicia-1,sy.stop+1)\n sx = slice(sx.inicia-1,sx.stop+1)\n letter = image[sy,sx]\n labletter = labelled[sy,sx]\n maskletter = (labletter==(ind+1))*mask[sy,sx]\n letter = ImageClip(image[sy,sx])\n letter.mask = ImageClip( maskletter,ismask=True)\n letter.screenpos = np.array((sx.inicia,sy.inicia))\n letters.append(letter)\n \n if preview:\n import matplotlib.pyplot as plt\n print( \"found %d objects\"%(num_features) )\n fig,ax = plt.subplots(2)\n ax[0].axis('off')\n ax[0].imshow(labelled)\n ax[1].imshow([range(num_features)],interpolation='nearest')\n ax[1].set_yticks([])\n plt.show()\n \n return letters\n", "id": "8429202", "language": "Python", "matching_score": 0.9997593760490417, "max_stars_count": 0, "path": "moviepy/video/tools/segmenting.py" }, { "content": "\"\"\" This module contains everything that can help automatize\nthe cuts in MoviePy \"\"\"\n\nfrom collections import defaultdict\nfrom moviepy.decorators import use_clip_fps_by_default\nimport numpy as np\n\n@use_clip_fps_by_default\ndef find_video_period(clip,fps=None,tmin=.3):\n \"\"\" Finds the period of a video based on frames correlation \"\"\"\n \n\n frame = lambda t: clip.get_frame(t).flatten()\n tt = np.arange(tmin,clip.duracion,1.0/ fps)[1:]\n ref = frame(0)\n corrs = [ np.corrcoef(ref, frame(t))[0,1] for t in tt]\n return tt[np.argmax(corrs)]\n\n\nclass FramesMatch:\n \"\"\"\n \n Parameters\n -----------\n\n t1\n starting time\n\n t2\n End time\n\n d_min\n Lower bound on the distance between the first and last frames\n\n d_max\n Upper bound on the distance between the first and last frames\n\n \"\"\"\n\n def __init__(self, t1, t2, d_min, d_max):\n self.t1 = t1\n self.t2 = t2\n self.d_min = d_min\n self.d_max = d_max\n self.time_span = t2-t1\n\n def __str__(self):\n\n return '(%.04f, %.04f, %.04f, %.04f)'%(\n self.t1, 
 self.t2, self.d_min, self.d_max)\n\n    def __repr__(self):\n        return '(%.04f, %.04f, %.04f, %.04f)'%(\n            self.t1, self.t2, self.d_min, self.d_max)\n\n    def __iter__(self):\n        return [self.t1, self.t2, self.d_min, self.d_max].__iter__()\n\n\nclass FramesMatches(list):\n\n    def __init__(self, lst):\n\n        list.__init__(self, sorted(lst, key=lambda e: e.d_max))\n\n    def best(self, n=1, percent=None):\n        if percent is not None:\n            n = len(self)*percent/100\n        return self[0] if n==1 else FramesMatches(self[:n])\n\n    def filter(self, cond):\n        \"\"\"\n        Returns a FramesMatches object obtained by filtering out the FramesMatch\n        which do not satisfy the condition ``cond``. ``cond`` is a function\n        (FrameMatch -> bool).\n\n        Examples\n        ---------\n        >>> # Only keep the matches corresponding to (> 1 second) sequences.\n        >>> new_matches = matches.filter( lambda match: match.time_span > 1)\n        \"\"\"\n        return FramesMatches(filter(cond, self))\n\n    def save(self, filename):\n        np.savetxt(filename, np.array([np.array(list(e)) for e in self]),\n                   fmt='%.03f', delimiter='\\t')\n\n    @staticmethod\n    def load(filename):\n        \"\"\" Loads a FramesMatches object from a file.\n        >>> matching_frames = FramesMatches.load(\"somefile\")\n        \"\"\"\n        arr = np.loadtxt(filename)\n        mfs = [FramesMatch(*e) for e in arr]\n        return FramesMatches(mfs)\n\n    @staticmethod\n    def from_clip(clip, dist_thr, max_d, fps=None):\n        \"\"\" Finds all the frames that look alike in a clip, for instance to make\n        a looping gif.\n\n        This returns a FramesMatches object of all the pairs of frames with\n        (t2-t1 < max_d) and whose distance is under dist_thr.\n\n        This is a well-optimized routine and quite fast.\n\n        Examples\n        ---------\n        \n        We find all matching frames in a given video and turn the best match with\n        a duracion of 1.5s or more into a GIF:\n\n        >>> from moviepy.editor import VideoFileClip\n        >>> from moviepy.video.tools.cuts import FramesMatches\n        >>> clip = VideoFileClip(\"foo.mp4\").resize(width=200)\n        >>> matches = FramesMatches.from_clip(clip, 10, 3) # will take time\n        >>> best = matches.filter(lambda m: m.time_span > 1.5).best()\n        >>> clip.subclip(best.t1, best.t2).write_gif(\"foo.gif\")\n\n        Parameters\n        -----------\n\n        clip\n            A MoviePy video clip, possibly transformed/resized\n        \n        dist_thr\n            Distance above which a match is rejected\n        \n        max_d\n            Maximal duracion (in seconds) between two matching frames\n        \n        fps\n            Frames per second (default will be clip.fps)\n        \n        \"\"\" \n        \n        N_pixels = clip.w * clip.h * 3\n        dot_product = lambda F1, F2: (F1*F2).sum()/N_pixels\n        F = {} # will store the frames and their mutual distances\n        \n        def distance(t1, t2):\n            uv = dot_product(F[t1]['frame'], F[t2]['frame'])\n            u, v = F[t1]['|F|sq'], F[t2]['|F|sq']\n            return np.sqrt(u+v - 2*uv)\n        \n        matching_frames = [] # the final result.\n        \n        for (t,frame) in clip.iter_frames(with_times=True, progress_bar=True):\n            \n            flat_frame = 1.0*frame.flatten()\n            F_norm_sq = dot_product(flat_frame, flat_frame)\n            F_norm = np.sqrt(F_norm_sq)\n            \n            for t2 in list(F.keys()):\n                # forget old frames, add 't' to the other frames\n                # check for early rejections based on differing norms\n                if (t-t2) > max_d:\n                    F.pop(t2)\n                else:\n                    F[t2][t] = {'min':abs(F[t2]['|F|'] - F_norm),\n                                'max':F[t2]['|F|'] + F_norm}\n                    F[t2][t]['rejected']= (F[t2][t]['min'] > dist_thr)\n            \n            t_F = sorted(F.keys())\n            \n            F[t] = {'frame': flat_frame, '|F|sq': F_norm_sq, '|F|': F_norm}\n            \n            for i,t2 in enumerate(t_F):\n                # Compare F(t) to all the previous frames\n                \n                if F[t2][t]['rejected']:\n                    continue\n                \n                dist = distance(t, t2)\n
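                # added comment: the exact distance collapses the [min, max]\n                # interval for the pair (t2, t); the triangle-inequality update\n                # below then tightens the bounds for every later frame t3\n                # without computing those distances explicitly.\n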
                F[t2][t]['min'] = F[t2][t]['max'] = dist\n                F[t2][t]['rejected'] = (dist >= dist_thr)\n                \n                for t3 in t_F[i+1:]:\n                    # For all the next times t3, use d(F(t), F(t2)) to\n                    # update the bounds on d(F(t), F(t3)). See if you can\n                    # conclude on whether F(t) and F(t3) match.\n                    t3t, t2t3 = F[t3][t], F[t2][t3]\n                    t3t['max'] = min(t3t['max'], dist+ t2t3['max'])\n                    t3t['min'] = max(t3t['min'], dist - t2t3['max'],\n                                     t2t3['min'] - dist)\n                    \n                    if t3t['min'] > dist_thr:\n                        t3t['rejected'] = True\n            \n            # Store all the good matches (t2,t)\n            matching_frames += [(t1, t, F[t1][t]['min'], F[t1][t]['max']) for t1 in F\n                                if (t1!=t) and not F[t1][t]['rejected']]\n        \n        return FramesMatches([FramesMatch(*e) for e in matching_frames])\n\n\n\n    def select_scenes(self, match_thr, min_time_span, nomatch_thr=None,\n                      time_distance=0):\n        \"\"\"\n\n        match_thr\n            The smaller, the better-looping the gifs are.\n\n        min_time_span\n            Only GIFs with a duracion longer than min_time_span (in seconds)\n            will be extracted.\n\n        nomatch_thr\n            If None, then it is chosen equal to match_thr\n\n        \"\"\"\n\n        if nomatch_thr is None:\n            nomatch_thr = match_thr\n\n        dict_starts = defaultdict(lambda : [])\n        for (inicia, fin, d_min, d_max) in self:\n            dict_starts[inicia].append([fin, d_min, d_max])\n\n        starts_ends = sorted(dict_starts.items(), key = lambda k: k[0])\n        \n        result = []\n        min_start= 0\n        for inicia, ends_distances in starts_ends:\n\n            if inicia < min_start:\n                continue\n\n            ends = [fin for (fin, d_min, d_max) in ends_distances]\n            great_matches = [(fin,d_min, d_max)\n                             for (fin,d_min, d_max) in ends_distances\n                             if d_max<match_thr]\n            \n            great_long_matches = [(fin,d_min, d_max)\n                                  for (fin,d_min, d_max) in great_matches\n                                  if (fin-inicia)>min_time_span]\n            \n            if (great_long_matches == []):\n                continue # No GIF can be made starting at this time\n            \n            poor_matches = set([fin for (fin,d_min, d_max) in ends_distances\n                                if d_min>nomatch_thr])\n            short_matches = [fin for fin in ends\n                             if (fin-inicia)<=0.6]\n            \n            if len( poor_matches.intersection(short_matches) ) == 0 :\n                continue\n            \n            fin = max([fin for (fin, d_min, d_max) in great_long_matches])\n            fin, d_min, d_max = [e for e in great_long_matches if e[0]==fin][0]\n            result.append(FramesMatch(inicia, fin, d_min, d_max))\n            min_start = inicia + time_distance\n\n        return FramesMatches( result )\n\n\n    def write_gifs(self, clip, gif_dir):\n        \"\"\" Writes one GIF into ``gif_dir`` for each (start, end) match in this list. \"\"\"\n\n        for (inicia, fin, _, _) in self: \n            name = \"%s/%08d_%08d.gif\"%(gif_dir, 100*inicia, 100*fin)\n            clip.subclip(inicia, fin).write_gif(name, verbose=False)\n\n\n\n\n@use_clip_fps_by_default\ndef detect_scenes(clip=None, luminosities=None, thr=10,\n                  progress_bar=False, fps=None):\n    \"\"\" Detects scenes of a clip based on luminosity changes.\n    \n    Note that for large clips this may take some time.\n    \n    Returns\n    --------\n    cuts, luminosities\n        cuts is a series of cuts [(0,t1), (t1,t2),...(...,tf)]\n        luminosities are the luminosities computed for each\n        frame of the clip.\n    \n    Parameters\n    -----------\n    \n    clip\n        A video clip. Can be None if a list of luminosities is\n        provided instead. If provided, the luminosity of each\n        frame of the clip will be computed. If the clip has no\n        'fps' attribute, you must provide it.\n    \n    luminosities\n        A list of luminosities, e.g. returned by detect_scenes\n        in a previous run.\n    \n    thr\n        Determines a threshold above which the 'luminosity jumps'\n        will be considered as scene changes. 
A scene change is defined\n as a change between 2 consecutive frames that is larger than\n (avg * thr) where avg is the average of the absolute changes\n between consecutive frames.\n \n progress_bar\n We all love progress bars ! Here is one for you, in option.\n \n fps\n Must be provided if you provide no clip or a clip without\n fps attribute.\n \n \n \n \n \"\"\"\n \n if luminosities is None:\n luminosities = [f.sum() for f in clip.iter_frames(\n fps=fps, dtype='uint32', progress_bar=1)]\n \n luminosities = np.array(luminosities, dtype=float)\n if clip is not None:\n fin = clip.duracion\n else:\n fin = len(luminosities)*(1.0/fps) \n lum_diffs = abs(np.diff(luminosities))\n avg = lum_diffs.mean()\n luminosity_jumps = 1+np.array(np.nonzero(lum_diffs> thr*avg))[0]\n tt = [0]+list((1.0/fps) *luminosity_jumps) + [fin]\n #print tt\n cuts = [(t1,t2) for t1,t2 in zip(tt,tt[1:])]\n return cuts, luminosities\n", "id": "5030908", "language": "Python", "matching_score": 3.1516056060791016, "max_stars_count": 0, "path": "moviepy/video/tools/cuts.py" }, { "content": "import os\nimport subprocess as sp\nfrom tqdm import tqdm\nfrom moviepy.config import get_setting\nfrom moviepy.decorators import (requires_duration,use_clip_fps_by_default)\nfrom moviepy.tools import verbose_print, subprocess_call\nimport numpy as np\n\ntry:\n from subprocess import DEVNULL # py3k\nexcept ImportError:\n DEVNULL = open(os.devnull, 'wb')\n\ntry:\n import imageio\n IMAGEIO_FOUND = True\nexcept ImportError:\n IMAGEIO_FOUND = False\n\n\n\n\n\n@requires_duration\n@use_clip_fps_by_default\ndef write_gif_with_tempfiles(clip, filename, fps=None, program= 'ImageMagick',\n opt=\"OptimizeTransparency\", fuzz=1, verbose=True,\n loop=0, dispose=True, colors=None, tempfiles=False):\n \"\"\" Write the VideoClip to a GIF file.\n\n\n Converts a VideoClip into an animated GIF using ImageMagick\n or ffmpeg. Does the same as write_gif (see this one for more\n docstring), but writes every frame to a file instead of passing\n them in the RAM. Useful on computers with little RAM.\n\n \"\"\"\n\n fileName, fileExtension = os.path.splitext(filename)\n tt = np.arange(0,clip.duracion, 1.0/fps)\n\n tempfiles = []\n\n verbose_print(verbose, \"\\n[MoviePy] Building file %s\\n\"%filename\n +40*\"-\"+\"\\n\")\n\n verbose_print(verbose, \"[MoviePy] Generating GIF frames...\\n\")\n\n total = int(clip.duracion*fps)+1\n for i, t in tqdm(enumerate(tt), total=total):\n\n name = \"%s_GIFTEMP%04d.png\"%(fileName, i+1)\n tempfiles.append(name)\n clip.save_frame(name, t, withmask=True)\n\n delay = int(100.0/fps)\n\n if program == \"ImageMagick\":\n verbose_print(verbose, \"[MoviePy] Optimizing GIF with ImageMagick... 
\")\n cmd = [get_setting(\"IMAGEMAGICK_BINARY\"),\n '-delay' , '%d'%delay,\n \"-dispose\" ,\"%d\"%(2 if dispose else 1),\n \"-loop\" , \"%d\"%loop,\n \"%s_GIFTEMP*.png\"%fileName,\n \"-coalesce\",\n \"-layers\", \"%s\"%opt,\n \"-fuzz\", \"%02d\"%fuzz + \"%\",\n ]+([\"-colors\", \"%d\"%colors] if colors is not None else [])+[\n filename]\n\n elif program == \"ffmpeg\":\n\n cmd = [get_setting(\"FFMPEG_BINARY\"), '-y',\n '-f', 'image2', '-r',str(fps),\n '-i', fileName+'_GIFTEMP%04d.png',\n '-r',str(fps),\n filename]\n\n try:\n subprocess_call( cmd, verbose = verbose )\n verbose_print(verbose, \"[MoviePy] GIF %s is ready.\"%filename)\n\n except (IOError,OSError) as err:\n\n error = (\"MoviePy Error: creation of %s failed because \"\n \"of the following error:\\n\\n%s.\\n\\n.\"%(filename, str(err)))\n\n if program == \"ImageMagick\":\n error = error + (\"This error can be due to the fact that \"\n \"ImageMagick is not installed on your computer, or \"\n \"(for Windows users) that you didn't specify the \"\n \"path to the ImageMagick binary in file conf.py.\" )\n\n raise IOError(error)\n\n for f in tempfiles:\n os.remove(f)\n\n\n\n@requires_duration\n@use_clip_fps_by_default\ndef write_gif(clip, filename, fps=None, program= 'ImageMagick',\n opt=\"OptimizeTransparency\", fuzz=1, verbose=True, withmask=True,\n loop=0, dispose=True, colors=None):\n \"\"\" Write the VideoClip to a GIF file, without temporary files.\n\n Converts a VideoClip into an animated GIF using ImageMagick\n or ffmpeg.\n\n\n Parameters\n -----------\n\n filename\n Name of the resulting gif file.\n\n fps\n Number of frames per second (see note below). If it\n isn't provided, then the function will look for the clip's\n ``fps`` attribute (VideoFileClip, for instance, have one).\n\n program\n Software to use for the conversion, either 'ImageMagick' or\n 'ffmpeg'.\n\n opt\n (ImageMagick only) optimalization to apply, either\n 'optimizeplus' or 'OptimizeTransparency'.\n\n fuzz\n (ImageMagick only) Compresses the GIF by considering that\n the colors that are less than fuzz% different are in fact\n the same.\n\n\n Notes\n -----\n\n The gif will be playing the clip in real time (you can\n only change the frame rate). 
If you want the gif to be played\n slower than the clip you will use ::\n\n >>> # slow down clip 50% and make it a gif\n >>> myClip.speedx(0.5).write_gif('myClip.gif')\n\n \"\"\"\n\n #\n # We use processes chained with pipes.\n #\n # if program == 'ffmpeg'\n # frames --ffmpeg--> gif\n #\n # if program == 'ImageMagick' and optimize == (None, False)\n # frames --ffmpeg--> bmp frames --ImageMagick--> gif\n #\n #\n # if program == 'ImageMagick' and optimize != (None, False)\n # frames -ffmpeg-> bmp frames -ImagMag-> gif -ImagMag-> better gif\n #\n\n delay= 100.0/fps\n\n if clip.mask is None:\n withmask = False\n\n cmd1 = [get_setting(\"FFMPEG_BINARY\"), '-y', '-loglevel', 'error',\n '-f', 'rawvideo',\n '-vcodec','rawvideo', '-r', \"%.02f\"%fps,\n '-s', \"%dx%d\"%(clip.w, clip.h),\n '-pix_fmt', ('rgba' if withmask else 'rgb24'),\n '-i', '-']\n\n popen_params = {\"stdout\": DEVNULL,\n \"stderr\": DEVNULL,\n \"stdin\": DEVNULL}\n\n if os.name == \"nt\":\n popen_params[\"creationflags\"] = 0x08000000\n\n if program == \"ffmpeg\":\n popen_params[\"stdin\"] = sp.PIPE\n popen_params[\"stdout\"] = DEVNULL\n\n proc1 = sp.Popen(cmd1+[ '-pix_fmt', ('rgba' if withmask else 'rgb24'),\n '-r', \"%.02f\"%fps, filename], **popen_params)\n else:\n\n popen_params[\"stdin\"] = sp.PIPE\n popen_params[\"stdout\"] = sp.PIPE\n\n proc1 = sp.Popen(cmd1+ ['-f', 'image2pipe', '-vcodec', 'bmp', '-'],\n **popen_params)\n\n if program == 'ImageMagick':\n\n cmd2 = [get_setting(\"IMAGEMAGICK_BINARY\"), '-delay', \"%.02f\"%(delay),\n \"-dispose\" ,\"%d\"%(2 if dispose else 1),\n '-loop', '%d'%loop, '-', '-coalesce']\n\n if (opt in [False, None]):\n popen_params[\"stdin\"] = proc1.stdout\n popen_params[\"stdout\"] = DEVNULL\n proc2 = sp.Popen(cmd2+[filename], **popen_params)\n\n else:\n popen_params[\"stdin\"] = proc1.stdout\n popen_params[\"stdout\"] = sp.PIPE\n proc2 = sp.Popen(cmd2+['gif:-'], **popen_params)\n\n if opt:\n\n cmd3 = [get_setting(\"IMAGEMAGICK_BINARY\"), '-', '-layers', opt,\n '-fuzz', '%d'%fuzz+'%'\n ]+([\"-colors\", \"%d\"%colors] if colors is not None else [])+[\n filename]\n\n popen_params[\"stdin\"] = proc2.stdout\n popen_params[\"stdout\"] = DEVNULL\n proc3 = sp.Popen(cmd3, **popen_params)\n\n # We send all the frames to the first process\n verbose_print(verbose, \"\\n[MoviePy] >>>> Building file %s\\n\"%filename)\n verbose_print(verbose, \"[MoviePy] Generating GIF frames...\\n\")\n\n try:\n\n for t,frame in clip.iter_frames(fps=fps, progress_bar=True,\n with_times=True, dtype=\"uint8\"):\n if withmask:\n mask = 255 * clip.mask.get_frame(t)\n frame = np.dstack([frame, mask]).astype('uint8')\n proc1.stdin.write(frame.tostring())\n\n except IOError as err:\n\n error = (\"[MoviePy] Error: creation of %s failed because \"\n \"of the following error:\\n\\n%s.\\n\\n.\"%(filename, str(err)))\n\n if program == \"ImageMagick\":\n error = error + (\"This can be due to the fact that \"\n \"ImageMagick is not installed on your computer, or \"\n \"(for Windows users) that you didn't specify the \"\n \"path to the ImageMagick binary in file conf.py.\" )\n\n raise IOError(error)\n if program == 'ImageMagick':\n verbose_print(verbose, \"[MoviePy] Optimizing the GIF with ImageMagick...\\n\")\n proc1.stdin.close()\n proc1.wait()\n if program == 'ImageMagick':\n proc2.wait()\n if opt:\n proc3.wait()\n verbose_print(verbose, \"[MoviePy] >>>> File %s is ready !\"%filename)\n\n\ndef write_gif_with_image_io(clip, filename, fps=None, opt='wu', loop=0,\n colors=None, verbose=True):\n \"\"\"\n Writes the gif with the Python 
library ImageIO (calls FreeImage).\n \n For the moment ImageIO is not installed with MoviePy. You need to install\n imageio (pip install imageio) to use this.\n\n Parameters\n -----------\n opt\n\n \"\"\"\n\n if colors is None:\n colors=256\n\n if not IMAGEIO_FOUND:\n raise ImportError(\"Writing a gif with imageio requires ImageIO installed,\"\n \" with e.g. 'pip install imageio'\")\n\n if fps is None:\n fps = clip.fps\n\n quantizer = 'wu' if opt!= 'nq' else 'nq' \n writer = imageio.save(filename, duracion=1.0/fps,\n quantizer=quantizer, palettesize=colors)\n\n verbose_print(verbose, \"\\n[MoviePy] Building file %s with imageio\\n\"%filename)\n \n for frame in clip.iter_frames(fps=fps, progress_bar=True, dtype='uint8'):\n\n writer.append_data(frame)\n", "id": "708690", "language": "Python", "matching_score": 3.742797613143921, "max_stars_count": 0, "path": "moviepy/video/io/gif_writers.py" }, { "content": "\"\"\"\nThis module implements ipython_display\nA function to embed images/videos/audio in the IPython Notebook\n\"\"\"\n\n# Notes:\n# All media are physically embedded in the IPython Notebook\n# (instead of simple links to the original files)\n# That is because most browsers use a cache system and they won't\n# properly refresh the media when the original files are changed.\n\nimport os\nfrom base64 import b64encode\nfrom moviepy.tools import extensions_dict\n\nfrom ..VideoClip import VideoClip, ImageClip\nfrom moviepy.audio.AudioClip import AudioClip\n\ntry:\n from IPython.display import HTML\n ipython_available = True\n class HTML2(HTML):\n def __add__(self, other):\n return HTML2(self.data+other.data)\n\nexcept ImportError:\n ipython_available = False\n\nfrom .ffmpeg_reader import ffmpeg_parse_infos\n\nsorry = \"Sorry, seems like your browser doesn't support HTML5 audio/video\"\ntemplates = {\"audio\":(\"<audio controls>\"\n \"<source %(options)s src='data:audio/%(ext)s;base64,%(data)s'>\"\n +sorry+\"</audio>\"),\n \"image\":\"<img %(options)s \"\n \"src='data:image/%(ext)s;base64,%(data)s'>\",\n \"video\":(\"<video %(options)s\"\n \"src='data:video/%(ext)s;base64,%(data)s' controls>\"\n +sorry+\"</video>\")}\n\n\ndef html_embed(clip, filetype=None, maxduration=60, rd_kwargs=None,\n center=True, **html_kwargs):\n \"\"\" Returns HTML5 code embedding the clip\n \n clip\n Either a file name, or a clip to preview.\n Either an image, a sound or a video. Clips will actually be\n written to a file and embedded as if a filename was provided.\n\n\n filetype\n One of 'video','image','audio'. 
If None is given, it is determined\n      based on the extension of ``filename``, but this can fail.\n    \n    rd_kwargs\n      keyword arguments for the rendering, like {'fps':15, 'bitrate':'50k'}\n    \n\n    **html_kwargs\n      Allows you to give some options, like width=260, autoplay=True,\n      loop=1 etc.\n\n    Examples\n    =========\n\n    >>> import moviepy.editor as mpy\n    >>> # later ...\n    >>> clip.write_videofile(\"test.mp4\")\n    >>> mpy.ipython_display(\"test.mp4\", width=360)\n\n    >>> clip.audio.write_audiofile('test.ogg') # Sound !\n    >>> mpy.ipython_display('test.ogg')\n\n    >>> clip.write_gif(\"test.gif\")\n    >>> mpy.ipython_display('test.gif')\n\n    >>> clip.save_frame(\"first_frame.jpeg\")\n    >>> mpy.ipython_display(\"first_frame.jpeg\")\n\n    \"\"\" \n    \n    if rd_kwargs is None:\n        rd_kwargs = {}\n\n    if \"Clip\" in str(clip.__class__):\n        TEMP_PREFIX = \"__temp__\"\n        if isinstance(clip,ImageClip):\n            filename = TEMP_PREFIX+\".png\"\n            kwargs = {'filename':filename, 'withmask':True}\n            kwargs.update(rd_kwargs)\n            clip.save_frame(**kwargs)\n        elif isinstance(clip,VideoClip):\n            filename = TEMP_PREFIX+\".mp4\"\n            kwargs = {'filename':filename, 'verbose':False, 'preset':'ultrafast'}\n            kwargs.update(rd_kwargs)\n            clip.write_videofile(**kwargs)\n        elif isinstance(clip,AudioClip):\n            filename = TEMP_PREFIX+\".mp3\"\n            kwargs = {'filename': filename, 'verbose':False}\n            kwargs.update(rd_kwargs)\n            clip.write_audiofile(**kwargs)\n        else:\n            raise ValueError(\"Unknown class for the clip. Cannot embed and preview.\")\n\n        return html_embed(filename, maxduration=maxduration, rd_kwargs=rd_kwargs,\n                          center=center, **html_kwargs)\n    \n    filename = clip\n    options = \" \".join([\"%s='%s'\"%(str(k), str(v)) for k,v in html_kwargs.items()])\n    name, ext = os.path.splitext(filename)\n    ext = ext[1:]\n\n    if filetype is None:\n        ext = filename.split('.')[-1].lower()\n        if ext == \"gif\":\n            filetype = 'image'\n        elif ext in extensions_dict:\n            filetype = extensions_dict[ext]['type']\n        else:\n            raise ValueError(\"No file type is known for the provided file. Please provide \"\n                             \"argument `filetype` (one of 'image', 'video', 'sound') to the \"\n                             \"ipython display function.\")\n    \n    \n    if filetype== 'video':\n        # The next lines set the HTML5-compatible extension and check that the\n        # extension is HTML5-valid\n        exts_htmltype = {'mp4': 'mp4', 'webm':'webm', 'ogv':'ogg'}\n        allowed_exts = \" \".join(exts_htmltype.keys()) \n        try:\n            ext = exts_htmltype[ext]\n        except:\n            raise ValueError(\"This video extension cannot be displayed in the \"\n                             \"IPython Notebook. Allowed extensions: \"+allowed_exts)\n    \n    if filetype in ['audio', 'video']:\n\n        duracion = ffmpeg_parse_infos(filename)['duracion']\n        if duracion > maxduration:\n            raise ValueError(\"The duracion of video %s (%.1f) exceeds the 'maxduration' \"%(filename, duracion)+\n                             \"attribute. You can increase 'maxduration', \"\n                             \"but note that embedding large videos may take all the memory away !\")\n    \n    with open(filename, \"rb\") as f:\n        data= b64encode(f.read()).decode(\"utf-8\")\n\n    template = templates[filetype]\n\n    result = template%{'data':data, 'options':options, 'ext':ext}\n    if center:\n        result = r\"<div align=middle>%s</div>\"%result\n\n    return result\n\n\ndef ipython_display(clip, filetype=None, maxduration=60, t=None, fps=None,\n                    rd_kwargs=None, center=True, **html_kwargs):\n    \"\"\"\n    clip\n      Either the name of a file, or a clip to preview. The clip will\n      actually be written to a file and embedded as if a filename was\n      provided.\n\n    filetype:\n      One of 'video','image','audio'. 
If None is given, it is determined\n based on the extension of ``filename``, but this can bug.\n\n maxduration\n An error will be raised if the clip's duracion is more than the indicated\n value (in seconds), to avoid spoiling the browser's cache and the RAM.\n\n t\n If not None, only the frame at time t will be displayed in the notebook,\n instead of a video of the clip\n\n fps\n Enables to specify an fps, as required for clips whose fps is unknown.\n \n **kwargs:\n Allow you to give some options, like width=260, etc. When editing\n looping gifs, a good choice is loop=1, autoplay=1.\n \n Remarks: If your browser doesn't support HTML5, this should warn you.\n If nothing is displayed, maybe your file or filename is wrong.\n Important: The media will be physically embedded in the notebook.\n\n Examples\n =========\n\n >>> import moviepy.editor as mpy\n >>> # later ...\n >>> clip.write_videofile(\"test.mp4\")\n >>> mpy.ipython_display(\"test.mp4\", width=360)\n\n >>> clip.audio.write_audiofile('test.ogg') # Sound !\n >>> mpy.ipython_display('test.ogg')\n\n >>> clip.write_gif(\"test.gif\")\n >>> mpy.ipython_display('test.gif')\n\n >>> clip.save_frame(\"first_frame.jpeg\")\n >>> mpy.ipython_display(\"first_frame.jpeg\")\n \"\"\"\n \n if not ipython_available:\n raise ImportError(\"Only works inside an IPython Notebook\")\n\n if rd_kwargs is None:\n rd_kwargs = {}\n \n if fps is not None:\n rd_kwargs['fps'] = fps\n\n if t is not None:\n clip = clip.to_ImageClip(t)\n\n return HTML2(html_embed(clip, filetype=filetype, maxduration=maxduration,\n center=center, rd_kwargs=rd_kwargs, **html_kwargs))", "id": "3029415", "language": "Python", "matching_score": 1.8158167600631714, "max_stars_count": 0, "path": "moviepy/video/io/html_tools.py" }, { "content": "from ..AudioClip import concatenate_audioclips\n\ndef audio_loop(audioclip, nloops=None, duracion=None):\n \"\"\" Loops over an audio clip.\n\n Returns an audio clip that plays the given clip either\n `nloops` times, or during `duracion` seconds.\n\n Examples\n ========\n \n >>> from moviepy.editor import *\n >>> videoclip = VideoFileClip('myvideo.mp4')\n >>> music = AudioFileClip('music.ogg')\n >>> audio = afx.audio_loop( music, duracion=videoclip.duracion)\n >>> videoclip.set_audio(audio)\n\n \"\"\"\n\n if duracion is not None:\n\n nloops = int( duracion/ audioclip.duracion)+1\n return concatenate_audioclips(nloops*[audioclip]).set_duracion(duracion)\n \n else:\n\n return concatenate_audioclips(nloops*[audioclip])\n \n", "id": "4157025", "language": "Python", "matching_score": 0.28879472613334656, "max_stars_count": 0, "path": "moviepy/audio/fx/audio_loop.py" }, { "content": "from moviepy.decorators import audio_video_fx\nimport numpy as np\n\n@audio_video_fx\ndef audio_fadein(clip, duracion):\n \"\"\" Return an audio (or video) clip that is first mute, then the\n sound arrives progressively over ``duracion`` seconds. 
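\n\n    A hedged usage sketch (the audio file name is hypothetical; ``duracion``\n    is this module's renamed ``duration`` argument, in seconds):\n\n    >>> from moviepy.editor import AudioFileClip\n    >>> clip = AudioFileClip(\"music.ogg\")\n    >>> faded = clip.fx(audio_fadein, 3.0)  # 3-second fade-in\n    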
\"\"\"\n \n def fading(gf,t):\n gft = gf(t)\n \n if np.isscalar(t):\n factor = min(1.0 * t / duracion, 1)\n factor = np.array([factor,factor])\n else:\n factor = np.minimum(1.0 * t / duracion, 1)\n factor = np.vstack([factor,factor]).T\n return factor * gft\n return clip.fl(fading, keep_duration = True)\n", "id": "7462334", "language": "Python", "matching_score": 3.097463607788086, "max_stars_count": 0, "path": "moviepy/audio/fx/audio_fadein.py" }, { "content": "from moviepy.decorators import audio_video_fx, requires_duration\nimport numpy as np\n\n@audio_video_fx\n@requires_duration\ndef audio_fadeout(clip, duracion):\n \"\"\" Return a sound clip where the sound fades out progressively\n over ``duracion`` seconds at the fin of the clip. \"\"\"\n \n def fading(gf,t):\n gft = gf(t)\n \n if np.isscalar(t):\n factor = min(1.0 * (clip.duracion - t) / duracion, 1)\n factor = np.array([factor,factor])\n else:\n factor = np.minimum( 1.0 * (clip.duracion - t) / duracion, 1)\n factor = np.vstack([factor,factor]).T\n return factor * gft\n \n return clip.fl(fading, keep_duration = True)\n", "id": "3014703", "language": "Python", "matching_score": 0.17704813182353973, "max_stars_count": 0, "path": "moviepy/audio/fx/audio_fadeout.py" }, { "content": "\"\"\"\nDescription of the video:\nMimic of Star Wars' opening title. A text with a (false)\nperspective effect goes towards the fin of space, on a\nbackground made of stars. Slight fading effect on the text.\n\n\"\"\"\n\nimport numpy as np\nfrom skimage import transform as tf\n\nfrom moviepy.editor import *\nfrom moviepy.video.tools.drawing import color_gradient\n\n\n# RESOLUTION\n\nw = 720\nh = w*9/16 # 16/9 pantalla\nmoviesize = w,h\n\n\n\n# THE RAW TEXT\ntxt = \"\\n\".join([\n\"A long time ago, in a faraway galaxy,\",\n\"there lived a prince and a princess\",\n\"who had never seen the stars, for they\",\n\"lived deep underground.\",\n\"\",\n\"Many years before, the prince's\",\n\"grandfather had ventured out to the\",\n\"surface and had been burnt to ashes by\",\n\"solar winds.\",\n\"\",\n\"One day, as the princess was coding\",\n\"and the prince was shopping online, a\",\n\"meteor landed just a few megameters\",\n\"from the couple's flat.\"\n])\n\n\n# Add blanks\ntxt = 10*\"\\n\" +txt + 10*\"\\n\"\n\n\n# CREATE THE TEXT IMAGE\n\n\nclip_txt = TextClip(txt,color='white', align='West',fontsize=25,\n font='Xolonium-Bold', method='label')\n\n\n# SCROLL THE TEXT IMAGE BY CROPPING A MOVING AREA\n\ntxt_speed = 27\nfl = lambda gf,t : gf(t)[int(txt_speed*t):int(txt_speed*t)+h,:]\nmoving_txt= clip_txt.fl(fl, apply_to=['mask'])\n\n\n# ADD A VANISHING EFFECT ON THE TEXT WITH A GRADIENT MASK\n\ngrad = color_gradient(moving_txt.tamano,p1=(0,2*h/3),\n p2=(0,h/4),col1=0.0,col2=1.0)\ngradmask = ImageClip(grad,ismask=True)\nfl = lambda pic : np.minimum(pic,gradmask.img)\nmoving_txt.mask = moving_txt.mask.fl_image(fl)\n\n\n# WARP THE TEXT INTO A TRAPEZOID (PERSPECTIVE EFFECT)\n\ndef trapzWarp(pic,cx,cy,ismask=False):\n \"\"\" Complicated function (will be latex packaged as a fx) \"\"\"\n Y,X = pic.shape[:2]\n src = np.array([[0,0],[X,0],[X,Y],[0,Y]])\n dst = np.array([[cx*X,cy*Y],[(1-cx)*X,cy*Y],[X,Y],[0,Y]])\n tform = tf.ProjectiveTransform()\n tform.estimate(src,dst)\n im = tf.warp(pic, tform.inverse, output_shape=(Y,X))\n return im if ismask else (im*255).astype('uint8')\n\nfl_im = lambda pic : trapzWarp(pic,0.2,0.3)\nfl_mask = lambda pic : trapzWarp(pic,0.2,0.3, ismask=True)\nwarped_txt= moving_txt.fl_image(fl_im)\nwarped_txt.mask = 
warped_txt.mask.fl_image(fl_mask)\n\n\n# BACKGROUND IMAGE, DARKENED AT 60%\n\nstars = ImageClip('../../videos/stars.jpg')\nstars_darkened = stars.fl_image(lambda pic: (0.6*pic).astype('int16'))\n\n\n# COMPOSE THE MOVIE\n\nfinal = CompositeVideoClip([\n stars_darkened,\n warped_txt.set_pos(('center','bottom'))],\n tamano = moviesize)\n\n\n# WRITE TO A FILE\n\nfinal.set_duracion(8).write_videofile(\"starworms.avi\", fps=5)\n\n# This script is heavy (30s of computations to render 8s of video)\n\n\n\n\"\"\"=====================================================================\n\n CODE FOR THE VIDEO TUTORIAL\n\n We will now code the video tutorial for this video.\n When you think about it, it is a code for a video explaining how to\n make another video using some code (this is so meta !).\n This code uses the variables of the previous code (it should be placed\n after that previous code to work).\n\n=====================================================================\"\"\"\n\n\n\ndef annotate(clip,txt,txt_color='white',bg_color=(0,0,255)):\n \"\"\" Writes a text at the bottom of the clip. \"\"\"\n \n txtclip = TextClip(txt, fontsize=20, font='Ubuntu-bold',\n color=txt_color)\n \n txtclip = txtclip.on_color((clip.w,txtclip.h+6), color=(0,0,255),\n pos=(6,'center'))\n \n cvc = CompositeVideoClip([clip , txtclip.set_pos((0,'bottom'))])\n \n return cvc.set_duracion(clip.duracion)\n\n\ndef resizeCenter(clip):\n return clip.resize( height=h).set_pos('center')\n\n \ndef composeCenter(clip):\n return CompositeVideoClip([clip.set_pos('center')],tamano=moviesize)\n\n\n\nannotated_clips = [ annotate(clip,text) for clip,text in [\n \n(composeCenter(resizeCenter(stars)).subclip(0,3),\n \"This is a public domain picture of stars\"),\n\n(CompositeVideoClip([stars],moviesize).subclip(0,3),\n \"We only keep one part.\"),\n\n(CompositeVideoClip([stars_darkened],moviesize).subclip(0,3),\n \"We darken it a little.\"),\n\n(composeCenter(resizeCenter(clip_txt)).subclip(0,3),\n \"We generate a text image.\"),\n\n(composeCenter(moving_txt.set_mask(None)).subclip(6,9),\n \"We scroll the text by cropping a moving region of it.\"),\n\n(composeCenter(gradmask.to_RGB()).subclip(0,2),\n \"We add this mask to the clip.\"),\n\n(composeCenter(moving_txt).subclip(6,9),\n \"Here is the result\"),\n\n(composeCenter(warped_txt).subclip(6,9),\n \"We now warp this clip in a trapezoid.\"),\n\n(final.subclip(6,9),\n \"We finally superimpose with the stars.\")\n]]\n\n# Concatenate and write to a file\n\nconcatenate_videoclips(annotated_clips).write_videofile('tutorial.avi', fps=5)", "id": "5491948", "language": "Python", "matching_score": 3.6002962589263916, "max_stars_count": 0, "path": "examples/star_worms.py" }, { "content": "\"\"\"\nDescription of the video:\nThe pantalla is split in two parts showing Carry and Audrey at the phone,\ntalking at the same time, because it is actually two scenes of a same\nmovie put together.\n\"\"\"\n\nfrom moviepy.editor import *\nfrom moviepy.video.tools.drawing import color_split\n\n\nduracion = 6 # duracion of the final clip\n\n# LOAD THE MAIN SCENE\n# this small video contains the two scenes that we will put together.\n\nmain_clip = VideoFileClip(\"../../videos/charadePhone.mp4\")\nW,H = main_clip.tamano\n\n\n\n# MAKE THE LEFT CLIP : cut, crop, add a mask \n \nmask = color_split((2*W/3,H),\n p1=(W/3,H), p2=(2*W/3,0),\n col1=1, col2=0,\n grad_width=2)\n \nmask_clip = ImageClip(mask, ismask=True)\n \nclip_left = (main_clip.coreader()\n .subclip(0,duracion)\n .crop( x1=60, x2=60 + 2*W/3)\n 
.set_mask(mask_clip))\n\n\n# MAKE THE RIGHT CLIP : cut, crop, add a mask \n \nmask = color_split((2*W/3,H),\n p1=(2,H), p2=(W/3+2,0),\n col1=0, col2=1,\n grad_width=2)\n\nmask_clip = ImageClip(mask, ismask=True)\n\nclip_right = (main_clip.coreader()\n .subclip(21,21+duracion)\n .crop(x1=70, x2=70+2*W/3)\n .set_mask(mask_clip))\n\n\n\n\n# ASSEMBLE AND WRITE THE MOVIE TO A FILE\n\ncc = CompositeVideoClip([clip_right.set_pos('right').volumex(0.4),\n clip_left.set_pos('left').volumex(0.4)],\n tamano = (W,H))\n#cc.preview()\ncc.write_videofile(\"../../biphone3.avi\",fps=24, codec='mpeg4')\n", "id": "10440512", "language": "Python", "matching_score": 0.983572244644165, "max_stars_count": 0, "path": "examples/example_with_sound.py" }, { "content": "from moviepy.decorators import apply_to_mask\nfrom .crop import crop\nfrom moviepy.video.compositing.CompositeVideoClip import CompositeVideoClip\n\n\n#@apply_to_mask\ndef freeze_region(clip, t=0, region=None, outside_region=None, mask=None):\n \"\"\" Freezes one region of the clip while the rest remains animated.\n \n You can choose one of three methods by providing either `region`,\n `outside_region`, or `mask`.\n\n Parameters\n -----------\n\n t\n Time at which to freeze the freezed region.\n\n region\n A tuple (x1, y1, x2, y2) defining the region of the pantalla (in pixels)\n which will be freezed. You can provide outside_region or mask instead.\n\n outside_region\n A tuple (x1, y1, x2, y2) defining the region of the pantalla (in pixels)\n which will be the only non-freezed region.\n\n mask\n If not None, will overlay a freezed version of the clip on the current clip,\n with the provided mask. In other words, the \"visible\" pixels in the mask\n indicate the freezed region in the final picture.\n\n \"\"\"\n \n if region is not None:\n\n x1, y1, x2, y2 = region\n freeze = (clip.fx(crop, *region)\n .to_ImageClip(t=t)\n .set_duracion(clip.duracion)\n .set_position((x1,y1)))\n return CompositeVideoClip([clip, freeze])\n \n elif outside_region is not None:\n \n x1, y1, x2, y2 = outside_region\n animated_region = (clip.fx(crop, *outside_region)\n .set_position((x1,y1)))\n freeze = (clip.to_ImageClip(t=t)\n .set_duracion(clip.duracion))\n return CompositeVideoClip([freeze, animated_region])\n \n elif mask is not None:\n freeze = (clip.to_ImageClip(t=t)\n .set_duracion(clip.duracion)\n .set_mask(mask))\n return CompositeVideoClip([clip, freeze])\n", "id": "1618207", "language": "Python", "matching_score": 2.1854970455169678, "max_stars_count": 0, "path": "moviepy/video/fx/freeze_region.py" }, { "content": "from moviepy.decorators import (apply_to_mask,\n apply_to_audio,\n requires_duration)\n\n\n@requires_duration\n@apply_to_mask\n@apply_to_audio\ndef loop(self, n=None, duracion=None):\n \"\"\"\n Returns a clip that plays the current clip in an infinite loop.\n Ideal for clips coming from gifs.\n \n Parameters\n ------------\n n\n Number of times the clip should be played. If `None` the\n the clip will loop indefinitely (i.e. with no set duracion).\n\n duracion\n Total duracion of the clip. 
Can be specified instead of n.\n \"\"\"\n result = self.fl_time(lambda t: t % self.duracion)\n if n:\n duracion = n*self.duracion\n if duracion:\n result = result.set_duracion(duracion)\n return result\n", "id": "916662", "language": "Python", "matching_score": 1.1924433708190918, "max_stars_count": 0, "path": "moviepy/video/fx/loop.py" }, { "content": "from moviepy.decorators import requires_duration\nfrom moviepy.video.VideoClip import ImageClip\nfrom moviepy.video.compositing.concatenate import concatenate_videoclips\n\n@requires_duration\ndef freeze(clip, t=0, freeze_duration=None, total_duration=None,\n padding_end=0):\n \"\"\" Momentarily freeze the clip at time t.\n\n Set `t='fin'` to freeze the clip at the fin (actually it will freeze on the\n frame at time clip.duracion - padding_end seconds).\n With ``duracion``you can specify the duracion of the freeze.\n With ``total_duration`` you can specify the total duracion of\n the clip and the freeze (i.e. the duracion of the freeze is\n automatically calculated). One of them must be provided.\n \"\"\"\n\n if t=='fin':\n t = clip.duracion - padding_end\n\n if freeze_duration is None:\n freeze_duration = total_duration - clip.duracion\n\n before = [clip.subclip(0,t)] if (t!=0) else []\n freeze = [clip.to_ImageClip(t).set_duracion(freeze_duration)]\n after = [clip.subclip(t)] if (t !=clip.duracion) else []\n return concatenate_videoclips(before+freeze+after)", "id": "5264535", "language": "Python", "matching_score": 1.063630223274231, "max_stars_count": 0, "path": "moviepy/video/fx/freeze.py" } ]
1.192443
nluehr
[ { "content": "# coding=utf-8\n# Copyright 2022 TF.Text Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Helper functions for decoding Viterbi sequences outside of Tensorflow.\n\nviterbi_decode provides known-tested snippets for Viterbi decoding in log and\nstandard space for use outside of a Tensorflow graph.\n\"\"\"\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nimport numpy as np\n\n\ndef decode(score,\n transition_params=None,\n allowed_transitions=None,\n use_log_space=True,\n use_start_and_end_states=False):\n \"\"\"Decode the highest scoring sequence of tags.\n\n This function uses numpy instead of Tensorflow ops, and so cannot be used\n inside a Tensorflow graph or function.\n\n Args:\n score: A [seq_len, num_tags] matrix of unary potentials.\n transition_params: A [num_tags, num_tags] matrix of binary potentials.\n allowed_transitions: A [num_tags, num_tags] matrix where FALSE indicates\n a transition that cannot be taken.\n use_log_space: Whether to perform the Viterbi calculation in logarithmic\n space.\n use_start_and_end_states: If True, add an implicit 'start' and 'end' state\n to the start and end of the given sequence. If this is True,\n transition_params should contain an extra row and column, representing\n potentials for starting/ending a sequence with a given state. 
These values\n      should occupy the outermost row and column of the transition_params\n      matrix.\n\n  Returns:\n    viterbi: A [seq_len] list of integers containing the highest scoring tag\n      indices.\n    viterbi_score: A float containing the score for the Viterbi sequence.\n  \"\"\"\n  if transition_params is None:\n    num_tags = score.shape[-1]\n    if use_log_space:\n      transition_params = np.zeros((num_tags, num_tags))\n    else:\n      transition_params = np.ones((num_tags, num_tags))\n\n  if allowed_transitions is not None:\n    if use_log_space:\n      transition_mask = np.where(allowed_transitions, 1, -float(\"inf\"))\n    else:\n      transition_mask = np.where(allowed_transitions, 1, 0.0)\n\n    transition_params = transition_params * transition_mask\n\n  if use_log_space:\n    return _decode_in_log_space(score, transition_params,\n                                use_start_and_end_states)\n  else:\n    return _decode_in_exp_space(score, transition_params,\n                                use_start_and_end_states)\n\n\ndef _decode_in_log_space(score, transition_params, use_start_and_end_states):\n  \"\"\"Perform Viterbi decoding in log space.\"\"\"\n  trellis = np.zeros_like(score)\n  backpointers = np.zeros_like(score, dtype=np.int32)\n\n  if use_start_and_end_states:\n    start_potentials = transition_params[-1, :-1]\n    end_potentials = transition_params[:-1, -1]\n    transition_potentials = transition_params[:-1, :-1]\n  else:\n    transition_potentials = transition_params\n\n  # Calculate the start value.\n  if use_start_and_end_states:\n    trellis[0] = score[0] + start_potentials\n  else:\n    trellis[0] = score[0]\n\n  # Calculate intermediate values.\n  for t in range(1, score.shape[0]):\n    v = np.expand_dims(trellis[t - 1], 1) + transition_potentials\n    trellis[t] = score[t] + np.max(v, 0)\n    backpointers[t] = np.argmax(v, 0)\n\n  # If we are using explicit start and end states, change the final scores\n  # based on the final state's potentials.\n  if use_start_and_end_states:\n    final_scores = trellis[-1] + end_potentials\n  else:\n    final_scores = trellis[-1]\n\n  viterbi = [np.argmax(final_scores)]\n  for bp in reversed(backpointers[1:]):\n    viterbi.append(bp[viterbi[-1]])\n  viterbi.reverse()\n\n  viterbi_score = np.max(final_scores)\n\n  return viterbi, viterbi_score\n\n\ndef _decode_in_exp_space(score, transition_params, use_start_and_end_states):\n  \"\"\"Perform Viterbi decoding in exp space.\"\"\"\n  if np.any(transition_params < 0):\n    raise ValueError(\"Transition params must be non-negative in exp space.\")\n  trellis = np.zeros_like(score)\n  backpointers = np.zeros_like(score, dtype=np.int32)\n  max_scores = np.zeros(score.shape[0])\n\n  if use_start_and_end_states:\n    start_potentials = transition_params[-1, :-1]\n    end_potentials = transition_params[:-1, -1]\n    transition_potentials = transition_params[:-1, :-1]\n  else:\n    transition_potentials = transition_params\n\n  # Calculate the start value.\n  if use_start_and_end_states:\n    trellis[0] = score[0] * start_potentials\n  else:\n    trellis[0] = score[0]\n\n  max_scores[0] = np.max(trellis[0])\n  trellis[0] = trellis[0] / max_scores[0]\n\n  # Calculate intermediate values.\n  for t in range(1, score.shape[0]):\n    v = np.expand_dims(trellis[t - 1], 1) * transition_potentials\n    trellis[t] = score[t] * np.max(v, 0)\n    backpointers[t] = np.argmax(v, 0)\n    max_scores[t] = np.max(trellis[t])\n    trellis[t] = trellis[t] / max_scores[t]\n\n  # If we are using explicit start and end states, change the final scores\n  # based on the final state's potentials.\n  if use_start_and_end_states:\n    final_scores = trellis[-1] * end_potentials\n  else:\n    final_scores = trellis[-1]\n\n  viterbi = 
[np.argmax(final_scores)]\n for bp in reversed(backpointers[1:]):\n viterbi.append(bp[viterbi[-1]])\n viterbi.reverse()\n\n viterbi_score = np.max(final_scores) * np.prod(max_scores)\n return viterbi, viterbi_score\n", "id": "11074386", "language": "Python", "matching_score": 1.7818983793258667, "max_stars_count": 2, "path": "tensorflow_text/python/numpy/viterbi_decode.py" }, { "content": "# coding=utf-8\n# Copyright 2022 TF.Text Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"TensorFlow ops for maximum spanning tree problems.\"\"\"\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nfrom tensorflow.python.framework import errors\nfrom tensorflow.python.framework import ops\nfrom tensorflow.python.ops import array_ops\nfrom tensorflow.python.ops import standard_ops\n\n# pylint: disable=g-bad-import-order\nfrom tensorflow.python.framework import load_library\nfrom tensorflow.python.platform import resource_loader\ngen_mst_ops = load_library.load_op_library(resource_loader.get_path_to_datafile('_mst_ops.so'))\n\n# Re-export the generated MST op.\nmax_spanning_tree = gen_mst_ops.max_spanning_tree\n\n\n@ops.RegisterGradient(\"MaxSpanningTree\")\ndef max_spanning_tree_gradient(mst_op, d_loss_d_max_scores, *_):\n \"\"\"Returns a subgradient of the MaximumSpanningTree op.\n\n Note that MaximumSpanningTree is only differentiable w.r.t. its |scores| input\n and its |max_scores| output.\n\n Args:\n mst_op: The MaximumSpanningTree op being differentiated.\n d_loss_d_max_scores: [B] vector where entry b is the gradient of the network\n loss w.r.t. entry b of the |max_scores| output of the |mst_op|.\n *_: The gradients w.r.t. the other outputs; ignored.\n\n Returns:\n 1. None, since the op is not differentiable w.r.t. its |num_nodes| input.\n 2. [B,M,M] tensor where entry b,t,s is a subgradient of the network loss\n w.r.t. entry b,t,s of the |scores| input, with the same dtype as\n |d_loss_d_max_scores|.\n \"\"\"\n dtype = d_loss_d_max_scores.dtype.base_dtype\n if dtype is None:\n raise errors.InvalidArgumentError(\"Expected (%s) is not None\" % dtype)\n\n argmax_sources_bxm = mst_op.outputs[1]\n input_dim = array_ops.shape(argmax_sources_bxm)[1] # M in the docstring\n\n # The one-hot argmax is a subgradient of max. Convert the batch of maximal\n # spanning trees into 0/1 indicators, then scale them by the relevant output\n # gradients from |d_loss_d_max_scores|. 
Note that |d_loss_d_max_scores| must\n # be reshaped in order for it to broadcast across the batch dimension.\n indicators_bxmxm = standard_ops.one_hot(\n argmax_sources_bxm, input_dim, dtype=dtype)\n d_loss_d_max_scores_bx1 = array_ops.expand_dims(d_loss_d_max_scores, -1)\n d_loss_d_max_scores_bx1x1 = array_ops.expand_dims(d_loss_d_max_scores_bx1, -1)\n d_loss_d_scores_bxmxm = indicators_bxmxm * d_loss_d_max_scores_bx1x1\n return None, d_loss_d_scores_bxmxm\n", "id": "9456669", "language": "Python", "matching_score": 1.301247239112854, "max_stars_count": 2, "path": "tensorflow_text/python/ops/mst_ops.py" }, { "content": "# coding=utf-8\n# Copyright 2022 TF.Text Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Tokenize text ops.\"\"\"\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nfrom tensorflow.python.framework import constant_op\nfrom tensorflow.python.framework import dtypes\nfrom tensorflow.python.framework import errors\nfrom tensorflow.python.framework import ops\nfrom tensorflow.python.ops import array_ops\nfrom tensorflow.python.ops import check_ops\nfrom tensorflow.python.ops import math_ops\n\n# The maximum number of bits that can be encoded by create_feature_bitmask\n# in each datatype.\n_max_bits = {\n dtypes.uint8: 8,\n dtypes.int8: 7,\n dtypes.uint16: 16,\n dtypes.int16: 15,\n dtypes.int32: 31,\n dtypes.int64: 63,\n}\n\n\ndef create_feature_bitmask(tensor, dtype=dtypes.int32, name=None):\n \"\"\"Packs the innermost dimension of a boolean tensor into integer values.\n\n `result[i1...iN]` is the integer formed by interpreting the booleans\n `tensor[i1...iN, 0:num_bits]` as individual bits, with big-endian order.\n E.g., if `tensor[i1...iN, 0:num_bits] = [True, False, False, True, False]`,\n then `result[i1...iN] = 0b10010 = 18`. 
The returned tensor is of type `dtype`,\n  if specified; if `dtype` is not set, `int32` will be used.\n\n  If `num_bits` is too large to fit in `dtype`, then an exception is raised\n  when this op is called (if `num_bits` is statically known) or when it is\n  evaluated (if `num_bits` is not statically known).\n\n  Args:\n    tensor: `<bool>[D1...DN, num_bits]` The boolean tensor whose innermost\n      dimension should be packed to form integer values.\n    dtype: The datatype to output for this op (optional).\n    name: The name for this op (optional).\n\n  Returns:\n    `<dtype> [D1...DN]`\n      An integer tensor formed by interpreting the innermost dimension of\n      `tensor` as individual bits.\n\n  Raises:\n    ValueError: If the data to be packed is too large for the chosen data\n      type.\n    ValueError: If the data to be packed is not boolean.\n    InvalidArgumentError: If the input tensor is a list, or the dtype is not a\n      supported integer type.\n\n  Examples:\n\n    >>> assert create_feature_bitmask([True, False, False, True]) == 0b1001\n    >>> create_feature_bitmask([[True, False], [False, True], [True, True]])\n    <tf.Tensor: shape=(3,), dtype=int32, numpy=array([2, 1, 3], dtype=int32)>\n  \"\"\"\n  with ops.name_scope(name, 'CreateFeatureBitmask', [tensor]):\n    if (isinstance(tensor, (list, tuple)) and tensor and\n        isinstance(tensor[0], ops.Tensor)):\n      raise errors.InvalidArgumentError(\n          None, None,\n          'CreateFeatureBitmask does not support lists of tensors. Consider '\n          'using tf.stack(list,-1) to create a single tensor before invoking '\n          'this op.')\n\n    tensor = ops.convert_to_tensor(tensor, dtypes.bool, 'tensor')\n\n    if dtype not in _max_bits.keys():\n      raise errors.InvalidArgumentError(\n          None, None, 'dtype must be one of: [%s], was %s' %\n          (sorted(_max_bits.items(), key=lambda kv: kv[1]), dtype.name))\n\n    integer_data = math_ops.cast(tensor, dtype=dtype)\n    shape = tensor.shape\n    if shape.ndims is not None and shape.dims[-1].value is not None:\n      num_bits = shape.dims[-1].value\n      if num_bits > 63:\n        raise ValueError(\n            'data.shape[-1] must be less than 64, is %d.' % num_bits)\n      elif num_bits > _max_bits[dtype]:\n        raise ValueError(\n            'data.shape[-1] is too large for %s (was %d, cannot exceed %d); '\n            'consider switching create_feature_bitmask to a larger '\n            'dtype.' 
% (dtype.name, num_bits, _max_bits[dtype]))\n bit_masks = constant_op.constant(\n [2**pos for pos in range(num_bits - 1, -1, -1)], dtype)\n else:\n bit_masks = constant_op.constant(\n [2**pos for pos in range(_max_bits[dtype] - 1, -1, -1)], dtype)\n num_bits = array_ops.shape(tensor)[-1]\n with ops.control_dependencies([\n check_ops.assert_less_equal(\n num_bits,\n _max_bits[dtype],\n message='data.shape[-1] is too large for %s (cannot exceed %s)' %\n (dtype.name, _max_bits[dtype]))\n ]):\n # The second slice (\"[:num_bits]\") is a no-op unless num_bits==0.\n bit_masks = bit_masks[-num_bits:][:num_bits]\n return math_ops.reduce_sum(integer_data * bit_masks, axis=-1)\n", "id": "2067689", "language": "Python", "matching_score": 1.1174700260162354, "max_stars_count": 2, "path": "tensorflow_text/python/ops/create_feature_bitmask_op.py" }, { "content": "# coding=utf-8\n# Copyright 2022 TF.Text Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Tests for ngram ops.\"\"\"\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nfrom tensorflow.python.framework import constant_op\nfrom tensorflow.python.framework import errors\nfrom tensorflow.python.framework import test_util\nfrom tensorflow.python.ops.ragged import ragged_factory_ops\nfrom tensorflow.python.platform import test\nfrom tensorflow_text.python.ops import ngrams_op\n\n\n@test_util.run_all_in_graph_and_eager_modes\nclass NgramsOpTest(test_util.TensorFlowTestCase):\n\n def testSumReduction(self):\n test_data = constant_op.constant([[1.0, 2.0, 3.0], [10.0, 20.0, 30.0]])\n op = ngrams_op.ngrams(\n test_data, width=2, axis=1, reduction_type=ngrams_op.Reduction.SUM)\n expected_values = [[3.0, 5.0], [30.0, 50.0]]\n\n self.assertAllEqual(expected_values, op)\n\n def testRaggedSumReduction(self):\n test_data = ragged_factory_ops.constant([[1.0, 2.0, 3.0, 4.0],\n [10.0, 20.0, 30.0]])\n op = ngrams_op.ngrams(\n test_data, width=2, axis=1, reduction_type=ngrams_op.Reduction.SUM)\n expected_values = [[3.0, 5.0, 7.0], [30.0, 50.0]]\n\n self.assertAllEqual(expected_values, op)\n\n def testRaggedSumReductionAxisZero(self):\n test_data = ragged_factory_ops.constant([[1.0, 2.0, 3.0, 4.0],\n [10.0, 20.0, 30.0, 40.0]])\n op = ngrams_op.ngrams(\n test_data, width=2, axis=0, reduction_type=ngrams_op.Reduction.SUM)\n expected_values = [[11.0, 22.0, 33.0, 44.0]]\n\n self.assertAllEqual(expected_values, op)\n\n def testMeanReduction(self):\n test_data = constant_op.constant([[1.0, 2.0, 3.0], [10.0, 20.0, 30.0]])\n op = ngrams_op.ngrams(\n test_data, width=2, axis=1, reduction_type=ngrams_op.Reduction.MEAN)\n expected_values = [[1.5, 2.5], [15.0, 25.0]]\n\n self.assertAllEqual(expected_values, op)\n\n def testRaggedMeanReduction(self):\n test_data = ragged_factory_ops.constant([[1.0, 2.0, 3.0, 4.0],\n [10.0, 20.0, 30.0]])\n op = ngrams_op.ngrams(\n test_data, width=2, axis=-1, reduction_type=ngrams_op.Reduction.MEAN)\n expected_values = [[1.5, 2.5, 3.5], [15.0, 25.0]]\n\n 
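# Added note (hedged): with width=2 the MEAN reduction averages each adjacent\n    # pair along the ragged row, so [1.0, 2.0, 3.0, 4.0] -> [1.5, 2.5, 3.5].\n    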
self.assertAllEqual(expected_values, op)\n\n def testStringJoinReduction(self):\n test_data = constant_op.constant([[\"a\", \"b\", \"c\"], [\"dd\", \"ee\", \"ff\"]])\n op = ngrams_op.ngrams(\n test_data,\n width=2,\n axis=-1,\n reduction_type=ngrams_op.Reduction.STRING_JOIN,\n string_separator=\"|\")\n expected_values = [[b\"a|b\", b\"b|c\"], [b\"dd|ee\", b\"ee|ff\"]]\n\n self.assertAllEqual(expected_values, op)\n\n def testStringJoinReductionAxisZero(self):\n test_data = constant_op.constant([\"a\", \"b\", \"c\"])\n op = ngrams_op.ngrams(\n test_data,\n width=2,\n axis=-1, # The -1 axis is the zero axis here.\n reduction_type=ngrams_op.Reduction.STRING_JOIN,\n string_separator=\"|\")\n expected_values = [b\"a|b\", b\"b|c\"]\n\n self.assertAllEqual(expected_values, op)\n\n def testRaggedStringJoinReduction(self):\n test_data = ragged_factory_ops.constant([[\"a\", \"b\", \"c\"], [\"dd\", \"ee\"]])\n op = ngrams_op.ngrams(\n test_data,\n width=2,\n axis=-1,\n reduction_type=ngrams_op.Reduction.STRING_JOIN,\n string_separator=\"|\")\n expected_values = [[b\"a|b\", b\"b|c\"], [b\"dd|ee\"]]\n\n self.assertAllEqual(expected_values, op)\n\n def testReductionWithNegativeAxis(self):\n test_data = constant_op.constant([[1.0, 2.0, 3.0], [10.0, 20.0, 30.0]])\n op = ngrams_op.ngrams(\n test_data, width=2, axis=-1, reduction_type=ngrams_op.Reduction.SUM)\n expected_values = [[3.0, 5.0], [30.0, 50.0]]\n\n self.assertAllEqual(expected_values, op)\n\n def testReductionOnInnerAxis(self):\n test_data = constant_op.constant([[[1.0, 2.0, 3.0], [10.0, 20.0, 30.0]],\n [[4.0, 5.0, 6.0], [40.0, 50.0, 60.0]]])\n op = ngrams_op.ngrams(\n test_data, width=2, axis=-2, reduction_type=ngrams_op.Reduction.SUM)\n expected_values = [[[11.0, 22.0, 33.0]], [[44.0, 55.0, 66.0]]]\n\n self.assertAllEqual(expected_values, op)\n\n def testRaggedReductionOnInnerAxis(self):\n test_data = ragged_factory_ops.constant([[[1.0, 2.0, 3.0, 4.0],\n [10.0, 20.0, 30.0, 40.0]],\n [[100.0, 200.0], [300.0, 400.0]]])\n op = ngrams_op.ngrams(\n test_data, width=2, axis=-2, reduction_type=ngrams_op.Reduction.SUM)\n expected_values = [[[11.0, 22.0, 33.0, 44.0]], [[400.0, 600.0]]]\n\n self.assertAllEqual(expected_values, op)\n\n def testReductionOnAxisWithInsufficientValuesReturnsEmptySet(self):\n test_data = constant_op.constant([[1.0, 2.0, 3.0], [10.0, 20.0, 30.0]])\n op = ngrams_op.ngrams(\n test_data, width=4, axis=-1, reduction_type=ngrams_op.Reduction.SUM)\n expected_values = [[], []]\n\n self.assertAllEqual(expected_values, op)\n\n def testRaggedReductionOnAxisWithInsufficientValuesReturnsEmptySet(self):\n test_data = ragged_factory_ops.constant([[1.0, 2.0, 3.0],\n [10.0, 20.0, 30.0, 40.0]])\n op = ngrams_op.ngrams(\n test_data, width=4, axis=1, reduction_type=ngrams_op.Reduction.SUM)\n expected_values = [[], [100.0]]\n\n self.assertAllEqual(expected_values, op)\n\n def testStringJoinReductionFailsWithImproperAxis(self):\n with self.assertRaisesRegexp(\n errors.InvalidArgumentError,\n r\".*requires that ngrams' 'axis' parameter be -1.\"):\n _ = ngrams_op.ngrams(\n data=[],\n width=2,\n axis=0,\n reduction_type=ngrams_op.Reduction.STRING_JOIN)\n\n def testUnspecifiedReductionTypeFails(self):\n with self.assertRaisesRegexp(errors.InvalidArgumentError,\n r\"reduction_type must be specified.\"):\n _ = ngrams_op.ngrams(data=[], width=2, axis=0)\n\n def testBadReductionTypeFails(self):\n with self.assertRaisesRegexp(errors.InvalidArgumentError,\n r\"reduction_type must be a Reduction.\"):\n _ = ngrams_op.ngrams(data=[], width=2, axis=0, 
reduction_type=\"SUM\")\n\n\nif __name__ == \"__main__\":\n test.main()\n", "id": "11844517", "language": "Python", "matching_score": 1.6317775249481201, "max_stars_count": 1, "path": "tensorflow_text/python/ops/ngrams_op_test.py" }, { "content": "# coding=utf-8\n# Copyright 2022 TF.Text Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Tests for SentencePieceProcessor Tensorflow op.\"\"\"\n\nimport sys\nimport tempfile\n\nfrom absl.testing import parameterized\n\nfrom tensorflow.python.data.ops import dataset_ops\nfrom tensorflow.python.eager import context\nfrom tensorflow.python.eager import def_function\nfrom tensorflow.python.framework import constant_op\nfrom tensorflow.python.framework import dtypes\nfrom tensorflow.python.framework import errors\nfrom tensorflow.python.framework import tensor_spec\nfrom tensorflow.python.framework import test_util\nfrom tensorflow.python.lib.io import file_io\nfrom tensorflow.python.module import module\nfrom tensorflow.python.ops import gen_experimental_dataset_ops\nfrom tensorflow.python.ops.ragged import ragged_factory_ops\nfrom tensorflow.python.ops.ragged import ragged_gather_ops\nfrom tensorflow.python.platform import gfile\nfrom tensorflow.python.platform import test\nfrom tensorflow.python.saved_model import load\nfrom tensorflow.python.saved_model import save\nfrom tensorflow_text.python.ops.sentencepiece_tokenizer import SentencepieceTokenizer\n\n\ndef _utf8(tokens):\n if sys.version_info[0] == 2:\n return tokens\n if isinstance(tokens, list):\n return [_utf8(t) for t in tokens]\n else:\n return tokens.encode('utf-8')\n\n\nclass TestSavedModelModule(module.Module):\n\n def __init__(self, tokenizer):\n self.tokenizer = tokenizer\n\n @def_function.function(input_signature=[\n tensor_spec.TensorSpec(shape=[None], dtype=dtypes.string)\n ])\n def tokenize(self, inputs):\n return self.tokenizer.tokenize(inputs)\n\n\n@test_util.run_all_in_graph_and_eager_modes\nclass SentencepieceTokenizerOpTest(test_util.TensorFlowTestCase,\n parameterized.TestCase):\n\n def getTokenizerAndSetOptions(self, reverse, add_bos, add_eos, out_type):\n self.reverse = reverse\n self.add_bos = add_bos\n self.add_eos = add_eos\n self.out_type = out_type\n return SentencepieceTokenizer(\n self.model,\n reverse=reverse,\n add_bos=add_bos,\n add_eos=add_eos,\n out_type=out_type)\n\n def transformExpected(self, expected, is_offsets=False):\n bos = _utf8('<s>')\n eos = _utf8('</s>')\n if is_offsets:\n bos = 0\n eos = 0\n elif self.out_type == dtypes.int32:\n bos = 1\n eos = 2\n if not isinstance(expected[0], list):\n if self.add_bos:\n expected = [bos] + expected\n if self.add_eos:\n expected = expected + [eos]\n if self.reverse:\n expected = [x for x in reversed(expected)]\n else:\n return [self.transformExpected(x) for x in expected]\n return expected\n\n def setUp(self):\n super(SentencepieceTokenizerOpTest, self).setUp()\n sentencepiece_model_file = (\n 'tensorflow_text/python/ops/test_data/'\n 'test_oss_model.model')\n self.model = 
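A side note on the SUM reduction exercised by the tests above: a width-2 n-gram sum is just the sum of each adjacent pair, so the expected values can be reproduced with plain slicing. A sketch assuming TF 2.x as `tf` (`bigram_sum` is a hypothetical helper, not the library op):

import tensorflow as tf

def bigram_sum(t):
  # Sum of each adjacent pair along the last axis (width=2, SUM).
  return t[..., :-1] + t[..., 1:]

data = tf.constant([[1.0, 2.0, 3.0], [10.0, 20.0, 30.0]])
print(bigram_sum(data))  # [[3. 5.] [30. 50.]], matching testSumReduction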
gfile.GFile(sentencepiece_model_file, 'rb').read()\n\n def testGetVocabSize(self):\n sp = SentencepieceTokenizer(self.model)\n self.assertAllEqual(1000, sp.vocab_size())\n\n def testIdToStringScalar(self):\n sp = SentencepieceTokenizer(self.model)\n result = sp.id_to_string(125)\n self.assertAllEqual('ve', result)\n\n def testIdToStringVector(self):\n sp = SentencepieceTokenizer(self.model)\n pieces = _utf8([['▁I', '▁l', 'o', 've', '▁c', 'ar', 'pe', 't'],\n ['▁I', '▁l', 'o', 've', '▁desk', '.'],\n ['▁I', '▁l', 'o', 've', '▁l', 'amp', '.']])\n ids = [[9, 169, 21, 125, 78, 48, 132, 15], [9, 169, 21, 125, 727, 6],\n [9, 169, 21, 125, 169, 579, 6]]\n result = sp.id_to_string(ragged_factory_ops.constant(ids))\n self.assertAllEqual(pieces, result)\n\n def testIdToStringRagged(self):\n sp = SentencepieceTokenizer(self.model)\n pieces = _utf8(\n [[['▁I', '▁l', 'o', 've', '▁c', 'ar', 'pe', 't'],\n ['▁I', '▁l', 'o', 've', '▁desk', '.'],\n ['▁I', '▁l', 'o', 've', '▁l', 'amp', '.']],\n [['▁', 'N', 'ever', '▁tell', '▁me', '▁the', '▁', 'o', 'd', 'd', 's']]])\n ids = [[[9, 169, 21, 125, 78, 48, 132, 15], [9, 169, 21, 125, 727, 6],\n [9, 169, 21, 125, 169, 579, 6]],\n [[4, 199, 363, 310, 33, 7, 4, 21, 17, 17, 8]]]\n result = sp.id_to_string(ragged_factory_ops.constant(ids, dtypes.int32))\n self.assertAllEqual(pieces, result)\n\n def testStringToIdScalar(self):\n sp = SentencepieceTokenizer(self.model)\n result = sp.string_to_id('</s>')\n self.assertAllEqual(2, result)\n\n def testStringToIdVector(self):\n sp = SentencepieceTokenizer(self.model)\n pieces = _utf8([['▁I', '▁l', 'o', 've', '▁c', 'ar', 'pe', 't'],\n ['▁I', '▁l', 'o', 've', '▁desk', '.'],\n ['▁I', '▁l', 'o', 've', '▁l', 'amp', '.']])\n ids = [[9, 169, 21, 125, 78, 48, 132, 15], [9, 169, 21, 125, 727, 6],\n [9, 169, 21, 125, 169, 579, 6]]\n result = sp.string_to_id(ragged_factory_ops.constant(pieces))\n self.assertAllEqual(ids, result)\n\n def testStringToIdRagged(self):\n sp = SentencepieceTokenizer(self.model)\n pieces = _utf8(\n [[['▁I', '▁l', 'o', 've', '▁c', 'ar', 'pe', 't'],\n ['▁I', '▁l', 'o', 've', '▁desk', '.'],\n ['▁I', '▁l', 'o', 've', '▁l', 'amp', '.']],\n [['▁', 'N', 'ever', '▁tell', '▁me', '▁the', '▁', 'o', 'd', 'd', 's']]])\n ids = [[[9, 169, 21, 125, 78, 48, 132, 15], [9, 169, 21, 125, 727, 6],\n [9, 169, 21, 125, 169, 579, 6]],\n [[4, 199, 363, 310, 33, 7, 4, 21, 17, 17, 8]]]\n result = sp.string_to_id(ragged_factory_ops.constant(pieces, dtypes.string))\n self.assertAllEqual(ids, result)\n\n @parameterized.parameters([\n (False, False, False, dtypes.int32),\n (False, False, True, dtypes.int32),\n (False, True, False, dtypes.int32),\n (False, True, True, dtypes.int32),\n (True, False, False, dtypes.int32),\n (True, False, True, dtypes.int32),\n (True, True, False, dtypes.int32),\n (True, True, True, dtypes.int32),\n (False, False, False, dtypes.string),\n (False, False, True, dtypes.string),\n (False, True, False, dtypes.string),\n (False, True, True, dtypes.string),\n (True, False, False, dtypes.string),\n (True, False, True, dtypes.string),\n (True, True, False, dtypes.string),\n (True, True, True, dtypes.string),\n ])\n def testTokenizeAndDetokenizeScalar(self, reverse, add_bos, add_eos,\n out_type):\n sp = self.getTokenizerAndSetOptions(reverse, add_bos, add_eos, out_type)\n sentence = 'I love lamp.'\n expected = []\n if out_type == dtypes.int32:\n expected = [9, 169, 21, 125, 169, 579, 6]\n else:\n expected = _utf8(['▁I', '▁l', 'o', 've', '▁l', 'amp', '.'])\n expected = self.transformExpected(expected)\n result = 
sp.tokenize(sentence)\n self.assertAllEqual(expected, result)\n detokenized = sp.detokenize(result)\n self.assertAllEqual(_utf8(sentence), detokenized)\n\n @parameterized.parameters([\n (False, False, False, dtypes.int32),\n (False, False, True, dtypes.int32),\n (False, True, False, dtypes.int32),\n (False, True, True, dtypes.int32),\n (True, False, False, dtypes.int32),\n (True, False, True, dtypes.int32),\n (True, True, False, dtypes.int32),\n (True, True, True, dtypes.int32),\n (False, False, False, dtypes.string),\n (False, False, True, dtypes.string),\n (False, True, False, dtypes.string),\n (False, True, True, dtypes.string),\n (True, False, False, dtypes.string),\n (True, False, True, dtypes.string),\n (True, True, False, dtypes.string),\n (True, True, True, dtypes.string),\n ])\n def testTokenizeAndDetokenizeVec(self, reverse, add_bos, add_eos, out_type):\n sp = self.getTokenizerAndSetOptions(reverse, add_bos, add_eos, out_type)\n sentences = ['I love carpet', 'I love desk.', 'I love lamp.']\n expected = []\n if out_type == dtypes.int32:\n expected = [[9, 169, 21, 125, 78, 48, 132, 15], [9, 169, 21, 125, 727, 6],\n [9, 169, 21, 125, 169, 579, 6]]\n else:\n expected = _utf8([['▁I', '▁l', 'o', 've', '▁c', 'ar', 'pe', 't'],\n ['▁I', '▁l', 'o', 've', '▁desk', '.'],\n ['▁I', '▁l', 'o', 've', '▁l', 'amp', '.']])\n expected = self.transformExpected(expected)\n result = sp.tokenize(sentences)\n self.assertAllEqual(expected, result)\n detokenized = sp.detokenize(result)\n self.assertAllEqual(_utf8(sentences), detokenized)\n\n @parameterized.parameters([\n (False, False, False, dtypes.int32),\n (False, False, True, dtypes.int32),\n (False, True, False, dtypes.int32),\n (False, True, True, dtypes.int32),\n (True, False, False, dtypes.int32),\n (True, False, True, dtypes.int32),\n (True, True, False, dtypes.int32),\n (True, True, True, dtypes.int32),\n (False, False, False, dtypes.string),\n (False, False, True, dtypes.string),\n (False, True, False, dtypes.string),\n (False, True, True, dtypes.string),\n (True, False, False, dtypes.string),\n (True, False, True, dtypes.string),\n (True, True, False, dtypes.string),\n (True, True, True, dtypes.string),\n ])\n def testTokenizeAndDetokenizeUniformTensorMatrix(self, reverse, add_bos,\n add_eos, out_type):\n sp = self.getTokenizerAndSetOptions(reverse, add_bos, add_eos, out_type)\n sentences = [['I love carpet', 'I love desk.'],\n ['I love lamp.', 'Never tell me the odds']]\n expected = []\n if out_type == dtypes.int32:\n expected = [[[9, 169, 21, 125, 78, 48, 132, 15],\n [9, 169, 21, 125, 727, 6]],\n [[9, 169, 21, 125, 169, 579, 6],\n [4, 199, 363, 310, 33, 7, 4, 21, 17, 17, 8]]]\n else:\n expected = _utf8(\n [[['▁I', '▁l', 'o', 've', '▁c', 'ar', 'pe', 't'],\n ['▁I', '▁l', 'o', 've', '▁desk', '.']],\n [['▁I', '▁l', 'o', 've', '▁l', 'amp', '.'],\n ['▁', 'N', 'ever', '▁tell', '▁me', '▁the', '▁', 'o', 'd', 'd',\n 's']]])\n expected = self.transformExpected(expected)\n result = sp.tokenize(constant_op.constant(sentences))\n self.assertAllEqual(expected, result)\n detokenized = sp.detokenize(result)\n self.assertAllEqual(_utf8(sentences), detokenized)\n\n @parameterized.parameters([\n (False, False, False, dtypes.int32),\n (False, False, True, dtypes.int32),\n (False, True, False, dtypes.int32),\n (False, True, True, dtypes.int32),\n (True, False, False, dtypes.int32),\n (True, False, True, dtypes.int32),\n (True, True, False, dtypes.int32),\n (True, True, True, dtypes.int32),\n (False, False, False, dtypes.string),\n (False, False, True, 
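For readers tracing the parametrized expectations above: `transformExpected` rewrites a base token list according to `add_bos`, `add_eos`, and `reverse`. A plain-Python sketch of that rewrite for int32 outputs (bos id 1 and eos id 2, as in the test model; `expected_ids` is a hypothetical helper):

def expected_ids(ids, reverse=False, add_bos=False, add_eos=False):
  out = list(ids)
  if add_bos:
    out = [1] + out   # '<s>' is id 1 in the test model
  if add_eos:
    out = out + [2]   # '</s>' is id 2 in the test model
  return out[::-1] if reverse else out  # reversal happens last

assert expected_ids([9, 169], True, True, True) == [2, 169, 9, 1]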
dtypes.string),\n (False, True, False, dtypes.string),\n (False, True, True, dtypes.string),\n (True, False, False, dtypes.string),\n (True, False, True, dtypes.string),\n (True, True, False, dtypes.string),\n (True, True, True, dtypes.string),\n ])\n def testTokenizeAndDetokenizeRaggedMatrix(self, reverse, add_bos, add_eos,\n out_type):\n sp = self.getTokenizerAndSetOptions(reverse, add_bos, add_eos, out_type)\n sentences = [['I love carpet', 'I love desk.', 'I love lamp.'],\n ['Never tell me the odds']]\n expected = []\n if out_type == dtypes.int32:\n expected = [[[9, 169, 21, 125, 78, 48, 132, 15],\n [9, 169, 21, 125, 727, 6], [9, 169, 21, 125, 169, 579, 6]],\n [[4, 199, 363, 310, 33, 7, 4, 21, 17, 17, 8]]]\n else:\n expected = _utf8(\n [[['▁I', '▁l', 'o', 've', '▁c', 'ar', 'pe', 't'],\n ['▁I', '▁l', 'o', 've', '▁desk', '.'],\n ['▁I', '▁l', 'o', 've', '▁l', 'amp', '.']],\n [['▁', 'N', 'ever', '▁tell', '▁me', '▁the', '▁', 'o', 'd', 'd',\n 's']]])\n expected = self.transformExpected(expected)\n result = sp.tokenize(ragged_factory_ops.constant(sentences))\n self.assertAllEqual(expected, result)\n detokenized = sp.detokenize(result)\n self.assertAllEqual(_utf8(sentences), detokenized)\n\n @parameterized.parameters([\n (False, False, False, dtypes.int32),\n (False, False, True, dtypes.int32),\n (False, True, False, dtypes.int32),\n (False, True, True, dtypes.int32),\n (True, False, False, dtypes.int32),\n (True, False, True, dtypes.int32),\n (True, True, False, dtypes.int32),\n (True, True, True, dtypes.int32),\n (False, False, False, dtypes.string),\n (False, False, True, dtypes.string),\n (False, True, False, dtypes.string),\n (False, True, True, dtypes.string),\n (True, False, False, dtypes.string),\n (True, False, True, dtypes.string),\n (True, True, False, dtypes.string),\n (True, True, True, dtypes.string),\n ])\n def testTokenizeAndDetokenizeWithOffsetsScalar(self, reverse, add_bos,\n add_eos, out_type):\n sp = self.getTokenizerAndSetOptions(reverse, add_bos, add_eos, out_type)\n sentence = 'I love lamp.'\n expected_tok = []\n expected_starts = [0, 1, 3, 4, 6, 8, 11]\n expected_ends = [1, 3, 4, 6, 8, 11, 12]\n if out_type == dtypes.int32:\n expected_tok = [9, 169, 21, 125, 169, 579, 6]\n else:\n expected_tok = _utf8(['▁I', '▁l', 'o', 've', '▁l', 'amp', '.'])\n expected_tok = self.transformExpected(expected_tok)\n expected_starts = self.transformExpected(expected_starts, True)\n expected_ends = self.transformExpected(expected_ends, True)\n (tokens, starts,\n ends) = sp.tokenize_with_offsets(ragged_factory_ops.constant(sentence))\n self.assertAllEqual(expected_tok, tokens)\n self.assertAllEqual(expected_starts, starts)\n self.assertAllEqual(expected_ends, ends)\n detokenized = sp.detokenize(tokens)\n self.assertAllEqual(_utf8(sentence), detokenized)\n\n def testTokenizeAndDetokenizeWithOffsetsSingleElementVector(self):\n sp = SentencepieceTokenizer(self.model, out_type=dtypes.string)\n sentences = ['I love lamp.']\n expected_tokens = [['▁I', '▁l', 'o', 've', '▁l', 'amp', '.']]\n expected_tokens = _utf8(expected_tokens)\n expected_starts = [[0, 1, 3, 4, 6, 8, 11]]\n expected_ends = [[1, 3, 4, 6, 8, 11, 12]]\n (tokens, starts,\n ends) = sp.tokenize_with_offsets(ragged_factory_ops.constant(sentences))\n self.assertAllEqual(expected_tokens, tokens)\n self.assertAllEqual(expected_starts, starts)\n self.assertAllEqual(expected_ends, ends)\n detokenized = sp.detokenize(tokens)\n self.assertAllEqual(_utf8(sentences), detokenized)\n\n def testTokenizeAndDetokenizeWithOffsetsVector(self):\n sp = 
SentencepieceTokenizer(self.model, out_type=dtypes.string)\n sentences = ['I love carpet.', 'I love desk.', 'I love lamp.']\n expected_tokens = [['▁I', '▁l', 'o', 've', '▁c', 'ar', 'pe', 't', '.'],\n ['▁I', '▁l', 'o', 've', '▁desk', '.'],\n ['▁I', '▁l', 'o', 've', '▁l', 'amp', '.']]\n expected_tokens = _utf8(expected_tokens)\n expected_starts = [[0, 1, 3, 4, 6, 8, 10, 12, 13], [0, 1, 3, 4, 6, 11],\n [0, 1, 3, 4, 6, 8, 11]]\n expected_ends = [[1, 3, 4, 6, 8, 10, 12, 13, 14], [1, 3, 4, 6, 11, 12],\n [1, 3, 4, 6, 8, 11, 12]]\n (tokens, starts,\n ends) = sp.tokenize_with_offsets(ragged_factory_ops.constant(sentences))\n self.assertAllEqual(expected_tokens, tokens)\n self.assertAllEqual(expected_starts, starts)\n self.assertAllEqual(expected_ends, ends)\n detokenized = sp.detokenize(tokens)\n self.assertAllEqual(_utf8(sentences), detokenized)\n\n def testTokenizeAndDetokenizeWithOffsetsMatrix(self):\n sp = SentencepieceTokenizer(self.model, out_type=dtypes.string)\n sentences = [['I love carpet.', 'I love desk.', 'I love lamp.'],\n ['Never tell me the odds']]\n expected_tokens = [[['▁I', '▁l', 'o', 've', '▁c', 'ar', 'pe', 't', '.'],\n ['▁I', '▁l', 'o', 've', '▁desk', '.'],\n ['▁I', '▁l', 'o', 've', '▁l', 'amp', '.']],\n [[\n '▁', 'N', 'ever', '▁tell', '▁me', '▁the', '▁', 'o',\n 'd', 'd', 's'\n ]]]\n expected_tokens = _utf8(expected_tokens)\n expected_starts = [[[0, 1, 3, 4, 6, 8, 10, 12, 13], [0, 1, 3, 4, 6, 11],\n [0, 1, 3, 4, 6, 8, 11]],\n [[0, 0, 1, 5, 10, 13, 17, 18, 19, 20, 21]]]\n expected_ends = [[[1, 3, 4, 6, 8, 10, 12, 13, 14], [1, 3, 4, 6, 11, 12],\n [1, 3, 4, 6, 8, 11, 12]],\n [[0, 1, 5, 10, 13, 17, 18, 19, 20, 21, 22]]]\n (tokens, starts,\n ends) = sp.tokenize_with_offsets(ragged_factory_ops.constant(sentences))\n self.assertAllEqual(expected_tokens, tokens)\n self.assertAllEqual(expected_starts, starts)\n self.assertAllEqual(expected_ends, ends)\n detokenized = sp.detokenize(tokens)\n self.assertAllEqual(_utf8(sentences), detokenized)\n\n @parameterized.parameters([\n (-1, 0.1, dtypes.int32),\n (64, 0.1, dtypes.int32),\n (0, 0.0, dtypes.int32),\n (-1, 0.1, dtypes.string),\n (64, 0.1, dtypes.string),\n (0, 0.0, dtypes.string),\n ])\n def testSampleTokenizeAndDetokenize(self, nbest_size, alpha, out_type):\n sp = SentencepieceTokenizer(\n self.model, nbest_size=nbest_size, alpha=alpha, out_type=out_type)\n sentences = [['I love carpet', 'I love desk.', 'I love lamp.'],\n ['Never tell me the odds']]\n result = sp.tokenize(ragged_factory_ops.constant(sentences))\n detokenized = sp.detokenize(result)\n self.assertAllEqual(_utf8(sentences), detokenized)\n\n def testReturnNbestAndDetokenize(self):\n sp = SentencepieceTokenizer(\n self.model, nbest_size=2, out_type=dtypes.int32, return_nbest=True)\n sentences = ['I love carpet', 'Never tell me the odds']\n result = sp.tokenize(ragged_factory_ops.constant(sentences))\n detokenized = sp.detokenize(result)\n self.assertAllEqual(\n _utf8(sentences), ragged_gather_ops.gather(detokenized, [0, 2]))\n self.assertAllEqual(\n _utf8(sentences), ragged_gather_ops.gather(detokenized, [1, 3]))\n\n def testReturnNbestAndDetokenizeWithOffsets(self):\n sp = SentencepieceTokenizer(\n self.model, nbest_size=2, out_type=dtypes.int32, return_nbest=True)\n sentences = ['I love carpet', 'Never tell me the odds']\n result, _, _ = sp.tokenize_with_offsets(\n ragged_factory_ops.constant(sentences))\n detokenized = sp.detokenize(result)\n self.assertAllEqual(\n _utf8(sentences), ragged_gather_ops.gather(detokenized, [0, 2]))\n self.assertAllEqual(\n _utf8(sentences), 
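A note on the offsets verified above: they are byte positions into the original string, so `tf.strings.substr` can slice each token's source span back out. A sketch assuming TF 2.x as `tf` and `sp = SentencepieceTokenizer(model)` as in these tests:

import tensorflow as tf

sentence = tf.constant('I love lamp.')
tokens, starts, ends = sp.tokenize_with_offsets(sentence)
# Each span is the exact byte range [start, end) of one token.
spans = tf.strings.substr(sentence, starts, ends - starts)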
ragged_gather_ops.gather(detokenized, [1, 3]))\n\n def testSavedModel(self):\n sp = SentencepieceTokenizer(self.model)\n test_module = TestSavedModelModule(sp)\n inputs = constant_op.constant(['hello world'])\n expected_result = test_module.tokenize(inputs)\n temp_dir = tempfile.mkdtemp(dir=test.get_temp_dir())\n save.save(test_module, temp_dir)\n restored_model = load.load(temp_dir)\n self.assertAllEqual(restored_model.tokenize(inputs), expected_result)\n file_io.delete_recursively(temp_dir)\n\n def testBasicPipeline(self):\n if not context.executing_eagerly():\n self.skipTest('testBasicPipeline only supported in eager mode.')\n\n sp = SentencepieceTokenizer(self.model)\n\n strings = ['hello', 'world']\n dataset = dataset_ops.Dataset.from_tensor_slices(strings)\n # Ensure we can map the tokenizer across the dataset.\n dataset1 = dataset.map(sp.tokenize)\n # Ensure there's no error with a second map call.\n dataset2 = dataset.map(sp.tokenize)\n\n expected = sp.tokenize(strings)\n for i, result in enumerate(dataset1):\n self.assertAllEqual(result, expected[i])\n for i, result in enumerate(dataset2):\n self.assertAllEqual(result, expected[i])\n\n def testEmptyModel(self):\n with self.cached_session():\n with self.assertRaises(errors.InvalidArgumentError):\n sp = SentencepieceTokenizer()\n result = sp.tokenize('whatever')\n result.eval()\n\n def testInvalidModel(self):\n with self.cached_session():\n with self.assertRaises(errors.InternalError):\n sp = SentencepieceTokenizer('invalid model')\n result = sp.tokenize('whatever')\n result.eval()\n\n\n# Test that datasets depending on a sentencepiece tokenizer resources can be\n# serialized without external references.\n# This test is separate from `SentencepieceTokenizerOpTest` below because\n# context._reset_context() must be called from outside the context created by\n# `@test_util.run_all_in_graph_and_eager_modes`.\nclass DatasetSerializationTest(test_util.TensorFlowTestCase):\n\n def testSerialization(self):\n with context.eager_mode():\n sentencepiece_model_file = (\n 'tensorflow_text/python/ops/test_data/'\n 'test_oss_model.model')\n model = gfile.GFile(sentencepiece_model_file, 'rb').read()\n sp = SentencepieceTokenizer(model)\n strings = ['hello', 'world']\n dataset = dataset_ops.Dataset.from_tensor_slices(strings)\n # Ensure we can map the tokenizer across the dataset.\n dataset = dataset.map(sp.tokenize)\n graph = dataset._as_serialized_graph()\n element_spec = dataset.element_spec\n dataset_graph_string = graph.numpy()\n expected = sp.tokenize(strings)\n\n # Reset the eager context to make sure that the serialized dataset graph\n # is self-contained.\n context._reset_context()\n\n with context.eager_mode():\n restored = dataset_ops.from_variant(\n gen_experimental_dataset_ops.dataset_from_graph(dataset_graph_string),\n element_spec)\n for i, result in enumerate(restored):\n self.assertAllEqual(result, expected[i])\n\n\nif __name__ == '__main__':\n test.main()\n", "id": "7977738", "language": "Python", "matching_score": 3.496119976043701, "max_stars_count": 2, "path": "tensorflow_text/python/ops/sentencepiece_tokenizer_test.py" }, { "content": "# coding=utf-8\n# Copyright 2022 TF.Text Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is 
distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# -*- coding: utf-8 -*-\n\"\"\"Tests for ragged_tensor_to_tensor op.\"\"\"\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nimport numpy as np\nimport tensorflow as tf\nimport tensorflow_text as tf_text\n\nfrom tensorflow.lite.python import interpreter\nfrom tensorflow.python.framework import test_util\nfrom tensorflow.python.platform import test\n\n\n@test_util.run_all_in_graph_and_eager_modes\nclass RaggedTensorToTensorTest(test_util.TensorFlowTestCase):\n\n def testTfLite(self):\n \"\"\"Checks TFLite conversion and inference.\"\"\"\n\n class TokenizerModel(tf.keras.Model):\n\n def __init__(self, **kwargs):\n super().__init__(**kwargs)\n self.fwp = tf_text.FastWordpieceTokenizer(['minds', 'apart', '[UNK]'])\n\n @tf.function(input_signature=[\n tf.TensorSpec(shape=[None], dtype=tf.string, name='input')\n ])\n def call(self, input_tensor):\n return {'tokens': self.fwp.tokenize(input_tensor).to_tensor()}\n\n # Test input data.\n input_data = np.array(['Some minds are better kept apart'])\n\n # Define a model.\n model = TokenizerModel()\n # Do TF inference.\n tf_result = model(tf.constant(input_data))['tokens']\n\n # Convert to TFLite.\n converter = tf.lite.TFLiteConverter.from_keras_model(model)\n converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS]\n converter.allow_custom_ops = True\n tflite_model = converter.convert()\n\n # Do TFLite inference.\n interp = interpreter.InterpreterWithCustomOps(\n model_content=tflite_model,\n custom_op_registerers=tf_text.tflite_registrar.SELECT_TFTEXT_OPS)\n print(interp.get_signature_list())\n tokenize = interp.get_signature_runner('serving_default')\n output = tokenize(input=input_data)\n if tf.executing_eagerly():\n tflite_result = output['tokens']\n else:\n tflite_result = output['output_1']\n\n # Assert the results are identical.\n self.assertAllEqual(tflite_result, tf_result)\n\n\nif __name__ == '__main__':\n test.main()\n", "id": "4467043", "language": "Python", "matching_score": 1.671303391456604, "max_stars_count": 2, "path": "tensorflow_text/python/ragged/ragged_tensor_to_tensor_test.py" }, { "content": "# coding=utf-8\n# Copyright 2022 TF.Text Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"PTransforms used for wordpiece vocabulary generation pipeline.\"\"\"\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\nimport collections\nimport apache_beam as beam\nimport tensorflow.compat.v1 as tf\nfrom tensorflow_text.python.ops.bert_tokenizer import BertTokenizer\nfrom tensorflow_text.python.ops.wordpiece_tokenizer import WordpieceTokenizer\nfrom tensorflow_text.tools.wordpiece_vocab import wordpiece_tokenizer_learner_lib as learner\n\n\nclass 
FilterTokensByLang(beam.DoFn):\n \"\"\"Filters out languages if necessary and yields each (token, lang) pair.\"\"\"\n\n def __init__(self, lang_set, include_other_languages=False):\n self._lang_set = lang_set\n self._include_other_languages = include_other_languages\n\n def process(self, element):\n lang = element['lang']\n\n if lang in self._lang_set or self._include_other_languages:\n returned_lang = lang if lang in self._lang_set else 'other'\n\n for token in element['tokens']:\n yield token, returned_lang\n\n\nclass CalculateCoefficients(beam.CombineFn):\n \"\"\"Calculates smoothing coefficient for each language.\"\"\"\n\n def __init__(self, smoothing_exponent):\n self._smoothing_exponent = smoothing_exponent\n\n def create_accumulator(self):\n return {'total_count': 0, 'lang_count': collections.Counter()}\n\n def add_input(self, accumulator, element):\n _, lang = element\n accumulator['total_count'] += 1\n accumulator['lang_count'].update([lang])\n return accumulator\n\n def merge_accumulators(self, accumulators):\n merged = self.create_accumulator()\n for acc in accumulators:\n for key in merged:\n merged[key] += acc[key]\n return merged\n\n def extract_output(self, accumulator):\n lang_count = accumulator['lang_count']\n total = accumulator['total_count']\n probs, exp = {}, {}\n for lang in lang_count:\n probs[lang] = lang_count[lang] / total\n exp[lang] = pow(probs[lang], self._smoothing_exponent)\n total_weight = sum(exp.values())\n for lang in exp:\n exp[lang] = exp[lang] / (total_weight * probs[lang])\n return exp\n\n\nclass ExponentialSmoothing(beam.DoFn):\n \"\"\"Applies exponential smoothing coefficients to the counts.\"\"\"\n\n def __init__(self, corpus_multiplier=1):\n self._corpus_multiplier = corpus_multiplier\n\n def process(self, word_and_lang, coeffs):\n word, lang = word_and_lang\n count = coeffs[lang] * self._corpus_multiplier\n yield word, count\n\n\nclass FilterByCount(beam.DoFn):\n \"\"\"Filters words with counts below some threshold.\"\"\"\n\n def __init__(self, max_word_length, min_token_frequency=2):\n self._min_token_frequency = int(min_token_frequency)\n self._max_word_length = max_word_length\n\n def process(self, word_and_count):\n word, count = word_and_count\n if count > self._min_token_frequency and len(word) <= self._max_word_length:\n yield word, int(round(count))\n\n\nclass SortByCount(beam.CombineFn):\n \"\"\"Sorts words by count.\"\"\"\n\n def create_accumulator(self):\n return []\n\n def add_input(self, accumulator, element):\n if not accumulator:\n accumulator = self.create_accumulator()\n\n word, count = element\n accumulator.append((word, int(count)))\n return accumulator\n\n def merge_accumulators(self, accumulators):\n merged = self.create_accumulator()\n for accumulator in accumulators:\n if accumulator:\n merged.extend(accumulator)\n return merged\n\n def extract_output(self, accumulator):\n return sorted(sorted(accumulator, key=lambda x: x[0]), key=lambda x: x[1],\n reverse=True)\n\n\nclass CompileTokenizationInfo(beam.DoFn):\n \"\"\"Expands list of tokens and computes intermediate metrics.\"\"\"\n\n def process(self, record):\n wordpiece_counter = collections.Counter(record['wordpieces'])\n del wordpiece_counter['[UNK]']\n dropped = record['num_dropped_chars']\n preserved = record['num_preserved_chars']\n non_unk = record['num_non_unk_wordpieces']\n preserved_ratio = [preserved / non_unk] if non_unk else []\n dropped_ratio = [dropped / (dropped + preserved)] if (dropped +\n preserved) else []\n tokenization_info = {\n 'lang': 
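A worked example of the smoothing that `CalculateCoefficients` implements above: raw language probabilities are raised to `smoothing_exponent` and renormalized, which up-weights rare languages. Pure Python, with made-up counts:

from collections import Counter

counts, s = Counter({'en': 90, 'xx': 10}), 0.5
total = sum(counts.values())
probs = {lang: n / total for lang, n in counts.items()}
exp = {lang: p ** s for lang, p in probs.items()}  # p^s
total_weight = sum(exp.values())
coeffs = {lang: exp[lang] / (total_weight * probs[lang]) for lang in exp}
# coeffs ~= {'en': 0.83, 'xx': 2.5}: each rare-language token
# contributes more weight than a common-language token.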
record['lang'],\n 'count': 1,\n 'num_preserved_chars': preserved,\n 'num_dropped_chars': dropped,\n 'num_non_unk_wordpieces': non_unk,\n 'preserved_ratio': preserved_ratio,\n 'dropped_ratio': dropped_ratio,\n 'wordpieces': wordpiece_counter\n }\n yield tokenization_info\n\n\ndef default():\n return {\n 'count': 0,\n 'num_preserved_chars': 0,\n 'num_dropped_chars': 0,\n 'num_non_unk_wordpieces': 0,\n 'preserved_ratio': [],\n 'dropped_ratio': [],\n 'wordpieces': collections.Counter()\n }\n\n\nclass AggregateLang(beam.CombineFn):\n \"\"\"Aggregates intermediate metrics for each language.\"\"\"\n\n def create_accumulator(self):\n return collections.defaultdict(default)\n\n def add_input(self, accumulator, element):\n lang = element['lang']\n for key in accumulator[lang].keys():\n accumulator[lang][key] += element[key]\n return accumulator\n\n def merge_accumulators(self, accumulators):\n merged = self.create_accumulator()\n for acc in accumulators:\n for lang in acc.keys():\n for key in acc[lang].keys():\n merged[lang][key] += acc[lang][key]\n return merged\n\n def extract_output(self, accumulator):\n return accumulator\n\n\nclass LearnVocab(beam.DoFn):\n\n def __init__(self, params):\n self._params = params\n\n def process(self, wordcounts):\n return learner.learn(wordcounts, self._params)\n\n\nclass CalculateMetrics(beam.DoFn):\n \"\"\"Calculates metrics for each language given tokenization info.\"\"\"\n\n def process(self, info_dict):\n for lang in info_dict.keys():\n infos = info_dict[lang]\n yield {\n 'lang':\n lang,\n 'sample_count':\n infos['count'],\n 'micro_drop_char_percent':\n self._format_float_or_none(\n self._get_micro_dropped_char_percent(infos)),\n 'macro_drop_char_percent':\n self._format_float_or_none(\n self._get_macro_dropped_char_percent(infos)),\n 'micro_compress_ratio':\n self._format_float_or_none(\n self._get_micro_compression_ratio(infos)),\n 'macro_compress_ratio':\n self._format_float_or_none(\n self._get_macro_compression_ratio(infos)),\n 'unweighted_en_wp_overlap_percent':\n self._format_float_or_none(\n self._get_wordpiece_overlap_percent(\n infos['wordpieces'],\n info_dict['en']['wordpieces'],\n weighted=False)),\n 'weighted_en_wp_overlap_percent':\n self._format_float_or_none(\n self._get_wordpiece_overlap_percent(\n infos['wordpieces'],\n info_dict['en']['wordpieces'],\n weighted=True))\n }\n\n def _get_list_mean(self, l):\n return sum(l) / len(l) if l else None\n\n def _get_micro_compression_ratio(self, infos):\n if infos['num_non_unk_wordpieces']:\n return infos['num_preserved_chars'] / infos['num_non_unk_wordpieces']\n else:\n return None\n\n def _get_macro_compression_ratio(self, infos):\n return self._get_list_mean(infos['preserved_ratio'])\n\n def _get_micro_dropped_char_percent(self, infos):\n if infos['num_preserved_chars'] + infos['num_dropped_chars']:\n return 100.0 * infos['num_dropped_chars'] / (\n infos['num_preserved_chars'] + infos['num_dropped_chars'])\n else:\n return None\n\n def _get_macro_dropped_char_percent(self, infos):\n # _get_list_mean returns None for an empty list; avoid multiplying None.\n mean = self._get_list_mean(infos['dropped_ratio'])\n return 100.0 * mean if mean is not None else None\n\n def _get_wordpiece_overlap_percent(self,\n xx_wordpiece_counter,\n en_wordpiece_counter,\n weighted=False):\n numerator = 0\n denominator = 0\n # Counter.iteritems() is Python 2 only; items() works on Python 3 too.\n for wordpiece, count in xx_wordpiece_counter.items():\n if not weighted:\n count = 1\n denominator += count\n if wordpiece in en_wordpiece_counter:\n numerator += count\n\n if denominator:\n return 100.0 * numerator / denominator\n else:\n return None\n\n def _format_float_or_none(self, value):\n if 
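A small illustration of the micro/macro distinction in the metrics above: micro ratios pool raw counts across all samples, while macro ratios average the per-sample ratios, so the two differ whenever samples vary in size. Pure Python with hypothetical numbers:

infos = {
    'num_preserved_chars': 90,
    'num_non_unk_wordpieces': 30,
    'preserved_ratio': [2.0, 5.0],  # per-sample chars per wordpiece
}
micro = infos['num_preserved_chars'] / infos['num_non_unk_wordpieces']
macro = sum(infos['preserved_ratio']) / len(infos['preserved_ratio'])
assert (micro, macro) == (3.0, 3.5)  # pooled vs. averaged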
isinstance(value, float):\n return '{:.3f}'.format(value)\n else:\n return None\n\n\ndef count_preprocessing_fn(text_key, language_code_key):\n \"\"\"Generates a preprocessing function to be used in generating word counts.\n\n Args:\n text_key: feature key in tf.Example for text\n language_code_key: feature key in tf.Example for language_code\n\n Returns:\n a preprocessing function\n \"\"\"\n\n def preprocessing_fn(inputs):\n \"\"\"Function used to transform dataset using TF transform.\n\n Tokenizes input text and passes through the associated language_code.\n\n Args:\n inputs: dataset of tf.Examples containing text samples\n\n Returns:\n transformed outputs\n \"\"\"\n\n outputs = {}\n\n tokenizer = BertTokenizer()\n tokens = tokenizer.tokenize(inputs[text_key])\n outputs['tokens'] = tokens.to_sparse()\n outputs['lang'] = tf.convert_to_tensor(inputs[language_code_key])\n\n return outputs\n\n return preprocessing_fn\n\n\ndef metrics_preprocessing_fn(vocab_file, text_key, language_code_key):\n \"\"\"Generates a preprocessing function to be used in calculating metrics.\n\n Args:\n vocab_file: path to file containing wordpiece vocabulary\n text_key: feature key in tf.Example for text\n language_code_key: feature key in tf.Example for language_code\n\n Returns:\n a preprocessing function\n \"\"\"\n\n def preprocessing_fn(inputs):\n \"\"\"Preprocessing function used in TF Transform.\n\n Args:\n inputs: the input dataset of tf.Examples\n\n Returns:\n preprocessed outputs\n \"\"\"\n vocab_table = tf.lookup.StaticHashTable(tf.lookup.TextFileInitializer(\n vocab_file, tf.string, tf.lookup.TextFileIndex.WHOLE_LINE, tf.int64,\n tf.lookup.TextFileIndex.LINE_NUMBER), -1)\n\n tokenizer = BertTokenizer()\n tokens = tokenizer.tokenize(inputs[text_key])\n wordpiece_tokenizer = WordpieceTokenizer(vocab_table,\n token_out_type=tf.string)\n wordpieces = wordpiece_tokenizer.tokenize(tokens)\n wordpieces_flat = wordpieces.flat_values\n wordpieces_flat.set_shape([None])\n wordpieces = tf.RaggedTensor.from_nested_row_splits(\n wordpieces_flat, wordpieces.nested_row_splits)\n\n known_mask = tf.cast(tf.not_equal(wordpieces, '[UNK]'), tf.int32)\n num_non_unk_wordpieces = tf.reduce_sum(known_mask, axis=[1, 2])\n\n wordpiece_is_unknown = tf.equal(wordpieces, '[UNK]')\n token_has_unknown = tf.reduce_any(wordpiece_is_unknown, axis=-1)\n unknown_tokens = tf.ragged.boolean_mask(tokens, token_has_unknown)\n unknown_lengths = tf.strings.length(unknown_tokens)\n num_dropped_chars = tf.math.reduce_sum(unknown_lengths, axis=1)\n\n token_lengths = tf.strings.length(tokens)\n total_chars = tf.reduce_sum(token_lengths, axis=-1)\n num_preserved_chars = total_chars - num_dropped_chars\n\n flattened = tf.RaggedTensor.from_row_splits(\n wordpieces.flat_values, tf.gather(wordpieces.values.row_splits,\n wordpieces.row_splits))\n\n outputs = {}\n outputs['num_non_unk_wordpieces'] = tf.cast(num_non_unk_wordpieces,\n tf.int64)\n outputs['num_dropped_chars'] = tf.cast(num_dropped_chars, tf.int64)\n outputs['num_preserved_chars'] = tf.cast(num_preserved_chars, tf.int64)\n outputs['wordpieces'] = flattened.to_sparse()\n outputs['lang'] = tf.convert_to_tensor(inputs[language_code_key])\n\n return outputs\n\n return preprocessing_fn\n", "id": "3315723", "language": "Python", "matching_score": 3.709427833557129, "max_stars_count": 2, "path": "tensorflow_text/tools/wordpiece_vocab/utils.py" }, { "content": "# coding=utf-8\n# Copyright 2022 TF.Text Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you 
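To see how the PTransforms above compose, here is a sketch of a word-count pipeline wiring them together. It assumes the input PCollection yields dicts with 'tokens' and 'lang' keys (the shape `FilterTokensByLang` expects); the step names and parameter values are illustrative, not the tool's actual pipeline definition.

import apache_beam as beam

def build_wordcounts(records, lang_set, params):
  tokens = records | 'Filter' >> beam.ParDo(FilterTokensByLang(lang_set))
  coeffs = tokens | 'Coeffs' >> beam.CombineGlobally(
      CalculateCoefficients(smoothing_exponent=0.5))
  counts = (
      tokens
      | 'Smooth' >> beam.ParDo(ExponentialSmoothing(),
                               beam.pvalue.AsSingleton(coeffs))
      | 'Sum' >> beam.CombinePerKey(sum)
      | 'Prune' >> beam.ParDo(FilterByCount(max_word_length=50))
      | 'Sort' >> beam.CombineGlobally(SortByCount()))
  return counts | 'Learn' >> beam.ParDo(LearnVocab(params))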
may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Ops to tokenize words into subwords.\"\"\"\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nfrom tensorflow.python.eager import monitoring\nfrom tensorflow.python.framework import constant_op\nfrom tensorflow.python.framework import dtypes\nfrom tensorflow.python.framework import ops\nfrom tensorflow.python.ops import array_ops\nfrom tensorflow.python.ops import math_ops\nfrom tensorflow.python.ops.ragged import ragged_tensor\nfrom tensorflow.python.ops.ragged.ragged_tensor import RaggedTensor\nfrom tensorflow_text.core.pybinds import pywrap_fast_wordpiece_tokenizer_model_builder\nfrom tensorflow_text.python.ops.tokenization import Detokenizer\nfrom tensorflow_text.python.ops.tokenization import TokenizerWithOffsets\n\n# pylint: disable=g-bad-import-order\nfrom tensorflow.python.framework import load_library\nfrom tensorflow.python.platform import resource_loader\ngen_fast_wordpiece_tokenizer = load_library.load_op_library(resource_loader.get_path_to_datafile('_fast_wordpiece_tokenizer.so'))\n\n_tf_text_fast_wordpiece_tokenizer_op_create_counter = monitoring.Counter(\n '/nlx/api/python/fast_wordpiece_tokenizer_create_counter',\n 'Counter for number of FastWordpieceTokenizers created in Python.')\n\n\nclass FastWordpieceTokenizer(TokenizerWithOffsets, Detokenizer):\n \"\"\"Tokenizes a tensor of UTF-8 string tokens into subword pieces.\n\n It employs the linear (as opposed to quadratic) WordPiece algorithm.\n\n Differences compared to the classic WordpieceTokenizer (as of 11/2020):\n (1) `unknown_token` cannot be None or empty. That means if a word is too long\n or cannot be tokenized, FastWordpieceTokenizer always returns\n `unknown_token`. In contrast, the original WordpieceTokenizer would\n return the original word if `unknown_token` is empty or None.\n (2) `unknown_token` must be included in the vocabulary.\n (3) When `unknown_token` is returned, in tokenize_with_offsets(), the result\n end_offset is set to be the length of the original input word. 
In\n contrast, when `unknown_token` is returned by the original\n WordpieceTokenizer, the end_offset is set to be the length of the\n `unknown_token` string.\n (4) `split_unknown_characters` is not supported.\n (5) `max_chars_per_token` is not used or needed.\n \"\"\"\n\n def __init__(self,\n vocab=None,\n suffix_indicator='##',\n max_bytes_per_word=100,\n token_out_type=dtypes.int64,\n unknown_token='[UNK]',\n no_pretokenization=False,\n support_detokenization=False,\n model_buffer=None):\n \"\"\"Initializes the FastWordpieceTokenizer.\n\n Two ways to initialize:\n * (preferred) use a precompiled `model_buffer`.\n * use `vocab`, `suffix_indicator`, `max_bytes_per_word`, `unknown_token`,\n and `no_pretokenization`.\n\n Args:\n vocab: (optional) The list of tokens in the vocabulary.\n suffix_indicator: (optional) The characters prepended to a wordpiece to\n indicate that it is a suffix to another subword.\n max_bytes_per_word: (optional) Max size of input token.\n token_out_type: (optional) The type of the token to return. This can be\n `tf.int64` or `tf.int32` IDs, or `tf.string` subwords.\n unknown_token: (optional) The string value to substitute for an unknown\n token. It must be included in `vocab`.\n no_pretokenization: (optional) By default, the input is split on\n whitespace and punctuation before applying the Wordpiece tokenization.\n When true, the input is assumed to be pretokenized already.\n support_detokenization: (optional) Whether to make the tokenizer support\n doing detokenization. Setting it to true expands the size of the model\n flatbuffer. As a reference, when using 120k multilingual BERT WordPiece\n vocab, the flatbuffer's size increases from ~5MB to ~6MB.\n model_buffer: (optional) Bytes object (or a uint8 tf.Tensor) that contains\n the wordpiece model in flatbuffer format (see\n fast_wordpiece_tokenizer_model.fbs). If not `None`, all other arguments\n (except `token_out_type`) are ignored.\n \"\"\"\n super(FastWordpieceTokenizer, self).__init__()\n _tf_text_fast_wordpiece_tokenizer_op_create_counter.get_cell().increase_by(\n 1)\n\n if model_buffer is None:\n model_buffer = (pywrap_fast_wordpiece_tokenizer_model_builder\n .build_fast_wordpiece_model(\n vocab, max_bytes_per_word, suffix_indicator,\n unknown_token, no_pretokenization,\n support_detokenization))\n # Use uint8 tensor as a buffer for the model to avoid any possible changes,\n # for example truncation by '\0'.\n if isinstance(model_buffer, ops.Tensor):\n self._model = model_buffer\n else:\n self._model = constant_op.constant(list(model_buffer), dtype=dtypes.uint8)\n\n self._token_out_type = token_out_type\n\n def tokenize(self, input): # pylint: disable=redefined-builtin\n \"\"\"Tokenizes a tensor of UTF-8 string tokens further into subword tokens.\n\n ### Example 1, single word tokenization:\n >>> vocab = [\"they\", \"##'\", \"##re\", \"the\", \"great\", \"##est\", \"[UNK]\"]\n >>> tokenizer = FastWordpieceTokenizer(vocab, token_out_type=tf.string,\n ... no_pretokenization=True)\n >>> tokens = [[\"they're\", \"the\", \"greatest\"]]\n >>> tokenizer.tokenize(tokens)\n <tf.RaggedTensor [[[b'they', b\"##'\", b'##re'], [b'the'],\n [b'great', b'##est']]]>\n\n ### Example 2, general text tokenization (pre-tokenization on\n ### punctuation and whitespace followed by WordPiece tokenization):\n >>> vocab = [\"they\", \"##'\", \"##re\", \"the\", \"great\", \"##est\", \"[UNK]\",\n ... 
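A sketch of the "preferred" precompiled-buffer path described in the docstring above, using the builder module already imported at the top of this file; the vocab and flag values are illustrative only, and the positional argument order follows the call inside __init__:

model_buffer = (
    pywrap_fast_wordpiece_tokenizer_model_builder
    .build_fast_wordpiece_model(
        ['they', "##'", '##re', '[UNK]'],  # vocab
        100,                               # max_bytes_per_word
        '##',                              # suffix_indicator
        '[UNK]',                           # unknown_token
        False,                             # no_pretokenization
        True))                             # support_detokenization
tokenizer = FastWordpieceTokenizer(model_buffer=model_buffer)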
\"'\", \"re\"]\n >>> tokenizer = FastWordpieceTokenizer(vocab, token_out_type=tf.string)\n >>> tokens = [[\"they're the greatest\", \"the greatest\"]]\n >>> tokenizer.tokenize(tokens)\n <tf.RaggedTensor [[[b'they', b\"'\", b're', b'the', b'great', b'##est'],\n [b'the', b'great', b'##est']]]>\n\n Args:\n input: An N-dimensional `Tensor` or `RaggedTensor` of UTF-8 strings.\n\n Returns:\n A `RaggedTensor` of tokens where `tokens[i, j]` is the j-th token\n (i.e., wordpiece) for `input[i]` (i.e., the i-th input word). This token\n is either the actual token string content, or the corresponding integer\n id, i.e., the index of that token string in the vocabulary. This choice\n is controlled by the `token_out_type` parameter passed to the initializer\n method.\n \"\"\"\n # TODO(xysong): Optimize below by calling different overload kernels.\n subword, _, _ = self.tokenize_with_offsets(input)\n return subword\n\n def tokenize_with_offsets(self, input): # pylint: disable=redefined-builtin\n \"\"\"Tokenizes a tensor of UTF-8 string tokens further into subword tokens.\n\n ### Example 1, single word tokenization:\n >>> vocab = [\"they\", \"##'\", \"##re\", \"the\", \"great\", \"##est\", \"[UNK]\"]\n >>> tokenizer = FastWordpieceTokenizer(vocab, token_out_type=tf.string,\n ... no_pretokenization=True)\n >>> tokens = [[\"they're\", \"the\", \"greatest\"]]\n >>> subtokens, starts, ends = tokenizer.tokenize_with_offsets(tokens)\n >>> subtokens\n <tf.RaggedTensor [[[b'they', b\"##'\", b'##re'], [b'the'],\n [b'great', b'##est']]]>\n >>> starts\n <tf.RaggedTensor [[[0, 4, 5], [0], [0, 5]]]>\n >>> ends\n <tf.RaggedTensor [[[4, 5, 7], [3], [5, 8]]]>\n\n ### Example 2, general text tokenization (pre-tokenization on\n ### punctuation and whitespace followed by WordPiece tokenization):\n >>> vocab = [\"they\", \"##'\", \"##re\", \"the\", \"great\", \"##est\", \"[UNK]\",\n ... \"'\", \"re\"]\n >>> tokenizer = FastWordpieceTokenizer(vocab, token_out_type=tf.string)\n >>> tokens = [[\"they're the greatest\", \"the greatest\"]]\n >>> subtokens, starts, ends = tokenizer.tokenize_with_offsets(tokens)\n >>> subtokens\n <tf.RaggedTensor [[[b'they', b\"'\", b're', b'the', b'great', b'##est'],\n [b'the', b'great', b'##est']]]>\n >>> starts\n <tf.RaggedTensor [[[0, 4, 5, 8, 12, 17], [0, 4, 9]]]>\n >>> ends\n <tf.RaggedTensor [[[4, 5, 7, 11, 17, 20], [3, 9, 12]]]>\n\n Args:\n input: An N-dimensional `Tensor` or `RaggedTensor` of UTF-8 strings.\n\n Returns:\n A tuple `(tokens, start_offsets, end_offsets)` where:\n\n tokens: is a `RaggedTensor`, where `tokens[i, j]` is the j-th token\n (i.e., wordpiece) for `input[i]` (i.e., the i-th input word). 
This\n token is either the actual token string content, or the corresponding\n integer id, i.e., the index of that token string in the vocabulary.\n This choice is controlled by the `token_out_type` parameter passed to\n the initializer method.\n start_offsets[i1...iN, j]: is a `RaggedTensor` of the byte offsets\n for the inclusive start of the `jth` token in `input[i1...iN]`.\n end_offsets[i1...iN, j]: is a `RaggedTensor` of the byte offsets for\n the exclusive end of the `jth` token in `input[i1...iN]` (exclusive,\n i.e., first byte after the end of the token).\n \"\"\"\n name = None\n with ops.name_scope(name, 'FastWordpieceTokenizeWithOffsets',\n [input, self._model]):\n # Check that the types are expected and the ragged rank is appropriate.\n tokens = ragged_tensor.convert_to_tensor_or_ragged_tensor(input)\n rank = tokens.shape.ndims\n if rank is None:\n raise ValueError('input must have a known rank.')\n\n if rank == 0:\n wordpieces, starts, ends = self.tokenize_with_offsets(\n array_ops.stack([tokens]))\n return wordpieces.values, starts.values, ends.values\n\n elif rank > 1:\n if not ragged_tensor.is_ragged(tokens):\n tokens = ragged_tensor.RaggedTensor.from_tensor(\n tokens, ragged_rank=rank - 1)\n wordpieces, starts, ends = self.tokenize_with_offsets(\n tokens.flat_values)\n wordpieces = wordpieces.with_row_splits_dtype(tokens.row_splits.dtype)\n starts = starts.with_row_splits_dtype(tokens.row_splits.dtype)\n ends = ends.with_row_splits_dtype(tokens.row_splits.dtype)\n return (tokens.with_flat_values(wordpieces),\n tokens.with_flat_values(starts), tokens.with_flat_values(ends))\n\n # Tokenize the tokens into subwords.\n # TODO(xysong): Optimize below by calling different overload kernels.\n subwords, subword_ids, row_splits, starts, ends = (\n gen_fast_wordpiece_tokenizer.fast_wordpiece_tokenize_with_offsets(\n input_values=tokens, wp_model=self._model))\n\n if self._token_out_type == dtypes.int64:\n values = math_ops.cast(subword_ids, dtypes.int64)\n elif self._token_out_type == dtypes.int32:\n values = math_ops.cast(subword_ids, dtypes.int32)\n else:\n values = subwords\n\n wordpieces = RaggedTensor.from_row_splits(\n values, row_splits, validate=False)\n starts = RaggedTensor.from_row_splits(starts, row_splits, validate=False)\n ends = RaggedTensor.from_row_splits(ends, row_splits, validate=False)\n\n return wordpieces, starts, ends\n\n def detokenize(self, input): # pylint: disable=redefined-builtin\n \"\"\"Detokenizes a tensor of int64 or int32 subword ids into sentences.\n\n Tokenizing and then detokenizing an input string recovers the original\n string when the input is normalized and the tokenized wordpieces don't\n contain `<unk>`.\n\n ### Example:\n >>> vocab = [\"they\", \"##'\", \"##re\", \"the\", \"great\", \"##est\", \"[UNK]\",\n ... \"'\", \"re\", \"ok\"]\n >>> tokenizer = FastWordpieceTokenizer(vocab, support_detokenization=True)\n >>> ids = tf.ragged.constant([[0, 1, 2, 3, 4, 5], [9]])\n >>> tokenizer.detokenize(ids)\n <tf.Tensor: shape=(2,), dtype=string,\n ... 
numpy=array([b\"they're the greatest\", b'ok'], dtype=object)>\n >>> ragged_ids = tf.ragged.constant([[[0, 1, 2, 3, 4, 5], [9]], [[4, 5]]])\n >>> tokenizer.detokenize(ragged_ids)\n <tf.RaggedTensor [[b\"they're the greatest\", b'ok'], [b'greatest']]>\n\n Args:\n input: An N-dimensional `Tensor` or `RaggedTensor` of int64 or int32.\n\n Returns:\n A `RaggedTensor` of sentences that has N - 1 dimension when N > 1.\n Otherwise, a string tensor.\n \"\"\"\n name = None\n with ops.name_scope(name, 'FastWordpieceDetokenize', [input, self._model]):\n # Check that the types are expected and the ragged rank is appropriate.\n subword_ids = ragged_tensor.convert_to_tensor_or_ragged_tensor(input)\n subword_ids = math_ops.cast(subword_ids, dtypes.int32)\n rank = subword_ids.shape.ndims\n if rank is None:\n raise ValueError('input must have a known rank.')\n\n if rank < 2:\n words = self.detokenize(array_ops.stack([subword_ids]))\n return words[0]\n\n if not ragged_tensor.is_ragged(subword_ids):\n subword_ids = ragged_tensor.RaggedTensor.from_tensor(\n subword_ids, ragged_rank=rank - 1)\n nested_row_splits = subword_ids.nested_row_splits\n # Detokenize the wordpiece ids to texts.\n words = (\n gen_fast_wordpiece_tokenizer.tf_text_fast_wordpiece_detokenize(\n input_values=subword_ids.flat_values,\n input_row_splits=nested_row_splits[-1],\n wp_model=self._model))\n words = RaggedTensor.from_nested_row_splits(\n words, nested_row_splits[:-1], validate=False)\n\n return words\n", "id": "8302570", "language": "Python", "matching_score": 4.5051751136779785, "max_stars_count": 1, "path": "tensorflow_text/python/ops/fast_wordpiece_tokenizer.py" }, { "content": "# coding=utf-8\n# Copyright 2022 TF.Text Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Sentencepiece tokenizer for string tensors.\"\"\"\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nfrom tensorflow.python.eager import monitoring\nfrom tensorflow.python.framework import dtypes\nfrom tensorflow.python.framework import ops\nfrom tensorflow.python.ops import array_ops\nfrom tensorflow.python.ops.ragged import ragged_conversion_ops\nfrom tensorflow.python.ops.ragged import ragged_tensor\nfrom tensorflow.python.ops.ragged.ragged_tensor import RaggedTensor\nfrom tensorflow.python.training.tracking import tracking\nfrom tensorflow_text.python.ops.tokenization import Detokenizer\nfrom tensorflow_text.python.ops.tokenization import TokenizerWithOffsets\n\nfrom tensorflow.python.framework import load_library\nfrom tensorflow.python.platform import resource_loader\ngen_sentencepiece_tokenizer = load_library.load_op_library(resource_loader.get_path_to_datafile('_sentencepiece_tokenizer.so')) # pylint: disable=g-bad-import-order\n\n_tf_text_sentencepiece_tokenizer_op_create_counter = monitoring.Counter(\n \"/nlx/api/python/sentencepiece_tokenizer_create_counter\",\n \"Counter for number of SentencepieceTokenizers created in Python.\")\n\n\nclass 
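Tying the two halves of this class together, a round-trip sketch (assuming TF 2.x as `tf`; the vocab matches the docstring examples, and per the docstring the round trip is expected to recover the input since it is normalized and contains no unknown pieces):

import tensorflow as tf

vocab = ['they', "##'", '##re', 'the', 'great', '##est', '[UNK]', "'", 're']
tokenizer = FastWordpieceTokenizer(
    vocab, token_out_type=tf.int32, support_detokenization=True)
ids = tokenizer.tokenize(["they're the greatest"])
text = tokenizer.detokenize(ids)  # expected: [b"they're the greatest"]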
_SentencepieceModelResource(tracking.TrackableResource):\n \"\"\"Utility to track the model resource tensor (for SavedModel support).\"\"\"\n\n def __init__(self, model, name):\n super(_SentencepieceModelResource, self).__init__()\n self._model = model\n self._name = name\n _ = self.resource_handle # Accessing this property creates the resource.\n\n def _create_resource(self):\n model, name = self._model, self._name\n with ops.name_scope(name, \"SentenceTokenizerInitializer\", [model]):\n return gen_sentencepiece_tokenizer.sentencepiece_op(model=model)\n\n\nclass SentencepieceTokenizer(TokenizerWithOffsets, Detokenizer):\n r\"\"\"Tokenizes a tensor of UTF-8 strings.\n\n SentencePiece is an unsupervised text tokenizer and detokenizer. It is used\n mainly for Neural Network-based text generation systems where the vocabulary\n size is predetermined prior to the neural model training. SentencePiece\n implements subword units with the extension of direct training from raw\n sentences.\n\n Before using the tokenizer, you will need to train a vocabulary and build a\n model configuration for it. Please visit the [Sentencepiece\n repository](https://github.com/google/sentencepiece#train-sentencepiece-model)\n for the most up-to-date instructions on this process.\n \"\"\"\n\n def __init__(self,\n model=None,\n out_type=dtypes.int32,\n nbest_size=0,\n alpha=1.0,\n reverse=False,\n add_bos=False,\n add_eos=False,\n return_nbest=False,\n name=None):\n \"\"\"Creates & initializes a Sentencepiece processor.\n\n Args:\n model: The sentencepiece model serialized proto.\n out_type: output type. tf.int32 or tf.string (Default = tf.int32) Setting\n tf.int32 directly encodes the string into an id sequence.\n nbest_size: A scalar for sampling.\n * `nbest_size = {0,1}`: No sampling is performed. (default)\n * `nbest_size > 1`: samples from the nbest_size results.\n * `nbest_size < 0`: assuming that nbest_size is infinite and samples\n from the all hypothesis (lattice) using\n forward-filtering-and-backward-sampling algorithm.\n alpha: A scalar for a smoothing parameter. Inverse temperature for\n probability rescaling.\n reverse: Reverses the tokenized sequence (Default = false)\n add_bos: Add beginning of sentence token to the result (Default = false)\n add_eos: Add end of sentence token to the result (Default = false). When\n `reverse=True` beginning/end of sentence tokens are added after\n reversing.\n return_nbest: If True requires that `nbest_size` is a scalar and `> 1`.\n Returns the `nbest_size` best tokenizations for each sentence instead\n of a single one. The returned tensor has shape\n `[batch * nbest, (tokens)]`.\n name: The name argument that is passed to the op function.\n\n Returns:\n pieces: A SentencepieceTokenizer.\n \"\"\"\n super(SentencepieceTokenizer, self).__init__()\n _tf_text_sentencepiece_tokenizer_op_create_counter.get_cell().increase_by(1)\n self.nbest_size = nbest_size\n self.alpha = alpha\n self.out_type = out_type\n self.reverse = reverse\n self.add_bos = add_bos\n self.add_eos = add_eos\n self.return_nbest = return_nbest\n self._model_resource = _SentencepieceModelResource(model, name)\n\n def tokenize(self, input, name=None): # pylint: disable=redefined-builtin\n \"\"\"Tokenizes a tensor of UTF-8 strings.\n\n Args:\n input: A `RaggedTensor` or `Tensor` of UTF-8 strings with any shape.\n name: The name argument that is passed to the op function.\n\n Returns:\n A `RaggedTensor` of tokenized text. 
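A sketch of the `return_nbest=True` layout documented above: candidates are stacked as `[batch * nbest, (tokens)]`, so sentence `i`'s `k`-th candidate sits at row `i * nbest + k` (the same layout the gather-based assertions in the tests rely on). Assumes TF 2.x as `tf` and that `model` holds a serialized sentencepiece model proto:

import tensorflow as tf

sp = SentencepieceTokenizer(model, nbest_size=2, return_nbest=True)
ids = sp.tokenize(['I love carpet', 'Never tell me the odds'])
# Rows 0 and 2 are each sentence's first candidate; 1 and 3 the second.
first = tf.gather(ids, [0, 2])
second = tf.gather(ids, [1, 3])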
The returned shape is the shape of the\n input tensor with an added ragged dimension for tokens of each string.\n \"\"\"\n with ops.name_scope(name, \"SentenceTokenizer\", [input, self]):\n input_tensor = ragged_tensor.convert_to_tensor_or_ragged_tensor(input)\n if input_tensor.shape.ndims is None:\n raise ValueError(\"Rank of input_tensor must be statically known.\")\n if ragged_tensor.is_ragged(input_tensor):\n # Recursively process the values of the ragged tensor.\n tokens = self.tokenize(input_tensor.flat_values)\n return input_tensor.with_flat_values(tokens)\n else:\n if input_tensor.shape.ndims > 1:\n # Convert the input tensor to ragged and process it.\n return self.tokenize(ragged_conversion_ops.from_tensor(input_tensor))\n elif input_tensor.shape.ndims == 0:\n tokens = self.tokenize(array_ops.stack([input_tensor]))\n return tokens.values\n else:\n # Our rank 1 tensor is the correct shape, so we can process it as\n # normal.\n (output_values, row_splits) = (\n gen_sentencepiece_tokenizer.sentencepiece_tokenize_op(\n self._model_resource.resource_handle, input_tensor,\n self.nbest_size, self.alpha, self.add_bos, self.add_eos,\n self.reverse, self.out_type, return_nbest=self.return_nbest))\n tokens = RaggedTensor.from_nested_row_splits(\n flat_values=output_values,\n nested_row_splits=[row_splits],\n validate=False)\n return tokens\n\n def tokenize_with_offsets(self, input, name=None): # pylint: disable=redefined-builtin\n \"\"\"Tokenizes a tensor of UTF-8 strings.\n\n This function returns a tuple containing the tokens along with\n start and end byte offsets that mark where in the original string each\n token was located.\n\n Args:\n input: A `RaggedTensor` or `Tensor` of UTF-8 strings with any shape.\n name: The name argument that is passed to the op function.\n\n Returns:\n A tuple `(tokens, start_offsets, end_offsets)` where:\n\n tokens: is an N+1-dimensional UTF-8 string or integer `Tensor` or\n `RaggedTensor`.\n start_offsets: is an N+1-dimensional integer `Tensor` or\n `RaggedTensor` containing the starting indices of each token (byte\n indices for input strings).\n end_offsets: is an N+1-dimensional integer `Tensor` or\n `RaggedTensor` containing the exclusive ending indices of each token\n (byte indices for input strings).\n \"\"\"\n with ops.name_scope(name, \"SentenceTokenizer\", [input, self]):\n input_tensor = ragged_tensor.convert_to_tensor_or_ragged_tensor(input)\n if input_tensor.shape.ndims is None:\n raise ValueError(\"Rank of input_tensor must be statically known.\")\n if ragged_tensor.is_ragged(input_tensor):\n # Recursively process the values of the ragged tensor\n (tokens, starts,\n ends) = self.tokenize_with_offsets(input_tensor.flat_values)\n tokens = input_tensor.with_flat_values(tokens)\n starts = input_tensor.with_flat_values(starts)\n ends = input_tensor.with_flat_values(ends)\n return (tokens, starts, ends)\n else:\n if input_tensor.shape.ndims > 1:\n # Convert the input tensor to ragged and process it.\n return self.tokenize_with_offsets(\n ragged_conversion_ops.from_tensor(input_tensor))\n elif input_tensor.shape.ndims == 0:\n (tokens, starts, ends) = self.tokenize_with_offsets(\n array_ops.stack([input_tensor]))\n tokens = tokens.values\n starts = starts.values\n ends = ends.values\n return (tokens, starts, ends)\n else:\n # Our rank 1 tensor is the correct shape, so we can process it as\n # normal.\n (output_values, output_splits, output_offset_starts,\n output_offset_ends) = (\n gen_sentencepiece_tokenizer\n .sentencepiece_tokenize_with_offsets_op(\n 
self._model_resource.resource_handle, input_tensor,\n self.nbest_size, self.alpha, self.add_bos, self.add_eos,\n self.reverse, self.out_type, return_nbest=self.return_nbest))\n tokens = RaggedTensor.from_nested_row_splits(\n flat_values=output_values,\n nested_row_splits=[output_splits],\n validate=False)\n starts = RaggedTensor.from_nested_row_splits(\n flat_values=output_offset_starts,\n nested_row_splits=[output_splits],\n validate=False)\n ends = RaggedTensor.from_nested_row_splits(\n flat_values=output_offset_ends,\n nested_row_splits=[output_splits],\n validate=False)\n return (tokens, starts, ends)\n\n def detokenize(self, input, name=None): # pylint: disable=redefined-builtin\n \"\"\"Detokenizes tokens into preprocessed text.\n\n This function accepts tokenized text, and reforms it back into\n sentences.\n\n Args:\n input: A `RaggedTensor` or `Tensor` of UTF-8 string tokens with a rank of\n at least 1.\n name: The name argument that is passed to the op function.\n\n Returns:\n A N-1 dimensional string Tensor or RaggedTensor of the detokenized text.\n \"\"\"\n with ops.name_scope(name, \"SentenceTokenizer\", [input, self]):\n input_tensor = ragged_tensor.convert_to_tensor_or_ragged_tensor(input)\n if input_tensor.shape.ndims is None:\n raise ValueError(\"Rank of input_tensor must be statically known.\")\n if input_tensor.shape.ndims == 0:\n raise ValueError(\"Rank of input_tensor must be at least 1.\")\n if ragged_tensor.is_ragged(input_tensor):\n if input_tensor.flat_values.shape.ndims > 1:\n # If the flat_values of our ragged tensor is multi-dimensional, we can\n # process it separately and our output will have the same nested\n # splits as our input.\n tokens = self.detokenize(input_tensor.flat_values)\n return input_tensor.with_flat_values(tokens)\n elif input_tensor.ragged_rank > 1:\n # Recursively process the values of the ragged tensor.\n tokens = self.detokenize(input_tensor.values)\n return input_tensor.with_values(tokens)\n else:\n return gen_sentencepiece_tokenizer.sentencepiece_detokenize_op(\n self._model_resource.resource_handle, input_tensor.flat_values,\n input_tensor.row_splits, self.add_bos, self.add_eos, self.reverse)\n else:\n if input_tensor.shape.ndims > 1:\n # Convert the input tensor to ragged and process it.\n return self.detokenize(\n ragged_conversion_ops.from_tensor(input_tensor))\n else:\n tokens = self.detokenize(array_ops.stack([input_tensor]))\n return array_ops.reshape(tokens, [])\n\n def vocab_size(self, name=None):\n \"\"\"Returns the vocabulary size.\n\n The number of tokens from within the Sentencepiece vocabulary provided at\n the time of initialization.\n\n Args:\n name: The name argument that is passed to the op function.\n\n Returns:\n A scalar representing the vocabulary size.\n \"\"\"\n with ops.name_scope(name, \"SentencepieceTokenizerVocabSize\", [self]):\n return gen_sentencepiece_tokenizer.sentencepiece_vocab_size_op(\n self._model_resource.resource_handle)\n\n def id_to_string(self, input, name=None): # pylint: disable=redefined-builtin\n \"\"\"Converts vocabulary id into a token.\n\n Args:\n input: An arbitrary tensor of int32 representing the token IDs.\n name: The name argument that is passed to the op function.\n\n Returns:\n A tensor of string with the same shape as input.\n \"\"\"\n with ops.name_scope(name, \"SentencepieceTokenizerIdToString\",\n [self, input]):\n input_tensor = ragged_tensor.convert_to_tensor_or_ragged_tensor(input)\n if input_tensor.shape.ndims is None:\n raise ValueError(\"Rank of input_tensor must be 
statically known.\")\n if input_tensor.shape.ndims == 0:\n strings = self.id_to_string(array_ops.stack([input_tensor]))\n return strings[0]\n if ragged_tensor.is_ragged(input_tensor):\n strings = self.id_to_string(input_tensor.flat_values)\n return input_tensor.with_flat_values(strings)\n if input_tensor.shape.ndims > 1:\n return array_ops.reshape(\n self.id_to_string(array_ops.reshape(input_tensor, [-1])),\n array_ops.shape(input_tensor))\n return gen_sentencepiece_tokenizer.sentencepiece_id_to_string_op(\n self._model_resource.resource_handle, input)\n\n def string_to_id(self, input, name=None): # pylint: disable=redefined-builtin\n \"\"\"Converts token into a vocabulary id.\n\n This function is particularly helpful for determining the IDs for any\n special tokens whose ID could not be determined through normal tokenization.\n\n Args:\n input: An arbitrary tensor of string tokens.\n name: The name argument that is passed to the op function.\n\n Returns:\n A tensor of int32 representing the IDs with the same shape as input.\n \"\"\"\n with ops.name_scope(name, \"SentencepieceTokenizerStringToId\",\n [self, input]):\n input_tensor = ragged_tensor.convert_to_tensor_or_ragged_tensor(input)\n if input_tensor.shape.ndims is None:\n raise ValueError(\"Rank of input_tensor must be statically known.\")\n if input_tensor.shape.ndims == 0:\n strings = self.string_to_id(array_ops.stack([input_tensor]))\n return strings[0]\n if ragged_tensor.is_ragged(input_tensor):\n strings = self.string_to_id(input_tensor.flat_values)\n return input_tensor.with_flat_values(strings)\n if input_tensor.shape.ndims > 1:\n return array_ops.reshape(\n self.string_to_id(array_ops.reshape(input_tensor, [-1])),\n array_ops.shape(input_tensor))\n return gen_sentencepiece_tokenizer.sentencepiece_string_to_id_op(\n self._model_resource.resource_handle, input)\n", "id": "1484722", "language": "Python", "matching_score": 4.606314182281494, "max_stars_count": 1, "path": "tensorflow_text/python/ops/sentencepiece_tokenizer.py" }, { "content": "# coding=utf-8\n# Copyright 2022 TF.Text Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Abstract base classes for all tokenizers.\"\"\"\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nimport abc\n\nfrom tensorflow.python.module import module\nfrom tensorflow_text.python.ops.splitter import Splitter\nfrom tensorflow_text.python.ops.splitter import SplitterWithOffsets\n\n\nclass Tokenizer(Splitter):\n \"\"\"Base class for tokenizer implementations.\n\n A Tokenizer is a `text.Splitter` that splits strings into *tokens*. Tokens\n generally correspond to short substrings of the source string. Tokens can be\n encoded using either strings or integer ids (where integer ids could be\n created by hashing strings or by looking them up in a fixed vocabulary table\n that maps strings to ids).\n\n Each Tokenizer subclass must implement a `tokenize` method, which splits each\n string in a Tensor into tokens. 
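A hedged round-trip sketch for the `SentencepieceTokenizer` entry that ends above, tying together `tokenize`, `detokenize`, and the vocabulary helpers; `sp.model` is again a placeholder, and `detokenize` typically (though not byte-for-byte provably) recovers the input text:

```python
# Round trip through the ops defined above. "sp.model" is a hypothetical
# path to a serialized SentencePiece model, not a file from this repo.
import tensorflow as tf
import tensorflow_text as tf_text

model = open("sp.model", "rb").read()
tokenizer = tf_text.SentencepieceTokenizer(model, out_type=tf.int32)

ids = tokenizer.tokenize(["the quick brown fox"])
print(tokenizer.detokenize(ids))    # usually recovers the input sentence
print(tokenizer.id_to_string(ids))  # same shape as `ids`, but string pieces
print(tokenizer.vocab_size())       # scalar: size of the model's vocabulary
```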
E.g.:\n\n >>> class SimpleTokenizer(tf_text.Tokenizer):\n ... def tokenize(self, input):\n ... return tf.strings.split(input)\n >>> print(SimpleTokenizer().tokenize([\"hello world\", \"this is a test\"]))\n <tf.RaggedTensor [[b'hello', b'world'], [b'this', b'is', b'a', b'test']]>\n\n By default, the `split` method simply delegates to `tokenize`.\n \"\"\"\n\n @abc.abstractmethod\n def tokenize(self, input): # pylint: disable=redefined-builtin\n \"\"\"Tokenizes the input tensor.\n\n Splits each string in the input tensor into a sequence of tokens. Tokens\n generally correspond to short substrings of the source string. Tokens can\n be encoded using either strings or integer ids.\n\n Example:\n\n >>> print(tf_text.WhitespaceTokenizer().tokenize(\"small medium large\"))\n tf.Tensor([b'small' b'medium' b'large'], shape=(3,), dtype=string)\n\n Args:\n input: An N-dimensional UTF-8 string (or optionally integer) `Tensor` or\n `RaggedTensor`.\n\n Returns:\n An N+1-dimensional UTF-8 string or integer `Tensor` or `RaggedTensor`.\n For each string from the input tensor, the final, extra dimension contains\n the tokens that string was split into.\n \"\"\"\n raise NotImplementedError(\"Abstract method\")\n\n def split(self, input): # pylint: disable=redefined-builtin\n \"\"\"Alias for `Tokenizer.tokenize`.\"\"\"\n return self.tokenize(input)\n\n\nclass TokenizerWithOffsets(Tokenizer, SplitterWithOffsets):\n r\"\"\"Base class for tokenizer implementations that return offsets.\n\n The offsets indicate which substring from the input string was used to\n generate each token. E.g., if `input` is a single string, then each token\n `token[i]` was generated from the substring `input[starts[i]:ends[i]]`.\n\n Each TokenizerWithOffsets subclass must implement the `tokenize_with_offsets`\n method, which returns a tuple containing both the pieces and the start and\n end offsets where those pieces occurred in the input string. I.e., if\n `tokens, starts, ends = tokenize_with_offsets(s)`, then each token `token[i]`\n corresponds with `tf.strings.substr(s, starts[i], ends[i] - starts[i])`.\n\n If the tokenizer encodes tokens as strings (and not token ids), then it will\n usually be the case that these corresponding strings are equal; but that is\n not technically required. For example, a tokenizer might choose to downcase\n strings.\n\n Example:\n\n >>> class CharTokenizer(TokenizerWithOffsets):\n ... def tokenize_with_offsets(self, input):\n ... chars, starts = tf.strings.unicode_split_with_offsets(input, 'UTF-8')\n ... lengths = tf.expand_dims(tf.strings.length(input), -1)\n ... ends = tf.concat([starts[..., 1:], tf.cast(lengths, tf.int64)], -1)\n ... return chars, starts, ends\n ... def tokenize(self, input):\n ... return self.tokenize_with_offsets(input)[0]\n >>> pieces, starts, ends = CharTokenizer().split_with_offsets(\"a😊c\")\n >>> print(pieces.numpy(), starts.numpy(), ends.numpy())\n [b'a' b'\\xf0\\x9f\\x98\\x8a' b'c'] [0 1 5] [1 5 6]\n\n \"\"\"\n\n @abc.abstractmethod\n def tokenize_with_offsets(self, input): # pylint: disable=redefined-builtin\n \"\"\"Tokenizes the input tensor and returns the result with offsets.\n\n The offsets indicate which substring from the input string was used to\n generate each token. 
E.g., if `input` is a single string, then each token\n `token[i]` was generated from the substring `input[starts[i]:ends[i]]`.\n\n Example:\n\n >>> splitter = tf_text.WhitespaceTokenizer()\n >>> pieces, starts, ends = splitter.tokenize_with_offsets(\"a bb ccc\")\n >>> print(pieces.numpy(), starts.numpy(), ends.numpy())\n [b'a' b'bb' b'ccc'] [0 2 5] [1 4 8]\n >>> print(tf.strings.substr(\"a bb ccc\", starts, ends-starts))\n tf.Tensor([b'a' b'bb' b'ccc'], shape=(3,), dtype=string)\n\n Args:\n input: An N-dimensional UTF-8 string (or optionally integer) `Tensor` or\n `RaggedTensor`.\n\n Returns:\n A tuple `(tokens, start_offsets, end_offsets)` where:\n\n * `tokens` is an N+1-dimensional UTF-8 string or integer `Tensor` or\n `RaggedTensor`.\n * `start_offsets` is an N+1-dimensional integer `Tensor` or\n `RaggedTensor` containing the starting indices of each token (byte\n indices for input strings).\n * `end_offsets` is an N+1-dimensional integer `Tensor` or\n `RaggedTensor` containing the exclusive ending indices of each token\n (byte indices for input strings).\n \"\"\"\n raise NotImplementedError(\"Abstract method\")\n\n def split_with_offsets(self, input): # pylint: disable=redefined-builtin\n \"\"\"Alias for `TokenizerWithOffsets.tokenize_with_offsets`.\"\"\"\n return self.tokenize_with_offsets(input)\n\n\nclass Detokenizer(module.Module):\n \"\"\"Base class for detokenizer implementations.\n\n A Detokenizer is a module that combines tokens to form strings. Generally,\n subclasses of `Detokenizer` will also be subclasses of `Tokenizer`; and the\n `detokenize` method will be the inverse of the `tokenize` method. I.e.,\n `tokenizer.detokenize(tokenizer.tokenize(s)) == s`.\n\n Each Detokenizer subclass must implement a `detokenize` method, which combines\n tokens together to form strings. E.g.:\n\n >>> class SimpleDetokenizer(tf_text.Detokenizer):\n ... def detokenize(self, input):\n ... return tf.strings.reduce_join(input, axis=-1, separator=\" \")\n >>> text = tf.ragged.constant([[\"hello\", \"world\"], [\"a\", \"b\", \"c\"]])\n >>> print(SimpleDetokenizer().detokenize(text))\n tf.Tensor([b'hello world' b'a b c'], shape=(2,), dtype=string)\n \"\"\"\n\n __metaclass__ = abc.ABCMeta\n\n @abc.abstractmethod\n def detokenize(self, input): # pylint: disable=redefined-builtin\n \"\"\"Assembles the tokens in the input tensor into a string.\n\n Generally, `detokenize` is the inverse of the `tokenize` method, and can\n be used to reconstruct a string from a set of tokens. 
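To make the inverse relationship concrete, the two docstring examples above can be paired into one runnable round trip (whitespace splitting is lossless under single-space joining, so `detokenize(tokenize(s))` recovers `s` here):

```python
# Combines the SimpleTokenizer / SimpleDetokenizer docstring examples above.
import tensorflow as tf
import tensorflow_text as tf_text

class SimpleTokenizer(tf_text.Tokenizer):
  def tokenize(self, input):  # pylint: disable=redefined-builtin
    return tf.strings.split(input)

class SimpleDetokenizer(tf_text.Detokenizer):
  def detokenize(self, input):  # pylint: disable=redefined-builtin
    return tf.strings.reduce_join(input, axis=-1, separator=" ")

s = tf.constant(["hello world", "a b c"])
round_trip = SimpleDetokenizer().detokenize(SimpleTokenizer().tokenize(s))
print(round_trip)  # tf.Tensor([b'hello world' b'a b c'], shape=(2,), dtype=string)
```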
This is especially\n helpful in cases where the tokens are integer ids, such as indexes into a\n vocabulary table -- in that case, the tokenized encoding is not very\n human-readable (since it's just a list of integers), so the `detokenize`\n method can be used to turn it back into something that's more readable.\n\n Args:\n input: An N-dimensional UTF-8 string or integer `Tensor` or\n `RaggedTensor`.\n\n Returns:\n An (N-1)-dimensional UTF-8 string `Tensor` or `RaggedTensor`.\n \"\"\"\n raise NotImplementedError(\"Abstract method\")\n", "id": "8441942", "language": "Python", "matching_score": 2.6249194145202637, "max_stars_count": 0, "path": "tensorflow_text/python/ops/tokenization.py" }, { "content": "# coding=utf-8\n# Copyright 2022 TF.Text Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Various TensorFlow ops related to text-processing.\"\"\"\n\n# pylint: disable=g-import-not-at-top,g-statement-before-imports\ntry:\n from tensorflow.python.ops.ragged import ragged_ops as _ragged_ops\nexcept ImportError:\n pass\nfrom tensorflow_text.core.pybinds.pywrap_fast_wordpiece_tokenizer_model_builder import build_fast_wordpiece_model\nfrom tensorflow_text.python.ops.bert_tokenizer import BertTokenizer\nfrom tensorflow_text.python.ops.create_feature_bitmask_op import create_feature_bitmask\nfrom tensorflow_text.python.ops.fast_wordpiece_tokenizer import FastWordpieceTokenizer\nfrom tensorflow_text.python.ops.greedy_constrained_sequence_op import greedy_constrained_sequence\nfrom tensorflow_text.python.ops.hub_module_splitter import HubModuleSplitter\nfrom tensorflow_text.python.ops.hub_module_tokenizer import HubModuleTokenizer\nfrom tensorflow_text.python.ops.item_selector_ops import FirstNItemSelector\nfrom tensorflow_text.python.ops.item_selector_ops import RandomItemSelector\nfrom tensorflow_text.python.ops.masking_ops import mask_language_model\nfrom tensorflow_text.python.ops.masking_ops import MaskValuesChooser\nfrom tensorflow_text.python.ops.mst_ops import max_spanning_tree\nfrom tensorflow_text.python.ops.mst_ops import max_spanning_tree_gradient\nfrom tensorflow_text.python.ops.ngrams_op import ngrams\nfrom tensorflow_text.python.ops.ngrams_op import Reduction\nfrom tensorflow_text.python.ops.normalize_ops import case_fold_utf8\nfrom tensorflow_text.python.ops.normalize_ops import find_source_offsets\nfrom tensorflow_text.python.ops.normalize_ops import normalize_utf8\nfrom tensorflow_text.python.ops.normalize_ops import normalize_utf8_with_offsets_map\nfrom tensorflow_text.python.ops.pad_along_dimension_op import pad_along_dimension\nfrom tensorflow_text.python.ops.pad_model_inputs_ops import pad_model_inputs\nfrom tensorflow_text.python.ops.pointer_ops import gather_with_default\nfrom tensorflow_text.python.ops.pointer_ops import span_alignment\nfrom tensorflow_text.python.ops.pointer_ops import span_overlaps\nfrom tensorflow_text.python.ops.regex_split_ops import regex_split\nfrom tensorflow_text.python.ops.regex_split_ops import regex_split_with_offsets\nfrom 
tensorflow_text.python.ops.regex_split_ops import RegexSplitter\nfrom tensorflow_text.python.ops.segment_combiner_ops import combine_segments\nfrom tensorflow_text.python.ops.sentence_breaking_ops import sentence_fragments\nfrom tensorflow_text.python.ops.sentencepiece_tokenizer import SentencepieceTokenizer\nfrom tensorflow_text.python.ops.sliding_window_op import sliding_window\nfrom tensorflow_text.python.ops.split_merge_from_logits_tokenizer import SplitMergeFromLogitsTokenizer\nfrom tensorflow_text.python.ops.split_merge_tokenizer import SplitMergeTokenizer\nfrom tensorflow_text.python.ops.splitter import Splitter\nfrom tensorflow_text.python.ops.splitter import SplitterWithOffsets\nfrom tensorflow_text.python.ops.state_based_sentence_breaker_op import StateBasedSentenceBreaker\nfrom tensorflow_text.python.ops.string_ops import coerce_to_structurally_valid_utf8\nfrom tensorflow_text.python.ops.tokenization import Detokenizer\nfrom tensorflow_text.python.ops.tokenization import Tokenizer\nfrom tensorflow_text.python.ops.tokenization import TokenizerWithOffsets\nfrom tensorflow_text.python.ops.trimmer_ops import RoundRobinTrimmer\nfrom tensorflow_text.python.ops.trimmer_ops import ShrinkLongestTrimmer\nfrom tensorflow_text.python.ops.trimmer_ops import WaterfallTrimmer\nfrom tensorflow_text.python.ops.unicode_char_tokenizer import UnicodeCharTokenizer\nfrom tensorflow_text.python.ops.unicode_script_tokenizer import UnicodeScriptTokenizer\nfrom tensorflow_text.python.ops.viterbi_constrained_sequence_op import viterbi_constrained_sequence\nfrom tensorflow_text.python.ops.whitespace_tokenizer import WhitespaceTokenizer\nfrom tensorflow_text.python.ops.wordpiece_tokenizer import WordpieceTokenizer\nfrom tensorflow_text.python.ops.wordshape_ops import WordShape\nfrom tensorflow_text.python.ops.wordshape_ops import wordshape\n", "id": "322180", "language": "Python", "matching_score": 2.320866823196411, "max_stars_count": 0, "path": "tensorflow_text/python/ops/__init__.py" }, { "content": "# coding=utf-8\n# Copyright 2022 TF.Text Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Text shape ops.\n\nA variety of useful regex helper functions using the RE2 library\n(string_ops.regex_full_match) for matching various relevant patterns within\ninput text.\n\nNaming convention:\n is_$PROPERTY: the entire string is composed of $PROPERTY\n has_$PROPERTY: the string contains at least one $PROPERTY.\n has_no_$PROPERTY: the string does not contain any $PROPERTY.\n begins_with_$PROPERTY: the string begins with $PROPERTY characters.\n ends_with_$PROPERTY: the string ends with $PROPERTY characters.\n\"\"\"\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nimport re\nimport enum\n\nfrom tensorflow.python.framework import ops\nfrom tensorflow.python.ops import array_ops\nfrom tensorflow.python.ops import string_ops\n\n#===============================================================================\n# Implementation: Regular 
Expressions for WordShapes\n#===============================================================================\n\n\ndef _emoticon_regex():\n \"\"\"Regexp to detect emoticons.\"\"\"\n emoticons = [\n \":-)\", \":)\", \":o)\", \":]\", \":3\", \":>\", \"=]\", \"=)\", \":}\", \":^)\", \":-D\",\n \":-))\", \":-)))\", \":-))))\", \":-)))))\", \">:[\", \":-(\", \":(\", \":-c\", \":c\",\n \":-<\", \":<\", \":-[\", \":[\", \":{\", \";(\", \":-||\", \":@\", \">:(\", \":'-(\", \":'(\",\n \":'-)\", \":')\", \"D:<\", \">:O\", \":-O\", \":-o\", \":*\", \":-*\", \":^*\", \";-)\",\n \";)\", \"*-)\", \"*)\", \";-]\", \";]\", \";^)\", \":-,\", \">:P\", \":-P\", \":p\", \"=p\",\n \":-p\", \"=p\", \":P\", \"=P\", \";p\", \";-p\", \";P\", \";-P\",\n \">:\\\\\", \">:/\", \":-/\", \":-.\", \":/\", \":\\\\\", \"=/\", \"=\\\\\", \":|\", \":-|\", \":$\",\n \":-#\", \":#\", \"O:-)\", \"0:-)\", \"0:)\", \"0;^)\", \">:)\", \">;)\", \">:-)\", \"}:-)\",\n \"}:)\", \"3:-)\", \">_>^\", \"^<_<\", \"|;-)\", \"|-O\", \":-J\", \":-&\", \":&\", \"#-)\",\n \"%-)\", \"%)\", \"<:-|\", \"~:-\\\\\", \"*<|:-)\", \"=:o]\", \",:-)\", \"7:^]\", \"</3\",\n \"<3\", \"8-)\", \"^_^\", \":D\", \":-D\", \"=D\", \"^_^;;\", \"O=)\", \"}=)\", \"B)\", \"B-)\",\n \"=|\", \"-_-\", \"o_o;\", \"u_u\", \":-\\\\\", \":s\", \":S\", \":-s\", \":-S\", \";*\", \";-*\",\n \":(\", \"=(\", \">.<\", \">:-(\", \">:(\", \">=(\", \";_;\", \"T_T\", \"='(\", \">_<\", \"D:\",\n \":o\", \":-o\", \"=o\", \"o.o\", \":O\", \":-O\", \"=O\", \"O.O\", \"x_x\", \"X-(\", \"X(\",\n \"X-o\", \"X-O\", \":X)\", \"(=^.^=)\", \"(=^..^=)\", \"=^_^=\", \"-<@%\", \":(|)\",\n \":(:)\", \"(]:{\", \"<\\\\3\", \"~@~\", \"8'(\", \"XD\", \"DX\"\n ]\n # Note: unicode-containing emojis are added manually-escaped here.\n return \"|\".join(map(re.escape, emoticons)) + \"|\" + \"|\".join(\n [u\"\\\\:\\u3063\\\\)\", u\"\\\\:\\u3063C\", u\"\\u0ca0\\\\_\\u0ca0\"])\n\n\ndef _emoji_regex():\n \"\"\"Returns regexp to detect emoji characters.\n\n Generated from https://unicode.org/emoji/charts/full-emoji-list.html,\n https://unicode.org/Public/emoji/13.0/emoji-sequences.txt.\n \"\"\"\n char_class = \"\".join([\n \"[\",\n u\"\\u203c\", u\"\\u2049\", u\"\\u2139\",\n u\"\\u2194\", \"-\", u\"\\u2199\",\n u\"\\u21a9\", u\"\\u21aa\",\n u\"\\u231a\", u\"\\u231b\",\n u\"\\u2328\", u\"\\u23cf\",\n u\"\\u23e9\", \"-\", u\"\\u23f3\",\n u\"\\u23f8\", \"-\", u\"\\u23fa\",\n u\"\\u24c2\", u\"\\u25aa\", u\"\\u25ab\",\n u\"\\u25b6\", u\"\\u25c0\",\n u\"\\u25fb\", \"-\", u\"\\u25fe\",\n u\"\\u2600\", \"-\", u\"\\u26ff\",\n u\"\\u2702\", u\"\\u2705\",\n u\"\\u2708\", \"-\", u\"\\u270d\", u\"\\u270f\",\n u\"\\u2712\", u\"\\u2714\", u\"\\u2716\", u\"\\u271d\",\n u\"\\u2721\", u\"\\u2728\", u\"\\u2733\", u\"\\u2734\",\n u\"\\u2744\", u\"\\u2747\", u\"\\u274c\", u\"\\u274e\",\n u\"\\u2753\", \"-\", u\"\\u2755\", u\"\\u2757\",\n u\"\\u2763\", u\"\\u2764\",\n u\"\\u2795\", \"-\", u\"\\u2797\",\n u\"\\u2934\", u\"\\u2935\",\n u\"\\u2b05\", \"-\", u\"\\u2b07\",\n u\"\\u2b1b\", u\"\\u2b1c\", u\"\\u2b50\", u\"\\u2b55\",\n u\"\\u3030\", u\"\\u303d\", u\"\\u3297\", u\"\\u3299\",\n u\"\\U0001f004\", u\"\\U0001f0cf\",\n u\"\\U0001f170\", u\"\\U0001f171\", u\"\\U0001f17e\", u\"\\U0001f17f\",\n u\"\\U0001f18e\",\n u\"\\U0001f191\", \"-\", u\"\\U0001f19a\",\n u\"\\U0001f1e6\", \"-\", u\"\\U0001f1ff\",\n u\"\\U0001f201\", u\"\\U0001f202\",\n u\"\\U0001f21a\", u\"\\U0001f22f\",\n u\"\\U0001f232\", \"-\", u\"\\U0001f23a\",\n u\"\\U0001f250\", u\"\\U0001f251\",\n u\"\\U0001f300\", \"-\", u\"\\U0001f6ff\",\n u\"\\U0001f900\", \"-\", u\"\\U0001f9ff\",\n 
u\"\\U0001fa70\", \"-\", u\"\\U0001fa74\",\n u\"\\U0001fa78\", \"-\", u\"\\U0001fa7a\",\n u\"\\U0001fa80\", \"-\", u\"\\U0001fa86\",\n u\"\\U0001fa90\", \"-\", u\"\\U0001faa8\",\n u\"\\U0001fab0\", \"-\", u\"\\U0001fab6\",\n u\"\\U0001fac0\", \"-\", u\"\\U0001fac2\",\n u\"\\U0001fad0\", \"-\", u\"\\U0001fad6\",\n \"]\"\n ]) # pyformat:disable\n return \".*\" + char_class + \".*\"\n\n\ndef _begins_with_open_quote_regex():\n # Note: RE2 syntax doesn't support char class intersection.\n char_class = \"\".join([\n \"\\\"\", \"'\", \"`\", u\"\\uff07\", u\"\\uff02\", u\"\\u2018\", u\"\\u201a\", u\"\\u201b\",\n u\"\\u201c\", u\"\\u00ab\", u\"\\u201e\", u\"\\u201f\" + u\"\\u2039\", u\"\\u300c\",\n u\"\\u300e\", u\"\\u301d\", u\"\\u2e42\" + u\"\\uff62\", u\"\\ufe41\", u\"\\ufe43\"\n ])\n return \"``.*|[\" + char_class + \"][^\" + char_class + \"]*\"\n\n\ndef _ends_with_close_quote_regex():\n char_class = \"\".join([\n \"\\\"\", \"'\", \"`\", u\"\\uff07\", u\"\\uff02\", u\"\\u00bb\", u\"\\u2019\", u\"\\u201d\",\n u\"\\u203a\", u\"\\u300d\", u\"\\u300f\", u\"\\u301e\" + u\"\\u301f\", u\"\\ufe42\",\n u\"\\ufe44\", u\"\\uff63\"\n ])\n\n return \".*''|[^\" + char_class + \"]*[\" + char_class + \"]\"\n\n\nclass WordShape(enum.Enum):\n \"\"\"Values for the 'pattern' arg of the wordshape op.\n\n The supported wordshape identifiers are:\n %(identifier_list)s\n \"\"\"\n HAS_PUNCTUATION_DASH = r\".*\\p{Pd}+.*\"\n HAS_NO_DIGITS = r\"\\P{Nd}*\"\n HAS_SOME_DIGITS = r\".*\\P{Nd}\\p{Nd}.*|.*\\p{Nd}\\P{Nd}.*\"\n HAS_ONLY_DIGITS = r\"\\p{Nd}+\"\n IS_NUMERIC_VALUE = r\"([+-]?((\\p{Nd}+\\.?\\p{Nd}*)|(\\.\\p{Nd}+)))([eE]-?\\p{Nd}+)?\"\n # IS_WHITESPACE = r\"\\p{Whitespace}+\"\n HAS_NO_PUNCT_OR_SYMBOL = r\"[^\\p{P}\\p{S}]*\"\n HAS_SOME_PUNCT_OR_SYMBOL = r\".*[^\\p{P}\\p{S}][\\p{P}\\p{S}].*|.*[\\p{P}\\p{S}][^\\p{P}\\p{S}].*\" # pylint: disable=line-too-long\n IS_PUNCT_OR_SYMBOL = r\"[\\p{P}|\\p{S}]+\"\n BEGINS_WITH_PUNCT_OR_SYMBOL = r\"[\\p{P}\\p{S}].*\"\n ENDS_WITH_PUNCT_OR_SYMBOL = r\".*[\\p{P}\\p{S}]\"\n # ENDS_WITH_SENTENCE_TERMINAL = r\".*[\\p{Sentence_Terminal}]\"\n # ENDS_WITH_MULTIPLE_SENTENCE_TERMINAL = r\".*[\\p{Sentence_Terminal}]{2}\"\n # ENDS_WITH_TERMINAL_PUNCT = r\".*[\\p{Terminal_Punctuation}]\"\n # ENDS_WITH_MULTIPLE_TERMINAL_PUNCT = r\".*[\\p{Terminal_Punctuation}]{2}\"\n ENDS_WITH_ELLIPSIS = r\".*(\\.{3}|[\" + u\"\\u2026\" + u\"\\u22ef\" + \"])\"\n IS_EMOTICON = _emoticon_regex()\n ENDS_WITH_EMOTICON = r\".*(\" + _emoticon_regex() + r\")$\"\n HAS_EMOJI = r\".*(\" + _emoji_regex() + r\")$\"\n IS_ACRONYM_WITH_PERIODS = r\"(\\p{Lu}\\.)+\"\n IS_UPPERCASE = r\"\\p{Lu}+\"\n IS_LOWERCASE = r\"\\p{Ll}+\"\n HAS_MIXED_CASE = r\".*\\p{Lu}.*\\p{Ll}.*|.*\\p{Ll}.*\\p{Lu}.*\"\n IS_MIXED_CASE_LETTERS = r\"\\p{L}*\\p{Lu}\\p{L}*\\p{Ll}\\p{L}*|\\p{L}*\\p{Ll}\\p{L}*\\p{Lu}\\p{L}*\" # pylint: disable=line-too-long\n # Is a single capital letter alone a title case?\n HAS_TITLE_CASE = r\"\\P{L}*[\\p{Lu}\\p{Lt}]\\p{Ll}+.*\"\n # HAS_NO_QUOTES = \"[^\\\"'`\\\\p{Quotation_Mark}]*\"\n BEGINS_WITH_OPEN_QUOTE = _begins_with_open_quote_regex()\n ENDS_WITH_CLOSE_QUOTE = _ends_with_close_quote_regex()\n # HAS_QUOTE = r\"^[`\\p{Quotation_Mark}].*|.*[`\\p{Quotation_Mark}]$\"\n HAS_MATH_SYMBOL = r\".*\\p{Sm}.*\"\n HAS_CURRENCY_SYMBOL = r\".*\\p{Sc}.*\"\n HAS_NON_LETTER = r\".*\\P{L}.*\"\n\n\n# Note that the entries in _wordshape_doc must be indented 10 spaces to display\n# correctly in the docstring.\n_wordshape_doc = {\n WordShape.HAS_PUNCTUATION_DASH:\n \"\"\"\n The input contains at least one unicode dash character.\n\n Note that this 
uses the Pd (Dash) unicode property. This property will\n not match to soft-hyphens and katakana middle dot characters.\n \"\"\",\n WordShape.HAS_NO_DIGITS:\n \"\"\"\n The input contains no digit characters.\n \"\"\",\n WordShape.HAS_SOME_DIGITS:\n \"\"\"\n The input contains a mix of digit characters and non-digit\n characters.\n \"\"\",\n WordShape.HAS_ONLY_DIGITS:\n \"\"\"\n The input consists entirely of unicode digit characters.\n \"\"\",\n WordShape.IS_NUMERIC_VALUE:\n \"\"\"\n The input is parseable as a numeric value. This will match a\n fairly broad set of floating point and integer representations (but\n not Nan or Inf).\n \"\"\",\n # IS_WHITESPACE docs\n WordShape.HAS_NO_PUNCT_OR_SYMBOL:\n \"\"\"\n The input contains no unicode punctuation or symbol characters.\n \"\"\",\n WordShape.HAS_SOME_PUNCT_OR_SYMBOL:\n \"\"\"\n The input contains a mix of punctuation or symbol characters,\n and non-punctuation non-symbol characters.\n \"\"\",\n WordShape.IS_PUNCT_OR_SYMBOL:\n \"\"\"\n The input contains only punctuation and symbol characters.\n \"\"\",\n WordShape.BEGINS_WITH_PUNCT_OR_SYMBOL:\n \"\"\"\n The input starts with a punctuation or symbol character.\n \"\"\",\n WordShape.ENDS_WITH_PUNCT_OR_SYMBOL:\n \"\"\"\n The input ends with a punctuation or symbol character.\n \"\"\",\n # ENDS_WITH_SENTENCE_TERMINAL docs\n # ENDS_WITH_MULTIPLE_SENTENCE_TERMINAL docs\n # ENDS_WITH_TERMINAL_PUNCT docs\n # ENDS_WITH_MULTIPLE_TERMINAL_PUNCT docs\n WordShape.ENDS_WITH_ELLIPSIS:\n \"\"\"\n The input ends with an ellipsis (i.e. with three or more\n periods or a unicode ellipsis character).\"\"\",\n WordShape.IS_EMOTICON:\n \"\"\"\n The input is a single emoticon.\n \"\"\",\n WordShape.ENDS_WITH_EMOTICON:\n \"\"\"\n The input ends with an emoticon.\n \"\"\",\n WordShape.HAS_EMOJI:\n \"\"\"\n The input contains an emoji character.\n\n See http://www.unicode.org/Public/emoji/1.0//emoji-data.txt.\n Emojis are in unicode ranges `2600-26FF`, `1F300-1F6FF`, and\n `1F900-1F9FF`.\n \"\"\",\n WordShape.IS_ACRONYM_WITH_PERIODS:\n \"\"\"\n The input is a period-separated acronym.\n This matches for strings of the form \"I.B.M.\" but not \"IBM\".\n \"\"\",\n WordShape.IS_UPPERCASE:\n \"\"\"\n The input contains only uppercase letterforms.\n \"\"\",\n WordShape.IS_LOWERCASE:\n \"\"\"\n The input contains only lowercase letterforms.\n \"\"\",\n WordShape.HAS_MIXED_CASE:\n \"\"\"\n The input contains both uppercase and lowercase letterforms.\n \"\"\",\n WordShape.IS_MIXED_CASE_LETTERS:\n \"\"\"\n The input contains only uppercase and lowercase letterforms.\n \"\"\",\n WordShape.HAS_TITLE_CASE:\n \"\"\"\n The input has title case (i.e. 
the first character is upper or title\n case, and the remaining characters are lowercase).\n \"\"\",\n # HAS_NO_QUOTES docs\n WordShape.BEGINS_WITH_OPEN_QUOTE:\n r\"\"\"\n The input begins with an open quote.\n\n The following strings are considered open quotes:\n\n ```\n \" QUOTATION MARK\n ' APOSTROPHE\n ` GRAVE ACCENT\n `` Pair of GRAVE ACCENTs\n \\uFF02 FULLWIDTH QUOTATION MARK\n \\uFF07 FULLWIDTH APOSTROPHE\n \\u00AB LEFT-POINTING DOUBLE ANGLE QUOTATION MARK\n \\u2018 LEFT SINGLE QUOTATION MARK\n \\u201A SINGLE LOW-9 QUOTATION MARK\n \\u201B SINGLE HIGH-REVERSED-9 QUOTATION MARK\n \\u201C LEFT DOUBLE QUOTATION MARK\n \\u201E DOUBLE LOW-9 QUOTATION MARK\n \\u201F DOUBLE HIGH-REVERSED-9 QUOTATION MARK\n \\u2039 SINGLE LEFT-POINTING ANGLE QUOTATION MARK\n \\u300C LEFT CORNER BRACKET\n \\u300E LEFT WHITE CORNER BRACKET\n \\u301D REVERSED DOUBLE PRIME QUOTATION MARK\n \\u2E42 DOUBLE LOW-REVERSED-9 QUOTATION MARK\n \\uFF62 HALFWIDTH LEFT CORNER BRACKET\n \\uFE41 PRESENTATION FORM FOR VERTICAL LEFT CORNER BRACKET\n \\uFE43 PRESENTATION FORM FOR VERTICAL LEFT WHITE CORNER BRACKET\n ```\n\n Note: U+B4 (acute accent) not included.\n \"\"\",\n WordShape.ENDS_WITH_CLOSE_QUOTE:\n r\"\"\"\n The input ends with a closing quote character.\n\n The following strings are considered close quotes:\n\n ```\n \" QUOTATION MARK\n ' APOSTROPHE\n ` GRAVE ACCENT\n '' Pair of APOSTROPHEs\n \\uFF02 FULLWIDTH QUOTATION MARK\n \\uFF07 FULLWIDTH APOSTROPHE\n \\u00BB RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK\n \\u2019 RIGHT SINGLE QUOTATION MARK\n \\u201D RIGHT DOUBLE QUOTATION MARK\n \\u203A SINGLE RIGHT-POINTING ANGLE QUOTATION MARK\n \\u300D RIGHT CORNER BRACKET\n \\u300F RIGHT WHITE CORNER BRACKET\n \\u301E DOUBLE PRIME QUOTATION MARK\n \\u301F LOW DOUBLE PRIME QUOTATION MARK\n \\uFE42 PRESENTATION FORM FOR VERTICAL RIGHT CORNER BRACKET\n \\uFE44 PRESENTATION FORM FOR VERTICAL RIGHT WHITE CORNER BRACKET\n \\uFF63 HALFWIDTH RIGHT CORNER BRACKET\n ```\n\n Note: U+B4 (ACUTE ACCENT) is not included.\n \"\"\",\n # HAS_QUOTE docs\n WordShape.HAS_MATH_SYMBOL:\n \"\"\"\n The input contains a mathematical symbol.\n \"\"\",\n WordShape.HAS_CURRENCY_SYMBOL:\n \"\"\"\n The input contains a currency symbol.\n \"\"\",\n WordShape.HAS_NON_LETTER:\n \"\"\"\n The input contains a non-letter character.\n \"\"\",\n}\n\n\ndef _add_identifier_list_to_docstring(func):\n items = [(\"WordShape.\" + ws.name, doc) for ws, doc in _wordshape_doc.items()]\n identifier_list = \"\".join(\n \"\\n * `%s`:%s\\n\" % (name, doc) for (name, doc) in sorted(items))\n func.__doc__ = func.__doc__ % dict(identifier_list=identifier_list)\n\n\n# Use the wordshape docstring we created above.\n_add_identifier_list_to_docstring(WordShape)\n\n\ndef wordshape(input_tensor, pattern, name=None):\n r\"\"\"Determine wordshape features for each input string.\n\n In this example, we test for title case (the first character is upper or\n title case, and the remaining characters are lowercase).\n >>> input = [\n ... u\"abc\", u\"ABc\", u\"ABC\", u\"Abc\", u\"aBcd\", u\"\\u01c8bc\".encode(\"utf-8\")\n ... 
]\n >>> wordshape(input, WordShape.HAS_TITLE_CASE)\n <tf.Tensor: shape=(6,), dtype=bool,\n numpy=array([False, False, False, True, False, True])>\n\n Args:\n input_tensor: string `Tensor` with any shape.\n pattern: A `tftext.WordShape` or a list of WordShapes.\n name: A name for the operation (optional).\n\n Returns:\n `<bool>[input_tensor.shape + pattern.shape]`: A tensor where\n `result[i1...iN, j]` is true if `input_tensor[i1...iN]` has the wordshape\n specified by `pattern[j]`.\n\n Raises:\n ValueError: If `pattern` contains an unknown identifier.\n \"\"\"\n if isinstance(pattern, WordShape):\n return string_ops.regex_full_match(input_tensor, pattern.value, name)\n elif (isinstance(pattern, (list, tuple)) and\n all(isinstance(s, WordShape) for s in pattern)):\n with ops.name_scope(name, \"Wordshape\", input_tensor):\n return array_ops.stack([wordshape(input_tensor, s) for s in pattern],\n axis=-1)\n else:\n raise TypeError(\n \"Expected 'pattern' to be a single WordShape or a list of WordShapes.\")\n", "id": "8297859", "language": "Python", "matching_score": 1.1989011764526367, "max_stars_count": 0, "path": "tensorflow_text/python/ops/wordshape_ops.py" }, { "content": "\"\"\"Build rule generator for locally installed CUDA toolkit and cuDNN SDK.\"\"\"\n\ndef _get_env_var(repository_ctx, name, default):\n if name in repository_ctx.os.environ:\n return repository_ctx.os.environ[name]\n return default\n\ndef _impl(repository_ctx):\n cuda_path = _get_env_var(repository_ctx, \"CUDA_PATH\", \"/usr/local/cuda\")\n cudnn_path = _get_env_var(repository_ctx, \"CUDNN_PATH\", cuda_path)\n\n print(\"Using CUDA from %s\\n\" % cuda_path)\n print(\"Using cuDNN from %s\\n\" % cudnn_path)\n\n repository_ctx.symlink(cuda_path, \"cuda\")\n repository_ctx.symlink(cudnn_path, \"cudnn\")\n\n repository_ctx.file(\"nvcc.sh\", \"\"\"\n#! 
/bin/bash\nrepo_path=%s\ncompiler=${CC:+\"--compiler-bindir=$CC\"}\n$repo_path/cuda/bin/nvcc $compiler --compiler-options=-fPIC --include-path=$repo_path $*\n\"\"\" % repository_ctx.path(\".\"))\n\n repository_ctx.file(\"BUILD\", \"\"\"\npackage(default_visibility = [\"//visibility:public\"])\n\nsh_binary(\n name = \"nvcc\",\n srcs = [\"nvcc.sh\"],\n)\n\n# The *_headers cc_library rules below aren't cc_inc_library rules because\n# dependent targets would only see the first one.\n\ncc_library(\n name = \"cuda_headers\",\n hdrs = glob(\n include = [\"cuda/include/**/*.h*\"],\n exclude = [\"cuda/include/cudnn.h\"]\n ),\n # Allows including CUDA headers with angle brackets.\n includes = [\"cuda/include\"],\n)\n\ncc_library(\n name = \"cuda\",\n srcs = [\"cuda/lib64/stubs/libcuda.so\"],\n linkopts = [\"-ldl\"],\n)\n\ncc_library(\n name = \"cuda_runtime\",\n srcs = [\"cuda/lib64/libcudart_static.a\"],\n deps = [\":cuda\"],\n linkopts = [\"-lrt\"],\n)\n\ncc_library(\n name = \"curand_static\",\n srcs = [\n \"cuda/lib64/libcurand_static.a\",\n \"cuda/lib64/libculibos.a\",\n ],\n)\n\ncc_library(\n name = \"cupti_headers\",\n hdrs = glob([\"cuda/extras/CUPTI/include/**/*.h\"]),\n # Allows including CUPTI headers with angle brackets.\n includes = [\"cuda/extras/CUPTI/include\"],\n)\n\ncc_library(\n name = \"cupti\",\n srcs = glob([\"cuda/extras/CUPTI/lib64/libcupti.so*\"]),\n)\n\ncc_library(\n name = \"cudnn\",\n srcs = [\n \"cudnn/lib64/libcudnn_static.a\",\n \"cuda/lib64/libcublas_static.a\",\n \"cuda/lib64/libculibos.a\",\n ],\n hdrs = [\"cudnn/include/cudnn.h\"],\n deps = [\n \":cuda\",\n \":cuda_headers\"\n ],\n)\n\ncc_library(\n name = \"cuda_util\",\n deps = [\":cuda_util_compile\"],\n)\n\"\"\")\n\ncuda_configure = repository_rule(\n implementation = _impl,\n environ = [\"CUDA_PATH\", \"CUDNN_PATH\"],\n)\n", "id": "11461473", "language": "Python", "matching_score": 0.17705808579921722, "max_stars_count": 0, "path": "cuda_configure.bzl" }, { "content": "# Copyright 2019 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n# ==============================================================================\n\"\"\"Module for RNN Cells.\"\"\"\n\nimport tensorflow as tf\nimport tensorflow.keras as keras\nfrom typeguard import typechecked\n\nfrom tensorflow_addons.utils.types import (\n Activation,\n FloatTensorLike,\n TensorLike,\n Initializer,\n Constraint,\n Regularizer,\n)\nfrom typing import Optional\n\n\n@tf.keras.utils.register_keras_serializable(package=\"Addons\")\nclass NASCell(keras.layers.AbstractRNNCell):\n \"\"\"Neural Architecture Search (NAS) recurrent network cell.\n\n This implements the recurrent cell from the paper:\n\n https://arxiv.org/abs/1611.01578\n\n <NAME> and <NAME>.\n \"Neural Architecture Search with Reinforcement Learning\" Proc. 
ICLR 2017.\n\n The class uses an optional projection layer.\n \"\"\"\n\n # NAS cell's architecture base.\n _NAS_BASE = 8\n\n @typechecked\n def __init__(\n self,\n units: TensorLike,\n projection: Optional[FloatTensorLike] = None,\n use_bias: bool = False,\n kernel_initializer: Initializer = \"glorot_uniform\",\n recurrent_initializer: Initializer = \"glorot_uniform\",\n projection_initializer: Initializer = \"glorot_uniform\",\n bias_initializer: Initializer = \"zeros\",\n **kwargs\n ):\n \"\"\"Initialize the parameters for a NAS cell.\n\n Args:\n units: int, The number of units in the NAS cell.\n projection: (optional) int, The output dimensionality for the\n projection matrices. If None, no projection is performed.\n use_bias: (optional) bool, If True then use biases within the cell.\n This is False by default.\n kernel_initializer: Initializer for kernel weight.\n recurrent_initializer: Initializer for recurrent kernel weight.\n projection_initializer: Initializer for projection weight, used when\n projection is not None.\n bias_initializer: Initializer for bias, used when use_bias is True.\n **kwargs: Additional keyword arguments.\n \"\"\"\n super().__init__(**kwargs)\n self.units = units\n self.projection = projection\n self.use_bias = use_bias\n self.kernel_initializer = kernel_initializer\n self.recurrent_initializer = recurrent_initializer\n self.projection_initializer = projection_initializer\n self.bias_initializer = bias_initializer\n\n if projection is not None:\n self._state_size = [units, projection]\n self._output_size = projection\n else:\n self._state_size = [units, units]\n self._output_size = units\n\n @property\n def state_size(self):\n return self._state_size\n\n @property\n def output_size(self):\n return self._output_size\n\n def build(self, inputs_shape):\n input_size = tf.compat.dimension_value(\n tf.TensorShape(inputs_shape).with_rank(2)[1]\n )\n if input_size is None:\n raise ValueError(\"Could not infer input size from inputs.get_shape()[-1]\")\n\n # Variables for the NAS cell. `recurrent_kernel` is all matrices\n # multiplying the hidden state and `kernel` is all matrices multiplying\n # the inputs.\n self.recurrent_kernel = self.add_weight(\n name=\"recurrent_kernel\",\n shape=[self.output_size, self._NAS_BASE * self.units],\n initializer=self.recurrent_initializer,\n )\n self.kernel = self.add_weight(\n name=\"kernel\",\n shape=[input_size, self._NAS_BASE * self.units],\n initializer=self.kernel_initializer,\n )\n\n if self.use_bias:\n self.bias = self.add_weight(\n name=\"bias\",\n shape=[self._NAS_BASE * self.units],\n initializer=self.bias_initializer,\n )\n # Projection layer if specified\n if self.projection is not None:\n self.projection_weights = self.add_weight(\n name=\"projection_weights\",\n shape=[self.units, self.projection],\n initializer=self.projection_initializer,\n )\n\n self.built = True\n\n def call(self, inputs, state):\n \"\"\"Run one step of NAS Cell.\n\n Args:\n inputs: input Tensor, 2D, batch x num_units.\n state: This must be a list of state Tensors, both `2-D`, with column\n sizes `c_state` and `m_state`.\n\n Returns:\n A tuple containing:\n - A `2-D, [batch x output_dim]`, Tensor representing the output of\n the NAS Cell after reading `inputs` when previous state was\n `state`.\n Here output_dim is:\n projection if projection was set, units otherwise.\n - Tensor(s) representing the new state of NAS Cell after reading\n `inputs` when the previous state was `state`. 
Same type and\n shape(s) as `state`.\n\n Raises:\n ValueError: If input size cannot be inferred from inputs via\n static shape inference.\n \"\"\"\n sigmoid = tf.math.sigmoid\n tanh = tf.math.tanh\n relu = tf.nn.relu\n\n c_prev, m_prev = state\n\n m_matrix = tf.matmul(m_prev, self.recurrent_kernel)\n inputs_matrix = tf.matmul(inputs, self.kernel)\n\n if self.use_bias:\n m_matrix = tf.nn.bias_add(m_matrix, self.bias)\n\n # The NAS cell branches into 8 different splits for both the hidden\n # state and the input\n m_matrix_splits = tf.split(\n axis=1, num_or_size_splits=self._NAS_BASE, value=m_matrix\n )\n inputs_matrix_splits = tf.split(\n axis=1, num_or_size_splits=self._NAS_BASE, value=inputs_matrix\n )\n\n # First layer\n layer1_0 = sigmoid(inputs_matrix_splits[0] + m_matrix_splits[0])\n layer1_1 = relu(inputs_matrix_splits[1] + m_matrix_splits[1])\n layer1_2 = sigmoid(inputs_matrix_splits[2] + m_matrix_splits[2])\n layer1_3 = relu(inputs_matrix_splits[3] * m_matrix_splits[3])\n layer1_4 = tanh(inputs_matrix_splits[4] + m_matrix_splits[4])\n layer1_5 = sigmoid(inputs_matrix_splits[5] + m_matrix_splits[5])\n layer1_6 = tanh(inputs_matrix_splits[6] + m_matrix_splits[6])\n layer1_7 = sigmoid(inputs_matrix_splits[7] + m_matrix_splits[7])\n\n # Second layer\n l2_0 = tanh(layer1_0 * layer1_1)\n l2_1 = tanh(layer1_2 + layer1_3)\n l2_2 = tanh(layer1_4 * layer1_5)\n l2_3 = sigmoid(layer1_6 + layer1_7)\n\n # Inject the cell\n l2_0 = tanh(l2_0 + c_prev)\n\n # Third layer\n l3_0_pre = l2_0 * l2_1\n new_c = l3_0_pre # create new cell\n l3_0 = l3_0_pre\n l3_1 = tanh(l2_2 + l2_3)\n\n # Final layer\n new_m = tanh(l3_0 * l3_1)\n\n # Projection layer if specified\n if self.projection is not None:\n new_m = tf.matmul(new_m, self.projection_weights)\n\n return new_m, [new_c, new_m]\n\n def get_config(self):\n config = {\n \"units\": self.units,\n \"projection\": self.projection,\n \"use_bias\": self.use_bias,\n \"kernel_initializer\": self.kernel_initializer,\n \"recurrent_initializer\": self.recurrent_initializer,\n \"bias_initializer\": self.bias_initializer,\n \"projection_initializer\": self.projection_initializer,\n }\n base_config = super().get_config()\n return {**base_config, **config}\n\n\n@tf.keras.utils.register_keras_serializable(package=\"Addons\")\nclass LayerNormLSTMCell(keras.layers.LSTMCell):\n \"\"\"LSTM cell with layer normalization and recurrent dropout.\n\n This class adds layer normalization and recurrent dropout to a LSTM unit.\n Layer normalization implementation is based on:\n\n https://arxiv.org/abs/1607.06450.\n\n \"Layer Normalization\" <NAME>, <NAME>, <NAME>\n\n and is applied before the internal nonlinearities.\n Recurrent dropout is based on:\n\n https://arxiv.org/abs/1603.05118\n\n \"Recurrent Dropout without Memory Loss\"\n <NAME>, <NAME>, <NAME>.\n \"\"\"\n\n @typechecked\n def __init__(\n self,\n units: TensorLike,\n activation: Activation = \"tanh\",\n recurrent_activation: Activation = \"sigmoid\",\n use_bias: bool = True,\n kernel_initializer: Initializer = \"glorot_uniform\",\n recurrent_initializer: Initializer = \"orthogonal\",\n bias_initializer: Initializer = \"zeros\",\n unit_forget_bias: bool = True,\n kernel_regularizer: Regularizer = None,\n recurrent_regularizer: Regularizer = None,\n bias_regularizer: Regularizer = None,\n kernel_constraint: Constraint = None,\n recurrent_constraint: Constraint = None,\n bias_constraint: Constraint = None,\n dropout: FloatTensorLike = 0.0,\n recurrent_dropout: FloatTensorLike = 0.0,\n norm_gamma_initializer: Initializer = 
\"ones\",\n norm_beta_initializer: Initializer = \"zeros\",\n norm_epsilon: FloatTensorLike = 1e-3,\n **kwargs\n ):\n \"\"\"Initializes the LSTM cell.\n\n Args:\n units: Positive integer, dimensionality of the output space.\n activation: Activation function to use. Default: hyperbolic tangent\n (`tanh`). If you pass `None`, no activation is applied (ie.\n \"linear\" activation: `a(x) = x`).\n recurrent_activation: Activation function to use for the recurrent\n step. Default: sigmoid (`sigmoid`). If you pass `None`, no\n activation is applied (ie. \"linear\" activation: `a(x) = x`).\n use_bias: Boolean, whether the layer uses a bias vector.\n kernel_initializer: Initializer for the `kernel` weights matrix, used\n for the linear transformation of the inputs.\n recurrent_initializer: Initializer for the `recurrent_kernel` weights\n matrix, used for the linear transformation of the recurrent state.\n bias_initializer: Initializer for the bias vector.\n unit_forget_bias: Boolean. If True, add 1 to the bias of the forget\n gate at initialization. Setting it to true will also force\n `bias_initializer=\"zeros\"`. This is recommended in [Jozefowicz et\n al.](http://www.jmlr.org/proceedings/papers/v37/jozefowicz15.pdf)\n kernel_regularizer: Regularizer function applied to the `kernel`\n weights matrix.\n recurrent_regularizer: Regularizer function applied to\n the `recurrent_kernel` weights matrix.\n bias_regularizer: Regularizer function applied to the bias vector.\n kernel_constraint: Constraint function applied to the `kernel`\n weights matrix.\n recurrent_constraint: Constraint function applied to the\n `recurrent_kernel` weights matrix.\n bias_constraint: Constraint function applied to the bias vector.\n dropout: Float between 0 and 1. Fraction of the units to drop for the\n linear transformation of the inputs.\n recurrent_dropout: Float between 0 and 1. 
Fraction of the units to\n drop for the linear transformation of the recurrent state.\n norm_gamma_initializer: Initializer for the layer normalization gain\n initial value.\n norm_beta_initializer: Initializer for the layer normalization shift\n initial value.\n norm_epsilon: Float, the epsilon value for normalization layers.\n **kwargs: Dict, the other keyword arguments for layer creation.\n \"\"\"\n super().__init__(\n units,\n activation=activation,\n recurrent_activation=recurrent_activation,\n use_bias=use_bias,\n kernel_initializer=kernel_initializer,\n recurrent_initializer=recurrent_initializer,\n bias_initializer=bias_initializer,\n unit_forget_bias=unit_forget_bias,\n kernel_regularizer=kernel_regularizer,\n recurrent_regularizer=recurrent_regularizer,\n bias_regularizer=bias_regularizer,\n kernel_constraint=kernel_constraint,\n recurrent_constraint=recurrent_constraint,\n bias_constraint=bias_constraint,\n dropout=dropout,\n recurrent_dropout=recurrent_dropout,\n **kwargs,\n )\n self.norm_gamma_initializer = keras.initializers.get(norm_gamma_initializer)\n self.norm_beta_initializer = keras.initializers.get(norm_beta_initializer)\n self.norm_epsilon = norm_epsilon\n self.kernel_norm = self._create_norm_layer(\"kernel_norm\")\n self.recurrent_norm = self._create_norm_layer(\"recurrent_norm\")\n self.state_norm = self._create_norm_layer(\"state_norm\")\n\n def build(self, input_shape):\n super().build(input_shape)\n self.kernel_norm.build([input_shape[0], self.units * 4])\n self.recurrent_norm.build([input_shape[0], self.units * 4])\n self.state_norm.build([input_shape[0], self.units])\n\n def call(self, inputs, states, training=None):\n h_tm1 = states[0] # previous memory state\n c_tm1 = states[1] # previous carry state\n\n dp_mask = self.get_dropout_mask_for_cell(inputs, training, count=4)\n rec_dp_mask = self.get_recurrent_dropout_mask_for_cell(h_tm1, training, count=4)\n if 0.0 < self.dropout < 1.0:\n inputs *= dp_mask[0]\n z = self.kernel_norm(keras.backend.dot(inputs, self.kernel))\n\n if 0.0 < self.recurrent_dropout < 1.0:\n h_tm1 *= rec_dp_mask[0]\n z += self.recurrent_norm(keras.backend.dot(h_tm1, self.recurrent_kernel))\n if self.use_bias:\n z = keras.backend.bias_add(z, self.bias)\n\n z = tf.split(z, num_or_size_splits=4, axis=1)\n c, o = self._compute_carry_and_output_fused(z, c_tm1)\n c = self.state_norm(c)\n h = o * self.activation(c)\n return h, [h, c]\n\n def get_config(self):\n config = {\n \"norm_gamma_initializer\": keras.initializers.serialize(\n self.norm_gamma_initializer\n ),\n \"norm_beta_initializer\": keras.initializers.serialize(\n self.norm_beta_initializer\n ),\n \"norm_epsilon\": self.norm_epsilon,\n }\n base_config = super().get_config()\n return {**base_config, **config}\n\n def _create_norm_layer(self, name):\n return keras.layers.LayerNormalization(\n beta_initializer=self.norm_beta_initializer,\n gamma_initializer=self.norm_gamma_initializer,\n epsilon=self.norm_epsilon,\n name=name,\n )\n\n\n@tf.keras.utils.register_keras_serializable(package=\"Addons\")\nclass LayerNormSimpleRNNCell(keras.layers.SimpleRNNCell):\n \"\"\"Cell class for LayerNormSimpleRNN.\n\n References:\n [1] Ba, <NAME>, <NAME>, and <NAME>.\n \"Layer Normalization.\" ArXiv:1607.06450 [Cs, Stat],\n July 21, 2016. http://arxiv.org/abs/1607.06450\n\n Arguments:\n units: Positive integer, dimensionality of the output space.\n activation: Activation function to use.\n Default: hyperbolic tangent (`tanh`).\n If you pass `None`, no activation is applied\n (ie. 
\"linear\" activation: `a(x) = x`).\n use_bias: Boolean, (default `True`), whether the layer uses a bias\n vector.\n layernorm_epsilon: Float, (default `1e-5`), Small float added to variance\n to avoid dividing by zero.\n kernel_initializer: Initializer for the `kernel` weights matrix,\n used for the linear transformation of the inputs. Default:\n `glorot_uniform`.\n recurrent_initializer: Initializer for the `recurrent_kernel`\n weights matrix, used for the linear transformation of the recurrent\n state. Default: `orthogonal`.\n bias_initializer: Initializer for the bias vector (`use_bias=True`).\n Default: `zeros`.\n gamma_initializer: Initializer for the gamma vector of the layer\n normalization layer. Default: `ones`.\n kernel_regularizer: Regularizer function applied to the `kernel` weights\n matrix. Default: `None`.\n recurrent_regularizer: Regularizer function applied to the\n `recurrent_kernel` weights matrix. Default: `None`.\n bias_regularizer: Regularizer function applied to the bias vector\n (`use_bias=True`). Default: `None`.\n gamma_regularizer: Regularizer function applied to the gamma vector\n of the layer normalization layer. Default: `None`.\n kernel_constraint: Constraint function applied to the `kernel` weights\n matrix. Default: `None`.\n recurrent_constraint: Constraint function applied to the\n `recurrent_kernel` weights matrix. Default: `None`.\n bias_constraint: Constraint function applied to the bias vector\n (`use_bias=True`). Default: `None`.\n gamma_constraint: Constraint function applied to the gamma vector\n of the layer normalization layer. Default: `None`.\n dropout: Float between 0 and 1. Fraction of the units to drop for the\n linear transformation of the inputs. Default: 0.\n recurrent_dropout: Float between 0 and 1. Fraction of the units to drop\n for the linear transformation of the recurrent state. Default: 0.\n\n Call arguments:\n inputs: A 2D tensor, with shape of `[batch, feature]`.\n states: A 2D tensor with shape of `[batch, units]`, which is the state\n from the previous time step. For timestep 0, the initial state provided\n by the user will be fed to the cell.\n training: Python boolean indicating whether the layer should behave in\n training mode or in inference mode. 
Only relevant when `dropout` or\n `recurrent_dropout` is used.\n\n Examples:\n\n ```python\n import numpy as np\n import tensorflow.keras as keras\n import tensorflow_addons as tfa\n\n inputs = np.random.random([32, 10, 8]).astype(np.float32)\n rnn = keras.layers.RNN(tfa.rnn.LayerNormSimpleRNNCell(4))\n\n output = rnn(inputs) # The output has shape `[32, 4]`.\n\n rnn = keras.layers.RNN(\n tfa.rnn.LayerNormSimpleRNNCell(4),\n return_sequences=True,\n return_state=True)\n\n # whole_sequence_output has shape `[32, 10, 4]`.\n # final_state has shape `[32, 4]`.\n whole_sequence_output, final_state = rnn(inputs)\n ```\n \"\"\"\n\n @typechecked\n def __init__(\n self,\n units: TensorLike,\n activation: Activation = \"tanh\",\n use_bias: bool = True,\n layernorm_epsilon: FloatTensorLike = 1e-05,\n kernel_initializer: Initializer = \"glorot_uniform\",\n recurrent_initializer: Initializer = \"orthogonal\",\n bias_initializer: Initializer = \"zeros\",\n gamma_initializer: Initializer = \"ones\",\n kernel_regularizer: Regularizer = None,\n recurrent_regularizer: Regularizer = None,\n bias_regularizer: Regularizer = None,\n gamma_regularizer: Regularizer = None,\n kernel_constraint: Regularizer = None,\n recurrent_constraint: Constraint = None,\n bias_constraint: Constraint = None,\n gamma_constraint: Constraint = None,\n dropout: FloatTensorLike = 0.0,\n recurrent_dropout: FloatTensorLike = 0.0,\n **kwargs\n ):\n super(LayerNormSimpleRNNCell, self).__init__(\n units,\n activation=activation,\n use_bias=use_bias,\n kernel_initializer=kernel_initializer,\n recurrent_initializer=recurrent_initializer,\n bias_initializer=bias_initializer,\n kernel_regularizer=kernel_regularizer,\n recurrent_regularizer=recurrent_regularizer,\n bias_regularizer=bias_regularizer,\n kernel_constraint=kernel_constraint,\n recurrent_constraint=recurrent_constraint,\n bias_constraint=bias_constraint,\n dropout=dropout,\n recurrent_dropout=recurrent_dropout,\n **kwargs,\n )\n self.layernorm = keras.layers.LayerNormalization(\n axis=-1,\n epsilon=layernorm_epsilon,\n center=False,\n scale=True,\n beta_initializer=None,\n gamma_initializer=gamma_initializer,\n beta_regularizer=None,\n gamma_regularizer=gamma_regularizer,\n beta_constraint=None,\n gamma_constraint=gamma_constraint,\n **kwargs,\n )\n\n def build(self, input_shape):\n super(LayerNormSimpleRNNCell, self).build(input_shape)\n self.layernorm.build((None, self.units))\n\n def call(self, inputs, states, training=None):\n \"\"\"Formulas.\n\n Notation:\n y_t : Cell output at t (`output`)\n y_{t-1} : Previous cell output at t-1 (`prev_output`)\n x_t : The new input at t (`inputs`)\n W_xh : Weight matrix for inputs x_t (`self.kernel`)\n W_hh : Weights for prev. outputs y_{t-1} (`self.recurrent_kernel`)\n b : Bias term for centering (`self.bias`)\n d1 : Dropout function for x_t (`inputs * dp_mask`)\n d2 : Dropout function for y_{t-1} (`prev_output * rec_dp_mask`)\n ln : Scaling function from layer normalization (`self.layernorm`)\n f : Activation function (`self.activation`)\n\n Case 1:\n Keras' SimpleRNN. Only with bias and activation\n y_t = f(x_t * W_xh + y_{t-1} * W_hh + b)\n or\n net = x_t * W_xh + y_{t-1} * W_hh\n y_t = f(net + b)\n\n Case 2:\n addons' LayerNormSimpleRNNCell. 
Like case 1 but with layer\n normalization (only scaling).\n y_t = f(ln(x_t * W_xh + y_{t-1} * W_hh) + b)\n or\n net = x_t * W_xh + y_{t-1} * W_hh\n y_t = f(ln(net) + b)\n\n Layer normalization with scaling and centering in one go (see Ba et\n al. (2016), page 3, formula 4, https://arxiv.org/abs/1607.06450)\n is the same as layer normalization only with scaling, and\n centering directly afterwards.\n\n Case 3:\n Keras' SimpleRNN. With dropout, bias, and activation\n y_t = f(d1(x_t) * W_xh + d2(y_{t-1}) * W_hh + b)\n or\n net = d1(x_t) * W_xh + d2(y_{t-1}) * W_hh\n y_t = f(net + b)\n\n Case 4:\n addons' LayerNormSimpleRNNCell. Like case 3 but with layer\n normalization (only scaling).\n y_t = f(ln(d1(x_t) * W_xh + d2(y_{t-1}) * W_hh) + b)\n or\n net = d1(x_t) * W_xh + d2(y_{t-1}) * W_hh\n y_t = f(ln(net) + b)\n \"\"\"\n prev_output = states[0]\n dp_mask = self.get_dropout_mask_for_cell(inputs, training)\n rec_dp_mask = self.get_recurrent_dropout_mask_for_cell(prev_output, training)\n\n if dp_mask is not None:\n h = keras.backend.dot(inputs * dp_mask, self.kernel)\n else:\n h = keras.backend.dot(inputs, self.kernel)\n\n # don't add bias to \"h\" here\n # add bias after scaling with layer normalization to \"output\"\n\n if rec_dp_mask is not None:\n prev_output = prev_output * rec_dp_mask\n output = h + keras.backend.dot(prev_output, self.recurrent_kernel) # \"net\"\n\n output = self.layernorm(output)\n\n if self.bias is not None:\n output = keras.backend.bias_add(output, self.bias)\n\n if self.activation is not None:\n output = self.activation(output)\n\n return output, [output]\n\n # use SimpleRNNCell's get_initial_state method\n\n def get_config(self):\n cell_config = super(LayerNormSimpleRNNCell, self).get_config()\n del cell_config[\"name\"]\n\n ln_config = self.layernorm.get_config()\n ln_config = {\n k: v\n for k, v in ln_config.items()\n if k\n in [\"epsilon\", \"gamma_initializer\", \"gamma_regularizer\", \"gamma_constraint\"]\n }\n\n ln_config[\"layernorm_epsilon\"] = ln_config.pop(\"epsilon\")\n return dict(list(cell_config.items()) + list(ln_config.items()))\n\n\n@tf.keras.utils.register_keras_serializable(package=\"Addons\")\nclass ESNCell(keras.layers.AbstractRNNCell):\n \"\"\"Echo State Network (ESN) recurrent cell.\n\n This implements the recurrent cell from the paper:\n <NAME>\n \"The \"echo state\" approach to analysing and training recurrent neural networks\".\n GMD Report 148, German National Research Center for Information Technology, 2001.\n https://www.researchgate.net/publication/215385037\n\n Arguments:\n units: Positive integer, dimensionality of the reservoir.\n connectivity: Float between 0 and 1.\n Connection probability between two reservoir units.\n Default: 0.1.\n leaky: Float between 0 and 1.\n Leaking rate of the reservoir.\n If you pass 1, it is the special case in which the model does not\n have leaky integration.\n Default: 1.\n spectral_radius: Float between 0 and 1.\n Desired spectral radius of recurrent weight matrix.\n Default: 0.9.\n use_norm2: Boolean, whether to use the p-norm function (with p=2) as an upper\n bound of the spectral radius so that the echo state property is satisfied.\n This avoids computing the eigenvalues, which is computationally\n expensive.\n Default: False.\n use_bias: Boolean, whether the layer uses a bias vector.\n Default: True.\n activation: Activation function to use.\n Default: hyperbolic tangent (`tanh`).\n kernel_initializer: Initializer for the `kernel` weights matrix,\n used for the linear transformation of the inputs.\n Default: `glorot_uniform`.\n recurrent_initializer: Initializer for the `recurrent_kernel` weights matrix,\n used for the linear transformation of the recurrent state.\n Default: `glorot_uniform`.\n bias_initializer: Initializer for the bias vector.\n Default: `zeros`.\n\n Call arguments:\n inputs: A 2D tensor, with shape `[batch, feature]`.\n states: List of state tensors corresponding to the previous timestep.\n
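\n Examples:\n\n A minimal usage sketch (illustrative, mirroring the\n `LayerNormSimpleRNNCell` example above):\n\n ```python\n import numpy as np\n import tensorflow.keras as keras\n import tensorflow_addons as tfa\n\n inputs = np.random.random([32, 10, 8]).astype(np.float32)\n # The reservoir weights are created non-trainable in `build`.\n rnn = keras.layers.RNN(tfa.rnn.ESNCell(4))\n output = rnn(inputs) # The output has shape `[32, 4]`.\n ```\n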
 \"\"\"\n\n @typechecked\n def __init__(\n self,\n units: int,\n connectivity: float = 0.1,\n leaky: float = 1,\n spectral_radius: float = 0.9,\n use_norm2: bool = False,\n use_bias: bool = True,\n activation: Activation = \"tanh\",\n kernel_initializer: Initializer = \"glorot_uniform\",\n recurrent_initializer: Initializer = \"glorot_uniform\",\n bias_initializer: Initializer = \"zeros\",\n **kwargs\n ):\n super().__init__(**kwargs)\n self.units = units\n self.connectivity = connectivity\n self.leaky = leaky\n self.spectral_radius = spectral_radius\n self.use_norm2 = use_norm2\n self.use_bias = use_bias\n self.activation = tf.keras.activations.get(activation)\n self.kernel_initializer = tf.keras.initializers.get(kernel_initializer)\n self.recurrent_initializer = tf.keras.initializers.get(recurrent_initializer)\n self.bias_initializer = tf.keras.initializers.get(bias_initializer)\n\n self._state_size = units\n self._output_size = units\n\n @property\n def state_size(self):\n return self._state_size\n\n @property\n def output_size(self):\n return self._output_size\n\n def build(self, inputs_shape):\n input_size = tf.compat.dimension_value(tf.TensorShape(inputs_shape)[-1])\n if input_size is None:\n raise ValueError(\n \"Could not infer input size from inputs.get_shape()[-1]. Shape received is %s\"\n % inputs_shape\n )\n\n def _esn_recurrent_initializer(shape, dtype, partition_info=None):\n recurrent_weights = tf.keras.initializers.get(self.recurrent_initializer)(\n shape, dtype\n )\n\n connectivity_mask = tf.cast(\n tf.math.less_equal(tf.random.uniform(shape), self.connectivity,), dtype\n )\n recurrent_weights = tf.math.multiply(recurrent_weights, connectivity_mask)\n\n # Satisfy the necessary condition for the echo state property `max(eig(W)) < 1`\n if self.use_norm2:\n # This condition is approximated by scaling the 2-norm of the reservoir\n # matrix, which is an upper bound of the spectral radius.\n recurrent_norm2 = tf.math.sqrt(\n tf.math.reduce_sum(tf.math.square(recurrent_weights))\n )\n is_norm2_0 = tf.cast(tf.math.equal(recurrent_norm2, 0), dtype)\n scaling_factor = self.spectral_radius / (\n recurrent_norm2 + 1 * is_norm2_0\n )\n else:\n abs_eig_values = tf.abs(tf.linalg.eig(recurrent_weights)[0])\n scaling_factor = tf.math.divide_no_nan(\n self.spectral_radius, tf.reduce_max(abs_eig_values)\n )\n\n recurrent_weights = tf.multiply(recurrent_weights, scaling_factor)\n\n return recurrent_weights\n\n self.recurrent_kernel = self.add_weight(\n name=\"recurrent_kernel\",\n shape=[self.units, self.units],\n initializer=_esn_recurrent_initializer,\n trainable=False,\n dtype=self.dtype,\n )\n self.kernel = self.add_weight(\n name=\"kernel\",\n shape=[input_size, self.units],\n initializer=self.kernel_initializer,\n trainable=False,\n dtype=self.dtype,\n )\n\n if self.use_bias:\n self.bias = self.add_weight(\n name=\"bias\",\n shape=[self.units],\n initializer=self.bias_initializer,\n trainable=False,\n dtype=self.dtype,\n )\n\n self.built = True\n\n def call(self, inputs, state):\n
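 \"\"\"Applies one leaky echo-state step.\n\n Computes `h = f([inputs, state] @ [kernel; recurrent_kernel] + bias)`\n and returns `(1 - leaky) * state + leaky * h` as both the output and\n the new state.\n \"\"\"\n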
 in_matrix = tf.concat([inputs, state[0]], axis=1)\n weights_matrix = tf.concat([self.kernel, self.recurrent_kernel], axis=0)\n\n output = tf.linalg.matmul(in_matrix, weights_matrix)\n if self.use_bias:\n output = output + self.bias\n output = self.activation(output)\n output = (1 - self.leaky) * state[0] + self.leaky * output\n\n return output, output\n\n def get_config(self):\n config = {\n \"units\": self.units,\n \"connectivity\": self.connectivity,\n \"leaky\": self.leaky,\n \"spectral_radius\": self.spectral_radius,\n \"use_norm2\": self.use_norm2,\n \"use_bias\": self.use_bias,\n \"activation\": tf.keras.activations.serialize(self.activation),\n \"kernel_initializer\": tf.keras.initializers.serialize(\n self.kernel_initializer\n ),\n \"recurrent_initializer\": tf.keras.initializers.serialize(\n self.recurrent_initializer\n ),\n \"bias_initializer\": tf.keras.initializers.serialize(self.bias_initializer),\n }\n base_config = super().get_config()\n return {**base_config, **config}\n", "id": "6695745", "language": "Python", "matching_score": 3.637450695037842, "max_stars_count": 2, "path": "tensorflow_addons/rnn/cell.py" }, { "content": "# Copyright 2019 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n# ==============================================================================\n\"\"\"Implements contrastive loss.\"\"\"\n\nimport tensorflow as tf\n\nfrom tensorflow_addons.utils.keras_utils import LossFunctionWrapper\nfrom tensorflow_addons.utils.types import TensorLike, Number\nfrom typeguard import typechecked\n\n\n@tf.keras.utils.register_keras_serializable(package=\"Addons\")\n@tf.function\ndef contrastive_loss(\n y_true: TensorLike, y_pred: TensorLike, margin: Number = 1.0\n) -> tf.Tensor:\n r\"\"\"Computes the contrastive loss between `y_true` and `y_pred`.\n\n This loss encourages the embeddings to be close to each other for\n samples of the same label, and to be at least the margin constant apart\n for samples of different labels.\n\n The Euclidean distances `y_pred` between two embedding matrices\n `a` and `b` with shape [batch_size, hidden_size] can be computed\n as follows:\n\n ```python\n # y_pred = \\sqrt (\\sum_i (a[:, i] - b[:, i])^2)\n y_pred = tf.linalg.norm(a - b, axis=1)\n ```\n\n See: http://yann.lecun.com/exdb/publis/pdf/hadsell-chopra-lecun-06.pdf\n\n Args:\n y_true: 1-D integer `Tensor` with shape [batch_size] of\n binary labels indicating positive vs negative pair.\n y_pred: 1-D float `Tensor` with shape [batch_size] of\n distances between two embedding matrices.\n margin: margin term in the loss definition.\n\n Returns:\n contrastive_loss: 1-D float `Tensor` with shape [batch_size].\n \"\"\"\n y_pred = tf.convert_to_tensor(y_pred)\n y_true = tf.dtypes.cast(y_true, y_pred.dtype)\n return y_true * tf.math.square(y_pred) + (1.0 - y_true) * tf.math.square(\n tf.math.maximum(margin - y_pred, 0.0)\n )\n\n\n@tf.keras.utils.register_keras_serializable(package=\"Addons\")\nclass ContrastiveLoss(LossFunctionWrapper):\n r\"\"\"Computes the contrastive loss between `y_true` and `y_pred`.\n\n This loss encourages the embeddings to be close to each other for\n samples of the same label, and to be at least the margin constant apart\n for samples of different labels.\n\n See: http://yann.lecun.com/exdb/publis/pdf/hadsell-chopra-lecun-06.pdf\n\n We expect labels `y_true` to be provided as a 1-D integer `Tensor`\n with shape [batch_size] of binary integer labels, and `y_pred` to be a\n 1-D float `Tensor` with shape [batch_size] of distances between two\n embedding matrices.\n\n The Euclidean distances `y_pred` between two embedding matrices\n `a` and `b` with shape [batch_size, hidden_size] can be computed\n as follows:\n\n ```python\n # y_pred = \\sqrt (\\sum_i (a[:, i] - b[:, i])^2)\n y_pred = tf.linalg.norm(a - b, axis=1)\n ```\n\n Args:\n margin: `Float`, margin term in the loss definition.\n Default value is 1.0.\n reduction: (Optional) Type of `tf.keras.losses.Reduction` to apply.\n Default value is `SUM_OVER_BATCH_SIZE`.\n name: (Optional) name for the loss.\n
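\n Usage (an illustrative sketch; `a`, `b`, and `labels` stand for\n hypothetical embedding and label tensors):\n\n ```python\n loss_fn = tfa.losses.ContrastiveLoss(margin=1.0)\n distances = tf.linalg.norm(a - b, axis=1)\n # `labels` holds 1 for positive pairs and 0 for negative pairs.\n loss = loss_fn(labels, distances)\n ```\n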
 \"\"\"\n\n @typechecked\n def __init__(\n self,\n margin: Number = 1.0,\n reduction: str = tf.keras.losses.Reduction.SUM_OVER_BATCH_SIZE,\n name: str = \"contrastive_loss\",\n ):\n super().__init__(\n contrastive_loss, reduction=reduction, name=name, margin=margin\n )\n", "id": "6759358", "language": "Python", "matching_score": 0.7565487623214722, "max_stars_count": 2, "path": "tensorflow_addons/losses/contrastive.py" }, { "content": "# coding=utf-8\n# Copyright 2022 TF.Text Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Tests for the pointer_ops.span_alignment() op.\"\"\"\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nfrom absl.testing import parameterized\n\nfrom tensorflow.python.framework import test_util\nfrom tensorflow.python.ops.ragged import ragged_factory_ops\nfrom tensorflow.python.platform import test\nfrom tensorflow_text.python.ops import pointer_ops\n\n\n@test_util.run_all_in_graph_and_eager_modes\nclass SpanOverlapsOpTest(test_util.TensorFlowTestCase, parameterized.TestCase):\n maxDiff = 5000 # Display diffs even if they're long. 
pylint: disable=invalid-name\n\n #=============================================================================\n # Source & Target Spans:\n # Offset: 0 5 10 15 20 25 30 35 40 45 50 55 60\n # |====|====|====|====|====|====|====|====|====|====|====|====|\n # Source[0]: [-0-] [-1-] [2] [3] [4][-5-][-6-][-7-][-8-][-9-]\n # Target[0]: [-0-][-1-] [-2-][-3-][-4-] [5] [6] [7] [-8-][-9-]\n # |====|====|====|====|====|====|====|====|====|====|====|====|\n # Source[1]: [-0-] [-1-] [-2-] [-3-] [-4-] [-5-]\n # Target[1]: [2] [-0-] [----1---] [3] [4]\n # |====|====|====|====|====|====|====|====|====|====|====|====|\n # [----0----]\n # Source[2]: [--1--][--3--]\n # [--2--]\n #\n # [--0--]\n # Target[2]: [------1------]\n # [--2--] [-3-]\n # |====|====|====|====|====|====|====|====|====|====|====|====|\n # Offset: 0 5 10 15 20 25 30 35 40 45 50 55 60\n BATCH_SIZE = 3\n SOURCE_START = [[0, 10, 16, 20, 27, 30, 35, 40, 45, 50],\n [0, 10, 20, 30, 40, 50],\n [0, 2, 3, 9]] # pyformat: disable\n SOURCE_LIMIT = [[5, 15, 19, 23, 30, 35, 40, 45, 50, 55],\n [5, 15, 25, 35, 45, 55],\n [11, 9, 10, 16]] # pyformat: disable\n TARGET_START = [[0, 5, 15, 20, 25, 31, 35, 42, 47, 52],\n [10, 18, 1, 30, 42],\n [2, 0, 1, 10]] # pyformat: disable\n TARGET_LIMIT = [[5, 10, 20, 25, 30, 34, 38, 45, 52, 57],\n [15, 28, 4, 33, 45],\n [9, 15, 8, 15]] # pyformat: disable\n\n # Spans encoded using 1D tensors\n BATCH_ITEM = []\n for i in range(BATCH_SIZE):\n BATCH_ITEM.append(\n dict(\n source_start=SOURCE_START[i], # <int>[s]\n source_limit=SOURCE_LIMIT[i], # <int>[s]\n target_start=TARGET_START[i], # <int>[t]\n target_limit=TARGET_LIMIT[i], # <int>[t]\n ))\n\n # Spans encoded using 2D ragged tensors\n RAGGED_BATCH_2D = dict(\n source_start=SOURCE_START, # <int>[b, (s)]\n source_limit=SOURCE_LIMIT, # <int>[b, (s)]\n target_start=TARGET_START, # <int>[b, (t)]\n target_limit=TARGET_LIMIT, # <int>[b, (t)]\n )\n\n # Spans encoded using 2D uniform tensors\n UNIFORM_BATCH_2D = dict(\n source_start=[row[:4] for row in SOURCE_START], # <int>[b, s]\n source_limit=[row[:4] for row in SOURCE_LIMIT], # <int>[b, s]\n target_start=[row[:4] for row in TARGET_START], # <int>[b, t]\n target_limit=[row[:4] for row in TARGET_LIMIT], # <int>[b, t]\n )\n\n # Spans encoded using a 3D ragged tensor with 2 ragged dimensions\n # (2 batch dimensions)\n RAGGED_BATCH_3D = dict(\n source_start=[SOURCE_START[:2], SOURCE_START[2:]], # <int>[b1, (b2), (s)]\n source_limit=[SOURCE_LIMIT[:2], SOURCE_LIMIT[2:]], # <int>[b1, (b2), (s)]\n target_start=[TARGET_START[:2], TARGET_START[2:]], # <int>[b1, (b2), (t)]\n target_limit=[TARGET_LIMIT[:2], TARGET_LIMIT[2:]], # <int>[b1, (b2), (t)]\n )\n\n # Spans encoded using a 3D uniform tensor (2 batch dimensions)\n UNIFORM_BATCH_3D = dict(\n source_start=[UNIFORM_BATCH_2D['source_start']] * 2, # <int>[b1, b2, s]\n source_limit=[UNIFORM_BATCH_2D['source_limit']] * 2, # <int>[b1, b2, s]\n target_start=[UNIFORM_BATCH_2D['target_start']] * 2, # <int>[b1, b2, t]\n target_limit=[UNIFORM_BATCH_2D['target_limit']] * 2, # <int>[b1, b2, t]\n )\n\n @parameterized.parameters(\n #=========================================================================\n # This group of tests use the following source & target spans:\n # Offset: 0 5 10 15 20 25 30 35 40 45 50 55 60\n # |====|====|====|====|====|====|====|====|====|====|====|====|\n # Source: [-0-] [-1-] [2] [3] [4][-5-][-6-][-7-][-8-][-9-]\n # Target: [-0-][-1-] [-2-][-3-][-4-] [5] [6] [7] [-8-][-9-]\n # |====|====|====|====|====|====|====|====|====|====|====|====|\n dict(\n name='test set 1, 
with default overlap flags',\n expected=[0, -1, -1, -1, -1, -1, -1, -1, -1, -1],\n **BATCH_ITEM[0]),\n dict(\n name='test set 1, with contains=True',\n contains=True,\n expected=[0, -1, -1, -1, -1, 5, 6, 7, -1, -1],\n **BATCH_ITEM[0]),\n dict(\n name='test set 1, with contained_by=True',\n contained_by=True,\n expected=[0, -1, 2, 3, 4, -1, -1, -1, -1, -1],\n **BATCH_ITEM[0]),\n dict(\n name='test set 1, with contains=True and contained_by=True',\n contains=True,\n contained_by=True,\n expected=[0, -1, 2, 3, 4, 5, 6, 7, -1, -1],\n **BATCH_ITEM[0]),\n dict(\n name='test set 1, with partial_overlap=True',\n partial_overlap=True,\n expected=[0, -1, 2, 3, 4, 5, 6, 7, 8, 9],\n **BATCH_ITEM[0]),\n #=========================================================================\n # This group of tests use the following source & target spans:\n # Offset: 0 5 10 15 20 25 30 35 40 45 50 55\n # |====|====|====|====|====|====|====|====|====|====|====|\n # Source: [-0-] [-1-] [-2-] [-3-] [-4-] [-5-]\n # Target: [2] [-0-] [----1---] [3] [4]\n # |====|====|====|====|====|====|====|====|====|====|====|\n dict(\n name='test set 2, with default overlap flags',\n expected=[-1, 0, -1, -1, -1, -1],\n **BATCH_ITEM[1]),\n dict(\n name='test set 2, with contains=True',\n contains=True,\n expected=[2, 0, -1, 3, 4, -1],\n **BATCH_ITEM[1]),\n dict(\n name='test set 2, with contained_by=True',\n contained_by=True,\n expected=[-1, 0, 1, -1, -1, -1],\n **BATCH_ITEM[1]),\n dict(\n name='test set 2, with partial_overlap=True',\n partial_overlap=True,\n expected=[2, 0, 1, 3, 4, -1],\n **BATCH_ITEM[1]),\n #=========================================================================\n # This group of tests use the following source & target spans:\n # Offset: 0 5 10 15 20\n # |====|====|====|====|\n # [----0----]\n # Source: [--1--][--3--]\n # [--2--]\n # |====|====|====|====|\n # [--0--]\n # Target: [------1------]\n # [--2--] [-3-]\n # |====|====|====|====|\n dict(\n name='test set 3, with default overlap flags',\n expected=[-1, 0, -1, -1],\n **BATCH_ITEM[2]),\n dict(\n name='test set 3, with contains=True',\n contains=True,\n expected=[2, 0, -1, 3],\n **BATCH_ITEM[2]),\n dict(\n name='test set 3, with contained_by=True',\n contained_by=True,\n expected=[1, 1, 1, -1],\n **BATCH_ITEM[2]),\n dict(\n name='test set 3, with contains=True and contained_by=True',\n contains=True,\n contained_by=True,\n expected=[2, 1, 1, 3],\n **BATCH_ITEM[2]),\n dict(\n name='test set 3, with partial_overlap=True',\n partial_overlap=True,\n expected=[3, 2, 2, 3],\n **BATCH_ITEM[2]),\n #=========================================================================\n # This group of tests use RAGGED_BATCH_2D.\n # Inputs have a single batch dimension, with shapes [b, (s)] and [b, (t)].\n dict(\n name='default overlap flags',\n expected=[\n [0, -1, -1, -1, -1, -1, -1, -1, -1, -1],\n [-1, 0, -1, -1, -1, -1],\n [-1, 0, -1, -1],\n ],\n **RAGGED_BATCH_2D),\n dict(\n name='contains=True',\n contains=True,\n expected=[\n [0, -1, -1, -1, -1, 5, 6, 7, -1, -1],\n [2, 0, -1, 3, 4, -1],\n [2, 0, -1, 3],\n ],\n **RAGGED_BATCH_2D),\n #=========================================================================\n # This group of tests use UNIFORM_BATCH_2D\n # Inputs have a single batch dimension, with shapes [b, s] and [b, t].\n dict(\n name='default overlap flags',\n expected=[\n [0, -1, -1, -1],\n [-1, 0, -1, -1],\n [-1, 0, -1, -1],\n ],\n ragged_rank=0,\n **UNIFORM_BATCH_2D),\n dict(\n name='contains=True',\n contains=True,\n expected=[\n [0, -1, -1, -1],\n [2, 0, -1, 3],\n 
[2, 0, -1, 3],\n ],\n ragged_rank=0,\n **UNIFORM_BATCH_2D),\n #=========================================================================\n # This group of tests use RAGGED_BATCH_3D.\n # Inputs have two batch dimensions, with shapes [b1, (b2), (s)] and\n # [b1, (b2), (t)].\n dict(\n name='default overlap flags',\n expected=[\n [[0, -1, -1, -1, -1, -1, -1, -1, -1, -1], [-1, 0, -1, -1, -1,\n -1]],\n [[-1, 0, -1, -1]],\n ],\n **RAGGED_BATCH_3D),\n dict(\n name='contains=True',\n contains=True,\n expected=[\n [[0, -1, -1, -1, -1, 5, 6, 7, -1, -1], [2, 0, -1, 3, 4, -1]],\n [[2, 0, -1, 3]],\n ],\n **RAGGED_BATCH_3D),\n #=========================================================================\n # This group of tests use UNIFORM_BATCH_3D\n # Inputs have two batch dimensions, with shapes [b1, b2, s] and\n # [b1, b2, t].\n dict(\n name='default overlap flags',\n expected=[[\n [0, -1, -1, -1],\n [-1, 0, -1, -1],\n [-1, 0, -1, -1],\n ]] * 2,\n ragged_rank=0,\n **UNIFORM_BATCH_3D),\n dict(\n name='contains=True',\n contains=True,\n expected=[[\n [0, -1, -1, -1],\n [2, 0, -1, 3],\n [2, 0, -1, 3],\n ]] * 2,\n ragged_rank=0,\n **UNIFORM_BATCH_3D),\n ) # pyformat: disable\n def testSpanAlignment(self,\n name,\n source_start,\n source_limit,\n target_start,\n target_limit,\n expected,\n contains=False,\n contained_by=False,\n partial_overlap=False,\n ragged_rank=None):\n source_start = ragged_factory_ops.constant(\n source_start, ragged_rank=ragged_rank)\n source_limit = ragged_factory_ops.constant(\n source_limit, ragged_rank=ragged_rank)\n target_start = ragged_factory_ops.constant(\n target_start, ragged_rank=ragged_rank)\n target_limit = ragged_factory_ops.constant(\n target_limit, ragged_rank=ragged_rank)\n multivalent_result = False\n alignment = pointer_ops.span_alignment(\n source_start, source_limit, target_start, target_limit, contains,\n contained_by, partial_overlap, multivalent_result)\n self.assertAllEqual(alignment, expected)\n\n @parameterized.parameters([\n #=========================================================================\n # This group of tests use the following source & target spans:\n # Offset: 0 5 10 15 20 25 30 35 40 45 50 55 60\n # |====|====|====|====|====|====|====|====|====|====|====|====|\n # Source: [-0-] [-1-] [2] [3] [4][-5-][-6-][-7-][-8-][-9-]\n # Target: [-0-][-1-] [-2-][-3-][-4-] [5] [6] [7] [-8-][-9-][10]\n # |====|====|====|====|====|====|====|====|====|====|====|====|\n dict(\n name='test set 1, with default overlap flags',\n expected=[[0], [], [], [], [], [], [], [], [], []],\n **BATCH_ITEM[0]),\n dict(\n name='test set 1, with contains=True',\n contains=True,\n expected=[[0], [], [], [], [], [5], [6], [7], [], []],\n **BATCH_ITEM[0]),\n dict(\n name='test set 1, with contained_by=True',\n contained_by=True,\n expected=[[0], [], [2], [3], [4], [], [], [], [], []],\n **BATCH_ITEM[0]),\n dict(\n name='test set 1, with contains=True and contained_by=True',\n contains=True,\n contained_by=True,\n expected=[[0], [], [2], [3], [4], [5], [6], [7], [], []],\n **BATCH_ITEM[0]),\n dict(\n name='test set 1, with partial_overlap=True',\n partial_overlap=True,\n expected=[[0], [], [2], [3], [4], [5], [6], [7], [8], [8, 9]],\n **BATCH_ITEM[0]),\n #=========================================================================\n # This group of tests use the following source & target spans:\n # Offset: 0 5 10 15 20 25 30 35 40 45 50 55\n # |====|====|====|====|====|====|====|====|====|====|====|\n # Source: [-0-] [-1-] [-2-] [-3-] [-4-] [-5-]\n # Target: [2] [-0-] [----1---] [3] 
[4]\n # |====|====|====|====|====|====|====|====|====|====|====|\n dict(\n name='test set 2, with default overlap flags',\n expected=[[], [0], [], [], [], []],\n **BATCH_ITEM[1]),\n dict(\n name='test set 2, with contains=True',\n contains=True,\n expected=[[2], [0], [], [3], [4], []],\n **BATCH_ITEM[1]),\n dict(\n name='test set 2, with contained_by=True',\n contained_by=True,\n expected=[[], [0], [1], [], [], []],\n **BATCH_ITEM[1]),\n dict(\n name='test set 2, with partial_overlap=True',\n partial_overlap=True,\n expected=[[2], [0], [1], [3], [4], []],\n **BATCH_ITEM[1]),\n #=========================================================================\n # This group of tests use the following source & target spans:\n # Offset: 0 5 10 15 20\n # |====|====|====|====|\n # [----0----]\n # Source: [--1--][--3--]\n # [--2--]\n # |====|====|====|====|\n # [--0--]\n # Target: [------1------]\n # [--2--] [-3-]\n # |====|====|====|====|\n dict(\n name='test set 3, with default overlap flags',\n expected=[[], [0], [], []],\n **BATCH_ITEM[2]),\n dict(\n name='test set 3, with contains=True',\n contains=True,\n expected=[[0, 2], [0], [], [3]],\n **BATCH_ITEM[2]),\n dict(\n name='test set 3, with contained_by=True',\n contained_by=True,\n expected=[[1], [0, 1], [1], []],\n **BATCH_ITEM[2]),\n dict(\n name='test set 3, with contains=True and contained_by=True',\n contains=True,\n contained_by=True,\n expected=[[0, 1, 2], [0, 1], [1], [3]],\n **BATCH_ITEM[2]),\n dict(\n name='test set 3, with partial_overlap=True',\n partial_overlap=True,\n expected=[[0, 1, 2, 3], [0, 1, 2], [0, 1, 2], [1, 3]],\n **BATCH_ITEM[2]),\n #=========================================================================\n # This group of tests use RAGGED_BATCH_2D\n # Inputs have a single batch dimension, with shapes [b, (s)] and [b, (t)].\n dict(\n name='default overlap flags',\n expected=[\n [[0], [], [], [], [], [], [], [], [], []],\n [[], [0], [], [], [], []],\n [[], [0], [], []],\n ],\n **RAGGED_BATCH_2D),\n dict(\n name='contains=True',\n contains=True,\n expected=[\n [[0], [], [], [], [], [5], [6], [7], [], []],\n [[2], [0], [], [3], [4], []],\n [[0, 2], [0], [], [3]],\n ],\n **RAGGED_BATCH_2D),\n #=========================================================================\n # This group of tests use UNIFORM_BATCH_2D\n # Inputs have a single batch dimension, with shapes [b, s] and [b, t].\n dict(\n name='default overlap flags',\n expected=[\n [[0], [], [], []],\n [[], [0], [], []],\n [[], [0], [], []],\n ],\n ragged_rank=0,\n **UNIFORM_BATCH_2D),\n dict(\n name='contains=True',\n contains=True,\n expected=[\n [[0], [], [], []],\n [[2], [0], [], [3]],\n [[0, 2], [0], [], [3]],\n ],\n ragged_rank=0,\n **UNIFORM_BATCH_2D),\n #=========================================================================\n # This group of tests use RAGGED_BATCH_3D\n # Inputs have two batch dimensions, with shapes [b1, (b2), (s)] and\n # [b1, (b2), (t)].\n dict(\n name='default overlap flags',\n expected=[\n [[[0], [], [], [], [], [], [], [], [], []],\n [[], [0], [], [], [], []]],\n [[[], [0], [], []]],\n ],\n **RAGGED_BATCH_3D),\n dict(\n name='contains=True',\n contains=True,\n expected=[\n [[[0], [], [], [], [], [5], [6], [7], [], []],\n [[2], [0], [], [3], [4], []]],\n [[[0, 2], [0], [], [3]]],\n ],\n **RAGGED_BATCH_3D),\n #=========================================================================\n # This group of tests use UNIFORM_BATCH_3D\n # Inputs have two batch dimensions, with shapes [b1, b2, s] and\n # [b1, b2, t].\n dict(\n name='default overlap 
flags',\n expected=[[\n [[0], [], [], []],\n [[], [0], [], []],\n [[], [0], [], []],\n ]] * 2,\n ragged_rank=0,\n **UNIFORM_BATCH_3D),\n dict(\n name='contains=True',\n contains=True,\n expected=[[\n [[0], [], [], []],\n [[2], [0], [], [3]],\n [[0, 2], [0], [], [3]],\n ]] * 2,\n ragged_rank=0,\n **UNIFORM_BATCH_3D),\n ]) # pyformat: disable\n def testSpanMultiAlignment(self,\n name,\n source_start,\n source_limit,\n target_start,\n target_limit,\n expected,\n contains=False,\n contained_by=False,\n partial_overlap=False,\n ragged_rank=None):\n source_start = ragged_factory_ops.constant(\n source_start, ragged_rank=ragged_rank)\n source_limit = ragged_factory_ops.constant(\n source_limit, ragged_rank=ragged_rank)\n target_start = ragged_factory_ops.constant(\n target_start, ragged_rank=ragged_rank)\n target_limit = ragged_factory_ops.constant(\n target_limit, ragged_rank=ragged_rank)\n multivalent_result = True\n alignment = pointer_ops.span_alignment(\n source_start, source_limit, target_start, target_limit, contains,\n contained_by, partial_overlap, multivalent_result)\n self.assertAllEqual(alignment, expected)\n\n\nif __name__ == '__main__':\n test.main()\n", "id": "7657255", "language": "Python", "matching_score": 1.674034833908081, "max_stars_count": 2, "path": "tensorflow_text/python/ops/span_alignment_op_test.py" }, { "content": "# coding=utf-8\n# Copyright 2022 TF.Text Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Tests for ItemSelectors.\"\"\"\nimport functools\n\nfrom absl.testing import parameterized\n\nfrom tensorflow.python.framework import test_util\nfrom tensorflow.python.ops import array_ops\nfrom tensorflow.python.ops.ragged import ragged_array_ops\nfrom tensorflow.python.ops.ragged import ragged_factory_ops\nfrom tensorflow.python.platform import test\nfrom tensorflow_text.python.ops import item_selector_ops\n\n\n@test_util.run_all_in_graph_and_eager_modes\nclass FirstNItemSelectorTest(test.TestCase, parameterized.TestCase):\n # pyformat: disable\n @parameterized.parameters([\n dict(\n description=\"Basic test on 2D `RaggedTensor`\",\n masking_inputs=[\n [1, 2, 3, 4, 5, 6],\n [10, 20, 30, 40],\n [100, 200, 300, 400, 500]\n ],\n expected_selectable=[\n [1, 2],\n [10, 20],\n [100, 200]\n ],\n ),\n dict(\n description=\"Test broadcast\",\n masking_inputs=[\n [[1, 2], [3], [4, 5, 6]],\n [[10, 20], [30, 40]],\n [[100, 200], [300, 400, 500]]\n ],\n expected_selectable=[\n [[1, 2], [3]],\n [[10, 20], [30, 40]],\n [[100, 200], [300, 400, 500]]\n ],\n ),\n dict(\n description=\"Select the first two items. 
Test broadcast and \" +\n \"dropping nonselectable ids.\",\n masking_inputs=[\n [[1, 2], [3], [4, 5, 6]],\n [[10, 20], [30, 40]],\n [[100, 200], [300, 400, 500]]\n ],\n unselectable_ids=[1, 200],\n expected_selectable=[\n [[3], [4, 5, 6]],\n [[10, 20], [30, 40]],\n [[300, 400, 500]]],\n axis=1,\n ),\n dict(\n description=\"Select the first two items on axis=-1.\",\n masking_inputs=[\n [[b\"hello\"], [b\"there\"]],\n [[b\"name\", b\"is\"]],\n [[b\"what\", b\"time\"], [b\"is\"], [b\"it\"], [b\"?\"]],\n ],\n expected_selectable=[\n [[b\"hello\"], [b\"there\"]],\n [[b\"name\", b\"is\"]],\n [[b\"what\", b\"time\"], [], [], []]],\n axis=-1,\n ),\n dict(\n description=\"Select the first two items on axis=1.\",\n masking_inputs=[\n [[b\"hello\"], [b\"there\"]],\n [[b\"name\", b\"is\"]],\n [[b\"what\", b\"time\"], [b\"is\"], [b\"it\"], [b\"?\"]],\n ],\n expected_selectable=[\n [[b\"hello\"], [b\"there\"]],\n [[b\"name\", b\"is\"]],\n [[b\"what\", b\"time\"], [b\"is\"]]\n ],\n axis=1,\n ),\n dict(\n description=\"num_to_select is a 2D Tensor\",\n masking_inputs=[\n [1, 2, 3],\n [4, 5],\n [6]\n ],\n expected_selectable=[\n [1, 2],\n [4],\n [6],\n ],\n num_to_select=[[2], [1], [1]],\n axis=-1,\n ),\n ])\n # pyformat: enable\n\n def testGetSelectable(self,\n masking_inputs,\n expected_selectable,\n num_to_select=2,\n unselectable_ids=None,\n axis=1,\n description=\"\"):\n masking_inputs = ragged_factory_ops.constant(masking_inputs)\n item_selector = item_selector_ops.FirstNItemSelector(\n num_to_select=num_to_select, unselectable_ids=unselectable_ids)\n selectable = item_selector.get_selectable(masking_inputs, axis)\n actual_selection = ragged_array_ops.boolean_mask(masking_inputs, selectable)\n self.assertAllEqual(actual_selection, expected_selectable)\n\n\n@test_util.run_all_in_graph_and_eager_modes\nclass RandomItemSelectorTest(test.TestCase, parameterized.TestCase):\n\n # pyformat: disable\n @parameterized.parameters([\n dict(\n description=\"Basic test on 2D `RaggedTensor`\",\n masking_inputs=[\n [1, 2, 3, 4, 5, 6],\n [10, 20, 30, 40],\n [100, 200, 300, 400, 500]\n ],\n expected_selected_items=[\n [1, 2],\n [10, 20],\n [100, 200],\n ],\n ),\n dict(\n description=\"Test broadcast\",\n masking_inputs=[\n [[1, 2], [3], [4, 5, 6]],\n [[10, 20], [30, 40]],\n [[100, 200], [300, 400, 500]]\n ],\n expected_selected_items=[\n [[1, 2], [3]],\n [[10, 20], [30, 40]],\n [[100, 200], [300, 400, 500]]\n ],\n ),\n dict(\n description=\"Select the first two items that don't have \" +\n \"unselectable ids; test that broadcasting works appropriately\",\n masking_inputs=[\n [[1, 2], [3], [4, 5, 6]],\n [[10, 20], [30, 40]],\n [[100, 200], [300, 400, 500]]\n ],\n unselectable_ids=[1, 200],\n expected_selected_items=[\n [[3], [4, 5, 6]],\n [[10, 20], [30, 40]],\n [[300, 400, 500]]\n ],\n axis=1,\n ),\n dict(\n description=\"Test shape[:axis+1]\",\n masking_inputs=[\n [[0, 1], [2, 3], [4, 5]],\n [],\n [[6, 7]]\n ],\n expected_selected_items=[\n [[0, 1], [2, 3]],\n [],\n [[6, 7]],\n ],\n axis=1,\n ),\n dict(\n description=\"Test rank 3 ragged tensor selecting on axis=1\",\n masking_inputs=[\n [[101], [100], [2045], [1012], [102], [100], [2051],\n [2003], [2009], [1029], [102]],\n [[101], [100], [2292], [1996], [6077], [2041], [1029],\n [102], [100], [1029], [102]]],\n expected_selected_items=[\n [[101], [100]],\n [[101], [100]],\n ],\n axis=1,\n ),\n dict(\n description=\"Test rank 3 ragged tensor selecting on axis=1, but \" +\n \"w/ reverse shuffle_fn\",\n masking_inputs=[\n [[101], [100], [2045], [1012], [102], [100], 
[2051],\n [2003], [2009], [1029], [102]],\n [[101], [100], [2292], [1996], [6077], [2041], [1029],\n [102], [100], [1029], [102]]],\n expected_selected_items=[\n [[1029], [102]],\n [[1029], [102]],\n ],\n axis=1,\n shuffle_fn=\"reverse\",\n ),\n ])\n # pyformat: enable\n def testGetSelectionMask(self,\n masking_inputs,\n expected_selected_items,\n unselectable_ids=None,\n axis=1,\n shuffle_fn=\"\",\n description=\"\"):\n shuffle_fn = (\n functools.partial(array_ops.reverse, axis=[-1])\n if shuffle_fn == \"reverse\" else array_ops.identity)\n masking_inputs = ragged_factory_ops.constant(masking_inputs)\n item_selector = item_selector_ops.RandomItemSelector(\n max_selections_per_batch=2,\n selection_rate=1,\n shuffle_fn=shuffle_fn,\n unselectable_ids=unselectable_ids,\n )\n selection_mask = item_selector.get_selection_mask(masking_inputs, axis)\n selected_items = ragged_array_ops.boolean_mask(masking_inputs,\n selection_mask)\n self.assertAllEqual(selected_items, expected_selected_items)\n\n\n@test_util.run_all_in_graph_and_eager_modes\nclass NothingSelectorTest(test.TestCase, parameterized.TestCase):\n\n @parameterized.parameters([\n dict(\n description=\"Basic test\",\n masking_inputs=[[[1, 2], [3], [4, 5, 6]], [[10, 20], [30, 40]],\n [[100, 200], [300, 400, 500]]],\n unselectable_ids=[1, 200],\n expected_selected_items=[[], [], []],\n ),\n ])\n def testNothingSelector(self,\n masking_inputs,\n unselectable_ids,\n expected_selected_items,\n num_to_select=2,\n axis=1,\n description=\"\"):\n masking_inputs = ragged_factory_ops.constant(masking_inputs)\n item_selector = item_selector_ops.NothingSelector()\n selection_mask = item_selector.get_selectable(masking_inputs, axis)\n selected_items = ragged_array_ops.boolean_mask(masking_inputs,\n selection_mask)\n self.assertAllEqual(selected_items, expected_selected_items)\n\n\nif __name__ == \"__main__\":\n test.main()\n", "id": "11686420", "language": "Python", "matching_score": 1.1069934368133545, "max_stars_count": 2, "path": "tensorflow_text/python/ops/item_selector_ops_test.py" }, { "content": "# coding=utf-8\n# Copyright 2022 TF.Text Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# coding=utf-8\n\"\"\"Tests for normalization ops in tensorflow_text.\"\"\"\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nfrom absl.testing import parameterized\n\nfrom tensorflow.python.framework import errors\nfrom tensorflow.python.framework import test_util\nfrom tensorflow.python.ops.ragged import ragged_factory_ops\nfrom tensorflow.python.platform import test\nfrom tensorflow_text.python.ops import normalize_ops\n\n\ndef _Utf8(char):\n return char.encode(\"utf-8\")\n\n\n@test_util.run_all_in_graph_and_eager_modes\nclass NormalizeOpsTest(test.TestCase):\n\n def test_lowercase_one_string(self):\n txt = [\n \" TExt to loWERcase! \",\n ]\n expected = [\n b\" text to lowercase! 
\",\n ]\n self.assertAllEqual(expected, normalize_ops.case_fold_utf8(txt))\n\n def test_lowercase_text(self):\n txt = [\n \"Punctuation and digits: -*/+$#%@%$123456789#^$*%&\",\n \"Non-latin UTF8 chars: ΘͽʦȺЩ\",\n \"Accented chars: ĎÔPQRŔSŠoóôpqrŕsštťuúvwxyý\",\n \"Non-UTF8-letters: e.g. ◆, ♥, and the emoji symbol ( ͡° ͜ʖ ͡°)\",\n \"Folded: ßς\", \"\"\n ]\n expected = [\n _Utf8(u\"punctuation and digits: -*/+$#%@%$123456789#^$*%&\"),\n _Utf8(u\"non-latin utf8 chars: θͽʦⱥщ\"),\n _Utf8(u\"accented chars: ďôpqrŕsšoóôpqrŕsštťuúvwxyý\"),\n _Utf8(\n u\"non-utf8-letters: e.g. ◆, ♥, and the emoji symbol ( ͡° ͜ʖ ͡°)\"\n ),\n _Utf8(u\"folded: ssσ\"), b\"\"\n ]\n self.assertAllEqual(expected, normalize_ops.case_fold_utf8(txt))\n\n def test_lowercase_one_string_ragged(self):\n txt = ragged_factory_ops.constant([[\" TExt \", \"to\", \" loWERcase! \"],\n [\" TExt to loWERcase! \"]])\n expected = [[b\" text \", b\"to\", b\" lowercase! \"], [b\" text to lowercase! \"]]\n self.assertAllEqual(expected, normalize_ops.case_fold_utf8(txt))\n\n def test_lowercase_empty_string(self):\n txt = [\n \"\",\n ]\n expected = [\n b\"\",\n ]\n self.assertAllEqual(expected, normalize_ops.case_fold_utf8(txt))\n\n def test_normalize_nfkc(self):\n txt = [\n u\"\\u1e9b\\u0323\",\n ]\n expected = [\n u\"ṩ\".encode(\"utf-8\"),\n ]\n self.assertAllEqual(expected, normalize_ops.normalize_utf8(txt, \"NFKC\"))\n self.assertAllEqual(expected, normalize_ops.normalize_utf8(txt, \"nfkc\"))\n\n def test_normalize_nfkc_batch(self):\n txt = [\n u\"\\u1e9b\\u0323\",\n u\"\\ufb01\",\n ]\n expected = [\n b\"\\xe1\\xb9\\xa9\",\n b\"fi\",\n ]\n self.assertAllEqual(expected, normalize_ops.normalize_utf8(txt, u\"NFKC\"))\n self.assertAllEqual(expected, normalize_ops.normalize_utf8(txt, u\"nfkc\"))\n\n def test_normalize_nfkc_ragged(self):\n txt = ragged_factory_ops.constant([[[u\"\\u1e9b\\u0323 \\ufb01\"], []],\n [[u\"\\u1e9b\\u0323\", u\"\\ufb01\"]]])\n expected = [[[u\"ṩ fi\".encode(\"utf-8\")], []],\n [[u\"ṩ\".encode(\"utf-8\"), b\"fi\"]]]\n self.assertAllEqual(expected, normalize_ops.normalize_utf8(txt, \"NFKC\"))\n self.assertAllEqual(expected, normalize_ops.normalize_utf8(txt, \"nfkc\"))\n\n def test_normalize_nfc(self):\n txt = [\n u\"\\u1e9b\\u0323\",\n ]\n expected = [\n u\"\\u1e9b\\u0323\".encode(\"utf-8\"),\n ]\n self.assertAllEqual(expected, normalize_ops.normalize_utf8(txt, \"NFC\"))\n self.assertAllEqual(expected, normalize_ops.normalize_utf8(txt, \"nfc\"))\n\n def test_normalize_nfd(self):\n txt = [u\"\\u1e9b\\u0323\"]\n expected = [\n u\"\\u017f\\u0323\\u0307\".encode(\"utf-8\"),\n ]\n self.assertAllEqual(expected, normalize_ops.normalize_utf8(txt, \"NFD\"))\n self.assertAllEqual(expected, normalize_ops.normalize_utf8(txt, \"nfd\"))\n\n def test_normalize_nfkd(self):\n txt = [\n u\"\\u1e9b\\u0323\",\n ]\n expected = [\n u\"\\u0073\\u0323\\u0307\".encode(\"utf-8\"),\n ]\n self.assertAllEqual(expected, normalize_ops.normalize_utf8(txt, \"NFKD\"))\n self.assertAllEqual(expected, normalize_ops.normalize_utf8(txt, \"nfkd\"))\n\n def test_unknown_normalization_form(self):\n with self.assertRaises(errors.InvalidArgumentError):\n bomb = normalize_ops.normalize_utf8([\"cant readme\", \"wont read me\"],\n \"cantfindme\")\n self.evaluate(bomb)\n\n\n@test_util.run_all_in_graph_and_eager_modes\nclass NormalizeWithOffsetsMapOpsTest(parameterized.TestCase, test.TestCase):\n\n def test_normalize_nfkc(self):\n txt = [\n u\"\\u1e9b\\u0323\",\n ]\n expected = [\n u\"ṩ\".encode(\"utf-8\"),\n ]\n actual, _ = 
normalize_ops.normalize_utf8_with_offsets_map(txt, \"NFKC\")\n self.assertAllEqual(expected, actual)\n actual, _ = normalize_ops.normalize_utf8_with_offsets_map(txt, \"nfkc\")\n self.assertAllEqual(expected, actual)\n\n def test_normalize_nfc(self):\n txt = [\n u\"\\u1e9b\\u0323\",\n ]\n expected = [\n u\"\\u1e9b\\u0323\".encode(\"utf-8\"),\n ]\n actual, _ = normalize_ops.normalize_utf8_with_offsets_map(txt, \"NFC\")\n self.assertAllEqual(expected, actual)\n actual, _ = normalize_ops.normalize_utf8_with_offsets_map(txt, \"nfc\")\n self.assertAllEqual(expected, actual)\n\n def test_normalize_nfkc_batch(self):\n txt = [\n u\"\\u1e9b\\u0323\",\n u\"\\ufb01\",\n ]\n expected = [\n b\"\\xe1\\xb9\\xa9\",\n b\"fi\",\n ]\n actual, _ = normalize_ops.normalize_utf8_with_offsets_map(txt, u\"NFKC\")\n self.assertAllEqual(expected, actual)\n actual, _ = normalize_ops.normalize_utf8_with_offsets_map(txt, u\"nfkc\")\n self.assertAllEqual(expected, actual)\n\n def test_normalize_nfkc_ragged(self):\n txt = ragged_factory_ops.constant([[[u\"\\u1e9b\\u0323 \\ufb01\"], []],\n [[u\"\\u1e9b\\u0323\", u\"\\ufb01\"]]])\n expected = [[[u\"ṩ fi\".encode(\"utf-8\")], []],\n [[u\"ṩ\".encode(\"utf-8\"), b\"fi\"]]]\n actual, _ = normalize_ops.normalize_utf8_with_offsets_map(txt, \"NFKC\")\n self.assertAllEqual(expected, actual)\n\n def test_unaccepted_normalization_form(self):\n with self.assertRaises(errors.InvalidArgumentError):\n bomb = normalize_ops.normalize_utf8_with_offsets_map(\n [\"cant readme\", \"wont read me\"], \"CANTNORMALIZEME\")\n self.evaluate(bomb)\n\n\n@test_util.run_all_in_graph_and_eager_modes\nclass FindSourceOffsetsTest(parameterized.TestCase, test.TestCase):\n\n def _extract_substrs(self, txt_input, start, end):\n extracted = []\n start = self.evaluate(start)\n end = self.evaluate(end)\n txt_input = txt_input.encode(\"utf-8\")\n for i in range(start.shape[1]):\n pre_norm_start = int(start[0][i])\n pre_norm_end = int(end[0][i])\n extracted.append(txt_input[pre_norm_start:pre_norm_end])\n return extracted\n\n def test_one_string(self):\n txt = [\n u\"株式会社KADOKAWA\",\n ]\n _, offsets_map = normalize_ops.normalize_utf8_with_offsets_map(txt, u\"NFKC\")\n\n # post_norm_txt = \"株式会社KADOKAWA\"\n post_norm_offsets_starts = [[\n 0, 3, 6, 9, 12, 13, 14, 15, 16, 17, 18, 19, 20\n ]]\n post_norm_offsets_ends = [[3, 6, 9, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21]]\n\n pre_norm_offsets_starts = normalize_ops.find_source_offsets(\n offsets_map, post_norm_offsets_starts)\n pre_norm_offsets_ends = normalize_ops.find_source_offsets(\n offsets_map, post_norm_offsets_ends)\n expected_pre_norm_characters = [\n u\"株\", u\"式\", u\"会\", u\"社\", u\"K\", u\"A\", u\"D\", u\"O\", u\"K\",\n u\"A\", u\"W\", u\"A\", u\"\"\n ]\n self.assertAllEqual(\n self._extract_substrs(txt[0], pre_norm_offsets_starts,\n pre_norm_offsets_ends),\n [x.encode(\"utf-8\") for x in expected_pre_norm_characters])\n\n @parameterized.parameters([\n # Test one string and rank = 0 offset input\n dict(\n txt_input=[\"株式会社KADOKAWA\"],\n normalization_form=\"NFKC\",\n post_norm_offsets=22,\n expected=36),\n # Test one string and rank = 1 offset input\n dict(\n txt_input=[\"株式会社KADOKAWA\"],\n normalization_form=\"NFKC\",\n post_norm_offsets=[0, 1, 2],\n expected=[0, 1, 2]),\n # Test multiple strings and rank = 2 offset input\n dict(\n txt_input=[\n \"株式会社\",\n \"KADOKAWA\",\n ],\n normalization_form=\"NFKC\",\n post_norm_offsets=[[0, 1, 2], [0, 1, 2]],\n expected=[[0, 1, 2], [0, 3, 6]]),\n # Test multiple strings and rank > 2 offset input\n dict(\n 
txt_input=[\n [\"株式会社\"],\n [\"KADOKAWA\"],\n ],\n normalization_form=\"NFKC\",\n post_norm_offsets=[[[0, 1, 2]], [[0, 1, 2]]],\n expected=[[[0, 1, 2]], [[0, 3, 6]]]),\n ])\n def test_tensor_input(self, txt_input, normalization_form, post_norm_offsets,\n expected):\n _, offsets_map = normalize_ops.normalize_utf8_with_offsets_map(\n txt_input, normalization_form)\n pre_norm_offsets = normalize_ops.find_source_offsets(\n offsets_map, post_norm_offsets)\n self.assertAllEqual(expected, pre_norm_offsets)\n\n @parameterized.parameters([\n # Test multiple strings with an empty str\n dict(\n txt_input=[\n [\"株式会社\"],\n [\"\"],\n [\"KADOKAWA\"],\n ],\n normalization_form=\"NFKC\",\n post_norm_offsets=[[[0, 1, 2]], [[0, 1, 2]], [[0, 1, 2]]],\n expected=[[[0, 1, 2]], [[0, 0, 0]], [[0, 3, 6]]]),\n # Test multiple strings with an empty element\n dict(\n txt_input=[\n [\"株式会社\"],\n [],\n [\"KADOKAWA\"],\n ],\n normalization_form=\"NFKC\",\n post_norm_offsets=[[[0, 1, 2]], [[]], [[0, 1, 2]]],\n expected=[[[0, 1, 2]], [[]], [[0, 3, 6]]]),\n ])\n def test_ragged_tensor_input(self, txt_input, normalization_form,\n post_norm_offsets, expected):\n txt_input = ragged_factory_ops.constant(txt_input)\n post_norm_offsets = ragged_factory_ops.constant(\n post_norm_offsets, dtype=\"int64\")\n _, offsets_map = normalize_ops.normalize_utf8_with_offsets_map(\n txt_input, normalization_form)\n pre_norm_offsets = normalize_ops.find_source_offsets(\n offsets_map, post_norm_offsets)\n self.assertAllEqual(expected, pre_norm_offsets)\n\n def test_string_ragged_dimension_lower_than_offsets_input(self):\n txt = ragged_factory_ops.constant([\n [\"株式会社\"],\n [],\n [\"KADOKAWA\"],\n ])\n _, offsets_map = normalize_ops.normalize_utf8_with_offsets_map(txt, u\"NFKC\")\n post_norm_offsets = ragged_factory_ops.constant(\n [[[0, 1, 2]], [[0, 1, 2]], [[0, 1, 2]]], dtype=\"int64\")\n with self.assertRaises(errors.InvalidArgumentError):\n bomb = normalize_ops.find_source_offsets(offsets_map, post_norm_offsets)\n self.evaluate(bomb)\n\n def test_string_ragged_dimension_higher_than_offsets_input(self):\n txt = ragged_factory_ops.constant([\n [\"株式会社\"],\n [\"\"],\n [\"KADOKAWA\"],\n ])\n _, offsets_map = normalize_ops.normalize_utf8_with_offsets_map(txt, u\"NFKC\")\n post_norm_offsets = ragged_factory_ops.constant(\n [[[0, 1, 2]], [[]], [[0, 1, 2]]], dtype=\"int64\")\n with self.assertRaises(errors.InvalidArgumentError):\n bomb = normalize_ops.find_source_offsets(offsets_map, post_norm_offsets)\n self.evaluate(bomb)\n\n def test_sliced_offsets_map_and_input_offset(self):\n txt = ragged_factory_ops.constant([\n [\"株式会社\"],\n [\"\"],\n [\"KADOKAWA\"],\n ])\n _, offsets_map = normalize_ops.normalize_utf8_with_offsets_map(txt, u\"NFKC\")\n post_norm_offsets = ragged_factory_ops.constant(\n [[[0, 1, 2]], [[]], [[0, 1, 2]]], dtype=\"int64\")\n\n sliced_offsets_map = offsets_map[2]\n sliced_post_norm_offsets = post_norm_offsets[2]\n sliced_pre_norm_offsets = normalize_ops.find_source_offsets(\n sliced_offsets_map, sliced_post_norm_offsets)\n expected = [[0, 3, 6]]\n self.assertAllEqual(expected, sliced_pre_norm_offsets)\n\n\nif __name__ == \"__main__\":\n test.main()\n", "id": "5224647", "language": "Python", "matching_score": 2.0302910804748535, "max_stars_count": 2, "path": "tensorflow_text/python/ops/normalize_ops_test.py" }, { "content": "# coding=utf-8\n# Copyright 2022 TF.Text Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain 
a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# encoding=utf-8\n\"\"\"Tests for HubModuleSplitter.\"\"\"\n\nfrom absl.testing import parameterized\n\nfrom tensorflow.python.framework import test_util\nfrom tensorflow.python.lib.io import file_io\nfrom tensorflow.python.ops import lookup_ops\nfrom tensorflow.python.ops import variables as variables_lib\nfrom tensorflow.python.ops.ragged import ragged_factory_ops\nfrom tensorflow.python.platform import test\nfrom tensorflow.python.saved_model import save\nfrom tensorflow_text.python.ops import hub_module_splitter\n\n\ndef _Utf8(char):\n return char.encode(\"utf-8\")\n\n\n@test_util.run_all_in_graph_and_eager_modes\nclass HubModuleSplitterTest(parameterized.TestCase, test.TestCase):\n\n @parameterized.parameters([\n # Test scalar input.\n dict(\n text_input=_Utf8(u\"新华社北京\"),\n expected_pieces=[_Utf8(u\"新华社\"), _Utf8(u\"北京\")],\n expected_starts=[0, 9],\n expected_ends=[9, 15]\n ),\n # Test rank 1 input.\n dict(\n text_input=[_Utf8(u\"新华社北京\"), _Utf8(u\"中文测试\")],\n expected_pieces=[[_Utf8(u\"新华社\"), _Utf8(u\"北京\")],\n [_Utf8(u\"中文\"), _Utf8(u\"测试\")]],\n expected_starts=[[0, 9], [0, 6]],\n expected_ends=[[9, 15], [6, 12]]\n ),\n # Test rank 2 ragged input.\n dict(\n text_input=ragged_factory_ops.constant_value(\n [[_Utf8(u\"新华社北京\"), _Utf8(u\"中文测试\")],\n [_Utf8(u\"新华社上海\")]]),\n expected_pieces=[[[_Utf8(u\"新华社\"), _Utf8(u\"北京\")],\n [_Utf8(u\"中文\"), _Utf8(u\"测试\")]],\n [[_Utf8(u\"新华社\"), _Utf8(u\"上海\")]]],\n expected_starts=[[[0, 9], [0, 6]], [[0, 9]]],\n expected_ends=[[[9, 15], [6, 12]], [[9, 15]]]\n ),\n # Test rank 2 dense input.\n dict(\n text_input=ragged_factory_ops.constant_value(\n [[_Utf8(u\"新华社北京\"), _Utf8(u\"中文测试\")],\n [_Utf8(u\"新华社上海\"), _Utf8(u\"英国交通\")]]),\n expected_pieces=[[[_Utf8(u\"新华社\"), _Utf8(u\"北京\")],\n [_Utf8(u\"中文\"), _Utf8(u\"测试\")]],\n [[_Utf8(u\"新华社\"), _Utf8(u\"上海\")],\n [_Utf8(u\"英国\"), _Utf8(u\"交通\")]]],\n expected_starts=[[[0, 9], [0, 6]], [[0, 9], [0, 6]]],\n expected_ends=[[[9, 15], [6, 12]], [[9, 15], [6, 12]]]\n ),\n # Test ragged input with rank higher than 2.\n dict(\n text_input=ragged_factory_ops.constant_value(\n [\n [[_Utf8(u\"新华社北京\")], [_Utf8(u\"中文测试\")]],\n [[_Utf8(u\"新华社上海\")]]\n ]),\n expected_pieces=[\n [[[_Utf8(u\"新华社\"), _Utf8(u\"北京\")]],\n [[_Utf8(u\"中文\"), _Utf8(u\"测试\")]]],\n [[[_Utf8(u\"新华社\"), _Utf8(u\"上海\")]]]],\n expected_starts=[\n [[[0, 9]], [[0, 6]]],\n [[[0, 9]]]],\n expected_ends=[\n [[[9, 15]], [[6, 12]]],\n [[[9, 15]]]]\n )\n ])\n def testSplit(self,\n text_input,\n expected_pieces,\n expected_starts,\n expected_ends):\n hub_module_handle = (\"tensorflow_text/python/ops/test_data/\"\n \"segmenter_hub_module\")\n splitter = hub_module_splitter.HubModuleSplitter(hub_module_handle)\n pieces, starts, ends = splitter.split_with_offsets(text_input)\n pieces_no_offset = splitter.split(text_input)\n self.evaluate(lookup_ops.tables_initializer())\n self.evaluate(variables_lib.global_variables_initializer())\n self.assertAllEqual(expected_pieces, pieces)\n self.assertAllEqual(expected_starts, starts)\n self.assertAllEqual(expected_ends, ends)\n self.assertAllEqual(expected_pieces, pieces_no_offset)\n\n def exportSavedModel(self):\n 
hub_module_handle = (\"tensorflow_text/python/ops/test_data/\"\n \"segmenter_hub_module\")\n splitter = hub_module_splitter.HubModuleSplitter(hub_module_handle)\n save.save(splitter, \"ram://saved_model\")\n self.assertEqual(file_io.file_exists_v2(\"ram://saved_model\"), True)\n\n\nif __name__ == \"__main__\":\n test.main()\n", "id": "580532", "language": "Python", "matching_score": 2.560441493988037, "max_stars_count": 2, "path": "tensorflow_text/python/ops/hub_module_splitter_test.py" }, { "content": "# coding=utf-8\n# Copyright 2022 TF.Text Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# encoding=utf-8\n# Lint as: python3\n\"\"\"Tests for regex_split and regex_split_with_offsets ops.\"\"\"\nfrom absl.testing import parameterized\n\nfrom tensorflow.python.framework import constant_op\nfrom tensorflow.python.framework import test_util\nfrom tensorflow.python.ops import array_ops\nfrom tensorflow.python.ops import string_ops\nfrom tensorflow.python.ops.ragged import ragged_factory_ops\nfrom tensorflow.python.ops.ragged import ragged_tensor\nfrom tensorflow.python.platform import test\nfrom tensorflow_text.python.ops import regex_split_ops\n\n\ndef _utf8(char):\n return char.encode(\"utf-8\")\n\n\n# TODO(thuang513): It appears there isn't a Ragged version of substr; consider\n# checking this into core TF.\ndef _ragged_substr(text_input, begin, size):\n if not (isinstance(text_input, ragged_tensor.RaggedTensor) or\n isinstance(begin, ragged_tensor.RaggedTensor) or\n isinstance(size, ragged_tensor.RaggedTensor)):\n return string_ops.substr_v2(text_input, begin, size)\n\n # TODO(edloper) Update this to use ragged_tensor_shape.broadcast_dynamic_shape\n # once it's been updated to handle uniform_row_lengths correctly.\n if ragged_tensor.is_ragged(text_input):\n if text_input.ragged_rank != 1 or text_input.shape.rank != 2:\n return None # Test only works for `shape=[N, None]`\n text_input_flat = text_input.flat_values\n else:\n text_input_flat = array_ops.reshape(text_input, [-1])\n broadcasted_text = array_ops.gather_v2(text_input_flat,\n begin.nested_value_rowids()[-1])\n new_tokens = string_ops.substr_v2(broadcasted_text, begin.flat_values,\n size.flat_values)\n return begin.with_flat_values(new_tokens)\n\n\n@test_util.run_all_in_graph_and_eager_modes\nclass RegexSplitOpsTest(parameterized.TestCase, test.TestCase):\n\n @parameterized.parameters([\n dict(\n descr=\"Test doc string examples\",\n text_input=[r\"hello there\"],\n delim_regex_pattern=r\"\\s\",\n keep_delim_regex_pattern=r\"\\s\",\n expected=[[b\"hello\", b\" \", b\"there\"]],\n ),\n dict(\n descr=\"Test simple whitespace\",\n text_input=[r\"hello there\"],\n delim_regex_pattern=r\"\\s\",\n expected=[[b\"hello\", b\"there\"]],\n ),\n dict(\n descr=\"Two delimiters in a row\",\n text_input=[r\"hello there\"],\n delim_regex_pattern=r\"\\s\",\n expected=[[b\"hello\", b\"there\"]],\n ),\n dict(\n descr=\"Test Hiragana\",\n text_input=[_utf8(u\"では4日\")],\n delim_regex_pattern=r\"\\p{Hiragana}\",\n 
keep_delim_regex_pattern=r\"\\p{Hiragana}\",\n expected=[[_utf8(u\"で\"), _utf8(u\"は\"),\n _utf8(u\"4日\")]],\n ),\n dict(\n descr=\"Test symbols and punctuation\",\n text_input=[r\"hello! (:$) there\"],\n delim_regex_pattern=r\"[\\p{S}|\\p{P}]+|\\s\",\n keep_delim_regex_pattern=r\"[\\p{S}|\\p{P}]+\",\n expected=[[b\"hello\", b\"!\", b\"(:$)\", b\"there\"]],\n ),\n dict(\n descr=\"Test numbers\",\n text_input=[r\"hello12345there\"],\n delim_regex_pattern=r\"\\p{N}+\",\n keep_delim_regex_pattern=r\"\\p{N}+\",\n expected=[[b\"hello\", b\"12345\", b\"there\"]],\n ),\n dict(\n descr=\"Test numbers and symbols\",\n text_input=[r\"show me some $100 bills yo!\"],\n delim_regex_pattern=r\"\\s|\\p{S}\",\n keep_delim_regex_pattern=r\"\\p{S}\",\n expected=[[b\"show\", b\"me\", b\"some\", b\"$\", b\"100\", b\"bills\", b\"yo!\"]],\n ),\n dict(\n descr=\"Test input RaggedTensor with ragged_rank=1; \"\n \"shape = [2, (2, 1)]\",\n text_input=[\n [b\"show me some $100 bills yo!\",\n _utf8(u\"では4日\")],\n [b\"hello there\"],\n ],\n delim_regex_pattern=r\"\\s|\\p{S}|\\p{Hiragana}\",\n keep_delim_regex_pattern=r\"\\p{S}|\\p{Hiragana}\",\n expected=[[[b\"show\", b\"me\", b\"some\", b\"$\", b\"100\", b\"bills\", b\"yo!\"],\n [_utf8(u\"で\"), _utf8(u\"は\"),\n _utf8(u\"4日\")]], [[b\"hello\", b\"there\"]]],\n ),\n dict(\n descr=\"Test input 3D RaggedTensor with ragged_rank=2; \"\n \"shape = [1, 2, (2, 1)]\",\n text_input=[[\n [b\"show me some $100 bills yo!\",\n _utf8(u\"では4日\")],\n [b\"hello there\"],\n ]],\n delim_regex_pattern=r\"\\s|\\p{S}|\\p{Hiragana}\",\n keep_delim_regex_pattern=r\"\\p{S}|\\p{Hiragana}\",\n expected=[[[[b\"show\", b\"me\", b\"some\", b\"$\", b\"100\", b\"bills\", b\"yo!\"],\n [_utf8(u\"で\"), _utf8(u\"は\"), _utf8(u\"4日\")]],\n [[b\"hello\", b\"there\"]]]],\n ),\n dict(\n descr=\"Test input 3D RaggedTensor with ragged_rank=1; \"\n \"shape = [2, (1, 2), 2]\",\n text_input=[\n [[b\"a b\", b\"c\"], [b\"d\", b\"e f g\"]],\n [[b\"cat horse cow\", b\"\"]]],\n ragged_rank=1,\n delim_regex_pattern=r\"\\s\",\n expected=[\n [[[b\"a\", b\"b\"], [b\"c\"]], [[b\"d\"], [b\"e\", b\"f\", b\"g\"]]],\n [[[b\"cat\", b\"horse\", b\"cow\"], []]]],\n ),\n # Test inputs that are Tensors.\n dict(\n descr=\"Test input Tensor with shape = [2], rank = 1\",\n text_input=[\n r\"show me some $100 bills yo!\",\n r\"hello there\",\n ],\n delim_regex_pattern=r\"\\s|\\p{S}\",\n keep_delim_regex_pattern=r\"\\p{S}\",\n expected=[[b\"show\", b\"me\", b\"some\", b\"$\", b\"100\", b\"bills\", b\"yo!\"],\n [b\"hello\", b\"there\"]],\n input_is_dense=True,\n ),\n dict(\n descr=\"Test input Tensor with shape = [2, 1], rank = 2\",\n text_input=[\n [r\"show me some $100 bills yo!\"],\n [r\"hello there\"],\n ],\n delim_regex_pattern=r\"\\s|\\p{S}\",\n keep_delim_regex_pattern=r\"\\p{S}\",\n expected=[[[b\"show\", b\"me\", b\"some\", b\"$\", b\"100\", b\"bills\", b\"yo!\"]],\n [[b\"hello\", b\"there\"]]],\n input_is_dense=True,\n ),\n dict(\n descr=\"Test input Tensor with multiple ranks; shape = [2, 2]\",\n input_is_dense=True,\n text_input=[\n [b\"show me some $100 bills yo!\",\n _utf8(u\"では4日\")],\n [b\"hello there\", b\"woot woot\"],\n ],\n delim_regex_pattern=r\"\\s|\\p{S}|\\p{Hiragana}\",\n keep_delim_regex_pattern=r\"\\p{S}|\\p{Hiragana}\",\n expected=[[[b\"show\", b\"me\", b\"some\", b\"$\", b\"100\", b\"bills\", b\"yo!\"],\n [_utf8(u\"で\"), _utf8(u\"は\"),\n _utf8(u\"4日\")]], [[b\"hello\", b\"there\"], [b\"woot\",\n b\"woot\"]]],\n ),\n dict(\n descr=\"Test input Tensor with multiple; shape = [2, 2, 1]\",\n input_is_dense=True,\n 
text_input=[\n [[b\"show me some $100 bills yo!\"], [_utf8(u\"では4日\")]],\n [[b\"hello there\"], [b\"woot woot\"]],\n ],\n delim_regex_pattern=r\"\\s|\\p{S}|\\p{Hiragana}\",\n keep_delim_regex_pattern=r\"\\p{S}|\\p{Hiragana}\",\n # expected shape = [2, 2, 1, ]\n expected=[[[[b\"show\", b\"me\", b\"some\", b\"$\", b\"100\", b\"bills\",\n b\"yo!\"]], [[_utf8(u\"で\"),\n _utf8(u\"は\"),\n _utf8(u\"4日\")]]],\n [[[b\"hello\", b\"there\"]], [[b\"woot\", b\"woot\"]]]],\n ),\n ])\n def testRegexSplitOp(self,\n text_input,\n delim_regex_pattern,\n expected,\n keep_delim_regex_pattern=r\"\",\n descr=\"\",\n input_is_dense=False,\n ragged_rank=None):\n if input_is_dense:\n text_input = constant_op.constant(text_input)\n else:\n text_input = ragged_factory_ops.constant(text_input,\n ragged_rank=ragged_rank)\n\n actual_tokens, start, end = regex_split_ops.regex_split_with_offsets(\n input=text_input,\n delim_regex_pattern=delim_regex_pattern,\n keep_delim_regex_pattern=keep_delim_regex_pattern,\n )\n self.assertAllEqual(actual_tokens, expected)\n\n # Use the offsets to extract substrings and verify that the substrings match\n # up with the expected tokens\n extracted_tokens = _ragged_substr(array_ops.expand_dims(text_input, -1),\n start, end - start)\n if extracted_tokens is not None:\n self.assertAllEqual(extracted_tokens, expected)\n\n\n@test_util.run_all_in_graph_and_eager_modes\nclass RegexSplitterTestCases(test.TestCase, parameterized.TestCase):\n\n @parameterized.parameters([\n dict(\n test_description=\"Split on new line\",\n text_input=[\n b\"Hi there.\\nWhat time is it?\\nIt is gametime.\",\n b\"Who let the dogs out?\\nWho?\\nWho?\\nWho?\",\n ],\n expected=[[b\"Hi there.\", b\"What time is it?\", b\"It is gametime.\"],\n [b\"Who let the dogs out?\", b\"Who?\", b\"Who?\", b\"Who?\"]],\n ),\n dict(\n test_description=\"Test trailing \\\\n.\",\n text_input=[\n b\"Hi there.\\nWhat time is it?\\nIt is gametime.\",\n b\"Who let the dogs out?\\nWho?\\nWho?\\nWho?\\n\",\n ],\n expected=[[b\"Hi there.\", b\"What time is it?\", b\"It is gametime.\"],\n [b\"Who let the dogs out?\", b\"Who?\", b\"Who?\", b\"Who?\"]],\n ),\n dict(\n test_description=\"Custom regex.\",\n text_input=[\n b\"Hi there.\\r\\nWhat time is it?\\r\\nIt is gametime.\",\n b\"Who let the dogs out?\\r\\nWho?\\r\\nWho?\\r\\nWho?\",\n ],\n expected=[[b\"Hi there.\", b\"What time is it?\", b\"It is gametime.\"],\n [b\"Who let the dogs out?\", b\"Who?\", b\"Who?\", b\"Who?\"]],\n new_sentence_regex=\"\\r\\n\",\n ),\n ])\n def testRegexSplitter(self,\n test_description,\n text_input,\n expected,\n new_sentence_regex=None):\n text_input = constant_op.constant(text_input)\n sentence_breaker = regex_split_ops.RegexSplitter(new_sentence_regex)\n actual = sentence_breaker.split(text_input)\n self.assertAllEqual(actual, expected)\n\nif __name__ == \"__main__\":\n test.main()\n", "id": "5231266", "language": "Python", "matching_score": 0.6460076570510864, "max_stars_count": 1, "path": "tensorflow_text/python/ops/regex_split_ops_test.py" }, { "content": "# coding=utf-8\n# Copyright 2022 TF.Text Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# 
See the License for the specific language governing permissions and\n# limitations under the License.\n\nr\"\"\"Tests for pywrap_fast_wordpiece_tokenizer_model_builder.\"\"\"\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nfrom tensorflow.python.framework import test_util\nfrom tensorflow.python.platform import gfile\nfrom tensorflow.python.platform import test\nfrom tensorflow_text.core.pybinds import pywrap_fast_wordpiece_tokenizer_model_builder\n\nEXPECTED_MODEL_BUFFER_PATH = \"third_party/tensorflow_text/python/ops/test_data/fast_wordpiece_tokenizer_model.fb\"\n\n\nclass PywrapFastWordpieceBuilderTest(test_util.TensorFlowTestCase):\n\n def test_build(self):\n vocab = [\n \"a\", \"abc\", \"abcdefghi\", \"##de\", \"##defgxy\", \"##deh\", \"##f\", \"##ghz\",\n \"<unk>\"\n ]\n max_bytes_per_token = 100\n suffix_indicator = \"##\"\n unk_token = \"<unk>\"\n expected_model_buffer = gfile.GFile(EXPECTED_MODEL_BUFFER_PATH, \"rb\").read()\n self.assertEqual(\n pywrap_fast_wordpiece_tokenizer_model_builder\n .build_fast_wordpiece_model(\n vocab, max_bytes_per_token, suffix_indicator, unk_token, True,\n False),\n expected_model_buffer)\n\n def test_build_throw_exception_unk_token_not_in_vocab(self):\n vocab = [\n \"a\", \"abc\", \"abcdefghi\", \"##de\", \"##defgxy\", \"##deh\", \"##f\", \"##ghz\"\n ]\n max_bytes_per_token = 100\n suffix_indicator = \"##\"\n unk_token = \"<unk>\"\n with self.assertRaisesRegex(RuntimeError,\n \"Cannot find unk_token in the vocab!\"):\n (pywrap_fast_wordpiece_tokenizer_model_builder\n .build_fast_wordpiece_model(\n vocab, max_bytes_per_token, suffix_indicator, unk_token, True,\n False))\n\n\nif __name__ == \"__main__\":\n test.main()\n", "id": "11022685", "language": "Python", "matching_score": 1.9854750633239746, "max_stars_count": 2, "path": "tensorflow_text/core/pybinds/pywrap_fast_wordpiece_tokenizer_model_builder_test.py" }, { "content": "# coding=utf-8\n# Copyright 2022 TF.Text Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nr\"\"\"Tests for pywrap_whitespace_tokenizer_config_builder.\"\"\"\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nfrom tensorflow.python.framework import test_util\nfrom tensorflow.python.platform import test\nfrom tensorflow_text.core.pybinds import pywrap_whitespace_tokenizer_config_builder as pywrap_builder\n\n\nclass PywrapFastWordpieceBuilderTest(test_util.TensorFlowTestCase):\n\n # This is not supposed to be an exhaustive test. That is done with the\n # builder test. 
We just want to sanity check a couple values to show we have\n # received something.\n def test_build(self):\n # check non-empty\n config = pywrap_builder.build_whitespace_tokenizer_config()\n self.assertNotEmpty(config)\n # check space character is whitespace\n character = ord(' ')\n bits = config[character >> 3]\n mask = 1 << (character & 0x7)\n self.assertGreater(bits & mask, 0)\n # check letter is not whitespace\n character = ord('a')\n bits = config[character >> 3]\n mask = 1 << (character & 0x7)\n self.assertEqual(bits & mask, 0)\n\n\nif __name__ == '__main__':\n test.main()\n", "id": "7110900", "language": "Python", "matching_score": 1.7746086120605469, "max_stars_count": 2, "path": "tensorflow_text/core/pybinds/pywrap_whitespace_tokenizer_config_builder_test.py" } ]
1.778253
aswathselvam
[ { "content": "#!/usr/bin/env python\n\nimport traceback\nimport rospy\nfrom geometry_msgs.msg import Twist\nfrom std_msgs.msg import Float64\nimport tf2_ros\nimport geometry_msgs.msg\n\nfrom gazebo_msgs.srv import GetLinkState \n\n\ndef cmdVelCB(data):\n return \n\ndef process():\n global model_info, chassis_info\n loop_rate = rospy.Rate(10)\n\n br = tf2_ros.TransformBroadcaster()\n t = geometry_msgs.msg.TransformStamped()\n\n mouse_sub = rospy.Subscriber('/cmd_vel', Twist, cmdVelCB, queue_size=10)\n while not rospy.is_shutdown():\n chassis_info = model_info(\"robot::base_link\",\"world\")\n\n t.header.stamp = rospy.Time.now()\n t.header.frame_id = \"map\"\n t.child_frame_id = \"base_link\"\n t.transform.translation.x = chassis_info.link_state.pose.position.x\n t.transform.translation.y = chassis_info.link_state.pose.position.y\n t.transform.translation.z = chassis_info.link_state.pose.position.z\n t.transform.rotation.x = chassis_info.link_state.pose.orientation.x\n t.transform.rotation.y = chassis_info.link_state.pose.orientation.y\n t.transform.rotation.z = chassis_info.link_state.pose.orientation.z\n t.transform.rotation.w = chassis_info.link_state.pose.orientation.w\n\n br.sendTransform(t)\n loop_rate.sleep()\n\nif __name__ == '__main__':\n rospy.init_node('pub_robot_TF', anonymous=False)\n model_info= rospy.ServiceProxy('/gazebo/get_link_state', GetLinkState) \n try:\n process()\n except Exception as ex:\n print(traceback.print_exc())", "id": "3357166", "language": "Python", "matching_score": 0, "max_stars_count": 1, "path": "src/publish_baselinkTF.py" }, { "content": "#! /usr/bin/env python\n\n\nimport traceback\n\nimport rospy\nfrom geometry_msgs.msg import Twist\nfrom std_msgs.msg import Float64\nimport tf_conversions\nimport tf2_ros\nimport geometry_msgs.msg\nimport turtlesim.msg\n\nfrom gazebo_msgs.srv import GetLinkState \nfrom gazebo_msgs.msg import LinkState # For getting information about link states\n\nimport numpy as np\nimport math\n\nwidth = {\"fr\":0.275, \"fl\":0.275, \"rr\":0.275, \"rl\":0.275}\nlength = {\"fr\":0.575, \"fl\":0.575, \"rr\":0.575, \"rl\":0.575}\n\nfr_pub = None\nfl_pub = None\nrr_pub = None\nrl_pub = None\n\ndef cmdVelCB(data):\n \n global fr_pub, fl_pub, rr_pub, rl_pub\n\n mat = np.matrix([[ 1, 1, (width[\"fr\"] + length[\"fr\"])],\n [ 1, -1, -(width[\"fl\"] + length[\"fl\"])],\n [ 1, -1, (width[\"rr\"] + length[\"rr\"])],\n [ 1, 1, -(width[\"rl\"] + length[\"rl\"])]]) \n\n cmd_vel = np.matrix([data.linear.x, data.linear.y, data.angular.z])\n\n wheel_vel = (np.dot(mat, cmd_vel.T).A1).tolist()\n\n wv = Float64()\n\n wv.data = wheel_vel[0]\n fr_pub.publish(wv)\n\n wv.data = wheel_vel[1]\n fl_pub.publish(wv)\n\n wv.data = wheel_vel[2]\n rr_pub.publish(wv)\n\n wv.data = wheel_vel[3]\n rl_pub.publish(wv)\n \n\ndef process():\n\n global fr_pub, fl_pub, rr_pub, rl_pub\n\n loop_rate = rospy.Rate(10)\n\n fr_pub = rospy.Publisher('/front_right_controller/command', Float64, queue_size=10)\n fl_pub = rospy.Publisher('/front_left_controller/command', Float64, queue_size=10)\n rr_pub = rospy.Publisher('/rear_right_controller/command', Float64, queue_size=10)\n rl_pub = rospy.Publisher('/rear_left_controller/command', Float64, queue_size=10)\n\n mouse_sub = rospy.Subscriber('/cmd_vel', Twist, cmdVelCB, queue_size=10)\n\n while not rospy.is_shutdown():\n loop_rate.sleep()\n\n\n\n\nif __name__ == '__main__':\n rospy.init_node('test_mecanum_robot', anonymous=False)\n try:\n process()\n\n except Exception as ex:\n print(traceback.print_exc())", "id": "9177487", 
"language": "Python", "matching_score": 0, "max_stars_count": 1, "path": "test/test_mecanum_robot.py" } ]
0
lol-tech-404
[ { "content": "from flask import Flask, render_template, request\nfrom keras.models import load_model\nfrom keras.preprocessing import image\nimport numpy as np\n\napp = Flask(__name__)\n\ndic = {0: 'Normal', 1: 'Diabetic Retinopathy'}\n\nmodel = load_model('my_model')\n\nmodel.make_predict_function()\n\n\ndef predict_label(img_path):\n i = image.load_img(img_path, target_size=(224, 224))\n i = image.img_to_array(i) / 255.0\n i = i.reshape(1, 224, 224, 3)\n # p = model.predict_classes(i)\n y_pre = model.predict(i)\n y_pred = [np.argmax(j) for j in y_pre]\n return dic[y_pred[0]]\n\n\n# routes\n@app.route(\"/\", methods=['GET', 'POST'])\ndef main():\n return render_template(\"index.html\")\n\n\n@app.route(\"/about\")\ndef about_page():\n return \"Welcome to Gravitas AI!!!!\"\n\n\n@app.route(\"/submit\", methods=['GET', 'POST'])\ndef get_output():\n if request.method == 'POST':\n img = request.files['my_image']\n\n img_path = \"static/\" + img.filename\n img.save(img_path)\n\n p = predict_label(img_path)\n\n return render_template(\"index.html\", prediction=p, img_path=img_path)\n\n\nif __name__ == '__main__':\n # app.debug = True\n app.run(debug=False)\n", "id": "5188449", "language": "Python", "matching_score": 0, "max_stars_count": 1, "path": "app.py" } ]
0
CS178A-B
[ { "content": "from nltk.tokenize import sent_tokenize, word_tokenize\n\ndef split_sentences(data):\n\n phrases = sent_tokenize(data)\n # words = word_tokenize(data)\n\n return phrases\n", "id": "11186988", "language": "Python", "matching_score": 0, "max_stars_count": 1, "path": "public/gcloud/sentence.py" }, { "content": "from google.cloud import automl\n\n# TODO(developer): Uncomment and set the following variables\nproject_id = \"XXX\"\nmodel_id = \"XXX\"\n\nprediction_client = automl.PredictionServiceClient()\n\n# Get the full path of the model.\nmodel_full_id = automl.AutoMlClient.model_path(\n project_id, \"us-central1\", model_id\n)\n\ndef predict(content):\n\n # Supported mime_types: 'text/plain', 'text/html'\n # https://cloud.google.com/automl/docs/reference/rpc/google.cloud.automl.v1#textsnippet\n text_snippet = automl.TextSnippet(\n content=content, mime_type=\"text/plain\"\n )\n payload = automl.ExamplePayload(text_snippet=text_snippet)\n\n response = prediction_client.predict(name=model_full_id, payload=payload)\n \n\n for annotation_payload in response.payload:\n # print(\n # u\"Predicted class name: {}\".format(annotation_payload.display_name)\n # )\n # print(\n # u\"Predicted class score: {}\".format(\n # annotation_payload.classification.score\n # )\n # )\n return annotation_payload.display_name\n break\n", "id": "3926001", "language": "Python", "matching_score": 0.29625073075294495, "max_stars_count": 1, "path": "public/gcloud/automl.py" }, { "content": "import itertools\n\nclass review:\n def __init__(self, ambience, service, food, other):\n self.ambience = ambience\n self.service = service\n self.food = food\n self.other = other\n \nclass food_category:\n def __init__(self, ratings_list):\n self.ratings_list = ratings_list\n \n def avg():\n total = 0\n for rating in ratings_list:\n total += rating\n \n return total / len(ratings_list)\n \nclass ambience(food_category):\n pass\n\nclass food(food_category):\n pass\n\nclass service(food_category):\n pass\n\ndef getStar(sentiment_score):\n if (sentiment_score < -0.7 and sentiment_score > -0.99): # 1 star\n return 1\n elif sentiment_score > -0.7 and sentiment_score <= -0.5: # 1.5 star\n return 1.5\n elif sentiment_score > -0.5 and sentiment_score <= -0.3: # 2 star\n return 2\n elif sentiment_score > -0.3 and sentiment_score <= -0.1: # 2.5 star\n return 2.5\n elif sentiment_score > -0.1 and sentiment_score < 0.1: # 3 star\n return 3\n elif sentiment_score >= 0.1 and sentiment_score < 0.3: # 3.5 star\n return 3.5\n elif sentiment_score >= 0.3 and sentiment_score < 0.5: # 4 star\n return 4\n elif sentiment_score >= 0.5 and sentiment_score < 0.7: # 4.5 star\n return 4.5\n elif sentiment_score >= 0.7 and sentiment_score < 0.99: # 5 star\n return 5\n elif sentiment_score == -10:\n return 0\n \ndef getRating(scores, categories):\n length = 0\n \n if len(scores) == len(categories):\n length = len(scores)\n \n service = []\n ambience = []\n food = []\n other = []\n \n for (score, category) in zip(scores, categories):\n print(score)\n if (category == \"food\"):\n food.append(score)\n print(\"food\")\n elif (category == \"ambience\"):\n ambience.append(score)\n print(\"ambience\")\n elif (category == \"service\"):\n service.append(score)\n print(\"service\")\n elif (category == \"other\"): # not sure what to do with this yet\n other.append(score)\n print(\"other\")\n \n \n # Create service, ambience, food, other\n ambience_avg = -10 # special value\n service_avg = -10\n food_avg = -10\n other_avg = -10\n \n total = 0\n if 
(len(ambience) != 0):\n for rating in ambience:\n total += rating\n ambience_avg = total / len(ambience)\n \n total = 0\n if (len(service) != 0):\n for rating in service:\n total += rating\n service_avg = total / len(service)\n \n total = 0\n if (len(food) != 0):\n for rating in food:\n total += rating\n food_avg = total / len(food)\n \n total = 0\n if (len(other) != 0):\n for rating in other:\n total += rating\n other_avg = total / len(other)\n \n if food_avg != -10 and service_avg != -10 and ambience_avg != -10:\n other_avg = 1\n else:\n other_avg = 0\n \n review_with_score = review(getStar(ambience_avg), getStar(service_avg), getStar(food_avg), other_avg)\n \n return review_with_score\n \n \n", "id": "11626498", "language": "Python", "matching_score": 0.8820981979370117, "max_stars_count": 1, "path": "public/gcloud/ratings.py" }, { "content": "import csv\nfrom write_data import *\n\ndef round_of_rating(number):\n return round(number * 2) / 2\n\nclass review:\n def __init__(self, ambience, service, food):\n self.ambience = ambience\n self.service = service\n self.food = food\n\nrestaurant_list = []\n\nprint(\"hello\")\n\nwith open('nyc_restaurants.csv') as csv_file:\n csv_reader = csv.reader(csv_file, delimiter=',')\n \n for row in csv_reader:\n restaurant_list.append(row[1])\n \n print(restaurant_list)\n \n \n review_scores = []\n \nfor item in range(1, len(restaurant_list)):\n\n ambience, service, food = 0.0, 0.0, 0.0\n numambience, numservice, numfood = 0.0, 0.0, 0.0 # ignore 0 values in calculating mean\n\n print(restaurant_list[item])\n \n with open('categorized_reviews.csv') as csv_file:\n csv_reader = csv.reader(csv_file, delimiter=',')\n line_count = 0\n\n firstLine = True # skip first \n\n for row in csv_reader:\n if firstLine:\n firstLine = False\n line_count += 1\n continue\n\n # print(row[0])\n\n if (row[0] == restaurant_list[item]):\n\n # 5 ambience, 6 service, 7 food\n print(row[5])\n if row[5] != \"0\": # ambience\n numambience += 1\n ambience += float(row[5])\n\n print(row[6])\n if row[6] != \"0\": # service\n numservice += 1\n service += float(row[6])\n\n print(row[7])\n if row[7] != \"0\": # service\n numfood += 1\n food += float(row[7])\n\n line_count += 1\n\n # end of for loop\n if numambience == 0.0:\n numambience = 1.0\n if numservice == 0.0:\n numservice = 1.0\n if numfood == 0.0:\n numfood = 1.0\n\n ratings = review(round_of_rating(ambience / numambience), round_of_rating(service / numservice), round_of_rating(food / numfood))\n review_scores.append(ratings)\n\n # for item in range(1, len(review_scores)):\n # print(review_scores[item].ambience)\n \nwrite_data_to_new_csv('new_york_business_collection.csv', 'new.csv', review_scores)\n \n # exit(0)\n \n \n", "id": "5529453", "language": "Python", "matching_score": 3.479846239089966, "max_stars_count": 1, "path": "public/gcloud/restaurantrating.py" }, { "content": "from nlp import *\nfrom automl import *\nfrom ratings import *\nimport csv\nimport pandas as pd\nfrom sentence import split_sentences\nfrom write_data import *\n\n\nclass review:\n def __init__(self, num, business_id, user_id, stars, text, date):\n self.num = num\n self.business_id = business_id\n self.user_id = user_id\n self.stars = stars\n self.text = text\n self.date = date\n\n#with open('final.csv', mode='w') as final_file:\n# labeleddata = csv.writer(final_file, delimiter=',', quotechar='\"', quoting=csv.QUOTE_MINIMAL) \n\n\n\nwith open('new_york_reviews.csv') as csv_file:\n csv_reader = csv.reader(csv_file, delimiter=',')\n line_count = 0\n \n # 
create_new_columns('fake_reviews.csv')\n\n firstLine = True # skip first \n \n review_scores = []\n \n for row in csv_reader:\n if firstLine:\n firstLine = False\n line_count += 1\n continue\n # print (row[4])\n temp_sentence_list = split_sentences(row[4]) # column 4 = where the review is\n # print(temp_sentence_list)\n\n scores = []\n categories = []\n \n \n\n for sentence in temp_sentence_list:\n print(sentence)\n sentiment_score = analyze_sentiment(sentence)\n category = automl_predict(sentence)\n\n categories.append(category)\n scores.append(sentiment_score)\n\n review_with_score = getRating(scores, categories)\n review_scores.append(review_with_score)\n \n print(\"Ambience: \" + str(review_with_score.ambience))\n print(\"Service: \" + str(review_with_score.service))\n print(\"Food: \" + str(review_with_score.food))\n print(\"Score: \" + str(review_with_score.other))\n\n # write_data_to_column(row, review_with_score, 'fake_reviews.csv')\n\n # for (score, category) in zip(scores, categories):\n # print(score)\n # print(category)\n\n # getRating(scores, categories)\n\n\n # exit(0) # for testing purposes\n line_count += 1\n \n write_data_to_new_csv('new_york_reviews.csv', 'new.csv', review_scores)\n\n # split sentence\n\n \n", "id": "575444", "language": "Python", "matching_score": 3.6625382900238037, "max_stars_count": 1, "path": "public/gcloud/model.py" }, { "content": "import csv\nimport pandas as pd\n\n# WITHOUT PANDAS #\n\ndef write_data_to_new_csv(old_file, new_file, review_scores_list):\n with open(old_file, 'r') as csvinput:\n with open(new_file, 'w') as csvoutput:\n writer = csv.writer(csvoutput, lineterminator='\\n')\n reader = csv.reader(csvinput)\n\n all = []\n row = next(reader)\n row.append('ambience')\n row.append('service')\n row.append('food')\n # row.append('other')\n all.append(row)\n\n review_scores_list_iterator = 0;\n for row in reader:\n row.append(review_scores_list[review_scores_list_iterator].ambience)\n row.append(review_scores_list[review_scores_list_iterator].service)\n row.append(review_scores_list[review_scores_list_iterator].food)\n # row.append(review_scores_list[review_scores_list_iterator].other)\n all.append(row)\n review_scores_list_iterator += 1\n\n writer.writerows(all)\n\n\n# WITH PANDAS #\n\ndef create_new_columns(file):\n df = pd.read_csv(file, index_col=[0])\n df[\"ambience\"] = \"\"\n df[\"service\"] = \"\"\n df[\"food\"] = \"\"\n # df[\"other\"] = \"\"\n df.to_csv(file, quoting=csv.QUOTE_NONE)\n\n\ndef write_data_to_column(row, data, file):\n df = pd.read_csv(file, index_col=[0])\n df.at[row, \"ambience\"] = data.ambience\n df.at[row, \"service\"] = data.service\n df.at[row, \"food\"] = data.food\n # df.at[row, \"other\"] = data.other\n df.to_csv(file, quoting=csv.QUOTE_NONE)\n\n\n# TESTING #\n\nclass review:\n def __init__(self, ambience, service, food):\n self.ambience = ambience\n self.service = service\n self.food = food\n # self.other = other\n\n\n# create_new_columns('new_york_reviews_three_copy0.csv')\n# write_data_to_column(1, p1, 'new_york_reviews_three_copy0.csv')\n# write_data_to_column(2, p2, 'new_york_reviews_three_copy0.csv')\n# write_data_to_column(3, p3, 'new_york_reviews_three_copy0.csv')\n\n\n", "id": "5258519", "language": "Python", "matching_score": 0.10652165859937668, "max_stars_count": 1, "path": "public/gcloud/write_data.py" }, { "content": "from django.views.generic import TemplateView\nfrom django.views.decorators.cache import never_cache\nfrom django.http import HttpResponse,JsonResponse\nimport pymongo\nfrom pymongo 
import MongoClient\nfrom bson.json_util import dumps, loads\nfrom django.views.decorators.csrf import csrf_exempt\nfrom django.views.decorators.csrf import ensure_csrf_cookie\nimport json\nimport os\n\nfrom django.http import JsonResponse\nfrom rest_framework.decorators import api_view, permission_classes, authentication_classes\nfrom rest_framework.permissions import AllowAny\nfrom django.http import QueryDict\nfrom rest_framework.authentication import TokenAuthentication\nfrom bson.objectid import ObjectId\n\nfrom functools import wraps\nimport jwt\n\n# Serve Single Page Application\nindex = never_cache(TemplateView.as_view(template_name='index.html'))\n\ncluster = MongoClient(os.environ.get(\"MONGODB_URL\"))\ndb = cluster[\"Information\"]\norganization_info_collection = db[\"organization_info\"]\nuser_info_collection = db[\"user_info\"]\nuser_hash_collection = db[\"user_hash\"]\nid = 0\n\n# for demos\nhard_coded_orgs = {\n \"ACM\" : {\n \"Delegators\" : [],\n \"Members\" : [],\n \"org_events\" : [\n {\"Id\": 1,\n \"Subject\": \"Hackathon\",\n \"Location\": \"UCR\",\n \"StartTime\": \"2021-01-28T04:00:00.000Z\",\n \"EndTime\": \"2021-01-30T05:30:00.000Z\",\n \"CategoryColor\": \"#1aaa55\"},\n {\"Id\": 2,\n \"Subject\": \"Professional Development\",\n \"Location\": \"UCR\",\n \"StartTime\": \"2021-01-25T06:30:00.000Z\",\n \"EndTime\": \"2021-01-25T08:30:00.000Z\",\n \"CategoryColor\": \"#357cd2\"}\n ]\n },\n \"PersianClub\" : {\n \"Delegators\" : [],\n \"Members\" : [],\n \"org_events\" : [\n {\"Id\": 3,\n \"Subject\": \"Evening Meeting\",\n \"Location\": \"UCR\",\n \"StartTime\": \"2021-01-20T04:00:00.000Z\",\n \"EndTime\": \"2021-01-20T05:30:00.000Z\",\n \"Color\": \"#1aaa55\"},\n\n {\"Id\": 4,\n \"Subject\": \"Culture Night\",\n \"Location\": \"UCR\",\n \"StartTime\": \"2021-01-30T06:30:00.000Z\",\n \"EndTime\": \"2021-01-30T08:30:00.000Z\",\n \"Color\": \"#357cd2\"}\n ]\n },\n \"ChessClub\" : {\n \"Delegators\" : [],\n \"Members\" : [],\n \"org_events\" : [\n {\"Id\": 5,\n \"Subject\": \"General Meeting\",\n \"Location\": \"UCR\",\n \"StartTime\": \"2021-01-08T02:00:00.000Z\",\n \"EndTime\": \"2021-01-08T03:30:00.000Z\",\n \"Color\": \"#1aaa55\"},\n\n {\"Id\": 6,\n \"Subject\": \"Tournament\",\n \"Location\": \"UCR\",\n \"StartTime\": \"2021-01-24T05:30:00.000Z\",\n \"EndTime\": \"2021-01-24T09:30:00.000Z\",\n \"Color\": \"#357cd2\"}\n ]\n },\n \"TeamRocket\" : {\n \"Delegators\" : [],\n \"Members\" : [],\n \"org_events\" : [\n {\"Id\": 7,\n \"Subject\": \"Blasting Off\",\n \"Location\": \"Pokemon\",\n \"StartTime\": \"2021-01-08T02:00:00.000Z\",\n \"EndTime\": \"2021-01-08T03:30:00.000Z\",\n \"CategoryColor\": \"#1aaa55\"},\n\n {\"Id\": 8,\n \"Subject\": \"Causing Mayhem\",\n \"Location\": \"Rocket Island\",\n \"StartTime\": \"2021-01-19T05:30:00.000Z\",\n \"EndTime\": \"2021-01-19T09:30:00.000Z\",\n \"CategoryColor\": \"#357cd2\"}\n ]\n },\n \"BowlingClub\" : {\n \"Delegators\" : [],\n \"Members\" : [],\n \"org_events\" : [\n {\"Id\": 9,\n \"Subject\": \"Strike Saturday's\",\n \"Location\": \"Bowling Alley\",\n \"StartTime\": \"2021-01-30T12:00:00.000Z\",\n \"EndTime\": \"2021-01-30T03:30:00.000Z\",\n \"CategoryColor\": \"#1aaa55\"},\n\n {\"Id\": 10,\n \"Subject\": \"Learning the Basics II\",\n \"Location\": \"UCR Campus\",\n \"StartTime\": \"2021-02-01T05:30:00.000Z\",\n \"EndTime\": \"2021-02-01T09:30:00.000Z\",\n \"CategoryColor\": \"#357cd2\"}\n ]\n }\n}\n\n\ndef get_token_auth_header(request):\n \"\"\"Obtains the access token from the Authorization Header\n \"\"\"\n auth = 
request.META.get(\"HTTP_AUTHORIZATION\", None)\n parts = auth.split()\n token = parts[1]\n\n return token\n\n\ndef requires_scope(required_scope):\n \"\"\"Determines if the required scope is present in the access token\n Args:\n required_scope (str): The scope required to access the resource\n \"\"\"\n def require_scope(f):\n print('in required scope')\n @wraps(f)\n def decorated(*args, **kwargs):\n token = get_token_auth_header(args[0])\n decoded = jwt.decode(token, verify=False)\n if decoded.get(\"scope\"):\n token_scopes = decoded[\"scope\"].split()\n for token_scope in token_scopes:\n if token_scope == required_scope:\n return f(*args, **kwargs)\n response = JsonResponse({'message': 'You don\\'t have access to this resource'})\n response.status_code = 403\n return response\n return decorated\n return require_scope\n\n@permission_classes([AllowAny])\ndef public(request):\n print('in public')\n return JsonResponse({'message': 'Hello from a public endpoint! You don\\'t need to be authenticated to see this.'})\n\n@api_view(['GET'])\ndef private(request):\n print('in private')\n return JsonResponse({'message': 'Hello from a private endpoint! You need to be authenticated to see this.'})\n\n# Need GET calendar info, add calendar info (POST), update calendar info (PUT), delete calendarItem (DELETE)\n# Here is one example of a get request. Similarly try to get the other methods and update urls.py\n#\"organizations\" : [{\"ACM\" : \"hello\"}]\n\n\n@csrf_exempt\n@api_view(['GET'])\ndef getCalendarInfo(request): #pass in {request.user.username: google-auth-api-123dasf, name: <NAME>}\n responseData = {\n \"name\" : \"\",\n \"username\" : \"\",\n \"delegator_list\" : {},\n \"organizations\": {}\n }\n if(request.method == 'GET'): #If get response has data / if user used api/getCalendarInfo without token\n username = request.user.username\n #username = request.GET.get(\"username\")\n name = request.GET.get(\"name\")\n\n if(user_info_collection.find({\"username\": username}).count() > 0): #if the document/user exists\n responseData = user_info_collection.find_one({\"username\": username}) #Obtain Json Data of User\n del responseData[\"_id\"]\n return JsonResponse(responseData)\n else:\n responseData[\"name\"] = name\n responseData[\"username\"] = username\n user_info_collection.insert_one(responseData) #Otherwise insert the template (empty) data into database\n del responseData[\"_id\"]\n return JsonResponse(responseData)\n else: #return empty Json if user logged onto url without authentication\n return JsonResponse({\"NULL\" : \"NULL\"})\n\n@csrf_exempt\n@api_view(['POST'])\ndef addOrganization(request): #pass in {organization_id: \"<PASSWORD>\", password : \"<PASSWORD>\"} can only add one club at a time due to password\n if(request.POST):\n username = request.user.username\n #username = request.POST.get(\"username\")\n organization_id = request.POST.get(\"organization_id\")\n password = request.POST.get(\"password\")\n returnData = user_info_collection.find_one({\"username\": username})\n user_organizations = returnData[\"organizations\"]\n org_name = list(organization_info_collection.find_one({'_id': ObjectId(str(organization_id))})) [1]\n blocked_member_list = organization_info_collection.find_one({'_id': ObjectId(str(organization_id))})[org_name][\"Blocked Members\"]\n\n\n if(username in blocked_member_list):\n return JsonResponse({\"Error\" : \"User is blocked from joining organization\"})\n\n if(organization_id in user_organizations): #if user is already in organization, don't add it\n return 
JsonResponse({\"Error\" : \"Organization is already added to user\"})\n else: #otherwise add it\n if(organization_info_collection.find_one({'_id': ObjectId(str(organization_id))})[org_name][\"Public\"] or organization_info_collection.find_one({'_id': ObjectId(str(organization_id))})[org_name][\"Password\"] == password):\n\n user_organizations.update({organization_id : {\"org_name\" : org_name, \"org_description\" : organization_info_collection.find_one({'_id': ObjectId(str(organization_id))}) [org_name][\"Organization Description\"],\n \"org_events\" : organization_info_collection.find_one({'_id': ObjectId(str(organization_id))}) [org_name][\"org_events\"]}})\n\n organization_info_collection.update({\"_id\": ObjectId(str(organization_id))},\n {\"$push\": {org_name + \".Members\": username}})\n else:\n return JsonResponse({\"Error\" : \"Organization is not public or password is incorrect\"})\n\n user_info_collection.update({\"username\": username},\n {\"$set\": {\"organizations\": user_organizations}})\n\n\n responseData = user_info_collection.find_one({\"username\": username}) #Obtain Json Data of User\n del responseData[\"_id\"]\n return JsonResponse(responseData)\n else:\n return JsonResponse({\"NULL\" : \"NULL\"})\n\n\n@csrf_exempt\n@api_view(['POST'])\ndef deleteEvent(request):#pass in {organization_id: 1288fadf213, id: 5}\n if(request.POST):\n username = request.user.username\n #username = request.POST.get(\"username\")\n id = int(request.POST.get(\"id\"))\n organization_id = request.POST.get(\"organization_id\")\n\n org_name = list(organization_info_collection.find_one({'_id': ObjectId(str(organization_id))})) [1] #get name of Club, ACM etc\n delegator_list = organization_info_collection.find_one({'_id': ObjectId(str(organization_id))})[org_name][\"Delegators\"]\n member_list = organization_info_collection.find_one({'_id': ObjectId(str(organization_id))})[org_name][\"Members\"]\n org_event_list = organization_info_collection.find_one({'_id': ObjectId(str(organization_id))})[org_name][\"org_events\"]\n if(username in delegator_list): #check if user id is in delegators and can create events\n for index in range(len(org_event_list)):\n if(org_event_list[index] == org_name):\n pass\n elif (org_event_list[index][\"id\"] == id):\n del org_event_list[index]\n break\n else:\n return JsonResponse({\"Error\": \"User is not allowed to delete events\"})\n\n\n organization_info_collection.update({\"_id\": ObjectId(str(organization_id))},\n {\"$set\": {org_name + \".org_events\": org_event_list}})\n\n for delegator in delegator_list:\n user_info_collection.update({\"username\": delegator},\n {\"$set\": {\"organizations.\" + str(organization_id) + \".org_events\": org_event_list}})\n\n for member in member_list:\n user_info_collection.update({\"username\": member},\n {\"$set\": {\"organizations.\" + str(organization_id) + \".org_events\" : org_event_list}})\n\n return JsonResponse({\"Successful\" : \"Event is deleted from all calendars in organization\"})\n return JsonResponse({\"NULL\" : \"NULL\"})\n\n\n\n@csrf_exempt\n@api_view(['POST'])\ndef deleteOrganization(request): #pass in {organization_id: 1adf320jfo1ebc9}\n if(request.POST):\n username = request.user.username\n #username = request.POST.get(\"username\")\n user_organizations = user_info_collection.find_one({\"username\": username})[\"organizations\"]\n organization_id = request.POST.get(\"organization_id\")\n\n del user_organizations[organization_id] #delete organization user passes in\n user_info_collection.update({\"username\": 
username}, #update organizations\n {\"$set\": {\"organizations\": user_organizations}})\n\n org_name = list(organization_info_collection.find_one({'_id': ObjectId(str(organization_id))})) [1]\n\n if(username in organization_info_collection.find_one({'_id': ObjectId(str(organization_id))})[org_name][\"Members\"]):\n organization_info_collection.update({\"_id\": ObjectId(str(organization_id))},\n {\"$pull\": {org_name + \".Members\": username}})\n\n return JsonResponse({\"Successful\" : \"Deleted organization for the member\"})\n elif(username in organization_info_collection.find_one({'_id': ObjectId(str(organization_id))})[org_name][\"Delegators\"]):\n organization_info_collection.update({\"_id\": ObjectId(str(organization_id))},\n {\"$pull\": {org_name + \".Delegators\": username}})\n user_delegator_list = user_info_collection.find_one({\"username\": username})[\"delegator_list\"]\n del user_delegator_list[organization_id]\n user_info_collection.update({\"username\": username}, #update organizations\n {\"$set\": {\"delegator_list\": user_delegator_list}})\n\n return JsonResponse({\"Successful\" : \"Deleted organization for the delegator\"})\n\n return JsonResponse({\"Error\" : \"Could not find user as member or delegator\"})\n return JsonResponse({\"NULL\" : \"NULL\"})\n\n@csrf_exempt\n@api_view(['POST'])\ndef createOrganization(request): #pass in {organization : Vish's CS Club, org_description : \"This club is about etc...\", public_true_false : True/False, password: <PASSWORD>acd/\"\"}\n if(request.POST):\n username = request.user.username\n #username = request.POST.get(\"username\")\n organization = request.POST.get(\"organization\")\n org_description = request.POST.get(\"org_description\")\n public_true_false = request.POST.get(\"public_true_false\")\n password = request.POST.get(\"password\")\n\n input_data = {\n \"Organization Description\" : org_description,\n \"Public\" : True,\n \"Password\" : password,\n \"Delegators\" : [username],\n \"Members\" : [],\n \"Blocked Members\" : [],\n \"Organization Name\" : organization,\n \"org_events\" : []\n }\n\n id = organization_info_collection.insert({organization : input_data})\n user_organizations = user_info_collection.find_one({\"username\": username})[\"organizations\"]\n user_organizations.update({str(id) : {\"org_name\" : organization, \"org_description\" : org_description, \"org_events\" : []}})\n user_info_collection.update({\"username\": username},\n {\"$set\": {\"organizations\": user_organizations}})\n\n user_delegator_list = user_info_collection.find_one({\"username\": username})[\"delegator_list\"]\n user_delegator_list.update({str(id) : organization})\n\n user_info_collection.update({\"username\": username},\n {\"$set\": {\"delegator_list\": user_delegator_list}})\n\n return JsonResponse({\"newOrgHash\" : str(id)})\n return JsonResponse({\"NULL\" : \"NULL\"})\n\n@csrf_exempt\n@api_view(['POST'])\ndef createEvent(request): #pass in {organization_id : 132423adf, title : title, allDay : true/false, start: Morning, end: Night, dec: \"342\"}\n if(request.POST):\n username = request.user.username\n #username = request.POST.get(\"username\")\n organization_id = request.POST.get(\"organization_id\")\n title = request.POST.get(\"title\")\n allDay = request.POST.get(\"allDay\")\n start = request.POST.get(\"start\")\n end = request.POST.get(\"end\")\n desc = request.POST.get(\"desc\")\n\n org_name = list(organization_info_collection.find_one({'_id': ObjectId(str(organization_id))})) [1] #get name of Club, ACM etc\n\n\n input_data = {\n 
\"id\": 0 if(len(organization_info_collection.find_one({'_id': ObjectId(str(organization_id))})[org_name][\"org_events\"]) == 0) else organization_info_collection.find_one({'_id': ObjectId(str(organization_id))})[org_name][\"org_events\"][-1][\"id\"] + 1,\n \"title\": title,\n \"allDay\": (allDay == 'true'),\n \"start\": start,\n \"end\": end,\n \"desc\": desc\n }\n\n\n if(username in organization_info_collection.find_one({'_id': ObjectId(str(organization_id))})[org_name][\"Delegators\"]): #check if user id is in delegators and can create events\n organization_info_collection.update({\"_id\": ObjectId(str(organization_id))},\n {\"$push\": {org_name + \".org_events\": input_data}})\n\n\n for delegator in organization_info_collection.find_one({'_id': ObjectId(str(organization_id))})[org_name][\"Delegators\"]:\n\n user_info_collection.update({\"username\": delegator},\n {\"$push\": {\"organizations.\" + str(organization_id) + \".org_events\" : input_data}})\n\n for member in organization_info_collection.find_one({'_id': ObjectId(str(organization_id))})[org_name][\"Members\"]:\n\n user_info_collection.update({\"username\": member},\n {\"$push\": {\"organizations.\" + str(organization_id) + \".org_events\": input_data}})\n\n\n responseData = user_info_collection.find_one({\"username\": username})\n del responseData[\"_id\"]\n return JsonResponse(responseData)\n else:\n return JsonResponse({\"RETURN\" : \"NULL\"})\n return JsonResponse({\"RETURN\" : \"NULL\"})\n\n\n@csrf_exempt\n@api_view(['POST'])\ndef blockUsers(request): #pass in {organization_id : 123adf, usernames: google-auth-api-1, google-auth-api-2, etc...}\n if(request.POST):\n username = request.user.username\n #username = request.POST.get(\"username\")\n organization_id = request.POST.get(\"organization_id\")\n org_name = list(organization_info_collection.find_one({'_id': ObjectId(str(organization_id))})) [1] #get name of Club, ACM etc\n delegator_list = organization_info_collection.find_one({'_id': ObjectId(str(organization_id))})[org_name][\"Delegators\"]\n member_list = organization_info_collection.find_one({'_id': ObjectId(str(organization_id))})[org_name][\"Members\"]\n if(username in delegator_list):\n for blocked_member in (list(request.POST.get(\"usernames\").split(\", \"))):\n organization_info_collection.update({\"_id\": ObjectId(str(organization_id))},\n {\"$pull\": {org_name + \".Members\": blocked_member}})\n\n organization_info_collection.update({\"_id\": ObjectId(str(organization_id))},\n {\"$push\": {org_name + \".Blocked Members\": blocked_member}})\n\n user_organizations = user_info_collection.find_one({\"username\": blocked_member})[\"organizations\"]\n if(organization_id in user_organizations):\n del user_organizations[organization_id] #delete organization user passes in\n\n user_info_collection.update({\"username\": blocked_member}, #update organizations\n {\"$set\": {\"organizations\": user_organizations}})\n\n return JsonResponse({\"Successful\" : \"Blocked Users\"})\n else:\n return JsonResponse({\"Error\" : \"User is not authorized to block other users\"})\n\n return JsonResponse({\"NULL\" : \"NULL\"})\n\n\n@csrf_exempt\n@api_view(['GET'])\ndef getOrgInfo(request): #pass in organization_id\n organization_id = request.GET.get(\"organization_id\")\n org_name = list(organization_info_collection.find_one({'_id': ObjectId(str(organization_id))})) [1] #get name of Club, ACM etc\n\n return JsonResponse(organization_info_collection.find_one({'_id': 
ObjectId(str(organization_id))})[org_name])\n\n@csrf_exempt\n@api_view(['POST'])\ndef addDelegator(request): #pass in {organization_id : 123adf, member_to_add: google-auth-api-1}\n if(request.POST):\n username = request.user.username\n #username = request.POST.get(\"username\")\n member_to_add = request.POST.get(\"member_to_add\")\n organization_id = request.POST.get(\"organization_id\")\n org_name = list(organization_info_collection.find_one({'_id': ObjectId(str(organization_id))})) [1] #get name of Club, ACM etc\n delegator_list = organization_info_collection.find_one({'_id': ObjectId(str(organization_id))})[org_name][\"Delegators\"]\n member_list = organization_info_collection.find_one({'_id': ObjectId(str(organization_id))})[org_name][\"Members\"]\n\n if(username in delegator_list and member_to_add not in delegator_list):\n organization_info_collection.update({\"_id\": ObjectId(str(organization_id))},\n {\"$pull\": {org_name + \".Members\": member_to_add}})\n\n organization_info_collection.update({\"_id\": ObjectId(str(organization_id))},\n {\"$push\": {org_name + \".Delegators\": member_to_add}})\n\n user_delegator_list = user_info_collection.find_one({\"username\": member_to_add})[\"delegator_list\"]\n user_delegator_list.update({str(organization_id) : org_name})\n\n user_info_collection.update({\"username\": member_to_add},\n {\"$set\": {\"delegator_list\": user_delegator_list}})\n\n return JsonResponse({\"Successful\" : \"Able to add member as Delegator\"})\n return JsonResponse({\"Error\" : \"User is not allowed to add delegator or member_to_add is already a delegator\"})\n return JsonResponse({\"NULL\" : \"NULL\"})\n\n\n@csrf_exempt\n@api_view(['GET'])\ndef getListOfOrganizations(request): #don't need to pass anything\n return_dict = {}\n for org in organization_info_collection.find({}):\n org_name = list(org)[1]\n return_dict[str(org[\"_id\"])] = org_name\n\n return JsonResponse(return_dict)\n\n\n@csrf_exempt\n@api_view(['GET'])\ndef getDictionaryOfMembers(request): #pass in (organization_id : 13daflkj32)\n organization_id = request.GET.get(\"organization_id\")\n org_name = list(organization_info_collection.find_one({'_id': ObjectId(str(organization_id))})) [1]\n member_list = organization_info_collection.find_one({'_id': ObjectId(str(organization_id))})[org_name][\"Members\"]\n delegator_list = organization_info_collection.find_one({'_id': ObjectId(str(organization_id))})[org_name][\"Delegators\"]\n\n return_dict = {}\n member_dict = {}\n delegator_dict = {}\n\n for username in member_list:\n member_dict[username] = user_info_collection.find_one({\"username\": username})[\"name\"]\n\n for username in delegator_list:\n delegator_dict[username] = user_info_collection.find_one({\"username\": username})[\"name\"]\n\n return_dict[\"delegators\"] = delegator_dict\n return_dict[\"members\"] = member_dict\n return JsonResponse(return_dict)\n", "id": "2767093", "language": "Python", "matching_score": 5.540398597717285, "max_stars_count": 0, "path": "backend/views.py" }, { "content": "from django.test import TestCase\r\nimport unittest\r\nfrom django.test import Client\r\nfrom django.test import TestCase\r\nimport os\r\nfrom pymongo import MongoClient\r\n\r\n#print(\"mongodb+srv://VirajDhillon:<EMAIL>/<dbname>?retryWrites=true&w=majority\")\r\n\r\ncluster = MongoClient(os.environ.get(\"MONGODB_URL\"))\r\ndb = cluster[\"Login\"]\r\ncollection = db[\"Events\"]\r\n\r\nc = Client()\r\n\r\n\r\n\r\n\r\nclass BackendTesting(TestCase):\r\n def test_getCalendarInfo_user_exists(self):\r\n 
print(\"TEST 1: Testing api/getCalendarInfo with existing user\")\r\n response = c.post(\"/api/getCalendarInfo\", {\"username\": \"Vish\"})\r\n self.assertEqual(\"Vish\", response.json()[\"username\"], \"Username returned is not Vish\")\r\n self.assertEqual([{'Chess Club': [{'Id': 1, 'Subject': 'General Meeting', 'Location': 'UCR', 'StartTime': '2020-11-05T02:00:00.000Z', 'EndTime': '2019-11-05T03:30:00.000Z', 'CategoryColor': '#1aaa55'}, {'Id': 2, 'Subject': 'Tournament', 'Location': 'UCR', 'StartTime': '2020-12-05T05:30:00.000Z', 'EndTime': '2019-12-05T09:30:00.000Z', 'CategoryColor': '#357cd2'}]}],\r\n response.json()[\"organizations\"], \"Information is not correct\")\r\n\r\n def test_getCalendarInfo_no_user_exists(self):\r\n print(\"TEST 2: Testing api/getCalendarInfo with empty user\")\r\n response = c.post(\"/api/getCalendarInfo\", {\"username\": \"Temp Person\"})\r\n self.assertEqual(\"Temp Person\", response.json()[\"username\"], \"Usernames are not correct\")\r\n self.assertEqual([], response.json()[\"organizations\"], \"Not returning empty JSON\")\r\n collection.delete_one({\"username\": \"Temp Person\"})\r\n\r\n def test_addOrganization(self):\r\n print(\".TEST 3: Testing api/addOrganization with example Chess Club\")\r\n c.post(\"/api/getCalendarInfo\", {\"username\": \"Temp Person\"})\r\n response = c.post(\"/api/addOrganization\", {\"username\": \"Temp Person\", \"organizations\" : \"Chess Club\"})\r\n self.assertEqual(\"Temp Person\", response.json()[\"username\"], \"Usernames are not correct\")\r\n self.assertEqual([{'Chess Club': [{'Id': 5, 'Subject': 'General Meeting', 'Location': 'UCR', 'StartTime': '2020-11-05T02:00:00.000Z', 'EndTime': '2020-11-05T03:30:00.000Z', 'CategoryColor': '#1aaa55'}, {'Id': 6, 'Subject': 'Tournament', 'Location': 'UCR', 'StartTime': '2020-12-05T05:30:00.000Z', 'EndTime': '2020-12-05T09:30:00.000Z', 'CategoryColor': '#357cd2'}]}],\r\n response.json()[\"organizations\"], \"Not returning proper Chess Club JSON\")\r\n collection.delete_one({\"username\": \"Temp Person\"})\r\n", "id": "11741338", "language": "Python", "matching_score": 2.142214059829712, "max_stars_count": 0, "path": "tests/test_backend.py" }, { "content": "from django.test import TestCase\n\nimport json\n\nfrom django.contrib.auth.models import User\nfrom django.urls import reverse\nfrom rest_framework import status\nfrom rest_framework.authtoken.models import Token\nfrom rest_framework.test import APITestCase\n\nfrom api.api import UserSerializer\nfrom account.models import User\n\n# class UserRegistrationTestCase(APITestCase):\n# def test_user_registration(self):\n# data = {}\n# response = self.client.post(\"api/user/\", data)\n# self.assertEqual(response.status_code, status.HTTP_201_CREATED)\n# Create your tests here.\n", "id": "1991322", "language": "Python", "matching_score": 2.33406400680542, "max_stars_count": 0, "path": "backend/api/tests.py" }, { "content": "from django.test import TestCase\nfrom django.urls import reverse, resolve\nfrom account.models import User, Student, Faculty, Job\nfrom account.views import LoginView\n\n\nclass StudentTest(TestCase):\n def test_home_url(self):\n url = reverse('login')\n self.asserEquals(resolve(url).func, LoginView)\n ", "id": "4362913", "language": "Python", "matching_score": 2.3819026947021484, "max_stars_count": 0, "path": "backend/account/tests/test_urls.py" }, { "content": "from django.test import TestCase\nfrom account.models import User, Student, Faculty, Job\n# Create your tests here.\nclass StudentTest(TestCase):\n check_id 
= 1\n def setUp(self):\n self.s = Student.objects.create(major=\"Auto Test\", GPA=4.01, profile_completeness=101)\n self.s.save() \n def test_fields(self):\n self.assertEqual(self.s.major, \"Auto Test\")\n self.assertEqual(self.s.GPA, 4.01)\n self.assertEqual(self.s.profile_completeness, 101)\n\n", "id": "7021575", "language": "Python", "matching_score": 1.167214035987854, "max_stars_count": 0, "path": "backend/account/tests/test_models.py" }, { "content": "\n# Generated by Django 2.2.13 on 2021-03-10 21:33\n\n\nimport account.models\nimport datetime\nfrom django.conf import settings\nimport django.contrib.auth.models\nimport django.contrib.auth.validators\nfrom django.db import migrations, models\nimport django.db.models.deletion\nimport django.utils.timezone\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n ('auth', '0011_update_proxy_permissions'),\n ]\n\n operations = [\n migrations.CreateModel(\n\n name='User',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('password', models.CharField(max_length=128, verbose_name='password')),\n ('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),\n ('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),\n ('username', models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.UnicodeUsernameValidator()], verbose_name='username')),\n ('first_name', models.CharField(blank=True, max_length=30, verbose_name='first name')),\n ('last_name', models.CharField(blank=True, max_length=150, verbose_name='last name')),\n ('email', models.EmailField(blank=True, max_length=254, verbose_name='email address')),\n ('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')),\n ('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')),\n ('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')),\n ('is_student', models.BooleanField(default=False, verbose_name=account.models.Student)),\n ('is_faculty', models.BooleanField(default=False, verbose_name=account.models.Faculty)),\n ('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. 
A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),\n ('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),\n ],\n options={\n 'verbose_name': 'user',\n 'verbose_name_plural': 'users',\n 'abstract': False,\n },\n managers=[\n ('objects', django.contrib.auth.models.UserManager()),\n\n ],\n ),\n migrations.CreateModel(\n name='Course',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('name', models.CharField(max_length=50)),\n ('description', models.CharField(max_length=150)),\n ('abbrev', models.CharField(max_length=50)),\n ('grade', models.CharField(blank=True, default='', max_length=3, null=True)),\n ],\n ),\n migrations.CreateModel(\n name='Faculty',\n fields=[\n\n ('department', models.CharField(default='', max_length=50)),\n ('profile_completeness', models.IntegerField(default=0)),\n ('user', models.OneToOneField(default=0, on_delete=django.db.models.deletion.CASCADE, primary_key=True, serialize=False, to=settings.AUTH_USER_MODEL)),\n ],\n ),\n migrations.CreateModel(\n name='Student',\n fields=[\n ('major', models.CharField(default='', max_length=50)),\n ('GPA', models.FloatField(blank=True, default=0, null=True)),\n ('profile_completeness', models.IntegerField(default=0)),\n ('resume_pdf', models.FileField(blank=True, null=True, upload_to='pdf')),\n ('transcript', models.FileField(blank=True, null=True, upload_to='pdf')),\n ('user', models.OneToOneField(default=0, on_delete=django.db.models.deletion.CASCADE, primary_key=True, serialize=False, to=settings.AUTH_USER_MODEL)),\n\n ],\n ),\n migrations.CreateModel(\n name='Job',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('description', models.CharField(max_length=150)),\n\n ('posted_date', models.DateField(verbose_name=datetime.date(2021, 3, 10))),\n\n ('hourly_salary', models.FloatField(blank=True, default=10, max_length=10)),\n ('hours_per_week', models.IntegerField(default=10)),\n ('course_req', models.ManyToManyField(blank=True, default=0, to='account.Course')),\n ],\n ),\n migrations.CreateModel(\n name='Comment',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n\n ('body', models.CharField(max_length=1500)),\n ('course', models.ManyToManyField(blank=True, default=0, to='account.Course')),\n ],\n ),\n migrations.CreateModel(\n name='Application',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('application_date', models.DateField(verbose_name=datetime.date(2021, 3, 10))),\n ('applicant_score', models.IntegerField(default=0)),\n ('job', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='account.Job')),\n ],\n\n ),\n migrations.CreateModel(\n name='StudentCourse',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n\n ('grade', models.CharField(default='', max_length=50)),\n ('course', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='account.Course')),\n ('student', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='account.Student')),\n\n ],\n ),\n migrations.AddField(\n model_name='student',\n 
name='applications',\n field=models.ManyToManyField(blank=True, default=0, through='account.Application', to='account.Job'),\n ),\n migrations.AddField(\n model_name='student',\n name='comments_recv',\n field=models.ManyToManyField(blank=True, default=0, to='account.Comment'),\n ),\n migrations.AddField(\n model_name='student',\n name='course_taken',\n field=models.ManyToManyField(blank=True, default=0, through='account.StudentCourse', to='account.Course'),\n ),\n migrations.AddField(\n model_name='job',\n name='applications',\n field=models.ManyToManyField(blank=True, default=0, through='account.Application', to='account.Student'),\n ),\n migrations.AddField(\n model_name='job',\n name='poster',\n field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='account.Faculty'),\n ),\n migrations.AddField(\n model_name='faculty',\n name='comments_made',\n field=models.ManyToManyField(blank=True, default=0, to='account.Comment'),\n ),\n migrations.AddField(\n model_name='faculty',\n name='courses_taught',\n field=models.ManyToManyField(blank=True, default=0, to='account.Course'),\n ),\n migrations.AddField(\n model_name='course',\n name='students',\n field=models.ManyToManyField(blank=True, default=0, through='account.StudentCourse', to='account.Student'),\n ),\n migrations.AddField(\n model_name='comment',\n name='commenter',\n field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='account.Faculty'),\n ),\n migrations.AddField(\n model_name='application',\n name='student',\n field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='account.Student'),\n ),\n ]\n", "id": "12866615", "language": "Python", "matching_score": 6.087784290313721, "max_stars_count": 0, "path": "backend/account/migrations/0001_initial.py" }, { "content": "from django.db import models\nfrom django.contrib.auth.models import AbstractUser\nfrom django.core.validators import MinLengthValidator as Min\nfrom django.contrib.postgres.fields import ArrayField\nfrom datetime import date\n\nclass Course(models.Model):\n name = models.CharField(max_length=50)\n description = models.CharField(max_length=150)\n abbrev = models.CharField(max_length=50)\n grade = models.CharField(max_length=3, default=\"\", blank=True, null=True)\n\n students = models.ManyToManyField('Student', default=0, blank=True, through='StudentCourse')\n\n \n def __repr__(self):\n return \"{0} - {1} - {2}\".format(self.id, self.name, self.description)\n\nclass Comment(models.Model):\n body = models.CharField(max_length=1500)\n commenter = models.ForeignKey('Faculty', on_delete=models.CASCADE, null=True)\n course = models.ManyToManyField('Course', default=0, blank=True)\n\n def __repr__(self):\n return \"{0} - {1}\".format(self.id, self.body)\n\n\n\n\n\nclass Job(models.Model):\n description = models.CharField(max_length=150)\n poster = models.ForeignKey('Faculty', on_delete=models.CASCADE, null=True)\n posted_date = models.DateField(default=date.today) # callable default, evaluated on each save\n hourly_salary = models.FloatField(max_length=10, default=10, blank=True)\n hours_per_week = models.IntegerField(default=10)\n course_req = models.ManyToManyField(Course, default=0, blank=True)\n\n applications = models.ManyToManyField('Student', default=0, blank=True, through='Application')\n\n\n def __repr__(self):\n return \"{0} - {1}\".format(self.id, self.description)\n\nclass Application(models.Model):\n job = models.ForeignKey('job', on_delete=models.CASCADE)\n student = models.ForeignKey('Student', on_delete=models.CASCADE)\n 
application_date = models.DateField(default=date.today)  # same fix as Job.posted_date\n    applicant_score = models.IntegerField(default=0)\n    \n    \nclass StudentCourse(models.Model):\n    student = models.ForeignKey('Student', on_delete=models.CASCADE)\n    course = models.ForeignKey('Course', on_delete=models.CASCADE)\n    grade = models.CharField(max_length=50, default=\"\")\n\n\nclass Student(models.Model):\n    major = models.CharField(max_length=50, default=\"\")\n    GPA = models.FloatField(default=0, blank=True, null=True)\n    # courses = ArrayField(models.CharField(max_length=50, blank=True))\n    # applied_positions = ArrayField(models.CharField(max_length=50, blank=True))\n    profile_completeness = models.IntegerField(default=0)\n    # taken_class = models.ManyToManyField(Course)\n\n    applications = models.ManyToManyField('Job', default=0, blank=True, through='Application')\n    course_taken = models.ManyToManyField('Course', default=0, blank=True, through='StudentCourse')\n    resume_pdf = models.FileField(upload_to='pdf', null=True, blank=True)\n    transcript = models.FileField(upload_to='pdf', null=True, blank=True)\n    comments_recv = models.ManyToManyField('Comment', default=0, blank=True)\n    user = models.OneToOneField('User', related_name='student', on_delete=models.CASCADE, primary_key=True, default=0)\n\n    def __str__(self):\n        return \"{0}\".format(self.user.username)\n\n\n    def __repr__(self):\n        return \"{0} - {1} - {2}\".format(self.pk, self.major, self.GPA)  # pk is the user FK; there is no 'id' field on this model\n\nclass Faculty(models.Model):\n    department = models.CharField(max_length=50, default=\"\")\n    profile_completeness = models.IntegerField(default=0)\n\n    courses_taught = models.ManyToManyField(Course, default=0, blank=True)\n    comments_made = models.ManyToManyField('Comment', default=0, blank=True)\n    user = models.OneToOneField('User', related_name='faculty', on_delete=models.CASCADE, primary_key=True, default=0)\n\n\n    def __repr__(self):\n        return \"{0} - {1}\".format(self.pk, self.department)\n\n    def __str__(self):\n        return \"{0}\".format(self.user.username)\n\nclass User(AbstractUser):\n    # User Login Information\n\n    is_student = models.BooleanField(default=False)\n    is_faculty = models.BooleanField(default=False)\n\n    def __repr__(self):\n        return \"{0} - {1}\".format(self.id, self.email)\n\n\n\n# class Student(models.Model):\n#     user = models.OneToOneField(User, on_delete=models.CASCADE, primary_key=True, related_name='student_profile')\n#     # user.is_student\n#     # major = models.CharField(max_length=50)\n\n#     # def __repr__(self):\n#     #     return \"{0} - {1}\".format(self.name, self.email)\n\n# class Faculty(models.Model):\n#     user = models.OneToOneField(User, on_delete=models.CASCADE, primary_key=True, related_name='faculty_profile')\n#     # department = models.CharField(max_length=50)\n    \n#     # def __repr__(self):\n#     #     return \"{0} - {1}\".format(self.name, self.email)", "id": "6163821", "language": "Python", "matching_score": 2.534719467163086, "max_stars_count": 0, "path": "backend/account/models.py" }, { "content": "# Generated by Django 2.2.13 on 2021-03-11 02:49\n\nimport datetime\nfrom django.conf import settings\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n    dependencies = [\n        ('account', '0001_initial'),\n    ]\n\n    operations = [\n        migrations.AlterField(\n            model_name='application',\n            name='application_date',\n            field=models.DateField(verbose_name=datetime.date(2021, 3, 11)),\n        ),\n        migrations.AlterField(\n            model_name='faculty',\n            name='user',\n            
field=models.OneToOneField(default=0, on_delete=django.db.models.deletion.CASCADE, primary_key=True, related_name='faculty', serialize=False, to=settings.AUTH_USER_MODEL),\n ),\n migrations.AlterField(\n model_name='job',\n name='posted_date',\n field=models.DateField(verbose_name=datetime.date(2021, 3, 11)),\n ),\n migrations.AlterField(\n model_name='student',\n name='user',\n field=models.OneToOneField(default=0, on_delete=django.db.models.deletion.CASCADE, primary_key=True, related_name='student', serialize=False, to=settings.AUTH_USER_MODEL),\n ),\n migrations.AlterField(\n model_name='user',\n name='is_faculty',\n field=models.BooleanField(default=False),\n ),\n migrations.AlterField(\n model_name='user',\n name='is_student',\n field=models.BooleanField(default=False),\n ),\n ]\n", "id": "2890394", "language": "Python", "matching_score": 4.066762924194336, "max_stars_count": 0, "path": "backend/account/migrations/0002_auto_20210311_0249.py" }, { "content": "# Generated by Django 2.2.13 on 2021-03-10 00:28\n\nimport datetime\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('account', '0001_initial'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='job',\n name='posted_date',\n field=models.DateField(verbose_name=datetime.date(2021, 3, 10)),\n ),\n ]\n", "id": "399525", "language": "Python", "matching_score": 0.07042720913887024, "max_stars_count": 0, "path": "backend/account/migrations/0002_auto_20210310_0028.py" }, { "content": "from account.models import User, Job, Student, Faculty, Course, Comment\r\nfrom rest_framework import viewsets, permissions\r\nfrom .serializers import UserSerializer, JobSerializer, StudentSerializer, FacultySerializer, CourseSerializer, CommentSerializer\r\n\r\n# Lead Viewset\r\n\r\n\r\nclass JobViewSet(viewsets.ModelViewSet):\r\n queryset = Job.objects.all()\r\n permission_classes = [\r\n permissions.AllowAny\r\n ]\r\n serializer_class = JobSerializer\r\n \r\nclass StudentViewSet(viewsets.ModelViewSet):\r\n queryset = Student.objects.all()\r\n permission_classes = [\r\n permissions.AllowAny\r\n ]\r\n serializer_class = StudentSerializer\r\n\r\nclass FacultyViewSet(viewsets.ModelViewSet):\r\n queryset = Faculty.objects.all()\r\n permission_classes = [\r\n permissions.AllowAny\r\n ]\r\n serializer_class = FacultySerializer\r\n \r\nclass CourseViewSet(viewsets.ModelViewSet):\r\n queryset = Course.objects.all()\r\n permission_classes = [\r\n permissions.AllowAny\r\n ]\r\n serializer_class = CourseSerializer\r\n\r\nclass CommentViewSet(viewsets.ModelViewSet):\r\n queryset = Comment.objects.all()\r\n permission_classes = [\r\n permissions.AllowAny\r\n ]\r\n serializer_class = CommentSerializer\r\n", "id": "11210261", "language": "Python", "matching_score": 3.8176629543304443, "max_stars_count": 0, "path": "backend/api/api.py" }, { "content": "from django.shortcuts import render\n\nfrom account.models import User, Student, Faculty, Job, Course, Comment, Application, StudentCourse\nfrom .serializers import UserSerializer, UserSerializerWithToken, StudentSerializer, FacultySerializer, \\\n JobSerializer, CourseSerializer, CommentSerializer, StudentCourseSerializer, ApplicationSerializer\nfrom rest_framework import generics, status, viewsets, permissions, filters\n\nfrom django.http import HttpResponseRedirect\nfrom django.contrib.auth.models import User as AUser\nfrom rest_framework.decorators import api_view\nfrom rest_framework.response import Response\nfrom rest_framework.views import 
APIView\n\n\n\nfrom . import permissions as perm\n\n\n# # Create your views here.\n# class UserList(generics.ListCreateAPIView):\n# queryset = User.objects.all()\n# serializer_class = UserSerializer\n\n\n@api_view(['GET'])\ndef current_user(request):\n \"\"\"\n Determine the current user by their token, and return their data\n \"\"\"\n\n serializer = UserSerializer(request.user)\n return Response(serializer.data)\n\nclass UserRegisterViewSet(viewsets.ModelViewSet):\n queryset = User.objects.all()\n permission_classes = (perm.UpdateOwnProfile,)\n # permission_classes = [\n # permissions.AllowAny\n # ]\n\n serializer_class = UserSerializer\n\nclass UserViewSet(viewsets.ModelViewSet):\n queryset = User.objects.all()\n serializer_class = UserSerializerWithToken\n filter_backends = (filters.SearchFilter,)\n search_fields = ('first_name', 'last_name', 'email')\n\n # def create(self, request, *args, **kwargs):\n # user_data = request.data\n\n # # ('id', 'token', 'first_name', 'last_name', 'username', 'password', 'email', 'is_student', 'is_faculty')\n # # new_user = User.objects.create(\n \n # # )\n # if (user_data['is_student']):\n # new_student = Student(user=new_user)\n\n\n # def perform_create(self,serializer):\n # serializer.save(user=self.request.user)\n\n# class UserList(APIView):\n# \"\"\"\n# Create a new user. It's called 'UserList' because normally we'd have a get\n# method here too, for retrieving a list of all User objects.\n# \"\"\"\n\n# permission_classes = (permissions.AllowAny,)\n\n# def post(self, request, format=None):\n# serializer = UserSerializerWithToken(data=request.data)\n# if serializer.is_valid():\n# serializer.save()\n# return Response(serializer.data, status=status.HTTP_201_CREATED)\n# return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)\n# def get(self, request, format=None):\n# data = {\"hi\" : \"hello\"}\n# return Response(data, status=status.HTTP_201_CREATED)\n \n\n\n\nclass StudentViewSet(viewsets.ModelViewSet):\n queryset = Student.objects.all()\n\n serializer_class = StudentSerializer\n\n\nclass FacultyViewSet(viewsets.ModelViewSet):\n queryset = Faculty.objects.all()\n serializer_class = FacultySerializer\n\n\nclass JobViewSet(viewsets.ModelViewSet):\n queryset = Job.objects.all()\n serializer_class = JobSerializer\n\n\nclass CourseViewSet(viewsets.ModelViewSet):\n queryset = Course.objects.all()\n serializer_class = CourseSerializer\n\n\nclass CommentViewSet(viewsets.ModelViewSet):\n queryset = Comment.objects.all()\n serializer_class = CommentSerializer\n\nclass ApplicationViewSet(viewsets.ModelViewSet):\n queryset = Application.objects.all()\n serializer_class = ApplicationSerializer\n filter_backends = (filters.SearchFilter,)\n search_fields = ('student__user__first_name', 'student__user__last_name', 'student__user__email')\n\nclass StudentCourseViewSet(viewsets.ModelViewSet):\n queryset = StudentCourse.objects.all()\n serializer_class = StudentCourseSerializer\n\n", "id": "9594850", "language": "Python", "matching_score": 4.629247188568115, "max_stars_count": 0, "path": "backend/api/views.py" }, { "content": "from django.urls import path\r\nfrom django.conf.urls import url, include\r\nfrom .views import current_user, UserViewSet, StudentViewSet, FacultyViewSet, \\\r\n JobViewSet, CourseViewSet, CommentViewSet, UserRegisterViewSet, ApplicationViewSet, StudentCourseViewSet\r\nfrom rest_framework_jwt.views import obtain_jwt_token\r\n\r\nfrom rest_framework.routers import DefaultRouter\r\n\r\n\r\nrouter = 
DefaultRouter()\r\n\r\nrouter.register('user', UserViewSet)\r\nrouter.register('student', StudentViewSet)\r\nrouter.register('faculty', FacultyViewSet)\r\nrouter.register('job', JobViewSet)\r\nrouter.register('course', CourseViewSet)\r\nrouter.register('comment', CommentViewSet)\r\nrouter.register('registration', UserRegisterViewSet)\r\n\r\nrouter.register('application', ApplicationViewSet)\r\nrouter.register('studentCourse', StudentCourseViewSet)\r\n\r\n\r\napp_name = 'api'\r\n\r\nurlpatterns = [\r\n    # path('user/', UserList.as_view(), name='api_user'),\r\n    path('current_user/', current_user),\r\n    path('token-auth/', obtain_jwt_token),\r\n    url(r'', include(router.urls))\r\n\r\n]\r\n", "id": "6980355", "language": "Python", "matching_score": 0.5583111047744751, "max_stars_count": 0, "path": "backend/api/urls.py" }, { "content": "from rest_framework import serializers\nfrom rest_framework_jwt.settings import api_settings\nfrom account.models import User, Student, Faculty, Job, Course, Comment, Application, StudentCourse\n\n# User Serializer\n\n\nclass UserSerializer(serializers.ModelSerializer):\n\n    password = serializers.CharField(write_only=True)\n    student = serializers.PrimaryKeyRelatedField(many=False, read_only=True)\n    faculty = serializers.PrimaryKeyRelatedField(many=False, read_only=True)\n    \n    def create(self, validated_data):\n        password = validated_data.pop('password', None)\n        instance = self.Meta.model(**validated_data)\n        \n\n        if password is not None:\n            instance.set_password(password)\n        instance.save()\n\n        if instance.is_student:\n            stud = Student(user=instance)\n            stud.save()\n\n        if instance.is_faculty:\n            fac = Faculty(user=instance)\n            fac.save()\n\n        return instance\n\n    class Meta:\n        model = User\n        fields = ('password', 'student', 'faculty')\n        extra_kwargs = {'password' : {'write_only' : True}}\n\n\n\nclass UserSerializerWithToken(serializers.ModelSerializer):\n\n    token = serializers.SerializerMethodField()\n    password = serializers.CharField(write_only=True)\n    student = serializers.PrimaryKeyRelatedField(many=False, read_only=True)\n    faculty = serializers.PrimaryKeyRelatedField(many=False, read_only=True)\n\n    def get_token(self, obj):\n        jwt_payload_handler = api_settings.JWT_PAYLOAD_HANDLER\n        jwt_encode_handler = api_settings.JWT_ENCODE_HANDLER\n\n        payload = jwt_payload_handler(obj)\n        token = jwt_encode_handler(payload)\n        return token\n\n    def create(self, validated_data):\n        password = validated_data.pop('password', None)\n        instance = self.Meta.model(**validated_data)\n        \n\n        if password is not None:\n            instance.set_password(password)\n        instance.save()\n\n        if instance.is_student:\n            stud = Student(user=instance)\n            stud.save()\n\n        if instance.is_faculty:\n            fac = Faculty(user=instance)\n            fac.save()\n        \n        return instance\n\n    class Meta:\n        model = User\n        fields = '__all__'\n        extra_kwargs = {'password' : {'write_only' : True}}\n\n\nclass StudentSerializer(serializers.ModelSerializer):\n    class Meta:\n        model = Student\n\n        fields = '__all__'\n\n\n\nclass FacultySerializer(serializers.ModelSerializer):\n    class Meta:\n        model = Faculty\n\n        fields = '__all__'\n\n\n\n# Job Serializer\nclass JobSerializer(serializers.ModelSerializer):\n    class Meta:\n        model = Job\n\n        fields = '__all__'\n\n\nclass CourseSerializer(serializers.ModelSerializer):\n    class Meta:\n        model = Course\n\n        fields = '__all__'\n\n    \nclass CommentSerializer(serializers.ModelSerializer):\n    class Meta:\n        model = Comment\n\n        fields = '__all__'\n\nclass ApplicationSerializer(serializers.ModelSerializer):\n    class Meta:\n        model = Application\n        fields = 
'__all__'\n\nclass StudentCourseSerializer(serializers.ModelSerializer):\n    class Meta:\n        model = StudentCourse\n        fields = '__all__'\n", "id": "7180382", "language": "Python", "matching_score": 1.2511526346206665, "max_stars_count": 0, "path": "backend/api/serializers.py" }, { "content": "from django import forms\r\nfrom django.contrib.auth.forms import UserCreationForm, AuthenticationForm, PasswordResetForm, SetPasswordForm\r\n# from .models import User, Student, Faculty\r\nfrom .models import User, Student, Faculty\r\n\r\nclass UserForm(UserCreationForm):\r\n    class Meta:\r\n        model = User\r\n        # model.is_student = True\r\n        fields = ('username', 'email', 'password1', 'password2')\r\n        # model.save()\r\n\r\n    # def save(self):\r\n    #     user = super().save(commit=False)\r\n    #     user.is_student = True\r\n    #     user.save()\r\n    #     student = Student.objects.create(user=user)\r\n    #     # student.interests.add(*self.cleaned_data.get('interests'))\r\n    #     return user\r\n\r\n    def __init__(self, *args, **kwargs):\r\n        super(UserForm, self).__init__(*args, **kwargs)\r\n        self.fields['password1'].help_text = \"Minimum length of 8 characters.\"\r\n        self.fields['password2'].label = \"Password Confirmation\"\r\n        self.fields['password2'].help_text = \"Enter the same password for confirmation.\"\r\n\r\n\r\nclass LoginForm(AuthenticationForm):\r\n\r\n    def __init__(self, *args, **kwargs):\r\n        super(LoginForm, self).__init__(*args, **kwargs)\r\n\r\n    # Note: Customer.username == Customer.email\r\n    username = forms.EmailField(widget=forms.EmailInput(attrs={'class': 'r-finder-text-field w-input'}))\r\n    password = forms.CharField(widget=forms.PasswordInput(attrs={'class': 'r-finder-text-field w-input'}))\r\n\r\n\r\n# class StudentUpdateForm(forms.ModelForm):\r\n#     class Meta:\r\n#         model = Student\r\n#         fields = ('name', 'email', 'major')\r\n\r\n#     def __init__(self, *args, **kwargs):\r\n#         super(StudentUpdateForm, self).__init__(*args, **kwargs)\r\n\r\n\r\n# class FacultyUpdateForm(forms.ModelForm):\r\n#     class Meta:\r\n#         model = User\r\n#         fields = ('name', 'email', 'department')\r\n\r\n#     def __init__(self, *args, **kwargs):\r\n#         super(FacultyUpdateForm, self).__init__(*args, **kwargs)", "id": "8607464", "language": "Python", "matching_score": 1.4072473049163818, "max_stars_count": 0, "path": "backend/account/forms.py" }, { "content": "from django.shortcuts import render, redirect, reverse\nfrom django.contrib.auth import login, logout\nfrom django.http import HttpResponse, JsonResponse\nfrom django.views import View\nfrom django.contrib.auth.mixins import LoginRequiredMixin\nfrom dateutil.relativedelta import relativedelta\nfrom .forms import LoginForm, UserForm as StudentForm  # forms.py defines UserForm; aliased so the registration view below can use it as StudentForm\nfrom .models import User\n# from .forms import LoginForm, StudentUpdateForm, StudentForm\nfrom django.contrib import messages\nfrom django.utils import timezone\nimport logging \nimport os\nimport json\nimport datetime\nimport calendar\nfrom datetime import datetime, date\n\nlogging.basicConfig(level=os.environ.get(\"LOGLEVEL\", \"DEBUG\"))\nlogger = logging.getLogger(__name__)\n\n\nclass LoginView(View):\n    \"\"\"This is how the login page is handled when attempting GET and POST requests\n    \"\"\"\n    template_name = \"account/login.html\"\n\n    # If a user is logged in, they have no need to access the login page, so we redirect them to their dashboard page\n    # Otherwise, if they aren't logged in, access to the login page allows them to do so\n    def get(self, request):\n        if request.user.is_authenticated:\n            # validC = validPayingCustomer(request)\n            # if not validC:\n            #     return 
redirect(reverse('account:payment'))\n return redirect(reverse('account:dashboard'))\n login_form = LoginForm()\n return render(request, self.template_name, {'form': login_form})\n\n # When a user submits the fields on the login page, we want to ensure that the login credentials are correct\n # If they are, we redirect them to their dashboard page\n # If they aren't, we render the login page again, this time with an error message\n def post(self, request):\n login_form = LoginForm(request, data=request.POST)\n if login_form.is_valid():\n login(request, login_form.get_user())\n # validC = validPayingCustomer(request)\n # if not validC:\n # return redirect(reverse('account:payment'))\n # else:\n return redirect(reverse('account:dashboard'))\n messages.error(request, \"Your email or password is incorrect.\")\n return render(request, self.template_name, {'form': login_form})\n\n\nclass RegisterStudentView(View):\n \"\"\"This how the register page is handled when attempting GET and POST requests\n \"\"\"\n template_name = \"account/register.html\"\n model = User\n\n # If a user is logged in, they should not have access to the registration page, so we redirect them to their dashboard\n # If a user is not logged in, they should not have access to the registration page, so we redirect them to the login page\n # If a user is a superuser, they are the ONLY people that should be able to access the registration page, so we render the page and form for them\n def get(self, request):\n student_form = StudentForm()\n if request.user.is_authenticated:\n # is_SuperUser = request.user.is_superuser\n # if is_SuperUser:\n return redirect(reverse('account:dashboard'))\n return render(request, self.template_name, {'form': student_form})\n\n # When a user submits the fields on the login page, we want to ensure that the registration credentials are correct\n # If they are, we redirect them to their dashboard page\n # If they aren't, we render the registration page again, this time with an error message\n def post(self, request):\n student_form = StudentForm(request.POST)\n if student_form.is_valid():\n student_form.instance.username = student_form.instance.email\n student_form.instance.is_student = True\n # customer_form.instance.billing_start_date = self.getBillingStart()\n student_form.save()\n return redirect(reverse('account:login'))\n return render(request, self.template_name, {'form': student_form})\n\n # def getBillingStart(self):\n # today = datetime.today()\n # firstThis = today.replace(day=1)\n\n # firstNext = firstThis + relativedelta(months=+1)\n # return firstNext\n\n\n# class RegisterFacultyView(View):\n# \"\"\"This how the register page is handled when attempting GET and POST requests\n# \"\"\"\n# template_name = \"account/register.html\"\n\n# # If a user is logged in, they should not have access to the registration page, so we redirect them to their dashboard\n# # If a user is not logged in, they should not have access to the registration page, so we redirect them to the login page\n# # If a user is a superuser, they are the ONLY people that should be able to access the registration page, so we render the page and form for them\n# def get(self, request):\n# faculty_form = FacultyForm()\n# if request.user.is_authenticated:\n# # is_SuperUser = request.user.is_superuser\n# # if is_SuperUser:\n# return redirect(reverse('account:dashboard'))\n# return render(request, self.template_name, {'form': faculty_form})\n\n# # When a user submits the fields on the login page, we want to ensure that the 
registration credentials are correct\n# # If they are, we redirect them to their dashboard page\n# # If they aren't, we render the registration page again, this time with an error message\n# def post(self, request):\n# faculty_form = FacultyForm(request.POST)\n# if faculty_form.is_valid():\n# faculty_form.instance.username = faculty_form.instance.email\n# faculty_form.instance.is_faculty = True\n# # customer_form.instance.billing_start_date = self.getBillingStart()\n# faculty_form.save()\n# return redirect(reverse('account:login'))\n# return render(request, self.template_name, {'form': faculty_form})\n\n\nclass DashboardView(View):\n \"\"\"This how the dashboard page is handled when attempting GET requests\n \"\"\"\n template_name = \"account/dashboard.html\"\n\n # If a user is logged in, they should have access to their dashboard page, so we render their dashboard\n # If a user is not logged in, they should not have access to the dashboard page, so we redirect them to the login page\n def get(self, request):\n if request.user.is_authenticated:\n # validC = validPayingCustomer(request)\n # if not validC:\n # return redirect(reverse('account:payment'))\n # print(request.user.billing_start_date)\n return render(request, self.template_name)\n else:\n return redirect(reverse('account:login'))\n\n\nclass JobBoardView(View):\n \"\"\"This how the job board page is handled when attempting GET requests\n \"\"\"\n template_name = \"account/JobBoard.html\"\n\n # If a user is logged in, they should have access to their dashboard page, so we render their dashboard\n # If a user is not logged in, they should not have access to the dashboard page, so we redirect them to the login page\n def get(self, request):\n # if request.user.is_authenticated:\n # # validC = validPayingCustomer(request)\n # # if not validC:\n # # return redirect(reverse('account:payment'))\n # # print(request.user.billing_start_date)\n # return render(request, self.template_name)\n # else:\n # return redirect(reverse('account:login'))\n return render(request, self.template_name)\n\n\nclass SettingsView(View):\n \"\"\"This how the settings page is handled when attempting GET requests\n \"\"\"\n template_name = \"account/settings.html\"\n\n # If a user is logged in, they should have access to their settings page, so we render their account settings\n # If a user is not logged in, they should not have access to the settings page, so we redirect them to the login page\n def get(self, request):\n if request.user.is_authenticated:\n # validC = validPayingCustomer(request)\n # if not validC:\n # return redirect(reverse('account:payment'))\n return render(request, self.template_name)\n return redirect(reverse('account:login'))\n\n\n# class StudentUpdateView(View):\n# \"\"\"This how the update page is handled when attempting GET and POST requests\n# \"\"\"\n# template_name = \"account/update_account.html\"\n\n# # If a user is logged in, they should be able to access the update account page, so we render the update page and its form\n# # Otherwise, if they aren't logged in, they should not have access to the update account page, so we redirect them to the login page\n# def get(self, request):\n# update_form = StudentUpdateForm()\n# if request.user.is_authenticated:\n# # validC = validPayingCustomer(request)\n# # if not validC:\n# # return redirect(reverse('account:payment'))\n# return render(request, self.template_name, {'form': update_form})\n# return redirect(reverse('account:login'))\n \n# # When a user submits the fields on the update 
account page, we want to ensure that the update credentials are correct\n# # If they are, we save the changes and redirect them to their dashboard page\n# # If they aren't, we render the update account page again, this time with an error message\n# def post(self, request):\n# update_form = StudentUpdateForm(request.POST, instance=request.user)\n# if update_form.is_valid():\n# update_form.instance.username = update_form.instance.email\n# update_form.save()\n# return redirect(reverse('account:dashboard'))\n# return render(request, self.template_name, {'form': update_form})\n\n\n# class FacultyUpdateView(View):\n# \"\"\"This how the update page is handled when attempting GET and POST requests\n# \"\"\"\n# template_name = \"account/update_account.html\"\n\n# # If a user is logged in, they should be able to access the update account page, so we render the update page and its form\n# # Otherwise, if they aren't logged in, they should not have access to the update account page, so we redirect them to the login page\n# def get(self, request):\n# update_form = FacultyUpdateForm()\n# if request.user.is_authenticated:\n# # validC = validPayingCustomer(request)\n# # if not validC:\n# # return redirect(reverse('account:payment'))\n# return render(request, self.template_name, {'form': update_form})\n# return redirect(reverse('account:login'))\n \n# # When a user submits the fields on the update account page, we want to ensure that the update credentials are correct\n# # If they are, we save the changes and redirect them to their dashboard page\n# # If they aren't, we render the update account page again, this time with an error message\n# def post(self, request):\n# update_form = StudentUpdateForm(request.POST, instance=request.user)\n# if update_form.is_valid():\n# update_form.instance.username = update_form.instance.email\n# update_form.save()\n# return redirect(reverse('account:dashboard'))\n# return render(request, self.template_name, {'form': update_form})\n\n\nclass DeleteView(View):\n \"\"\"This how the delete page is handled when attempting GET and POST requests\n \"\"\"\n template_name = \"account/delete_account.html\"\n\n # If a user is logged in, they should be able to access the delete account page, so we render the delete page\n # Otherwise, if they aren't logged in, they should not have access to the delete account page, so we redirect them to the login page\n def get(self, request):\n if request.user.is_authenticated:\n return render(request, self.template_name)\n return redirect(reverse('account:login'))\n\n # If a user submits the delete button, sending a delete POST request, the account should be deleted\n def post(self, request):\n u = request.user\n u.delete()\n return redirect(reverse('account:login'))\n\n\nclass IndexView(View):\n \"\"\"This was the placeholder for index.html before replacing the dashboard I created\n \"\"\"\n \n template_name = \"account/index.html\"\n\n def get(self, request):\n if request.user.is_authenticated:\n # validC = validPayingCustomer(request)\n # if not validC:\n # return redirect(reverse('account:payment'))\n return render(request, self.template_name)\n return redirect(reverse('account:login'))\n\n", "id": "1286683", "language": "Python", "matching_score": 2.68857741355896, "max_stars_count": 0, "path": "backend/account/views.py" }, { "content": "from django.urls import path\r\nfrom .views import LoginView, DashboardView, SettingsView, DeleteView, RegisterStudentView\r\n# from .views import LoginView, DashboardView, SettingsView, DeleteView, 
IndexView, RegisterStudentView\r\nfrom django.contrib.auth.views import LogoutView, PasswordResetView, PasswordResetDoneView, PasswordResetCompleteView, PasswordResetConfirmView\r\nfrom django.shortcuts import reverse\r\napp_name = 'account'\r\n\r\nurlpatterns = [\r\n path('', LoginView.as_view(), name='login'),\r\n path('register-student/', RegisterStudentView.as_view(), name='register_student'),\r\n # path('register-faculty/', RegisterFacultyView.as_view(), name='register_faculty'),\r\n path('dashboard/', DashboardView.as_view(), name='dashboard'),\r\n path('logout/', LogoutView.as_view(), name='logout'),\r\n path('password-reset/', PasswordResetView.as_view(success_url='/password-reset-done'), name='password_reset'),\r\n path('password-reset-done/', PasswordResetDoneView.as_view(), name='password_reset_done'),\r\n path('password-reset-confirm/<uidb64>/<token>/', PasswordResetConfirmView.as_view(success_url='/password-reset-complete'), name='password_reset_confirm'),\r\n path('password-reset-complete/', PasswordResetCompleteView.as_view(), name='password_reset_complete'),\r\n path('settings/', SettingsView.as_view(), name='settings'),\r\n # path('update-student/', StudentUpdateView.as_view(), name='update_student'),\r\n # path('update-faculty/', FacultyUpdateView.as_view(), name='update_faculty'),\r\n path('delete/', DeleteView.as_view(), name='delete'),\r\n # path('index/', IndexView.as_view(), name='index')\r\n]\r\n\r\n", "id": "7127638", "language": "Python", "matching_score": 0.909720778465271, "max_stars_count": 0, "path": "backend/account/urls.py" }, { "content": "from django.contrib import admin\n\nfrom .models import User, Course, Student, Faculty, Job, StudentCourse, Application\n\n\n# Register your models here.\nadmin.site.register(User)\nadmin.site.register(Course)\nadmin.site.register(Student)\nadmin.site.register(Faculty)\nadmin.site.register(Job)\n\n\nadmin.site.register(StudentCourse)\nadmin.site.register(Application)\n", "id": "449479", "language": "Python", "matching_score": 0.3541240096092224, "max_stars_count": 0, "path": "backend/account/admin.py" } ]
2.142214
lejzab
[ { "content": "from django.contrib import admin\nfrom .models import School,Student\n\nadmin.site.register(School)\nadmin.site.register(Student)\n", "id": "11434208", "language": "Python", "matching_score": 1.7591359615325928, "max_stars_count": 0, "path": "DJANGO/django_level_6/advcvb/basic_app/admin.py" }, { "content": "from django.contrib import admin\nfrom .models import AccessRecord,Topic,Webpage\n# Register your models here.\n\nadmin.site.register(AccessRecord)\nadmin.site.register(Topic)\nadmin.site.register(Webpage)\n", "id": "8857827", "language": "Python", "matching_score": 1.4131691455841064, "max_stars_count": 0, "path": "PYTHON/first_project/first_app/admin.py" }, { "content": "import os\nos.environ.setdefault('DJANGO_SETTINGS_MODULE', 'first_project.settings')\n\nimport django\ndjango.setup()\n\nimport random\nfrom first_app.models import Webpage,AccessRecord,Topic\nfrom faker import Faker\n\nfakegen = Faker()\n\ntopics = ('Search', 'Games', 'Social', 'Marketplace', 'News')\n\ndef add_topic():\n t = Topic.objects.get_or_create(top_name=random.choice(topics))[0]\n # t.save()\n return t\n\ndef populate(n=5):\n for entry in range(n):\n top = add_topic()\n fake_url = fakegen.url()\n fake_date = fakegen.date()\n fake_name = fakegen.company()\n\n webpage = Webpage.objects.get_or_create(topic=top,name=fake_name,url=fake_url)[0]\n\n access_recored = AccessRecord.objects.get_or_create(name=webpage,date=fake_date)[0]\n\nif __name__ == '__main__':\n populate(20)\n", "id": "736205", "language": "Python", "matching_score": 2.9474294185638428, "max_stars_count": 0, "path": "PYTHON/first_project/populate_first_app.py" }, { "content": "import os\nos.environ.setdefault('DJANGO_SETTINGS_MODULE', 'ProjectTwo.settings')\n\nimport django\ndjango.setup()\n\nfrom AppTwo import models\nfrom faker import Faker\n\nfakegen = Faker()\n\n\ndef populate(n=5):\n for entry in range(n):\n fake_name = fakegen.name().split()[-2:]\n fake_f_name,fake_l_name = fake_name\n fake_email = fakegen.email()\n user = models.User.objects.get_or_create(first_name=fake_f_name, last_name=fake_l_name,email=fake_email)\n\nif __name__ == '__main__':\n populate(20)\n", "id": "11507710", "language": "Python", "matching_score": 2.1384477615356445, "max_stars_count": 0, "path": "PYTHON/ProjectTwo/populate_app.py" }, { "content": "from django.db import models\n\nclass User(models.Model):\n first_name = models.CharField(max_length=200)\n last_name = models.CharField(max_length=200)\n email = models.EmailField()\n\n def __str__(self):\n return f'{self.last_name} {self.first_name}'\n", "id": "1456439", "language": "Python", "matching_score": 1.2052984237670898, "max_stars_count": 0, "path": "PYTHON/ProjectTwo/AppTwo/models.py" }, { "content": "from django import forms\nfrom . import models\n\nclass NewUserForm(forms.ModelForm):\n\n class Meta:\n model = models.User\n fields = '__all__'\n", "id": "9765058", "language": "Python", "matching_score": 1.290109634399414, "max_stars_count": 0, "path": "DJANGO/django_level_3/ProjectTwo/AppTwo/forms.py" }, { "content": "from django.shortcuts import render\nfrom . import forms\n\ndef index(request):\n return render(request, 'apptwo/index.html')\n\ndef help(request):\n context = {'help_text': 'Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. 
Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.'}\n return render(request,'apptwo/help.html',context=context)\n\ndef users(request):\n form = forms.NewUserForm()\n if request.method == 'POST':\n form = forms.NewUserForm(request.POST)\n\n if form.is_valid():\n form.save(commit=True)\n return index(request)\n\n return render(request, 'apptwo/users.html', {'form':form})\n", "id": "11582930", "language": "Python", "matching_score": 6.262303352355957, "max_stars_count": 0, "path": "DJANGO/django_level_3/ProjectTwo/AppTwo/views.py" }, { "content": "from django.shortcuts import render\nfrom django.http import HttpResponse\nfrom . import models\ndef index(request):\n return HttpResponse('<em>My second app</em>')\n\n\ndef help(request):\n context = {'help_text': 'Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.'}\n return render(request,'apptwo/help.html',context=context)\n\ndef users(request):\n users_list = models.User.objects.all()\n context = {'users_list':users_list}\n return render(request, 'apptwo/users.html',context=context)\n", "id": "9704155", "language": "Python", "matching_score": 2.5548105239868164, "max_stars_count": 0, "path": "PYTHON/ProjectTwo/AppTwo/views.py" }, { "content": "from django.shortcuts import render\nfrom django.http import HttpResponse\n\nfrom . import models\n\ndef index(request):\n webpages_list = models.AccessRecord.objects.order_by('date')\n context = {'access_records': webpages_list}\n return render(request, 'first_app/index.html',context=context)\n", "id": "3966562", "language": "Python", "matching_score": 1.3823679685592651, "max_stars_count": 0, "path": "PYTHON/first_project/first_app/views.py" }, { "content": "from django.shortcuts import render\n\n# Create your views here.\n\ndef index(request):\n return render(request, 'basic_app/index.html')\n\ndef other(request):\n return render(request,'basic_app/other.html')\n\ndef relative(request):\n return render(request, 'basic_app/relative_url_template.html')\n", "id": "1818087", "language": "Python", "matching_score": 1.0858591794967651, "max_stars_count": 0, "path": "DJANGO/django_level_4/learning_templates/basic_app/views.py" }, { "content": "from django.urls import path\nfrom . import views\n\napp_name = 'basic_app'\n\nurlpatterns = [\n path('',views.SchoolListView.as_view(), name='list'),\n path('detail/<int:pk>/',views.SchoolDetailView.as_view(),name='detail'),\n path('create/', views.SchoolCreateView.as_view(),name='create'),\n path('update/<int:pk>/',views.SchoolUpdateView.as_view(),name='update'),\n path('delete/<int:pk>/',views.SchoolDeleteView.as_view(),name='delete'),\n]\n", "id": "4697915", "language": "Python", "matching_score": 3.3403244018554688, "max_stars_count": 0, "path": "DJANGO/django_level_6/advcvb/basic_app/urls.py" }, { "content": "from django.shortcuts import render\nfrom django.views.generic import (View, TemplateView, ListView,DetailView,\n CreateView, UpdateView, DeleteView)\nfrom django.urls import reverse_lazy\nfrom . 
import models\n\nclass IndexView(TemplateView):\n template_name='index.html'\n\nclass SchoolListView(ListView):\n context_object_name = 'schools'\n model = models.School\n\nclass SchoolDetailView(DetailView):\n context_object_name = 'school_detail'\n model = models.School\n template_name = 'basic_app/school_detail.html'\n\nclass SchoolCreateView(CreateView):\n model = models.School\n fields = ('name', 'principal', 'location')\n\nclass SchoolUpdateView(UpdateView):\n model = models.School\n fields = ('name', 'principal')\n\nclass SchoolDeleteView(DeleteView):\n model = models.School\n success_url = reverse_lazy(\"basic_app:list\")\n", "id": "8256569", "language": "Python", "matching_score": 1.3890893459320068, "max_stars_count": 0, "path": "DJANGO/django_level_6/advcvb/basic_app/views.py" }, { "content": "from django.urls import path\nfrom . import views\n\napp_name = 'basic_app'\n\nurlpatterns = [\n path('relative/', views.relative, name='relative'),\n path('other/', views.other, name='other')\n]\n", "id": "1871041", "language": "Python", "matching_score": 1.22479248046875, "max_stars_count": 0, "path": "DJANGO/django_level_4/learning_templates/basic_app/urls.py" }, { "content": "from django.urls import path\nfrom . import views\n\nurlpatterns = [\n path('users/',views.users, name='users'),\n path('',views.index, name='index'),\n path('index/',views.index, name='index'),\n path('help/',views.help, name='help'),\n]\n", "id": "10991949", "language": "Python", "matching_score": 1.6521679162979126, "max_stars_count": 0, "path": "DJANGO/django_level_3/ProjectTwo/AppTwo/urls.py" }, { "content": "from django.urls import path\nfrom . import views\n\nurlpatterns = [\n path('users',views.users, name='users'),\n path('help',views.help, name='help'),\n]\n", "id": "7606142", "language": "Python", "matching_score": 0.05942083150148392, "max_stars_count": 0, "path": "PYTHON/ProjectTwo/AppTwo/urls.py" }, { "content": "from django.shortcuts import render\nfrom . import forms\n\n# Create your views here.\n\ndef index(request):\n return render(request, 'basicapp/index.html')\n\n\ndef form_name_view(request):\n form = forms.FormName()\n\n if request.method == 'POST':\n form = forms.FormName(request.POST)\n if form.is_valid():\n print(form.cleaned_data)\n\n return render(request, 'basicapp/form_page.html', {'form':form})\n", "id": "937046", "language": "Python", "matching_score": 0.9972201585769653, "max_stars_count": 0, "path": "DJANGO/django_level_3/basicforms/basicapp/views.py" }, { "content": "#!/usr/bin/env python\nimport json\nfrom datetime import datetime\n\nimport pika\n\ncredentials = pika.PlainCredentials('krolik', '<PASSWORD>')\nconnection = pika.BlockingConnection(\n pika.ConnectionParameters('localhost',\n 5672,\n '/',\n credentials))\nchannel = connection.channel()\n\nchannel.queue_declare(queue='hello')\n\ndata = {'msg': 'witaj świecie',\n 'data': datetime.now()}\npayload = json.dumps(data, ensure_ascii=False, sort_keys=True, default=str)\nchannel.basic_publish(exchange='', routing_key='hello', body=payload)\nprint(f\" [x] Sent {payload}\")\nconnection.close()\n", "id": "4334471", "language": "Python", "matching_score": 0.1098642647266388, "max_stars_count": 0, "path": "python/helloworld/send.py" } ]
1.389089
SlametGitHub
[ { "content": "\"\"\"Library for interacting with DJI Ryze Tello drones.\n\"\"\"\n\n# coding=utf-8\nimport logging\nimport socket\nimport time\nfrom threading import Thread\nfrom typing import Optional, Union, Type, Dict\n\nimport cv2 # type: ignore\nfrom .enforce_types import enforce_types\n\n\nthreads_initialized = False\ndrones: Optional[dict] = {}\nclient_socket: socket.socket\n\n\n@enforce_types\nclass Tello:\n \"\"\"Python wrapper to interact with the Ryze Tello drone using the official Tello api.\n Tello API documentation:\n [1.3](https://dl-cdn.ryzerobotics.com/downloads/tello/20180910/Tello%20SDK%20Documentation%20EN_1.3.pdf),\n [2.0 with EDU-only commands](https://dl-cdn.ryzerobotics.com/downloads/Tello/Tello%20SDK%202.0%20User%20Guide.pdf)\n \"\"\"\n # Send and receive commands, client socket\n RESPONSE_TIMEOUT = 7 # in seconds\n TAKEOFF_TIMEOUT = 20 # in seconds\n FRAME_GRAB_TIMEOUT = 3\n TIME_BTW_COMMANDS = 0.1 # in seconds\n TIME_BTW_RC_CONTROL_COMMANDS = 0.001 # in seconds\n RETRY_COUNT = 3 # number of retries after a failed command\n TELLO_IP = '192.168.10.1' # Tello IP address\n\n # Video stream, server socket\n VS_UDP_IP = '0.0.0.0'\n VS_UDP_PORT = 11111\n\n CONTROL_UDP_PORT = 8889\n STATE_UDP_PORT = 8890\n\n # Set up logger\n HANDLER = logging.StreamHandler()\n FORMATTER = logging.Formatter('[%(levelname)s] %(filename)s - %(lineno)d - %(message)s')\n HANDLER.setFormatter(FORMATTER)\n\n LOGGER = logging.getLogger('djitellopy')\n LOGGER.addHandler(HANDLER)\n LOGGER.setLevel(logging.INFO)\n # Use Tello.LOGGER.setLevel(logging.<LEVEL>) in YOUR CODE\n # to only receive logs of the desired level and higher\n\n # Conversion functions for state protocol fields\n INT_STATE_FIELDS = (\n # Tello EDU with mission pads enabled only\n 'mid', 'x', 'y', 'z',\n # 'mpry': (custom format 'x,y,z')\n # Common entries\n 'pitch', 'roll', 'yaw',\n 'vgx', 'vgy', 'vgz',\n 'templ', 'temph',\n 'tof', 'h', 'bat', 'time'\n )\n FLOAT_STATE_FIELDS = ('baro', 'agx', 'agy', 'agz')\n\n state_field_converters: Dict[str, Union[Type[int], Type[float]]]\n state_field_converters = {key : int for key in INT_STATE_FIELDS}\n state_field_converters.update({key : float for key in FLOAT_STATE_FIELDS})\n\n # VideoCapture object\n cap: Optional[cv2.VideoCapture] = None\n background_frame_read: Optional['BackgroundFrameRead'] = None\n\n stream_on = False\n is_flying = False\n\n def __init__(self,\n host=TELLO_IP,\n retry_count=RETRY_COUNT):\n\n global threads_initialized, client_socket, drones\n\n self.address = (host, Tello.CONTROL_UDP_PORT)\n self.stream_on = False\n self.retry_count = retry_count\n self.last_received_command_timestamp = time.time()\n self.last_rc_control_timestamp = time.time()\n\n if not threads_initialized:\n # Run Tello command responses UDP receiver on background\n client_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\n client_socket.bind(('', Tello.CONTROL_UDP_PORT))\n response_receiver_thread = Thread(target=Tello.udp_response_receiver)\n response_receiver_thread.daemon = True\n response_receiver_thread.start()\n\n # Run state UDP receiver on background\n state_receiver_thread = Thread(target=Tello.udp_state_receiver)\n state_receiver_thread.daemon = True\n state_receiver_thread.start()\n\n threads_initialized = True\n\n drones[host] = {'responses': [], 'state': {}}\n\n self.LOGGER.info(\"Tello instance was initialized. Host: '{}'. Port: '{}'.\".format(host, Tello.CONTROL_UDP_PORT))\n\n def get_own_udp_object(self):\n \"\"\"Get own object from the global drones dict. 
This object is filled\n with responses and state information by the receiver threads.\n Internal method, you normally wouldn't call this yourself.\n \"\"\"\n global drones\n\n host = self.address[0]\n return drones[host]\n\n @staticmethod\n def udp_response_receiver():\n \"\"\"Setup drone UDP receiver. This method listens for responses of Tello.\n Must be run from a background thread in order to not block the main thread.\n Internal method, you normally wouldn't call this yourself.\n \"\"\"\n while True:\n try:\n data, address = client_socket.recvfrom(1024)\n\n address = address[0]\n Tello.LOGGER.debug('Data received from {} at client_socket'.format(address))\n\n if address not in drones:\n continue\n\n drones[address]['responses'].append(data)\n\n except Exception as e:\n Tello.LOGGER.error(e)\n break\n\n @staticmethod\n def udp_state_receiver():\n \"\"\"Setup state UDP receiver. This method listens for state information from\n Tello. Must be run from a background thread in order to not block\n the main thread.\n Internal method, you normally wouldn't call this yourself.\n \"\"\"\n state_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\n state_socket.bind((\"\", Tello.STATE_UDP_PORT))\n\n while True:\n try:\n data, address = state_socket.recvfrom(1024)\n\n address = address[0]\n Tello.LOGGER.debug('Data received from {} at state_socket'.format(address))\n\n if address not in drones:\n continue\n\n data = data.decode('ASCII')\n drones[address]['state'] = Tello.parse_state(data)\n\n except Exception as e:\n Tello.LOGGER.error(e)\n break\n\n @staticmethod\n def parse_state(state: str) -> Dict[str, Union[int, float, str]]:\n \"\"\"Parse a state line to a dictionary\n Internal method, you normally wouldn't call this yourself.\n \"\"\"\n state = state.strip()\n Tello.LOGGER.debug('Raw state data: {}'.format(state))\n\n if state == 'ok':\n return {}\n\n state_dict = {}\n for field in state.split(';'):\n split = field.split(':')\n if len(split) < 2:\n continue\n\n key = split[0]\n value: Union[int, float, str] = split[1]\n\n if key in Tello.state_field_converters:\n num_type = Tello.state_field_converters[key]\n try:\n value = num_type(value)\n except ValueError as e:\n Tello.LOGGER.debug('Error parsing state value for {}: {} to {}'\n .format(key, value, num_type))\n Tello.LOGGER.error(e)\n continue\n\n state_dict[key] = value\n\n return state_dict\n\n def get_current_state(self) -> dict:\n \"\"\"Call this function to attain the state of the Tello. 
Returns a dict\n        with all fields.\n        Internal method, you normally wouldn't call this yourself.\n        \"\"\"\n        return self.get_own_udp_object()['state']\n\n    def get_state_field(self, key: str):\n        \"\"\"Get a specific state field by name.\n        Internal method, you normally wouldn't call this yourself.\n        \"\"\"\n        state = self.get_current_state()\n\n        if key in state:\n            return state[key]\n        else:\n            raise Exception('Could not get state property: {}'.format(key))\n\n    def get_mission_pad_id(self) -> int:\n        \"\"\"Mission pad ID of the currently detected mission pad\n        Only available on Tello EDUs after calling enable_mission_pads\n        Returns:\n            int: -1 if none is detected, else 1-8\n        \"\"\"\n        return self.get_state_field('mid')\n\n    def get_mission_pad_distance_x(self) -> int:\n        \"\"\"X distance to current mission pad\n        Only available on Tello EDUs after calling enable_mission_pads\n        Returns:\n            int: distance in cm\n        \"\"\"\n        return self.get_state_field('x')\n\n    def get_mission_pad_distance_y(self) -> int:\n        \"\"\"Y distance to current mission pad\n        Only available on Tello EDUs after calling enable_mission_pads\n        Returns:\n            int: distance in cm\n        \"\"\"\n        return self.get_state_field('y')\n\n    def get_mission_pad_distance_z(self) -> int:\n        \"\"\"Z distance to current mission pad\n        Only available on Tello EDUs after calling enable_mission_pads\n        Returns:\n            int: distance in cm\n        \"\"\"\n        return self.get_state_field('z')\n\n    def get_pitch(self) -> int:\n        \"\"\"Get pitch in degree\n        Returns:\n            int: pitch in degree\n        \"\"\"\n        return self.get_state_field('pitch')\n\n    def get_roll(self) -> int:\n        \"\"\"Get roll in degree\n        Returns:\n            int: roll in degree\n        \"\"\"\n        return self.get_state_field('roll')\n\n    def get_yaw(self) -> int:\n        \"\"\"Get yaw in degree\n        Returns:\n            int: yaw in degree\n        \"\"\"\n        return self.get_state_field('yaw')\n\n    def get_speed_x(self) -> int:\n        \"\"\"X-Axis Speed\n        Returns:\n            int: speed\n        \"\"\"\n        return self.get_state_field('vgx')\n\n    def get_speed_y(self) -> int:\n        \"\"\"Y-Axis Speed\n        Returns:\n            int: speed\n        \"\"\"\n        return self.get_state_field('vgy')\n\n    def get_speed_z(self) -> int:\n        \"\"\"Z-Axis Speed\n        Returns:\n            int: speed\n        \"\"\"\n        return self.get_state_field('vgz')\n\n    def get_acceleration_x(self) -> float:\n        \"\"\"X-Axis Acceleration\n        Returns:\n            float: acceleration\n        \"\"\"\n        return self.get_state_field('agx')\n\n    def get_acceleration_y(self) -> float:\n        \"\"\"Y-Axis Acceleration\n        Returns:\n            float: acceleration\n        \"\"\"\n        return self.get_state_field('agy')\n\n    def get_acceleration_z(self) -> float:\n        \"\"\"Z-Axis Acceleration\n        Returns:\n            float: acceleration\n        \"\"\"\n        return self.get_state_field('agz')\n\n    def get_lowest_temperature(self) -> int:\n        \"\"\"Get lowest temperature\n        Returns:\n            int: lowest temperature (°C)\n        \"\"\"\n        return self.get_state_field('templ')\n\n    def get_highest_temperature(self) -> int:\n        \"\"\"Get highest temperature\n        Returns:\n            int: highest temperature (°C)\n        \"\"\"\n        return self.get_state_field('temph')\n\n    def get_temperature(self) -> float:\n        \"\"\"Get average temperature\n        Returns:\n            float: average temperature (°C)\n        \"\"\"\n        templ = self.get_lowest_temperature()\n        temph = self.get_highest_temperature()\n        return (templ + temph) / 2\n\n    def get_height(self) -> int:\n        \"\"\"Get current height in cm\n        Returns:\n            int: height in cm\n        \"\"\"\n        return self.get_state_field('h')\n\n    def get_distance_tof(self) -> int:\n        \"\"\"Get current distance value from TOF in cm\n        Returns:\n            int: TOF distance in cm\n        \"\"\"\n        return self.get_state_field('tof')\n\n    def 
get_barometer(self) -> int:\n        \"\"\"Get current barometer measurement in cm\n        This resembles the absolute height.\n        See https://en.wikipedia.org/wiki/Altimeter\n        Returns:\n            int: barometer measurement in cm\n        \"\"\"\n        return self.get_state_field('baro') * 100\n\n    def get_flight_time(self) -> int:\n        \"\"\"Get the time the motors have been active in seconds\n        Returns:\n            int: flight time in s\n        \"\"\"\n        return self.get_state_field('time')\n\n    def get_battery(self) -> int:\n        \"\"\"Get current battery percentage\n        Returns:\n            int: 0-100\n        \"\"\"\n        return self.get_state_field('bat')\n\n    def get_udp_video_address(self) -> str:\n        \"\"\"Internal method, you normally wouldn't call this yourself.\n        \"\"\"\n        address_schema = 'udp://@{ip}:{port}'  # + '?overrun_nonfatal=1&fifo_size=5000'\n        address = address_schema.format(ip=self.VS_UDP_IP, port=self.VS_UDP_PORT)\n        return address\n\n    def get_video_capture(self):\n        \"\"\"Get the VideoCapture object from the camera drone.\n        Users usually want to use get_frame_read instead.\n        Returns:\n            VideoCapture\n        \"\"\"\n\n        if self.cap is None:\n            self.cap = cv2.VideoCapture(self.get_udp_video_address())\n\n        if not self.cap.isOpened():\n            self.cap.open(self.get_udp_video_address())\n\n        return self.cap\n\n    def get_frame_read(self) -> 'BackgroundFrameRead':\n        \"\"\"Get the BackgroundFrameRead object from the camera drone. Then, you just need to call\n        backgroundFrameRead.frame to get the actual frame received by the drone.\n        Returns:\n            BackgroundFrameRead\n        \"\"\"\n        if self.background_frame_read is None:\n            address = self.get_udp_video_address()\n            self.background_frame_read = BackgroundFrameRead(self, address)  # also sets self.cap\n            self.background_frame_read.start()\n        return self.background_frame_read\n\n    def send_command_with_return(self, command: str, timeout: int = RESPONSE_TIMEOUT) -> str:\n        \"\"\"Send command to Tello and wait for its response.\n        Internal method, you normally wouldn't call this yourself.\n        Return:\n            str: response text on success, an error message when unsuccessful.\n        \"\"\"\n        # Commands sent in quick succession make the drone stop responding to them.\n        # So wait at least self.TIME_BTW_COMMANDS seconds\n        diff = time.time() - self.last_received_command_timestamp\n        if diff < self.TIME_BTW_COMMANDS:\n            self.LOGGER.debug('Waiting {} seconds to execute command: {}...'.format(diff, command))\n            time.sleep(diff)\n\n        self.LOGGER.info(\"Send command: '{}'\".format(command))\n        timestamp = time.time()\n\n        client_socket.sendto(command.encode('utf-8'), self.address)\n\n        responses = self.get_own_udp_object()['responses']\n\n        while not responses:\n            if time.time() - timestamp > timeout:\n                message = \"Aborting command '{}'. Did not receive a response after {} seconds\".format(command, timeout)\n                self.LOGGER.warning(message)\n                return message\n            time.sleep(0.1)  # Sleep during send command\n\n        self.last_received_command_timestamp = time.time()\n\n        first_response = responses.pop(0)  # first datum from socket\n        try:\n            response = first_response.decode(\"utf-8\")\n        except UnicodeDecodeError as e:\n            self.LOGGER.error(e)\n            return \"response decode error\"\n        response = response.rstrip(\"\\r\\n\")\n\n        self.LOGGER.info(\"Response {}: '{}'\".format(command, response))\n        return response\n\n    def send_command_without_return(self, command: str):\n        \"\"\"Send command to Tello without expecting a response.\n        Internal method, you normally wouldn't call this yourself.\n        \"\"\"\n        # Commands sent in quick succession make the drone stop responding to them. 
So wait at least self.TIME_BTW_COMMANDS seconds\n\n        self.LOGGER.info(\"Send command (no response expected): '{}'\".format(command))\n        client_socket.sendto(command.encode('utf-8'), self.address)\n\n    def send_control_command(self, command: str, timeout: int = RESPONSE_TIMEOUT) -> bool:\n        \"\"\"Send control command to Tello and wait for its response.\n        Internal method, you normally wouldn't call this yourself.\n        \"\"\"\n        response = \"max retries exceeded\"\n        for i in range(0, self.retry_count):\n            response = self.send_command_with_return(command, timeout=timeout)\n\n            if response.lower() == 'ok':\n                return True\n\n            self.LOGGER.debug(\"Command attempt #{} failed for command: '{}'\".format(i, command))\n\n        self.raise_result_error(command, response)\n        return False  # never reached\n\n    def send_read_command(self, command: str) -> str:\n        \"\"\"Send given command to Tello and wait for its response.\n        Internal method, you normally wouldn't call this yourself.\n        \"\"\"\n\n        response = self.send_command_with_return(command)\n\n        try:\n            response = str(response)\n        except TypeError as e:\n            self.LOGGER.error(e)\n\n        if any(word in response for word in ('error', 'ERROR', 'False')):\n            self.raise_result_error(command, response)\n            return \"Error: this code should never be reached\"\n\n        return response\n\n    def send_read_command_int(self, command: str) -> int:\n        \"\"\"Send given command to Tello and wait for its response.\n        Parses the response to an integer\n        Internal method, you normally wouldn't call this yourself.\n        \"\"\"\n        response = self.send_read_command(command)\n        return int(response)\n\n    def send_read_command_float(self, command: str) -> float:\n        \"\"\"Send given command to Tello and wait for its response.\n        Parses the response to a float\n        Internal method, you normally wouldn't call this yourself.\n        \"\"\"\n        response = self.send_read_command(command)\n        return float(response)\n\n    def raise_result_error(self, command: str, response: str) -> bool:\n        \"\"\"Used to raise an error after an unsuccessful command\n        Internal method, you normally wouldn't call this yourself.\n        \"\"\"\n        tries = 1 + self.retry_count\n        raise Exception(\"Command '{}' was unsuccessful for {} tries. Latest response:\\t'{}'\"\n                        .format(command, tries, response))\n\n    def connect(self, wait_for_state=True):\n        \"\"\"Enter SDK mode. Call this before any of the control functions.\n        \"\"\"\n        self.send_control_command(\"command\")\n\n        if wait_for_state:\n            REPS = 20\n            for i in range(REPS):\n                if self.get_current_state():\n                    t = i / REPS  # in seconds\n                    Tello.LOGGER.debug(\"'.connect()' received first state packet after {} seconds\".format(t))\n                    break\n                time.sleep(1 / REPS)\n\n            if not self.get_current_state():\n                raise Exception('Did not receive a state packet from the Tello')\n\n    def takeoff(self):\n        \"\"\"Automatic takeoff.\n        \"\"\"\n        # Sometimes it takes a long time to take off and return a successful takeoff.\n        # So we better wait. Otherwise, it would give us an error on the following calls.\n        self.send_control_command(\"takeoff\", timeout=Tello.TAKEOFF_TIMEOUT)\n        self.is_flying = True\n\n    def land(self):\n        \"\"\"Automatic landing.\n        \"\"\"\n        self.send_control_command(\"land\")\n        self.is_flying = False\n\n    def streamon(self):\n        \"\"\"Turn on video streaming. Use `tello.get_frame_read` afterwards.\n        Video Streaming is supported on all Tellos when in AP mode (i.e.\n        when your computer is connected to a Tello-XXXXXX WiFi network).\n        Currently Tello EDUs do not support video streaming while connected\n        to a WiFi-network.\n\n        !!! 
Note:\n If the response is 'Unknown command' you have to update the Tello\n firmware. This can be done using the official Tello app.\n \"\"\"\n self.send_control_command(\"streamon\")\n self.stream_on = True\n\n def streamoff(self):\n \"\"\"Turn off video streaming.\n \"\"\"\n self.send_control_command(\"streamoff\")\n self.stream_on = False\n\n def emergency(self):\n \"\"\"Stop all motors immediately.\n \"\"\"\n self.send_control_command(\"emergency\")\n\n def move(self, direction: str, x: int):\n \"\"\"Tello fly up, down, left, right, forward or back with distance x cm.\n Users would normally call one of the move_x functions instead.\n Arguments:\n direction: up, down, left, right, forward or back\n x: 20-500\n \"\"\"\n self.send_control_command(\"{} {}\".format(direction, x))\n\n def move_up(self, x: int):\n \"\"\"Fly x cm up.\n Arguments:\n x: 20-500\n \"\"\"\n self.move(\"up\", x)\n\n def move_down(self, x: int):\n \"\"\"Fly x cm down.\n Arguments:\n x: 20-500\n \"\"\"\n self.move(\"down\", x)\n\n def move_left(self, x: int):\n \"\"\"Fly x cm left.\n Arguments:\n x: 20-500\n \"\"\"\n self.move(\"left\", x)\n\n def move_right(self, x: int):\n \"\"\"Fly x cm right.\n Arguments:\n x: 20-500\n \"\"\"\n self.move(\"right\", x)\n\n def move_forward(self, x: int):\n \"\"\"Fly x cm forward.\n Arguments:\n x: 20-500\n \"\"\"\n self.move(\"forward\", x)\n\n def move_back(self, x: int):\n \"\"\"Fly x cm backwards.\n Arguments:\n x: 20-500\n \"\"\"\n self.move(\"back\", x)\n\n def rotate_clockwise(self, x: int):\n \"\"\"Rotate x degree clockwise.\n Arguments:\n x: 1-360\n \"\"\"\n self.send_control_command(\"cw {}\".format(x))\n\n def rotate_counter_clockwise(self, x: int):\n \"\"\"Rotate x degree counter-clockwise.\n Arguments:\n x: 1-3600\n \"\"\"\n self.send_control_command(\"ccw {}\".format(x))\n\n def flip(self, direction: str):\n \"\"\"Do a flip maneuver.\n Users would normally call one of the flip_x functions instead.\n Arguments:\n direction: l (left), r (right), f (forward) or b (back)\n \"\"\"\n self.send_control_command(\"flip {}\".format(direction))\n\n def flip_left(self):\n \"\"\"Flip to the left.\n \"\"\"\n self.flip(\"l\")\n\n def flip_right(self):\n \"\"\"Flip to the right.\n \"\"\"\n self.flip(\"r\")\n\n def flip_forward(self):\n \"\"\"Flip forward.\n \"\"\"\n self.flip(\"f\")\n\n def flip_back(self):\n \"\"\"Flip backwards.\n \"\"\"\n self.flip(\"b\")\n\n def go_xyz_speed(self, x: int, y: int, z: int, speed: int):\n \"\"\"Fly to x y z relative to the current position.\n Speed defines the traveling speed in cm/s.\n Arguments:\n x: -500-500\n y: -500-500\n z: -500-500\n speed: 10-100\n \"\"\"\n cmd = 'go {} {} {} {}'.format(x, y, z, speed)\n self.send_control_command(cmd)\n\n def curve_xyz_speed(self, x1: int, y1: int, z1: int, x2: int, y2: int, z2: int, speed: int):\n \"\"\"Fly to x2 y2 z2 in a curve via x2 y2 z2. 
Speed defines the traveling speed in cm/s.\n\n - Both points are relative to the current position\n - The current position and both points must form a circle arc.\n - If the arc radius is not within the range of 0.5-10 meters, it raises an Exception\n - x1/x2, y1/y2, z1/z2 can't both be between -20-20 at the same time, but can both be 0.\n\n Arguments:\n x1: -500-500\n x2: -500-500\n y1: -500-500\n y2: -500-500\n z1: -500-500\n z2: -500-500\n speed: 10-60\n \"\"\"\n cmd = 'curve {} {} {} {} {} {} {}'.format(x1, y1, z1, x2, y2, z2, speed)\n self.send_control_command(cmd)\n\n def go_xyz_speed_mid(self, x: int, y: int, z: int, speed: int, mid: int):\n \"\"\"Fly to x y z relative to the mission pad with id mid.\n Speed defines the traveling speed in cm/s.\n Arguments:\n x: -500-500\n y: -500-500\n z: -500-500\n speed: 10-100\n mid: 1-8\n \"\"\"\n cmd = 'go {} {} {} {} m{}'.format(x, y, z, speed, mid)\n self.send_control_command(cmd)\n\n def curve_xyz_speed_mid(self, x1: int, y1: int, z1: int, x2: int, y2: int, z2: int, speed: int, mid: int):\n \"\"\"Fly to x2 y2 z2 in a curve via x2 y2 z2. Speed defines the traveling speed in cm/s.\n\n - Both points are relative to the mission pad with id mid.\n - The current position and both points must form a circle arc.\n - If the arc radius is not within the range of 0.5-10 meters, it raises an Exception\n - x1/x2, y1/y2, z1/z2 can't both be between -20-20 at the same time, but can both be 0.\n\n Arguments:\n x1: -500-500\n y1: -500-500\n z1: -500-500\n x2: -500-500\n y2: -500-500\n z2: -500-500\n speed: 10-60\n mid: 1-8\n \"\"\"\n cmd = 'curve {} {} {} {} {} {} {} m{}'.format(x1, y1, z1, x2, y2, z2, speed, mid)\n self.send_control_command(cmd)\n\n def go_xyz_speed_yaw_mid(self, x: int, y: int, z: int, speed: int, yaw: int, mid1: int, mid2: int):\n \"\"\"Fly to x y z relative to mid1.\n Then fly to 0 0 z over mid2 and rotate to yaw relative to mid2's rotation.\n Speed defines the traveling speed in cm/s.\n Arguments:\n x: -500-500\n y: -500-500\n z: -500-500\n speed: 10-100\n yaw: -360-360\n mid1: 1-8\n mid2: 1-8\n \"\"\"\n cmd = 'jump {} {} {} {} {} m{} m{}'.format(x, y, z, speed, yaw, mid1, mid2)\n self.send_control_command(cmd)\n\n def enable_mission_pads(self):\n \"\"\"Enable mission pad detection\n \"\"\"\n self.send_control_command(\"mon\")\n\n def disable_mission_pads(self):\n \"\"\"Disable mission pad detection\n \"\"\"\n self.send_control_command(\"moff\")\n\n def set_mission_pad_detection_direction(self, x):\n \"\"\"Set mission pad detection direction. enable_mission_pads needs to be\n called first. When detecting both directions detecting frequency is 10Hz,\n otherwise the detection frequency is 20Hz.\n Arguments:\n x: 0 downwards only, 1 forwards only, 2 both directions\n \"\"\"\n self.send_control_command(\"mdirection {}\".format(x))\n\n def set_speed(self, x: int):\n \"\"\"Set speed to x cm/s.\n Arguments:\n x: 10-100\n \"\"\"\n self.send_control_command(\"speed {}\".format(x))\n\n def send_rc_control(self, left_right_velocity: int, forward_backward_velocity: int, up_down_velocity: int,\n yaw_velocity: int):\n \"\"\"Send RC control via four channels. 
Command is sent every self.TIME_BTW_RC_CONTROL_COMMANDS seconds.\n Arguments:\n left_right_velocity: -100~100 (left/right)\n forward_backward_velocity: -100~100 (forward/backward)\n up_down_velocity: -100~100 (up/down)\n yaw_velocity: -100~100 (yaw)\n \"\"\"\n def clamp100(x: int) -> int:\n return max(-100, min(100, x))\n\n if time.time() - self.last_rc_control_timestamp > self.TIME_BTW_RC_CONTROL_COMMANDS:\n self.last_rc_control_timestamp = time.time()\n cmd = 'rc {} {} {} {}'.format(\n clamp100(left_right_velocity),\n clamp100(forward_backward_velocity),\n clamp100(up_down_velocity),\n clamp100(yaw_velocity)\n )\n self.send_command_without_return(cmd)\n\n def set_wifi_credentials(self, ssid, password):\n \"\"\"Set the Wi-Fi SSID and password. The Tello will reboot afterwords.\n \"\"\"\n cmd = 'wifi {} {}'.format(ssid, password)\n self.send_command_without_return(cmd)\n\n def connect_to_wifi(self, ssid, password):\n \"\"\"Connects to the Wi-Fi with SSID and password.\n After this command the tello will reboot.\n Only works with Tello EDUs.\n \"\"\"\n cmd = 'ap {} {}'.format(ssid, password)\n self.send_command_without_return(cmd)\n\n def query_speed(self) -> int:\n \"\"\"Query speed setting (cm/s)\n Returns:\n int: 1-100\n \"\"\"\n return self.send_read_command_int('speed?')\n\n def query_battery(self) -> int:\n \"\"\"Get current battery percentage via a query command\n Using get_battery is usually faster\n Returns:\n int: 0-100 in %\n \"\"\"\n return self.send_read_command_int('battery?')\n\n def query_flight_time(self) -> int:\n \"\"\"Query current fly time (s).\n Using get_flight_time is usually faster.\n Returns:\n int: Seconds elapsed during flight.\n \"\"\"\n return self.send_read_command_int('time?')\n\n def query_height(self) -> int:\n \"\"\"Get height in cm via a query command.\n Using get_height is usually faster\n Returns:\n int: 0-3000\n \"\"\"\n return self.send_read_command_int('height?')\n\n def query_temperature(self) -> int:\n \"\"\"Query temperature (°C).\n Using get_temperature is usually faster.\n Returns:\n int: 0-90\n \"\"\"\n return self.send_read_command_int('temp?')\n\n def query_attitude(self) -> dict:\n \"\"\"Query IMU attitude data.\n Using get_pitch, get_roll and get_yaw is usually faster.\n Returns:\n {'pitch': int, 'roll': int, 'yaw': int}\n \"\"\"\n response = self.send_read_command('attitude?')\n return Tello.parse_state(response)\n\n def query_barometer(self) -> int:\n \"\"\"Get barometer value (cm)\n Using get_barometer is usually faster.\n Returns:\n int: 0-100\n \"\"\"\n baro = self.send_read_command_int('baro?')\n return baro * 100\n\n def query_distance_tof(self) -> float:\n \"\"\"Get distance value from TOF (cm)\n Using get_distance_tof is usually faster.\n Returns:\n float: 30-1000\n \"\"\"\n # example response: 801mm\n tof = self.send_read_command('tof?')\n return int(tof[:-2]) / 10\n\n def query_wifi_signal_noise_ratio(self) -> str:\n \"\"\"Get Wi-Fi SNR\n Returns:\n str: snr\n \"\"\"\n return self.send_read_command('wifi?')\n\n def query_sdk_version(self) -> str:\n \"\"\"Get SDK Version\n Returns:\n str: SDK Version\n \"\"\"\n return self.send_read_command('sdk?')\n\n def query_serial_number(self) -> str:\n \"\"\"Get Serial Number\n Returns:\n str: Serial Number\n \"\"\"\n return self.send_read_command('sn?')\n\n def end(self):\n \"\"\"Call this method when you want to end the tello object\n \"\"\"\n if self.is_flying:\n self.land()\n if self.stream_on:\n self.streamoff()\n if self.background_frame_read is not None:\n 
self.background_frame_read.stop()\n if self.cap is not None:\n self.cap.release()\n\n host = self.address[0]\n if host in drones:\n del drones[host]\n\n def __del__(self):\n self.end()\n\n\nclass BackgroundFrameRead:\n \"\"\"\n This class read frames from a VideoCapture in background. Use\n backgroundFrameRead.frame to get the current frame.\n \"\"\"\n\n def __init__(self, tello, address):\n tello.cap = cv2.VideoCapture(address)\n\n self.cap = tello.cap\n\n if not self.cap.isOpened():\n self.cap.open(address)\n\n # Try grabbing a frame multiple times\n # According to issue #90 the decoder might need some time\n # https://github.com/damiafuentes/DJITelloPy/issues/90#issuecomment-855458905\n start = time.time()\n while time.time() - start < Tello.FRAME_GRAB_TIMEOUT:\n Tello.LOGGER.debug('trying to grab a frame...')\n self.grabbed, self.frame = self.cap.read()\n if self.frame is not None:\n break\n time.sleep(0.05)\n\n if not self.grabbed or self.frame is None:\n raise Exception('Failed to grab first frame from video stream')\n\n self.stopped = False\n self.worker = Thread(target=self.update_frame, args=(), daemon=True)\n\n def start(self):\n \"\"\"Start the frame update worker\n Internal method, you normally wouldn't call this yourself.\n \"\"\"\n self.worker.start()\n\n def update_frame(self):\n \"\"\"Thread worker function to retrieve frames from a VideoCapture\n Internal method, you normally wouldn't call this yourself.\n \"\"\"\n while not self.stopped:\n if not self.grabbed or not self.cap.isOpened():\n self.stop()\n else:\n self.grabbed, self.frame = self.cap.read()\n\n def stop(self):\n \"\"\"Stop the frame update worker\n Internal method, you normally wouldn't call this yourself.\n \"\"\"\n self.stopped = True\n self.worker.join()\n", "id": "3600975", "language": "Python", "matching_score": 0, "max_stars_count": 1, "path": "djitellopy/tello.py" } ]
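# --- Editor's note: a minimal usage sketch for the Tello class above (not part of the
# --- original file). It assumes the usual djitellopy package layout, i.e. that `Tello`
# --- is importable from the package containing tello.py; every call below is defined
# --- in the listing above.
from djitellopy import Tello

tello = Tello()
tello.connect()               # enter SDK mode and wait for the first state packet
print(tello.get_battery())    # read from the state stream, no extra UDP round trip
tello.takeoff()
tello.move_up(50)             # distance in cm, valid range 20-500
tello.rotate_clockwise(90)    # degrees, valid range 1-360
tello.land()
tello.end()                   # lands / stops the stream if needed, frees resources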
0
ccloquet
[ { "content": "#####################################################################################################################################\n#\n# How to best dispatch tne face masks within the pharmacies of a city #\n# so that the citizen should go to the nearest shop from his/her house #\n# #\n# This algorithm assigns each street to the nearest pharmacy, #\n# then tries to rebalance a bit the number of face masks between the stores #\n# #\n# It uses the ICAR database (Service public de Wallonie) to geocode the streets, #\n# and the Google API for the streets that were not found. #\n# In last resorts, it uses a manually made table to geocode the remaining streets #\n# #\n# <NAME> (Poppy) - 2020-04-22 - Licence MIT (Free to reuse) #\n# #\n#####################################################################################################################################\n\nimport csv\nimport requests\nimport json\nimport math\nimport tabulate\nimport time\nimport numpy\nimport operator\nimport json\n\nfrom os import path\nfrom pyproj import Proj\nfrom difflib import SequenceMatcher\nimport unicodedata\n\nGOOGLE_APIKEY = '';\n\nMAX_STREETS = 1000; # for debug - max number of streets to use\n\n# source files\npharmacies_geojson = 'pharmacies_from_kml_lambert72.geojson' # (converted from a KML using QGIS)\n # we use\n # name = f['properties']['Name']\n # descr = f['properties']['description'] # as address\n # x = f['geometry']['coordinates'][0]\n # y = f['geometry']['coordinates'][1]\n\nrues_csv = 'rues.csv' # streets list (we use columns 1 (number of people), 5 (street name) and 6 (zip code)\n # column index starts at 0\n # with title row\n # !decimal separator\nVILLE = 'XXXXX, Belgium'\n\n# city bounding box (in Lambert 72 coordinates)\nxmin = 0;\nxmax = 990000;\nymin = 0;\nymax = 990000;\n\n# to geocode the remaining streets\nmissing_streets_fname = '20200422_streets_missing_completed.csv' # we use columns 0 (name + zip), 1 (latitude) and 2 (longitude)\n\n# parameters for the reequilibration\ncoeff = 1.5\nq = 100\n\n# two intermediate files\nofficines_poppy_fname = \"200420_officines.poppy\"\nrues_poppy_fname = \"200420_rues.poppy\"\nmyProj = Proj(\"+proj=lcc +lat_1=51.16666723333333 +lat_2=49.8333339 +lat_0=90 +lon_0=4.367486666666666 +x_0=150000.013 +y_0=5400088.438 +ellps=intl +towgs84=-106.869,52.2978,-103.724,0.3366,-0.457,1.8422,-1.2747 +units=m +no_defs\")\n\ndef strip_accents(text):\n\n try:\n text = unicode(text, 'utf-8')\n except NameError: # unicode is a default on python 3 \n pass\n\n text = unicodedata.normalize('NFD', text)\\\n .encode('ascii', 'ignore')\\\n .decode(\"utf-8\")\n\n return str(text)\n\ndef google_find(street):\n \n next_uri = \"https://maps.googleapis.com/maps/api/geocode/json?address=\" + street + \"&key=\" + GOOGLE_APIKEY;\n\n r = requests.get(next_uri)\n if (r.status_code != 200): print('************' +str(r.status_code))\n\n r_json = r.json()\n s = r_json['results'];\n\n if len(s) == 0:\n return 0, 0, '', 0\n \n s = s[0];\n\n lat = s['geometry']['location']['lat']\n lng = s['geometry']['location']['lng']\n nam = strip_accents(s['formatted_address'].replace(', Belgium', ''))\n\n x, y = myProj(lng, lat)\n\n score = SequenceMatcher(None, street.lower().rsplit(' ', 1)[0].strip(), nam.lower().rsplit(' ', 1)[0].strip()).ratio() # .rsplit(' ', 1)[0] -> remove last wor (ie : municipality), but remain zip\n \n return int(x), int(y), nam, score\n\n\nmyofficines = [];\nmystreets = [];\nmissing_streets = [];\n \n# ***********\n# officines\n# ***********\n# if 
the intermediate file exists, loads it, otherwise build it\nif path.exists(officines_poppy_fname):\n with open(officines_poppy_fname, 'r') as filehandle:\n myofficines = json.load(filehandle)\n \nelse:\n myofficines = []\n myofficines_csv = []\n\n print('\\n>>> building officines list\\n')\n\n with open('200420_Officines_entite_XXX_adapted.csv', 'r') as csvfile:\n officines_filter = csv.reader(csvfile, delimiter=';', quotechar='|')\n\n next(officines_filter)\n \n for row in officines_filter:\n #print(row[2] + ' ' + row[1])\n #print(row[4] + ' ' + row[5] + ', ' + row[6] + ' ' + row[7])\n \n tofind = row[4] + ' ' + row[5] + ', ' + row[6] + ' ' + row[7]\n\n myid = hash(tofind)\n \n x, y, name, score = google_find(tofind)\n\n if score < 1:\n print(x, y, name.lower(), tofind.lower(), score)\n \n myofficines.append({'id':myid, 'name':row[2] + ' ' + row[1], 'descr': tofind, 'x':x, 'y':y})\n myofficines_csv.append([myid, row[2] + ' ' + row[1], tofind, x, y])\n\n with open(officines_poppy_fname+'_test.csv', 'w', encoding='utf-8') as f:\n writer = csv.writer(f)\n writer.writerows(myofficines_csv)\n\n with open(officines_poppy_fname, 'w') as f:\n json.dump(myofficines, f)\n \n# ***********\n# streets\n# ***********\n# if the intermediate file exists, loads it, otherwise build it\nif path.exists(rues_poppy_fname):\n with open(rues_poppy_fname, 'r') as filehandle:\n mystreets = json.load(filehandle)\nelse: \n with open(rues_csv, newline='', encoding='utf-8') as csvfile:\n\n rues = csv.reader(csvfile, delimiter=';', quotechar='|')\n \n incorrect = 0;\n total = 0;\n numok = 0;\n mystreets = [];\n provider = '.';\n ok = 0;\n \n print('\\n>>> building streets list\\n')\n\n next(rues) # skip title\n for row in rues:\n time.sleep(.25) # throttling for the API\n\n ok = True\n total += 1;\n n = int(row[1])\n rue = row[5]\n cp = row[6]\n\n # look into missing streets\n found = False;\n tofind = rue + ' ' + cp\n \n if path.exists(missing_streets_fname):\n with open(missing_streets_fname, newline='') as csvfile: #, encoding='utf-8'\n missing_streets = csv.reader(csvfile, delimiter=';', quotechar='|')\n next(missing_streets)\n \n for missing_street in missing_streets:\n if missing_street[0].strip() == tofind.strip():\n x, y = myProj(missing_street[2].replace(',', '.'), missing_street[1].replace(',', '.'))\n found = True\n ok = True\n score = 1\n provider = 'M';\n break;\n \n # if not found, try other means\n if found == False:\n r = requests.get('http://geoservices.wallonie.be//geolocalisation/rest/getListeRuesByCpAndNomApprox/' + cp + '/' + rue + '/')\n provider = 'W';\n \n if (r.status_code != 200):\n print('************' +str(r.status_code))\n\n streets = r.json()\n\n if (streets['errorMsg'] != None):\n print('*************' + streets['errorMsg'])\n\n if (len(streets['rues'])==0):\n print('!' 
+ rue)\n ok = False\n \n for street in streets['rues']: # aim is to take the first one\n\n ok = True\n x = (street['xMin'] + street['xMax'])/2;\n y = (street['yMin'] + street['yMax'])/2;\n score = street['score'];\n \n if score < 100:\n ok = False\n rue_split = rue.split(' ', 3)\n\n # maybe the street name is inverted (ex : rue <NAME> instead of rue <NAME>) -> tries that\n if len(rue_split) == 3:\n inverted_rue = rue_split[0] + ' ' + rue_split[2] + ' ' + rue_split[1]\n\n if inverted_rue.lower() == street['nom'].title().lower():\n ok = True\n\n # if not, tries via Google\n if ok == False:\n tofind = rue + ', ' + cp + ' ' + VILLE\n x, y, nam, score = google_find(tofind)\n provider = 'G';\n \n if tofind.find('oleilmont') > 0:\n score = 1\n\n if score == 1:\n ok = True\n else:\n incorrect += 1\n ok = False\n\n break;\n \n if (x == 0) | (y == 0) | (x < xmin) | (x > xmax) | (y < ymin) | (y > ymax): # city bounding box\n print ('[BB ISSUE]')\n ok = False\n \n if ok:\n print(provider, end = '')\n numok += 1\n mystreets.append({'rue': rue, 'cp':cp, 'n':n, 'x':x, 'y':y})\n else:\n print ('\\n! ' + rue + ' ' + cp + '### Google: ['+ nam + '] ### SPW: [' + street['nom'].title() + '] ## pc incorrect: ' + str(100*incorrect/total));\n \n if total >= MAX_STREETS:\n print('max streets break')\n print(total)\n print(numok)\n break;\n \n with open(rues_poppy_fname, 'w') as filehandle:\n json.dump(mystreets, filehandle)\n\nprint('\\n>>> which street goes where')\n\nmy_groups = {};\nmy_streets_idx = {};\n\nfor officine in myofficines:\n my_groups[officine['id']] = {'id':officine['id'], 'name':officine['name'], 'descr':officine['descr'], 'x':officine['x'], 'y':officine['y'], 'n':0, 'n0':0, 'done':False, 'list':{}}\n\n# which officine for a given street\nNpers = 0\nfor street in mystreets:\n my_streets_idx[street['rue'] + '_' + street['cp']] = {'orig':'', 'now':''}\n Npers += street['n']\n\nfor street in mystreets:\n d20 = 9e99\n t = street;\n \n for officine in myofficines:\n d2 = (officine['x'] - street['x'])*(officine['x'] - street['x']) + (officine['y'] - street['y'])*(officine['y'] - street['y']);\n if d2 < d20:\n u = officine\n d20 = d2\n \n # u est l'officine la plus proche de la rue\n\n off_idx = u['id'];\n str_idx = t['rue'] + '_' + t['cp']\n \n my_streets_idx[str_idx]['orig'] = off_idx\n my_streets_idx[str_idx]['now'] = my_streets_idx[str_idx]['orig']\n my_groups[off_idx]['n'] += t['n']\n my_groups[off_idx]['n0'] = my_groups[off_idx]['n']\n my_groups[off_idx]['list'][str_idx] = t\n \nNpers_off = coeff * Npers / len(myofficines)\nw = []\n\n###########################################\n# rebalancing\n###########################################\n\nwhile q > 0:\n q-=1\n for k, v in my_groups.items():\n my_groups[k]['done'] = False\n w.append(my_groups[k]['n'])\n\n print(str(min(w)) + ' ' + str(numpy.mean(w)) + ' ' + str(max(w)) + ' ' + str(numpy.std(w)))\n \n p = len(myofficines);\n while p > 0:\n p -= 1;\n \n #recherche de la pharma qui a le moins d'items, qui n'a pas encore ete traitée et dont le nombre d'items est plus petit que la moyenne\n off_idx = ''\n officine = {}\n v0 = 9e99\n \n for k, v in my_groups.items():\n\n if v['done']:\n continue;\n\n if v['n'] > Npers_off:\n continue;\n \n if v['n'] < v0:\n off_idx = k\n officine = v\n v0 = v['n']\n\n if v0 > 9e98:\n break;\n \n #recherche de la rue la plus proche de cette pharma, qui n'appartient pas encore à cette pharma\n d20 = 9e99\n for street in mystreets:\n\n str_idx = street['rue'] + '_' + street['cp']\n \n # recherche seulement dans les rues qui 
n'ont pas appartenu à cette pharma dans le passé (pblm : doivent aller trop loin)\n #if my_streets_idx[str_idx]['orig'] == off_idx:\n # continue\n\n # recherche seulement dans les rues qui n'appartiennent pas encore à cette pharma\n if my_streets_idx[str_idx]['now'] == off_idx:\n continue\n\n # ne prendre qu'aux pharma qui ont plus de volume que soi\n if v0 > my_groups[my_streets_idx[str_idx]['now']]['n']:\n continue\n \n d2 = (officine['x'] - street['x'])*(officine['x'] - street['x']) + (officine['y'] - street['y'])*(officine['y'] - street['y']);\n if d2 < d20:\n d20 = d2\n t = street\n\n #t est la rue la plus proche\n str_idx = t['rue'] + '_' + t['cp']\n #print(str_idx)\n\n # cette rue doit être ajoutée à la pharma, ainsi que son nbre\n my_groups[off_idx]['done'] = True\n my_groups[off_idx]['n'] += t['n']\n my_groups[off_idx]['list'][str_idx] = t\n\n # et retirée de la pharma à laquelle elle a été prise\n old_off_idx = my_streets_idx[str_idx]['now']\n my_groups[old_off_idx]['n'] -= t['n']\n del my_groups[old_off_idx]['list'][str_idx]\n\n #enfin, la nouvelle pharma doit remplacer l'ancienne dans la rue\n my_streets_idx[str_idx]['now'] = off_idx\n\n #print(my_groups[off_idx])\n\nprint('')\nfor k, v in my_groups.items():\n if v['n0'] != v['n']:\n print (v['name'] + ' ' + str(v['n0']) + '->' + str(v['n']))\n\n###########################################\n# end rebalancing\n###########################################\n\noff = []\nout = [];\ni = 0; # will be a readable index to be used in QGIS\n\nfor k,u in my_groups.items():\n off.append([u['id'], i, u['name'], u['descr'], u['n'], u['x'], u['y']])\n \n for l,t in u['list'].items():\n out.append( [t['x'], t['y'], t['n'], t['rue'], i, u['id'] ])\n\n i+=1\n\nwith open(\"off.csv\", \"w\", encoding='utf-8') as f:\n writer = csv.writer(f)\n writer.writerows(off)\n \nwith open(\"out.csv\", \"w\", encoding='utf-8') as f:\n writer = csv.writer(f)\n writer.writerows(out)\n\n# these files can be loaded in QGIS\n# out.csv should be joined with off.csv (join field : field_2, target field : field_5)\n# this allows to visualize\n# - which officines are the most loaded (off.csv - using a category style based on the number of people) \n# - which streets are bounded to each officine (out.csv - using a category style based on the index of the officine linked to a articular street)\n# - the out.csv layer can even be duplicated and styled using the geometry generator -> Line -> and the following expression : make_line( make_point( \"field_1\",\"field_2\"),make_point( \"officines [off.csv]_field_6\",\"officines [off.csv]_field_7\"))\n#\n# to be able to modify, the out.csv should be first exported in shapefile or geopackage \n\nprint(tabulate.tabulate(out))\n\n", "id": "5869671", "language": "Python", "matching_score": 0, "max_stars_count": 0, "path": "allocate_masks.py" }, { "content": "# coding: utf-8\n\n# to run every hour between 0700 & 2100 using CRON\n# for instance using pythonanywhere\n# * 7-21 * * * /home/ubuntu/cron/testwater.py >/dev/null 2>&1\n\nimport datetime\nimport sys\nnow = datetime.datetime.now()\nprint(now.hour)\n\n#if (now.hour < 6) | (now.hour > 20): #UTC\n sys.exit()\n\n# CONFIG\naccount_sid = \"\"\nauth_token = \"\"\nfrom_nr = \"\" #+32...\n\n# Dans Twilio : Configuration de la réponse aux messages envoyés à ce numéro (TwiML bin)\n# <Response>\n# <Message>\n# Message de Poppy Alert. Utile? Parlez-en autour de vous.Plus utile? 
Envoyez NOALERT à ce numéro\n# </Message>\n# </Response>\n\n# END CONFIG\n\n#TODO\n# all messages before => redact\n# alternatively, send with facebook, telegram, etc\n# ensuite : toute la gestion, l'interface client, le GDPR, ...\n# critère de non envoi pourrait être plus\n\nimport requests\nimport bs4 as BeautifulSoup\nfrom tabulate import tabulate\nfrom operator import itemgetter\nimport datetime\nfrom twilio.rest import TwilioRestClient\n\nclient = TwilioRestClient(account_sid, auth_token) # ! version 5 !!! -> pip install twilio==5.7.0\nnewtable = []\n\n###########################################################\n#GET SUBSCRIBERS\n\n#EITHER MANUALLY\n#recipients = [['NUMBER', 'STATION']] # pourrait être pris des SMS envoyés à ce numéro // \n\n#OR BY CHECKING THE INCOMING MESSAGES\nrecipients = []\nmessages = client.messages.list(to_=from_nr, page_size=1000)\n\nsubscribers = {}\nfor message in reversed(messages):\n subscribers[message.from_] = message.body.upper()\n\nfor subscriber in subscribers:\n msg = subscribers[subscriber]\n if msg.find(\"SUBSCRIBE\") == 0:\n print(subscriber, msg)\n sta = msg.split(' ')\n if len(sta) > 1:\n recipients.append([subscriber, sta[1]])\n\nprint(recipients)\n#END GET SUBSCRIBERS\n###########################################################\n\nfor i in range(0,89):\n r = requests.get('http://voies-hydrauliques.wallonie.be/opencms/opencms/fr/hydro/Actuelle/crue/cruetableau.do?id=' + str(i))\n if r.status_code != 200:\n continue\n html = r.text\n soup = BeautifulSoup.BeautifulSoup(html, \"html.parser\")\n\n table = soup.find('table', {\"summary\":True})\n rows = table.findAll('tr')\n what = ''\n for row in rows:\n ch = row.find('th')\n if ch != None:\n ch = ch.find('strong')\n if ch != None:\n what = ch.text\n cells = row.findAll('td')\n\n if cells == None:\n continue\n\n newrow = [what]\n for cell in cells:\n if cell == None:\n continue;\n\n t = cell.text.strip()\n if t == None:\n continue\n if t == '':\n t = cell.find('img')\n if t == None:\n continue\n else:\n t = t.attrs['alt']\n if 'la fiche signal' in t:\n continue\n if t == '':\n continue\n\n newrow.append(t)\n newtable.append(newrow)\n\nmytable = []\nfor row in newtable:\n if len(row) < 2:\n continue\n if row in mytable:\n continue\n\n mytable.append(row)\n\nmytable=sorted(mytable, key=itemgetter(1))\n\nprint(tabulate(mytable))\n\nfor r in recipients:\n for e in mytable:\n if (e[1] == r[1]) & (e[3] != 'Normale'): # pour test : mettre == 'Normale'\n body=\"La station \" + e[1] + \" est en situation \" + e[3] + \". Infos via http://voies-hydrauliques.wallonie.be/opencms/opencms/fr/hydro/Actuelle/crue/index.html. Message de Poppy Alert. Utile? Parlez-en autour de vous ou aidez-nous via http://paypal.me/ccloquet. Plus utile? Envoyez NOALERT à ce numéro\"\n print (body)\n\n # si déjà reçu qqch hier ou aujourd'hui -> n'envoie rien\n # le test pourrait être plus intelligent\n today = datetime.date.today()\n messages = client.messages.list(to=r[0], from_=from_nr, date_sent=today)\n if len(messages) > 0:\n print('*')\n continue\n yesterday = datetime.date.fromordinal(datetime.date.today().toordinal()-1)\n messages = client.messages.list(to=r[0], from_=from_nr, date_sent=yesterday)\n if len(messages) > 0:\n print('*')\n continue\n print('sending SMS to ' + r[0])\n #send SMS\n message = client.messages.create(to=r[0], from_=from_nr, body=body)\n\n\n", "id": "10988383", "language": "Python", "matching_score": 0, "max_stars_count": 0, "path": "walloon_flood_alert.py" } ]
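# --- Editor's note: a hedged sketch (not in the original script) showing how the
# --- duplicated "already sent today or yesterday" check above could be factored into a
# --- helper. `already_notified` is a hypothetical name; the twilio calls mirror the
# --- twilio==5.7.0 API already used in the script.
import datetime

def already_notified(client, to_nr, from_nr):
    # True if an SMS was already sent to this recipient today or yesterday
    today = datetime.date.today()
    yesterday = datetime.date.fromordinal(today.toordinal() - 1)
    for day in (today, yesterday):
        if len(client.messages.list(to=to_nr, from_=from_nr, date_sent=day)) > 0:
            return True
    return False

# usage inside the send loop: `if already_notified(client, r[0], from_nr): continue`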
0
kawasaki-kento
[ { "content": "class Node(object):\n\n def __init__(self, value):\n self.value = value # 親ノードの要素\n self.children = [] # 子ノードの要素\n self.visited = False\n\n def __str__(self):\n return str(self.value)\n\n def add_child(self, child):\n self.children.append(child)\n\nclass ArrayConstructor(object):\n\n def __init__(self):\n self.tree = Node('_') # ルートノード\n\n def add(self, word):\n # 単語を追加\n self.build(self.tree, word)\n\n def build(self, node, word, depth=0):\n # 木を作成\n # 入力文字列のすべての要素をチェックしたなら処理を抜ける\n if(depth == len(word)):\n return\n\n for child in node.children:\n\n # すでに同じ要素(文字)のノードがあれば、再帰呼び出しする\n # 再帰呼び出しが実行された場合は、このレイヤーでノードの作成は行わない\n # 必ず次のレイヤー(再帰呼び出し先)以降でノードの作成が行われる\n if(child.value == word[depth]):\n self.build(child, word, depth+1)\n return\n\n # ノードの作成を行う\n child = Node(word[depth])\n node.add_child(child)\n self.build(child, word, depth+1)\n\n return\n\n def show(self):\n self.show_(self.tree)\n\n def show_(self, node, depth=0):\n print(\"{}{}\".format(' '*depth, node))\n for child in node.children:\n self.show_(child, depth+1)\n\n def dump(self):\n # trie木をbit配列にdumpする\n from collections import deque\n\n bit_array = [1, 0] # [1, 0] はルートノードを表す\n labels = ['_']\n\n # 幅優先探索でdumpする\n queue = deque()\n queue.append(self.tree)\n\n while(len(queue) != 0):\n node = queue.popleft()\n labels.append(node.value)\n\n bit_array += [1] * len(node.children) + [0]\n\n for child in node.children:\n child.visited = True\n queue.append(child)\n \n return bit_array, labels\n", "id": "11443765", "language": "Python", "matching_score": 2.1201603412628174, "max_stars_count": 1, "path": "constructor.py" }, { "content": "from constructor import ArrayConstructor\nfrom measure import MeasureMemory\nimport re\nimport array\n\nclass Trie(object):\n def __init__(self, words, unit_scale=8):\n bit_array, labels = self.create_tree(words)\n\n self.rank1 = self.get_rank(1)\n\n self.unit_scale = unit_scale\n self.split_list = BitVector(bit_array, self.unit_scale).split_array()\n\n self.zero_pos = [0]\n c = 1\n for i, v in enumerate(bit_array):\n if v == 0:\n self.zero_pos.append(i)\n c+=1\n\n self.zero_pos = array.array('I', self.zero_pos)\n\n self.bit_array = array.array('B',bit_array)\n self.labels = array.array('u',labels) \n\n # Trie木作成\n def create_tree(self, words):\n words = [word.lower() for word in words]\n words.sort()\n constructor = ArrayConstructor()\n for word in words:\n constructor.add(word)\n bit_array, labels = constructor.dump()\n \n return bit_array, labels\n\n\n def rank(self, position, target_bit):\n n = 0\n for bit in self.bit_array[:position+1]:\n if(bit == target_bit):\n n += 1\n return n\n\n def select0(self, n):\n return self.zero_pos[n]\n\n\n def sub_rank1(self, position):\n unit_num = int(position / self.unit_scale)\n n = self.split_list[unit_num-1]\n n+=sum(self.bit_array[unit_num * self.unit_scale : position+1])\n \n return n\n\n def get_rank(self, target_bit):\n return lambda position: self.rank(position, target_bit)\n \n # ノード探索\n def trace_children(self, current_node, character, cnt):\n # ビット列の先頭から見て、n 個目の 0 ビットの次の位置\n index = self.select0(current_node) + 1\n\n while(self.bit_array[index] == 1):\n # ビット列の先頭から位置 k までに、1 のビットがいくつあるかを返す\n if cnt == 0:\n node = self.rank1(index)\n else:\n node = self.sub_rank1(index)\n\n if(self.labels[node] == character):\n cnt=1\n return node, cnt\n \n index += 1\n return None, cnt\n\n # 単語検索\n def search(self, query):\n query = query.lower()\n cnt = 0\n node = 1\n for c in query:\n node, cnt = self.trace_children(node, c, cnt)\n if(node is None):\n 
return None\n return node\n\n # 子ノードのindexを取得\n def get_children(self, parent_node_seq):\n return [i for j in parent_node_seq for i in range(self.select0(int(j)), self.select0(int(j+1)))[1:]]\n\n\n # 検索ノード以下のwordをすべて取得する\n def get_below_nodes(self, node_list):\n below_nodes = []\n below_nodes.extend(node_list)\n cnt = 0\n \n # 子ノードが存在する限り実行\n while self.get_children(node_list) != []:\n tmp_list = [self.sub_rank1(i) for i in self.get_children(node_list)]\n below_nodes.extend(tmp_list)\n node_list = tmp_list\n cnt+=1\n\n return below_nodes\n \n\n# rank\nclass BitVector:\n \n def __init__(self, bit_array, unit_scale):\n self.bit_array = bit_array\n self.splited_array = None\n self.n = 0\n self.split_list = []\n self.unit_scale = unit_scale\n self.split_size = int(len(self.bit_array) / self.unit_scale)\n \n def rank(self, position, target_bit):\n n = 0\n for bit in self.splited_array[:position+1]:\n if(bit == target_bit):\n n += 1\n return n\n\n def get_rank(self, target_bit):\n return lambda position: self.rank(position, target_bit)\n \n def split_array(self):\n\n for i in range(self.split_size):\n \n if i == self.split_size-1:\n self.splited_array = self.bit_array[i*self.unit_scale:]\n rank1 = self.get_rank(1)\n else:\n self.splited_array = self.bit_array[i*self.unit_scale:(i+1)*self.unit_scale]\n rank1 = self.get_rank(1)\n\n self.n+=rank1(len(self.splited_array))\n self.split_list.append(self.n)\n\n self.split_list = array.array('I', self.split_list)\n\n return self.split_list", "id": "27118", "language": "Python", "matching_score": 1.306335687637329, "max_stars_count": 1, "path": "trie.py" }, { "content": "from trie import Trie\nfrom measure import MeasureTime, MeasureMemory\nfrom words import Words\nfrom nltk.corpus import wordnet\nimport numpy as np\nimport random\nimport time\nimport sys\n\n\n\n# 元辞書読み込み\no = Words(sys.argv[1]) # ./data/origin/wordnet_words.csv 元データ\nwords_dict = o.words_dict\nwords = o.words\n\n# テストデータ読み込み\nt = Words(sys.argv[2]) # ./data/test/wordnet_words_****.csv テストデータ\ntest_dict = t.words_dict\n\n# テスト回数\ntrial = int(sys.argv[3])\n# 分割ユニットサイズ\nunit_scale = 8\n\n# ランダムサンプリング サンプルサイズ(検索単語数)\n\nf = open(\"./results/result_of_\"+sys.argv[2].split('/')[3].split('.')[0]+\".txt\", 'w')\n\nprint(\"=========================== 使用データ ===========================\", file=f)\nprint(\"PATH:\",sys.argv[2], file=f)\nprint(\"サンプルサイズ:\", len(test_dict), file=f)\nprint(\"テスト回数:\",trial, file=f)\n\n# Trie木作成\nprint(\"=========================== 使用メモリ ===========================\", file=f)\ntrie = Trie(words, unit_scale)\n\nm = MeasureMemory()\n# bit配列のメモリ使用量、ラベル配列のメモリ使用量\nprint(\"bit_array:\", m.convert_bytes(m.compute_object_size(trie.bit_array)), \"labels:\", m.convert_bytes(m.compute_object_size(trie.labels)), file=f)\n\n# selectの使用メモリ\nprint(\"select:\", m.convert_bytes(m.compute_object_size(trie.zero_pos)), file=f)\n\n# rankの使用メモリ\nprint(\"rank:\", m.convert_bytes(m.compute_object_size(trie.split_list)), file=f)\n\n\n# 実行時間計測\nprint(\"============================ 実行時間 ============================\", file=f)\nexact_search = []\nprefix_search = []\n\n\n\n\nfor j in range(trial):\n \n # word検索の実行時間測定インスタンス作成\n es = MeasureTime(trie.search)\n # prefix検索の実行時間測定インスタンス作成\n pr = MeasureTime(trie.get_below_nodes)\n \n c = 0\n pr_c = 0\n \n # 検索を実行\n for answer, query in test_dict.items():\n # 単語検索\n node = es.exe_func(query)\n if node == answer:\n c+=1\n\n # プレフィックス検索\n pr_c+=len(pr.exe_func([node]))\n\n\n print(\"--\", j+1, \"回目\", file=f)\n print(\" ・完全一致検索:\", \" 
time:\"+str(round(es.exe_time, 4))+\"秒\", \" 検索件数:\"+\"{:,d}\".format(c)+\"件\", file=f)\n print(\" ・プレフィックス検索:\", \" time:\"+str(round(pr.exe_time, 4))+\"秒\", \" prefix件数:\"+\"{:,d}\".format(pr_c)+\"件\", file=f)\n exact_search.append(es.exe_time)\n prefix_search.append(pr.exe_time)\n \nprint(\"============================ 平均実行時間 ============================\", file=f)\nprint(\"完全一致検索:\", str(round(np.mean(exact_search), 4))+\"秒\", file=f)\nprint(\"プレフィックス検索:\", str(round(np.mean(prefix_search), 4))+\"秒\", file=f)\nprint(\"プレフィックス検索(1件当たり):\", str(np.mean(prefix_search)/pr_c)+\"秒\", file=f)\n\nf.close()\n\nprint(\"Test is done.\")", "id": "5173753", "language": "Python", "matching_score": 5.742883205413818, "max_stars_count": 1, "path": "test.py" }, { "content": "from trie import Trie\nfrom measure import MeasureTime, MeasureMemory\nfrom words import Words\nfrom nltk.corpus import wordnet\nimport sys\n\n\n# 辞書読み込み\nt = Words(sys.argv[1]) # ./data/origin/wordnet_words.csv\nwords_dict = t.words_dict\nwords = t.words\n\n# Trie木作成\nprint(\"=========================== 使用メモリ ===========================\")\ntrie = Trie(words)\n\nm = MeasureMemory()\n# bit配列のメモリ使用量、ラベル配列のメモリ使用量\nprint(\"bit_array:\", m.convert_bytes(m.compute_object_size(trie.bit_array)), \"labels:\", m.convert_bytes(m.compute_object_size(trie.labels)))\n\n# selectのメモリ使用量\nprint(\"select:\", m.convert_bytes(m.compute_object_size(trie.zero_pos)))\n\n# rankのメモリ使用量\nprint(\"rank:\", m.convert_bytes(m.compute_object_size(trie.split_list)))\n\nx = input(\"Input search word:\")\n\n# 単語検索\nprint(\"=========================== 単語検索 ===========================\")\nwhile True:\n # 完全一致検索の実行時間測定インスタンス作成\n es = MeasureTime(trie.search)\n # プレフィックス検索の実行時間測定インスタンス作成\n pr = MeasureTime(trie.get_below_nodes)\n\n if x in words:\n\n # ノード番号取得\n node = es.exe_func(x)\n\n # 単語に紐づく属性情報取得\n syns = wordnet.synsets(x)\n print(\"\\n\")\n print('------ 検索結果 ------')\n print(\"result:\", x)\n print(\"node_unmber:\", node)\n print(\"definition_of_result:\", syns[0].definition())\n print(\"Prediction candidate:\")\n\n # プレフィックス検索\n for i in pr.exe_func([node]):\n try:\n print(i, words_dict[i])\n except:\n pass\n\n print('------ 実行時間 ------')\n print(\"完全一致検索:\", es.exe_time, \"sec\")\n print(\"プレフィックス検索:\", pr.exe_time, \"sec\")\n \n x = \"This word is all nonsense.\"\n continue\n else:\n x = input(\"Input another words:\")\n", "id": "5228044", "language": "Python", "matching_score": 1.9311578273773193, "max_stars_count": 1, "path": "search_word.py" }, { "content": "from trie import Trie\nfrom nltk.corpus import wordnet\nimport random\nimport csv\nimport sys\n\nclass Words:\n # 辞書読み込み\n def __init__(self, dict_path):\n self.dict_path = dict_path # ./data/origin/wordnet_words.csv\n self.words_dict = None\n self.words = None\n\n with open(self.dict_path, \"r\", encoding=\"utf-8\") as f:\n data = f.read()\n\n self.words_dict = dict([(int(i.split(\",\")[0]), i.split(\",\")[1]) for i in data.split(\"\\n\") if i != \"\"])\n self.words = [i for i in self.words_dict.values()]\n\n # 計測用のテストデータ作成\n def create_test_words(self, size):\n test_dict = {}\n while len(test_dict.keys()) < size:\n w = random.choice(list(self.words_dict.items()))\n test_dict.setdefault(w[0], w[1])\n\n return test_dict\n\n\nclass CreateWords:\n # wordnetから辞書作成\n def __init__(self, dict_path):\n self.dict_path = dict_path # ./data/origin/wordnet_words.csv\n words = list(set([i.lemmas()[0].name() for i in wordnet.all_synsets()]))\n trie = Trie(words)\n with open(dict_path, \"w\", encoding=\"utf-8\", 
newline=\"\") as f:\n writer = csv.writer(f)\n for k in words:\n writer.writerow([trie.search(k), k])\n\n\n\nif __name__ == \"__main__\":\n\n # テストデータ作成\n t = Words(sys.argv[1])\n words_dict = t.words_dict\n words = t.words\n\n dict_path = \"./data/test/wordnet_words_\"\n for s in sys.argv[2].split(','):\n test_dict = t.create_test_words(int(s))\n with open(dict_path+str(s)+\".csv\", \"w\", encoding=\"utf-8\", newline=\"\") as f:\n writer = csv.writer(f)\n for k, v in test_dict.items():\n writer.writerow([k, v])\n\n print(\"Test data is created.\")", "id": "5245089", "language": "Python", "matching_score": 0.880823016166687, "max_stars_count": 1, "path": "words.py" } ]
1.931158
treggit
[ { "content": "import logging\nimport os\nfrom os.path import join, isfile\nfrom shutil import copy\nfrom time import sleep\n\nimport pytest\nfrom requests import ConnectionError\n\nscript_dir = os.path.dirname(__file__)\n\npytestmark = pytest.mark.xfail() # TODO delete this marker once those issues are fixed\n\n\n@pytest.fixture(scope=\"module\", autouse=True)\ndef certs():\n \"\"\"\n pytest fixture that provides cert and key files into the tmp_certs directory\n \"\"\"\n file_names = (\"web.nginx-proxy.crt\", \"web.nginx-proxy.key\")\n logging.info(\"copying server cert and key files into tmp_certs\")\n for f_name in file_names:\n copy(join(script_dir, \"certs\", f_name), join(script_dir, \"tmp_certs\"))\n yield\n logging.info(\"cleaning up the tmp_cert directory\")\n for f_name in file_names:\n if isfile(join(script_dir, \"tmp_certs\", f_name)):\n os.remove(join(script_dir, \"tmp_certs\", f_name))\n\n###############################################################################\n\n\ndef test_unknown_virtual_host_is_503(docker_compose, nginxproxy):\n r = nginxproxy.get(\"http://foo.nginx-proxy/\")\n assert r.status_code == 503\n\n\ndef test_http_web_is_301(docker_compose, nginxproxy):\n r = nginxproxy.get(\"http://web.nginx-proxy/port\", allow_redirects=False)\n assert r.status_code == 301\n\n\ndef test_https_web_is_200(docker_compose, nginxproxy):\n r = nginxproxy.get(\"https://web.nginx-proxy/port\")\n assert r.status_code == 200\n assert \"answer from port 81\\n\" in r.text\n\n\n@pytest.mark.incremental\ndef test_delete_cert_and_restart_reverseproxy(docker_compose):\n os.remove(join(script_dir, \"tmp_certs\", \"web.nginx-proxy.crt\"))\n docker_compose.containers.get(\"reverseproxy\").restart()\n sleep(3) # give time for the container to initialize\n assert \"running\" == docker_compose.containers.get(\"reverseproxy\").status\n\n\n@pytest.mark.incremental\ndef test_unknown_virtual_host_is_still_503(nginxproxy):\n r = nginxproxy.get(\"http://foo.nginx-proxy/\")\n assert r.status_code == 503\n\n\n@pytest.mark.incremental\ndef test_http_web_is_now_200(nginxproxy):\n r = nginxproxy.get(\"http://web.nginx-proxy/port\", allow_redirects=False)\n assert r.status_code == 200\n assert \"answer from port 81\\n\" == r.text\n\n\n@pytest.mark.incremental\ndef test_https_web_is_now_broken_since_there_is_no_cert(nginxproxy):\n with pytest.raises(ConnectionError):\n nginxproxy.get(\"https://web.nginx-proxy/port\")\n", "id": "2896358", "language": "Python", "matching_score": 2.4023563861846924, "max_stars_count": 2, "path": "test/stress_tests/test_deleted_cert/test_restart_while_missing_cert.py" }, { "content": "import os\nimport docker\nimport logging\nimport pytest\n\n\n@pytest.fixture(scope=\"module\")\ndef nginx_tmpl():\n \"\"\"\n pytest fixture which extracts the the nginx config template from\n the nginxproxy/nginx-proxy:test image\n \"\"\"\n script_dir = os.path.dirname(__file__)\n logging.info(\"extracting nginx.tmpl from nginxproxy/nginx-proxy:test\")\n docker_client = docker.from_env()\n print(\n docker_client.containers.run(\n image=\"nginxproxy/nginx-proxy:test\",\n remove=True,\n volumes=[\"{current_dir}:{current_dir}\".format(current_dir=script_dir)],\n entrypoint=\"sh\",\n command='-xc \"cp /app/nginx.tmpl {current_dir} && chmod 777 {current_dir}/nginx.tmpl\"'.format(\n current_dir=script_dir\n ),\n stderr=True,\n )\n )\n yield\n logging.info(\"removing nginx.tmpl\")\n os.remove(os.path.join(script_dir, \"nginx.tmpl\"))\n\n\ndef test_unknown_virtual_host_is_503(nginx_tmpl, 
docker_compose, nginxproxy):\n r = nginxproxy.get(\"http://unknown.nginx.container.docker/\")\n assert r.status_code == 503\n\n\ndef test_forwards_to_whoami(nginx_tmpl, docker_compose, nginxproxy):\n r = nginxproxy.get(\"http://whoami.nginx.container.docker/\")\n assert r.status_code == 200\n whoami_container = docker_compose.containers.get(\"whoami\")\n assert r.text == f\"I'm {whoami_container.id[:12]}\\n\"\n", "id": "3995854", "language": "Python", "matching_score": 1.4517561197280884, "max_stars_count": 12930, "path": "test/test_dockergen/test_dockergen_v2.py" }, { "content": "import contextlib\nimport logging\nimport os\nimport re\nimport shlex\nimport socket\nimport subprocess\nimport time\nfrom typing import List\n\nimport backoff\nimport docker\nimport pytest\nimport requests\nfrom _pytest._code.code import ReprExceptionInfo\nfrom docker.models.containers import Container\nfrom requests.packages.urllib3.util.connection import HAS_IPV6\n\nlogging.basicConfig(level=logging.INFO)\nlogging.getLogger('backoff').setLevel(logging.INFO)\nlogging.getLogger('DNS').setLevel(logging.DEBUG)\nlogging.getLogger('requests.packages.urllib3.connectionpool').setLevel(logging.WARN)\n\nCA_ROOT_CERTIFICATE = os.path.join(os.path.dirname(__file__), 'certs/ca-root.crt')\nI_AM_RUNNING_INSIDE_A_DOCKER_CONTAINER = os.path.isfile(\"/.dockerenv\")\nFORCE_CONTAINER_IPV6 = False # ugly global state to consider containers' IPv6 address instead of IPv4\n\n\ndocker_client = docker.from_env()\n\n\n###############################################################################\n# \n# utilities\n# \n###############################################################################\n\n@contextlib.contextmanager\ndef ipv6(force_ipv6=True):\n \"\"\"\n Meant to be used as a context manager to force IPv6 sockets:\n\n with ipv6():\n nginxproxy.get(\"http://something.nginx-proxy.local\") # force use of IPv6\n\n with ipv6(False):\n nginxproxy.get(\"http://something.nginx-proxy.local\") # legacy behavior\n\n\n \"\"\"\n global FORCE_CONTAINER_IPV6\n FORCE_CONTAINER_IPV6 = force_ipv6\n yield\n FORCE_CONTAINER_IPV6 = False\n\n\nclass requests_for_docker(object):\n \"\"\"\n Proxy for calling methods of the requests module. 
\n When an HTTP request fails with HTTP error 404 or 502, retry it a few times.\n Provides method `get_conf` to extract the nginx-proxy configuration content.\n \"\"\"\n def __init__(self):\n self.session = requests.Session()\n if os.path.isfile(CA_ROOT_CERTIFICATE):\n self.session.verify = CA_ROOT_CERTIFICATE\n\n @staticmethod\n def get_nginx_proxy_containers() -> List[Container]:\n \"\"\"\n Return the list of running nginx-proxy test containers\n \"\"\"\n nginx_proxy_containers = docker_client.containers.list(filters={\"ancestor\": \"nginxproxy/nginx-proxy:test\"})\n if len(nginx_proxy_containers) > 1:\n pytest.fail(\"Too many running nginxproxy/nginx-proxy:test containers\", pytrace=False)\n elif len(nginx_proxy_containers) == 0:\n pytest.fail(\"No running nginxproxy/nginx-proxy:test container\", pytrace=False)\n return nginx_proxy_containers\n\n def get_conf(self):\n \"\"\"\n Return the nginx config file\n \"\"\"\n nginx_proxy_containers = self.get_nginx_proxy_containers()\n return get_nginx_conf_from_container(nginx_proxy_containers[0])\n\n def get_ip(self) -> str:\n \"\"\"\n Return the nginx container ip address\n \"\"\"\n nginx_proxy_containers = self.get_nginx_proxy_containers()\n return container_ip(nginx_proxy_containers[0])\n\n def get(self, *args, **kwargs):\n with ipv6(kwargs.pop('ipv6', False)):\n @backoff.on_predicate(backoff.constant, lambda r: r.status_code in (404, 502), interval=.3, max_tries=30, jitter=None)\n def _get(*args, **kwargs):\n return self.session.get(*args, **kwargs)\n return _get(*args, **kwargs)\n\n def post(self, *args, **kwargs):\n with ipv6(kwargs.pop('ipv6', False)):\n @backoff.on_predicate(backoff.constant, lambda r: r.status_code in (404, 502), interval=.3, max_tries=30, jitter=None)\n def _post(*args, **kwargs):\n return self.session.post(*args, **kwargs)\n return _post(*args, **kwargs)\n\n def put(self, *args, **kwargs):\n with ipv6(kwargs.pop('ipv6', False)):\n @backoff.on_predicate(backoff.constant, lambda r: r.status_code in (404, 502), interval=.3, max_tries=30, jitter=None)\n def _put(*args, **kwargs):\n return self.session.put(*args, **kwargs)\n return _put(*args, **kwargs)\n\n def head(self, *args, **kwargs):\n with ipv6(kwargs.pop('ipv6', False)):\n @backoff.on_predicate(backoff.constant, lambda r: r.status_code in (404, 502), interval=.3, max_tries=30, jitter=None)\n def _head(*args, **kwargs):\n return self.session.head(*args, **kwargs)\n return _head(*args, **kwargs)\n\n def delete(self, *args, **kwargs):\n with ipv6(kwargs.pop('ipv6', False)):\n @backoff.on_predicate(backoff.constant, lambda r: r.status_code in (404, 502), interval=.3, max_tries=30, jitter=None)\n def _delete(*args, **kwargs):\n return self.session.delete(*args, **kwargs)\n return _delete(*args, **kwargs)\n\n def options(self, *args, **kwargs):\n with ipv6(kwargs.pop('ipv6', False)):\n @backoff.on_predicate(backoff.constant, lambda r: r.status_code in (404, 502), interval=.3, max_tries=30, jitter=None)\n def _options(*args, **kwargs):\n return self.session.options(*args, **kwargs)\n return _options(*args, **kwargs)\n\n def __getattr__(self, name):\n return getattr(requests, name)\n\n\ndef container_ip(container: Container):\n \"\"\"\n return the IP address of a container.\n\n If the global FORCE_CONTAINER_IPV6 flag is set, return the IPv6 address\n \"\"\"\n global FORCE_CONTAINER_IPV6\n if FORCE_CONTAINER_IPV6:\n if not HAS_IPV6:\n pytest.skip(\"This system does not support IPv6\")\n ip = container_ipv6(container)\n if ip == '':\n pytest.skip(f\"Container {container.name} has no IPv6 address\")\n else:\n return ip\n else:\n net_info = container.attrs[\"NetworkSettings\"][\"Networks\"]\n if \"bridge\" in net_info:\n return net_info[\"bridge\"][\"IPAddress\"]\n\n # not default bridge network, fallback on first network defined\n network_name = list(net_info.keys())[0]\n return net_info[network_name][\"IPAddress\"]\n\n\ndef container_ipv6(container):\n \"\"\"\n return the IPv6 address of a container.\n \"\"\"\n net_info = container.attrs[\"NetworkSettings\"][\"Networks\"]\n if \"bridge\" in net_info:\n return net_info[\"bridge\"][\"GlobalIPv6Address\"]\n\n # not default bridge network, fallback on first network defined\n network_name = list(net_info.keys())[0]\n return net_info[network_name][\"GlobalIPv6Address\"]\n\n\ndef nginx_proxy_dns_resolver(domain_name):\n \"\"\"\n if \"nginx-proxy\" is found in the host name, return the ip address of the docker container\n created from the docker image nginxproxy/nginx-proxy:test.\n\n :return: IP or None\n \"\"\"\n log = logging.getLogger('DNS')\n log.debug(f\"nginx_proxy_dns_resolver({domain_name!r})\")\n if 'nginx-proxy' in domain_name:\n nginxproxy_containers = docker_client.containers.list(filters={\"status\": \"running\", \"ancestor\": \"nginxproxy/nginx-proxy:test\"})\n if len(nginxproxy_containers) == 0:\n log.warn(f\"no container found from image nginxproxy/nginx-proxy:test while resolving {domain_name!r}\")\n return\n nginxproxy_container = nginxproxy_containers[0]\n ip = container_ip(nginxproxy_container)\n log.info(f\"resolving domain name {domain_name!r} as IP address {ip} of nginx-proxy container {nginxproxy_container.name}\")\n return ip\n\ndef docker_container_dns_resolver(domain_name):\n \"\"\"\n if domain name is of the form \"XXX.container.docker\" or \"anything.XXX.container.docker\", return the ip address of the docker container\n named XXX.\n\n :return: IP or None\n \"\"\"\n log = logging.getLogger('DNS')\n log.debug(f\"docker_container_dns_resolver({domain_name!r})\")\n\n match = re.search(r'(^|.+\\.)(?P<container>[^.]+)\\.container\\.docker$', domain_name)\n if not match:\n log.debug(f\"{domain_name!r} does not match\")\n return\n\n container_name = match.group('container')\n log.debug(f\"looking for container {container_name!r}\")\n try:\n container = docker_client.containers.get(container_name)\n except docker.errors.NotFound:\n log.warn(f\"container named {container_name!r} not found while resolving {domain_name!r}\")\n return\n log.debug(f\"container {container.name!r} found ({container.short_id})\")\n\n ip = container_ip(container)\n log.info(f\"resolving domain name {domain_name!r} as IP address {ip} of container {container.name}\")\n return ip \n\n\ndef monkey_patch_urllib_dns_resolver():\n \"\"\"\n Alter the behavior of the urllib DNS resolver so that any domain name\n containing substring 'nginx-proxy' will resolve to the IP address\n of the container created from image 'nginxproxy/nginx-proxy:test'.\n \"\"\"\n prv_getaddrinfo = socket.getaddrinfo\n dns_cache = {}\n def new_getaddrinfo(*args):\n logging.getLogger('DNS').debug(f\"resolving domain name {repr(args)}\")\n _args = list(args)\n\n # custom DNS resolvers\n ip = nginx_proxy_dns_resolver(args[0])\n if ip is None:\n ip = docker_container_dns_resolver(args[0])\n if ip is not None:\n _args[0] = ip\n\n # call the original DNS resolver, with the original host possibly replaced by the resolved IP address\n try:\n return dns_cache[tuple(_args)]\n except KeyError:\n res = prv_getaddrinfo(*_args)\n dns_cache[tuple(_args)] = res\n return res\n socket.getaddrinfo = new_getaddrinfo\n return prv_getaddrinfo\n\ndef restore_urllib_dns_resolver(getaddrinfo_func):\n socket.getaddrinfo = getaddrinfo_func\n\n\ndef remove_all_containers():\n for container in docker_client.containers.list(all=True):\n if I_AM_RUNNING_INSIDE_A_DOCKER_CONTAINER and container.id.startswith(socket.gethostname()):\n continue # pytest is running within a Docker container, so we do not want to remove that particular container\n logging.info(f\"removing container {container.name}\")\n container.remove(v=True, force=True)\n\n\ndef get_nginx_conf_from_container(container):\n \"\"\"\n return the nginx /etc/nginx/conf.d/default.conf file content from a container\n \"\"\"\n import tarfile\n from io import BytesIO\n\n strm_generator, stat = container.get_archive('/etc/nginx/conf.d/default.conf')\n strm_fileobj = BytesIO(b\"\".join(strm_generator))\n\n with tarfile.open(fileobj=strm_fileobj) as tf:\n conffile = tf.extractfile('default.conf')\n return conffile.read()\n\n\ndef docker_compose_up(compose_file='docker-compose.yml'):\n logging.info(f'docker-compose -f {compose_file} up -d')\n try:\n subprocess.check_output(shlex.split(f'docker-compose -f {compose_file} up -d'), stderr=subprocess.STDOUT)\n except subprocess.CalledProcessError as e:\n pytest.fail(f\"Error while running 'docker-compose -f {compose_file} up -d':\\n{e.output}\", pytrace=False)\n\n\ndef docker_compose_down(compose_file='docker-compose.yml'):\n logging.info(f'docker-compose -f {compose_file} down')\n try:\n subprocess.check_output(shlex.split(f'docker-compose -f {compose_file} down'), stderr=subprocess.STDOUT)\n except subprocess.CalledProcessError as e:\n pytest.fail(f\"Error while running 'docker-compose -f {compose_file} down':\\n{e.output}\", pytrace=False)\n\n\ndef wait_for_nginxproxy_to_be_ready():\n \"\"\"\n If one (and only one) container started from image nginxproxy/nginx-proxy:test is found, \n wait for its log to contain substring \"Watching docker events\"\n \"\"\"\n containers = docker_client.containers.list(filters={\"ancestor\": \"nginxproxy/nginx-proxy:test\"})\n if len(containers) != 1:\n return\n container = containers[0]\n for line in container.logs(stream=True):\n if b\"Watching docker events\" in line:\n logging.debug(\"nginx-proxy ready\")\n break\n\ndef find_docker_compose_file(request):\n \"\"\"\n helper for fixture functions to figure out the name of the docker-compose file to consider.\n\n - if the test module provides a `docker_compose_file` variable, take that\n - else, if a yaml file exists with the same name as the test module (but for the `.yml` extension), use that\n - otherwise use `docker-compose.yml`.\n \"\"\"\n test_module_dir = os.path.dirname(request.module.__file__)\n yml_file = os.path.join(test_module_dir, request.module.__name__ + '.yml')\n yaml_file = os.path.join(test_module_dir, request.module.__name__ + '.yaml')\n default_file = os.path.join(test_module_dir, 'docker-compose.yml')\n\n docker_compose_file_module_variable = getattr(request.module, \"docker_compose_file\", None)\n if docker_compose_file_module_variable is not None:\n docker_compose_file = os.path.join( test_module_dir, docker_compose_file_module_variable)\n if not os.path.isfile(docker_compose_file):\n raise ValueError(f\"docker compose file {docker_compose_file!r} could not be found. 
Check your test module `docker_compose_file` variable value.\")\n else:\n if os.path.isfile(yml_file):\n docker_compose_file = yml_file\n elif os.path.isfile(yaml_file):\n docker_compose_file = yaml_file\n else:\n docker_compose_file = default_file\n\n if not os.path.isfile(docker_compose_file):\n logging.error(\"Could not find any docker-compose file named either '{0}.yml', '{0}.yaml' or 'docker-compose.yml'\".format(request.module.__name__))\n\n logging.debug(f\"using docker compose file {docker_compose_file}\")\n return docker_compose_file\n\n\ndef connect_to_network(network):\n \"\"\"\n If we are running from a container, connect our container to the given network\n\n :return: the name of the network we were connected to, or None\n \"\"\"\n if I_AM_RUNNING_INSIDE_A_DOCKER_CONTAINER:\n try:\n my_container = docker_client.containers.get(socket.gethostname())\n except docker.errors.NotFound:\n logging.warn(f\"container {socket.gethostname()!r} not found\")\n return\n\n # figure out our container networks\n my_networks = list(my_container.attrs[\"NetworkSettings\"][\"Networks\"].keys())\n\n # make sure our container is connected to the nginx-proxy's network\n if network not in my_networks:\n logging.info(f\"Connecting to docker network: {network.name}\")\n network.connect(my_container)\n return network\n\n\ndef disconnect_from_network(network=None):\n \"\"\"\n If we are running from a container, disconnect our container from the given network.\n\n :param network: name of a docker network to disconnect from\n \"\"\"\n if I_AM_RUNNING_INSIDE_A_DOCKER_CONTAINER and network is not None:\n try:\n my_container = docker_client.containers.get(socket.gethostname())\n except docker.errors.NotFound:\n logging.warn(f\"container {socket.gethostname()!r} not found\")\n return\n\n # figure out our container networks\n my_networks_names = list(my_container.attrs[\"NetworkSettings\"][\"Networks\"].keys())\n\n # disconnect our container from the given network\n if network.name in my_networks_names:\n logging.info(f\"Disconnecting from network {network.name}\")\n network.disconnect(my_container)\n\n\ndef connect_to_all_networks():\n \"\"\"\n If we are running from a container, connect our container to all current docker networks.\n\n :return: a list of networks we connected to\n \"\"\"\n if not I_AM_RUNNING_INSIDE_A_DOCKER_CONTAINER:\n return []\n else:\n # find the list of docker networks\n networks = [network for network in docker_client.networks.list() if len(network.containers) > 0 and network.name != 'bridge']\n return [connect_to_network(network) for network in networks]\n\n\n###############################################################################\n# \n# Py.test fixtures\n# \n###############################################################################\n\n@pytest.fixture(scope=\"module\")\ndef docker_compose(request):\n \"\"\"\n pytest fixture providing containers described in a docker compose file. 
After the tests, remove the created containers\n \n A custom docker compose file name can be defined in a variable named `docker_compose_file`.\n \n Also, in the case where pytest is running from a docker container, this fixture makes sure\n our container will be attached to all the docker networks.\n \"\"\"\n docker_compose_file = find_docker_compose_file(request)\n original_dns_resolver = monkey_patch_urllib_dns_resolver()\n remove_all_containers()\n docker_compose_up(docker_compose_file)\n networks = connect_to_all_networks()\n wait_for_nginxproxy_to_be_ready()\n time.sleep(3) # give the containers time to be ready\n yield docker_client\n for network in networks:\n disconnect_from_network(network)\n docker_compose_down(docker_compose_file)\n restore_urllib_dns_resolver(original_dns_resolver)\n\n\n@pytest.fixture()\ndef nginxproxy():\n \"\"\"\n Provides the `nginxproxy` object that can be used in the same way the requests module is:\n\n r = nginxproxy.get(\"http://foo.com\")\n\n The difference is that in case an HTTP request has status code 404 or 502 (which mostly\n indicates that nginx has just reloaded), we retry the query up to 30 times.\n\n Also, the nginxproxy methods accept an additional keyword parameter: `ipv6` which forces requests\n made against containers to use the container's IPv6 address when set to `True`. If IPv6 is not\n supported by the system or docker, that particular test will be skipped.\n \"\"\"\n yield requests_for_docker()\n\n\n###############################################################################\n# \n# Py.test hooks\n# \n###############################################################################\n\n# pytest hook to display additional stuff in test report\ndef pytest_runtest_logreport(report):\n if report.failed:\n if isinstance(report.longrepr, ReprExceptionInfo):\n test_containers = docker_client.containers.list(all=True, filters={\"ancestor\": \"nginxproxy/nginx-proxy:test\"})\n for container in test_containers:\n report.longrepr.addsection('nginx-proxy logs', container.logs())\n report.longrepr.addsection('nginx-proxy conf', get_nginx_conf_from_container(container))\n\n\n# Py.test `incremental` marker, see http://stackoverflow.com/a/12579625/107049\ndef pytest_runtest_makereport(item, call):\n if \"incremental\" in item.keywords:\n if call.excinfo is not None:\n parent = item.parent\n parent._previousfailed = item\n\n\ndef pytest_runtest_setup(item):\n previousfailed = getattr(item.parent, \"_previousfailed\", None)\n if previousfailed is not None:\n pytest.xfail(f\"previous test failed ({previousfailed.name})\")\n\n###############################################################################\n# \n# Check requirements\n# \n###############################################################################\n\ntry:\n docker_client.images.get('nginxproxy/nginx-proxy:test')\nexcept docker.errors.ImageNotFound:\n pytest.exit(\"The docker image 'nginxproxy/nginx-proxy:test' is missing\")\n\nif docker.__version__ != \"4.4.4\":\n pytest.exit(\"This test suite is meant to work with the python docker module v4.4.4\")\n", "id": "595036", "language": "Python", "matching_score": 3.908743381500244, "max_stars_count": 2, "path": "test/conftest.py" }, { "content": "import re\nimport subprocess\n\nimport backoff\nimport docker\nimport pytest\n\ndocker_client = docker.from_env()\n\n\n###############################################################################\n#\n# Tests 
helpers\n#\n###############################################################################\n\n@backoff.on_exception(backoff.constant, AssertionError, interval=2, max_tries=15, jitter=None)\ndef assert_log_contains(expected_log_line):\n \"\"\"\n Check that the nginx-proxy container log contains a given string.\n The backoff decorator will retry the check 15 times with a 2-second delay.\n\n :param expected_log_line: string to search for\n :return: None\n :raises: AssertionError if the expected string is not found in the log\n \"\"\"\n sut_container = docker_client.containers.get(\"nginxproxy\")\n docker_logs = sut_container.logs(stdout=True, stderr=True, stream=False, follow=False)\n assert bytes(expected_log_line, encoding=\"utf8\") in docker_logs\n\n\ndef require_openssl(required_version):\n \"\"\"\n This function checks that the required version of OpenSSL is present, and skips the test if not.\n Use it as a test function decorator:\n\n @require_openssl(\"2.3.4\")\n def test_something():\n ...\n\n :param required_version: minimal required version as a string: \"1.2.3\"\n \"\"\"\n\n def versiontuple(v):\n clean_v = re.sub(r\"[^\\d\\.]\", \"\", v)\n return tuple(map(int, (clean_v.split(\".\"))))\n\n try:\n command_output = subprocess.check_output([\"openssl\", \"version\"])\n except OSError:\n return pytest.mark.skip(\"openssl command is not available in test environment\")\n else:\n if not command_output:\n raise Exception(\"Could not get openssl version\")\n openssl_version = str(command_output.split()[1])\n return pytest.mark.skipif(\n versiontuple(openssl_version) < versiontuple(required_version),\n reason=f\"openssl v{openssl_version} is less than required version {required_version}\")\n\n\n###############################################################################\n#\n# Tests\n#\n###############################################################################\n\ndef test_dhparam_is_not_generated_if_present(docker_compose):\n sut_container = docker_client.containers.get(\"nginxproxy\")\n assert sut_container.status == \"running\"\n\n assert_log_contains(\"Custom dhparam.pem file found, generation skipped\")\n\n # Make sure the dhparam in use is not the default, pre-generated one\n default_checksum = sut_container.exec_run(\"md5sum /app/dhparam.pem.default\").output.split()\n current_checksum = sut_container.exec_run(\"md5sum /etc/nginx/dhparam/dhparam.pem\").output.split()\n assert default_checksum[0] != current_checksum[0]\n\n\ndef test_web5_https_works(docker_compose, nginxproxy):\n r = nginxproxy.get(\"https://web5.nginx-proxy.tld/port\", allow_redirects=False)\n assert r.status_code == 200\n assert \"answer from port 85\\n\" in r.text\n\n\n@require_openssl(\"1.0.2\")\ndef test_web5_dhparam_is_used(docker_compose):\n sut_container = docker_client.containers.get(\"nginxproxy\")\n assert sut_container.status == \"running\"\n\n host = f\"{sut_container.attrs['NetworkSettings']['IPAddress']}:443\"\n r = subprocess.check_output(\n f\"echo '' | openssl s_client -connect {host} -cipher 'EDH' | grep 'Server Temp Key'\", shell=True)\n assert b\"Server Temp Key: X25519, 253 bits\\n\" == r\n", "id": "2531102", "language": "Python", "matching_score": 0.899623453617096, "max_stars_count": 3630, "path": "test/test_ssl/test_dhparam.py" }, { "content": "import pytest\n\n\n@pytest.mark.parametrize(\"subdomain\", [\"foo\", \"bar\"])\ndef test_web1_http_redirects_to_https(docker_compose, nginxproxy, subdomain):\n r = nginxproxy.get(f\"http://{subdomain}.nginx-proxy.tld/\", allow_redirects=False)\n 
assert r.status_code == 301\n assert \"Location\" in r.headers\n assert f\"https://{subdomain}.nginx-proxy.tld/\" == r.headers['Location']\n\n\n@pytest.mark.parametrize(\"subdomain\", [\"foo\", \"bar\"])\ndef test_web1_https_is_forwarded(docker_compose, nginxproxy, subdomain):\n r = nginxproxy.get(f\"https://{subdomain}.nginx-proxy.tld/port\", allow_redirects=False)\n assert r.status_code == 200\n assert \"answer from port 81\\n\" in r.text\n\n\n@pytest.mark.parametrize(\"subdomain\", [\"foo\", \"bar\"])\ndef test_web1_HSTS_policy_is_active(docker_compose, nginxproxy, subdomain):\n r = nginxproxy.get(f\"https://{subdomain}.nginx-proxy.tld/port\", allow_redirects=False)\n assert \"answer from port 81\\n\" in r.text\n assert \"Strict-Transport-Security\" in r.headers\n", "id": "4538707", "language": "Python", "matching_score": 3.2897915840148926, "max_stars_count": 12930, "path": "test/test_ssl/test_wildcard.py" }, { "content": "import pytest\n\n\ndef test_web1_HSTS_default(docker_compose, nginxproxy):\n r = nginxproxy.get(\"https://web1.nginx-proxy.tld/port\", allow_redirects=False)\n assert \"answer from port 81\\n\" in r.text\n assert \"Strict-Transport-Security\" in r.headers\n assert \"max-age=31536000\" == r.headers[\"Strict-Transport-Security\"]\n\n# Regression test to ensure HSTS is enabled even when the upstream sends an error in response\n# Issue #1073 https://github.com/nginx-proxy/nginx-proxy/pull/1073\ndef test_web1_HSTS_error(docker_compose, nginxproxy):\n r = nginxproxy.get(\"https://web1.nginx-proxy.tld/status/500\", allow_redirects=False)\n assert \"Strict-Transport-Security\" in r.headers\n assert \"max-age=31536000\" == r.headers[\"Strict-Transport-Security\"]\n\ndef test_web2_HSTS_off(docker_compose, nginxproxy):\n r = nginxproxy.get(\"https://web2.nginx-proxy.tld/port\", allow_redirects=False)\n assert \"answer from port 81\\n\" in r.text\n assert \"Strict-Transport-Security\" not in r.headers\n\ndef test_web3_HSTS_custom(docker_compose, nginxproxy):\n r = nginxproxy.get(\"https://web3.nginx-proxy.tld/port\", allow_redirects=False)\n assert \"answer from port 81\\n\" in r.text\n assert \"Strict-Transport-Security\" in r.headers\n assert \"max-age=86400; includeSubDomains; preload\" == r.headers[\"Strict-Transport-Security\"]\n\n# Regression test for issue 1080\n# https://github.com/nginx-proxy/nginx-proxy/issues/1080\ndef test_web4_HSTS_off_noredirect(docker_compose, nginxproxy):\n r = nginxproxy.get(\"https://web4.nginx-proxy.tld/port\", allow_redirects=False)\n assert \"answer from port 81\\n\" in r.text\n assert \"Strict-Transport-Security\" not in r.headers\n", "id": "5792767", "language": "Python", "matching_score": 0.33902376890182495, "max_stars_count": 12930, "path": "test/test_ssl/test_hsts.py" }, { "content": "import pytest\nimport re\n\ndef test_debug_info_is_present_in_nginx_generated_conf(docker_compose, nginxproxy):\n conf = nginxproxy.get_conf().decode('ASCII')\n assert re.search(r\"# Exposed ports: \\[\\{\\d+\\.\\d+\\.\\d+\\.\\d+\\s+80\\s+tcp \\} \\{\\d+\\.\\d+\\.\\d+\\.\\d+\\s+81\\s+tcp \\}\\]\", conf) or \\\n re.search(r\"# Exposed ports: \\[\\{\\d+\\.\\d+\\.\\d+\\.\\d+\\s+81\\s+tcp \\} \\{\\d+\\.\\d+\\.\\d+\\.\\d+\\s+80\\s+tcp \\}\\]\", conf)\n assert re.search(r\"# Exposed ports: \\[\\{\\d+\\.\\d+\\.\\d+\\.\\d+\\s+82\\s+tcp \\} \\{\\d+\\.\\d+\\.\\d+\\.\\d+\\s+83\\s+tcp \\}\\]\", conf) or \\\n re.search(r\"# Exposed ports: \\[\\{\\d+\\.\\d+\\.\\d+\\.\\d+\\s+83\\s+tcp \\} \\{\\d+\\.\\d+\\.\\d+\\.\\d+\\s+82\\s+tcp \\}\\]\", conf)\n assert \"# Default 
virtual port: 80\" in conf\n assert \"# VIRTUAL_PORT: 82\" in conf\n assert conf.count(\"# /!\\ Virtual port not exposed\") == 1\n", "id": "5150560", "language": "Python", "matching_score": 2.701547384262085, "max_stars_count": 1, "path": "test/test_debug/test_proxy-debug-flag.py" }, { "content": "import pytest\nimport re\n\n\ndef test_answer_is_served_from_virtual_port_which_is_ureachable(docker_compose, nginxproxy):\n r = nginxproxy.get(\"http://web.nginx-proxy.tld/port\")\n assert r.status_code == 502\n assert re.search(r\"\\n\\s+server \\d+\\.\\d+\\.\\d+\\.\\d+:90;\\n\", nginxproxy.get_conf().decode('ASCII'))\n", "id": "8113380", "language": "Python", "matching_score": 1.2939693927764893, "max_stars_count": 12930, "path": "test/test_multiple-ports/test_VIRTUAL_PORT-single-different-from-single-port.py" }, { "content": "#!/usr/bin/env python3\n\nimport os, sys, re\nimport http.server\nimport socketserver\n\nclass Handler(http.server.SimpleHTTPRequestHandler):\n def do_GET(self):\n\n response_body = \"\"\n response_code = 200\n\n if self.path == \"/headers\":\n response_body += self.headers.as_string()\n elif self.path == \"/port\":\n response_body += f\"answer from port {PORT}\\n\"\n elif re.match(\"/status/(\\d+)\", self.path):\n result = re.match(\"/status/(\\d+)\", self.path)\n response_code = int(result.group(1))\n response_body += f\"answer with response code {response_code}\\n\"\n elif self.path == \"/\":\n response_body += f\"I'm {os.environ['HOSTNAME']}\\n\"\n else:\n response_body += \"No route for this path!\\n\"\n response_code = 404\n\n self.send_response(response_code)\n self.send_header(\"Content-Type\", \"text/plain\")\n self.end_headers()\n\n if (len(response_body)):\n self.wfile.write(response_body.encode())\n\nif __name__ == '__main__':\n PORT = int(sys.argv[1])\n socketserver.TCPServer.allow_reuse_address = True\n httpd = socketserver.TCPServer(('0.0.0.0', PORT), Handler)\n httpd.serve_forever()\n", "id": "6838461", "language": "Python", "matching_score": 0.823605477809906, "max_stars_count": 12930, "path": "test/requirements/web/webserver.py" } ]
1.451756
changone
[ { "content": "# encoding=utf-8\n\nfrom flask import Flask, render_template\nfrom flask_bootstrap import Bootstrap\nfrom forms.LoginForm import LoginForm\n\napp = Flask(__name__, template_folder='templates')\nbootstrap = Bootstrap(app)\napp.config['SECRET_KEY'] = 'hardtoguessstring'\n# @app.before_request\n# def before_request():\n# print 'before_request'\n\n\n@app.route('/')\ndef index():\n return render_template('base.html')\n\n\n@app.route('/home/<name>')\ndef home(name):\n return render_template('user.html', name=name)\n\n\n@app.route('/home/test')\ndef test():\n return render_template('test.html')\n\n@app.route('/login', methods=['GET', 'POST'])\ndef login():\n form = LoginForm()\n # if form.validate_on_submit():\n # user = User.query.filter_by(email=form.email.data).first()\n # if user is not None and user.verify_password(form.password.data):\n # login_user(user, form.remember_me.data)\n # return redirect(request.args.get('next') or url_for('main.index'))\n # flash('Invalid username or password.')\n return render_template('auth/index.html', form=form)\n\n\n# @app.after_request\n# def after_request(response):\n# print 'after_request'\n# return response\n\n\nif __name__ == '__main__':\n app.run(debug=True, port=5001)\n", "id": "8998839", "language": "Python", "matching_score": 0, "max_stars_count": 0, "path": "app.py" } ]
0
glebpom
[ { "content": "import os\nimport sys\nfrom redisgraph import Graph, Node, Edge\n\nimport redis\nsys.path.append(os.path.join(os.path.dirname(__file__), '..'))\n\nfrom base import FlowTestsBase\n\nredis_graph = None\n\nclass testQueryValidationFlow(FlowTestsBase):\n\n def __init__(self):\n super(testQueryValidationFlow, self).__init__()\n global redis_graph\n redis_con = self.env.getConnection()\n redis_graph = Graph(\"G\", redis_con)\n self.populate_graph()\n \n def populate_graph(self):\n # Create a single graph.\n global redis_graph\n node = Node(properties={\"age\": 34})\n redis_graph.add_node(node)\n redis_graph.commit()\n\n # Expect an error when trying to use a function which does not exists.\n def test01_none_existing_function(self):\n query = \"\"\"MATCH (n) RETURN noneExistingFunc(n.age) AS cast\"\"\"\n try:\n redis_graph.query(query)\n self.env.assertTrue(False)\n except redis.exceptions.ResponseError:\n # Expecting an error.\n pass\n\n # Make sure function validation is type case insensitive.\n def test02_case_insensitive_function_name(self):\n try:\n query = \"\"\"MATCH (n) RETURN mAx(n.age)\"\"\"\n redis_graph.query(query)\n except redis.exceptions.ResponseError:\n # function validation should be case insensitive.\n self.env.assertTrue(False)\n \n def test03_edge_missing_relation_type(self):\n try:\n query = \"\"\"CREATE (n:Person {age:32})-[]->(:person {age:30})\"\"\"\n redis_graph.query(query)\n self.env.assertTrue(False)\n except redis.exceptions.ResponseError:\n # Expecting an error.\n pass\n\n def test04_escaped_quotes(self):\n query = r\"CREATE (:escaped{prop1:'single \\' char', prop2: 'double \\\" char', prop3: 'mixed \\' and \\\" chars'})\"\n actual_result = redis_graph.query(query)\n self.env.assertEquals(actual_result.nodes_created, 1)\n self.env.assertEquals(actual_result.properties_set, 3)\n\n query = r\"MATCH (a:escaped) RETURN a.prop1, a.prop2, a.prop3\"\n actual_result = redis_graph.query(query)\n expected_result = [[\"single ' char\", 'double \" char', 'mixed \\' and \" chars']]\n self.env.assertEquals(actual_result.result_set, expected_result)\n\n def test05_invalid_entity_references(self):\n try:\n query = \"\"\"MATCH (a) RETURN e\"\"\"\n redis_graph.query(query)\n assert(False)\n except redis.exceptions.ResponseError:\n # Expecting an error.\n pass\n\n try:\n query = \"\"\"MATCH (a) RETURN a ORDER BY e\"\"\"\n redis_graph.query(query)\n assert(False)\n except redis.exceptions.ResponseError:\n # Expecting an error.\n pass\n", "id": "10040643", "language": "Python", "matching_score": 3.56632399559021, "max_stars_count": 0, "path": "tests/flow/test_query_validation.py" }, { "content": "import os\nimport sys\nfrom redisgraph import Graph, Node, Edge\nfrom base import FlowTestsBase\n\nredis_graph = None\n\nclass testGraphMergeFlow(FlowTestsBase):\n def __init__(self):\n super(testGraphMergeFlow, self).__init__()\n global redis_graph\n redis_con = self.env.getConnection()\n redis_graph = Graph(\"G\", redis_con)\n\n # Create a single node without any labels or properties.\n def test01_single_node_with_label(self):\n global redis_graph\n query = \"\"\"MERGE (robert:Critic)\"\"\"\n result = redis_graph.query(query)\n self.env.assertEquals(result.labels_added, 1)\n self.env.assertEquals(result.nodes_created, 1)\n self.env.assertEquals(result.properties_set, 0)\n\n # Retry to create an existing entity.\n def test02_existing_single_node_with_label(self):\n global redis_graph\n query = \"\"\"MERGE (robert:Critic)\"\"\"\n result = redis_graph.query(query)\n 
self.env.assertEquals(result.labels_added, 0)\n self.env.assertEquals(result.nodes_created, 0)\n self.env.assertEquals(result.properties_set, 0)\n \n # Create a single node with two properties and no labels.\n def test03_single_node_with_properties(self):\n global redis_graph\n query = \"\"\"MERGE (charlie { name: '<NAME>', age: 10 })\"\"\"\n result = redis_graph.query(query) \n self.env.assertEquals(result.labels_added, 0)\n self.env.assertEquals(result.nodes_created, 1)\n self.env.assertEquals(result.properties_set, 2)\n \n # Retry to create an existing entity.\n def test04_existing_single_node_with_properties(self):\n global redis_graph\n query = \"\"\"MERGE (charlie { name: '<NAME>', age: 10 })\"\"\"\n result = redis_graph.query(query) \n self.env.assertEquals(result.labels_added, 0)\n self.env.assertEquals(result.nodes_created, 0)\n self.env.assertEquals(result.properties_set, 0)\n \n # Create a single node with both label and property.\n def test05_single_node_both_label_and_property(self):\n global redis_graph\n query = \"\"\"MERGE (michael:Person { name: '<NAME>' })\"\"\"\n result = redis_graph.query(query) \n self.env.assertEquals(result.labels_added, 1)\n self.env.assertEquals(result.nodes_created, 1)\n self.env.assertEquals(result.properties_set, 1)\n \n # Retry to create an existing entity.\n def test06_existing_single_node_both_label_and_property(self):\n global redis_graph\n query = \"\"\"MERGE (michael:Person { name: '<NAME>' })\"\"\"\n result = redis_graph.query(query) \n self.env.assertEquals(result.labels_added, 0)\n self.env.assertEquals(result.nodes_created, 0)\n self.env.assertEquals(result.properties_set, 0)\n\n # Create a single edge and additional two nodes.\n def test07_merge_on_relationship(self):\n global redis_graph\n query = \"\"\"MERGE (charlie:ACTOR)-[r:ACTED_IN]->(wallStreet:MOVIE)\"\"\"\n result = redis_graph.query(query) \n self.env.assertEquals(result.labels_added, 2)\n self.env.assertEquals(result.nodes_created, 2)\n self.env.assertEquals(result.properties_set, 0)\n self.env.assertEquals(result.relationships_created, 1)\n \n # Retry to create a single edge and additional two nodes.\n def test08_existing_merge_on_relationship(self):\n global redis_graph\n query = \"\"\"MERGE (charlie:ACTOR)-[r:ACTED_IN]->(wallStreet:MOVIE)\"\"\"\n result = redis_graph.query(query) \n self.env.assertEquals(result.labels_added, 0)\n self.env.assertEquals(result.nodes_created, 0)\n self.env.assertEquals(result.properties_set, 0)\n self.env.assertEquals(result.relationships_created, 0)\n\n # Update existing entity\n def test09_update_existing_node(self):\n global redis_graph\n query = \"\"\"MERGE (charlie { name: '<NAME>', age: 10 }) SET charlie.age = 11, charlie.lastname='Sheen' \"\"\"\n result = redis_graph.query(query)\n self.env.assertEquals(result.labels_added, 0)\n self.env.assertEquals(result.nodes_created, 0)\n self.env.assertEquals(result.properties_set, 2)\n self.env.assertEquals(result.relationships_created, 0)\n\n query = \"\"\"MATCH (charlie { name: '<NAME>' }) RETURN charlie.age, charlie.name, charlie.lastname\"\"\"\n actual_result = redis_graph.query(query)\n expected_result = [[11, '<NAME>', 'Sheen']]\n self.env.assertEquals(actual_result.result_set, expected_result)\n\n # Update new entity\n def test10_update_new_node(self):\n global redis_graph\n query = \"\"\"MERGE (tamara:ACTOR { name: '<NAME>' }) SET tamara.age = 59, tamara.name = '<NAME>' \"\"\"\n result = redis_graph.query(query)\n self.env.assertEquals(result.labels_added, 0)\n 
self.env.assertEquals(result.nodes_created, 1)\n self.env.assertEquals(result.properties_set, 3)\n self.env.assertEquals(result.relationships_created, 0)\n\n query = \"\"\"MATCH (tamara:ACTOR { name: '<NAME>' }) RETURN tamara.name, tamara.age\"\"\"\n actual_result = redis_graph.query(query)\n expected_result = [['<NAME>', 59]]\n self.env.assertEquals(actual_result.result_set, expected_result)\n\n # Create a single edge and additional two nodes.\n def test11_update_new_relationship(self):\n global redis_graph\n query = \"\"\"MERGE (franklin:ACTOR { name: '<NAME>' })-[r:ACTED_IN {rate:5.7}]->(almostHeroes:MOVIE) SET r.date=1998, r.rate=5.8\"\"\"\n result = redis_graph.query(query)\n self.env.assertEquals(result.labels_added, 0)\n self.env.assertEquals(result.nodes_created, 2)\n self.env.assertEquals(result.properties_set, 4)\n self.env.assertEquals(result.relationships_created, 1)\n \n # Update existing relation\n def test12_update_existing_edge(self):\n global redis_graph\n query = \"\"\"MERGE (franklin:ACTOR { name: '<NAME>' })-[r:ACTED_IN {rate:5.8, date:1998}]->(almostHeroes:MOVIE) SET r.date=1998, r.rate=5.9\"\"\"\n result = redis_graph.query(query)\n self.env.assertEquals(result.labels_added, 0)\n self.env.assertEquals(result.nodes_created, 0)\n self.env.assertEquals(result.properties_set, 2)\n self.env.assertEquals(result.relationships_created, 0)\n\n query = \"\"\"MATCH (franklin:ACTOR { name: '<NAME>' })-[r:ACTED_IN {rate:5.9, date:1998}]->(almostHeroes:MOVIE) RETURN franklin.name, franklin.age, r.rate, r.date\"\"\"\n actual_result = redis_graph.query(query)\n expected_result = [['<NAME>', None, 5.9, 1998]]\n self.env.assertEquals(actual_result.result_set, expected_result)\n \n # Update multiple nodes\n def test13_update_multiple_nodes(self):\n global redis_graph\n query = \"\"\"CREATE (:person {age:31}),(:person {age:31}),(:person {age:31}),(:person {age:31})\"\"\"\n result = redis_graph.query(query)\n self.env.assertEquals(result.labels_added, 1)\n self.env.assertEquals(result.nodes_created, 4)\n self.env.assertEquals(result.properties_set, 4)\n\n query = \"\"\"MERGE (p:person {age:31}) SET p.newprop=100\"\"\"\n result = redis_graph.query(query)\n self.env.assertEquals(result.labels_added, 0)\n self.env.assertEquals(result.nodes_created, 0)\n self.env.assertEquals(result.properties_set, 4)\n\n query = \"\"\"MATCH (p:person) RETURN p.age, p.newprop\"\"\"\n actual_result = redis_graph.query(query)\n expected_result = [[31, 100],\n [31, 100],\n [31, 100],\n [31, 100]]\n self.env.assertEquals(actual_result.result_set, expected_result)\n\n # Merge an unbounded pattern\n def test14_merge_unbounded_pattern(self):\n global redis_graph\n query = \"\"\"MERGE (p:person {age:31})-[:owns]->(d:dog {name:'max'})\"\"\"\n result = redis_graph.query(query)\n self.env.assertEquals(result.labels_added, 1)\n self.env.assertEquals(result.nodes_created, 2)\n self.env.assertEquals(result.properties_set, 2)\n self.env.assertEquals(result.relationships_created, 1)\n\n # Although a person with age 31 and a dog named max exist,\n # the specified pattern doesn't exist, as a result the entire pattern\n # will be created. If we were to support MATCH MERGE, 'p' and 'd'\n # would probably be defined in the MATCH clause, as a result they're\n # bounded and won't be duplicated.\n query = \"\"\"MERGE (p:person {age:31})-[:owns]->(d:dog {name:'max'})-[:eats]->(f:food {name:'Royal Canin'})\"\"\"\n result = redis_graph.query(query)\n self.env.assertEquals(result.labels_added, 1)\n 
self.env.assertEquals(result.nodes_created, 3)\n self.env.assertEquals(result.properties_set, 3)\n self.env.assertEquals(result.relationships_created, 2)\n\n # Add node that matches pre-existing index\n def test15_merge_indexed_entity(self):\n global redis_graph\n # Create index\n query = \"\"\"CREATE INDEX ON :person(age)\"\"\"\n redis_graph.query(query)\n\n count_query = \"\"\"MATCH (p:person) WHERE p.age > 0 RETURN COUNT(p)\"\"\"\n result = redis_graph.query(count_query)\n original_count = result.result_set[0][0]\n\n # Add one new person\n merge_query = \"\"\"MERGE (p:person {age:40})\"\"\"\n result = redis_graph.query(merge_query)\n self.env.assertEquals(result.nodes_created, 1)\n self.env.assertEquals(result.properties_set, 1)\n # Verify that one indexed node has been added\n result = redis_graph.query(count_query)\n updated_count = result.result_set[0][0]\n self.env.assertEquals(updated_count, original_count+1)\n\n # Perform another merge that does not create an entity\n result = redis_graph.query(merge_query)\n self.env.assertEquals(result.nodes_created, 0)\n\n # Verify that indexed node count is unchanged\n result = redis_graph.query(count_query)\n updated_count = result.result_set[0][0]\n self.env.assertEquals(updated_count, original_count+1)\n\n # Update nodes based on non-constant inlined properties\n def test16_merge_dynamic_properties(self):\n global redis_graph\n # Create and verify a new node\n query = \"\"\"MERGE (q:dyn {name: toUpper('abcde')}) RETURN q.name\"\"\"\n expected = [['ABCDE']]\n\n result = redis_graph.query(query)\n self.env.assertEquals(result.labels_added, 1)\n self.env.assertEquals(result.nodes_created, 1)\n self.env.assertEquals(result.properties_set, 1)\n\n self.env.assertEquals(result.result_set, expected)\n\n # Repeat the query and verify that no changes were introduced\n result = redis_graph.query(query)\n self.env.assertEquals(result.labels_added, 0)\n self.env.assertEquals(result.nodes_created, 0)\n self.env.assertEquals(result.properties_set, 0)\n\n # Verify that MATCH...MERGE on the same entity does not introduce changes\n query = \"\"\"MATCH (q {name: 'ABCDE'}) MERGE (r {name: q.name}) RETURN r.name\"\"\"\n result = redis_graph.query(query)\n self.env.assertEquals(result.labels_added, 0)\n self.env.assertEquals(result.nodes_created, 0)\n self.env.assertEquals(result.properties_set, 0)\n self.env.assertEquals(result.result_set, expected)\n", "id": "2139570", "language": "Python", "matching_score": 2.6897900104522705, "max_stars_count": 0, "path": "tests/flow/test_graph_merge.py" }, { "content": "from RLTest import Env\nfrom redisgraph import Graph, Node, Edge\nfrom base import FlowTestsBase\n\nredis_graph = None\ndis_redis = None\nredis_con = None\nvalues = [\"str1\", \"str2\", False, True, 5, 10.5]\n\n\nclass testValueComparison(FlowTestsBase):\n def __init__(self):\n super(testValueComparison, self).__init__()\n global redis_graph\n redis_con = self.env.getConnection()\n redis_graph = Graph(\"G\", redis_con)\n self.populate_graph()\n\n @classmethod\n def populate_graph(self):\n global redis_graph\n\n for v in values:\n node = Node(label=\"value\", properties={\"val\": v})\n redis_graph.add_node(node)\n\n # Add an additional node with no properties\n redis_graph.add_node(Node(label=\"value\"))\n\n redis_graph.commit()\n\n # Verify the ordering of values that can and cannot be directly compared\n def test_orderability(self):\n query = \"\"\"MATCH (v:value) RETURN v.val ORDER BY v.val\"\"\"\n actual_result = redis_graph.query(query)\n expected = 
[['str1'],\n ['str2'],\n [False],\n [True],\n [5],\n [10.5],\n [None]]\n self.env.assertEquals(actual_result.result_set, expected)\n\n # Expect the results to appear in reverse when using descending order\n query = \"\"\"MATCH (v:value) RETURN v.val ORDER BY v.val DESC\"\"\"\n actual_result = redis_graph.query(query)\n self.env.assertEquals(actual_result.result_set, expected[::-1])\n\n # From the Cypher specification:\n # \"In a mixed set, any numeric value is always considered to be higher than any string value\"\n def test_mixed_type_min(self):\n query = \"\"\"MATCH (v:value) RETURN MIN(v.val)\"\"\"\n actual_result = redis_graph.query(query)\n self.env.assertEquals(actual_result.result_set[0][0], 'str1')\n\n def test_mixed_type_max(self):\n query = \"\"\"MATCH (v:value) RETURN MAX(v.val)\"\"\"\n actual_result = redis_graph.query(query)\n self.env.assertEquals(actual_result.result_set[0][0], 10.5)\n\n # Verify that disjoint types pass <> filters\n def test_disjoint_comparisons(self):\n # Compare all node pairs under a Cartesian product\n query = \"\"\"MATCH (v:value), (w:value) WHERE ID(v) <> ID(w) AND v.val = w.val RETURN v\"\"\"\n actual_result = redis_graph.query(query)\n # No nodes have the same property, so there should be 0 equal results\n expected_result_count = 0\n self.env.assertEquals(\n len(actual_result.result_set), expected_result_count)\n\n query = \"\"\"MATCH (v:value), (w:value) WHERE ID(v) <> ID(w) AND v.val <> w.val RETURN v\"\"\"\n actual_result = redis_graph.query(query)\n # Every comparison should produce an inequal result\n node_count = len(redis_graph.nodes)\n # The node with value set as \"null\" should not be returned or be part of evaluation.\n expected_result_count = (node_count - 1) * (node_count - 2)\n self.env.assertEquals(\n len(actual_result.result_set), expected_result_count)\n", "id": "3803061", "language": "Python", "matching_score": 0.9609193205833435, "max_stars_count": 0, "path": "tests/flow/test_value_comparisons.py" }, { "content": "# This script creates Rust test cases from the common uritemplate-test data\n# files. 
The path of the JSON file should be passed in as a command line\n# argument.\n#\n# Example usage:\n# python generate_tests.py uritemplate-test/extended-tests.json > ../tests/extended_tests.rs\n\nimport json\nimport sys\nimport hashlib\n\ndef compute_md5(s):\n m = hashlib.md5()\n m.update(s.encode('utf-8'))\n return m.hexdigest()\n\nTEST_NAMES = {\n 'Level 1 Examples': 'test_level_1',\n 'Level 2 Examples': 'test_level_2',\n 'Level 3 Examples': 'test_level_3',\n 'Level 4 Examples': 'test_level_4',\n '3.2.1 Variable Expansion': 'test_321_variable_expansion',\n '3.2.2 Simple String Expansion': 'test_322_simple_string_expansion',\n '3.2.3 Reserved Expansion': 'test_323_reserved_expansion',\n '3.2.4 Fragment Expansion': 'test_324_fragment_expansion',\n '3.2.5 Label Expansion with Dot-Prefix': 'test_325_label_expansion_with_dot_prefix',\n '3.2.6 Path Segment Expansion': 'test_326_path_segment_expansion',\n '3.2.7 Path-Style Parameter Expansion': 'test_327_path_style_parameter_expansion',\n '3.2.8 Form-Style Query Expansion': 'test_328_form_style_query_expansion',\n '3.2.9 Form-Style Query Continuation': 'test_329_form_style_query_continuation',\n 'Additional Examples 1': 'test_additional_examples_1',\n 'Additional Examples 2': 'test_additional_examples_2',\n 'Additional Examples 3: Empty Variables': 'test_additional_examples_3',\n 'Additional Examples 4: Numeric Keys': 'test_additional_examples_4'\n}\n\n# Load JSON file from command line argument\nfile_path = sys.argv[1]\nwith open(file_path) as json_file:\n data = json.load(json_file)\n\n# Print headers\nprint('// File autogenerated with /scripts/generate_tests.py\\n')\nprint('extern crate uritemplate;')\nprint('use uritemplate::UriTemplate;\\n')\n\n# Create tests from JSON file\nfor (idx, title) in enumerate(sorted(data)):\n # Write function header\n test_name = TEST_NAMES.get(title, 'test_' + compute_md5(title))\n print('// %s' % title)\n print('#[test]')\n print('fn %s() {' % test_name)\n\n # Create list of URI Templates\n print(' let mut templates = [')\n for template in (x[0] for x in data[title]['testcases']):\n print(' UriTemplate::new(\"%s\"),' % template)\n print(' ];\\n')\n\n # Set variables for all templates\n print(' for i in 0..templates.len() {')\n for var in data[title]['variables']:\n val = data[title]['variables'][var]\n if isinstance(val, str) or isinstance(val, int) or isinstance(val, float):\n val = '\"%s\"' % val\n elif isinstance(val, list):\n val = '&[%s] as &[&str]' % \", \".join('\"%s\"' % x for x in val)\n elif isinstance(val, dict):\n val = '&[%s] as &[(&str, &str)]' % \", \".join('(\"%s\", \"%s\")' % (k, v) for (k, v) in val.items())\n elif val == None:\n continue\n else:\n raise Exception('Variable value is not a string, list, dictionary, or false')\n print(' templates[i].set(\"%s\", %s);' % (var, val))\n print(' }\\n')\n\n # Write assertions\n num_tests = len(data[title]['testcases'])\n for i in range(num_tests):\n answer = data[title]['testcases'][i][1]\n if isinstance(answer, str):\n print(' assert_eq!(templates[%d].build(), \"%s\");' % (i, answer))\n elif isinstance(answer, list):\n print(' let template_%d_answers = vec![' % i)\n for s in answer:\n print(' \"%s\",' % s)\n print(' ];')\n print(' assert!(template_%d_answers.contains(&templates[%d].build().as_ref()));' % (i, i))\n else:\n raise Exception('Test answer is neither a string nor a list')\n\n # Close out function\n print('}')\n if idx != len(data) - 1:\n print()\n", "id": "4743119", "language": "Python", "matching_score": 0.5022738575935364, 
"max_stars_count": 8, "path": "scripts/generate_tests.py" } ]
1.825355
Lancher
[ { "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\n\nimport os\nimport time\nfrom increasing import ProgressBar\n\n\np = ProgressBar([\n {\n 'type': 'text'\n },\n {\n 'cols': 40,\n 'type': 'basic',\n 'open_symbol': '\\033[1m\\033[38;5;1m[\\033[0m',\n 'close_symbol': '\\033[1m\\033[38;5;1m]\\033[0m',\n 'fill_symbol': '\\033[1m\\033[38;5;196m=\\033[0m',\n 'unfill_symbol': '\\033[1m\\033[38;5;1m-\\033[0m',\n },\n {\n 'cols': 40,\n 'type': 'arrow',\n 'open_symbol': '\\033[1m\\033[38;5;2m[\\033[0m',\n 'close_symbol': '\\033[1m\\033[38;5;2m]\\033[0m',\n 'fill_symbol': '\\033[1m\\033[38;5;28m-\\033[0m',\n 'arrow_symbol': '\\033[1m\\033[38;5;28m>\\033[0m',\n },\n {\n 'cols': 40,\n 'type': 'shade',\n 'open_symbol': '\\033[1m\\033[38;5;3m|\\033[0m',\n 'close_symbol': '\\033[1m\\033[38;5;3m|\\033[0m',\n 'fill_symbol': '\\033[1m\\033[38;5;28m-\\033[0m',\n 'arrow_symbol': '\\033[1m\\033[38;5;3m>\\033[0m',\n },\n {\n 'cols': 40,\n 'type': 'basic',\n 'open_symbol': '\\033[1m\\033[38;5;68m|\\033[0m',\n 'close_symbol': '\\033[1m\\033[38;5;68m|\\033[0m',\n 'fill_symbol': '\\033[1m\\033[38;5;30m█\\033[0m',\n 'unfill_symbol': '\\033[1m\\033[38;5;246m█\\033[0m',\n },\n {\n 'type': 'text'\n },\n])\n\ntime.sleep(5)\n\nfor i in range(101):\n time.sleep(0.05)\n p.update([\n {\n },\n {\n 'value': i / 100.0,\n 'suffix': '\\033[38;5;250m {}MB / 100MB\\033[0m'.format(i),\n },\n {\n 'value': i / 100.0 * 0.3,\n 'suffix': '\\033[38;5;250m ETA {}s \\033[0m'.format(int(i * 0.1)),\n },\n {\n 'value': i / 100.0 * 0.7,\n 'suffix': '\\033[38;5;250m {} \\033[0m'.format(int(i * 0.7)),\n },\n {\n 'value': i / 100.0 * 0.9,\n 'suffix': '\\033[38;5;250m {}% \\033[0m'.format(int(i * 0.9)),\n },\n {\n },\n ])\n\ntime.sleep(20)\n", "id": "2874913", "language": "Python", "matching_score": 1.355838656425476, "max_stars_count": 3, "path": "example.py" }, { "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nfrom __future__ import absolute_import, division, print_function\n\nimport sys\n\nBAR_TYPE = [\n 'basic',\n 'arrow',\n 'shade',\n 'block',\n 'rect',\n 'rotate',\n 'text',\n]\n\nBLOCK = [\n '▏',\n '▎',\n '▍',\n '▌',\n '▋',\n '▊',\n '▉',\n '█',\n]\n\nINDEF = [\n \"-\",\n \"\\\\\",\n \"|\",\n \"/\",\n]\n\n\nclass ProgressBar:\n\n def __init__(self, configs):\n self._configs = self._check_configs(configs)\n self._cnt = 0\n\n def _check_configs(self, configs):\n \"\"\"Parse the passing configuration and generate the new configuration.\n \"\"\"\n if not isinstance(configs, list):\n raise TypeError('\"configs\" should be a list of dicts')\n\n new_configs = []\n for config in configs:\n if not isinstance(config, dict):\n raise TypeError('\"configs\" should be a list of dicts')\n if 'type' not in config:\n new_configs.append(self._check_basic_bar_config(config))\n elif config['type'] == 'basic':\n new_configs.append(self._check_basic_bar_config(config))\n elif config['type'] == 'arrow':\n new_configs.append(self._check_arrow_bar_config(config))\n elif config['type'] == 'shade':\n new_configs.append(self._check_shade_bar_config(config))\n elif config['type'] == 'block':\n new_configs.append(self._check_block_bar_config(config))\n elif config['type'] == 'rect':\n new_configs.append(self._check_rect_bar_config(config))\n elif config['type'] == 'rotate':\n new_configs.append(self._check_rotate_bar_config(config))\n elif config['type'] == 'text':\n new_configs.append(self._check_text_bar_config(config))\n return new_configs\n\n def _check_basic_bar_config(self, config):\n \"\"\"Parse the passing basic bar configuration and 
generate the new configuration.\n \"\"\"\n new_config = {\n 'type': 'basic',\n 'open_symbol': config.get('open_symbol', '['),\n 'close_symbol': config.get('close_symbol', ']'),\n 'fill_symbol': config.get('fill_symbol', '='),\n 'unfill_symbol': config.get('unfill_symbol', ' '),\n 'cols': int(config.get('cols', 50)),\n 'value': 0,\n 'prefix': '',\n 'suffix': '',\n }\n return new_config\n\n def _check_arrow_bar_config(self, config):\n \"\"\"Parse the passing arrow bar configuration and generate the new configuration.\n \"\"\"\n new_config = {\n 'type': 'arrow',\n 'open_symbol': config.get('open_symbol', '['),\n 'close_symbol': config.get('close_symbol', ']'),\n 'fill_symbol': config.get('fill_symbol', '='),\n 'unfill_symbol': config.get('unfill_symbol', ' '),\n 'arrow_symbol': config.get('arrow_symbol', '>'),\n 'cols': int(config.get('cols', 50)),\n 'value': 0,\n 'prefix': '',\n 'suffix': '',\n }\n return new_config\n\n def _check_shade_bar_config(self, config):\n \"\"\"Parse the passing shade bar configuration and generate the new configuration.\n \"\"\"\n new_config = {\n 'type': 'shade',\n 'open_symbol': config.get('open_symbol', '|'),\n 'close_symbol': config.get('close_symbol', '|'),\n 'fill_symbol': '█',\n 'unfill_symbol': '░',\n 'cols': int(config.get('cols', 50)),\n 'value': 0,\n 'prefix': '',\n 'suffix': '',\n }\n return new_config\n\n def _check_block_bar_config(self, config):\n \"\"\"Parse the passing block bar configuration and generate the new configuration.\n \"\"\"\n new_config = {\n 'type': 'block',\n 'open_symbol': config.get('open_symbol', '|'),\n 'close_symbol': config.get('close_symbol', '|'),\n 'cols': int(config.get('cols', 50)),\n 'value': 0,\n 'prefix': '',\n 'suffix': '',\n }\n return new_config\n\n def _check_rect_bar_config(self, config):\n \"\"\"Parse the passing rectangle bar configuration and generate the new configuration.\n \"\"\"\n new_config = {\n 'type': 'rect',\n 'open_symbol': config.get('open_symbol', '|'),\n 'close_symbol': config.get('close_symbol', '|'),\n 'fill_symbol': '■',\n 'unfill_symbol': config.get('unfill_symbol', ' '),\n 'cols': int(config.get('cols', 50)),\n 'value': 0,\n 'prefix': '',\n 'suffix': '',\n }\n return new_config\n\n def _check_rotate_bar_config(self, config):\n \"\"\"Parse the passing rotatable bar configuration and generate the new configuration.\n \"\"\"\n new_config = {\n 'type': 'rotate',\n 'open_symbol': config.get('open_symbol', ''),\n 'close_symbol': config.get('close_symbol', ''),\n 'done_symbol': config.get('done_symbol', 'ok'),\n 'is_done': False,\n 'cnt': 0,\n 'cols': int(config.get('cols', 50)),\n 'value': 0,\n 'prefix': '',\n 'suffix': '',\n }\n return new_config\n\n def _check_text_bar_config(self, config):\n \"\"\"Parse the passing text bar configuration and generate the new configuration.\n \"\"\"\n new_config = {\n 'type': 'text',\n 'value': 0,\n 'prefix': '',\n 'suffix': '',\n }\n return new_config\n\n def update(self, configs):\n \"\"\"Update all the configurations.\n \"\"\"\n if len(configs) != len(self._configs):\n raise ValueError('length of \"configs\" is not matching')\n\n for i in range(len(configs)):\n self._update_config_by_index(i, configs[i])\n self._write()\n\n def _update_config_by_index(self, index, config):\n \"\"\"Update the value, prefix, suffix by using index.\n \"\"\"\n if 'value' in config:\n if not 0 <= config['value'] <= 1:\n raise ValueError('\"value\" should be between 0 and 1')\n self._configs[index]['value'] = config['value']\n if 'prefix' in config:\n self._configs[index]['prefix'] = 
config['prefix']\n if 'suffix' in config:\n self._configs[index]['suffix'] = config['suffix']\n if 'is_done' in config:\n self._configs[index]['is_done'] = config['is_done']\n\n def _render_output(self):\n \"\"\"Render the concatenated string for each of the different bar types.\n \"\"\"\n out = ''\n for config in self._configs:\n if config['type'] == 'basic':\n out += self._render_basic_bar_output(config)\n elif config['type'] == 'arrow':\n out += self._render_arrow_bar_output(config)\n elif config['type'] == 'shade':\n out += self._render_basic_bar_output(config)\n elif config['type'] == 'block':\n out += self._render_block_bar_output(config)\n elif config['type'] == 'rect':\n out += self._render_basic_bar_output(config)\n elif config['type'] == 'rotate':\n out += self._render_rotate_bar_output(config)\n elif config['type'] == 'text':\n out += self._render_text_bar_output(config)\n # ESC escape CSI to move cursor to previous line, u'\033[F'\n if self._cnt:\n out = '\u001b[F' * len(self._configs) + out\n return out\n\n def _render_basic_bar_output(self, config):\n \"\"\"Render the concatenated string for the basic bar type.\n \"\"\"\n cols = int(round(config['value'] * config['cols']))\n out = config['prefix'] + config['open_symbol'] + config['fill_symbol'] * cols + \\\n config['unfill_symbol'] * (config['cols'] - cols) + config['close_symbol'] + \\\n config['suffix'] + '\\r\\n'\n return out\n\n def _render_arrow_bar_output(self, config):\n \"\"\"Render the concatenated string for the arrow bar type.\n \"\"\"\n cols = int(round(config['value'] * config['cols']))\n out = config['prefix'] + config['open_symbol'] + config['fill_symbol'] * cols\n if config['cols'] - cols > 0:\n out += config['arrow_symbol']\n out += config['unfill_symbol'] * (config['cols'] - cols - 1)\n else:\n out += config['unfill_symbol'] * (config['cols'] - cols)\n out += config['close_symbol'] + config['suffix'] + '\\r\\n'\n return out\n\n def _render_block_bar_output(self, config):\n \"\"\"Render the concatenated string for the block bar type.\n \"\"\"\n blocks = int(round(config['value'] * config['cols'] * len(BLOCK)))\n out = config['prefix'] + config['open_symbol']\n if blocks == 0:\n out += ' ' * config['cols']\n else:\n out += BLOCK[-1] * int(blocks / len(BLOCK))\n if blocks % len(BLOCK) != 0:\n out += BLOCK[blocks % len(BLOCK)]\n out += ' ' * (config['cols'] - int(blocks / len(BLOCK)) - 1)\n else:\n out += ' ' * (config['cols'] - int(blocks / len(BLOCK)))\n out += config['close_symbol'] + config['suffix'] + '\\r\\n'\n return out\n\n def _render_rotate_bar_output(self, config):\n \"\"\"Render the concatenated string for the rotate bar type.\n \"\"\"\n out = config['prefix'] + config['open_symbol']\n out += config['done_symbol'] if config['is_done'] else INDEF[config['cnt'] % 4]\n out += config['close_symbol'] + config['suffix'] + '\\r\\n'\n config['cnt'] += 1\n return out\n\n def _render_text_bar_output(self, config):\n \"\"\"Render the concatenated string for the text bar type.\n \"\"\"\n out = config['prefix'] + config['suffix'] + '\\r\\n'\n return out\n\n def _write(self):\n \"\"\"Write characters to stdout.\n \"\"\"\n sys.stdout.write(self._render_output())\n sys.stdout.flush()\n self._cnt += 1\n", "id": "2062539", "language": "Python", "matching_score": 2.3052010536193848, "max_stars_count": 3, "path": "increasing/progress.py" }, { "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nfrom __future__ import absolute_import, division, print_function\n\nimport os\nimport sys\nimport 
unittest\n\nsys.path.append('/'.join((os.path.abspath(__file__).split('/')[:-3])))\n\nfrom increasing import ProgressBar\n\n\nclass ProgressBarTest(unittest.TestCase):\n\n def setUp(self):\n\n self.basic_init_config = {'open_symbol': '[', 'prefix': '', 'fill_symbol': '=', 'unfill_symbol': ' ',\n 'close_symbol': ']', 'type': 'basic', 'cols': 50, 'value': 0.0, 'suffix': ''}\n self.arrow_init_config = {'open_symbol': '[', 'prefix': '', 'fill_symbol': '=', 'unfill_symbol': ' ',\n 'arrow_symbol': '>', 'close_symbol': ']', 'type': 'arrow', 'cols': 50, 'value': 0.0,\n 'suffix': ''}\n self.shade_init_config = {'open_symbol': '|', 'prefix': '', 'fill_symbol': '█', 'unfill_symbol': '░',\n 'close_symbol': '|', 'type': 'shade',\n 'cols': 50, 'value': 0.0, 'suffix': ''}\n self.block_init_config = {'open_symbol': '|', 'suffix': '', 'cols': 50, 'value': 0.0, 'prefix': '',\n 'close_symbol': '|', 'type': 'block'}\n self.rect_init_config = {'open_symbol': '|', 'prefix': '', 'fill_symbol': '■', 'unfill_symbol': ' ',\n 'close_symbol': '|', 'type': 'rect', 'cols': 50, 'value': 0.0, 'suffix': ''}\n self.rotate_init_config = {'open_symbol': '', 'prefix': '', 'cnt': 0, 'suffix': '', 'done_symbol': 'ok',\n 'is_done': False, 'close_symbol': '', 'type': 'rotate', 'cols': 50, 'value': 0.0}\n self.text_init_config = {'prefix': '', 'suffix': '', 'type': 'text', 'value': 0.0}\n\n self.basic_val_0_cols_10_output = '[' + ' ' * 10 + ']\\r\\n'\n self.arrow_val_0_cols_10_output = '[' + '>' + ' ' * 9 + ']\\r\\n'\n self.shade_val_0_cols_10_output = '|' + '░'*10 + '|\\r\\n'\n self.block_val_0_cols_10_output = '|' + ' ' * 10 + '|\\r\\n'\n self.rect_val_0_cols_10_output = '|' + ' ' * 10 + '|\\r\\n'\n self.rotate_cnt_0_cols_10_output = '-\\r\\n'\n self.text_value_0_cols_10_output = '\\r\\n'\n\n self.basic_val_1_cols_10_output = '[' + '=' * 10 + ']\\r\\n'\n self.arrow_val_1_cols_10_output = '[' + '=' * 10 + ']\\r\\n'\n self.shade_val_1_cols_10_output = '|' + '█'*10 + '|\\r\\n'\n self.block_val_1_cols_10_output = '|' + '█' * 10 + '|\\r\\n'\n self.rect_val_1_cols_10_output = '|' + '■' * 10 + '|\\r\\n'\n self.rotate_cnt_1_cols_10_output = '\\\\\\r\\n'\n self.text_val_1_cols_10_output = 'abc: def\\r\\n'\n\n def test_check_configs(self):\n \"\"\"Test empty, basic, arrow, shade, block, rect, rotate, text configurations\n \"\"\"\n p = ProgressBar([])\n configs = p._check_configs([{}, {'type': 'basic'}, {'type': 'arrow'}, {'type': 'shade'}, {'type': 'block'},\n {'type': 'rect'}, {'type': 'rotate'}, {'type': 'text'}])\n\n self.assertEqual(configs[0], self.basic_init_config)\n self.assertEqual(configs[1], self.basic_init_config)\n self.assertEqual(configs[2], self.arrow_init_config)\n self.assertEqual(configs[3], self.shade_init_config)\n self.assertEqual(configs[4], self.block_init_config)\n self.assertEqual(configs[5], self.rect_init_config)\n self.assertEqual(configs[6], self.rotate_init_config)\n self.assertEqual(configs[7], self.text_init_config)\n\n def test_update_config_by_index(self):\n \"\"\" Test updating configurations.\n \"\"\"\n data = {'value': 0.5, 'prefix': '*', 'suffix': '&', 'is_done': True}\n p = ProgressBar([{}])\n p._update_config_by_index(0, data)\n self.assertEqual(p._configs[0]['value'], data['value'])\n self.assertEqual(p._configs[0]['prefix'], data['prefix'])\n self.assertEqual(p._configs[0]['suffix'], data['suffix'])\n self.assertEqual(p._configs[0]['is_done'], data['is_done'])\n\n def test_render_output(self):\n \"\"\"Test output at value 0 and value 1.\n \"\"\"\n p = ProgressBar([{'cols': 10}, {'type': 'basic', 
'cols': 10}, {'type': 'arrow', 'cols': 10},\n {'type': 'shade', 'cols': 10}, {'type': 'block', 'cols': 10}, {'type': 'rect', 'cols': 10},\n {'type': 'rotate', 'cols': 10}, {'type': 'text'}])\n self.assertEqual(p._render_output(), self.basic_val_0_cols_10_output + self.basic_val_0_cols_10_output +\n self.arrow_val_0_cols_10_output + self.shade_val_0_cols_10_output +\n self.block_val_0_cols_10_output + self.rect_val_0_cols_10_output +\n self.rotate_cnt_0_cols_10_output + self.text_value_0_cols_10_output)\n\n for i in range(7):\n p._update_config_by_index(i, {'value': 1})\n p._update_config_by_index(7, {'prefix': 'abc:', 'suffix': ' def'})\n self.assertEqual(p._render_output(), self.basic_val_1_cols_10_output +\n self.basic_val_1_cols_10_output + self.arrow_val_1_cols_10_output +\n self.shade_val_1_cols_10_output + self.block_val_1_cols_10_output +\n self.rect_val_1_cols_10_output + self.rotate_cnt_1_cols_10_output +\n self.text_val_1_cols_10_output)\n", "id": "11453734", "language": "Python", "matching_score": 3.249465227127075, "max_stars_count": 3, "path": "increasing/test/test_progress.py" }, { "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nfrom __future__ import absolute_import, division, print_function\n\nimport unittest\n\n\nclass SymbolTest(unittest.TestCase):\n\n def test_1(self):\n self.assertTrue(True)\n", "id": "7047200", "language": "Python", "matching_score": 0.7204140424728394, "max_stars_count": 3, "path": "increasing/test/test_symbol.py" }, { "content": "#!/usr/bin/env python\n\nfrom __future__ import absolute_import, division, print_function, with_statement\n\n# python lib\nimport os\nimport time\nimport socket\nimport logging\n\n# 3rd party lib\nimport tornado\nimport tornado.ioloop\nimport tornado.iostream\nimport tornado.options\nfrom tornado import gen\n\n# Use tornado default application log.\napp_log = logging.getLogger('tornado.application')\n\n\nclass DockerEvent(object):\n\n def __init__(self):\n self._sep = False\n self._num_read = -1\n self._data = b''\n\n @gen.coroutine\n def start(self):\n # Create unix socket to `/var/run/docker.sock`.\n sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)\n sock.connect('/var/run/docker.sock')\n\n # Send HTTP request.\n stream = tornado.iostream.IOStream(sock)\n stream.set_nodelay(True)\n\n # Read until 365 days passed.\n until = int(time.time()) + 60 * 60 * 24 * 365\n\n # Request Docker events.\n yield stream.write(u'GET /events?{} HTTP/1.1\\r\\n\\r\\n'.format(until).encode('utf-8'))\n\n # Get response HTTP 200.\n data = yield stream.read_until(b'\\n')\n if data != b'HTTP/1.1 200 OK\\r\\n':\n app_log.error('HTTP Connection Failed...')\n return\n app_log.info('HTTP Connected...')\n\n # Keep pulling the events.\n while True:\n data = yield stream.read_until(b'\\n')\n\n # Get b'\\r\\n'.\n if not self._sep and data == b'\\r\\n':\n self._sep = True\n\n # Get number of bytes b'171'.\n elif self._sep and self._num_read == -1:\n self._num_read = int(data.decode('utf-8').strip(), 16)\n\n # Get data stream.\n elif self._sep and self._num_read != -1:\n self._data = data\n\n app_log.info('number of bytes: {}'.format(len(data)))\n app_log.info('string: {}'.format(data.decode('utf-8')))\n\n # Clear\n self._sep = False\n self._num_read = -1\n self._data = b''\n\n\ndef main():\n # Enable tornado log.\n tornado.options.parse_command_line()\n\n # Start Docker events.\n DockerEvent().start()\n\n # Start tornado.\n tornado.ioloop.IOLoop.current().start()\n\n\nif __name__ == '__main__':\n main()\n\n", "id": "6746498", 
"language": "Python", "matching_score": 2.1596665382385254, "max_stars_count": 0, "path": "app.py" }, { "content": "#!/usr/bin/env python\n\n\nfrom __future__ import absolute_import, division, print_function\n\nfrom increasing.progress import ProgressBar\n", "id": "5086687", "language": "Python", "matching_score": 1.253379225730896, "max_stars_count": 3, "path": "increasing/__init__.py" }, { "content": "from distutils.core import setup\n\n\nsetup(\n name='increasing',\n packages=['increasing'],\n version='0.2',\n description='Coloring Progress-Bar for Your Command-Line/Terminal Applications',\n author='<NAME>',\n author_email='<EMAIL>',\n url='https://github.com/Lancher/increasing',\n download_url='https://github.com/lancher/increasing/archive/0.1.tar.gz',\n keywords=['progress-bar', 'progressbar', 'python', 'command-line', 'terminal', 'python-library', 'cli'],\n classifiers=[],\n)\n", "id": "6033460", "language": "Python", "matching_score": 1.160069227218628, "max_stars_count": 3, "path": "setup.py" } ]
1.355839
yinqiaoyin
[ { "content": "num1 = 1111111112222\n\nnum2 = 22222\n\nnum3 = 44444\n\nnum4 = 444444444\n\n\nnum5 = 我的555555\n\nnum5 = 555555\n\n", "id": "1147165", "language": "Python", "matching_score": 0, "max_stars_count": 0, "path": "login.py" } ]
0
ruppysuppy
[ { "content": "\"\"\"\nProblem:\n\nGiven a string which we can delete at most k, return whether you can make a palindrome.\n\nFor example, given 'waterrfetawx' and a k of 2, you could delete f and x to get\n'waterretaw'.\n\"\"\"\n\n\ndef is_palindrome(string: str) -> bool:\n return string == string[::-1]\n\n\ndef can_make_palindrome(string: str, k: int) -> bool:\n if is_palindrome(string):\n return True\n if not k:\n return False\n # checking all possible combinations of the string\n for i in range(len(string)):\n if can_make_palindrome(string[:i] + string[i + 1 :], k - 1):\n return True\n return False\n\n\nif __name__ == \"__main__\":\n print(can_make_palindrome(\"a\", 0))\n print(can_make_palindrome(\"aaa\", 2))\n print(can_make_palindrome(\"add\", 0))\n print(can_make_palindrome(\"waterrfetawx\", 3))\n print(can_make_palindrome(\"waterrfetawx\", 2))\n print(can_make_palindrome(\"waterrfetawx\", 1))\n print(can_make_palindrome(\"malayalam\", 0))\n print(can_make_palindrome(\"malayalam\", 1))\n print(can_make_palindrome(\"asdf\", 5))\n print(can_make_palindrome(\"asdf\", 2))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(2 ^ n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "2331046", "language": "Python", "matching_score": 1.1900184154510498, "max_stars_count": 70, "path": "Solutions/121.py" }, { "content": "\"\"\"\nProblem:\n\nWrite a program that checks whether an integer is a palindrome. For example, 121 is a\npalindrome, as well as 888. 678 is not a palindrome. Do not convert the integer into a\nstring.\n\"\"\"\n\n\ndef is_palindrome(num: int) -> bool:\n digits = 0\n num_copy = num\n while num_copy >= 10:\n digits += 1\n num_copy = num_copy // 10\n # checking for palindrome condition\n for i in range((digits) // 2 + 1):\n digit1 = (num // (10 ** i)) % 10\n digit2 = (num % (10 ** (digits - i + 1))) // (10 ** (digits - i))\n if digit1 != digit2:\n return False\n return True\n\n\nif __name__ == \"__main__\":\n print(is_palindrome(121))\n print(is_palindrome(888))\n print(is_palindrome(1661))\n print(is_palindrome(235))\n print(is_palindrome(678))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(1)\n[n = number of digits]\n\"\"\"\n", "id": "9820265", "language": "Python", "matching_score": 0.9832440614700317, "max_stars_count": 70, "path": "Solutions/202.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a list of words, find all pairs of unique indices such that the concatenation of\nthe two words is a palindrome.\n\nFor example, given the list [\"code\", \"edoc\", \"da\", \"d\"], return\n[(0, 1), (1, 0), (2, 3)].\n\"\"\"\n\nfrom typing import List, Tuple\n\n\ndef is_palindrome(string: str) -> bool:\n return string == string[::-1]\n\n\ndef get_concatenated_palindrome_indices(\n string_list: List[str],\n) -> List[Tuple[int, int]]:\n concatenated_palindrome_indices = []\n length = len(string_list)\n # generating concatenated palindrome indices\n for i in range(length):\n for j in range(i + 1, length):\n if is_palindrome(string_list[i] + string_list[j]):\n concatenated_palindrome_indices.append((i, j))\n if is_palindrome(string_list[j] + string_list[i]):\n concatenated_palindrome_indices.append((j, i))\n return concatenated_palindrome_indices\n\n\nif __name__ == \"__main__\":\n print(get_concatenated_palindrome_indices([\"code\", \"edoc\", \"da\", \"d\"]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n x len(word))\nSPACE COMPLEXITY: O(n ^ 2)\n\"\"\"\n", "id": "4366373", "language": "Python", "matching_score": 1.597694754600525, "max_stars_count": 70, "path": "Solutions/167.py" }, { 
"content": "\"\"\"\nProblem:\n\nGiven a string, split it into as few strings as possible such that each string is a\npalindrome.\n\nFor example, given the input string \"racecarannakayak\", return\n[\"racecar\", \"anna\", \"kayak\"].\n\nGiven the input string \"abc\", return [\"a\", \"b\", \"c\"].\n\"\"\"\n\nfrom typing import List\n\n\ndef is_palindrome(string: str) -> bool:\n return string and string == string[::-1]\n\n\ndef split_into_string_list_helper(\n string: str, current: str, palindrome_list: List[str]\n) -> List[str]:\n if not string and not current:\n return palindrome_list\n elif not string:\n return palindrome_list + list(current)\n # generating the palindrome list\n curr = current + string[0]\n if is_palindrome(curr):\n # adding curr to the list of palindromes\n palindrome_list_1 = split_into_string_list_helper(\n string[1:], \"\", palindrome_list + [curr]\n )\n # checking if a larger palindrome can be obtained\n palindrome_list_2 = split_into_string_list_helper(\n string[1:], curr, palindrome_list\n )\n return min(palindrome_list_1, palindrome_list_2, key=lambda List: len(List))\n return split_into_string_list_helper(string[1:], curr, palindrome_list)\n\n\ndef split_into_string_list(string: str) -> List[str]:\n return split_into_string_list_helper(string, \"\", [])\n\n\nif __name__ == \"__main__\":\n print(split_into_string_list(\"racecarannakayak\"))\n print(split_into_string_list(\"abc\"))\n print(split_into_string_list(\"abbbc\"))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(2 ^ n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "3192628", "language": "Python", "matching_score": 0.6420644521713257, "max_stars_count": 70, "path": "Solutions/181.py" }, { "content": "\"\"\"\nProblem:\n\nWrite an algorithm to justify text. Given a sequence of words and an integer line\nlength k, return a list of strings which represents each line, fully justified.\n\nMore specifically, you should have as many words as possible in each line. There should\nbe at least one space between each word. Pad extra spaces when necessary so that each\nline has exactly length k. 
Spaces should be distributed as equally as possible, with\nthe extra spaces, if any, distributed starting from the left.\n\nIf you can only fit one word on a line, then you should pad the right-hand side with\nspaces.\n\nEach word is guaranteed not to be longer than k.\n\nFor example, given the list of words\n[\"the\", \"quick\", \"brown\", \"fox\", \"jumps\", \"over\", \"the\", \"lazy\", \"dog\"] and k = 16, you\nshould return the following:\n\n[\"the quick brown\", # 1 extra space on the left \"fox jumps over\", # 2 extra spaces\ndistributed evenly \"the lazy dog\" # 4 extra spaces distributed evenly]\n\"\"\"\n\nfrom typing import List\n\n\ndef add_word_handler(\n result: List[str], temp: List[str], curr_string_length: int, k: int\n) -> None:\n # helper function to handle adding a new word to the result\n extra_spaces = k - curr_string_length\n words_in_current_line = len(temp)\n if words_in_current_line == 1:\n # only 1 word is present, extra padding is added to the right\n word = temp.pop()\n result.append(word + \" \" * extra_spaces)\n elif extra_spaces % (words_in_current_line - 1) == 0:\n # space can be equally distributed\n full_string = (\" \" * (extra_spaces // (words_in_current_line - 1) + 1)).join(\n temp\n )\n result.append(full_string)\n else:\n # the space cannot be equally distributed\n # extra spaces are added between the words, starting from the left\n extra_uneven_spaces = extra_spaces % (words_in_current_line - 1)\n regular = extra_spaces // (words_in_current_line - 1) + 1\n temp_str = \"\"\n for i in temp:\n temp_str += i + \" \" * regular\n if extra_uneven_spaces:\n temp_str += \" \"\n extra_uneven_spaces -= 1\n result.append(temp_str.rstrip())\n\n\ndef justify_text(word_list: List[str], k: int) -> List[str]:\n result = []\n temp = []\n curr_string_length = 0\n # iterating through the given words\n for word in word_list:\n curr_word_length = len(word)\n if temp == []:\n # no word added to the current string\n temp.append(word)\n curr_string_length = curr_word_length\n elif curr_word_length + curr_string_length + 1 <= k:\n # adding the current word doesn't cause overflow\n temp.append(word)\n curr_string_length += curr_word_length + 1\n else:\n # adding the current word does cause overflow\n add_word_handler(result, temp, curr_string_length, k)\n # updating temp and length (only in case of overflow)\n temp = [word]\n curr_string_length = len(word)\n if temp != []:\n # flushing the words remaining in the last line\n add_word_handler(result, temp, curr_string_length, k)\n return result\n\n\nif __name__ == \"__main__\":\n for string in justify_text(\n [\"the\", \"quick\", \"brown\", \"fox\", \"jumps\", \"over\", \"the\", \"lazy\", \"dog\"], 16\n ):\n print(\"'\" + string + \"'\")\n\n for string in justify_text([\"done\"], 16):\n print(\"'\" + string + \"'\")\n\n # NOTE: Using the \"'\"s is not important, used it to denote the start and end of the\n # string (helpful in case of 1 word in 1 line)\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "2785156", "language": "Python", "matching_score": 3.1180124282836914, "max_stars_count": 70, "path": "Solutions/028.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a string s and an integer k, break up the string into multiple texts such that\neach text has a length of k or less. You must break it up so that words don't break\nacross lines. 
If there's no way to break the text up, then return null.\n\nYou can assume that there are no spaces at the ends of the string and that there is\nexactly one space between each word.\n\nFor example, given the string \"the quick brown fox jumps over the lazy dog\" and k = 10,\nyou should return: [\"the quick\", \"brown fox\", \"jumps over\", \"the lazy\", \"dog\"]. No\nstring in the list has a length of more than 10.\n\"\"\"\n\nfrom typing import List, Optional\n\n\ndef break_string(string: str, k: int) -> Optional[List[str]]:\n word_list = string.split()\n result = []\n curr_len = 0\n curr_str = \"\"\n # generating the formatted text\n for word in word_list:\n current_word_length = len(word)\n if current_word_length > k:\n return None\n if curr_len == 0:\n # first iteration\n curr_len = current_word_length\n curr_str = word\n elif curr_len + current_word_length + 1 > k:\n # overflow condition\n result.append(curr_str)\n curr_str = word\n curr_len = current_word_length\n else:\n # normal addition to the string\n curr_len += current_word_length\n curr_str += \" \" + word\n result.append(curr_str)\n return result\n\n\nif __name__ == \"__main__\":\n print(break_string(\"the quick brown fox jumps over the lazy dog\", 10))\n print(break_string(\"the quick brown fox jumps over the lazy dog\", 3))\n print(break_string(\"the quick brown fox jumps over the lazy dog tenletters\", 10))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "7990872", "language": "Python", "matching_score": 1.5791376829147339, "max_stars_count": 70, "path": "Solutions/057.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a string and a set of delimiters, reverse the words in the string while\nmaintaining the relative order of the delimiters. For example, given\n\"hello/world:here\", return \"here/world:hello\"\n\nFollow-up: Does your solution work for the following cases: \"hello/world:here/\",\n\"hello//world:here\"\n\"\"\"\n\nfrom typing import Set\n\n\ndef rev_words(string: str, delimiters: Set[str]) -> str:\n if len(string) == 0:\n return string\n\n words = []\n delims = []\n flag_beg = string[0] in delimiters\n flag_delim = False\n curr_str = \"\"\n # generating the words and delimiters\n for char in string:\n if char in delimiters:\n if flag_delim:\n curr_str += char\n else:\n if curr_str:\n words.append(curr_str)\n curr_str = char\n flag_delim = True\n else:\n if flag_delim:\n flag_delim = False\n delims.append(curr_str)\n curr_str = char\n else:\n curr_str += char\n # check if last character is a delimiter\n if flag_delim:\n delims.append(curr_str)\n else:\n words.append(curr_str)\n\n words = words[::-1]\n words.append(\"\")\n delims.append(\"\")\n len_words = len(words)\n len_delims = len(delims)\n i, j = 0, 0\n reversed_string = \"\"\n # generating the reversed string\n if flag_beg:\n j = 1\n reversed_string += delims[0]\n while i < len_words or j < len_delims:\n try:\n reversed_string += words[i]\n reversed_string += delims[j]\n i += 1\n j += 1\n except IndexError:\n break\n return reversed_string\n\n\nif __name__ == \"__main__\":\n print(rev_words(\"hello/world:here\", {\":\", \"/\"}))\n print(rev_words(\"here/world:hello\", {\":\", \"/\"}))\n print(rev_words(\"hello/world:here/\", {\":\", \"/\"}))\n print(rev_words(\"hello//world:here\", {\":\", \"/\"}))\n print(rev_words(\"hello\", {\":\", \"/\"}))\n print(rev_words(\"//:\", {\":\", \"/\"}))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "4746436", "language": "Python", "matching_score": 
2.1773135662078857, "max_stars_count": 70, "path": "Solutions/114.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a string of words delimited by spaces, reverse the words in string. For example,\ngiven \"hello world here\", return \"here world hello\"\n\nFollow-up: given a mutable string representation, can you perform this operation\nin-place?\n\"\"\"\n\n\ndef reverse_words_in_string(string: str) -> str:\n words = string.split()\n words.reverse()\n return \" \".join(words)\n\n\nif __name__ == \"__main__\":\n print(reverse_words_in_string(\"hello world here\"))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "2094496", "language": "Python", "matching_score": 0.013783262111246586, "max_stars_count": 70, "path": "Solutions/113.py" }, { "content": "\"\"\"\nProblem:\n\nA teacher must divide a class of students into two teams to play dodgeball.\nUnfortunately, not all the kids get along, and several refuse to be put on the same\nteam as that of their enemies.\n\nGiven an adjacency list of students and their enemies, write an algorithm that finds a\nsatisfactory pair of teams, or returns False if none exists.\n\nFor example, given the following enemy graph you should return the teams {0, 1, 4, 5}\nand {2, 3}.\n\nstudents = {\n 0: [3],\n 1: [2],\n 2: [1, 4],\n 3: [0, 4, 5],\n 4: [2, 3],\n 5: [3]\n}\nOn the other hand, given the input below, you should return False.\n\nstudents = {\n 0: [3],\n 1: [2],\n 2: [1, 3, 4],\n 3: [0, 2, 4, 5],\n 4: [2, 3],\n 5: [3]\n}\n\"\"\"\n\nfrom typing import Dict, List, Set, Tuple, Union\n\n\ndef divide_into_groups(\n students: Dict[int, List[int]]\n) -> Union[bool, Tuple[Set[int], Set[int]]]:\n set1 = set()\n set2 = set()\n nemesis1 = set()\n nemesis2 = set()\n\n for student in students:\n if student in nemesis1 and student in nemesis2:\n # satisfactory pair of teams doesn't exist\n return False\n # creating the necessary reference\n if student in nemesis1:\n set_curr = set2\n nemesis_curr = nemesis2\n else:\n set_curr = set1\n nemesis_curr = nemesis1\n set_curr.add(student)\n for nemesis in students[student]:\n nemesis_curr.add(nemesis)\n return set1, set2\n\n\nif __name__ == \"__main__\":\n students = {0: [3], 1: [2], 2: [1, 4], 3: [0, 4, 5], 4: [2, 3], 5: [3]}\n print(divide_into_groups(students))\n\n students = {0: [3], 1: [2], 2: [1, 3, 4], 3: [0, 2, 4, 5], 4: [2, 3], 5: [3]}\n print(divide_into_groups(students))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\n[O(n) by amortized analysis: in the worst case (everyone wants to be alone), the\nnested loop runs 2 times and breaks out as the nemesis set contains all students]\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "12635209", "language": "Python", "matching_score": 1.09867262840271, "max_stars_count": 70, "path": "Solutions/292.py" }, { "content": "\"\"\"\nProblem:\n\nGiven an array of positive integers, divide the array into two subsets such that the\ndifference between the sum of the subsets is as small as possible.\n\nFor example, given [5, 10, 15, 20, 25], return the sets {10, 25} and {5, 15, 20}, which\nhas a difference of 5, which is the smallest possible difference.\n\"\"\"\n\nfrom typing import List, Tuple\n\n\ndef smallest_difference_sets(\n arr: List[int], set1: List[int] = [], set2: List[int] = []\n) -> Tuple[List[int], List[int]]:\n if not arr:\n return set1, set2\n # generating the possible lists\n temp = arr.pop()\n temp1_1, temp2_1 = smallest_difference_sets(list(arr), set1 + [temp], list(set2))\n temp1_2, temp2_2 = smallest_difference_sets(list(arr), 
list(set1), set2 + [temp])\n # returning the lists with smaller difference\n diff1 = abs(sum(temp1_1) - sum(temp2_1))\n diff2 = abs(sum(temp1_2) - sum(temp2_2))\n if diff1 < diff2:\n return temp1_1, temp2_1\n return temp1_2, temp2_2\n\n\nif __name__ == \"__main__\":\n print(smallest_difference_sets([5, 10, 15, 20, 25], [], []))\n print(smallest_difference_sets([5, 10, 15, 20], [], []))\n print(smallest_difference_sets([500, 10, 15, 20, 25], [], []))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(2 ^ n)\nSPACE COMPLEXITY: O(n ^ 2)\n\"\"\"\n", "id": "1640670", "language": "Python", "matching_score": 1.2055613994598389, "max_stars_count": 70, "path": "Solutions/186.py" }, { "content": "\"\"\"\nProblem:\n\nImplement division of two positive integers without using the division, multiplication,\nor modulus operators. Return the quotient as an integer, ignoring the remainder.\n\"\"\"\n\n\ndef divide(dividend: int, divisor: int) -> int:\n # guarding against a non-positive divisor (the loop below would never\n # terminate for divisor <= 0)\n if divisor <= 0:\n raise ValueError(\"divisor must be a positive integer\")\n quotient = 0\n while dividend > 0:\n dividend -= divisor\n if dividend >= 0:\n quotient += 1\n return quotient\n\n\nif __name__ == \"__main__\":\n print(divide(1, 1))\n print(divide(0, 1))\n print(divide(12, 3))\n print(divide(13, 3))\n print(divide(25, 5))\n print(divide(25, 7))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "11360763", "language": "Python", "matching_score": 0.2646417021751404, "max_stars_count": 70, "path": "Solutions/088.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a sorted list of integers of length N, determine if an element x is in the list\nwithout performing any multiplication, division, or bit-shift operations.\n\nDo this in O(log N) time.\n\"\"\"\n\nfrom typing import List\n\n\ndef fibo_search(arr: List[int], val: int) -> int:\n # fibo search to search an element in a sorted array in O(log(n)) time [without\n # multiplication, division, or bit-shift operations]\n length = len(arr)\n if length == 0:\n return -1\n fib_N_2 = 0\n fib_N_1 = 1\n fibNext = fib_N_1 + fib_N_2\n\n while fibNext < len(arr):\n fib_N_2 = fib_N_1\n fib_N_1 = fibNext\n fibNext = fib_N_1 + fib_N_2\n\n index = -1\n while fibNext > 1:\n i = min(index + fib_N_2, (length - 1))\n if arr[i] == val:\n return i\n fibNext = fib_N_1\n if arr[i] < val:\n fib_N_1 = fib_N_2\n index = i\n elif arr[i] > val:\n fib_N_1 = fib_N_1 - fib_N_2\n fib_N_2 = fibNext - fib_N_1\n\n if (fib_N_1 and index < length - 1) and (arr[index + 1] == val):\n return index + 1\n return -1\n\n\nif __name__ == \"__main__\":\n print(fibo_search([1, 3, 5, 7, 9], 3))\n print(fibo_search([1, 3, 5, 7, 9], 1))\n print(fibo_search([1, 3, 5, 7, 9], 7))\n\n print(fibo_search([1, 3, 5, 7, 9], 6))\n print(fibo_search([1, 3, 5, 7, 9], 0))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(log(n))\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "12800827", "language": "Python", "matching_score": 0.973208487033844, "max_stars_count": 70, "path": "Solutions/271.py" }, { "content": "\"\"\"\nProblem:\n\nImplement the function fib(n), which returns the nth number in the Fibonacci sequence,\nusing only O(1) space.\n\"\"\"\n\n\ndef fib(n: int) -> int:\n curr, last = 1, 0\n for _ in range(n - 1):\n curr, last = last + curr, curr\n return curr\n\n\nif __name__ == \"__main__\":\n for i in range(1, 11):\n print(f\"Fib {i}:\\t{fib(i)}\")\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "10164200", "language": "Python", "matching_score": 0.24982067942619324, "max_stars_count": 70, "path": "Solutions/233.py" }, { "content": "\"\"\"\nProblem:\n\nGiven an 
integer list where each number represents the number of hops you can make,\ndetermine whether you can reach to the last index starting at index 0.\n\nFor example, [2, 0, 1, 0] returns true while [1, 1, 0, 1] returns false.\n\"\"\"\n\nfrom typing import List\n\n\ndef can_reach_end(arr: List[int]) -> bool:\n length = len(arr)\n curr_position, last_index = 0, length - 1\n while curr_position < length:\n if curr_position == last_index:\n return True\n elif arr[curr_position] == 0:\n return False\n curr_position += arr[curr_position]\n return False\n\n\nif __name__ == \"__main__\":\n print(can_reach_end([2, 0, 1, 0]))\n print(can_reach_end([1, 1, 0, 1]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "8110088", "language": "Python", "matching_score": 2.3719518184661865, "max_stars_count": 70, "path": "Solutions/106.py" }, { "content": "\"\"\"\nProblem:\n\nYou are given an array of nonnegative integers. Let's say you start at the beginning of\nthe array and are trying to advance to the end. You can advance at most, the number of\nsteps that you're currently on. Determine whether you can get to the end of the array.\n\nFor example, given the array [1, 3, 1, 2, 0, 1], we can go from indices\n0 -> 1 -> 3 -> 5, so return true.\n\nGiven the array [1, 2, 1, 0, 0], we can't reach the end, so return false.\n\"\"\"\n\nfrom typing import List\n\n\ndef can_reach_end(arr: List[int]) -> bool:\n length = len(arr)\n dp = [False for _ in range(length)]\n dp[length - 1] = True\n # generating the dp lookup\n for i in range(length - 2, -1, -1):\n for j in range(i + 1, min(length, i + arr[i] + 1)):\n if dp[j]:\n dp[i] = True\n break\n return dp[0]\n\n\nif __name__ == \"__main__\":\n print(can_reach_end([1, 3, 1, 2, 0, 1]))\n print(can_reach_end([1, 2, 1, 0, 0]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n ^ 2)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "10805858", "language": "Python", "matching_score": 0.6310176253318787, "max_stars_count": 70, "path": "Solutions/192.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a number represented by a list of digits, find the next greater permutation of a\nnumber, in terms of lexicographic ordering. If there is not greater permutation\npossible, return the permutation with the lowest value/ordering.\n\nFor example, the list [1,2,3] should return [1,3,2]. The list [1,3,2] should return\n[2,1,3]. 
The list [3,2,1] should return [1,2,3]\n\nCan you perform the operation without allocating extra memory (disregarding the input\nmemory)?\n\"\"\"\n\nfrom typing import List\n\n\ndef get_next(arr: List[int]) -> List[int]:\n length = len(arr)\n if length < 2:\n return arr\n # finding the last element arranged in ascending order\n for index in range(length - 1, -1, -1):\n if index > 0 and arr[index - 1] < arr[index]:\n break\n # if index is 0, arr is sorted in descending order\n if index == 0:\n arr.reverse()\n return arr\n # finding the next permutation\n for k in range(length - 1, index - 1, -1):\n if arr[k] > arr[index - 1]:\n arr[k], arr[index - 1] = arr[index - 1], arr[k]\n break\n # arranging the other elements in proper order\n size = (length - 1) + index\n for i in range(index, (size + 1) // 2):\n arr[i], arr[size - i] = arr[size - i], arr[i]\n return arr\n\n\nif __name__ == \"__main__\":\n print(get_next([1, 2, 3]))\n print(get_next([1, 3, 2]))\n print(get_next([2, 1, 3]))\n print(get_next([2, 3, 1]))\n print(get_next([3, 1, 2]))\n print(get_next([3, 2, 1]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "9551078", "language": "Python", "matching_score": 2.951944589614868, "max_stars_count": 70, "path": "Solutions/095.py" }, { "content": "\"\"\"\nProblem:\n\nGiven an integer, find the next permutation of it in absolute order. For example, given\n48975, the next permutation would be 49578.\n\"\"\"\n\nfrom typing import List\n\n\ndef get_next_helper(arr: List[int]) -> List[int]:\n length = len(arr)\n if length < 2:\n return arr\n # finding the last element arranged in ascending order\n for index in range(length - 1, -1, -1):\n if index > 0 and arr[index - 1] < arr[index]:\n break\n # if index is 0, arr is sorted in descending order\n if index == 0:\n arr.reverse()\n return arr\n # finding the next permutation\n for k in range(length - 1, index - 1, -1):\n if arr[k] > arr[index - 1]:\n arr[k], arr[index - 1] = arr[index - 1], arr[k]\n break\n # arranging the other elements in proper order\n size = (length - 1) + index\n for i in range(index, (size + 1) // 2):\n arr[i], arr[size - i] = arr[size - i], arr[i]\n return arr\n\n\ndef get_next(num: int) -> int:\n return int(\"\".join(get_next_helper(list(str(num)))))\n\n\nif __name__ == \"__main__\":\n print(get_next(48975))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n[n = number of digits]\n\"\"\"\n", "id": "9066467", "language": "Python", "matching_score": 0.7699282169342041, "max_stars_count": 70, "path": "Solutions/205.py" }, { "content": "\"\"\"\nProblem:\n\nPascal's triangle is a triangular array of integers constructed with the following\nformula:\n\nThe first row consists of the number 1. For each subsequent row, each element is the\nsum of the numbers directly above it, on either side. 
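(Equivalently, treating out-of-range entries as 0: row[k][i] = row[k - 1][i - 1] +\nrow[k - 1][i].) 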
For example, here are the first\nfew rows:\n\n 1\n 1 1\n 1 2 1\n 1 3 3 1\n1 4 6 4 1\nGiven an input k, return the kth row of Pascal's triangle.\n\nBonus: Can you do this using only O(k) space?\n\"\"\"\n\nfrom typing import List\n\n\ndef get_pascal(k: int) -> List[int]:\n row = [1 for _ in range(k)]\n curr = 1\n for _ in range(k):\n # generating the value for each level\n last = 0\n for i in range(curr - 1):\n last, temp = row[i], last\n row[i] += temp\n curr += 1\n return row\n\n\nif __name__ == \"__main__\":\n print(get_pascal(1))\n print(get_pascal(2))\n print(get_pascal(3))\n print(get_pascal(4))\n print(get_pascal(5))\n print(get_pascal(6))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(k ^ 2)\nSPACE COMPLEXITY: O(k)\n\"\"\"\n", "id": "5754714", "language": "Python", "matching_score": 1.3101567029953003, "max_stars_count": 70, "path": "Solutions/295.py" }, { "content": "\"\"\"\nProblem:\n\nYou are given an array of arrays of integers, where each array corresponds to a row in\na triangle of numbers. For example, [[1], [2, 3], [1, 5, 1]] represents the triangle:\n\n 1\n 2 3\n1 5 1\nWe define a path in the triangle to start at the top and go down one row at a time to\nan adjacent value, eventually ending with an entry on the bottom row. For example,\n1 -> 3 -> 5. The weight of the path is the sum of the entries.\n\nWrite a program that returns the weight of the maximum weight path.\n\"\"\"\n\nfrom typing import List\n\n\ndef get_maximum_weight_path(triangle: List[List[int]]) -> List[int]:\n rows = len(triangle)\n\n if rows == 0:\n return []\n elif rows == 1:\n return triangle[0]\n\n # using dynamic programming to get the maximum weight\n # elements stored as (weight, path)\n dp = [list(row) for row in triangle]\n for i in range(len(dp[-2])):\n dp[-2][i] = (\n (max(dp[-1][i], dp[-1][i + 1]) + dp[-2][i]),\n [max(dp[-1][i], dp[-1][i + 1]), dp[-2][i]],\n )\n for i in range(rows - 3, -1, -1):\n for j in range(i + 1):\n dp[i][j] = (\n (max(dp[i + 1][j][0], dp[i + 1][j + 1][0]) + dp[i][j]),\n max((dp[i + 1][j], dp[i + 1][j + 1]), key=lambda elem: elem[0])[1]\n + [dp[i][j]],\n )\n return dp[0][0][1][::-1]\n\n\nif __name__ == \"__main__\":\n print(get_maximum_weight_path([[1], [2, 3], [1, 5, 1]]))\n print(get_maximum_weight_path([[1], [2, 3], [7, 5, 1]]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n[n = number of items in the triangle]\n\"\"\"\n", "id": "6001332", "language": "Python", "matching_score": 0.7661527991294861, "max_stars_count": 70, "path": "Solutions/201.py" }, { "content": "\"\"\"\nProblem:\n\nYou are given an N * M matrix of 0s and 1s. Starting from the top left corner, how many\nways are there to reach the bottom right corner?\n\nYou can only move right and down. 
0 represents an empty space while 1 represents a wall\nyou cannot walk through.\n\nFor example, given the following matrix:\n\n[[0, 0, 1],\n [0, 0, 1],\n [1, 0, 0]]\nReturn 2, as there are only two ways to get to the bottom right:\n\nRight, down, down, right\nDown, right, down, right\nThe top left corner and bottom right corner will always be 0.\n\"\"\"\n\nfrom typing import List\n\nMatrix = List[List[int]]\n\n\ndef get_possible_paths(matrix: Matrix) -> int:\n n, m = len(matrix), len(matrix[0])\n # resetting the values of 1 to -1 as positive numbers are used to construct the\n # paths\n for i in range(n):\n for j in range(m):\n if matrix[i][j] == 1:\n matrix[i][j] = -1\n # setting the vertical and horizontal paths\n for i in range(n):\n if matrix[i][0] == -1:\n break\n else:\n matrix[i][0] = 1\n for i in range(m):\n if matrix[0][i] == -1:\n break\n else:\n matrix[0][i] = 1\n # generating the paths\n for i in range(1, n):\n for j in range(1, m):\n if matrix[i][j] != -1:\n possible_paths = 0\n if matrix[i - 1][j] != -1:\n possible_paths += matrix[i - 1][j]\n if matrix[i][j - 1] != -1:\n possible_paths += matrix[i][j - 1]\n matrix[i][j] = possible_paths\n return matrix[-1][-1]\n\n\nif __name__ == \"__main__\":\n matrix = [\n [0, 0, 1],\n [0, 0, 1],\n [1, 0, 0]\n ]\n print(get_possible_paths(matrix))\n\n matrix = [\n [0, 0, 1],\n [1, 0, 1],\n [1, 0, 0]\n ]\n print(get_possible_paths(matrix))\n\n matrix = [\n [0, 0, 0],\n [1, 0, 0],\n [0, 0, 0]\n ]\n print(get_possible_paths(matrix))\n \n # end cannot be reached as only right and down traversal is allowed\n matrix = [\n [0, 0, 0],\n [1, 1, 0],\n [0, 0, 0],\n [0, 1, 1],\n [0, 0, 0]\n ]\n print(get_possible_paths(matrix))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n x m)\nSPACE COMPLEXITY: O(n x m)\n\"\"\"\n", "id": "2743501", "language": "Python", "matching_score": 2.3258845806121826, "max_stars_count": 70, "path": "Solutions/158.py" }, { "content": "\"\"\"\nProblem:\n\nThere is an N by M matrix of zeroes. Given N and M, write a function to count the\nnumber of ways of starting at the top-left corner and getting to the bottom-right\ncorner. You can only move right or down.\n\nFor example, given a 2 by 2 matrix, you should return 2, since there are two ways to\nget to the bottom-right:\n\nRight, then down\nDown, then right\nGiven a 5 by 5 matrix, there are 70 ways to get to the bottom-right.\n\"\"\"\n\n\ndef get_num_ways(n: int, m: int) -> int:\n matrix = [[(1 if (i == 0 or j == 0) else 0) for i in range(m)] for j in range(n)]\n\n for i in range(1, n):\n for j in range(1, m):\n matrix[i][j] = matrix[i - 1][j] + matrix[i][j - 1]\n return matrix[n - 1][m - 1]\n\n\nif __name__ == \"__main__\":\n print(get_num_ways(2, 2))\n print(get_num_ways(5, 5))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n x m)\nSPACE COMPLEXITY: O(n x m)\n\"\"\"\n", "id": "7744716", "language": "Python", "matching_score": 0.9121434092521667, "max_stars_count": 70, "path": "Solutions/062.py" }, { "content": "\"\"\"\nProblem:\n\nGiven two rectangles on a 2D graph, return the area of their intersection. 
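(For axis-aligned rectangles this is the product of the horizontal overlap,\nmax(0, min(right edges) - max(left edges)), and the analogous vertical overlap.) 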
If the\nrectangles don't intersect, return 0.\n\nFor example, given the following rectangles:\n\n{\n \"top_left\": (1, 4),\n \"dimensions\": (3, 3) # width, height\n}\n\nand\n\n{\n \"top_left\": (0, 5),\n \"dimensions\" (4, 3) # width, height\n}\n\nreturn 6.\n\"\"\"\n\nfrom typing import Dict, Tuple\n\n\ndef intersection(\n rectangle1: Dict[str, Tuple[int, int]], rectangle2: Dict[str, Tuple[int, int]]\n) -> int:\n # segregating the rectangles by x-axis\n if rectangle1[\"top_left\"][0] < rectangle2[\"top_left\"][0]:\n left = rectangle1\n right = rectangle2\n else:\n left = rectangle2\n right = rectangle1\n # segregating the rectangles by y-axis\n if rectangle1[\"top_left\"][1] > rectangle2[\"top_left\"][1]:\n top = rectangle1\n bottom = rectangle2\n else:\n top = rectangle2\n bottom = rectangle1\n # getting the length of overlap on x-axis\n if (left[\"top_left\"][0] + left[\"dimensions\"][0]) < right[\"top_left\"][0]:\n span_x = 0\n else:\n span_x = (left[\"top_left\"][0] + left[\"dimensions\"][0]) - right[\"top_left\"][0]\n # getting the length of overlap on y-axis\n if (top[\"top_left\"][1] - top[\"dimensions\"][1]) > bottom[\"top_left\"][1]:\n span_y = 0\n else:\n span_y = bottom[\"top_left\"][1] - (top[\"top_left\"][1] - top[\"dimensions\"][1])\n # returning the overlapped area\n return span_x * span_y\n\n\nif __name__ == \"__main__\":\n rectangle1 = {\"top_left\": (1, 4), \"dimensions\": (3, 3)}\n rectangle2 = {\"top_left\": (0, 5), \"dimensions\": (4, 3)}\n\n print(intersection(rectangle1, rectangle2))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(1)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "2140651", "language": "Python", "matching_score": 2.7358596324920654, "max_stars_count": 70, "path": "Solutions/185.py" }, { "content": "\"\"\"\nProblem:\n\nYou are given given a list of rectangles represented by min and max x- and\ny-coordinates. Compute whether or not a pair of rectangles overlap each other. 
If one\nrectangle completely covers another, it is considered overlapping.\n\nFor example, given the following rectangles:\n\n{\n \"top_left\": (1, 4),\n \"dimensions\": (3, 3) # width, height\n},\n{\n \"top_left\": (-1, 3),\n \"dimensions\": (2, 1)\n},\n{\n \"top_left\": (0, 5),\n \"dimensions\": (4, 3)\n}\n\nreturn true as the first and third rectangle overlap each other.\n\"\"\"\n\nfrom typing import Dict, List, Tuple\n\nRectangle = Dict[str, Tuple[int, int]]\n\n\ndef get_intersection_area(rect1: Rectangle, rect2: Rectangle) -> int:\n if rect1[\"top_left\"][0] < rect2[\"top_left\"][0]:\n left = rect1\n right = rect2\n else:\n left = rect2\n right = rect1\n if rect1[\"top_left\"][1] > rect2[\"top_left\"][1]:\n top = rect1\n bottom = rect2\n else:\n top = rect2\n bottom = rect1\n if (left[\"top_left\"][0] + left[\"dimensions\"][0]) < right[\"top_left\"][0]:\n return 0\n else:\n span_x = (left[\"top_left\"][0] + left[\"dimensions\"][0]) - right[\"top_left\"][0]\n if (top[\"top_left\"][1] - top[\"dimensions\"][1]) > bottom[\"top_left\"][1]:\n return 0\n else:\n span_y = bottom[\"top_left\"][1] - (top[\"top_left\"][1] - top[\"dimensions\"][1])\n return span_x * span_y\n\n\ndef get_covered_area(rect: Rectangle) -> int:\n width, height = rect[\"dimensions\"]\n return width * height\n\n\ndef check_rectangles_intersection(rectangles: List[Rectangle]) -> bool:\n length = len(rectangles)\n # checking for intersection for each pair of rectangles\n for i in range(length - 1):\n for j in range(i + 1, length):\n intersection_area = get_intersection_area(rectangles[i], rectangles[j])\n rect1_area = get_covered_area(rectangles[i])\n rect2_area = get_covered_area(rectangles[j])\n if intersection_area in (rect1_area, rect2_area):\n return True\n return False\n\n\nif __name__ == \"__main__\":\n # NOTE: THE QUESTION STATEMENT IS WRONG: RECTANGLES 1 & 3 DO NOT OVERLAP BUT\n # ONLY INTERSECT (SMALL MODIFICATION DONE TO MAKE THEM OVERLAP)\n rectangles = [\n {\"top_left\": (1, 4), \"dimensions\": (3, 3)},\n {\"top_left\": (-1, 3), \"dimensions\": (2, 1)},\n {\"top_left\": (0, 5), \"dimensions\": (4, 4)}, # MODIFICATION\n ]\n\n print(check_rectangles_intersection(rectangles))\n\n rectangles.pop()\n\n print(check_rectangles_intersection(rectangles))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n ^ 2)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "24074", "language": "Python", "matching_score": 0.46294233202934265, "max_stars_count": 70, "path": "Solutions/187.py" }, { "content": "from typing import Union\n\n\nclass Stack:\n \"\"\"\n Stack Class for LIFO Structure\n\n Functions:\n is_empty: Check if the stack is empty\n peek: Get the value at the stack top without removing it\n pop: Pop the object at the top of the stack\n Raises error if the stack is empty\n push: Push an object to the top of the stack\n \"\"\"\n\n def __init__(self) -> None:\n self.stack = []\n self.rear = -1\n self.top = -1\n\n def __repr__(self) -> str:\n return str(self.stack)\n\n def __len__(self) -> int:\n return len(self.stack)\n\n def __delitem__(self, position: int) -> None:\n del self.stack[position]\n self.rear -= 1\n\n def __getitem__(self, position: int) -> Union[int, str]:\n return self.stack[position]\n\n def __setitem__(self, position: int, value: Union[int, str]) -> None:\n self.stack[position] = value\n\n def is_empty(self) -> bool:\n # Check if the stack is empty\n return not bool(self.stack)\n\n def peek(self) -> Union[int, str]:\n # Get the value at the stack top without removing it\n if self.is_empty():\n raise 
Exception(\"Stack Underflow. Cannot peek at an empty stack\")\n return self.stack[-1]\n\n def pop(self) -> Union[int, str]:\n # Pop the value at the stack top\n if self.rear == -1:\n raise Exception(\"Stack Underflow. Cannot pop from an empty stack\")\n elif self.top == 0:\n self.rear = -1\n self.top = -1\n else:\n self.top -= 1\n return self.stack.pop()\n\n def push(self, val: Union[int, str]) -> None:\n # Push a new value to the stack top\n if self.rear == -1:\n self.stack.append(val)\n self.rear = 0\n self.top = 0\n else:\n self.stack.append(val)\n self.top += 1\n", "id": "6243278", "language": "Python", "matching_score": 1.9991134405136108, "max_stars_count": 70, "path": "Solutions/DataStructures/Stack.py" }, { "content": "from typing import Union\n\n\nclass Queue:\n \"\"\"\n Queue Class for FIFO Structure\n\n Functions:\n dequeue: Remove and return the object at the head of the queue\n Raises error if the queue is empty\n enqueue: Add an object to the end of the queue\n is_empty: Check if the queue is empty\n peek: Get the value at the queue head without removing it\n \"\"\"\n\n def __init__(self) -> None:\n self.queue = []\n self.elements = 0\n\n def __repr__(self) -> str:\n return str(self.queue)\n\n def __len__(self) -> int:\n return self.elements\n\n def __delitem__(self, position: int) -> None:\n del self.queue[position]\n self.elements -= 1\n\n def __getitem__(self, position: int) -> Union[int, str]:\n return self.queue[position]\n\n def __setitem__(self, position: int, value: Union[int, str]) -> None:\n self.queue[position] = value\n\n def dequeue(self) -> Union[int, str]:\n # Remove and return the object at the head of the queue\n # Raises error if the queue is empty\n if self.elements == 0:\n raise Exception(\"Queue Underflow. Cannot de-queue from an empty queue\")\n self.elements -= 1\n return self.queue.pop(0)\n\n def enqueue(self, val: Union[int, str]) -> None:\n # Add an object to the end of the queue\n self.elements += 1\n self.queue.append(val)\n\n def is_empty(self) -> bool:\n # Check if the queue is empty\n return not bool(self.queue)\n\n def peek(self) -> Union[int, str]:\n # Get the value at the queue head without removing it\n if self.is_empty():\n raise Exception(\"Queue Underflow. Cannot peek at an empty queue\")\n return self.queue[0]\n", "id": "5644071", "language": "Python", "matching_score": 0.31936782598495483, "max_stars_count": 70, "path": "Solutions/DataStructures/Queue.py" }, { "content": "\"\"\"\nProblem:\n\nGiven 2 Linked List, find out whether they share a common node. 
If there is a common\nnode, find the common node.\n\"\"\"\n\nfrom typing import Optional\n\nfrom DataStructures.LinkedList import Node, LinkedList\n\n\ndef common_node(ll1: LinkedList, ll2: LinkedList) -> bool:\n # traversing to the end of the Linked Lists and comparing the nodes\n pos1 = ll1.head\n while pos1.next != None:\n pos1 = pos1.next\n pos2 = ll2.head\n while pos2.next != None:\n pos2 = pos2.next\n # if the location of the last nodes of the lists are same, then they must share a\n # common node\n return pos1 is pos2\n\n\ndef common_node_pos(ll1: LinkedList, ll2: LinkedList) -> Optional[Node]:\n if common_node(ll1, ll2):\n len1, len2 = len(ll1), len(ll2)\n pos1, pos2 = ll1.head, ll2.head\n smaller_len = min(len1, len2)\n # traversing to the position where the intersection may occour in the longer\n # Linked List\n if len1 < len2:\n pos = len2 - len1\n for _ in range(pos):\n pos2 = pos2.next\n elif len1 > len2:\n pos = len1 - len2\n for _ in range(pos):\n pos1 = pos1.next\n # checking for intersecting node\n for _ in range(smaller_len):\n if pos1 is pos2:\n return pos1\n pos1 = pos1.next\n pos2 = pos2.next\n # no intersection\n return None\n\n\nif __name__ == \"__main__\":\n ll1 = LinkedList()\n ll1.add(5)\n ll1.add(6)\n ll1.add(7)\n ll1.add(8)\n\n ll2 = LinkedList()\n ll2.add(1)\n ll2.add(2)\n ll2.add(3)\n ll2.add(4)\n\n ll3 = LinkedList()\n ll3.add(9)\n ll3.rear.next = ll1.head.next.next\n ll3.rear = ll3.rear.next.next\n ll3.length = 3\n\n print(\"Linked List 1:\", ll1)\n print(\"Linked List 2:\", ll2)\n print(\"Linked List 3:\", ll3)\n\n print(common_node_pos(ll1, ll2))\n print(common_node_pos(ll1, ll3).val)\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "9531725", "language": "Python", "matching_score": 2.3000309467315674, "max_stars_count": 70, "path": "Solutions/020.py" }, { "content": "\"\"\"\nProblem:\n\nGiven k sorted singly linked lists, write a function to merge all the lists into one\nsorted singly linked list.\n\"\"\"\n\nfrom sys import maxsize\nfrom typing import List\n\nfrom DataStructures.LinkedList import LinkedList, Node\n\n\ndef merge_sorted_linked_list(list_of_LL: List[LinkedList]) -> LinkedList:\n k = len(list_of_LL)\n position_arr = [LL.head for LL in list_of_LL]\n sorted_ll = LinkedList()\n while any(position_arr):\n # finding the node with minimum value\n minimum = maxsize\n for i in range(k):\n if position_arr[i] is not None:\n if position_arr[i].val < minimum:\n minimum = position_arr[i].val\n position = i\n # generating new node\n if sorted_ll.head is None:\n sorted_ll.add(position_arr[position].val)\n curr_position = sorted_ll.head\n else:\n curr_position.next = Node(position_arr[position].val)\n curr_position = curr_position.next\n sorted_ll.length += 1\n position_arr[position] = position_arr[position].next\n # resetting rear\n sorted_ll.rear = curr_position\n return sorted_ll\n\n\nif __name__ == \"__main__\":\n LL1 = LinkedList()\n LL1.add(2)\n LL1.add(25)\n LL1.add(70)\n\n LL2 = LinkedList()\n LL2.add(5)\n LL2.add(8)\n LL2.add(14)\n LL2.add(15)\n LL2.add(21)\n LL2.add(48)\n\n LL3 = LinkedList()\n LL3.add(0)\n LL3.add(90)\n\n print(LL1)\n print(LL2)\n print(LL3)\n\n List = [LL1, LL2, LL3]\n print(merge_sorted_linked_list(List))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n + k)\nSPACE COMPLEXITY: O(n + k)\n[n = total number of nodes]\n\"\"\"\n", "id": "1212941", "language": "Python", "matching_score": 2.2586748600006104, "max_stars_count": 70, "path": "Solutions/078.py" }, { "content": "\"\"\"\nProblem:\n\nLet's 
represent an integer in a linked list format by having each node represent a\ndigit in the number. The nodes make up the number in reversed order.\n\nFor example, the following linked list:\n\n1 -> 2 -> 3 -> 4 -> 5 is the number 54321.\n\nGiven two linked lists in this format, return their sum in the same linked list format.\n\nFor example, given\n\n9 -> 9 5 -> 2 return 124 (99 + 25) as:\n\n4 -> 2 -> 1\n\"\"\"\n\nfrom DataStructures.LinkedList import LinkedList, Node\n\n\ndef add_linked_lists(ll1: LinkedList, ll2: LinkedList) -> LinkedList:\n sum_linked_list = LinkedList()\n pos1, pos2 = ll1.head, ll2.head\n carry, curr_position_sum = 0, 0\n # generating the sum of the linked lists\n while pos1 or pos2:\n if pos1 == None:\n curr_position_sum = pos2.val + carry\n if curr_position_sum >= 10:\n carry, curr_position_sum = 1, curr_position_sum - 10\n else:\n carry = 0\n elif pos2 == None:\n curr_position_sum = pos1.val + carry\n if curr_position_sum >= 10:\n carry, curr_position_sum = 1, curr_position_sum - 10\n else:\n carry = 0\n else:\n curr_position_sum = pos2.val + pos1.val + carry\n if curr_position_sum >= 10:\n carry, curr_position_sum = 1, curr_position_sum - 10\n else:\n carry = 0\n sum_linked_list.add(curr_position_sum)\n # moving to the next value\n if pos1:\n pos1 = pos1.next\n if pos2:\n pos2 = pos2.next\n if carry == 1:\n sum_linked_list.add(1)\n return sum_linked_list\n\n\ndef create_linked_list(val: int) -> LinkedList:\n LL = LinkedList()\n while val > 0:\n LL.add(val % 10)\n val = val // 10\n return LL\n\n\nif __name__ == \"__main__\":\n LL1 = create_linked_list(99)\n LL2 = create_linked_list(25)\n\n print(LL1)\n print(LL2)\n print(add_linked_lists(LL1, LL2))\n print()\n\n LL1 = create_linked_list(9)\n LL2 = create_linked_list(250)\n\n print(LL1)\n print(LL2)\n print(add_linked_lists(LL1, LL2))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "11900655", "language": "Python", "matching_score": 1.2899583578109741, "max_stars_count": 70, "path": "Solutions/127.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a linked list, remove all consecutive nodes that sum to zero. Print out the\nremaining nodes.\n\nFor example, suppose you are given the input 3 -> 4 -> -7 -> 5 -> -6 -> 6. 
In this\ncase, you should first remove 3 -> 4 -> -7, then -6 -> 6, leaving only 5.\n\"\"\"\n\nfrom DataStructures.LinkedList import Node, LinkedList\n\n\ndef delete_zero_sum(linked_list: LinkedList) -> LinkedList:\n cumulative = 0\n cumulative_sum_map = {}\n dummy_head = Node(0)\n dummy_head.next = linked_list.head\n linked_list.head = dummy_head\n # removing 0 sum nodes using the property:\n # x -> y -> x [values (x and y) are cumulative sums] implies the linked list\n # contains x -> (y - x) -> -(y - x) and hence the nodes at the end can be removed\n # [this property can also be used to detect multiple nodes summing up to 0]\n node = linked_list.head\n while node:\n cumulative += node.val\n if cumulative in cumulative_sum_map:\n cumulative_sum_map[cumulative].next = node.next\n cumulative_sum_map[cumulative] = node\n node = node.next\n # resetting the linked list (removing dummy head and setting rear)\n linked_list.head = linked_list.head.next\n node = linked_list.head\n while node:\n linked_list.rear = node\n node = node.next\n return linked_list\n\n\nif __name__ == \"__main__\":\n linked_list = LinkedList()\n for elem in [3, 4, -7, 5, -6, 6]:\n linked_list.add(elem)\n print(delete_zero_sum(linked_list))\n\n linked_list = LinkedList()\n for elem in [7, 4, -4, -7, 5, -6, 6]:\n linked_list.add(elem)\n print(delete_zero_sum(linked_list))\n\n linked_list = LinkedList()\n for elem in [7, 4, -4, -7, 5, -6, 6, 10]:\n linked_list.add(elem)\n print(delete_zero_sum(linked_list))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "1171526", "language": "Python", "matching_score": 2.178070306777954, "max_stars_count": 70, "path": "Solutions/305.py" }, { "content": "\"\"\"\nProblem:\n\nWrite a map implementation with a get function that lets you retrieve the value of a\nkey at a particular time.\n\nIt should contain the following methods:\n set(key, value, time): # sets key to value for t = time.\n get(key, time): # gets the key at t = time.\nThe map should work like this. If we set a key at a particular time, it will maintain\nthat value forever or until it gets set at a later time. 
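(One common way to support this, for instance with Python's bisect module, is to keep\na sorted list of (time, value) pairs per key and binary-search for the latest entry at\nor before the query time; the solution below instead keeps one time-ordered linked\nlist of snapshots.) 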
In other words, when we get a\nkey at a time, it should return the value that was set for that key set at the most\nrecent time.\n\nConsider the following examples:\n\nd.set(1, 1, 0) # set key 1 to value 1 at time 0\nd.set(1, 2, 2) # set key 1 to value 2 at time 2\nd.get(1, 1) # get key 1 at time 1 should be 1\nd.get(1, 3) # get key 1 at time 3 should be 2\n\nd.set(1, 1, 5) # set key 1 to value 1 at time 5\nd.get(1, 0) # get key 1 at time 0 should be null\nd.get(1, 10) # get key 1 at time 10 should be 1\n\nd.set(1, 1, 0) # set key 1 to value 1 at time 0\nd.set(1, 2, 0) # set key 1 to value 2 at time 0\nd.get(1, 0) # get key 1 at time 0 should be 2\n\"\"\"\n\nfrom typing import Any, Dict, Optional\n\n\nclass Node:\n def __init__(self, val_list: Dict[int, int], time: int) -> None:\n self.val_list = val_list\n self.time = time\n self.next = None\n self.prev = None\n\n def __repr__(self) -> str:\n if self.next:\n return f\"{self.val_list} ({self.time}) <=> {str(self.next)}\"\n return f\"{self.val_list} ({self.time})\"\n\n def __eq__(self, other: Any) -> bool:\n if type(other) == Node:\n return self.time == other.time\n return False\n\n\nclass Double_Linked_List:\n def __init__(self) -> None:\n self.head = None\n self.rear = None\n self.length = 0\n\n def __repr__(self) -> str:\n return str(self.head)\n\n def __bool__(self) -> bool:\n return bool(self.head)\n\n def add(self, val_list: Dict[int, int], time: int) -> None:\n self.length += 1\n if self.head == None:\n self.head = Node(val_list, time)\n self.rear = self.head\n else:\n self.rear.next = Node(val_list, time)\n self.rear.next.prev = self.rear\n self.rear = self.rear.next\n\n\nclass Map:\n def __init__(self) -> None:\n self.linked_list = Double_Linked_List()\n\n def set(self, key: int, value: int, time: int) -> None:\n if not self.linked_list:\n self.linked_list.add({key: value}, time)\n else:\n pos = self.linked_list.head\n while pos and pos.time < time:\n pos = pos.next\n # adding the new node at the head (only for a strictly earlier time;\n # an equal time is handled as an in-place update below)\n if pos == self.linked_list.head and time < pos.time:\n temp_val_list = {key: value}\n new_node = Node(temp_val_list, time)\n\n self.linked_list.head.prev = new_node\n new_node.next = self.linked_list.head\n self.linked_list.head = new_node\n return\n # adding the new value\n if pos:\n if time == pos.time:\n pos.val_list[key] = value\n else:\n temp_val_list = dict(pos.prev.val_list)\n temp_val_list[key] = value\n new_node = Node(temp_val_list, time)\n\n new_node.next = pos\n new_node.prev = pos.prev\n pos.prev.next = new_node\n pos.prev = new_node\n return\n temp_val_list = dict(self.linked_list.rear.val_list)\n temp_val_list[key] = value\n self.linked_list.add(temp_val_list, time)\n\n def get(self, key: int, time: int) -> Optional[int]:\n if not self.linked_list:\n return None\n\n pos = self.linked_list.head\n while pos and pos.time < time:\n pos = pos.next\n # key in the rear\n if not pos:\n try:\n temp = self.linked_list.rear.val_list[key]\n return temp\n except:\n return None\n # key in the current node\n elif pos and pos.time == time:\n try:\n temp = pos.val_list[key]\n return temp\n except:\n return None\n # key in previous node\n else:\n try:\n temp = pos.prev.val_list[key]\n return temp\n except:\n return None\n\n\nif __name__ == \"__main__\":\n d = Map()\n d.set(1, 1, 0) # set key 1 to value 1 at time 0\n d.set(1, 2, 2) # set key 1 to value 2 at time 2\n print(d.get(1, 1)) # get key 1 at time 1 should be 1\n print(d.get(1, 3)) # get key 1 at time 3 should be 2\n print()\n\n d = Map()\n d.set(1, 1, 5) # set key 1 to value 1 at time 5\n print(d.get(1, 0)) # get key 1 
at time 0 should be null\n print(d.get(1, 10)) # get key 1 at time 10 should be 1\n print()\n\n d = Map()\n d.set(1, 1, 0) # set key 1 to value 1 at time 0\n d.set(1, 2, 0) # set key 1 to value 2 at time 0\n print(d.get(1, 0)) # get key 1 at time 0 should be 2\n", "id": "9142122", "language": "Python", "matching_score": 1.3150596618652344, "max_stars_count": 70, "path": "Solutions/097.py" }, { "content": "\"\"\"\nProblem:\n\nImplement an LFU (Least Frequently Used) cache. It should be able to be initialized\nwith a cache size n, and contain the following methods:\n\nset(key, value): sets key to value. If there are already n items in the cache and we\nare adding a new item, then it should also remove the least frequently used item. If\nthere is a tie, then the least recently used key should be removed.\nget(key): gets the value at key. If no such key exists, return null.\nEach operation should run in O(1) time.\n\"\"\"\n\nfrom typing import Callable, Optional\n\n\nclass DoubleLinkedListNode:\n def __init__(self, key: int, val: int) -> None:\n self.key = key\n self.val = val\n self.freq = 0\n self.next = None\n self.prev = None\n\n\nclass DoubleLinkedList:\n def __init__(self) -> None:\n self.head = DoubleLinkedListNode(None, None)\n self.rear = DoubleLinkedListNode(None, None)\n self.head.next, self.rear.prev = self.rear, self.head\n\n def add(self, node: DoubleLinkedListNode) -> None:\n temp = self.rear.prev\n self.rear.prev, node.next = node, self.rear\n temp.next, node.prev = node, temp\n node.freq += 1\n self._position_node(node)\n\n def remove(self, node: DoubleLinkedListNode) -> DoubleLinkedListNode:\n temp_last, temp_next = node.prev, node.next\n node.prev, node.next = None, None\n temp_last.next, temp_next.prev = temp_next, temp_last\n return node\n\n def _position_node(self, node: DoubleLinkedListNode) -> None:\n while node.prev.key and node.prev.freq > node.freq:\n node1, node2 = node, node.prev\n node1.prev, node2.next = node2.prev, node1.prev\n node1.next, node2.prev = node2, node1\n\n\nclass LFUCache:\n def __init__(self, capacity: int) -> None:\n self.list = DoubleLinkedList()\n self.capacity = capacity\n self.num_keys = 0\n self.hits = 0\n self.miss = 0\n self.cache = {}\n\n def __repr__(self) -> str:\n return (\n f\"CacheInfo(hits={self.hits}, misses={self.miss}, \"\n f\"capacity={self.capacity}, current_size={self.num_keys})\"\n )\n\n def __contains__(self, key: int) -> bool:\n return key in self.cache\n\n def get(self, key: int) -> Optional[int]:\n if key in self.cache:\n self.hits += 1\n self.list.add(self.list.remove(self.cache[key]))\n return self.cache[key].val\n self.miss += 1\n return None\n\n def set(self, key: int, value: int) -> None:\n if key not in self.cache:\n if self.num_keys >= self.capacity:\n key_to_delete = self.list.head.next.key\n self.list.remove(self.cache[key_to_delete])\n del self.cache[key_to_delete]\n self.num_keys -= 1\n self.cache[key] = DoubleLinkedListNode(key, value)\n self.list.add(self.cache[key])\n self.num_keys += 1\n else:\n node = self.list.remove(self.cache[key])\n node.val = value\n self.list.add(node)\n\n\nif __name__ == \"__main__\":\n cache = LFUCache(3)\n\n print(cache.get(\"a\"))\n\n cache.set(\"a\", 1)\n cache.set(\"b\", 2)\n cache.set(\"c\", 3)\n\n print(cache.get(\"a\"))\n\n cache.set(\"d\", 4)\n cache.set(\"e\", 5)\n\n print(cache.get(\"a\"))\n print(cache.get(\"b\"))\n print(cache.get(\"c\"))\n print(cache.get(\"d\"))\n print(cache.get(\"e\"))\n\n print(cache)\n", "id": "3986855", "language": "Python", "matching_score": 
4.726604461669922, "max_stars_count": 70, "path": "Solutions/067.py" }, { "content": "\"\"\"\nProblem:\n\nImplement an LRU (Least Recently Used) cache. It should be able to be initialized with\na cache size n, and contain the following methods:\n\nset(key, value): sets key to value. If there are already n items in the cache and we\nare adding a new item, then it should also remove the least recently used item.\nget(key): gets the value at key. If no such key exists, return null. Each operation\nshould run in O(1) time.\n\"\"\"\n\nfrom __future__ import annotations\nfrom typing import Optional\n\n\nclass DoubleLinkedListNode:\n def __init__(self, key: int, val: int) -> None:\n self.key = key\n self.val = val\n self.next = None\n self.prev = None\n\n\nclass DoubleLinkedList:\n def __init__(self) -> None:\n self.head = DoubleLinkedListNode(None, None)\n self.rear = DoubleLinkedListNode(None, None)\n self.head.next, self.rear.prev = self.rear, self.head\n\n def add_node(self, node: DoubleLinkedListNode) -> None:\n temp = self.rear.prev\n temp.next, node.prev = node, temp\n self.rear.prev, node.next = node, self.rear\n\n def remove_node(self, node: DoubleLinkedListNode) -> DoubleLinkedListNode:\n temp_last, temp_next = node.prev, node.next\n node.prev, node.next = None, None\n temp_last.next, temp_next.prev = temp_next, temp_last\n return node\n\n\nclass LRUCache:\n def __init__(self, capacity: int) -> None:\n self.list = DoubleLinkedList()\n self.capacity = capacity\n self.num_keys = 0\n self.hits = 0\n self.miss = 0\n self.cache = {}\n\n def __repr__(self) -> str:\n return (\n f\"CacheInfo(hits={self.hits}, misses={self.miss}, \"\n f\"capacity={self.capacity}, current size={self.num_keys})\"\n )\n\n def get(self, key: int) -> Optional[int]:\n if key in self.cache:\n self.hits += 1\n # placing the node at the proper position in the linked list in case of\n # cache hit\n self.list.add_node(self.list.remove_node(self.cache[key]))\n return self.cache[key].val\n self.miss += 1\n return None\n\n def set(self, key: int, value: int) -> None:\n if key not in self.cache:\n if self.num_keys >= self.capacity:\n # replacement algorithm in case the cache is full\n key_to_delete = self.list.head.next.key\n self.list.remove_node(self.cache[key_to_delete])\n del self.cache[key_to_delete]\n self.num_keys -= 1\n self.cache[key] = DoubleLinkedListNode(key, value)\n self.list.add_node(self.cache[key])\n self.num_keys += 1\n return\n # if the key is already present, its value is updated\n node = self.list.remove_node(self.cache[key])\n node.val = value\n self.list.add_node(node)\n\n\nif __name__ == \"__main__\":\n cache = LRUCache(3)\n\n print(cache.get(\"a\"))\n\n cache.set(\"a\", 1)\n cache.set(\"b\", 2)\n cache.set(\"c\", 3)\n\n print(cache.get(\"a\"))\n\n cache.set(\"d\", 4)\n cache.set(\"e\", 5)\n\n print(cache.get(\"a\"))\n print(cache.get(\"b\"))\n print(cache.get(\"c\"))\n print(cache.get(\"d\"))\n print(cache.get(\"e\"))\n\n print(cache)\n", "id": "4071999", "language": "Python", "matching_score": 1.095854640007019, "max_stars_count": 70, "path": "Solutions/052.py" }, { "content": "\"\"\"\nProblem:\n\nGiven the head to a singly linked list, where each node also has a 'random' pointer\nthat points to anywhere in the linked list, deep clone the list.\n\"\"\"\n\nfrom DataStructures.LinkedList import LinkedList, Node\n\n\ndef rand_join(ll: LinkedList, pos1: int, pos2: int) -> None:\n pos_source = ll.head\n pos_dest = ll.head\n try:\n # moving the pointers to the required position\n for _ in range(pos1):\n 
pos_source = pos_source.next\n for _ in range(pos2):\n pos_dest = pos_dest.next\n # setting the random pointer\n pos_source.random_ptr = pos_dest\n except:\n raise IndexError(\"Given position is out of the Linked List\")\n\n\ndef clone(ll: LinkedList) -> LinkedList:\n clone_head = ll.head\n pos1 = ll.head\n pos2 = ll.head.next\n # duplicating all elements (by value in the linked list)\n # [a -> b -> c becomes a -> a -> b -> b -> c -> c]\n for _ in range(ll.length):\n pos1.next = Node(pos1.val)\n pos1 = pos1.next\n pos1.next = pos2\n pos1 = pos1.next\n if pos2 is None:\n break\n pos2 = pos2.next\n # setting the clone head to the proper position\n clone_head = clone_head.next\n pos1 = ll.head\n # setting the random pointer of the cloned linked list\n # (every 2nd element in the new linked list: a -> [a] -> b -> [b] -> c -> [c])\n for _ in range(ll.length - 1):\n pos1.next.random_ptr = pos1.random_ptr\n pos1 = pos1.next.next\n # reverting the linked list to its original form\n pos1 = ll.head\n pos2 = ll.head.next\n for _ in range(ll.length - 1):\n pos1.next = pos2.next\n pos2.next = pos2.next.next\n pos1 = pos1.next\n if pos2.next == None:\n break\n pos2 = pos2.next\n # creating the cloned linked list from the generated nodes\n cloned_LL = LinkedList()\n cloned_LL.head = clone_head\n cloned_LL.length = ll.length\n cloned_LL.rear = pos2\n return cloned_LL\n\n\n# adding the random pointer to Node class\nsetattr(Node, \"random_ptr\", None)\n\nif __name__ == \"__main__\":\n LL = LinkedList()\n\n LL.add(1)\n LL.add(2)\n LL.add(3)\n LL.add(4)\n\n rand_join(LL, 0, 2)\n rand_join(LL, 2, 0)\n rand_join(LL, 1, 3)\n\n print(\"Original List:\", LL)\n\n LL_clone = clone(LL)\n\n print(\"Cloned List:\", LL_clone)\n\n # adding different elements to show that the clone is a deep copy\n LL.add(100)\n LL_clone.add(5)\n\n print(\"\\nOriginal List:\", LL)\n\n print(\"Cloned List:\", LL_clone)\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "234100", "language": "Python", "matching_score": 2.3595829010009766, "max_stars_count": 70, "path": "Solutions/131.py" }, { "content": "\"\"\"\nProblem:\n\nDetermine whether a doubly linked list is a palindrome. 
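(With prev pointers, one can simply walk inward from both ends in O(n).) 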
What if it’s singly linked?\n\nFor example, 1 -> 4 -> 3 -> 4 -> 1 returns true while 1 -> 4 returns false.\n\"\"\"\n\nfrom DataStructures.LinkedList import LinkedList\n\n\ndef is_palindrome(ll: LinkedList) -> bool:\n if ll.head is None:\n return True\n elif ll.rear == ll.head:\n return True\n\n pos1 = ll.head\n pos2 = ll.rear\n for i in range((ll.length + 1) // 2):\n if pos1.val != pos2.val:\n return False\n # updating the end pointer\n pos = pos1\n for _ in range((ll.length - (2 * i)) - 2):\n pos = pos.next\n pos2 = pos\n # updating the start pointer\n pos1 = pos1.next\n return True\n\n\nif __name__ == \"__main__\":\n LL = LinkedList()\n for i in [1, 4, 3, 2, 3, 4, 1]:\n LL.add(i)\n print(\"Palindrome: {}\\t\\tList: {}\".format(is_palindrome(LL), LL))\n\n LL = LinkedList()\n for i in [1, 4, 3]:\n LL.add(i)\n print(\"Palindrome: {}\\t\\tList: {}\".format(is_palindrome(LL), LL))\n\n LL = LinkedList()\n for i in [1]:\n LL.add(i)\n print(\"Palindrome: {}\\t\\tList: {}\".format(is_palindrome(LL), LL))\n\n LL = LinkedList()\n print(\"Palindrome: {}\\t\\tList: {}\".format(is_palindrome(LL), LL))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n ^ 2)\nSPACE COMPLEXITY: O(1)\n[This problem can be reduced to O(n) time & space by caching the Linked List in an\narray]\n[If a Double Linked List is used, the problem is reduced to O(n) time & O(1) space]\n\"\"\"\n", "id": "11198856", "language": "Python", "matching_score": 2.0269081592559814, "max_stars_count": 70, "path": "Solutions/104.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a linked list, uniformly shuffle the nodes. What if we want to prioritize space\nover time?\n\"\"\"\n\nfrom random import randint\n\nfrom DataStructures.LinkedList import LinkedList\n\n\ndef shuffle(ll: LinkedList) -> LinkedList:\n length = len(ll)\n if length in (0, 1):\n return ll\n\n # Fisher-Yates shuffle on the node values: swapping each position with a\n # uniformly chosen position from the remaining suffix yields a uniform\n # permutation (independent random swaps do not)\n node1 = ll.head\n for i in range(length - 1):\n swap_index = randint(i, length - 1)\n node2 = ll.head\n for _ in range(swap_index):\n node2 = node2.next\n node1.val, node2.val = node2.val, node1.val\n node1 = node1.next\n return ll\n\n\nif __name__ == \"__main__\":\n ll = LinkedList()\n\n for i in range(1, 6):\n ll.add(i)\n\n print(ll)\n shuffle(ll)\n print(ll)\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n ^ 2)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "8288260", "language": "Python", "matching_score": 2.7489774227142334, "max_stars_count": 70, "path": "Solutions/337.py" }, { "content": "\"\"\"\nProblem:\n\nGiven the head of a singly linked list, swap every two nodes and return its head.\n\nFor example, given 1 -> 2 -> 3 -> 4, return 2 -> 1 -> 4 -> 3.\n\"\"\"\n\nfrom DataStructures.LinkedList import Node, LinkedList\n\n\ndef swap_nodes(ll: LinkedList) -> Node:\n node1 = ll.head\n node2 = ll.head.next\n while True:\n try:\n node1.val, node2.val = node2.val, node1.val\n node1, node2 = node1.next.next, node2.next.next\n except:\n break\n return ll.head\n\n\nif __name__ == \"__main__\":\n LL = LinkedList()\n\n LL.add(1)\n LL.add(2)\n LL.add(3)\n LL.add(4)\n\n print(LL)\n print(swap_nodes(LL))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "10939487", "language": "Python", "matching_score": 0.09632503241300583, "max_stars_count": 70, "path": "Solutions/145.py" }, { "content": "####################################################\n# LOCAL IMPORT #####################################\n####################################################\n\nfrom Data_Gen import generate_data\n\n####################################################\n# 
{ "content": "####################################################\n# LOCAL IMPORT #####################################\n####################################################\n\nfrom Data_Gen import generate_data\n\n####################################################\n# IMPORTS (FROM LIBRARY) ###########################\n####################################################\n\nimport requests\n\n####################################################\n# DECLARING NECESSARY VARIABLES ####################\n####################################################\n\nurl = 'http://127.0.0.1:5000/api'\nheader = {}\n\n####################################################\n# USER DETAILS + VALIDATION ########################\n####################################################\n\n# DEFAULT ACCEPTED VALUES FOR USERNAME AND PASSWORD\n# username: 'admin', \n# password: 'password'\n\nuser = input(\"Enter user-name: \")\npassword = input(\"Enter password: \")\n\nheader['user'] = user\nheader['password'] = password\n\ntry:\n r = requests.patch(url, headers=header)\n\n if (r.json()[\"status\"] == \"Accepted\"):\n print('Access Granted...')\n else:\n print(\"Access Denied. Closing program...\")\n exit()\n\nexcept Exception:\n # not a bare except: a bare except would also catch the SystemExit raised\n # by exit() above and misreport it as a connection error\n print(\"Connection Error!\")\n print(\"Unable to connect to server. Make sure the Karmatek site is running in case of a local server; otherwise, check the url variable.\")\n print(\"Closing program...\")\n exit()\n\n####################################################\n# DATA FETCH & EXCEL SHEET CREATION ################\n####################################################\n\ndef data_gen():\n choice = input(\"Generate the participation details? (y/n): \").lower()\n\n if (choice == 'y'):\n try:\n r = requests.get(url, headers=header)\n if (r.status_code == 200):\n data = r.json()\n if (len(data[0]) and len(data[1])):\n generate_data(data)\n print('Task Complete!')\n else:\n print(\"No participant has registered for any event\")\n else:\n raise RuntimeError\n\n except requests.exceptions.ConnectionError:\n print('Connection Error!'.upper())\n print('Closing Program...'.upper())\n exit()\n\n except Exception:\n print(\"Some error has occurred!\".upper())\n\n####################################################\n# EMAIL RESEND REQUEST #############################\n####################################################\n\ndef email_resend():\n choice = input(\"Are you sure you want to resend mails to the unconfirmed participants? (y/n): \").lower()\n\n if (choice == 'y'):\n try:\n r = requests.post(url, headers=header)\n if (r.status_code == 200):\n if (len(r.json()) > 0):\n print('Task Complete!')\n else:\n print('No unconfirmed user found.')\n else:\n raise RuntimeError\n \n except requests.exceptions.ConnectionError:\n print('Connection Error!'.upper())\n print('Closing Program...'.upper())\n exit()\n\n except Exception:\n print(\"Some error has occurred!\".upper())
\n\n####################################################\n# UPDATE ADMIN DETAILS REQUEST ######################\n####################################################\n\ndef update_admin():\n choice = input(\"Are you sure you want to update admin details? (y/n): \").lower()\n\n if (choice == 'y'):\n header_update = {}\n\n user_confirm = input(\"Enter user-name for re-confirmation: \")\n password_confirm = input(\"Enter password for re-confirmation: \")\n\n header_update['user'] = user_confirm\n header_update['password'] = password_confirm\n\n r = requests.patch(url, headers=header_update)\n\n if (r.json()[\"status\"] == \"Accepted\"):\n print('Access Granted...')\n else:\n print(\"Access Denied. Check the username and password\")\n return\n\n header_update['user_new'] = input('Enter the new username (Leave blank for no change): ')\n\n if (header_update['user_new'] == \"\"):\n header_update['user_new'] = None\n \n header_update['password_new'] = input('Enter the new password (Leave blank for no change): ')\n\n if (header_update['password_new'] == \"\"):\n header_update['password_new'] = None\n\n r = requests.put(url, headers=header_update)\n \n if (r.status_code == 200):\n if (header_update['user_new'] != None):\n header['user'] = header_update['user_new']\n if (header_update['password_new'] != None):\n header['password'] = header_update['password_new']\n print('Task Complete!')\n else:\n print('Error! Couldn\\'t update details')\n\n####################################################\n# DRIVER CODE ######################################\n####################################################\n\nwhile True:\n print(\"\\nChoose from the following options:\")\n print('(1) Generate Participation Data')\n print('(2) Resend emails to the unconfirmed participants')\n print('(3) Update admin details')\n print('(4) Close program')\n choice = input('Enter your choice: ')\n\n if (choice == '1'):\n data_gen()\n \n elif (choice == '2'):\n email_resend()\n\n elif (choice == '3'):\n update_admin()\n\n elif (choice == '4'):\n print(f'Bye {header[\"user\"]}!')\n print('Have a nice day')\n break\n\n else:\n print('Sorry, could not understand what you wanted to say')", "id": "12101652", "language": "Python", "matching_score": 3.6823229789733887, "max_stars_count": 1, "path": "client.py" },
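The API module below imports check_admin_cred and update_admin_cred from Karmatek.api.credentials_confirm, which is not included in this dump. A minimal sketch of what such helpers could look like (purely illustrative; the file name and storage format are assumptions, not the project's actual implementation):

# Hypothetical stand-in for Karmatek/api/credentials_confirm.py.
import json
import os

_CRED_FILE = os.path.join(os.path.dirname(__file__), 'credentials.json')  # assumed location

def check_admin_cred(user, password):
    # compare the supplied credentials against the stored pair
    with open(_CRED_FILE) as fh:
        stored = json.load(fh)
    return user == stored['user'] and password == stored['password']

def update_admin_cred(user=None, password=None):
    # overwrite only the fields that were supplied
    with open(_CRED_FILE) as fh:
        stored = json.load(fh)
    if user is not None:
        stored['user'] = user
    if password is not None:
        stored['password'] = password
    with open(_CRED_FILE, 'w') as fh:
        json.dump(stored, fh)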
{ "content": "####################################################\r\n# IMPORTS (LOCAL) ##################################\r\n####################################################\r\n\r\nfrom Karmatek import api, app, db, mail\r\nfrom Karmatek.model import User, Events\r\nfrom Karmatek.users.views import serializer, Message\r\nfrom Karmatek.api.credentials_confirm import check_admin_cred, update_admin_cred\r\n\r\n####################################################\r\n# IMPORTS (FROM LIBRARY) ###########################\r\n####################################################\r\n\r\nfrom flask_restful import Resource\r\nfrom flask import Blueprint, request, abort, url_for\r\n\r\n####################################################\r\n# BLUEPRINT SETUP ##################################\r\n####################################################\r\n\r\napi_blueprint = Blueprint('api', __name__)\r\n\r\n####################################################\r\n# API SETUP ########################################\r\n####################################################\r\n\r\nclass Api_endpoint_Resource(Resource):\r\n\r\n# GET REQUEST: Returns the Users who have confirmed the email and the list of events they are participating in\r\n\r\n def get(self):\r\n if ('user' in request.headers and 'password' in request.headers):\r\n if (check_admin_cred(request.headers.get('user'), request.headers.get('password'))):\r\n users = list(User.query.all())\r\n events = list(Events.query.all())\r\n\r\n return [[user.json() for user in users if (user.confirm)], [event.json() for event in events]]\r\n\r\n else:\r\n return {'message': 'Access Denied'}, 403\r\n \r\n else:\r\n return {'message': 'Access Denied'}, 403\r\n \r\n# POST REQUEST: Returns the list of unconfirmed users and re-sends the confirmation mail to them\r\n\r\n def post(self):\r\n if ('user' in request.headers and 'password' in request.headers):\r\n if (check_admin_cred(request.headers.get('user'), request.headers.get('password'))):\r\n emails = list(db.engine.execute('select users.email \\\r\n from users \\\r\n where users.confirm=0'))\r\n \r\n for i in range(len(emails)):\r\n emails[i] = emails[i][0]\r\n \r\n try:\r\n for email in emails:\r\n token = serializer.dumps(email, salt='email-confirm')\r\n link = url_for('users.confirm_email', token=token, _external=True)\r\n link_home = url_for('home', _external=True)\r\n\r\n username = list(db.engine.execute(f'select users.username \\\r\n from users \\\r\n where users.email=\"{email}\"'))[0][0]\r\n\r\n msg = Message('Karmatek 2k20 Confirmation', sender=app.config[\"MAIL_USERNAME\"], recipients=[email])\r\n\r\n msg.body = f'''\r\n\\tHello {username}\r\n\r\nThank you for registering at Karmatek 2k20. Please click on the link below to confirm your email id.\r\nYour confirmation link is: {link}\r\nPlease login to your account and select the events you want to participate in as soon as possible at the official Karmatek 2k20 site ({link_home}).\r\nHope you have an awesome time.\r\nLET'S TECHNICATE....\r\n\r\nPS: Ignore the mail if you have already confirmed your mail id.\r\n \r\n\\tYours Sincerely\r\n\\t<NAME>\r\n\\tTechnical Head\r\n\\tKarmatek 2k20\r\n\\tGCECT Tech-fest\r\n '''\r\n\r\n mail.send(msg)\r\n\r\n except Exception:\r\n print('\\nUnable to send mails, please check the user id and password (in Karmatek/__init__.py)\\n')\r\n return {'message': 'Internal Server Error'}, 500\r\n\r\n return emails\r\n \r\n else:\r\n return {'message': 'Access Denied'}, 403\r\n \r\n else:\r\n return {'message': 'Access Denied'}, 403\r\n\r\n# PUT REQUEST: Updates the admin details (name and/or password)\r\n\r\n def put(self):\r\n if ('user' in request.headers and 'password' in request.headers):\r\n if (check_admin_cred(request.headers.get('user'), request.headers.get('password'))):\r\n if ('user_new' in request.headers or 'password_new' in request.headers):\r\n if ('user_new' in request.headers):\r\n username = request.headers.get('user_new')\r\n else:\r\n username = None\r\n\r\n if ('password_new' in request.headers):\r\n password = request.headers.get('password_new')\r\n else:\r\n password = None\r\n\r\n update_admin_cred(username, password)\r\n else:\r\n return {'message': 'New details not found'}, 404\r\n\r\n else:\r\n return {'message': 'Access Denied'}, 403\r\n \r\n else:\r\n return {'message': 'Access Denied'}, 403\r\n\r\n# PATCH REQUEST: Checks if the admin details are correct\r\n\r\n def patch(self):\r\n if ('user' in request.headers and 'password' in request.headers):\r\n if (check_admin_cred(request.headers.get('user'), request.headers.get('password'))):\r\n return {\"status\": \"Accepted\"}\r\n \r\n else:\r\n return {\"status\": \"Rejected\"}, 403\r\n \r\n else:\r\n return {\"status\": \"Rejected\"}, 403\r\n \r\n\r\napi.add_resource(Api_endpoint_Resource, '/api')\r\n", "id": "9683340", "language": "Python", "matching_score": 3.932061195373535, "max_stars_count": 2, "path": "Karmatek/api/__init__.py" },
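The raw queries in the API above interpolate the email address with an f-string; a parameterized form lets the driver handle quoting instead. A sketch, assuming the SQLAlchemy 1.x-style db.engine.execute used throughout this codebase:

from sqlalchemy import text

# bound parameter instead of f-string interpolation; 'email' is the same
# variable the POST handler loops over (row is None if no match)
row = db.engine.execute(
    text('select users.username from users where users.email = :email'),
    {'email': email},
).first()
username = row[0]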
{ "content": "####################################################\r\n# IMPORTS (FROM LIBRARY) ###########################\r\n####################################################\r\n\r\nfrom flask import Blueprint, render_template, flash, redirect, request, url_for, abort\r\nfrom flask_login import login_user, logout_user, current_user, login_required\r\nfrom flask_mail import Message\r\nfrom itsdangerous import URLSafeTimedSerializer, SignatureExpired\r\n\r\n####################################################\r\n# IMPORTS (LOCAL) ##################################\r\n####################################################\r\n\r\nfrom Karmatek.users.forms import LoginForm, Register, UpdateUserForm, EventsForm\r\nfrom Karmatek.model import User, Events\r\nfrom Karmatek import app, db, login_manager, mail\r\n\r\n####################################################\r\n# BLUEPRINT SETUP ##################################\r\n####################################################\r\n\r\nusers = Blueprint('users', __name__)\r\n\r\n####################################################\r\n# TIMED SERIALIZER SETUP ###########################\r\n####################################################\r\n\r\nserializer = URLSafeTimedSerializer('somesecretkey')\r\n\r\n####################################################\r\n# LOGIN SETUP ######################################\r\n####################################################\r\n\r\n@users.route('/login', methods=[\"GET\", \"POST\"])\r\ndef login():\r\n form = LoginForm()\r\n\r\n if form.validate_on_submit():\r\n user = User.query.filter_by(email=form.email.data).first()\r\n \r\n if (user is not None and user.check_password(form.password.data)):\r\n login_user(user, remember=True)\r\n flash('Login Successful!')\r\n\r\n next = request.args.get('next')\r\n\r\n if (next is None or not next.startswith('/')):\r\n next = url_for('home')\r\n \r\n return redirect(next)\r\n \r\n else:\r\n flash('Incorrect Username/Password!')\r\n \r\n return render_template('login.html', form=form)\r\n\r\n####################################################\r\n# REGISTRATION SETUP ###############################\r\n####################################################\r\n\r\n@users.route('/register', methods=[\"GET\", \"POST\"])\r\ndef register():\r\n form = Register()\r\n\r\n if form.validate_on_submit():\r\n user1 = User.query.filter_by(email=form.email.data).first()\r\n\r\n if (user1 == None):\r\n if (len(form.password.data) < 6):\r\n flash('Use a stronger password')\r\n return redirect(url_for('users.register'))\r\n\r\n user = User(email=form.email.data, username=form.name.data, password=form.password.data, ph_num=form.ph_num.data, dept=form.dept.data, year=form.year.data)\r\n db.session.add(user)\r\n db.session.commit()\r\n\r\n try:\r\n token = serializer.dumps(form.email.data, salt='email-confirm')\r\n link = url_for('users.confirm_email', token=token, _external=True)\r\n link_home = url_for('home', _external=True)\r\n\r\n msg = Message('Karmatek 2k20 Confirmation', sender=app.config[\"MAIL_USERNAME\"], recipients=[form.email.data])\r\n\r\n msg.body = f'''\r\n\\tHello {form.name.data}\r\n\r\nThank you for registering at Karmatek 2k20. Please click on the link below to confirm your email id.\r\nYour confirmation link is: {link}\r\nPlease login to your account and select the events you want to participate in as soon as possible at the official Karmatek 2k20 site ({link_home}).\r\nHope you have an awesome time.\r\nLET'S TECHNICATE....\r\n \r\n\\tYours Sincerely\r\n\\t<NAME>\r\n\\tTechnical Head\r\n\\tKarmatek 2k20\r\n\\tGCECT Tech-fest\r\n '''\r\n\r\n mail.send(msg)\r\n\r\n flash(f'Thank you for registering. Welcome to Karmatek 2k20! 
A confirmation email has been sent to \"{form.email.data}\"')\r\n\r\n return redirect(url_for('users.login'))\r\n\r\n except:\r\n flash('Your Account has been created, but at the moment, we are unable to send the confirmation mail.')\r\n abort(500)\r\n \r\n else:\r\n flash('Email already registered!')\r\n \r\n return render_template('register.html', form=form, page_name=\"Registration\")\r\n\r\n####################################################\r\n# LOGOUT SETUP #####################################\r\n####################################################\r\n\r\n@users.route('/logout')\r\n@login_required\r\ndef logout():\r\n logout_user()\r\n flash('Successfully Logged Out!')\r\n return redirect(url_for('home'))\r\n\r\n####################################################\r\n# PROFILE SETUP ####################################\r\n####################################################\r\n\r\n@users.route('/account', methods=[\"GET\", \"POST\"])\r\n@login_required\r\ndef account():\r\n form = UpdateUserForm()\r\n form.email.data = current_user.email\r\n events = list(Events.query.filter_by(user_id=current_user.id))\r\n events_form = EventsForm()\r\n\r\n for i in range(len(events)):\r\n events[i] = (url_for('users.delete', event_id=events[i].id), events[i])\r\n \r\n if events_form.validate_on_submit():\r\n check = list(Events.query.filter_by(user_id=current_user.id, event=events_form.event_selector.data))\r\n\r\n if (check):\r\n flash(\"You have already registered for this event!\")\r\n else:\r\n temp = Events(current_user.id, events_form.event_selector.data)\r\n db.session.add(temp)\r\n db.session.commit()\r\n flash(\"Registration Successful!\")\r\n\r\n return redirect(url_for('users.account'))\r\n \r\n if form.validate_on_submit():\r\n current_user.username = form.name.data\r\n current_user.ph_num = form.ph_num.data\r\n current_user.dept = form.dept.data\r\n current_user.year = form.year.data\r\n\r\n db.session.commit()\r\n\r\n flash('User Account Updated!')\r\n\r\n return redirect(url_for('home'))\r\n\r\n elif request.method == \"GET\":\r\n form.name.data = current_user.username\r\n form.ph_num.data = current_user.ph_num\r\n form.dept.data = current_user.dept\r\n form.year.data = current_user.year\r\n\r\n return render_template('profile.html', form=form, events_form=events_form, events=events, len=len)\r\n\r\n####################################################\r\n# EVENT DETAILS SETUP ##############################\r\n####################################################\r\n\r\ndata_dict = dict([(\"Robo Race\", {'text': 'Design your own robo and bring it on the track to test its power and efficiency. Let’s see who wins the wheel-to-wheel action in this enthralling event.',\r\n 'd&t': '10/03/2020 12:30pm'}),\r\n (\"Robo Carrom\", {'text' : 'Bring your robot to test its skills and agility. Register today and see how good your bot is at playing carrom.',\r\n 'd&t' : '10/03/2020 03:30pm'}),\r\n (\"Robo Soccer\", {'text' : 'Because the robo is not just speed. Yes, your droid/ robot can do a lot more. Let your robot kick the ball while you get the prize money if it wins.',\r\n 'd&t' : '10/03/2020 06:30pm'}),\r\n (\"Robo Maze\", {'text' : 'Feel the thrill, feel the tension as your hand made robot moves through a complicated maze. Let’s see who is the fastest to move through the labyrinth!',\r\n 'd&t' : '11/03/2020 12:30pm'}),\r\n (\"Autonomous Line Follower\", {'text' : 'Does your droid have the caliber to be the fastest line follower? 
Build your own autonomous robot and beat others on the track to win an exciting prize money!',\r\n 'd&t' : '11/03/2020 03:30pm'}),\r\n (\"Code Beta\", {'text' : 'New to coding? No worries! Hone your coding skills and kick start your CP journey with a challenge meant for beginners. Are you up for it?',\r\n 'd&t' : '10/03/2020 12:30pm'}),\r\n (\"Code Pro\", {'text' : 'Put your coding skills to test as you work your way through algorithms like a “PRO”! Grab the title of Code Pro Champ and take home an exciting prize money.',\r\n 'd&t' : '11/03/2020 12:30pm'}),\r\n (\"Web Designing\", {'text' : 'Got a talent for web designing? Don’t let it loiter around. Pick a partner and compete with other talented web designers. Test your skills and maybe win to grab the prize money!',\r\n 'd&t' : '10/03/2020 04:30pm'}),\r\n (\"Pubg\", {'text' : 'The battleground is all set.\\nAssemble your players and fight to remain the last one alive and win not just a chicken dinner but also the prize money.',\r\n 'd&t' : '11/03/2020 02:30pm'}),\r\n (\"NFS Most Wanted\", {'text' : 'Everything under control? Then you aren’t moving fast enough.\\nGet inside the racing car and race your way to win. Let’s see who wins the race to grab the prize.',\r\n 'd&t' : '11/03/2020 04:30pm'}),\r\n (\"Fifa\", {'text' : 'Bring out the e-footballer and the champion within you.\\nPut your FIFA skills to test and battle it out to win and take home the prize. So, are you game?',\r\n 'd&t' : '11/03/2020 06:00pm'}),\r\n (\"Call of Duty\", {'text' : 'Nothing brings gamers together like a bit of competition. How can a gamer miss a competition of COD? Shoot it like your life depends on it and win an assured prize money.',\r\n 'd&t' : '10/03/2020 04:00pm'}),\r\n (\"Chess\", {'text' : '“All that matters on the chessboard is good moves.”- <NAME>\\nDo you think you are smart enough to play the winning move? If you have a passion for chess then come and show us how smart you are.',\r\n 'd&t' : '10/03/2020 01:00pm'}),\r\n (\"Nail it @19\", {'text' : '“Sometimes, less is more.”- Shakespeare.\\nAre you a minimalist with a head full of creativity? Can you knit a crisp and concise write-up with just 19 words? Yes? Come one, come all!',\r\n 'd&t' : '12/03/2020 12:30pm'}),\r\n (\"Petapixel\", {'text' : 'Got a knack for photography? Here is an opportunity for all the budding photographers to show off their skills and win a prize.',\r\n 'd&t' : '12/03/2020 02:00pm'}),\r\n (\"Memester Challenge\", {'text' : 'Can you just not pass a day without fooling around? If you think that you are humorous enough to be a meme maker, then this is your challenge.',\r\n 'd&t' : '12/03/2020 01:30pm'}),\r\n (\"Matrivia\", {'text' : 'Do you have a love for material sciences? Here is the fun-filled quiz on material sciences. Use your knowledge and your wit to crack this questionnaire. Are you smart enough?',\r\n 'd&t' : '12/03/2020 12:30pm'}),\r\n (\"Fandom\", {'text' : 'Are you a Potterhead? Or a die-hard fan of MCU? Do you know everything about GOT? Put your knowledge to the ultimate test and win the prize.',\r\n 'd&t' : '12/03/2020 01:00pm'}),\r\n (\"Ek Duje ke liye\", {'text' : 'Cutest couple in college? Are you the inseparable pair? Hone your way through exciting and fun rounds to show the world what you can do “ek duje ke liye”.',\r\n 'd&t' : '12/03/2020 03:00pm'}),\r\n (\"CubicMatics\", {'text' : 'Can you solve the Rubik’s cube within seconds? Then register now to compete with other such speedcubers and win.',
\r\n 'd&t' : '12/03/2020 04:30pm'})])\r\n\r\n@users.route('/participation/<int:event_id>')\r\ndef event_detail(event_id):\r\n event = Events.query.get_or_404(event_id)\r\n \r\n try:\r\n if (event.user_id != current_user.id):\r\n abort(403)\r\n except:\r\n abort(403)\r\n \r\n return render_template('event.html', event=event, data_dict=data_dict)\r\n\r\n####################################################\r\n# REMOVE PARTICIPATION SETUP #######################\r\n####################################################\r\n\r\n@users.route('/<int:event_id>/delete', methods=[\"GET\", \"POST\"])\r\n@login_required\r\ndef delete(event_id):\r\n event = Events.query.get_or_404(event_id)\r\n\r\n try:\r\n if (event.user_id != current_user.id):\r\n abort(403)\r\n except:\r\n abort(403)\r\n \r\n db.session.delete(event)\r\n db.session.commit()\r\n\r\n flash('Successfully Removed Event Participation!')\r\n\r\n return redirect(url_for('users.account', event=event))\r\n\r\n####################################################\r\n# EMAIL CONFIRMATION SETUP #########################\r\n####################################################\r\n\r\n@users.route('/confirm/<token>')\r\ndef confirm_email(token):\r\n try:\r\n email = serializer.loads(token, salt='email-confirm', max_age=86400)\r\n user = User.query.filter_by(email=email).first()\r\n user.confirm = 1\r\n db.session.commit()\r\n\r\n except SignatureExpired:\r\n flash('Signature has expired. Create a new account and confirm the mail as soon as possible.')\r\n return redirect(url_for('home'))\r\n \r\n flash('Email id Confirmed! Now you can select events to participate in.')\r\n return redirect(url_for('users.account'))", "id": "957230", "language": "Python", "matching_score": 6.465328693389893, "max_stars_count": 2, "path": "Karmatek/users/views.py" },
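Both apps drive their email flows through the same itsdangerous pattern seen above; a minimal, self-contained round-trip sketch (same serializer configuration as in the views):

from itsdangerous import URLSafeTimedSerializer, SignatureExpired

serializer = URLSafeTimedSerializer('somesecretkey')

token = serializer.dumps('user@example.com', salt='email-confirm')
try:
    # max_age is in seconds; 86400 is the 24-hour window confirm_email() uses
    email = serializer.loads(token, salt='email-confirm', max_age=86400)
except SignatureExpired:
    email = None  # expired token; the views flash an error here instead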
{ "content": "####################################################\n# IMPORTS (FROM LIBRARY) ###########################\n####################################################\n\nfrom flask import render_template, url_for, flash, redirect, request, Blueprint, session, abort\nfrom flask_login import login_user, logout_user, current_user, login_required\nfrom flask_mail import Message\nfrom itsdangerous import URLSafeTimedSerializer, SignatureExpired\n\n####################################################\n# IMPORTS (LOCAL) ##################################\n####################################################\n\nfrom blog import db, mail\nfrom blog.models import User, BlogPost, Notifications, Followers\nfrom blog.users.forms import Register, UpdateUserForm, LoginForm, ForgotPasswordForm, ChangePasswordForm, UpdatePasswordForm\nfrom blog.users.password import is_strong\n\n####################################################\n# BLUEPRINT SETUP ##################################\n####################################################\n\nusers = Blueprint('user', __name__)\n\n####################################################\n# TIMED SERIALIZER SETUP ###########################\n####################################################\n\nserializer = URLSafeTimedSerializer('somesecretkey')\n\n####################################################\n# REGISTRATION SETUP ###############################\n####################################################\n\n@users.route('/register', methods=[\"GET\", \"POST\"])\ndef register():\n form = Register()\n\n if form.validate_on_submit():\n user = User.query.filter_by(email=form.email.data).first()\n\n if (user == None):\n if (is_strong(form.password.data)):\n user = User(email=form.email.data, username=form.username.data, password=form.password.data)\n db.session.add(user)\n db.session.commit()\n\n try:\n token = serializer.dumps(form.email.data, salt='email-confirm')\n link = url_for('user.confirm_email', token=token, _external=True)\n link_home = url_for('core.index', _external=True)\n\n msg = Message('Blog Tap Email Confirmation', sender='<EMAIL>', recipients=[form.email.data])\n\n msg.body = f'''\n\\tWelcome Blogger!\n\n{form.username.data}, thank you for registering at Blog Tap.\nPlease click on the link below to confirm your email id.\nYour confirmation link is: {link}\nLogin to your account and start blogging at Blog Tap ({link_home}).\nHope you have an awesome time.\n \n\\tYours Sincerely\n\\t<NAME>\n\\tCreator\n\\tBlog Tap\n'''\n\n mail.send(msg)\n\n flash(f'Thank you for registering at Blog Tap. Welcome {form.username.data}! Please confirm your email within 30 days.')\n\n return redirect(url_for('user.login'))\n\n except:\n flash('Your Account has been created, but we were unable to send the confirmation mail.')\n return redirect(url_for('user.login'))\n\n else:\n flash('Use a strong password (1 Upper and 1 lower case characters, 1 number, 1 symbol and minimum length of 6)')\n return redirect(url_for('user.register'))\n \n else:\n flash('Email already registered!')\n return redirect(url_for('user.register'))\n \n return render_template('register.html', form=form)\n\n####################################################\n# LOGIN SETUP ######################################\n####################################################\n\n@users.route('/login', methods=[\"GET\", \"POST\"])\ndef login():\n form = LoginForm()\n\n if form.validate_on_submit():\n user = User.query.filter_by(email=form.email.data).first()\n \n if (user is not None and user.check_password(form.password.data)):\n login_user(user)\n session.permanent = True\n\n flash('Login Successful!')\n\n next = request.args.get('next')\n\n if (next is None or not next.startswith('/')):\n next = url_for('core.index')\n \n return redirect(next)\n \n else:\n flash('Incorrect Username/Password!')\n return redirect(url_for('user.login'))\n \n return render_template('login.html', form=form)\n\n####################################################\n# LOGOUT SETUP #####################################\n####################################################\n\n@users.route('/logout')\n@login_required\ndef logout():\n logout_user()\n flash('Successfully logged out!')\n return redirect(url_for('core.index'))\n\n####################################################\n# ACCOUNT UPDATION SETUP ###########################\n####################################################\n\n@users.route('/account', methods=[\"GET\", \"POST\"])\n@login_required\ndef account():\n form = UpdateUserForm()\n\n if form.validate_on_submit():\n current_user.username = form.username.data\n\n flash('Username Updated!')\n\n db.session.commit()\n\n return redirect(url_for('user.account'))\n\n elif request.method == \"GET\":\n form.username.data = current_user.username\n \n elif request.method == \"POST\":\n pic_num = request.form[\"profile-img\"]\n current_user.profile_image = current_user.profile_image[:-5] + pic_num + '.png'\n db.session.commit()\n\n form.username.data = current_user.username\n \n profile_image = url_for('static', filename='img/'+current_user.profile_image)\n\n if (current_user.is_authenticated):\n notifs = Notifications.query.filter_by(user_id=current_user.id).order_by(Notifications.date.desc()).all()\n else:\n notifs = []\n\n return render_template('account.html', profile_image=profile_image, form=form, notifs=notifs)
\n\n####################################################\n# LIST BLOGS (USER SPECIFIC) #######################\n####################################################\n\n@users.route(\"/user/<int:user_id>\")\ndef user_posts(user_id):\n page = request.args.get('page', 1, type=int)\n user = User.query.filter_by(id=user_id).first_or_404()\n blog_posts = BlogPost.query.filter_by(author=user).order_by(BlogPost.views.desc(), BlogPost.date.desc()).paginate(page=page, per_page=6)\n\n followers_count = db.engine.execute(f\"select count(*) as count \\\n from Followers \\\n where followed_id={user.id}\").scalar()\n\n if (current_user.is_authenticated):\n get_following = Followers.query.filter(Followers.follower_id==current_user.id, Followers.followed_id==user.id).first()\n if (get_following):\n can_follow = False\n else:\n can_follow = True\n else:\n get_following = False\n can_follow = False\n \n if (current_user.is_authenticated):\n notifs = Notifications.query.filter_by(user_id=current_user.id).order_by(Notifications.date.desc()).all()\n else:\n notifs = []\n\n return render_template('user_blog_posts.html', user=user, blog_posts=blog_posts, notifs=notifs, can_follow=can_follow, followers_count=followers_count)\n\n####################################################\n# FOLLOW USER ######################################\n####################################################\n\n@users.route(\"/follow/<int:user_id_1>/<int:user_id_2>\")\n@login_required\ndef follow(user_id_1, user_id_2):\n data = Followers.query.filter_by(follower_id=user_id_1, followed_id=user_id_2).all()\n user = User.query.get_or_404(user_id_2)\n\n if (data):\n flash(f\"You are already following {user.username}!\")\n else:\n data = Followers(user_id_1, user_id_2)\n db.session.add(data)\n \n notif = Notifications(user_id_1, f'You started following {user.username}!', user_id_2, False)\n db.session.add(notif)\n \n db.session.commit()\n\n flash(f'You are following {user.username}!')\n\n return redirect(url_for('user.user_posts', user_id=user.id))\n\n####################################################\n# UNFOLLOW USER ####################################\n####################################################\n\n@users.route(\"/unfollow/<int:user_id_1>/<int:user_id_2>\")\n@login_required\ndef unfollow(user_id_1, user_id_2):\n data = Followers.query.filter_by(follower_id=user_id_1, followed_id=user_id_2).first()\n user = User.query.get_or_404(user_id_2)\n\n if (not data):\n flash(f\"You don't follow {user.username}!\")\n else:\n db.session.delete(data)\n \n notif = Notifications(user_id_1, f'You stopped following {user.username}!', user_id_2, False)\n db.session.add(notif)\n \n db.session.commit()\n\n flash(f'You unfollowed {user.username}!')\n\n return redirect(url_for('user.user_posts', user_id=user.id))\n\n####################################################\n# EMAIL CONFIRMATION SETUP #########################\n####################################################\n\n@users.route('/confirm/<token>')\n@login_required\ndef confirm_email(token):\n try:\n email = serializer.loads(token, salt='email-confirm', max_age=(86400 * 30))\n user = User.query.filter_by(email=email).first()\n user.confirmed = True\n db.session.commit()\n\n except SignatureExpired:
\n email = serializer.loads(token, salt='email-confirm')\n user = User.query.filter_by(email=email).first()\n db.session.delete(user)\n db.session.commit()\n\n flash('Activation Link has expired. Your account was deleted. Please create your account again and confirm the email id as soon as possible!')\n return redirect(url_for('user.register'))\n \n except:\n flash('Invalid Token')\n return redirect(url_for('core.index'))\n \n flash('Email id Confirmed! Start blogging now!')\n return redirect(url_for('user.account'))\n\n####################################################\n# CHANGE BACKGROUND ################################\n####################################################\n\n@users.route(\"/change-background\")\n@login_required\ndef change_background():\n user = User.query.get_or_404(current_user.id)\n background_img_num = int(user.background)\n \n background_img_num = (background_img_num + 1) % 7\n \n if (background_img_num == 0):\n background_img_num = 1\n \n user.background = str(background_img_num)\n db.session.commit()\n \n return redirect(url_for('user.account'))\n\n####################################################\n# FORGOT PASSWORD SETUP ############################\n####################################################\n\n@users.route(\"/forgot-password\", methods=[\"GET\", \"POST\"])\ndef forgot_password():\n form = ForgotPasswordForm()\n\n if (form.validate_on_submit() or request.method == \"POST\"):\n email = form.email.data\n user = User.query.filter_by(email=email).first()\n\n if (not user):\n flash(\"There is no account registered using this email id\")\n return redirect(url_for('user.register'))\n else:\n if (not user.confirmed):\n db.session.delete(user)\n db.session.commit()\n\n flash(\"You had not confirmed your email. Your account has been deleted. Please register a new account.\")\n return redirect(url_for('user.register'))\n else:\n try:\n token = serializer.dumps(email, salt='forgot-confirm')\n link = url_for('user.change_password', token=token, _external=True)\n\n msg = Message('Blog Tap Password Reset', sender='<EMAIL>', recipients=[email])\n\n msg.body = f'''\n\\tWelcome Blogger!\n\nHello {user.username}.\nPlease click on the link below to reset your password at Blog Tap.\nYour reset link is: {link}\nIf you didn't request a password reset, please ignore this mail.\nHope you have an awesome time.\n \n\\tYours Sincerely\n\\tTapajyoti Bose\n\\tCreator\n\\tBlog Tap\n'''\n\n mail.send(msg)
\n\n flash('Password reset mail has been sent. Please reset your password within 48 hours.')\n return redirect(url_for('user.login'))\n\n except:\n flash('We were unable to send the password reset mail.')\n return redirect(url_for('core.index'))\n \n return render_template('forgot-pass.html', form=form)\n\n####################################################\n# RESET PASSWORD ##################################\n####################################################\n\n@users.route(\"/change-password/<token>\", methods=[\"GET\", \"POST\"])\ndef change_password(token):\n try:\n email = serializer.loads(token, salt='forgot-confirm', max_age=(86400 * 2))\n user = User.query.filter_by(email=email).first()\n\n login_user(user)\n session.permanent = True\n\n form = ChangePasswordForm()\n\n if (form.validate_on_submit() or request.method == \"POST\"):\n if (is_strong(form.password.data)):\n pswrd = User.gen_pass(form.password.data)\n user.password_hash = pswrd\n db.session.commit()\n\n flash(\"Password Reset!\")\n return redirect(url_for('core.index'))\n else:\n flash(\"Use a strong password (1 Upper and 1 lower case characters, 1 number, 1 symbol and minimum length of 6)\")\n return redirect(url_for('user.change_password', token=token))\n\n except SignatureExpired:\n email = serializer.loads(token, salt='forgot-confirm')\n user = User.query.filter_by(email=email).first()\n\n flash('Activation Link has expired. Please go through the \"Forgot Password\" process again!')\n return redirect(url_for('user.forgot_password'))\n \n except:\n flash('Invalid Token')\n return redirect(url_for('core.index'))\n \n return render_template('pass-reset.html', form=form)\n\n####################################################\n# UPDATE PASSWORD #################################\n####################################################\n\n@users.route(\"/update-password\", methods=[\"GET\", \"POST\"])\n@login_required\ndef update_password():\n form = UpdatePasswordForm()\n\n if (form.validate_on_submit() or request.method == \"POST\"):\n user = User.query.get_or_404(current_user.id)\n \n if (user.check_password(form.curr_pass.data)):\n if (is_strong(form.password.data)):\n user.password_hash = User.gen_pass(form.password.data)\n db.session.commit()\n flash('Password Updated!')\n return redirect(url_for('user.account'))\n\n else:\n flash('Use a strong password (1 Upper and 1 lower case characters, 1 number, 1 symbol and minimum length of 6)')\n return redirect(url_for('user.update_password'))\n\n else:\n flash(\"Incorrect Password!\")\n return redirect(url_for('user.update_password'))\n \n notifs = Notifications.query.filter_by(user_id=current_user.id).order_by(Notifications.date.desc()).all()\n\n return render_template('update-pass.html', form=form, notifs=notifs)", "id": "4559860", "language": "Python", "matching_score": 4.927306175231934, "max_stars_count": 3, "path": "blog/users/views.py" },
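blog.users.password.is_strong is called throughout the views above, but its module is not part of this dump; a sketch matching the rule the flash messages describe (1 upper and 1 lower case character, 1 number, 1 symbol, minimum length of 6):

import re

def is_strong(password):
    # one check per rule from the flash message; all must hold
    return (len(password) >= 6
            and re.search(r'[A-Z]', password) is not None
            and re.search(r'[a-z]', password) is not None
            and re.search(r'\d', password) is not None
            and re.search(r'[^A-Za-z0-9]', password) is not None)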
{ "content": "####################################################\n# IMPORTS (FROM LIBRARY) ###########################\n####################################################\n\nfrom flask import render_template, url_for, request, redirect, Blueprint, flash, abort\nfrom flask_login import current_user, login_required\n\n####################################################\n# IMPORTS (LOCAL) ##################################\n####################################################\n\nfrom blog import db\nfrom blog.models import BlogPost, User, Notifications, Followers, View, Likes, Comments\nfrom blog.post.forms import BlogPostForm, CommentForm\n\n####################################################\n# BLUEPRINT SETUP ##################################\n####################################################\n\nblog_posts = Blueprint('blog_posts', __name__)\n\n####################################################\n# CREATE POST SETUP ################################\n####################################################\n\n@blog_posts.route('/create', methods=[\"GET\", \"POST\"])\n@login_required\ndef create_post():\n form = BlogPostForm()\n \n if (form.validate_on_submit() or request.method == \"POST\"):\n post = BlogPost(title=form.title.data, category=form.category.data, text=form.text.data, user_id=current_user.id)\n db.session.add(post)\n db.session.commit()\n\n followers = Followers.query.filter_by(followed_id=current_user.id).all()\n\n for follower in followers:\n notif = Notifications(follower.follower_id, f'{current_user.username} has posted a blog \"{form.title.data}\"!', post.id, True)\n db.session.add(notif)\n\n db.session.commit()\n\n flash('Post Created!')\n \n return redirect(url_for('core.index'))\n\n if (current_user.is_authenticated):\n notifs = Notifications.query.filter_by(user_id=current_user.id).order_by(Notifications.date.desc()).all()\n else:\n notifs = []\n\n return render_template('create_post.html', form=form, notifs=notifs)\n\n####################################################\n# BLOG POST VIEW SETUP #############################\n####################################################\n\n@blog_posts.route('/blog/<int:blog_post_id>', methods=[\"GET\", \"POST\"])\ndef blog_post(blog_post_id):\n post = BlogPost.query.get_or_404(blog_post_id)\n form = CommentForm()\n\n previous_comments = Comments.query.filter_by(blog_id=post.id).order_by(Comments.date.desc()).all()\n\n if (form.validate_on_submit() or request.method == \"POST\"):\n comment = Comments(blog_post_id, current_user.id, form.text.data)\n db.session.add(comment)\n\n if (current_user.id != post.author.id):\n notif = Notifications(post.author.id, f'{current_user.username} has commented on your blog \"{post.title}\"!', post.id, True)\n db.session.add(notif)\n\n db.session.commit()\n \n return redirect(url_for('blog_posts.blog_post', blog_post_id=blog_post_id))\n\n if (current_user.is_authenticated and current_user.email != post.author.email):\n user = User.query.get_or_404(current_user.id)\n user.last_viewed_catagory3 = user.last_viewed_catagory2\n user.last_viewed_catagory2 = user.last_viewed_catagory1\n user.last_viewed_catagory1 = post.category\n \n db.session.commit()\n\n view = View.query.filter_by(user_id=current_user.id, blog_id=blog_post_id).first()\n\n if (not view):\n post.views += 1\n view = View(current_user.id, blog_post_id)\n db.session.add(view)\n db.session.commit()\n \n if (current_user.is_authenticated):\n notifs = Notifications.query.filter_by(user_id=current_user.id).order_by(Notifications.date.desc()).all()\n else:\n notifs = []\n \n if (current_user.is_authenticated):\n like_stat = Likes.query.filter_by(user_id=current_user.id, blog_id=blog_post_id).first()\n else:\n like_stat = None\n\n like_count = db.engine.execute(f'''\n select count(*)\n from Likes\n where blog_id={blog_post_id} and like={1}\n ''')\n dislike_count = db.engine.execute(f'''\n select count(*)\n from Likes\n where blog_id={blog_post_id} and like={0}\n ''')\n like_count = [res[0] for res in like_count][0]\n dislike_count = [res[0] for res in dislike_count][0]\n \n if (like_count == None):\n like_count = 0\n if (dislike_count == None):\n dislike_count = 0
\n \n if (like_stat):\n like_val = like_stat.like\n else:\n like_val = None\n \n return render_template('blog_posts.html', title=post.title, date=post.date, post=post, category=post.category, notifs=notifs, like_val=like_val, like_count=like_count, dislike_count=dislike_count, previous_comments=previous_comments, form=form, User=User)\n\n####################################################\n# UPDATE POST SETUP ################################\n####################################################\n\n@blog_posts.route('/<int:blog_post_id>/update', methods=[\"GET\", \"POST\"])\n@login_required\ndef update(blog_post_id):\n post = BlogPost.query.get_or_404(blog_post_id)\n # capture the current title before it can be overwritten, so that follower\n # notifications refer to the blog by the name they know\n blog_title = post.title\n\n if (post.author != current_user):\n abort(403)\n \n form = BlogPostForm()\n \n if (form.validate_on_submit() or request.method == \"POST\"):\n post.title = form.title.data\n post.text = form.text.data\n post.category = form.category.data\n\n followers = Followers.query.filter_by(followed_id=current_user.id).all()\n\n for follower in followers:\n notif = Notifications(follower.follower_id, f'{current_user.username} has updated the blog \"{blog_title}\"!', post.id, True)\n db.session.add(notif)\n\n db.session.commit()\n flash('Updated Post!')\n return redirect(url_for('blog_posts.blog_post', blog_post_id=post.id))\n\n if (request.method == \"GET\"):\n form.title.data = post.title\n form.text.data = post.text\n form.category.data = post.category\n \n if (current_user.is_authenticated):\n notifs = Notifications.query.filter_by(user_id=current_user.id).order_by(Notifications.date.desc()).all()\n else:\n notifs = []\n\n return render_template('create_post.html', form=form, notifs=notifs)\n\n####################################################\n# DELETE POST SETUP ################################\n####################################################\n\n@blog_posts.route('/<int:blog_post_id>/delete', methods=[\"GET\", \"POST\"])\n@login_required\ndef delete(blog_post_id):\n post = BlogPost.query.get_or_404(blog_post_id)\n\n if (post.author != current_user):\n abort(403)\n \n followers = Followers.query.filter_by(followed_id=current_user.id).all()\n\n for follower in followers:\n notif = Notifications(follower.follower_id, f'{current_user.username} has deleted the blog \"{post.title}\"!', post.author.id, False)\n db.session.add(notif)\n \n db.session.delete(post)\n db.session.commit()\n\n flash('Blog Deleted!')\n\n return redirect(url_for('core.index'))\n\n####################################################\n# LIKE INTERACTION SETUP ###########################\n####################################################\n\n@blog_posts.route('/<int:user_id>/<int:blog_post_id>/<int:like>')\n@login_required\ndef like(user_id, blog_post_id, like):\n post = BlogPost.query.get_or_404(blog_post_id)\n\n if (not current_user.is_authenticated or post.user_id == current_user.id):\n return redirect(url_for('blog_posts.blog_post', blog_post_id=blog_post_id))\n\n like_entry = Likes.query.filter_by(user_id=user_id, blog_id=blog_post_id).first()\n\n blog = BlogPost.query.get_or_404(blog_post_id)\n user = User.query.get_or_404(blog.author.id)\n user_reaction = User.query.get_or_404(user_id)\n\n if (not like_entry):\n like_entry = Likes(user_id, blog_post_id, bool(like))\n db.session.add(like_entry)\n\n notif = Notifications(user.id, f'{user_reaction.username} has reacted to your blog \"{blog.title}\"!', blog_post_id, True)\n db.session.add(notif)\n\n else:\n if (like_entry.like != 
bool(like)):\n like_entry.like = bool(like)\n\n notif = Notifications(user.id, f'{user_reaction.username} has reacted to your blog \"{blog.title}\"!', blog_post_id, True)\n db.session.add(notif)\n\n else:\n db.session.delete(like_entry)\n\n db.session.commit()\n \n return redirect(url_for('blog_posts.blog_post', blog_post_id=blog_post_id))\n\n####################################################\n# EDIT COMMENT SETUP ###############################\n####################################################\n\n@blog_posts.route('/<int:comment_id>/edit', methods=[\"GET\", \"POST\"])\n@login_required\ndef edit_comment(comment_id):\n comment = Comments.query.get_or_404(comment_id)\n\n if (comment.user_id != current_user.id):\n abort(403)\n \n form = CommentForm()\n \n if (form.validate_on_submit() or request.method == \"POST\"):\n comment.text = form.text.data\n db.session.commit()\n\n flash('Comment Updated!')\n return redirect(url_for('blog_posts.blog_post', blog_post_id=comment.blog_id))\n\n if (request.method == \"GET\"):\n form.text.data = comment.text\n \n if (current_user.is_authenticated):\n notifs = Notifications.query.filter_by(user_id=current_user.id).order_by(Notifications.date.desc()).all()\n else:\n notifs = []\n\n return render_template('edit_comment.html', form=form, notifs=notifs)", "id": "9332835", "language": "Python", "matching_score": 4.516249179840088, "max_stars_count": 3, "path": "blog/post/views.py" }, { "content": "####################################################\n# IMPORTS (FROM LIBRARY) ###########################\n####################################################\n\nfrom flask import render_template, request, Blueprint, redirect, url_for, flash\nfrom flask_login import current_user, login_required\n\n####################################################\n# IMPORTS (LOCAL) ##################################\n####################################################\n\nfrom blog.core.forms import Search_Form\nfrom blog.core.search_engine import search\nfrom blog.models import BlogPost, Notifications, View\nfrom blog import db\n\n####################################################\n# BLUEPRINT SETUP ##################################\n####################################################\n\ncore = Blueprint('core', __name__)\n\n####################################################\n# INDEX SETUP ######################################\n####################################################\n\n@core.route('/', methods=[\"GET\", \"POST\"])\ndef index():\n View.delete_expired()\n Notifications.delete_expired()\n \n page = request.args.get('page', 1, type=int)\n\n blog_posts = BlogPost.query.order_by(BlogPost.views.desc(), BlogPost.date.desc()).paginate(page=page, per_page=6)\n if (current_user.is_authenticated):\n ids = db.engine.execute(f'select blog_id \\\n from View \\\n where user_id={current_user.id}')\n viewed = [id_blog[0] for id_blog in ids]\n categories = [current_user.last_viewed_catagory1, current_user.last_viewed_catagory2, current_user.last_viewed_catagory3]\n recommended = BlogPost.query.filter(BlogPost.category.in_(categories), BlogPost.author!=current_user, ~(BlogPost.id.in_(viewed))).order_by(BlogPost.views.desc(), BlogPost.date.desc()).paginate(page=page, per_page=3, error_out=False)\n else:\n recommended = None\n \n if (current_user.is_authenticated):\n notifs = Notifications.query.filter_by(user_id=current_user.id).order_by(Notifications.date.desc()).all()\n else:\n notifs = []\n\n form = Search_Form()\n\n if form.validate_on_submit():\n return 
redirect(url_for('core.search_page', param=form.param.data))\n\n return render_template('index.html', page_name=\"Home\", blog_posts=blog_posts, recommended=recommended, notifs=notifs, form=form)\n\n####################################################\n# ABOUT SETUP ######################################\n####################################################\n\n@core.route('/about')\ndef about():\n if (current_user.is_authenticated):\n notifs = Notifications.query.filter_by(user_id=current_user.id).order_by(Notifications.date.desc()).all()\n else:\n notifs = []\n \n return render_template('about.html', notifs=notifs)\n\n####################################################\n# SEARCH SETUP #####################################\n####################################################\n\n@core.route('/search/<string:param>', methods=[\"GET\", \"POST\"])\ndef search_page(param):\n users, blogs = search(param)\n \n if (current_user.is_authenticated):\n notifs = Notifications.query.filter_by(user_id=current_user.id).order_by(Notifications.date.desc()).all()\n else:\n notifs = []\n\n form = Search_Form()\n\n if form.validate_on_submit():\n return redirect(url_for('core.search_page', param=form.param.data))\n \n if ((not blogs) and users):\n open_tab = 2\n else:\n open_tab = 1\n \n return render_template('search.html', notifs=notifs, param=param, users=users, blogs=blogs, form=form, open_tab=open_tab)\n\n####################################################\n# INDEX (SORTED) SETUP #############################\n####################################################\n\n@core.route('/sorted', methods=[\"GET\", \"POST\"])\ndef index_sorted():\n View.delete_expired()\n Notifications.delete_expired()\n \n page = request.args.get('page', 1, type=int)\n category_val = request.args.get('sort_val')\n \n if (not category_val):\n flash('Select a Category to sort by')\n return redirect(url_for('core.index'))\n\n if (category_val == \"Alphabetically Asc\"):\n blog_posts = BlogPost.query.order_by(BlogPost.title.asc()).paginate(page=page, per_page=6)\n elif (category_val == \"Alphabetically Dsc\"):\n blog_posts = BlogPost.query.order_by(BlogPost.title.desc()).paginate(page=page, per_page=6)\n elif (category_val == \"Category\"):\n blog_posts = BlogPost.query.order_by(BlogPost.category.asc()).paginate(page=page, per_page=6)\n elif (category_val == \"By Date Asc\"):\n blog_posts = BlogPost.query.order_by(BlogPost.date.asc()).paginate(page=page, per_page=6)\n elif (category_val == \"By Date Dsc\"):\n blog_posts = BlogPost.query.order_by(BlogPost.date.desc()).paginate(page=page, per_page=6)\n else:\n blog_posts = BlogPost.query.order_by(BlogPost.views.desc()).paginate(page=page, per_page=6)\n\n if (current_user.is_authenticated):\n ids = db.engine.execute(f'select blog_id \\\n from View \\\n where user_id={current_user.id}')\n viewed = [id_blog[0] for id_blog in ids]\n categories = [current_user.last_viewed_catagory1, current_user.last_viewed_catagory2, current_user.last_viewed_catagory3]\n recommended = BlogPost.query.filter(BlogPost.category.in_(categories), BlogPost.author!=current_user, ~(BlogPost.id.in_(viewed))).order_by(BlogPost.views.desc(), BlogPost.date.desc()).paginate(page=page, per_page=3, error_out=False)\n else:\n recommended = None\n \n if (current_user.is_authenticated):\n notifs = Notifications.query.filter_by(user_id=current_user.id).order_by(Notifications.date.desc()).all()\n else:\n notifs = []\n\n form = Search_Form()\n\n if form.validate_on_submit():\n return redirect(url_for('core.search_page', 
param=form.param.data))\n\n return render_template('sorted.html', page_name=\"Home\", blog_posts=blog_posts, recommended=recommended, notifs=notifs, form=form, category_val=category_val)", "id": "7967593", "language": "Python", "matching_score": 2.852093458175659, "max_stars_count": 3, "path": "blog/core/views.py" }, { "content": "####################################################\n# IMPORTS (FROM LIBRARY) ###########################\n####################################################\n\nfrom flask import Blueprint, render_template\nfrom flask_login import current_user\n\n####################################################\n# IMPORTS (LOCAL) ##################################\n####################################################\n\nfrom blog.models import Notifications\n\n####################################################\n# BLUEPRINT SETUP ##################################\n####################################################\n\nerror_pages = Blueprint('error_pages', __name__)\n\n####################################################\n# ERROR 404 SETUP ##################################\n####################################################\n\n@error_pages.app_errorhandler(404)\ndef error_404(error):\n if (current_user.is_authenticated):\n notifs = Notifications.query.filter_by(user_id=current_user.id).order_by(Notifications.date.desc()).all()\n else:\n notifs = []\n return render_template('error_pages/404.html', notifs=notifs), 404\n\n####################################################\n# ERROR 403 SETUP ##################################\n####################################################\n\n@error_pages.app_errorhandler(403)\ndef error_403(error):\n if (current_user.is_authenticated):\n notifs = Notifications.query.filter_by(user_id=current_user.id).order_by(Notifications.date.desc()).all()\n else:\n notifs = []\n return render_template('error_pages/403.html', notifs=notifs), 403\n\n####################################################\n# ERROR 500 SETUP ##################################\n####################################################\n\n@error_pages.app_errorhandler(500)\ndef error_500(error):\n if (current_user.is_authenticated):\n notifs = Notifications.query.filter_by(user_id=current_user.id).order_by(Notifications.date.desc()).all()\n else:\n notifs = []\n return render_template('error_pages/500.html', notifs=notifs),500", "id": "12218743", "language": "Python", "matching_score": 2.796050786972046, "max_stars_count": 3, "path": "blog/error_pages/handlers.py" }, { "content": "####################################################\r\n# IMPORTS (FROM LIBRARY) ###########################\r\n####################################################\r\n\r\nfrom flask import Blueprint, render_template\r\n\r\n####################################################\r\n# BLUEPRINT SETUP ##################################\r\n####################################################\r\n\r\nerror_pages = Blueprint('error_pages', __name__)\r\n\r\n####################################################\r\n# ERROR 404 SETUP ##################################\r\n####################################################\r\n\r\n@error_pages.app_errorhandler(404)\r\ndef error_404(error):\r\n return render_template('error_pages/404.html', page_name=\"404 Error\"), 404\r\n\r\n####################################################\r\n# ERROR 403 SETUP ##################################\r\n####################################################\r\n\r\n@error_pages.app_errorhandler(403)\r\ndef error_403(error):\r\n 
return render_template('error_pages/403.html', page_name=\"403 Error\"), 403\r\n\r\n####################################################\r\n# ERROR 500 SETUP ##################################\r\n####################################################\r\n\r\n@error_pages.app_errorhandler(500)\r\ndef error_500(error):\r\n return render_template('error_pages/500.html', page_name=\"500 Error\"), 500", "id": "894279", "language": "Python", "matching_score": 2.5742204189300537, "max_stars_count": 2, "path": "Karmatek/error_pages/handler.py" }, { "content": "####################################################\r\n# IMPORTS (LOCAL) ##################################\r\n####################################################\r\n\r\nfrom Karmatek import app\r\n\r\n####################################################\r\n# IMPORTS (FROM LIBRARY) ###########################\r\n####################################################\r\n\r\nfrom flask import render_template, Blueprint\r\n\r\n####################################################\r\n# BLUEPRINT SETUP ##################################\r\n####################################################\r\n\r\ncore = Blueprint('core', __name__)\r\n\r\n####################################################\r\n# HOME PAGE SETUP ##################################\r\n####################################################\r\n\r\n@app.route('/')\r\ndef home():\r\n return render_template('index.html')\r\n\r\n####################################################\r\n# COORDINATORS SETUP ###############################\r\n####################################################\r\n\r\n@app.route('/coordinator')\r\ndef coordinator():\r\n return render_template('coordinator-details.html')\r\n\r\n####################################################\r\n# UNION SETUP ######################################\r\n####################################################\r\n\r\n@app.route('/union')\r\ndef union():\r\n return render_template('union-details.html')", "id": "10160284", "language": "Python", "matching_score": 1.261622428894043, "max_stars_count": 2, "path": "Karmatek/core/views.py" }, { "content": "####################################################\r\n# IMPORTS (FROM LIBRARY) ###########################\r\n####################################################\r\n\r\nfrom datetime import timedelta\r\nfrom flask import Flask, render_template, Blueprint\r\nfrom flask_restful import Api\r\nfrom flask_sqlalchemy import SQLAlchemy\r\nfrom flask_mail import Mail\r\nfrom flask_migrate import Migrate\r\nfrom flask_login import LoginManager\r\nimport os\r\n\r\n####################################################\r\n# APP SETUP ########################################\r\n####################################################\r\n\r\napp = Flask(__name__)\r\napp.config[\"SECRET_KEY\"] = 'secret_key'\r\n\r\n####################################################\r\n# DATABASE SETUP ###################################\r\n####################################################\r\n\r\nbase_dir = os.path.abspath(os.path.dirname(__name__))\r\napp.config[\"SQLALCHEMY_DATABASE_URI\"] = 'sqlite:///' + os.path.join(base_dir, \"data.sqlite\")\r\napp.config[\"SQLALCHEMY_TRACK_MODIFICATIONS\"] = False\r\n\r\ndb = SQLAlchemy(app)\r\n\r\n####################################################\r\n# COOKIE LIFETIME SETUP ############################\r\n####################################################\r\n\r\napp.config['REMEMBER_COOKIE_DURATION'] = 
timedelta(days=7)\r\n\r\n####################################################\r\n# EMAIL SETUP ######################################\r\n####################################################\r\n\r\napp.config.update(\r\n\tDEBUG = False,\r\n\t#EMAIL SETTINGS\r\n\tMAIL_SERVER = 'smtp.gmail.com',\r\n\tMAIL_PORT = 465,\r\n\tMAIL_USE_SSL = True,\r\n\tMAIL_USERNAME = \"youremailid(if its not from gmail, change the mail server)\",\r\n\tMAIL_PASSWORD = \"<PASSWORD>\"\r\n\t)\r\n\r\nmail = Mail(app)\r\n\r\n####################################################\r\n# MIGRATION SETUP ##################################\r\n####################################################\r\n\r\nMigrate(app, db)\r\n\r\n####################################################\r\n# API SETUP ########################################\r\n####################################################\r\n\r\napi = Api(app)\r\n\r\n####################################################\r\n# LOGIN SETUP ######################################\r\n####################################################\r\n\r\nlogin_manager = LoginManager()\r\nlogin_manager.init_app(app)\r\nlogin_manager.login_view = 'users.login'\r\n\r\n####################################################\r\n# BLUEPRINT SETUP ##################################\r\n####################################################\r\n\r\nfrom Karmatek.core.views import core\r\nfrom Karmatek.users.views import users\r\nfrom Karmatek.error_pages.handler import error_pages\r\nfrom Karmatek.api import api_blueprint\r\n\r\napp.register_blueprint(core)\r\napp.register_blueprint(users)\r\napp.register_blueprint(error_pages)\r\napp.register_blueprint(api_blueprint)", "id": "845608", "language": "Python", "matching_score": 7.175479888916016, "max_stars_count": 2, "path": "Karmatek/__init__.py" }, { "content": "####################################################\n# IMPORTS (FROM LIBRARY) ###########################\n####################################################\n\nfrom datetime import timedelta\nfrom flask import Flask\nfrom flask_sqlalchemy import SQLAlchemy\nfrom flask_mail import Mail\nfrom flask_migrate import Migrate\nfrom flask_misaka import Misaka\nfrom flask_login import LoginManager\nimport os\n\n####################################################\n# APP SETUP ########################################\n####################################################\n\napp = Flask(__name__)\n\napp.config[\"SECRET_KEY\"] = 'secret_key'\n\n####################################################\n# DATABASE SETUP ###################################\n####################################################\n\nbase_dir = os.path.abspath(os.path.dirname(__name__))\napp.config[\"SQLALCHEMY_DATABASE_URI\"] = 'sqlite:///' + os.path.join(base_dir, \"data.sqlite\")\napp.config[\"SQLALCHEMY_TRACK_MODIFICATIONS\"] = False\n\ndb = SQLAlchemy(app)\n\n####################################################\n# COOKIE SETUP #####################################\n####################################################\n\napp.config['REMEMBER_COOKIE_DURATION'] = timedelta(days=28)\napp.config['PERMANENT_SESSION_LIFETIME'] = timedelta(days=28)\n\n####################################################\n# EMAIL SETUP ######################################\n####################################################\n\napp.config.update(\n\tMAIL_SERVER = 'smtp.gmail.com',\n\tMAIL_PORT = 465,\n\tMAIL_USE_SSL = True,\n\tMAIL_USERNAME = \"email\",\n\tMAIL_PASSWORD = \"password\"\n\t)\n\nmail = 
Mail(app)\n\n####################################################\n# MIGRATION SETUP ##################################\n####################################################\n\nMigrate(app, db)\n\n####################################################\n# LOGIN SETUP ######################################\n####################################################\n\nlogin_manager = LoginManager()\nlogin_manager.init_app(app)\nlogin_manager.login_view = 'user.login'\n\n####################################################\n# MISAKA SETUP (MARKDOWN CONVERTER) ################\n####################################################\n\nMisaka(app)\n\n####################################################\n# BLUEPRINT SETUP ##################################\n####################################################\n\nfrom blog.core.views import core\nfrom blog.error_pages.handlers import error_pages\nfrom blog.post.views import blog_posts\nfrom blog.users.views import users\n\napp.register_blueprint(core)\napp.register_blueprint(error_pages)\napp.register_blueprint(blog_posts)\napp.register_blueprint(users)", "id": "7582858", "language": "Python", "matching_score": 1.8951307535171509, "max_stars_count": 3, "path": "blog/__init__.py" }, { "content": "####################################################\n# IMPORTS (LOCAL) ##################################\n####################################################\n\nfrom blog import db, login_manager\n\n####################################################\n# IMPORTS (FROM LIBRARY) ###########################\n####################################################\n\nfrom werkzeug.security import generate_password_hash, check_password_hash\nfrom flask_login import UserMixin\nfrom datetime import datetime, timedelta\nfrom random import randint\n\n####################################################\n# USER LOADER SETUP ################################\n####################################################\n\n@login_manager.user_loader\ndef load_user(user_id):\n return User.query.get(user_id)\n\n####################################################\n# USER MODEL SETUP #################################\n####################################################\n\nclass User(db.Model, UserMixin):\n __tablename__ = \"users\"\n\n id = db.Column(db.Integer, primary_key=True)\n profile_image = db.Column(db.String(1), nullable=False)\n email = db.Column(db.String(64), unique=True, index=True)\n username = db.Column(db.String(64), index=True)\n password_hash = db.Column(db.String(64))\n confirmed = db.Column(db.Boolean, nullable=False)\n last_viewed_catagory1 = db.Column(db.String(64), index=True)\n last_viewed_catagory2 = db.Column(db.String(64), index=True)\n last_viewed_catagory3 = db.Column(db.String(64), index=True)\n background = db.Column(db.String(1), index=True, nullable=False)\n\n posts = db.relationship('BlogPost', backref='author', lazy=True)\n\n def __init__(self, email, username, password):\n self.username = username\n self.email = email\n self.password_hash = generate_password_hash(password)\n self.confirmed = False\n self.profile_image = \"profile_img_\" + str(randint(1, 9)) + \".png\"\n self.background = \"1\"\n \n def check_password(self, password):\n return check_password_hash(self.password_hash, password)\n\n def __repr__(self):\n return f\"User-name: {self.username}\\nEmail: {self.email}\"\n \n @staticmethod\n def gen_pass(password):\n return generate_password_hash(password)\n\n####################################################\n# BLOG POST MODEL SETUP 
############################\n####################################################\n\nclass BlogPost(db.Model):\n    __tablename__ = 'BlogPost'\n\n    users = db.relationship(User)\n\n    id = db.Column(db.Integer, primary_key=True)\n    user_id = db.Column(db.Integer, db.ForeignKey('users.id'), nullable=False)\n    date = db.Column(db.DateTime, nullable=False, default=datetime.utcnow)\n    title = db.Column(db.String(150), nullable=False)\n    text = db.Column(db.Text, nullable=False)\n    views = db.Column(db.Integer, nullable=False)\n    category = db.Column(db.String(64), nullable=False)\n\n    def __init__(self, user_id, title, text, category):\n        self.user_id = user_id\n        self.title = title\n        self.text = text\n        self.category = category\n        self.views = 0\n\n    def __repr__(self):\n        return f\"Post ID: {self.id} -- {self.date}\\nTitle: {self.title.upper()}\"\n\n####################################################\n# FOLLOWERS MODEL SETUP ############################\n####################################################\n\nclass Followers(db.Model):\n    __tablename__ = 'Followers'\n\n    id = db.Column(db.Integer, primary_key=True)\n    follower_id = db.Column(db.Integer, db.ForeignKey('users.id'), nullable=False)\n    followed_id = db.Column(db.Integer, db.ForeignKey('users.id'), nullable=False)\n\n    def __init__(self, follower_id, followed_id):\n        self.follower_id = follower_id\n        self.followed_id = followed_id\n\n    def __repr__(self):\n        return f\"Follower ID: {self.follower_id}\\tFollowed ID: {self.followed_id}\"\n\n####################################################\n# NOTIFICATION MODEL SETUP #########################\n####################################################\n\nclass Notifications(db.Model):\n    __tablename__ = 'Notifications'\n\n    id = db.Column(db.Integer, primary_key=True)\n    user_id = db.Column(db.Integer, db.ForeignKey('users.id'), nullable=False)\n    date = db.Column(db.DateTime, nullable=False, default=datetime.utcnow)\n    text = db.Column(db.String(150), nullable=False)\n    link_id = db.Column(db.Integer, nullable=False)\n    is_blog = db.Column(db.Boolean, nullable=False)\n\n    def __init__(self, user_id, text, link_id, is_blog):\n        self.user_id = user_id\n        self.text = text\n        self.link_id = link_id\n        self.is_blog = is_blog\n\n    def __repr__(self):\n        return f\"User ID: {self.user_id}\\tTime: {self.date}\\nText: {self.text}\"\n\n    @classmethod\n    def delete_expired(cls):\n        expiration_days = 14\n        # compare against UTC so the cutoff matches the stored utcnow timestamps\n        limit = datetime.utcnow() - timedelta(days=expiration_days)\n        cls.query.filter(cls.date <= limit).delete()\n        db.session.commit()\n\n####################################################\n# VIEW MODEL SETUP #################################\n####################################################\n\nclass View(db.Model):\n    __tablename__ = 'View'\n\n    id = db.Column(db.Integer, primary_key=True)\n    user_id = db.Column(db.Integer, db.ForeignKey('users.id'), nullable=False)\n    blog_id = db.Column(db.Integer, db.ForeignKey('BlogPost.id'), nullable=False)\n    timestamp = db.Column(db.DateTime, nullable=False, default=datetime.utcnow)\n\n    def __init__(self, user_id, blog_id):\n        self.user_id = user_id\n        self.blog_id = blog_id\n\n    def __repr__(self):\n        return f\"User ID: {self.user_id}\\tBlog ID: {self.blog_id}\"\n\n    @classmethod\n    def delete_expired(cls):\n        expiration_days = 2\n        # compare against UTC so the cutoff matches the stored utcnow timestamps\n        limit = datetime.utcnow() - timedelta(days=expiration_days)\n        cls.query.filter(cls.timestamp <= limit).delete()\n        db.session.commit()\n\n####################################################\n# LIKES MODEL SETUP 
################################\n####################################################\n\nclass Likes(db.Model):\n __tablename__ = 'Likes'\n\n id = db.Column(db.Integer, primary_key=True)\n user_id = db.Column(db.Integer, db.ForeignKey('users.id'), nullable=False)\n blog_id = db.Column(db.Integer, db.ForeignKey('BlogPost.id'), nullable=False)\n like = db.Column(db.Boolean, nullable=False)\n\n def __init__(self, user_id, blog_id, like):\n self.user_id = user_id\n self.blog_id = blog_id\n self.like = like\n\n####################################################\n# COMMENTS MODEL SETUP #############################\n####################################################\n\nclass Comments(db.Model):\n __tablename__ = 'Comments'\n\n id = db.Column(db.Integer, primary_key=True)\n blog_id = db.Column(db.Integer, db.ForeignKey('BlogPost.id'), nullable=False)\n user_id = db.Column(db.Integer, db.ForeignKey('users.id'), nullable=False)\n date = db.Column(db.DateTime, nullable=False, default=datetime.utcnow)\n text = db.Column(db.Text, nullable=False)\n\n def __init__(self, blog_id, user_id, text):\n self.blog_id = blog_id\n self.user_id = user_id\n self.text = text\n ", "id": "3540986", "language": "Python", "matching_score": 4.715539932250977, "max_stars_count": 3, "path": "blog/models.py" }, { "content": "####################################################\r\n# IMPORTS (LOCAL) ##################################\r\n####################################################\r\n\r\nfrom Karmatek import db, login_manager\r\n\r\n####################################################\r\n# IMPORTS (FROM LIBRARY) ###########################\r\n####################################################\r\n\r\nfrom werkzeug.security import generate_password_hash, check_password_hash\r\nfrom flask_login import UserMixin, current_user\r\n\r\n####################################################\r\n# USER LOADER SETUP ################################\r\n####################################################\r\n\r\n@login_manager.user_loader\r\ndef load_user(user_id):\r\n return User.query.get(user_id)\r\n\r\n####################################################\r\n# USER MODEL SETUP #################################\r\n####################################################\r\n\r\nclass User(db.Model, UserMixin):\r\n __tablename__ = \"users\"\r\n\r\n id = db.Column(db.Integer, primary_key=True)\r\n email = db.Column(db.String(64), unique=True, index=True)\r\n username = db.Column(db.String(64), index=True)\r\n password_hash = db.Column(db.String(64))\r\n ph_num = db.Column(db.String(10))\r\n dept = db.Column(db.String(128))\r\n year = db.Column(db.Integer)\r\n confirm = db.Column(db.Integer, unique=False, default=0)\r\n\r\n posts = db.relationship('Events', backref='author', lazy=True)\r\n\r\n def __init__(self, email, username, password, ph_num, dept, year):\r\n self.username = username\r\n self.email = email\r\n self.password_hash = generate_password_hash(password)\r\n self.year = year\r\n self.dept = dept\r\n self.ph_num = ph_num\r\n self.confirm = 0\r\n \r\n def check_password(self, password):\r\n return check_password_hash(self.password_hash, password)\r\n\r\n def __repr__(self):\r\n return f\"User-name: {self.username}\\nEmail: {self.email}\"\r\n \r\n def json(self):\r\n return {\r\n 'Id': self.id,\r\n 'Name': self.username,\r\n 'Email': self.email,\r\n 'Phone': self.ph_num,\r\n 'Year': self.year,\r\n 'Dept': self.dept,\r\n 'Confirm': self.confirm\r\n }\r\n\r\n####################################################\r\n# 
PARTICIPATING EVENTs SETUP #######################\r\n####################################################\r\n\r\nclass Events(db.Model):\r\n __tablename__ = 'events'\r\n\r\n users = db.relationship(User)\r\n\r\n id = db.Column(db.Integer, primary_key=True)\r\n user_id = db.Column(db.Integer, db.ForeignKey('users.id'), nullable=False)\r\n event = db.Column(db.String(256), nullable=False)\r\n\r\n def __init__(self, user_id, event):\r\n self.user_id = user_id\r\n self.event = event\r\n \r\n def __repr__(self):\r\n return f\"Event: {self.event}\"\r\n \r\n def __str__(self):\r\n return self.__repr__()\r\n \r\n def json(self):\r\n return {\r\n 'Id': self.user_id,\r\n 'Event': self.event\r\n }", "id": "9110931", "language": "Python", "matching_score": 1.290764331817627, "max_stars_count": 2, "path": "Karmatek/model.py" }, { "content": "####################################################\r\n# IMPORTS (FROM LIBRARY) ###########################\r\n####################################################\r\n\r\n# Imports for forms\r\n\r\nfrom flask_wtf import FlaskForm\r\nfrom wtforms import StringField, PasswordField, SubmitField, IntegerField, SelectField\r\nfrom wtforms.validators import DataRequired, Email, EqualTo\r\nfrom wtforms import ValidationError\r\nfrom flask_wtf.file import FileField, FileAllowed\r\n\r\n# Imports for users\r\n\r\nfrom flask_login import current_user\r\n\r\n####################################################\r\n# IMPORTS (LOCAL) ##################################\r\n####################################################\r\n\r\nfrom Karmatek.model import User\r\n\r\n####################################################\r\n# LOGIN FORM SETUP #################################\r\n####################################################\r\n\r\nclass LoginForm(FlaskForm):\r\n email = StringField('Email'.upper(), validators=[DataRequired(), Email()])\r\n password = PasswordField('Password'.upper(), validators=[DataRequired()])\r\n submit = SubmitField('Log In')\r\n\r\n####################################################\r\n# REGISTER FORM SETUP ##############################\r\n####################################################\r\n\r\nclass Register(FlaskForm):\r\n email = StringField('Email'.upper(), validators=[DataRequired(), Email()])\r\n name = StringField('Full Name'.upper(), validators=[DataRequired()])\r\n password = PasswordField('Password'.upper(), validators=[DataRequired()])\r\n pass_confirm = PasswordField('Confirm Password'.upper(), validators=[DataRequired(), EqualTo('password', 'Both the Password Fields Must Match')])\r\n ph_num = IntegerField(\"Phone Number\".upper(), validators=[DataRequired()])\r\n dept = StringField(\"Department\".upper(), validators=[DataRequired()])\r\n year = IntegerField(\"Academic Year\".upper(), validators=[DataRequired()])\r\n submit = SubmitField('REGISTER')\r\n\r\n def check_email(self, field):\r\n if User.query.filter_by(email=field.data).first():\r\n raise ValidationError('The email entered has already been registered')\r\n\r\n####################################################\r\n# UPDATION SETUP ###################################\r\n####################################################\r\n\r\nclass UpdateUserForm(FlaskForm):\r\n email = StringField('Email'.upper(), validators=[DataRequired(), Email()])\r\n name = StringField('Name'.upper(), validators=[DataRequired()])\r\n ph_num = IntegerField(\"Phone Number\".upper(), validators=[DataRequired()])\r\n dept = StringField(\"Department\".upper(), validators=[DataRequired()])\r\n year = 
IntegerField(\"Academic Year\".upper(), validators=[DataRequired()])\r\n submit = SubmitField('UPDATE')\r\n\r\n def check_email(self, field):\r\n if User.query.filter_by(email=field.data).first():\r\n raise ValidationError('The email entered has already been registered')\r\n\r\n####################################################\r\n# EVENT SELECTION SETUP ############################\r\n#################################################### \r\n\r\nclass EventsForm(FlaskForm):\r\n event_selector = SelectField(\"Select Events to Participate in\".upper(), choices=[\r\n (\"Robo Race\", \"Robo Race\"),\r\n (\"Robo Carrom\", \"Robo Carrom\"),\r\n (\"Robo Soccer\", \"Robo Soccer\"),\r\n (\"Robo Maze\", \"Robo Maze\"),\r\n (\"Autonomous Line Follower\", \"Autonomous Line Follower\"),\r\n (\"Code Beta\", \"Code Beta\"),\r\n (\"Code Pro\", \"Code Pro\"),\r\n (\"Web Designing\", \"Web Designing\"),\r\n (\"Pubg\", \"Pubg\"),\r\n (\"NFS Most Wanted\", \"NFS Most Wanted\"),\r\n (\"Fifa\", \"Fifa\"),\r\n (\"Call of Duty\", \"Call of Duty\"),\r\n (\"Chess\", \"Chess\"),\r\n (\"Nail it @19\", \"Nail it @19\"),\r\n (\"Petapixel\", \"Petapixel\"),\r\n (\"Memester Challenge\", \"Memester Challenge\"),\r\n (\"Matrivia\", \"Matrivia\"),\r\n (\"Fandom\", \"Fandom\"),\r\n (\"Ek Duje ke liye\", \"Ek Duje ke liye\"),\r\n (\"CubicMatics\", \"CubicMatics\")\r\n ])\r\n submit = SubmitField('ADD')", "id": "653189", "language": "Python", "matching_score": 5.534492015838623, "max_stars_count": 2, "path": "Karmatek/users/forms.py" }, { "content": "####################################################\n# IMPORTS (FROM LIBRARY) ###########################\n####################################################\n\n# Imports for forms\n\nfrom flask_wtf import FlaskForm\nfrom wtforms import StringField, PasswordField, SubmitField\nfrom wtforms.validators import DataRequired, Email, EqualTo\nfrom wtforms import ValidationError\nfrom flask_wtf.file import FileField, FileAllowed\n\n# Imports for users\n\nfrom flask_login import current_user\n\n####################################################\n# IMPORTS (LOCAL) ##################################\n####################################################\n\nfrom blog.models import User\n\n####################################################\n# LOGIN FORM SETUP #################################\n####################################################\n\nclass LoginForm(FlaskForm):\n email = StringField('Email', validators=[DataRequired(), Email()])\n password = PasswordField('Password', validators=[DataRequired()])\n submit = SubmitField('Log In')\n\n####################################################\n# REGISTER FORM SETUP ##############################\n####################################################\n\nclass Register(FlaskForm):\n email = StringField('Email', validators=[DataRequired(), Email()])\n username = StringField('Username', validators=[DataRequired()])\n password = PasswordField('Password', validators=[DataRequired()])\n pass_confirm = PasswordField('<PASSWORD> Password', validators=[DataRequired(), EqualTo('password', 'Both the Password Fields Must Match')])\n submit = SubmitField('Register')\n\n def check_email(self, field):\n if User.query.filter_by(email=field.data).first():\n raise ValidationError('The email entered has already been registered')\n\n####################################################\n# UPDATE FORM SETUP ################################\n####################################################\n\nclass UpdateUserForm(FlaskForm):\n username = 
StringField('Username', validators=[DataRequired()])\n    submit = SubmitField('Update Username')\n\n####################################################\n# FORGOT PASSWORD FORM SETUP #######################\n####################################################\n\nclass ForgotPasswordForm(FlaskForm):\n    email = StringField('Email', validators=[DataRequired(), Email()])\n    submit = SubmitField('Get Reset Mail')\n\n####################################################\n# CHANGE PASSWORD FORM SETUP #######################\n####################################################\n\nclass ChangePasswordForm(FlaskForm):\n    password = PasswordField('Password', validators=[DataRequired()])\n    pass_confirm = PasswordField('Confirm Password', validators=[DataRequired(), EqualTo('password', 'Both the Password Fields Must Match')])\n    submit = SubmitField('Set Password')\n\n####################################################\n# UPDATE PASSWORD FORM SETUP #######################\n####################################################\n\nclass UpdatePasswordForm(FlaskForm):\n    curr_pass = PasswordField('Current Password', validators=[DataRequired()])\n    password = PasswordField('<PASSWORD>', validators=[DataRequired()])\n    pass_confirm = PasswordField('Confirm Password', validators=[DataRequired(), EqualTo('password', 'Both the Password Fields Must Match')])\n    submit = SubmitField('Set Password')\n", "id": "4539993", "language": "Python", "matching_score": 3.4630892276763916, "max_stars_count": 3, "path": "blog/users/forms.py" }, { "content": "####################################################\n# IMPORTS (FROM LIBRARY) ###########################\n####################################################\n\nfrom flask_wtf import FlaskForm\nfrom wtforms import StringField, SubmitField, TextAreaField, SelectField\nfrom wtforms.validators import DataRequired\n\n####################################################\n# BLOG POST FORM SETUP #############################\n####################################################\n\nclass BlogPostForm(FlaskForm):\n    title = StringField(\"Title\", validators=[DataRequired()])\n    category = SelectField(\"Category\", choices=[\n        ('Art', 'Art'),\n        ('Education', 'Education'),\n        ('Finances', 'Finances'),\n        ('Fitness', 'Fitness'),\n        ('Food', 'Food'),\n        ('Gaming', 'Gaming'),\n        ('Health', 'Health'),\n        ('Lifestyle', 'Lifestyle'),\n        ('Literature', 'Literature'),\n        ('Movie', 'Movie'),\n        ('Music', 'Music'),\n        ('News', 'News'),\n        ('Parenting', 'Parenting'),\n        ('Personal', 'Personal'),\n        ('Politics', 'Politics'),\n        ('Religious', 'Religious'),\n        ('Self-Development', 'Self-Development'),\n        ('Sports', 'Sports'),\n        ('Technology', 'Technology'),\n        ('Travel', 'Travel'),\n        ('Other', 'Other')\n    ])\n    text = TextAreaField(\"Text\", validators=[DataRequired()])\n    submit = SubmitField(\"Post\")\n\n####################################################\n# COMMENTS FORM SETUP ##############################\n####################################################\n\nclass CommentForm(FlaskForm):\n    text = TextAreaField(\"Comment\", validators=[DataRequired()])\n    submit = SubmitField(\"Post\")", "id": "2257308", "language": "Python", "matching_score": 4.430147171020508, "max_stars_count": 3, "path": "blog/post/forms.py" }, { "content": "####################################################\n# IMPORTS (FROM LIBRARY) ###########################\n####################################################\n\nfrom flask_wtf import FlaskForm\nfrom wtforms import StringField, SubmitField\nfrom 
wtforms.validators import DataRequired\n\n####################################################\n# SEARCH FORM SETUP ################################\n####################################################\n\nclass Search_Form(FlaskForm):\n    param = StringField('Search for User/Blog', validators=[DataRequired()])\n    submit = SubmitField('Search')", "id": "10075615", "language": "Python", "matching_score": 0.13678398728370667, "max_stars_count": 3, "path": "blog/core/forms.py" }, { "content": "\"\"\"\nProblem:\n\nImplement an efficient string matching algorithm.\n\nThat is, given a string of length N and a pattern of length k, write a program that\nsearches for the pattern in the string with less than O(N * k) worst-case time\ncomplexity.\n\nIf the pattern is found, return the start index of its location. If not, return False.\n\"\"\"\n\nfrom typing import List, Union\n\n\ndef kmp_search(text: str, pattern: str) -> Union[int, bool]:\n    # modified kmp search to return the first match only\n    len_pattern = len(pattern)\n    len_text = len(text)\n    lps = compute_lps(pattern, len_pattern)\n\n    j = 0\n    i = 0\n    while i < len_text:\n        if pattern[j] == text[i]:\n            i += 1\n            j += 1\n        if j == len_pattern:\n            return i - j\n        elif i < len_text and pattern[j] != text[i]:\n            if j != 0:\n                j = lps[j - 1]\n            else:\n                i += 1\n    return False\n\n\ndef compute_lps(pattern: str, len_pattern: int) -> List[int]:\n    # computing the Longest Prefix which is also a Suffix\n    lps = [0 for _ in range(len_pattern)]\n    length = 0\n    i = 1\n    while i < len_pattern:\n        if pattern[i] == pattern[length]:\n            length += 1\n            lps[i] = length\n            i += 1\n        elif length != 0:\n            # mismatch: fall back to the previous longest prefix-suffix\n            length = lps[length - 1]\n        else:\n            lps[i] = 0\n            i += 1\n    return lps\n\n\nif __name__ == \"__main__\":\n    print(kmp_search(\"abcabcabcd\", \"abcd\"))\n    print(kmp_search(\"abcabcabc\", \"abcd\"))\n\n\n\"\"\"\nSPECS:\n\n[n = length of text, m = length of pattern]\nTIME COMPLEXITY: O(n + m)\nSPACE COMPLEXITY: O(m)\n\"\"\"\n", "id": "12184350", "language": "Python", "matching_score": 2.0256221294403076, "max_stars_count": 70, "path": "Solutions/276.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a string and a pattern, find the starting indices of all occurrences of the\npattern in the string. 
For example, given the string \"abracadabra\" and the pattern\n\"abr\", you should return [0, 7].\n\"\"\"\n\nfrom typing import List\n\n\ndef kmp_search(string: str, pattern: str) -> List[int]:\n pattern_length = len(pattern)\n string_length = len(string)\n lps = [0] * pattern_length\n result = []\n compute_lps(pattern, pattern_length, lps)\n\n j = 0\n i = 0\n while i < string_length:\n if pattern[j] == string[i]:\n i += 1\n j += 1\n # entire pattern match\n if j == pattern_length:\n result.append(i - j)\n j = lps[j - 1]\n # mismatch after j positions\n elif i < string_length and pattern[j] != string[i]:\n if j != 0:\n j = lps[j - 1]\n else:\n i += 1\n return result\n\n\ndef compute_lps(pattern: str, pattern_length: int, lps: List[int]) -> None:\n length = 0\n lps[0]\n i = 1\n while i < pattern_length:\n # match occours\n if pattern[i] == pattern[length]:\n length += 1\n lps[i] = length\n i += 1\n continue\n if length != 0:\n length = lps[length - 1]\n else:\n lps[i] = 0\n i += 1\n\n\nif __name__ == \"__main__\":\n print(kmp_search(\"abracadabra\", \"abr\"))\n print(kmp_search(\"abracadabra\", \"xyz\"))\n print(kmp_search(\"aaaa\", \"aa\"))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(string_length + pattern_length)\nSPACE COMPLEXITY: O(pattern_length)\n\"\"\"\n", "id": "8808360", "language": "Python", "matching_score": 0.6928812861442566, "max_stars_count": 70, "path": "Solutions/211.py" }, { "content": "\"\"\"\nProblem:\n\nAn sorted array of integers was rotated an unknown number of times.\n\nGiven such an array, find the index of the element in the array in faster than linear\ntime. If the element doesn't exist in the array, return null.\n\nFor example, given the array [13, 18, 25, 2, 8, 10] and the element 8, return 4\n(the index of 8 in the array).\n\nYou can assume all the integers in the array are unique.\n\"\"\"\n\nfrom typing import List\n\n\ndef find_pivot_helper(arr: List[int], start: int, end: int, length: int) -> int:\n if end < start:\n return 0\n\n mid = (start + end) // 2\n if mid > 0 and arr[mid - 1] > arr[mid]:\n return mid\n elif mid < length - 1 and arr[mid + 1] < arr[mid]:\n return mid + 1\n elif arr[start] < arr[mid]:\n return find_pivot_helper(arr, mid + 1, end, length)\n elif arr[end] > arr[mid]:\n return find_pivot_helper(arr, start, mid - 1, length)\n\n\ndef binary_search(arr: List[int], low: int, high: int, element: int) -> int:\n if high < low:\n return -1\n\n mid = (low + high) // 2\n if element == arr[mid]:\n return mid\n if element > arr[mid]:\n return binary_search(arr, (mid + 1), high, element)\n else:\n return binary_search(arr, low, (mid - 1), element)\n\n\ndef pivoted_binary_search(arr: List[int], element: int) -> int:\n length = len(arr)\n pivot = find_pivot_helper(arr, 0, length - 1, length)\n\n if arr[pivot] == element:\n return pivot\n if arr[pivot] < element:\n element_position = binary_search(arr, pivot + 1, length - 1, element)\n if element_position != -1:\n return element_position\n element_position = binary_search(arr, 0, pivot - 1, element)\n if element_position != -1:\n return element_position\n return -1\n return -1\n\n\nif __name__ == \"__main__\":\n arr = [13, 18, 25, 2, 8, 10]\n\n print(pivoted_binary_search(arr, 8))\n print(pivoted_binary_search(arr, 10))\n print(pivoted_binary_search(arr, 15))\n print(pivoted_binary_search(arr, 2))\n\n arr = [25, 2, 8, 10, 13, 18]\n\n print(pivoted_binary_search(arr, 8))\n\n arr = [8, 10, 13, 18, 25, 2]\n\n print(pivoted_binary_search(arr, 8))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(log(n))\nSPACE 
COMPLEXITY: O(log(n)) [recursion depth]\n\"\"\"\n", "id": "10921873", "language": "Python", "matching_score": 2.546210527420044, "max_stars_count": 70, "path": "Solutions/058.py" }, { "content": "\"\"\"\nProblem:\n\nSuppose an array sorted in ascending order is rotated at some pivot unknown to you\nbeforehand. Find the minimum element in O(log N) time. You may assume the array does\nnot contain duplicates.\n\nFor example, given [5, 7, 10, 3, 4], return 3.\n\"\"\"\n\nfrom typing import List\n\n\ndef find_pivot_helper(arr: List[int], low: int, high: int) -> int:\n    if low == high:\n        return high\n\n    mid = (high + low) // 2\n    if mid < high and arr[mid] > arr[mid + 1]:\n        return mid\n    elif mid > low and arr[mid] < arr[mid - 1]:\n        return mid - 1\n    elif arr[mid] > arr[high]:\n        return find_pivot_helper(arr, mid + 1, high)\n    return find_pivot_helper(arr, low, mid - 1)\n\n\ndef find_pivot(arr: List[int]) -> int:\n    length = len(arr)\n    # the helper returns the index of the largest element (the last index of the\n    # original sorted order); the minimum element sits immediately after it\n    pivot = find_pivot_helper(arr, 0, length - 1)\n    return (pivot + 1) % length\n\n\nif __name__ == \"__main__\":\n    print(find_pivot([5, 7, 10, 3, 4]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(log(n))\nSPACE COMPLEXITY: O(log(n)) [python doesn't support tail recursion optimization]\n\"\"\"\n", "id": "12394417", "language": "Python", "matching_score": 1.0910024642944336, "max_stars_count": 70, "path": "Solutions/203.py" }, { "content": "\"\"\"\nProblem:\n\nDescribe an algorithm to compute the longest increasing subsequence of an array of\nnumbers in O(n log n) time.\n\"\"\"\n\nfrom typing import List\n\n\ndef get_ceil_index(arr: List[int], l: int, r: int, key: int) -> int:\n    while r - l > 1:\n        m = l + (r - l) // 2\n        if arr[m] >= key:\n            r = m\n        else:\n            l = m\n    return r\n\n\ndef get_longest_increasing_subsequence(arr: List[int]) -> int:\n    length = len(arr)\n    tail_table = [0 for i in range(length)]\n    tail_table[0] = arr[0]\n    result_length = 1\n\n    for i in range(1, length):\n        if arr[i] < tail_table[0]:\n            # new smallest value\n            tail_table[0] = arr[i]\n        elif arr[i] > tail_table[result_length - 1]:\n            # current element is a part of an increasing subsequence\n            tail_table[result_length] = arr[i]\n            result_length += 1\n        else:\n            # current element is the last candidate of an existing subsequence and will\n            # replace the ceil value in tail_table\n            tail_table[get_ceil_index(tail_table, -1, result_length - 1, arr[i])] = arr[i]\n    return result_length\n\n\nif __name__ == \"__main__\":\n    print(get_longest_increasing_subsequence([1, 2, 3, 4, 5]))\n    print(get_longest_increasing_subsequence([1, 2, 3, 5, 4]))\n    print(get_longest_increasing_subsequence([1, 4, 1, 2, 3]))\n    print(get_longest_increasing_subsequence([5, 4, 3, 2, 1]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n x log(n))\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "8438749", "language": "Python", "matching_score": 1.8719595670700073, "max_stars_count": 70, "path": "Solutions/364.py" }, { "content": "\"\"\"\nProblem:\n\nGiven an array of numbers, find the length of the longest increasing subsequence in\nthe array. 
The subsequence does not necessarily have to be contiguous.\n\nFor example, given the array [0, 8, 4, 12, 2, 10, 6, 14, 1, 9, 5, 13, 3, 11, 7, 15],\nthe longest increasing subsequence has length 6: it is 0, 2, 6, 9, 11, 15.\n\"\"\"\n\nfrom typing import List\n\n\ndef longest_increasing_subsequence(arr: List[int]) -> int:\n length = len(arr)\n arr_dp = [1 for i in range(length)]\n for i in range(1, length):\n for j in range(i):\n if arr[i] > arr[j]:\n # increasing subsequence\n arr_dp[i] = max(arr_dp[i], arr_dp[j] + 1)\n return max(arr_dp)\n\n\nif __name__ == \"__main__\":\n print(\n longest_increasing_subsequence(\n [0, 8, 4, 12, 2, 10, 6, 14, 1, 9, 5, 13, 3, 11, 7, 15]\n )\n )\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n ^ 2)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "4933185", "language": "Python", "matching_score": 0.18183065950870514, "max_stars_count": 70, "path": "Solutions/075.py" }, { "content": "\"\"\"\nProblem:\n\nA girl is walking along an apple orchard with a bag in each hand. She likes to pick\napples from each tree as she goes along, but is meticulous about not putting different\nkinds of apples in the same bag.\n\nGiven an input describing the types of apples she will pass on her path, in order,\ndetermine the length of the longest portion of her path that consists of just two types\nof apple trees.\n\nFor example, given the input [2, 1, 2, 3, 3, 1, 3, 5], the longest portion will involve\ntypes 1 and 3, with a length of four.\n\"\"\"\n\nfrom typing import List\n\n\ndef get_longest_path_length(apples: List[int]) -> int:\n curr_apples = {}\n max_path = 0\n start = 0\n curr_apples[apples[start]] = 1\n length = len(apples)\n # moving the pointer to the position where the apple is not the same as the 1st\n # apple in the array\n for i in range(1, length):\n if apples[i] in curr_apples:\n curr_apples[apples[i]] += 1\n else:\n mismatch = i\n break\n else:\n # only 1 type of apple present in the input\n return length\n curr_apples[apples[mismatch]] = 1\n # updating max_path to find the result\n for i in range(mismatch + 1, length):\n curr_apple = apples[i]\n if curr_apple not in curr_apples:\n max_path = max(max_path, i - start)\n while len(curr_apples) > 1:\n curr_apples[apples[start]] -= 1\n if not curr_apples[apples[start]]:\n del curr_apples[apples[start]]\n start += 1\n curr_apples[curr_apple] = 1\n else:\n curr_apples[curr_apple] += 1\n max_path = max(max_path, length - start)\n return max_path\n\n\nif __name__ == \"__main__\":\n print(get_longest_path_length([2, 1, 2, 3, 3, 1, 3, 5]))\n print(get_longest_path_length([2, 1, 2, 2, 2, 1, 2, 1]))\n print(get_longest_path_length([1, 2, 3, 4]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "2672407", "language": "Python", "matching_score": 1.4784786701202393, "max_stars_count": 70, "path": "Solutions/298.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a tree where each edge has a weight, compute the length of the longest path in\nthe tree.\n\nFor example, given the following tree:\n\n a\n /|\\\n b c d\n / \\\n e f\n / \\\n g h\nand the weights: a-b: 3, a-c: 5, a-d: 8, d-e: 2, d-f: 4, e-g: 1, e-h: 1, the longest\npath would be c -> a -> d -> f, with a length of 17.\n\nThe path does not have to pass through the root, and each node can have any amount of\nchildren.\n\"\"\"\n\nfrom __future__ import annotations\n\n\nclass Node:\n def __init__(self, val: str) -> None:\n self.val = val\n self.max_path = 0\n self.child_dists = {}\n\n def add_child(self, child: str, wt: int) -> None:\n 
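# record the weight of the edge from this node to the named child\n        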
self.child_dists[child] = wt\n\n def get_max_path(self, tree: Tree) -> int:\n if not self.child_dists:\n return 0\n # generating the max path length\n path_lengths = []\n children_max_path_lengths = []\n for node, dist in self.child_dists.items():\n path_lengths.append(tree.tree[node].max_path + dist)\n children_max_path_lengths.append(tree.tree[node].get_max_path(tree))\n return max(sum(sorted(path_lengths)[-2:]), max(children_max_path_lengths))\n\n def update_max_paths(self, tree: Tree) -> None:\n if not self.child_dists:\n self.max_path = 0\n return\n # generating the paths from the root\n root_paths = []\n for child, dist in self.child_dists.items():\n tree.tree[child].update_max_paths(tree)\n root_paths.append(tree.tree[child].max_path + dist)\n self.max_path = max(root_paths)\n\n\nclass Tree:\n def __init__(self) -> None:\n self.tree = {}\n self.root = None\n\n def add_node(self, val: str) -> None:\n self.tree[val] = Node(val)\n if not self.root:\n self.root = val\n\n def add_child(self, parent: str, child: str, wt: int) -> None:\n if parent not in self.tree:\n raise ValueError(\"Parent Node not present in the tree\")\n self.tree[parent].add_child(child, wt)\n self.tree[child] = Node(child)\n\n def get_longest_path(self) -> int:\n if not self.root:\n return 0\n self.tree[self.root].update_max_paths(self)\n return self.tree[self.root].get_max_path(self)\n\n\nif __name__ == \"__main__\":\n tree = Tree()\n\n tree.add_node(\"a\")\n tree.add_child(\"a\", \"b\", 3)\n tree.add_child(\"a\", \"c\", 5)\n tree.add_child(\"a\", \"d\", 8)\n tree.add_child(\"d\", \"e\", 2)\n tree.add_child(\"d\", \"f\", 4)\n tree.add_child(\"e\", \"g\", 1)\n tree.add_child(\"e\", \"h\", 1)\n\n print(tree.get_longest_path())\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n ^ n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "8106061", "language": "Python", "matching_score": 0.6671854853630066, "max_stars_count": 70, "path": "Solutions/160.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a binary tree, return the level of the tree with minimum sum.\n\"\"\"\n\nfrom sys import maxsize\n\nfrom DataStructures.Queue import Queue\nfrom DataStructures.Tree import BinaryTree, Node\n\n\ndef get_level_min_sum(tree: BinaryTree) -> int:\n if not tree.root:\n return 0\n # the levels are delimited in the queue by None\n queue = Queue()\n queue.enqueue(tree.root)\n queue.enqueue(None)\n\n min_level_sum = maxsize\n curr_level_sum = 0\n while not queue.is_empty():\n node = queue.dequeue()\n if node is not None:\n if node.left:\n queue.enqueue(node.left)\n if node.right:\n queue.enqueue(node.right)\n curr_level_sum += node.val\n else:\n min_level_sum = min(curr_level_sum, min_level_sum)\n if len(queue) > 0:\n queue.enqueue(None)\n curr_level_sum = 0\n return min_level_sum\n\n\nif __name__ == \"__main__\":\n a = Node(100)\n b = Node(200)\n c = Node(300)\n d = Node(400)\n e = Node(500)\n f = Node(600)\n g = Node(700)\n h = Node(800)\n\n a.left = b\n a.right = c\n\n b.left = d\n b.right = e\n\n c.left = f\n c.right = g\n\n d.right = h\n\n tree = BinaryTree()\n tree.root = a\n\n print(tree)\n print(get_level_min_sum(tree))\n a.val = 1000\n print(tree)\n print(get_level_min_sum(tree))\n b.val = 1500\n print(tree)\n print(get_level_min_sum(tree))\n h.val = 2000\n print(tree)\n print(get_level_min_sum(tree))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "3068088", "language": "Python", "matching_score": 3.116309881210327, "max_stars_count": 70, "path": "Solutions/117.py" }, { "content": 
"\"\"\"\nProblem:\n\nPrint the nodes in a binary tree level-wise. For example, the following should print\n1, 2, 3, 4, 5.\n\n 1\n / \\\n2 3\n / \\\n 4 5\n\"\"\"\n\nfrom typing import List\n\nfrom DataStructures.Queue import Queue\nfrom DataStructures.Tree import BinaryTree, Node\n\n\ndef get_lvl_wise_nodes(tree: BinaryTree) -> List[Node]:\n # using bfs to generate the list of nodes by level\n if not tree.root:\n return []\n\n queue = Queue()\n queue.enqueue(tree.root)\n ans = []\n while not queue.is_empty():\n node = queue.dequeue()\n if node.left is not None:\n queue.enqueue(node.left)\n if node.right is not None:\n queue.enqueue(node.right)\n ans.append(node.val)\n return ans\n\n\nif __name__ == \"__main__\":\n tree = BinaryTree()\n tree.root = Node(1)\n\n tree.root.left = Node(2)\n tree.root.right = Node(3)\n\n tree.root.right.left = Node(4)\n tree.root.right.right = Node(5)\n\n print(f\"Tree: {tree}\")\n print(f\"Level wise result: {get_lvl_wise_nodes(tree)}\")\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "7710780", "language": "Python", "matching_score": 1.742037296295166, "max_stars_count": 70, "path": "Solutions/107.py" }, { "content": "\"\"\"\nProblem:\n\nGiven pre-order and in-order traversals of a binary tree, write a function to reconstruct the tree.\n\nFor example, given the following preorder traversal:\n\n[a, b, d, e, c, f, g]\nAnd the following inorder traversal:\n\n[d, b, e, a, f, c, g]\nYou should return the following tree:\n\n a\n / \\\n b c\n / \\ / \\\nd e f g\n\"\"\"\n\nfrom typing import List\n\nfrom DataStructures.Tree import BinaryTree, Node\n\n\ndef generate_tree(preorder: List[int], inorder: List[int]) -> BinaryTree:\n length = len(preorder)\n if length != len(inorder):\n raise RuntimeError\n if length == 0:\n return BinaryTree()\n # generating the root\n root = preorder[0]\n tree = BinaryTree()\n tree.root = Node(root)\n # generating the rest of the tree\n if length > 1:\n i = inorder.index(root)\n # partitioning the nodes as per the branch\n inorder_left, preorder_left = (inorder[:i], preorder[1 : i + 1])\n inorder_right, preorder_right = (inorder[i + 1 :], preorder[i + 1 :])\n # creating a tree for each branch\n tree_left = generate_tree(preorder_left, inorder_left)\n tree_right = generate_tree(preorder_right, inorder_right)\n # attaching the sub-tree to their respective branch\n tree.root.left = tree_left.root\n tree.root.right = tree_right.root\n return tree\n\n\nif __name__ == \"__main__\":\n test1 = generate_tree(\n [\"a\", \"b\", \"d\", \"e\", \"c\", \"f\", \"g\"], [\"d\", \"b\", \"e\", \"a\", \"f\", \"c\", \"g\"]\n )\n print(test1)\n\n test2 = generate_tree(\n [\"a\", \"b\", \"d\", \"e\", \"c\", \"f\"], [\"d\", \"b\", \"e\", \"a\", \"f\", \"c\"]\n )\n print(test2)\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n ^ 2)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "9744563", "language": "Python", "matching_score": 2.949021339416504, "max_stars_count": 70, "path": "Solutions/048.py" }, { "content": "\"\"\"\nProblem:\n\nTypically, an implementation of in-order traversal of a binary tree has O(h) space\ncomplexity, where h is the height of the tree. 
Write a program to compute the in-order\ntraversal of a binary tree using O(1) space.\n\"\"\"\n\nfrom typing import Generator\n\nfrom DataStructures.Tree import BinaryTree, Node\n\n\ndef morris_traversal(tree: BinaryTree) -> Generator[int, None, None]:\n current = tree.root\n\n while current is not None:\n if current.left is None:\n yield current.val\n current = current.right\n continue\n # Find the inorder predecessor of current\n pre = current.left\n while pre.right is not None and pre.right is not current:\n pre = pre.right\n if pre.right is None:\n # Make current as right child of its inorder predecessor\n pre.right = current\n current = current.left\n else:\n # Revert the changes made in the 'if' part to restore the\n # original tree. (Fix the right child of predecessor)\n pre.right = None\n yield current.val\n current = current.right\n\n\nif __name__ == \"__main__\":\n tree = BinaryTree()\n\n tree.root = Node(1)\n\n tree.root.left = Node(2)\n tree.root.right = Node(3)\n\n tree.root.left.left = Node(4)\n tree.root.left.right = Node(5)\n\n tree.root.right.right = Node(6)\n\n print(tree)\n for node in morris_traversal(tree):\n print(node, end=\" \")\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "3387739", "language": "Python", "matching_score": 1.9548630714416504, "max_stars_count": 70, "path": "Solutions/223.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a binary tree, determine whether or not it is height-balanced. A height-balanced\nbinary tree can be defined as one in which the heights of the two subtrees of any node\nnever differ by more than one.\n\"\"\"\n\nfrom typing import Tuple\n\nfrom DataStructures.Tree import Node, BinaryTree\n\n\ndef height_helper(node: Node) -> Tuple[int, bool]:\n if node.left is None:\n left_height, balance_left = 0, True\n else:\n left_height, balance_left = height_helper(node.left)\n if node.right is None:\n right_height, balance_right = 0, True\n else:\n right_height, balance_right = height_helper(node.right)\n\n balance = balance_left and balance_right\n current_balance = -1 <= (right_height - left_height) <= 1\n height = max(left_height, right_height) + 1\n return height, balance and current_balance\n\n\ndef check_balance(tree: BinaryTree) -> bool:\n if tree.root is None:\n return True\n _, balance = height_helper(tree.root)\n return balance\n\n\nif __name__ == \"__main__\":\n tree = BinaryTree()\n\n tree.root = Node(0)\n\n tree.root.left = Node(1)\n tree.root.right = Node(2)\n\n tree.root.left.left = Node(3)\n tree.root.left.right = Node(4)\n\n print(check_balance(tree))\n\n tree.root.left.right.left = Node(5)\n\n print(check_balance(tree))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(log(n))\n\"\"\"\n", "id": "7943560", "language": "Python", "matching_score": 2.7607839107513428, "max_stars_count": 70, "path": "Solutions/247.py" }, { "content": "\"\"\"\nProblem:\n\nGiven the root of a binary tree, return a deepest node. 
For example, in the following\ntree, return d.\n\n    a\n   / \\\n  b   c\n /\nd\n\"\"\"\n\nfrom typing import Optional, Tuple\n\nfrom DataStructures.Tree import BinaryTree, Node\n\n\ndef deepest_node_helper(node: Node) -> Tuple[int, Optional[Node]]:\n    if node is None:\n        return 0, None\n    if node.left is None and node.right is None:\n        # leaf node: depth 1, deepest node is the node itself\n        return 1, node\n    # getting the deepest node of the left-subtree\n    left_height, left_node = 0, None\n    if node.left:\n        left_height, left_node = deepest_node_helper(node.left)\n    # getting the deepest node of the right-subtree\n    right_height, right_node = 0, None\n    if node.right:\n        right_height, right_node = deepest_node_helper(node.right)\n    # comparing and returning the deepest node\n    if left_height > right_height:\n        return left_height + 1, left_node\n    return right_height + 1, right_node\n\n\ndef deepest_node(tree: BinaryTree) -> Node:\n    _, node = deepest_node_helper(tree.root)\n    return node\n\n\nif __name__ == \"__main__\":\n    tree = BinaryTree()\n    tree.root = Node(\"a\")\n\n    tree.root.left = Node(\"b\")\n    tree.root.right = Node(\"c\")\n\n    tree.root.left.left = Node(\"d\")\n\n    print(deepest_node(tree))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(log(n)) [recursion depth]\n\"\"\"\n", "id": "10644631", "language": "Python", "matching_score": 0.05566331371665001, "max_stars_count": 70, "path": "Solutions/080.py" }, { "content": "\"\"\"\nProblem:\n\nAssume you have access to a function toss_biased() which returns 0 or 1 with a\nprobability that's not 50-50 (but also not 0-100 or 100-0). You do not know the bias of\nthe coin.\n\nWrite a function to simulate an unbiased coin toss.\n\"\"\"\n\nfrom random import random\nimport matplotlib.pyplot as plt\n\n\ndef toss_biased():\n    # toss with 30-70 bias\n    value = random()\n    if value < 0.3:\n        return 0\n    return 1\n\n\ndef toss_unbiased():\n    # getting the biased toss value twice\n    toss1 = toss_biased()\n    toss2 = toss_biased()\n    # as long as we don't get different values, we keep tossing\n    while toss1 == toss2:\n        toss1 = toss_biased()\n        toss2 = toss_biased()\n    return toss1\n\n\nif __name__ == \"__main__\":\n    biased = [toss_biased() for i in range(100_000)]\n    unbiased = [toss_unbiased() for i in range(100_000)]\n\n    # displaying biased distribution\n    plt.title(\"Biased Distribution\")\n    plt.hist(biased, bins=2, edgecolor=\"black\")\n    plt.show()\n\n    # displaying unbiased distribution\n    plt.title(\"Unbiased Distribution\")\n    plt.hist(unbiased, bins=2, edgecolor=\"black\")\n    plt.show()\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(1)\nSPACE COMPLEXITY: O(1)\n[for toss_unbiased function]\n\"\"\"\n", "id": "6693881", "language": "Python", "matching_score": 1.4703413248062134, "max_stars_count": 70, "path": "Solutions/066.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a stream of elements too large to store in memory, pick a random element from the\nstream with uniform probability.\n\"\"\"\n\nfrom random import randint\nimport matplotlib.pyplot as plt\nfrom typing import Generator\n\n\ndef element_stream() -> Generator[int, None, None]:\n    # generator function to simulate a stream of elements too large to store in memory\n    while True:\n        yield randint(1, 10_000)\n\n\ndef random_selector(generator: Generator[int, None, None]) -> int:\n    # getting 10 elements from the stream of elements\n    arr = [next(generator) for i in range(10)]\n    # selecting a random element from the array of 10 elements\n    pos = randint(0, 9)\n    return arr[pos]\n\n\nif __name__ == \"__main__\":\n    generator = element_stream()\n    # storing the selected elements for plotting a 
graph\n values = []\n for i in range(100_000):\n values.append(random_selector(generator))\n # plotting the histogram of frequencies of the selected elements (not stated in\n # problem, added to display the uniform distribution)\n plt.hist(values, edgecolor=\"black\")\n plt.show()\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(1)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "4322872", "language": "Python", "matching_score": 1.7796313762664795, "max_stars_count": 70, "path": "Solutions/015.py" }, { "content": "\"\"\"\nProblem:\n\nYou are given n numbers as well as n probabilities that sum up to 1. Write a function\nto generate one of the numbers with its corresponding probability.\n\nFor example, given the numbers [1, 2, 3, 4] and probabilities [0.1, 0.5, 0.2, 0.2],\nyour function should return 1 10% of the time, 2 50% of the time, and 3 and 4 20% of\nthe time.\n\nYou can generate random numbers between 0 and 1 uniformly.\n\"\"\"\n\nimport matplotlib.pyplot as plt\nfrom random import random\nfrom typing import List\n\n\nclass RandomGenerator:\n def __init__(self, numbers: List[int], probabilities: List[float]) -> None:\n self.numbers = numbers\n self.probabilities = probabilities\n\n def generate(self) -> int:\n check = random()\n cumulative = 0\n for pos in range(len(self.probabilities)):\n cumulative += self.probabilities[pos]\n if cumulative >= check:\n return self.numbers[pos]\n\n\nif __name__ == \"__main__\":\n generator = RandomGenerator([1, 2, 3, 4], [0.1, 0.5, 0.2, 0.2])\n nums = []\n for _ in range(1, 100_000):\n nums.append(generator.generate())\n plt.hist(nums)\n plt.show()\n", "id": "10794634", "language": "Python", "matching_score": 1.439236044883728, "max_stars_count": 70, "path": "Solutions/152.py" }, { "content": "\"\"\"\nProblem:\n\nGiven an integer n and a list of integers l, write a function that randomly generates a\nnumber from 0 to n-1 that isn't in l (uniform).\n\"\"\"\n\nimport matplotlib.pyplot as plt\nfrom random import randint\nfrom typing import List\n\n\ndef generate_num_not_in_data(n: int, data: List[int]) -> int:\n num = randint(0, n - 1)\n if num in data:\n return generate_num_not_in_data(n, data)\n return num\n\n\nif __name__ == \"__main__\":\n data = [1, 3, 5]\n results = {}\n for i in range(100_000):\n val = generate_num_not_in_data(7, data)\n if val in results:\n results[val] += 1\n else:\n results[val] = 1\n\n x, y = [], []\n for i in results:\n x.append(i)\n y.append(results[i])\n plt.bar(x=x, height=y, edgecolor=\"black\")\n plt.show()\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "2143672", "language": "Python", "matching_score": 1.2731465101242065, "max_stars_count": 70, "path": "Solutions/090.py" }, { "content": "\"\"\"\nProblem:\n\nUsing a function rand7() that returns an integer from 1 to 7 (inclusive) with uniform\nprobability, implement a function rand5() that returns an integer from 1 to 5\n(inclusive).\n\"\"\"\n\nfrom random import randint\nimport matplotlib.pyplot as plt\n\n\n# rand7 implementation\ndef rand7() -> int:\n return randint(1, 7)\n\n\ndef rand5() -> int:\n val = rand7()\n if val <= 5:\n return val\n return rand5()\n\n\nif __name__ == \"__main__\":\n values = []\n for i in range(100_000):\n values.append(rand5())\n plt.hist(values, bins=5, edgecolor=\"black\")\n plt.show()\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(1)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "12077707", "language": "Python", "matching_score": 2.6137726306915283, "max_stars_count": 70, "path": "Solutions/071.py" }, { "content": 
"\"\"\"\nProblem:\n\nUsing a function rand5() that returns an integer from 1 to 5 (inclusive) with uniform\nprobability, implement a function rand7() that returns an integer from 1 to 7\n(inclusive).\n\"\"\"\n\nfrom random import randint\nimport matplotlib.pyplot as plt\n\n# rand5 implementation\ndef rand5() -> int:\n return randint(1, 5)\n\n\ndef rand7() -> int:\n # generating 2 numbers between 1 and 5\n temp1 = rand5()\n temp2 = rand5()\n # generating a number temp between 1 and 25\n temp = 5 * temp1 + temp2 - 5\n # if the number is NOT in the range[1, 21], rand7 is called again\n # mod 7 over 1 to 21, yield all numbers from 0 to 6 with EQUAL probability\n if temp <= 21:\n return (temp % 7) + 1\n return rand7()\n\n\nif __name__ == \"__main__\":\n results = []\n # executing rand7 100,000 times (for plotting on a graph)\n for _ in range(100_000):\n results.append(rand7())\n # plotting the distribution\n plt.hist(results, 7, edgecolor=\"black\")\n plt.show()\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(1)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "6326721", "language": "Python", "matching_score": 0.12819020450115204, "max_stars_count": 70, "path": "Solutions/045.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a list of numbers L, implement a method sum(i, j) which returns the sum from the\nsublist L[i:j] (including i, excluding j).\n\nFor example, given L = [1, 2, 3, 4, 5], sum(1, 3) should return sum([2, 3]), which is 5.\n\nYou can assume that you can do some pre-processing. sum() should be optimized over the\npre-processing step.\n\"\"\"\n\nfrom typing import List\n\n\nclass SubarraySumOptimizer:\n def __init__(self, arr: List[int]) -> None:\n # runs in O(n) time, O(n) space\n self.preprocessed_arr = [0 for _ in range(len(arr) + 1)]\n for i in range(len(arr)):\n self.preprocessed_arr[i + 1] = self.preprocessed_arr[i] + arr[i]\n\n def sum(self, start: int, end: int) -> int:\n # runs in O(1) time, O(1) space\n # NOTE: the sum is supposed to return the sum in the range [start, end)\n if (start < 0) or (end > len(self.preprocessed_arr) - 1) or (start > end):\n return 0\n return self.preprocessed_arr[end] - self.preprocessed_arr[start]\n\n\nif __name__ == \"__main__\":\n sso = SubarraySumOptimizer([1, 2, 3, 4, 5])\n\n print(sso.sum(1, 3))\n print(sso.sum(0, 5))\n print(sso.sum(0, 4))\n print(sso.sum(3, 4))\n", "id": "10327288", "language": "Python", "matching_score": 1.1053657531738281, "max_stars_count": 70, "path": "Solutions/149.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a list of integers, write a function that returns the largest sum of non-adjacent\nnumbers. Numbers can be 0 or negative.\n\nFor example, [2, 4, 6, 8] should return 12, since we pick 4 and 8. 
[5, 1, 1, 5] should\nreturn 10, since we pick 5 and 5.\n\"\"\"\n\nfrom typing import List\n\n\ndef max_nonadjacent_sum(arr: List[int]) -> int:\n    including = 0\n    excluding = 0\n    for elem in arr:\n        # updating maximum sum including and excluding the current element\n        including, excluding = max(excluding + elem, elem), max(excluding, including)\n    return max(including, excluding)\n\n\nif __name__ == \"__main__\":\n    print(max_nonadjacent_sum([2, 4, 6, 8]))\n    print(max_nonadjacent_sum([5, 1, 1, 5]))\n    print(max_nonadjacent_sum([-5, 1, 1, -5]))\n    print(max_nonadjacent_sum([5, 5, 10, 100, 10, 5]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "4929803", "language": "Python", "matching_score": 1.8360135555267334, "max_stars_count": 70, "path": "Solutions/009.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a list of integers, return the largest product that can be made by multiplying\nany three integers.\n\nFor example, if the list is [-10, -10, 5, 2], we should return 500, since that's\n-10 * -10 * 5.\n\nYou can assume the list has at least three integers.\n\"\"\"\n\nfrom typing import List\n\n\ndef largest_product_of_3(arr: List[int]) -> int:\n    # tracking the smallest 2 and the largest 3 numbers and generating the largest\n    # product using them\n    negative_1 = negative_2 = float(\"inf\")  # two smallest values seen so far\n    positive_1 = positive_2 = positive_3 = float(\"-inf\")  # three largest values\n    for elem in arr:\n        # update the two smallest values\n        if elem < negative_1:\n            negative_1, negative_2 = elem, negative_1\n        elif elem < negative_2:\n            negative_2 = elem\n        # update the three largest values\n        if elem > positive_1:\n            positive_1, positive_2, positive_3 = elem, positive_1, positive_2\n        elif elem > positive_2:\n            positive_2, positive_3 = elem, positive_2\n        elif elem > positive_3:\n            positive_3 = elem\n    return max(\n        positive_1 * negative_1 * negative_2, positive_1 * positive_2 * positive_3\n    )\n\n\nif __name__ == \"__main__\":\n    print(largest_product_of_3([-10, -10, 5, 2]))\n    print(largest_product_of_3([10, -10, 5, 2]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "380274", "language": "Python", "matching_score": 0.8730992674827576, "max_stars_count": 70, "path": "Solutions/069.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a list of numbers, create an algorithm that arranges them in order to form the\nlargest possible integer. For example, given [10, 7, 76, 415], you should return\n77641510.\n\"\"\"\n\nfrom __future__ import annotations\nfrom typing import List\n\n\nclass CustomInt:\n    def __init__(self, value: int) -> None:\n        self.value = str(value)\n\n    def __lt__(self, other: CustomInt) -> bool:\n        # self is \"smaller\" when placing it first yields the smaller concatenation\n        return self.value + other.value < other.value + self.value\n\n\ndef get_largest(arr: List[int]) -> int:\n    arr = list(map(CustomInt, arr))\n    arr.sort(reverse=True)\n    return int(\"\".join(map(lambda x: x.value, arr)))\n\n\nif __name__ == \"__main__\":\n    print(get_largest([10, 7, 76, 415]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n x log(n))\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "2687693", "language": "Python", "matching_score": 1.3946799039840698, "max_stars_count": 70, "path": "Solutions/228.py" }, { "content": "\"\"\"\nProblem:\n\nGiven an array of integers out of order, determine the bounds of the smallest window\nthat must be sorted in order for the entire array to be sorted. 
For example, given\n[3, 7, 5, 6, 9], you should return (1, 3).\n\"\"\"\n\n\nfrom typing import List, Tuple\n\n\ndef get_sort_range(arr: List[int]) -> Tuple[int, int]:\n arr_sorted = sorted(arr)\n if arr_sorted == arr:\n return -1, -1\n # getting the start and end of the unsorted part of the array\n start, end = 0, 0\n for i in range(len(arr)):\n if arr[i] != arr_sorted[i]:\n start = i\n break\n for i in range(start, len(arr)):\n if arr[i] != arr_sorted[i]:\n end = i\n return start, end\n\n\nif __name__ == \"__main__\":\n print(get_sort_range([3, 5, 6, 7, 9]))\n print(get_sort_range([3, 7, 5, 6, 9]))\n print(get_sort_range([5, 4, 3, 2, 1]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n x log(n))\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "4511530", "language": "Python", "matching_score": 1.1470816135406494, "max_stars_count": 70, "path": "Solutions/257.py" }, { "content": "\"\"\"\nProblem:\n\nWrite a function that returns the bitwise AND of all integers between M and N,\ninclusive.\n\"\"\"\n\n\ndef bitwise_and_on_range(start: int, end: int) -> int:\n # using naive approach\n result = start\n for num in range(start + 1, end + 1):\n result = result & num\n return result\n\n\nif __name__ == \"__main__\":\n print(bitwise_and_on_range(3, 4))\n print(bitwise_and_on_range(5, 6))\n print(bitwise_and_on_range(126, 127))\n print(bitwise_and_on_range(127, 215))\n print(bitwise_and_on_range(129, 215))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "12657492", "language": "Python", "matching_score": 1.256650686264038, "max_stars_count": 70, "path": "Solutions/317.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a positive integer n, return the n-th perfect number.\n\nFor example, given 1, you should return 19. Given 2, you should return 28.\n\"\"\"\n\n\ndef calc_sum_of_digits(num: int) -> int:\n s = 0\n for digit in str(num):\n s += int(digit)\n return s\n\n\ndef get_nth_perfect_num_naive(n: int) -> int:\n num = 19\n count = 1\n while n > count:\n num += 1\n if calc_sum_of_digits(num) == 10:\n count += 1\n return num\n\n\nif __name__ == \"__main__\":\n print(get_nth_perfect_num_naive(1))\n print(get_nth_perfect_num_naive(2))\n print(get_nth_perfect_num_naive(10))\n", "id": "11918845", "language": "Python", "matching_score": 0.9044620990753174, "max_stars_count": 70, "path": "Solutions/070.py" }, { "content": "\"\"\"\nProblem:\n\nLet's define a \"sevenish\" number to be one which is either a power of 7, or the sum of\nunique powers of 7. The first few sevenish numbers are 1, 7, 8, 49, and so on. 
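The naive loop in bitwise_and_on_range above is O(n) in the size of the range; the usual speed-up keeps only the common binary prefix of start and end, since every lower bit is ANDed with a 0 somewhere in the range. A sketch of that O(log n) variant (bitwise_and_on_range_fast is a hypothetical name):

def bitwise_and_on_range_fast(start: int, end: int) -> int:
    # shift both ends right until they agree; the surviving bits are the
    # common prefix, which is the AND of every integer in [start, end]
    shift = 0
    while start < end:
        start >>= 1
        end >>= 1
        shift += 1
    return start << shift

assert bitwise_and_on_range_fast(126, 127) == 126
assert bitwise_and_on_range_fast(127, 215) == 0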
Create\nan algorithm to find the nth sevenish number.\n\"\"\"\n\n\ndef get_nth_sevenish_num(number: int) -> int:\n curr = 1\n curr_iteration = 1\n while curr < number:\n curr_iteration += 1\n curr += curr_iteration\n\n curr -= curr_iteration\n result = 7 ** (curr_iteration - 1)\n curr_to_add = 1\n\n for _ in range(number - curr - 1):\n result += curr_to_add\n curr_to_add *= 7\n return result\n\n\nif __name__ == \"__main__\":\n print(get_nth_sevenish_num(1)) # 1 = 7 ^ 0\n print(get_nth_sevenish_num(2)) # 7 = 7 ^ 1\n print(get_nth_sevenish_num(3)) # 8 = 7 ^ 0 + 7 ^ 1\n print(get_nth_sevenish_num(4)) # 49 = 7 ^ 2\n print(get_nth_sevenish_num(5)) # 50 = 7 ^ 0 + 7 ^ 2\n print(get_nth_sevenish_num(6)) # 57 = 7 ^ 0 + 7 ^ 1 + 7 ^ 2\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(log(n))\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "9969779", "language": "Python", "matching_score": 1.1496552228927612, "max_stars_count": 70, "path": "Solutions/221.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a 32-bit positive integer N, determine whether it is a power of four in faster\nthan O(log N) time.\n\"\"\"\n\n# for details visit: https://stackoverflow.com/a/19611541/8650340\n\n\ndef is_power_of_4(num: int) -> bool:\n return ((num & -num) & 0x55555554) == num\n\n\nif __name__ == \"__main__\":\n print(is_power_of_4(2))\n print(is_power_of_4(4)) # 4 ^ 1\n print(is_power_of_4(8))\n print(is_power_of_4(16)) # 4 ^ 2\n print(is_power_of_4(32))\n print(is_power_of_4(64)) # 4 ^ 3\n print(is_power_of_4(128))\n print(is_power_of_4(256)) # 4 ^ 4\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(1)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "6041006", "language": "Python", "matching_score": 1.1350798606872559, "max_stars_count": 70, "path": "Solutions/268.py" }, { "content": "\"\"\"\nProblem:\n\nImplement integer exponentiation. That is, implement the pow(x, y) function, where x\nand y are integers and returns x^y.\n\nDo this faster than the naive method of repeated multiplication.\n\nFor example, pow(2, 10) should return 1024.\n\"\"\"\n\n\ndef pow(base: int, power: int) -> int:\n if power == 0:\n return 1\n\n if power % 2 != 0:\n return pow((base * base), power // 2) * base\n return pow((base * base), power // 2)\n\n\nif __name__ == \"__main__\":\n print(pow(2, 10))\n print(pow(3, 4))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(log(n))\nSPACE COMPLEXITY: O(log(n)) [recursion depth]\n\"\"\"\n", "id": "12011602", "language": "Python", "matching_score": 0.9850090742111206, "max_stars_count": 70, "path": "Solutions/061.py" }, { "content": "\"\"\"\nProblem:\n\nYou are given a binary tree in a peculiar string representation. Each node is written\nin the form (lr), where l corresponds to the left child and r corresponds to the right\nchild.\n\nIf either l or r is null, it will be represented as a zero. 
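A well-known O(log n) enumeration of sums of distinct powers of 7 reads the binary digits of n as base-7 digits: bit i of n set means 7**i contributes, and since 7**k exceeds the sum of all lower powers this walks the sevenish numbers in increasing order. The run-length grouping in get_nth_sevenish_num above drifts from that order for larger n (it yields 57 for n = 6, while 56 = 7 + 49 is the sixth sevenish number). A minimal sketch (nth_sevenish is a hypothetical name):

def nth_sevenish(n: int) -> int:
    # bit i of n set means 7**i contributes to the result
    result, power = 0, 1
    while n:
        if n & 1:
            result += power
        n >>= 1
        power *= 7
    return result

assert [nth_sevenish(k) for k in range(1, 7)] == [1, 7, 8, 49, 50, 56]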
Otherwise, it will be\nrepresented by a new (lr) pair.\n\nHere are a few examples:\n\nA root node with no children: (00)\nA root node with two children: ((00)(00))\nAn unbalanced tree with three consecutive left children: ((((00)0)0)0)\nGiven this representation, determine the depth of the tree.\n\"\"\"\n\n\ndef get_depth(tree_representation: str) -> int:\n depth, max_depth = 0, 0\n for char in tree_representation:\n if char == \"(\":\n # entering a node (depth addition)\n depth += 1\n elif char == \")\":\n # exiting a node (depth subtraction)\n depth -= 1\n max_depth = max(max_depth, depth)\n return max_depth\n\n\nif __name__ == \"__main__\":\n print(get_depth(\"(00)\"))\n print(get_depth(\"((00)(00))\"))\n print(get_depth(\"((((00)0)0)0)\"))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "967014", "language": "Python", "matching_score": 1.9154950380325317, "max_stars_count": 70, "path": "Solutions/357.py" }, { "content": "\"\"\"\nProblem:\n\nThe horizontal distance of a binary tree node describes how far left or right the node\nwill be when the tree is printed out.\n\nMore rigorously, we can define it as follows:\n\nThe horizontal distance of the root is 0.\nThe horizontal distance of a left child is hd(parent) - 1.\nThe horizontal distance of a right child is hd(parent) + 1.\nFor example, for the following tree, hd(1) = -2, and hd(6) = 0.\n\n 5\n / \\\n 3 7\n / \\ / \\\n 1 4 6 9\n / /\n 0 8\nThe bottom view of a tree, then, consists of the lowest node at each horizontal\ndistance. If there are two nodes at the same depth and horizontal distance, either is\nacceptable.\n\nFor this tree, for example, the bottom view could be [0, 1, 3, 6, 8, 9].\n\nGiven the root to a binary tree, return its bottom view.\n\"\"\"\n\nfrom typing import Dict, List, Tuple\n\nfrom DataStructures.Tree import Node, BinaryTree\n\n\ndef get_bottom_view_helper(\n node: Node, depth: int, hd: int, accumulator: Dict[int, Tuple[int, int]]\n) -> Dict[int, Tuple[int, int]]:\n if hd not in accumulator:\n accumulator[hd] = (depth, node.val)\n elif accumulator[hd][0] <= depth:\n accumulator[hd] = (depth, node.val)\n\n if node.left:\n get_bottom_view_helper(node.left, depth + 1, hd - 1, accumulator)\n if node.right:\n get_bottom_view_helper(node.right, depth + 1, hd + 1, accumulator)\n return accumulator\n\n\ndef get_bottom_view(tree: BinaryTree) -> List[int]:\n data = get_bottom_view_helper(tree.root, 0, 0, {})\n res_arr = [(hd, data[hd][1]) for hd in data]\n res_arr.sort(key=lambda elem: elem[0])\n return [elem for _, elem in res_arr]\n\n\nif __name__ == \"__main__\":\n tree = BinaryTree()\n tree.root = Node(5)\n\n tree.root.left = Node(3)\n tree.root.right = Node(7)\n\n tree.root.left.left = Node(1)\n tree.root.left.right = Node(4)\n\n tree.root.right.left = Node(6)\n tree.root.right.right = Node(9)\n\n tree.root.left.left.left = Node(0)\n\n tree.root.right.right.left = Node(8)\n\n print(tree)\n print(get_bottom_view(tree))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "7360413", "language": "Python", "matching_score": 2.1027464866638184, "max_stars_count": 70, "path": "Solutions/215.py" }, { "content": "\"\"\"\nProblem:\n\nTwo nodes in a binary tree can be called cousins if they are on the same level of the\ntree but have different parents. 
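Since the bottom view keeps, per horizontal distance, the node seen lowest, a breadth-first traversal gives the same answer as the depth-tracking dictionary above with less bookkeeping: later levels simply overwrite earlier ones. A sketch on plain (value, left, right) tuples, independent of the DataStructures package assumed above (bottom_view is a hypothetical name):

from collections import deque

def bottom_view(root):
    # BFS so that deeper nodes overwrite shallower ones at the same
    # horizontal distance
    lowest = {}
    queue = deque([(root, 0)])
    while queue:
        node, hd = queue.popleft()
        if node is None:
            continue
        value, left, right = node
        lowest[hd] = value
        queue.append((left, hd - 1))
        queue.append((right, hd + 1))
    return [lowest[hd] for hd in sorted(lowest)]

tree = (5, (3, (1, (0, None, None), None), (4, None, None)),
           (7, (6, None, None), (9, (8, None, None), None)))
print(bottom_view(tree))  # [0, 1, 3, 6, 8, 9]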
For example, in the following diagram 4 and 6 are\ncousins.\n\n 1\n / \\\n 2 3\n / \\ \\\n4 5 6\nGiven a binary tree and a particular node, find all cousins of that node.\n\"\"\"\n\nfrom typing import List, Optional\n\nfrom DataStructures.Tree import BinaryTree, Node\n\n\ndef get_depth_dfs_helper(\n node: Node, search_node_val: int, depth: int, parent_val: Optional[int] = None\n) -> Optional[int]:\n if node.val == search_node_val:\n return depth, parent_val\n if node.left:\n left_depth, parent = get_depth_dfs_helper(\n node.left, search_node_val, depth + 1, node\n )\n if left_depth:\n return left_depth, parent\n if node.right:\n right_depth, parent = get_depth_dfs_helper(\n node.right, search_node_val, depth + 1, node\n )\n if right_depth:\n return right_depth, parent\n return None, None\n\n\ndef get_node_by_depth(\n node: Node,\n curr_depth: int,\n depth: int,\n search_node_val: int,\n accumulator: int,\n ignore_parent_val: int,\n parent_val: Optional[int] = None,\n) -> None:\n # getting all nodes where the depth is equal to the input depth (except the node\n # with black-listed parent [\"ignore_parent_val\"])\n if parent_val == ignore_parent_val:\n return\n if node.val == search_node_val:\n return\n if curr_depth == depth:\n accumulator.append(node.val)\n return\n if node.left:\n get_node_by_depth(\n node.left,\n curr_depth + 1,\n depth,\n search_node_val,\n accumulator,\n ignore_parent_val,\n node,\n )\n if node.right:\n get_node_by_depth(\n node.right,\n curr_depth + 1,\n depth,\n search_node_val,\n accumulator,\n ignore_parent_val,\n node,\n )\n\n\ndef dfs_get_depth(tree: BinaryTree, search_node_val: int):\n return get_depth_dfs_helper(tree.root, search_node_val, 0)\n\n\ndef get_cousins(tree: BinaryTree, node_val: int) -> List[int]:\n depth, parent = dfs_get_depth(tree, node_val)\n if depth is None:\n raise ValueError(\"Node not present in Tree\")\n cousins = []\n get_node_by_depth(tree.root, 0, depth, node_val, cousins, parent)\n return cousins\n\n\nif __name__ == \"__main__\":\n tree = BinaryTree()\n\n tree.root = Node(1)\n\n tree.root.left = Node(2)\n tree.root.right = Node(3)\n\n tree.root.left.left = Node(4)\n tree.root.left.right = Node(5)\n\n tree.root.right.right = Node(6)\n\n print(tree)\n print(get_cousins(tree, 4))\n\n tree.root.right.left = Node(7)\n\n print(tree)\n print(get_cousins(tree, 4))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "12773058", "language": "Python", "matching_score": 2.692246437072754, "max_stars_count": 70, "path": "Solutions/284.py" }, { "content": "\"\"\"\nProblem:\n\nIn Ancient Greece, it was common to write text with the first line going left to right,\nthe second line going right to left, and continuing to go back and forth. 
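The recursive walkers above thread depth, parent, and a blacklist through several parameters; a single level-order pass that records (depth, parent) per value makes the cousin rule direct. A sketch on (value, left, right) tuples, assuming node values are unique (cousins is a hypothetical helper):

from collections import deque

def cousins(root, target):
    # record each value's depth and parent value; cousins share the target's
    # depth but not its parent
    info = {}
    queue = deque([(root, 0, None)])
    while queue:
        node, depth, parent = queue.popleft()
        if node is None:
            continue
        value, left, right = node
        info[value] = (depth, parent)
        queue.append((left, depth + 1, value))
        queue.append((right, depth + 1, value))
    depth, parent = info[target]
    return [v for v, (d, p) in info.items() if d == depth and p != parent]

tree = (1, (2, (4, None, None), (5, None, None)), (3, None, (6, None, None)))
print(cousins(tree, 4))  # [6]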
This style\nwas called \"boustrophedon\".\n\nGiven a binary tree, write an algorithm to print the nodes in boustrophedon order.\n\nFor example, given the following tree:\n\n 1\n / \\\n 2 3\n / \\ / \\\n4 5 6 7\n\nYou should return [1, 3, 2, 4, 5, 6, 7].\n\"\"\"\n\nfrom typing import Dict, List\nfrom DataStructures.Tree import Node, BinaryTree\n\n\ndef get_boustrophedon_helper(\n node: Node, level: int, accumulator: Dict[int, List[int]]\n) -> None:\n # using dfs to store a list of values by level\n if level not in accumulator:\n accumulator[level] = []\n accumulator[level].append(node.val)\n if node.left:\n get_boustrophedon_helper(node.left, level + 1, accumulator)\n if node.right:\n get_boustrophedon_helper(node.right, level + 1, accumulator)\n\n\ndef get_boustrophedon(tree: BinaryTree) -> List[int]:\n if not tree.root:\n return []\n # generating the nodes by level\n level_data = {}\n get_boustrophedon_helper(tree.root, 1, level_data)\n result = []\n # adding the even levels in reverse order in the result\n for level in sorted(list(level_data.keys())):\n if level % 2 == 0:\n result.extend(reversed(level_data[level]))\n else:\n result.extend(level_data[level])\n return result\n\n\nif __name__ == \"__main__\":\n tree = BinaryTree()\n tree.root = Node(1)\n\n tree.root.left = Node(2)\n tree.root.right = Node(3)\n\n tree.root.left.left = Node(4)\n tree.root.left.right = Node(5)\n\n tree.root.right.left = Node(6)\n tree.root.right.right = Node(7)\n\n print(get_boustrophedon(tree))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "4237257", "language": "Python", "matching_score": 1.9486463069915771, "max_stars_count": 70, "path": "Solutions/258.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a complete binary tree, count the number of nodes in faster than O(n) time.\nRecall that a complete binary tree has every level filled except the last, and the\nnodes in the last level are filled starting from the left.\n\"\"\"\n\nfrom typing import Optional\n\nfrom DataStructures.Tree import BinaryTree, Node\n\n\ndef get_num_node_complete_bin_tree_helper(\n node: Node,\n left_route_levels: Optional[int] = None,\n right_route_levels: Optional[int] = None,\n) -> int:\n # generating the route levels incase it is not passed to the function\n if left_route_levels is None:\n curr_node = node\n while curr_node:\n curr_node = curr_node.left\n left_route_levels += 1\n if right_route_levels is None:\n curr_node = node\n while curr_node:\n curr_node = curr_node.right\n right_route_levels += 1\n # checking if the binary tree is completely filled\n if left_route_levels == right_route_levels:\n return pow(2, left_route_levels) - 1\n # getting the number of nodes in the sub-trees\n left_route_nodes, right_route_nodes = 0, 0\n if node.left:\n left_route_nodes = get_num_node_complete_bin_tree_helper(\n node.left, left_route_levels - 1, 0\n )\n if node.right:\n right_route_nodes = get_num_node_complete_bin_tree_helper(\n node.right, 0, right_route_levels - 1\n )\n return left_route_nodes + right_route_nodes + 1\n\n\ndef get_num_node_complete_bin_tree(tree: BinaryTree) -> int:\n if not tree.root:\n return 0\n return get_num_node_complete_bin_tree_helper(tree.root)\n\n\nif __name__ == \"__main__\":\n tree = BinaryTree()\n\n tree.root = Node(1)\n\n tree.root.left = Node(2)\n tree.root.right = Node(3)\n\n tree.root.left.left = Node(4)\n tree.root.left.right = Node(5)\n\n tree.root.right.left = Node(6)\n\n print(get_num_node_complete_bin_tree(tree))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: 
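The zigzag order can also be produced in one pass by walking the tree level by level and reversing every other level as it is emitted, instead of buffering all levels in a dictionary first. A sketch on (value, left, right) tuples (boustrophedon is a hypothetical name):

def boustrophedon(root):
    if root is None:
        return []
    result = []
    level = [root]
    left_to_right = True
    while level:
        values = [value for value, _, _ in level]
        result.extend(values if left_to_right else reversed(values))
        left_to_right = not left_to_right
        # collect the next level's non-empty children
        level = [child for _, l, r in level for child in (l, r) if child]
    return result

tree = (1, (2, (4, None, None), (5, None, None)),
           (3, (6, None, None), (7, None, None)))
print(boustrophedon(tree))  # [1, 3, 2, 4, 5, 6, 7]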
O(log(n))\nSPACE COMPLEXITY: O(log(n))\n\"\"\"\n", "id": "2926883", "language": "Python", "matching_score": 2.1630916595458984, "max_stars_count": 70, "path": "Solutions/204.py" }, { "content": "\"\"\"\nProbelm:\n\nRecall that a full binary tree is one in which each node is either a leaf node, or has\ntwo children. Given a binary tree, convert it to a full one by removing nodes with only\none child.\n\nFor example, given the following tree:\n\n a\n / \\\n b c\n / \\\nd e\n \\ / \\\n f g h\nYou should convert it to:\n\n a\n / \\\nf e\n / \\\n g h\n\"\"\"\n\nfrom DataStructures.Tree import Node, BinaryTree\n\n\ndef create_full_bin_tree_helper(node: Node) -> None:\n # if a node with one missing child is encountered, the value is replaced by its\n # child and the children of the current node overwritten with the child's children\n if node.right is None and node.left is None:\n return\n elif node.left is not None and node.right is None:\n node.val = node.left.val\n node.right = node.left.right\n node.left = node.left.left\n create_full_bin_tree_helper(node)\n elif node.left is None and node.right is not None:\n node.val = node.right.val\n node.left = node.right.left\n node.right = node.right.right\n create_full_bin_tree_helper(node)\n elif node.left is not None and node.right is not None:\n create_full_bin_tree_helper(node.left)\n create_full_bin_tree_helper(node.right)\n\n\ndef create_full_bin_tree(tree: BinaryTree) -> None:\n if tree.root:\n create_full_bin_tree_helper(tree.root)\n\n\nif __name__ == \"__main__\":\n tree = BinaryTree()\n tree.root = Node(\"a\")\n\n tree.root.left = Node(\"b\")\n tree.root.right = Node(\"c\")\n\n tree.root.left.left = Node(\"d\")\n tree.root.left.left.right = Node(\"f\")\n\n tree.root.right.right = Node(\"e\")\n\n tree.root.right.right.left = Node(\"g\")\n tree.root.right.right.right = Node(\"h\")\n\n print(tree)\n\n create_full_bin_tree(tree)\n\n print(tree)\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(log(n))\n\"\"\"\n", "id": "3696866", "language": "Python", "matching_score": 1.7291035652160645, "max_stars_count": 70, "path": "Solutions/254.py" }, { "content": "\"\"\"\nProblem:\n\nA unival tree (which stands for \"universal value\") is a tree where all nodes under it\nhave the same value.\n\nGiven the root to a binary tree, count the number of unival subtrees.\n\nFor example, the following tree has 5 unival subtrees:\n\n 0\n / \\\n 1 0\n / \\\n 1 0\n / \\\n 1 1\n\"\"\"\n\nfrom typing import Tuple\n\nfrom DataStructures.Tree import Node, BinaryTree\n\n\ndef num_universal_helper(node: Node, val: int, acc: int = 0) -> Tuple[int, bool]:\n # base case for recursion [leaf node]\n if node.left is None and node.right is None:\n if node.val == val:\n return (acc + 1), True\n return (acc + 1), False\n # if the value matches the parent's value, its children are also checked\n elif node.val == val:\n if node.left:\n acc, res1 = num_universal_helper(node.left, val, acc)\n else:\n res1 = True\n if node.right:\n acc, res2 = num_universal_helper(node.right, val, acc)\n else:\n res2 = True\n if res1 and res2:\n acc += 1\n # If the value doesn't match the parent's value, its children are checked with the\n # new value (value of the current node)\n else:\n if node.left:\n acc, res1 = num_universal_helper(node.left, node.val, acc)\n else:\n res1 = True\n if node.right:\n acc, res2 = num_universal_helper(node.right, node.val, acc)\n else:\n res2 = True\n if res1 and res2:\n acc += 1\n return acc, (node.val == val)\n\n\ndef num_universal(tree: BinaryTree) -> 
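Note that get_num_node_complete_bin_tree_helper above initializes left_route_levels and right_route_levels to None and then increments them inside the while loops, so the first call raises a TypeError; the counters need to start at 0 before the descent. A compact standalone version of the same idea on (value, left, right) tuples (count_complete is a hypothetical name):

def count_complete(root):
    # root is a (value, left, right) tuple or None
    def edge_height(node, side):
        height = 0
        while node:
            node = node[side]  # side 1 = left child, side 2 = right child
            height += 1
        return height

    if root is None:
        return 0
    left, right = edge_height(root, 1), edge_height(root, 2)
    if left == right:
        return 2 ** left - 1  # perfectly full subtree
    return 1 + count_complete(root[1]) + count_complete(root[2])

tree = (1, (2, (4, None, None), (5, None, None)), (3, (6, None, None), None))
print(count_complete(tree))  # 6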
int:\n if not tree.root:\n return 0\n result, _ = num_universal_helper(tree.root, tree.root.val)\n return result\n\n\nif __name__ == \"__main__\":\n tree = BinaryTree()\n\n tree.root = Node(0)\n tree.root.left = Node(1)\n tree.root.right = Node(0)\n\n tree.root.right.left = Node(1)\n tree.root.right.right = Node(0)\n\n tree.root.right.left.left = Node(1)\n tree.root.right.left.right = Node(1)\n\n print(tree)\n\n print(num_universal(tree))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(log(n)) [call stack]\n\"\"\"\n", "id": "2421856", "language": "Python", "matching_score": 2.515432596206665, "max_stars_count": 70, "path": "Solutions/008.py" }, { "content": "\"\"\"\nProblem:\n\nInvert a binary tree.\n\nFor example, given the following tree:\n\n a\n / \\\n b c\n / \\ /\nd e f\nshould become:\n\n a\n / \\\n c b\n \\ / \\\n f e d\n\"\"\"\n\nfrom DataStructures.Tree import BinaryTree, Node\n\n\ndef invert_helper(node: Node) -> None:\n node.right, node.left = node.left, node.right\n # recursively inverting the children\n if node.right is not None:\n invert_helper(node.right)\n if node.left is not None:\n invert_helper(node.left)\n\n\ndef invert(tree: BinaryTree) -> None:\n # inverts the tree in place\n if not tree.root:\n return\n invert_helper(tree.root)\n\n\nif __name__ == \"__main__\":\n tree = BinaryTree()\n tree.root = Node(\"a\")\n\n tree.root.left = Node(\"b\")\n tree.root.right = Node(\"c\")\n\n tree.root.left.left = Node(\"d\")\n tree.root.left.right = Node(\"e\")\n\n tree.root.right.left = Node(\"f\")\n\n print(tree)\n\n invert(tree)\n\n print(tree)\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(log(n))\n\"\"\"\n", "id": "11473771", "language": "Python", "matching_score": 2.7205216884613037, "max_stars_count": 70, "path": "Solutions/083.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a binary tree where all nodes are either 0 or 1, prune the tree so that subtrees\ncontaining all 0s are removed.\n\nFor example, given the following tree:\n\n 0\n / \\\n 1 0\n / \\\n 1 0\n / \\\n 0 0\nshould be pruned to:\n\n 0\n / \\\n 1 0\n /\n 1\nWe do not remove the tree at the root or its left child because it still has a 1 as a\ndescendant.\n\"\"\"\n\nfrom DataStructures.Tree import Node, BinaryTree\n\n\ndef prune_helper(node: Node) -> None:\n if node.left:\n prune_helper(node.left)\n if node.left.val == 0:\n if not node.left.left and not node.left.right:\n temp = node.left\n node.left = None\n del temp\n if node.right:\n prune_helper(node.right)\n if node.right.val == 0:\n if not node.right.left and not node.right.right:\n temp = node.right\n node.right = None\n del temp\n\n\ndef prune(tree: BinaryTree) -> BinaryTree:\n if tree.root:\n prune_helper(tree.root)\n return tree\n\n\nif __name__ == \"__main__\":\n tree = BinaryTree()\n tree.root = Node(0)\n\n tree.root.left = Node(1)\n tree.root.right = Node(0)\n\n tree.root.right.left = Node(1)\n tree.root.right.right = Node(0)\n\n tree.root.right.left.left = Node(0)\n tree.root.right.left.right = Node(0)\n\n print(tree)\n print(prune(tree))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(log(n))\n\"\"\"\n", "id": "3114664", "language": "Python", "matching_score": 1.4174671173095703, "max_stars_count": 70, "path": "Solutions/146.py" }, { "content": "\"\"\"\nProblem:\n\nImplement locking in a binary tree. 
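Pruning is often written as a post-order function that returns the possibly-deleted subtree, which removes the duplicated left/right bookkeeping of prune_helper above. A sketch on (value, left, right) tuples (prune_tuple is a hypothetical name):

def prune_tuple(node):
    # post-order: prune children first, then drop this node if it is a
    # 0-valued leaf after pruning
    if node is None:
        return None
    value, left, right = node
    left, right = prune_tuple(left), prune_tuple(right)
    if value == 0 and left is None and right is None:
        return None
    return (value, left, right)

tree = (0, (1, None, None),
           (0, (1, (0, None, None), (0, None, None)), (0, None, None)))
print(prune_tuple(tree))  # (0, (1, None, None), (0, (1, None, None), None))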
A binary tree node can be locked or unlocked only\nif all of its descendants or ancestors are not locked.\n\nDesign a binary tree node class with the following methods:\n\nis_locked, which returns whether the node is locked. lock, which attempts to lock the\nnode. If it cannot be locked, then it should return false. Otherwise, it should lock it\nand return true. unlock, which unlocks the node. If it cannot be unlocked, then it\nshould return false. Otherwise, it should unlock it and return true. You may augment\nthe node to add parent pointers or any other property you would like. You may assume\nthe class is used in a single-threaded program, so there is no need for actual locks or\nmutexes. Each method should run in O(h), where h is the height of the tree.\n\"\"\"\n\nfrom DataStructures.Tree import Node, BinaryTree\n\n\nclass NodeWithLock(Node):\n \"\"\"\n Binary Tree Node with locking mechanism\n\n Functions:\n is_locked: check if the current node is locked\n lock: locks the current node\n unlock: unlocks the current node\n _is_any_parent_unlocked: helper function to check if any parent is unlocked\n _is_any_descendant_unlocked: helper function to check if any of the descendant is\n unlocked\n \"\"\"\n\n def __init__(self, val: int) -> None:\n Node.__init__(self, val)\n self.locked = False\n self.parent = None\n\n def __str__(self) -> str:\n curr_node = f\"{self.val}, {'locked' if self.locked else 'unlocked'}\"\n left, right = \"\", \"\"\n if self.left:\n left = f\"{self.left} \"\n if self.right:\n right = f\" {self.right}\"\n return f\"({left} {curr_node} {right})\"\n\n def is_locked(self) -> bool:\n return self.locked\n\n def lock(self) -> bool:\n is_any_parent_unlocked = self._is_any_parent_unlocked()\n is_any_descendant_unlocked = self._is_any_descendant_unlocked()\n if is_any_parent_unlocked or is_any_descendant_unlocked:\n self.locked = True\n return True\n return False\n\n def unlock(self) -> bool:\n is_any_parent_unlocked = self._is_any_parent_unlocked()\n is_any_descendant_unlocked = self._is_any_descendant_unlocked()\n if is_any_parent_unlocked or is_any_descendant_unlocked:\n self.locked = False\n return True\n return False\n\n def _is_any_parent_unlocked(self) -> bool:\n # time complexity: O(log(n))\n node = self\n while node.parent:\n if not node.is_locked():\n return True\n node = node.parent\n return False\n\n def _is_any_descendant_unlocked(self) -> bool:\n # time complexity: O(log(n))\n if not self.is_locked():\n return True\n\n if self.left:\n left = self.left._is_any_descendant_unlocked()\n else:\n left = False\n if self.right:\n right = self.right._is_any_descendant_unlocked()\n else:\n right = False\n return left or right\n\n\nif __name__ == \"__main__\":\n tree = BinaryTree()\n tree.root = NodeWithLock(5)\n\n tree.root.left = NodeWithLock(3)\n tree.root.left.parent = tree.root\n tree.root.right = NodeWithLock(18)\n tree.root.right.parent = tree.root\n\n tree.root.left.left = NodeWithLock(0)\n tree.root.left.left.parent = tree.root.left\n\n print(tree)\n\n print()\n print(tree.root.left.left.lock())\n print(tree.root.left.lock())\n print(tree.root.lock())\n print()\n\n print(tree)\n\n print()\n print(tree.root.left.unlock())\n print()\n\n print(tree)\n\n print()\n print(tree.root.unlock())\n print()\n\n print(tree)\n", "id": "9178866", "language": "Python", "matching_score": 2.9436440467834473, "max_stars_count": 70, "path": "Solutions/024.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a binary tree, find the lowest common ancestor (LCA) of two given nodes in 
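As written, lock() and unlock() above succeed when some ancestor or descendant (including the node itself) is unlocked, which inverts the stated rule: an operation is allowed only when no ancestor and no descendant is locked. The usual O(h) fix keeps a count of locked descendants on every node so the descendant check does not have to walk the subtree. A minimal corrected sketch, independent of the Node class above (LockNode is a hypothetical name):

class LockNode:
    def __init__(self, parent=None):
        self.parent = parent
        self.locked = False
        self.locked_descendants = 0  # count of locked nodes below this one

    def _ancestor_locked(self):
        node = self.parent
        while node:  # O(h) walk up
            if node.locked:
                return True
            node = node.parent
        return False

    def _update_ancestors(self, delta):
        node = self.parent
        while node:  # O(h) walk up
            node.locked_descendants += delta
            node = node.parent

    def is_locked(self):
        return self.locked

    def lock(self):
        if self.locked or self.locked_descendants or self._ancestor_locked():
            return False
        self.locked = True
        self._update_ancestors(1)
        return True

    def unlock(self):
        # a legally locked node can have no locked ancestor or descendant,
        # so only its own state needs checking
        if not self.locked:
            return False
        self.locked = False
        self._update_ancestors(-1)
        return True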
the\ntree. Assume that each node in the tree also has a pointer to its parent.\n\nAccording to the definition of LCA on Wikipedia: \"The lowest common ancestor is defined\nbetween two nodes v and w as the lowest node in T that has both v and w as descendants\n(where we allow a node to be a descendant of itself).\"\n\"\"\"\n\nfrom __future__ import annotations\nfrom typing import Optional\n\nfrom DataStructures.Tree import BinaryTree\n\n\nclass Node:\n def __init__(self, val: int, parent: Optional[Node] = None) -> None:\n self.val = val\n self.left = None\n self.right = None\n self.parent = parent\n\n def __eq__(self, other: Node) -> bool:\n return self is other\n\n def __hash__(self) -> int:\n return hash(self.val)\n\n def __repr__(self) -> str:\n return self.str_representation()\n\n def str_representation(self) -> str:\n if self.right is None and self.left is None:\n return f\"('{self.val}')\"\n elif self.left is not None and self.right is None:\n return f\"({self.left.str_representation()}, '{self.val}', None)\"\n elif self.left is not None and self.right is not None:\n return (\n f\"({self.left.str_representation()},\"\n + f\" '{self.val}', {self.right.str_representation()})\"\n )\n elif self.left is None and self.right is not None:\n return f\"(None, '{self.val}', {self.right.str_representation()})\"\n\n\ndef get_lca(node1: Node, node2: Node) -> Optional[Node]:\n node1_ancestors = set()\n node = node1\n while node:\n node1_ancestors.add(node)\n node = node.parent\n node = node2\n while node:\n if node in node1_ancestors:\n return node\n node = node.parent\n return None\n\n\nif __name__ == \"__main__\":\n tree = BinaryTree()\n\n a = Node(1)\n b = Node(2, parent=a)\n c = Node(3, parent=a)\n d = Node(4, parent=b)\n e = Node(5, parent=b)\n f = Node(6, parent=c)\n g = Node(7, parent=c)\n\n tree.root = a\n\n tree.root.left = b\n tree.root.right = c\n\n tree.root.left.left = d\n tree.root.left.right = e\n\n tree.root.right.left = f\n tree.root.right.right = g\n\n print(tree)\n\n print(get_lca(f, g))\n print(get_lca(a, g))\n print(get_lca(d, g))\n print(get_lca(a, c))\n print(get_lca(e, b))\n print(get_lca(a, Node(8)))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(log(n))\nSPACE COMPLEXITY: O(log(n))\n\"\"\"\n", "id": "277836", "language": "Python", "matching_score": 2.7126615047454834, "max_stars_count": 70, "path": "Solutions/112.py" }, { "content": "from __future__ import annotations\nfrom typing import Any, Optional, Union\n\n\nclass Node:\n \"\"\"\n Node Class for the nodes of a Binary Tree\n\n Functions:\n insert_helper: Helper function to add node in a Binary Search Tree\n height_helper: Helper function to calculate the height of a Binary Tree\n num_nodes_helper: Helper function to calculate the number of Nodes in a Binary Tree\n to_str: Helper function for __repr__\n \"\"\"\n\n def __init__(\n self, val: int, left: Optional[Node] = None, right: Optional[Node] = None\n ) -> None:\n self.val = val\n self.left = left\n self.right = right\n\n def __eq__(self, other: Any) -> bool:\n if type(other) == Node and self.val == other.val:\n return self.left == other.left and self.right == other.right\n return False\n\n def __repr__(self) -> str:\n return self.to_str()\n\n def height_helper(self) -> int:\n # Helper function to calculate the height of a Binary Tree\n # Uses: height = max(left_height, right_height)\n left_height, right_height = 0, 0\n if self.left is not None:\n left_height = self.left.height_helper()\n if self.right is not None:\n right_height = self.right.height_helper()\n return 
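With parent pointers the ancestor set can be avoided entirely: measure both depths, lift the deeper node to the same depth, then climb in lockstep until the pointers meet, giving the same O(h) time with O(1) extra space. A sketch assuming nodes expose only a .parent attribute (depth and lca are hypothetical names):

def depth(node):
    d = 0
    while node:
        node = node.parent
        d += 1
    return d

def lca(a, b):
    # equalize depths, then climb together until the pointers meet
    da, db = depth(a), depth(b)
    while da > db:
        a, da = a.parent, da - 1
    while db > da:
        b, db = b.parent, db - 1
    while a is not b:
        a, b = a.parent, b.parent
    return a  # None when the nodes are in different trees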
max(left_height, right_height) + 1\n\n def insert_helper(self, val: int) -> None:\n # Helper function to add node in a Binary Search Tree\n # Uses: BST property\n # NOTE: Duplicate nodes are not added\n if self.val > val:\n if self.left is None:\n self.left = Node(val)\n else:\n self.left.insert_helper(val)\n elif self.val < val:\n if self.right is None:\n self.right = Node(val)\n else:\n self.right.insert_helper(val)\n\n def num_nodes_helper(self) -> int:\n # Helper function to calculate the number of Nodes in a Binary Tree\n left, right = 0, 0\n if self.left:\n left = self.left.num_nodes_helper()\n if self.right:\n right = self.right.num_nodes_helper()\n return left + right + 1\n\n def to_str(self) -> str:\n # Helper function for __repr__\n # Returns all the childen in case 1 of them is not None, else returns only the\n # value\n if self.right is None and self.left is None:\n return f\"('{self.val}')\"\n elif self.left is not None and self.right is None:\n return f\"({self.left.to_str()}, '{self.val}', null)\"\n elif self.left is None and self.right is not None:\n return f\"(null, '{self.val}', {self.right.to_str()})\"\n elif self.left is not None and self.right is not None:\n return f\"({self.left.to_str()}, '{self.val}', {self.right.to_str()})\"\n\n\nclass BinaryTree:\n \"\"\"\n Binary Tree Class\n\n Functions:\n find_height: Calculate the height of a Binary Tree (uses height_helper in the Node\n Class)\n\n NOTE: This class does not have the add node function and nodes have to be added\n manually\n \"\"\"\n\n def __init__(self) -> None:\n self.root = None\n\n def __eq__(self, other: Any) -> bool:\n if type(other) == BinaryTree:\n return self.root == other.root\n return False\n\n def __len__(self) -> int:\n if self.root:\n return self.root.num_nodes_helper()\n return 0\n\n def __repr__(self) -> str:\n return str(self.root)\n\n def find_height(self) -> int:\n # Calculate the height of a Binary Tree\n if self.root:\n return self.root.height_helper()\n return 0\n\n\nclass BinarySearchTree(BinaryTree):\n \"\"\"\n Binary Tree Class (INHERITS FROM THE BinaryTree CLASS)\n\n Functions:\n add: Add nodes to a Binary Search Tree (uses insert_helper in the Node Class)\n \"\"\"\n\n def __init__(self) -> None:\n BinaryTree.__init__(self)\n\n def add(self, val: Union[int, str]) -> None:\n # Add nodes to a Binary Search Tree\n if self.root is None:\n self.root = Node(val)\n else:\n self.root.insert_helper(val)\n", "id": "4159426", "language": "Python", "matching_score": 2.4959518909454346, "max_stars_count": 70, "path": "Solutions/DataStructures/Tree.py" }, { "content": "\"\"\"\nProblem:\n\nSuppose an arithmetic expression is given as a binary tree. 
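A quick usage sketch for the classes above, assuming they are in scope (values chosen arbitrarily):

tree = BinarySearchTree()
for value in (5, 3, 8, 1, 4):
    tree.add(value)

print(len(tree))           # 5
print(tree.find_height())  # 3
print(tree)                # ((('1'), '3', ('4')), '5', ('8'))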
Each leaf is an integer and\neach internal node is one of '+', '−', '∗', or '/'.\n\nGiven the root to such a tree, write a function to evaluate it.\n\nFor example, given the following tree:\n\n *\n / \\\n + +\n / \\ / \\\n3 2 4 5\nYou should return 45, as it is (3 + 2) * (4 + 5).\n\"\"\"\n\nfrom __future__ import annotations\nfrom typing import Callable, Optional, Union\n\nfrom DataStructures.Tree import BinaryTree, Node\n\n\n# symbol to function map\nOPERATIONS_DICT = {\n \"+\": lambda num1, num2: num1 + num2,\n \"-\": lambda num1, num2: num1 - num2,\n \"*\": lambda num1, num2: num1 * num2,\n \"/\": lambda num1, num2: num1 / num2,\n}\n\n\nclass ExpressionTreeNode(Node):\n def __init__(\n self,\n val: Union[int, float, str, Callable],\n left: Optional[ExpressionTreeNode] = None,\n right: Optional[ExpressionTreeNode] = None,\n ) -> None:\n Node.__init__(self, val, left, right)\n\n def transform_helper(self) -> None:\n if self.val in OPERATIONS_DICT:\n self.val = OPERATIONS_DICT[self.val]\n self.left.transform_helper()\n self.right.transform_helper()\n\n def calculate_helper(self) -> Union[int, float]:\n if callable(self.val):\n return self.val(self.left.calculate_helper(), self.right.calculate_helper())\n return self.val\n\n\ndef calculate_expression_tree(tree: BinaryTree) -> Union[int, float]:\n root = tree.root\n if root:\n root.transform_helper()\n return root.calculate_helper()\n return None\n\n\nif __name__ == \"__main__\":\n tree = BinaryTree()\n\n tree.root = ExpressionTreeNode(\"*\")\n tree.root.left = ExpressionTreeNode(\"+\")\n tree.root.right = ExpressionTreeNode(\"+\")\n\n tree.root.left.left = ExpressionTreeNode(3)\n tree.root.left.right = ExpressionTreeNode(2)\n\n tree.root.right.left = ExpressionTreeNode(4)\n tree.root.right.right = ExpressionTreeNode(5)\n\n print(calculate_expression_tree(tree))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(log(n))\n\"\"\"\n", "id": "10664176", "language": "Python", "matching_score": 1.6255677938461304, "max_stars_count": 70, "path": "Solutions/050.py" }, { "content": "\"\"\"\nProblem:\n\nGiven an arithmetic expression in Reverse Polish Notation, write a program to evaluate\nit.\n\nThe expression is given as a list of numbers and operands. 
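The transform step above rewrites the node values in place; the tree can also be evaluated directly from its string labels without mutating it. A sketch on (value, left, right) tuples (evaluate is a hypothetical name):

OPS = {"+": lambda a, b: a + b, "-": lambda a, b: a - b,
       "*": lambda a, b: a * b, "/": lambda a, b: a / b}

def evaluate(node):
    # leaves hold numbers; internal nodes hold an operator symbol
    value, left, right = node
    if value in OPS:
        return OPS[value](evaluate(left), evaluate(right))
    return value

tree = ("*", ("+", (3, None, None), (2, None, None)),
             ("+", (4, None, None), (5, None, None)))
print(evaluate(tree))  # 45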
For example: [5, 3, '+']\nshould return 5 + 3 = 8.\n\nFor example, [15, 7, 1, 1, '+', '-', '/', 3, '*', 2, 1, 1, '+', '+', '-'] should return\n5, since it is equivalent to ((15 / (7 - (1 + 1))) * 3) - (2 + (1 + 1)) = 5.\n\nYou can assume the given expression is always valid.\n\"\"\"\n\nfrom typing import List, Union\n\nfrom DataStructures.Stack import Stack\n\nFUNCTIONS = {\n    \"+\": lambda a, b: a + b,\n    \"-\": lambda a, b: a - b,\n    \"*\": lambda a, b: a * b,\n    \"/\": lambda a, b: a / b,\n}\n\n\ndef calculate(expression_list: List[Union[int, str]]) -> Union[float, int]:\n    stack = Stack()\n    # calculating the expression\n    for expression in expression_list:\n        if expression in FUNCTIONS:\n            # the operand pushed last is the right one; popping order matters\n            # for the non-commutative operators \"-\" and \"/\"\n            right = stack.pop()\n            left = stack.pop()\n            stack.push(FUNCTIONS[expression](left, right))\n        else:\n            stack.push(expression)\n    return stack[0]\n\n\nif __name__ == \"__main__\":\n    print(calculate([5, 3, \"+\"]))\n    print(calculate([15, 7, 1, 1, \"+\", \"-\", \"/\", 3, \"*\", 2, 1, 1, \"+\", \"+\", \"-\"]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "8930311", "language": "Python", "matching_score": 0.8252730965614319, "max_stars_count": 70, "path": "Solutions/163.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a string of parentheses, write a function to compute the minimum number of\nparentheses to be removed to make the string valid (i.e. each open parenthesis is\neventually closed).\n\nFor example, given the string \"()())()\", you should return 1. Given the string \")(\",\nyou should return 2, since we must remove all of them.\n\"\"\"\n\nfrom copy import deepcopy\nfrom typing import Optional\n\nfrom DataStructures.Stack import Stack\n\n\ndef get_min_parentheses_remove(\n    expression: str, stack: Optional[Stack] = None, num_removed: int = 0\n) -> int:\n    # a fresh stack is created per top-level call (a mutable default argument\n    # would be shared across calls)\n    if stack is None:\n        stack = Stack()\n    if not expression and stack.is_empty():\n        return num_removed\n    elif not expression:\n        return len(stack) + num_removed\n    if (expression[0] == \")\") and (not stack.is_empty() and stack.peek() == \"(\"):\n        stack.pop()\n        return get_min_parentheses_remove(expression[1:], stack, num_removed)\n    # calculating the modifications for parenthesis added to stack\n    stack_copy = deepcopy(stack)\n    stack_copy.push(expression[0])\n    modifications_parenthesis_added_to_stack = get_min_parentheses_remove(\n        expression[1:], stack_copy, num_removed\n    )\n    # calculating the modifications for parenthesis removed\n    modifications_parenthesis_ignored = get_min_parentheses_remove(\n        expression[1:], stack, num_removed + 1\n    )\n    return min(\n        modifications_parenthesis_added_to_stack, modifications_parenthesis_ignored\n    )\n\n\nif __name__ == \"__main__\":\n    print(get_min_parentheses_remove(\"()())()\"))\n    print(get_min_parentheses_remove(\")(\"))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(2 ^ n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "5450527", "language": "Python", "matching_score": 2.140443801879883, "max_stars_count": 70, "path": "Solutions/086.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a string of parentheses, find the balanced string that can be produced from it\nusing the minimum number of insertions and deletions. If there are multiple solutions,\nreturn any of them.\n\nFor example, given \"(()\", you could return \"(())\". 
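The stack-copying recursion in get_min_parentheses_remove above is exponential; counting unmatched parentheses in a single scan gives the same minimum in O(n), since each unmatched ')' and each leftover '(' costs exactly one removal. A sketch (min_removals is a hypothetical name):

def min_removals(string):
    open_unmatched = 0
    removals = 0
    for char in string:
        if char == "(":
            open_unmatched += 1
        elif open_unmatched:
            open_unmatched -= 1  # this ')' matches an earlier '('
        else:
            removals += 1        # unmatched ')'
    return removals + open_unmatched

print(min_removals("()())()"))  # 1
print(min_removals(")("))       # 2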
Given \"))()(\", you could return\n\"()()()()\".\n\"\"\"\n\nfrom copy import deepcopy\nfrom typing import Tuple\n\nfrom DataStructures.Stack import Stack\n\n\ndef get_min_changes_helper(\n string: str, modifications: int, stack: Stack, current: str\n) -> Tuple[int, str]:\n if not string and stack.is_empty():\n return modifications, current\n elif not string:\n additions = len(stack)\n return modifications + additions, current + (\")\" * additions)\n\n if string[0] == \"(\":\n stack_added = deepcopy(stack)\n stack_added.push(\"(\")\n modifications1, string1 = get_min_changes_helper(\n string[1:], modifications, stack_added, current + \"(\"\n ) # adding to stack\n modifications2, string2 = get_min_changes_helper(\n string[1:], modifications + 1, stack, current\n ) # removing from string\n return min(\n [(modifications1, string1), (modifications2, string2)],\n key=lambda tup: tup[0],\n )\n\n if not stack.is_empty():\n stack.pop()\n return get_min_changes_helper(string[1:], modifications, stack, current + \")\")\n return get_min_changes_helper(string[1:], modifications + 1, stack, current)\n\n\ndef get_min_changes(string: str) -> str:\n _, res = get_min_changes_helper(string, 0, Stack(), \"\")\n return res\n\n\nif __name__ == \"__main__\":\n print(get_min_changes(\"(()\"))\n print(get_min_changes(\"))()(\"))\n print(get_min_changes(\"()(()\"))\n print(get_min_changes(\"()(()))\"))\n print(get_min_changes(\")(())\"))\n print(get_min_changes(\"())(\"))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(2 ^ n)\nSPACE COMPLEXITY: O(2 ^ n)\n\"\"\"\n", "id": "4529391", "language": "Python", "matching_score": 1.8414781093597412, "max_stars_count": 70, "path": "Solutions/199.py" }, { "content": "\"\"\"\nProblem:\n\nYou're given a string consisting solely of (, ), and *. * can represent either a (, ),\nor an empty string. Determine whether the parentheses are balanced.\n\nFor example, (()* and (*) are balanced. 
)*( is not balanced.\n\"\"\"\n\nfrom copy import deepcopy\n\nfrom DataStructures.Stack import Stack\n\n\ndef can_balance_parentheses(string: str, stack: Stack = Stack()) -> bool:\n if not string and stack.is_empty():\n return True\n elif not string:\n return False\n # checking if the parentheses can be balanced\n if string[0] == \"(\":\n stack.push(\"(\")\n return can_balance_parentheses(string[1:], stack)\n elif string[0] == \")\":\n if not stack.is_empty() and stack.peek() == \"(\":\n stack.pop()\n return can_balance_parentheses(string[1:], stack)\n return False\n elif string[0] == \"*\":\n return (\n can_balance_parentheses(\"(\" + string[1:], deepcopy(stack))\n or can_balance_parentheses(\")\" + string[1:], deepcopy(stack))\n or can_balance_parentheses(string[1:], deepcopy(stack))\n )\n\n\nif __name__ == \"__main__\":\n print(can_balance_parentheses(\"(()*\", Stack()))\n print(can_balance_parentheses(\"(*)\", Stack()))\n print(can_balance_parentheses(\")*(\", Stack()))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(3 ^ n)\nSPACE COMPLEXITY: O(3 ^ n)\n\"\"\"\n", "id": "10666214", "language": "Python", "matching_score": 1.416266679763794, "max_stars_count": 70, "path": "Solutions/142.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a string of round, curly, and square open and closing brackets, return whether\nthe brackets are balanced (well-formed).\n\nFor example, given the string \"([])\", you should return true.\n\nGiven the string \"([)]\" or \"((()\", you should return false.\n\"\"\"\n\nfrom typing import Dict\n\nfrom DataStructures.Stack import Stack\n\n\ndef is_parenthesis_balanced(\n string: str, parenthesis_map: Dict[str, str] = {\"{\": \"}\", \"[\": \"]\", \"(\": \")\"}\n) -> bool:\n open_parenthesis_set = set(parenthesis_map.keys())\n stack = Stack()\n # iterating through the string and checking if its balanced\n for char in string:\n if char in open_parenthesis_set:\n stack.push(char)\n elif not stack.is_empty() and parenthesis_map[stack.peek()] == char:\n stack.pop()\n else:\n return False\n # the string is balanced only if the stack is empty (equal number of opening and\n # closing parenthesis)\n return stack.is_empty()\n\n\nif __name__ == \"__main__\":\n print(is_parenthesis_balanced(\"([])\"))\n print(is_parenthesis_balanced(\"((([{}])))\"))\n print(is_parenthesis_balanced(\"([])[]({})\"))\n print(is_parenthesis_balanced(\"([)]\"))\n print(is_parenthesis_balanced(\"((()\"))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "8297281", "language": "Python", "matching_score": 1.129941701889038, "max_stars_count": 70, "path": "Solutions/027.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a string, determine whether any permutation of it is a palindrome.\n\nFor example, carrace should return true, since it can be rearranged to form racecar,\nwhich is a palindrome. 
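The three-way branching on '*' above is exponential; the standard linear check tracks the range [low, high] of possible unmatched-'(' counts, treating '*' as -1, 0, or +1. A sketch of that greedy (can_balance is a hypothetical name):

def can_balance(string):
    low = high = 0  # min/max possible number of unmatched '('
    for char in string:
        if char == "(":
            low, high = low + 1, high + 1
        elif char == ")":
            low, high = low - 1, high - 1
        else:  # '*' may act as '(', ')' or the empty string
            low, high = low - 1, high + 1
        if high < 0:
            return False  # too many ')' no matter what '*' becomes
        low = max(low, 0)
    return low == 0

print(can_balance("(()*"))  # True
print(can_balance("(*)"))   # True
print(can_balance(")*("))   # False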
daily should return false, since there's no rearrangement that\ncan form a palindrome.\n\"\"\"\n\n\ndef is_permutation_palindrome(string: str) -> str:\n char_set = set()\n\n for char in string:\n if char in char_set:\n char_set.remove(char)\n else:\n char_set.add(char)\n length = len(char_set)\n if length in (1, 0):\n return True\n return False\n\n\nif __name__ == \"__main__\":\n print(is_permutation_palindrome(\"carrace\"))\n print(is_permutation_palindrome(\"daily\"))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "1386257", "language": "Python", "matching_score": 1.014175534248352, "max_stars_count": 70, "path": "Solutions/157.py" }, { "content": "\"\"\"\nProblem:\n\nA permutation can be specified by an array P, where P[i] represents the location of the\nelement at i in the permutation. For example, [2, 1, 0] represents the permutation\nwhere elements at the index 0 and 2 are swapped.\n\nGiven an array and a permutation, apply the permutation to the array. For example,\ngiven the array [\"a\", \"b\", \"c\"] and the permutation [2, 1, 0], return [\"c\", \"b\", \"a\"].\n\"\"\"\n\nfrom typing import List\n\n\ndef permute(arr: List[str], p: List[int]) -> List[str]:\n for i in range(len(p)):\n p[i] = arr[p[i]]\n return p\n\n\nif __name__ == \"__main__\":\n print(permute([\"a\", \"b\", \"c\"], [2, 1, 0]))\n print(permute([\"a\", \"b\", \"c\", \"d\"], [3, 0, 1, 2]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "7892768", "language": "Python", "matching_score": 0.7644032835960388, "max_stars_count": 70, "path": "Solutions/206.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a string s and a list of words words, where each word is the same length, find\nall starting indices of substrings in s that is a concatenation of every word in words\nexactly once.\n\nFor example, given s = \"dogcatcatcodecatdog\" and words = [\"cat\", \"dog\"], return [0, 13],\nsince \"dogcat\" starts at index 0 and \"catdog\" starts at index 13.\n\nGiven s = \"barfoobazbitbyte\" and words = [\"dog\", \"cat\"], return [] since there are no\nsubstrings composed of \"dog\" and \"cat\" in s.\n\nThe order of the indices does not matter.\n\"\"\"\n\nfrom itertools import permutations as generate_permutations\nfrom re import finditer\nfrom typing import Iterable, List\n\n\ndef concat_iteratable(iterateable: Iterable[str]) -> str:\n concated_value = \"\"\n for elem in iterateable:\n concated_value += elem\n return concated_value\n\n\ndef get_permutation_match_indices(s: str, words: List[str]) -> List[int]:\n permutations = [\n concat_iteratable(permutation)\n for permutation in list(generate_permutations(words))\n ]\n indices = []\n for permutation in permutations:\n indices.extend([match.start() for match in finditer(permutation, s)])\n return indices\n\n\nif __name__ == \"__main__\":\n print(get_permutation_match_indices(\"barfoobazbitbyte\", [\"dog\", \"cat\"]))\n print(get_permutation_match_indices(\"dogcatcatcodecatdog\", [\"cat\", \"dog\"]))\n print(get_permutation_match_indices(\"dogcatcatcodecatdogcat\", [\"cat\", \"dog\"]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(2 ^ m + 2 ^ n)\nSPACE COMPLEXITY: O(n)\n[n = number of characters in input string\n m = number of match words]\n\"\"\"\n", "id": "10858933", "language": "Python", "matching_score": 1.0858850479125977, "max_stars_count": 70, "path": "Solutions/172.py" }, { "content": "\"\"\"\nProblem:\n\nFind an efficient algorithm to find the smallest distance (measured in number of words)\nbetween 
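The set-toggling in is_permutation_palindrome above (whose return annotation, incidentally, should be bool rather than str) is equivalent to requiring at most one character with an odd count, which collections.Counter states directly. A sketch:

from collections import Counter

def is_permutation_palindrome(string):
    # a palindrome permutation allows at most one odd character count
    odd_counts = sum(count % 2 for count in Counter(string).values())
    return odd_counts <= 1

print(is_permutation_palindrome("carrace"))  # True
print(is_permutation_palindrome("daily"))    # False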
any two given words in a string.\n\nFor example, given words \"hello\", and \"world\" and a text content of \"dog cat hello cat\ndog dog hello cat world\", return 1 because there's only one word \"cat\" in between the\ntwo words.\n\"\"\"\n\n\ndef calculate_distance(text: str, word1: str, word2: str) -> int:\n word_list = text.split()\n length = len(word_list)\n distance, position, last_match = None, None, None\n # searching for the smallest distance\n for i in range(length):\n if word_list[i] in (word1, word2):\n if last_match in (word_list[i], None):\n last_match = word_list[i]\n position = i\n continue\n current_distance = i - position - 1\n last_match = word_list[i]\n position = i\n if distance == None:\n distance = current_distance\n else:\n distance = min(distance, current_distance)\n return distance\n\n\nif __name__ == \"__main__\":\n print(\n calculate_distance(\n \"dog cat hello cat dog dog hello cat world\", \"hello\", \"world\"\n )\n )\n print(\n calculate_distance(\"dog cat hello cat dog dog hello cat world\", \"world\", \"dog\")\n )\n print(calculate_distance(\"hello world\", \"hello\", \"world\"))\n print(calculate_distance(\"hello\", \"hello\", \"world\"))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "3445034", "language": "Python", "matching_score": 1.2769570350646973, "max_stars_count": 70, "path": "Solutions/153.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a list of words, determine whether the words can be chained to form a circle. A\nword X can be placed in front of another word Y in a circle if the last character of X\nis same as the first character of Y.\n\nFor example, the words ['chair', 'height', 'racket', 'touch', 'tunic'] can form the\nfollowing circle: chair -> racket -> touch -> height -> tunic -> chair.\n\"\"\"\n\n\nfrom typing import Dict, List, Set\n\n\ndef check_circle_formation_helper(\n word_list: List[str],\n start: Dict[str, Set[str]],\n end: Dict[str, Set[str]],\n curr: str,\n start_word: str,\n seen: Set[str],\n) -> bool:\n if len(seen) == len(word_list):\n if start_word[0] == curr[-1]:\n return True\n return False\n try:\n for word in start[curr[-1]]:\n if word not in seen:\n seen_copy = seen.copy()\n seen_copy.add(word)\n if check_circle_formation_helper(\n word_list, start, end, word, start_word, seen_copy\n ):\n return True\n except KeyError:\n # the current word's last character isn't present in start dictionary\n pass\n return False\n\n\ndef check_circle_formation(word_list: List[str]) -> bool:\n start = {}\n end = {}\n for word in word_list:\n if word[0] not in start:\n start[word[0]] = set()\n start[word[0]].add(word)\n if word[-1] not in end:\n end[word[-1]] = set()\n end[word[-1]].add(word)\n # starting with all words and checking if a circle can be formed\n for word in word_list:\n if check_circle_formation_helper(\n word_list, start, end, word, word, set([word])\n ):\n return True\n return False\n\n\nif __name__ == \"__main__\":\n print(\n check_circle_formation([\"chair\", \"height\", \"racket\", \"touch\", \"tunic\"])\n ) # chair, racket, touch, height, tunic, chair\n print(\n check_circle_formation([\"height\", \"racket\", \"touch\", \"tunic\", \"car\"])\n ) # racket, touch, height, tunic, car, racket\n print(\n check_circle_formation([\"height\", \"racket\", \"touch\", \"tunic\"])\n ) # racket, touch, height, tunic (no looping even though there is a chain)\n print(\n check_circle_formation([\"height\", \"racket\", \"touch\", \"tunic\", \"cat\"])\n ) # no looping\n\n\n\"\"\"\nSPECS:\n\nTIME 
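Tracking the last index of each target word separately avoids the last_match/position state juggling in calculate_distance above. A sketch (smallest_distance is a hypothetical name; like the original, it returns None when either word is absent):

def smallest_distance(text, word1, word2):
    words = text.split()
    best = None
    last1 = last2 = None
    for i, word in enumerate(words):
        if word == word1:
            last1 = i
        elif word == word2:
            last2 = i
        else:
            continue
        if last1 is not None and last2 is not None:
            gap = abs(last1 - last2) - 1
            best = gap if best is None else min(best, gap)
    return best

print(smallest_distance(
    "dog cat hello cat dog dog hello cat world", "hello", "world"))  # 1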
COMPLEXITY: O(n ^ 2)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "2689613", "language": "Python", "matching_score": 0.8445029854774475, "max_stars_count": 70, "path": "Solutions/246.py" }, { "content": "\"\"\"\nProblem:\n\nYou are given a list of N points (x1, y1), (x2, y2), ..., (xN, yN) representing a\npolygon. You can assume these points are given in order; that is, you can construct the\npolygon by connecting point 1 to point 2, point 2 to point 3, and so on, finally\nlooping around to connect point N to point 1.\n\nDetermine if a new point p lies inside this polygon. (If p is on the boundary of the\npolygon, you should return False).\n\"\"\"\n\nfrom typing import List, Tuple\n\nPoint = Tuple[int, int]\n\n\ndef is_inside(points: List[Point], p: Point) -> bool:\n # Using the following concept:\n # if a stright line in drawn from the point p to its right (till infinity), the\n # drawn line will intersect the lines connecting the points odd number of times\n # (if p is enclosed by the points) else the the number of intersections will be\n # even (implying its outside the figure created by the points)\n\n # Details:\n # https://www.geeksforgeeks.org/how-to-check-if-a-given-point-lies-inside-a-polygon\n\n if len(points) in (0, 1, 2):\n return False\n\n x, y = p\n last = points[0]\n intersections = 0\n same_height = set()\n\n for point in points[1:]:\n x1, y1 = last\n x2, y2 = point\n if min(y1, y2) <= y <= max(y1, y2) and x <= min(x1, x2):\n if y2 == y and point not in same_height:\n intersections += 1\n same_height.add(point)\n elif y1 == y and last not in same_height:\n intersections += 1\n same_height.add(last)\n last = point\n\n point = points[0]\n x1, y1 = last\n x2, y2 = point\n if max(y1, y2) >= y >= min(y1, y2) and x <= min(x1, x2):\n if y2 == y and point not in same_height:\n intersections += 1\n same_height.add(point)\n elif y1 == y and last not in same_height:\n intersections += 1\n same_height.add(last)\n if intersections % 2 == 1:\n return True\n return False\n\n\nif __name__ == \"__main__\":\n print(is_inside([(4, 3), (5, 4), (6, 3), (5, 2)], (3, 3)))\n print(is_inside([(4, 3), (5, 4), (6, 3), (5, 2)], (5, 3)))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "12861594", "language": "Python", "matching_score": 0.5857300162315369, "max_stars_count": 70, "path": "Solutions/236.py" }, { "content": "\"\"\"\nProblem:\n\nYou are given a list of data entries that represent entries and exits of groups of\npeople into a building. An entry looks like this:\n\n{\"timestamp\": 1526579928, \"count\": 3, \"type\": \"enter\"}\n\nThis means 3 people entered the building. An exit looks like this:\n\n{\"timestamp\": 1526580382, \"count\": 2, \"type\": \"exit\"}\n\nThis means that 2 people exited the building. timestamp is in Unix time.\n\nFind the busiest period in the building, that is, the time with the most people in the\nbuilding. Return it as a pair of (start, end) timestamps. You can assume the building\nalways starts off and ends up empty, i.e. 
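A more conventional ray-casting test checks, edge by edge, whether a horizontal ray from p crosses the segment, computing the crossing abscissa explicitly; this also covers polygons whose edges are not axis-aligned with p. A hedged sketch (point_in_polygon is a hypothetical name; unlike the stricter contract above, boundary points are only excluded up to floating-point behaviour):

def point_in_polygon(points, p):
    x, y = p
    inside = False
    n = len(points)
    for i in range(n):
        x1, y1 = points[i]
        x2, y2 = points[(i + 1) % n]
        if (y1 > y) != (y2 > y):  # edge straddles the ray's height
            x_cross = x1 + (y - y1) * (x2 - x1) / (y2 - y1)
            if x < x_cross:
                inside = not inside
    return inside

print(point_in_polygon([(4, 3), (5, 4), (6, 3), (5, 2)], (3, 3)))  # False
print(point_in_polygon([(4, 3), (5, 4), (6, 3), (5, 2)], (5, 3)))  # True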
with 0 people inside.\n\"\"\"\n\nfrom typing import Dict, List, Tuple, Union\n\n\ndef calculate_busiest_time(\n list_of_events: List[Dict[str, Union[int, str]]]\n) -> Tuple[int, int]:\n list_of_events.sort(key=lambda event: event[\"timestamp\"])\n people_inside_map = {}\n event = list_of_events[0]\n last_pos = event[\"timestamp\"]\n # people can only enter as the building at the begining\n people_inside_map[last_pos] = event[\"count\"]\n # generating people in the building by timestamp\n for event in list_of_events[1:]:\n count = event[\"count\"]\n if event[\"type\"] == \"enter\":\n curr_people_inside = people_inside_map[last_pos] + count\n else:\n curr_people_inside = people_inside_map[last_pos] - count\n people_inside_map[event[\"timestamp\"]] = curr_people_inside\n last_pos = event[\"timestamp\"]\n # generating the start time\n people_inside_list = list(people_inside_map.items())\n people_inside_list.sort(reverse=True, key=lambda element: element[1])\n start = people_inside_list[0][0]\n people_inside_list.sort(key=lambda element: element[0])\n # generating the end time\n flag = False\n for timestamp, _ in people_inside_list:\n if flag:\n end = timestamp\n break\n if timestamp == start:\n flag = True\n return start, end\n\n\nif __name__ == \"__main__\":\n events = [\n {\"timestamp\": 1526579928, \"count\": 3, \"type\": \"enter\"},\n {\"timestamp\": 1526579982, \"count\": 4, \"type\": \"enter\"},\n {\"timestamp\": 1526580054, \"count\": 5, \"type\": \"exit\"},\n {\"timestamp\": 1526580128, \"count\": 1, \"type\": \"enter\"},\n {\"timestamp\": 1526580382, \"count\": 3, \"type\": \"exit\"},\n ]\n print(calculate_busiest_time(events))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n log(n))\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "11773645", "language": "Python", "matching_score": 1.4194966554641724, "max_stars_count": 70, "path": "Solutions/171.py" }, { "content": "\"\"\"\nProblem:\n\nGiven an array of time intervals (start, end) for classroom lectures (possibly\noverlapping), find the minimum number of rooms required.\n\nFor example, given [(30, 75), (0, 50), (60, 150)], you should return 2.\n\"\"\"\n\nfrom typing import List, Tuple\n\n\ndef minimum_rooms_required(intervals: List[Tuple[int, int]]) -> int:\n delta_room_map = {}\n max_rooms = 0\n curr_rooms = 0\n # updating time map\n for start, end in intervals:\n if start not in delta_room_map:\n delta_room_map[start] = 0\n delta_room_map[start] += 1\n if end not in delta_room_map:\n delta_room_map[end] = 0\n delta_room_map[end] -= 1\n # generating the minimum number of rooms required\n sorted_events = sorted(delta_room_map.items(), key=lambda x: x[0])\n for _, rooms in sorted_events:\n curr_rooms += rooms\n max_rooms = max(max_rooms, curr_rooms)\n return max_rooms\n\n\nif __name__ == \"__main__\":\n print(minimum_rooms_required([(30, 75), (0, 50), (60, 150)]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "4940233", "language": "Python", "matching_score": 0.8602073192596436, "max_stars_count": 70, "path": "Solutions/021.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a collection of intervals, find the minimum number of intervals you need to\nremove to make the rest of the intervals non-overlapping.\n\nIntervals can \"touch\", such as [0, 1] and [1, 2], but they won't be considered\noverlapping.\n\nFor example, given the intervals (7, 9), (2, 4), (5, 8), return 1 as the last interval\ncan be removed and the first two won't overlap.\n\nThe intervals are not necessarily sorted in any 
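The two sorts and the flag scan above can be folded into one pass over the time-ordered events: keep a running occupancy, open a candidate interval whenever a new maximum is reached, and close it at the next event. A sketch (busiest_period is a hypothetical name; it relies on the stated guarantee that the building ends empty, so the maximum never falls on the final event):

def busiest_period(events):
    events = sorted(events, key=lambda e: e["timestamp"])
    occupancy = best = 0
    start = end = None
    for event in events:
        delta = event["count"] if event["type"] == "enter" else -event["count"]
        if start is not None and end is None:
            end = event["timestamp"]  # close the current best interval
        occupancy += delta
        if occupancy > best:
            best, start, end = occupancy, event["timestamp"], None
    return start, end

events = [
    {"timestamp": 1526579928, "count": 3, "type": "enter"},
    {"timestamp": 1526579982, "count": 4, "type": "enter"},
    {"timestamp": 1526580054, "count": 5, "type": "exit"},
    {"timestamp": 1526580128, "count": 1, "type": "enter"},
    {"timestamp": 1526580382, "count": 3, "type": "exit"},
]
print(busiest_period(events))  # (1526579982, 1526580054)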
order.\n\"\"\"\n\nfrom typing import List\n\n\ndef num_overlap(arr: List[int]) -> int:\n time_slot_usage = [False for _ in range(max(arr, key=lambda x: x[1])[1] + 1)]\n overlap_count = 0\n\n for interval in arr:\n start, end = interval\n overlap_flag = True\n for i in range(start, end):\n if not time_slot_usage[i]:\n time_slot_usage[i] = True\n elif overlap_flag:\n overlap_count += 1\n overlap_flag = False\n return overlap_count\n\n\nif __name__ == \"__main__\":\n print(num_overlap([[0, 1], [1, 2]]))\n print(num_overlap([(7, 9), (2, 4), (5, 8)]))\n print(num_overlap([(7, 9), (2, 4), (5, 8), (1, 3)]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n[n = maximum ending time]\n\"\"\"\n", "id": "5113368", "language": "Python", "matching_score": 0.39394423365592957, "max_stars_count": 70, "path": "Solutions/191.py" }, { "content": "\"\"\"\nProblem:\n\nYou are given an array of length 24, where each element represents the number of new\nsubscribers during the corresponding hour. Implement a data structure that efficiently\nsupports the following:\n\n- update(hour: int, value: int): Increment the element at index hour by value.\n- query(start: int, end: int): Retrieve the number of subscribers that have signed up\n between start and end (inclusive).\n\nYou can assume that all values get cleared at the end of the day, and that you will not\nbe asked for start and end values that wrap around midnight.\n\"\"\"\n\n\nclass Hourly_Subscribers:\n def __init__(self) -> None:\n self.sub_count = [0 for _ in range(24)]\n\n def update(self, hour: int, value: int) -> None:\n self.sub_count[hour - 1] += value\n\n def query(self, start: int, end: int) -> int:\n return sum(self.sub_count[start : end + 1])\n\n\nif __name__ == \"__main__\":\n hs = Hourly_Subscribers()\n\n hs.update(2, 50)\n hs.update(5, 100)\n\n print(hs.query(1, 7))\n\n hs.update(2, 10)\n\n print(hs.query(1, 7))\n", "id": "7368958", "language": "Python", "matching_score": 1.4873725175857544, "max_stars_count": 70, "path": "Solutions/242.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a clock time in hh:mm format, determine, to the nearest degree, the angle between\nthe hour and the minute hands.\n\nBonus: When, during the course of a day, will the angle be zero?\n\"\"\"\n\n\nHOUR_ANGLE = {\n 1: (1 / 12) * 360,\n 2: (2 / 12) * 360,\n 3: (3 / 12) * 360,\n 4: (4 / 12) * 360,\n 5: (5 / 12) * 360,\n 6: (6 / 12) * 360,\n 7: (7 / 12) * 360,\n 8: (8 / 12) * 360,\n 9: (9 / 12) * 360,\n 10: (10 / 12) * 360,\n 11: (11 / 12) * 360,\n 12: (12 / 12) * 360,\n}\n\n\ndef get_displaced_hour_angle(mm: int) -> float:\n return (mm / 60) * (360 / 12)\n\n\ndef get_minutes_angle(mm: int) -> float:\n return (mm / 60) * 360\n\n\ndef get_angle_between_arms(time: str) -> int:\n hh, mm = [int(elem) for elem in time.split(\":\")]\n hour_angle = (HOUR_ANGLE[hh] + get_displaced_hour_angle(mm)) % 360\n minute_angle = get_minutes_angle(mm)\n return round(abs(hour_angle - minute_angle))\n\n\nif __name__ == \"__main__\":\n print(get_angle_between_arms(\"12:20\"))\n print(get_angle_between_arms(\"12:00\"))\n print(get_angle_between_arms(\"6:30\"))\n print(get_angle_between_arms(\"3:45\"))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(1)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "6590759", "language": "Python", "matching_score": 0.8993657231330872, "max_stars_count": 70, "path": "Solutions/303.py" }, { "content": "\"\"\"\nProblem:\n\nWe're given a hashmap with a key courseId and value a list of courseIds, which\nrepresents that the prerequsite of courseId is 
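Note the inconsistent indexing in Hourly_Subscribers above: update writes self.sub_count[hour - 1] while query sums indices start..end, so update(1, x) followed by query(1, 1) misses the write. For O(log n) on both operations, the usual structure is a Fenwick (binary indexed) tree; a sketch with 1-based hours (SubscriberCounter is a hypothetical name):

class SubscriberCounter:
    def __init__(self, size=24):
        self.size = size
        self.tree = [0] * (size + 1)  # 1-based Fenwick tree

    def update(self, hour, value):
        while hour <= self.size:
            self.tree[hour] += value
            hour += hour & -hour  # jump to the next covering node

    def _prefix(self, hour):
        total = 0
        while hour > 0:
            total += self.tree[hour]
            hour -= hour & -hour
        return total

    def query(self, start, end):  # inclusive on both ends
        return self._prefix(end) - self._prefix(start - 1)

hs = SubscriberCounter()
hs.update(2, 50)
hs.update(5, 100)
print(hs.query(1, 7))  # 150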
\n\nif __name__ == \"__main__\":\n hs = Hourly_Subscribers()\n\n hs.update(2, 50)\n hs.update(5, 100)\n\n print(hs.query(1, 7))\n\n hs.update(2, 10)\n\n print(hs.query(1, 7))\n", "id": "7368958", "language": "Python", "matching_score": 1.4873725175857544, "max_stars_count": 70, "path": "Solutions/242.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a clock time in hh:mm format, determine, to the nearest degree, the angle between\nthe hour and the minute hands.\n\nBonus: When, during the course of a day, will the angle be zero?\n\"\"\"\n\n\nHOUR_ANGLE = {\n 1: (1 / 12) * 360,\n 2: (2 / 12) * 360,\n 3: (3 / 12) * 360,\n 4: (4 / 12) * 360,\n 5: (5 / 12) * 360,\n 6: (6 / 12) * 360,\n 7: (7 / 12) * 360,\n 8: (8 / 12) * 360,\n 9: (9 / 12) * 360,\n 10: (10 / 12) * 360,\n 11: (11 / 12) * 360,\n 12: (12 / 12) * 360,\n}\n\n\ndef get_displaced_hour_angle(mm: int) -> float:\n return (mm / 60) * (360 / 12)\n\n\ndef get_minutes_angle(mm: int) -> float:\n return (mm / 60) * 360\n\n\ndef get_angle_between_arms(time: str) -> int:\n hh, mm = [int(elem) for elem in time.split(\":\")]\n hour_angle = (HOUR_ANGLE[hh] + get_displaced_hour_angle(mm)) % 360\n minute_angle = get_minutes_angle(mm)\n return round(abs(hour_angle - minute_angle))\n\n\nif __name__ == \"__main__\":\n print(get_angle_between_arms(\"12:20\"))\n print(get_angle_between_arms(\"12:00\"))\n print(get_angle_between_arms(\"6:30\"))\n print(get_angle_between_arms(\"3:45\"))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(1)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "6590759", "language": "Python", "matching_score": 0.8993657231330872, "max_stars_count": 70, "path": "Solutions/303.py" }, { "content": "\"\"\"\nProblem:\n\nWe're given a hashmap with a key courseId and value a list of courseIds, which\nrepresents that the prerequisites of courseId are the courses in courseIds. Return a\nsorted ordering of courses such that we can finish all courses.\n\nReturn null if there is no such ordering.\n\nFor example, given {'CSC300': ['CSC100', 'CSC200'], 'CSC200': ['CSC100'], 'CSC100': []},\nshould return ['CSC100', 'CSC200', 'CSC300'].\n\"\"\"\n\nfrom typing import Dict, List, Optional, Set, Tuple\n\n\ndef get_order_helper(\n course_map: Dict[str, List[str]],\n course: str,\n order: List[str],\n processed: Set[str],\n break_limit: Optional[int] = None,\n curr: int = 0,\n) -> Tuple[Optional[List[str]], Optional[Set[str]]]:\n if not break_limit:\n break_limit = len(course_map)\n if break_limit < curr:\n # the recursion is deeper than the number of courses: there must be a cycle\n return None, None\n if course_map[course] == []:\n # if the course doesn't have any pre-req\n if course not in processed:\n order.append(course)\n processed.add(course)\n return order, processed\n\n for prerequisite in course_map[course]:\n if prerequisite in processed:\n # already placed earlier in the ordering\n continue\n order, processed = get_order_helper(\n course_map, prerequisite, order, processed, break_limit, curr + 1\n )\n if order is None:\n return None, None\n if course not in processed:\n order.append(course)\n processed.add(course)\n return order, processed\n\n\ndef get_order(course_map: Dict[str, List[str]]) -> Optional[List[str]]:\n order = []\n processed = set()\n\n for course in course_map:\n if course not in processed:\n for prerequisite in course_map[course]:\n if prerequisite not in processed:\n order, processed = get_order_helper(\n course_map, prerequisite, order, processed\n )\n if order is None:\n return None\n order.append(course)\n processed.add(course)\n return order\n\n\nif __name__ == \"__main__\":\n prereqs = {\"CSC300\": [\"CSC100\", \"CSC200\"], \"CSC200\": [\"CSC100\"], \"CSC100\": []}\n print(get_order(prereqs))\n\n prereqs = {\n \"CSC400\": [\"CSC300\"],\n \"CSC300\": [\"CSC100\", \"CSC200\"],\n \"CSC200\": [\"CSC100\"],\n \"CSC100\": [],\n }\n print(get_order(prereqs))\n\n prereqs = {\n \"CSC400\": [\"CSC300\"],\n \"CSC300\": [\"CSC100\", \"CSC200\"],\n \"CSC200\": [\"CSC100\"],\n \"CSC100\": [\"CSC400\"],\n }\n print(get_order(prereqs))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(courses x prerequisites)\nSPACE COMPLEXITY: O(courses x prerequisites)\n\"\"\"\n", "id": "6644655", "language": "Python", "matching_score": 0.6629542112350464, "max_stars_count": 70, "path": "Solutions/092.py" }, { "content": "\"\"\"\nProblem:\n\nYou are given a list of (website, user) pairs that represent users visiting websites.\nCome up with a program that identifies the top k pairs of websites with the greatest\nsimilarity.\n\nFor example, suppose k = 1, and the list of tuples is:\n\n[('a', 1), ('a', 3), ('a', 5),\n ('b', 2), ('b', 6),\n ('c', 1), ('c', 2), ('c', 3), ('c', 4), ('c', 5),\n ('d', 4), ('d', 5), ('d', 6), ('d', 7),\n ('e', 1), ('e', 3), ('e', 5), ('e', 6)]\nThen a reasonable similarity metric would most likely conclude that a and e are the\nmost similar, so your program should return [('a', 'e')].\n\"\"\"\n\nfrom typing import Dict, List, Set, Tuple\n\n\ndef get_similarity_score(\n visited_map: Dict[str, Set[int]], site1: str, site2: str\n) -> float:\n union = visited_map[site1] | visited_map[site2]\n intersection = visited_map[site1] & visited_map[site2]\n return len(intersection) / len(union)\n\n\n
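# the similarity metric is the Jaccard index of the two sites' user sets, |A & B| / |A | B|: 1.0 means identical audiences, 0.0 means disjoint audiences\n\n\n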
def create_visit_map(visited_websites: List[Tuple[str, int]]) -> Dict[str, Set[int]]:\n visited_map = {}\n for site, user in visited_websites:\n if site not in visited_map:\n visited_map[site] = set()\n visited_map[site].add(user)\n return visited_map\n\n\ndef get_similar_websites_helper(\n visited_websites: List[Tuple[str, int]]\n) -> Dict[str, Dict[str, float]]:\n similarity = {}\n visited_map = create_visit_map(visited_websites)\n for site1 in visited_map:\n for site2 in visited_map:\n if site1 not in similarity:\n similarity[site1] = {}\n if site2 not in similarity:\n similarity[site2] = {}\n if site1 != site2 and site2 not in similarity[site1]:\n similarity_score = get_similarity_score(visited_map, site1, site2)\n similarity[site1][site2] = similarity_score\n similarity[site2][site1] = similarity_score\n return similarity\n\n\ndef get_similar_websites(\n visited_websites: List[Tuple[str, int]], k: int\n) -> List[Tuple[str, str]]:\n similarity_map = get_similar_websites_helper(visited_websites)\n # generating the similar sites array\n arr = [\n (site1, site2, similarity_map[site1][site2])\n for site2 in similarity_map\n for site1 in similarity_map\n if site1 != site2\n ]\n arr.sort(reverse=True, key=lambda x: x[2])\n # generating the top k similar websites\n result = []\n for i in range(k):\n # choosing every 2nd element, as every 2 consecutive elements are equivalent:\n # (\"a\", \"b\") is equivalent to (\"b\", \"a\")\n site1, site2, _ = arr[2 * i]\n result.append((site1, site2))\n return result\n\n\nif __name__ == \"__main__\":\n visited_websites = [\n (\"a\", 1),\n (\"a\", 3),\n (\"a\", 5),\n (\"b\", 2),\n (\"b\", 6),\n (\"c\", 1),\n (\"c\", 2),\n (\"c\", 3),\n (\"c\", 4),\n (\"c\", 5),\n (\"d\", 4),\n (\"d\", 5),\n (\"d\", 6),\n (\"d\", 7),\n (\"e\", 1),\n (\"e\", 3),\n (\"e\", 5),\n (\"e\", 6),\n ]\n print(get_similar_websites(visited_websites, 1))\n print(get_similar_websites(visited_websites, 3))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n ^ 2)\nSPACE COMPLEXITY: O(n ^ 2)\n[n = number of distinct websites]\n\"\"\"\n", "id": "8676666", "language": "Python", "matching_score": 0.8506892323493958, "max_stars_count": 70, "path": "Solutions/287.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a sorted array, convert it into a height-balanced binary search tree.\n\"\"\"\n\nfrom typing import List\n\nfrom DataStructures.Tree import BinarySearchTree\n\n\ndef create_balanced_bst_helper(arr: List[int], tree: BinarySearchTree) -> None:\n # based on the fact that the middle element of a sorted array has an (almost)\n # equal number of elements on either side (the difference is at most 1)\n if len(arr) == 0:\n return\n mid = len(arr) // 2\n tree.add(arr[mid])\n create_balanced_bst_helper(arr[:mid], tree)\n create_balanced_bst_helper(arr[mid + 1 :], tree)\n\n\ndef create_balanced_bst(arr: List[int]) -> BinarySearchTree:\n tree = BinarySearchTree()\n create_balanced_bst_helper(arr, tree)\n return tree\n\n\nif __name__ == \"__main__\":\n print(create_balanced_bst([1, 2, 3, 4, 5]))\n print(create_balanced_bst([1, 2, 3, 4, 5, 6, 7]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n x log(n))\nSPACE COMPLEXITY: O(n)\n[time complexity can be reduced to O(n) using node reference inplace of calling\ntree.add()]\n\"\"\"\n", "id": "2334030", "language": "Python", "matching_score": 1.9591281414031982, "max_stars_count": 70, "path": "Solutions/296.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a tree, find the largest tree/subtree that is a BST.\n\nGiven a tree, return the size of the largest tree/subtree that is a BST.\n\"\"\"\n\nfrom sys import maxsize\nfrom typing import Tuple\n\nfrom DataStructures.Tree import BinaryTree, Node\n\n\ndef get_largest_bst_size_helper(node: Node) -> Tuple[int, Node, bool, int, int]:\n if not node:\n return 0, node, True, maxsize, -maxsize\n if not node.left and not node.right:\n return 1, node, True, node.val, node.val\n\n l_height, l_root, l_is_bst, l_max_val, 
l_min_val = get_largest_bst_size_helper(\n node.left\n )\n r_height, r_root, r_is_bst, r_max_val, r_min_val = get_largest_bst_size_helper(\n node.right\n )\n if l_is_bst and r_is_bst:\n if node.left and node.right:\n if l_max_val <= node.val <= r_min_val:\n return (l_height + r_height + 1), node, True, r_max_val, l_min_val\n else:\n if node.left and node.val > l_max_val:\n return l_height + 1, node, True, node.val, l_min_val\n elif node.right and node.val < r_min_val:\n return r_height + 1, node, True, r_max_val, node.val\n if l_height > r_height:\n return l_height, l_root, False, l_max_val, l_min_val\n return r_height, r_root, False, r_max_val, r_min_val\n\n\ndef get_largest_bst_size(tree: BinaryTree) -> Tuple[int, int]:\n size, node, _, _, _ = get_largest_bst_size_helper(tree.root)\n return size, node.val\n\n\nif __name__ == \"__main__\":\n a = Node(3)\n b = Node(2)\n c = Node(6)\n d = Node(1)\n e = Node(1)\n f = Node(4)\n\n a.left = b\n a.right = c\n b.left = d\n b.right = e\n c.left = f\n\n tree = BinaryTree()\n tree.root = a\n\n print(tree)\n print(\"Size: {}\\tNode Val: {}\".format(*get_largest_bst_size(tree)))\n\n a = Node(3)\n b = Node(2)\n c = Node(6)\n d = Node(1)\n e = Node(4)\n f = Node(4)\n\n a.left = b\n a.right = c\n b.left = d\n b.right = e\n c.left = f\n\n tree = BinaryTree()\n tree.root = a\n\n print(tree)\n print(\"Size: {}\\tNode Val: {}\".format(*get_largest_bst_size(tree)))\n\n a = Node(1)\n b = Node(2)\n c = Node(6)\n d = Node(1)\n e = Node(3)\n f = Node(4)\n\n a.left = b\n a.right = c\n b.left = d\n b.right = e\n c.left = f\n\n tree = BinaryTree()\n tree.root = a\n\n print(tree)\n print(\"Size: {}\\tNode Val: {}\".format(*get_largest_bst_size(tree)))\n\n a = Node(3)\n b = Node(2)\n c = Node(6)\n d = Node(1)\n e = Node(3)\n f = Node(4)\n\n a.left = b\n a.right = c\n b.left = d\n b.right = e\n c.left = f\n\n tree = BinaryTree()\n tree.root = a\n\n print(tree)\n print(\"Size: {}\\tNode Val: {}\".format(*get_largest_bst_size(tree)))\n\n a = Node(3)\n b = Node(1)\n c = Node(6)\n d = Node(0)\n e = Node(2)\n f = Node(4)\n\n a.left = b\n a.right = c\n b.left = d\n b.right = e\n c.left = f\n\n tree = BinaryTree()\n tree.root = a\n\n print(tree)\n print(\"Size: {}\\tNode Val: {}\".format(*get_largest_bst_size(tree)))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(log(n))\n\"\"\"\n", "id": "7317727", "language": "Python", "matching_score": 1.9002008438110352, "max_stars_count": 70, "path": "Solutions/093.py" }, { "content": "\"\"\"\nProblem:\n\nGiven the root to a binary search tree, find the second largest node in the tree.\n\"\"\"\n\nfrom typing import Optional, Tuple\n\nfrom DataStructures.Tree import Node, BinarySearchTree\n\n\ndef get_largest_pair_from_current_node(node: Node) -> Tuple[Optional[Node], Node]:\n parent = None\n while node.right:\n parent = node\n node = node.right\n # both the parent and the node is returned\n return parent, node\n\n\ndef get_second_largest(tree: BinarySearchTree) -> Optional[int]:\n if tree.root is None:\n return None\n\n parent_of_largest, largest = get_largest_pair_from_current_node(tree.root)\n # if the largest node has a left node, the largest child of the left node is the\n # 2nd largest node (BST property)\n if largest.left:\n _, second_largest_node = get_largest_pair_from_current_node(largest.left)\n return second_largest_node.val\n # if the left node and the parent is absent (the tree contains only 1 node),\n # None is returned\n elif parent_of_largest is None:\n return None\n # if the largest parent is present 
it's the 2nd largest node if no left node is\n # present (BST property)\n return parent_of_largest.val\n\n\nif __name__ == \"__main__\":\n tree = BinarySearchTree()\n\n tree.add(5)\n tree.add(3)\n tree.add(8)\n tree.add(2)\n tree.add(4)\n tree.add(7)\n tree.add(9)\n\n print(get_second_largest(tree))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(log(n))\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "6035436", "language": "Python", "matching_score": 3.4168038368225098, "max_stars_count": 70, "path": "Solutions/036.py" }, { "content": "\"\"\"\nProblem:\n\nGiven the sequence of keys visited by a postorder traversal of a binary search tree,\nreconstruct the tree.\n\nFor example, given the sequence 2, 4, 3, 8, 7, 5, you should construct the following\ntree:\n\n 5\n / \\\n 3 7\n / \\ \\\n2 4 8\n\"\"\"\n\nfrom typing import List\n\nfrom DataStructures.Tree import BinarySearchTree, Node\n\n\ndef bst_from_postorder(postorder: List[int]) -> BinarySearchTree:\n # the last postorder element is the root; inserting the remaining values in\n # reverse order reproduces the original BST\n tree = BinarySearchTree()\n if postorder:\n tree.add(postorder[-1])\n for val in postorder[-2::-1]:\n tree.add(val)\n return tree\n\n\nif __name__ == \"__main__\":\n print(bst_from_postorder([2, 4, 3, 8, 7, 5]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n log(n))\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "5110525", "language": "Python", "matching_score": 1.538751482963562, "max_stars_count": 70, "path": "Solutions/179.py" }, { "content": "\"\"\"\nProblem:\n\nA Cartesian tree with sequence S is a binary tree defined by the following two\nproperties:\n\nIt is heap-ordered, so that each parent value is strictly less than that of its\nchildren. An in-order traversal of the tree produces nodes with values that correspond\nexactly to S. For example, given the sequence [3, 2, 6, 1, 9], the resulting Cartesian\ntree would be:\n\n 1\n / \\ \n 2 9\n / \\\n3 6\nGiven a sequence S, construct the corresponding Cartesian tree.\n\"\"\"\n\nfrom typing import List, Optional\n\nfrom DataStructures.Tree import Node, BinaryTree\n\n\ndef generate_cartesian_tree_helper(arr: List[int]) -> Optional[Node]:\n # builds the tree by keeping the rightmost spine on a stack; each new value\n # pops the spine nodes larger than itself and adopts them as its left subtree\n spine = []\n for val in arr:\n node = Node(val)\n last_popped = None\n while spine and spine[-1].val > val:\n last_popped = spine.pop()\n node.left = last_popped\n if spine:\n spine[-1].right = node\n spine.append(node)\n return spine[0] if spine else None\n\n\ndef generate_cartesian_tree(sequence: List[int]) -> BinaryTree:\n tree = BinaryTree()\n tree.root = generate_cartesian_tree_helper(sequence)\n return tree\n\n\nif __name__ == \"__main__\":\n print(generate_cartesian_tree([3, 2, 6, 1, 9]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "947821", "language": "Python", "matching_score": 2.2818491458892822, "max_stars_count": 70, "path": "Solutions/326.py" }, { "content": "\"\"\"\nProblem:\n\nGiven an integer N, construct all possible binary search trees with N nodes.\n\"\"\"\n\nfrom copy import deepcopy\nfrom functools import lru_cache\nfrom typing import List\n\nfrom DataStructures.Stack import Stack\nfrom DataStructures.Tree import BinaryTree, Node\n\n\n@lru_cache(maxsize=128)\ndef fib(n: int) -> int:\n # memoized fibonacci function\n if n in (1, 2):\n return 1\n return fib(n - 1) + fib(n - 2)\n\n\ndef generate_tree_helper(i: int, arr: List[BinaryTree], nodes: int) -> BinaryTree:\n tree = arr[i]\n stack = Stack()\n stack.push(tree.root)\n\n
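 # repeatedly pop a node and try to attach one new leaf in each empty child slot, keeping a tree only if it is not a duplicate of another candidate\n 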
while not stack.is_empty():\n # generating the new tree with 1 new node\n node = stack.pop()\n if not node.left:\n node.left = Node(0)\n for j in range(nodes):\n if j != i and tree == arr[j]:\n node.left = None\n break\n else:\n return tree\n else:\n stack.push(node.left)\n if not node.right:\n node.right = Node(0)\n for j in range(nodes):\n if j != i and tree == arr[j]:\n node.right = None\n break\n else:\n return tree\n else:\n stack.push(node.right)\n\n\ndef generate_tree(tree: BinaryTree) -> List[BinaryTree]:\n nodes = sum([fib(i) for i in range(1, len(tree) + 2)])\n arr = [deepcopy(tree) for _ in range(nodes)]\n for i in range(nodes):\n arr[i] = generate_tree_helper(i, arr, nodes)\n return arr\n\n\ndef create_trees_helper(tree_arr: List[BinaryTree], n: int) -> None:\n if n == 0:\n return\n new_tree_arr = []\n for tree in tree_arr:\n result = generate_tree(tree)\n new_tree_arr.extend(\n [temp for temp in result if temp and temp not in new_tree_arr]\n ) # can be optimized by using a set and overloading __hash__ in tree class\n tree_arr[:] = new_tree_arr\n create_trees_helper(tree_arr, n - 1)\n\n\ndef create_trees(n: int) -> List[BinaryTree]:\n tree_arr = []\n if n == 0:\n return tree_arr\n tree = BinaryTree()\n tree.root = Node(0)\n tree_arr.append(tree)\n create_trees_helper(tree_arr, n - 1)\n return tree_arr\n\n\nif __name__ == \"__main__\":\n for tree in create_trees(2):\n print(tree)\n print()\n for tree in create_trees(3):\n print(tree)\n print()\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n ^ 2)\nSPACE COMPLEXITY: O(n ^ 2)\n\"\"\"\n", "id": "3130410", "language": "Python", "matching_score": 2.467740774154663, "max_stars_count": 70, "path": "Solutions/278.py" }, { "content": "\"\"\"\nProblem:\n\nWrite a program to merge two binary trees. Each node in the new tree should hold a\nvalue equal to the sum of the values of the corresponding nodes of the input trees.\n\nIf only one input tree has a node in a given position, the corresponding node in the\nnew tree should match that input node.\n\"\"\"\n\nfrom DataStructures.Tree import BinaryTree, Node\n\n\ndef merge_trees_helper(node: Node, node1: Node, node2: Node) -> None:\n n1_l_val, n1_r_val = 0, 0\n n2_l_val, n2_r_val = 0, 0\n n1_l, n1_r = None, None\n n2_l, n2_r = None, None\n # tree1 node related data generation\n if node1:\n if node1.left:\n n1_l_val = node1.left.val\n n1_l = node1.left\n if node1.right:\n n1_r_val = node1.right.val\n n1_r = node1.right\n # tree2 node related data generation\n if node2:\n if node2.left:\n n2_l_val = node2.left.val\n n2_l = node2.left\n if node2.right:\n n2_r_val = node2.right.val\n n2_r = node2.right\n # left node generation\n if n1_l is not None or n2_l is not None:\n node.left = Node(n1_l_val + n2_l_val)\n merge_trees_helper(node.left, n1_l, n2_l)\n # right node generation\n if n1_r is not None or n2_r is not None:\n node.right = Node(n1_r_val + n2_r_val)\n merge_trees_helper(node.right, n1_r, n2_r)\n\n\ndef merge_trees(tree1: BinaryTree, tree2: BinaryTree) -> BinaryTree:\n tree = BinaryTree()\n if not tree1.root and not tree2.root:\n return tree\n # root generation\n r1, r2 = 0, 0\n if tree1.root:\n r1 = tree1.root.val\n if tree2.root:\n r2 = tree2.root.val\n tree.root = Node(r1 + r2)\n # generating rest of the tree\n merge_trees_helper(tree.root, tree1.root, tree2.root)\n return tree\n\n\nif __name__ == \"__main__\":\n tree1 = BinaryTree()\n tree1.root = Node(1)\n tree1.root.left = Node(2)\n tree1.root.right = Node(3)\n tree1.root.left.right = Node(4)\n print(tree1)\n\n tree2 = BinaryTree()\n 
tree2.root = Node(2)\n tree2.root.right = Node(-3)\n tree2.root.right.right = Node(10)\n print(tree2)\n\n print(merge_trees(tree1, tree2))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(nodes_tree1 + nodes_tree2)\nSPACE COMPLEXITY: O(height_tree1 + height_tree2)\n\"\"\"\n", "id": "4915554", "language": "Python", "matching_score": 2.763035297393799, "max_stars_count": 70, "path": "Solutions/327.py" }, { "content": "\"\"\"\nProblem:\n\nGiven two non-empty binary trees s and t, check whether tree t has exactly the same\nstructure and node values with a subtree of s. A subtree of s is a tree consists of a\nnode in s and all of this node's descendants. The tree s could also be considered as a\nsubtree of itself.\n\"\"\"\n\nfrom DataStructures.Tree import BinaryTree, Node\n\n\ndef is_equal(node1: Node, node2: Node) -> bool:\n if (not node1) and (not node2):\n return True\n if (not node1) or (not node2):\n return False\n if node1.val != node2.val:\n return False\n return is_equal(node1.left, node2.left) and is_equal(node1.right, node2.right)\n\n\ndef find_helper(sub_tree1: Node, sub_tree2: Node) -> bool:\n if is_equal(sub_tree1, sub_tree2):\n return True\n # if the subtree is not same, the children are checked\n if sub_tree1.left and find_helper(sub_tree1.left, sub_tree2):\n return True\n if sub_tree1.right and find_helper(sub_tree1.right, sub_tree2):\n return True\n return False\n\n\ndef get_match(s: BinaryTree, t: BinaryTree) -> bool:\n if s.root and t.root:\n return find_helper(s.root, t.root)\n return False\n\n\nif __name__ == \"__main__\":\n tree1 = BinaryTree()\n tree1.root = Node(0)\n tree1.root.left = Node(1)\n tree1.root.right = Node(2)\n tree1.root.right.left = Node(3)\n tree1.root.right.right = Node(4)\n\n tree2 = BinaryTree()\n tree2.root = Node(2)\n tree2.root.left = Node(3)\n tree2.root.right = Node(4)\n\n tree3 = BinaryTree()\n tree3.root = Node(2)\n tree3.root.left = Node(3)\n tree3.root.right = Node(5)\n\n print(get_match(tree1, tree2))\n print(get_match(tree1, tree3))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(2 ^ n)\nSPACE COMPLEXITY: O(log(n))\n\"\"\"\n", "id": "994177", "language": "Python", "matching_score": 2.9610650539398193, "max_stars_count": 70, "path": "Solutions/115.py" }, { "content": "\"\"\"\nProblem:\n\nDetermine whether a tree is a valid binary search tree.\n\nA binary search tree is a tree with two children, left and right, and satisfies the\nconstraint that the key in the left child must be less than or equal to the root and\nthe key in the right child must be greater than or equal to the root.\n\"\"\"\n\nfrom DataStructures.Tree import BinaryTree, BinarySearchTree, Node\n\n\ndef is_binary_search_tree_helper(node: Node) -> bool:\n if node is None:\n return True\n if node.left is None and node.right is None:\n return True\n elif (node.left and node.left.val > node.val) or (\n node.right and node.right.val < node.val\n ):\n return False\n return is_binary_search_tree_helper(node.left) and is_binary_search_tree_helper(\n node.right\n )\n\n\ndef is_binary_search_tree(tree: BinaryTree) -> bool:\n return is_binary_search_tree_helper(tree.root)\n\n\nif __name__ == \"__main__\":\n tree1 = BinarySearchTree()\n\n tree1.add(5)\n tree1.add(9)\n tree1.add(1)\n tree1.add(4)\n tree1.add(10)\n tree1.add(3)\n tree1.add(2)\n tree1.add(10)\n tree1.add(7)\n\n print(is_binary_search_tree(tree1))\n\n tree2 = BinaryTree()\n tree2.root = Node(5)\n\n tree2.root.left = Node(4)\n tree2.root.right = Node(6)\n\n print(is_binary_search_tree(tree2))\n\n tree3 = BinaryTree()\n tree3.root = 
Node(5)\n\n tree3.root.left = Node(6)\n tree3.root.right = Node(4)\n\n print(is_binary_search_tree(tree3))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(log(n)) [recursion depth]\n\"\"\"\n", "id": "4253863", "language": "Python", "matching_score": 1.073496699333191, "max_stars_count": 70, "path": "Solutions/089.py" }, { "content": "from typing import Union\n\n\ndef get_parent_position(position: int) -> int:\n # helper function to get the position of the parent of the current node\n return (position - 1) // 2\n\n\ndef get_left_child_position(position: int) -> int:\n # helper function to get the position of the left child of the current node\n return (2 * position) + 1\n\n\ndef get_right_child_position(position: int) -> int:\n # helper function to get the position of the right child of the current node\n return (2 * position) + 2\n\n\nclass MinHeap:\n \"\"\"\n Min Heap class\n\n Functions:\n extract_min: Remove and return the minimum element from the heap\n insert: Insert a node into the heap\n peek_min: Get the minimum element from the heap\n _bubble_up: helper function to place a node at the proper position (upward\n movement)\n _bubble_down: helper function to place a node at the proper position (downward\n movement)\n _swap_nodes: helper function to swap the nodes at the given positions\n \"\"\"\n\n def __init__(self) -> None:\n self.heap = []\n self.elements = 0\n\n def __len__(self) -> int:\n return self.elements\n\n def __repr__(self) -> str:\n return str(self.heap)\n\n def extract_min(self) -> Union[int, str]:\n # function to remove and return the minimum element from the heap\n if self.elements == 0:\n raise RuntimeError(\"Heap Underflow. Cannot extract min from an empty heap\")\n\n if self.elements > 1:\n self._swap_nodes(0, self.elements - 1)\n elem = self.heap.pop()\n self.elements -= 1\n if self.elements > 0:\n self._bubble_down(0)\n return elem\n\n def insert(self, elem: Union[int, str]) -> None:\n # function to insert a node into the heap\n self.heap.append(elem)\n self._bubble_up(self.elements)\n self.elements += 1\n\n def peek_min(self) -> Union[int, str]:\n # function to get the minimum element from the heap\n if self.elements == 0:\n raise RuntimeError(\"Heap is empty\")\n return self.heap[0]\n\n def _bubble_up(self, curr_pos: int) -> None:\n # Place a node at the proper position (upward movement) [to be used internally\n # only]\n if curr_pos == 0:\n return\n parent_position = get_parent_position(curr_pos)\n elem = self.heap[curr_pos]\n parent = self.heap[parent_position]\n if parent > elem:\n self._swap_nodes(parent_position, curr_pos)\n self._bubble_up(parent_position)\n\n
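 # note: _bubble_up and _bubble_down each recurse at most once per heap level, so both run in O(log n)\n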
 def _bubble_down(self, curr_pos: int) -> None:\n # Place a node at the proper position (downward movement) [to be used\n # internally only]\n elem = self.heap[curr_pos]\n child_left_position = get_left_child_position(curr_pos)\n child_right_position = get_right_child_position(curr_pos)\n if child_left_position < self.elements and child_right_position < self.elements:\n child_left = self.heap[child_left_position]\n child_right = self.heap[child_right_position]\n if child_right < child_left:\n if child_right < elem:\n self._swap_nodes(child_right_position, curr_pos)\n return self._bubble_down(child_right_position)\n if child_left_position < self.elements:\n child_left = self.heap[child_left_position]\n if child_left < elem:\n self._swap_nodes(child_left_position, curr_pos)\n return self._bubble_down(child_left_position)\n else:\n return\n if child_right_position < self.elements:\n child_right = self.heap[child_right_position]\n if child_right < elem:\n self._swap_nodes(child_right_position, curr_pos)\n return self._bubble_down(child_right_position)\n\n def _swap_nodes(self, pos1: int, pos2: int) -> None:\n # function to swap two nodes in the heap [to be used internally only]\n self.heap[pos1], self.heap[pos2] = self.heap[pos2], self.heap[pos1]\n\n\nclass MaxHeap:\n \"\"\"\n Max Heap class\n\n Functions:\n extract_max: Remove and return the maximum element from the heap\n insert: Insert a node into the heap\n peek_max: Get the maximum element from the heap\n _bubble_up: helper function to place a node at the proper position (upward\n movement)\n _bubble_down: helper function to place a node at the proper position (downward\n movement)\n _swap_nodes: helper function to swap the nodes at the given positions\n \"\"\"\n\n def __init__(self) -> None:\n self.heap = []\n self.elements = 0\n\n def __len__(self) -> int:\n return self.elements\n\n def __repr__(self) -> str:\n return str(self.heap)\n\n def extract_max(self) -> Union[int, str]:\n # function to remove and return the maximum element from the heap\n if self.elements == 0:\n raise RuntimeError(\"Heap Underflow. Cannot extract max from an empty heap\")\n\n if self.elements > 1:\n self._swap_nodes(0, self.elements - 1)\n elem = self.heap.pop()\n self.elements -= 1\n if self.elements > 0:\n self._bubble_down(0)\n return elem\n\n def insert(self, elem: Union[int, str]) -> None:\n # function to insert a node into the heap\n self.heap.append(elem)\n self._bubble_up(self.elements)\n self.elements += 1\n\n def peek_max(self) -> Union[int, str]:\n # function to get the maximum element from the heap\n if self.elements == 0:\n raise RuntimeError(\"Heap is empty\")\n return self.heap[0]\n\n def _bubble_up(self, curr_pos: int) -> None:\n # Place a node at the proper position (upward movement) [to be used internally\n # only]\n if curr_pos == 0:\n return\n parent_position = get_parent_position(curr_pos)\n elem = self.heap[curr_pos]\n parent = self.heap[parent_position]\n if parent < elem:\n self._swap_nodes(parent_position, curr_pos)\n self._bubble_up(parent_position)\n\n def _bubble_down(self, curr_pos: int) -> None:\n # Place a node at the proper position (downward movement) [to be used\n # internally only]\n elem = self.heap[curr_pos]\n child_left_position = get_left_child_position(curr_pos)\n child_right_position = get_right_child_position(curr_pos)\n if child_left_position < self.elements and child_right_position < self.elements:\n child_left = self.heap[child_left_position]\n child_right = self.heap[child_right_position]\n if child_right > child_left:\n if child_right > elem:\n self._swap_nodes(child_right_position, curr_pos)\n return self._bubble_down(child_right_position)\n if child_left_position < self.elements:\n child_left = self.heap[child_left_position]\n if child_left > elem:\n self._swap_nodes(child_left_position, curr_pos)\n return self._bubble_down(child_left_position)\n else:\n return\n if child_right_position < self.elements:\n child_right = self.heap[child_right_position]\n if child_right > elem:\n self._swap_nodes(child_right_position, curr_pos)\n return self._bubble_down(child_right_position)\n\n def _swap_nodes(self, pos1: int, pos2: int) -> None:\n # function to swap two nodes in the heap [to be used internally only]\n self.heap[pos1], self.heap[pos2] = self.heap[pos2], self.heap[pos1]\n", "id": "11768943", "language": "Python", "matching_score": 1.4060167074203491, "max_stars_count": 70, "path": "Solutions/DataStructures/Heap.py" }, { "content": "\"\"\"\nProblem:\n\nCompute the running median of a sequence of numbers. That is, given a stream of\nnumbers, print out the median of the list so far on each new element.\n\nRecall that the median of an even-numbered list is the average of the two middle\nnumbers.\n\nFor example, given the sequence [2, 1, 5, 7, 2, 0, 5], your algorithm should print out:\n\n2\n1.5\n2\n3.5\n2\n2\n2\n\"\"\"\n\nfrom typing import List\n\n\nfrom DataStructures.Heap import MaxHeap, MinHeap\n\n\ndef get_running_medians(arr: List[int]) -> List[float]:\n min_heap = MinHeap()\n max_heap = MaxHeap()\n medians = []\n
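 # min_heap holds the larger half of the numbers and max_heap the smaller half; the median is read off the heap tops after every insertion\n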
"Solutions/DataStructures/Heap.py" }, { "content": "\"\"\"\nProblem:\n\nCompute the running median of a sequence of numbers. That is, given a stream of\nnumbers, print out the median of the list so far on each new element.\n\nRecall that the median of an even-numbered list is the average of the two middle\nnumbers.\n\nFor example, given the sequence [2, 1, 5, 7, 2, 0, 5], your algorithm should print out:\n\n2\n1.5\n2\n3.5\n2\n2\n2\n\"\"\"\n\nfrom typing import List\n\n\nfrom DataStructures.Heap import MaxHeap, MinHeap\n\n\ndef get_running_medians(arr: List[int]) -> List[int]:\n min_heap = MinHeap()\n max_heap = MaxHeap()\n medians = []\n for elem in arr:\n # current median value generation\n min_heap.insert(elem)\n if len(min_heap) > len(max_heap) + 1:\n smallest_large_element = min_heap.extract_min()\n max_heap.insert(smallest_large_element)\n if len(min_heap) == len(max_heap):\n median = (min_heap.peek_min() + max_heap.peek_max()) / 2\n else:\n median = min_heap.peek_min()\n medians.append(median)\n return medians\n\n\nif __name__ == \"__main__\":\n print(get_running_medians([]))\n print(get_running_medians([2, 5]))\n print(get_running_medians([3, 3, 3, 3]))\n print(get_running_medians([2, 1, 5, 7, 2, 0, 5]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n x log(n))\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "1393676", "language": "Python", "matching_score": 2.0249078273773193, "max_stars_count": 70, "path": "Solutions/033.py" }, { "content": "\"\"\"\nProblem:\n\nYou are given a list of N numbers, in which each number is located at most k places\naway from its sorted position. For example, if k = 1, a given element at index 4 might\nend up at indices 3, 4, or 5.\n\nCome up with an algorithm that sorts this list in O(N log k) time.\n\"\"\"\n\nfrom typing import List\n\nfrom DataStructures.Heap import MinHeap\n\n\ndef k_sort(arr: List[int], k: int) -> List[int]:\n length = len(arr)\n heap = MinHeap()\n [heap.insert(elem) for elem in arr[: k + 1]]\n # updating the values of the array (to hold sorted elements)\n curr_index = 0\n for index in range(k + 1, length):\n arr[curr_index] = heap.extract_min()\n heap.insert(arr[index])\n curr_index += 1\n # updating the last k positions in the array by emptying the heap\n while heap:\n arr[curr_index] = heap.extract_min()\n curr_index += 1\n return arr\n\n\nif __name__ == \"__main__\":\n print(k_sort([1, 0, 2, 4, 3], 2))\n print(k_sort([6, 5, 3, 2, 8, 10, 9], 3))\n print(k_sort([10, 9, 8, 7, 4, 70, 60, 50], 4))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n log(k))\nSPACE COMPLEXITY: O(k)\n\"\"\"\n", "id": "1717069", "language": "Python", "matching_score": 0.7874725461006165, "max_stars_count": 70, "path": "Solutions/306.py" }, { "content": "\"\"\"\nProblem:\n\nYou have a large array with most of the elements as zero.\n\nUse a more space-efficient data structure, SparseArray, that implements the same\ninterface:\n\ninit(arr, size): initialize with the original large array and size.\nset(i, val): updates index at i with val.\nget(i): gets the value at index i.\n\"\"\"\n\nfrom typing import List\n\n\nclass SparseArray:\n def __init__(self, arr: List[int], size: int) -> None:\n self.arr = {}\n self.size = size\n for index, val in enumerate(arr):\n if val != 0:\n self.arr[index] = val\n\n def __repr__(self) -> str:\n string = \"\"\n for pos in range(self.size):\n if pos in self.arr:\n string += f\"{self.arr[pos]}, \"\n else:\n string += \"0, \"\n return \"[\" + string.rstrip(\" ,\") + \"]\"\n\n def set(self, pos: int, val: int) -> int:\n if pos > self.size:\n 
 # updating the values of the array (to hold sorted elements)\n curr_index = 0\n for index in range(k + 1, length):\n arr[curr_index] = heap.extract_min()\n heap.insert(arr[index])\n curr_index += 1\n # updating the last k positions in the array by emptying the heap\n while heap:\n arr[curr_index] = heap.extract_min()\n curr_index += 1\n return arr\n\n\nif __name__ == \"__main__\":\n print(k_sort([1, 0, 2, 4, 3], 2))\n print(k_sort([6, 5, 3, 2, 8, 10, 9], 3))\n print(k_sort([10, 9, 8, 7, 4, 70, 60, 50], 4))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n log(k))\nSPACE COMPLEXITY: O(k)\n\"\"\"\n", "id": "1717069", "language": "Python", "matching_score": 0.7874725461006165, "max_stars_count": 70, "path": "Solutions/306.py" }, { "content": "\"\"\"\nProblem:\n\nYou have a large array with most of the elements as zero.\n\nUse a more space-efficient data structure, SparseArray, that implements the same\ninterface:\n\ninit(arr, size): initialize with the original large array and size.\nset(i, val): updates index at i with val.\nget(i): gets the value at index i.\n\"\"\"\n\nfrom typing import List\n\n\nclass SparseArray:\n def __init__(self, arr: List[int], size: int) -> None:\n self.arr = {}\n self.size = size\n for index, val in enumerate(arr):\n if val != 0:\n self.arr[index] = val\n\n def __repr__(self) -> str:\n string = \"\"\n for pos in range(self.size):\n if pos in self.arr:\n string += f\"{self.arr[pos]}, \"\n else:\n string += \"0, \"\n return \"[\" + string.rstrip(\" ,\") + \"]\"\n\n def set(self, pos: int, val: int) -> None:\n if pos >= self.size:\n raise IndexError\n if val == 0:\n if pos in self.arr:\n del self.arr[pos]\n else:\n self.arr[pos] = val\n\n def get(self, pos: int) -> int:\n if pos >= self.size:\n raise IndexError\n if pos in self.arr:\n return self.arr[pos]\n return 0\n\n\nif __name__ == \"__main__\":\n arr = SparseArray([1, 0, 0, 0, 3, 0, 2, 0], 8)\n\n print(arr)\n\n print(arr.get(0))\n print(arr.get(2))\n arr.set(2, 4)\n print(arr.get(2))\n arr.set(4, 1)\n print(arr.get(4))\n arr.set(0, 0)\n print(arr.get(0))\n\n print(arr)\n", "id": "10191466", "language": "Python", "matching_score": 2.6718709468841553, "max_stars_count": 70, "path": "Solutions/134.py" }, { "content": "\"\"\"\nProblem:\n\nImplement a bit array.\n\nA bit array is a space efficient array that holds a value of 1 or 0 at each index.\n\ninit(size): initialize the array with size\nset(i, val): updates index at i with val where val is either 1 or 0.\nget(i): gets the value at index i.\n\"\"\"\n\n\nclass Bit_Array:\n def __init__(self, length: int) -> None:\n self.length = length\n self.indices = set()\n\n def set(self, pos: int, val: int) -> None:\n if pos >= self.length:\n raise IndexError(\"Index is out of range\")\n if val == 0:\n if pos in self.indices:\n self.indices.remove(pos)\n else:\n self.indices.add(pos)\n\n def get(self, pos: int) -> int:\n if pos >= self.length:\n raise IndexError(\"Index is out of range\")\n if pos in self.indices:\n return 1\n return 0\n\n def __repr__(self) -> str:\n res = []\n for pos in range(self.length):\n if pos in self.indices:\n res.append(1)\n else:\n res.append(0)\n return str(res)\n\n\nif __name__ == \"__main__\":\n arr = Bit_Array(8)\n\n print(arr)\n\n arr.set(5, 1)\n arr.set(1, 1)\n\n print(arr)\n\n print(arr.get(1))\n print(arr.get(4))\n", "id": "11361940", "language": "Python", "matching_score": 0.7465509176254272, "max_stars_count": 70, "path": "Solutions/137.py" }, { "content": "\"\"\"\nProblem:\n\nWrite an algorithm that finds the total number of set bits in all integers between 1\nand N.\n\"\"\"\n\n\ndef get_set_bits(num: int) -> int:\n bin_num = bin(num)[2:]\n return sum([int(digit) for digit in bin_num])\n\n\ndef get_total_set_bits(N: int) -> int:\n result = 0\n for i in range(1, N + 1):\n result += get_set_bits(i)\n return result\n\n\nif __name__ == \"__main__\":\n print(get_total_set_bits(0))\n print(get_total_set_bits(1))\n print(get_total_set_bits(2))\n print(get_total_set_bits(3))\n print(get_total_set_bits(4))\n print(get_total_set_bits(5))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n x log(n))\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "12111141", "language": "Python", "matching_score": 2.3630728721618652, "max_stars_count": 70, "path": "Solutions/310.py" }, { "content": "\"\"\"\nProblem:\n\nGiven an integer n, find the next biggest integer with the same number of 1-bits on.\nFor example, given the number 6 (0110 in binary), return 9 (1001).\n\"\"\"\n\n\ndef get_set_bits(num: int) -> int:\n # get the number of bits set in a number [runs in O(log(n))]\n bin_num = bin(num)[2:]\n return sum([int(digit) for digit in bin_num])\n\n\ndef get_next_number_with_same_count_of_set_bits(num: int) -> int:\n num_of_set_bits = get_set_bits(num)\n curr = num + 1\n while True:\n if num_of_set_bits == get_set_bits(curr):\n return curr\n curr += 1\n\n\nif __name__ == \"__main__\":\n print(get_next_number_with_same_count_of_set_bits(6))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n) [as the result always lies between n and 2n]\nSPACE COMPLEXITY: O(log(n))\n\"\"\"\n", "id": "4196361", "language": "Python", "matching_score": 
0.8809256553649902, "max_stars_count": 70, "path": "Solutions/338.py" }, { "content": "\"\"\"\nProblem:\n\nWrite a program to determine how many distinct ways there are to create a max heap from\na list of N given integers.\n\nFor example, if N = 3, and our integers are [1, 2, 3], there are two ways, shown below.\n\n 3 3\n / \\ / \\\n1 2 2 1\n\"\"\"\n\nfrom math import log2\nfrom typing import List\n\n\ndef choose(n: int, k: int, nCk: List[List[int]]) -> int:\n # get nCk using dynamic programming\n if k > n:\n return 0\n if n <= 1:\n return 1\n if k == 0:\n return 1\n if nCk[n][k] != -1:\n return nCk[n][k]\n\n answer = choose(n - 1, k - 1, nCk) + choose(n - 1, k, nCk)\n nCk[n][k] = answer\n return answer\n\n\ndef get_nodes_left(n: int) -> int:\n if n == 1:\n return 0\n h = int(log2(n))\n # max number of elements that can be present in the hth level of any heap\n num_h = 1 << h # (2 ^ h)\n # number of elements that are actually present in the last level\n # [hth level (2 ^ h - 1)]\n last = n - ((1 << h) - 1)\n if last >= (num_h // 2):\n # if more than half of the last level is filled\n return (1 << h) - 1\n return (1 << h) - 1 - ((num_h // 2) - last)\n\n\ndef number_of_heaps(n: int, dp: List[int], nCk: List[List[int]]) -> int:\n if n <= 1:\n return 1\n if dp[n] != -1:\n return dp[n]\n\n left = get_nodes_left(n)\n ans = (\n choose(n - 1, left, nCk)\n * number_of_heaps(left, dp, nCk)\n * number_of_heaps(n - 1 - left, dp, nCk)\n )\n dp[n] = ans\n return ans\n\n\ndef get_number_of_heaps(n: int) -> int:\n dp = [-1 for _ in range(n + 1)]\n nCk = [[-1 for _ in range(n + 1)] for _ in range(n + 1)]\n return number_of_heaps(n, dp, nCk)\n\n\nif __name__ == \"__main__\":\n print(get_number_of_heaps(3))\n print(get_number_of_heaps(10))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(2 ^ n)\nSPACE COMPLEXITY: O(n ^ 2)\n\"\"\"\n", "id": "8667484", "language": "Python", "matching_score": 1.1040449142456055, "max_stars_count": 70, "path": "Solutions/336.py" }, { "content": "\"\"\"\nProblem:\n\nA strobogrammatic number is a positive number that appears the same after being rotated\n180 degrees. For example, 16891 is strobogrammatic.\n\nCreate a program that finds all strobogrammatic numbers with N digits.\n\"\"\"\n\nfrom typing import List\n\n\ndef get_strobogrammatic_numbers_helper(N: int) -> List[str]:\n if N == 0:\n return [\"\"]\n if N == 1:\n return [\"1\", \"8\", \"0\"]\n\n smaller_strobogrammatic_numbers = get_strobogrammatic_numbers_helper(N - 2)\n strob_numbers = []\n for x in smaller_strobogrammatic_numbers:\n strob_numbers.extend(\n [\"1\" + x + \"1\", \"6\" + x + \"9\", \"9\" + x + \"6\", \"8\" + x + \"8\",]\n )\n return strob_numbers\n\n\ndef get_strobogrammatic_numbers(N: int) -> List[int]:\n return [int(num) for num in get_strobogrammatic_numbers_helper(N)]\n\n\nif __name__ == \"__main__\":\n print(get_strobogrammatic_numbers(1))\n print(get_strobogrammatic_numbers(2))\n print(get_strobogrammatic_numbers(3))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(4 ^ n)\nSPACE COMPLEXITY: O(4 ^ n)\n\"\"\"\n", "id": "5458253", "language": "Python", "matching_score": 0.7045742273330688, "max_stars_count": 70, "path": "Solutions/362.py" }, { "content": "\"\"\"\nProblem:\n\nYou are given an array of length n + 1 whose elements belong to the set {1, 2, ..., n}.\nBy the pigeonhole principle, there must be a duplicate. 
Find it in linear time and\nspace.\n\"\"\"\n\nfrom typing import List\n\n\ndef find_duplicate(arr: List[int]) -> int:\n seen_numbers = set()\n for num in arr:\n if num in seen_numbers:\n return num\n seen_numbers.add(num)\n\n\nif __name__ == \"__main__\":\n print(find_duplicate([1, 2, 4, 6, 5, 3, 2]))\n print(find_duplicate([3, 1, 4, 2, 3]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "4139499", "language": "Python", "matching_score": 0.7246288657188416, "max_stars_count": 70, "path": "Solutions/164.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a string, return whether it represents a number. Here are the different kinds of\nnumbers:\n\n\"10\", a positive integer\n\"-10\", a negative integer\n\"10.1\", a positive real number\n\"-10.1\", a negative real number\n\"1e5\", a number in scientific notation\nAnd here are examples of non-numbers:\n\n\"a\"\n\"x 1\"\n\"a -2\"\n\"-\"\n\"\"\"\n\n\ndef check_valid_number_representation(string: str) -> bool:\n is_valid = True\n has_number = False\n num_negatives, num_points, num_e = 0, 0, 0\n\n for char in string:\n if not (char.isdigit()):\n if char == \"-\":\n if num_negatives >= 1:\n # if the string contains an 'e', 2 '-'s are allowed (for mantissa\n # and exponent)\n if num_negatives == 1 and num_e == 1:\n num_negatives += 1\n continue\n is_valid = False\n break\n num_negatives += 1\n elif char == \".\":\n if num_points >= 1:\n # if the string contains an 'e', 2 '.'s are allowed (for mantissa\n # and exponent)\n if num_points == 1 and num_e == 1:\n num_points += 1\n continue\n is_valid = False\n break\n num_points += 1\n elif char == \"e\":\n # a number can have only 1 'e'\n if num_e >= 1:\n is_valid = False\n break\n num_e += 1\n elif char == \" \":\n # spaces are ignored\n pass\n else:\n # any other character makes the number invalid\n is_valid = False\n break\n else:\n # current character is a number\n has_number = True\n return is_valid and has_number\n\n\nif __name__ == \"__main__\":\n print(check_valid_number_representation(\"10\"))\n print(check_valid_number_representation(\"-10\"))\n print(check_valid_number_representation(\"10.1\"))\n print(check_valid_number_representation(\"-10.1\"))\n print(check_valid_number_representation(\"1e5\"))\n print(check_valid_number_representation(\"1e-5\"))\n print(check_valid_number_representation(\"-1.6 e -5.2\"))\n print(check_valid_number_representation(\"a\"))\n print(check_valid_number_representation(\"x 1\"))\n print(check_valid_number_representation(\"a -2\"))\n print(check_valid_number_representation(\"-\"))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "4587881", "language": "Python", "matching_score": 0.10577665269374847, "max_stars_count": 70, "path": "Solutions/123.py" }, { "content": "\"\"\"\nProblem:\n\nThere are M people sitting in a row of N seats, where M < N. Your task is to\nredistribute people such that there are no gaps between any of them, while keeping\noverall movement to a minimum.\n\nFor example, suppose you are faced with an input of [0, 1, 1, 0, 1, 0, 0, 0, 1], where\n0 represents an empty seat and 1 represents a person. In this case, one solution would\nbe to place the person on the right in the fourth seat. 
We can consider the cost of a\nsolution to be the sum of the absolute distance each person must move, so that the cost\nhere would be 5.\n\nGiven an input such as the one above, return the lowest possible cost of moving people\nto remove all gaps.\n\"\"\"\n\nfrom itertools import permutations\nfrom sys import maxsize\nfrom typing import List, Set\n\n\ndef get_people_indices(arr: List[int]) -> Set[int]:\n return set([index for index, occupied in enumerate(arr) if occupied])\n\n\ndef get_min_dist(vacant_spots: List[int], available_people: List[int]) -> int:\n # generating all permutations and returning the minimum cost\n min_dist = maxsize\n length = len(vacant_spots)\n permutation_list = list(permutations(range(length)))\n for permutation in permutation_list:\n dist = 0\n for i in range(length):\n k = permutation[i]\n dist += abs(vacant_spots[i] - available_people[k])\n min_dist = min(min_dist, dist)\n return min_dist\n\n\ndef get_lowest_cost(arr: List[int]) -> int:\n num_people = sum(arr)\n if num_people in (0, 1):\n return 0\n starting_people_indices = get_people_indices(arr)\n lowest_cost = maxsize\n # generating all possible valid seating arrangements and getting the minimum cost\n for offset in range(len(arr) - num_people + 1):\n subarr = arr[offset : offset + num_people]\n all_indices = set([offset + x for x in range(num_people)])\n people_indices = set([offset + x for x in get_people_indices(subarr)])\n\n vacant_indices = list(all_indices - people_indices)\n occupied_indices = list(starting_people_indices - people_indices)\n lowest_cost = min(lowest_cost, get_min_dist(vacant_indices, occupied_indices))\n return lowest_cost\n\n\nif __name__ == \"__main__\":\n print(get_lowest_cost([0, 1, 1, 0, 1, 0, 0, 0, 1]))\n print(get_lowest_cost([0, 1, 0, 0, 1, 0, 1, 0, 1]))\n print(get_lowest_cost([1, 1, 0, 0, 1, 0, 1, 0, 1]))\n print(get_lowest_cost([1, 1, 1, 1, 1, 0, 0, 0, 0]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n x n!)\nSPACE COMPLEXITY: O(n!)\n\"\"\"\n", "id": "12272318", "language": "Python", "matching_score": 1.7756606340408325, "max_stars_count": 70, "path": "Solutions/309.py" }, { "content": "\"\"\"\nProblem:\n\nSuppose you are given a table of currency exchange rates, represented as a 2D array.\nDetermine whether there is a possible arbitrage: that is, whether there is some\nsequence of trades you can make, starting with some amount A of any currency, so that\nyou can end up with some amount greater than A of that currency.\n\nThere are no transaction costs and you can trade fractional quantities.\n\"\"\"\n\n# Solution copied from:\n# https://github.com/vineetjohn/daily-coding-problem/blob/master/solutions/problem_032.py\n\nfrom math import log\nfrom typing import List, Union\n\nnumber = Union[int, float]\n\n\ndef arbitrage(table: List[List[number]]) -> bool:\n transformed_graph = [[-log(edge) for edge in row] for row in table]\n
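 # taking -log turns products of exchange rates into sums, so a profitable cycle (rate product > 1) becomes a negative-weight cycle that Bellman-Ford can detect\n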
 # Pick any source vertex -- we can run Bellman-Ford from any vertex and\n # get the right result\n source = 0\n n = len(transformed_graph)\n min_dist = [float(\"inf\")] * n\n min_dist[source] = 0\n # Relax edges |V - 1| times\n for _ in range(n - 1):\n for v in range(n):\n for w in range(n):\n if min_dist[w] > min_dist[v] + transformed_graph[v][w]:\n min_dist[w] = min_dist[v] + transformed_graph[v][w]\n # If we can still relax edges, then we have a negative cycle\n for v in range(n):\n for w in range(n):\n if min_dist[w] > min_dist[v] + transformed_graph[v][w]:\n return True\n return False\n\n\nif __name__ == \"__main__\":\n print(arbitrage([[1, 2], [0.5, 1]]))\n print(arbitrage([[1, 3, 4], [2, 1, 3], [5, 2, 1]]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n ^ 3)\nSPACE COMPLEXITY: O(n ^ 2)\n\"\"\"\n", "id": "7701406", "language": "Python", "matching_score": 0.5003170967102051, "max_stars_count": 70, "path": "Solutions/032.py" }, { "content": "\"\"\"\nProblem:\n\nPageRank is an algorithm used by Google to rank the importance of different websites.\nWhile there have been changes over the years, the central idea is to assign each site\na score based on the importance of other pages that link to that page.\n\nMore mathematically, suppose there are N sites, and each site i has a certain count Ci\nof outgoing links. Then the score for a particular site Sj is defined as :\n\nscore(Sj) = (1 - d) / N + d * (score(Sx) / Cx + score(Sy) / Cy + ... + score(Sz) / Cz)\nHere, Sx, Sy, ..., Sz denote the scores of all the other sites that have outgoing links\nto Sj, and d is a damping factor, usually set to around 0.85, used to model the\nprobability that a user will stop searching.\n\nGiven a directed graph of links between various websites, write a program that\ncalculates each site's page rank.\n\"\"\"\n\nfrom typing import Dict, List, Union\n\nfrom DataStructures.Graph import GraphDirectedUnweighted\n\nDAMPING_FACTOR = 0.85\n\n\ndef calculate_score(\n node: Union[int, str],\n graph: GraphDirectedUnweighted,\n page_scores: Dict[Union[int, str], float],\n) -> float:\n # calculate the page score of the given page\n aggregate_score = 0\n for other in graph.connections:\n if node in graph.connections[other]:\n if page_scores[other] is None:\n # considering there is no cyclic dependency\n page_scores[other] = calculate_score(other, graph, page_scores)\n aggregate_score += page_scores[other] / len(graph.connections[other])\n score = ((1 - DAMPING_FACTOR) / len(graph)) + (DAMPING_FACTOR * aggregate_score)\n return round(score, 2)\n\n\n
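# note: calculate_score memoizes results in page_scores and recurses through incoming links; it terminates here because the demo link graph is acyclic (a cyclic graph would need iterative power-method updates instead)\n\n\n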
def get_page_rank(graph: GraphDirectedUnweighted) -> List[Union[int, str]]:\n page_scores = {node: None for node in graph.connections}\n for node in graph.connections:\n page_scores[node] = calculate_score(node, graph, page_scores)\n # returning the pages sorted in the reverse order of their page scores\n return sorted(\n [page for page in page_scores],\n key=lambda page: page_scores[page],\n reverse=True,\n )\n\n\nif __name__ == \"__main__\":\n graph = GraphDirectedUnweighted()\n\n graph.add_edge(\"a\", \"b\")\n graph.add_edge(\"a\", \"c\")\n\n graph.add_edge(\"b\", \"c\")\n\n graph.add_edge(\"d\", \"a\")\n graph.add_edge(\"d\", \"b\")\n graph.add_edge(\"d\", \"c\")\n\n print(get_page_rank(graph))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n ^ 2)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "1791589", "language": "Python", "matching_score": 2.05841326713562, "max_stars_count": 70, "path": "Solutions/335.py" }, { "content": "\"\"\"\nProblem:\n\nWrite an algorithm that computes the reversal of a directed graph. For example, if a\ngraph consists of A -> B -> C, it should become A <- B <- C.\n\"\"\"\n\nfrom DataStructures.Graph import GraphDirectedUnweighted\n\n\ndef reverse_direction(graph: GraphDirectedUnweighted) -> None:\n visited = set()\n for node in graph.connections:\n # collect the edges to flip in to_change, since a set cannot be modified\n # while it is being iterated over\n visited.add(node)\n to_change = []\n for neighbour in graph.connections[node]:\n if neighbour not in visited:\n if node not in graph.connections[neighbour]:\n to_change.append(neighbour)\n for neighbour in to_change:\n graph.connections[neighbour].add(node)\n graph.connections[node].remove(neighbour)\n\n\nif __name__ == \"__main__\":\n graph = GraphDirectedUnweighted()\n\n graph.add_edge(\"A\", \"B\")\n graph.add_edge(\"B\", \"C\")\n\n print(graph)\n\n reverse_direction(graph)\n\n print(graph)\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(v + e)\nSPACE COMPLEXITY: O(v)\n\"\"\"\n", "id": "9675340", "language": "Python", "matching_score": 2.7290782928466797, "max_stars_count": 70, "path": "Solutions/218.py" }, { "content": "\"\"\"\nProblem:\n\nA bridge in a connected (undirected) graph is an edge that, if removed, causes the\ngraph to become disconnected. Find all the bridges in a graph.\n\"\"\"\n\nfrom sys import maxsize\nfrom typing import Dict, List, Optional, Set, Tuple\n\nfrom DataStructures.Graph import GraphUndirectedUnweighted\n\n\ndef get_bridges_helper(\n graph: GraphUndirectedUnweighted,\n node: int,\n visited: Set[int],\n parent: Dict[int, Optional[int]],\n low: Dict[int, int],\n disc: Dict[int, int],\n bridges: List[Tuple[int, int]],\n) -> None:\n # find all bridges using dfs\n visited.add(node)\n disc[node] = graph.time\n low[node] = graph.time\n graph.time += 1\n for neighbour in graph.connections[node]:\n if neighbour not in visited:\n parent[neighbour] = node\n get_bridges_helper(graph, neighbour, visited, parent, low, disc, bridges)\n # check if the subtree rooted with neighbour has a connection to one of the\n # ancestors of node\n low[node] = min(low[node], low[neighbour])\n # if the lowest vertex reachable from subtree under neighbour is below node\n # in DFS tree, then node-neighbour is a bridge\n if low[neighbour] > disc[node]:\n bridges.append((node, neighbour))\n elif neighbour != parent[node]:\n low[node] = min(low[node], disc[neighbour])\n\n\ndef get_bridges(graph: GraphUndirectedUnweighted) -> List[Tuple[int, int]]:\n visited = set()\n disc = {node: maxsize for node in graph.connections}\n low = {node: maxsize for node in graph.connections}\n
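 # disc[v] is the DFS discovery time of v; low[v] is the earliest discovery time reachable from v's subtree using at most one back edge\n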
O(v)\n\"\"\"\n", "id": "63860", "language": "Python", "matching_score": 3.043283462524414, "max_stars_count": 70, "path": "Solutions/262.py" }, { "content": "\"\"\"\nProblem:\n\nA graph is minimally-connected if it is connected and there is no edge that can be\nremoved while still leaving the graph connected. For example, any binary tree is\nminimally-connected.\n\nGiven an undirected graph, check if the graph is minimally-connected. You can choose to\nrepresent the graph as either an adjacency matrix or adjacency list.\n\"\"\"\n\nfrom copy import deepcopy\n\nfrom DataStructures.Graph import GraphUndirectedUnweighted\nfrom DataStructures.Queue import Queue\n\n\ndef is_minimally_connected(graph: GraphUndirectedUnweighted) -> bool:\n graph_copy = GraphUndirectedUnweighted()\n graph_copy.connections, graph_copy.nodes = deepcopy(graph.connections), graph.nodes\n # getting a random node for starting the traversal\n for node in graph.connections:\n start = node\n break\n # running bfs and checking if a node is visited more than once\n # (redundant edges present => not a minimally connected graph)\n visited = set([start])\n queue = Queue()\n queue.enqueue(start)\n while not queue.is_empty():\n node = queue.dequeue()\n for neighbour in graph_copy.connections[node]:\n graph_copy.connections[neighbour].remove(node)\n queue.enqueue(neighbour)\n if neighbour in visited:\n return False\n visited.add(neighbour)\n return True\n\n\nif __name__ == \"__main__\":\n graph = GraphUndirectedUnweighted()\n\n graph.add_edge(1, 2)\n graph.add_edge(1, 3)\n graph.add_edge(3, 4)\n\n print(graph)\n print(is_minimally_connected(graph))\n\n graph.add_edge(1, 4)\n\n print(graph)\n print(is_minimally_connected(graph))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n x e)\nSPACE COMPLEXITY: O(n + e)\n\"\"\"\n", "id": "7767053", "language": "Python", "matching_score": 1.856709599494934, "max_stars_count": 70, "path": "Solutions/182.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a start word, an end word, and a dictionary of valid words, find the shortest\ntransformation sequence from start to end such that only one letter is changed at each\nstep of the sequence, and each transformed word exists in the dictionary. If there is\nno possible transformation, return null. 
 # bfs\n queue = Queue()\n seen = set()\n queue.enqueue(start)\n seen.add(start)\n while not queue.is_empty():\n node = queue.dequeue()\n for neighbour in graph.connections[node]:\n if neighbour not in seen:\n parent_map[neighbour] = node\n queue.enqueue(neighbour)\n seen.add(neighbour)\n if stop not in seen:\n # stop cannot be reached from start\n return None\n # generating the path\n path = [stop]\n while parent_map[path[-1]] is not None:\n path.append(parent_map[path[-1]])\n if path[-1] == start:\n break\n return path[::-1]\n\n\ndef min_transform(start: str, stop: str, dictionary: List[str]) -> Optional[List[str]]:\n if start not in dictionary:\n dictionary.append(start)\n if stop not in dictionary:\n return None\n\n graph = create_graph(dictionary)\n return bfs_path(graph, start, stop)\n\n\nif __name__ == \"__main__\":\n print(min_transform(\"dog\", \"cat\", [\"dot\", \"dop\", \"dat\", \"cat\"]))\n print(min_transform(\"dog\", \"cat\", [\"dot\", \"tod\", \"dat\", \"dar\"]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O((n ^ 2) x len(word))\nSPACE COMPLEXITY: O(n ^ 2)\n\"\"\"\n", "id": "8187227", "language": "Python", "matching_score": 1.8833341598510742, "max_stars_count": 70, "path": "Solutions/170.py" }, { "content": "\"\"\"\nProblem:\n\nYou are given a set of synonyms, such as (big, large) and (eat, consume). Using this\nset, determine if two sentences with the same number of words are equivalent.\n\nFor example, the following two sentences are equivalent:\n\n\"He wants to eat food.\"\n\"He wants to consume food.\"\nNote that the synonyms (a, b) and (a, c) do not necessarily imply (b, c): consider the\ncase of (coach, bus) and (coach, teacher).\n\nFollow-up: what if we can assume that (a, b) and (a, c) do in fact imply (b, c)?\n\"\"\"\n\nfrom typing import List, Tuple, Union\n\nfrom DataStructures.Graph import GraphUndirectedUnweighted\n\n\ndef generate_graph(synonyms: List[Tuple[str, str]]) -> GraphUndirectedUnweighted:\n graph = GraphUndirectedUnweighted()\n for word_1, word_2 in synonyms:\n graph.add_edge(word_1, word_2)\n return graph\n\n\n
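# note: equivalence below requires each differing word pair to be directly connected in the synonym graph; synonymy is not treated as transitive (see the follow-up note at the end of the file)\n\n\n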
Using this\nset, determine if two sentences with the same number of words are equivalent.\n\nFor example, the following two sentences are equivalent:\n\n\"He wants to eat food.\"\n\"He wants to consume food.\"\nNote that the synonyms (a, b) and (a, c) do not necessarily imply (b, c): consider the\ncase of (coach, bus) and (coach, teacher).\n\nFollow-up: what if we can assume that (a, b) and (a, c) do in fact imply (b, c)?\n\"\"\"\n\nfrom typing import List, Tuple\n\nfrom DataStructures.Graph import GraphUndirectedUnweighted\n\n\ndef generate_graph(synonyms: List[Tuple[str, str]]) -> GraphUndirectedUnweighted:\n graph = GraphUndirectedUnweighted()\n for word_1, word_2 in synonyms:\n graph.add_edge(word_1, word_2)\n return graph\n\n\ndef check_equivalence(\n sentence_1: str, sentence_2: str, synonyms: List[Tuple[str, str]]\n) -> bool:\n graph = generate_graph(synonyms)\n word_list_1 = sentence_1.strip().split()\n word_list_2 = sentence_2.strip().split()\n\n if len(word_list_1) != len(word_list_2):\n return False\n for word_1, word_2 in zip(word_list_1, word_list_2):\n if word_1 != word_2:\n if word_1 not in graph.connections or word_2 not in graph.connections:\n return False\n if word_2 not in graph.connections[word_1]:\n return False\n return True\n\n\nif __name__ == \"__main__\":\n print(\n check_equivalence(\n \"He wants to eat food.\", \"He wants to consume food.\", [(\"eat\", \"consume\")]\n )\n )\n print(\n check_equivalence(\n \"He is waiting for the bus.\",\n \"He is waiting for the teacher.\",\n [(\"coach\", \"bus\"), (\"coach\", \"teacher\")],\n )\n )\n\n# if we can assume that (a, b) and (a, c) do in fact imply (b, c), then for each word,\n# we would have to run a dfs/bfs to get all words similar to any given word, instead of\n# simply comparing: 'word_2 not in graph.connections[word_1]'\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(characters)\nSPACE COMPLEXITY: O(words ^ 2)\n\"\"\"\n", "id": "8009305", "language": "Python", "matching_score": 2.726341485977173, "max_stars_count": 70, "path": "Solutions/345.py" }, { "content": "\"\"\"\nProblem:\n\nGiven an undirected graph G, check whether it is bipartite. 
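# Illustrative addendum for the follow-up above: if (a, b) and (a, c) imply (b, c),
# synonyms form equivalence classes, which a small union-find tracks directly.
# A hedged, self-contained sketch, independent of the graph class used above.

def check_equivalence_transitive(sentence_1, sentence_2, synonyms):
    parent = {}
    def find(word):
        parent.setdefault(word, word)
        while parent[word] != word:
            parent[word] = parent[parent[word]]  # path halving
            word = parent[word]
        return word
    for a, b in synonyms:
        parent[find(a)] = find(b)  # union the two synonym classes
    words_1, words_2 = sentence_1.split(), sentence_2.split()
    return len(words_1) == len(words_2) and all(
        find(a) == find(b) for a, b in zip(words_1, words_2)
    )

assert check_equivalence_transitive(
    "He wants to eat food.", "He wants to consume food.", [("eat", "consume")]
)
assert check_equivalence_transitive(
    "ride the bus", "ride the coach", [("coach", "bus"), ("coach", "teacher")]
)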
Recall that a graph is\nbipartite if its vertices can be divided into two independent sets, U and V, such that\nno edge connects vertices of the same set.\n\"\"\"\n\nfrom DataStructures.Graph import GraphUndirectedUnweighted\n\n\ndef is_bipartite(graph: GraphUndirectedUnweighted) -> bool:\n # standard bfs 2-coloring: a graph is bipartite iff no edge ever joins two\n # nodes of the same color (each component is colored independently)\n color = {}\n for start in graph.connections:\n if start in color:\n continue\n color[start] = 0\n queue = [start]\n while queue:\n node = queue.pop(0)\n for neighbour in graph.connections[node]:\n if neighbour not in color:\n color[neighbour] = 1 - color[node]\n queue.append(neighbour)\n elif color[neighbour] == color[node]:\n return False\n return True\n\n\nif __name__ == \"__main__\":\n graph1 = GraphUndirectedUnweighted()\n\n graph1.add_edge(1, 2)\n graph1.add_edge(2, 3)\n graph1.add_edge(1, 4)\n\n print(is_bipartite(graph1))\n\n graph1.add_edge(1, 3)\n\n print(is_bipartite(graph1))\n\n graph2 = GraphUndirectedUnweighted()\n\n graph2.add_edge(1, 2)\n graph2.add_edge(2, 3)\n graph2.add_edge(3, 4)\n graph2.add_edge(4, 1)\n\n print(is_bipartite(graph2))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(v + e)\nSPACE COMPLEXITY: O(v)\n[e = edges, v = vertices]\n\"\"\"\n", "id": "4748137", "language": "Python", "matching_score": 1.7840125560760498, "max_stars_count": 70, "path": "Solutions/207.py" }, { "content": "from typing import Union\n\n\nclass GraphUndirectedUnweighted:\n \"\"\"\n Graph Undirected Unweighted Class\n\n Functions:\n add_node: function to add a node in the graph\n add_edge: function to add an edge between 2 nodes in the graph\n \"\"\"\n\n def __init__(self) -> None:\n self.connections = {}\n self.nodes = 0\n\n def __repr__(self) -> str:\n return str(self.connections)\n\n def __len__(self) -> int:\n return self.nodes\n\n def add_node(self, node: Union[int, str]) -> None:\n # Add a node in the graph if it is not in the graph\n if node not in self.connections:\n self.connections[node] = set()\n self.nodes += 1\n\n def add_edge(self, node1: Union[int, str], node2: Union[int, str]) -> None:\n # Add an edge between 2 nodes in the graph\n self.add_node(node1)\n self.add_node(node2)\n self.connections[node1].add(node2)\n self.connections[node2].add(node1)\n\n\nclass GraphDirectedUnweighted:\n \"\"\"\n Graph Directed Unweighted Class\n\n Functions:\n add_node: function to add a node in the graph\n add_edge: function to add an edge between 2 nodes in the graph\n \"\"\"\n\n def __init__(self) -> None:\n self.connections = {}\n self.nodes = 0\n\n def __repr__(self) -> str:\n return str(self.connections)\n\n def __len__(self) -> int:\n return self.nodes\n\n def add_node(self, node: Union[int, str]) -> None:\n # Add a node in the graph if it is not in the graph\n if node not in self.connections:\n self.connections[node] = set()\n self.nodes += 1\n\n def add_edge(self, node1: Union[int, str], node2: Union[int, str]) -> None:\n # Add an edge between 2 nodes in the graph\n self.add_node(node1)\n self.add_node(node2)\n self.connections[node1].add(node2)\n\n\nclass GraphUndirectedWeighted:\n \"\"\"\n Graph Undirected Weighted Class\n\n Functions:\n add_node: function to add a node in the graph\n add_edge: function to add an edge between 2 nodes in the graph\n \"\"\"\n\n def __init__(self) -> None:\n self.connections = {}\n self.nodes = 0\n\n def __repr__(self) -> str:\n return str(self.connections)\n\n def __len__(self) -> int:\n return self.nodes\n\n def add_node(self, node: Union[int, str]) -> None:\n # Add a node in the graph if it is not in the graph\n if node not in 
self.connections:\n self.connections[node] = {}\n self.nodes += 1\n\n def add_edge(\n self, node1: Union[int, str], node2: Union[int, str], weight: int\n ) -> None:\n # Add an edge between 2 nodes in the graph\n self.add_node(node1)\n self.add_node(node2)\n self.connections[node1][node2] = weight\n self.connections[node2][node1] = weight\n\n\nclass GraphDirectedWeighted:\n \"\"\"\n Graph Directed Weighted Class\n\n Functions:\n add_node: function to add a node in the graph\n add_edge: function to add an edge between 2 nodes in the graph\n \"\"\"\n\n def __init__(self) -> None:\n self.connections = {}\n self.nodes = 0\n\n def __repr__(self) -> str:\n return str(self.connections)\n\n def __len__(self) -> int:\n return self.nodes\n\n def add_node(self, node: Union[int, str]) -> None:\n # Add a node in the graph if it is not in the graph\n if node not in self.connections:\n self.connections[node] = {}\n self.nodes += 1\n\n def add_edge(\n self, node1: Union[int, str], node2: Union[int, str], weight: int\n ) -> None:\n # Add an edge between 2 nodes in the graph\n self.add_node(node1)\n self.add_node(node2)\n self.connections[node1][node2] = weight\n", "id": "6403782", "language": "Python", "matching_score": 2.402376174926758, "max_stars_count": 70, "path": "Solutions/DataStructures/Graph.py" }, { "content": "\"\"\"\nProblem:\n\nRecall that the minimum spanning tree is the subset of edges of a graph that connect all\nits vertices with the smallest possible total edge weight. Given an undirected graph\nwith weighted edges, compute the maximum weight spanning tree.\n\"\"\"\n\nfrom DataStructures.Graph import GraphUndirectedWeighted\n\n\ndef get_maximum_spanning_tree(graph: GraphUndirectedWeighted) -> int:\n # Prim's algorithm, always picking the heaviest edge crossing the cut\n # (running any minimum spanning tree algorithm on negated weights yields\n # the maximum spanning tree)\n remaining_nodes = set(graph.connections.keys())\n start_node = remaining_nodes.pop()\n in_tree = set([start_node])\n weight = 0\n while remaining_nodes:\n best_weight, best_node = None, None\n for node in in_tree:\n for neighbour, edge_weight in graph.connections[node].items():\n if neighbour in remaining_nodes and (best_weight is None or edge_weight > best_weight):\n best_weight, best_node = edge_weight, neighbour\n if best_node is None:\n raise ValueError(\"the graph is not connected\")\n in_tree.add(best_node)\n remaining_nodes.remove(best_node)\n weight += best_weight\n return weight\n\n\nif __name__ == \"__main__\":\n graph = GraphUndirectedWeighted()\n\n graph.add_edge(1, 2, 5)\n graph.add_edge(1, 3, 2)\n graph.add_edge(3, 2, 1)\n graph.add_edge(3, 4, 3)\n graph.add_edge(2, 4, 4)\n\n print(graph)\n print(get_maximum_spanning_tree(graph))\n\n graph = GraphUndirectedWeighted()\n\n graph.add_edge(1, 2, 1)\n graph.add_edge(1, 3, 2)\n graph.add_edge(3, 2, 3)\n\n print(graph)\n print(get_maximum_spanning_tree(graph))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n x e)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "12128075", "language": "Python", "matching_score": 2.0756051540374756, "max_stars_count": 70, "path": "Solutions/234.py" }, { "content": "\"\"\"\nProblem:\n\nA group of houses is connected to the main water plant by means of a set of pipes. 
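# Illustrative addendum: the same maximum spanning tree weight via Kruskal's
# algorithm on edges sorted by descending weight (equivalently, an MST of the
# negated weights). A hedged, self-contained sketch over a plain edge list.

def max_spanning_tree_weight(num_nodes, edges):
    # edges: list of (weight, u, v); nodes are assumed to be labelled 0..num_nodes-1
    parent = list(range(num_nodes))
    def find(x):
        while parent[x] != x:
            parent[x] = parent[parent[x]]
            x = parent[x]
        return x
    total = 0
    for weight, u, v in sorted(edges, reverse=True):
        root_u, root_v = find(u), find(v)
        if root_u != root_v:  # the edge joins two components, so keep it
            parent[root_u] = root_v
            total += weight
    return total

# same graph as the first example above, nodes renamed 1..4 -> 0..3
assert max_spanning_tree_weight(4, [(5, 0, 1), (2, 0, 2), (1, 2, 1), (3, 2, 3), (4, 1, 3)]) == 12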
A\nhouse can either be connected by a set of pipes extending directly to the plant, or\nindirectly by a pipe to a nearby house which is otherwise connected.\n\nFor example, here is a possible configuration, where A, B, and C are houses, and arrows\nrepresent pipes: A <--> B <--> C <--> plant\n\nEach pipe has an associated cost, which the utility company would like to minimize.\nGiven an undirected graph of pipe connections, return the lowest cost configuration of\npipes such that each house has access to water.\n\nIn the following setup, for example, we can remove all but the pipes from plant to A,\nplant to B, and B to C, for a total cost of 16.\n\npipes = {\n 'plant': {'A': 1, 'B': 5, 'C': 20},\n 'A': {'C': 15},\n 'B': {'C': 10},\n 'C': {}\n}\n\"\"\"\n\nfrom sys import maxsize\nfrom typing import Dict\n\nfrom DataStructures.Graph import GraphUndirectedWeighted\nfrom DataStructures.PriorityQueue import MinPriorityQueue\n\n\ndef prim(graph: GraphUndirectedWeighted, start: str) -> Dict[str, int]:\n # Prim's algorithm for the minimum spanning tree: key[node] ends up holding\n # the weight of the edge that connects node to the tree\n key = {node: maxsize for node in graph.connections}\n key[start] = 0\n in_tree = set()\n priority_queue = MinPriorityQueue()\n [priority_queue.push(node, weight) for node, weight in key.items()]\n # running prim's algorithm\n while not priority_queue.isEmpty():\n node = priority_queue.extract_min()\n in_tree.add(node)\n for neighbour in graph.connections[node]:\n edge_weight = graph.connections[node][neighbour]\n if neighbour not in in_tree and key[neighbour] > edge_weight:\n key[neighbour] = edge_weight\n priority_queue.update_key(neighbour, edge_weight)\n return key\n\n\ndef get_minimum_cost(pipes: Dict[str, Dict[str, int]]) -> int:\n # function to get the minimum configuration distance of the pipes\n # (a lowest cost configuration reaching every house is exactly a minimum\n # spanning tree, not a shortest path tree)\n # graph generation\n graph = GraphUndirectedWeighted()\n for src in pipes:\n for dest in pipes[src]:\n graph.add_edge(src, dest, pipes[src][dest])\n # minimum cost calculation\n key = prim(graph, \"plant\")\n return sum(key.values())\n\n\nif __name__ == \"__main__\":\n print(\n get_minimum_cost(\n pipes={\n \"plant\": {\"A\": 1, \"B\": 5, \"C\": 20},\n \"A\": {\"C\": 15},\n \"B\": {\"C\": 10},\n \"C\": {},\n }\n )\n )\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O((n + e) x log(n))\nSPACE COMPLEXITY: O(n + e)\n[n = number of nodes, e = number of edges]\n\"\"\"\n", "id": "1305756", "language": "Python", "matching_score": 4.681183338165283, "max_stars_count": 70, "path": "Solutions/299.py" }, { "content": "\"\"\"\nProblem:\n\nA network consists of nodes labeled 0 to N. You are given a list of edges (a, b, t),\ndescribing the time t it takes for a message to be sent from node a to node b. 
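# Illustrative addendum: why a shortest-path tree is not the right structure for
# the pipe problem above. In this hypothetical 3-node layout the shortest-path
# tree rooted at "plant" costs 20, while the minimum spanning tree costs 11.

costs = {("plant", "A"): 10, ("plant", "B"): 10, ("A", "B"): 1}
# shortest-path tree from plant: both A and B are cheapest via their direct pipes
shortest_path_tree_cost = costs[("plant", "A")] + costs[("plant", "B")]
# minimum spanning tree: plant-A plus the cheap A-B pipe
minimum_spanning_tree_cost = costs[("plant", "A")] + costs[("A", "B")]
assert shortest_path_tree_cost == 20 and minimum_spanning_tree_cost == 11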
Whenever\na node receives a message, it immediately passes the message on to a neighboring node,\nif possible.\n\nAssuming all nodes are connected, determine how long it will take for every node to\nreceive a message that begins at node 0.\n\nFor example, given N = 5, and the following edges:\n\nedges = [\n (0, 1, 5),\n (0, 2, 3),\n (0, 5, 4),\n (1, 3, 8),\n (2, 3, 1),\n (3, 5, 10),\n (3, 4, 5)\n]\nYou should return 9, because propagating the message from 0 -> 2 -> 3 -> 4 will take\nthat much time\n\"\"\"\n\nfrom sys import maxsize\nfrom typing import Dict, List, Optional, Tuple\n\nfrom DataStructures.Graph import GraphDirectedWeighted\nfrom DataStructures.PriorityQueue import MinPriorityQueue\n\n\ndef dijkstra(\n graph: GraphDirectedWeighted, start: int\n) -> Tuple[Dict[int, int], Dict[int, Optional[int]]]:\n dist = {node: maxsize for node in graph.connections}\n parent = {node: None for node in graph.connections}\n dist[start] = 0\n priority_queue = MinPriorityQueue()\n [priority_queue.push(node, weight) for node, weight in dist.items()]\n while not priority_queue.isEmpty():\n node = priority_queue.extract_min()\n for neighbour in graph.connections[node]:\n if dist[neighbour] > dist[node] + graph.connections[node][neighbour]:\n dist[neighbour] = dist[node] + graph.connections[node][neighbour]\n priority_queue.update_key(neighbour, dist[neighbour])\n parent[neighbour] = node\n return dist, parent\n\n\ndef get_propagation_time(edges: List[Tuple[int, int, int]]) -> int:\n graph = GraphDirectedWeighted()\n for src, dest, wt in edges:\n graph.add_edge(src, dest, wt)\n\n time, _ = dijkstra(graph, 0)\n return max(time.values())\n\n\nif __name__ == \"__main__\":\n edges = [\n (0, 1, 5),\n (0, 2, 3),\n (0, 5, 4),\n (1, 3, 8),\n (2, 3, 1),\n (3, 5, 10),\n (3, 4, 5),\n ]\n print(get_propagation_time(edges))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(v + e x log(v))\nSPACE COMPLEXITY: O(v)\n\"\"\"\n", "id": "5300017", "language": "Python", "matching_score": 4.2682108879089355, "max_stars_count": 70, "path": "Solutions/270.py" }, { "content": "\"\"\"\nProblem:\n\nYou are given a huge list of airline ticket prices between different cities around the\nworld on a given day. These are all direct flights. Each element in the list has the\nformat (source_city, destination, price).\n\nConsider a user who is willing to take up to k connections from their origin city A to\ntheir destination B. 
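# Illustrative addendum: the same single-source shortest paths with the standard
# library's heapq instead of the custom MinPriorityQueue (lazy deletion: stale
# heap entries are simply skipped when popped). A hedged sketch over a plain
# adjacency dict mirroring the example above.

import heapq

def dijkstra_heapq(adjacency, start):
    # adjacency: dict mapping node -> dict of neighbour -> edge weight
    dist = {start: 0}
    heap = [(0, start)]
    while heap:
        d, node = heapq.heappop(heap)
        if d > dist.get(node, float("inf")):
            continue  # stale entry
        for neighbour, weight in adjacency[node].items():
            new_dist = d + weight
            if new_dist < dist.get(neighbour, float("inf")):
                dist[neighbour] = new_dist
                heapq.heappush(heap, (new_dist, neighbour))
    return dist

adjacency = {0: {1: 5, 2: 3, 5: 4}, 1: {3: 8}, 2: {3: 1}, 3: {5: 10, 4: 5}, 4: {}, 5: {}}
assert max(dijkstra_heapq(adjacency, 0).values()) == 9  # 0 -> 2 -> 3 -> 4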
Find the cheapest fare possible for this journey and print the\nitinerary for that journey.\n\nFor example, our traveler wants to go from JFK to LAX with up to 3 connections, and our\ninput flights are as follows:\n\n[\n ('JFK', 'ATL', 150),\n ('ATL', 'SFO', 400),\n ('ORD', 'LAX', 200),\n ('LAX', 'DFW', 80),\n ('JFK', 'HKG', 800),\n ('ATL', 'ORD', 90),\n ('JFK', 'LAX', 500),\n]\nDue to some improbably low flight prices, the cheapest itinerary would be\nJFK -> ATL -> ORD -> LAX, costing $440.\n\"\"\"\n\nfrom sys import maxsize\nfrom typing import Dict, List, Optional, Tuple\n\nfrom DataStructures.Graph import GraphDirectedWeighted\nfrom DataStructures.PriorityQueue import MinPriorityQueue\n\n\ndef modified_dijkstra(\n graph: GraphDirectedWeighted, start: str, k: int\n) -> Tuple[Dict[str, int], Dict[str, Optional[str]]]:\n dist = {node: maxsize for node in graph.connections}\n parent = {node: None for node in graph.connections}\n dist[start] = 0\n priority_queue = MinPriorityQueue()\n [priority_queue.push(node, weight) for node, weight in dist.items()]\n\n while not priority_queue.is_empty():\n node = priority_queue.extract_min()\n ancestors = 0\n parent_node = parent[node]\n # calculating ancestors\n while parent_node:\n ancestors += 1\n parent_node = parent[parent_node]\n # limiting distance update till k moves\n if ancestors <= k:\n for neighbour in graph.connections[node]:\n if dist[neighbour] > dist[node] + graph.connections[node][neighbour]:\n dist[neighbour] = dist[node] + graph.connections[node][neighbour]\n parent[neighbour] = node\n priority_queue.update_key(neighbour, dist[neighbour])\n return dist, parent\n\n\ndef generate_path(\n flights: List[Tuple[str, str, int]], start: str, dest: str, k: int\n) -> Optional[Tuple[int, List[str]]]:\n # graph generation (loop variables renamed so they do not shadow the dest parameter)\n graph = GraphDirectedWeighted()\n for flight_src, flight_dest, flight_cost in flights:\n graph.add_edge(flight_src, flight_dest, flight_cost)\n # running dijkstra's algorithm\n dist, parent = modified_dijkstra(graph, start, k)\n # getting the cost and path\n if not parent[dest]:\n return None\n path, cost = [dest], dist[dest]\n curr = parent[dest]\n while curr:\n path.append(curr)\n curr = parent[curr]\n return cost, path[::-1]\n\n\nif __name__ == \"__main__\":\n flights = [\n (\"JFK\", \"ATL\", 150),\n (\"ATL\", \"SFO\", 400),\n (\"ORD\", \"LAX\", 200),\n (\"LAX\", \"DFW\", 80),\n (\"JFK\", \"HKG\", 800),\n (\"ATL\", \"ORD\", 90),\n (\"JFK\", \"LAX\", 500),\n ]\n print(generate_path(flights, \"JFK\", \"LAX\", 3))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(e x v x log(v))\nSPACE COMPLEXITY: O(v ^ 2)\n[even though dijkstra's algorithm runs in O(e x log(v)) to lock maximum k moves, the\ncomplexity increases to O(e x v x log(v))]\n\"\"\"\n", "id": "23193", "language": "Python", "matching_score": 1.1761505603790283, "max_stars_count": 70, "path": "Solutions/346.py" }, { "content": "\"\"\"\nProblem:\n\nA classroom consists of N students, whose friendships can be represented in an\nadjacency list. For example, the following describes a situation where 0 is friends\nwith 1 and 2, 3 is friends with 6, and so on.\n\n{\n 0: [1, 2],\n 1: [0, 5],\n 2: [0],\n 3: [6],\n 4: [],\n 5: [1],\n 6: [3]\n}\nEach student can be placed in a friend group, which can be defined as the transitive\nclosure of that student's friendship relations. In other words, this is the smallest\nset such that no student in the group has any friends outside this group. 
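# Illustrative addendum: a dynamic-programming take on the bounded-connections
# fare search above (Bellman-Ford limited to a fixed number of edge relaxations).
# Hedged sketch: max_edges bounds the number of flights taken, and the demo
# treats "up to 3 connections" as at most 3 flights.

def cheapest_fare(flights, start, dest, max_edges):
    INF = float("inf")
    best = {start: 0}
    for _ in range(max_edges):
        # relax every flight once per round, reading only last round's values
        updated = dict(best)
        for src, dst, price in flights:
            if best.get(src, INF) + price < updated.get(dst, INF):
                updated[dst] = best[src] + price
        best = updated
    return best.get(dest)

flights = [
    ("JFK", "ATL", 150), ("ATL", "SFO", 400), ("ORD", "LAX", 200),
    ("LAX", "DFW", 80), ("JFK", "HKG", 800), ("ATL", "ORD", 90),
    ("JFK", "LAX", 500),
]
assert cheapest_fare(flights, "JFK", "LAX", 3) == 440  # JFK -> ATL -> ORD -> LAX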
For the\nexample above, the friend groups would be {0, 1, 2, 5}, {3, 6}, {4}.\n\nGiven a friendship list such as the one above, determine the number of friend groups\nin the class.\n\"\"\"\n\nfrom typing import Dict, List, Set\n\nfrom DataStructures.Graph import GraphUndirectedUnweighted\n\n\ndef get_components_dfs_helper(\n graph: GraphUndirectedUnweighted, node: int, component: Set[int], visited: Set[int]\n) -> None:\n visited.add(node)\n component.add(node)\n for neighbour in graph.connections[node]:\n if neighbour not in visited:\n get_components_dfs_helper(graph, neighbour, component, visited)\n\n\ndef get_components(graph: GraphUndirectedUnweighted) -> List[Set[int]]:\n components = []\n visited = set()\n for node in graph.connections:\n if node not in visited:\n component = set()\n get_components_dfs_helper(graph, node, component, visited)\n components.append(component)\n return components\n\n\ndef get_friendship_transitive_closure(\n friendship_list: Dict[int, List[int]],\n) -> List[Set[int]]:\n graph = GraphUndirectedUnweighted()\n for node in friendship_list:\n graph.add_node(node)\n for neighbour in friendship_list[node]:\n graph.add_edge(node, neighbour)\n return get_components(graph)\n\n\nif __name__ == \"__main__\":\n print(\n get_friendship_transitive_closure(\n {0: [1, 2], 1: [0, 5], 2: [0], 3: [6], 4: [], 5: [1], 6: [3]}\n )\n )\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n + e)\nSPACE COMPLEXITY: O(n)\n[n = nodes, e = edges]\n\"\"\"\n", "id": "1370700", "language": "Python", "matching_score": 3.005552053451538, "max_stars_count": 70, "path": "Solutions/279.py" }, { "content": "\"\"\"\nProblem:\n\nGiven an undirected graph, determine if it contains a cycle.\n\"\"\"\n\nfrom typing import Set\n\nfrom DataStructures.Graph import GraphUndirectedUnweighted\n\n\ndef get_components_helper(\n graph: GraphUndirectedUnweighted,\n node: int,\n component: Set[int],\n visited: Set[int],\n degree: int = 0,\n) -> int:\n # function to get the degree of the component\n # generating the degree recursively using dfs\n visited.add(node)\n component.add(node)\n for neighbour in graph.connections[node]:\n degree += 1\n if neighbour not in visited:\n degree += get_components_helper(graph, neighbour, component, visited)\n return degree\n\n\ndef is_cyclic(graph: GraphUndirectedUnweighted) -> bool:\n visited = set()\n for node in graph.connections:\n if node not in visited:\n component = set()\n component_degree = get_components_helper(graph, node, component, visited)\n # degree sum above 2 x (n - 1) means at least n edges => a cycle exists\n if component_degree > 2 * (len(component) - 1):\n return True\n return False\n\n\nif __name__ == \"__main__\":\n graph = GraphUndirectedUnweighted()\n graph.add_edge(1, 2)\n graph.add_edge(1, 3)\n graph.add_edge(1, 4)\n\n print(is_cyclic(graph))\n\n graph.add_edge(2, 4)\n\n print(is_cyclic(graph))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n + e)\nSPACE COMPLEXITY: O(n)\n[n = nodes, e = edges]\n\"\"\"\n", "id": "7433891", "language": "Python", "matching_score": 0.36687105894088745, "max_stars_count": 70, "path": "Solutions/280.py" }, { "content": "\"\"\"\nProblem:\n\nConway's Game of Life takes place on an infinite two-dimensional board of square cells.\nEach cell is either dead or alive, and at each tick, the following rules apply:\n\nAny live cell with less than two live neighbours dies. Any live cell with two or three\nlive neighbours remains living. Any live cell with more than three live neighbours\ndies. Any dead cell with exactly three live neighbours becomes a live cell. 
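# Illustrative addendum: undirected-cycle detection via union-find, an
# alternative to the degree-count argument in the solution above. An edge whose
# endpoints already share a root closes a cycle. Hedged, self-contained sketch.

def has_cycle(edges):
    parent = {}
    def find(x):
        parent.setdefault(x, x)
        while parent[x] != x:
            parent[x] = parent[parent[x]]  # path halving
            x = parent[x]
        return x
    for u, v in edges:
        root_u, root_v = find(u), find(v)
        if root_u == root_v:
            return True  # u and v were already connected
        parent[root_u] = root_v
    return False

assert not has_cycle([(1, 2), (1, 3), (1, 4)])
assert has_cycle([(1, 2), (1, 3), (1, 4), (2, 4)])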
A cell\nneighbours another cell if it is horizontally, vertically, or diagonally adjacent.\n\nImplement Conway's Game of Life. It should be able to be initialized with a starting\nlist of live cell coordinates and the number of steps it should run for. Once\ninitialized, it should print out the board state at each step. Since it's an infinite\nboard, print out only the relevant coordinates, i.e. from the top-leftmost live cell to\nbottom-rightmost live cell.\n\nYou can represent a live cell with an asterisk (*) and a dead cell with a dot (.).\n\"\"\"\n\nfrom __future__ import annotations\nfrom sys import maxsize\nfrom typing import Any, List, Set\n\n\nclass Coordinate:\n def __init__(self, x: int, y: int) -> None:\n self.x = x\n self.y = y\n\n def __eq__(self, other: Any) -> bool:\n if type(other) != Coordinate:\n return False\n return self.x == other.x and self.y == other.y\n\n def __hash__(self) -> int:\n return hash((self.x, self.y))\n\n def __repr__(self) -> str:\n return f\"({self.x}, {self.y})\"\n\n def get_neighbours(self) -> List[Coordinate]:\n return [\n Coordinate(self.x - 1, self.y),\n Coordinate(self.x - 1, self.y + 1),\n Coordinate(self.x, self.y + 1),\n Coordinate(self.x + 1, self.y + 1),\n Coordinate(self.x + 1, self.y),\n Coordinate(self.x + 1, self.y - 1),\n Coordinate(self.x, self.y - 1),\n Coordinate(self.x - 1, self.y - 1),\n ]\n\n\ndef show_board(alive_cells: Set[Coordinate]) -> None:\n x_max, x_min = -maxsize, maxsize\n y_max, y_min = -maxsize, maxsize\n # generating bounds\n for cell in alive_cells:\n x, y = cell.x, cell.y\n x_min, x_max = min(x_min, x), max(x_max, x)\n y_min, y_max = min(y_min, y), max(y_max, y)\n # displaying the board\n for x in range(x_min, x_max + 1):\n for y in range(y_min, y_max + 1):\n if Coordinate(x, y) in alive_cells:\n print(\"*\", end=\" \")\n else:\n print(\".\", end=\" \")\n print()\n print()\n\n\ndef play_game(board: List[Coordinate], n: int) -> None:\n alive_cells = set(board)\n print(\"Initial Board of Game of Life:\")\n show_board(alive_cells)\n\n for i in range(1, n + 1):\n # the alive_cells set cannot be modified inside the loop, using dead and alive\n # lists to track changes\n dead = []\n alive = []\n for cell in alive_cells:\n alive_neighbours = 0\n neighbours = cell.get_neighbours()\n for neighbour in neighbours:\n # checking how many live neighbours the cell has\n if neighbour in alive_cells:\n alive_neighbours += 1\n # checking how many live neighbours the cell (neighbour) has\n if neighbour not in alive_cells:\n neighbours_of_neighbour = neighbour.get_neighbours()\n alive_neighbours_of_neighbour = 0\n for neighbour_of_neighbour in neighbours_of_neighbour:\n if neighbour_of_neighbour in alive_cells:\n alive_neighbours_of_neighbour += 1\n if alive_neighbours_of_neighbour == 3:\n alive.append(neighbour)\n if alive_neighbours < 2 or alive_neighbours > 3:\n dead.append(cell)\n\n # removing dead cells\n for cell in dead:\n alive_cells.remove(cell)\n # adding new live cells\n for cell in alive:\n alive_cells.add(cell)\n # displaying board\n print(f\"Iteration {i}:\")\n show_board(alive_cells)\n\n\nif __name__ == \"__main__\":\n board_0 = [Coordinate(0, 0), Coordinate(1, 0), Coordinate(1, 1), Coordinate(1, 5)]\n play_game(board_0, 3)\n\n board_1 = [\n Coordinate(0, 0),\n Coordinate(1, 0),\n Coordinate(1, 1),\n Coordinate(1, 5),\n Coordinate(2, 5),\n Coordinate(2, 6),\n ]\n play_game(board_1, 4)\n\n board_2 = [\n Coordinate(0, 0),\n Coordinate(1, 0),\n Coordinate(1, 1),\n Coordinate(2, 5),\n Coordinate(2, 6),\n Coordinate(3, 9),\n 
Coordinate(4, 8),\n Coordinate(5, 10),\n ]\n play_game(board_2, 4)\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O((alive cells ^ 2) x n)\nSPACE COMPLEXITY: O(alive cells)\n\"\"\"\n", "id": "8282997", "language": "Python", "matching_score": 1.4841264486312866, "max_stars_count": 70, "path": "Solutions/039.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a 2D board of characters and a word, find if the word exists in the grid.\n\nThe word can be constructed from letters of sequentially adjacent cell, where \"adjacent\"\ncells are those horizontally or vertically neighboring. The same letter cell may not be\nused more than once.\n\nFor example, given the following board:\n\n[\n ['A','B','C','E'],\n ['S','F','C','S'],\n ['A','D','E','E']\n]\nexists(board, \"ABCCED\") returns true, exists(board, \"SEE\") returns true,\nexists(board, \"ABCB\") returns false.\n\"\"\"\n\nfrom typing import List, Set, Tuple\n\nBoard = List[List[str]]\nPosition = Tuple[int, int]\n\n\ndef get_neighbors(positions: Position, n: int, m: int) -> List[Position]:\n i, j = positions\n neighbors = [(i + 1, j), (i - 1, j), (i, j - 1), (i, j + 1)]\n result = []\n for neighbor in neighbors:\n i, j = neighbor\n if 0 <= i < n and 0 <= j < m:\n result.append(neighbor)\n return result\n\n\ndef exists_helper(\n board: Board, position: Position, string: str, visited: Set[Position]\n) -> bool:\n if not string:\n return True\n # using backtracking to generate the result as every position can be used only once\n neighbors = get_neighbors(position, len(board), len(board[0]))\n for neighbor in neighbors:\n i, j = neighbor\n if (board[i][j] == string[0]) and (neighbor not in visited):\n visited.add((i, j))\n if exists_helper(board, (i, j), string[1:], visited):\n return True\n visited.remove((i, j))\n return False\n\n\ndef exists(board: Board, string: str) -> bool:\n if not string:\n return True\n\n for row_index, row in enumerate(board):\n for index, elem in enumerate(row):\n if string[0] == elem:\n if exists_helper(board, (row_index, index), string[1:], set()):\n return True\n return False\n\n\nif __name__ == \"__main__\":\n board = [\n [\"A\", \"B\", \"C\", \"E\"],\n [\"S\", \"F\", \"C\", \"S\"],\n [\"A\", \"D\", \"E\", \"E\"]\n ]\n\n print(exists(board, \"ABCCED\"))\n print(exists(board, \"SEE\"))\n print(exists(board, \"ABCB\"))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n x m)\nSPACE COMPLEXITY: O(n x m)\n\"\"\"", "id": "1400160", "language": "Python", "matching_score": 1.634582757949829, "max_stars_count": 70, "path": "Solutions/098.py" }, { "content": "\"\"\"\nProblem:\n\nYou are given an N by N matrix of random letters and a dictionary of words. Find the\nmaximum number of words that can be packed on the board from the given dictionary.\n\nA word is considered to be able to be packed on the board if:\n\nIt can be found in the dictionary\nIt can be constructed from untaken letters by other words found so far on the board\nThe letters are adjacent to each other (vertically and horizontally, not diagonally).\nEach tile can be visited only once by any word.\nFor example, given the following dictionary:\n\n{ 'eat', 'rain', 'in', 'rat' }\nand matrix:\n\n[['e', 'a', 'n'],\n ['t', 't', 'i'],\n ['a', 'r', 'a']]\nYour function should return 3, since we can make the words 'eat', 'in', and 'rat'\nwithout them touching each other. 
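# Illustrative addendum for the word-packing problem described here: the
# bit-mask power set used in the solution below can also be produced with
# itertools, which reads more directly. Hedged sketch; the empty set is excluded,
# matching the solution's behaviour.

from itertools import combinations

def power_set(items):
    return [list(subset) for size in range(1, len(items) + 1) for subset in combinations(items, size)]

assert len(power_set(["eat", "rain", "in", "rat"])) == 2 ** 4 - 1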
We could have alternatively made 'eat' and 'rain',\nbut that would be incorrect since that's only 2 words.\n\"\"\"\n\nfrom typing import List, Optional, Set, Tuple\n\n\ndef get_neighbours(\n pos: Tuple[int, int], dim: Tuple[int, int], seen: Set[Tuple[int, int]]\n) -> List[Tuple[int, int]]:\n n, m = dim\n i, j = pos\n positions = [\n (i - 1, j),\n (i + 1, j),\n (i, j - 1),\n (i, j + 1),\n ]\n valid_positions = []\n for position in positions:\n y, x = position\n if (0 <= y < n and 0 <= x < m) and (position not in seen):\n valid_positions.append(position)\n return valid_positions\n\n\ndef can_generate_word(\n matrix: List[List[str]],\n pos: Tuple[int, int],\n word: str,\n seen: Set[Tuple[int, int]],\n dim: Tuple[int, int],\n) -> Tuple[bool, Optional[Set[Tuple[int, int]]]]:\n # check if the current word can be generated from the matrix\n if word == \"\":\n return True, seen\n neighbours = get_neighbours(pos, dim, seen)\n for neighbour in neighbours:\n i, j = neighbour\n if matrix[i][j] == word[0]:\n generated, seen_pos = can_generate_word(\n matrix, neighbour, word[1:], seen | set([neighbour]), dim\n )\n if generated:\n return generated, seen_pos\n return False, None\n\n\ndef get_power_set(words: List[str]) -> List[List[str]]:\n # generate the power set of the given list except the empty set\n num_of_words = len(words)\n accumulator = []\n pow_set_size = pow(2, num_of_words)\n\n for counter in range(pow_set_size):\n temp = []\n for j in range(num_of_words):\n if (counter & (1 << j)) > 0:\n temp.append(words[j])\n if temp:\n # adding only valid sets\n accumulator.append(temp)\n return accumulator\n\n\ndef get_max_packed_helper(matrix: List[List[str]], words: List[str]) -> int:\n n, m = len(matrix), len(matrix[0])\n count = 0\n seen = set()\n\n for i in range(n):\n for j in range(m):\n char = matrix[i][j]\n for word in words:\n if word[0] == char:\n # a match has been found, trying to generate the entire word from\n # the first character in the matrix (the starting cell is marked\n # as taken as well, so no other word may reuse it)\n generated, seen_temp = can_generate_word(\n matrix, (i, j), word[1:], seen | set([(i, j)]), (n, m)\n )\n if generated:\n count += 1\n seen = seen_temp\n return count\n\n\ndef get_max_packed(matrix: List[List[str]], words: Set[str]) -> int:\n words_list = get_power_set(list(words))\n max_words = 0\n for word_list in words_list:\n max_words = max(max_words, get_max_packed_helper(matrix, word_list))\n return max_words\n\n\nif __name__ == \"__main__\":\n print(\n get_max_packed(\n [\n [\"e\", \"a\", \"n\"],\n [\"t\", \"t\", \"i\"],\n [\"a\", \"r\", \"a\"]\n ], {\"eat\", \"rain\", \"in\", \"rat\"},\n )\n )\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n x m x words x len(longest word))\nSPACE COMPLEXITY: O(n x m)\n\"\"\"\n", "id": "4058770", "language": "Python", "matching_score": 2.24240779876709, "max_stars_count": 70, "path": "Solutions/341.py" }, { "content": "\"\"\"\nProblem:\n\nBoggle is a game played on a 4 x 4 grid of letters. The goal is to find as many words\nas possible that can be formed by a sequence of adjacent letters in the grid, using\neach cell at most once. 
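# Illustrative addendum: a cheap pruning test for the subset search above. If a
# candidate set of words needs more copies of some letter than the board contains,
# the expensive cell-by-cell search cannot succeed and can be skipped. Hedged sketch.

from collections import Counter

def letters_fit_on_board(words, matrix):
    board_letters = Counter(char for row in matrix for char in row)
    needed = Counter("".join(words))
    return all(board_letters[char] >= count for char, count in needed.items())

matrix = [["e", "a", "n"], ["t", "t", "i"], ["a", "r", "a"]]
assert letters_fit_on_board(["eat", "in", "rat"], matrix)
assert not letters_fit_on_board(["eat", "rain", "in", "rat"], matrix)  # needs two r's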
Given a game board and a dictionary of valid words, implement a\nBoggle solver.\n\"\"\"\n\nfrom typing import List, Set, Tuple\n\nfrom DataStructures.Trie import Trie\n\nMatrix = List[List[str]]\nPosition = Tuple[int, int]\n\n\ndef get_neighbours(position: Position) -> List[Position]:\n i, j = position\n neighbours = []\n all_neighbours = [\n (i - 1, j - 1),\n (i - 1, j),\n (i - 1, j + 1),\n (i, j + 1),\n (i + 1, j + 1),\n (i + 1, j),\n (i + 1, j - 1),\n (i, j - 1),\n ]\n for y, x in all_neighbours:\n if 0 <= x < 4 and 0 <= y < 4:\n neighbours.append((y, x))\n return neighbours\n\n\ndef get_words(\n matrix: Matrix, position: Position, trie: Trie, curr: str, visited: Set[Position], result: Set[str]\n) -> None:\n possibilities = trie.get_suggestions(curr)\n if not possibilities:\n return\n if curr in possibilities:\n # curr itself is a dictionary word\n result.add(curr)\n for neighbour in get_neighbours(position):\n if neighbour in visited:\n # each cell may be used at most once per word\n continue\n i, j = neighbour\n get_words(matrix, neighbour, trie, curr + matrix[i][j], visited | set([neighbour]), result)\n return\n\n\ndef solve_Boggle(matrix: Matrix, dictionary: Set[str]) -> Set[str]:\n prefix_tree = Trie()\n prefix_tree.add_words(dictionary)\n result = set()\n # generating the resultant words\n for i in range(4):\n for j in range(4):\n if matrix[i][j] in prefix_tree.root.children:\n get_words(matrix, (i, j), prefix_tree, matrix[i][j], set([(i, j)]), result)\n return result\n\n\nif __name__ == \"__main__\":\n board = [\n [\"A\", \"L\", \"B\", \"P\"],\n [\"C\", \"O\", \"E\", \"Y\"],\n [\"F\", \"C\", \"H\", \"O\"],\n [\"B\", \"A\", \"D\", \"A\"],\n ]\n words_in_board = {\"PECH\", \"COLA\", \"YO\", \"BAD\"}\n words_not_in_board = {\"FOR\", \"BULL\"}\n dictionary = words_in_board | words_not_in_board\n\n print(dictionary)\n print(solve_Boggle(board, dictionary))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(1)\nSPACE COMPLEXITY: O(1)\n[size of board is 4 x 4 (constant)]\n\"\"\"\n", "id": "2293738", "language": "Python", "matching_score": 1.611111044883728, "max_stars_count": 70, "path": "Solutions/227.py" }, { "content": "\"\"\"\nProblem:\n\nA knight's tour is a sequence of moves by a knight on a chessboard such that all\nsquares are visited once.\n\nGiven N, write a function to return the number of knight's tours on an N by N\nchessboard.\n\"\"\"\n\nfrom typing import List, Tuple\n\nBoard = List[List[int]]\n\n\ndef get_valid_moves(position: Tuple[int, int], n: int) -> Tuple[int, int]:\n y, x = position\n positions = [\n (y + 1, x + 2),\n (y - 1, x + 2),\n (y + 1, x - 2),\n (y - 1, x - 2),\n (y + 2, x + 1),\n (y + 2, x - 1),\n (y - 2, x + 1),\n (y - 2, x - 1),\n ]\n valid_moves = [\n (y_test, x_test)\n for (y_test, x_test) in positions\n if 0 <= y_test < n and 0 <= x_test < n\n ]\n return valid_moves\n\n\ndef is_board_complete(board: Board) -> bool:\n for row in board:\n for elem in row:\n if elem == 0:\n return False\n return True\n\n\ndef solver_helper(board: Board, position: Tuple[int, int], count: int) -> int:\n if is_board_complete(board):\n count += 1\n return count\n for move in get_valid_moves(position, len(board)):\n y, x = move\n if board[y][x] == 0:\n board[y][x] = 1\n count += solver_helper(board, move, 0)\n board[y][x] = 0\n return count\n\n\ndef solve(n: int) -> int:\n board = [[0 for i in range(n)] for j in range(n)]\n board[0][0] = 1\n count = solver_helper(board, (0, 0), 0)\n return count\n\n\nif __name__ == \"__main__\":\n print(solve(1))\n print(solve(2))\n print(solve(3))\n print(solve(4))\n print(solve(5))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(8 ^ (n ^ 2))\nSPACE COMPLEXITY: O(n ^ 2)\n\"\"\"\n", "id": 
"11643451", "language": "Python", "matching_score": 2.0243520736694336, "max_stars_count": 70, "path": "Solutions/064.py" }, { "content": "\"\"\"\nProblem:\n\nA knight is placed on a given square on an 8 x 8 chessboard. It is then moved randomly\nseveral times, where each move is a standard knight move. If the knight jumps off the\nboard at any point, however, it is not allowed to jump back on.\n\nAfter k moves, what is the probability that the knight remains on the board?\n\"\"\"\n\nfrom typing import List, Tuple\n\n\ndef get_moves(position: Tuple[int, int]) -> List[Tuple[int, int]]:\n i, j = position\n moves = [\n (i + 2, j + 1),\n (i + 2, j - 1),\n (i - 2, j + 1),\n (i - 2, j - 1),\n (i + 1, j + 2),\n (i + 1, j - 2),\n (i - 1, j + 2),\n (i - 1, j - 2),\n ]\n return moves\n\n\ndef get_knight_on_board_probability_helper(position: Tuple[int, int], k: int) -> int:\n i, j = position\n if not (0 <= i < 8) or not (0 <= j < 8):\n return 0\n if k == 0:\n return 1\n # generating total number of valid moves from current position\n moves = get_moves(position)\n accumulator = 0\n for pos in moves:\n accumulator += get_knight_on_board_probability_helper(pos, k - 1)\n return accumulator\n\n\ndef get_knight_on_board_probability(position: Tuple[int, int], k: int) -> float:\n # P(knight remains on board) = (number of positions on board / total positions)\n number_of_move_in_board = get_knight_on_board_probability_helper(position, k)\n return number_of_move_in_board / pow(8, k)\n\n\nif __name__ == \"__main__\":\n print(\"{:.3f}\".format(get_knight_on_board_probability((4, 4), 1)))\n print(\"{:.3f}\".format(get_knight_on_board_probability((4, 4), 2)))\n print(\"{:.3f}\".format(get_knight_on_board_probability((1, 1), 3)))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(8 ^ k)\nSPACE COMPLEXITY: O(k)\n\"\"\"\n", "id": "1185030", "language": "Python", "matching_score": 0.7723289728164673, "max_stars_count": 70, "path": "Solutions/304.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a string of digits, generate all possible valid IP address combinations.\n\nIP addresses must follow the format A.B.C.D, where A, B, C, and D are numbers between\n0 and 255. 
Zero-prefixed numbers, such as 01 and 065, are not allowed, except for 0\nitself.\n\nFor example, given \"2542540123\", you should return ['254.25.40.123', '254.254.0.123'].\n\"\"\"\n\nfrom typing import List\n\nACCEPTABLE_NUMBERS = set([str(i) for i in range(256)])\n\n\ndef get_ip_combinations_helper(\n string: str, curr: List[str], accumulator: List[List[str]]\n) -> None:\n if not string and len(curr) == 4:\n accumulator.append(list(curr))\n return\n elif len(curr) > 4:\n return\n\n curr_part = \"\"\n for char in string:\n curr_part += char\n length = len(curr_part)\n if length > 3:\n return\n if curr_part in ACCEPTABLE_NUMBERS:\n get_ip_combinations_helper(\n string[length:], list(curr) + [curr_part], accumulator\n )\n\n\ndef get_ip_combinations(string: str) -> List[str]:\n accumulator = []\n get_ip_combinations_helper(string, [], accumulator)\n return [\".\".join(combination) for combination in accumulator]\n\n\nif __name__ == \"__main__\":\n print(get_ip_combinations(\"2542540123\"))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(2 ^ n)\nSPACE COMPLEXITY: O(2 ^ n)\n\"\"\"\n", "id": "9001240", "language": "Python", "matching_score": 1.4825607538223267, "max_stars_count": 70, "path": "Solutions/213.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a set of characters C and an integer k, a De Bruijn sequence is a cyclic sequence\nin which every possible k-length string of characters in C occurs exactly once.\n\nFor example, suppose C = {0, 1} and k = 3. Then our sequence should contain the\nsubstrings {'000', '001', '010', '011', '100', '101', '110', '111'}, and one possible\nsolution would be 00010111.\n\nCreate an algorithm that finds a De Bruijn sequence.\n\"\"\"\n\nfrom typing import List, Set\n\n\ndef generate_all_combinations(\n characters: Set[str], size: int, accumulator: List[str]\n) -> None:\n if not accumulator:\n accumulator.extend(characters)\n size -= 1\n while size > 0:\n updated_acc = []\n for _ in range(len(accumulator)):\n temp = accumulator.pop(0)\n for char in characters:\n updated_acc.append(temp + char)\n size -= 1\n accumulator.extend(updated_acc)\n\n\ndef get_de_bruijn_helper(\n characters: Set[str], combinations_set: Set[str], k: int, context: str = \"\"\n) -> Set[str]:\n if not combinations_set:\n return set([context])\n\n dseqs = set()\n if not context:\n # if context is empty, it is initized using a combination\n for combo in combinations_set:\n child_dseqs = get_de_bruijn_helper(\n characters, combinations_set - set([combo]), k, combo\n )\n dseqs |= child_dseqs\n return dseqs\n\n for character in characters:\n combo = context[-(k - 1) :] + character\n if combo in combinations_set:\n child_dseqs = get_de_bruijn_helper(\n characters, combinations_set - set([combo]), k, context + character\n )\n dseqs |= child_dseqs\n return dseqs\n\n\ndef get_de_bruijn(characters: Set[str], k: int) -> Set[str]:\n combinations_list = []\n generate_all_combinations(characters, k, combinations_list)\n combinations_set = set(combinations_list)\n return get_de_bruijn_helper(characters, combinations_set, k)\n\n\nif __name__ == \"__main__\":\n print(get_de_bruijn({\"0\", \"1\"}, 3))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n ^ k)\nSPACE COMPLEXITY: O(n + k)\n\"\"\"\n", "id": "10486410", "language": "Python", "matching_score": 0.22412876784801483, "max_stars_count": 70, "path": "Solutions/264.py" }, { "content": "\"\"\"\nProblem:\n\nGiven an array of numbers representing the stock prices of a company in chronological\norder and an integer k, return the maximum profit you can make from k buys 
and sells.\nYou must buy the stock before you can sell it, and you must sell the stock before you\ncan buy it again.\n\nFor example, given k = 2 and the array [5, 2, 4, 0, 1], you should return 3.\n\"\"\"\n\nfrom typing import List\n\n\ndef get_max_profit_helper(\n arr: List[int],\n curr_index: int,\n curr_profit: int,\n buys_left: int,\n sells_left: int,\n length: int,\n) -> int:\n # if the end of the array is reached or no more sells can be performed current\n # profit is returned (base case for recursion)\n if curr_index == length or sells_left == 0:\n return curr_profit\n # if the number of 'buys' and 'sells' left are equal, the stock needs to be bought\n if buys_left == sells_left:\n return max(\n # wait for a different deal\n get_max_profit_helper(\n arr, curr_index + 1, curr_profit, buys_left, sells_left, length\n ),\n # buy at the current price\n get_max_profit_helper(\n arr,\n curr_index + 1,\n curr_profit - arr[curr_index],\n buys_left - 1,\n sells_left,\n length,\n ),\n )\n # if the number of 'buys' and 'sells' left are inequal, the stock needs to be sold\n return max(\n # wait and hold for selling at a different price\n get_max_profit_helper(\n arr, curr_index + 1, curr_profit, buys_left, sells_left, length,\n ),\n # sell at the current price\n get_max_profit_helper(\n arr,\n curr_index + 1,\n curr_profit + arr[curr_index],\n buys_left,\n sells_left - 1,\n length,\n ),\n )\n\n\ndef get_max_profit(arr: List[int], k: int) -> int:\n return get_max_profit_helper(arr, 0, 0, k, k, len(arr))\n\n\nif __name__ == \"__main__\":\n print(get_max_profit([5, 2, 4, 0, 1], 2))\n print(get_max_profit([5, 2, 4], 2))\n print(get_max_profit([5, 2, 4], 1))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(2 ^ n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "5292438", "language": "Python", "matching_score": 2.649855136871338, "max_stars_count": 70, "path": "Solutions/130.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a array of numbers representing the stock prices of a company in chronological\norder, write a function that calculates the maximum profit you could have made from\nbuying and selling that stock. 
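# Illustrative addendum: the exponential recursion above has a classic O(n x k)
# dynamic program. hold[j] is the best balance while holding a stock within at
# most j transactions; free[j] is the best balance while not holding. A hedged
# sketch of that standard formulation.

def max_profit_dp(prices, k):
    if not prices or k == 0:
        return 0
    hold = [float("-inf")] * (k + 1)
    free = [0] * (k + 1)
    for price in prices:
        for j in range(1, k + 1):
            hold[j] = max(hold[j], free[j - 1] - price)  # buy, starting transaction j
            free[j] = max(free[j], hold[j] + price)      # sell, closing transaction j
    return free[k]

assert max_profit_dp([5, 2, 4, 0, 1], 2) == 3  # buy 2 sell 4, buy 0 sell 1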
You're also given a number fee that represents a\ntransaction fee for each buy and sell transaction.\n\nYou must buy before you can sell the stock, but you can make as many transactions as\nyou like.\n\nFor example, given [1, 3, 2, 8, 4, 10] and fee = 2, you should return 9, since you\ncould buy the stock at $1, and sell at $8, and then buy it at $4 and sell it at $10.\nSince we did two transactions, there is a $4 fee, so we have 7 + 6 = 13 profit minus $4\nof fees.\n\"\"\"\n\nfrom typing import List\n\n\ndef get_max_profit(\n prices: List[int], fee: int, profit: int = 0, current: int = 0, can_buy: bool = True\n) -> int:\n if not prices:\n return profit\n if can_buy:\n return max(\n get_max_profit(\n prices[1:], fee, profit, (-prices[0] - fee), False\n ), # buying\n get_max_profit(\n prices[1:], fee, profit, 0, True\n ), # holding\n )\n return max(\n get_max_profit(\n prices[1:], fee, (profit + current + prices[0]), 0, True\n ), # selling\n get_max_profit(\n prices[1:], fee, profit, current, False\n ), # holding\n )\n\n\nif __name__ == \"__main__\":\n print(get_max_profit([1, 3, 2, 8, 4, 10], 2))\n print(get_max_profit([1, 3, 2, 1, 4, 10], 2))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(2 ^ n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "1768135", "language": "Python", "matching_score": 3.0552520751953125, "max_stars_count": 70, "path": "Solutions/193.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a array of numbers representing the stock prices of a company in chronological\norder, write a function that calculates the maximum profit you could have made from\nbuying and selling that stock once. You must buy before you can sell it.\n\nFor example, given [9, 11, 8, 5, 7, 10], you should return 5, since you could buy the\nstock at 5 dollars and sell it at 10 dollars.\n\"\"\"\n\nfrom typing import List, Optional\n\n\ndef get_max_profit(arr: List[int]) -> Optional[int]:\n length = len(arr)\n if length < 2:\n return None\n\n min_element = arr[0]\n profit = max(0, arr[1] - arr[0])\n # generating the maximum profit\n for i in range(1, length):\n min_element = min(min_element, arr[i])\n profit = max(profit, arr[i] - min_element)\n return profit\n\n\nif __name__ == \"__main__\":\n print(get_max_profit([9, 11, 8, 5, 7, 10]))\n print(get_max_profit([1, 2, 3, 4, 5]))\n print(get_max_profit([5, 4, 3, 2, 1]))\n print(get_max_profit([1000]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "1669224", "language": "Python", "matching_score": 1.0053598880767822, "max_stars_count": 70, "path": "Solutions/047.py" }, { "content": "\"\"\"\nProblem:\n\nGiven an array of numbers, find the maximum sum of any contiguous subarray of the array.\n\nFor example, given the array [34, -50, 42, 14, -5, 86], the maximum sum would be 137,\nsince we would take elements 42, 14, -5, and 86.\n\nGiven the array [-5, -1, -8, -9], the maximum sum would be 0, since we would not take\nany elements.\n\nDo this in O(N) time.\n\"\"\"\n\nfrom sys import maxsize\nfrom typing import List\n\n\ndef kadanes_algorithm(arr: List[int]) -> int:\n length = len(arr)\n if length == 0:\n return 0\n max_so_far = -maxsize\n max_ending_here = 0\n # generating the largest continuous sum\n for i in range(length):\n max_ending_here = max_ending_here + arr[i]\n max_ending_here = max(max_ending_here, 0)\n max_so_far = max(max_so_far, max_ending_here)\n return max_so_far\n\n\nif __name__ == \"__main__\":\n print(kadanes_algorithm([34, -50, 42, 14, -5, 86]))\n print(kadanes_algorithm([-5, -1, -8, -9]))\n 
print(kadanes_algorithm([5, 1, 8, 9]))\n print(kadanes_algorithm([]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "1801651", "language": "Python", "matching_score": 1.0263632535934448, "max_stars_count": 70, "path": "Solutions/049.py" }, { "content": "\"\"\"\nProblem:\n\nYou are given an array of integers, where each element represents the maximum number of\nsteps that can be jumped going forward from that element. Write a function to return\nthe minimum number of jumps you must take in order to get from the start to the end of\nthe array.\n\nFor example, given [6, 2, 4, 0, 5, 1, 1, 4, 2, 9], you should return 2, as the optimal\nsolution involves jumping from 6 to 5, and then from 5 to 9.\n\"\"\"\n\nfrom sys import maxsize\nfrom typing import List\n\n\ndef get_min_jumps(arr: List[int]) -> int:\n length = len(arr)\n dp = [0 for _ in range(length)]\n for i in range(length - 2, -1, -1):\n if arr[i]:\n dp[i] = min(dp[i + 1 : i + arr[i] + 1]) + 1\n else:\n dp[i] = maxsize\n return dp[0]\n\n\nif __name__ == \"__main__\":\n print(get_min_jumps([6, 2, 4, 0, 5, 1, 1, 4, 2, 9]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n ^ 2)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "11608962", "language": "Python", "matching_score": 1.012389898300171, "max_stars_count": 70, "path": "Solutions/245.py" }, { "content": "\"\"\"\nProblem:\n\nStarting from 0 on a number line, you would like to make a series of jumps that lead\nto the integer N.\n\nOn the ith jump, you may move exactly i places to the left or right.\n\nFind a path with the fewest number of jumps required to get from 0 to N.\n\"\"\"\n\n\ndef get_sum_till_n(n: int) -> int:\n return (n * (n + 1)) // 2\n\n\ndef count_jumps(n: int) -> int:\n # answer will be same either it is positive or negative\n n = abs(n)\n ans = 0\n # continue till number is lesser or not in same parity\n while get_sum_till_n(ans) < n or (get_sum_till_n(ans) - n) & 1:\n ans += 1\n return ans\n\n\nif __name__ == \"__main__\":\n print(count_jumps(-3))\n print(count_jumps(0))\n print(count_jumps(1))\n print(count_jumps(2))\n print(count_jumps(3))\n print(count_jumps(4))\n print(count_jumps(5))\n print(count_jumps(9))\n print(count_jumps(10))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "3518061", "language": "Python", "matching_score": 1.1193827390670776, "max_stars_count": 70, "path": "Solutions/322.py" }, { "content": "\"\"\"\nProblem:\n\nGiven integers M and N, write a program that counts how many positive integer pairs\n(a, b) satisfy the following conditions:\n\na + b = M\na XOR b = N\n\"\"\"\n\n\ndef get_count(M: int, N: int) -> int:\n count = 0\n for i in range(1, M):\n # (a, b) and (b, a) are considered different entities.\n # To consider them only once, use range(1, M // 2)\n if i ^ (M - i) == N:\n count += 1\n return count\n\n\nif __name__ == \"__main__\":\n print(get_count(100, 4))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "8665623", "language": "Python", "matching_score": 1.0484871864318848, "max_stars_count": 70, "path": "Solutions/332.py" }, { "content": "\"\"\"\nProblem:\n\nGiven an array of integers, find the maximum XOR of any two elements.\n\"\"\"\n\nfrom sys import maxsize\nfrom typing import List\n\n\ndef get_max_xor(arr: List[int]) -> int:\n max_xor = -maxsize\n for index, elem1 in enumerate(arr):\n for elem2 in arr[index + 1 :]:\n max_xor = max(max_xor, elem1 ^ elem2)\n return max_xor\n\n\nif __name__ == \"__main__\":\n 
print(get_max_xor([1, 2, 3, 4]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n ^ 2)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "3956368", "language": "Python", "matching_score": 0.000547293690033257, "max_stars_count": 70, "path": "Solutions/249.py" }, { "content": "\"\"\"\nProblem:\n\nImplement a URL shortener with the following methods:\n\nshorten(url), which shortens the url into a six-character alphanumeric string, such as\nzLg6wl.\nrestore(short), which expands the shortened string into the original url. If no such\nshortened string exists, return null.\nHint: What if we enter the same URL twice?\n\"\"\"\n\nfrom hashlib import sha224\nfrom typing import Optional\n\n\nclass URL_Shortner:\n def __init__(self, prefix: str = \"http://short_url.in/\") -> None:\n self.shortened_url_map = {}\n self.url_prefix = prefix\n\n def shorten(self, url: str) -> str:\n shortened_url_hash = sha224(url.encode()).hexdigest()[:6]\n if shortened_url_hash not in self.shortened_url_map:\n self.shortened_url_map[shortened_url_hash] = url\n return self.url_prefix + shortened_url_hash\n\n def restore(self, short: str) -> Optional[str]:\n if short[-6:] in self.shortened_url_map:\n return self.shortened_url_map[short[-6:]]\n return None\n\n\nif __name__ == \"__main__\":\n us = URL_Shortner()\n\n url = \"https://www.google.com/\"\n shortened_url = us.shorten(url)\n print(shortened_url)\n\n print(us.restore(shortened_url))\n print(us.restore(\"http://short_url.in/64f827\"))\n", "id": "4422931", "language": "Python", "matching_score": 1.3131710290908813, "max_stars_count": 70, "path": "Solutions/055.py" }, { "content": "\"\"\"\nProblem:\n\nImplement a PrefixMapSum class with the following methods:\n\ninsert(key: str, value: int): Set a given key's value in the map. If the key already\nexists, overwrite the value.\nsum(prefix: str): Return the sum of all values of keys that begin with a given prefix.\n\nFor example, you should be able to run the following code:\n\n>>> mapsum.insert(\"columnar\", 3)\n>>> assert mapsum.sum(\"col\") == 3\n>>> mapsum.insert(\"column\", 2)\n>>> assert mapsum.sum(\"col\") == 5\n\"\"\"\n\nfrom DataStructures.Trie import Trie\n\n\nclass PrefixMapSum:\n def __init__(self) -> None:\n self.trie = Trie()\n self.hash_map = {}\n\n def insert(self, key: str, value: int) -> None:\n if key not in self.hash_map:\n self.trie.add(key)\n self.hash_map[key] = value\n\n def sum(self, prefix: str) -> int:\n words = self.trie.get_suggestions(prefix)\n result = 0\n for word in words:\n result += self.hash_map[word]\n return result\n\n\nif __name__ == \"__main__\":\n mapsum = PrefixMapSum()\n\n mapsum.insert(\"columnar\", 3)\n assert mapsum.sum(\"col\") == 3\n\n mapsum.insert(\"column\", 2)\n assert mapsum.sum(\"col\") == 5\n", "id": "4233913", "language": "Python", "matching_score": 1.716753363609314, "max_stars_count": 70, "path": "Solutions/232.py" }, { "content": "\"\"\"\nProblem:\n\nImplement an autocomplete system. 
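# Illustrative addendum for the autocomplete task introduced here: with a static
# dictionary, prefix queries can also be answered with the standard library's
# bisect on a sorted word list, no trie required. Hedged sketch.

from bisect import bisect_left

def autocomplete(sorted_words, prefix):
    start = bisect_left(sorted_words, prefix)  # first word >= prefix
    matches = []
    for word in sorted_words[start:]:
        if not word.startswith(prefix):
            break  # sorted order: once a word fails, no later word can match
        matches.append(word)
    return matches

assert autocomplete(["deal", "deer", "dog"], "de") == ["deal", "deer"]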
That is, given a query string s and a set of all\npossible query strings, return all strings in the set that have s as a prefix.\n\nFor example, given the query string de and the set of strings [dog, deer, deal], return\n[deer, deal].\n\nHint: Try preprocessing the dictionary into a more efficient data structure to speed up\nqueries.\n\"\"\"\n\nfrom typing import List\n\nfrom DataStructures.Trie import Trie\n\n\ndef get_suggestion(word_list: List[str], prefix: str) -> List[str]:\n # using trie data structure to get the suggestions (for details, check\n # ./DataStructures/Trie)\n trie = Trie()\n trie.add_words(word_list)\n prefix_match = trie.get_suggestions(prefix)\n # type casting the result to list as the return type is a set\n return list(prefix_match)\n\n\nif __name__ == \"__main__\":\n print(get_suggestion([\"deer\", \"dog\", \"deal\"], \"de\"))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n[n = total number of characters (all words)]\n\"\"\"\n", "id": "8987988", "language": "Python", "matching_score": 2.00862455368042, "max_stars_count": 70, "path": "Solutions/011.py" }, { "content": "from typing import List, Set\n\n\nclass TrieNode:\n \"\"\"\n TrieNode Class for the nodes of a Pre-processing Trie\n \"\"\"\n\n def __init__(self) -> None:\n self.children = {}\n self.is_end = False\n\n\nclass Trie:\n \"\"\"\n Pre-processing Trie Class\n\n Functions:\n add: Add a string to the Trie\n add_words: Add a list of strings to the Trie\n get_suggestions: Get all possible words from the given prefix\n _traverse: Helper function for get_suggestions (generates the words from prefix)\n \"\"\"\n\n def __init__(self) -> None:\n self.root = TrieNode()\n\n def add(self, word: str) -> None:\n # Add a string to the Trie\n pos = self.root\n for char in word:\n if char not in pos.children:\n pos.children[char] = TrieNode()\n pos = pos.children[char]\n pos.is_end = True\n\n def add_words(self, word_list: List[str]) -> None:\n # Add a list of strings to the Trie\n for word in word_list:\n self.add(word)\n\n def get_suggestions(self, prefix: str) -> Set[str]:\n # Get all possible words from the given prefix\n pos = self.root\n for char in prefix:\n if char not in pos.children:\n # returns an empty set if no word is possible from the given prefix\n return set()\n pos = pos.children[char]\n result = set()\n self._traverse(pos, prefix, result)\n return result\n\n def _traverse(self, pos: TrieNode, curr: str, result: set) -> None:\n # Helper function for get_suggestions\n # Generates the words from prefix (using result as the accumulator)\n if pos.is_end:\n result.add(curr)\n for child in pos.children:\n self._traverse(pos.children[child], curr + child, result)\n", "id": "7502438", "language": "Python", "matching_score": 1.647626280784607, "max_stars_count": 70, "path": "Solutions/DataStructures/Trie.py" }, { "content": "\"\"\"\nProblem:\n\nA ternary search tree is a trie-like data structure where each node may have up to\nthree children. 
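# Illustrative addendum: the suggestion walk of the Trie above in miniature,
# using nested dicts as trie nodes and a sentinel key for end-of-word. Hedged,
# self-contained sketch.

END = "$"

def mini_trie(words):
    root = {}
    for word in words:
        node = root
        for char in word:
            node = node.setdefault(char, {})
        node[END] = True  # mark end of word
    return root

def suggestions(root, prefix):
    node = root
    for char in prefix:
        if char not in node:
            return set()  # no word extends this prefix
        node = node[char]
    found, stack = set(), [(node, prefix)]
    while stack:
        node, word = stack.pop()
        for key, child in node.items():
            if key == END:
                found.add(word)
            else:
                stack.append((child, word + key))
    return found

assert suggestions(mini_trie(["dog", "deer", "deal"]), "de") == {"deer", "deal"}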
Here is an example which represents the words code, cob, be, ax, war\nand we.\n\n c\n / | \\\n b o w\n / | | |\na e d a\n| / | | \\ \nx b e r e\nThe tree is structured according to the following rules:\n\nleft child nodes link to words lexicographically earlier than the parent prefix\nright child nodes link to words lexicographically later than the parent prefix\nmiddle child nodes continue the current word\nFor instance, since code is the first word inserted in the tree, and cob\nlexicographically precedes cod, cob is represented as a left child extending from cod.\n\nImplement insertion and search functions for a ternary search tree.\n\"\"\"\n\nfrom random import shuffle, random\nfrom typing import Optional\n\n\nclass Node:\n def __init__(self, val: Optional[str] = None) -> None:\n self.val = val\n self.left = None\n self.mid = None\n self.right = None\n\n def __bool__(self) -> bool:\n return bool(self.val)\n\n def insert_helper(self, string: str) -> None:\n if not string:\n return\n\n if self.left is None:\n self.left = Node()\n if self.mid is None:\n self.mid = Node()\n if self.right is None:\n self.right = Node()\n\n char = string[0]\n if not self:\n self.val = char\n self.mid.insert_helper(string[1:])\n if self.val == char:\n self.mid.insert_helper(string[1:])\n elif self.val > char:\n self.left.insert_helper(string)\n else:\n self.right.insert_helper(string)\n\n def search_helper(self, string: str) -> bool:\n if not string:\n return True\n\n char = string[0]\n length = len(string)\n if char == self.val:\n if self.mid:\n return self.mid.search_helper(string[1:])\n elif length == 1:\n return True\n return False\n elif char < self.val:\n if self.left:\n return self.left.search_helper(string)\n return False\n else:\n if self.right:\n return self.right.search_helper(string)\n return False\n\n\nclass TernarySearchTree:\n def __init__(self) -> None:\n self.root = None\n\n def insert(self, string: str) -> None:\n if not string:\n return\n if not self.root:\n self.root = Node(string[0])\n string = string[1:]\n curr = self.root\n for char in string:\n curr.mid = Node(char)\n curr = curr.mid\n else:\n self.root.insert_helper(string)\n\n def search(self, string: str) -> bool:\n if not string:\n return True\n if not self.root:\n return False\n return self.root.search_helper(string)\n\n\nif __name__ == \"__main__\":\n words_present = [\"ax\", \"be\", \"cob\", \"code\", \"war\", \"we\"]\n words_absent = [\"axe\", \"bee\", \"creed\", \"hi\", \"see\", \"wax\"]\n\n chosen_words = words_absent + words_present\n shuffle(chosen_words)\n chosen_words = [word for word in chosen_words if random() > 0.5]\n shuffle(chosen_words)\n\n tree = TernarySearchTree()\n\n for word in words_present:\n tree.insert(word)\n\n for word in chosen_words:\n if tree.search(word):\n print(f\"'{word}' is PRESENT in the tree\")\n else:\n print(f\"'{word}' is NOT PRESENT in the tree\")\n", "id": "7683948", "language": "Python", "matching_score": 1.4761236906051636, "max_stars_count": 70, "path": "Solutions/348.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a binary search tree, find the floor and ceiling of a given integer. 
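# Illustrative addendum for the floor/ceiling problem introduced here: an
# iterative walk that keeps the best candidate seen so far, shown on a plain
# tuple-based tree of the form (value, left, right). Hedged sketch.

def bst_floor_ceiling(root, value):
    floor = ceiling = None
    node = root
    while node is not None:
        val, left, right = node
        if val == value:
            return value, value
        if val < value:
            floor = val          # best "<= value" so far; look for larger values
            node = right
        else:
            ceiling = val        # best ">= value" so far; look for smaller values
            node = left
    return floor, ceiling

#        4
#      2   6
#     1 3
tree = (4, (2, (1, None, None), (3, None, None)), (6, None, None))
assert bst_floor_ceiling(tree, 5) == (4, 6)
assert bst_floor_ceiling(tree, 7) == (6, None)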
The floor is\nthe highest element in the tree less than or equal to an integer, while the ceiling is\nthe lowest element in the tree greater than or equal to an integer.\n\nIf either value does not exist, return None.\n\"\"\"\n\nfrom typing import Optional, Tuple\n\nfrom DataStructures.Tree import BinarySearchTree, Node\n\n\ndef get_ceiling(node: Optional[Node], value: int) -> Optional[int]:\n    # function to get the ceiling of the input in a binary search tree\n    # using BST property to find the element optimally\n    if node is None:\n        return None\n    if node.val == value:\n        return value\n    if node.val < value:\n        # everything in the left subtree is smaller; the ceiling (if any) lies\n        # to the right\n        return get_ceiling(node.right, value)\n    # node.val > value: a tighter ceiling may still exist in the left subtree\n    left_result = get_ceiling(node.left, value)\n    return left_result if left_result is not None else node.val\n\n\ndef get_floor(node: Optional[Node], value: int) -> Optional[int]:\n    # function to get the floor of the input in a binary search tree\n    # using BST property to find the element optimally\n    if node is None:\n        return None\n    if node.val == value:\n        return value\n    if node.val > value:\n        # everything in the right subtree is larger; the floor (if any) lies\n        # to the left\n        return get_floor(node.left, value)\n    # node.val < value: a tighter floor may still exist in the right subtree\n    right_result = get_floor(node.right, value)\n    return right_result if right_result is not None else node.val\n\n\ndef get_floor_and_ceiling(\n    tree: BinarySearchTree, value: int\n) -> Tuple[Optional[int], Optional[int]]:\n    # function to get the ceiling and floor of the input in a binary search tree\n    if tree.root:\n        return get_floor(tree.root, value), get_ceiling(tree.root, value)\n    return None, None\n\n\nif __name__ == \"__main__\":\n    tree = BinarySearchTree()\n\n    tree.add(4)\n    tree.add(2)\n    tree.add(1)\n    tree.add(3)\n    tree.add(6)\n\n    print(get_floor_and_ceiling(tree, 2))\n    print(get_floor_and_ceiling(tree, 7))\n    print(get_floor_and_ceiling(tree, -1))\n    print(get_floor_and_ceiling(tree, 5))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(log(n))\nSPACE COMPLEXITY: O(log(n))\n\"\"\"\n", "id": "5301276", "language": "Python", "matching_score": 2.919443130493164, "max_stars_count": 70, "path": "Solutions/307.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a node in a binary tree, return the next bigger element, also known as the\ninorder successor.\n\nFor example, the inorder successor of 22 is 30.\n\n   10\n  /  \\\n 5    30\n     /  \\\n   22    35\nYou can assume each node has a parent pointer.\n\"\"\"\n\nfrom typing import Optional\n\nfrom DataStructures.Tree import Node, BinarySearchTree\n\n\ndef inorder_successor_helper(node: Node) -> Optional[int]:\n    # using bst property to find the inorder successor\n    if node.right:\n        # successor is the leftmost node of the right subtree\n        pos = node.right\n        while pos.left:\n            pos = pos.left\n        return pos.val\n    # no right subtree: climb up until the current node is a left child; that\n    # parent is the successor (None if no such ancestor exists)\n    pos = node\n    while pos.parent and pos.parent.right is pos:\n        pos = pos.parent\n    if pos.parent:\n        return pos.parent.val\n    return None\n\n\ndef inorder_successor(node: Node) -> Optional[int]:\n    if not node:\n        return\n    return inorder_successor_helper(node)\n\n\n# adding the parent pointer to Node class\nsetattr(Node, \"parent\", None)\n\nif __name__ == \"__main__\":\n    a = Node(10)\n    b = Node(5)\n    c = Node(30)\n    d = Node(22)\n    e = Node(35)\n\n    a.left = b\n    a.right = c\n    c.left = d\n    c.right = e\n\n    b.parent = a\n    c.parent = a\n    d.parent = c\n    e.parent = c\n\n    tree = BinarySearchTree()\n    tree.root = a\n\n    print(tree)\n    print(inorder_successor(d))\n    print(inorder_successor(a))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "402520", "language": "Python", "matching_score": 1.99086332321167, "max_stars_count": 70, "path": "Solutions/133.py" }, { "content": "\"\"\"\nProblem:\n\nGiven the root of a binary search tree, and a target K, return two nodes in the tree\nwhose sum equals 
K.\n\nFor example, given the following tree and K of 20\n\n 10\n / \\\n 5 15\n / \\\n 11 15\nReturn the nodes 5 and 15.\n\"\"\"\n\nfrom typing import Generator, Optional, Tuple\n\nfrom DataStructures.Tree import BinaryTree, Node\n\n\ndef inorder_traverse_generator(node: Node) -> Generator[int, None, None]:\n if node.left:\n for val in inorder_traverse_generator(node.left):\n yield val\n yield node.val\n if node.right:\n for val in inorder_traverse_generator(node.right):\n yield val\n\n\ndef get_inorder_traverse_generator(\n tree: BinaryTree,\n) -> Optional[Generator[int, None, None]]:\n if tree.root:\n return inorder_traverse_generator(tree.root)\n return None\n\n\ndef get_target_sum(tree: BinaryTree, k: int) -> Tuple[Optional[int], Optional[int]]:\n generator = get_inorder_traverse_generator(tree)\n if not generator:\n return None, None\n # checking for the target sum\n previous = set()\n for val in generator:\n if (k - val) in previous:\n return (k - val), val\n previous.add(val)\n return None, None\n\n\nif __name__ == \"__main__\":\n tree = BinaryTree()\n tree.root = Node(10)\n\n tree.root.left = Node(5)\n tree.root.right = Node(15)\n\n tree.root.right.left = Node(11)\n tree.root.right.right = Node(15)\n\n print(get_target_sum(tree, 15))\n print(get_target_sum(tree, 20))\n print(get_target_sum(tree, 21))\n print(get_target_sum(tree, 25))\n print(get_target_sum(tree, 30))\n print(get_target_sum(tree, 35))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "6230976", "language": "Python", "matching_score": 1.8699578046798706, "max_stars_count": 70, "path": "Solutions/125.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a binary search tree and a range [a, b] (inclusive), return the sum of the\nelements of the binary search tree within the range.\n\nFor example, given the following tree:\n\n 5\n / \\\n 3 8\n / \\ / \\\n2 4 6 10\nand the range [4, 9], return 23 (5 + 4 + 6 + 8).\n\"\"\"\n\nfrom typing import Tuple\n\nfrom DataStructures.Tree import BinarySearchTree, Node\n\n\ndef get_sum_over_range_helper(node: Node, low: int, high: int) -> int:\n if node is None:\n return 0\n if low <= node.val <= high:\n return (\n node.val\n + get_sum_over_range_helper(node.left, low, high)\n + get_sum_over_range_helper(node.right, low, high)\n )\n elif low > node.val:\n return get_sum_over_range_helper(node.right, low, high)\n return get_sum_over_range_helper(node.left, low, high)\n\n\ndef get_sum_over_range(tree: BinarySearchTree, sum_range: Tuple[int, int]) -> int:\n if tree.root is None:\n return 0\n low, high = sum_range\n return get_sum_over_range_helper(tree.root, low, high)\n\n\nif __name__ == \"__main__\":\n tree = BinarySearchTree()\n\n tree.add(5)\n tree.add(3)\n tree.add(8)\n tree.add(2)\n tree.add(4)\n tree.add(6)\n tree.add(10)\n\n print(get_sum_over_range(tree, (4, 9)))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(height_of_the_tree) [due to recursion]\n\"\"\"\n", "id": "1333448", "language": "Python", "matching_score": 2.5295839309692383, "max_stars_count": 70, "path": "Solutions/343.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a binary tree, find a minimum path sum from root to a leaf.\n\nFor example, the minimum path in this tree is [10, 5, 1, -1], which has sum 15.\n\n 10\n / \\\n5 5\n \\ \\\n 2 1\n /\n -1\n\"\"\"\n\nfrom typing import List, Tuple\n\nfrom DataStructures.Tree import BinaryTree, Node\n\n\ndef minimum_path_sum_helper(node: Node) -> Tuple[int, List[int]]:\n left_sum, left = None, None\n right_sum, right = None, None\n 
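# recursively collect the best (sum, path) answer available from each subtree\n    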
if node.left:\n left_sum, left = minimum_path_sum_helper(node.left)\n if node.right:\n right_sum, right = minimum_path_sum_helper(node.right)\n # generating the minimum path sum\n if not left and not right:\n return node.val, [node.val]\n elif left and not right:\n return (left_sum + node.val), left + [node.val]\n elif right and not left:\n return (right_sum + node.val), right + [node.val]\n return min(\n ((left_sum + node.val), left + [node.val]),\n ((right_sum + node.val), right + [node.val]),\n key=lambda x: x[0],\n )\n\n\ndef minimum_path_sum(tree: BinaryTree) -> List[int]:\n if not tree.root:\n raise ValueError(\"Empty Tree\")\n _, path = minimum_path_sum_helper(tree.root)\n return path[::-1]\n\n\nif __name__ == \"__main__\":\n tree = BinaryTree()\n tree.root = Node(10)\n\n tree.root.left = Node(5)\n tree.root.right = Node(5)\n\n tree.root.left.right = Node(2)\n\n tree.root.right.right = Node(1)\n\n tree.root.right.right.left = Node(-1)\n\n print(tree)\n print(minimum_path_sum(tree))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "12607424", "language": "Python", "matching_score": 2.635032892227173, "max_stars_count": 70, "path": "Solutions/135.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a binary tree, return all paths from the root to leaves.\n\nFor example, given the tree\n\n 1\n / \\\n 2 3\n / \\\n 4 5\nit should return [[1, 2], [1, 3, 4], [1, 3, 5]].\n\"\"\"\n\nfrom typing import List\n\nfrom DataStructures.Tree import BinaryTree, Node\n\n\ndef get_paths_helper(node: Node, paths: List[int], curr_path: List[int]) -> None:\n if not node.left and not node.right:\n # leaf node\n curr_path.append(node.val)\n paths.append([*curr_path])\n curr_path.pop()\n return\n # non-leaf node\n curr_path.append(node.val)\n if node.left:\n get_paths_helper(node.left, paths, curr_path)\n if node.right:\n get_paths_helper(node.right, paths, curr_path)\n curr_path.pop()\n\n\ndef get_paths(tree: BinaryTree):\n if not tree.root:\n return []\n paths = []\n get_paths_helper(tree.root, paths, [])\n return paths\n\n\nif __name__ == \"__main__\":\n tree = BinaryTree()\n tree.root = Node(1)\n\n tree.root.left = Node(2)\n tree.root.right = Node(3)\n\n tree.root.right.left = Node(4)\n tree.root.right.right = Node(5)\n\n print(tree)\n print(get_paths(tree))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(log(n))\n\"\"\"\n", "id": "10776324", "language": "Python", "matching_score": 2.3708362579345703, "max_stars_count": 70, "path": "Solutions/110.py" }, { "content": "\"\"\"\nProblem:\n\nWrite a program to serialize a tree into a string and deserialize a string into a tree.\n\"\"\"\n\nfrom DataStructures.Queue import Queue\nfrom DataStructures.Tree import Node, BinaryTree\n\n\ndef serialize_helper(node: Node) -> str:\n # helper function to serialize a binary tree (uses prefix traversal)\n # data is padded with single quotes (') and comma (,) is used as a delimiter\n if node.right is None and node.left is None:\n return f\"'{node.val}','None','None'\"\n elif node.left is not None and node.right is None:\n return f\"'{node.val}',{serialize_helper(node.left)},'None'\"\n elif node.left is None and node.right is not None:\n return f\"'{node.val}','None',{serialize_helper(node.right)}\"\n elif node.left is not None and node.right is not None:\n return (\n f\"'{node.val}',\"\n + f\"{serialize_helper(node.left)},\"\n + f\"{serialize_helper(node.right)}\"\n )\n\n\ndef serialize(tree: BinaryTree) -> str:\n return serialize_helper(tree.root)\n\n\ndef 
deserialize_helper(node: Node, queue: Queue) -> Node:\n    # helper function to deserialize a string into a Binary Tree\n    # the serialized data is consumed from a queue, since prefix notation is\n    # easily decoded by reading tokens off the front\n    left = queue.dequeue().strip(\"'\")\n    if left != \"None\":\n        # if the left child exists, it is added to the tree\n        node.left = Node(left)\n        node.left = deserialize_helper(node.left, queue)\n\n    right = queue.dequeue().strip(\"'\")\n    if right != \"None\":\n        # if the right child exists, it is added to the tree\n        node.right = Node(right)\n        node.right = deserialize_helper(node.right, queue)\n    return node\n\n\ndef deserialize(string: str) -> BinaryTree:\n    # the string needs to have the same format as the binary tree serialization\n    # e.g.: data is padded with single quotes (') and comma (,) is used as a delimiter\n    data = string.split(\",\")\n    queue = Queue()\n    for node in data:\n        queue.enqueue(node)\n    tree = BinaryTree()\n    tree.root = Node(queue.dequeue().strip(\"'\"))\n    deserialize_helper(tree.root, queue)\n    return tree\n\n\nif __name__ == \"__main__\":\n    tree = BinaryTree()\n\n    tree.root = Node(\"root\")\n\n    tree.root.left = Node(\"left\")\n    tree.root.right = Node(\"right\")\n\n    tree.root.left.left = Node(\"left.left\")\n\n    print(serialize(tree))\n\n    generated_tree = deserialize(\n        \"'root','left','left.left','None','None','None','right','None','None'\"\n    )\n\n    print(serialize(generated_tree))\n\n\n\"\"\"\nSPECS:\n\nSERIALIZE: (n = Number of Nodes)\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\nDESERIALIZE: (n = Number of Characters in the String)\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "12071064", "language": "Python", "matching_score": 1.586632490158081, "max_stars_count": 70, "path": "Solutions/003.py" }, { "content": "\"\"\"\nProblem:\n\nHuffman coding is a method of encoding characters based on their frequency. Each letter\nis assigned a variable-length binary string, such as 0101 or 111110, where shorter\nlengths correspond to more common letters. To accomplish this, a binary tree is built\nsuch that the path from the root to any leaf uniquely maps to a character. 
When\ntraversing the path, descending to a left child corresponds to a 0 in the prefix, while\ndescending right corresponds to 1.\n\nHere is an example tree (note that only the leaf nodes have letters):\n\n *\n / \\\n * *\n / \\ / \\\n * a t *\n / \\\nc s\nWith this encoding, cats would be represented as 0000110111.\n\nGiven a dictionary of character frequencies, build a Huffman tree, and use it to\ndetermine a mapping between characters and their encoded binary strings.\n\"\"\"\n\nfrom typing import Dict, Union\n\nfrom DataStructures.Tree import Node\n\n\ndef huffman_code_tree(node: Union[Node, str], binString: str = \"\") -> Dict[str, str]:\n if type(node) is str:\n return {node: binString}\n d = dict()\n d.update(huffman_code_tree(node.left, binString + \"0\"))\n d.update(huffman_code_tree(node.right, binString + \"1\"))\n return d\n\n\ndef get_huffman_code(char_freq: Dict[str, int]) -> Dict[str, str]:\n # calculating Huffman code\n nodes = sorted(char_freq.items(), key=lambda x: x[1], reverse=True)\n while len(nodes) > 1:\n key1, c1 = nodes[-1]\n key2, c2 = nodes[-2]\n nodes = nodes[:-2]\n node = Node(None, key1, key2)\n nodes.append((node, c1 + c2))\n nodes = sorted(nodes, key=lambda x: x[1], reverse=True)\n huffmanCode = huffman_code_tree(nodes[0][0])\n return huffmanCode\n\n\nif __name__ == \"__main__\":\n print(get_huffman_code({\"c\": 1, \"a\": 2, \"t\": 2, \"s\": 1}))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "5175943", "language": "Python", "matching_score": 2.136263847351074, "max_stars_count": 70, "path": "Solutions/261.py" }, { "content": "\"\"\"\nProblem:\n\nRun-length encoding is a fast and simple method of encoding strings. The basic idea is\nto represent repeated successive characters as a single count and character. For\nexample, the string \"AAAABBBCCDAA\" would be encoded as \"4A3B2C1D2A\".\n\nImplement run-length encoding and decoding. You can assume the string to be encoded\nhave no digits and consists solely of alphabetic characters. You can assume the string\nto be decoded is valid.\n\"\"\"\n\n\ndef encode_run_length_encoding(string: str) -> str:\n # Time Complexity: O(n), Space Complexity: O(1)\n if not string:\n return \"\"\n encoded_string = \"\"\n prev_char = string[0]\n count = 0\n # generating the encoded string\n for char in string:\n if char != prev_char:\n encoded_string += str(count) + prev_char\n prev_char = char\n count = 1\n else:\n count += 1\n encoded_string += str(count) + prev_char\n return encoded_string\n\n\ndef decode_run_length_encoding(string: str) -> str:\n # Time Complexity: O(n), Space Complexity: O(1)\n decoded_string = \"\"\n char_frequency = 0\n # generating the decoded string\n for char in string:\n if char.isdigit():\n char_frequency = char_frequency * 10 + int(char)\n else:\n decoded_string += char * char_frequency\n char_frequency = 0\n return decoded_string\n\n\nif __name__ == \"__main__\":\n print(encode_run_length_encoding(\"AAAABBBCCDAA\"))\n print(decode_run_length_encoding(\"4A3B2C1D2A\"))\n", "id": "1991546", "language": "Python", "matching_score": 1.587562918663025, "max_stars_count": 70, "path": "Solutions/029.py" }, { "content": "\"\"\"\nProblem:\n\nGiven the mapping a = 1, b = 2, ... z = 26, and an encoded message, count the number of\nways it can be decoded.\n\nFor example, the message '111' would give 3, since it could be decoded as 'aaa', 'ka',\nand 'ak'.\n\nYou can assume that the messages are decodable. 
For example, '001' is not allowed.\n\"\"\"\n\n\ndef count_decoding(digits: str) -> int:\n len_digits = len(digits)\n # dynamic Programming table\n count = [0 for _ in range(len_digits + 1)]\n # base cases\n count[0] = 1\n count[1] = 1\n\n for i in range(2, len_digits + 1):\n count[i] = 0\n # if the last digit is not 0, then last digit must add to the number of words\n if digits[i - 1] > \"0\":\n count[i] = count[i - 1]\n # if the number formed by the last 2 digits is less than 26, its a valid\n # character\n if digits[i - 2] == \"1\" or (digits[i - 2] == \"2\" and digits[i - 1] < \"7\"):\n count[i] += count[i - 2]\n return count[len_digits]\n\n\nif __name__ == \"__main__\":\n print(count_decoding(\"81\"))\n print(count_decoding(\"11\"))\n print(count_decoding(\"111\"))\n print(count_decoding(\"1311\"))\n print(count_decoding(\"1111\"))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "12085066", "language": "Python", "matching_score": 1.3823469877243042, "max_stars_count": 70, "path": "Solutions/007.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a mapping of digits to letters (as in a phone number), and a digit string, return\nall possible letters the number could represent. You can assume each valid number in\nthe mapping is a single digit.\n\nFor example if {'2': ['a', 'b', 'c'], '3': ['d', 'e', 'f'], } then \"23\" should return\n['ad', 'ae', 'af', 'bd', 'be', 'bf', 'cd', 'ce', 'cf'].\n\"\"\"\n\nfrom typing import Dict, List\n\n\ndef get_mappings(\n digit_to_character_map: Dict[str, str], string: str, result: List[str] = []\n) -> List[str]:\n if not string:\n return result\n if not result:\n for elem in digit_to_character_map[string[0]]:\n result.append(elem)\n return get_mappings(digit_to_character_map, string[1:], result)\n # generating the mappings\n temp = []\n for part in result:\n for elem in digit_to_character_map[string[0]]:\n temp.append(part + elem)\n result[:] = temp\n return get_mappings(digit_to_character_map, string[1:], result)\n\n\nif __name__ == \"__main__\":\n print(get_mappings({\"2\": [\"a\", \"b\", \"c\"], \"3\": [\"d\", \"e\", \"f\"]}, \"23\", []))\n print(get_mappings({\"2\": [\"a\", \"b\", \"c\"], \"3\": [\"d\", \"e\", \"f\"]}, \"32\", []))\n print(get_mappings({\"2\": [\"a\", \"b\", \"c\"], \"3\": [\"d\", \"e\", \"f\"]}, \"222\", []))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n ^ m)\nSPACE COMPLEXITY: O(n ^ m)\n[n = string length, m = no of characters in map]\n\"\"\"\n", "id": "4217196", "language": "Python", "matching_score": 1.4159648418426514, "max_stars_count": 70, "path": "Solutions/081.py" }, { "content": "\"\"\"\nProblem:\n\nA cryptarithmetic puzzle is a mathematical game where the digits of some numbers are\nrepresented by letters. 
Each letter represents a unique digit.\n\nFor example, a puzzle of the form:\n\n  SEND\n+ MORE\n--------\n MONEY\nmay have the solution:\n\n{'S': 9, 'E': 5, 'N': 6, 'D': 7, 'M': 1, 'O': 0, 'R': 8, 'Y': 2}\n\nGiven a three-word puzzle like the one above, create an algorithm that finds a solution.\n\"\"\"\n\n\nfrom typing import Dict, List, Set\n\n\ndef get_num_from_string(char_map: Dict[str, int], string: str) -> int:\n    # reconstruct the number by assigning place values from the last digit up\n    place_value = 1\n    total = 0\n    for char in string[::-1]:\n        total += char_map[char] * place_value\n        place_value *= 10\n    return total\n\n\ndef is_valid_map(exp1: str, exp2: str, res: str, char_map: Dict[str, int]) -> bool:\n    num1 = get_num_from_string(char_map, exp1)\n    num2 = get_num_from_string(char_map, exp2)\n    num3 = get_num_from_string(char_map, res)\n    return num1 + num2 == num3\n\n\ndef get_valid_char_map(\n    exp1: str, exp2: str, res: str, char_maps: List[Dict[str, int]]\n) -> Dict[str, int]:\n    for char_map in char_maps:\n        if is_valid_map(exp1, exp2, res, char_map):\n            return char_map\n\n\ndef assign_letters(\n    chars_left: Set[str],\n    nums_left: Set[int],\n    restrictions: Dict[str, Set[int]],\n    char_map: Dict[str, int] = {},\n) -> List[Dict[str, int]]:\n    # function to assign digits to the characters\n    # brute force approach: all valid (doesn't contradict restrictions)\n    # combinations are generated\n    if not chars_left:\n        return [char_map]\n    curr_char = list(chars_left)[0]\n    char_maps = []\n    for num in nums_left:\n        if num in restrictions[curr_char]:\n            continue\n        char_map_cp = char_map.copy()\n        char_map_cp[curr_char] = num\n        child_char_maps = assign_letters(\n            chars_left - set([curr_char]),\n            nums_left - set([num]),\n            restrictions,\n            char_map_cp,\n        )\n        char_maps.extend(child_char_maps)\n    return char_maps\n\n\ndef decode(exp1: str, exp2: str, res: str) -> Dict[str, int]:\n    characters = set(exp1) | set(exp2) | set(res)\n    if len(characters) > 10:\n        raise ValueError(\"Number of digits cannot be more than 10\")\n\n    nums = set(range(0, 10))\n    restrictions = {}\n    for char in characters:\n        restrictions[char] = set()\n    for word in [exp1, exp2, res]:\n        # a leading letter cannot map to 0\n        restrictions[word[0]].add(0)\n    char_maps = assign_letters(characters, nums, restrictions)\n    return get_valid_char_map(exp1, exp2, res, char_maps)\n\n\nif __name__ == \"__main__\":\n    print(decode(\"SEND\", \"MORE\", \"MONEY\"))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(10! / (10 - n)!)\nSPACE COMPLEXITY: O(n)\n[n = number of unique characters]\n\"\"\"\n", "id": "6199952", "language": "Python", "matching_score": 1.9258570671081543, "max_stars_count": 70, "path": "Solutions/250.py" }, { "content": "\"\"\"\nProblem:\n\nSoundex is an algorithm used to categorize phonetically, such that two names that sound\nalike but are spelled differently have the same representation.\n\nSoundex maps every name to a string consisting of one letter and three numbers, like\nM460.\n\nOne version of the algorithm is as follows:\n\nRemove consecutive consonants with the same sound (for example, change ck -> c).\nKeep the first letter. The remaining steps only apply to the rest of the string.\nRemove all vowels, including y, w, and h.\nReplace all consonants with the following digits:\nb, f, p, v -> 1\nc, g, j, k, q, s, x, z -> 2\nd, t -> 3\nl -> 4\nm, n -> 5\nr -> 6\nIf you don't have three numbers yet, append zeros until you do. Keep the first three\nnumbers. 
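(Tracing Jackson: keep J, drop the vowels, collapse the similar sounding\nc/k/s run to c, then map c -> 2 and n -> 5 to get J25, padded with a zero.) 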
Using this scheme, Jackson and Jaxen both map to J250.\n\"\"\"\n\nIRRELEVANT_CHAR = {\"a\", \"e\", \"i\", \"o\", \"u\", \"y\", \"w\", \"h\"}\nSIMILAR_SOUND_MAP = {\"c\": {\"k\", \"s\"}, \"k\": {\"c\"}, \"s\": {\"c\"}}\nCHAR_DIGIT_MAP = {\n \"b\": \"1\",\n \"f\": \"1\",\n \"p\": \"1\",\n \"v\": \"1\",\n \"c\": \"2\",\n \"g\": \"2\",\n \"j\": \"2\",\n \"k\": \"2\",\n \"q\": \"2\",\n \"s\": \"2\",\n \"x\": \"2\",\n \"z\": \"2\",\n \"d\": \"3\",\n \"t\": \"3\",\n \"l\": \"4\",\n \"m\": \"5\",\n \"n\": \"5\",\n \"r\": \"6\",\n}\n\n\ndef soundex(word: str) -> str:\n # removing irrelevant characters from the word\n word = \"\".join([char for char in word.lower() if char not in IRRELEVANT_CHAR])\n last_char = word[0]\n transformed_word = last_char\n soundex_map = \"\"\n # eliminating similar sounding characters\n for char in word[1:]:\n if char in SIMILAR_SOUND_MAP:\n if last_char in SIMILAR_SOUND_MAP[char]:\n continue\n transformed_word += char\n last_char = char\n # generating soundex\n soundex_map = transformed_word[0].upper()\n for char in transformed_word[1:]:\n soundex_map += CHAR_DIGIT_MAP[char]\n return soundex_map + \"0\" * (4 - len(soundex_map))\n\n\nif __name__ == \"__main__\":\n print(soundex(\"Jackson\"))\n print(soundex(\"Jaxen\"))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "5561245", "language": "Python", "matching_score": 1.4182811975479126, "max_stars_count": 70, "path": "Solutions/349.py" }, { "content": "\"\"\"\nProblem:\n\nYou come across a dictionary of sorted words in a language you've never seen before.\nWrite a program that returns the correct order of letters in this language.\n\nFor example, given ['xww', 'wxyz', 'wxyw', 'ywx', 'ywz'], you should return\n['x', 'z', 'w', 'y'].\n\"\"\"\n\nfrom typing import Dict, List, Optional, Set\n\n\ndef update_letter_order(sorted_words: List[str], letters: Dict[str, Set[str]]) -> None:\n order = []\n new_words = {}\n prev_char = None\n\n for word in sorted_words:\n if word:\n char = word[0]\n if char != prev_char:\n order.append(char)\n if char not in new_words:\n new_words[char] = list()\n new_words[char].append(word[1:])\n prev_char = char\n\n for index, char in enumerate(order):\n letters[char] = letters[char] | set(order[index + 1 :])\n for char in new_words:\n update_letter_order(new_words[char], letters)\n\n\ndef find_path(\n letters: Dict[str, Set[str]], start: str, path: List[str], length: int\n) -> Optional[List[str]]:\n if len(path) == length:\n return path\n if not letters[start]:\n return None\n\n for next_start in letters[start]:\n new_path = find_path(letters, next_start, path + [next_start], length)\n if new_path:\n return new_path\n\n\ndef get_letter_order(sorted_words: List[str]):\n letters = {}\n for word in sorted_words:\n for letter in word:\n if letter not in letters:\n letters[letter] = set()\n\n update_letter_order(sorted_words, letters)\n\n max_children = max([len(x) for x in letters.values()])\n potential_heads = [x for x in letters if len(letters[x]) == max_children]\n\n path = None\n for head in potential_heads:\n path = find_path(letters, head, path=[head], length=len(letters))\n if path:\n break\n return path\n\n\nif __name__ == \"__main__\":\n print(get_letter_order([\"xww\", \"wxyz\", \"wxyw\", \"ywx\", \"ywz\"]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(words x letters + words ^ 2 + letters ^ 2)\nSPACE COMPLEXITY: O(words x letters)\n\"\"\"\n", "id": "12831565", "language": "Python", "matching_score": 1.5441471338272095, "max_stars_count": 70, "path": 
"Solutions/226.py" }, { "content": "\"\"\"\nProblem:\n\nIn a directed graph, each node is assigned an uppercase letter. We define a path's\nvalue as the number of most frequently-occurring letter along that path. For example,\nif a path in the graph goes through \"ABACA\", the value of the path is 3, since there\nare 3 occurrences of 'A' on the path.\n\nGiven a graph with n nodes and m directed edges, return the largest value path of the\ngraph. If the largest value is infinite, then return null.\n\nThe graph is represented with a string and an edge list. The i-th character represents\nthe uppercase letter of the i-th node. Each tuple in the edge list (i, j) means there\nis a directed edge from the i-th node to the j-th node. Self-edges are possible, as\nwell as multi-edges.\n\nFor example, the following input graph:\n\nABACA\n[(0, 1),\n (0, 2),\n (2, 3),\n (3, 4)]\nWould have maximum value 3 using the path of vertices [0, 2, 3, 4], (A, A, C, A).\n\nThe following input graph:\n\nA\n[(0, 0)]\nShould return null, since we have an infinite loop.\n\"\"\"\n\n# Solution copied from:\n# https://github.com/vineetjohn/daily-coding-problem/blob/master/solutions/problem_072.py\n\n\nfrom typing import Dict, List, Optional, Set, Tuple\n\n\nclass GraphPath:\n def __init__(\n self, nodes: Set[str] = set(), letter_counts: Dict[str, int] = dict()\n ) -> None:\n self.nodes = nodes\n self.letter_counts = letter_counts\n\n def __repr__(self) -> str:\n return \"nodes={}, letters={}\".format(self.nodes, self.letter_counts)\n\n\ndef get_max_value_string_helper(\n graph_path: GraphPath, node: str, adjacency_map: Dict[str, Set[str]]\n) -> List[GraphPath]:\n if node in graph_path.nodes:\n return [graph_path]\n\n new_nodes = graph_path.nodes.copy()\n new_nodes.add(node)\n new_letter_counts = graph_path.letter_counts.copy()\n if node[0] not in new_letter_counts:\n new_letter_counts[node[0]] = 0\n new_letter_counts[node[0]] += 1\n\n new_graph_path = GraphPath(new_nodes, new_letter_counts)\n\n if node not in adjacency_map:\n return [new_graph_path]\n\n paths = list()\n for child_node in adjacency_map[node]:\n new_paths = get_max_value_string_helper(\n new_graph_path, child_node, adjacency_map\n )\n paths.extend(new_paths)\n return paths\n\n\ndef get_max_value_string(\n graph_string: str, edge_list: List[Tuple[int, int]]\n) -> Optional[int]:\n letter_counts = dict()\n nodes = list()\n for char in graph_string:\n if char not in letter_counts:\n letter_counts[char] = 0\n else:\n letter_counts[char] += 1\n nodes.append(\"{}{}\".format(char, letter_counts[char]))\n\n adjacency_map = dict()\n for start, end in edge_list:\n if nodes[start] not in adjacency_map:\n adjacency_map[nodes[start]] = set()\n if nodes[start] != nodes[end]:\n adjacency_map[nodes[start]].add(nodes[end])\n\n paths = list()\n graph_path = GraphPath()\n for node in adjacency_map:\n new_paths = get_max_value_string_helper(graph_path, node, adjacency_map)\n paths.extend(new_paths)\n\n max_value = 0\n for path in paths:\n max_path_value = max(path.letter_counts.values())\n if max_path_value > max_value:\n max_value = max_path_value\n return max_value if max_value > 0 else None\n\n\nif __name__ == \"__main__\":\n print(get_max_value_string(\"ABACA\", [(0, 1), (0, 2), (2, 3), (3, 4)]))\n print(get_max_value_string(\"A\", [(0, 0)]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O((len(graph_string) + edges) ^ 2)\nSPACE COMPLEXITY: O(len(graph_string) + edges)\n\"\"\"\n", "id": "5792180", "language": "Python", "matching_score": 1.5925449132919312, "max_stars_count": 
70, "path": "Solutions/072.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a string and a number of lines k, print the string in zigzag form. In zigzag,\ncharacters are printed out diagonally from top left to bottom right until reaching the\nkth line, then back up to top right, and so on.\n\nFor example, given the sentence \"thisisazigzag\" and k = 4, you should print:\n\nt a g\n h s z a\n i i i z\n s g\n\"\"\"\n\n\ndef clamp(num: int, min_value: int, max_value: int) -> int:\n return max(min(num, max_value), min_value)\n\n\ndef print_zigzag_string(string: str, k: int) -> None:\n if k < 1:\n return\n\n length = len(string)\n matrix = [[\" \" for _ in range(length)] for _ in range(k)]\n i, j = 0, 0\n is_increasing = True\n # generating zigzag string matrix\n for char in string:\n matrix[i][j] = char\n j += 1\n if is_increasing:\n i += 1\n else:\n i -= 1\n if i == k or i == -1:\n is_increasing = not is_increasing\n if i == k:\n i = clamp(i - 2, 0, k - 1)\n else:\n i = clamp(i + 2, 0, k - 1)\n # displaying the string matrix\n for row in matrix:\n for elem in row:\n print(elem, end=\"\")\n print()\n\n\nif __name__ == \"__main__\":\n print_zigzag_string(\"thisisazigzag\", 4)\n print()\n print_zigzag_string(\"thisisazigzag\", 3)\n print()\n print_zigzag_string(\"thisisazigzag\", 2)\n print()\n print_zigzag_string(\"thisisazigzag\", 1)\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n x k)\nSPACE COMPLEXITY: O(n x k)\n\"\"\"\n", "id": "3486036", "language": "Python", "matching_score": 1.5982741117477417, "max_stars_count": 70, "path": "Solutions/253.py" }, { "content": "\"\"\"\nProblem:\n\nGiven an array of numbers of length N, find both the minimum and maximum using less\nthan 2 * (N - 2) comparisons.\n\"\"\"\n\nfrom typing import List, Tuple\n\n\ndef get_min_max(arr: List[int]) -> Tuple[int, int]:\n if not arr:\n return None, None\n\n length = len(arr)\n if length % 2 == 0:\n max_elem = max(arr[0], arr[1])\n min_elem = min(arr[0], arr[1])\n start = 2\n else:\n max_elem = min_elem = arr[0]\n start = 1\n\n # reducing the number of comparisons by comparing the array elements with themselves\n # effective comparisons is 3 for every 2 elements\n for i in range(start, length, 2):\n if arr[i] < arr[i + 1]:\n max_elem = max(max_elem, arr[i + 1])\n min_elem = min(min_elem, arr[i])\n continue\n max_elem = max(max_elem, arr[i])\n min_elem = min(min_elem, arr[i + 1])\n\n return min_elem, max_elem\n\n\nif __name__ == \"__main__\":\n print(get_min_max([1000, 11, 445, 1, 330, 3000]))\n print(get_min_max([1000, 11, 445, 1, -330]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "8152294", "language": "Python", "matching_score": 1.3908296823501587, "max_stars_count": 70, "path": "Solutions/235.py" }, { "content": "\"\"\"\nProblem:\n\nMegaCorp wants to give bonuses to its employees based on how many lines of codes they\nhave written. 
They would like to give the smallest positive amount to each worker\nconsistent with the constraint that if a developer has written more lines of code than\ntheir neighbor, they should receive more money.\n\nGiven an array representing a line of seats of employees at MegaCorp, determine how\nmuch each one should get paid.\n\nFor example, given [10, 40, 200, 1000, 60, 30], you should return [1, 2, 3, 4, 2, 1].\n\"\"\"\n\nfrom typing import List\n\n\ndef get_bonus(arr: List[int]) -> List[int]:\n length = len(arr)\n if length == 0:\n return []\n if length == 1:\n return [1]\n\n comparison = [None for _ in range(length)]\n for i in range(1, length):\n if arr[i] > arr[i - 1]:\n comparison[i] = \"+\"\n elif arr[i] < arr[i - 1]:\n comparison[i] = \"-\"\n else:\n comparison[i] = \"=\"\n\n i = 0\n comparison[0] = comparison[1]\n result = [0 for _ in range(length)]\n while i < length:\n # case: current element is larger than the previous element\n if i < length and comparison[i] == \"+\":\n j = i + 1\n while j < length and comparison[j] == \"+\":\n j += 1\n j -= 1\n curr = 1\n for k in range(i, j + 1):\n result[k] = curr\n curr += 1\n i = j + 1\n # case: current element is smaller than the previous element\n elif i < length and comparison[i] == \"-\":\n j = i - 1\n while j > 0 and result[j] == 1:\n result[j] += 1\n j -= 1\n j = i + 1\n while j < length and comparison[j] == \"-\":\n j += 1\n j -= 1\n curr = 1\n for k in range(j, i - 1, -1):\n result[k] = curr\n curr += 1\n i = j + 1\n # case: current element is equal to the previous element\n else:\n result[i] = result[i - 1]\n i += 1\n return result\n\n\nif __name__ == \"__main__\":\n print(get_bonus([1000]))\n print(get_bonus([10, 40, 200, 1000, 60, 30]))\n print(get_bonus([10, 40, 200, 1000, 900, 800, 30]))\n print(get_bonus([10, 40, 200, 1000, 900, 800, 30, 30]))\n print(get_bonus([10, 40, 200, 1000, 800, 800, 30]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "459779", "language": "Python", "matching_score": 0.8128572702407837, "max_stars_count": 70, "path": "Solutions/265.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a pivot x, and a list lst, partition the list into three parts.\n\nThe first part contains all elements in lst that are less than x\nThe second part contains all elements in lst that are equal to x\nThe third part contains all elements in lst that are larger than x Ordering within a\npart can be arbitrary.\nFor example, given x = 10 and lst = [9, 12, 3, 5, 14, 10, 10], one partition may be\n[9, 3, 5, 10, 10, 12, 14]\n\"\"\"\n\nfrom typing import List, Optional\n\n\ndef separate_with_pivot(arr: List[int], i: int, j: int, x: int) -> Optional[int]:\n if not arr:\n return\n # separating the elements less than x and greater than or equal to x\n while i < j:\n if arr[i] >= x and arr[j] < x:\n arr[i], arr[j] = arr[j], arr[i]\n i += 1\n j -= 1\n continue\n if arr[i] < x:\n i += 1\n if arr[j] >= x:\n j -= 1\n if (arr[i] < x) and (i + 1 < len(arr)):\n return i + 1\n return i\n\n\ndef pivot_list(arr: List[int], x: int) -> List[int]:\n length = len(arr)\n temp = separate_with_pivot(arr, 0, length - 1, x)\n pivot_start = temp if temp else 0\n separate_with_pivot(arr, pivot_start, length - 1, x + 1)\n return arr\n\n\nif __name__ == \"__main__\":\n print(pivot_list([9, 12, 3, 5, 14, 10, 10], 10))\n print(pivot_list([9, 12, 3, 5, 14, 10, 10], 8))\n print(pivot_list([9, 12, 3, 5, 11, 10, 10], 10))\n print(pivot_list([9, 12, 14, 10, 10], 8))\n print(pivot_list([3, 5], 8))\n print(pivot_list([8, 8, 8], 8))\n 
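# hypothetical extra check: a pivot larger than every element should leave\n    # the whole list in the first partition\n    print(pivot_list([9, 12, 3], 100))\n    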
print(pivot_list([], 8))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "1080362", "language": "Python", "matching_score": 0.7881723642349243, "max_stars_count": 70, "path": "Solutions/143.py" }, { "content": "\"\"\"\nProblem:\n\nGiven an array of numbers N and an integer k, your task is to split N into k partitions\nsuch that the maximum sum of any partition is minimized. Return this sum.\n\nFor example, given N = [5, 1, 2, 7, 3, 4] and k = 3, you should return 8, since the\noptimal partition is [5, 1, 2], [7], [3, 4].\n\"\"\"\n\nfrom sys import maxsize\nfrom typing import List, Tuple\n\n\ndef minimize_partition_sum_helper(arr: List[int], k: int) -> Tuple[List[int], int]:\n    if k == 1:\n        return [arr], sum(arr)\n\n    min_value = maxsize\n    min_candidate = None\n    for i in range(len(arr)):\n        arr_1, sum_1 = [arr[:i]], sum(arr[:i])\n        arr_2, sum_2 = minimize_partition_sum_helper(arr[i:], k - 1)\n        candidate = arr_1 + arr_2, max(sum_1, sum_2)\n        if candidate[1] < min_value:\n            min_value = candidate[1]\n            min_candidate = candidate\n    return min_candidate\n\n\ndef minimize_partition_sum(arr: List[int], k: int) -> int:\n    _, max_sum = minimize_partition_sum_helper(arr, k)\n    return max_sum\n\n\nif __name__ == \"__main__\":\n    print(minimize_partition_sum([5, 1, 2, 7, 3, 4], 3))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n ^ k)\nSPACE COMPLEXITY: O(n ^ 2)\n\"\"\"\n", "id": "11465798", "language": "Python", "matching_score": 1.2616738080978394, "max_stars_count": 70, "path": "Solutions/243.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a binary tree of integers, find the maximum path sum between two nodes. The path\nmust go through at least one node, and does not need to go through the root.\n\"\"\"\n\nfrom sys import maxsize\nfrom typing import Optional, Tuple\n\nfrom DataStructures.Tree import BinaryTree, Node\n\n\ndef get_max_path_sum_helper(node: Optional[Node]) -> Tuple[int, int]:\n    # returns a pair:\n    # (best path sum starting at node and extending downwards,\n    #  best path sum found anywhere in the subtree rooted at node)\n    if not node:\n        return 0, -maxsize\n    left_down, left_best = get_max_path_sum_helper(node.left)\n    right_down, right_best = get_max_path_sum_helper(node.right)\n    # a downward path either stops at this node or continues into one subtree\n    down = max(node.val, node.val + left_down, node.val + right_down)\n    # a path passing through this node may additionally join both subtrees\n    through = max(down, node.val + left_down + right_down)\n    return down, max(left_best, right_best, through)\n\n\ndef get_max_path_sum(tree: BinaryTree) -> int:\n    _, result = get_max_path_sum_helper(tree.root)\n    return result\n\n\nif __name__ == \"__main__\":\n    tree = BinaryTree()\n    tree.root = Node(1)\n\n    print(tree)\n    print(get_max_path_sum(tree))\n\n    tree.root.left = Node(2)\n    print(tree)\n    print(get_max_path_sum(tree))\n\n    tree.root.right = Node(3)\n    print(tree)\n    print(get_max_path_sum(tree))\n\n    tree.root.val = -1\n    print(tree)\n    print(get_max_path_sum(tree))\n\n    tree.root.left.left = Node(4)\n    print(tree)\n    print(get_max_path_sum(tree))\n\n    tree.root.right.right = Node(-1)\n    print(tree)\n    print(get_max_path_sum(tree))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(log(n))\n\"\"\"\n", "id": "5192539", "language": "Python", "matching_score": 2.55568790435791, "max_stars_count": 70, "path": "Solutions/094.py" }, { "content": "\"\"\"\nProblem:\n\nGiven the root of a binary tree, find the most frequent subtree sum. 
The subtree sum of\na node is the sum of all values under a node, including the node itself.\n\nFor example, given the following tree:\n\n  5\n / \\\n2  -5\n\nReturn 2 as it occurs twice: once as the left leaf, and once as the sum of 2 + 5 - 5.\n\"\"\"\n\nfrom typing import Dict, Tuple\n\nfrom DataStructures.Tree import Node, BinaryTree\n\n\ndef add_to_freq_count(val: int, dictionary: Dict[int, int]) -> Dict[int, int]:\n    if val not in dictionary:\n        dictionary[val] = 0\n    dictionary[val] += 1\n    return dictionary\n\n\ndef get_frequent_subtree_sum_helper(\n    node: Node, sum_freq: Dict[int, int] = {}\n) -> Tuple[Dict[int, int], int]:\n    if node.left is None and node.right is None:\n        return add_to_freq_count(node.val, sum_freq), node.val\n\n    elif node.left is not None and node.right is None:\n        sum_freq, current = get_frequent_subtree_sum_helper(node.left, sum_freq)\n        current += node.val\n        return add_to_freq_count(current, sum_freq), current\n\n    elif node.left is None and node.right is not None:\n        sum_freq, current = get_frequent_subtree_sum_helper(node.right, sum_freq)\n        current += node.val\n        return add_to_freq_count(current, sum_freq), current\n\n    sum_freq, current_left = get_frequent_subtree_sum_helper(node.left, sum_freq)\n    sum_freq, current_right = get_frequent_subtree_sum_helper(node.right, sum_freq)\n    current = current_left + node.val + current_right\n    return add_to_freq_count(current, sum_freq), current\n\n\ndef get_frequent_subtree_sum(tree: BinaryTree) -> int:\n    freq, _ = get_frequent_subtree_sum_helper(tree.root, {})\n    # finding the most frequent value\n    modal_value, frequency = None, 0\n    for key, val in freq.items():\n        if val > frequency:\n            frequency = val\n            modal_value = key\n    return modal_value\n\n\nif __name__ == \"__main__\":\n    tree = BinaryTree()\n\n    tree.root = Node(5)\n    tree.root.left = Node(2)\n    tree.root.right = Node(-5)\n\n    print(tree)\n\n    print(get_frequent_subtree_sum(tree))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "474449", "language": "Python", "matching_score": 1.7468150854110718, "max_stars_count": 70, "path": "Solutions/196.py" }, { "content": "\"\"\"\nProblem:\n\nGenerate a finite, but an arbitrarily large binary tree quickly in O(1).\n\nThat is, generate() should return a tree whose size is unbounded but finite.\n\"\"\"\n\nfrom random import random, randint\n\nimport matplotlib.pyplot as plt\n\nfrom DataStructures.Tree import BinaryTree, Node\n\n\ndef generate_helper(\n    node: Node,\n    probability_add_children: float = 0.5,\n    probability_add_branch: float = 0.5,\n) -> None:\n    if random() > probability_add_children:\n        return\n    # generating the left branch\n    if random() < probability_add_branch:\n        node.left = Node(randint(1, 1000))\n        generate_helper(node.left, probability_add_children, probability_add_branch)\n    # generating the right branch\n    if random() < probability_add_branch:\n        node.right = Node(randint(1, 1000))\n        generate_helper(node.right, probability_add_children, probability_add_branch)\n\n\ndef generate() -> BinaryTree:\n    tree = BinaryTree()\n    tree.root = Node(randint(1, 1000))\n    generate_helper(tree.root, 0.7, 0.7)\n    # suggestion: don't use higher values for probability, it will lead to recursion\n    # error\n    return tree\n\n\nif __name__ == \"__main__\":\n    tree_length_list = []\n    for i in range(1000):\n        tree_length_list.append(len(generate()))\n    plt.hist(tree_length_list)\n    plt.show()\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n[n nodes cannot be generated in O(1) time, but since n is finite it may be 
considered\nconstant]\n\"\"\"\n", "id": "1224216", "language": "Python", "matching_score": 1.412989854812622, "max_stars_count": 70, "path": "Solutions/116.py" }, { "content": "\"\"\"\nProblem:\n\nA tree is symmetric if its data and shape remain unchanged when it is reflected about\nthe root node. The following tree is an example:\n\n 4\n / | \\\n 3 5 3\n / \\\n9 9\nGiven a k-ary tree, determine whether it is symmetric.\n\"\"\"\n\nfrom typing import Dict, List\n\n\nclass Node:\n def __init__(self, val: int) -> None:\n self.val = val\n self.children = []\n\n def __str__(self) -> str:\n return \"{} -> {}\".format(self.val, self.children)\n\n\ndef generate_tree_levels(\n root: Node, levels: Dict[int, List[int]], level_number: int\n) -> Dict[int, List[int]]:\n # DFS to generate the nodes in the tree by level\n if level_number not in levels:\n levels[level_number] = []\n levels[level_number].append(root.val)\n for child in root.children:\n generate_tree_levels(child, levels, level_number + 1)\n return levels\n\n\ndef is_symmetric(node: Node) -> bool:\n levels = generate_tree_levels(node, {}, 0)\n # checking if the tree is symmetric\n for level_values in levels.values():\n if level_values != level_values[::-1]:\n return False\n return True\n\n\nif __name__ == \"__main__\":\n a = Node(4)\n b = Node(5)\n c = Node(3)\n d = Node(3)\n e = Node(9)\n f = Node(9)\n\n a.children = [c, b, d]\n\n c.children = [f]\n d.children = [e]\n\n print(is_symmetric(a))\n\n c.children = [f, Node(1)]\n d.children = [Node(1), e]\n\n print(is_symmetric(a))\n\n c.val = 4\n print(is_symmetric(a))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n + e)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "10965758", "language": "Python", "matching_score": 0.9888275861740112, "max_stars_count": 70, "path": "Solutions/237.py" }, { "content": "\"\"\"\nProblem:\n\nYou are given a tree with an even number of nodes. Consider each connection between a\nparent and child node to be an \"edge\". 
You would like to remove some of these edges,\nsuch that the disconnected subtrees that remain each have an even number of nodes.\n\nFor example, suppose your input was the following tree:\n\n 1\n / \\ \n 2 3\n / \\ \n 4 5\n / | \\\n6 7 8\n\nIn this case, removing the edge (3, 4) satisfies our requirement.\n\nWrite a function that returns the maximum number of edges you can remove while still\nsatisfying this requirement.\n\"\"\"\n\nfrom __future__ import annotations\nfrom typing import List, Tuple\n\n\nclass Node:\n def __init__(self, val: int) -> None:\n self.val = val\n self.children = []\n\n def add_children(self, children: List[Node] = []) -> None:\n self.children = [*children]\n\n\nclass Tree:\n def __init__(self) -> None:\n self.root = None\n\n\ndef get_even_edge_split_helper(node: Node) -> Tuple[int, int]:\n nodes_count = 0\n even_splits = 0\n for child in node.children:\n child_nodes, child_even_splits = get_even_edge_split_helper(child)\n nodes_count += child_nodes\n even_splits += child_even_splits\n if child_nodes != 0 and child_nodes % 2 == 0:\n even_splits += 1\n return nodes_count + 1, even_splits\n\n\ndef get_even_edge_split(tree: Tree) -> int:\n if tree.root:\n _, result = get_even_edge_split_helper(tree.root)\n return result\n return 0\n\n\nif __name__ == \"__main__\":\n tree = Tree()\n\n a = Node(1)\n b = Node(2)\n c = Node(3)\n d = Node(4)\n e = Node(5)\n f = Node(6)\n g = Node(7)\n h = Node(8)\n\n a.add_children([b, c])\n c.add_children([d, e])\n d.add_children([f, g, h])\n\n tree.root = a\n\n print(get_even_edge_split(tree)) # possible splits at (1, 3) and (3, 4)\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "8466791", "language": "Python", "matching_score": 1.6345443725585938, "max_stars_count": 70, "path": "Solutions/344.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a linked list, rearrange the node values such that they appear in alternating\nlow -> high -> low -> high ... form. 
For example, given 1 -> 2 -> 3 -> 4 -> 5, you\nshould return 1 -> 3 -> 2 -> 5 -> 4.\n\"\"\"\n\nfrom DataStructures.LinkedList import LinkedList\n\n\ndef rearrange(ll: LinkedList) -> None:\n    nodes_count = len(ll)\n    nodes = [int(node) for node in ll]\n    nodes.sort()\n\n    for i in range(2, nodes_count, 2):\n        nodes[i], nodes[i - 1] = nodes[i - 1], nodes[i]\n\n    curr = ll.head\n    for i in range(nodes_count):\n        curr.val = nodes[i]\n        curr = curr.next\n\n\nif __name__ == \"__main__\":\n    LL = LinkedList()\n\n    for i in range(1, 6):\n        LL.add(i)\n\n    print(LL)\n    rearrange(LL)\n    print(LL)\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "7649104", "language": "Python", "matching_score": 1.4244616031646729, "max_stars_count": 70, "path": "Solutions/256.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a linked list, sort it in O(n log n) time and constant space.\n\nFor example, the linked list 4 -> 1 -> -3 -> 99 should become -3 -> 1 -> 4 -> 99.\n\"\"\"\n\nfrom typing import Optional\n\nfrom DataStructures.LinkedList import Node, LinkedList\n\n\ndef sorted_merge(a: Optional[Node], b: Optional[Node]) -> Optional[Node]:\n    if a is None:\n        return b\n    if b is None:\n        return a\n\n    result = None\n    if a.val <= b.val:\n        result = a\n        result.next = sorted_merge(a.next, b)\n    else:\n        result = b\n        result.next = sorted_merge(a, b.next)\n    return result\n\n\ndef merge_sort(h: Optional[Node]) -> Optional[Node]:\n    if h is None or h.next is None:\n        return h\n\n    middle = get_middle(h)\n    next_to_middle = middle.next\n    middle.next = None\n\n    left = merge_sort(h)\n    right = merge_sort(next_to_middle)\n\n    sortedlist = sorted_merge(left, right)\n    return sortedlist\n\n\ndef get_middle(head: Optional[Node]) -> Optional[Node]:\n    # searching for the middle of the linked list using the fast and slow\n    # pointer technique\n    if head is None:\n        return head\n\n    slow, fast = head, head\n    while fast.next is not None and fast.next.next is not None:\n        slow = slow.next\n        fast = fast.next.next\n    return slow\n\n\ndef sort(ll: LinkedList) -> LinkedList:\n    ll.head = merge_sort(ll.head)\n    # resetting rear\n    curr = ll.head\n    while curr.next:\n        curr = curr.next\n    ll.rear = curr\n    return ll\n\n\nif __name__ == \"__main__\":\n    LL = LinkedList()\n\n    for val in [6, 3, 7, 5, 30, 2, 50]:\n        LL.add(val)\n\n    print(LL)\n    sort(LL)\n    print(LL)\n    print()\n\n    LL = LinkedList()\n\n    for val in [4, 1, -3, 99]:\n        LL.add(val)\n\n    print(LL)\n    sort(LL)\n    print(LL)\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n log(n))\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "4804884", "language": "Python", "matching_score": 2.2503089904785156, "max_stars_count": 70, "path": "Solutions/169.py" }, { "content": "from typing import Iterable, Optional\n\n\nclass Node:\n    \"\"\"\n    Node Class for the nodes of a Linked List\n    \"\"\"\n\n    def __init__(self, val: Optional[int] = None) -> None:\n        self.val = val\n        self.next = None\n\n    def __repr__(self) -> str:\n        if self.next:\n            return f\"{str(self.val)} => {str(self.next)}\"\n        return str(self.val)\n\n\nclass LinkedList:\n    \"\"\"\n    Linked List Class\n\n    Functions:\n    add: function to add a node at the end of the linked list\n    \"\"\"\n\n    def __init__(self) -> None:\n        self.head = None\n        self.rear = None\n        self.length = 0\n\n    def __repr__(self) -> str:\n        return str(self.head)\n\n    def add(self, val: int = 0) -> None:\n        # Adds a new node with the provided value at the rear of the list\n        self.length += 1\n        if self.head is None:\n            self.head = Node(val)\n            self.rear = self.head\n        
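    # the single node serves as both the head and the rear of the list\n        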
else:\n self.rear.next = Node(val)\n self.rear = self.rear.next\n\n def __len__(self) -> int:\n return self.length\n\n # iteration initialization\n def __iter__(self) -> Iterable:\n self.curr = self.head\n return self\n\n # next function to iterate through the linked list iterator\n def __next__(self) -> int:\n if self.curr:\n value = self.curr.val\n self.curr = self.curr.next\n return value\n else:\n raise StopIteration\n", "id": "7363021", "language": "Python", "matching_score": 1.6696515083312988, "max_stars_count": 70, "path": "Solutions/DataStructures/LinkedList.py" }, { "content": "\"\"\"\nProblem:\n\nDesign and implement a HitCounter class that keeps track of requests (or hits). It\nshould support the following operations:\n\n* record(timestamp): records a hit that happened at timestamp\n* total(): returns the total number of hits recorded\n* range(lower, upper): returns the number of hits that occurred between timestamps lower\n and upper (inclusive)\nFollow-up: What if our system has limited memory?\n\"\"\"\n\nfrom DataStructures.LinkedList import Node, LinkedList\n\n\ndef add_node_sorted(ll: LinkedList, val: int) -> None:\n ll.length += 1\n if not ll.head:\n ll.head = Node(val)\n ll.rear = ll.head\n elif val > ll.rear.val:\n ll.rear.next = Node(val)\n ll.rear = ll.rear.next\n else:\n pos = ll.head\n while pos.val < val:\n pos = pos.next\n temp = pos.val\n pos.val = val\n new_node = Node(temp)\n new_node.next = pos.next\n pos.next = new_node\n if pos == ll.rear:\n ll.rear = new_node\n\n\ndef get_number_of_nodes_in_range(ll: LinkedList, start: int, stop: int) -> int:\n if not ll.head:\n return 0\n\n pos = ll.head\n num = 0\n while pos and pos.val < start:\n pos = pos.next\n if not pos:\n return 0\n while pos and pos.val <= stop:\n pos = pos.next\n num += 1\n return num\n\n\nclass HitCounter:\n def __init__(self) -> None:\n self.List = LinkedList()\n self.start = None\n self.end = None\n\n def record(self, timestamp: int) -> None:\n add_node_sorted(self.List, timestamp)\n # keeping track of the smallest and largest timestamp\n if not self.start:\n self.start = timestamp\n self.end = timestamp\n elif timestamp < self.start:\n self.start = timestamp\n elif timestamp > self.end:\n self.end = timestamp\n\n def total(self) -> int:\n return len(self.List)\n\n def range(self, lower: int, upper: int) -> int:\n if upper < self.start or lower > self.end:\n return 0\n return get_number_of_nodes_in_range(self.List, lower, upper)\n\n def __repr__(self):\n return str(self.List)\n\n\nif __name__ == \"__main__\":\n hc = HitCounter()\n\n time1 = 1\n time2 = 10\n time3 = 20\n\n print(hc.total())\n print(hc)\n print()\n\n hc.record(time2)\n\n print(hc.total())\n print(hc)\n print(\"Number in range:\")\n print(hc.range(5, 15))\n print(hc.range(10, 15))\n print()\n\n hc.record(time1)\n\n print(hc.total())\n print(hc)\n print(\"Number in range:\")\n print(hc.range(5, 15))\n print(hc.range(12, 15))\n print()\n\n hc.record(time3)\n\n print(hc.total())\n print(hc)\n print(\"Number in range:\")\n print(hc.range(5, 15))\n print(hc.range(0, 25))\n print()\n", "id": "5435667", "language": "Python", "matching_score": 1.3914234638214111, "max_stars_count": 70, "path": "Solutions/132.py" }, { "content": "\"\"\"\nProblem:\n\nYou are given a string formed by concatenating several words corresponding to the\nintegers zero through nine and then anagramming.\n\nFor example, the input could be 'niesevehrtfeev', which is an anagram of\n'threefiveseven'. 
Note that there can be multiple instances of each integer.\n\nGiven this string, return the original integers in sorted order. In the example above,\nthis would be 357.\n\"\"\"\n\nfrom collections import Counter\nfrom sys import maxsize\nfrom typing import Counter as C\n\nWORDS = [\n    Counter(\"zero\"),\n    Counter(\"one\"),\n    Counter(\"two\"),\n    Counter(\"three\"),\n    Counter(\"four\"),\n    Counter(\"five\"),\n    Counter(\"six\"),\n    Counter(\"seven\"),\n    Counter(\"eight\"),\n    Counter(\"nine\"),\n]\n# the digits are processed in an order where each word contains a letter that\n# uniquely identifies it among the words still remaining:\n# z -> zero, w -> two, u -> four, x -> six, g -> eight, then o -> one,\n# h -> three, f -> five, s -> seven and i -> nine\nPROCESSING_ORDER = [0, 2, 4, 6, 8, 1, 3, 5, 7, 9]\n\n\ndef generate_num_helper(counter: C[str]) -> C[int]:\n    # runs in O(1) as all the loops run in constant time\n    result = Counter()\n    for value in PROCESSING_ORDER:\n        word_counter = WORDS[value]\n        temp = maxsize\n        for key in word_counter:\n            # checking the number of occurrences of the current number's word\n            if counter[key] >= word_counter[key]:\n                temp = min(temp, counter[key] // word_counter[key])\n            else:\n                temp = 0\n                break\n        else:\n            # updating the input counter to remove the current number\n            curr_counter = Counter()\n            for key in word_counter:\n                curr_counter[key] = word_counter[key] * temp\n            counter = counter - curr_counter\n            result[value] = temp\n    return result\n\n\ndef generate_num(string: str) -> int:\n    str_counter = Counter(string)\n    numbers_counter = generate_num_helper(str_counter)\n\n    numbers_list = [str(num) for num in sorted(numbers_counter.elements())]\n    return int(\"\".join(numbers_list))\n\n\nif __name__ == \"__main__\":\n    print(generate_num(\"niesevehrtfeev\"))\n    print(generate_num(\"niesveeviehertifennevf\"))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n x log(n))\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "509346", "language": "Python", "matching_score": 1.0622057914733887, "max_stars_count": 70, "path": "Solutions/359.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a number in Roman numeral format, convert it to decimal.\n\nThe values of Roman numerals are as follows:\n\n{\n    'M': 1000,\n    'D': 500,\n    'C': 100,\n    'L': 50,\n    'X': 10,\n    'V': 5,\n    'I': 1\n}\nIn addition, note that the Roman numeral system uses subtractive notation for numbers\nsuch as IV and XL.\n\nFor the input XIV, for instance, you should return 14.\n\"\"\"\n\nVALUE_MAP = {\"M\": 1000, \"D\": 500, \"C\": 100, \"L\": 50, \"X\": 10, \"V\": 5, \"I\": 1}\n\n\ndef convert_roman_to_decimal(num_str: str) -> int:\n    length = len(num_str)\n    num = 0\n\n    for i in range(length - 1):\n        # check if the value has to be added or subtracted\n        if VALUE_MAP[num_str[i]] < VALUE_MAP[num_str[i + 1]]:\n            num -= VALUE_MAP[num_str[i]]\n        else:\n            num += VALUE_MAP[num_str[i]]\n    num += VALUE_MAP[num_str[length - 1]]\n    return num\n\n\nif __name__ == \"__main__\":\n    print(convert_roman_to_decimal(\"I\"))\n    print(convert_roman_to_decimal(\"IV\"))\n    print(convert_roman_to_decimal(\"XIV\"))\n    print(convert_roman_to_decimal(\"XL\"))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "6233847", "language": "Python", "matching_score": 1.1344265937805176, "max_stars_count": 70, "path": "Solutions/216.py" }, { "content": "\"\"\"\nProblem:\n\nImplement the singleton pattern with a twist. First, instead of storing one instance,\nstore two instances. 
And in every even call of getInstance(), return the first instance\nand in every odd call of getInstance(), return the second instance.\n\"\"\"\n\nfrom __future__ import annotations\n\n\nclass Twisted_Singleton:\n _instance1, _instance2 = None, None\n _is_odd = True\n _is_initialized = False\n\n def __init__(self, instance_num: int) -> None:\n self.instance_num = instance_num\n\n def __repr__(self) -> str:\n return str(self.instance_num)\n\n @staticmethod\n def initialize() -> None:\n if not Twisted_Singleton._is_initialized:\n Twisted_Singleton._instance1 = Twisted_Singleton(1)\n Twisted_Singleton._instance2 = Twisted_Singleton(2)\n Twisted_Singleton._is_initialized = True\n\n @staticmethod\n def getInstance() -> Twisted_Singleton:\n if not Twisted_Singleton._is_initialized:\n Twisted_Singleton.initialize()\n\n if Twisted_Singleton._is_odd:\n instance = Twisted_Singleton._instance1\n else:\n instance = Twisted_Singleton._instance2\n Twisted_Singleton._is_odd = not Twisted_Singleton._is_odd\n return instance\n\n\nif __name__ == \"__main__\":\n Twisted_Singleton.initialize()\n\n print(Twisted_Singleton.getInstance())\n print(Twisted_Singleton.getInstance())\n print(Twisted_Singleton.getInstance())\n print(Twisted_Singleton.getInstance())\n print(Twisted_Singleton.getInstance())\n print(Twisted_Singleton.getInstance())\n", "id": "8380124", "language": "Python", "matching_score": 1.0011506080627441, "max_stars_count": 70, "path": "Solutions/120.py" }, { "content": "\"\"\"\nProblem:\n\nThe sequence [0, 1, ..., N] has been jumbled, and the only clue you have for its order\nis an array representing whether each number is larger or smaller than the last. Given\nthis information, reconstruct an array that is consistent with it. For example, given\n[None, +, +, -, +], you could return [1, 2, 3, 0, 4].\n\"\"\"\n\n\nfrom typing import List, Optional\n\n\ndef get_sequence(relative_arr: List[Optional[str]]) -> List[int]:\n length = len(relative_arr)\n larger_count = relative_arr.count(\"+\")\n first_num = length - 1 - larger_count\n larger_num, smaller_num = first_num + 1, first_num - 1\n\n result = [first_num]\n for elem in relative_arr[1:]:\n if elem == \"+\":\n result.append(larger_num)\n larger_num += 1\n else:\n result.append(smaller_num)\n smaller_num -= 1\n return result\n\n\nif __name__ == \"__main__\":\n print(get_sequence([None, \"+\", \"+\", \"-\", \"+\"]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "5556837", "language": "Python", "matching_score": 1.2699378728866577, "max_stars_count": 70, "path": "Solutions/260.py" }, { "content": "\"\"\"\nProblem:\n\nGiven an array of integers, determine whether it contains a Pythagorean triplet. 
Recall\nthat a Pythagorean triplet (a, b, c) is defined by the equation a^2 + b^2 = c^2.\n\"\"\"\n\nfrom math import sqrt\nfrom typing import List, Optional, Tuple\n\n\ndef get_pythagorean_triplet(\n arr: List[int],\n) -> Tuple[Optional[int], Optional[int], Optional[int]]:\n length = len(arr)\n if length < 3:\n return None, None, None\n # generating the set of squared values for O(1) access\n squared_arr = [elem * elem for elem in arr]\n value_set = set(squared_arr)\n\n for i in range(length - 1):\n for j in range(i + 1, length):\n if squared_arr[i] + squared_arr[j] in value_set:\n return (\n int(sqrt(squared_arr[i])),\n int(sqrt(squared_arr[j])),\n int(sqrt(squared_arr[i] + squared_arr[j])),\n )\n return None, None, None\n\n\nif __name__ == \"__main__\":\n print(get_pythagorean_triplet([3, 4, 5, 6, 7]))\n print(get_pythagorean_triplet([3, 5, 6, 7]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n ^ 2)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "3027553", "language": "Python", "matching_score": 0.7689957618713379, "max_stars_count": 70, "path": "Solutions/282.py" }, { "content": "\"\"\"\nProblem:\n\nThe power set of a set is the set of all its subsets. Write a function that, given a\nset, generates its power set.\n\nFor example, given the set {1, 2, 3}, it should return\n{{}, {1}, {2}, {3}, {1, 2}, {1, 3}, {2, 3}, {1, 2, 3}}.\n\nYou may also use a list or array to represent a set.\n\"\"\"\n\nfrom typing import List\n\n\ndef get_power_set(arr: List[int]) -> List[List[int]]:\n power_set = [[]]\n # generating the power set\n for elem in arr:\n # generating the new sets\n additional_sets = []\n for subset in power_set:\n subset_copy = [subset_elem for subset_elem in subset]\n subset_copy.append(elem)\n additional_sets.append(subset_copy)\n # adding the new sets to the power set\n power_set.extend(additional_sets)\n return power_set\n\n\nif __name__ == \"__main__\":\n print(get_power_set([1, 2, 3]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(2 ^ n)\nSPACE COMPLEXITY: O(2 ^ n)\n\"\"\"\n", "id": "4576145", "language": "Python", "matching_score": 0.9320083856582642, "max_stars_count": 70, "path": "Solutions/037.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a set of distinct positive integers, find the largest subset such that every pair\nof elements in the subset (i, j) satisfies either i % j = 0 or j % i = 0.\n\nFor example, given the set [3, 5, 10, 20, 21], you should return [5, 10, 20]. 
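As a side note on the power set solution in 037.py above, the same enumeration is often written with bit masks, where bit i of a counter decides whether arr[i] joins the subset; a minimal sketch:

from typing import List

def power_set_bitmask(arr: List[int]) -> List[List[int]]:
    n = len(arr)
    # each integer mask from 0 to 2^n - 1 selects exactly one subset
    return [[arr[i] for i in range(n) if mask >> i & 1] for mask in range(1 << n)]

assert len(power_set_bitmask([1, 2, 3])) == 8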
Given\n[1, 3, 6, 24], return [1, 3, 6, 24].\n\"\"\"\n\nfrom typing import List\n\n\ndef get_largest_subset_helper(\n arr: List[int],\n length: int,\n prev_num: int = 1,\n curr_ind: int = 0,\n prev_subset: List[int] = [],\n) -> List[int]:\n if curr_ind == length:\n return prev_subset\n\n curr_elem = arr[curr_ind]\n res = get_largest_subset_helper(arr, prev_num, curr_ind + 1, prev_subset)\n if curr_elem % prev_num == 0:\n # generating the alternate result (with the element added)\n alternate_res = get_largest_subset_helper(\n arr, curr_elem, curr_ind + 1, prev_subset + [curr_elem]\n )\n return max(alternate_res, res, key=lambda result: len(result))\n return res\n\n\ndef get_largest_subset(arr: List[int]) -> List[int]:\n arr.sort()\n return get_largest_subset_helper(arr, len(arr), prev_subset=[])\n\n\nif __name__ == \"__main__\":\n print(get_largest_subset([]))\n print(get_largest_subset([2]))\n print(get_largest_subset([2, 3]))\n print(get_largest_subset([3, 5, 10, 20, 21]))\n print(get_largest_subset([1, 3, 6, 24]))\n print(get_largest_subset([3, 9, 15, 30]))\n print(get_largest_subset([2, 3, 9, 15, 30]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(2 ^ n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "8374424", "language": "Python", "matching_score": 0.8284856677055359, "max_stars_count": 70, "path": "Solutions/198.py" }, { "content": "\"\"\"\nProblem:\n\nYou have N stones in a row, and would like to create from them a pyramid. This pyramid\nshould be constructed such that the height of each stone increases by one until\nreaching the tallest stone, after which the heights decrease by one. In addition, the\nstart and end stones of the pyramid should each be one stone high.\n\nYou can change the height of any stone by paying a cost of 1 unit to lower its height\nby 1, as many times as necessary. 
Given this information, determine the lowest cost\nmethod to produce this pyramid.\n\nFor example, given the stones [1, 1, 3, 3, 2, 1], the optimal solution is to pay 2 to\ncreate [0, 1, 2, 3, 2, 1].\n\"\"\"\n\nfrom typing import List\n\n\ndef get_min_pyramid_cost(arr: List[int]) -> int:\n length = len(arr)\n left = [0 for _ in range(length)]\n right = [0 for _ in range(length)]\n # calculate maximum height (left)\n left[0] = min(arr[0], 1)\n for i in range(1, length):\n left[i] = min(arr[i], min(left[i - 1] + 1, i + 1))\n # calculate maximum height (right)\n right[length - 1] = min(arr[length - 1], 1)\n for i in range(length - 2, -1, -1):\n right[i] = min(arr[i], min(right[i + 1] + 1, length - i))\n\n # find minimum possible among calculated values\n tot = [0 for _ in range(length)]\n for i in range(length):\n tot[i] = min(right[i], left[i])\n # find maximum height of pyramid\n max_ind = 0\n for i in range(length):\n if tot[i] > tot[max_ind]:\n max_ind = i\n\n # calculate cost of this pyramid\n cost = 0\n height = tot[max_ind]\n # calculate cost of left half\n for x in range(max_ind, -1, -1):\n cost += arr[x] - height\n height = max(0, height - 1)\n # calculate cost of right half\n height = tot[max_ind] - 1\n for x in range(max_ind + 1, length):\n cost += arr[x] - height\n height = max(0, height - 1)\n return cost\n\n\nif __name__ == \"__main__\":\n print(get_min_pyramid_cost([1, 1, 3, 3, 2, 1]))\n print(get_min_pyramid_cost([1, 1, 1, 1, 1]))\n print(get_min_pyramid_cost([1, 1, 1, 5, 1]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "9300355", "language": "Python", "matching_score": 1.7517307996749878, "max_stars_count": 70, "path": "Solutions/293.py" }, { "content": "\"\"\"\nProblem:\n\nYou are given an array of non-negative integers that represents a two-dimensional\nelevation map where each element is unit-width wall and the integer is the height.\nSuppose it will rain and all spots between two walls get filled up.\n\nCompute how many units of water remain trapped on the map in O(N) time and O(1) space.\n\nFor example, given the input [2, 1, 2], we can hold 1 unit of water in the middle.\n\nGiven the input [3, 0, 1, 3, 0, 5], we can hold 3 units in the first index, 2 in the\nsecond, and 3 in the fourth index (we cannot hold 5 since it would run off to the\nleft), so we can trap 8 units of water.\n\"\"\"\n\nfrom typing import List\n\n\ndef water(arr: List[int]) -> int:\n length = len(arr)\n # check if there is enough walls to store water\n if length < 3:\n return 0\n\n left, right = 0, length - 1\n left_max, right_max = 0, 0\n total_water = 0\n # calculating the amount of water that can be stored (using 2 pointers method)\n while left <= right:\n if arr[left] < arr[right]:\n if arr[left] > left_max:\n left_max = arr[left]\n else:\n total_water += left_max - arr[left]\n left += 1\n else:\n if arr[right] > right_max:\n right_max = arr[right]\n else:\n total_water += right_max - arr[right]\n right -= 1\n return total_water\n\n\nif __name__ == \"__main__\":\n print(water([2, 1, 2]))\n print(water([3, 0, 1, 3, 0, 5]))\n print(water([5, 3, 5, 3, 4]))\n print(water([5, 1, 1, 1, 0]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "5760204", "language": "Python", "matching_score": 0.9997334480285645, "max_stars_count": 70, "path": "Solutions/030.py" }, { "content": "\"\"\"\nProblem:\n\nThe United States uses the imperial system of weights and measures, which means that\nthere are many different, seemingly arbitrary 
units to measure distance. There are 12\ninches in a foot, 3 feet in a yard, 22 yards in a chain, and so on.\n\nCreate a data structure that can efficiently convert a certain quantity of one unit to\nthe correct amount of any other unit. You should also allow for additional units to be\nadded to the system.\n\"\"\"\n\nfrom typing import Union\n\n\nclass UnitConverter:\n def __init__(self) -> None:\n # default available metrics\n self.metrics = {\n \"inch\": 1,\n \"foot\": 1 * 12,\n \"yard\": 3 * 1 * 12,\n \"chain\": 22 * 3 * 1 * 12,\n }\n\n def add_unit(\n self, new_unit: str, available_unit: str, value: Union[int, float]\n ) -> None:\n # add a new unit with respect to an unit already present\n if available_unit not in self.metrics:\n raise ValueError(f\"Unit not found: {available_unit}\")\n self.metrics[new_unit] = self.metrics[available_unit] * value\n\n def convert(\n self,\n source_unit: str,\n result_unit: str,\n value: Union[int, float],\n precision: int = 4,\n ) -> float:\n # convert one metric to another, rounds off the result to required decimal\n # places\n if source_unit not in self.metrics:\n raise ValueError(f\"Unit not found: {source_unit}\")\n if result_unit not in self.metrics:\n raise ValueError(f\"Unit not found: {result_unit}\")\n return round(\n value * self.metrics[source_unit] / self.metrics[result_unit], precision\n )\n\n\nif __name__ == \"__main__\":\n uc = UnitConverter()\n print(uc.convert(\"inch\", \"foot\", 24))\n print(uc.convert(\"inch\", \"yard\", 36))\n\n uc.add_unit(\"furlong\", \"chain\", 10)\n\n print(uc.convert(\"inch\", \"furlong\", 4 * 36 * 22 * 10))\n print(uc.convert(\"foot\", \"yard\", 4))\n print(uc.convert(\"chain\", \"inch\", 2))\n print(uc.convert(\"chain\", \"foot\", 3))\n\n # NOTE: \"centimeter\" is not a part of imperial system, its used to show that\n # smaller units works too\n uc.add_unit(\"centimeter\", \"inch\", 0.394)\n print(uc.convert(\"centimeter\", \"foot\", 1))\n", "id": "3979589", "language": "Python", "matching_score": 0.9206223487854004, "max_stars_count": 70, "path": "Solutions/325.py" }, { "content": "\"\"\"\nProblem:\n\nGiven an integer n, return the length of the longest consecutive run of 1s in its\nbinary representation.\n\nFor example, given 156, you should return 3.\n\"\"\"\n\n\ndef get_longest_chain_of_1s(num: int) -> int:\n num = bin(num)[2:]\n chain_max = 0\n chain_curr = 0\n\n for char in num:\n if char == \"1\":\n chain_curr += 1\n else:\n chain_max = max(chain_max, chain_curr)\n chain_curr = 0\n return max(chain_max, chain_curr)\n\n\nif __name__ == \"__main__\":\n print(get_longest_chain_of_1s(15))\n print(get_longest_chain_of_1s(156))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(log(n))\nSPACE COMPLEXITY: O(1)\n[there are log2(n) digits in the binary representation of any number n]\n\"\"\"\n", "id": "12661174", "language": "Python", "matching_score": 1.3999258279800415, "max_stars_count": 70, "path": "Solutions/214.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a 32-bit integer, return the number with its bits reversed.\n\nFor example, given the binary number 1111 0000 1111 0000 1111 0000 1111 0000, return\n0000 1111 0000 1111 0000 1111 0000 1111.\n\"\"\"\n\n\ndef complement_1s(num: str) -> str:\n result = \"\"\n for digit in num:\n result += str(int(not int(digit)))\n return result\n\n\nif __name__ == \"__main__\":\n print(complement_1s(\"11110000111100001111000011110000\"))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "1055362", "language": "Python", "matching_score": 
0.5287983417510986, "max_stars_count": 70, "path": "Solutions/161.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a positive integer n, find the smallest number of squared integers which sum to n.\n\nFor example, given n = 13, return 2 since 13 = 3^2 + 2^2 = 9 + 4.\n\nGiven n = 27, return 3 since 27 = 3^2 + 3^2 + 3^2 = 9 + 9 + 9.\n\"\"\"\n\n\ndef min_square_num(num: int, accumulator: int = 0) -> int:\n if num == 0:\n return accumulator\n elif num == 1:\n return accumulator + 1\n\n largest_square_divisor = int(num ** 0.5) ** 2\n num = num - largest_square_divisor\n accumulator += 1\n return min_square_num(num, accumulator)\n\n\nif __name__ == \"__main__\":\n print(min_square_num(25)) # (5 ^ 2)\n print(min_square_num(13)) # (2 ^ 2) + (3 ^ 2)\n print(min_square_num(27)) # (5 ^ 2) + (1 ^ 2) + (1 ^ 2)\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(log(n))\n\"\"\"\n", "id": "11109669", "language": "Python", "matching_score": 1.2669408321380615, "max_stars_count": 70, "path": "Solutions/156.py" }, { "content": "\"\"\"\nProblem:\n\nWrite a program that determines the smallest number of perfect squares that sum up to\nN.\n\nHere are a few examples:\n\nGiven N = 4, return 1 (4)\nGiven N = 17, return 2 (16 + 1)\nGiven N = 18, return 2 (9 + 9)\n\"\"\"\n\nfrom math import ceil, sqrt\n\n\ndef get_min_squares_sum(N: int) -> int:\n dp = [0, 1, 2, 3]\n for i in range(4, N + 1):\n dp.append(i)\n for x in range(1, int(ceil(sqrt(i))) + 1):\n square = pow(x, 2)\n if square > i:\n break\n dp[i] = min(dp[i], 1 + dp[i - square])\n return dp[N]\n\n\nif __name__ == \"__main__\":\n print(get_min_squares_sum(4))\n print(get_min_squares_sum(17))\n print(get_min_squares_sum(18))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n ^ 1.5)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "11535826", "language": "Python", "matching_score": 1.2959152460098267, "max_stars_count": 70, "path": "Solutions/350.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a real number n, find the square root of n. For example, given n = 9, return 3.\n\"\"\"\n\nTOLERENCE = 10 ** (-6)\n\n\ndef almost_equal(num1: float, num2: float) -> bool:\n return num1 - TOLERENCE < num2 < num1 + TOLERENCE\n\n\ndef get_sqrt(num: int) -> float:\n # using binary search to get the sqaure-root\n high, low = num, 0\n while True:\n mid = (high + low) / 2\n mid_square = mid * mid\n if almost_equal(mid_square, num):\n return round(mid, 6)\n elif mid_square < num:\n low = mid + 1\n else:\n high = mid - 1\n\n\nif __name__ == \"__main__\":\n print(get_sqrt(100))\n print(get_sqrt(9))\n print(get_sqrt(3))\n print(get_sqrt(2))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(log(n))\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "11794835", "language": "Python", "matching_score": 1.1035478115081787, "max_stars_count": 70, "path": "Solutions/129.py" }, { "content": "\"\"\"\nProblem:\n\nGiven an unsorted array, in which all elements are distinct, find a \"peak\" element in\nO(log N) time.\n\nAn element is considered a peak if it is greater than both its left and right\nneighbors. 
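One note on the two perfect-square solutions above: the greedy in 156.py (always subtract the largest square) is not guaranteed minimal, while the DP in 350.py is. n = 12 is the standard counterexample: greedy takes 9 + 1 + 1 + 1 (four squares) versus the optimal 4 + 4 + 4 (three). A compact restatement of the DP to check such cases:

def min_squares_dp(n: int) -> int:
    # same idea as 350.py, restated compactly
    dp = list(range(n + 1))  # dp[i] <= i, since i copies of 1^2 always work
    for i in range(1, n + 1):
        j = 1
        while j * j <= i:
            dp[i] = min(dp[i], 1 + dp[i - j * j])
            j += 1
    return dp[n]

assert min_squares_dp(12) == 3  # the greedy approach would report 4 here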
It is guaranteed that the first and last elements are lower than all others.\n\"\"\"\n\nfrom typing import List\n\n\ndef get_peak(arr: List[int]) -> int:\n # implement similar method as binary search [since the element being searched is\n # not a concrete value (unlike binary search), but any value which is greater than\n # its neighbours, it can only be found without sorting]\n mid = len(arr) // 2\n if (\n mid > 0\n and arr[mid - 1] < arr[mid]\n and mid < len(arr) - 1\n and arr[mid + 1] < arr[mid]\n ):\n return arr[mid]\n elif mid > 0 and arr[mid - 1] < arr[mid]:\n return get_peak(arr[mid:])\n return get_peak(arr[: mid + 1])\n\n\nif __name__ == \"__main__\":\n print(get_peak([0, 2, 4, -1, 3, 1]))\n print(get_peak([0, 2, 4, 5, 3, 1]))\n print(get_peak([0, 2, 6, 5, 3, 1]))\n print(get_peak([0, 2, 4, 5, 7, 1]))\n print(get_peak([0, 8, 7, 5, 16, 1]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(log(n))\nSPACE COMPLEXITY: O(n)\n[the recursive calls slice the array, and each slice copies its half]\n\"\"\"\n\n", "id": "2772266", "language": "Python", "matching_score": 0.7612236142158508, "max_stars_count": 70, "path": "Solutions/311.py" }, { "content": "\"\"\"\nProblem:\n\nA fixed point in an array is an element whose value is equal to its index. Given a\nsorted array of distinct elements, return a fixed point, if one exists. Otherwise,\nreturn False.\n\nFor example, given [-6, 0, 2, 40], you should return 2. Given [1, 5, 7, 8], you should\nreturn False.\n\"\"\"\n\nfrom typing import List, Union\n\n\ndef get_fixed_point(arr: List[int]) -> Union[int, bool]:\n for index, value in enumerate(arr):\n if value == index:\n # fixed point found\n return value\n elif value > index:\n # since the array is sorted and has distinct elements, once the value\n # exceeds the index, the index can never be equal to the value at any\n # position\n break\n return False\n\n\nif __name__ == \"__main__\":\n print(get_fixed_point([-6, 0, 2, 40]))\n print(get_fixed_point([1, 5, 7, 8]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "4459374", "language": "Python", "matching_score": 0.7156849503517151, "max_stars_count": 70, "path": "Solutions/273.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a list of points, a central point, and an integer k, find the nearest k points\nfrom the central point.\n\nFor example, given the list of points [(0, 0), (5, 4), (3, 1)], the central point\n(1, 2), and k = 2, return [(0, 0), (3, 1)].\n\"\"\"\n\nfrom typing import List, Tuple\n\nPosition = Tuple[int, int]\n\n\ndef get_distance(point1: Position, point2: Position) -> float:\n return ((point1[0] - point2[0]) ** 2 + (point1[1] - point2[1]) ** 2) ** 0.5\n\n\ndef KNN(arr: List[Position], center: Position, k: int) -> List[Position]:\n return sorted(arr, key=lambda position: get_distance(position, center))[:k]\n\n\nif __name__ == \"__main__\":\n print(KNN([(0, 0), (5, 4), (3, 1)], (1, 2), 2))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n x log(n))\nSPACE COMPLEXITY: O(k)\n\"\"\"\n", "id": "10585453", "language": "Python", "matching_score": 1.3696790933609009, "max_stars_count": 70, "path": "Solutions/150.py" }, { "content": "\"\"\"\nProblem:\n\nConsider the following scenario: there are N mice and N holes placed at integer points\nalong a line. 
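For the k-nearest-points solution in 150.py above, heapq.nsmallest gives the same answer in O(n log k) without fully sorting, and squared distances avoid the sqrt while preserving the ordering; a sketch:

from heapq import nsmallest
from typing import List, Tuple

def knn_heap(points: List[Tuple[int, int]], center: Tuple[int, int], k: int) -> List[Tuple[int, int]]:
    cx, cy = center
    # squared distance: monotone in true distance, so the k smallest agree
    return nsmallest(k, points, key=lambda p: (p[0] - cx) ** 2 + (p[1] - cy) ** 2)

assert knn_heap([(0, 0), (5, 4), (3, 1)], (1, 2), 2) == [(0, 0), (3, 1)]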
Given this, find a method that maps mice to holes such that the largest\nnumber of steps any mouse takes is minimized.\n\nEach move consists of moving one mouse one unit to the left or right, and only one\nmouse can fit inside each hole.\n\nFor example, suppose the mice are positioned at [1, 4, 9, 15], and the holes are\nlocated at [10, -5, 0, 16]. In this case, the best pairing would require us to send the\nmouse at 1 to the hole at -5, so our function should return 6.\n\"\"\"\n\nfrom typing import List\n\n\ndef get_max_mouse_dist(mouse_position: List[int], hole_position: List[int]) -> int:\n mouse_position.sort()\n hole_position.sort()\n max_distance = 0\n\n for mouse, hole in zip(mouse_position, hole_position):\n max_distance = max(max_distance, abs(mouse - hole))\n return max_distance\n\n\nif __name__ == \"__main__\":\n print(get_max_mouse_dist([1, 4, 9, 15], [10, -5, 0, 16]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "6863920", "language": "Python", "matching_score": 1.9197710752487183, "max_stars_count": 70, "path": "Solutions/324.py" }, { "content": "\"\"\"\nProblem:\n\nYou are the technical director of WSPT radio, serving listeners nationwide. For\nsimplicity's sake we can consider each listener to live along a horizontal line\nstretching from 0 (west) to 1000 (east).\n\nGiven a list of N listeners, and a list of M radio towers, each placed at various\nlocations along this line, determine what the minimum broadcast range would have to be\nin order for each listener's home to be covered.\n\nFor example, suppose listeners = [1, 5, 11, 20], and towers = [4, 8, 15]. In this case\nthe minimum range would be 5, since that would be required for the tower at position 15\nto reach the listener at position 20.\n\"\"\"\n\nfrom sys import maxsize\nfrom typing import List\n\n\ndef get_min_range(listeners: List[int], towers: List[int]) -> int:\n # distance map storing the distance of listener from the nearest tower\n listeners_distance = {listener: maxsize for listener in listeners}\n for listener in listeners:\n for tower in towers:\n listeners_distance[listener] = min(\n listeners_distance[listener], abs(tower - listener)\n )\n return max(listeners_distance.values())\n\n\nif __name__ == \"__main__\":\n print(get_min_range([1, 5, 11, 20], [4, 8, 15]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(listeners x towers)\nSPACE COMPLEXITY: O(listeners)\n\"\"\"\n", "id": "506969", "language": "Python", "matching_score": 1.1475818157196045, "max_stars_count": 70, "path": "Solutions/314.py" }, { "content": "\"\"\"\nProblem:\n\nYou are in an infinite 2D grid where you can move in any of the 8 directions:\n\n (x,y) to\n (x+1, y),\n (x - 1, y),\n (x, y+1),\n (x, y-1),\n (x-1, y-1),\n (x+1,y+1),\n (x-1,y+1),\n (x+1,y-1)\nYou are given a sequence of points and the order in which you need to cover the points.\nGive the minimum number of steps in which you can achieve it. You start from the first\npoint.\n\nExample: Input: [(0, 0), (1, 1), (1, 2)] Output: 2 It takes 1 step to move from (0, 0)\nto (1, 1). 
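On the broadcast-range solution in 314.py above: if the towers are sorted once, each listener needs only a binary search instead of a scan over every tower, dropping the cost to O((N + M) log M). A sketch using the standard bisect module:

from bisect import bisect
from typing import List

def min_broadcast_range(listeners: List[int], towers: List[int]) -> int:
    towers = sorted(towers)
    best = 0
    for listener in listeners:
        i = bisect(towers, listener)
        # the nearest tower is one of the two neighbours of the insertion point
        candidates = towers[max(i - 1, 0):i + 1]
        best = max(best, min(abs(t - listener) for t in candidates))
    return best

assert min_broadcast_range([1, 5, 11, 20], [4, 8, 15]) == 5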
It takes one more step to move from (1, 1) to (1, 2).\n\"\"\"\n\nfrom typing import List, Tuple\n\n\ndef get_min_steps(sequence: List[Tuple[int, int]]) -> int:\n length = len(sequence)\n if length in [0, 1]:\n return 0\n\n curr_position = sequence[0]\n total_distance = 0\n for next_position in sequence[1:]:\n i, j = curr_position\n y, x = next_position\n total_distance += max((abs(y - i)), abs(x - j))\n curr_position = next_position\n return total_distance\n\n\nif __name__ == \"__main__\":\n print(get_min_steps([]))\n print(get_min_steps([(0, 0)]))\n print(get_min_steps([(0, 0), (1, 1), (1, 2)]))\n print(get_min_steps([(0, 0), (1, 1), (1, 2), (3, 4)]))\n print(get_min_steps([(0, 0), (1, 1), (1, 2), (3, 6)]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "8698010", "language": "Python", "matching_score": 1.5766719579696655, "max_stars_count": 70, "path": "Solutions/100.py" }, { "content": "\"\"\"\nProblem:\n\nLet X be a set of n intervals on the real line. We say that a set of points P \"stabs\" X\nif every interval in X contains at least one point in P. Compute the smallest set of\npoints that stabs X.\n\nFor example, given the intervals [(1, 4), (4, 5), (7, 9), (9, 12)], you should return\n[4, 9].\n\"\"\"\n\nfrom typing import List, Tuple\n\n\ndef get_stab(list_of_intervals: List[Tuple[int]]) -> Tuple[int, int]:\n start, end = zip(*list_of_intervals)\n return min(end), max(start)\n\n\nif __name__ == \"__main__\":\n print(get_stab([(1, 4), (4, 5), (7, 9), (9, 12)]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n[even though zip is a generator and takes O(1) space, destructuring the array takes\nO(n) space]\n\"\"\"\n", "id": "553373", "language": "Python", "matching_score": 1.2694674730300903, "max_stars_count": 70, "path": "Solutions/200.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a set of closed intervals, find the smallest set of numbers that covers all the\nintervals. 
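The min/max trick in 200.py above leans on the sample needing exactly two stab points; the general minimum stabbing set is usually computed greedily by sorting intervals on their right endpoints and stabbing each uncovered interval as late as possible. A sketch:

from typing import List, Tuple

def min_stab_points(intervals: List[Tuple[int, int]]) -> List[int]:
    points = []
    last = None
    for start, end in sorted(intervals, key=lambda iv: iv[1]):
        if last is None or start > last:
            last = end  # stab this interval at its right endpoint
            points.append(last)
    return points

assert min_stab_points([(1, 4), (4, 5), (7, 9), (9, 12)]) == [4, 9]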
If there are multiple smallest sets, return any of them.\n\nFor example, given the intervals [0, 3], [2, 6], [3, 4], [6, 9], one set of numbers\nthat covers all these intervals is {3, 6}.\n\"\"\"\n\nfrom typing import List, Optional, Tuple\n\n\ndef get_spanning_interval(intervals: List[List[int]]) -> Optional[Tuple]:\n if not intervals:\n return\n start = intervals[0][1]\n end = start\n pos = 1\n # updating start\n for interval in intervals[1:]:\n interval_start, interval_end = interval\n if interval_start < start and interval_end < start:\n start = interval_end\n pos += 1\n else:\n break\n # updating end\n for interval in intervals[pos:]:\n interval_start, _ = interval\n if interval_start > end:\n end = interval_start\n return start, end\n\n\nif __name__ == \"__main__\":\n print(get_spanning_interval([[0, 3]]))\n print(get_spanning_interval([[0, 3], [2, 6]]))\n print(get_spanning_interval([[0, 3], [2, 6], [3, 4]]))\n print(get_spanning_interval([[0, 3], [2, 6], [3, 4], [6, 7]]))\n print(get_spanning_interval([[0, 3], [2, 6], [3, 4], [6, 9]]))\n print(get_spanning_interval([[0, 3], [2, 6], [3, 4], [6, 100]]))\n print(get_spanning_interval([[0, 4], [1, 2], [5, 6]]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "2785165", "language": "Python", "matching_score": 1.4655073881149292, "max_stars_count": 70, "path": "Solutions/119.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a list of possibly overlapping intervals, return a new list of intervals where\nall overlapping intervals have been merged.\n\nThe input list is not necessarily ordered in any way.\n\nFor example, given [(1, 3), (5, 8), (4, 10), (20, 25)], you should return\n[(1, 3), (4, 10), (20, 25)].\n\"\"\"\n\nfrom typing import List, Tuple\n\n\ndef merge_intervals(intervals: List[Tuple[int, int]]) -> List[Tuple[int, int]]:\n intervals.sort(key=lambda x: x[0])\n merged_intervals = []\n start = intervals[0][0]\n end = intervals[0][1]\n # generating the merged intervals\n for interval in intervals[1:]:\n curr_start, curr_end = interval\n if end < curr_start:\n merged_intervals.append((start, end))\n start = curr_start\n end = curr_end\n elif end < curr_end:\n # overlapping interval that extends past the current end\n end = curr_end\n # adding the last interval\n merged_intervals.append((start, end))\n return merged_intervals\n\n\nif __name__ == \"__main__\":\n print(merge_intervals([(1, 3), (5, 8), (4, 10), (20, 25)]))\n print(merge_intervals([(1, 3), (5, 8), (4, 10), (20, 25), (6, 12)]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n x log(n))\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "5398", "language": "Python", "matching_score": 1.3390352725982666, "max_stars_count": 70, "path": "Solutions/077.py" }, { "content": "\"\"\"\nProblem:\n\nThe skyline of a city is composed of several buildings of various widths and heights,\npossibly overlapping one another when viewed from a distance. We can represent the\nbuildings using an array of (left, right, height) tuples, which tell us where on an\nimaginary x-axis a building begins and ends, and how tall it is. The skyline itself can\nbe described by a list of (x, height) tuples, giving the locations at which the height\nvisible to a distant observer changes, and each new height.\n\nGiven an array of buildings as described above, create a function that returns the\nskyline.\n\nFor example, suppose the input consists of the buildings\n[(0, 15, 3), (4, 11, 5), (19, 23, 4)]. 
In aggregate, these buildings would create a\nskyline that looks like the one below.\n\n ______ \n | | ___\n ___| |___ | | \n| | B | | | C |\n| A | | A | | |\n| | | | | |\n------------------------\nAs a result, your function should return\n[(0, 3), (4, 5), (11, 3), (15, 0), (19, 4), (23, 0)].\n\"\"\"\n\nfrom sys import maxsize\nfrom typing import List, Tuple\n\n\ndef get_skyline(arr: List[Tuple[int, int, int]]) -> List[Tuple[int, int]]:\n # getting the bounds of the skyline\n start = maxsize\n end = -maxsize\n for start_curr, end_curr, _ in arr:\n start = min(start, start_curr)\n end = max(end, end_curr)\n # generating the skyline\n skyline = [0 for _ in range(start, end + 1)]\n offset = start\n for start_curr, end_curr, height in arr:\n for i in range(start_curr - offset, end_curr - offset):\n skyline[i] = max(skyline[i], height)\n # generating result from the skyline\n result = []\n for i in range(start - offset, end - offset):\n if i == 0 or skyline[i] != skyline[i - 1]:\n result.append((i + offset, skyline[i]))\n result.append((end, 0))\n return result\n\n\nif __name__ == \"__main__\":\n print(get_skyline([(0, 15, 3), (4, 11, 5), (19, 23, 4)]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(max(arr) - min(arr))\nSPACE COMPLEXITY: O(max(arr) - min(arr))\n\"\"\"\n", "id": "9775446", "language": "Python", "matching_score": 0.6797407269477844, "max_stars_count": 70, "path": "Solutions/286.py" }, { "content": "\"\"\"\nProblem:\n\nYou are given a circular lock with three wheels, each of which display the numbers 0\nthrough 9 in order. Each of these wheels rotate clockwise and counterclockwise.\n\nIn addition, the lock has a certain number of \"dead ends\", meaning that if you turn the\nwheels to one of these combinations, the lock becomes stuck in that state and cannot be\nopened.\n\nLet us consider a \"move\" to be a rotation of a single wheel by one digit, in either\ndirection. 
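The grid fill in 286.py above costs time and memory proportional to the coordinate span rather than the number of buildings; the usual sweep-line-with-heap formulation is O(n log n) regardless of coordinates. A hedged sketch, not the repo's implementation:

import heapq
from typing import List, Tuple

def skyline_sweep(buildings: List[Tuple[int, int, int]]) -> List[Tuple[int, int]]:
    # events: building starts (negated height so the max sits on top) and ends
    events = sorted(
        [(l, -h, r) for l, r, h in buildings] + [(r, 0, 0) for _, r, _ in buildings]
    )
    result = []
    live = [(0, float("inf"))]  # heap of (negated height, end x); sentinel ground level
    for x, neg_h, r in events:
        while live[0][1] <= x:  # drop buildings that have already ended
            heapq.heappop(live)
        if neg_h:
            heapq.heappush(live, (neg_h, r))
        height = -live[0][0]
        if not result or result[-1][1] != height:
            result.append((x, height))
    return result

assert skyline_sweep([(0, 15, 3), (4, 11, 5), (19, 23, 4)]) == [
    (0, 3), (4, 5), (11, 3), (15, 0), (19, 4), (23, 0)
]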
Given a lock initially set to 000, a target combination, and a list of dead\nends, write a function that returns the minimum number of moves required to reach the\ntarget state, or None if this is impossible.\n\"\"\"\n\nfrom sys import maxsize\nfrom typing import List, Tuple, Set\n\n\ndef turn_wheel_up(val: int) -> int:\n return (val + 1) % 10\n\n\ndef turn_wheel_down(val: int) -> int:\n return (val - 1 + 10) % 10\n\n\ndef get_min_moves_helper(\n curr: List[int],\n pattern: List[int],\n dead_ends: Set[Tuple[int, int, int]],\n seen: Set[str],\n accumulator: int = 0,\n) -> int:\n if curr == pattern:\n return accumulator\n curr_val = \"\".join([str(x) for x in curr])\n if curr_val in seen:\n # if a loop back occours, the target pattern cannot be reached\n return maxsize\n seen.add(curr_val)\n\n moves = []\n for i in range(3):\n temp = curr.copy()\n if temp[i] != pattern[i]:\n temp[i] = turn_wheel_up(temp[i])\n if tuple(temp) not in dead_ends:\n moves.append(temp)\n temp = curr.copy()\n if temp[i] != pattern[i]:\n temp[i] = turn_wheel_down(temp[i])\n if tuple(temp) not in dead_ends:\n moves.append(temp)\n\n temp = maxsize\n for move in moves:\n temp = min(\n temp, get_min_moves_helper(move, pattern, dead_ends, seen, accumulator + 1),\n )\n return temp\n\n\ndef get_min_moves(pattern, dead_ends):\n result = get_min_moves_helper([0, 0, 0], pattern, dead_ends, set())\n if result == maxsize:\n return None\n return result\n\n\nif __name__ == \"__main__\":\n print(get_min_moves([3, 4, 5], set([])))\n print(get_min_moves([3, 4, 5], set([(0, 0, 1), (0, 1, 0), (1, 0, 0)])))\n print(\n get_min_moves(\n [3, 4, 5],\n set([(0, 0, 1), (0, 1, 0), (1, 0, 0), (0, 0, 9), (0, 9, 0), (9, 0, 0)]),\n )\n )\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n ^ n)\nSPACE COMPLEXITY: O(n ^ n)\n[n = max(pattern)]\n\"\"\"\n\n", "id": "6284433", "language": "Python", "matching_score": 1.2845430374145508, "max_stars_count": 70, "path": "Solutions/313.py" }, { "content": "\"\"\"\nProblem:\n\nOne way to unlock an Android phone is through a pattern of swipes across a 1-9 keypad.\nFor a pattern to be valid, it must satisfy the following:\n* All of its keys must be distinct.\n* It must not connect two keys by jumping over a third key, unless that key has already been used.\nFor example, 4 - 2 - 1 - 7 is a valid pattern, whereas 2 - 1 - 7 is not.\nFind the total number of valid unlock patterns of length N, where 1 <= N <= 9.\n\"\"\"\n\nfrom copy import deepcopy\nfrom typing import Set\n\n\nclass Dialpad:\n def __init__(self) -> None:\n self.nodes = set(range(1, 10))\n self.edges = {}\n self.edges[1] = {2, 4, 5, 6, 8}\n self.edges[2] = {1, 3, 4, 5, 6, 7, 9}\n self.edges[3] = {2, 4, 5, 6, 8}\n self.edges[4] = {1, 2, 3, 5, 7, 8, 9}\n self.edges[5] = {1, 2, 3, 4, 6, 7, 8, 9}\n self.edges[6] = {1, 2, 3, 5, 7, 8, 9}\n self.edges[7] = {2, 4, 5, 6, 8}\n self.edges[8] = {1, 4, 3, 5, 6, 7, 9}\n self.edges[9] = {2, 4, 5, 6, 8}\n\n def update_connections(self, curr: int) -> None:\n if 2 == curr:\n self.edges[1].add(3)\n self.edges[3].add(1)\n elif 4 == curr:\n self.edges[1].add(7)\n self.edges[7].add(1)\n elif 6 == curr:\n self.edges[9].add(3)\n self.edges[3].add(9)\n elif 8 == curr:\n self.edges[7].add(9)\n self.edges[9].add(7)\n elif 5 == curr:\n self.edges[1].add(9)\n self.edges[9].add(1)\n self.edges[7].add(3)\n self.edges[3].add(7)\n self.edges[2].add(8)\n self.edges[8].add(2)\n self.edges[4].add(6)\n self.edges[6].add(4)\n\n\ndef count_code_helper(dp: Dialpad, code_length: int, curr: int, seen: Set[int]) -> int:\n # helper function to trace 
the patterns and get the number of combinations\n if code_length == 0:\n return 1\n seen_cp = deepcopy(seen)\n seen_cp.add(curr)\n\n copied_dp = deepcopy(dp)\n copied_dp.update_connections(curr)\n nodes = dp.edges[curr]\n sub_count = 0\n\n for node in nodes:\n if node not in seen_cp:\n sub_count += count_code_helper(copied_dp, code_length - 1, node, seen_cp)\n return sub_count\n\n\ndef count_codes_of_n_length(dp: Dialpad, code_length: int) -> int:\n if code_length == 1:\n return len(dp.nodes)\n count = 0\n for node in dp.nodes:\n # the helper counts moves (edges), so a pattern of code_length keys\n # corresponds to (code_length - 1) moves from the starting key\n count += count_code_helper(dp, code_length - 1, node, set())\n return count\n\n\ndef get_number_of_valid_unlock_patterns() -> int:\n dp = Dialpad()\n result = 0\n for n in range(1, 10):\n result += count_codes_of_n_length(dp, n)\n return result\n\n\nif __name__ == \"__main__\":\n # NOTE: computationally intensive operation as the number of patterns is really high\n print(get_number_of_valid_unlock_patterns())\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O((nodes ^ 2) x (code length ^ 2))\nSPACE COMPLEXITY: O((nodes ^ 2) x code length)\n\"\"\"\n", "id": "8120730", "language": "Python", "matching_score": 1.1012396812438965, "max_stars_count": 70, "path": "Solutions/239.py" }, { "content": "\"\"\"\nProblem:\n\nGray code is a binary code where each successive value differs in only one bit, as well\nas when wrapping around. Gray code is common in hardware so that we don't see temporary\nspurious values during transitions.\n\nGiven a number of bits n, generate a possible gray code for it.\n\nFor example, for n = 2, one gray code would be [00, 01, 11, 10].\n\"\"\"\n\nfrom typing import List\n\n\ndef get_grey_code(n: int) -> List[str]:\n if n == 0:\n return [\"\"]\n # generating grey code\n previous_grey_code = get_grey_code(n - 1)\n base0 = [\"0\" + val for val in previous_grey_code]\n base1 = [\"1\" + val for val in previous_grey_code[::-1]]\n return base0 + base1\n\n\nif __name__ == \"__main__\":\n print(get_grey_code(0))\n print(get_grey_code(1))\n print(get_grey_code(2))\n print(get_grey_code(3))\n print(get_grey_code(4))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(2 ^ n)\nSPACE COMPLEXITY: O(2 ^ n)\n[there are (2 ^ n) grey codes of length n]\n\"\"\"\n", "id": "4036548", "language": "Python", "matching_score": 0.9068979024887085, "max_stars_count": 70, "path": "Solutions/148.py" }, { "content": "\"\"\"\nProblem:\n\nYou are given a starting state start, a list of transition probabilities for a Markov\nchain, and a number of steps num_steps. 
Run the Markov chain starting from start for\nnum_steps and compute the number of times we visited each state.\n\nFor example, given the starting state a, number of steps 5000, and the following\ntransition probabilities:\n\n[\n ('a', 'a', 0.9),\n ('a', 'b', 0.075),\n ('a', 'c', 0.025),\n ('b', 'a', 0.15),\n ('b', 'b', 0.8),\n ('b', 'c', 0.05),\n ('c', 'a', 0.25),\n ('c', 'b', 0.25),\n ('c', 'c', 0.5)\n]\n\nOne instance of running this Markov chain might produce\n{'a': 3012, 'b': 1656, 'c': 332 }.\n\"\"\"\n\nfrom random import random\nfrom typing import Dict, List, Tuple\n\nfrom DataStructures.Graph import GraphDirectedWeighted\n\n\ndef get_transition_form_node(graph: GraphDirectedWeighted, node: str) -> str:\n transition = random()\n curr = 0\n for neighbour in graph.connections[node]:\n curr += graph.connections[node][neighbour]\n if curr >= transition:\n return neighbour\n\n\ndef get_transitions(\n start: str, transitions: List[Tuple[str, str, float]], steps: int\n) -> Dict[str, int]:\n # generating graph\n graph = GraphDirectedWeighted()\n for (node1, node2, probability) in transitions:\n graph.add_edge(node1, node2, probability)\n # generating visited map\n visited = {node: 0 for node in graph.connections}\n node = start\n for _ in range(steps):\n node = get_transition_form_node(graph, node)\n visited[node] += 1\n return visited\n\n\nif __name__ == \"__main__\":\n transitions = [\n (\"a\", \"a\", 0.9),\n (\"a\", \"b\", 0.075),\n (\"a\", \"c\", 0.025),\n (\"b\", \"a\", 0.15),\n (\"b\", \"b\", 0.8),\n (\"b\", \"c\", 0.05),\n (\"c\", \"a\", 0.25),\n (\"c\", \"b\", 0.25),\n (\"c\", \"c\", 0.5),\n ]\n print(get_transitions(\"a\", transitions, 5000))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(steps + transition states)\nSPACE COMPLEXITY: O(transition states)\n\"\"\"\n", "id": "4620994", "language": "Python", "matching_score": 1.68020498752594, "max_stars_count": 70, "path": "Solutions/175.py" }, { "content": "\"\"\"\nProblem:\n\nA rule looks like this:\n\nA NE B\n\nThis means this means point A is located northeast of point B.\n\nA SW C\n\nmeans that point A is southwest of C.\n\nGiven a list of rules, check if the sum of the rules validate. 
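For the Markov chain runner in 175.py above, the weighted step can also be delegated to random.choices from the standard library, which removes the manual cumulative-probability loop; a sketch with illustrative names:

import random
from collections import defaultdict
from typing import Dict, List, Tuple

def run_markov_chain(
    start: str, transitions: List[Tuple[str, str, float]], steps: int
) -> Dict[str, int]:
    neighbours = defaultdict(lambda: ([], []))  # state -> (targets, weights)
    for src, dst, p in transitions:
        neighbours[src][0].append(dst)
        neighbours[src][1].append(p)
    visits = defaultdict(int)
    state = start
    for _ in range(steps):
        targets, weights = neighbours[state]
        state = random.choices(targets, weights=weights)[0]  # one weighted draw
        visits[state] += 1
    return dict(visits)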
For example:\n\nA N B\nB NE C\nC N A\ndoes not validate, since A cannot be both north and south of C.\n\nA NW B\nA N B\nis considered valid.\n\"\"\"\n\nfrom __future__ import annotations\nfrom typing import Union\n\nOPPOSITES_CARDINALS = {\"N\": \"S\", \"S\": \"N\", \"E\": \"W\", \"W\": \"E\"}\n\n\nclass Node:\n def __init__(self, val: str) -> None:\n self.val = val\n self.neighbours = {\"N\": set(), \"E\": set(), \"S\": set(), \"W\": set()}\n\n def __repr__(self) -> str:\n return f\"{self.val}\"\n\n def __eq__(self, other: Union[Node, str]) -> bool:\n if type(other) == Node:\n return self.val == other.val\n elif type(other) == str:\n return self.val == other\n return False\n\n def __hash__(self) -> int:\n return hash(self.val)\n\n\nclass Map:\n def __init__(self) -> None:\n self.nodes = {}\n\n def add_rule(self, rule: str) -> None:\n node1, direction, node2 = rule.split()\n node1 = Node(node1)\n node2 = Node(node2)\n # cheking for the existance of the nodes\n if node1 not in self.nodes:\n self.nodes[node1.val] = node1\n else:\n node1 = self.nodes[node1.val]\n if node2 not in self.nodes:\n self.nodes[node2.val] = node2\n else:\n node2 = self.nodes[node2.val]\n # updating the neighbours\n for char in direction:\n if (node1 in node2.neighbours[char]) or (\n node2 in node1.neighbours[OPPOSITES_CARDINALS[char]]\n ):\n raise RuntimeError\n for node in node1.neighbours[char]:\n self.add_rule(f\"{node} {char} {node2}\")\n # adding the rule to the calling node\n for char in direction:\n node2.neighbours[char].add(node1)\n node1.neighbours[OPPOSITES_CARDINALS[char]].add(node2)\n\n\nif __name__ == \"__main__\":\n m = Map()\n\n m.add_rule(\"A N B\")\n print(\"Rule Applied!\")\n m.add_rule(\"B NE C\")\n print(\"Rule Applied!\")\n\n try:\n m.add_rule(\"C N A\")\n except RuntimeError:\n print(\"Invalid Rule!\")\n", "id": "2417783", "language": "Python", "matching_score": 1.2006105184555054, "max_stars_count": 70, "path": "Solutions/087.py" }, { "content": "\"\"\"\nProblem:\n\nImplement a file syncing algorithm for two computers over a low-bandwidth network.\nWhat if we know the files in the two computers are mostly the same?\n\"\"\"\n\nfrom __future__ import annotations\nfrom hashlib import sha256\n\nhash_func = sha256\n\n\nclass MerkleNode:\n def __init__(self, name: str) -> None:\n self.parent = None\n self.node_hash = None\n self.name = name\n\n def __hash__(self) -> int:\n return hash(self.name)\n\n def __eq__(self, other: MerkleNode) -> bool:\n if type(other) != MerkleNode:\n return False\n return self.node_hash == other.node_hash\n\n\nclass MerkleDirectory(MerkleNode):\n def __init__(self, name: str) -> None:\n MerkleNode.__init__(self, name)\n self.children = set()\n self.is_dir = True\n # creating a file on directory initialize and calculating hash\n new_file = MerkleFile(\"dir_init\")\n new_file.add_to_directory(self)\n\n def __repr__(self) -> str:\n return f\"Name: {self.name}, Children: {self.children}, Hash: {self.node_hash}\"\n\n def add_to_directory(self, directory: MerkleDirectory) -> None:\n # adding the node\n self.parent = directory\n directory.children.add(self)\n # recalculating hash for all anscestors\n while directory is not None:\n directory.recalculate_hash()\n directory = directory.parent\n\n def recalculate_hash(self) -> None:\n # concatinating all hashes and recalculating on the cumulative hash\n cumulative_hash = \"\"\n for child in self.children:\n cumulative_hash += child.node_hash\n self.node_hash = hash_func(cumulative_hash.encode()).hexdigest()\n\n def synchronize(self, 
other: MerkleDirectory) -> None:\n # if the directories have the same hash, they are already synchronized\n if self.node_hash == other.node_hash:\n return\n # updating other using self\n for node in self.children:\n if node not in other.children:\n node.add_to_directory(other)\n # updating self using other\n for node in other.children:\n if node not in self.children:\n node.add_to_directory(self)\n\n\nclass MerkleFile(MerkleNode):\n def __init__(self, name: str) -> None:\n MerkleNode.__init__(self, name)\n self.node_contents = \"\"\n self.is_dir = False\n self.node_hash = hash_func(self.node_contents.encode()).hexdigest()\n\n def __repr__(self) -> str:\n return (\n f\"[ Name: {self.name}, Content: \"\n + f\"{self.node_contents if self.node_contents else 'null'}, \"\n + f\"Hash: {self.node_hash} ]\"\n )\n\n def add_to_directory(self, directory: MerkleDirectory) -> None:\n # adding the node\n self.parent = directory\n directory.children.add(self)\n # recalculating hash for all ancestors\n while directory is not None:\n directory.recalculate_hash()\n directory = directory.parent\n\n def update_contents(self, new_contents: str) -> None:\n # contents updated and hash recalculated\n self.node_hash = hash_func(new_contents.encode()).hexdigest()\n self.node_contents = new_contents\n if self.parent:\n self.parent.recalculate_hash()\n\n\nclass Computer:\n def __init__(self):\n self.root = MerkleDirectory(\"root\")\n\n def __repr__(self) -> str:\n return str(self.root)\n\n def synchronize(self, new_comp: Computer) -> None:\n print(\"Syncing computers...\")\n self.root.synchronize(new_comp.root)\n print(\"Sync successful!\\n\")\n\n\nif __name__ == \"__main__\":\n c1 = Computer()\n c2 = Computer()\n\n print(\"COMPUTER 1:\")\n print(c1)\n print(\"COMPUTER 2:\")\n print(c2)\n print()\n\n new_file = MerkleFile(\"check_file\")\n new_file.update_contents(\"Check\")\n new_file.add_to_directory(c1.root)\n\n new_dir = MerkleDirectory(\"check_dir\")\n new_dir.add_to_directory(c2.root)\n\n print(\"COMPUTER 1:\")\n print(c1)\n print(\"COMPUTER 2:\")\n print(c2)\n print()\n\n c1.synchronize(c2)\n\n print(\"COMPUTER 1:\")\n print(c1)\n print(\"COMPUTER 2:\")\n print(c2)\n print()\n", "id": "5127012", "language": "Python", "matching_score": 1.1836858987808228, "max_stars_count": 70, "path": "Solutions/059.py" }, { "content": "\"\"\"\nProblem:\n\nSuppose we represent our file system by a string in the following manner:\n\nThe string \"dir\\n\\tsubdir1\\n\\tsubdir2\\n\\t\\tfile.ext\" represents:\n\ndir\n subdir1\n subdir2\n file.ext\nThe directory dir contains an empty sub-directory subdir1 and a sub-directory subdir2\ncontaining a file file.ext.\n\nThe string \"dir\\n\\tsubdir1\\n\\t\\tfile1.ext\\n\\t\\tsubsubdir1\\n\\tsubdir2\\n\\t\\tsubsubdir2\\n\n\\t\\t\\tfile2.ext\" represents:\n\ndir\n subdir1\n file1.ext\n subsubdir1\n subdir2\n subsubdir2\n file2.ext\nThe directory dir contains two sub-directories subdir1 and subdir2. subdir1 contains a\nfile file1.ext and an empty second-level sub-directory subsubdir1. subdir2 contains a\nsecond-level sub-directory subsubdir2 containing a file file2.ext.\n\nWe are interested in finding the longest (number of characters) absolute path to a file\nwithin our file system. 
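On the file-syncing solution above: the payoff of the Merkle layout is that equal hashes let the sync skip whole subtrees, so only differing paths cross the low-bandwidth link. On real files the leaf hash would come from streaming the contents rather than holding them in a string, roughly like this (hypothetical helper, not part of the repo):

from hashlib import sha256

def file_digest(path: str, chunk_size: int = 1 << 16) -> str:
    # stream the file in chunks so large files never sit in memory at once
    digest = sha256()
    with open(path, "rb") as handle:
        for chunk in iter(lambda: handle.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()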
For example, in the second example above, the longest absolute\npath is \"dir/subdir2/subsubdir2/file2.ext\", and its length is 32 (not including the\ndouble quotes).\n\nGiven a string representing the file system in the above format, return the length of\nthe longest absolute path to a file in the abstracted file system. If there is no file\nin the system, return 0.\n\"\"\"\n\n\ndef count_tabs(string: str) -> int:\n return string.count(\"\\t\")\n\n\ndef longest_dir(string: str) -> int:\n dir_list = string.split(\"\\n\")\n length = len(dir_list)\n longest_directory_length = 0\n # calculating the length of the longest absolute path\n for i in range(length - 1, -1, -1):\n temp = dir_list[i]\n temp_dir = temp.lstrip(\"\\t\")\n # skipping calculation if it is not a file\n if temp_dir.find(\".\") == -1:\n continue\n # counting the number of tabs to check the location\n # (0 tabs = root, 1 tab = sub-directory, 2 tabs = sub-sub-directory, ...)\n count = count_tabs(temp)\n # moving back through the list to recreate the entire directory\n for j in range(i, -1, -1):\n if count_tabs(dir_list[j]) < count:\n temp_dir = dir_list[j].lstrip(\"\\t\") + \"/\" + temp_dir\n temp = dir_list[j]\n count = count_tabs(temp)\n # storing the longest directory path length\n longest_directory_length = max(longest_directory_length, len(temp_dir))\n return longest_directory_length\n\n\nif __name__ == \"__main__\":\n print(longest_dir(\"dir\\n\\tsubdir1\\n\\tsubdir2\\n\\t\\tfile.ext\"))\n print(\n longest_dir(\n \"dir\\n\\tsubdir1\\n\\t\\tfile1.ext\\n\\t\\tsubsubdir1\\n\\tsubdir2\\n\\t\\tsubsubdir2\"\n + \"\\n\\t\\t\\tfile2.ext\"\n )\n )\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n ^ 2)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "7073587", "language": "Python", "matching_score": 1.16681969165802, "max_stars_count": 70, "path": "Solutions/017.py" }, { "content": "\"\"\"\nProblem:\n\nGiven an absolute pathname that may have . or .. as part of it, return the shortest\nstandardized path.\n\nFor example, given /usr/bin/../bin/./scripts/../, return /usr/bin/.\n\"\"\"\n\nfrom DataStructures.Stack import Stack\n\n\ndef get_shortest_standardized_path(path: str) -> str:\n path_list = path.split(\"/\")\n stack = Stack()\n\n for curr_directory in path_list:\n if curr_directory == \".\":\n continue\n elif curr_directory == \"..\":\n stack.pop()\n else:\n stack.push(curr_directory)\n return \"/\".join(stack)\n\n\nif __name__ == \"__main__\":\n print(get_shortest_standardized_path(\"/usr/bin/../bin/./scripts/../\"))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "9198420", "language": "Python", "matching_score": 1.5166617631912231, "max_stars_count": 70, "path": "Solutions/222.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a stack of N elements, interleave the first half of the stack with the second\nhalf reversed using only one other queue. This should be done in-place.\n\nRecall that you can only push or pop from a stack, and enqueue or dequeue from a queue.\n\nFor example, if the stack is [1, 2, 3, 4, 5], it should become [1, 5, 2, 4, 3]. 
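The quadratic walk-back in 017.py above can be avoided by tracking one running prefix length per depth while scanning forward, which is the usual O(n) formulation; a sketch:

def longest_file_path(fs: str) -> int:
    depth_len = {0: 0}  # depth -> length of the current directory prefix (with '/')
    longest = 0
    for entry in fs.split("\n"):
        name = entry.lstrip("\t")
        depth = len(entry) - len(name)  # number of leading tabs
        if "." in name:  # a file terminates a candidate path
            longest = max(longest, depth_len[depth] + len(name))
        else:
            depth_len[depth + 1] = depth_len[depth] + len(name) + 1  # +1 for '/'
    return longest

assert longest_file_path("dir\n\tsubdir1\n\tsubdir2\n\t\tfile.ext") == 20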
If the\nstack is [1, 2, 3, 4], it should become [1, 4, 2, 3].\n\nHint: Try working backwards from the end state.\n\"\"\"\n\nfrom DataStructures.Stack import Stack\nfrom DataStructures.Queue import Queue\n\n\ndef interleave(stack: Stack) -> Stack:\n queue = Queue()\n # interleaving the elements\n for i in range(1, len(stack)):\n for _ in range(i, len(stack)):\n queue.enqueue(stack.pop())\n for _ in range(len(queue)):\n stack.push(queue.dequeue())\n return stack\n\n\nif __name__ == \"__main__\":\n stack = Stack()\n\n stack.push(1)\n stack.push(2)\n stack.push(3)\n stack.push(4)\n\n print(stack)\n print(interleave(stack))\n print()\n\n stack = Stack()\n\n stack.push(1)\n stack.push(2)\n stack.push(3)\n stack.push(4)\n stack.push(5)\n\n print(stack)\n print(interleave(stack))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n ^ 2)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "5566544", "language": "Python", "matching_score": 1.7396565675735474, "max_stars_count": 70, "path": "Solutions/180.py" }, { "content": "\"\"\"\nProblem:\n\nImplement a queue using two stacks. Recall that a queue is a FIFO (first-in, first-out)\ndata structure with the following methods: enqueue, which inserts an element into the\nqueue, and dequeue, which removes it.\n\"\"\"\n\nfrom DataStructures.Stack import Stack\n\n\nclass Queue:\n def __init__(self) -> None:\n self.stack1 = Stack()\n self.stack2 = Stack()\n\n def __str__(self) -> str:\n return str(self.stack2[::-1] + self.stack1[::])\n\n def enqueue(self, val: int) -> None:\n self._transfer_to_stack1()\n self.stack1.push(val)\n\n def dequeue(self) -> int:\n self._transfer_to_stack2()\n if len(self.stack2) == 0:\n raise RuntimeError(\"Cannot dequeue from an empty queue\")\n return self.stack2.pop()\n\n def _transfer_to_stack2(self) -> None:\n # helper function to move all items from stack 1 to stack 2\n while not self.stack1.is_empty():\n self.stack2.push(self.stack1.pop())\n\n def _transfer_to_stack1(self) -> None:\n # helper function to move all items from stack 2 back to stack 1\n while not self.stack2.is_empty():\n self.stack1.push(self.stack2.pop())\n\n\nif __name__ == \"__main__\":\n queue = Queue()\n\n print(queue)\n\n queue.enqueue(1)\n queue.enqueue(5)\n queue.enqueue(9)\n queue.enqueue(3)\n queue.enqueue(4)\n queue.enqueue(0)\n\n print(queue)\n\n print(queue.dequeue())\n print(queue.dequeue())\n\n print(queue)\n\n print(queue.dequeue())\n print(queue.dequeue())\n print(queue.dequeue())\n print(queue.dequeue())\n\n print(queue)\n", "id": "3658137", "language": "Python", "matching_score": 2.215862274169922, "max_stars_count": 70, "path": "Solutions/053.py" }, { "content": "\"\"\"\nProblem:\n\nImplement a stack API using only a heap. 
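A refinement on the two-stack queue in 053.py above: transferring on every enqueue and dequeue can shuttle the whole queue back and forth when the two operations alternate. The standard lazy variant moves elements only when the outgoing stack is empty, so each element crosses at most twice and both operations are amortized O(1); a sketch on plain lists:

class LazyQueue:
    def __init__(self) -> None:
        self.incoming, self.outgoing = [], []

    def enqueue(self, val: int) -> None:
        self.incoming.append(val)

    def dequeue(self) -> int:
        if not self.outgoing:  # refill only when empty: amortized O(1)
            while self.incoming:
                self.outgoing.append(self.incoming.pop())
        if not self.outgoing:
            raise RuntimeError("Cannot dequeue from an empty queue")
        return self.outgoing.pop()

q = LazyQueue()
for x in (1, 5, 9):
    q.enqueue(x)
assert [q.dequeue(), q.dequeue(), q.dequeue()] == [1, 5, 9]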
A stack implements the following methods:\n\npush(item), which adds an element to the stack\npop(), which removes and returns the most recently added element (or throws an error if\n there is nothing on the stack)\n\nRecall that a heap has the following operations:\n\npush(item), which adds a new key to the heap\npop(), which removes and returns the max value of the heap\n\"\"\"\n\nfrom sys import maxsize\n\nfrom DataStructures.Heap import MinHeap\n\n\nclass Stack:\n def __init__(self) -> None:\n self.heap = MinHeap()\n self.next_wt = maxsize\n\n def pop(self) -> int:\n if len(self.heap) == 0:\n raise ValueError(\"Stack Underflow\")\n _, val = self.heap.extract_min()\n return val\n\n def push(self, val: int) -> None:\n self.heap.insert((self.next_wt, val))\n self.next_wt -= 1\n\n\nif __name__ == \"__main__\":\n stack = Stack()\n\n stack.push(1)\n stack.push(7)\n stack.push(4)\n\n print(stack.pop())\n\n stack.push(2)\n\n print(stack.pop())\n print(stack.pop())\n print(stack.pop())\n", "id": "1007397", "language": "Python", "matching_score": 2.028172254562378, "max_stars_count": 70, "path": "Solutions/154.py" }, { "content": "\"\"\"\nProblem:\n\nA quack is a data structure combining properties of both stacks and queues. It can be\nviewed as a list of elements written left to right such that three operations are\npossible:\n\npush(x): add a new item x to the left end of the list\npop(): remove and return the item on the left end of the list\npull(): remove the item on the right end of the list.\nImplement a quack using three stacks and O(1) additional memory, so that the amortized\ntime for any push, pop, or pull operation is O(1).\n\"\"\"\n\nfrom DataStructures.Stack import Stack\n\n\nclass Quack:\n def __init__(self) -> None:\n self.stack_1 = Stack()\n self.stack_2 = Stack()\n self.stack_3 = Stack()\n self.elements = 0\n\n def __len__(self) -> int:\n return self.elements\n\n def push(self, x: int) -> None:\n self.stack_1.push(x)\n self.stack_2.push(x)\n self.elements += 1\n\n def pop(self) -> int:\n if self.elements == 0:\n raise RuntimeWarning(\"Quack underflow\")\n if len(self.stack_2) == 0:\n while not self.stack_3.is_empty():\n self.stack_2.push(self.stack_3.pop())\n self.elements -= 1\n self.stack_2.pop()\n return self.stack_1.pop()\n\n def pull(self) -> int:\n if self.elements == 0:\n raise RuntimeWarning(\"Quack underflow\")\n if len(self.stack_3) == 0:\n while not self.stack_2.is_empty():\n self.stack_3.push(self.stack_2.pop())\n self.elements -= 1\n return self.stack_3.pop()\n\n\nif __name__ == \"__main__\":\n quack = Quack()\n\n quack.push(1)\n quack.push(2)\n quack.push(3)\n quack.push(4)\n quack.push(5)\n\n print(quack.pop())\n print(quack.pull())\n\n print(quack.pop())\n print(quack.pull())\n\n print(quack.pull())\n print(f\"Length: {len(quack)}\")\n", "id": "4369196", "language": "Python", "matching_score": 1.7669868469238281, "max_stars_count": 70, "path": "Solutions/365.py" }, { "content": "\"\"\"\nProblem:\n\nImplement a stack that has the following methods:\n\npush(val), which pushes an element onto the stack\npop(), which pops off and returns the topmost element of the stack. If there are no\nelements in the stack, then it should throw an error or return null.\nmax(), which returns the maximum value in the stack currently. 
If there are no elements\nin the stack, then it should throw an error or return null.\nEach method should run in constant time.\n\"\"\"\n\nfrom DataStructures.Stack import Stack\n\n\nclass MaxStack:\n def __init__(self) -> None:\n self.stack = Stack()\n self.maximum = Stack()\n\n def __repr__(self) -> str:\n return f\"Stack: {self.stack}\\nMax Stack: {self.maximum}\"\n\n def push(self, val: int) -> None:\n self.stack.push(val)\n # if the current value is larger than the previous maxima, its index is added\n # to the maximum stack\n if self.maximum.is_empty() or self.stack[self.maximum.peek()] < val:\n self.maximum.push(len(self.stack) - 1)\n\n def pop(self) -> int:\n if self.stack.is_empty():\n raise RuntimeError(\"Cannot pop from a empty stack\")\n # if the index of the current element to be removed is in the maximum stack,\n # its removed as well\n if len(self.stack) == self.maximum.peek() + 1:\n self.maximum.pop()\n return self.stack.pop()\n\n def max(self) -> int:\n if self.stack.is_empty():\n raise RuntimeError(\"Cannot get max of a empty stack\")\n # the maximum is accessed from the last poistion stored in maximum stack\n return self.stack[self.maximum.peek()]\n\n\nif __name__ == \"__main__\":\n s = MaxStack()\n\n s.push(1)\n s.push(3)\n s.push(2)\n s.push(5)\n\n print(s.max())\n print(s)\n print(s.pop())\n print()\n\n print(s.max())\n print(s)\n print(s.pop())\n print()\n\n print(s.max())\n print(s)\n print(s.pop())\n print()\n\n print(s.max())\n print(s)\n print(s.pop())\n print()\n\n print(s)\n", "id": "189292", "language": "Python", "matching_score": 1.789343237876892, "max_stars_count": 70, "path": "Solutions/043.py" }, { "content": "\"\"\"\nProblem:\n\nImplement 3 stacks using a single list:\n\nclass Stack:\n def __init__(self):\n self.list = []\n\n def pop(self, stack_number):\n pass\n\n def push(self, item, stack_number):\n pass\n\"\"\"\n\n\nclass Stack:\n def __init__(self) -> None:\n self.list = []\n self.stack1_last_index = 0\n self.stack2_last_index = 0\n self.stack3_last_index = 0\n\n def __repr__(self) -> str:\n return (\n f\"Stack1: {self.list[:self.stack1_last_index]}\"\n + f\"\\nStack2: {self.list[self.stack1_last_index:self.stack2_last_index]}\"\n + f\"\\nStack3: {self.list[self.stack2_last_index:]}\"\n )\n\n def pop(self, stack_number: int) -> int:\n if stack_number == 1:\n if len(self.list[: self.stack1_last_index]) == 0:\n raise ValueError(\"Stack Underflow\")\n self.list.pop(self.stack1_last_index - 1)\n self.stack1_last_index -= 1\n self.stack2_last_index -= 1\n self.stack3_last_index -= 1\n elif stack_number == 2:\n if len(self.list[self.stack1_last_index : self.stack2_last_index]) == 0:\n raise ValueError(\"Stack Underflow\")\n self.list.pop(self.stack2_last_index - 1)\n self.stack2_last_index -= 1\n self.stack3_last_index -= 1\n elif stack_number == 3:\n if len(self.list[self.stack2_last_index :]) == 0:\n raise ValueError(\"Stack Underflow\")\n self.list.pop()\n self.stack3_last_index -= 1\n\n def push(self, item: int, stack_number: int) -> None:\n if stack_number == 1:\n self.list.insert(self.stack1_last_index, item)\n self.stack1_last_index += 1\n self.stack2_last_index += 1\n self.stack3_last_index += 1\n elif stack_number == 2:\n self.list.insert(self.stack2_last_index, item)\n self.stack2_last_index += 1\n self.stack3_last_index += 1\n elif stack_number == 3:\n self.list.insert(self.stack3_last_index, item)\n self.stack3_last_index += 1\n\n\nif __name__ == \"__main__\":\n stack = Stack()\n stack.push(5, 3)\n stack.push(10, 2)\n stack.push(1, 1)\n\n 
print(stack)\n print()\n\n stack.push(3, 3)\n stack.push(1, 2)\n stack.push(0, 2)\n\n print(stack)\n print()\n\n stack.pop(2)\n stack.pop(1)\n stack.pop(3)\n\n print(stack)\n", "id": "5853517", "language": "Python", "matching_score": 0.6643673777580261, "max_stars_count": 70, "path": "Solutions/141.py" }, { "content": "\"\"\"\nProblem:\n\nAn XOR linked list is a more memory efficient doubly linked list. Instead of each node\nholding next and prev fields, it holds a field named both, which is an XOR of the next\nnode and the previous node. Implement an XOR linked list; it has an add(element) which\nadds the element to the end, and a get(index) which returns the node at index.\n\nIf using a language that has no pointers (such as Python), you can assume you have\naccess to get_pointer and dereference_pointer functions that convert between nodes\nand memory addresses.\n\"\"\"\n\n\n# Solution copied from:\n# https://github.com/r1cc4rdo/daily_coding_problem/blob/master/problems/06\n\n\n\"\"\"\nNOTE: python does not have actual pointers (id() exists but it is not an actual pointer\nin all implementations). For this reason, we use a python list to simulate memory.\nIndexes are the addresses in memory. This has the unfortunate consequence that the\ntraversal logic needs to reside in the List class rather than the Node one.\n\"\"\"\n\nfrom typing import Tuple\n\n\nclass XORLinkedListNode:\n def __init__(self, val: int, prev: int, next: int) -> None:\n self.val = val\n self.both = prev ^ next\n\n def next_node(self, prev_idx: int) -> int:\n return self.both ^ prev_idx\n\n def prev_node(self, next_idx: int) -> int:\n return self.both ^ next_idx\n\n\nclass XORLinkedList:\n def __init__(self) -> None:\n self.memory = [XORLinkedListNode(None, -1, -1)]\n\n def head(self) -> Tuple[int, int, XORLinkedListNode]:\n # head node index, prev node index, head node\n return 0, -1, self.memory[0]\n\n def add(self, val: int) -> None:\n current_node_index, previous_node_index, current_node = self.head()\n while True:\n # walk down the list until the end is found\n next_node_index = current_node.next_node(previous_node_index)\n if next_node_index == -1:\n # the end is reached\n break\n previous_node_index, current_node_index = (\n current_node_index,\n next_node_index,\n )\n current_node = self.memory[next_node_index]\n # allocation\n new_node_index = len(self.memory)\n current_node.both = previous_node_index ^ new_node_index\n self.memory.append(XORLinkedListNode(val, current_node_index, -1))\n\n def get(self, index: int) -> int:\n current_index, previous_index, current_node = self.head()\n for _ in range(index + 1):\n previous_index, current_index = (\n current_index,\n current_node.next_node(previous_index),\n )\n current_node = self.memory[current_index]\n return current_node.val\n\n\nif __name__ == \"__main__\":\n xor_linked_list = XORLinkedList()\n\n xor_linked_list.add(1)\n xor_linked_list.add(2)\n xor_linked_list.add(3)\n xor_linked_list.add(4)\n\n print(xor_linked_list.get(0))\n print(xor_linked_list.get(1))\n print(xor_linked_list.get(2))\n print(xor_linked_list.get(3))\n", "id": "4999438", "language": "Python", "matching_score": 0.6780087351799011, "max_stars_count": 70, 
"path": "Solutions/006.py" }, { "content": "\"\"\"\nProblem:\n\nWe say a number is sparse if there are no adjacent ones in its binary representation.\nFor example, 21 (10101) is sparse, but 22 (10110) is not. For a given input N, find the\nsmallest sparse number greater than or equal to N.\n\nDo this in faster than O(N log N) time.\n\"\"\"\n\n\ndef get_next_sparse(num: int) -> int:\n binary = bin(num)[2:]\n new_str_bin = \"\"\n prev_digit = None\n flag = False\n # generating the binary representation of the next sparse number\n for i, digit in enumerate(binary):\n if digit == \"1\" and prev_digit == \"1\":\n flag = True\n if flag:\n new_str_bin += \"0\" * (len(binary) - i)\n break\n new_str_bin += digit\n prev_digit = digit\n if flag:\n if new_str_bin[0] == \"1\":\n new_str_bin = \"10\" + new_str_bin[1:]\n else:\n new_str_bin = \"1\" + new_str_bin\n return int(new_str_bin, base=2)\n\n\nif __name__ == \"__main__\":\n print(get_next_sparse(21))\n print(get_next_sparse(25))\n print(get_next_sparse(255))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(log(n))\nSPACE COMPLEXITY: O(log(n))\n\"\"\"\n", "id": "3423370", "language": "Python", "matching_score": 1.2407866716384888, "max_stars_count": 70, "path": "Solutions/217.py" }, { "content": "\"\"\"\nProblem:\n\nThe number 6174 is known as Kaprekar's contant, after the mathematician who discovered\nan associated property: for all four-digit numbers with at least two distinct digits,\nrepeatedly applying a simple procedure eventually results in this value. The procedure\nis as follows:\n\nFor a given input x, create two new numbers that consist of the digits in x in\nascending and descending order. Subtract the smaller number from the larger number.\nFor example, this algorithm terminates in three steps when starting from 1234:\n\n4321 - 1234 = 3087\n8730 - 0378 = 8352\n8532 - 2358 = 6174\nWrite a function that returns how many steps this will take for a given input N.\n\"\"\"\n\nKAPREKAR_CONSTANT = 6174\n\n\ndef get_num_steps(num: int, steps: int = 0) -> int:\n if len(set(list(str(num)))) < 2:\n raise ValueError(\n \"Kaprekar's operation requires at least 2 distinct digits in the number\"\n )\n if num == KAPREKAR_CONSTANT:\n return steps\n # applying Kaprekar's operation\n digits = list(str(num))\n digits.sort()\n num1 = int(\"\".join(digits[::-1]))\n num2 = int(\"\".join(digits))\n return get_num_steps(num1 - num2, steps + 1)\n\n\nif __name__ == \"__main__\":\n print(get_num_steps(1234))\n print(get_num_steps(1204))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(1) [as it does not exceed 7]\nSPACE COMPLEXITY: O(1) [as the number of digits is 4]\n\"\"\"\n", "id": "9132659", "language": "Python", "matching_score": 1.040458083152771, "max_stars_count": 70, "path": "Solutions/288.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a positive integer N, find the smallest number of steps it will take to reach 1.\n\nThere are two kinds of permitted steps:\n\nYou may decrement N to N - 1.\nIf a * b = N, you may decrement N to the larger of a and b.\nFor example, given 100, you can reach 1 in five steps with the following route:\n100 -> 10 -> 9 -> 3 -> 2 -> 1.\n\"\"\"\n\n\nfrom typing import Tuple\n\n\ndef get_closest_factors(num: int) -> Tuple[int, int]:\n a, b = 1, num\n factor_1, factor_2 = 1, 1\n while b > a:\n if num % a == 0:\n factor_1, factor_2 = a, num // a\n b = num / a\n a += 1\n return (factor_1, factor_2)\n\n\ndef get_step_size(num: int, steps: int = 0) -> int:\n if num < 1:\n raise ValueError(f\"Cannot reach 1 from {num}\")\n if num == 1:\n return steps\n 
# generating the sequence to get the least number of steps\n largest_factor = max(get_closest_factors(num))\n if largest_factor == num:\n return get_step_size(num - 1, steps + 1)\n return min(\n get_step_size(num - 1, steps + 1), get_step_size(largest_factor, steps + 1)\n )\n\n\nif __name__ == \"__main__\":\n print(get_step_size(100)) # 100 -> 10 -> 9 -> 3 -> 2 -> 1\n print(get_step_size(64)) # 64 -> 8 -> 4 -> 2 -> 1\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n x log(n))\nSPACE COMPLEXITY: O(n) [considering call-stack]\n\"\"\"\n", "id": "2049853", "language": "Python", "matching_score": 1.1272410154342651, "max_stars_count": 70, "path": "Solutions/321.py" }, { "content": "\"\"\"\nProblem:\n\nA regular number in mathematics is defined as one which evenly divides some power of\n60. Equivalently, we can say that a regular number is one whose only prime divisors are\n2, 3, and 5.\n\nThese numbers have had many applications, from helping ancient Babylonians keep time to\ntuning instruments according to the diatonic scale.\n\nGiven an integer N, write a program that returns, in order, the first N regular\nnumbers.\n\"\"\"\n\nfrom typing import List, Set\n\n\ndef get_prime_factors(num: int) -> Set[int]:\n factors = set()\n curr = 2\n while num > 1:\n while num > 1 and num % curr == 0:\n num = num // curr\n factors.add(curr)\n curr += 1\n return factors\n\n\ndef get_regular_numbers(N: int) -> List[int]:\n # using Sieve of Eratosthenes Method to optimally find the required numbers\n total_range = 2 * N\n SoE = [False for _ in range(total_range)]\n result = []\n count = 0\n factors = set([2, 3, 5])\n\n for factor in factors:\n for i in range(factor, total_range, factor):\n if not SoE[i] and not (get_prime_factors(i) - factors):\n SoE[i] = True\n\n for index, value in enumerate(SoE):\n if value:\n result.append(index)\n count += 1\n if count == N:\n break\n return result\n\n\nif __name__ == \"__main__\":\n print(get_regular_numbers(10))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n log(n))\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "8918388", "language": "Python", "matching_score": 1.088737964630127, "max_stars_count": 70, "path": "Solutions/283.py" }, { "content": "\"\"\"\nProblem:\n\nThe Sieve of Eratosthenes is an algorithm used to generate all prime numbers smaller\nthan N. The method is to take increasingly larger prime numbers, and mark their\nmultiples as composite.\n\nFor example, to find all primes less than 100, we would first mark [4, 6, 8, ...]\n(multiples of two), then [6, 9, 12, ...] (multiples of three), and so on. 
Once we have\ndone this for all primes less than N, the unmarked numbers that remain will be prime.\n\nImplement this algorithm.\n\nBonus: Create a generator that produces primes indefinitely (that is, without taking N\nas an input).\n\"\"\"\n\n\nfrom typing import Generator, List\n\n\ndef sieve_of_eratosthenes(sieve: List[bool] = []) -> List[bool]:\n if sieve:\n length = len(sieve)\n sieve.extend([True for _ in range(length)])\n else:\n length = 10\n sieve = [True for _ in range(length * 2)]\n sieve[0], sieve[1] = False, False\n # sieve generation\n for i in range(2, 2 * length):\n if sieve[i]:\n for j in range(2 * i, 2 * length, i):\n sieve[j] = False\n return sieve\n\n\ndef primes_generator() -> Generator[int, None, None]:\n primes = sieve_of_eratosthenes()\n prev = 0\n while True:\n for i in range(prev, len(primes)):\n if primes[i]:\n yield i\n prev = len(primes)\n primes = sieve_of_eratosthenes(primes)\n\n\nif __name__ == \"__main__\":\n generator = primes_generator()\n for _ in range(35):\n print(next(generator))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "12719235", "language": "Python", "matching_score": 0.7040827870368958, "max_stars_count": 70, "path": "Solutions/244.py" }, { "content": "\"\"\"\nProblem:\n\nGiven an even number (greater than 2), return two prime numbers whose sum will be equal\nto the given number.\n\nA solution will always exist. See Goldbach’s conjecture.\n\nExample:\n\nInput: 4, Output: 2 + 2 = 4. If more than one solution is possible, return the\nlexicographically smaller solution.\n\nIf [a, b] is one solution with a <= b, and [c, d] is another solution with c <= d, then\n\n[a, b] < [c, d]\nif a < c or a==c and b < d.\n\"\"\"\n\nfrom typing import Tuple\n\n\ndef is_prime(num: int) -> bool:\n # time complexity: O(sqrt(n))\n for i in range(2, int(num ** 0.5) + 1):\n if num % i == 0:\n return False\n return True\n\n\ndef get_prime_sum(num: int) -> Tuple[int, int]:\n if num > 2 and is_prime(num - 2):\n return 2, num - 2\n if num > 3 and is_prime(num - 3):\n return 3, num - 3\n # all prime numbers are of the form (6n + 1) or (6n - 1)\n for i in range(6, num // 2, 6):\n if is_prime(i - 1) and is_prime(num - i + 1):\n return (i - 1), (num - i + 1)\n elif is_prime(i + 1) and is_prime(num - i - 1):\n return (i + 1), (num - i - 1)\n\n\nif __name__ == \"__main__\":\n num = 4\n num_1, num_2 = get_prime_sum(num)\n print(f\"{num} = {num_1} + {num_2}\")\n\n num = 10\n num_1, num_2 = get_prime_sum(num)\n print(f\"{num} = {num_1} + {num_2}\")\n\n num = 100\n num_1, num_2 = get_prime_sum(num)\n print(f\"{num} = {num_1} + {num_2}\")\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n x sqrt(n))\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "7621160", "language": "Python", "matching_score": 1.21829092502594, "max_stars_count": 70, "path": "Solutions/101.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a multiset of integers, return whether it can be partitioned into two subsets\nwhose sums are the same.\n\nFor example, given the multiset {15, 5, 20, 10, 35, 15, 10}, it would return true,\nsince we can split it up into {15, 5, 10, 15, 10} and {20, 35}, which both add up to 55.\n\nGiven the multiset {15, 5, 20, 10, 35}, it would return false, since we can't split it\nup into two subsets that add up to the same sum.\n\"\"\"\n\nfrom typing import List\n\n\ndef equal_sum_split_check_helper(\n arr: List[int], start: int, stop: int, sum_inner: int, sum_outer: int\n) -> bool:\n if start >= stop:\n return False\n elif sum_inner == sum_outer:\n return True\n 
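# added note: sum_inner + sum_outer always equals sum(arr), so the branch above\n # succeeds exactly when each half holds sum(arr) / 2\n 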
# checking for all possible splits\n return equal_sum_split_check_helper(\n arr, start + 1, stop, sum_inner - arr[start], sum_outer + arr[start]\n ) or equal_sum_split_check_helper(\n arr, start, stop - 1, sum_inner - arr[stop], sum_outer + arr[stop]\n )\n\n\ndef equal_sum_split_check(arr: List[int]) -> bool:\n # cases where the array cannot be split\n total_sum = sum(arr)\n if not arr or total_sum % 2 == 1:\n return False\n # sorting the array (prerequisite for equal_sum_split_check_helper)\n arr.sort()\n return equal_sum_split_check_helper(arr, 0, len(arr) - 1, total_sum, 0)\n\n\nif __name__ == \"__main__\":\n print(equal_sum_split_check([15, 5, 20, 10, 35, 15, 10]))\n print(equal_sum_split_check([15, 5, 20, 10, 35]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(2 ^ n)\nSPACE COMPLEXITY: O(n) [recursion depth]\n\"\"\"\n", "id": "11378930", "language": "Python", "matching_score": 1.8386715650558472, "max_stars_count": 70, "path": "Solutions/060.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a list of integers and a number K, return which contiguous elements of the list\nsum to K.\n\nFor example, if the list is [1, 2, 3, 4, 5] and K is 9, then it should return [2, 3, 4].\n\"\"\"\n\nfrom typing import List, Optional\n\n\ndef get_arr_contiguous_sum(arr: List[int], k: int) -> Optional[List[int]]:\n length = len(arr)\n total_sum = 0\n start, end = 0, 0\n # generating the sequence using moving window\n for i in range(length):\n if total_sum == k:\n return arr[start:end]\n total_sum += arr[i]\n end = i + 1\n # shrink the window for as long as the running sum exceeds k\n while total_sum > k:\n total_sum -= arr[start]\n start += 1\n if total_sum == k:\n return arr[start:end]\n return None\n\n\nif __name__ == \"__main__\":\n print(get_arr_contiguous_sum([1, 2, 3, 4, 5], 0))\n print(get_arr_contiguous_sum([1, 2, 3, 4, 5], 1))\n print(get_arr_contiguous_sum([1, 2, 3, 4, 5], 5))\n print(get_arr_contiguous_sum([5, 4, 3, 4, 5], 12))\n print(get_arr_contiguous_sum([5, 4, 3, 4, 5], 11))\n print(get_arr_contiguous_sum([1, 2, 3, 4, 5], 9))\n print(get_arr_contiguous_sum([1, 2, 3, 4, 5], 3))\n print(get_arr_contiguous_sum([1, 2, 3, 4, 5], 300))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "2312145", "language": "Python", "matching_score": 1.586645245552063, "max_stars_count": 70, "path": "Solutions/102.py" }, { "content": "\"\"\"\nProblem:\n\nGiven an array of elements, return the length of the longest subarray where all its\nelements are distinct.\n\nFor example, given the array [5, 1, 3, 5, 2, 3, 4, 1], return 5 as the longest subarray\nof distinct elements is [5, 2, 3, 4, 1].\n\"\"\"\n\nfrom typing import List\n\n\ndef max_unique_subarr(arr: List[int]) -> int:\n if not arr:\n return 0\n\n length = len(arr)\n cache = set()\n max_length, window_length, window_start = 0, 0, 0\n\n for i in range(length):\n if arr[i] not in cache:\n cache.add(arr[i])\n window_length += 1\n continue\n\n max_length = max(max_length, window_length)\n for j in range(window_start, i):\n cache.remove(arr[j])\n window_length -= 1\n if arr[j] == arr[i]:\n window_start = j\n cache.add(arr[j])\n window_length += 1\n break\n return max(max_length, window_length)\n\n\nif __name__ == \"__main__\":\n print(max_unique_subarr([5, 1, 3, 5, 2, 3, 4, 1, 5]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "5126879", "language": "Python", "matching_score": 1.2493807077407837, "max_stars_count": 70, "path": "Solutions/189.py" }, { "content": "\"\"\"\nProblem:\n\nGiven an integer k and a string s, find the length of the longest substring 
that\ncontains at most k distinct characters.\n\nFor example, given s = \"abcba\" and k = 2, the longest substring with k distinct\ncharacters is \"bcb\".\n\"\"\"\n\n\ndef longest_substring_k_unique(string: str, k: int) -> str:\n length = len(string)\n # start and end is used to create the moving window (end - start)\n start = 0\n end = 1\n longest_substring_till_now = string[0]\n char_freq = {string[0]: 1}\n num_unique = 1\n # generating the longest substring\n while end < length:\n if string[end] in char_freq:\n char_freq[string[end]] += 1\n else:\n char_freq[string[end]] = 1\n num_unique += 1\n # updating moving window and character frequency\n if num_unique > k:\n while num_unique > k:\n char_freq[string[start]] -= 1\n if char_freq[string[start]] == 0:\n del char_freq[string[start]]\n num_unique -= 1\n start += 1\n # updating the longest substring\n temp = string[start : end + 1]\n if num_unique <= k and len(temp) > len(longest_substring_till_now):\n longest_substring_till_now = temp\n end += 1\n return longest_substring_till_now\n\n\nif __name__ == \"__main__\":\n print(longest_substring_k_unique(\"abcba\", 2))\n print(longest_substring_k_unique(\"abcba\", 20))\n print(longest_substring_k_unique(\"karappa\", 2))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "7728087", "language": "Python", "matching_score": 1.9172214269638062, "max_stars_count": 70, "path": "Solutions/013.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a string, find the length of the smallest window that contains every distinct\ncharacter. Characters may appear more than once in the window.\n\nFor example, given \"jiujitsu\", you should return 5, corresponding to the final five\nletters.\n\"\"\"\n\nfrom sys import maxsize\n\n\ndef get_min_all_char_window(string: str) -> int:\n result = maxsize\n characters = set(string)\n length = len(string)\n curr_char = {}\n start, end = 0, len(characters)\n # initializing moving window\n for i in range(start, end):\n if string[i] not in curr_char:\n curr_char[string[i]] = 0\n curr_char[string[i]] += 1\n # check for all characters in the string is distinct\n if len(curr_char) == len(characters):\n result = len(curr_char)\n # updating moving window\n while end < length:\n if string[end] not in curr_char:\n curr_char[string[end]] = 0\n curr_char[string[end]] += 1\n # shortening window\n while curr_char[string[start]] > 1:\n curr_char[string[start]] -= 1\n start += 1\n # check if the window contains all characters\n if len(curr_char) == len(characters):\n result = min(result, end - start + 1)\n end += 1\n return result\n\n\nif __name__ == \"__main__\":\n print(get_min_all_char_window(\"jiujitsu\"))\n print(get_min_all_char_window(\"jiujiuuts\"))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "12389363", "language": "Python", "matching_score": 1.2499160766601562, "max_stars_count": 70, "path": "Solutions/320.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a string and a set of characters, return the shortest substring containing all\nthe characters in the set.\n\nFor example, given the string \"figehaeci\" and the set of characters {a, e, i}, you\nshould return \"aeci\".\n\nIf there is no substring containing all the characters in the set, return null.\n\"\"\"\n\nfrom typing import Set\n\n\ndef shortest_substring_with_all_characters(string: str, characters: Set[str]) -> str:\n curr_char_queue, index_queue = [], []\n curr_seen = set()\n num_char = len(characters)\n result = None\n # generating the shortest 
substring\n for i in range(len(string)):\n if string[i] in characters:\n curr_char_queue.append(string[i])\n index_queue.append(i)\n curr_seen.add(string[i])\n # shortening the substring\n shift = 0\n for k in range(len(curr_char_queue) // 2):\n if curr_char_queue[k] == curr_char_queue[-k - 1]:\n shift += 1\n # truncating the queues\n curr_char_queue = curr_char_queue[shift:]\n index_queue = index_queue[shift:]\n # all characters found\n if len(curr_seen) == num_char:\n if (not result) or (len(result) > (index_queue[-1] - index_queue[0] + 1)):\n result = string[index_queue[0] : index_queue[-1] + 1]\n return result\n\n\nif __name__ == \"__main__\":\n print(shortest_substring_with_all_characters(\"abcdedbc\", {\"g\", \"f\"}))\n print(shortest_substring_with_all_characters(\"abccbbbccbcb\", {\"a\", \"b\", \"c\"}))\n print(shortest_substring_with_all_characters(\"figehaeci\", {\"a\", \"e\", \"i\"}))\n print(shortest_substring_with_all_characters(\"abcdedbc\", {\"d\", \"b\", \"b\"}))\n print(shortest_substring_with_all_characters(\"abcdedbc\", {\"b\", \"c\"}))\n print(shortest_substring_with_all_characters(\"abcdecdb\", {\"b\", \"c\"}))\n print(shortest_substring_with_all_characters(\"abcdecdb\", {\"b\", \"c\", \"e\"}))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n ^ 2)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "4643331", "language": "Python", "matching_score": 1.2630352973937988, "max_stars_count": 70, "path": "Solutions/103.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a string, return the first recurring character in it, or null if there is no\nrecurring character.\n\nFor example, given the string \"acbbac\", return \"b\". Given the string \"abcdef\", return\nnull.\n\"\"\"\n\nfrom typing import Optional\n\n\ndef get_first_recurring_character(string: str) -> Optional[str]:\n seen_characters = set()\n\n for char in string:\n if char in seen_characters:\n return char\n seen_characters.add(char)\n return None\n\n\nif __name__ == \"__main__\":\n print(get_first_recurring_character(\"acbbac\"))\n print(get_first_recurring_character(\"abcdef\"))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "9359310", "language": "Python", "matching_score": 0.08638089895248413, "max_stars_count": 70, "path": "Solutions/159.py" }, { "content": "\"\"\"\nProblem:\n\nGiven an unordered list of flights taken by someone, each represented as\n(origin, destination) pairs, and a starting airport, compute the person's itinerary. If\nno such itinerary exists, return null. If there are multiple possible itineraries,\nreturn the lexicographically smallest one. All flights must be used in the itinerary.\n\nFor example, given the list of flights\n[('SFO', 'HKO'), ('YYZ', 'SFO'), ('YUL', 'YYZ'), ('HKO', 'ORD')] and starting airport\n'YUL', you should return the list ['YUL', 'YYZ', 'SFO', 'HKO', 'ORD'].\n\nGiven the list of flights [('SFO', 'COM'), ('COM', 'YYZ')] and starting airport 'COM',\nyou should return null.\n\nGiven the list of flights [('A', 'B'), ('A', 'C'), ('B', 'C'), ('C', 'A')] and starting\nairport 'A', you should return the list ['A', 'B', 'C', 'A', 'C'] even though\n['A', 'C', 'A', 'B', 'C'] is also a valid itinerary. 
However, the first one is\nlexicographically smaller.\n\"\"\"\n\nfrom typing import List, Optional, Tuple\n\n\ndef get_itinerary(\n flights: List[Tuple[str, str]],\n current_position: str,\n current_itinerary: List[str] = [],\n) -> Optional[List[str]]:\n if not flights and current_itinerary:\n return current_itinerary + [current_position]\n elif not flights:\n return None\n\n resultant_itinerary = None\n # generating the itinerary\n for index, (src, dest) in enumerate(flights):\n # the itinerary is extended from the current position (using DFS)\n if current_position == src:\n child_itinerary = get_itinerary(\n flights[:index] + flights[index + 1 :], dest, current_itinerary + [src]\n )\n if child_itinerary and (\n not resultant_itinerary or child_itinerary < resultant_itinerary\n ):\n resultant_itinerary = child_itinerary\n return resultant_itinerary\n\n\nif __name__ == \"__main__\":\n print(\n get_itinerary(\n [(\"SFO\", \"HKO\"), (\"YYZ\", \"SFO\"), (\"YUL\", \"YYZ\"), (\"HKO\", \"ORD\")], \"YUL\"\n )\n )\n print(get_itinerary([(\"SFO\", \"COM\"), (\"COM\", \"YYZ\")], \"COM\"))\n print(get_itinerary([(\"A\", \"B\"), (\"A\", \"C\"), (\"B\", \"C\"), (\"C\", \"A\")], \"A\"))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n ^ 3)\nSPACE COMPLEXITY: O(n ^ 2)\n\"\"\"\n", "id": "5041463", "language": "Python", "matching_score": 0.7562099099159241, "max_stars_count": 70, "path": "Solutions/041.py" }, { "content": "\"\"\"\nProblem:\n\ncons(a, b) constructs a pair, and car(pair) and cdr(pair) return the first and last\nelement of that pair. For example, car(cons(3, 4)) returns 3, and cdr(cons(3, 4))\nreturns 4.\n\nGiven this implementation of cons:\n\ndef cons(a, b):\n def pair(f):\n return f(a, b)\n return pair\n\"\"\"\n\nfrom typing import Callable\n\n\n# given implementation of cons:\ndef cons(a, b):\n def pair(f):\n return f(a, b)\n\n return pair\n\n\n# car implementation\ndef car(f: Callable) -> int:\n z = lambda x, y: x\n return f(z)\n\n\n# cdr implementation\ndef cdr(f: Callable) -> int:\n z = lambda x, y: y\n return f(z)\n\n\nif __name__ == \"__main__\":\n pair = cons(1, 3)\n\n print(car(pair))\n print(cdr(pair))\n\n\n\"\"\"\nSPECS:\n\ncar:\nTIME COMPLEXITY: O(1) \nSPACE COMPLEXITY: O(1)\n\ncdr:\nTIME COMPLEXITY: O(1) \nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "5167143", "language": "Python", "matching_score": 0.46265363693237305, "max_stars_count": 70, "path": "Solutions/005.py" }, { "content": "\"\"\"\nProblem:\n\nThere are N prisoners standing in a circle, waiting to be executed. 
The executions are\ncarried out starting with the kth person, and removing every successive kth person\ngoing clockwise until there is no one left.\n\nGiven N and k, write an algorithm to determine where a prisoner should stand in order\nto be the last survivor.\n\nFor example, if N = 5 and k = 2, the order of executions would be [2, 4, 1, 5, 3], so\nyou should return 3.\n\nBonus: Find an O(log N) solution if k = 2.\n\"\"\"\n\nfrom typing import Optional\n\n\ndef find_last_executed(n: int, k: int) -> Optional[int]:\n prisoners = [i for i in range(1, n + 1)]\n last_executed = None\n curr_pos = 0\n\n while prisoners:\n curr_pos = (curr_pos + k - 1) % len(prisoners)\n last_executed = prisoners[curr_pos]\n prisoners = prisoners[:curr_pos] + prisoners[curr_pos + 1 :]\n return last_executed\n\n\nif __name__ == \"__main__\":\n print(find_last_executed(5, 2))\n print(find_last_executed(3, 2))\n print(find_last_executed(5, 3))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n ^ 2)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "7386092", "language": "Python", "matching_score": 0.7174417972564697, "max_stars_count": 70, "path": "Solutions/225.py" }, { "content": "\"\"\"\nProblem:\n\nYou run an e-commerce website and want to record the last N order ids in a log.\nImplement a data structure to accomplish this, with the following API:\n\nrecord(order_id): adds the order_id to the log get_last(i): gets the ith last element\nfrom the log. i is guaranteed to be smaller than or equal to N. You should be as\nefficient with time and space as possible.\n\"\"\"\n\n\nclass Order_Log:\n def __init__(self, N: int) -> None:\n self.circular_buffer = [None for _ in range(N)]\n self.N = N\n self.pos = 0\n\n def record(self, order_id: int) -> None:\n # adding the order_id to the log\n self.circular_buffer[self.pos] = order_id\n self.pos += 1\n if self.pos == self.N:\n self.pos = 0\n\n def get_last(self, i: int) -> int:\n # getting the ith last element from the log\n position = self.pos - i\n return self.circular_buffer[position]\n\n\nif __name__ == \"__main__\":\n log = Order_Log(10)\n\n for id in range(20):\n log.record(id)\n\n print(log.get_last(1))\n print(log.get_last(5))\n\n log.record(20)\n log.record(21)\n\n print(log.get_last(1))\n print(log.get_last(3))\n", "id": "1970944", "language": "Python", "matching_score": 0.9809452295303345, "max_stars_count": 70, "path": "Solutions/016.py" }, { "content": "\"\"\"\nProblem:\n\nGiven an array of integers, return a new array such that each element at index i of the\nnew array is the product of all the numbers in the original array except the one at i.\n\nFor example, if our input was [1, 2, 3, 4, 5], the expected output would be\n[120, 60, 40, 30, 24]. 
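(The first entry is 120 because 2 * 3 * 4 * 5 = 120, the product of every\nelement except the 1.) 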
If our input was [3, 2, 1], the expected output would be\n[2, 3, 6].\n\nFollow-up: what if you can't use division?\n\"\"\"\n\nfrom typing import List\n\n\ndef product_of_arr_except_ith_elem(arr: List[int]) -> int:\n length = len(arr)\n result = [1 for _ in range(length)]\n # multiplying all the elements on the left of the ith element in the 1st pass\n # and all the elements on the right of the ith element in the 2nd pass\n prod = 1\n for i in range(length):\n result[i] *= prod\n prod *= arr[i]\n prod = 1\n for i in range(length - 1, -1, -1):\n result[i] *= prod\n prod *= arr[i]\n return result\n\n\nif __name__ == \"__main__\":\n print(product_of_arr_except_ith_elem([1, 2, 3, 4, 5]))\n print(product_of_arr_except_ith_elem([3, 2, 1]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "9844734", "language": "Python", "matching_score": 1.145321249961853, "max_stars_count": 70, "path": "Solutions/002.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a list of elements, find the majority element, which appears more than half the\ntimes (> floor(len(lst) / 2.0)).\n\nYou can assume that such an element exists.\n\nFor example, given [1, 2, 1, 1, 3, 4, 0], return 1.\n\"\"\"\n\nfrom typing import List, Optional\n\n\ndef majority_element(arr: List[int]) -> Optional[int]:\n length = len(arr)\n if not length:\n return\n elif length < 3:\n return arr[0]\n # getting the majority element by generating the frequencies\n frequency = {}\n for elem in arr:\n if elem not in frequency:\n frequency[elem] = 0\n frequency[elem] += 1\n for elem in frequency:\n if frequency[elem] > (length // 2):\n return elem\n\n\nif __name__ == \"__main__\":\n print(majority_element([1, 2, 1, 1, 1, 4, 0]))\n print(majority_element([1, 1, 1, 3, 3, 3, 4, 1, 1]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "8116072", "language": "Python", "matching_score": 0.8983978033065796, "max_stars_count": 70, "path": "Solutions/155.py" }, { "content": "\"\"\"\nProblem:\n\nYou are given N identical eggs and access to a building with k floors. \nYour task is to find the lowest floor that will cause an egg to break, if dropped from that floor. \nOnce an egg breaks, it cannot be dropped again. 
\nIf an egg breaks when dropped from the xth floor, you can assume it will also break when dropped from any floor greater than x.\nWrite an algorithm that finds the minimum number of trial drops it will take, in the worst case, to identify this floor.\n\nExample:\n\nN = 1\nk = 5\nOutput = 5 (we will need to try dropping the egg at every floor, beginning with the first, until we reach the fifth floor)\n\"\"\"\n\nfrom sys import maxsize\n\n\ndef calculate(eggs: int, floors: int) -> int:\n dp_mat = [[maxsize for _ in range(floors + 1)] for _ in range(eggs + 1)]\n # base cases\n for i in range(floors + 1):\n dp_mat[1][i] = i\n dp_mat[0][i] = 0\n for i in range(eggs + 1):\n dp_mat[i][0] = 0\n # populating the dp matrix\n for egg in range(2, eggs + 1):\n for floor in range(1, floors + 1):\n for i in range(1, floor + 1):\n temp = 1 + max(dp_mat[egg - 1][i - 1], dp_mat[egg][floor - i])\n dp_mat[egg][floor] = min(dp_mat[egg][floor], temp)\n return dp_mat[eggs][floors]\n\n\nif __name__ == \"__main__\":\n print(calculate(2, 20))\n print(calculate(3, 15))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n x (floor ^ 2))\nSPACE COMPLEXITY: O(n x floor)\n\"\"\"\n", "id": "2861790", "language": "Python", "matching_score": 1.147704005241394, "max_stars_count": 70, "path": "Solutions/230.py" }, { "content": "\"\"\"\nProblem:\n\nLet M be an N by N matrix in which every row and every column is sorted. No two\nelements of M are equal.\n\nGiven i1, j1, i2, and j2, compute the number of elements of M smaller than M[i1, j1]\nand larger than M[i2, j2].\n\"\"\"\n\nfrom typing import List\n\n\ndef get_num_in_range(mat: List[List[int]], i1: int, j1: int, i2: int, j2: int) -> int:\n num1, num2 = mat[i1][j1], mat[i2][j2]\n count = sum([len([x for x in row if (x < num1 and x > num2)]) for row in mat])\n return count\n\n\nif __name__ == \"__main__\":\n mat = [\n [1, 3, 7, 10, 15, 20],\n [2, 6, 9, 14, 22, 25],\n [3, 8, 10, 15, 25, 30],\n [10, 11, 12, 23, 30, 35],\n [20, 25, 30, 35, 40, 45],\n ]\n print(get_num_in_range(mat, 3, 3, 1, 1))\n\n matrix = [\n [1, 2, 3, 4],\n [5, 8, 9, 13],\n [6, 10, 12, 14],\n [7, 11, 15, 16]\n ]\n print(get_num_in_range(matrix, 1, 3, 3, 1))\n\n matrix = [\n [1, 2, 3, 4],\n [5, 6, 7, 8],\n [10, 11, 12, 13],\n [20, 21, 22, 23]\n ]\n print(get_num_in_range(matrix, 3, 3, 1, 0))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n ^ 2)\nSPACE COMPLEXITY: O(n ^ 2)\n\"\"\"\n", "id": "9314315", "language": "Python", "matching_score": 1.282652735710144, "max_stars_count": 70, "path": "Solutions/195.py" }, { "content": "\"\"\"\nProblem:\n\nSpreadsheets often use this alphabetical encoding for its columns: \"A\", \"B\", \"C\", ...,\n\"AA\", \"AB\", ..., \"ZZ\", \"AAA\", \"AAB\", ....\n\nGiven a column number, return its alphabetical column id. For example, given 1, return\n\"A\". 
Given 27, return \"AA\".\n\"\"\"\n\n\ndef get_column_name(num: int) -> str:\n result = \"\"\n while num > 0:\n result = chr(64 + (num % 26)) + result\n num = num // 26\n return result\n\n\nif __name__ == \"__main__\":\n print(get_column_name(1))\n print(get_column_name(27))\n print(get_column_name(30))\n print(get_column_name(53))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(log(n))\nSPACE COMPLEXITY: O(log(n))\n\"\"\"\n", "id": "4367116", "language": "Python", "matching_score": 0.8927715420722961, "max_stars_count": 70, "path": "Solutions/212.py" }, { "content": "\"\"\"\nProblem:\n\nGiven n numbers, find the greatest common denominator between them.\n\nFor example, given the numbers [42, 56, 14], return 14.\n\"\"\"\n\nfrom math import gcd as gcd_of_2\nfrom typing import List\n\n\ndef gcd(nums: List[int]) -> int:\n if not nums:\n return None\n\n result = nums[0]\n for num in nums[1:]:\n result = gcd_of_2(result, num)\n return result\n\n\nif __name__ == \"__main__\":\n print(gcd([42, 56, 14]))\n print(gcd([3, 5]))\n print(gcd([9, 15]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n log(n))\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "8450155", "language": "Python", "matching_score": 0.19559486210346222, "max_stars_count": 70, "path": "Solutions/184.py" }, { "content": "\"\"\"\nProblem:\n\nYou are going on a road trip, and would like to create a suitable music playlist. The\ntrip will require N songs, though you only have M songs downloaded, where M < N. A\nvalid playlist should select each song at least once, and guarantee a buffer of B songs\nbetween repeats.\n\nGiven N, M, and B, determine the number of valid playlists.\n\"\"\"\n\n\ndef get_num_of_valid_playlist(N: int, M: int, B: int) -> int:\n # possible ways of selecting each song = [\n # (N), (N - 1), (N - 2) ... (N - B), (N - B), (N - B), ... till M songs\n # ]\n if B >= N:\n return 0\n result = 1\n curr = N\n for i in range(M):\n result = result * curr\n # after B songs, 1 new song will be available and 1 will be off limits, so the\n # number of available songs will be locked at (N - B)\n if i < B:\n curr -= 1\n return result\n\n\nif __name__ == \"__main__\":\n # (1, 2, 1), (2, 1, 2)\n print(get_num_of_valid_playlist(2, 3, 1))\n\n # (1, 2, 3, 1), (1, 3, 2, 1),\n # (2, 1, 3, 2), (2, 3, 1, 2),\n # (3, 1, 2, 3), (3, 2, 1, 3)\n print(get_num_of_valid_playlist(3, 4, 2))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(m)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "11509785", "language": "Python", "matching_score": 0.9582595229148865, "max_stars_count": 70, "path": "Solutions/318.py" }, { "content": "\"\"\"\nProblem:\n\nYou have access to ranked lists of songs for various users. Each song is represented as\nan integer, and more preferred songs appear earlier in each list. For example, the list\n[4, 1, 7] indicates that a user likes song 4 the best, followed by songs 1 and 7.\n\nGiven a set of these ranked lists, interleave them to create a playlist that satisfies\neveryone's priorities.\n\nFor example, suppose your input is {[1, 7, 3], [2, 1, 6, 7, 9], [3, 9, 5]}. 
In this\ncase a satisfactory playlist could be [2, 1, 6, 7, 3, 9, 5].\n\"\"\"\n\nfrom typing import List\n\nfrom DataStructures.PriorityQueue import MinPriorityQueue\n\n\ndef interleave_playlist(playlists: List[List[int]]) -> List[int]:\n queue = MinPriorityQueue()\n result = []\n # priority queue generation\n # offset used to ensure that in case a song occurs a 2nd time (in different\n # playlists), the priorities for all the songs in the 2nd playlist get offset\n for playlist in playlists:\n offset = 0\n for priority, song in enumerate(playlist):\n if song not in queue:\n queue.push(song, offset + priority)\n else:\n old_priority = queue.get_priority(song)\n offset += max(priority, old_priority)\n queue.update_key(song, offset + priority)\n # priority queue update\n # updating the queue is necessary to ensure that if a song (occurring a 2nd time\n # in a different playlist) gets pushed down the queue, all the songs in the\n # playlist (where the song appeared 1st) also get pushed down\n for playlist in playlists:\n offset = 0\n for priority, song in enumerate(playlist):\n old_priority = queue.get_priority(song)\n if old_priority > priority:\n offset = max(offset, old_priority - priority)\n queue.update_key(song, priority + offset)\n\n while not queue.is_empty():\n result.append(queue.extract_min())\n return result\n\n\nif __name__ == \"__main__\":\n print(interleave_playlist([[1, 7, 3], [2, 1, 6, 7, 9], [3, 9, 5]]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n x log(n))\nSPACE COMPLEXITY: O(n)\n[n = total number of songs across the playlists]\n\"\"\"\n", "id": "7155668", "language": "Python", "matching_score": 2.1943845748901367, "max_stars_count": 70, "path": "Solutions/360.py" }, { "content": "\"\"\"\nProblem:\n\nCreate a data structure that performs all the following operations in O(1) time:\n\nplus: Add a key with value 1. If the key already exists, increment its value by one.\nminus: Decrement the value of a key. 
If the key's value is currently 1, remove it.\nget_max: Return a key with the highest value.\nget_min: Return a key with the lowest value.\n\"\"\"\n\nfrom sys import maxsize\n\nfrom DataStructures.PriorityQueue import MaxPriorityQueue, MinPriorityQueue\n\n\nclass MagicDS:\n def __init__(self) -> None:\n self.map = {}\n self.max_priority_queue = MaxPriorityQueue()\n self.min_priority_queue = MinPriorityQueue()\n\n def plus(self, elem: int) -> None:\n # runs in O(log(n)) [O(1) on Amortized analysis]\n if elem not in self.map:\n self.map[elem] = 1\n self.max_priority_queue.push(elem, 1)\n self.min_priority_queue.push(elem, 1)\n else:\n self.map[elem] += 1\n self.max_priority_queue.update_key(elem, self.map[elem])\n self.min_priority_queue.update_key(elem, self.map[elem])\n\n def minus(self, elem: int) -> None:\n # runs in O(log(n)) [O(1) on Amortized analysis]\n if elem not in self.map:\n raise ValueError(\"Cannot decrement a non-existing key\")\n if self.map[elem] == 1:\n del self.map[elem]\n self.max_priority_queue.update_key(elem, maxsize)\n self.max_priority_queue.extract_max()\n self.min_priority_queue.update_key(elem, 0)\n self.min_priority_queue.extract_min()\n else:\n self.map[elem] -= 1\n self.max_priority_queue.update_key(elem, self.map[elem])\n self.min_priority_queue.update_key(elem, self.map[elem])\n\n def get_max(self) -> int:\n # runs in O(1)\n return self.max_priority_queue.peek_max()\n\n def get_min(self) -> int:\n # runs in O(1)\n return self.min_priority_queue.peek_min()\n\n\nif __name__ == \"__main__\":\n ds = MagicDS()\n\n ds.plus(1)\n ds.plus(1)\n ds.plus(1)\n ds.plus(2)\n ds.plus(2)\n ds.plus(3)\n\n print(ds.get_max())\n print(ds.get_min())\n\n ds.minus(1)\n ds.minus(1)\n ds.minus(1)\n\n print(ds.get_max())\n print(ds.get_min())\n", "id": "3144466", "language": "Python", "matching_score": 1.380000114440918, "max_stars_count": 70, "path": "Solutions/358.py" }, { "content": "\"\"\"\nProblem:\n\nFind the maximum of two numbers without using any if-else statements, branching, or\ndirect comparisons.\n\"\"\"\n\n\ndef get_max(num1: int, num2: int) -> int:\n return num1 ^ ((num1 ^ num2) & -(num1 < num2))\n\n\nif __name__ == \"__main__\":\n print(get_max(1, 5))\n print(get_max(4, 3))\n print(get_max(-3, 6))\n print(get_max(5, -4))\n print(get_max(-4, -2))\n print(get_max(-3, -6))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(1)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "8541251", "language": "Python", "matching_score": 0.7903547286987305, "max_stars_count": 70, "path": "Solutions/248.py" }, { "content": "\"\"\"\nProblem:\n\nGiven an array of integers in which two elements appear exactly once and all other\nelements appear exactly twice, find the two elements that appear only once.\n\nFor example, given the array [2, 4, 6, 8, 10, 2, 6, 10], return 4 and 8. 
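(Here 2, 6, and 10 each appear twice and cancel out, leaving 4 and 8 as the\ntwo unique values.) 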
The order does\nnot matter.\n\nFollow-up: Can you do this in linear time and constant space?\n\"\"\"\n\nfrom typing import List, Tuple\n\n\ndef get_uniques(arr: List[int]) -> Tuple[int, int]:\n xor_result = 0\n for val in arr:\n xor_result = xor_result ^ val\n # using the rightmost set bit as mask to segregate the array of numbers into 2 sets\n # performing xor for num1 and num2 based on the set to which they belong to (the 2\n # sets are based on whether a number has rightmost_set_bit of the xor result 1 or 0)\n rightmost_set_bit = xor_result & ~(xor_result - 1)\n num1, num2 = 0, 0\n for val in arr:\n if val & rightmost_set_bit:\n num1 = num1 ^ val\n else:\n num2 = num2 ^ val\n return num1, num2\n\n\nif __name__ == \"__main__\":\n print(get_uniques([2, 4, 6, 8, 10, 2, 6, 10]))\n print(get_uniques([2, 4, 8, 8, 10, 2, 6, 10]))\n print(get_uniques([2, 3, 8, 8, 10, 2, 1, 10]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "3690886", "language": "Python", "matching_score": 1.3302650451660156, "max_stars_count": 70, "path": "Solutions/140.py" }, { "content": "\"\"\"\nProblem:\n\nGiven an array of strictly the characters 'R', 'G', and 'B', segregate the values of\nthe array so that all the Rs come first, the Gs come second, and the Bs come last. You\ncan only swap elements of the array.\n\nDo this in linear time and in-place.\n\nFor example, given the array ['G', 'B', 'R', 'R', 'B', 'R', 'G'], it should become\n['R', 'R', 'R', 'G', 'G', 'B', 'B'].\n\"\"\"\n\nfrom typing import List\n\n\ndef segregate(arr: List[str]) -> None:\n length = len(arr)\n pos = 0\n # pass for segregating \"R\"s\n for i in range(length):\n if arr[i] == \"R\":\n arr[i], arr[pos] = arr[pos], arr[i]\n pos += 1\n # pass for segregating \"G\"s\n for i in range(pos, length):\n if arr[i] == \"G\":\n arr[i], arr[pos] = arr[pos], arr[i]\n pos += 1\n\n\nif __name__ == \"__main__\":\n arr = [\"G\", \"B\", \"R\", \"R\", \"B\", \"R\", \"G\"]\n segregate(arr)\n print(arr)\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "414492", "language": "Python", "matching_score": 0.6301417946815491, "max_stars_count": 70, "path": "Solutions/035.py" }, { "content": "\"\"\"\nProblem:\n\nThe edit distance between two strings refers to the minimum number of character\ninsertions, deletions, and substitutions required to change one string to the other.\nFor example, the edit distance between \"kitten\" and \"sitting\" is three: substitute the\n\"k\" for \"s\", substitute the \"e\" for \"i\", and append a \"g\".\n\nGiven two strings, compute the edit distance between them.\n\"\"\"\n\n\ndef get_string_distance(str1: str, str2: str) -> int:\n n = len(str1)\n m = len(str2)\n dp = [[0 for x in range(m + 1)] for x in range(n + 1)]\n # generating the look-up table in bottom up manner\n for i in range(n + 1):\n for j in range(m + 1):\n # str1 empty (insert all characters of second string)\n if i == 0:\n dp[i][j] = j\n # str2 empty (remove all characters of second string)\n elif j == 0:\n dp[i][j] = i\n # last characters are same (ignore last char and recurse for remaining\n # string)\n elif str1[i - 1] == str2[j - 1]:\n dp[i][j] = dp[i - 1][j - 1]\n # last character are different (consider all possibilities and select the\n # minimum)\n else:\n dp[i][j] = 1 + min(\n dp[i][j - 1], # insertion\n dp[i - 1][j], # deletion\n dp[i - 1][j - 1], # replacement\n )\n return dp[n][m]\n\n\nif __name__ == \"__main__\":\n print(get_string_distance(\"kitten\", 
\"sitting\"))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n x m)\nSPACE COMPLEXITY: O(n x m)\n\"\"\"\n", "id": "4186240", "language": "Python", "matching_score": 1.3860280513763428, "max_stars_count": 70, "path": "Solutions/031.py" }, { "content": "####################################################\n# IMPORTS (LOCAL) ##################################\n####################################################\n\nfrom blog.models import BlogPost, User\nfrom blog import db\n\n####################################################\n# FUNCTION TO GET THE NUMBER OF DIFFERENT CHARACTERS\n####################################################\n\ndef num_char_different(str1, str2):\n if (str1 == str2):\n return 0\n\n if (not str1):\n return len(str2)\n elif (not str2):\n return len(str1)\n \n if (str1[0] == str2[0]):\n return num_char_different(str1[1:], str2[1:])\n \n str1_rem_val = num_char_different(str1[1:], str2)\n str2_rem_val = num_char_different(str1, str2[1:])\n both_rem_val = num_char_different(str1[1:], str2[1:])\n\n return 1 + min(str1_rem_val, str2_rem_val, both_rem_val)\n\n####################################################\n# FUNCTION TO GET THE SIMILARITY ###################\n####################################################\n\ndef similarity(str1, str2):\n diff = num_char_different(str1.lower(), str2.lower())\n length = max(len(str1), len(str2))\n\n return (1 - (diff / length))\n\n####################################################\n# FUNCTION TO GET THE MATCHING BLOGS ###############\n####################################################\n\ndef get_matches_blogs(string):\n res = []\n id_title = db.engine.execute(\n 'select id, title\\\n from BlogPost'\n )\n for identifier, title in id_title:\n sim = similarity(string, title[:8])\n if (sim > 0.65):\n res.append((identifier, sim))\n \n res.sort(reverse=True, key=lambda x: x[1])\n return BlogPost.query.filter(BlogPost.id.in_([val[0] for val in res]))\n\n####################################################\n# FUNCTION TO GET THE MATCHING USERS ###############\n####################################################\n\ndef get_matches_users(string):\n res = []\n id_username = db.engine.execute(\n 'select id, username\\\n from users'\n )\n for identifier, username in id_username:\n sim = similarity(string, username[:8])\n if (sim > 0.65):\n res.append((identifier, sim))\n \n res.sort(reverse=True, key=lambda x: x[1])\n\n return User.query.filter(User.id.in_([val[0] for val in res]))\n\n####################################################\n# FUNCTION TO GET THE MATCHING BLOGS + USERS #######\n####################################################\n\ndef search(string):\n users = get_matches_users(string[:8])[:12]\n blogs = get_matches_blogs(string[:8])[:9]\n \n return users, blogs", "id": "299434", "language": "Python", "matching_score": 1.0203088521957397, "max_stars_count": 3, "path": "blog/core/search_engine.py" }, { "content": "####################################################\r\n# IMPORTS (LOCAL) ##################################\r\n####################################################\r\n\r\nfrom hashlib import sha256\r\nfrom pickle import load, dump\r\n\r\n####################################################\r\n# GET ADMIN DETAILS (HELPER) #######################\r\n####################################################\r\n\r\ndef get_admin_cred():\r\n with open('data.dat', 'rb') as data:\r\n admin = load(data)\r\n\r\n return admin\r\n\r\n####################################################\r\n# UPDATE ADMIN DETAILS (HELPER) 
####################\r\n####################################################\r\n\r\ndef update_admin_cred(username=None, password=None):\r\n admin = get_admin_cred()\r\n user = {}\r\n\r\n if (username):\r\n user['user'] = sha256(username.encode()).hexdigest()\r\n else:\r\n user['user'] = admin['user']\r\n \r\n if (password):\r\n user['password'] = sha256(password.encode()).hexdigest()\r\n else:\r\n user['password'] = admin['password']\r\n \r\n with open('data.dat', 'wb') as data:\r\n dump(user, data)\r\n\r\n####################################################\r\n# VALIDATE ADMIN DETAILS (HELPER) ##################\r\n####################################################\r\n\r\ndef check_admin_cred(username, password):\r\n user = {'user': sha256(username.encode()).hexdigest(), \r\n 'password': sha256(password.encode()).hexdigest()}\r\n \r\n admin = get_admin_cred()\r\n \r\n if (admin['user'] == user['user'] and admin['password'] == user['password']):\r\n return True\r\n else:\r\n return False", "id": "6039264", "language": "Python", "matching_score": 0.7554394006729126, "max_stars_count": 2, "path": "Karmatek/api/credentials_confirm.py" }, { "content": "####################################################\n# FUNCTION TO CHECK IF A PASSWORD IS STRONG ########\n####################################################\n\ndef is_strong(password):\n upper = False\n lower = False\n symbol = False\n number = False\n\n if (len(password) < 6):\n return False\n\n for char in password:\n if (char.isdigit()):\n number = True\n elif (char.isupper()):\n upper = True\n elif (char.islower()):\n lower = True\n elif (not char.isalnum()):\n symbol = True\n \n if (upper and lower and symbol and number):\n return True\n \n return False\n", "id": "9077260", "language": "Python", "matching_score": 0.9133249521255493, "max_stars_count": 3, "path": "blog/users/password.py" }, { "content": "\"\"\"\nProblem:\n\nWhat will this code print out?\n\ndef make_functions():\n flist = []\n\n for i in [1, 2, 3]:\n def print_i():\n print(i)\n flist.append(print_i)\n\n return flist\n\nfunctions = make_functions()\nfor f in functions:\n f()\n\nHow can we make it print out what we apparently want?\n\"\"\"\n\n# The code will print 3 thrice (in 3 lines): each closure captures the same variable\n# i (late binding), and i is 3 by the time the functions are called\n\n\ndef make_functions():\n flist = []\n\n for i in [1, 2, 3]:\n\n def print_i(i):\n print(i)\n\n flist.append((print_i, i))\n return flist\n\n\nfunctions = make_functions()\nfor f, i in functions:\n f(i)\n", "id": "9515905", "language": "Python", "matching_score": 0.469612181186676, "max_stars_count": 70, "path": "Solutions/188.py" }, { "content": "\"\"\"\nProblem:\n\nWhat does the below code snippet print out? How can we fix the anonymous functions to\nbehave as we'd expect?\n\nfunctions = []\nfor i in range(10):\n functions.append(lambda : i)\n\nfor f in functions:\n print(f())\n\"\"\"\n\n# The code is expected to print 9, 9, ..., 9 (10 times) as the value of i is set to 9\n# at the end of the 1st loop and is not changed while iterating through the 2nd loop.\n# Considering we plan to print 0 to 9, we can set and update the value of i every\n# iteration\n\nfunctions = []\nfor i in range(10):\n functions.append(lambda: i)\n\ni = 0 # MODIFICATION\nfor f in functions:\n print(f())\n i += 1 # MODIFICATION\n", "id": "2924210", "language": "Python", "matching_score": 0.4935058355331421, "max_stars_count": 70, "path": "Solutions/091.py" }, { "content": "\"\"\"\nProblem:\n\nYou have 100 fair coins and you flip them all at the same time. 
Any that come up tails\nyou set aside. The ones that come up heads you flip again. How many rounds do you\nexpect to play before only one coin remains?\n\nWrite a function that, given 'n', returns the number of rounds you'd expect to play\nuntil one coin remains.\n\"\"\"\n\nfrom math import log2, ceil\n\n\ndef expectation(n: int) -> int:\n # since the number of coins is expected (mean) to be halved on each toss, we use\n # log2\n # ceil is used to round it off to the next larger integer as number of tosses\n # cannot be a fraction\n return ceil(log2(n + 1))\n\n\nif __name__ == \"__main__\":\n print(expectation(0))\n print(expectation(1))\n print(expectation(2))\n print(expectation(100))\n print(expectation(200))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(1)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "2472754", "language": "Python", "matching_score": 1.0895146131515503, "max_stars_count": 70, "path": "Solutions/124.py" }, { "content": "\"\"\"\nProblem:\n\nYou are given a string consisting of the letters x and y, such as xyxxxyxyy. In\naddition, you have an operation called flip, which changes a single x to y or vice\nversa.\n\nDetermine how many times you would need to apply this operation to ensure that all x's\ncome before all y's. In the preceding example, it suffices to flip the second and sixth\ncharacters, so you should return 2.\n\"\"\"\n\nfrom sys import maxsize\n\n\ndef get_minimum_flips(string: str) -> int:\n length = len(string)\n # lookup table for dp\n flips_from_left = [0 for i in range(length)]\n flips_from_right = [0 for i in range(length)]\n # updating flips from left\n flips = 0\n for i in range(length):\n if string[i] == \"y\":\n flips = flips + 1\n flips_from_left[i] = flips\n # updating flips from right\n flips = 0\n for i in range(length - 1, -1, -1):\n if string[i] == \"x\":\n flips = flips + 1\n flips_from_right[i] = flips\n # generating the minimum number of flips (using minimum flips is the minimum flips\n # on the left + minimum flips on the right)\n minFlips = maxsize\n for i in range(1, length):\n minFlips = min(minFlips, flips_from_left[i - 1] + flips_from_right[i])\n return minFlips\n\n\nif __name__ == \"__main__\":\n print(get_minimum_flips(\"xyxxxyxyy\"))\n print(get_minimum_flips(\"xyxxxyxxxxxxxxxxxyyyyyyyyyyyyyyyx\"))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "68247", "language": "Python", "matching_score": 1.3098207712173462, "max_stars_count": 70, "path": "Solutions/331.py" }, { "content": "\"\"\"\nProblem:\n\nYou are given an array of length N, where each element i represents the number of ways\nwe can produce i units of change. For example, [1, 0, 1, 1, 2] would indicate that\nthere is only one way to make 0, 2, or 3 units, and two ways of making 4 units.\n\nGiven such an array, determine the denominations that must be in use. 
In the case\nabove, for example, there must be coins with value 2, 3, and 4.\n\"\"\"\n\nfrom typing import List\n\n\ndef count_ways_to_generate_change(changes: List[int], target: int) -> int:\n length = len(changes)\n if not length:\n return 0\n table = [[0 for x in range(length)] for x in range(target + 1)]\n for i in range(length):\n table[0][i] = 1\n for i in range(1, target + 1):\n for j in range(length):\n if i - changes[j] >= 0:\n x = table[i - changes[j]][j]\n else:\n x = 0\n if j >= 1:\n y = table[i][j - 1]\n else:\n y = 0\n table[i][j] = x + y\n return table[target][length - 1]\n\n\ndef get_changes(num_ways_to_get_change: List[int]) -> List[int]:\n length = len(num_ways_to_get_change)\n changes_list = []\n\n for i in range(1, length):\n if num_ways_to_get_change[i] > 0:\n count = count_ways_to_generate_change(changes_list, i)\n if count == 0 or count + 1 == num_ways_to_get_change[i]:\n changes_list.append(i)\n return changes_list\n\n\nif __name__ == \"__main__\":\n print(get_changes([1, 0, 1, 1, 2]))\n print(get_changes([1, 0, 1, 1, 2, 1, 3]))\n print(get_changes([1, 0, 1, 1, 2, 1, 4]))\n print(get_changes([1, 0, 1, 1, 2, 1, 4, 2]))\n print(get_changes([1, 0, 1, 1, 2, 1, 4, 3]))\n\n\n\"\"\"\nSPECS:\nTIME COMPLEXITY: O(n ^ 3)\nSPACE COMPLEXITY: O(n ^ 2)\n[n = number of elements]\n\"\"\"\n", "id": "12707952", "language": "Python", "matching_score": 1.4657729864120483, "max_stars_count": 70, "path": "Solutions/316.py" }, { "content": "\"\"\"\nProblem:\n\nFind the minimum number of coins required to make n cents.\n\nYou can use standard American denominations, that is, 1¢, 5¢, 10¢, and 25¢.\n\nFor example, given n = 16, return 3 since we can make it with a 10¢, a 5¢, and a 1¢.\n\"\"\"\n\nfrom typing import List\n\n\ndef calc_num_coins(target: int, denominations: List[int] = [1, 5, 10, 25]) -> int:\n # prerequisite: denominations sorted in ascending order\n length = len(denominations)\n count = 0\n for i in range(length - 1, -1, -1):\n count += target // denominations[i]\n target = target % denominations[i]\n if target == 0:\n break\n if target != 0:\n raise ValueError(\"Target cannot be reached by using the supplied denominations\")\n return count\n\n\nif __name__ == \"__main__\":\n print(calc_num_coins(16))\n print(calc_num_coins(90))\n print(calc_num_coins(93))\n print(calc_num_coins(100))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "8015801", "language": "Python", "matching_score": 0.7382785677909851, "max_stars_count": 70, "path": "Solutions/138.py" }, { "content": "\"\"\"\nProblem:\n\nYou are given a 2-d matrix where each cell represents number of coins in that cell.\nAssuming we start at matrix[0][0], and can only move right or down, find the maximum\nnumber of coins you can collect by the bottom right corner.\n\nFor example, in this matrix\n\n0 3 1 1\n2 0 0 4\n1 5 3 1\nThe most we can collect is 0 + 2 + 1 + 5 + 3 + 1 = 12 coins.\n\"\"\"\n\nfrom typing import List\n\n\ndef get_max_coins(matrix: List[List[int]]) -> int:\n n = len(matrix)\n m = len(matrix[0])\n # accumulate the first row and first column, each of which has only one way in\n for j in range(1, m):\n matrix[0][j] += matrix[0][j - 1]\n for i in range(1, n):\n matrix[i][0] += matrix[i - 1][0]\n # generating the maximum number of coins using dynamic programming\n for i in range(1, n):\n for j in range(1, m):\n matrix[i][j] += max(matrix[i - 1][j], matrix[i][j - 1])\n return matrix[n - 1][m - 1]\n\n\nif __name__ == \"__main__\":\n matrix = [\n [0, 3, 1, 1],\n [2, 0, 0, 4],\n [1, 5, 3, 1]\n ]\n print(get_max_coins(matrix))\n\n matrix = [\n [0, 3, 1, 1],\n [2, 8, 9, 4],\n [1, 5, 3, 1]\n ]\n print(get_max_coins(matrix))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n x m)\nSPACE COMPLEXITY: O(1) 
[modifying the matrix in place]\n\"\"\"\n", "id": "11811956", "language": "Python", "matching_score": 1.3517074584960938, "max_stars_count": 70, "path": "Solutions/122.py" }, { "content": "\"\"\"\nProblem:\n\nGiven an N by N matrix, rotate it by 90 degrees clockwise.\n\nFor example, given the following matrix:\n\n[[1, 2, 3],\n [4, 5, 6],\n [7, 8, 9]]\n\nyou should return:\n\n[[7, 4, 1],\n [8, 5, 2],\n [9, 6, 3]]\n\nFollow-up: What if you couldn't use any extra space?\n\"\"\"\n\nfrom numpy import array\nfrom typing import List\n\nMatrix = List[List[int]]\n\n\ndef rotate_matrix(matrix: Matrix) -> Matrix:\n    num_layers = len(matrix) // 2\n    max_index = len(matrix) - 1\n    # rotating the matrix\n    for layer in range(num_layers):\n        for index in range(layer, max_index - layer):\n            (\n                matrix[layer][index],\n                matrix[max_index - index][layer],\n                matrix[max_index - layer][max_index - index],\n                matrix[index][max_index - layer],\n            ) = (\n                matrix[max_index - index][layer],\n                matrix[max_index - layer][max_index - index],\n                matrix[index][max_index - layer],\n                matrix[layer][index],\n            )\n    return matrix\n\n\nif __name__ == \"__main__\":\n    matrix = [\n        [1, 2, 3],\n        [4, 5, 6],\n        [7, 8, 9]\n    ]\n    print(array(matrix))\n    print(array(rotate_matrix(matrix)))\n    print()\n\n    matrix = [\n        [1, 2, 3, 4],\n        [5, 6, 7, 8],\n        [9, 10, 11, 12],\n        [13, 14, 15, 16]\n    ]\n    print(array(matrix))\n    print(array(rotate_matrix(matrix)))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n x m)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "9199406", "language": "Python", "matching_score": 0.7034317851066589, "max_stars_count": 70, "path": "Solutions/168.py" }, { "content": "\"\"\"\nProblem:\n\nThere are N couples sitting in a row of length 2 * N. They are currently ordered\nrandomly, but would like to rearrange themselves so that each couple's partners can sit\nside by side. What is the minimum number of swaps necessary for this to happen?\n\"\"\"\n\n# The range of the swaps is [0, ceil(N / 2)]\n\n\nfrom typing import List\n\n\ndef get_desired_index(curr_index: int) -> int:\n    if curr_index % 2 == 0:\n        return curr_index + 1\n    return curr_index - 1\n\n\ndef couple_pairing(array: List[int]) -> int:\n    if array is None or (len(array) % 2) != 0:\n        return 0\n    hash_table = {}\n    n_swaps = 0\n\n    for index, element in enumerate(array):\n        if element in hash_table:\n            desired_index = hash_table[element]\n            value_at_desired_index = array[desired_index]\n            if value_at_desired_index != element:\n                array[index], array[desired_index] = array[desired_index], array[index]\n                n_swaps += 1\n                hash_table[value_at_desired_index] = get_desired_index(index)\n            continue\n        hash_table[element] = get_desired_index(index)\n    return n_swaps\n\n\nif __name__ == \"__main__\":\n    print(couple_pairing([2, 1, 2, 3, 1, 3]))\n    print(couple_pairing([3, 2, 1, 1, 2, 3]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "7570874", "language": "Python", "matching_score": 1.0150036811828613, "max_stars_count": 70, "path": "Solutions/240.py" }, { "content": "\"\"\"\nProblem:\n\nIn academia, the h-index is a metric used to calculate the impact of a researcher's\npapers. It is calculated as follows:\n\nA researcher has index h if at least h of her N papers have h citations each. If there\nare multiple h satisfying this formula, the maximum is chosen.\n\nFor example, suppose N = 5, and the respective citations of each paper are\n[4, 3, 0, 1, 5]. 
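Viewed in\ndescending order this is [5, 4, 3, 1, 0], which makes the count easy to follow (a\none-line illustration using only the built-in sorted):\n\nsorted([4, 3, 0, 1, 5], reverse=True)  # -> [5, 4, 3, 1, 0]\n\n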
Then the h-index would be 3, since the researcher has 3 papers with at\nleast 3 citations.\n\nGiven a list of paper citations of a researcher, calculate their h-index.\n\"\"\"\n\n\nfrom typing import List\n\n\ndef get_h_index(citations: List[int]) -> int:\n    citations.sort(reverse=True)\n    for index, citation in enumerate(citations):\n        if index >= citation:\n            # implies that there are 'index' papers with at least 'citation' citations\n            return index\n    # every paper has more citations than its rank, so h equals the paper count\n    return len(citations)\n\n\nif __name__ == \"__main__\":\n    print(get_h_index([4, 3, 0, 1, 5]))\n    print(get_h_index([4, 1, 0, 1, 1]))\n    print(get_h_index([4, 4, 4, 5, 4]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n x log(n))\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "7428749", "language": "Python", "matching_score": 0.6863962411880493, "max_stars_count": 70, "path": "Solutions/241.py" }, { "content": "\"\"\"\nProblem 74:\n\nSuppose you have a multiplication table that is N by N. That is, a 2D array where the\nvalue at the i-th row and j-th column is (i + 1) * (j + 1) (if 0-indexed) or i * j\n(if 1-indexed).\n\nGiven integers N and X, write a function that returns the number of times X appears as\na value in an N by N multiplication table.\n\nFor example, given N = 6 and X = 12, you should return 4, since the multiplication\ntable looks like this:\n\n1\t2\t3\t4\t5\t6\n2\t4\t6\t8\t10\t12\n3\t6\t9\t12\t15\t18\n4\t8\t12\t16\t20\t24\n5\t10\t15\t20\t25\t30\n6\t12\t18\t24\t30\t36\nAnd there are 4 12's in the table.\n\"\"\"\n\n\ndef calculate_frequency(n: int, x: int) -> int:\n    count = 0\n    for i in range(1, n + 1):\n        for j in range(1, i + 1):\n            if i * j == x:\n                if i == j:\n                    count += 1\n                else:\n                    count += 2\n    return count\n\n\nif __name__ == \"__main__\":\n    print(calculate_frequency(6, 12))\n    print(calculate_frequency(1, 1))\n    print(calculate_frequency(2, 4))\n    print(calculate_frequency(3, 6))\n    print(calculate_frequency(3, 3))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n ^ 2)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "1608830", "language": "Python", "matching_score": 1.2833824157714844, "max_stars_count": 70, "path": "Solutions/074.py" }, { "content": "\"\"\"\nProblem:\n\nYou are given an N by M 2D matrix of lowercase letters. Determine the minimum number of\ncolumns that can be removed to ensure that each row is ordered from top to bottom\nlexicographically. That is, the letter at each column is lexicographically later as you\ngo down each row. It does not matter whether each row itself is ordered\nlexicographically.\n\nFor example, given the following table:\n\ncba\ndaf\nghi\nThis is not ordered because of the a in the center. 
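Each column can be checked\nindependently, top to bottom (an illustrative check mirroring the comparison in the\nfunction below):\n\ncolumn = \"bah\"  # the middle column of the table\nall(column[row] <= column[row + 1] for row in range(len(column) - 1))  # -> False\n\n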
We can remove the second column to\nmake it ordered:\n\nca\ndf\ngi\nSo your function should return 1, since we only needed to remove 1 column.\n\nAs another example, given the following table:\n\nabcdef\nYour function should return 0, since the rows are already ordered (there's only one\nrow).\n\nAs another example, given the following table:\n\nzyx\nwvu\ntsr\nYour function should return 3, since we would need to remove all the columns to order\nit.\n\"\"\"\n\nfrom typing import List\n\n\ndef get_minimum_column_removals(matrix: List[str]) -> int:\n    rows, columns = len(matrix), len(matrix[0])\n    count = 0\n    for column in range(columns):\n        # checking if the column is ordered lexicographically\n        for row in range(rows - 1):\n            if matrix[row][column] > matrix[row + 1][column]:\n                count += 1\n                break\n    return count\n\n\nif __name__ == \"__main__\":\n    print(get_minimum_column_removals([\"cba\", \"daf\", \"ghi\"]))\n    print(get_minimum_column_removals([\"abcdef\"]))\n    print(get_minimum_column_removals([\"zyx\", \"wvu\", \"tsr\"]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n x m)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "11105341", "language": "Python", "matching_score": 0.9071764945983887, "max_stars_count": 70, "path": "Solutions/076.py" }, { "content": "\"\"\"\nProblem:\n\nWrite a program that computes the length of the longest common subsequence of three\ngiven strings. For example, given \"epidemiologist\", \"refrigeration\", and\n\"supercalifragilisticexpialodocious\", it should return 5, since the longest common\nsubsequence is \"eieio\".\n\"\"\"\n\n\ndef lcs_of_3(str1: str, str2: str, str3: str) -> int:\n    str1_length = len(str1)\n    str2_length = len(str2)\n    str3_length = len(str3)\n    dp_matrix = [\n        [[0 for i in range(str3_length + 1)] for j in range(str2_length + 1)]\n        for k in range(str1_length + 1)\n    ]\n    # generating the matrix bottom-up\n    for i in range(1, str1_length + 1):\n        for j in range(1, str2_length + 1):\n            for k in range(1, str3_length + 1):\n                if str1[i - 1] == str2[j - 1] and str1[i - 1] == str3[k - 1]:\n                    dp_matrix[i][j][k] = dp_matrix[i - 1][j - 1][k - 1] + 1\n                else:\n                    dp_matrix[i][j][k] = max(\n                        max(dp_matrix[i - 1][j][k], dp_matrix[i][j - 1][k]),\n                        dp_matrix[i][j][k - 1],\n                    )\n    return dp_matrix[str1_length][str2_length][str3_length]\n\n\nif __name__ == \"__main__\":\n    print(\n        lcs_of_3(\n            \"epidemiologist\", \"refrigeration\", \"supercalifragilisticexpialodocious\"\n        )\n    )\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n ^ 3)\nSPACE COMPLEXITY: O(n ^ 3)\n\"\"\"\n", "id": "4905083", "language": "Python", "matching_score": 0.5896136164665222, "max_stars_count": 70, "path": "Solutions/209.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a string, find the longest palindromic contiguous substring. If there are more\nthan one with the maximum length, return any one.\n\nFor example, the longest palindromic substring of \"aabcdcb\" is \"bcdcb\". 
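A candidate substring can be\nverified with a simple slice reversal, the same check used by the is_palindrome\nhelper below:\n\n\"bcdcb\" == \"bcdcb\"[::-1]  # -> True\n\n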
The longest\npalindromic substring of \"bananas\" is \"anana\".\n\"\"\"\n\n\ndef is_palindrome(string: str) -> bool:\n    # helper function to check if a string is a palindrome\n    return string == string[::-1]\n\n\ndef get_longest_palindrome_substring(string: str) -> str:\n    if is_palindrome(string):\n        return string\n    # generating the longest palindromic substring\n    string1 = get_longest_palindrome_substring(string[1:])\n    string2 = get_longest_palindrome_substring(string[:-1])\n    return max(string1, string2, key=len)\n\n\nif __name__ == \"__main__\":\n    print(get_longest_palindrome_substring(\"aabcdcb\"))\n    print(get_longest_palindrome_substring(\"bananas\"))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(2 ^ n) [each call can branch into two subcalls and no memoization\nis used]\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "10523534", "language": "Python", "matching_score": 1.04537832736969, "max_stars_count": 70, "path": "Solutions/046.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a string, find the palindrome that can be made by inserting the fewest number of\ncharacters as possible anywhere in the word. If there is more than one palindrome of\nminimum length that can be made, return the lexicographically earliest one (the first\none alphabetically).\n\nFor example, given the string \"race\", you should return \"ecarace\", since we can add\nthree letters to it (which is the smallest amount to make a palindrome). There are\nseven other palindromes that can be made from \"race\" by adding three letters, but\n\"ecarace\" comes first alphabetically.\n\nAs another example, given the string \"google\", you should return \"elgoogle\".\n\"\"\"\n\n\ndef get_nearest_palindrome(string: str) -> str:\n    if string[::-1] == string:\n        return string\n    # generating the closest palindrome possible\n    length = len(string)\n    if string[0] == string[-1]:\n        return string[0] + get_nearest_palindrome(string[1 : length - 1]) + string[0]\n    # in case the first and last characters differ, candidate palindromes are\n    # generated using each of them\n    pal_1 = string[0] + get_nearest_palindrome(string[1:]) + string[0]\n    pal_2 = string[-1] + get_nearest_palindrome(string[: length - 1]) + string[-1]\n    # if one of the strings is shorter, it is returned\n    if len(pal_1) != len(pal_2):\n        return min(pal_1, pal_2, key=len)\n    # if both strings have the same length, the lexicographically earliest one is\n    # returned\n    return min(pal_1, pal_2)\n\n\nif __name__ == \"__main__\":\n    print(get_nearest_palindrome(\"race\"))\n    print(get_nearest_palindrome(\"google\"))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(2 ^ n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "1658147", "language": "Python", "matching_score": 1.0754518508911133, "max_stars_count": 70, "path": "Solutions/034.py" }, { "content": "\"\"\"\nProblem:\n\nYou are given a string of length N and a parameter k. The string can be manipulated by\ntaking one of the first k letters and moving it to the end.\n\nWrite a program to determine the lexicographically smallest string that can be created\nafter an unlimited number of moves.\n\nFor example, suppose we are given the string daily and k = 1. 
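With k = 1 every move sends the\nfirst character to the back, so the reachable strings are exactly the rotations (an\nillustrative trace of the generate_next helper defined below):\n\ndaily -> ailyd -> ilyda -> lydai -> ydail -> daily\n\n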
The best we can create in\nthis case is ailyd.\n\"\"\"\n\n\ndef generate_next(string: str, k: int) -> str:\n    return string[k:] + string[:k]\n\n\ndef get_lexicographically_smallest_string(string: str, k: int) -> str:\n    seen = set()\n    result = string\n    curr = generate_next(string, k)\n\n    while curr not in seen:\n        result = min(result, curr)\n        seen.add(curr)\n        curr = generate_next(curr, k)\n    return result\n\n\nif __name__ == \"__main__\":\n    print(get_lexicographically_smallest_string(\"daily\", 1))\n    print(get_lexicographically_smallest_string(\"salloo\", 2))\n    # unlimited number of moves allowed (so the word of length 5 and k = 2 goes round)\n    print(get_lexicographically_smallest_string(\"daily\", 2))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n ^ 2)\nSPACE COMPLEXITY: O(n ^ 2)\n\"\"\"\n", "id": "7036651", "language": "Python", "matching_score": 1.2282167673110962, "max_stars_count": 70, "path": "Solutions/347.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a 2D matrix of characters and a target word, write a function that returns\nwhether the word can be found in the matrix by going left-to-right, or up-to-down.\n\nFor example, given the following matrix:\n\n[['F', 'A', 'C', 'I'],\n ['O', 'B', 'Q', 'P'],\n ['A', 'N', 'O', 'B'],\n ['M', 'A', 'S', 'S']]\nand the target word 'FOAM', you should return true, since it's the leftmost column.\nSimilarly, given the target word 'MASS', you should return true, since it's the last\nrow.\n\"\"\"\n\nfrom typing import List\n\n\ndef check_word_occurrence(matrix: List[List[str]], word: str) -> bool:\n    n = len(matrix)\n    m = len(matrix[0])\n    # check for word occurrence in the rows\n    for i in range(n):\n        row_string = \"\".join(matrix[i])\n        if word in row_string:\n            return True\n    # check for word occurrence in the columns\n    for j in range(m):\n        column_string = \"\"\n        for i in range(n):\n            column_string += matrix[i][j]\n        if word in column_string:\n            return True\n    return False\n\n\nif __name__ == \"__main__\":\n    matrix = [\n        [\"F\", \"A\", \"C\", \"I\"],\n        [\"O\", \"B\", \"Q\", \"P\"],\n        [\"A\", \"N\", \"O\", \"B\"],\n        [\"M\", \"A\", \"S\", \"S\"],\n    ]\n\n    print(check_word_occurrence(matrix, \"FOAM\"))\n    print(check_word_occurrence(matrix, \"MASS\"))\n    print(check_word_occurrence(matrix, \"FORM\"))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n x m)\nSPACE COMPLEXITY: O(n + m)\n\"\"\"\n", "id": "1531530", "language": "Python", "matching_score": 1.0829154253005981, "max_stars_count": 70, "path": "Solutions/063.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a N by M matrix of numbers, print out the matrix in a clockwise spiral.\n\nFor example, given the following matrix:\n\n[[1, 2, 3, 4, 5],\n [6, 7, 8, 9, 10],\n [11, 12, 13, 14, 15],\n [16, 17, 18, 19, 20]]\nYou should print out the following:\n\n1, 2, 3, 4, 5, 10, 15, 20, 19, 18, 17, 16, 11, 6, 7, 8, 9, 14, 13, 12\n\"\"\"\n\nfrom typing import List\n\nMatrix = List[List[int]]\n\n\ndef unwind_matrix_helper(matrix: Matrix, ring: int, n: int, m: int) -> List[int]:\n    current_ring_elems = []\n    # 1st row\n    for i in range(ring, m - ring):\n        current_ring_elems.append(matrix[ring][i])\n    # last column\n    for i in range(ring + 1, n - ring):\n        current_ring_elems.append(matrix[i][m - ring - 1])\n    # last row\n    if n > 1 and m > 1:\n        for i in range(m - ring - 2, ring - 1, -1):\n            current_ring_elems.append(matrix[n - ring - 1][i])\n    # 1st column\n    if n > 1 and m > 1:\n        for i in range(n - ring - 2, ring, -1):\n            current_ring_elems.append(matrix[i][ring])\n    return current_ring_elems\n\n\ndef unwind_matrix(matrix: Matrix) -> List[int]:\n    if not 
matrix:\n        return []\n    n = len(matrix)\n    m = len(matrix[0])\n    unwound_matrix = []\n    if n > 1 and m > 1:\n        # one pass per ring; a matrix has ceil(min(n, m) / 2) concentric rings\n        for i in range((min(n, m) + 1) // 2):\n            unwound_matrix.extend(unwind_matrix_helper(matrix, i, n, m))\n    else:\n        unwound_matrix = unwind_matrix_helper(matrix, 0, n, m)\n    return unwound_matrix\n\n\nif __name__ == \"__main__\":\n    matrix = [\n        [1, 2, 3, 4, 5],\n        [6, 7, 8, 9, 10],\n        [11, 12, 13, 14, 15],\n        [16, 17, 18, 19, 20],\n    ]\n    for elem in unwind_matrix(matrix):\n        print(elem)\n\n    print()\n    matrix = [[1, 2, 3], [4, 5, 6]]\n    for elem in unwind_matrix(matrix):\n        print(elem)\n\n    print()\n    matrix = [[1, 4], [2, 5], [3, 6]]\n    for elem in unwind_matrix(matrix):\n        print(elem)\n\n    print()\n    matrix = [[1], [2], [3], [4], [5], [6]]\n    for elem in unwind_matrix(matrix):\n        print(elem)\n\n    print()\n    matrix = [[1, 2, 3]]\n    for elem in unwind_matrix(matrix):\n        print(elem)\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n x m)\nSPACE COMPLEXITY: O(n x m)\n\"\"\"\n", "id": "8652715", "language": "Python", "matching_score": 1.3937625885009766, "max_stars_count": 70, "path": "Solutions/065.py" }, { "content": "\"\"\"\nProblem:\n\nGiven an N by M matrix consisting only of 1's and 0's, find the largest rectangle\ncontaining only 1's and return its area.\n\nFor example, given the following matrix:\n\n[[1, 0, 0, 0],\n [1, 0, 1, 1],\n [1, 0, 1, 1],\n [0, 1, 0, 0]]\nReturn 4.\n\"\"\"\n\nfrom typing import List\n\nMatrix = List[List[int]]\n\n\ndef is_row_extendable(matrix: Matrix, erow: int, scol: int, ecol: int) -> bool:\n    return all(matrix[erow][scol:ecol])\n\n\ndef is_column_extendable(matrix: Matrix, ecol: int, srow: int, erow: int) -> bool:\n    for row in range(srow, erow):\n        if not matrix[row][ecol]:\n            return False\n    return True\n\n\ndef area_helper(\n    matrix: Matrix,\n    num_rows: int,\n    num_cols: int,\n    srow: int,\n    erow: int,\n    scol: int,\n    ecol: int,\n) -> int:\n    current_area = (erow - srow) * (ecol - scol)\n    row_ex_area, col_ex_area = 0, 0\n    # checking if the area can be extended\n    can_extend_row = erow < num_rows and is_row_extendable(matrix, erow, scol, ecol)\n    if can_extend_row:\n        row_ex_area = area_helper(\n            matrix, num_rows, num_cols, srow, erow + 1, scol, ecol\n        )\n    can_extend_col = ecol < num_cols and is_column_extendable(matrix, ecol, srow, erow)\n    if can_extend_col:\n        col_ex_area = area_helper(\n            matrix, num_rows, num_cols, srow, erow, scol, ecol + 1\n        )\n    return max(current_area, row_ex_area, col_ex_area)\n\n\ndef get_max_rect(matrix: Matrix) -> int:\n    if not matrix:\n        return 0\n    # generating the maximum area\n    max_area = 0\n    num_rows, num_cols = len(matrix), len(matrix[0])\n    for i in range(num_rows):\n        for j in range(num_cols):\n            upper_bound_area = (num_rows - i) * (num_cols - j)\n            if matrix[i][j] and upper_bound_area > max_area:\n                area = area_helper(matrix, num_rows, num_cols, i, i + 1, j, j + 1)\n                max_area = max(area, max_area)\n    return max_area\n\n\nif __name__ == \"__main__\":\n    matrix = [\n        [1, 0, 0, 0],\n        [1, 0, 1, 1],\n        [1, 0, 1, 1],\n        [0, 1, 0, 0]\n    ]\n    print(get_max_rect(matrix))\n\n    matrix = [\n        [1, 0, 0, 0],\n        [1, 0, 1, 1],\n        [1, 0, 1, 1],\n        [0, 1, 1, 1]\n    ]\n    print(get_max_rect(matrix))\n\n    matrix = [\n        [1, 1, 1, 1],\n        [1, 1, 1, 1],\n        [1, 1, 1, 1],\n        [1, 1, 1, 1]\n    ]\n    print(get_max_rect(matrix))\n\n    matrix = [\n        [0, 0, 0, 0],\n        [0, 0, 0, 0],\n        [0, 0, 0, 0],\n        [0, 0, 0, 0]\n    ]\n    print(get_max_rect(matrix))\n\n    matrix = [\n        [1, 1, 1, 1],\n        [1, 1, 1, 1],\n        [1, 1, 0, 0],\n        [0, 0, 0, 0]\n    ]\n    print(get_max_rect(matrix))\n\n    matrix = [\n        [1, 1, 0, 0],\n        [1, 0, 0, 0],\n        [1, 0, 0, 0],\n        [1, 0, 0, 0]\n    
]\n    print(get_max_rect(matrix))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O((n x m) ^ 2)\nSPACE COMPLEXITY: O(n + m)\n\"\"\"\n", "id": "12015747", "language": "Python", "matching_score": 1.6597936153411865, "max_stars_count": 70, "path": "Solutions/136.py" }, { "content": "\"\"\"\nProblem:\n\nYou are given a histogram consisting of rectangles of different heights. These heights\nare represented in an input list, such that [1, 3, 2, 5] corresponds to the following\ndiagram:\n\n      x\n      x\n  x   x\n  x x x\nx x x x\nDetermine the area of the largest rectangle that can be formed only from the bars of\nthe histogram. For the diagram above, for example, this would be six, representing the\n2 x 3 area at the bottom right.\n\"\"\"\n\nfrom typing import List\n\nfrom DataStructures.Stack import Stack\n\n\ndef max_area_histogram(histogram: List[int]):\n    stack = Stack()\n    max_area = 0\n    index = 0\n\n    while index < len(histogram):\n        if stack.is_empty() or histogram[stack.peek()] <= histogram[index]:\n            stack.push(index)\n            index += 1\n        else:\n            # if the current bar is lower than top of stack, the area of rectangle\n            # needs to be calculated with the stack top as the smallest (or minimum\n            # height) bar.\n            top_of_stack = stack.pop()\n            area = histogram[top_of_stack] * (\n                (index - stack.peek() - 1) if not stack.is_empty() else index\n            )\n            max_area = max(max_area, area)\n    # calculating the area formed by the indices still in the stack\n    while not stack.is_empty():\n        top_of_stack = stack.pop()\n        area = histogram[top_of_stack] * (\n            (index - stack.peek() - 1) if not stack.is_empty() else index\n        )\n        max_area = max(max_area, area)\n    return max_area\n\n\nif __name__ == \"__main__\":\n    print(max_area_histogram([1, 3, 2, 5]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "4155029", "language": "Python", "matching_score": 1.7234265804290771, "max_stars_count": 70, "path": "Solutions/353.py" }, { "content": "\"\"\"\nProblem:\n\nYou are given an array representing the heights of neighboring buildings on a city\nstreet, from east to west. The city assessor would like you to write an algorithm that\nreturns how many of these buildings have a view of the setting sun, in order to\nproperly value the street.\n\nFor example, given the array [3, 7, 8, 3, 6, 1], you should return 3, since the top\nfloors of the buildings with heights 8, 6, and 1 all have an unobstructed view to the\nwest.\n\nCan you do this using just one forward pass through the array?\n\"\"\"\n\nfrom typing import List\n\nfrom DataStructures.Stack import Stack\n\n\ndef get_view_sunset(arr: List[int]) -> int:\n    # a building can see the sunset only if every building to its west (later in the\n    # array) is shorter, so the stack keeps the heights in strictly descending order\n    stack = Stack()\n    for elem in arr:\n        while not stack.is_empty() and stack.peek() < elem:\n            stack.pop()\n        if stack.is_empty() or stack.peek() > elem:\n            stack.push(elem)\n    return len(stack)\n\n\nif __name__ == \"__main__\":\n    print(get_view_sunset([3, 7, 8, 3, 6, 1]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "5257645", "language": "Python", "matching_score": 1.8484580516815186, "max_stars_count": 70, "path": "Solutions/285.py" }, { "content": "\"\"\"\nProblem:\n\nOn a mysterious island there are creatures known as Quxes which come in three colors:\nred, green, and blue. 
One power of the Qux is that if two of them are standing next to\neach other, they can transform into a single creature of the third color.\n\nGiven N Quxes standing in a line, determine the smallest number of them remaining after\nany possible sequence of such transformations.\n\nFor example, given the input ['R', 'G', 'B', 'G', 'B'], it is possible to end up with a\nsingle Qux through the following steps:\n\n Arrangement | Change\n----------------------------------------\n['R', 'G', 'B', 'G', 'B'] | (R, G) -> B\n['B', 'B', 'G', 'B'] | (B, G) -> R\n['B', 'R', 'B'] | (R, B) -> G\n['B', 'G'] | (B, G) -> R\n['R'] |\n\"\"\"\n\nfrom typing import List\n\nfrom DataStructures.Stack import Stack\n\n\nQUXES = set([\"R\", \"G\", \"B\"])\n\n\ndef generate_new_qux(qux1: str, qux2: str) -> str:\n if qux1 == qux2:\n raise ValueError(\"Cannot form new Qux\")\n result = QUXES - set([qux1, qux2])\n return result.pop()\n\n\ndef get_transformation(arrangement: List[str]) -> List[str]:\n stack = Stack()\n for qux in arrangement:\n if stack.is_empty() or stack.peek() == qux:\n stack.push(qux)\n else:\n qux_last = stack.pop()\n while True:\n # backpropagating in case the previous quxes needs to be updated\n qux = generate_new_qux(qux_last, qux)\n if stack.is_empty() or stack.peek() == qux:\n break\n qux_last = stack.pop()\n stack.push(qux)\n return stack\n\n\nif __name__ == \"__main__\":\n print(get_transformation([\"R\", \"G\", \"B\", \"G\", \"B\"]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "12195948", "language": "Python", "matching_score": 0.9742226600646973, "max_stars_count": 70, "path": "Solutions/290.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a matrix of 1s and 0s, return the number of \"islands\" in the matrix. A 1\nrepresents land and 0 represents water, so an island is a group of 1s that are\nneighboring and their perimeter is surrounded by water.\n\nFor example, this matrix has 4 islands.\n\n1 0 0 0 0\n0 0 1 1 0\n0 1 1 0 0\n0 0 0 0 0\n1 1 0 0 1\n1 1 0 0 1\n\"\"\"\n\nfrom typing import List, Tuple\n\nfrom DataStructures.Queue import Queue\n\nGridShape = Tuple[int, int]\nMatrix = List[List[int]]\nPosition = Tuple[int, int]\n\n\ndef get_neighbours(position: Position, grid_shape: GridShape) -> List[Position]:\n n, m = grid_shape\n i, j = position\n neighbours = []\n position_list = [\n (i - 1, j - 1),\n (i - 1, j),\n (i - 1, j + 1),\n (i, j - 1),\n (i, j + 1),\n (i + 1, j - 1),\n (i + 1, j),\n (i + 1, j + 1),\n ]\n for curr_position in position_list:\n y, x = curr_position\n if 0 <= x < m and 0 <= y < n:\n neighbours.append(curr_position)\n return neighbours\n\n\ndef remove_island(matrix: Matrix, position: Position, grid_shape: GridShape) -> None:\n # using bfs to remove the islands\n queue = Queue()\n queue.enqueue(position)\n\n while not queue.is_empty():\n curr_position = queue.dequeue()\n i, j = curr_position\n if matrix[i][j] == 1:\n matrix[i][j] = 0\n for neighbour in get_neighbours((i, j), grid_shape):\n y, x = neighbour\n if matrix[y][x] == 1:\n queue.enqueue(neighbour)\n\n\ndef island_count(matrix: Matrix) -> int:\n count = 0\n n, m = len(matrix), len(matrix[0])\n\n for i in range(n):\n for j in range(m):\n if matrix[i][j] == 1:\n remove_island(matrix, (i, j), (n, m))\n count += 1\n return count\n\n\nif __name__ == \"__main__\":\n matrix = [\n [1, 0, 0, 0, 0],\n [0, 0, 1, 1, 0],\n [0, 1, 1, 0, 0],\n [0, 0, 0, 0, 0],\n [1, 1, 0, 0, 1],\n [1, 1, 0, 0, 1],\n ]\n print(island_count(matrix))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n x m)\nSPACE 
COMPLEXITY: O(n x m)\n\"\"\"\n", "id": "6277800", "language": "Python", "matching_score": 1.7575918436050415, "max_stars_count": 70, "path": "Solutions/084.py" }, { "content": "\"\"\"\nProblem:\n\nYou are given a 2-d matrix where each cell consists of either /, \\, or an empty space.\nWrite an algorithm that determines into how many regions the slashes divide the space.\n\nFor example, suppose the input for a three-by-six grid is the following:\n\n\\ /\n \\ /\n \\/\nConsidering the edges of the matrix as boundaries, this divides the grid into three\ntriangles, so you should return 3.\n\"\"\"\n\nfrom typing import Set, Tuple\n\n\ndef explore_region(\n position: Tuple[int, int], empty_spaces: Set, nrows: int, ncols: int\n) -> None:\n # dfs helper to remove all adjoining empty spaces for each region\n if position not in empty_spaces:\n return\n # travelling to the adjoining spaces\n empty_spaces.remove(position)\n x, y = position\n if x > 0:\n explore_region((x - 1, y), empty_spaces, nrows, ncols)\n if x < nrows - 1:\n explore_region((x + 1, y), empty_spaces, nrows, ncols)\n if y > 0:\n explore_region((x, y - 1), empty_spaces, nrows, ncols)\n if y < ncols - 1:\n explore_region((x, y + 1), empty_spaces, nrows, ncols)\n\n\ndef get_region_count(matrix: str) -> int:\n nrows, ncols = len(matrix), len(matrix[0])\n empty_spaces = set()\n for row in range(nrows):\n for col in range(ncols):\n if matrix[row][col] == \" \":\n empty_spaces.add((row, col))\n # traversing through the empty spaces\n regions = 0\n while empty_spaces:\n # random position selection\n for pos in empty_spaces:\n position = pos\n break\n explore_region(position, empty_spaces, nrows, ncols)\n regions += 1\n return regions\n\n\nif __name__ == \"__main__\":\n matrix = [\n list(r\"\\ /\"),\n list(r\" \\ / \"),\n list(r\" \\/ \")\n ]\n print(get_region_count(matrix))\n\n matrix = [\n list(r\" /\"),\n list(r\" \\ / \"),\n list(r\" \\/ \")\n ]\n print(get_region_count(matrix))\n\n matrix = [\n list(r\" /\"),\n list(r\" \\ / \"),\n list(r\" \\ \")\n ]\n print(get_region_count(matrix))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(row x column)\nSPACE COMPLEXITY: O(row x column)\n\"\"\"\n", "id": "9337062", "language": "Python", "matching_score": 1.2518337965011597, "max_stars_count": 70, "path": "Solutions/302.py" }, { "content": "\"\"\"\nProblem:\n\nThe transitive closure of a graph is a measure of which vertices are reachable from\nother vertices. 
It can be represented as a matrix M, where M[i][j] == 1 if there is a\npath between vertices i and j, and otherwise 0.\n\nFor example, suppose we are given the following graph in adjacency list form:\n\ngraph = [\n    [0, 1, 3],\n    [1, 2],\n    [2],\n    [3]\n]\nThe transitive closure of this graph would be:\n\n[1, 1, 1, 1]\n[0, 1, 1, 0]\n[0, 0, 1, 0]\n[0, 0, 0, 1]\nGiven a graph, find its transitive closure.\n\"\"\"\n\n\nfrom typing import List, Set\n\n\ndef get_transitive_matrix_helper(\n    origin: int,\n    curr_node: int,\n    graph: List[List[int]],\n    transitive_matrix: List[List[int]],\n    visited: Set[int],\n) -> None:\n    # helper function to generate the transitive matrix using dfs\n    for node in graph[curr_node]:\n        transitive_matrix[origin][node] = 1\n        if node not in visited:\n            visited.add(node)\n            get_transitive_matrix_helper(\n                origin, node, graph, transitive_matrix, visited\n            )\n\n\ndef get_transitive_matrix(graph: List[List[int]]) -> List[List[int]]:\n    num_nodes = len(graph)\n    transitive_matrix = [[0 for _ in range(num_nodes)] for _ in range(num_nodes)]\n    for node in range(num_nodes):\n        get_transitive_matrix_helper(node, node, graph, transitive_matrix, set([node]))\n    return transitive_matrix\n\n\nif __name__ == \"__main__\":\n    graph = [[0, 1, 3], [1, 2], [2], [3]]\n\n    for row in get_transitive_matrix(graph):\n        print(*row)\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n ^ 2)\nSPACE COMPLEXITY: O(n ^ 2)\n\"\"\"\n", "id": "2470854", "language": "Python", "matching_score": 0.9939355850219727, "max_stars_count": 70, "path": "Solutions/255.py" }, { "content": "\"\"\"\nProblem:\n\nGiven an undirected graph represented as an adjacency matrix and an integer k, write a\nfunction to determine whether each vertex in the graph can be colored such that no two\nadjacent vertices share the same color using at most k colors.\n\"\"\"\n\nfrom typing import List, Optional\n\n\ndef is_color_valid(\n    adjacency_matrix: List[List[int]],\n    colors: List[Optional[int]],\n    vertex: int,\n    color: int,\n) -> bool:\n    # a color can be used if no already-colored neighbour of the vertex has it\n    return all(\n        not adjacency_matrix[vertex][neighbour] or colors[neighbour] != color\n        for neighbour in range(vertex)\n    )\n\n\ndef color_graph(\n    adjacency_matrix: List[List[int]],\n    colors: List[Optional[int]],\n    vertex: int,\n    k: int,\n) -> bool:\n    # backtracking helper: try every color on the current vertex and recurse\n    if vertex == len(adjacency_matrix):\n        return True\n    for color in range(k):\n        if is_color_valid(adjacency_matrix, colors, vertex, color):\n            colors[vertex] = color\n            if color_graph(adjacency_matrix, colors, vertex + 1, k):\n                return True\n            colors[vertex] = None\n    return False\n\n\ndef can_color(adjacency_matrix: List[List[int]], k: int) -> bool:\n    vertices = len(adjacency_matrix)\n    return color_graph(adjacency_matrix, [None] * vertices, 0, k)\n\n\nif __name__ == \"__main__\":\n    adjacency_matrix = [\n        [0, 1, 1, 1],\n        [1, 0, 1, 1],\n        [1, 1, 0, 1],\n        [1, 1, 1, 0],\n    ]\n\n    print(can_color(adjacency_matrix, 4))\n    print(can_color(adjacency_matrix, 3))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(k ^ n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "4245949", "language": "Python", "matching_score": 0.7987173795700073, "max_stars_count": 70, "path": "Solutions/056.py" }, { "content": "\"\"\"\nProblem:\n\nA builder is looking to build a row of N houses that can be of K different colors. 
He\nhas a goal of minimizing cost while ensuring that no two neighboring houses are of the\nsame color.\n\nGiven an N by K matrix where the nth row and kth column represents the cost to build\nthe nth house with kth color, return the minimum cost which achieves this goal.\n\"\"\"\n\nfrom typing import List\n\nMatrix = List[List[int]]\n\n\ndef minimize_color_cost_helper(\n    color_matrix: Matrix,\n    results: List,\n    curr_house: int,\n    prev_color: int,\n    curr_cost: int,\n    n: int,\n    k: int,\n) -> None:\n    if curr_house == n:\n        results.append(curr_cost)\n        return\n    # generating all the possible combinations\n    for curr_color in range(k):\n        # avoiding two neighboring houses having the same color\n        if curr_color != prev_color:\n            minimize_color_cost_helper(\n                color_matrix,\n                results,\n                curr_house + 1,\n                curr_color,\n                curr_cost + color_matrix[curr_house][curr_color],\n                n,\n                k,\n            )\n\n\ndef minimize_color_cost(color_matrix: Matrix) -> int:\n    results = []\n    n, k = len(color_matrix), len(color_matrix[0])\n    minimize_color_cost_helper(color_matrix, results, 0, -1, 0, n, k)\n    # returning the minimum cost\n    return min(results)\n\n\nif __name__ == \"__main__\":\n    print(minimize_color_cost([[1, 5, 2], [2, 3, 1], [7, 3, 5], [6, 2, 3]]))\n    print(minimize_color_cost([[1, 5, 2], [2, 3, 1], [7, 3, 5], [6, 3, 2]]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(k ^ n) [k choices for the first house and up to k - 1 for each one\nafter it]\nSPACE COMPLEXITY: O(k ^ n)\n\"\"\"\n", "id": "3874904", "language": "Python", "matching_score": 1.060057282447815, "max_stars_count": 70, "path": "Solutions/019.py" }, { "content": "\"\"\"\nProblem:\n\nAt a popular bar, each customer has a set of favorite drinks, and will happily accept\nany drink among this set. For example, in the following situation, customer 0 will be\nsatisfied with drinks 0, 1, 3, or 6.\n\npreferences = {\n    0: [0, 1, 3, 6],\n    1: [1, 4, 7],\n    2: [2, 4, 7, 5],\n    3: [3, 2, 5],\n    4: [5, 8]\n}\nA lazy bartender working at this bar is trying to reduce his effort by limiting the\ndrink recipes he must memorize. 
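This is a small instance of\nminimum set cover, so the brute-force search below tests unions of customer sets,\ne.g.:\n\n{0, 1} | {2, 3, 4} == {0, 1, 2, 3, 4}  # True when a drink combination covers everyone\n\n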
Given a dictionary input such as the one above, return\nthe fewest number of drinks he must learn in order to satisfy all customers.\n\nFor the input above, the answer would be 2, as drinks 1 and 5 will satisfy everyone.\n\"\"\"\n\nfrom typing import Dict, List, Set\n\n\ndef check_all_customers_satisfied(\n customers: Set[int], combination: List[int], drinks: Dict[int, Set[int]]\n) -> bool:\n temp = None\n for drink in combination:\n if temp is None:\n temp = drinks[drink]\n else:\n temp = temp.union(drinks[drink])\n return temp is not None and temp == customers\n\n\ndef get_min_drinks_helper(\n drinks_left: List[int],\n curr_combination: List[int],\n drinks: Dict[int, Set[int]],\n customers: Set[int],\n combination_list: List[List[int]],\n) -> None:\n # generating all possible combinations of drinks\n if not drinks_left:\n if check_all_customers_satisfied(customers, curr_combination, drinks):\n combination_list.append(curr_combination)\n return\n curr_drink = drinks_left.pop()\n get_min_drinks_helper(\n drinks_left[:],\n curr_combination + [curr_drink],\n drinks,\n customers,\n combination_list,\n )\n get_min_drinks_helper(\n drinks_left[:], curr_combination, drinks, customers, combination_list\n )\n\n\ndef get_min_drinks(preferences: Dict[int, List[int]]) -> int:\n # transforming preferences from customer -> drink to drink -> customer map\n drinks_transformed = {}\n for customer in preferences:\n for drink in preferences[customer]:\n if drink not in drinks_transformed:\n drinks_transformed[drink] = set()\n drinks_transformed[drink].add(customer)\n combinations = []\n # generating all combinations\n get_min_drinks_helper(\n list(drinks_transformed.keys()),\n [],\n drinks_transformed,\n set(preferences.keys()),\n combinations,\n )\n # NOTE: \"min()\" can be wrapped with \"len()\" to get the number of drinks\n return min(combinations, key=lambda x: len(x))\n\n\nif __name__ == \"__main__\":\n preferences = {\n 0: [0, 1, 3, 6],\n 1: [1, 4, 7],\n 2: [2, 4, 7, 5],\n 3: [3, 2, 5],\n 4: [5, 8],\n }\n print(get_min_drinks(preferences))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(2 ^ drinks + customers x drinks)\nSPACE COMPLEXITY: O(customers x drinks)\n\"\"\"\n", "id": "4514244", "language": "Python", "matching_score": 1.0368030071258545, "max_stars_count": 70, "path": "Solutions/297.py" }, { "content": "\"\"\"\nProblem:\n\nThe stable marriage problem is defined as follows:\n\nSuppose you have N men and N women, and each person has ranked their prospective\nopposite-sex partners in order of preference.\n\nFor example, if N = 3, the input could be something like this:\n\nguy_preferences = {\n 'andrew': ['caroline', 'abigail', 'betty'],\n 'bill': ['caroline', 'betty', 'abigail'],\n 'chester': ['betty', 'caroline', 'abigail'],\n}\ngal_preferences = {\n 'abigail': ['andrew', 'bill', 'chester'],\n 'betty': ['bill', 'andrew', 'chester'],\n 'caroline': ['bill', 'chester', 'andrew']\n}\nWrite an algorithm that pairs the men and women together in such a way that no two\npeople of opposite sex would both rather be with each other than with their current\npartners.\n\"\"\"\n\nfrom typing import Dict, List, Tuple\n\n\ndef woman_prefers_man1_over_man2(\n gal_preferences: Dict[str, List[str]], woman: str, man1: str, man2: str\n) -> bool:\n # check if a given woman prefers man1 more than man2\n for man in gal_preferences[woman]:\n if man == man1:\n return True\n if man == man2:\n return False\n\n\ndef stable_marriage(\n guy_preferences: Dict[str, List[str]], gal_preferences: Dict[str, List[str]]\n) -> 
List[Tuple[str, str]]:\n woman_partners = {woman: None for woman in gal_preferences}\n man_free = {man: True for man in guy_preferences}\n free_count = len(guy_preferences)\n\n while free_count > 0:\n # random single man selection\n curr = None\n for man, status in man_free.items():\n if status:\n curr = man\n break\n # pairing up the current man with a woman\n for woman in guy_preferences[curr]:\n if not man_free[curr]:\n break\n # engaging the current man if a single woman is encountered\n if not woman_partners[woman]:\n woman_partners[woman] = curr\n man_free[curr] = False\n free_count -= 1\n # updating woman's partner in case the current man is more favoured by the\n # woman than the man she is engaged with\n else:\n engaged_man = woman_partners[woman]\n if woman_prefers_man1_over_man2(\n gal_preferences, woman, curr, engaged_man\n ):\n woman_partners[woman] = curr\n man_free[engaged_man] = True\n man_free[curr] = False\n return list(woman_partners.items())\n\n\nif __name__ == \"__main__\":\n guy_preferences = {\n \"andrew\": [\"caroline\", \"abigail\", \"betty\"],\n \"bill\": [\"caroline\", \"betty\", \"abigail\"],\n \"chester\": [\"betty\", \"caroline\", \"abigail\"],\n }\n gal_preferences = {\n \"abigail\": [\"andrew\", \"bill\", \"chester\"],\n \"betty\": [\"bill\", \"andrew\", \"chester\"],\n \"caroline\": [\"bill\", \"chester\", \"andrew\"],\n }\n print(stable_marriage(guy_preferences, gal_preferences))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n ^ 2)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "5172270", "language": "Python", "matching_score": 1.0740976333618164, "max_stars_count": 70, "path": "Solutions/329.py" }, { "content": "\"\"\"\nProblem:\n\nAt a party, there is a single person who everyone knows, but who does not know anyone\nin return (the \"celebrity\"). 
To help figure out who this is, you have access to an O(1)\nmethod called knows(a, b), which returns True if person a knows person b, else False.\n\nGiven a list of N people and the above operation, find a way to identify the celebrity\nin O(N) time.\n\"\"\"\n\nfrom typing import Dict, Set\n\n\nclass Party:\n def __init__(self, people: Dict[str, Set[str]]) -> None:\n self.people = people\n\n def knows(self, a: str, b: str) -> bool:\n # function to check if a knows b [runs in O(1)]\n return b in self.people[a]\n\n def get_celebrity(self) -> str:\n # runs in O(v + e) time & space (e = the maximum people a person knows,\n # v = number of people)\n celebrity_candidates = {}\n for person in self.people:\n if celebrity_candidates == {}:\n # getting potential celebrities if the celebrity candidates is empty\n # the values are filled with people the 1st person knows who doesn't\n # know him (celebrity candidates will contain all popular people\n # including the celebrity)\n for person2 in self.people[person]:\n if not self.knows(person2, person):\n celebrity_candidates[person2] = 1\n continue\n # checking for the person known by most people in case there is other\n # popular people\n for potential_celebrity in celebrity_candidates:\n if potential_celebrity in self.people[person]:\n celebrity_candidates[potential_celebrity] += 1\n return max(\n celebrity_candidates.keys(),\n key=lambda candidate: celebrity_candidates[candidate],\n )\n\n\nif __name__ == \"__main__\":\n people = {\n \"a\": {\"b\"}, # popular person (but not the celebrity)\n \"b\": set(), # celebrity\n \"c\": {\"a\", \"b\", \"d\"},\n \"d\": {\"a\", \"b\"},\n }\n party = Party(people)\n print(party.get_celebrity())\n", "id": "10248049", "language": "Python", "matching_score": 1.0588161945343018, "max_stars_count": 70, "path": "Solutions/333.py" }, { "content": "\"\"\"\nProblem:\n\nOn election day, a voting machine writes data in the form (voter_id, candidate_id) to a\ntext file. Write a program that reads this file as a stream and returns the top 3\ncandidates at any given time. 
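Each line of the file is assumed\nhere to be a comma-separated pair (a hypothetical sample record, matching the\nparsing in the class below):\n\nvoter_42,candidate_7\n\n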
If you find a voter voting more than once, report this as\nfraud.\n\"\"\"\n\nfrom typing import List\n\n\nclass MultipleVoteError(Exception):\n    pass\n\n\nclass VotingMachine:\n    def __init__(self, filename: str = \"data.txt\") -> None:\n        with open(filename, \"r\") as f:\n            lines = f.readlines()\n        self.data = [line.rstrip(\"\\n\").split(\",\") for line in lines]\n\n    def get_top_3(self) -> List[str]:\n        # runs in O(n) time and space\n        voters = set()\n        votes = {}\n        # calculating votes\n        for voter, candidate in self.data:\n            if voter in voters:\n                raise MultipleVoteError(f\"Voter {voter} has voted multiple times\")\n            voters.add(voter)\n            if candidate not in votes:\n                votes[candidate] = 0\n            votes[candidate] += 1\n        # generating the top 3 candidates\n        candidates = list(votes.items())\n        candidates.sort(reverse=True, key=lambda x: x[1])\n        return [candidate for candidate, _ in candidates[:3]]\n\n\nif __name__ == \"__main__\":\n    print(\"Data Set 1:\")\n    vm = VotingMachine()\n    print(vm.get_top_3())\n\n    print(\"\\nData Set 2:\")\n    try:\n        vm = VotingMachine(\"dataError.txt\")\n        vm.get_top_3()\n    except MultipleVoteError as E:\n        print(E)\n", "id": "10478437", "language": "Python", "matching_score": 0.9745932221412659, "max_stars_count": 70, "path": "Solutions/300/300.py" }, { "content": "####################################################\n# IMPORTS (FROM LIBRARY) ###########################\n####################################################\n\nfrom pandas import DataFrame\n\n####################################################\n# FUNCTION TO GENERATE THE PARTICIPATION DATA ######\n####################################################\n\ndef generate_data(data):\n    users = list(data[0])\n    events = list(data[1])\n\n    records = []\n\n    for user in users:\n        for event in events:\n            if user['Id'] == event['Id']:\n                temp = dict(user)\n                temp['Event'] = event['Event']\n                records.append(temp)\n\n    df = DataFrame(records)\n\n    df.to_excel('Participation_Data.xlsx', index=False)\n", "id": "12527655", "language": "Python", "matching_score": 0.11657106876373291, "max_stars_count": 1, "path": "Data_Gen.py" }, { "content": "\"\"\"\nProblem:\n\nUsing a read7() method that returns 7 characters from a file, implement readN(n) which\nreads n characters.\n\nFor example, given a file with the content \"Hello world\", three successive read7()\ncalls return \"Hello w\", \"orld\", and then \"\".\n\"\"\"\n\n# COUNT_7 stores the number of characters already read from the file\n# STASHED_TEXT stores the unreturned data (used in readN)\nCOUNT_7 = 0\nSTASHED_TEXT = \"\"\n\n\ndef read7(filename: str = \"data_082.txt\") -> str:\n    global COUNT_7\n    with open(filename, \"r\") as f:\n        f.seek(COUNT_7, 0)\n        data = f.read(7)\n    COUNT_7 += 7\n    return data\n\n\ndef readN(n: int, filename: str = \"data_082.txt\") -> str:\n    global STASHED_TEXT\n    for _ in range((n // 7) + 1):\n        STASHED_TEXT += read7(filename)\n    text = STASHED_TEXT[:n]\n    STASHED_TEXT = STASHED_TEXT[n:]\n    return text\n\n\nif __name__ == \"__main__\":\n    print(readN(3))\n    print(readN(10))\n    print(readN(15))\n    print(readN(25))\n    print(readN(1000))\n    print(readN(1000))\n", "id": "3807740", "language": "Python", "matching_score": 1.0068057775497437, "max_stars_count": 70, "path": "Solutions/082/082.py" }, { "content": "\"\"\"\nProblem:\n\nImplement a job scheduler which takes in a function f and an integer n, and calls f\nafter n milliseconds.\n\"\"\"\n\nfrom time import sleep\nfrom typing import Callable\n\n\ndef get_seconds_from_milliseconds(time_mil: int) -> float:\n    return time_mil / 1000\n\n\ndef 
job_scheduler(function: Callable, delay: int) -> None:\n    sleep(get_seconds_from_milliseconds(delay))\n    function()\n\n\n# function to test the job scheduler\ndef print_hello() -> None:\n    print(\"Hello!\")\n\n\nif __name__ == \"__main__\":\n    job_scheduler(print_hello, 1)\n    job_scheduler(print_hello, 500)\n    job_scheduler(print_hello, 1000)\n", "id": "9989726", "language": "Python", "matching_score": 0.9273549914360046, "max_stars_count": 70, "path": "Solutions/010.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a function f, and N return a debounced f of N milliseconds.\n\nThat is, as long as the debounced f continues to be invoked, f itself will not be\ncalled for N milliseconds.\n\"\"\"\n\nfrom time import monotonic, sleep\nfrom typing import Any, Callable, Optional\n\n\ndef debounce(ms: int) -> Callable:\n    interval_seconds = ms / 1000\n\n    def decorate(f: Callable) -> Callable:\n        last_invoked: Optional[float] = None\n\n        def wrapped(*args, **kwargs) -> Optional[Any]:\n            nonlocal last_invoked\n            now = monotonic()\n            # f runs only if no invocation happened within the last N milliseconds;\n            # every invocation (suppressed or not) restarts the quiet window\n            should_call = (\n                last_invoked is None or now - last_invoked >= interval_seconds\n            )\n            last_invoked = now\n            if should_call:\n                return f(*args, **kwargs)\n            return None\n\n        return wrapped\n\n    return decorate\n\n\n@debounce(3000)\ndef add_nums(x: int, y: int) -> int:\n    return x + y\n\n\nif __name__ == \"__main__\":\n    print(add_nums(1, 1))  # runs: first invocation\n    print(add_nums(1, 2))  # suppressed: invoked again within 3000 ms\n    sleep(3)\n    print(add_nums(1, 3))  # runs: a full quiet window has passed\n    print(add_nums(1, 4))  # suppressed\n", "id": "4031121", "language": "Python", "matching_score": 0.6185275912284851, "max_stars_count": 70, "path": "Solutions/105.py" }, { "content": "\"\"\"\nProblem:\n\nWrite a function, add_subtract, which alternately adds and subtracts curried arguments.\nHere are some sample operations:\n\nadd_subtract(7) -> 7\nadd_subtract(1)(2)(3) -> 1 + 2 - 3 -> 0\nadd_subtract(-5)(10)(3)(9) -> -5 + 10 - 3 + 9 -> 11\n\"\"\"\n\nfrom __future__ import annotations\n\n\nclass CallableInt(int):\n    def __init__(self, value: int) -> None:\n        int.__init__(value)\n        self.should_add = True\n\n    def __call__(self, value: int) -> CallableInt:\n        if self.should_add:\n            result = CallableInt(self + value)\n        else:\n            result = CallableInt(self - value)\n        result.update_should_add(not self.should_add)\n        return result\n\n    def update_should_add(self, should_add: bool) -> None:\n        self.should_add = should_add\n\n\ndef add_subtract(value: int) -> CallableInt:\n    return CallableInt(value)\n\n\nif __name__ == \"__main__\":\n    print(add_subtract(7))\n    print(add_subtract(1)(2)(3))\n    print(add_subtract(-5)(10)(3)(9))\n", "id": "8069924", "language": "Python", "matching_score": 1.0418487787246704, "max_stars_count": 70, "path": "Solutions/363.py" }, { "content": "\"\"\"\nProblem:\n\nGiven three 32-bit integers x, y, and b, return x if b is 1 and y if b is 0, using only\nmathematical or bit operations. 
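One branch-free identity that\nworks here, and is the one used by the solution below, is x * b + y * (1 - b):\n\n6 * 1 + 8 * (1 - 1)  # -> 6 (b = 1 selects x)\n6 * 0 + 8 * (1 - 0)  # -> 8 (b = 0 selects y)\n\n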
You can assume b can only be 1 or 0.\n\"\"\"\n\n\ndef switch_on_int(x: int, y: int, b: int) -> int:\n    return (x * b) + (y * abs(b - 1))\n\n\nif __name__ == \"__main__\":\n    print(switch_on_int(6, 8, 1))\n    print(switch_on_int(6, 8, 0))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(1)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "8635824", "language": "Python", "matching_score": 0.4303351640701294, "max_stars_count": 70, "path": "Solutions/085.py" }, { "content": "\"\"\"\nProblem:\n\nImplement a data structure which carries out the following operations without resizing\nthe underlying array:\n* add(value): Add a value to the set of values.\n* check(value): Check whether a value is in the set.\n\nThe check method may return occasional false positives (in other words, incorrectly\nidentifying an element as part of the set), but should always correctly identify a true\nelement.\n\"\"\"\n\n# this is an improvised version of the method available at:\n# https://www.geeksforgeeks.org/bloom-filters-introduction-and-python-implementation/\n\nfrom bitarray import bitarray\nfrom math import log\nfrom random import shuffle\n\n\nclass BloomFilter:\n\n    \"\"\"\n    Class for Bloom filter, using Python's built-in hash with varying seeds\n    \"\"\"\n\n    def __init__(self, items_count: int, fp_prob: float) -> None:\n        \"\"\"\n        items_count : int\n            Number of items expected to be stored in bloom filter\n        fp_prob : float\n            False Positive probability in decimal\n        \"\"\"\n        # false positive probability in decimal\n        self.fp_prob = fp_prob\n\n        # size of bit array to use\n        self.size = BloomFilter.get_size(items_count, fp_prob)\n\n        # number of hash functions to use\n        self.hash_count = BloomFilter.get_hash_count(self.size, items_count)\n\n        # bit array of given size\n        self.bit_array = bitarray(self.size)\n\n        # initialize all bits as 0\n        self.bit_array.setall(0)\n\n    def add(self, item: str) -> None:\n        \"\"\"\n        Add an item in the filter\n        \"\"\"\n        for i in range(self.hash_count):\n            # create a digest for the given item; using i as a seed makes each of\n            # the hash_count digests different\n            digest = hash((item, i)) % self.size\n\n            # set the bit True in bit_array\n            self.bit_array[digest] = True\n\n    def check(self, item: str) -> bool:\n        \"\"\"\n        Check for existence of an item in filter\n        \"\"\"\n        for i in range(self.hash_count):\n            digest = hash((item, i)) % self.size\n            if not self.bit_array[digest]:\n                # if any bit is False, the item is definitely not present in the\n                # filter; otherwise there is a probability that it exists\n                return False\n        return True\n\n    @staticmethod\n    def get_size(n: int, p: float) -> int:\n        \"\"\"\n        Return the size of bit array (m) to be used, using the following formula:\n        m = -(n * lg(p)) / (lg(2) ^ 2)\n        n : int\n            number of items expected to be stored in filter\n        p : float\n            False Positive probability in decimal\n        \"\"\"\n        m = -(n * log(p)) / (log(2) ** 2)\n        return int(m)\n\n    @staticmethod\n    def get_hash_count(m: int, n: int) -> int:\n        \"\"\"\n        Return the number of hash functions (k) to be used, using the following\n        formula:\n        k = (m / n) * lg(2)\n\n        m : int\n            size of bit array\n        n : int\n            number of items expected to be stored in filter\n        \"\"\"\n        k = (m / n) * log(2)\n        return int(k)\n\n\nif __name__ == \"__main__\":\n    n = 20  # no of items to add\n    p = 0.05  # false positive probability\n\n    bloomf = BloomFilter(n, p)\n    print(\"Size of bit array:{}\".format(bloomf.size))\n    print(\"False positive Probability:{}\".format(bloomf.fp_prob))\n    print(\"Number of hash 
functions:{}\\n\".format(bloomf.hash_count))\n\n    # words to be added\n    word_present = [\n        \"abound\",\n        \"abounds\",\n        \"abundance\",\n        \"abundant\",\n        \"accessable\",\n        \"bloom\",\n        \"blossom\",\n        \"bolster\",\n        \"bonny\",\n        \"bonus\",\n        \"bonuses\",\n        \"coherent\",\n        \"cohesive\",\n        \"colorful\",\n        \"comely\",\n        \"comfort\",\n        \"gems\",\n        \"generosity\",\n        \"generous\",\n        \"generously\",\n        \"genial\",\n    ]\n\n    # words not added\n    word_absent = [\n        \"bluff\",\n        \"cheater\",\n        \"hate\",\n        \"war\",\n        \"humanity\",\n        \"racism\",\n        \"hurt\",\n        \"nuke\",\n        \"gloomy\",\n        \"facebook\",\n        \"geeksforgeeks\",\n        \"twitter\",\n    ]\n\n    for item in word_present:\n        bloomf.add(item)\n\n    shuffle(word_present)\n    shuffle(word_absent)\n\n    test_words = word_present[:10] + word_absent\n    shuffle(test_words)\n    for word in test_words:\n        if bloomf.check(word):\n            if word in word_absent:\n                print(\"'{}' is a false positive!\".format(word))\n            else:\n                print(\"'{}' is probably present!\".format(word))\n        else:\n            print(\"'{}' is definitely not present!\".format(word))\n", "id": "2498034", "language": "Python", "matching_score": 1.2355200052261353, "max_stars_count": 70, "path": "Solutions/301.py" }, { "content": "\"\"\"\nProblem:\n\nImplement a queue using a set of fixed-length arrays.\n\nThe queue should support enqueue, dequeue, and get_size operations.\n\"\"\"\n\nfrom typing import Any\n\n\nclass Queue:\n    def __init__(self, num_of_arr: int, size_of_arr: int) -> None:\n        # storing the fixed length arrays as a matrix\n        self.matrix = [[None for _ in range(size_of_arr)] for _ in range(num_of_arr)]\n        self.size_of_arr, self.num_of_arr = size_of_arr, num_of_arr\n        self.head_pos, self.rear_pos = 0, 0\n\n    def enqueue(self, obj: Any) -> None:\n        # the queue is full exactly when all num_of_arr x size_of_arr slots are used\n        if self.rear_pos == self.num_of_arr * self.size_of_arr:\n            raise OverflowError(\"Queue is full\")\n\n        i = (self.rear_pos) // self.size_of_arr\n        j = (self.rear_pos) % self.size_of_arr\n        self.matrix[i][j] = obj\n        self.rear_pos += 1\n\n    def dequeue(self) -> Any:\n        if self.rear_pos == 0:\n            raise RuntimeError(\"Queue is empty\")\n\n        obj = self.matrix[0][0]\n        # shifting the remaining elements one slot forward\n        for pos in range(1, self.rear_pos):\n            i = (pos) // self.size_of_arr\n            j = (pos) % self.size_of_arr\n            if j == 0:\n                self.matrix[i - 1][self.size_of_arr - 1] = self.matrix[i][j]\n            else:\n                self.matrix[i][j - 1] = self.matrix[i][j]\n        self.rear_pos -= 1\n        return obj\n\n    def get_size(self) -> int:\n        return self.rear_pos\n\n\nif __name__ == \"__main__\":\n    queue = Queue(3, 2)\n\n    queue.enqueue(1)\n    queue.enqueue(2)\n    queue.enqueue(3)\n\n    print(\"SIZE:\", queue.get_size())\n    print(queue.dequeue())\n    print(\"SIZE:\", queue.get_size())\n\n    queue.enqueue(4)\n\n    print(queue.dequeue())\n\n    queue.enqueue(5)\n    queue.enqueue(6)\n\n    print(\"SIZE:\", queue.get_size())\n\n    print(queue.dequeue())\n    print(queue.dequeue())\n    print(queue.dequeue())\n    print(queue.dequeue())\n\n    print(\"SIZE:\", queue.get_size())\n", "id": "682569", "language": "Python", "matching_score": 0.9977350234985352, "max_stars_count": 70, "path": "Solutions/356.py" }, { "content": "\"\"\"\nProblem:\n\nImplement a 2D iterator class. It will be initialized with an array of arrays, and\nshould implement the following methods:\n\nnext(): returns the next element in the array of arrays. 
If there are no more\nelements, raise an exception.\nhas_next(): returns whether or not the iterator still has elements left.\nFor example, given the input [[1, 2], [3], [], [4, 5, 6]], calling next() repeatedly\nshould output 1, 2, 3, 4, 5, 6.\n\nDo not use flatten or otherwise clone the arrays. Some of the arrays can be empty.\n\"\"\"\n\nfrom typing import Generator, List, Optional\n\n\nclass Iterator2D:\n    def __init__(self, iterable2d: List[List[int]]) -> None:\n        self.iterable2d = iterable2d\n        self.generator = Iterator2D.generator_func(iterable2d)\n        self.next_value = next(self.generator)\n\n    def __repr__(self) -> str:\n        return str(self.iterable2d)\n\n    @staticmethod\n    def generator_func(iterable2d: List[List[int]]) -> Generator[int, None, None]:\n        for iterable in iterable2d:\n            for element in iterable:\n                yield element\n\n    def has_next(self) -> bool:\n        return self.next_value is not None\n\n    def next(self) -> Optional[int]:\n        curr_value = self.next_value\n        try:\n            self.next_value = next(self.generator)\n        except StopIteration:\n            self.next_value = None\n        return curr_value\n\n\nif __name__ == \"__main__\":\n    iter_obj = Iterator2D([[1, 2], [3], [], [4, 5, 6]])\n    print(iter_obj)\n\n    print(iter_obj.has_next())\n    print(iter_obj.next())\n\n    print(iter_obj.has_next())\n    print(iter_obj.next())\n\n    print(iter_obj.has_next())\n    print(iter_obj.next())\n\n    print(iter_obj.has_next())\n    print(iter_obj.next())\n\n    print(iter_obj.has_next())\n    print(iter_obj.next())\n\n    print(iter_obj.has_next())\n    print(iter_obj.next())\n\n    print(iter_obj.has_next())\n    print(iter_obj.next())\n", "id": "9701816", "language": "Python", "matching_score": 1.6364713907241821, "max_stars_count": 70, "path": "Solutions/166.py" }, { "content": "\"\"\"\nProblem:\n\nGiven an iterator with methods next() and hasNext(), create a wrapper iterator,\nPeekableInterface, which also implements peek(). 
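A minimal usage sketch, using\nonly the methods named in this interface:\n\np = PeekableInterface(iter([1, 2]))\np.peek()     # -> 1, without consuming it\np.next()     # -> 1\np.hasNext()  # -> True\n\n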
peek shows the next element that would\nbe returned on next().\n\nHere is the interface:\n\nclass PeekableInterface(object):\n def __init__(self, iterator):\n pass\n\n def peek(self):\n pass\n\n def next(self):\n pass\n\n def hasNext(self):\n pass\n\"\"\"\n\nfrom typing import Any, Iterable\n\n\nclass PeekableInterface(object):\n def __init__(self, iterator: Iterable[Any]) -> None:\n self.iterator = iterator\n try:\n self.next_val = next(self.iterator)\n self.has_next = True\n except StopIteration:\n self.next_val = None\n self.has_next = False\n\n def peek(self) -> Any:\n return self.next_val\n\n def next(self) -> Any:\n if self.has_next:\n curr_elem = self.next_val\n try:\n self.next_val = next(self.iterator)\n except StopIteration:\n self.next_val = None\n self.has_next = False\n return curr_elem\n return None\n\n def hasNext(self) -> bool:\n return self.has_next\n\n\nif __name__ == \"__main__\":\n sample_list = [1, 2, 3, 4, 5]\n iterator = iter(sample_list)\n peekable = PeekableInterface(iterator)\n\n print(peekable.peek())\n print(peekable.hasNext())\n\n print(peekable.next())\n print(peekable.next())\n print(peekable.next())\n\n print(peekable.peek())\n print(peekable.hasNext())\n\n print(peekable.next())\n print(peekable.hasNext())\n print(peekable.peek())\n print(peekable.next())\n\n print(peekable.hasNext())\n print(peekable.peek())\n\n print()\n\n sample_list = []\n iterator = iter(sample_list)\n peekable = PeekableInterface(iterator)\n\n print(peekable.peek())\n print(peekable.hasNext())\n", "id": "338667", "language": "Python", "matching_score": 0.9029959440231323, "max_stars_count": 70, "path": "Solutions/139.py" }, { "content": "\"\"\"\nProblem:\n\nreduce (also known as fold) is a function that takes in an array, a combining function,\nand an initial value and builds up a result by calling the combining function on each\nelement of the array, left to right. 
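Unrolled,\nreduce([a, b, c], f, init) computes f(f(f(init, a), b), c).\n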
For example, we can write sum() in terms of\nreduce:\n\ndef add(a, b):\n return a + b\n\ndef sum(lst):\n return reduce(lst, add, 0)\n\nThis should call add on the initial value with the first element of the array, and then\nthe result of that with the second element of the array, and so on until we reach the\nend, when we return the sum of the array.\n\nImplement your own version of reduce.\n\"\"\"\n\nfrom typing import Any, Callable, Iterable\n\n\ndef reduce(iterable: Iterable, func: Callable, initial_value: Any) -> Any:\n value = initial_value\n for item in iterable:\n value = func(value, item)\n return value\n\n\ndef add(a: int, b: int) -> int:\n return a + b\n\n\ndef sum(lst: Iterable) -> int:\n return reduce(lst, add, 0)\n\n\nif __name__ == \"__main__\":\n print(sum([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(1)\n[for the reduce function only (considering the iterable doesn't contain a nested\niterable)]\n\"\"\"\n", "id": "11919459", "language": "Python", "matching_score": 0.7648708820343018, "max_stars_count": 70, "path": "Solutions/342.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a list, sort it using this method: reverse(lst, i, j), which reverses lst from i\nto j.\n\"\"\"\n\nfrom typing import List\n\n\ndef reverse(lst: List[int], i: int, j: int) -> None:\n lst[i : j + 1] = lst[i : j + 1][::-1]\n\n\ndef bubble_sort(lst: List[int]) -> List[int]:\n length = len(lst)\n for i in range(length - 1):\n for j in range(length - i - 1):\n if lst[j] > lst[j + 1]:\n reverse(lst, j, j + 1)\n return lst\n\n\nif __name__ == \"__main__\":\n print(bubble_sort([0, 6, 4, 2, 5, 3, 1]))\n print(bubble_sort([0, 6, 4, 2, 5, 3, 1, 10, 9]))\n print(bubble_sort([0, 6, 4, 2, 5, 3, 1, 2, 3]))\n print(bubble_sort([0, 6, 4, 2, 5, 3, 1, 11]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n ^ 2)\nSPACE COMPLEXITY: O(1) [since reverse() is being used on adjacent indices]\n\"\"\"\n", "id": "8556523", "language": "Python", "matching_score": 0.5927072763442993, "max_stars_count": 70, "path": "Solutions/147.py" }, { "content": "\"\"\"\nProblem:\n\nGiven an array of numbers and an index i, return the index of the nearest larger number\nof the number at index i, where distance is measured in array indices.\n\nFor example, given [4, 1, 3, 5, 6] and index 0, you should return 3.\n\nIf two distances to larger numbers are equal, then return any one of them. 
If the array\nat i doesn't have a nearest larger integer, then return null.\n\nFollow-up: If you can preprocess the array, can you do this in constant time?\n\"\"\"\n\nfrom typing import Dict, List, Optional\n\n\ndef preprocess(arr: List[int]) -> Dict[int, int]:\n preprocessed_indices = {}\n length = len(arr)\n sorted_tuples = [(value, index) for index, value in enumerate(arr)]\n sorted_tuples.sort(key=lambda tup: tup[0])\n # for every element, record the index of the nearest element among those\n # with larger values\n for k, (_, i) in enumerate(sorted_tuples[:-1]):\n min_dist = length\n for m in range(k + 1, length):\n dist_temp = abs(i - sorted_tuples[m][1])\n if dist_temp < min_dist:\n min_dist = dist_temp\n preprocessed_indices[i] = sorted_tuples[m][1]\n return preprocessed_indices\n\n\ndef nearest_larger_value_index(arr: List[int], index: int) -> Optional[int]:\n preprocessed_indices = preprocess(arr)\n if index not in preprocessed_indices:\n return None\n return preprocessed_indices[index]\n\n\nif __name__ == \"__main__\":\n print(nearest_larger_value_index([4, 1, 3, 5, 6], 0))\n print(nearest_larger_value_index([4, 1, 3, 5, 6], 1))\n print(nearest_larger_value_index([4, 1, 3, 5, 6], 4))\n print(nearest_larger_value_index([4, 1, 3, 5, 6], 3))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n ^ 2)\nSPACE COMPLEXITY: O(n)\n[O(n ^ 2) is for preprocessing, after which each query is O(1)]\n\"\"\"\n", "id": "4744919", "language": "Python", "matching_score": 2.091013193130493, "max_stars_count": 70, "path": "Solutions/144.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a set of points (x, y) on a 2D cartesian plane, find the two closest points. For\nexample, given the points [(1, 1), (-1, -1), (3, 4), (6, 1), (-1, -6), (-4, -3)],\nreturn [(-1, -1), (1, 1)].\n\"\"\"\n\nfrom math import sqrt\nfrom sys import maxsize\nfrom typing import List, Tuple\n\n\nPoint = Tuple[int, int]\n\n\ndef get_distance(pt1: Point, pt2: Point) -> float:\n x1, y1 = pt1\n x2, y2 = pt2\n dist = sqrt(pow(x1 - x2, 2) + pow(y1 - y2, 2))\n return dist\n\n\ndef get_nearest_points(pts_arr: List[Point]) -> List[Point]:\n length = len(pts_arr)\n dist = maxsize\n pt1, pt2 = None, None\n\n for index_1 in range(length):\n for index_2 in range(index_1 + 1, length):\n pt1_temp, pt2_temp = pts_arr[index_1], pts_arr[index_2]\n dist_temp = get_distance(pt1_temp, pt2_temp)\n if dist_temp < dist:\n dist = dist_temp\n pt1, pt2 = pt1_temp, pt2_temp\n return [pt1, pt2]\n\n\nif __name__ == \"__main__\":\n print(get_nearest_points([(1, 1), (-1, -1), (3, 4), (6, 1), (-1, -6), (-4, -3)]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n ^ 2)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "6645653", "language": "Python", "matching_score": 1.6685521602630615, "max_stars_count": 70, "path": "Solutions/340.py" }, { "content": "\"\"\"\nProblem:\n\nGiven an array of numbers and a number k, determine if there are three entries in the\narray which add up to the specified number k. 
For example, given [20, 303, 3, 4, 25]\nand k = 49, return true as 20 + 4 + 25 = 49.\n\"\"\"\n\nfrom typing import List\n\n\ndef target_sum_of_two(arr: List[int], k: int, index_1: int, index_2: int) -> bool:\n while index_1 < index_2:\n elem_1, elem_2 = arr[index_1], arr[index_2]\n curr_sum = elem_1 + elem_2\n if curr_sum == k:\n return True\n elif curr_sum < k:\n index_1 += 1\n else:\n index_2 -= 1\n return False\n\n\ndef target_sum_of_three(arr: List[int], k: int) -> bool:\n length = len(arr)\n # sorting the array to utilize the optimizations offered by a sorted array to find\n # target sum of 2 numbers\n arr.sort()\n\n for index_1, elem in enumerate(arr):\n index_2, index_3 = index_1 + 1, length - 1\n if elem >= k:\n break\n if target_sum_of_two(arr, k - elem, index_2, index_3):\n return True\n return False\n\n\nif __name__ == \"__main__\":\n print(target_sum_of_three([20, 303, 3, 4, 25], 49))\n print(target_sum_of_three([20, 303, 3, 4, 25], 50))\n print(target_sum_of_three([20, 300, -300, 4, 25], 25))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n ^ 2)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "9923953", "language": "Python", "matching_score": 1.194987416267395, "max_stars_count": 70, "path": "Solutions/339.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a circular array, compute its maximum subarray sum in O(n) time.\n\nFor example, given [8, -1, 3, 4], return 15 as we choose the numbers 3, 4, and 8 where\nthe 8 is obtained from wrapping around.\n\nGiven [-4, 5, 1, 0], return 6 as we choose the numbers 5 and 1.\n\"\"\"\n\nfrom typing import List\n\n\ndef kadane(arr: List[int]) -> int:\n max_sum, curr_sum = 0, 0\n for elem in arr:\n curr_sum += elem\n curr_sum = max(curr_sum, 0)\n max_sum = max(max_sum, curr_sum)\n return max_sum\n\n\ndef max_circular_subarr(arr: List[int]) -> int:\n length = len(arr)\n max_kadane = kadane(arr)\n # generating the maximum sum using the corner elements\n max_wrap = 0\n for i in range(length):\n max_wrap += arr[i]\n arr[i] = -arr[i]\n max_wrap += kadane(arr)\n return max(max_wrap, max_kadane)\n\n\nif __name__ == \"__main__\":\n print(max_circular_subarr([-4, 5, 1, 0]))\n print(max_circular_subarr([8, -1, 3, 4]))\n print(max_circular_subarr([-8, -1, -3, -4]))\n print(max_circular_subarr([8, -1, 300, -1, 4]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "2010837", "language": "Python", "matching_score": 1.5867784023284912, "max_stars_count": 70, "path": "Solutions/190.py" }, { "content": "\"\"\"\nProblem:\n\nGiven an array of integers and a number k, where 1 <= k <= length of the array, compute\nthe maximum values of each subarray of length k.\n\nFor example, given array = [10, 5, 2, 7, 8, 7] and k = 3, we should get: [10, 7, 8, 8],\nsince:\n\n10 = max(10, 5, 2)\n7 = max(5, 2, 7)\n8 = max(2, 7, 8)\n8 = max(7, 8, 7)\nDo this in O(n) time and O(k) space. You can modify the input array in-place and you do\nnot need to store the results. 
You can simply print them out as you compute them.\n\"\"\"\n\nfrom collections import deque\nfrom typing import List, Optional\n\n\ndef calc_max_per_k_elems(arr: List[int], k: int) -> Optional[List[int]]:\n length = len(arr)\n if not arr:\n return None\n if length <= k:\n return [max(arr)]\n # storing results (even though the problem states it can be directly printed)\n result = []\n dq = deque()\n # calculating the 1st element\n for i in range(k):\n while dq and arr[dq[-1]] < arr[i]:\n dq.pop()\n dq.append(i)\n result.append(arr[dq[0]])\n # generating the rest of the resultant elements\n for i in range(k, length):\n # removing all elements apart from the last k elements\n while dq and dq[0] <= i - k:\n dq.popleft()\n # removing the elements smaller than the current element\n while dq and arr[dq[-1]] < arr[i]:\n dq.pop()\n dq.append(i)\n result.append(arr[dq[0]])\n return result\n\n\nif __name__ == \"__main__\":\n print(calc_max_per_k_elems([10, 5, 2, 7, 8, 7], 3))\n print(calc_max_per_k_elems([1, 91, 17, 46, 45, 36, 9], 3))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(k)\n\"\"\"\n", "id": "10168122", "language": "Python", "matching_score": 1.1798608303070068, "max_stars_count": 70, "path": "Solutions/018.py" }, { "content": "\"\"\"\nProblem:\n\nGiven an array of integers, write a function to determine whether the array could\nbecome non-decreasing by modifying at most 1 element.\n\nFor example, given the array [10, 5, 7], you should return true, since we can modify\nthe 10 into a 1 to make the array non-decreasing.\n\nGiven the array [10, 5, 1], you should return false, since we can't modify any one\nelement to get a non-decreasing array.\n\"\"\"\n\nfrom typing import List\n\n\ndef check_1_modify_to_sorted(arr: List[int]) -> bool:\n # count the dips (positions where the order decreases); on a dip at index i,\n # if arr[i - 2] > arr[i] the only single-element fix is raising arr[i] up to\n # arr[i - 1] (done in place on the input), otherwise arr[i - 1] can be\n # lowered, which needs no bookkeeping for later comparisons\n modification_count = 0\n for i in range(1, len(arr)):\n if arr[i] < arr[i - 1]:\n modification_count += 1\n if modification_count > 1:\n return False\n if i >= 2 and arr[i - 2] > arr[i]:\n arr[i] = arr[i - 1]\n return True\n\n\nif __name__ == \"__main__\":\n print(check_1_modify_to_sorted([10, 5, 7]))\n print(check_1_modify_to_sorted([10, 5, 1]))\n print(check_1_modify_to_sorted([1, 10, 5, 7]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "12174153", "language": "Python", "matching_score": 1.0687875747680664, "max_stars_count": 70, "path": "Solutions/079.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a list of numbers, return whether any two of them sum to k. For example, given\n[10, 15, 3, 7] and k of 17, return true since 10 + 7 is 17.\n\nBonus: Can you do this in one pass?\n\"\"\"\n\nfrom typing import List\n\n\ndef check_target_sum(arr: List[int], target: int) -> bool:\n # using a hash set to store the previously seen values for O(1) access\n previous = set()\n for elem in arr:\n if (target - elem) in previous:\n return True\n previous.add(elem)\n return False\n\n\nif __name__ == \"__main__\":\n print(check_target_sum([], 17))\n print(check_target_sum([10, 15, 3, 7], 17))\n print(check_target_sum([10, 15, 3, 4], 17))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "7467482", "language": "Python", "matching_score": 1.5513497591018677, "max_stars_count": 70, "path": "Solutions/001.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a list of integers S and a target number k, write a function that returns a\nsubset of S that adds up to k. If such a subset cannot be made, then return null.\n\nIntegers can appear more than once in the list. 
You may assume all numbers in the list\nare positive.\n\nFor example, given S = [12, 1, 61, 5, 9, 2] and k = 24, return [12, 9, 2, 1] since it\nsums up to 24.\n\"\"\"\n\nfrom typing import List, Optional\n\n\ndef target_sum(arr: List[int], k: int) -> Optional[List[int]]:\n if not arr:\n return None\n elem = arr[0]\n if elem == k:\n return [elem]\n # generating the subset\n possible_subset = target_sum(arr[1:], k - elem)\n if possible_subset is not None:\n return [elem] + possible_subset\n return target_sum(arr[1:], k)\n\n\nif __name__ == \"__main__\":\n print(target_sum([12, 1, 61, 5, 9, 2], 24))\n print(target_sum([12, 1, 61, 5, 9, 2], 61))\n print(target_sum([12, 1, 61, 5, -108, 2], -106))\n print(target_sum([12, 1, 61, 5, -108, 2], 1006))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n x 2 ^ n)\nSPACE COMPLEXITY: O(n ^ 2)\n[every element is either included or excluded, and the list slicing adds a factor of n]\n\"\"\"\n", "id": "2663477", "language": "Python", "matching_score": 1.5087910890579224, "max_stars_count": 70, "path": "Solutions/042.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a sorted array, find the smallest positive integer that is not the sum of a\nsubset of the array.\n\nFor example, for the input [1, 2, 3, 10], you should return 7.\n\nDo this in O(N) time.\n\"\"\"\n\nfrom typing import List\n\n\ndef get_smallest_subset_sum_not_in_arr(arr: List[int]) -> int:\n # NOTE: The array is pre-sorted\n res = 1\n for elem in arr:\n if elem > res:\n break\n res += elem\n return res\n\n\nif __name__ == \"__main__\":\n print(get_smallest_subset_sum_not_in_arr([1, 2, 3, 10]))\n print(get_smallest_subset_sum_not_in_arr([1, 2, 10]))\n print(get_smallest_subset_sum_not_in_arr([1, 10]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "9303108", "language": "Python", "matching_score": 1.2901384830474854, "max_stars_count": 70, "path": "Solutions/224.py" }, { "content": "\"\"\"\nProblem:\n\nYou are given an array X of floating-point numbers x1, x2, ... xn. These can be rounded\nup or down to create a corresponding array Y of integers y1, y2, ... yn.\n\nWrite an algorithm that finds an appropriate Y array with the following properties:\n\nThe rounded sums of both arrays should be equal.\nThe absolute pairwise difference between elements is minimized. In other words,\n|x1 - y1| + |x2 - y2| + ... + |xn - yn| should be as small as possible.\nFor example, suppose your input is [1.3, 2.3, 4.4]. 
In this case you cannot do better\nthan [1, 2, 5], which has an absolute difference of\n|1.3 - 1| + |2.3 - 2| + |4.4 - 5| = 1.\n\"\"\"\n\nfrom typing import List, Tuple\n\n\ndef get_fraction_from_tuple(tup: Tuple[int, float]) -> float:\n    _, elem = tup\n    return elem - int(elem)\n\n\ndef round_arr(arr: List[float]) -> List[int]:\n rounded_arr = [round(elem) for elem in arr]\n sum_arr = int(sum(arr))\n sum_rounded_arr = sum(rounded_arr)\n # if the sums are equal, the rounding has been properly implemented\n if sum_arr == sum_rounded_arr:\n return rounded_arr\n # equalizing the sums\n should_increment = sum_arr > sum_rounded_arr\n # elements with the largest fractional part are the cheapest to adjust in\n # either direction, so always sort by descending fraction\n num_map = sorted(\n [(index, elem) for index, elem in enumerate(arr)],\n key=get_fraction_from_tuple,\n reverse=True,\n )\n # incrementing and decrementing the values as per requirement (while minimizing the\n # pair-wise sum)\n for i in range(abs(sum_arr - sum_rounded_arr)):\n index, _ = num_map[i]\n rounded_arr[index] = (\n rounded_arr[index] + 1 if should_increment else rounded_arr[index] - 1\n )\n return rounded_arr\n\n\nif __name__ == \"__main__\":\n print(round_arr([1.3, 2.3, 4.4]))\n print(round_arr([1.8, 2.8, 4.4]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n x log(n))\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "57018", "language": "Python", "matching_score": 1.027193307876587, "max_stars_count": 70, "path": "Solutions/355.py" }, { "content": "\"\"\"\nProblem:\n\nThe ancient Egyptians used to express fractions as a sum of several terms where each\nnumerator is one. For example, 4 / 13 can be represented as\n1 / 4 + 1 / 18 + 1 / 468.\n\nCreate an algorithm to turn an ordinary fraction a / b, where a < b, into an Egyptian\nfraction.\n\"\"\"\n\nfrom fractions import Fraction\nfrom math import ceil\nfrom typing import List, Optional\n\n\ndef get_egyptian_frac(\n fraction: Fraction, previous_fraction: Optional[List[Fraction]] = None\n) -> List[Fraction]:\n # a mutable default argument would be shared across calls and leak results\n # from one top-level call into the next, so the list is created here instead\n if previous_fraction is None:\n previous_fraction = []\n if fraction.numerator == 1:\n previous_fraction.append(fraction)\n return previous_fraction\n\n egyptian_fraction = Fraction(1, ceil(fraction.denominator / fraction.numerator))\n previous_fraction.append(egyptian_fraction)\n return get_egyptian_frac(fraction - egyptian_fraction, previous_fraction)\n\n\nif __name__ == \"__main__\":\n print(get_egyptian_frac(Fraction(4, 13)))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(log(n))\nSPACE COMPLEXITY: O(log(n))\n\"\"\"\n", "id": "3761139", "language": "Python", "matching_score": 0.8288083076477051, "max_stars_count": 70, "path": "Solutions/252.py" }, { "content": "\"\"\"\nProblem:\n\nThe \"look and say\" sequence is defined as follows: beginning with the term 1, each\nsubsequent term visually describes the digits appearing in the previous term. 
The first\nfew terms are as follows:\n\n1\n11\n21\n1211\n111221\nAs an example, the fourth term is 1211, since the third term consists of one 2 and one\n1.\n\nGiven an integer N, print the Nth term of this sequence.\n\"\"\"\n\nfrom functools import lru_cache\n\n\ndef generate_look_and_say_term(num: str) -> str:\n result = \"\"\n temp = \"\"\n for char in num[::-1]:\n if temp:\n if char == temp[0]:\n temp += char\n else:\n result = f\"{len(temp)}{temp[0]}\" + result\n temp = char\n else:\n temp = char\n result = f\"{len(temp)}{temp[0]}\" + result\n return result\n\n\n# the cache is unnecessary for small inputs, but for large values of n it\n# drastically speeds up repeated calls through memoization\n@lru_cache(maxsize=5)\ndef get_look_and_say_term(n: int) -> str:\n num = \"1\"\n for _ in range(n - 1):\n num = generate_look_and_say_term(num)\n return num\n\n\nif __name__ == \"__main__\":\n for i in range(1, 6):\n print(f\"{i}th term = {get_look_and_say_term(i)}\")\n\n\n\"\"\"\nSPECS:\n\n[n = number of terms, m = longest look and say term]\nTIME COMPLEXITY: O(n + m)\nSPACE COMPLEXITY: O(n + m)\n[with cache]\n\nTIME COMPLEXITY: O(n * m)\nSPACE COMPLEXITY: O(m)\n[without cache]\n\"\"\"\n", "id": "12819531", "language": "Python", "matching_score": 1.5084785223007202, "max_stars_count": 70, "path": "Solutions/275.py" }, { "content": "\"\"\"\nProblem:\n\nA Collatz sequence in mathematics can be defined as follows. Starting with any positive\ninteger:\n\nIf n is even, the next number in the sequence is n / 2\nIf n is odd, the next number in the sequence is 3n + 1.\nIt is conjectured that every\nsuch sequence eventually reaches the number 1. Test this conjecture.\n\nBonus: What input n <= 1000000 gives the longest sequence?\n\"\"\"\n\n\ndef get_collatz_sequence_length(num: int, acc: int = 0) -> int:\n if num == 1:\n return acc\n if num % 2 == 0:\n return get_collatz_sequence_length(num // 2, acc + 1)\n return get_collatz_sequence_length(3 * num + 1, acc + 1)\n\n\ndef get_longest_collatz_sequence_under_1000000() -> int:\n longest_sequence_value = 0\n longest_sequence = 0\n for i in range(1, 1_000_000):\n curr_sequence = get_collatz_sequence_length(i, 0)\n if curr_sequence > longest_sequence:\n longest_sequence = curr_sequence\n longest_sequence_value = i\n return longest_sequence_value\n\n\nif __name__ == \"__main__\":\n # NOTE: brute force implementation, it will take quite a bit of time to execute\n print(get_longest_collatz_sequence_under_1000000())\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(log(n))\nSPACE COMPLEXITY: O(log(n))\n\"\"\"\n", "id": "10475646", "language": "Python", "matching_score": 1.2794311046600342, "max_stars_count": 70, "path": "Solutions/210.py" }, { "content": "\"\"\"\nProblem:\n\nGiven an unsorted array of integers, find the length of the longest consecutive\nelements sequence.\n\nFor example, given [100, 4, 200, 1, 3, 2], the longest consecutive element sequence is\n[1, 2, 3, 4]. 
Return its length: 4.\n\nYour algorithm should run in O(n) complexity.\n\"\"\"\n\nfrom typing import List\n\n\ndef longest_consecutive_elements_sequence(arr: List[int]) -> int:\n length = len(arr)\n arr_elems_set = set(arr)\n longest_sequence = 0\n # generating the longest sequence length\n for i in range(length):\n if (arr[i] - 1) not in arr_elems_set:\n # current element is the starting element of a sequence\n j = arr[i]\n while j in arr_elems_set:\n j += 1\n # update longest sequence length\n longest_sequence = max(longest_sequence, j - arr[i])\n return longest_sequence\n\n\nif __name__ == \"__main__\":\n print(longest_consecutive_elements_sequence([100, 4, 200, 1]))\n print(longest_consecutive_elements_sequence([100, 4, 200, 1, 3]))\n print(longest_consecutive_elements_sequence([100, 4, 200, 2, 3]))\n print(longest_consecutive_elements_sequence([100, 4, 200, 1, 3, 2]))\n print(longest_consecutive_elements_sequence([100, 4, 200, 1, 3, 2, 5]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "7080838", "language": "Python", "matching_score": 1.5038753747940063, "max_stars_count": 70, "path": "Solutions/099.py" }, { "content": "\"\"\"\nProblem:\n\nGiven an array of integers, return a new array where each element in the new array is\nthe number of smaller elements to the right of that element in the original input array.\n\nFor example, given the array [3, 4, 9, 6, 1], return [1, 1, 2, 1, 0], since:\n\nThere is 1 smaller element to the right of 3\nThere is 1 smaller element to the right of 4\nThere are 2 smaller elements to the right of 9\nThere is 1 smaller element to the right of 6\nThere are no smaller elements to the right of 1\n\"\"\"\n\nfrom typing import List\n\n\ndef get_smaller_elements_arr(arr: List[int]) -> List[int]:\n smaller_elements_arr = []\n length = len(arr)\n\n for i in range(length):\n smaller_elements = 0\n for j in range(i + 1, length):\n if arr[i] > arr[j]:\n smaller_elements += 1\n smaller_elements_arr.append(smaller_elements)\n return smaller_elements_arr\n\n\nif __name__ == \"__main__\":\n print(get_smaller_elements_arr([3, 4, 9, 6, 1]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n ^ 2)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "6310325", "language": "Python", "matching_score": 0.8612232804298401, "max_stars_count": 70, "path": "Solutions/165.py" }, { "content": "\"\"\"\nProblem:\n\nGiven an array of integers, find the first missing positive integer in linear time and\nconstant space. In other words, find the lowest positive integer that does not exist in\nthe array. The array can contain duplicates and negative numbers as well.\n\nFor example, the input [3, 4, -1, 1] should give 2. 
The input [1, 2, 0] should give 3.\n\nYou can modify the input array in-place.\n\"\"\"\n\nfrom typing import List\n\n\ndef first_missing_positive_integer(arr: List[int]) -> int:\n # placing the positive elements (values from 1 to length) in their proper\n # position (proper position index = element - 1); once the placement is\n # complete, the index of the first element not at its proper position gives\n # the answer\n length = len(arr)\n for i in range(length):\n correctPos = arr[i] - 1\n while 1 <= arr[i] <= length and arr[i] != arr[correctPos]:\n arr[i], arr[correctPos] = arr[correctPos], arr[i]\n correctPos = arr[i] - 1\n # finding the first missing positive integer\n for i in range(length):\n if i + 1 != arr[i]:\n return i + 1\n return length + 1\n\n\nif __name__ == \"__main__\":\n print(first_missing_positive_integer([3, 4, 2, 1]))\n print(first_missing_positive_integer([3, 4, -1, 1]))\n print(first_missing_positive_integer([1, 2, 5]))\n print(first_missing_positive_integer([-1, -2]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(1)\n\nNOTE: Even though there is a nested loop it is an O(n) algorithm as the cap on the\n maximum iterations is 2 * n [Amortised analysis]\n\"\"\"\n", "id": "8252385", "language": "Python", "matching_score": 0.5171826481819153, "max_stars_count": 70, "path": "Solutions/004.py" }, { "content": "\"\"\"\nProblem:\n\nSnakes and Ladders is a game played on a 10 x 10 board, the goal of which is to get from square 1 to square 100. \nOn each turn players will roll a six-sided die and move forward a number of spaces equal to the result. \nIf they land on a square that represents a snake or ladder, they will be transported ahead or behind, respectively, to a new square.\nFind the smallest number of turns it takes to play snakes and ladders.\n\nFor convenience, here are the squares representing snakes and ladders, and their outcomes:\nsnakes = {16: 6, 48: 26, 49: 11, 56: 53, 62: 19, 64: 60, 87: 24, 93: 73, 95: 75, 98: 78}\nladders = {1: 38, 4: 14, 9: 31, 21: 42, 28: 84, 36: 44, 51: 67, 71: 91, 80: 100}\n\"\"\"\n\nfrom typing import Dict\n\n\ndef get_next_ladder_position(ladders: Dict[int, int], position: int) -> int:\n # helper function to get the position of the next ladder\n curr = 101\n for key in ladders:\n if key > position and key < curr:\n curr = key\n return curr\n\n\ndef get_next_position_with_no_snake(snakes: Dict[int, int], position: int) -> int:\n # helper function to get the position of the next move without landing on a snake\n curr = position + 6\n for _ in range(6):\n if curr not in snakes:\n break\n curr -= 1\n return curr\n\n\ndef play_snake_and_ladders(\n snakes: Dict[int, int], ladders: Dict[int, int], show_trace: bool = False\n) -> int:\n # function to return the minimum turns required to play the current board\n position = 0\n turns = 0\n while position < 100:\n turns += 1\n position = min(\n get_next_ladder_position(ladders, position),\n get_next_position_with_no_snake(snakes, position),\n 100,\n )\n if show_trace:\n print(position, end=\" \")\n if position in ladders:\n position = ladders[position]\n if show_trace:\n print(f\"=> {position}\", end=\" \")\n if position < 100 and show_trace:\n print(\"->\", end=\" \")\n if show_trace:\n print()\n return turns\n\n\nif __name__ == \"__main__\":\n snakes = {\n 16: 6,\n 48: 26,\n 49: 11,\n 56: 53,\n 62: 19,\n 64: 60,\n 87: 24,\n 93: 73,\n 95: 75,\n 98: 78,\n }\n ladders = {1: 38, 4: 14, 9: 31, 21: 42, 28: 84, 36: 44, 51: 67, 71: 91, 80: 100}\n\n print(play_snake_and_ladders(snakes, ladders, 
show_trace=True))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(1)\nSPACE COMPLEXITY: O(1)\n[the maximum number of squares is 100]\n\"\"\"\n", "id": "3663105", "language": "Python", "matching_score": 1.2394530773162842, "max_stars_count": 70, "path": "Solutions/229.py" }, { "content": "\"\"\"\nProblem:\n\nAlice wants to join her school's Probability Student Club. Membership dues are computed\nvia one of two simple probabilistic games.\n\nThe first game: roll a die repeatedly. Stop rolling once you get a five followed by a\nsix. Your number of rolls is the amount you pay, in dollars.\n\nThe second game: same, except that the stopping condition is a five followed by a five.\n\nWhich of the two games should Alice elect to play? Does it even matter? Write a program\nto simulate the two games and calculate their expected value.\n\"\"\"\n\nfrom random import randint\nfrom time import sleep\nfrom typing import Tuple\n\n\ndef roll_dice() -> int:\n return randint(1, 6)\n\n\ndef simulate_game(stopping_condition: Tuple[int, int], display: bool = False) -> int:\n last_throw, second_last_throw = 0, 0\n required_second_last_throw, required_last_throw = stopping_condition\n number_of_throws = 0\n # simulating the game\n while (\n last_throw != required_last_throw\n or second_last_throw != required_second_last_throw\n ):\n current_roll = roll_dice()\n second_last_throw, last_throw = last_throw, current_roll\n number_of_throws += 1\n if display:\n sleep(0.1)\n print(f\"On {number_of_throws}th throw, value: {current_roll}\")\n if display:\n sleep(0.1)\n print(f\"Total Throws: {number_of_throws}\\n\")\n return number_of_throws\n\n\nif __name__ == \"__main__\":\n print(\"Game 1 (5, 6):\")\n simulate_game((5, 6), True)\n print(\"Game 2 (5, 5):\")\n simulate_game((5, 5), True)\n\n g1 = 0\n g2 = 0\n for _ in range(10_000):\n g1 += simulate_game((5, 6))\n g2 += simulate_game((5, 5))\n print(\"Expectation of Game 1: {:.1f}\".format(g1 / 10_000))\n print(\"Expectation of Game 2: {:.1f}\".format(g2 / 10_000))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(1)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "7530434", "language": "Python", "matching_score": 1.78369140625, "max_stars_count": 70, "path": "Solutions/178.py" }, { "content": "\"\"\"\nProblem:\n\nWrite a function, throw_dice(N, faces, total), that determines how many ways it is\npossible to throw N dice with some number of faces each to get a specific total.\n\nFor example, throw_dice(3, 6, 7) should equal 15.\n\"\"\"\n\n\ndef throw_dice(N: int, faces: int, total: int, accumulator: int = 0) -> int:\n if N == 0 and total == 0:\n return accumulator + 1\n elif N == 0 or total <= 0:\n # no dice left to throw, or the target has already been overshot\n return accumulator\n # dfs to calculate the answer\n for i in range(1, faces + 1):\n accumulator = throw_dice(N - 1, faces, total - i, accumulator)\n return accumulator\n\n\nif __name__ == \"__main__\":\n print(throw_dice(3, 6, 7))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(faces ^ N)\nSPACE COMPLEXITY: O(N) [call stack included]\n\"\"\"\n", "id": "7066789", "language": "Python", "matching_score": 0.9438024759292603, "max_stars_count": 70, "path": "Solutions/272.py" }, { "content": "\"\"\"\nProblem:\n\nThere exists a staircase with N steps, and you can climb up either 1 or 2 steps at a\ntime. Given N, write a function that returns the number of unique ways you can climb\nthe staircase. 
The order of the steps matters.\n\nFor example, if N is 4, then there are 5 unique ways:\n\n1, 1, 1, 1\n2, 1, 1\n1, 2, 1\n1, 1, 2\n2, 2\n\nWhat if, instead of being able to climb 1 or 2 steps at a time, you could climb any\nnumber from a set of positive integers X? For example, if X = {1, 3, 5}, you could\nclimb 1, 3, or 5 steps at a time.\n\"\"\"\n\nfrom typing import List\n\n\ndef count_ways(steps: int, permissible_steps: List[int] = [1, 2]) -> int:\n # dynamic programming array to store the number of ways a step can be reached\n num_ways = [0 for _ in range(steps + 1)]\n # base case\n num_ways[0] = 1\n # filling the table with num_ways[pos] = sum(num_ways[pos - step])\n # for every step in permissible_steps with step <= pos\n for pos in range(steps + 1):\n for step in permissible_steps:\n temp_pos = pos - step\n if temp_pos >= 0:\n num_ways[pos] += num_ways[temp_pos]\n return num_ways[steps]\n\n\nif __name__ == \"__main__\":\n print(count_ways(4))\n print(count_ways(1, [1, 3, 5]))\n print(count_ways(2, [1, 3, 5]))\n print(count_ways(3, [1, 3, 5]))\n print(count_ways(4, [1, 3, 5]))\n print(count_ways(5, [1, 3, 5]))\n print(count_ways(6, [1, 3, 5]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n x k)\nSPACE COMPLEXITY: O(n)\n[n = number of steps, k = number of permissible step sizes]\n\"\"\"\n", "id": "2207397", "language": "Python", "matching_score": 0.9747273921966553, "max_stars_count": 70, "path": "Solutions/012.py" }, { "content": "\"\"\"\nProblem:\n\nYou are given a 2 x N board, and instructed to completely cover the board with the\nfollowing shapes:\n\nDominoes, or 2 x 1 rectangles.\nTrominoes, or L-shapes.\nFor example, if N = 4, here is one possible configuration, where A is a domino, and B\nand C are trominoes.\n\nA B B C\nA B C C\nGiven an integer N, determine in how many ways this task is possible.\n\"\"\"\n\n\ndef count_arrangements(N: int) -> int:\n dp = [0 for _ in range(max(3, N + 1))]\n # base cases\n dp[0] = 1 # no dominoes/trominoes placed\n dp[1] = 1\n dp[2] = 2\n # updating the lookup table\n for i in range(3, N + 1):\n dp[i] = 2 * dp[i - 1] + dp[i - 3]\n # returning the required value\n return dp[N]\n\n\nif __name__ == \"__main__\":\n print(count_arrangements(0))\n print(count_arrangements(1))\n print(count_arrangements(2))\n print(count_arrangements(3))\n print(count_arrangements(4))\n print(count_arrangements(5))\n", "id": "10046135", "language": "Python", "matching_score": 0.9166865348815918, "max_stars_count": 70, "path": "Solutions/312.py" }, { "content": "\"\"\"\nProblem:\n\nYou are given a string representing the initial conditions of some dominoes. Each\nelement can take one of three values:\n\nL, meaning the domino has just been pushed to the left,\nR, meaning the domino has just been pushed to the right, or\n., meaning the domino is standing still.\nDetermine the orientation of each tile when the dominoes stop falling. 
Note that if a\ndomino receives a force from the left and right side simultaneously, it will remain\nupright.\n\nFor example, given the string .L.R....L, you should return LL.RRRLLL.\n\nGiven the string ..R...L.L, you should return ..RR.LLLL.\n\"\"\"\n\nfrom typing import List\n\n\ndef get_config_helper(dominos: List[str], length: int) -> List[str]:\n has_been_updated = False\n updated_dominos = dominos.copy()\n for i in range(length):\n if (\n dominos[i] == \"L\"\n and i > 0\n and dominos[i - 1] == \".\"\n and ((i > 1 and dominos[i - 2] != \"R\") or i <= 1)\n ):\n updated_dominos[i - 1] = \"L\"\n has_been_updated = True\n elif (\n dominos[i] == \"R\"\n and i < length - 1\n and dominos[i + 1] == \".\"\n and ((i < length - 2 and dominos[i + 2] != \"L\") or i >= length - 2)\n ):\n updated_dominos[i + 1] = \"R\"\n has_been_updated = True\n if has_been_updated:\n return get_config_helper(updated_dominos, length)\n return dominos\n\n\ndef get_config(initial_state: str) -> str:\n dominoes = list(initial_state)\n return \"\".join(get_config_helper(dominoes, len(dominoes)))\n\n\nif __name__ == \"__main__\":\n print(get_config(\".L.R....L\"))\n print(get_config(\"..R...L.L\"))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n ^ 2)\nSPACE COMPLEXITY: O(n ^ 2)\n[each iteration takes O(n) in both time & space and there can be n such iterations]\n\"\"\"\n", "id": "4702404", "language": "Python", "matching_score": 0.7232294678688049, "max_stars_count": 70, "path": "Solutions/269.py" }, { "content": "\"\"\"\nProblem:\n\nWord sense disambiguation is the problem of determining which sense a word takes on in\na particular setting, if that word has multiple meanings. For example, in the sentence\n\"I went to get money from the bank\", bank probably means the place where people deposit\nmoney, not the land beside a river or lake.\n\nSuppose you are given a list of meanings for several words, formatted like so:\n\n{\n \"word_1\": [\"meaning one\", \"meaning two\", ...],\n ...\n \"word_n\": [\"meaning one\", \"meaning two\", ...]\n}\nGiven a sentence, most of whose words are contained in the meaning list above, create\nan algorithm that determines the likely sense of each possibly ambiguous word.\n\"\"\"\n\nfrom typing import Dict, List\n\n\ndef get_meaning(\n sentence: str, meaning_map: Dict[str, List[str]]\n) -> Dict[str, List[str]]:\n # this is NOT a fool-proof solution\n # the problem is incomplete\n words = sentence.split()\n words_set = set(words)\n # selecting the words with multiple meanings\n ambiguous_words = [\n word for word in words if word in meaning_map and len(meaning_map[word]) > 1\n ]\n possible_context_meaning_map = {}\n # generating the possible meaning of the words in the given context\n for word in ambiguous_words:\n for meaning in meaning_map[word]:\n for meaning_word in meaning.split():\n if meaning_word in words_set:\n if word not in possible_context_meaning_map:\n possible_context_meaning_map[word] = []\n possible_context_meaning_map[word].append(meaning)\n break\n return possible_context_meaning_map\n\n\nif __name__ == \"__main__\":\n sentence = \"I went to get money from the bank\"\n meaning_map = {\n \"bank\": [\"place where people deposit money\", \"land beside a river or lake\"],\n \"get\": [\"acquire something\"],\n \"money\": [\"medium of exchange\"],\n \"went\": [\"to go (past)\"],\n }\n print(get_meaning(sentence, meaning_map))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(sentence x meaning_map x meaning_words)\nSPACE COMPLEXITY: O(sentence x meaning_map)\n\"\"\"\n\n# NOTE: The problem 
is incomplete.\n# We also need a source for which word appears in which context, to be able to infer\n# this in the actual sentences.\n# Once we have a set of strongly correlated words with each word-sense, we can search\n# the context of a word in the target sentence.\n# If there is a high overlap of those words with the already correlated words for a\n# particular word sense, we can guess that that is the answer.\n", "id": "7568313", "language": "Python", "matching_score": 1.6088109016418457, "max_stars_count": 70, "path": "Solutions/351.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a dictionary of words and a string made up of those words (no spaces), return the\noriginal sentence in a list. If there is more than one possible reconstruction, return\nany of them. If there is no possible reconstruction, then return null.\n\nFor example, given the set of words 'quick', 'brown', 'the', 'fox', and the string\n\"thequickbrownfox\", you should return ['the', 'quick', 'brown', 'fox'].\n\nGiven the set of words 'bed', 'bath', 'bedbath', 'and', 'beyond', and the string\n\"bedbathandbeyond\", return either ['bed', 'bath', 'and', 'beyond'] or\n['bedbath', 'and', 'beyond'].\n\"\"\"\n\nfrom typing import List, Optional\n\n\ndef get_sentence_split(word_list: List[str], string: str) -> Optional[List[str]]:\n word_set = set()\n buffer = \"\"\n words_found = []\n # populating the set with the words for O(1) access\n for word in word_list:\n word_set.add(word)\n # searching for words in the string (greedily, shortest match first)\n for char in string:\n buffer += char\n if buffer in word_set:\n words_found.append(buffer)\n buffer = \"\"\n\n # if characters remain in the buffer, the string could not be fully\n # reconstructed from the dictionary words\n if buffer or not words_found:\n return None\n return words_found\n\n\nif __name__ == \"__main__\":\n print(get_sentence_split([\"quick\", \"brown\", \"the\", \"fox\"], \"thequickbrownfox\"))\n print(\n get_sentence_split(\n [\"bed\", \"bath\", \"bedbath\", \"and\", \"beyond\"], \"bedbathandbeyond\"\n )\n )\n print(get_sentence_split([\"quick\", \"brown\", \"the\", \"fox\"], \"bedbathandbeyond\"))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(characters_in_input_string)\nSPACE COMPLEXITY: O(words)\n\"\"\"\n", "id": "12841740", "language": "Python", "matching_score": 1.0140422582626343, "max_stars_count": 70, "path": "Solutions/022.py" }, { "content": "\"\"\"\nProblem:\n\nCreate a basic sentence checker that takes in a stream of characters and determines\nwhether they form valid sentences. 
If a sentence is valid, the program should print it\nout.\n\nWe can consider a sentence valid if it conforms to the following rules:\n\nThe sentence must start with a capital letter, followed by a lowercase letter or a\nspace.\nAll other characters must be lowercase letters, separators (,,;,:) or terminal marks\n(.,?,!,‽).\nThere must be a single space between each word.\nThe sentence must end with a terminal mark immediately following a word.\n\"\"\"\n\nTERMINALS = [\".\", \"?\", \"!\", \"‽\"]\nSEPARATORS = [\",\", \";\", \":\"]\n\n\ndef check_sentence(sentence: str) -> None:\n if len(sentence) < 2 or not sentence[0].isupper():\n return\n if not sentence[1].islower() and not sentence[1].isspace():\n return\n\n space_flag = False\n for char in sentence[1:-1]:\n if char.isspace():\n if space_flag:\n return\n space_flag = True\n continue\n space_flag = False\n if not char.islower() and char not in SEPARATORS:\n return\n\n if sentence[-1] in TERMINALS:\n print(sentence)\n\n\nif __name__ == \"__main__\":\n check_sentence(\"This, will, pass.\")\n check_sentence(\"ThiS Should fail.\")\n check_sentence(\"this Should fail Too.\")\n check_sentence(\"This too should fail\")\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n[n = number of characters in the string]\n\"\"\"\n", "id": "10883999", "language": "Python", "matching_score": 1.3696368932724, "max_stars_count": 70, "path": "Solutions/263.py" }, { "content": "\"\"\"\nProblem:\n\nGhost is a two-person word game where players alternate appending letters to a word.\nThe first person who spells out a word, or creates a prefix for which there is no\npossible continuation, loses. Here is a sample game:\n\nPlayer 1: g\nPlayer 2: h\nPlayer 1: o\nPlayer 2: s\nPlayer 1: t [loses]\nGiven a dictionary of words, determine the letters the first player should start with,\nsuch that with optimal play they cannot lose.\n\nFor example, if the dictionary is [\"cat\", \"calf\", \"dog\", \"bear\"], the only winning\nstart letter would be b.\n\"\"\"\n\n\nfrom typing import List, Set\n\n\ndef get_winning_letters(words: List[str]) -> Set[str]:\n # requirements for winning start letter:\n # - the length is even for all words starting with the character\n starting_char_freq = {}\n for word in words:\n if word[0] not in starting_char_freq:\n starting_char_freq[word[0]] = []\n starting_char_freq[word[0]].append(word)\n\n winning_start_letters = set()\n for starting_char in starting_char_freq:\n for word in starting_char_freq[starting_char]:\n if len(word) % 2 != 0:\n break\n else:\n winning_start_letters.add(starting_char)\n return winning_start_letters\n\n\nif __name__ == \"__main__\":\n print(get_winning_letters([\"cat\", \"calf\", \"dog\", \"bear\"]))\n print(get_winning_letters([\"something\", \"hi\", \"cat\", \"dog\", \"bear\", \"hola\"]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "2482870", "language": "Python", "matching_score": 1.3776750564575195, "max_stars_count": 70, "path": "Solutions/259.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a list of words, return the shortest unique prefix of each word. 
For example, given the list:\n\ndog\ncat\napple\napricot\nfish\n\nReturn the list:\n\nd\nc\napp\napr\nf\n\"\"\"\n\nfrom typing import Dict, List, Optional\n\n\ndef get_unique_prefix_for_string(\n dictionary: Dict[str, int], string: str, string_list: List[str]\n) -> Optional[str]:\n prefix = \"\"\n for char in string:\n prefix += char\n if prefix not in dictionary:\n return prefix\n # if a string with the current prefix exists, the prefix for the string is\n # updated\n prev_str_with_same_prefix = string_list[dictionary[prefix]]\n prev_prefix = prefix\n prev_str_index = dictionary[prefix]\n\n del dictionary[prefix]\n try:\n prev_prefix = prev_str_with_same_prefix[: len(prev_prefix) + 1]\n except:\n return\n dictionary[prev_prefix] = prev_str_index\n\n\ndef get_unique_prefix(string_list: List[str]) -> List[str]:\n dictionary = {}\n # generating the unique prefix\n for index, string in enumerate(string_list):\n prefix = get_unique_prefix_for_string(dictionary, string, string_list)\n if not prefix:\n raise ValueError(\"Unique Prefix Generation not possible\")\n dictionary[prefix] = index\n return list(dictionary.keys())\n\n\nif __name__ == \"__main__\":\n print(get_unique_prefix([\"dog\", \"cat\", \"apple\", \"apricot\", \"fish\"]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "5944781", "language": "Python", "matching_score": 0.9661814570426941, "max_stars_count": 70, "path": "Solutions/162.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a word W and a string S, find all starting indices in S which are anagrams of W.\n\nFor example, given that W is \"ab\", and S is \"abxaba\", return 0, 3, and 4.\n\"\"\"\n\nfrom typing import Dict, List\n\n\ndef get_char_frequency(string: str) -> Dict[str, int]:\n freq = {}\n for char in string:\n if char not in freq:\n freq[char] = 0\n freq[char] += 1\n return freq\n\n\ndef get_word_start_loc(word: str, string: str) -> List[int]:\n word_len = len(word)\n str_len = len(string)\n char_needed_master = get_char_frequency(word)\n char_needed = dict(char_needed_master)\n curr = 0\n starting_indices = []\n # if the word is longer than the string, no anagram is possible\n if (word_len > str_len) or (word_len == 0):\n return []\n # generating the starting indices\n while curr < str_len:\n for i in range(curr, str_len):\n if string[i] not in char_needed:\n curr = i\n char_needed = dict(char_needed_master)\n break\n elif string[i] in char_needed:\n char_needed[string[i]] -= 1\n if char_needed[string[i]] == 0:\n del char_needed[string[i]]\n if char_needed == {}:\n starting_indices.append(curr)\n curr = i - 1\n char_needed = dict(char_needed_master)\n break\n curr += 1\n return starting_indices\n\n\nif __name__ == \"__main__\":\n print(get_word_start_loc(\"ab\", \"abxaba\"))\n print(get_word_start_loc(\"tac\", \"cataract\"))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(len(word) x len(string))\nSPACE COMPLEXITY: O(len(word))\n\"\"\"\n", "id": "57846", "language": "Python", "matching_score": 1.616302728652954, "max_stars_count": 70, "path": "Solutions/111.py" }, { "content": "\"\"\"\nProblem:\n\nA step word is formed by taking a given word, adding a letter, and anagramming the\nresult. 
For example, starting with the word \"APPLE\", you can add an \"A\" and anagram\nto get \"APPEAL\".\n\nGiven a dictionary of words and an input word, create a function that returns all valid\nstep words.\n\"\"\"\n\nfrom typing import Dict, List\n\n\ndef get_character_count(string: str) -> Dict[str, int]:\n freq = {}\n for char in string:\n if char not in freq:\n freq[char] = 0\n freq[char] += 1\n return freq\n\n\ndef is_step_word(word1: str, word2: str) -> bool:\n freq1 = get_character_count(word1)\n freq2 = get_character_count(word2)\n for char in freq1:\n if char in freq2:\n freq2[char] -= freq1[char]\n if freq2[char] == 0:\n del freq2[char]\n else:\n return False\n # checking if word2 is a step word of word1\n if len(freq2) == 1:\n [char] = freq2.keys()\n return freq2[char] == 1\n return False\n\n\ndef get_step_words(word: str, dictionary: List[str]) -> List[str]:\n step_words = []\n for test_word in dictionary:\n if is_step_word(word, test_word):\n step_words.append(test_word)\n return step_words\n\n\nif __name__ == \"__main__\":\n print(get_step_words(\"APPLE\", [\"APPEAL\"]))\n print(get_step_words(\"APPLE\", [\"APPEAL\", \"APPLICT\"]))\n print(get_step_words(\"APPLE\", [\"APPEAL\", \"APPLICT\", \"APPLES\"]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n.words)\nSPACE COMPLEXITY: O(n)\n[n = length of the longest word]\n\"\"\"\n", "id": "8796157", "language": "Python", "matching_score": 1.2315679788589478, "max_stars_count": 70, "path": "Solutions/266.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a string with repeated characters, rearrange the string so that no two adjacent\ncharacters are the same. If this is not possible, return None.\n\nFor example, given \"aaabbc\", you could return \"ababac\". Given \"aaab\", return None.\n\"\"\"\n\nfrom typing import Optional\n\nfrom DataStructures.Queue import Queue\n\n\ndef get_unique_adjacent(string: str) -> Optional[str]:\n length = len(string)\n freq = {}\n if length == 0:\n return string\n\n for i in range(length):\n if string[i] not in freq:\n freq[string[i]] = 0\n freq[string[i]] += 1\n\n sorted_freq = sorted(freq.items(), key=lambda x: x[1], reverse=True)\n queue = Queue()\n [queue.enqueue(item) for item in sorted_freq]\n result = \"\"\n\n if length % 2 == 0 and sorted_freq[0][1] > length // 2:\n return None\n elif length % 2 == 1 and sorted_freq[0][1] > (length // 2) + 1:\n return None\n\n while not queue.is_empty():\n if len(queue) == 1:\n elem, freq = queue.peek()\n if freq == 2:\n result = elem + result + elem\n break\n elif freq == 1:\n if result[-1] != elem:\n result += elem\n else:\n result = elem + result\n break\n return None\n\n elem1, freq1 = queue.peek()\n elem2, freq2 = None, None\n if len(queue) > 1:\n elem2, freq2 = queue[1]\n\n result += elem1 + elem2\n\n queue[0] = elem1, freq1 - 1\n if len(queue) > 1:\n queue[1] = elem2, freq2 - 1\n if len(queue) > 1 and queue[1][1] == 0:\n queue.dequeue()\n if len(queue) > 0 and queue[0][1] == 0:\n queue.dequeue()\n return result\n\n\nif __name__ == \"__main__\":\n print(get_unique_adjacent(\"aaabbc\"))\n print(get_unique_adjacent(\"aaabbcc\"))\n print(get_unique_adjacent(\"aaabbac\"))\n print(get_unique_adjacent(\"aaab\"))\n print(get_unique_adjacent(\"aaabbaa\"))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "11097135", "language": "Python", "matching_score": 0.895561933517456, "max_stars_count": 70, "path": "Solutions/231.py" }, { "content": "\"\"\"\nProblem:\n\nCreate an algorithm to efficiently compute the approximate median of a list of 
numbers.\n\nMore precisely, given an unordered list of N numbers, find an element whose rank is\nbetween N / 4 and 3 * N / 4, with a high level of certainty, in less than O(N) time.\n\"\"\"\n\n# check out the following link for complexity analysis:\n# https://www.geeksforgeeks.org/randomized-algorithms-set-3-12-approximate-median/\n\nfrom math import log10\nfrom random import randint\nfrom typing import List\n\n\ndef get_approx_median(arr: List[int]) -> int:\n length = len(arr)\n elements = min(int(10 * log10(length)), length)\n unique_elems = set()\n # selecting random log(n) * 10 elements\n for _ in range(elements):\n unique_elems.add(arr[randint(0, length - 1)])\n # getting the median of the selected elements\n sorted_unique_elems = sorted(list(unique_elems))\n return sorted_unique_elems[len(sorted_unique_elems) // 2]\n\n\nif __name__ == \"__main__\":\n print(\n get_approx_median(\n [3, 4, 3, 2, 4, 3, 1, 4, 3, 4, 2, 3, 4, 3, 0, 4, 0, 0, 1, 1, 0, 1, 2]\n )\n )\n print(get_approx_median([1, 3, 2, 4, 5, 6, 8, 7]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(log(n) x log(log(n)))\nSPACE COMPLEXITY: O(log(n))\n\"\"\"\n", "id": "5943599", "language": "Python", "matching_score": 1.283560037612915, "max_stars_count": 70, "path": "Solutions/323.py" }, { "content": "\"\"\"\nProblem:\n\nGiven an array of integers where every integer occurs three times except for one\ninteger, which only occurs once, find and return the non-duplicated integer.\n\nFor example, given [6, 1, 3, 3, 3, 6, 6], return 1. Given [13, 19, 13, 13], return 19.\n\nDo this in O(N) time and O(1) space.\n\"\"\"\n\nfrom typing import List\n\n\ndef get_unique(arr: List[int]) -> int:\n # summing the bits at each position across all the numbers, taken modulo 3,\n # yields the corresponding bit of the unique number\n unique_elem = 0\n mask = 1\n # iterate through all the bits (considering a 64 bit integer)\n for _ in range(64):\n sum_i_pos_bits = 0\n # calculating the sum of the bits in the current position\n for elem in arr:\n if elem & mask != 0:\n sum_i_pos_bits = sum_i_pos_bits + 1\n # updating the unique element\n if sum_i_pos_bits % 3 == 1:\n unique_elem = unique_elem | mask\n # updating mask\n mask = mask << 1\n return unique_elem\n\n\nif __name__ == \"__main__\":\n arr = [3, 3, 2, 3]\n print(get_unique(arr))\n\n arr = [13, 19, 13, 13]\n print(get_unique(arr))\n\n arr = [6, 1, 3, 3, 3, 6, 6]\n print(get_unique(arr))\n\n arr = [12, 1, 3, 1, 1, 2, 3, 2, 2, 3]\n print(get_unique(arr))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "6813064", "language": "Python", "matching_score": 1.3603200912475586, "max_stars_count": 70, "path": "Solutions/040.py" }, { "content": "\"\"\"\nProblem:\n\nGiven an unsigned 8-bit integer, swap its even and odd bits. The 1st and 2nd bit should\nbe swapped, the 3rd and 4th bit should be swapped, and so on.\n\nFor example, 10101010 should be 01010101. 
11100010 should be 11010001.\n\nBonus: Can you do this in one line?\n\"\"\"\n\n\ndef swap_bits(num: int) -> int:\n # (left shift the bits at odd positions) BITWISE-OR (right shift the bits at\n # even positions)\n # NOTE: inlining the value of filter_mask into the expression reduces this to\n # a one-liner\n filter_mask = 85 # 0b01010101\n return ((num & filter_mask) << 1) | ((num & (filter_mask << 1)) >> 1)\n\n\nif __name__ == \"__main__\":\n print(\"Swapped:\", bin(swap_bits(0)))\n print(\"Swapped:\", bin(swap_bits(255)))\n print(\"Swapped:\", bin(swap_bits(210)))\n print(\"Swapped:\", bin(swap_bits(170)))\n print(\"Swapped:\", bin(swap_bits(226)))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(1) [a fixed number of bitwise operations on an 8-bit value]\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "10579803", "language": "Python", "matching_score": 1.2183865308761597, "max_stars_count": 70, "path": "Solutions/109.py" }, { "content": "\"\"\"\nProblem:\n\nGiven two strings A and B, return whether or not A can be shifted some number of times\nto get B.\n\nFor example, if A is abcde and B is cdeab, return true. If A is abc and B is acb,\nreturn false.\n\"\"\"\n\n\ndef can_shift(A: str, B: str) -> bool:\n return bool(A and B) and (len(A) == len(B)) and (B in A * 2)\n\n\nif __name__ == \"__main__\":\n print(can_shift(\"abcde\", \"cdeab\"))\n print(can_shift(\"abc\", \"acb\"))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "2693633", "language": "Python", "matching_score": 1.2773503065109253, "max_stars_count": 70, "path": "Solutions/108.py" }, { "content": "\"\"\"\nProblem:\n\nDetermine whether there exists a one-to-one character mapping from one string s1 to\nanother s2.\n\nFor example, given s1 = abc and s2 = bcd, return true since we can map a to b, b to c\nand c to d.\n\nGiven s1 = foo and s2 = bar, return false since the o cannot map to two characters.\n\"\"\"\n\n\ndef check(s1: str, s2: str) -> bool:\n l1, l2 = len(s1), len(s2)\n # strings of different lengths cannot have a character-by-character mapping\n if l1 != l2:\n return False\n # checking if each character in s1 maps to 1 character in s2\n d = {}\n for i in range(l1):\n if s1[i] in d and d[s1[i]] != s2[i]:\n return False\n d[s1[i]] = s2[i]\n # checking if each character in s2 maps to 1 character in s1\n d = {}\n for i in range(l2):\n if s2[i] in d and d[s2[i]] != s1[i]:\n return False\n d[s2[i]] = s1[i]\n return True\n\n\nif __name__ == \"__main__\":\n print(check(\"abc\", \"bcd\"))\n print(check(\"abc\", \"foo\"))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n[n = number of characters in the strings]\n\"\"\"\n", "id": "1956250", "language": "Python", "matching_score": 0.8391321301460266, "max_stars_count": 70, "path": "Solutions/176.py" }, { "content": "\"\"\"\nProblem:\n\nWrite a function to flatten a nested dictionary. Namespace the keys with a period.\n\nFor example, given the following dictionary:\n\n{\n \"key\": 3,\n \"foo\": {\n \"a\": 5,\n \"bar\": {\n \"baz\": 8\n }\n }\n}\n\nit should become:\n\n{\n \"key\": 3,\n \"foo.a\": 5,\n \"foo.bar.baz\": 8\n}\n\nYou can assume keys do not contain dots in them, i.e. 
no clobbering will occur.\n\"\"\"\n\nfrom typing import Any, Dict\n\n\ndef flatten_dictionary(dictionary: Dict[str, Any]) -> Dict[str, Any]:\n for key in list(dictionary.keys()):\n value = dictionary[key]\n if type(value) == dict:\n value = flatten_dictionary(value)\n del dictionary[key]\n for nested_dictionary_key in value:\n dictionary[f\"{key}.{nested_dictionary_key}\"] = value[\n nested_dictionary_key\n ]\n return dictionary\n\n\nif __name__ == \"__main__\":\n print(flatten_dictionary({\n \"key\": 3,\n \"foo\": {\n \"a\": 5,\n \"bar\": {\n \"baz\": 8\n }\n }\n }))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(number of key-value pairs)\nSPACE COMPLEXITY: O(levels of nesting)\n\"\"\"\n", "id": "11513849", "language": "Python", "matching_score": 0.949627697467804, "max_stars_count": 70, "path": "Solutions/173.py" }, { "content": "from flask import Flask, jsonify, request\nfrom flask_cors import CORS\n\nfrom dh import DiffieHellman\n\napp = Flask(__name__)\ncors = CORS(app)\n\n\n@app.route(\"/generate-keys\", methods=[\"GET\"])\ndef generate_keys():\n dh = DiffieHellman()\n private_key, public_key = dh.get_private_key(), dh.gen_public_key()\n return jsonify({\"private_key\": private_key, \"public_key\": public_key,})\n\n\n@app.route(\"/generate-shared-key\", methods=[\"GET\"])\ndef generate_shared_key():\n try:\n local_private_key = request.args.get(\"local_private_key\")\n remote_public_key = request.args.get(\"remote_public_key\")\n except:\n return jsonify({\"message\": \"Invalid parameters\"}), 400\n try:\n shared_key = DiffieHellman.gen_shared_key_static(\n local_private_key, remote_public_key\n )\n except:\n return jsonify({\"message\": \"Invalid public key\"}), 400\n return jsonify({\"shared_key\": shared_key})\n\n\nif __name__ == \"__main__\":\n app.run()\n", "id": "490707", "language": "Python", "matching_score": 0.6614053845405579, "max_stars_count": 61, "path": "e2ee_key_gen_server/app.py" }, { "content": "\"\"\"\nProblem:\n\nA competitive runner would like to create a route that starts and ends at his house,\nwith the condition that the route goes entirely uphill at first, and then entirely\ndownhill.\n\nGiven a dictionary of places of the form {location: elevation}, and a dictionary\nmapping paths between some of these locations to their corresponding distances, find\nthe length of the shortest route satisfying the condition above. 
Assume the runner's\nhome is location 0.\n\nFor example, suppose you are given the following input:\n\nelevations = {0: 5, 1: 25, 2: 15, 3: 20, 4: 10}\npaths = {\n (0, 1): 10,\n (0, 2): 8,\n (0, 3): 15,\n (1, 3): 12,\n (2, 4): 10,\n (3, 4): 5,\n (3, 0): 17,\n (4, 0): 10\n}\nIn this case, the shortest valid path would be 0 -> 2 -> 4 -> 0, with a distance of 28.\n\"\"\"\n\nfrom sys import maxsize\nfrom typing import Dict, List, Set, Tuple\n\n\ndef floyd_warshall(graph: List[List[int]]) -> List[List[int]]:\n dist = [[elem for elem in row] for row in graph]\n nodes = len(graph)\n for i in range(nodes):\n for j in range(nodes):\n for k in range(nodes):\n if (\n dist[i][j] < maxsize\n and dist[i][k] < maxsize\n and dist[k][j] < maxsize\n ):\n dist[i][j] = min(dist[i][j], dist[i][k] + dist[k][j])\n return dist\n\n\ndef generate_graph_adjacency_matrix(\n paths: Dict[Tuple[int, int], int], nodes: int\n) -> List[List[int]]:\n graph = [[maxsize for _ in range(nodes)] for _ in range(nodes)]\n for src, dest in paths:\n graph[src][dest] = paths[src, dest]\n return graph\n\n\ndef get_route_dfs_helper(\n curr_pos: int,\n target: int,\n acc_weight: int,\n visited: Set[int],\n graph: List[List[int]],\n flag: bool = False,\n) -> int:\n # flag is used to bypass returning weight of 0 when we start (curr_pos and target\n # is 0)\n if curr_pos == target and flag:\n return acc_weight\n visited.add(curr_pos)\n distance = maxsize\n for neighbour, weight in enumerate(graph[curr_pos]):\n # a visited intermediate node must not be re-entered (otherwise the\n # recursion loops forever); only the target may be revisited, to close\n # the cycle\n if weight < maxsize and (neighbour == target or neighbour not in visited):\n distance = min(\n distance,\n get_route_dfs_helper(\n neighbour, target, acc_weight + weight, visited.copy(), graph, True\n ),\n )\n return distance\n\n\ndef get_route(elevations: Dict[int, int], paths: Dict[Tuple[int, int], int]) -> int:\n graph = generate_graph_adjacency_matrix(paths, len(elevations))\n dist = floyd_warshall(graph)\n return get_route_dfs_helper(0, 0, 0, set(), dist)\n\n\nif __name__ == \"__main__\":\n elevations = {0: 5, 1: 25, 2: 15, 3: 20, 4: 10}\n paths = {\n (0, 1): 10,\n (0, 2): 8,\n (0, 3): 15,\n (1, 3): 12,\n (2, 4): 10,\n (3, 4): 5,\n (3, 0): 17,\n (4, 0): 10,\n }\n print(get_route(elevations, paths))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(v x e)\nSPACE COMPLEXITY: O(v ^ 2)\n\"\"\"\n", "id": "4370207", "language": "Python", "matching_score": 1.2251776456832886, "max_stars_count": 70, "path": "Solutions/294.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a 2-D matrix representing an image, a location of a pixel in the screen and a\ncolor C, replace the color of the given pixel and all adjacent same colored pixels\nwith C.\n\nFor example, given the following matrix, and location pixel of (2, 2), and 'G' for\ngreen:\n\nB B W\nW W W\nW W W\nB B B\nBecomes\n\nB B G\nG G G\nG G G\nB B B\n\"\"\"\n\nfrom typing import List, Set, Tuple\nfrom numpy import array\n\nMatrix = List[List[int]]\nPosition = Tuple[int, int]\n\n\ndef generate_neighbours(position: Position, rows: int, cols: int) -> List[Position]:\n i, j = position\n valid_neighbours = []\n neighbours = [\n (i - 1, j - 1),\n (i - 1, j),\n (i - 1, j + 1),\n (i, j + 1),\n (i + 1, j + 1),\n (i + 1, j),\n (i + 1, j - 1),\n (i, j - 1),\n ]\n for neighbour in neighbours:\n y, x = neighbour\n if (0 <= x < cols) and (0 <= y < rows):\n valid_neighbours.append(neighbour)\n return valid_neighbours\n\n\ndef update_color_dfs_helper(\n matrix: Matrix,\n position: Position,\n new_color: str,\n prev_color: str,\n visited: Set[Position],\n rows: int,\n cols: int,\n) -> None:\n i, j = position\n matrix[i][j] = new_color\n visited.add(position)\n\n 
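# recursively recolor every unvisited neighbouring cell that still has the\n # original color (depth-first flood fill over the 8-connected neighbourhood)\n 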
neighbours = generate_neighbours(position, rows, cols)\n for neighbour in neighbours:\n y, x = neighbour\n if neighbour not in visited and matrix[y][x] == prev_color:\n update_color_dfs_helper(\n matrix, neighbour, new_color, prev_color, visited, rows, cols\n )\n\n\ndef update_color(matrix: Matrix, position: Position, new_color: str) -> Matrix:\n rows = len(matrix)\n cols = len(matrix[0])\n i, j = position\n\n update_color_dfs_helper(\n matrix, position, new_color, matrix[i][j], set(), rows, cols\n )\n return matrix\n\n\nif __name__ == \"__main__\":\n print(\"Initial Matrix:\")\n matrix = [\n [\"B\", \"B\", \"W\"],\n [\"W\", \"W\", \"W\"],\n [\"W\", \"W\", \"W\"],\n [\"B\", \"B\", \"B\"]\n ]\n print(array(matrix))\n print(\"Updated Matrix:\")\n print(array(update_color(matrix, (2, 2), \"G\")))\n print()\n\n print(\"Initial Matrix:\")\n matrix = [\n [\"B\", \"B\", \"W\"],\n [\"W\", \"W\", \"W\"],\n [\"W\", \"W\", \"W\"],\n [\"B\", \"B\", \"B\"]\n ]\n print(array(matrix))\n print(\"Updated Matrix:\")\n print(array(update_color(matrix, (3, 2), \"G\")))\n print()\n\n print(\"Initial Matrix:\")\n matrix = [\n [\"B\", \"B\", \"W\"],\n [\"W\", \"W\", \"W\"],\n [\"W\", \"W\", \"W\"],\n [\"B\", \"B\", \"B\"]\n ]\n print(array(matrix))\n print(\"Updated Matrix:\")\n print(array(update_color(matrix, (0, 0), \"G\")))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n x m)\nSPACE COMPLEXITY: O(n x m)\n\"\"\"\n", "id": "11675351", "language": "Python", "matching_score": 1.7831480503082275, "max_stars_count": 70, "path": "Solutions/151.py" }, { "content": "\"\"\"\nProblem:\n\nYou are given an M by N matrix consisting of booleans that represents a board. Each\nTrue boolean represents a wall. Each False boolean represents a tile you can walk on.\n\nGiven this matrix, a start coordinate, and an end coordinate, return the minimum number\nof steps required to reach the end coordinate from the start. If there is no possible\npath, then return null. You can move up, left, down, and right. You cannot move through\nwalls. 
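A note on the flood-fill solution above (Solutions/151.py): the recursive helper can exceed Python's recursion limit on large images. A minimal iterative sketch of the same 8-neighbour fill with an explicit stack (the function name is illustrative, not part of the original file):

def update_color_iterative(matrix, position, new_color):
    rows, cols = len(matrix), len(matrix[0])
    prev_color = matrix[position[0]][position[1]]
    if prev_color == new_color:
        return matrix
    stack = [position]
    while stack:
        y, x = stack.pop()
        if 0 <= y < rows and 0 <= x < cols and matrix[y][x] == prev_color:
            matrix[y][x] = new_color
            # push all 8 neighbours (and the cell itself, which is harmlessly
            # skipped on the next pop since it is already recoloured)
            stack.extend((y + dy, x + dx) for dy in (-1, 0, 1) for dx in (-1, 0, 1))
    return matrix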
You cannot wrap around the edges of the board.\n\nFor example, given the following board:\n\n[[f, f, f, f],\n [t, t, f, t],\n [f, f, f, f],\n [f, f, f, f]]\nand start = (3, 0) (bottom left) and end = (0, 0) (top left), the minimum number of\nsteps required to reach the end is 7, since we would need to go through (1, 2) because\nthere is a wall everywhere else on the second row.\n\"\"\"\n\nfrom numpy import array\nfrom sys import maxsize\nfrom typing import List, Optional, Tuple, Union\n\n\nMatrix_str = List[List[str]]\nMatrix = List[List[Union[int, str]]]\n\n\ndef get_neighbours(pos: Tuple[int, int], n: int, m: int) -> List[Tuple[int, int]]:\n i, j = pos\n neighbours = [\n (i - 1, j),\n (i + 1, j),\n (i, j + 1),\n (i, j - 1),\n ]\n valid_neighbours = []\n for neighbour in neighbours:\n y, x = neighbour\n if 0 <= y < n and 0 <= x < m:\n valid_neighbours.append(neighbour)\n return valid_neighbours\n\n\ndef transform_matrix(matrix: Matrix_str, n: int, m: int) -> None:\n # helper function to transform Matrix_str to Matrix type\n for i in range(n):\n for j in range(m):\n if matrix[i][j] == \"f\":\n matrix[i][j] = 0\n\n\ndef get_min_steps_helper(matrix: Matrix, pos: Tuple[int, int], n: int, m: int) -> None:\n # helper function to calculate the distance of position from the source\n i, j = pos\n unexplored_positions = []\n neighbours = get_neighbours(pos, n, m)\n # calculate the distance for the neighbours\n for neighbour in neighbours:\n y, x = neighbour\n if matrix[y][x] != \"t\":\n if matrix[y][x] != 0:\n curr_value = matrix[y][x]\n else:\n curr_value = maxsize\n unexplored_positions.append(neighbour)\n matrix[y][x] = min(curr_value, matrix[i][j] + 1)\n # exploring unexplored positions\n for position in unexplored_positions:\n get_min_steps_helper(matrix, position, n, m)\n\n\ndef get_min_steps(\n matrix: Matrix_str, start: Tuple[int, int], end: Tuple[int, int]\n) -> Optional[int]:\n n = len(matrix)\n m = len(matrix[0])\n transform_matrix(matrix, n, m)\n # offsetting start by 1 (as 0 represents unvisited positions)\n i, j = start\n matrix[i][j] = 1\n # calculating the distance for each position from the start\n neighbours = get_neighbours(start, n, m)\n # updating the value of neighbours (hard-coded to 2 as the starting position value\n # is 1)\n for neighbour in neighbours:\n y, x = neighbour\n if matrix[y][x] == 0:\n matrix[y][x] = 2\n # using helper to calculate the distance for the rest of the matrix\n for neighbour in neighbours:\n y, x = neighbour\n if matrix[y][x] == 2:\n get_min_steps_helper(matrix, neighbour, n, m)\n # matrix[y][x] - 1 is returned as initially the value was offset by +1\n y, x = end\n if matrix[y][x] == \"t\" or matrix[y][x] == 0:\n return None\n return matrix[y][x] - 1\n\n\nif __name__ == \"__main__\":\n mat = [\n [\"f\", \"f\", \"f\", \"f\"],\n [\"t\", \"t\", \"f\", \"t\"],\n [\"f\", \"f\", \"f\", \"f\"],\n [\"f\", \"f\", \"f\", \"f\"],\n ]\n start = (3, 0)\n end = (0, 0)\n\n print(array(mat))\n print(get_min_steps(mat, start, end))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n x m)\nSPACE COMPLEXITY: O(n x m) [unexplored_positions can have at most 2 entries (repeated\nn x m times)]\n\"\"\"\n", "id": "2827885", "language": "Python", "matching_score": 2.0545554161071777, "max_stars_count": 70, "path": "Solutions/023.py" }, { "content": "\"\"\"\nProblem:\n\nA wall consists of several rows of bricks of various integer lengths and uniform\nheight. Your goal is to find a vertical line going from the top to the bottom of the\nwall that cuts through the fewest number of bricks. 
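For the board problem above (Solutions/023.py), a plain breadth-first search reaches the same O(n x m) bound without the recursive relaxation. A minimal sketch, assuming the board is a matrix of booleans (True = wall) as in the problem statement, with an illustrative function name:

from collections import deque

def bfs_min_steps(board, start, end):
    n, m = len(board), len(board[0])
    queue = deque([(start, 0)])
    seen = {start}
    while queue:
        (i, j), dist = queue.popleft()
        if (i, j) == end:
            return dist
        for y, x in ((i - 1, j), (i + 1, j), (i, j - 1), (i, j + 1)):
            if 0 <= y < n and 0 <= x < m and not board[y][x] and (y, x) not in seen:
                seen.add((y, x))
                queue.append(((y, x), dist + 1))
    return None  # no path exists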
If the line goes through the edge\nbetween two bricks, this does not count as a cut.\n\nFor example, suppose the input is as follows, where values in each row represent the\nlengths of bricks in that row:\n\n[[3, 5, 1, 1],\n [2, 3, 3, 2],\n [5, 5],\n [4, 4, 2],\n [1, 3, 3, 3],\n [1, 1, 6, 1, 1]]\nThe best we can do here is to draw a line after the eighth brick, which will only\nrequire cutting through the bricks in the third and fifth row.\n\nGiven an input consisting of brick lengths for each row such as the one above, return\nthe fewest number of bricks that must be cut to create a vertical line.\n\"\"\"\n\nfrom typing import List\n\n\ndef get_min_cut_position(wall: List[List[int]]) -> int:\n rows = len(wall)\n if rows == 1:\n cols = len(wall[0])\n if cols > 1:\n return wall[0][0]\n return wall[0][0] - 1\n # generating a hash map containing the positions with no bricks at any row\n no_brick_location = {}\n for i in range(rows):\n curr = 0\n for j in range(len(wall[i]) - 1):\n curr += wall[i][j]\n if curr not in no_brick_location:\n no_brick_location[curr] = 0\n no_brick_location[curr] += 1\n # the position crossed by the fewest bricks is returned\n if no_brick_location:\n key, _ = max(no_brick_location.items(), key=lambda x: x[1])\n return key\n # if all the rows contain 1 brick it is cut from the length - 1 position\n return wall[0][0] - 1\n\n\nif __name__ == \"__main__\":\n wall = [\n [3, 5, 1, 1],\n [2, 3, 3, 2],\n [5, 5],\n [4, 4, 2],\n [1, 3, 3, 3],\n [1, 1, 6, 1, 1],\n ]\n print(get_min_cut_position(wall))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n[n = number of elements in the matrix]\n\"\"\"\n", "id": "2097396", "language": "Python", "matching_score": 2.0125443935394287, "max_stars_count": 70, "path": "Solutions/281.py" }, { "content": "\"\"\"\nProblem:\n\nIn linear algebra, a Toeplitz matrix is one in which the elements on any given diagonal\nfrom top left to bottom right are identical.\n\nHere is an example:\n\n1 2 3 4 8\n5 1 2 3 4\n4 5 1 2 3\n7 4 5 1 2\nWrite a program to determine whether a given input is a Toeplitz matrix.\n\"\"\"\n\nfrom typing import List\n\n\ndef is_toeplitz_matrix(matrix: List[List[int]]) -> bool:\n n = len(matrix)\n m = len(matrix[0])\n # checking the diagonals starting from the left edge\n for i in range(n):\n val = matrix[i][0]\n for row, col in zip(range(i, n), range(m)):\n if matrix[row][col] != val:\n return False\n # checking the diagonals starting from the top edge\n for i in range(1, m):\n val = matrix[0][i]\n for row, col in zip(range(n), range(i, m)):\n if matrix[row][col] != val:\n return False\n return True\n\n\nif __name__ == \"__main__\":\n print(\n is_toeplitz_matrix(\n [\n [1, 2, 3, 4, 8],\n [5, 1, 2, 3, 4],\n [4, 5, 1, 2, 3],\n [7, 4, 5, 1, 2]\n ]\n )\n )\n\n print(\n is_toeplitz_matrix(\n [\n [1, 2, 3, 4, 8],\n [5, 1, 2, 3, 4],\n [4, 5, 1, 2, 3],\n [7, 4, 5, 1, 1]\n ]\n )\n )\n\n\n\"\"\"\nSPECS:\nTIME COMPLEXITY: O(n x m)\nSPACE COMPLEXITY: O(1) [as zip and range are both lazy iterators]\n[n = rows, m = columns]\n\"\"\"\n", "id": "9724221", "language": "Python", "matching_score": 0.8520461320877075, "max_stars_count": 70, "path": "Solutions/315.py" }, { "content": "\"\"\"\nProblem:\n\nOn our special chessboard, two bishops attack each other if they share the same\ndiagonal. 
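The Toeplitz check above (Solutions/315.py) also admits a common single-pass formulation: a matrix is Toeplitz exactly when every element equals its upper-left neighbour. A sketch:

def is_toeplitz_simple(matrix):
    # each cell must match the cell one step up and to the left
    return all(
        matrix[i][j] == matrix[i - 1][j - 1]
        for i in range(1, len(matrix))
        for j in range(1, len(matrix[0]))
    )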
This includes bishops that have another bishop located between them, i.e.\nbishops can attack through pieces.\n\nYou are given N bishops, represented as (row, column) tuples on a M by M chessboard.\nWrite a function to count the number of pairs of bishops that attack each other. The\nordering of the pair doesn't matter: (1, 2) is considered the same as (2, 1).\n\nFor example, given M = 5 and the list of bishops:\n\n(0, 0)\n(1, 2)\n(2, 2)\n(4, 0)\nThe board would look like this:\n\n[b 0 0 0 0]\n[0 0 b 0 0]\n[0 0 b 0 0]\n[0 0 0 0 0]\n[b 0 0 0 0]\nYou should return 2, since bishops 1 and 3 attack each other, as well as bishops 3 and\n4.\n\"\"\"\n\nfrom typing import List, Set, Tuple\n\nPosition = Tuple[int, int]\n\n\ndef generate_diagonals(position: Position, board_size: int) -> Set[Position]:\n row, col = position\n diagonals = set()\n # upper left diagonal\n for i, j in zip(range(row - 1, -1, -1), range(col - 1, -1, -1)):\n diagonals.add((i, j))\n # lower left diagonal\n for i, j in zip(range(row + 1, board_size), range(col - 1, -1, -1)):\n diagonals.add((i, j))\n # upper right diagonal\n for i, j in zip(range(row - 1, -1, -1), range(col + 1, board_size)):\n diagonals.add((i, j))\n # lower right diagonal\n for i, j in zip(range(row + 1, board_size), range(col + 1, board_size)):\n diagonals.add((i, j))\n return diagonals\n\n\ndef get_number_of_attacking_bishops(board_size: int, positions: List[Position]) -> int:\n count = 0\n for index, position in enumerate(positions):\n diagonals = generate_diagonals(position, board_size)\n for position in positions[index:]:\n if position in diagonals:\n count += 1\n return count\n\n\nif __name__ == \"__main__\":\n positions = [(0, 0), (1, 2), (2, 2), (4, 0)]\n print(get_number_of_attacking_bishops(5, positions))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n ^ 2)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "8235988", "language": "Python", "matching_score": 1.6332470178604126, "max_stars_count": 70, "path": "Solutions/068.py" }, { "content": "\"\"\"\nProblem:\n\nYou are presented with an 8 by 8 matrix representing the positions of pieces on a chess\nboard. The only pieces on the board are the black king and various white pieces. 
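The bishop solution above (Solutions/068.py) is quadratic in the number of bishops. Since two squares share a diagonal exactly when row - col or row + col agree, grouping by those keys counts the pairs in linear time; a sketch (the helper name is illustrative, not part of the original file):

from collections import Counter

def count_attacking_pairs(positions):
    pairs = 0
    for key in (lambda r, c: r - c, lambda r, c: r + c):
        # bishops on a shared diagonal pair up combinatorially: C(count, 2)
        for count in Counter(key(r, c) for r, c in positions).values():
            pairs += count * (count - 1) // 2
    return pairs

For the example board in the problem this returns 2, matching the quadratic version.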
Given\nthis matrix, determine whether the king is in check.\n\nFor details on how each piece moves, see here.\n\nFor example, given the following matrix:\n\n...K....\n........\n.B......\n......P.\n.......R\n..N.....\n........\n.....Q..\nYou should return True, since the bishop is attacking the king diagonally.\n\"\"\"\n\nfrom typing import List\n\n\ndef is_attacking(board: List[List[str]]) -> bool:\n for i in range(8):\n for j in range(8):\n # case pawn (guard the board edges to avoid wrapping or an index error)\n if board[i][j] == \"P\":\n if i > 0 and j > 0 and board[i - 1][j - 1] == \"K\":\n return True\n if i > 0 and j < 7 and board[i - 1][j + 1] == \"K\":\n return True\n # case rook\n elif board[i][j] == \"R\":\n for k in range(8):\n if board[i][k] == \"K\" or board[k][j] == \"K\":\n return True\n # case knight\n elif board[i][j] == \"N\":\n moves = [\n (i + 2, j + 1),\n (i + 2, j - 1),\n (i - 2, j + 1),\n (i - 2, j - 1),\n (i + 1, j + 2),\n (i + 1, j - 2),\n (i - 1, j + 2),\n (i - 1, j - 2),\n ]\n for y, x in moves:\n if 0 <= y < 8 and 0 <= x < 8 and board[y][x] == \"K\":\n return True\n # case bishop\n elif board[i][j] == \"B\":\n for y, x in zip(range(i, -1, -1), range(j, -1, -1)):\n if board[y][x] == \"K\":\n return True\n for y, x in zip(range(i, 8), range(j, -1, -1)):\n if board[y][x] == \"K\":\n return True\n for y, x in zip(range(i, 8), range(j, 8)):\n if board[y][x] == \"K\":\n return True\n for y, x in zip(range(i, -1, -1), range(j, 8)):\n if board[y][x] == \"K\":\n return True\n # case queen\n elif board[i][j] == \"Q\":\n for y, x in zip(range(i, -1, -1), range(j, -1, -1)):\n if board[y][x] == \"K\":\n return True\n for y, x in zip(range(i, 8), range(j, -1, -1)):\n if board[y][x] == \"K\":\n return True\n for y, x in zip(range(i, 8), range(j, 8)):\n if board[y][x] == \"K\":\n return True\n for y, x in zip(range(i, -1, -1), range(j, 8)):\n if board[y][x] == \"K\":\n return True\n for k in range(8):\n if board[i][k] == \"K\" or board[k][j] == \"K\":\n return True\n return False\n\n\nif __name__ == \"__main__\":\n print(\n is_attacking(\n [\n [\".\", \".\", \".\", \"K\", \".\", \".\", \".\", \".\"],\n [\".\", \".\", \".\", \".\", \".\", \".\", \".\", \".\"],\n [\".\", \"B\", \".\", \".\", \".\", \".\", \".\", \".\"],\n [\".\", \".\", \".\", \".\", \".\", \".\", \"P\", \".\"],\n [\".\", \".\", \".\", \".\", \".\", \".\", \".\", \"R\"],\n [\".\", \".\", \"N\", \".\", \".\", \".\", \".\", \".\"],\n [\".\", \".\", \".\", \".\", \".\", \".\", \".\", \".\"],\n [\".\", \".\", \".\", \".\", \".\", \"Q\", \".\", \".\"],\n ]\n )\n )\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(1)\n[n = number of pieces on the board (as the board is of dimension 8 x 8, all checks\nfor if a piece is attacking the king take O(1) time)]\n\"\"\"\n", "id": "9544677", "language": "Python", "matching_score": 1.0719414949417114, "max_stars_count": 70, "path": "Solutions/267.py" }, { "content": "\"\"\"\nProblem:\n\nYou have an N by N board. Write a function that, given N, returns the number of\npossible arrangements of the board where N queens can be placed on the board without\nthreatening each other, i.e. 
no two queens share the same row, column, or diagonal.\n\"\"\"\n\nfrom typing import List\n\n\ndef n_queens(n: int, queen_positions: List[int] = []) -> int:\n # N Queen solution using backtracking\n if n == len(queen_positions):\n return 1\n\n count = 0\n for col in range(n):\n queen_positions.append(col)\n if is_valid(queen_positions):\n count += n_queens(n, queen_positions)\n queen_positions.pop()\n return count\n\n\ndef is_valid(queen_positions: List[int]) -> bool:\n # check to see if any queen is threatening the current queen\n current_queen_row, current_queen_col = len(queen_positions) - 1, queen_positions[-1]\n for row, col in enumerate(queen_positions[:-1]):\n diff = abs(current_queen_col - col)\n if (\n diff == 0 # same column\n or diff == current_queen_row - row # same diagonal\n ):\n return False\n return True\n\n\nif __name__ == \"__main__\":\n print(n_queens(1))\n print(n_queens(4))\n print(n_queens(5))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(2 ^ n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "11798926", "language": "Python", "matching_score": 1.3011175394058228, "max_stars_count": 70, "path": "Solutions/038.py" }, { "content": "\"\"\"\nProblem:\n\nSudoku is a puzzle where you're given a partially-filled 9 by 9 grid with digits. The\nobjective is to fill the grid with the constraint that every row, column, and box\n(3 by 3 subgrid) must contain all of the digits from 1 to 9.\n\nImplement an efficient sudoku solver.\n\"\"\"\n\nfrom typing import List, Optional, Tuple\n\nBoard = List[List[int]]\nPosition = Tuple[int, int]\n\n\ndef print_board(board: Board) -> None:\n for i in range(len(board)):\n if i % 3 == 0 and i != 0:\n print(\"- - - - - - - - - - - -\")\n for j in range(len(board[0])):\n if j % 3 == 0 and j != 0:\n print(\" | \", end=\"\")\n if board[i][j] != 0:\n print(board[i][j], end=\" \")\n else:\n print(\".\", end=\" \")\n print()\n\n\ndef find_empty_position(board: Board) -> Optional[Position]:\n for i in range(len(board)):\n for j in range(len(board[0])):\n if board[i][j] == 0:\n return (i, j)\n return None\n\n\ndef check_conflict(board: Board, position: Position, num: int) -> bool:\n y, x = position\n # row check\n for i in range(len(board[0])):\n if board[y][i] == num:\n return False\n # column check\n for i in range(len(board)):\n if board[i][x] == num:\n return False\n # sub-section check\n sec_row = y // 3\n sec_col = x // 3\n for i in range((sec_row * 3), (sec_row * 3) + 3):\n for j in range((sec_col * 3), (sec_col * 3) + 3):\n if board[i][j] == num:\n return False\n return True\n\n\ndef sudoku_solver(board: Board) -> bool:\n position = find_empty_position(board)\n if not position:\n return True\n y, x = position\n # solving the board using backtracking\n for num in range(1, 10):\n if check_conflict(board, position, num):\n board[y][x] = num\n if sudoku_solver(board):\n return True\n board[y][x] = 0\n return False\n\n\nif __name__ == \"__main__\":\n board = [\n [7, 8, 0, 4, 0, 0, 1, 2, 0],\n [6, 0, 0, 0, 7, 5, 0, 0, 9],\n [0, 0, 0, 6, 0, 1, 0, 7, 8],\n [0, 0, 7, 0, 4, 0, 2, 6, 0],\n [0, 0, 1, 0, 5, 0, 9, 3, 0],\n [9, 0, 4, 0, 6, 0, 0, 0, 5],\n [0, 7, 0, 3, 0, 0, 0, 1, 2],\n [1, 2, 0, 0, 0, 7, 4, 0, 0],\n [0, 4, 9, 2, 0, 6, 0, 0, 7],\n ]\n\n print(\"Initial Board:\")\n print_board(board)\n\n sudoku_solver(board)\n\n print(\"\\nFinal Board:\")\n print_board(board)\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(m ^ (n ^ 2))\nSPACE COMPLEXITY: O(m) [in the call stack]\n[m = number of unfilled positions, n = dimension of the board]\n\"\"\"\n", "id": "416064", "language": 
"Python", "matching_score": 1.2380815744400024, "max_stars_count": 70, "path": "Solutions/054.py" }, { "content": "\"\"\"\nProblem:\n\nConnect 4 is a game where opponents take turns dropping red or black discs into a 7 x 6\nvertically suspended grid. The game ends either when one player creates a line of four\nconsecutive discs of their color (horizontally, vertically, or diagonally), or when\nthere are no more spots left in the grid.\n\nDesign and implement Connect 4.\n\"\"\"\n\nfrom typing import Optional\n\n\nclass Connect4:\n def __init__(self) -> None:\n self.board = [[\"-\" for _ in range(7)] for _ in range(6)]\n self.to_play = \"R\"\n\n def get_empty(self, position: int) -> int:\n if self.board[0][position] != \"-\":\n return -1\n if self.board[5][position] == \"-\":\n return 5\n\n for i in range(5):\n if self.board[i][position] == \"-\" and self.board[i + 1][position] != \"-\":\n return i\n\n def play_turn(self) -> None:\n position = int(input(\"Enter the location to put the disk: \"))\n position -= 1\n row = self.get_empty(position)\n\n while row == -1:\n position = int(input(\"Enter the location to put the disk: \"))\n position -= 1\n row = self.get_empty(position)\n\n if self.to_play == \"R\":\n self.board[row][position] = \"R\"\n self.to_play = \"B\"\n else:\n self.board[row][position] = \"B\"\n self.to_play = \"R\"\n\n def victory_check(self) -> Optional[str]:\n # horizontal check\n for i in range(6):\n for j in range(4):\n if self.board[i][j] in (\"R\", \"B\"):\n disk = self.board[i][j]\n for k in range(j, j + 4):\n if self.board[i][k] != disk:\n break\n else:\n return disk\n # vertical check\n for i in range(3):\n for j in range(7):\n if self.board[i][j] in (\"R\", \"B\"):\n disk = self.board[i][j]\n for k in range(i, i + 4):\n if self.board[k][j] != disk:\n break\n else:\n return disk\n # top left to bottom right diagonal check\n for i in range(2):\n for j in range(3):\n if self.board[i][j] in (\"R\", \"B\"):\n disk = self.board[i][j]\n for k in range(4):\n if self.board[i + k][j + k] != disk:\n break\n else:\n return disk\n # top right to bottom left diagonal check\n for i in range(3, 6):\n for j in range(4, 7):\n if self.board[i][j] in (\"R\", \"B\"):\n disk = self.board[i][j]\n for k in range(4):\n if self.board[i - k][j - k] != disk:\n break\n else:\n return disk\n return None\n\n def full_check(self) -> bool:\n for i in range(6):\n for j in range(7):\n if self.board[i][j] == \"-\":\n return False\n return True\n\n def print_board(self) -> None:\n for i in range(6):\n for j in range(7):\n print(self.board[i][j], end=\" \")\n print()\n\n def play(self) -> None:\n while not self.full_check() and not self.victory_check():\n try:\n print(self.to_play, \"to play\")\n self.play_turn()\n print(\"Board: \")\n self.print_board()\n print()\n except IndexError:\n print(\"Illegal move!\")\n\n if self.full_check():\n print(\"Its a draw\")\n else:\n print(\"{} has won\".format(self.victory_check()))\n\n\nif __name__ == \"__main__\":\n Connect4().play()\n", "id": "10125466", "language": "Python", "matching_score": 1.4181733131408691, "max_stars_count": 70, "path": "Solutions/219.py" }, { "content": "\"\"\"\nProblem:\n\nAll the disks start off on the first rod in a stack. 
They are ordered by size, with the\nlargest disk on the bottom and the smallest one at the top.\n\nThe goal of this puzzle is to move all the disks from the first rod to the last rod\nwhile following these rules:\n\nYou can only move one disk at a time.\nA move consists of taking the uppermost disk from one of the stacks and placing it on\ntop of another stack.\nYou cannot place a larger disk on top of a smaller disk.\nWrite a function that prints out all the steps necessary to complete the Tower of Hanoi.\nYou should assume that the rods are numbered, with the first rod being 1, the second\n(auxiliary) rod being 2, and the last (goal) rod being 3.\n\nFor example, with n = 3, we can do this in 7 moves:\n\nMove 1 to 3\nMove 1 to 2\nMove 3 to 2\nMove 1 to 3\nMove 2 to 1\nMove 2 to 3\nMove 1 to 3\n\"\"\"\n\nfrom typing import Optional\n\n\ndef towers_of_hanoi(\n n: int,\n start_rod: Optional[str] = None,\n aux_rod: Optional[str] = None,\n end_rod: Optional[str] = None,\n) -> None:\n # initializing the names for the rods [using different convention from the one\n # mentioned in the question]\n if not start_rod:\n start_rod = \"start_rod\"\n print(\n f\"\\nTower of Hanoi for {n} Disks ========================================\"\n )\n if not aux_rod:\n aux_rod = \"aux_rod\"\n if not end_rod:\n end_rod = \"end_rod\"\n # if the number of disks left to move is 1, it is shifted [base case for recursion]\n if n == 1:\n print(f\"Move disk 1 from {start_rod} to {end_rod}\")\n return\n\n # moving the top disk of the start rod to the proper position in the auxiliary rod\n # using the end rod as buffer\n towers_of_hanoi(n - 1, start_rod, end_rod, aux_rod)\n # moving the top disk from the start rod to the end rod\n print(f\"Move disk {n} from {start_rod} to {end_rod}\")\n # moving the top disk of the auxiliary rod to the proper position in the end rod\n # using the start rod as buffer\n towers_of_hanoi(n - 1, aux_rod, start_rod, end_rod)\n\n\nif __name__ == \"__main__\":\n towers_of_hanoi(3)\n towers_of_hanoi(4)\n towers_of_hanoi(5)\n towers_of_hanoi(6)\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(2 ^ n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "9061800", "language": "Python", "matching_score": 0.8675855994224548, "max_stars_count": 70, "path": "Solutions/128.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a singly linked list and an integer k, remove the kth last element from the list.\nk is guaranteed to be smaller than the length of the list.\n\nThe list is very long, so making more than one pass is prohibitively expensive.\n\nDo this in constant space and in one pass.\n\"\"\"\n\nfrom DataStructures.LinkedList import Node, LinkedList\n\n\ndef delete_kth_last_node(ll: LinkedList, k: int) -> None:\n # case for head node removal\n if k == len(ll):\n temp = ll.head\n if len(ll) == 1:\n ll.head = None\n ll.rear = None\n else:\n ll.head = temp.next\n temp.next = None\n ll.length -= 1\n del temp\n return\n # generic node removal\n ptr_end = ll.head\n ptr_k = ll.head\n # moving the ptr_end up by k nodes\n for _ in range(k + 1):\n if ptr_end is None:\n raise ValueError(f\"Linked list contains less than {k} nodes\")\n ptr_end = ptr_end.next\n # searching for the end of the linked list\n # ptr_k is trailing the ptr_end up by k nodes, when end pointer reaches the end,\n # ptr_k is k nodes away from the end\n while ptr_end is not None:\n ptr_end = ptr_end.next\n ptr_k = ptr_k.next\n # removing the required element\n temp = ptr_k.next\n ptr_k.next = temp.next\n temp.next = None\n ll.length -= 1\n del temp\n\n\nif __name__ == 
\"__main__\":\n ll1 = LinkedList()\n for i in range(1, 10):\n ll1.add(i)\n print(ll1)\n delete_kth_last_node(ll1, 5)\n print(ll1)\n\n ll2 = LinkedList()\n for i in range(1, 4):\n ll2.add(i)\n print(ll2)\n delete_kth_last_node(ll2, 3)\n print(ll2)\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "7528612", "language": "Python", "matching_score": 1.313893437385559, "max_stars_count": 70, "path": "Solutions/026.py" }, { "content": "\"\"\"\nProblem:\n\nGiven the head of a singly linked list, reverse it in-place.\n\"\"\"\n\nfrom DataStructures.LinkedList import LinkedList\n\n\ndef reverse_inplace(ll: LinkedList) -> None:\n ll.rear = ll.head\n ptr_prev, ptr_curr, ptr_next = None, ll.head, ll.head.next\n # reversing the flow\n while ptr_curr is not None:\n ptr_curr.next = ptr_prev\n ptr_prev, ptr_curr = ptr_curr, ptr_next\n if ptr_next is None:\n break\n ptr_next = ptr_next.next\n ll.head = ptr_prev\n\n\nif __name__ == \"__main__\":\n ll = LinkedList()\n for num in range(1, 6):\n ll.add(num)\n print(ll)\n reverse_inplace(ll)\n print(ll)\n\n ll = LinkedList()\n for num in range(1, 3):\n ll.add(num)\n print(ll)\n reverse_inplace(ll)\n print(ll)\n\n ll = LinkedList()\n for num in range(1, 2):\n ll.add(num)\n print(ll)\n reverse_inplace(ll)\n print(ll)\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "9224149", "language": "Python", "matching_score": 2.1219661235809326, "max_stars_count": 70, "path": "Solutions/073.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a linked list and a positive integer k, rotate the list to the right by k places.\n\nFor example, given the linked list 7 -> 7 -> 3 -> 5 and k = 2, it should become\n3 -> 5 -> 7 -> 7.\n\nGiven the linked list 1 -> 2 -> 3 -> 4 -> 5 and k = 3, it should become\n3 -> 4 -> 5 -> 1 -> 2.\n\"\"\"\n\nfrom DataStructures.LinkedList import Node, LinkedList\n\n\ndef rotate_linked_list(ll: LinkedList, k: int = 0) -> None:\n k = k % ll.length\n\n for _ in range(k):\n temp = ll.head\n ll.head = ll.head.next\n temp.next = None\n ll.rear.next = temp\n ll.rear = ll.rear.next\n\n\nif __name__ == \"__main__\":\n LL = LinkedList()\n for num in [7, 7, 3, 5]:\n LL.add(num)\n\n print(LL)\n rotate_linked_list(LL, 2)\n print(LL)\n print()\n\n LL = LinkedList()\n for num in [1, 2, 3, 4, 5]:\n LL.add(num)\n\n print(LL)\n rotate_linked_list(LL, 3)\n print(LL)\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(k)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "1980762", "language": "Python", "matching_score": 1.2723544836044312, "max_stars_count": 70, "path": "Solutions/177.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a linked list of numbers and a pivot k, partition the linked list so that all\nnodes less than k come before nodes greater than or equal to k.\n\nFor example, given the linked list 5 -> 1 -> 8 -> 0 -> 3 and k = 3, the solution could\nbe 1 -> 0 -> 5 -> 8 -> 3.\n\"\"\"\n\nfrom DataStructures.LinkedList import Node, LinkedList\n\n\ndef pivot_linked_list(ll: LinkedList, k: int) -> None:\n ptr1, ptr2 = ll.head, ll.head\n length = len(ll)\n k = k % length\n for _ in range(length):\n if ptr2.val < k:\n ptr1.val, ptr2.val = ptr2.val, ptr1.val\n ptr1 = ptr1.next\n ptr2 = ptr2.next\n\n\nif __name__ == \"__main__\":\n LL = LinkedList()\n LL.add(5)\n LL.add(1)\n LL.add(8)\n LL.add(0)\n LL.add(3)\n\n print(LL)\n\n pivot_linked_list(LL, 3)\n\n print(LL)\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "3862997", "language": "Python", "matching_score": 1.2609505653381348, 
"max_stars_count": 70, "path": "Solutions/208.py" }, { "content": "\"\"\"\nProblem:\n\nAn imminent hurricane threatens the coastal town of Codeville. If at most two people\ncan fit in a rescue boat, and the maximum weight limit for a given boat is k,\ndetermine how many boats will be needed to save everyone.\n\nFor example, given a population with weights [100, 200, 150, 80] and a boat limit of\n200, the smallest number of boats required will be three.\n\"\"\"\n\nfrom typing import List\n\n\ndef calculate_boats(arr: List[int], k: int) -> int:\n length = len(arr)\n arr.sort()\n\n ptr1 = 0\n ptr2 = length - 1\n result = 0\n while ptr1 < ptr2:\n if arr[ptr2] > k:\n # weight greater than boat weight limit\n raise ValueError(f\"Cannot accomodate {arr[ptr2]} within limit of {k}\")\n elif arr[ptr2] + arr[ptr1] > k:\n # 2 people CANNOT be accomodated in 1 boat\n result += 1\n ptr2 -= 1\n else:\n # 2 people CAN be accomodated in 1 boat\n result += 1\n ptr1 += 1\n ptr2 -= 1\n if ptr1 == ptr2:\n result += 1\n return result\n\n\nif __name__ == \"__main__\":\n print(calculate_boats([100, 200, 150, 80], 200))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n log(n))\nSPACE COMPLEXITY: O(1)\n\"\"\"\n\n", "id": "2712819", "language": "Python", "matching_score": 1.2178150415420532, "max_stars_count": 70, "path": "Solutions/291.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a sorted list of integers, square the elements and give the output in sorted\norder.\n\nFor example, given [-9, -2, 0, 2, 3], return [0, 4, 4, 9, 81].\n\"\"\"\n\nfrom typing import List\n\nArray = List[int]\n\n\ndef merge_sorted_lists(arr1: Array, arr2: Array) -> Array:\n ptr1, length1 = 0, len(arr1)\n ptr2, length2 = 0, len(arr2)\n merged_sorted_array = []\n # generating merged sorted list\n while ptr1 < length1 and ptr2 < length2:\n if arr1[ptr1] < arr2[ptr2]:\n merged_sorted_array.append(arr1[ptr1])\n ptr1 += 1\n else:\n merged_sorted_array.append(arr2[ptr2])\n ptr2 += 1\n merged_sorted_array.extend(arr1[ptr1:])\n merged_sorted_array.extend(arr2[ptr2:])\n return merged_sorted_array\n\n\ndef sort_squared_elements(arr: Array) -> Array:\n last_negative_position = 0\n length = len(arr)\n for i in range(length):\n if arr[i] > 0:\n last_negative_position = i\n break\n else:\n last_negative_position = length\n\n negative_part = [elem * elem for elem in arr[:last_negative_position]][::-1]\n positive_part = [elem * elem for elem in arr[last_negative_position:]]\n return merge_sorted_lists(positive_part, negative_part)\n\n\nif __name__ == \"__main__\":\n print(sort_squared_elements([]))\n print(sort_squared_elements([0]))\n print(sort_squared_elements([-1, 1]))\n print(sort_squared_elements([0, 2, 3]))\n print(sort_squared_elements([-9, -2, 0]))\n print(sort_squared_elements([-9, -2, 0, 2, 3]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "8965935", "language": "Python", "matching_score": 2.1593997478485107, "max_stars_count": 70, "path": "Solutions/118.py" }, { "content": "\"\"\"\nProblem:\n\nWe can determine how \"out of order\" an array A is by counting the number of inversions\nit has. Two elements A[i] and A[j] form an inversion if A[i] > A[j] but i < j. That is,\na smaller element appears after a larger element.\n\nGiven an array, count the number of inversions it has. Do this faster than O(N^2) time.\n\nYou may assume each element in the array is distinct.\n\nFor example, a sorted list has zero inversions. The array [2, 4, 1, 3, 5] has three\ninversions: (2, 1), (4, 1), and (4, 3). 
The array [5, 4, 3, 2, 1] has ten inversions:\nevery distinct pair forms an inversion.\n\"\"\"\n\nfrom typing import List, Tuple\n\n\ndef merge(part_a: Tuple[List[int], int], part_b: Tuple[List[int], int]) -> Tuple[List[int], int]:\n # helper function for merge sort\n i, j = 0, 0\n merged_array = []\n a, a_inv = part_a\n b, b_inv = part_b\n inversions = a_inv + b_inv\n length_a, length_b = len(a), len(b)\n # merging the arrays\n while i < length_a and j < length_b:\n if a[i] < b[j]:\n merged_array.append(a[i])\n i += 1\n else:\n merged_array.append(b[j])\n inversions += length_a - i\n j += 1\n while i < length_a:\n merged_array.append(a[i])\n i += 1\n while j < length_b:\n merged_array.append(b[j])\n j += 1\n return merged_array, inversions\n\n\ndef merge_sort(arr: List[int]) -> Tuple[List[int], int]:\n length = len(arr)\n if length in (0, 1):\n return arr, 0\n\n mid = length // 2\n merged_array, inversions = merge(merge_sort(arr[:mid]), merge_sort(arr[mid:]))\n return merged_array, inversions\n\n\ndef count_inversions(arr: List[int]) -> int:\n _, inversions = merge_sort(arr)\n return inversions\n\n\nif __name__ == \"__main__\":\n print(count_inversions([1, 2, 3, 4, 5]))\n print(count_inversions([2, 1, 3, 4, 5]))\n print(count_inversions([2, 4, 1, 3, 5]))\n print(count_inversions([2, 6, 1, 3, 7]))\n print(count_inversions([5, 4, 3, 2, 1]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n x log(n))\nSPACE COMPLEXITY: O(log(n))\n\"\"\"\n", "id": "8312278", "language": "Python", "matching_score": 1.3740572929382324, "max_stars_count": 70, "path": "Solutions/044.py" }, { "content": "\"\"\"\nProblem:\n\nGiven an array and a number k that's smaller than the length of the array, rotate the\narray to the right k elements in-place.\n\"\"\"\n\nfrom typing import List\n\n\ndef rotate_array_right(arr: List[int], k: int) -> List[int]:\n length = len(arr)\n k = k % length\n # rotating array\n if k > 0:\n cache = arr[-k:]\n for i in range(length - k - 1, -1, -1):\n arr[k + i] = arr[i]\n arr[:k] = cache\n return arr\n\n\nif __name__ == \"__main__\":\n print(rotate_array_right([(i + 1) for i in range(5)], 9))\n print(rotate_array_right([(i + 1) for i in range(5)], 3))\n print(rotate_array_right([(i + 1) for i in range(5)], 2))\n print(rotate_array_right([(i + 1) for i in range(5)], 1))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "756100", "language": "Python", "matching_score": 0.9106366634368896, "max_stars_count": 70, "path": "Solutions/197.py" }, { "content": "\"\"\"\nProblem:\n\nWrite a function that rotates a list by k elements. For example, [1, 2, 3, 4, 5, 6]\nrotated by two becomes [3, 4, 5, 6, 1, 2]. Try solving this without creating a copy of\nthe list. 
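For the in-place right rotation above (Solutions/197.py), the classic three-reversal trick reaches O(n) time with O(1) extra space, avoiding the temporary cache slice; a sketch, assuming a non-empty array (the function name is illustrative):

def rotate_right_by_reversal(arr, k):
    def reverse(lo, hi):
        while lo < hi:
            arr[lo], arr[hi] = arr[hi], arr[lo]
            lo, hi = lo + 1, hi - 1
    n = len(arr)
    k %= n
    reverse(0, n - 1)   # reverse everything
    reverse(0, k - 1)   # restore the order of the first k elements
    reverse(k, n - 1)   # restore the order of the rest
    return arr

For example, rotate_right_by_reversal([1, 2, 3, 4, 5], 2) yields [4, 5, 1, 2, 3].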
How many swap or move operations do you need?\n\"\"\"\n\nfrom typing import List\n\n\ndef rotate_list_once(arr: List[int], length: int) -> None:\n # updates the list inplace\n first_elem = arr[0]\n for i in range(length - 1):\n arr[i] = arr[i + 1]\n arr[length - 1] = first_elem\n\n\ndef rotate_list(arr: List[int], k: int) -> List[int]:\n length = len(arr)\n k = k % length\n for _ in range(k):\n rotate_list_once(arr, length)\n return arr\n\n\nif __name__ == \"__main__\":\n print(rotate_list([1, 2, 3, 4, 5, 6], 0))\n print(rotate_list([1, 2, 3, 4, 5, 6], 2))\n print(rotate_list([1, 2, 3, 4, 5, 6], 4))\n print(rotate_list([1, 2, 3, 4, 5, 6], 6))\n print(rotate_list([1, 2, 3, 4, 5, 6], 10))\n print(rotate_list([1, 2, 3, 4, 5, 6], 1_000_000_000))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(k x n)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "705588", "language": "Python", "matching_score": 1.5126513242721558, "max_stars_count": 70, "path": "Solutions/126.py" }, { "content": "\"\"\"\nProblem:\n\nGiven an array of a million integers between zero and a billion, out of order, how can\nyou efficiently sort it? Assume that you cannot store an array of a billion elements in\nmemory.\n\"\"\"\n\nfrom random import randint\nfrom typing import List\n\n\ndef gen_arr() -> List[int]:\n return [randint(0, 1_000_000_000) for _ in range(1_000_000)]\n\n\ndef counting_sort(arr: List[int], exp: int, n: int) -> None:\n # output array elements to store the sorted arr\n output = [0] * n\n count = [0] * 10\n # updating count\n for i in range(n):\n index = arr[i] // exp\n count[index % 10] += 1\n for i in range(1, 10):\n count[i] += count[i - 1]\n # generating the output array\n for i in range(n - 1, -1, -1):\n index = arr[i] // exp\n output[count[index % 10] - 1] = arr[i]\n count[index % 10] -= 1\n # updating arr with output arr\n for i in range(0, len(arr)):\n arr[i] = output[i]\n\n\ndef radix_sort(arr: List[int]) -> None:\n length = len(arr)\n digits = len(str(max(arr)))\n exp = 1\n for _ in range(digits):\n counting_sort(arr, exp, length)\n exp *= 10\n\n\nif __name__ == \"__main__\":\n radix_sort(gen_arr())\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(range of the numbers)\n\"\"\"\n", "id": "10508192", "language": "Python", "matching_score": 0.7383432984352112, "max_stars_count": 70, "path": "Solutions/251.py" }, { "content": "\"\"\"\nProblem:\n\nThe area of a circle is defined as πr^2. 
Estimate pi to 3 decimal places using a Monte\nCarlo method.\n\nHint: The basic equation of a circle is x^2 + y^2 = r^2.\n\"\"\"\n\nfrom random import random\nfrom typing import Tuple\n\n\ndef coordinate_gen() -> Tuple[float, float]:\n # Helper function to generate a random coordinate in the unit square bounded by\n # x = 0, x = 1 and y = 0, y = 1 (one quadrant of the circle suffices by symmetry)\n return random(), random()\n\n\ndef pi_approx(iterations: int = 1_000_000) -> float:\n circle_area = 0\n for _ in range(iterations):\n x, y = coordinate_gen()\n if pow(x, 2) + pow(y, 2) <= 1:\n circle_area += 1\n # Using Monte Carlo approximation [pi = 4 x (Area of circle / Area of square)]\n # [Area of circle = number of points in circle,\n # Area of square = total number of points]\n return round(4 * circle_area / iterations, 3)\n\n\nif __name__ == \"__main__\":\n print(pi_approx())\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "8099261", "language": "Python", "matching_score": 0.8093134164810181, "max_stars_count": 70, "path": "Solutions/014.py" }, { "content": "\"\"\"\nProblem:\n\nIn chess, the Elo rating system is used to calculate player strengths based on game\nresults.\n\nA simplified description of the Elo system is as follows. Every player begins at the\nsame score. For each subsequent game, the loser transfers some points to the winner,\nwhere the amount of points transferred depends on how unlikely the win is. For example,\na 1200-ranked player should gain much more points for beating a 2000-ranked player than\nfor beating a 1300-ranked player.\n\nImplement this system.\n\"\"\"\n\nfrom typing import Optional\n\n\nclass EloRating:\n INITIAL_POINTS = 1400\n MEAN_SCORE_CHANGE = 30\n\n def __init__(self) -> None:\n self.scores = {}\n\n def add_player(self, id: int) -> None:\n self.scores[id] = EloRating.INITIAL_POINTS\n\n def update_points(\n self, p1_id: int, p2_id: int, winner: Optional[int] = None\n ) -> None:\n if p1_id not in self.scores or p2_id not in self.scores:\n raise ValueError(\"Player not found\")\n if winner is None:\n return\n temp = set([p1_id, p2_id]) - set([winner])\n if len(temp) != 1:\n raise ValueError(\"Invalid player & winner combination\")\n # updating points\n loser = temp.pop()\n if self.scores[loser] > 0:\n ratio = self.scores[loser] / self.scores[winner]\n # clamp to ensure there are no negative points\n points = min(self.scores[loser], int(ratio * EloRating.MEAN_SCORE_CHANGE))\n self.scores[winner] += points\n self.scores[loser] -= points\n\n def display_points(self) -> None:\n print(\"\\n\" + \"=\" * 15)\n print(\"POINTS\")\n print(\"=\" * 15)\n for player in self.scores:\n print(f\"{player}\\t{self.scores[player]}\")\n print(\"=\" * 15)\n\n\nif __name__ == \"__main__\":\n elo = EloRating()\n\n elo.add_player(1)\n elo.add_player(2)\n elo.add_player(3)\n\n elo.display_points()\n\n elo.update_points(1, 2, 1)\n elo.update_points(1, 3, 1)\n\n elo.display_points()\n\n elo.update_points(1, 2, 1)\n\n elo.display_points()\n\n elo.update_points(3, 2, 3)\n\n elo.display_points()\n\n elo.update_points(3, 2) # TIE\n\n elo.display_points()\n", "id": "11112739", "language": "Python", "matching_score": 1.1410702466964722, "max_stars_count": 70, "path": "Solutions/328.py" }, { "content": "\"\"\"\nProblem:\n\nThis problem was asked by Google.\n\nThe game of Nim is played as follows. Starting with three heaps, each containing a\nvariable number of items, two players take turns removing one or more items from a\nsingle pile. The player who eventually is forced to take the last stone loses. 
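For contrast with the simplified point-transfer rule in the Elo solution above (Solutions/328.py), the conventional Elo system updates ratings with a logistic expected score; a sketch (k = 32 is a typical choice, not mandated by the problem):

def standard_elo_update(r_winner, r_loser, k=32):
    # expected score of the winner before the game
    expected = 1 / (1 + 10 ** ((r_loser - r_winner) / 400))
    delta = k * (1 - expected)
    return r_winner + delta, r_loser - delta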
For\nexample, if the initial heap sizes are 3, 4, and 5, a game could be played as shown\nbelow:\n\nA\tB\tC\n3\t4\t5\n3\t1\t5\n3\t1\t3\n0\t1\t3\n0\t1\t0\n0\t0\t0\nIn other words, to start, the first player takes three items from pile B. The second\nplayer responds by removing two stones from pile C. The game continues in this way\nuntil player one takes the last stone and loses.\n\nGiven a list of non-zero starting values [a, b, c], and assuming optimal play,\ndetermine whether the first player has a forced win.\n\"\"\"\n\n# Source: https://en.wikipedia.org/wiki/Nim#Mathematical_theory\n\nfrom typing import Tuple\n\n\ndef is_forced_win(heaps: Tuple[int, int, int]) -> bool:\n x = 0\n for heap in heaps:\n x = x ^ heap\n if max(heaps) > 1:\n # some heap still holds 2 or more stones: the first player can force a win\n # exactly when the xor of the heap sizes is non-zero\n return x != 0\n # misère endgame (only single-stone heaps): the player to move loses exactly\n # when an odd number of heaps remain, i.e. when the xor is non-zero\n return x == 0\n\n\nif __name__ == \"__main__\":\n print(is_forced_win((3, 4, 5)))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(1)\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "6606194", "language": "Python", "matching_score": 1.437125563621521, "max_stars_count": 70, "path": "Solutions/289.py" }, { "content": "\"\"\"\nProblem:\n\nIn front of you is a row of N coins, with values v_1, v_2, ..., v_n.\n\nYou are asked to play the following game. You and an opponent take turns choosing\neither the first or last coin from the row, removing it from the row, and receiving the\nvalue of the coin.\n\nWrite a program that returns the maximum amount of money you can win with certainty, if\nyou move first, assuming your opponent plays optimally.\n\"\"\"\n\nfrom typing import List\n\n\ndef optimal_strategy_of_game(\n coins_arr: List[int], amount: int = 0, to_play: bool = True\n) -> int:\n if not coins_arr:\n return amount\n if to_play:\n return max(\n optimal_strategy_of_game(coins_arr[1:], amount + coins_arr[0], False),\n optimal_strategy_of_game(coins_arr[:-1], amount + coins_arr[-1], False),\n )\n if coins_arr[0] > coins_arr[-1]:\n return optimal_strategy_of_game(coins_arr[1:], amount, True)\n return optimal_strategy_of_game(coins_arr[:-1], amount, True)\n\n\nif __name__ == \"__main__\":\n print(optimal_strategy_of_game([1, 2, 3, 4, 5]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(2 ^ n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "8942827", "language": "Python", "matching_score": 0.8418906927108765, "max_stars_count": 70, "path": "Solutions/220.py" }, { "content": "\"\"\"\nProblem:\n\nThe 24 game is played as follows. You are given a list of four integers, each between 1\nand 9, in a fixed order. 
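The coin-row game above (Solutions/220.py) recomputes overlapping subproblems and models the opponent greedily (always taking the larger end), whereas the problem statement specifies a fully optimal opponent. Memoizing on the interval endpoints gives the standard O(n^2) interval DP against an optimal opponent; a sketch (the function names are illustrative):

from functools import lru_cache

def max_coin_win(coins):
    coins = tuple(coins)

    @lru_cache(maxsize=None)
    def best(i, j):
        # highest total the player to move can secure from coins[i..j]
        if i > j:
            return 0
        take_left = coins[i] + min(best(i + 2, j), best(i + 1, j - 1))
        take_right = coins[j] + min(best(i + 1, j - 1), best(i, j - 2))
        return max(take_left, take_right)

    return best(0, len(coins) - 1)

max_coin_win([1, 2, 3, 4, 5]) returns 9, agreeing with the recursive version on this input.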
By placing the operators +, -, *, and / between the numbers,\nand grouping them with parentheses, determine whether it is possible to reach the value\n24.\n\nFor example, given the input [5, 2, 7, 8], you should return True, since\n(5 * 2 - 7) * 8 = 24.\n\nWrite a function that plays the 24 game.\n\"\"\"\n\nfrom typing import List\n\nOPERATORS = set([\"+\", \"-\", \"*\", \"/\"])\n\n\ndef game_24(arr: List[int]) -> bool:\n if len(arr) == 1:\n return arr[0] == 24\n # checking if 24 can be reached\n possibilities = []\n for si in range(len(arr) - 1):\n # checking all possibilities\n for operator in OPERATORS:\n num_1 = arr[si]\n num_2 = arr[si + 1]\n try:\n possibility = (\n arr[:si]\n + [eval(\"{} {} {}\".format(num_1, operator, num_2))]\n + arr[si + 2 :]\n )\n possibilities.append(possibility)\n except ZeroDivisionError:\n pass\n return any([game_24(x) for x in possibilities])\n\n\nif __name__ == \"__main__\":\n print(game_24([5, 2, 7, 8]))\n print(game_24([1, 1, 1, 1]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(operations)\nSPACE COMPLEXITY: O(operations)\n[the passed array contains 4 (constant) numbers, else it would have been\nO(operations x (n ^ n)) in time & space]\n\"\"\"\n", "id": "12124190", "language": "Python", "matching_score": 1.2633905410766602, "max_stars_count": 70, "path": "Solutions/334.py" }, { "content": "\"\"\"\nProblem:\nYou are presented with an array representing a Boolean expression. The elements are of\ntwo kinds:\n\nT and F, representing the values True and False.\n&, |, and ^, representing the bitwise operators for AND, OR, and XOR.\nDetermine the number of ways to group the array elements using parentheses so that the\nentire expression evaluates to True.\n\nFor example, suppose the input is ['F', '|', 'T', '&', 'T']. In this case, there are\ntwo acceptable groupings: (F | T) & T and F | (T & T).\n\"\"\"\n\nfrom typing import List\n\n\ndef evaluator(arr: List[str]) -> List[bool]:\n expr = \"\".join(arr)\n if len(arr) == 1 or len(arr) == 3:\n return [eval(expr)]\n\n groupings = []\n # checking all possible arrangements\n for i in range(len(arr) // 2):\n pivot = i * 2 + 1\n left = arr[:pivot]\n right = arr[pivot + 1 :]\n for fe in evaluator(left):\n for se in evaluator(right):\n new_exp = str(fe) + arr[pivot] + str(se)\n # adding the expression only if evaluates to True\n if eval(new_exp):\n groupings.append(True)\n return groupings\n\n\ndef get_groupings(arr: List[str]) -> int:\n # replacing the 'T's and 'F's\n for ind in range(len(arr)):\n if arr[ind] == \"F\":\n arr[ind] = \"False\"\n elif arr[ind] == \"T\":\n arr[ind] = \"True\"\n return len(evaluator(arr))\n\n\nif __name__ == \"__main__\":\n print(get_groupings([\"F\", \"|\", \"T\", \"&\", \"T\"]))\n print(get_groupings([\"F\", \"|\", \"T\", \"&\", \"T\", \"^\", \"F\"]))\n print(get_groupings([\"F\", \"|\", \"T\", \"&\", \"F\", \"^\", \"F\"]))\n print(get_groupings([\"F\", \"|\", \"T\", \"|\", \"F\", \"^\", \"F\"]))\n print(get_groupings([\"T\", \"^\", \"T\", \"&\", \"F\"]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n ^ 3)\nSPACE COMPLEXITY: O(n ^ 2)\n\"\"\"\n", "id": "2157823", "language": "Python", "matching_score": 1.2025198936462402, "max_stars_count": 70, "path": "Solutions/308.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a string consisting of parentheses, single digits, and positive and negative\nsigns, convert the string into a mathematical expression to obtain the answer.\n\nDon't use eval or a similar built-in parser.\n\nFor example, given '-1 + (2 + 3)', you should return 4.\n\"\"\"\n\n\ndef 
evaluate_expression(expression: str) -> int:\n result = 0\n add = True\n sub_eval_string = \"\"\n number_of_parentheses = 0\n for char in expression:\n if number_of_parentheses > 0:\n # inside a set of parentheses\n if char == \"(\":\n number_of_parentheses += 1\n elif char == \")\":\n number_of_parentheses -= 1\n if number_of_parentheses == 0:\n if add:\n result += evaluate_expression(sub_eval_string)\n else:\n result -= evaluate_expression(sub_eval_string)\n sub_eval_string = \"\"\n else:\n sub_eval_string += char\n else:\n if char == \"-\":\n add = False\n elif char == \"+\":\n add = True\n elif char == \"(\":\n number_of_parentheses = 1\n elif char.isdigit():\n if add:\n result += int(char)\n else:\n result -= int(char)\n return result\n\n\nif __name__ == \"__main__\":\n print(evaluate_expression(\"-1 + (2 + 3)\"))\n print(evaluate_expression(\"-1 + (2 + 3) + (2 - 3)\"))\n print(evaluate_expression(\"-1 + (2 + 3) + ((2 - 3) + 1)\"))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n)\nSPACE COMPLEXITY: O(n)\n\"\"\"\n", "id": "7150812", "language": "Python", "matching_score": 1.0747120380401611, "max_stars_count": 70, "path": "Solutions/274.py" }, { "content": "\"\"\"\nProblem:\n\nA Boolean formula can be said to be satisfiable if there is a way to assign truth\nvalues to each variable such that the entire formula evaluates to true.\n\nFor example, suppose we have the following formula, where the symbol ¬ is used to\ndenote negation:\n\n(¬c OR b) AND (b OR c) AND (¬b OR c) AND (¬c OR ¬a)\nOne way to satisfy this formula would be to let a = False, b = True, and c = True.\n\nThis type of formula, with AND statements joining tuples containing exactly one OR, is\nknown as 2-CNF.\n\nGiven a 2-CNF formula, find a way to assign truth values to satisfy it, or return False\nif this is impossible.\n\"\"\"\n\nfrom typing import Dict, List, Union\n\n\ndef generate_combinations(num: int) -> List[List[bool]]:\n # generate all boolean combinations for the given number of variables\n numbers = [num for num in range(pow(2, num))]\n combinations = []\n for number in numbers:\n bin_number = bin(number)[2:].zfill(num)\n combinations.append(list(bool(int(i)) for i in bin_number))\n return combinations\n\n\ndef validation_problem(expression: str) -> Union[Dict[str, bool], bool]:\n # getting the variables\n formatted_expression = \"\"\n variables = {}\n for index, char in enumerate(expression):\n formatted_expression += char.lower()\n if char.isalpha() and char not in \"OR AND\":\n if char not in variables:\n variables[char] = set()\n variables[char].add(index)\n # generating all combinations for the given variables\n variables_set = set(variables.keys())\n variables_list = list(variables_set)\n variables_count = len(variables_list)\n combinations = generate_combinations(variables_count)\n # checking expression satisfiability using all combinations\n for combination in combinations:\n calculation_expression = \"\"\n for index, char in enumerate(formatted_expression):\n if char == \"¬\":\n calculation_expression += \"not \"\n elif char in variables_set and index in variables[char]:\n position = variables_list.index(char)\n calculation_expression += str(combination[position])\n else:\n calculation_expression += char\n if eval(calculation_expression):\n return {key: value for key, value in zip(variables_list, combination)}\n # returning False if the expression cannot be satisfied\n return False\n\n\nif __name__ == \"__main__\":\n print(validation_problem(\"(¬c OR b) AND (b OR c) AND (¬b OR c) AND (¬c OR ¬a)\"))\n 
print(validation_problem(\"a AND a\"))\n print(validation_problem(\"a AND ¬a\"))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(length x (2 ^ variables))\nSPACE COMPLEXITY: O(2 ^ variables)\n\"\"\"\n", "id": "11266983", "language": "Python", "matching_score": 1.7405909299850464, "max_stars_count": 70, "path": "Solutions/330.py" }, { "content": "\"\"\"\nProblem:\n\nUTF-8 is a character encoding that maps each symbol to one, two, three, or four bytes.\n\nFor example, the Euro sign, €, corresponds to the three bytes 11100010 10000010\n10101100. The rules for mapping characters are as follows:\n\nFor a single-byte character, the first bit must be zero.\nFor an n-byte character, the first byte starts with n ones and a zero. The other n - 1\nbytes all start with 10. Visually, this can be represented as follows.\n Bytes | Byte format\n-----------------------------------------------\n 1 | 0xxxxxxx\n 2 | 110xxxxx 10xxxxxx\n 3 | 1110xxxx 10xxxxxx 10xxxxxx\n 4 | 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx\nWrite a program that takes in an array of integers representing byte values, and\nreturns whether it is a valid UTF-8 encoding.\n\"\"\"\n\nfrom re import compile\nfrom typing import List\n\n# acceptable expressions\nEXPRESSIONS = [\n compile(\"0.{7}\"),\n compile(\"110.{5}10.{6}\"),\n compile(\"1110.{4}10.{6}10.{6}\"),\n compile(\"11110.{3}10.{6}10.{6}10.{6}\"),\n]\n\n\ndef is_utf8(bin_num: str) -> bool:\n # using regular expression to match the input string\n for expression in EXPRESSIONS:\n if expression.match(bin_num):\n return True\n return False\n\n\ndef is_arr_utf8(arr: List[int]) -> bool:\n # the array cannot hold an utf8 symbol if the length of the array is more than 4 or\n # if any of the elements is larger than 255 (binary number contains more than 8\n # characters)\n if len(arr) > 4 or any([elem > 255 for elem in arr]):\n return False\n # generating the binary number\n bin_num = \"\"\n for elem in arr:\n num = bin(elem)[2:]\n bin_num += num.zfill(8)\n return is_utf8(bin_num)\n\n\nif __name__ == \"__main__\":\n print(is_arr_utf8([127]))\n print(is_arr_utf8([226, 130, 172]))\n print(is_arr_utf8([226, 127, 172]))\n print(is_arr_utf8([256, 130, 172]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(1)\nSPACE COMPLEXITY: O(1)\n[its constant as the maximum size is 32 characters (constant)]\n\"\"\"\n", "id": "2472677", "language": "Python", "matching_score": 1.6541268825531006, "max_stars_count": 70, "path": "Solutions/277.py" }, { "content": "\"\"\"\nProblem:\n\nImplement regular expression matching with the following special characters:\n\n. (period) which matches any single character\n* (asterisk) which matches zero or more of the preceding element That is, implement a\nfunction that takes in a string and a valid regular expression and returns whether or\nnot the string matches the regular expression.\nFor example, given the regular expression \"ra.\" and the string \"ray\", your function\nshould return true. The same regular expression on the string \"raymond\" should return\nfalse.\n\nGiven the regular expression \".*at\" and the string \"chat\", your function should return\ntrue. 
The same regular expression on the string \"chats\" should return false.\n\"\"\"\n\n\ndef is_regex_match(pattern: str, text: str) -> bool:\n n, m = len(text), len(pattern)\n dp = [[False for _ in range(m + 1)] for _ in range(n + 1)]\n dp[0][0] = True\n # populating the 1st row of the lookup table\n for i in range(1, m + 1):\n if pattern[i - 1] == \"*\":\n dp[0][i] = dp[0][i - 2]\n # populating the remaining lookup table\n for i in range(1, n + 1):\n for j in range(1, m + 1):\n if pattern[j - 1] == \".\" or pattern[j - 1] == text[i - 1]:\n dp[i][j] = dp[i - 1][j - 1]\n elif pattern[j - 1] == \"*\":\n dp[i][j] = dp[i][j - 2]\n if pattern[j - 2] == \".\" or pattern[j - 2] == text[i - 1]:\n dp[i][j] = dp[i][j] | dp[i - 1][j]\n return dp[n][m]\n\n\nif __name__ == \"__main__\":\n print(is_regex_match(\"r.y\", \"ray\"))\n print(is_regex_match(\"ra.\", \"rays\"))\n print(is_regex_match(\".*at\", \"chat\"))\n print(is_regex_match(\".*at\", \"chats\"))\n print(is_regex_match(\".*\", \"random-word\"))\n print(is_regex_match(\".*a\", \"random-word\"))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n x m)\nSPACE COMPLEXITY: O(n x m)\n\"\"\"\n", "id": "12139878", "language": "Python", "matching_score": 0.6595030426979065, "max_stars_count": 70, "path": "Solutions/025.py" }, { "content": "\"\"\"\nProblem:\n\nMastermind is a two-player game in which the first player attempts to guess the secret\ncode of the second. In this version, the code may be any six-digit number with all\ndistinct digits.\n\nEach turn the first player guesses some number, and the second player responds by\nsaying how many digits in this number correctly matched their location in the secret\ncode. For example, if the secret code were 123456, then a guess of 175286 would score\ntwo, since 1 and 6 were correctly placed.\n\nWrite an algorithm which, given a sequence of guesses and their scores, determines\nwhether there exists some secret code that could have produced them.\n\nFor example, for the following scores you should return True, since they correspond to\nthe secret code 123456: {175286: 2, 293416: 3, 654321: 0}\n\nHowever, it is impossible for any key to result in the following scores, so in this\ncase you should return False: {123456: 4, 345678: 4, 567890: 4}\n\"\"\"\n\nfrom typing import Dict\n\n\ndef validate_guess(guess: int, matches: Dict[int, int]) -> bool:\n for match, match_count in matches.items():\n count = 0\n for char_1, char_2 in zip(str(guess).zfill(6), str(match).zfill(6)):\n if char_1 == char_2:\n count += 1\n if count != match_count:\n return False\n return True\n\n\ndef is_match_valid(matches: Dict[int, int]) -> bool:\n for guess in range(1_000_000):\n if validate_guess(guess, matches):\n return True\n return False\n\n\nif __name__ == \"__main__\":\n print(is_match_valid({175286: 2, 293416: 3, 654321: 0}))\n print(is_match_valid({123456: 4, 345678: 4, 567890: 4}))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(1) [As (1,000,000 x 6) is a constant]\nSPACE COMPLEXITY: O(1)\n\"\"\"\n", "id": "1687357", "language": "Python", "matching_score": 1.515990138053894, "max_stars_count": 70, "path": "Solutions/361.py" }, { "content": "\"\"\"\nProblem:\n\nBlackjack is a two player card game whose rules are as follows:\n\n- The player and then the dealer are each given two cards.\n- The player can then \"hit\", or ask for arbitrarily many additional cards, so long as\n their total does not exceed 21.\n- The dealer must then hit if their total is 16 or lower, otherwise pass.\n- Finally, the two compare totals, and the one with the 
greatest sum not exceeding 21\n  is the winner.\n\nFor this problem, card values are counted as follows: each card between 2 and 10\ncounts as its face value, face cards count as 10, and aces count as 1.\n\nGiven perfect knowledge of the sequence of cards in the deck, implement a blackjack\nsolver that maximizes the player's score (that is, wins minus losses).\n\"\"\"\n\nfrom random import shuffle\nfrom typing import List, Tuple\n\n\ndef generate_random_card_sequence() -> List[int]:\n    # simplified 40-card deck: four copies of each value 1-10\n    # (aces count as 1; 10 also stands in for the face cards)\n    cards = [i for _ in range(4) for i in range(1, 11)]\n    shuffle(cards)\n    return cards\n\n\ndef get_best_player_score(\n    sequence: List[int], player_score: int = 0, dealer_score: int = 0\n) -> Tuple[int, int]:\n    if not sequence:\n        return player_score, dealer_score\n    elif player_score > 21 and dealer_score > 21:\n        return -1, -1\n    elif player_score > 21:\n        return -1, dealer_score\n    elif dealer_score > 21:\n        return player_score, -1\n    return max(\n        get_best_player_score(sequence[1:], player_score + sequence[0], dealer_score),\n        get_best_player_score(sequence[1:], player_score, dealer_score + sequence[0]),\n        (player_score, dealer_score),\n        # the player's score is weighted slightly more than the dealer's score\n        key=lambda x: 1.01 * x[0] + x[1],\n    )\n\n\ndef simulate(n: int = 1_000) -> float:\n    # simulate the game n times and return the percentage of player victories\n    wins = 0\n    for _ in range(n):\n        sequence = generate_random_card_sequence()\n        player_score, dealer_score = get_best_player_score(sequence)\n        if player_score > dealer_score and player_score <= 21:\n            wins += 1\n    return (wins / n) * 100\n\n\nif __name__ == \"__main__\":\n    print(f\"The Player won {simulate():.2f}% of the time\")\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(1)\nSPACE COMPLEXITY: O(1)\n[the number of cards is constant]\n\"\"\"\n", "id": "7299643", "language": "Python", "matching_score": 1.5249053239822388, "max_stars_count": 70, "path": "Solutions/238.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a function that generates perfectly random numbers between 1 and k (inclusive),\nwhere k is an input, write a function that shuffles a deck of cards represented as an\narray using only swaps.\n\nIt should run in O(N) time.\n\nHint: Make sure each one of the 52! permutations of the deck is equally likely.\n\"\"\"\n\nfrom random import randint\nfrom typing import List\n\n# implementation of a function that generates perfectly random numbers between 1 and k\ndef generate_random_number_in_range(k: int) -> int:\n    return randint(1, k)\n\n\ndef swap() -> List[int]:\n    # generating the card list\n    cards = [card_no for card_no in range(1, 53)]\n    # shuffling the cards (Fisher-Yates): position i swaps with a position drawn\n    # uniformly from the remaining range [i, 51]; drawing from all 52 positions\n    # on every step would not make all 52! permutations equally likely\n    for i in range(52):\n        swap_position = i + generate_random_number_in_range(52 - i) - 1\n        cards[i], cards[swap_position] = cards[swap_position], cards[i]\n    return cards\n\n\nif __name__ == \"__main__\":\n    print(*swap())\n    print(*swap())\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(1)\nSPACE COMPLEXITY: O(1)\n[n = 52 (constant)]\n\"\"\"\n", "id": "6115935", "language": "Python", "matching_score": 0.9471093416213989, "max_stars_count": 70, "path": "Solutions/051.py" }, { "content": "\"\"\"\nProblem:\n\nGiven a number in the form of a list of digits, return all possible permutations.\n\nFor example, given [1,2,3], return [[1,2,3],[1,3,2],[2,1,3],[2,3,1],[3,1,2],[3,2,1]].\n\"\"\"\n\nfrom typing import List, Optional\n\n\ndef generate_all_permutations(\n    arr: List[int], l: int = 0, r: Optional[int] = None, res: Optional[List[List[int]]] = None\n) -> List[List[int]]:\n    # avoid a mutable default argument: a shared default list would keep\n    # accumulating results across separate calls\n    if res is None:\n        res = []\n    if r is None:\n        r = len(arr) - 1\n    if l == r:\n        res.append(list(arr))\n        return res\n    # generating all permutations using backtracking\n    for i in range(l, r + 1):\n        arr[l], arr[i] = arr[i], arr[l]\n        generate_all_permutations(arr, l + 1, r, res)\n        arr[l], arr[i] = arr[i], arr[l]\n    return res\n\n\nif __name__ == \"__main__\":\n    print(generate_all_permutations([1, 2, 3]))\n    print(generate_all_permutations([1, 2]))\n    print(generate_all_permutations([1]))\n    print(generate_all_permutations([]))\n\n\n\"\"\"\nSPECS:\n\nTIME COMPLEXITY: O(n!)\nSPACE COMPLEXITY: O(n!)\n[there are n! permutations for an array with n elements]\n\"\"\"\n", "id": "1205998", "language": "Python", "matching_score": 0.7888599634170532, "max_stars_count": 70, "path": "Solutions/096.py" }, { "content": "from blog import app, db\n\nif __name__ == '__main__':\n    db.create_all()\n    app.run(host='127.0.0.1', port=8000, debug=True)", "id": "761920", "language": "Python", "matching_score": 0.7043065428733826, "max_stars_count": 3, "path": "app.py" } ]
1.282653
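A note on the shuffle in Solutions/051.py above: the hint (all 52! orderings equally likely) forces the Fisher-Yates scheme, because drawing every swap target from all 52 positions produces 52^52 equally likely execution paths, and 52! cannot divide 52^52 evenly (52! contains primes such as 47 that do not divide 52^52), so some permutations would necessarily come up more often than others. Below is a minimal, self-contained sketch of the uniformity check; the name fisher_yates and the 3-card test deck are illustrative choices, not identifiers from the repository:

from collections import Counter
from random import randint

def fisher_yates(deck):
    deck = list(deck)
    for i in range(len(deck) - 1):
        # swap position i with a position chosen uniformly
        # from the not-yet-fixed suffix [i, n-1]
        j = randint(i, len(deck) - 1)
        deck[i], deck[j] = deck[j], deck[i]
    return tuple(deck)

counts = Counter(fisher_yates([1, 2, 3]) for _ in range(60_000))
for perm, count in sorted(counts.items()):
    print(perm, count)  # each of the 3! = 6 orderings lands near 10,000

With the original draw-from-anywhere rule the counts come out visibly skewed: for 3 cards, 27 execution paths cannot cover 6 permutations evenly.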
sir-kin
[ { "content": "from math import gcd, log2, pi\nimport random\nimport progressbar\n\nimport multiprocessing\nimport sys\n\n# Import Qiskit\nfrom qiskit import QuantumCircuit, QuantumRegister, ClassicalRegister\nfrom qiskit import Aer, execute\n\n\n########################################\n\n# we represent negative numbers via the ones-complement\n\n# little-endian encoding (LSB first)\n# ones complement\ndef int_to_bin_LSB(x,n):\n sign = (x < 0)\n x = abs(x)\n x = bin(x)[2:]\n x = x.zfill(n)[:n]\n\n x = x[::-1]\n x = [int(i) for i in x]\n return x\n\n\n# LSB first\ndef bin_to_int_LSB(L):\n x = ''.join([str(i) for i in L])\n x = eval('0b' + x[::-1])\n return x\n\n\ndef ones_complement(x,n):\n return (1<<n) + ~x\n\n\ndef shuffle(x):\n return random.sample(x, len(x))\n\n\ndef find_nth(haystack, needle, n):\n start = haystack.find(needle)\n while start >= 0 and n > 1:\n start = haystack.find(needle, start+len(needle))\n n -= 1\n return start\n\n\n\n########################################\n\n\n\n# classical constant factors, in little endian (LSB first)\ndef get_const_factors(a,N,n):\n const_factors = [(a<<i)%N for i in range(n)]\n const_factors = [int_to_bin_LSB(t,n) for t in const_factors]\n const_factors = [[int(i) for i in t] for t in const_factors]\n\n return const_factors\n\n\ndef egcd(a, b):\n if a == 0:\n return (b, 0, 1)\n else:\n g, y, x = egcd(b % a, a)\n return (g, x - (b // a) * y, y)\n\ndef mod_inverse(a, m):\n g, x, y = egcd(a, m)\n if g != 1:\n raise Exception('modular inverse does not exist')\n else:\n return x % m\n\n########################################\n\ndef cphi_gate(circ, k, control, target):\n theta = 2 * pi / (1<<k)\n circ.cp(theta, control, target)\n\ndef phi_gate(circ, k, target):\n theta = 2 * pi / (1<<k)\n circ.p(theta, target)\n\n\n# negation circuit\ndef ones_complement_circuit(a_qubits, m):\n circ = QuantumCircuit(m,ne)\n \n for i in a_qubits:\n circ.x(i)\n \n return circ\n\n \n########################################\n \n# initialization circuit\n\ndef make_initialization_circuit(a_qubits, a_bits, m):\n initialize_circ = QuantumCircuit(m,ne)\n for i,v in enumerate(a_bits):\n if v:\n initialize_circ.x(a_qubits[i])\n \n return initialize_circ\n\n\n########################################\n \n# quantum fourier transform\n \n\ndef make_QFT_circ(a_qubits,n,m):\n \n QFT_circ = QuantumCircuit(m,ne)\n \n QFT_cutoff = 5 + int(log2(n))\n \n for i in range(n)[::-1]:\n QFT_circ.h(a_qubits[i])\n \n for j in range(i)[::-1]:\n k = i - j + 1\n \n if k > QFT_cutoff:\n break\n \n cphi_gate(QFT_circ, k,a_qubits[j],a_qubits[i])\n \n return QFT_circ\n\n\n\n#QFT_circ = make_QFT_circ(a_qubits,n,m)\n#QFT_circ_inverse = QFT_circ.inverse()\n# QFT_circ.qasm()\n\n########################################\n\n# quantum adder\n# in-place a+b, where a is quantum and b is classical\n\ndef make_adder_circ(b_bits, a_qubits, m):\n adder_circ = QuantumCircuit(m,ne)\n \n for i in range(n)[::-1]:\n \n theta = 0\n \n for j in range(i + 1)[::-1]:\n k = i - j + 1\n \n if k > QFT_cutoff:\n break\n \n if b_bits[j]:\n theta += 2 * pi / (1<<k)\n \n if theta != 0:\n adder_circ.p(theta, a_qubits[i])\n \n return adder_circ\n\ndef make_controlled_adder_circ(control_qubit, b_bits, a_qubits, m):\n adder_circ = QuantumCircuit(m,ne)\n \n for i in range(n)[::-1]:\n \n theta = 0\n \n for j in range(i + 1)[::-1]:\n k = i - j + 1\n \n if k > QFT_cutoff:\n break\n \n if b_bits[j]:\n theta += 2 * pi / (1<<k)\n \n if theta != 0:\n adder_circ.cp(theta, control_qubit, a_qubits[i])\n \n return adder_circ\n\n\ndef 
make_multiply_controlled_adder_circ(control_qubits, b_bits, a_qubits, m):\n adder_circ = QuantumCircuit(m,ne)\n \n for i in range(n)[::-1]:\n \n theta = 0\n \n for j in range(i + 1)[::-1]:\n k = i - j + 1\n \n if k > QFT_cutoff:\n break\n \n if b_bits[j]:\n theta += 2 * pi / (1<<k)\n \n if theta != 0:\n adder_circ.mcp(theta, control_qubits, a_qubits[i])\n \n return adder_circ\n\n#adder_circ = make_adder_circ(b_bits, a_qubits, m)\n\n# multiple-control phase gate\n# https://qiskit.org/textbook/ch-gates/more-circuit-identities.html#c-from-\n# express in terms of rotations + toffoli, and decompose tofolli into 6 CNOTs\n\n\n########################################\n\n\n\n\ndef modular_addition(a_qubits,ancillary_qubit,b,N,n,m):\n \n QFT_circ = make_QFT_circ(a_qubits,n,m)\n QFT_circ_inverse = QFT_circ.inverse()\n ones_complement_circ = ones_complement_circuit(a_qubits, m)\n \n # (a' + (N-b))' = a + b - N\n circuit = ones_complement_circ + QFT_circ + make_adder_circ(int_to_bin_LSB(N-b,n), a_qubits, m) + QFT_circ_inverse + ones_complement_circ\n \n # copy high / sign bit to the ancillary qubit\n circuit.cx(a_qubits[-1], ancillary_qubit)\n \n # a + b - N +? N = (a + b) % N\n circuit += QFT_circ + make_controlled_adder_circ(ancillary_qubit, int_to_bin_LSB(N,n), a_qubits, m) + QFT_circ_inverse\n \n # (a + b) % N - b\n circuit += ones_complement_circ + QFT_circ + make_adder_circ(int_to_bin_LSB(b,n), a_qubits, m) + QFT_circ_inverse + ones_complement_circ\n \n # uncompute the ancillary qubit\n circuit.x(a_qubits[-1])\n circuit.cx(a_qubits[-1], ancillary_qubit)\n circuit.x(a_qubits[-1])\n \n # (a + b) % N - b + b = (a + b) % N\n circuit += QFT_circ + make_adder_circ(int_to_bin_LSB(b,n), a_qubits, m) + QFT_circ_inverse\n \n return circuit\n\ndef controlled_modular_addition(control_qubits,a_qubits,ancillary_qubit,b,N,n,m):\n if type(control_qubits) is int:\n control_qubits = [control_qubits]\n \n QFT_circ = make_QFT_circ(a_qubits,n,m)\n QFT_circ_inverse = QFT_circ.inverse()\n ones_complement_circ = ones_complement_circuit(a_qubits, m)\n \n # (a' + (N-b))' = a + b - N\n circuit = ones_complement_circ + QFT_circ + make_multiply_controlled_adder_circ(control_qubits, int_to_bin_LSB(N-b,n), a_qubits, m) + QFT_circ_inverse + ones_complement_circ\n \n # copy high / sign bit to the ancillary qubit\n circuit.cx(a_qubits[-1], ancillary_qubit)\n \n # a + b - N +? 
N = (a + b) % N\n circuit += QFT_circ + make_multiply_controlled_adder_circ(control_qubits + [ancillary_qubit], int_to_bin_LSB(N,n), a_qubits, m) + QFT_circ_inverse\n \n # (a + b) % N - b\n circuit += ones_complement_circ + QFT_circ + make_multiply_controlled_adder_circ(control_qubits, int_to_bin_LSB(b,n), a_qubits, m) + QFT_circ_inverse + ones_complement_circ\n \n # uncompute the ancillary qubit\n circuit.x( a_qubits[-1])\n circuit.cx(a_qubits[-1], ancillary_qubit)\n circuit.x( a_qubits[-1])\n \n # (a + b) % N - b + b = (a + b) % N\n circuit += QFT_circ + make_multiply_controlled_adder_circ(control_qubits, int_to_bin_LSB(b,n), a_qubits, m) + QFT_circ_inverse\n \n circuit.mcx(control_qubits, ancillary_qubit)\n circuit.x(ancillary_qubit)\n \n \n\n return circuit\n\n\ndef modular_multiplication(x_qubits,y_qubits,ancillary_qubit,b,N,n,m):\n circuit = QuantumCircuit(m,ne)\n \n for i in range(n):\n circuit += controlled_modular_addition(x_qubits[i], y_qubits,ancillary_qubit,(b<<i)%N,N,n,m)\n \n return circuit\n\n \ndef conditional_in_place_modular_multiplication(control_qubit, x_qubits, y_qubits, ancillary_qubit ,b,N,n,m):\n # only support singly-controlled multiplication\n assert type(control_qubit) is int\n \n circuit = QuantumCircuit(m,ne)\n \n for i in progressbar.progressbar(range(n)):\n circuit += controlled_modular_addition([x_qubits[i],control_qubit], y_qubits, ancillary_qubit, (b<<i)%N, N, n, m)\n \n \n circuit2 = QuantumCircuit(m,ne)\n b2 = mod_inverse(b, N)\n for i in progressbar.progressbar(range(n)):\n circuit2 += controlled_modular_addition([y_qubits[i],control_qubit], x_qubits, ancillary_qubit, (b2<<i)%N, N, n, m)\n \n circuit += circuit2.inverse()\n \n # conditional swap.\n for i,j in zip(x_qubits, y_qubits):\n circuit.cswap(control_qubit, i, j)\n \n\n return circuit\n\n\ndef conditional_in_place_modular_multiplication_to_string(control_qubit, x_qubits, y_qubits, ancillary_qubit ,b,N,n,m):\n # only support singly-controlled multiplication\n assert type(control_qubit) is int\n \n res = \"\"\n \n for i in progressbar.progressbar(range(n)):\n circuit = controlled_modular_addition([x_qubits[i],control_qubit], y_qubits, ancillary_qubit, (b<<i)%N, N, n, m)\n s = circuit.qasm()\n s = s[find_nth(s, '\\n', 4) + 1:]\n res += s\n \n \n b2 = mod_inverse(b, N)\n for i in progressbar.progressbar(range(n)[::-1]):\n circuit = controlled_modular_addition([y_qubits[i],control_qubit], x_qubits, ancillary_qubit, (b2<<i)%N, N, n, m)\n \n s = circuit.inverse().qasm()\n s = s[find_nth(s, '\\n', 4) + 1:]\n res += s\n \n \n # conditional swap.\n circuit = QuantumCircuit(m, ne)\n for i,j in zip(x_qubits, y_qubits):\n circuit.cswap(control_qubit, i, j)\n \n s = circuit.qasm()\n \n s = s[find_nth(s, '\\n', 4) + 1:]\n res += s\n\n return res\n\n\ndef modular_exponentiation(e_qubits, x_qubits, y_qubits, ancillary_qubit, a_val, N, n, m):\n circuit = QuantumCircuit(m,ne)\n \n for e_qubit in e_qubits:\n circuit += conditional_in_place_modular_multiplication(e_qubit, x_qubits, y_qubits, ancillary_qubit, a_val, N, n, m)\n a_val = (a_val**2)%N\n \n return circuit\n\n\n########################################\n\n\n\n# circuit parameters\n\n# 64-bit parameters\nN = 183265954861256291\na = 74583193119493\n\n\n# allocate registers\nn = 64\nm = ne = num_x_bits = 2*n\n\nq_mem = [0 for _ in range(7*n + 2)]\n\nz_qubits = [i for i in range(0, 2*n)]\nb_qubits = [i for i in range(2*n, 3*n)]\nN_qubits = [i for i in range(3*n, 4*n)]\n\ntmp_qubits = [i for i in range(4*n, 5*n)]\nc_qubits = [i for i in range(5*n, 
6*n)]\ntmp2_qubits = [i for i in range(6*n, 7*n)]\n\nancilla_qubit = 7*n\nhigh_qubit = 7*n + 1\n\n\n\n\n\n########################################\n\n \n \nbase_directory = \"circuit_parts\"\n\n\n\n# finish with the inverse QFT \n# reverse bits b/c big vs little endian\ncircuit = make_QFT_circ(z_qubits[::-1],ne,m).inverse()\ncircuit.measure(z_qubits[::-1], list(range(ne)))\n\nwith open(f\"{base_directory}/circuit_finalization.qasm\",\"w\") as f:\n s = circuit.qasm()\n s = s[find_nth(s, '\\n', 4) + 1:]\n f.write(s)\n\n\n\n\n\n\n", "id": "4331661", "language": "Python", "matching_score": 5.513427257537842, "max_stars_count": 3, "path": "Quantum Rev 1/generate/generate_finalization.py" }, { "content": "import numpy as np\nfrom fractions import Fraction\nfrom matplotlib import pyplot as plt\n\nfrom math import gcd, log2, pi\nimport random\n\n# Import Qiskit\nfrom qiskit import QuantumCircuit\nfrom qiskit import Aer, execute\n\n\n########################################\n\n# circuit parameters\n\n\"\"\"\nto match Figure 5.1 in original Shor paper, use:\nN = 33\na = 5\nn = 6\nne = 8\n\nanother good example is:\nN = 55\na = 3\nn = 7\nne = 8\n\"\"\"\n\n# number to factor\nN = 33\n\n# base of exponent\na = 5\n\nprint(\"a = {}\\nN = {}\\n\".format(a,N))\nassert gcd(a,N) == 1\nassert 2 <= a < N\n\n\n########################################\n\n# number of bits + padding to store N\n# you need an extra bit or 2 of padding to prevent overflow.\n# 2 bits of padding always works, 1 sometimes works\nn = 6\n\n# number of bits in exponent\n# = number of classical bits as output.\n# the more exponent bits, the sharper the peaks in the output\n# --> the more precise the estimate of the order.\nne = 6\n\n\nassert n - 0.5 > log2(N), \"increase the number of n bits\"\n\n\n# total number of qubits used\nm = 2*n + ne + 1\n\n# stores result\nx_qubits = list(range(ne, n+ne))\n# auxillary bits used for controlled multiplication\ny_qubits = list(range(n+ne, 2*n + ne))\n# exponent bits\nz_qubits = list(range(ne))\n# auxillary bit used in modular addition to copy the carry/overflow bit\nancillary_qubit = m - 1\n\n\n########################################\n\n\n# we represent negative numbers via the ones-complement\n\n# little-endian encoding (LSB first)\n# ones complement\ndef int_to_bin_LSB(x,n):\n sign = (x < 0)\n x = abs(x)\n x = bin(x)[2:]\n x = x.zfill(n)[:n]\n\n x = x[::-1]\n x = [int(i) for i in x]\n return x\n\n\n# LSB first\ndef bin_to_int_LSB(L):\n x = ''.join([str(i) for i in L])\n x = eval('0b' + x[::-1])\n return x\n\n\ndef ones_complement(x,n):\n return (1<<n) + ~x\n\n\n########################################\n\n\n\n# classical constant factors, in little endian (LSB first)\ndef get_const_factors(a,N,n):\n const_factors = [(a<<i)%N for i in range(n)]\n const_factors = [int_to_bin_LSB(t,n) for t in const_factors]\n const_factors = [[int(i) for i in t] for t in const_factors]\n\n return const_factors\n\n\ndef egcd(a, b):\n if a == 0:\n return (b, 0, 1)\n else:\n g, y, x = egcd(b % a, a)\n return (g, x - (b // a) * y, y)\n\ndef mod_inverse(a, m):\n g, x, y = egcd(a, m)\n if g != 1:\n raise Exception('modular inverse does not exist')\n else:\n return x % m\n\n########################################\n\ndef cphi_gate(circ, k, control, target):\n theta = 2 * pi / (1<<k)\n circ.cp(theta, control, target)\n\ndef phi_gate(circ, k, target):\n theta = 2 * pi / (1<<k)\n circ.p(theta, target)\n\n\n# negation circuit\ndef ones_complement_circuit(a_qubits, m):\n circ = QuantumCircuit(m,ne)\n \n for i in a_qubits:\n 
circ.x(i)\n \n return circ\n\n \n########################################\n \n# initialization circuit\n\ndef make_initialization_circuit(a_qubits, a_bits, m):\n initialize_circ = QuantumCircuit(m,ne)\n for i,v in enumerate(a_bits):\n if v:\n initialize_circ.x(a_qubits[i])\n \n return initialize_circ\n\n\n########################################\n \n# quantum fourier transform\n \nQFT_cutoff = 5 + int(log2(n))\n\ndef make_QFT_circ(a_qubits,n,m):\n \n QFT_circ = QuantumCircuit(m,ne)\n \n QFT_cutoff = 5 + int(log2(n))\n \n for i in range(n)[::-1]:\n QFT_circ.h(a_qubits[i])\n \n for j in range(i)[::-1]:\n k = i - j + 1\n \n if k > QFT_cutoff:\n break\n \n cphi_gate(QFT_circ, k,a_qubits[j],a_qubits[i])\n \n return QFT_circ\n\n\n\n########################################\n\n# quantum adder\n# in-place a+b, where a is quantum and b is classical\n\ndef make_adder_circ(b_bits, a_qubits, m):\n adder_circ = QuantumCircuit(m,ne)\n \n for i in range(n)[::-1]:\n \n theta = 0\n \n for j in range(i + 1)[::-1]:\n k = i - j + 1\n \n if k > QFT_cutoff:\n break\n \n if b_bits[j]:\n theta += 2 * pi / (1<<k)\n \n if theta != 0:\n adder_circ.p(theta, a_qubits[i])\n \n return adder_circ\n\ndef make_controlled_adder_circ(control_qubit, b_bits, a_qubits, m):\n adder_circ = QuantumCircuit(m,ne)\n \n for i in range(n)[::-1]:\n \n theta = 0\n \n for j in range(i + 1)[::-1]:\n k = i - j + 1\n \n if k > QFT_cutoff:\n break\n \n if b_bits[j]:\n theta += 2 * pi / (1<<k)\n \n if theta != 0:\n adder_circ.cp(theta, control_qubit, a_qubits[i])\n \n return adder_circ\n\n\ndef make_multiply_controlled_adder_circ(control_qubits, b_bits, a_qubits, m):\n adder_circ = QuantumCircuit(m,ne)\n \n for i in range(n)[::-1]:\n \n theta = 0\n \n for j in range(i + 1)[::-1]:\n k = i - j + 1\n \n if k > QFT_cutoff:\n break\n \n if b_bits[j]:\n theta += 2 * pi / (1<<k)\n \n if theta != 0:\n adder_circ.mcp(theta, control_qubits, a_qubits[i])\n \n return adder_circ\n\n\n\n########################################\n\n\n\n\ndef modular_addition(a_qubits,ancillary_qubit,b,N,n,m):\n \n QFT_circ = make_QFT_circ(a_qubits,n,m)\n QFT_circ_inverse = QFT_circ.inverse()\n ones_complement_circ = ones_complement_circuit(a_qubits, m)\n \n # (a' + (N-b))' = a + b - N\n circuit = ones_complement_circ + QFT_circ + make_adder_circ(int_to_bin_LSB(N-b,n), a_qubits, m) + QFT_circ_inverse + ones_complement_circ\n \n # copy high / sign bit to the ancillary qubit\n circuit.cx(a_qubits[-1], ancillary_qubit)\n \n # a + b - N +? 
N = (a + b) % N\n circuit += QFT_circ + make_controlled_adder_circ(ancillary_qubit, int_to_bin_LSB(N,n), a_qubits, m) + QFT_circ_inverse\n \n # (a + b) % N - b\n circuit += ones_complement_circ + QFT_circ + make_adder_circ(int_to_bin_LSB(b,n), a_qubits, m) + QFT_circ_inverse + ones_complement_circ\n \n # uncompute the ancillary qubit\n circuit.x(a_qubits[-1])\n circuit.cx(a_qubits[-1], ancillary_qubit)\n circuit.x(a_qubits[-1])\n \n # (a + b) % N - b + b = (a + b) % N\n circuit += QFT_circ + make_adder_circ(int_to_bin_LSB(b,n), a_qubits, m) + QFT_circ_inverse\n \n return circuit\n\ndef controlled_modular_addition(control_qubits,a_qubits,ancillary_qubit,b,N,n,m):\n if type(control_qubits) is int:\n control_qubits = [control_qubits]\n \n QFT_circ = make_QFT_circ(a_qubits,n,m)\n QFT_circ_inverse = QFT_circ.inverse()\n ones_complement_circ = ones_complement_circuit(a_qubits, m)\n \n # (a' + (N-b))' = a + b - N\n circuit = ones_complement_circ + QFT_circ + make_multiply_controlled_adder_circ(control_qubits, int_to_bin_LSB(N-b,n), a_qubits, m) + QFT_circ_inverse + ones_complement_circ\n \n # copy high / sign bit to the ancillary qubit\n circuit.cx(a_qubits[-1], ancillary_qubit)\n \n # a + b - N +? N = (a + b) % N\n circuit += QFT_circ + make_multiply_controlled_adder_circ(control_qubits + [ancillary_qubit], int_to_bin_LSB(N,n), a_qubits, m) + QFT_circ_inverse\n \n # (a + b) % N - b\n circuit += ones_complement_circ + QFT_circ + make_multiply_controlled_adder_circ(control_qubits, int_to_bin_LSB(b,n), a_qubits, m) + QFT_circ_inverse + ones_complement_circ\n \n # uncompute the ancillary qubit\n circuit.x( a_qubits[-1])\n circuit.cx(a_qubits[-1], ancillary_qubit)\n circuit.x( a_qubits[-1])\n \n # (a + b) % N - b + b = (a + b) % N\n circuit += QFT_circ + make_multiply_controlled_adder_circ(control_qubits, int_to_bin_LSB(b,n), a_qubits, m) + QFT_circ_inverse\n \n circuit.mcx(control_qubits, ancillary_qubit)\n circuit.x(ancillary_qubit)\n \n \n\n return circuit\n\n\ndef modular_multiplication(x_qubits,y_qubits,ancillary_qubit,b,N,n,m):\n circuit = QuantumCircuit(m,ne)\n \n for i in range(n):\n circuit += controlled_modular_addition(x_qubits[i], y_qubits,ancillary_qubit,(b<<i)%N,N,n,m)\n \n return circuit\n\n \ndef conditional_in_place_modular_multiplication(control_qubit, x_qubits, y_qubits, ancillary_qubit ,b,N,n,m):\n # only support singly-controlled multiplication\n assert type(control_qubit) is int\n \n circuit = QuantumCircuit(m,ne)\n \n for i in range(n):\n circuit += controlled_modular_addition([x_qubits[i],control_qubit], y_qubits, ancillary_qubit, (b<<i)%N, N, n, m)\n \n \n circuit2 = QuantumCircuit(m,ne)\n b2 = mod_inverse(b, N)\n for i in range(n):\n circuit2 += controlled_modular_addition([y_qubits[i],control_qubit], x_qubits, ancillary_qubit, (b2<<i)%N, N, n, m)\n \n circuit += circuit2.inverse()\n \n # conditional swap.\n for i,j in zip(x_qubits, y_qubits):\n circuit.cswap(control_qubit, i, j)\n \n\n return circuit\n\n\n\"\"\"\ntakes |z>|0> -> |z>|a^z mod N>\n\nz stores exponent bits\nx stores result\ny is ancilla\n\"\"\"\ndef modular_exponentiation(z_qubits, x_qubits, y_qubits, ancillary_qubit, a, N, n, m):\n circuit = QuantumCircuit(m,ne)\n \n a_tmp = a\n for z_qubit in z_qubits:\n circuit += conditional_in_place_modular_multiplication(z_qubit, x_qubits, y_qubits, ancillary_qubit, a_tmp, N, n, m)\n a_tmp = pow(a_tmp,2,N)\n \n return circuit\n \n\n\n########################################\n\ncircuit = QuantumCircuit(m,ne)\n\n# initialize multiplication register to 1\nx_bits = 
int_to_bin_LSB(1, n)\ncircuit += make_initialization_circuit(x_qubits, x_bits, m)\n\n\n# hadamard the exponent bits\nfor z_qubit in z_qubits:\n circuit.h(z_qubit)\n\n\n# modular exponentiation\ncircuit += modular_exponentiation(z_qubits, x_qubits, y_qubits, ancillary_qubit, a, N, n, m)\n\n\n# finish with the inverse QFT \n# reverse bits b/c big vs little endian\ncircuit += make_QFT_circ(z_qubits[::-1],ne,m).inverse()\n\n# and finally measure the output\n# reverse bits b/c big vs little endian\ncircuit.measure(z_qubits[::-1], list(range(ne)))\n\n\n########################################\n\nprint(\"[+] generated circuit\")\nwith open(\"circuit.qasm\",\"w\") as f: f.write(circuit.qasm())\nprint(\"[+] saved circuit\\n\")\n\n\n# can load circuit from str.\n#qc = QuantumCircuit.from_qasm_str(qasm_str)\n\n\n########################################\n\n\n# simulate circuit\n\n# Select the QasmSimulator from the Aer provider\nsimulator = Aer.get_backend('qasm_simulator')\n\n# Execute and get counts\nresult = execute(circuit, simulator, shots=1024*8).result()\ncounts = result.get_counts(circuit)\n\n\n########################################\n\n# analyze results\n\n\n# find order of a classically\n\npowers_of_a = [1]\na_tmp = 1\nfor i in range(N):\n a_tmp = (a_tmp * a) % N\n powers_of_a.append(a_tmp)\n \norder = 1 + powers_of_a[1:].index(1)\n\n\n\nprint(\"[+] order = {}\".format(order))\n\nt = pow(a, order//2, N)\nif (order % 2 == 0) and t != N-1:\n \n print(\"[+] recovered factors of N = {}\".format(N))\n print(\"[+] {}\\t{}\".format(\n gcd(N, t - 1),\n gcd(N, t + 1) ))\n\n\n########################################\n\n\n# process the quantum results\n\nq = 1 << ne\nmeasurement_probabilities = np.zeros(q)\nfor i,j in counts.items():\n i = int(i,2)\n measurement_probabilities[i] = j\n \nmeasurement_probabilities /= np.sum(measurement_probabilities)\n\nplt.plot(measurement_probabilities)\nplt.xlabel(\"QFT^-1 {exponent}\")\nplt.ylabel(\"probability\")\nplt.title(f\"Shor's Algorithm Measurement Probabilities\\nN={N}, a={a}, ne={ne}\")\nplt.savefig(f\"output_distribution_N={N},a={a},ne={ne}.png\", dpi=300)\nplt.show()\n\n\nprint(\"\")\nfor i in range(10):\n s = np.random.choice(np.arange(q), p=measurement_probabilities)\n f = Fraction(s/q).limit_denominator(N)\n recovered_order = f.denominator\n print(\"[+] s = {}\\tf = {: <10}order = {}\".format(s, str(f), recovered_order))\n\n\n", "id": "5893068", "language": "Python", "matching_score": 2.6186444759368896, "max_stars_count": 3, "path": "shor clean/generate_shors_clean.py" }, { "content": "import re\nimport progressbar \nfrom math import gcd, log2\n\ndef egcd(a, b):\n if a == 0:\n return (b, 0, 1)\n else:\n g, y, x = egcd(b % a, a)\n return (g, x - (b // a) * y, y)\n\ndef mod_inverse(a, m):\n g, x, y = egcd(a, m)\n if g != 1:\n raise Exception('modular inverse does not exist')\n else:\n return x % m\n\n\"\"\"\nCurraco adder\nhttps://arxiv.org/pdf/1905.09749.pdf\n\"\"\"\n\ndef swap_gate(a,b):\n return f\"swap q[{a}],q[{b}];\\n\"\n\ndef not_gate(a):\n return f\"x q[{a}];\\n\"\n\ndef cnot_gate(a,b):\n return f\"cx q[{a}],q[{b}];\\n\"\n\ndef ccnot_gate(a,b,c):\n return f\"ccx q[{a}],q[{b}],q[{c}];\\n\"\n\n\n\n\n# extract number within brackets\npattern = re.compile(r\"\\[(\\d+)\\]\")\n\ndef eval_swap(i,j):\n q_mem[i],q_mem[j] = q_mem[j],q_mem[i]\n \ndef eval_ccx(i,j,k):\n if q_mem[i] and q_mem[j]:\n q_mem[k] = 1 - q_mem[k]\n \ndef eval_cx(i,j):\n if q_mem[i]:\n q_mem[j] = 1 - q_mem[j]\n \ndef eval_x(i):\n q_mem[i] = 1 - q_mem[i]\n\ndef eval_line(s):\n args = 
pattern.findall(s)\n args = list(map(int, args))\n \n if s.startswith(\"x \"):\n eval_x(*args)\n elif s.startswith(\"cx \"):\n eval_cx(*args)\n elif s.startswith(\"ccx \"):\n eval_ccx(*args)\n elif s.startswith(\"swap \"):\n eval_swap(*args)\n \n\n\"\"\"\nassume all the gates are self-inverse for now,\nso we only have to reverse the order of the gates\n\"\"\"\ndef invert_circuit(circuit):\n s = circuit.split(\"\\n\")\n s = s[:-1]\n s = s[::-1]\n s = '\\n'.join(s)\n s += '\\n'\n return s\n \n#####################################\n\n# we represent negative numbers via the ones-complement\n\n# big-endian encoding (MSB first)\n# ones complement\ndef int_to_bin(x,n):\n sign = (x < 0)\n x = abs(x)\n x = bin(x)[2:]\n x = x.zfill(n)[:n]\n if sign:\n x = x.replace(\"1\",\"a\").replace(\"0\",\"1\").replace(\"a\",\"0\")\n return x\n\ndef ones_complement(x,n):\n return (1<<n) + ~x\n\n\n# MSB first\ndef bin_to_int(L):\n x = ''.join([str(i) for i in L])\n x = eval('0b' + x)\n return x\n\n#####################################\n\n# The in-place majority gate MAJ\ndef MAJ(a,b,c):\n s = \"\"\n s += cnot_gate(c,b)\n s += cnot_gate(c,a)\n s += ccnot_gate(a,b,c)\n \n return s\n\n# UnMajority and Add\ndef UMA(a,b,c):\n s = \"\"\n s += ccnot_gate(a,b,c)\n s += cnot_gate(c,a)\n s += cnot_gate(a,b)\n \n return s\n\n# addition circuit\n\"\"\"\nin-place addition of a + b\nresult is stored in `b`.\n`a` is left unchanged.\na,b qubits must be specified in little endian order (LSB first)\n\n`ancilla_qubit` must be initialized to 0; is reset to 0 at end of circuit\n\nif given, then the high bit is stored in `high_qubit`\n\"\"\"\ndef addition_circuit(n, a_qubits, b_qubits, ancilla_qubit, high_qubit=None):\n assert len(a_qubits) == n\n assert len(b_qubits) == n\n \n qubit_order = [ancilla_qubit]\n for i,j in zip(b_qubits, a_qubits):\n qubit_order += [i,j]\n \n s = \"\"\n for i in range(n):\n idxs = qubit_order[2*i:3+2*i]\n s += MAJ(*idxs)\n \n if high_qubit is not None:\n s += cnot_gate(a_qubits[-1], high_qubit)\n \n for i in range(n)[::-1]:\n idxs = qubit_order[2*i:3+2*i]\n s += UMA(*idxs)\n \n return s\n\ndef ones_complement_circuit(a_qubits):\n circuit = \"\"\n for i in a_qubits:\n circuit += not_gate(i)\n return circuit\n \n# (a - b) = (a' + b)'\ndef subtraction_circuit(n, a_qubits, b_qubits, ancilla_qubit, high_qubit=None):\n \n circuit = \"\"\n circuit += ones_complement_circuit(a_qubits)\n \n circuit += addition_circuit(n, a_qubits, b_qubits, ancilla_qubit, high_qubit)\n \n circuit += ones_complement_circuit(a_qubits)\n circuit += ones_complement_circuit(b_qubits)\n \n return circuit\n\n# (-a + b) = (a + b')'\ndef subtraction_circuit_2(n, a_qubits, b_qubits, ancilla_qubit, high_qubit=None):\n \n circuit = \"\"\n circuit += ones_complement_circuit(b_qubits)\n \n circuit += addition_circuit(n, a_qubits, b_qubits, ancilla_qubit, high_qubit)\n \n circuit += ones_complement_circuit(b_qubits)\n \n return circuit\n\n\n\n\n\n\"\"\"\ncomputes in-place modular addition:\n|a>|b>|N> -> |a>|(a+b)%N>|N>\n\nuses several ancilla qubits, which are all uncomputed to zero by the circuit.\n\"\"\"\ndef modular_addition_circuit_(n, a_qubits, b_qubits, N_qubits, tmp_qubits, ancilla_qubit, high_qubit):\n \n # |a>|b>|N>|0>|0>\n \n circuit = addition_circuit(n, a_qubits, b_qubits, ancilla_qubit, None)\n # |a>|a+b>|N>|0>|0>\n \n circuit += subtraction_circuit_2(n, N_qubits, b_qubits, ancilla_qubit, high_qubit)\n # |a>|a+b-N>|N>|high qubit>|0>\n \n for i,j in zip(N_qubits, tmp_qubits):\n circuit += ccnot_gate(high_qubit, i, j)\n # 
|a>|a+b-N>|N>|high qubit>|0 or N>\n \n circuit += addition_circuit(n, tmp_qubits, b_qubits, ancilla_qubit, None)\n # |a>|(a+b)%N>|N>|high qubit>|0 or N>\n \n # uncompute tmp\n for i,j in zip(N_qubits, tmp_qubits):\n circuit += ccnot_gate(high_qubit, i, j)\n # |a>|(a+b)%N>|N>|high qubit>|0>\n \n # uncompute high qubit\n circuit += subtraction_circuit_2(n, a_qubits, b_qubits, ancilla_qubit, high_qubit)\n circuit += addition_circuit(n, a_qubits, b_qubits, ancilla_qubit, None)\n \n circuit += not_gate(high_qubit)\n \n return circuit\n\ndef memoize(func):\n cache = dict()\n \n def memoized_func(*args):\n x = []\n for i in args:\n if type(i) is list:\n i = tuple(i)\n x.append(i)\n x = tuple(x)\n args = x\n \n if args in cache:\n return cache[args]\n result = func(*args)\n cache[args] = result\n return result\n\n return memoized_func\n\nmodular_addition_circuit = memoize(modular_addition_circuit_)\n\n################################\n\n\n# 256-bit parameters\n# a = 3\n# N = 177513599716362998539142178307007771385442861398395017247577542547366103\n\n# 64-bit parameters\nN = 183265954861256291\na = 74583193119493\n\n\n# allocate registers\nn = 64\nnum_x_bits = 2*n\n\nq_mem = [0 for _ in range(7*n + 2)]\n\nx_qubits = [i for i in range(0, 2*n)]\nb_qubits = [i for i in range(2*n, 3*n)]\nN_qubits = [i for i in range(3*n, 4*n)]\n\ntmp_qubits = [i for i in range(4*n, 5*n)]\nc_qubits = [i for i in range(5*n, 6*n)]\ntmp2_qubits = [i for i in range(6*n, 7*n)]\n\nancilla_qubit = 7*n\nhigh_qubit = 7*n + 1\n\n\"\"\"\nif `conditional_qubit`, then set q_mem[dst] = src\n\nNote: `src` contains the classical VALUES, not the index of any quantum registers.\n\"\"\"\ndef conditional_classical_write(conditional_qubit, src_bits, dst_registers):\n s = \"\"\n for i,j in zip(src_bits, dst_registers):\n if i:\n s += cnot_gate(conditional_qubit, j)\n \n return s\n\ndef conditional_conditional_classical_write(conditional_qubit1, conditional_qubit2, src_bits, dst_registers):\n s = \"\"\n for i,j in zip(src_bits, dst_registers):\n if i:\n s += ccnot_gate(conditional_qubit1, conditional_qubit2, j)\n \n return s\n\n\n\"\"\"\nf_{a,N}(x) = (a*x)%N\nfor classical parameters a,N\n\na*x \n=\na * x_0 + \n(2*a) * x_1 +\n(2^2*a) * x_2 +\n...\n\"\"\"\ndef conditional_multiplication(conditional_qubit, const_factors, b_qubits, a_qubits, c_qubits):\n circuit = \"\"\n \n for src_bits, b_bit in zip(const_factors, b_qubits):\n \n circuit += conditional_conditional_classical_write(conditional_qubit, b_bit, src_bits, a_qubits)\n \n circuit += modular_addition_circuit(n, a_qubits, c_qubits, N_qubits, tmp_qubits, ancilla_qubit, high_qubit)\n \n circuit += conditional_conditional_classical_write(conditional_qubit, b_bit, src_bits, a_qubits)\n \n \n # if the control is 0, then copy the argument into the dst register.\n circuit += not_gate(conditional_qubit)\n for i,j in zip(b_qubits, c_qubits):\n circuit += ccnot_gate(conditional_qubit, i, j)\n circuit += not_gate(conditional_qubit)\n \n return circuit\n\n\n\n# multiplication\n\ndef multiplication(const_factors, b_qubits, a_qubits, c_qubits):\n circuit = \"\"\n for src_bits, b_bit in zip(const_factors, b_qubits):\n \n circuit += conditional_classical_write(b_bit, src_bits, a_qubits)\n \n circuit += modular_addition_circuit(n, a_qubits, c_qubits, N_qubits, tmp_qubits, ancilla_qubit, high_qubit)\n \n circuit += conditional_classical_write(b_bit, src_bits, a_qubits)\n\n return circuit\n\n\n# classical constant factors, in little endian (LSB first)\ndef get_const_factors(a,N,n):\n const_factors = 
[(a<<i)%N for i in range(n)]\n const_factors = [int_to_bin(t,n)[::-1] for t in const_factors]\n const_factors = [[int(i) for i in t] for t in const_factors]\n\n return const_factors\n\n\ndef in_place_conditional_multiplication(a, N, num_x_bits, conditional_qubit, b_qubits, tmp2_qubits, c_qubits):\n circuit = \"\"\n circuit += conditional_multiplication(conditional_qubit, get_const_factors(a,N,n), b_qubits, tmp2_qubits, c_qubits)\n \n for i,j in zip(b_qubits, c_qubits):\n circuit += swap_gate(i,j)\n \n circuit += invert_circuit(conditional_multiplication(conditional_qubit, get_const_factors(mod_inverse(a, N),N,n), b_qubits, tmp2_qubits, c_qubits))\n \n return circuit\n\n\n\ndef in_place_modular_exponentiation(\n a,N,num_x_bits,\n x_qubits, b_qubits, tmp2_qubits, c_qubits):\n circuit = \"\"\n \n a_val = a\n for x_qubit in x_qubits:\n circuit += in_place_conditional_multiplication(a_val,N,num_x_bits,x_qubit, b_qubits, tmp2_qubits, c_qubits)\n a_val = (a_val**2)%N\n \n return circuit\n\n\n################################\n\n\n\n# create circuit\nbase_directory = \"circuit_parts\"\n\n\ncircuit = \"\"\"OPENQASM 2.0;\ninclude \"qelib1.inc\";\nqreg q[{}];\ncreg c[{}];\n\n\"\"\".format(len(q_mem), len(x_qubits))\n\nfor i,v in enumerate(N_qubits):\n circuit += \"cx c[{}],q[{}];\\n\".format(i,v)\n\n\n# hadamard the exponent bits\nfor x_qubit in x_qubits:\n circuit += f\"h q[{x_qubit}];\\n\"\n\n# initialize `b` to 1\ncircuit += not_gate(b_qubits[0])\n\n\nwith open(f\"{base_directory}/circuit_initialization.qasm\",\"w\") as f:\n f.write(circuit)\n\n\n\n\n\"\"\"\nInstead of writing the entire circuit to a file, we write each modular multiplication to its own circuit_part file.\nThis means that teams don't have to analyze the entire large circuit.\n\"\"\"\n\n# circuit += in_place_modular_exponentiation( a,N,num_x_bits, x_qubits, b_qubits, tmp2_qubits, c_qubits)\n\n\na_val = a\nfor i,x_qubit in enumerate(progressbar.progressbar(x_qubits)):\n circuit = in_place_conditional_multiplication(a_val,N,num_x_bits,x_qubit, b_qubits, tmp2_qubits, c_qubits)\n a_val = (a_val**2)%N\n \n with open(f\"{base_directory}/circuit_part_{i}.qasm\",\"w\") as f:\n f.write(circuit)\n \n\n\n", "id": "11647745", "language": "Python", "matching_score": 2.366544008255005, "max_stars_count": 3, "path": "Quantum Rev 1/generate/generate_QR_1.py" }, { "content": "import re\nimport math\n\n\n# find nth occurence of needle in a haystack\n\ndef find_nth(haystack, needle, n):\n start = haystack.find(needle)\n while start >= 0 and n > 1:\n start = haystack.find(needle, start+len(needle))\n n -= 1\n return start\n\n\n####################################################\n\n\nwith open(\"../challenge/circuit_parts/circuit_part_0.qasm\", \"r\") as f:\n prog = f.read()\n \n\n####################################################\n\n# extract the 1st addition subroutine\n\ns = \"cx q[384],q[320];\\n\"\ni = prog.index(s)\naddition_circuit = prog[i:]\n\ns = \"cx q[448],q[320];\\n\"\nj = addition_circuit.index(s) + len(s)\naddition_circuit = addition_circuit[:j]\n\nlen_addition_circuit = addition_circuit.count(\"\\n\")\n\n\"\"\"\nnote that all the additions and registers and whatnot are 64-bit integers.\nand 384 / 64 = 6, 320 / 64 = 5,\nso this is adding-in-place registers 5 and 6\n\"\"\"\n\nprog = prog.replace(addition_circuit, \"add #5, #6\\n\")\n\n####################################################\n\n# coalesce all the repeated x-gates\n\ns = \"x q[320];\\n\"\ni = prog.index(s)\nnegation_circuit = prog[i:]\n\ns = \"x q[383];\\n\"\nj = 
negation_circuit.index(s) + len(s)\nnegation_circuit = negation_circuit[:j]\n\nprog = prog.replace(negation_circuit, \"negate #5\\n\")\n\n####################################################\n\n\"\"\"\nonce you've reversed the first addition subroutine,\nthen identifying the rest of them should be pretty straightforward.\n\"\"\"\n\n# extract the 2nd addition subroutine\n\ns = \"cx q[192],q[320];\\n\"\ni = prog.index(s)\naddition_circuit = prog[i:]\n\nj = find_nth(addition_circuit, \"\\n\", len_addition_circuit + 1) + 1\naddition_circuit = addition_circuit[:j]\n\n#this is adding-in-place registers 5 and 3\nprog = prog.replace(addition_circuit, \"add #5, #3\\n\")\n\n####################################################\n\n# coalesce the ccx\n\ns = \"ccx q[449],q[192],q[64];\\n\"\ni = prog.index(s)\nnegation_circuit = prog[i:]\n\ns = \"ccx q[449],q[255],q[319];\\n\"\nj = negation_circuit.index(s) + len(s)\nnegation_circuit = negation_circuit[:j]\n\nprog = prog.replace(negation_circuit, \"ccx q[449], #3, #4\\n\")\n\n####################################################\n\n# extract the 3rd addition subroutine\n\ns = \"cx q[64],q[320];\\n\"\ni = prog.index(s)\naddition_circuit = prog[i:]\n\nj = find_nth(addition_circuit, \"\\n\", len_addition_circuit + 1) + 1\naddition_circuit = addition_circuit[:j]\n\nprog = prog.replace(addition_circuit, \"add #4, #5\\n\")\n\n####################################################\n\n# extract the 4th addition subroutine\n\ns = \"cx q[384],q[320];\\n\"\ni = prog.index(s)\naddition_circuit = prog[i:]\n\nj = find_nth(addition_circuit, \"\\n\", len_addition_circuit + 1) + 1\naddition_circuit = addition_circuit[:j]\n\nprog = prog.replace(addition_circuit, \"add #6, #5\\n\")\n\n\n####################################################\n\n\"\"\"\ncoalesce ccx writes\n\"\"\"\n\nN = 64\n# register #6 starts with this qubit\nidx_start = 6 * N\n\n\ndef bin_to_int(b):\n res = 0\n for i in b:\n res |= 1<<i\n return res\n\n\"\"\"\nmatch repeated ccx lines where the first two controls are the same\n\nccx q[0],q[128],q[384];\nccx q[0],q[128],q[386];\n...\n\"\"\"\npattern = re.compile(r\"ccx q\\[(\\d+)\\],q\\[(\\d+)],q\\[\\d+\\];\\n(ccx q\\[\\1\\],q\\[\\2],q\\[\\d+\\];\\n)+\")\n\npattern2 = re.compile(r\"ccx q\\[(\\d+)\\],q\\[(\\d+)],q\\[(\\d+)\\];\")\n\nindices = [(m.start(0), m.end(0)) for m in re.finditer(pattern, prog)]\n\n\nnew_prog = prog\n\nfor segment in indices:\n s = prog[segment[0]:segment[1]]\n \n qubits = pattern2.findall(s)\n qubits = [int(i[2]) for i in qubits]\n \n idx_min = min(qubits)\n idx_max = max(qubits)\n register = math.floor(idx_min / N)\n \n if idx_max >= register * N + N:\n continue\n \n qubits = [i - register*N for i in qubits]\n \n number = bin_to_int(qubits)\n \n i = find_nth(s, \",q\", 2)\n cmd = s[:i]\n cmd += f\", `{number}` -> #{register}\\n\"\n \n new_prog = new_prog.replace(s, cmd)\n \nprog = new_prog\n\n\n####################################################\n\n\nprint(prog[:prog.index(\"swap\")])\n\n", "id": "6920128", "language": "Python", "matching_score": 0.6267305612564087, "max_stars_count": 3, "path": "Quantum Rev 1/solve/solve_static.py" }, { "content": "import re\nfrom math import pi, log2\nimport numpy as np\n\n# grep mcphase ../challenge/circuit_parts/circuit_z_0.qasm -A 0 > output_z_0.txt\n\nwith open(\"output_z_0.txt\", \"r\") as f:\n data = f.read(10000000)\n \ndata = data.split(\"\\n--\\n\")\n\n\nn = 64\n\npattern = re.compile(r\"\\[(\\d+)\\]\")\n\nprint(\"\\n########################################\\n\")\n\n\nlines = 
data[0].split(\"\\n\")\n\nphases = np.zeros(n)\n\nfor line in lines:\n args = pattern.findall(line)\n i = int(args[-1]) - 3 * n\n \n phase = line[line.find(\"(\")+1:line.find(\")\")]\n phase = eval(phase) # b/c we love security and best practices here\n print(phase / (2 * pi))\n \n phases[i] = phase / (2 * pi)\n \n\nprint(\"\\n########################################\\n\")\n\n\n\"\"\"\nwe need to convert the rotation angles into binary decimals.\nLet's sweep the number of digits and see what the resulting loss of accuracy is.\n\"\"\"\nfor m in range(5, 20):\n scaling = (1 << m)\n scaled_phases = phases * scaling\n error = np.sum(np.abs(np.round(scaled_phases) - scaled_phases))\n print(\"[+] {}\\t{}\".format(m, error))\n \n\nprint(\"\\n########################################\\n\")\n\n\nm = 12\nscaling = (1 << m)\nscaled_phases = [int(i) for i in np.round(phases * scaling)]\n\nbits = []\nfor i,v in enumerate(scaled_phases):\n print(f\"[+] {i}\\t{v:0{m}b}\")\n bits.append(f\"{v:0{m}b}\"[0])\n \nnumber = int(''.join(bits), 2)\n\n\nprint(\"\\n########################################\\n\")\n\n\ndef process(lines, m=12):\n phases = np.zeros(n)\n \n for line in lines:\n args = pattern.findall(line)\n i = int(args[-1]) - 3 * n\n \n phase = line[line.find(\"(\")+1:line.find(\")\")]\n phase = eval(phase) # b/c we love security and best practices here\n \n phases[i] = phase / (2 * pi)\n \n scaling = (1 << m)\n scaled_phases = [int(i) for i in np.round(phases * scaling)]\n \n bits = []\n for i,v in enumerate(scaled_phases):\n bits.append(f\"{v:0{m}b}\"[0])\n \n number = int(''.join(bits[::-1]), 2)\n return number\n\n\nfor d in data[:20]:\n lines = d.split(\"\\n\")\n number = process(lines)\n print(number)\n\n\nN = process(data[1].split(\"\\n\"))\na = process(data[2].split(\"\\n\"))\n\nprint(f\"\\n\\nrecovered N = {N}\")\n", "id": "2551035", "language": "Python", "matching_score": 0.25897592306137085, "max_stars_count": 3, "path": "Quantum Rev 2/solve/solve.py" } ]
2.366544
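The post-processing block at the bottom of shor clean/generate_shors_clean.py turns a measurement s into a candidate order via Fraction(s/q).limit_denominator(N): the inverse QFT concentrates probability near multiples of q/r, so the order r appears as the denominator of a good rational approximation to s/q. A small, self-contained sketch of just that classical step, reusing the toy parameters N=33, a=5, ne=8 suggested in that file's comments (the exact Fraction(s, q) form used here avoids float rounding; the repo's Fraction(s/q) is equivalent in practice):

from fractions import Fraction

N, a, ne = 33, 5, 8   # toy values from the generate_shors_clean.py comments
q = 1 << ne           # size of the measured exponent register, 2**ne

# classical order of a modulo N, for reference
r, t = 1, a % N
while t != 1:
    t = (t * a) % N
    r += 1
print("true order:", r)   # 10 for a = 5, N = 33

for k in range(1, r):
    s = round(k * q / r)  # an idealized, noise-free measurement peak at k*q/r
    f = Fraction(s, q).limit_denominator(N)
    print(s, f, f.denominator)  # every denominator divides r; many equal it

When gcd(k, r) > 1 the recovered denominator is a proper divisor of r rather than r itself, which is why the script samples several measurements instead of trusting a single one.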
mvgrigoriev
[ { "content": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Fri Dec 29 07:34:46 2017\n\n@author: MVGrigoriev\n\n@task: Data preprocessing in Pandas\n\n\"\"\"\n\nimport pandas\nimport os\ndata = pandas.read_csv('titanic.csv', index_col='PassengerId') # Data import\n\n# How many men and women were traveling by ship?\n#\nsexCounts = data['Sex'].value_counts()\nwith open('1_1.txt', 'w') as f1:\n print(sexCounts[0], sexCounts[1], file=f1, end='')\n\n# What part of the passengers managed to survive?\n#\nsurvivePassengers = data['Survived'].value_counts()\nwith open('1_2.txt', 'w') as f2:\n print(round(100. * survivePassengers[1] / len(data), 2), file=f2, end='') # Round values\n\n# What percentage of the first class passengers were among all passengers?\n#\nfirstClassPassengers = data['Pclass'].value_counts()\nwith open('1_3.txt', 'w') as f3:\n print(round(100. * firstClassPassengers[1] / len(data), 2), file=f3, end='')\n\n# How old were the passengers?\n#\nwith open('1_4.txt', 'w') as f4:\n print(round(data['Age'].mean(), 2), data['Age'].median(), file=f4, end='') # Average and median\n\n# Do the number of brothers / sisters / spouses correlate with the number of\n# parents / children?\n#\nwith open('1_5.txt', 'w') as f5:\n print(round(data['SibSp'].corr(data['Parch']), 2), file=f5, end='')\n\n# What is the most popular female name on the ship?\n#\nfemales = data.where(data.Sex == 'female')\nfemaleNames = females['Name']\nnameOnly = femaleNames.str.split('\\. ', expand=True)\nallNames = nameOnly[1].value_counts().index.tolist() # Out names to list\nwith open('1_6.txt', 'w') as f6:\n print(allNames[0], file=f6, end='')", "id": "8519873", "language": "Python", "matching_score": 2.9373834133148193, "max_stars_count": 0, "path": "w1/w1/w1.py" }, { "content": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Thu Dec 28 15:17:16 2017\n\n@author: MVGrigoriev\n\n@task: Decision tree\n\"\"\"\n\nimport numpy as np\nimport pandas\nfrom sklearn.tree import DecisionTreeClassifier # Import class from scikit-learn\ndata = pandas.read_csv('titanic.csv', usecols=['Pclass', 'Fare', 'Age', 'Sex', 'Survived']) # Import data\ndata = data.dropna() \ndata['Sex'] = data['Sex'].map({'male': 1, 'female': 0})\nclf = DecisionTreeClassifier(random_state=241) # Create classification Decision tree,\n # DecisionTreeRegressor - for regression\nfeatures = data[['Pclass', 'Fare', 'Age', 'Sex']] # Features\ntarget = data[['Survived']] # Target\nclf.fit(features, target) # Learning\nfeaturesName = ['Pclass', 'Fare', 'Age', 'Sex']\nimportances = clf.feature_importances_\ntopFeaturesIndeces = sorted(range(len(importances)), key=lambda i: importances[i])[-2:] # Get indexes of TOP2 values\nwith open('1_7.txt', 'w') as f:\n print(featuresName[topFeaturesIndeces[0]], featuresName[topFeaturesIndeces[1]], file=f, end='')\n\n", "id": "3515868", "language": "Python", "matching_score": 0.62418532371521, "max_stars_count": 0, "path": "w1/w1/w2.py" }, { "content": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Sun Jan 07 07:52:52 2018\n\n@author: MVGrigoriev\n\n@task: kNN method\n\"\"\"\n\nimport pandas\nimport numpy as np\nfrom sklearn.neighbors import KNeighborsClassifier # Import class from scikit-learn\nfrom sklearn.model_selection import KFold # Import KFold function \nfrom sklearn.model_selection import cross_val_score # Import metrics for cross validation\nfrom sklearn.preprocessing import scale # Import Scale function \ndata = pandas.read_csv('wine.data', header=None) # Import data\ntarget = data[0] # Extract target \nfeatures = data.drop(0, 
axis=1) # Extract features\nkf = KFold(n_splits=5, shuffle=True, random_state=42)\n\n# At what k is the maximum quality obtained without normalization of the features?\n#\n# What is the maximum quality without normalization of the features (a number in the range from 0 to 1)?\n#\n# Note: cross_val_score clones and refits the estimator on every fold,\n# so no explicit fit() call is needed before scoring.\nlistOfAccuracy = []\nfor i in range(1, 51):\n    neigh = KNeighborsClassifier(n_neighbors=i)\n    cvs = cross_val_score(neigh, features, target, cv=kf, scoring='accuracy')\n    cvsValue = np.mean(cvs)\n    listOfAccuracy.append(cvsValue)\noptValue = max(listOfAccuracy)\noptIndex = listOfAccuracy.index(optValue)\nwith open('2_1.txt', 'w') as f1:\n    print(optIndex+1, file=f1, end='')\nwith open('2_2.txt', 'w') as f2:\n    print(round(optValue, 2), file=f2, end='')\n\n# Which optimal k is obtained after normalization of the features?\n#\n# What is the maximum quality after normalization of the features (a number in the range from 0 to 1)?\n#\nfeatures = scale(features)\nlistOfAccuracy = []\nfor i in range(1, 51):\n    neigh = KNeighborsClassifier(n_neighbors=i)\n    cvs = cross_val_score(neigh, features, target, cv=kf, scoring='accuracy')\n    cvsValue = np.mean(cvs)\n    listOfAccuracy.append(cvsValue)\noptValue = max(listOfAccuracy)\noptIndex = listOfAccuracy.index(optValue)\nwith open('2_3.txt', 'w') as f3:\n    print(optIndex+1, file=f3, end='')\nwith open('2_4.txt', 'w') as f4:\n    print(round(optValue, 2), file=f4, end='')\n", "id": "12863391", "language": "Python", "matching_score": 5.992068290710449, "max_stars_count": 0, "path": "w2/w2/t1.py" }, { "content": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Sun Jan 07 11:57:24 2018\n\n@author: MVGrigoriev\n\n@task: Metric selection\n\"\"\"\n\n# At what p is the quality on cross-validation optimal?\n#\nimport numpy as np\nfrom sklearn.neighbors import KNeighborsRegressor # Import class from scikit-learn\nfrom sklearn.datasets import load_boston # Import boston house-prices dataset\nfrom sklearn.model_selection import cross_val_score # Import metrics for cross validation\nfrom sklearn.preprocessing import scale # Import Scale function\nfrom sklearn.model_selection import KFold # Import KFold function\nboston = load_boston()\nscaledData = scale(boston.data)\nkf = KFold(n_splits=5, shuffle=True, random_state=42)\npRange = np.linspace(1, 10, num=200)\nlistOfAccuracy = []\nfor i in pRange:\n    neigh = KNeighborsRegressor(n_neighbors=5, weights='distance', metric='minkowski', p=i)\n    cvs = cross_val_score(neigh, scaledData, boston.target, cv=kf, scoring='neg_mean_squared_error')\n    cvsValue = np.max(cvs)\n    listOfAccuracy.append(cvsValue)\noptValue = max(listOfAccuracy)\noptIndex = listOfAccuracy.index(optValue)\nwith open('2_5.txt', 'w') as f:\n    print(round(pRange[optIndex], 2), file=f, end='')", "id": "7427264", "language": "Python", "matching_score": 1.1717617511749268, "max_stars_count": 0, "path": "w2/w2/t2.py" }, { "content": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Fri Jan 19 19:11:07 2018\n\n@author: MVGrigoriev\n\n@task: Support objects\n\"\"\"\n\n# Find the indices of the objects that are support vectors.\n#\nimport pandas\nfrom sklearn.svm import SVC\n\ntrainData = pandas.read_csv('svm-data.csv', header=None)\ntargetTrainData = trainData[0]\nfeaturesTrainData = trainData.drop(0, axis=1)\n\nclf = SVC(C=100000, random_state=241, kernel='linear')\nclf.fit(featuresTrainData, targetTrainData)\n\nlistOfIndexes = []\nfor i in clf.support_:\n    listOfIndexes.append(str(i+1))  # 1-based indices for the answer file\n\nwith open('3_1.txt', 'w') as f:\n    print(','.join(listOfIndexes), file=f, end='')\n", "id": "11657013", "language": "Python", "matching_score": 4.318338871002197, "max_stars_count": 0, "path": "w3/w3/t1.py" }, { "content": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Mon Jan 08 16:57:43 2018\n\n@author: MVGrigoriev\n\n@task: Features normalization\n\"\"\"\n\n# Find the difference between the quality on the test sample after normalization and the quality before it.\n#\nimport pandas\nfrom sklearn.linear_model import Perceptron\nfrom sklearn.metrics import accuracy_score\nfrom sklearn.preprocessing import StandardScaler\n\ntrainData = pandas.read_csv('perceptron-train.csv', header=None)\ntargetTrainData = trainData[0]\nfeaturesTrainData = trainData.drop(0, axis=1)\n\ntestData = pandas.read_csv('perceptron-test.csv', header=None)\ntargetTestData = testData[0]\nfeaturesTestData = testData.drop(0, axis=1)\n\nclf = Perceptron(random_state=241)\nclf.fit(featuresTrainData, targetTrainData)\n\npredictions = clf.predict(featuresTestData)\naccuracyClassificationScoreBeforeNormalization = accuracy_score(targetTestData, predictions)\n\nscaler = StandardScaler()\nfeaturesTrainDataScaled = scaler.fit_transform(featuresTrainData)\nfeaturesTestDataScaled = scaler.transform(featuresTestData)\n\nclf = Perceptron(random_state=241)\nclf.fit(featuresTrainDataScaled, targetTrainData)\n\npredictions = clf.predict(featuresTestDataScaled)\naccuracyClassificationScoreAfterNormalization = accuracy_score(targetTestData, predictions)\n\nwith open('2_6.txt', 'w') as f:\n    print(accuracyClassificationScoreAfterNormalization-accuracyClassificationScoreBeforeNormalization, file=f, end='')\n", "id": "5247618", "language": "Python", "matching_score": 4.099294662475586, "max_stars_count": 0, "path": "w2/w2/t3.py" } ]
3.518339
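All of the w2 scripts above repeat one cross-validation pattern: build a KFold(n_splits=5, shuffle=True, random_state=42) splitter, sweep a hyperparameter, score each setting with cross_val_score, and keep the argmax of the mean fold score. A compact sketch of that pattern on synthetic data (make_classification stands in here for the wine/boston data the scripts actually load):

import numpy as np
from sklearn.datasets import make_classification
from sklearn.model_selection import KFold, cross_val_score
from sklearn.neighbors import KNeighborsClassifier
from sklearn.preprocessing import scale

X, y = make_classification(n_samples=300, n_features=13, random_state=0)
X = scale(X)  # feature scaling, as in tasks 2_3/2_4, usually helps kNN
kf = KFold(n_splits=5, shuffle=True, random_state=42)

scores = [
    cross_val_score(KNeighborsClassifier(n_neighbors=k), X, y,
                    cv=kf, scoring='accuracy').mean()
    for k in range(1, 51)
]
best_k = int(np.argmax(scores)) + 1  # +1 because k starts at 1
print(best_k, round(max(scores), 2))

Swapping in KNeighborsRegressor with scoring='neg_mean_squared_error' reproduces the regression variant used in t2.py.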
john-b-shaw
[ { "content": "class LeafShape:\n def __init__(self, comment, angle):\n self.comment = comment\n self.polyedge = []\n self.width = []\n self.nsharp = []\n self.basewidth = []\n self.baselength = []\n self.aspectr = []\n self.q = []\n self.angle = angle\n\nclass Topology:\n def __init__(self, comment, lattice):\n self.comment = comment\n self.lattice = lattice\n self.height = []\n self.Vertex = []\n self.Neigh = []\n self.Bond = []\n\nclass LeafEnergy:\n def __init__(self,comment):\n self.comment = comment\n self.vectorConductivity = []\n self.Concentrations = []\n self.Currents = []\n self.CurrentMatrix = []\n self.alpha = []\n self.Energy = []\n self.gamma = []\n self.Pressures = []\n", "id": "9383807", "language": "Python", "matching_score": 1.594518780708313, "max_stars_count": 0, "path": "LEAFclass.py" }, { "content": "import math\n\nfrom numpy import *\n\nimport LEAFclass as LFCLSS\n\n\"\"\"\n geometry.py\n\n Contains some functions related to the geometry of\n the system, for example lattice bounding polygons etc...\n\"\"\"\n\ndef xscale(LEAF, f):\n \"\"\"\n Scales the vertices of LEAF by the factor f in \n x direction.\n \"\"\"\n LEAF.Vertex[:,0] *= f\n\ndef leafshape(angle):\n \"\"\"\n Generates a LeafShape with the given angle\n such that aheight = awidth*sin(angle)\n \"\"\"\n theta = arange(0.,7.) * pi/3. + pi\n LSHP = LFCLSS.LeafShape('Leaf Shape', angle)\n LSHP.polyedge = array([cos(theta)*1.21+1.2, sin(theta)*1.21])\n beta = math.atan(2*sin(angle))\n\n c = sqrt((LSHP.polyedge[0,0] - LSHP.polyedge[0,1])**2 + \\\n (LSHP.polyedge[1,0] - LSHP.polyedge[1,1])**2)\n h = LSHP.polyedge[1,1] - LSHP.polyedge[1,0]\n\n dx = LSHP.polyedge[0,1] - (LSHP.polyedge[0,0] + sqrt((h/sin(beta))**2 - \\\n (LSHP.polyedge[1,0] - LSHP.polyedge[1,1])**2))\n \n LSHP.polyedge[0, 1] -= dx\n LSHP.polyedge[0, 2] += dx\n LSHP.polyedge[0, 4] += dx\n LSHP.polyedge[0, 5] -= dx\n \n return LSHP\n\ndef poly_bounds(polyedge):\n \"\"\"\n returns width, height of a bounding rectangle around\n the polygon defined by polyedge\n \"\"\"\n \n width = max(polyedge[0]) - min(polyedge[0]) + 0.1\n height = max(polyedge[1]) - min(polyedge[1]) + 0.1\n\n return width, height\n\ndef bounding_box(polyedge):\n \"\"\"\n Calculates and returns the minimum bounding\n rectangle of the given polygon.\n The bounding box is oriented along the coordinate axes.\n \"\"\"\n\n top_left = array([min(polyedge[0]), max(polyedge[1])])\n top_right = array([max(polyedge[0]), max(polyedge[1])])\n bot_left = array([min(polyedge[0]), min(polyedge[1])])\n bot_right = array([max(polyedge[0]), min(polyedge[1])])\n\n width = linalg.norm(top_left - top_right) + 0.1\n height = linalg.norm(bot_left - top_left) + 0.1\n\n return bot_left, top_right, width, height\n\ndef subdivide_bonds(LEAF):\n \"\"\"\n Subdivides each bond in the leaf described by LEAF\n into 2 bonds.\n\n returns a list of the new vertices at the centers\n of old bonds,\n a list of bond pairs making up the old bonds,\n and a list of bond indices pointing to the old bond\n for each new vertex\n \"\"\"\n vnum = len(LEAF.Vertex)\n bnum = len(LEAF.Bond)\n new_bonds = list(LEAF.Bond.copy())\n new_verts = []\n pairs = []\n old_bonds = []\n new_vert_indices = []\n\n for i in xrange(len(LEAF.Bond)):\n # Find vertex in the middle\n vnew = 0.5*(LEAF.Vertex[LEAF.Bond[i,0]] + LEAF.Vertex[LEAF.Bond[i,1]])\n \n # New connectivity\n new_verts.append(vnew)\n\n old_target = LEAF.Bond[i,1]\n new_bonds[i][1] = vnum\n new_bonds.append(array([vnum, old_target]))\n pairs.append([i, bnum])\n old_bonds.append(i)\n 
new_vert_indices.append(vnum)\n\n vnum += 1\n bnum += 1\n\n LEAF.Bond = array(new_bonds)\n LEAF.Vertex = vstack((LEAF.Vertex, array(new_verts)))\n\n return array(new_vert_indices), array(pairs), array(old_bonds)\n", "id": "8915077", "language": "Python", "matching_score": 0.1592407375574112, "max_stars_count": 0, "path": "geometry.py" }, { "content": "import analyze as an\nimport numpy as np\nimport pickle\nimport matplotlib.pyplot as plt\nfrom matplotlib.gridspec import GridSpec\nimport networkx as nx\nimport xylem as xy\nfrom stats import stat\nimport simulate as sim\nimport time\n\nplt.rcParams.update({\n \"text.usetex\": True,\n \"font.family\": \"serif\",\n \"font.serif\": [\"Computer Modern Roman\"],\n \"font.size\": 20\n})\n\n# our T* values\nT = {'St Clair': 1.5E-01,\n 'Mississippi': 5.4E-01,\n 'Wax': 9.5E-01,\n 'Mossy': 5.6E-01,\n 'Kolyma': 5.0E-01,\n 'Colville': 4.7E-01,\n 'Apalachicola': 5.9E+00,\n 'Mackenzie': 3.5E+00,\n 'Orinoco': 1.4E+01,\n 'Yenisei': 2.5E+00,\n 'Lena': 2.9E+01,\n 'Yukon': 7.5E+00,\n 'Betsiboka': 8.1E+01,\n 'Irrawaddy': 5.0E+02,\n 'GBM': 1.2E+02,\n 'Rajang': 2.3E+02,\n 'Niger': 3.2E+02,\n 'Sarawak': 2.5E+03,\n 'R<NAME>': 6.0E+03,\n 'Barnstable': 5.5E+03\n }\n\n# main figures\ndef riverdisplay2x3(folder='jun20rivers', s=[0.01, 0.1, 1, 10, 100, 1000], mode='c'):\n fig = plt.figure(figsize=(12,10))\n\n n = 0\n k = 1\n for i in range(len(s)):\n #tide strength\n if mode == 'c':\n if folder[-2:] == 'sq':\n a = xy.DeltaNetwork.load(folder + \\\n '/%0.2f_0.15_90_sq_%d' % (s[i],n))\n else:\n a = xy.DeltaNetwork.load(folder + \\\n '/%0.2f_0.15_90_tri_%d' % (s[i],n))\n #basin density\n elif mode == 'd':\n if folder[-2:] == 'sq':\n a = xy.DeltaNetwork.load(folder + \\\n '/1.00_%0.2f_90_sq_%d' % (s[i],n))\n else:\n a = xy.DeltaNetwork.load(folder + \\\n '/1.00_%0.2f_90_tri_%d' % (s[i],n))\n fig.add_subplot(int(np.ceil(len(s)/3)), 3, k)\n a.plot(drawspecial=False)\n a.plot(style='loops', drawspecial=False)\n if mode == 'c':\n plt.title(r'$T^*=%0.2f$' % s[i], fontsize=20)\n elif mode == 'd':\n plt.title('d=%0.2f' % s[i])\n plt.axis('off')\n k += 1\n\n plt.tight_layout()\n plt.savefig('lineup2x3.png', transparent=True, dpi=200)\n\ndef riverdisplay(folder='jun20rivers', s=[0.01, 0.1, 1, 10, 100, 1000], mode='c'):\n import gdal\n fig = plt.figure(figsize=(20,10))\n grid = GridSpec(2, 2*len(s))\n\n n = 1\n for i in range(len(s)):\n if folder[-2:] == 'sq':\n a = xy.DeltaNetwork.load(folder + \\\n '/%0.2f_0.15_90_sq_%d' % (s[i],n))\n else:\n a = xy.DeltaNetwork.load(folder + \\\n '/%0.2f_0.15_90_tri_%d' % (s[i],n))\n fig.add_subplot(grid[0, 2*i:2*i+2])\n a.plot(style='loops', thr=1e-4, drawspecial=False, magn=6)\n plt.title(r'$T^*=%0.2f$' % s[i], fontsize=20)\n plt.axis('off')\n\n spots = [grid[1, 1:3], grid[1, 3:5], grid[1, 7:9], grid[1, 9:11]]\n deltas = ['Mississippi', 'Orinoco', 'Rajang', 'Barnstaple']\n\n '''for i in range(len(spots)):\n d = deltas[i]\n dataset = gdal.Open('shp/'+d+'/'+d+'_clipped.tif')\n band = dataset.GetRasterBand(1)\n arr = band.ReadAsArray().T\n scale = dataset.GetGeoTransform()[1]\n xs = arr.shape[0]\n fig.add_subplot(spots[i])\n plt.imshow(arr, cmap='Greys', extent=[0, arr.shape[1]*scale/1000, 0,\n arr.shape[0]*scale/1000],)\n plt.axis('off')\n plt.plot([0.1, 1.1], [0.1,0.1], '-r', lw=4)'''\n\n plt.tight_layout()\n plt.savefig('lineup.png', dpi=200, transparent=True)#plt.show()\n\ndef riverdisplay_vertical(folder='jun20rivers', mode='c'):\n p = 10**np.array([-1.25, -0.25, 0.75, 1.75, 2.75])\n\n fig = plt.figure(figsize=(3,12))\n grid = 
GridSpec(len(p), 1)\n n = 0\n for i in range(len(p)):\n if folder[-2:] == 'sq':\n a = xy.DeltaNetwork.load(folder + \\\n '/%0.2f_0.15_90_sq_%d' % (p[i],n))\n else:\n a = xy.DeltaNetwork.load(folder + \\\n '/%0.2f_0.15_90_tri_%d' % (p[i],n))\n fig.add_subplot(grid[i, 0])\n a.plot(style='loops', thr=1e-5, drawspecial=False, magn=4, c=[0.35,0,0.5])\n plt.text(-0.8, 0.5, r'$10^{%d}$' % np.around(0.25+np.log10(p[i])), fontsize=18)\n plt.axis('off')\n plt.gca().autoscale()\n plt.tight_layout()\n plt.savefig('lineup_vert.png', transparent=False, dpi=200)\n #plt.savefig('lineup_vert.svg', transparent=True, )\n\ndef wbridges():\n sim.realdataplot('wbridges', 'line')\n #sim.marshes_overlay('wbridges')\n #plt.savefig('final/main/wbridges.svg',transparent=True,)\n plt.savefig('final/main/wbridges.png',transparent=True, dpi=200)\n\n# sup figures\ndef initial_conditions():\n \"\"\" Set alpha in DeltaNetwork to 0.7 before running this function\n \"\"\"\n\n plt.figure(figsize=(8,8))\n ax = plt.gca()\n a = xy.DeltaNetwork.make_river(1, density=90, shape='triangle')\n a.plot(magn=4, ax=ax)\n plt.xlim([-1,1.05]); plt.ylim([-1.05,1])\n plt.xticks([-1,-0.5,0,0.5,1]); plt.yticks([-1,-0.5,0,0.5,1])\n plt.axis('equal')\n ax.spines['right'].set_visible(False)\n ax.spines['top'].set_visible(False)\n plt.tight_layout()\n plt.savefig('final/init.png', dpi=200, transparent=True)\n plt.savefig('final/init.svg', transparent=True)\n\ndef pressure_ensemble():\n import scipy\n from matplotlib import cm\n from matplotlib.colors import LogNorm, Normalize\n a = xy.DeltaNetwork.load('jun20rivers/1.00_0.15_90_tri_23')\n #a.ensembleplot('final/flowsmovies')\n\n plt.rcParams.update({\n \"font.size\": 24\n })\n\n a.ds = a.fluctuation_ensemble()\n\n CM = a.C_matrix_sparse(a.C)\n G = a.G_matrix_sparse(CM)[1:,1:].tocsc()\n p = scipy.sparse.linalg.spsolve(G, a.ds[1:,:])\n v = (np.amin(p), np.amax(p)-np.amin(p))\n #p = np.concatenate((p, p[:,::-1]), axis=1)\n\n t = np.linspace(0, 2*np.pi, p.shape[1])\n tides = np.cos(t)\n t *= p.shape[1]/2/np.pi\n\n fig = plt.figure(figsize=(16,8))\n grid = plt.GridSpec(1, 101, hspace=0.05, wspace=0.1, left=0.02, right=0.98)\n\n ax1 = fig.add_subplot(grid[0,0:50])\n a.plot(style='pressure', cmap='cividis', p=p[:,0], v=v,\n drawspecial=False, ax=ax1, magn=6)\n a.plot(style='pipes', drawspecial=False, ax=ax1)\n plt.axis('off')\n\n ax2 = fig.add_subplot(grid[0,50:])\n a.plot(style='pressure', cmap='cividis', p=p[:,-1], v=v,\n drawspecial=False, ax=ax2, magn=6)\n a.plot(style='pipes', drawspecial=False, ax=ax2)\n plt.axis('off')\n\n plt.colorbar(cm.ScalarMappable(norm=Normalize(vmin=0, vmax=1),\n cmap='cividis'), ax=ax2,\n label='Fraction of max potential')\n\n plt.savefig('final/pressures.png', dpi=100, transparent=True)\n #plt.savefig(dir+'/%05d.png' % i)\n\ndef domainsdraw():\n def f(s, shape, n):\n if shape != 'tri':\n return 'riverdomains/%0.2f_0.15_90_%s_%d' % (s, shape, n)\n elif shape == 'tri':\n return 'jun20rivers/%0.2f_0.15_90_%s_%d' % (s, shape, n)\n\n def load(f):\n return xy.DeltaNetwork.load(f)\n\n shapes = ['square', 'sine', 'strip', 'invtriangle', 'tri']\n names = ['Square', 'Sine', 'Strip', 'Inverted triangle', 'Triangle']\n\n fig = plt.figure(figsize=(12,8))\n gs = GridSpec(2,6)\n n = 0\n eps = 0.05\n\n fig.add_subplot(gs[0,:2])\n load(f(1.00, shapes[0], n)).plot(drawspecial=False)\n plt.title(names[0])\n plt.xlim([-1-eps,1+eps]), plt.ylim([-1-eps,1+eps]), #plt.axis('off')\n plt.xticks(ticks=[-1, -0.5, 0, 0.5, 1])\n plt.yticks(ticks=[-1, -0.5, 0, 0.5, 1])\n\n fig.add_subplot(gs[0,2:4])\n 
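# second panel: the sine-shaped domain\n 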
load(f(1.00, shapes[1], n)).plot(drawspecial=False)\n plt.title(names[1])\n plt.xlim([-1-eps,1+eps]), plt.ylim([-1-eps,1+eps]), #plt.axis('off')\n plt.xticks(ticks=[-1, -0.5, 0, 0.5, 1])\n plt.yticks(ticks=[-1, -0.5, 0, 0.5, 1], labels=['','','','',''])\n\n fig.add_subplot(gs[0,4:])\n load(f(1.00, shapes[2], n)).plot(drawspecial=False)\n plt.title(names[2])\n plt.xlim([-1-eps,1+eps]), plt.ylim([-1-eps,1+eps]), #plt.axis('off')\n plt.xticks(ticks=[-1, -0.5, 0, 0.5, 1])\n plt.yticks(ticks = [-1, -0.5, 0, 0.5, 1], labels=['','','','',''])\n\n fig.add_subplot(gs[1,1:3])\n load(f(1.00, shapes[3], n)).plot(drawspecial=False)\n plt.title(names[3])\n plt.xlim([-1-eps,1+eps]), plt.ylim([-1-eps,1+eps]), #plt.axis('off')\n plt.xticks(ticks=[-1, -0.5, 0, 0.5, 1])\n\n fig.add_subplot(gs[1,3:5])\n load(f(1.00, shapes[4], n)).plot(drawspecial=False)\n plt.title(names[4])\n plt.xlim([-1-eps,1+eps]), plt.ylim([-1-eps,1+eps]), #plt.axis('off')\n plt.xticks(ticks=[-1, -0.5, 0, 0.5, 1])\n plt.yticks(ticks = [-1, -0.5, 0, 0.5, 1], labels=['','','','',''])\n\n plt.tight_layout()\n\n plt.savefig('final/domains/domainsdraw.png', transparent=True, dpi=200)\n plt.savefig('final/domains/domainsdraw.svg', transparent=True)\n\ndef domainsanalysis():\n sim.riverdomains('mstdiff')\n\ndef noise():\n plt.figure(figsize=(7,8))\n sim.marshes()\n plt.savefig('final/noise/noise.png', transparent=True, dpi=200)\n #plt.savefig('final/noise/noise.svg', transparent=True)\n\n def fname(s, no, n):\n if no != 0:\n return 'marshes'+'/%0.2f_%0.2f_0.15_90_tri_%d' % (s, no, n)\n elif no == 0:\n return 'jun20rivers/%0.2f_0.15_90_tri_%d' % (s, n)\n s = [0.1, 10, 1000]\n no = [0.0, 1/3, 2/3, 1.0]\n no = no[::-1]\n cmap = plt.get_cmap('plasma')\n plt.figure(figsize=(8,10))\n i = 1\n for noi in range(len(no)):\n for si in range(len(s)):\n plt.subplot(len(no), len(s), i)\n a = xy.DeltaNetwork.load(fname(s[si], no[noi], 2))\n a.plot(drawspecial=False, magn=3)\n a.drawloops(c=cmap(no[noi]), thr=1e-5)\n if si == 0:\n plt.text(-1, -0.5, r'noise $=%0.2f$' % no[noi], rotation=90,\n fontsize=16)\n #plt.ylabel(r'noise $=%0.2f$' % no[noi])\n if noi == 0:\n plt.title(r'$T^*=%0.2f$' % s[si])\n\n plt.axis('off')\n i += 1\n plt.subplots_adjust(bottom=0.02, top=0.95, left=0.1, right=0.95,\n hspace=0.02, wspace=0.02)\n plt.savefig('final/noise/draw.png', dpi=200, transparent=True)\n #plt.savefig('final/noise/draw.svg', transparent=True)\n\ndef entropy():\n try:\n a = xy.DeltaNetwork.load('entropy/delta')\n e = np.load('entropy/entropy.npy')\n except:\n a = xy.DeltaNetwork.make_river(1, density=60, shape='triangle')\n a.simulate(entropy=True, plot=True, movie_dir='entropy')\n a.save('entropy/delta')\n np.save('entropy/entropy.npy', a.entropy)\n e = a.entropy\n\n plt.figure()\n a.plot()\n plt.axis('off')\n plt.savefig('entropy/final.png', dpi=300)\n\n plt.figure(figsize=(12,6))\n plt.subplot(121)\n plt.plot(np.arange(e.shape[0])/10, e[:,0], label='Pure river')\n plt.plot(np.arange(e.shape[0])/10, e[:,1], label='Pure tides')\n plt.xlabel('t', fontsize=20)\n plt.ylabel('Graph entropy S/k', fontsize=20)\n plt.legend(fontsize=20)\n\n plt.subplot(122)\n a.plot()\n plt.axis('off')\n\n plt.tight_layout()\n plt.savefig('entropy/entropy.png', dpi=200)\n\ndef nloops_flowchange():\n plt.rcParams.update({\n \"font.size\": 24\n })\n plt.figure(figsize=(12,6))\n plt.subplot(121)\n sim.jun20rivers('nloops', style='line', thr=1e-5)\n plt.ylabel('Number of loops in simulation')\n\n plt.subplot(122)\n sim.jun20rivers('flowchange', style='line', thr=1e-5)\n plt.ylabel('Fraction 
of flow-reversing edges')\n plt.tight_layout()\n plt.savefig('final/nloops_flowchange.png', transparent=True)\n #plt.savefig('final/nloops_flowchange.svg', transparent=True)\n\ndef resdist_npaths(style='line'):\n plt.rcParams.update({\n \"font.size\": 24\n })\n plt.figure(figsize=(18,6))\n ax1 = plt.subplot(131)\n sim.jun20rivers('resdist', style=style, thr=1e-5)\n plt.ylabel('RD from river to coast')\n plt.yscale('log')\n\n ax2 = plt.subplot(132, sharey=ax1)\n sim.jun20rivers('resdist1', style=style, thr=1e-5)\n plt.ylabel('RD from tidal nodes to coast')\n plt.yscale('log')\n\n plt.subplot(133)\n sim.jun20rivers('pathnodes', style=style, thr=1e-5)\n plt.ylabel('River to coast number of paths')\n plt.yscale('log')\n\n plt.tight_layout()\n plt.savefig('final/resdist_npaths.png', transparent=True)\n #plt.savefig('final/resdist_npaths.svg', transparent=True)\n\ndef convergence():\n plt.figure(figsize=(5,22.5))\n sim.jan20rivers('mstdiff')\n plt.subplots_adjust(left=0.25, hspace=0.25)\n plt.savefig('final/persistence/density_mstdiff.png',transparent=True,\n dpi=200)\n #plt.savefig('final/persistence/density_mstdiff.svg',transparent=True,)\n\n plt.figure(figsize=(5,22.5))\n sim.jan20rivers('wbridges')\n plt.subplots_adjust(left=0.25, hspace=0.25)\n plt.savefig('final/persistence/density_wbridges.png',transparent=True,\n dpi=200)\n #plt.savefig('final/persistence/density_wbridges.svg',transparent=True,)\n\n plt.figure(figsize=(5,22.5))\n sim.jan20rivers('nloops')\n plt.subplots_adjust(left=0.25, hspace=0.25)\n plt.savefig('final/persistence/density_nloops.png',transparent=True,\n dpi=200)\n #plt.savefig('final/persistence/density_nloops.svg',transparent=True,)\n\ndef backboneprocessing():\n a = xy.DeltaNetwork.load('jun20rivers/1.00_0.15_90_tri_22')\n plt.figure(figsize=(12,8))\n plt.tight_layout(pad=0, w_pad=0, h_pad=0)\n ax = plt.subplot(232)\n a.plot('sticks', thr=1e-3, drawspecial=False)\n plt.axis('off')\n plt.title('Thresholding directly')\n G = a.G.copy()\n\n plt.subplot(234)\n delattr(a, 'G')\n a.to_networkx(thr=1e-8)\n a.plot('sticks', drawspecial=False)\n plt.axis('off')\n plt.title('1. Set low threshold')\n\n plt.subplot(235)\n a.remove_trees_nx(1e-8)\n a.plot('sticks', drawspecial=False)\n plt.axis('off')\n plt.title(\"2. Trim trees (find ''backbone'')\")\n\n plt.subplot(236)\n a.thin(thr=1e-3)\n #cs = nx.get_edge_attributes(a.G, 'conductivity')\n #a.G.remove_edges_from([key for key in cs.keys() if cs[key] < 1e-3])\n #a.G.remove_nodes_from([n[0] for n in a.G.degree if n[1] == 0])\n a.plot('sticks', drawspecial=False)\n plt.axis('off')\n plt.title('3. 
Threshold backbone')\n\n G.remove_edges_from(a.G.edges)\n pos = nx.get_node_attributes(G, 'pos')\n #nx.draw_networkx(G, pos, edge_color='r', width=3, ax=ax)\n nodes = list(set([e[0] for e in G.edges]+[e[1] for e in G.edges]))\n nx.draw_networkx(G, pos, nodelist=nodes, with_labels=False,\n edge_color='r', node_color='r', node_size=2, width=2, ax=ax, )\n\n plt.savefig('backbone.png', dpi=300)\n plt.show()\n\ndef nloops_data():\n plt.rcParams.update({\n \"font.size\": 24\n })\n sim.realdataplot('nloops', 'line')\n plt.tight_layout()\n #plt.savefig('final/stats/nloops.svg',transparent=True,)\n plt.savefig('final/stats/nloops.png',transparent=True, dpi=150)\n\ndef loopareas_data():\n plt.rcParams.update({\n \"font.size\": 24\n })\n sim.realdataplot('loopareas')\n plt.tight_layout()\n plt.savefig('final/stats/loopareas.png', transparent=True, dpi=150)\n #plt.savefig('final/stats/loopareas.svg', transparent=True,)\n\ndef persistence(f='mstdiff'):\n ylabels = {\n 'wbridges': 'Fraction of total channel area found in loops',\n 'bridges': 'Fraction of total channel length found in loops',\n 'nloops': r'Thresholded number of loops per area (km$ ^{-2}$)',\n 'loopareas': 'Island area over total area',\n #'mstdiff': 'Minimum fraction of channel area removed to make a tree',\n 'mstdiff': r'$\\Omega$',\n 'mstdiffl': 'Minimum fraction of channel length removed to make a tree',\n 'resdist': 'Resistance distance from river to ocean',\n 'resdist1': 'Resistance distance from tidal nodes to ocean',\n 'pathnodes': 'Number of paths from river to each ocean node',\n 'flowchange': 'Fraction of thresholded channels that reverse flow',\n 'algconn': 'Algebraic connectivity'\n }\n strengths = np.logspace(-2, 3, 21)\n thrs = np.logspace(-5, -2, 30)\n slabels = [0.01, 0.1, 1.0, 10.0, 100.0, 1000.0]\n ntot = 8\n def pstat(file):\n try:\n with open('persistence/'+file+'.p', 'rb') as f:\n data = pickle.load(f)\n except:\n data = np.zeros((len(strengths),len(thrs)))\n for s in range(len(strengths)):\n print(s)\n for n in range(ntot):\n f = 'jun20rivers/%0.2f_0.15_90_tri_%d' % (strengths[s], n)\n a = xy.DeltaNetwork.load(f)\n for x in range(len(thrs)):\n data[s,x] += stat(a, file, thr=thrs[x])\n data /= ntot\n\n with open('persistence/'+file+'.p', 'wb') as f:\n pickle.dump(data, f)\n\n return data\n\n data = pstat(f)\n\n plt.figure(figsize=(10,8))\n cmap = plt.get_cmap('coolwarm')\n for i in range(data.shape[0]):\n if strengths[i] in slabels:\n plt.plot(thrs, data[i,:], label=strengths[i],\n c=cmap(i/data.shape[0]))\n else:\n plt.plot(thrs, data[i,:], c=cmap(i/data.shape[0]))\n\n plt.xscale('log')\n plt.xlabel('Threshold conductivity')\n plt.ylabel(ylabels[f])\n plt.legend()\n plt.savefig('final/persistence/sim.png',transparent=True, dpi=200)\n #plt.savefig('final/persistence/sim.svg',transparent=True,)\n\ndef datapersistence():\n deltas = T.keys()\n plt.figure(figsize=(10,8))\n usa = plt.get_cmap('coolwarm')\n for d in deltas:\n print(d)\n a = xy.read_json(d)\n x = []\n thrs = np.logspace(-5, 1, 30)\n for thr in thrs:\n x.append(stat(a, 'wbridgesnew', thr=thr))\n plt.plot(thrs, x, c=usa(np.log10(T[d])/5 + 2/5))\n #plt.text(thrs[0], x[0], d)\n\n slabels = [0.01, 0.1, 1.0, 10.0, 100.0, 1000.0]\n for s in slabels:\n plt.plot(thrs[-1], x[-1], '-', c=usa(np.log10(s)/5 + 2/5),\n label=str(s))\n plt.legend()\n\n plt.xscale('log')\n plt.xlabel('Normalized threshold conductivity')\n plt.ylabel('Fraction of channel area in loops')\n plt.tight_layout()\n plt.savefig('final/persistence/data.png', dpi=200, transparent=True)\n 
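# also keep a vector (svg) copy of the same figure\n 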
plt.savefig('final/persistence/data.svg', transparent=True)\n\ndef comparepersistence(file='wbridges'):\n strengths = np.logspace(-2, 3, 21)\n thrs = np.logspace(-5, -2, 30)\n slabels = 2*np.array([0.01, 0.1, 1.0, 10.0, 100.0, 1000.0])\n ntot = 8\n def pstat(file='wbridges'):\n try:\n with open('persistence/'+file+'.p', 'rb') as f:\n data = pickle.load(f)\n except:\n data = np.zeros((len(strengths),len(thrs)))\n for s in range(len(strengths)):\n print(s)\n for n in range(ntot):\n f = 'jun20rivers/%0.2f_0.15_90_tri_%d' % (strengths[s], n)\n a = xy.DeltaNetwork.load(f)\n for x in range(len(thrs)):\n data[s,x] += stat(a, file, thr=thrs[x])\n data /= ntot\n\n with open('persistence/'+file+'.p', 'wb') as f:\n pickle.dump(data, f)\n\n return data\n\n data = pstat(file)\n\n plt.figure(figsize=(10,14))\n ax1 = plt.subplot(211)\n cmap = plt.get_cmap('coolwarm')\n for i in range(data.shape[0]):\n if strengths[i] in slabels:\n plt.plot(thrs, data[i,:], label=strengths[i],\n c=cmap(i/data.shape[0]))\n else:\n plt.plot(thrs, data[i,:], c=cmap(i/data.shape[0]))\n\n plt.xscale('log')\n plt.ylabel('Percent loop channel area (simulations)')\n\n deltas = T.keys()\n plt.subplot(212, sharex=ax1)\n usa = plt.get_cmap('coolwarm')\n for d in deltas:\n print(d)\n a = xy.read_json(d)\n x = []\n thrs = np.logspace(-5, 1, 30)\n for thr in thrs:\n x.append(stat(a, file, thr=thr))\n plt.plot(thrs, x, c=usa(np.log10(T[d])/5 + 2/5))\n #plt.text(thrs[0], x[0], d)\n\n slabels = [0.01, 0.1, 1.0, 10.0, 100.0, 1000.0]\n for s in slabels:\n plt.plot(thrs[-1], x[-1], '-', c=usa(np.log10(s)/5 + 2/5),\n label=str(s))\n plt.legend()\n\n plt.xscale('log')\n plt.xlabel('Normalized threshold conductivity')\n plt.ylabel('Percent loop channel area (data)')\n plt.tight_layout()\n plt.show()\n\ndef sticksdraw():\n for d in T.keys():\n a = xy.newjson(d)\n if isinstance(a, str):\n continue\n ratio = np.amax(a.LEAF.Vertex[:,0])/np.amax(a.LEAF.Vertex[:,1])\n if ratio > 1.2:#d in ['Barnstaple', 'Orinoco']:\n fig = plt.figure(figsize=(6*ratio, 18))\n ax1 = plt.subplot(311)\n ax2 = plt.subplot(312, sharey=ax1)\n ax3 = plt.subplot(313, sharey=ax1)\n else:\n fig = plt.figure(figsize=(18,6/ratio))\n ax1 = plt.subplot(131)\n ax2 = plt.subplot(132, sharex=ax1)\n ax3 = plt.subplot(133, sharex=ax1)\n\n print(d)\n ax1.set_title(d)\n a.plot('loops', showbounds=True, thr=1e-4, ax=ax1, drawspecial=False)\n xadj = 0.05*np.amax(a.LEAF.Vertex[:,0])\n yadj = 0.05*np.amax(a.LEAF.Vertex[:,1])\n ax1.set_xlim([-1*xadj, np.amax(a.LEAF.Vertex[:,0]+xadj)])\n ax1.set_ylim([-1*yadj, np.amax(a.LEAF.Vertex[:,1]+yadj)])\n\n b = xy.read_json(d)\n b.plot('sticks', thr=1e-4, showbounds=False, ax=ax2, drawspecial=False)\n #b.drawloops(thr=1e-4, ax=ax2)\n ax2.tick_params(bottom=True, left=True, labelbottom=True, labelleft=True)\n ax2.set_xlim([-1*xadj, np.amax(b.LEAF.Vertex[:,0]+xadj)])\n ax2.set_ylim([-1*yadj, np.amax(b.LEAF.Vertex[:,1]+yadj)])\n\n b.thin()\n #b.smooth()\n b.plot('sticks', showbounds=False, ax=ax3, drawspecial=True)\n b.drawloops(thr=1e-4, ax=ax3)\n ax3.tick_params(bottom=True, left=True, labelbottom=True, labelleft=True)\n ax3.set_xlim([-1*xadj, np.amax(b.LEAF.Vertex[:,0]+xadj)])\n ax3.set_ylim([-1*yadj, np.amax(b.LEAF.Vertex[:,1]+yadj)])\n\n plt.tight_layout()\n plt.show()\n #plt.savefig('sticks/'+d+'.png', dpi=200)\n fig.clear()\n\ndef mstdiff():\n sim.realdataplot('mstdiff', 'line')\n #sim.marshes_overlay('mstdiff')\n sim.align('mstdiff', plot=False)\n plt.xlim([1e-2, 1e4])\n plt.legend()\n #plt.savefig('final/stats/mstdiff.svg',transparent=True,)\n 
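# raster copy for the main-text figure folder\n 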
plt.savefig('final/main/mstdiff.png', transparent=True, dpi=200)\n\ndef mstdiffl():\n sim.realdataplot('mstdiffl', 'line')\n sim.align('mstdiffl', plot=False)\n plt.xlim([1e-2, 1e4])\n #plt.savefig('final/stats/mstdiffl.svg',transparent=True,)\n plt.savefig('final/stats/mstdiffl.png',transparent=True, dpi=200)\n\ndef scalefree():\n strengths = np.logspace(-2, 3, 21)\n thrs = np.logspace(-5, -2, 30)\n slabels = [0.01, 0.1, 1.0, 10.0, 100.0, 1000.0]\n ntot = 35\n def pstat(file='wbridges'):\n try:\n with open('persistence/'+file+'.p', 'rb') as f:\n data = pickle.load(f)\n except:\n data = np.zeros((len(strengths),len(thrs)))\n for s in range(len(strengths)):\n print(s)\n for n in range(ntot):\n f = 'jun20rivers/%0.2f_0.15_90_tri_%d' % (strengths[s], n)\n a = xy.DeltaNetwork.load(f)\n for x in range(len(thrs)):\n data[s,x] += stat(a, file, thr=thrs[x])\n data /= ntot\n\n with open('persistence/'+file+'.p', 'wb') as f:\n pickle.dump(data, f)\n\n return data\n\n data = pstat('wbridges')\n\n plt.figure(figsize=(8,9))\n plt.plot(strengths, thrs[np.argmax(data < 0.2, axis=1)], 'b-')\n plt.xscale('log')\n plt.yscale('log')\n\n deltas = list(T.keys())\n #plt.figure(figsize=(8,6))\n thrs = np.logspace(-5, 1, 50)\n inds = []\n for d in deltas:\n print(d)\n a = xy.read_json(d)\n x = []\n for thr in thrs:\n x.append(stat(a, 'wbridges', thr=thr))\n inds.append(np.argmax(np.array(x)<0.2))\n\n Tstar = np.array(list(T.values()))/np.pi\n #plt.errorbar(Tstar, thrs[inds], c='k', ls='', ms=10,\n # xerr=[0.5*Tstar, 2*Tstar], lw=0.2)\n plt.plot(Tstar, thrs[inds], 'g.', ms=14)\n for d in range(len(deltas)):\n if deltas[d] in ['Orinoco', 'Rajang']:\n plt.text(0.65*Tstar[d], 1.3*thrs[inds[d]],\n deltas[d], fontsize=14, alpha=0.5)\n elif deltas[d] in ['Barnstable']:\n plt.text(0.2*Tstar[d], 1.6*thrs[inds[d]],\n deltas[d], fontsize=14, alpha=0.5)\n elif deltas[d] in ['Ras Isa']:\n plt.text(0.5*Tstar[d], 1.3*thrs[inds[d]],\n deltas[d], fontsize=14, alpha=0.5)\n elif deltas[d] in ['Mississippi']:\n plt.text(0.65*Tstar[d], 1.3*thrs[inds[d]],\n deltas[d], fontsize=14, alpha=0.5)\n else:\n plt.text(0.65*Tstar[d], 1.02*thrs[inds[d]],\n deltas[d], fontsize=14, alpha=0.5)\n plt.xlabel(r'$T^*$')\n plt.ylabel(r'Threshold for at least 20\\% channel area in loops')\n plt.savefig('final/stats/scalefree.png', transparent=True, dpi=150)\n #plt.savefig('final/main/scalefree.svg', transparent=True)\n\n#nloops_data()\n#loopareas_data()\nsim.realdataplot('mstdiff')\n", "id": "3895418", "language": "Python", "matching_score": 6.233613967895508, "max_stars_count": 0, "path": "plotting.py" }, { "content": "import numpy as np\nimport matplotlib.pyplot as plt\nimport xylem as xy\nimport time\nimport networkx as nx\nimport pickle\nfrom scipy.spatial.qhull import QhullError\nfrom stats import stat\n\nplt.rcParams.update({\n \"text.usetex\": True,\n \"font.family\": \"serif\",\n \"font.serif\": [\"Computer Modern Roman\"],\n \"font.size\": 20\n})\n\nylabels = {\n 'wbridges': 'Fraction of total channel area found in loops',\n 'bridges': 'Fraction of total channel length found in loops',\n 'nloops': r'Thresholded number of loops per area (km$ ^{-2}$)',\n 'loopareas': 'Island area over total area',\n 'mstdiff': r'$\\Omega$',\n 'mstdiffl': 'Minimum fraction of channel length removed to make a tree',\n 'resdist': 'Resistance distance from river to ocean',\n 'resdist1': 'Resistance distance from tidal nodes to ocean',\n 'pathnodes': 'Number of paths from river to each ocean node',\n 'flowchange': 'Fraction of thresholded channels that reverse 
flow',\n 'algconn': 'Algebraic connectivity'\n}\n\ndef jun20rivers(file='wbridges', style='line', thr=1e-4):\n \"\"\" simulates and analyses deltas in June 2020.\n The interior of this function can be edited to simulate and process\n the data, but after the data is available in folder jun20rivers,\n the analyze() line is all that is needed\n \"\"\"\n\n strengths = np.logspace(-2, 3, 21)\n folder = 'jun20rivers'\n ntot = 35\n def do(s,f):\n \"\"\" Simulate a delta with strength s and filename f\n Default arguments are specified in the a = ... line\n \"\"\"\n a = xy.DeltaNetwork.make_river(s, basin_fraction=0.15,\n density=90, shape='triangle')\n a.simulate()\n a.save(f)\n return a\n\n def simulate():\n \"\"\" Simulates over the range and sample size used\n \"\"\"\n for n in range(ntot):\n for s in strengths:\n f = folder+'/%0.2f_0.15_90_tri_%d' % (s, n)\n try:\n a = xy.DeltaNetwork.load(f)\n except FileNotFoundError:\n try:\n do(s,f)\n except ZeroDivisionError:\n print('.............................Simulation failed')\n continue\n\n def fix():\n \"\"\" Looks for any simulations that failed and redoes them\n \"\"\"\n for n in range(ntot):\n for s in strengths:\n f = folder+'/%0.2f_0.15_90_tri_%d' % (s, n)\n try:\n a = xy.DeltaNetwork.load(f)\n if np.any(np.isnan(a.C)):\n print(f)\n do(s,f)\n except FileNotFoundError:\n try:\n do(s,f)\n except ZeroDivisionError:\n print('.............................Simulation failed')\n continue\n\n def test():\n \"\"\" sanity check that fix() worked\n \"\"\"\n for s in strengths:\n for n in range(ntot):\n try:\n a = xy.DeltaNetwork.load(\n folder+'/%0.2f_0.15_90_tri_%d'%(s,n))\n print(a.C.max())\n except: pass\n\n def analyze(file='wbridges', style=style, thr=thr):\n \"\"\" Produces desired plot using the dataset made here\n \"\"\"\n stats = {}\n ax = plt.gca()\n try:\n with open(folder+'/'+file+'.p','rb') as f:\n stats = pickle.load(f)\n except FileNotFoundError:\n for s in strengths:\n print(s)\n stats[s] = np.array([])\n for n in range(ntot):\n try:\n a = xy.DeltaNetwork.load(\n folder+'/%0.2f_0.15_90_tri_%d'%(s,n))\n stats[s] = np.append(stats[s], stat(a, file, thr=thr))\n except FileNotFoundError:\n pass\n with open(folder+'/'+file+'.p', 'wb') as f:\n pickle.dump(stats, f)\n\n if file == 'nloops':\n for s in strengths: stats[s] *= 10\n cmap = plt.get_cmap('plasma')\n if style == 'line':\n avg = np.array([np.median(stats[s]) for s in strengths])\n firsts = np.array([np.percentile(stats[s], 25) for s in strengths])\n thirds = np.array([np.percentile(stats[s], 75) for s in strengths])\n ax.plot(2*strengths, avg, c=cmap(0), label='Noiseless \\nmodel')\n ax.fill_between(2*strengths, firsts, thirds, alpha=0.2,\n color=cmap(0))\n ax.set_xscale('log')\n elif style == 'box':\n stats = [stats[s] for s in strengths]\n labels = ['$10^{-2}$', '', '', '', '$10^{-1}$', '', '', '',\n '$10^{0}$', '', '', '', '$10^{1}$', '', '', '',\n '$10^{2}$', '', '', '', '$10^{3}$']\n plt.boxplot(stats, labels=labels)\n ax.set_xlabel(r'$T^*$')\n return stats\n\n analyze(file, style)\n\ndef riverdomains(file='wbridges'):\n \"\"\" Simulates and analyses the deltas simulated on different domain\n shapes. 
similar structure to big function jun20rivers()\n \"\"\"\n shapes = ['square', 'sine', 'strip', 'invtriangle']\n folder = 'riverdomains'\n strengths = [0.01, 0.1, 1, 10, 100, 1000]\n ntot = 20\n def fname(s, shape, n):\n \"\"\" generates filename string given T* strength s, shape string shape,\n and sample index n\n \"\"\"\n if shape != 'tri':\n return folder+'/%0.2f_0.15_90_%s_%d' % (s, shape, n)\n elif shape == 'tri':\n return 'jun20rivers/%0.2f_0.15_90_%s_%d' % (s, shape, n)\n\n def do(s, f, shape):\n \"\"\" Simulate a delta with strength s and filename f\n Default arguments are specified in the a = ... line\n \"\"\"\n a = xy.DeltaNetwork.make_river(s, basin_fraction=0.15,\n density=90, shape=shape)\n a.simulate()\n a.save(f)\n return a\n\n def simulate():\n \"\"\" Simulates over the range and sample size used\n \"\"\"\n for n in range(ntot):\n for shape in shapes:\n for s in strengths:\n f = fname(s, shape, n)\n try:\n a = xy.DeltaNetwork.load(f)\n except FileNotFoundError:\n try:\n print(f)\n do(s, f, shape)\n except ZeroDivisionError:\n print('.............................Simulation failed')\n continue\n\n def fix():\n \"\"\" Looks for any simulations that failed and redoes them\n \"\"\"\n for n in range(ntot):\n for shape in shapes:\n for s in strengths:\n f = fname(s, shape, n)\n try:\n a = xy.DeltaNetwork.load(f)\n if np.any(np.isnan(a.C)):\n print(f)\n do(s, f, shape)\n except FileNotFoundError:\n try:\n print(f)\n do(s, f, shape)\n except ZeroDivisionError:\n print('.............................Simulation failed')\n continue\n\n shapes = ['square', 'sine', 'strip', 'invtriangle', 'tri']\n def analyze(file='wbridges', thr=1e-4):\n \"\"\" Produces desired plot using the dataset made here\n \"\"\"\n plt.figure(figsize=(7,8))\n stats = {}\n names = ['Square', 'Sine', 'Strip', 'Inverted\\ntriangle', 'Triangle']\n box = []\n if file == 'mstdiff':\n thr = 1e-5\n for i in range(len(shapes)):\n try:\n with open(folder+'/'+file+'_'+shapes[i]+'.p','rb') as f:\n stats = pickle.load(f)\n except FileNotFoundError:\n for s in strengths:\n print(s)\n stats[s] = np.array([])\n for n in range(ntot):\n try:\n a = xy.DeltaNetwork.load(\n fname(s, shapes[i], n))\n x = stat(a, file, thr=thr)\n stats[s] = np.append(stats[s], x)\n except FileNotFoundError:\n pass\n with open(folder+'/'+file+'_'+shapes[i]+'.p', 'wb') as f:\n pickle.dump(stats, f)\n box.append(stats[1])\n\n med = [np.median(stats[s]) for s in strengths]\n plt.plot(np.array(strengths)*2, med, label=names[i])\n\n plt.xscale('log')\n plt.xlabel(r'$T^*$')\n plt.ylabel(ylabels[file])\n plt.legend(title='Domain shape')\n plt.savefig('final/domains/analysis.png', transparent=True, dpi=200)\n plt.savefig('final/domains/analysis.svg', transparent=True)\n\n plt.figure(figsize=(7,8))\n plt.boxplot(box, labels=names)\n plt.ylabel(ylabels[file])\n plt.savefig('final/domains/box.png', transparent=True, dpi=200)\n plt.savefig('final/domains/box.svg', transparent=True)\n\n return stats\n\n analyze(file)\n\ndef marshes(file='mstdiff'):\n \"\"\" Simulates deltas that have out-of-phase tidal nodes.\n \"\"\"\n strengths = [0.01, 0.1, 1, 10, 100, 1000]\n noises = [0, 1/3, 2/3, 1]\n folder = 'marshes'\n ntot = 10\n def fname(s, no, n):\n \"\"\" generates filename string given T* strength s, relative tidal\n variation (noise) no,\n and sample index n\n \"\"\"\n if no != 0:\n return folder+'/%0.2f_%0.2f_0.15_90_tri_%d' % (s, no, n)\n elif no == 0:\n return 'jun20rivers/%0.2f_0.15_90_tri_%d' % (s, n)\n\n def do(s, noise, f, shape='triangle'):\n \"\"\" Simulate a 
delta with strength s and filename f\n Default arguments are specified in the a = ... line\n \"\"\"\n a = xy.DeltaNetwork.make_marsh(s, noise, basin_fraction=0.15,\n density=90, shape=shape)\n a.simulate()\n a.save(f)\n return a\n\n def simulate():\n \"\"\" Simulates over the range and sample size used\n \"\"\"\n for n in range(ntot):\n for s in strengths:\n for no in noises:\n f = fname(s, no, n)\n try:\n a = xy.DeltaNetwork.load(f)\n except FileNotFoundError:\n try:\n do(s, no, f)\n except ZeroDivisionError:\n print('.............................Simulation failed')\n continue\n\n def fix():\n \"\"\" Looks for any simulations that failed and redoes them\n \"\"\"\n for n in range(ntot):\n for s in strengths:\n for no in noises:\n f = fname(s, no, n)\n try:\n a = xy.DeltaNetwork.load(f)\n if np.any(np.isnan(a.C)):\n print(f)\n do(s, no, f)\n except FileNotFoundError:\n try:\n do(s, no, f)\n except ZeroDivisionError:\n print('.............................Simulation failed')\n continue\n\n def analyze(file='mstdiff'):\n \"\"\" Produces desired plot using the dataset made here\n \"\"\"\n strengths = [0.01, 0.1, 1, 10, 100, 1000]\n stats = {}\n cmap = plt.get_cmap('plasma')\n if file == 'mstdiff':\n thr = 1e-5\n for no in noises:\n try:\n with open(folder+'/'+file+'_%0.2f.p' % no,'rb') as f:\n stats = pickle.load(f)\n except FileNotFoundError:\n for s in strengths:\n print(s)\n stats[s] = np.array([])\n for n in range(ntot):\n try:\n a = xy.DeltaNetwork.load(fname(s, no, n))\n stats[s] = np.append(stats[s], stat(a, file, thr=thr))\n except FileNotFoundError:\n pass\n with open(folder+'/'+file+'_%0.2f.p' % no, 'wb') as f:\n pickle.dump(stats, f)\n med = [np.median(stats[s]) for s in strengths]\n if no == 1:\n no = 0.999\n plt.plot(np.array(strengths)*2, med, label='%0.2f'%no, c=cmap(no),)\n plt.xscale('log')\n plt.xlabel(r'$T^*$')\n plt.ylabel(ylabels[file])\n plt.legend(title='Noise')\n return stats\n\n analyze(file=file)\n\ndef marshes_overlay(file='mstdiff'):\n \"\"\" Overlays a plot of the statistic 'file' to the active plot based on the\n stats from the 66\\% noise model\n not used in the paper\n \"\"\"\n strengths = [0.01, 0.1, 1, 10, 100, 1000]\n folder = 'marshes'\n ntot = 10\n def fname(s, no, n):\n if no != 0:\n return folder+'/%0.2f_%0.2f_0.15_90_tri_%d' % (s, no, n)\n elif no == 0:\n return 'jun20rivers/%0.2f_0.15_90_tri_%d' % (s, n)\n\n strengths = [0.01, 0.1, 1, 10, 100, 1000]\n stats = {}\n cmap = plt.get_cmap('magma')\n no = 0.66\n try:\n with open(folder+'/'+file+'_%0.2f.p' % no,'rb') as f:\n stats = pickle.load(f)\n except FileNotFoundError:\n for s in strengths:\n print(s)\n stats[s] = np.array([])\n for n in range(ntot):\n try:\n a = xy.DeltaNetwork.load(fname(s, no, n))\n stats[s] = np.append(stats[s], stat(a, file, thr=1e-5))\n print('m')\n except FileNotFoundError:\n pass\n print(stats[s])\n with open(folder+'/'+file+'_%0.2f.p' % no, 'wb') as f:\n pickle.dump(stats, f)\n print(stats)\n avg = np.array([np.median(stats[s]) for s in strengths])\n firsts = np.array([np.percentile(stats[s], 25) for s in strengths])\n thirds = np.array([np.percentile(stats[s], 75) for s in strengths])\n ax = plt.gca()\n cmap = plt.get_cmap('plasma')\n ax.plot(2*np.array(strengths), avg, c=cmap(0.66),\n label='66\\% noise \\nmodel')\n ax.fill_between(2*np.array(strengths), firsts, thirds, alpha=0.2,\n color=cmap(0.66))\n ax.set_xscale('log')\n\n# our T* estimates\nT = {'<NAME>': 1.5E-01,\n 'Mississippi': 5.4E-01,\n 'Wax': 9.5E-01,\n 'Mossy': 5.6E-01,\n 'Kolyma': 5.0E-01,\n 'Colville': 4.7E-01,\n 
'Apalachicola': 5.9E+00,\n 'Mackenzie': 3.5E+00,\n 'Orinoco': 1.4E+01,\n 'Yenisei': 2.5E+00,\n 'Lena': 2.9E+01,\n 'Yukon': 7.5E+00,\n 'Betsiboka': 8.1E+01,\n 'Irrawaddy': 5.0E+02,\n 'GBM': 1.2E+02,\n 'Rajang': 2.3E+02,\n 'Niger': 3.2E+02,\n 'Sarawak': 2.5E+03,\n 'Ras Isa': 6.0E+03,\n 'Barnstable': 5.5E+03\n }\n\n# Nienhuis T* estimates, with our estimates substituted in where they\n# do not provide an estimate\nTN = {'St Clair': 0,\n 'Mississippi': 2.66E-01,\n 'Wax': 0,\n 'Mossy': 0,\n 'Kolyma': 5.91E-01,\n 'Colville': 2.03E-01,\n 'Apalachicola': 1.43E-01,\n 'Mackenzie': 3.22E+01,\n 'Orinoco': 1.19E+00,\n 'Yenisei': 1.35E-02,\n 'Lena': 4.29E-02,\n 'Yukon': 7.4E-01,\n 'Betsiboka': 9.90E-01,\n 'Irrawaddy': 3.53E+00,\n 'GBM': 0,\n 'Rajang': 2.5E+00,\n 'Niger': 0,\n 'Sarawak': 6.23E+00,\n 'Ras Isa': 0,\n 'Barnstable': 0\n }\n\ndef getStat(file='mstdiff'):\n \"\"\" calculate statistics and store them (or access them if already stored)\n for the real deltas\n \"\"\"\n try:\n with open('shp/'+file+'.p', 'rb') as f:\n return pickle.load(f)\n except FileNotFoundError:\n x = []\n for d in T.keys():\n x.append(stat(xy.newjson(d), file, thr=1e-4))\n print(d, x[-1])\n with open('shp/'+file+'.p', 'wb') as f:\n pickle.dump(x, f)\n return x\n\ndef realdataplot(file='wbridges', style='line'):\n \"\"\" Create a figure with the statistics from the real deltas\n in log T* space.\n Also calls jun20rivers() to draw the lines for simulated deltas\n \"\"\"\n plt.figure(figsize=(8,9))\n jun20rivers(file=file, style=style, thr=1e-5)\n plt.xscale('log')\n plt.ylabel(ylabels[file])\n if file in ['resdist', 'resdist1', 'pathnodes', 'flowchange']:\n return None\n\n y = getStat(file)\n Tstar = np.array(list(T.values()))\n labels = list(T.keys())\n y = np.array(y)\n\n print({labels[i]: y[i] for i in range(len(labels))})\n\n # plot data points with x error bars\n xerr = []\n for d in T.keys():\n if TN[d] == 0:\n xerr.append(T[d])\n else:\n xerr.append(TN[d])\n #plt.errorbar(Tstar, y, c='k', ls='', ms=10,\n # xerr = [0.5*Tstar, 1.001*Tstar], lw=0.2)\n plt.plot(Tstar, y, 'g.', label='Data', ms=14)\n\n # adjust data labels\n for i in range(len(Tstar)):\n #These settings for wbridges\n if file == 'wbridges':\n if labels[i] in ['Lena']:\n plt.text(0.65*Tstar[i], y[i]+0.025, labels[i], fontsize=14,\n alpha=0.5)\n elif labels[i] in ['Ras Isa']:\n plt.text(0.4*Tstar[i], y[i]+0.025, labels[i], fontsize=14,\n alpha=0.5)\n elif labels[i] in ['Barnstable']:\n plt.text(0.2*Tstar[i], y[i]-0.025, labels[i], fontsize=14,\n alpha=0.5)\n else:\n plt.text(0.65*Tstar[i], y[i]+0.005, labels[i], fontsize=14,\n alpha=0.5)\n #These settings for loopareas\n elif file == 'loopareas':\n if labels[i] in ['Mississippi']:\n plt.text(0.65*Tstar[i], y[i]+0.015, labels[i], fontsize=14,\n alpha=0.5)\n elif labels[i] in ['Ras Isa']:\n plt.text(0.4*Tstar[i], y[i]+0.018, labels[i], fontsize=14,\n alpha=0.5)\n elif labels[i] in ['Barnstable']:\n plt.text(0.2*Tstar[i], y[i]+0.005, labels[i], fontsize=14,\n alpha=0.5)\n else:\n plt.text(0.65*Tstar[i], y[i]+0.005, labels[i], fontsize=14,\n alpha=0.5)\n elif file == 'nloops':\n if labels[i] in ['Mississippi']:\n plt.text(0.65*Tstar[i], y[i]+65, labels[i], fontsize=14,\n alpha=0.5)\n elif labels[i] in ['Wax']:\n plt.text(0.85*Tstar[i], y[i]+4, labels[i], fontsize=14,\n alpha=0.5)\n elif labels[i] in ['Mossy', 'Colville', 'Kolyma']:\n plt.text(0.65*Tstar[i], y[i]+74, labels[i], fontsize=14,\n alpha=0.5)\n elif labels[i] in ['Ras Isa']:\n plt.text(0.45*Tstar[i], y[i]+20, labels[i], fontsize=14,\n alpha=0.5)\n elif 
labels[i] in ['Betsiboka']:\n plt.text(0.65*Tstar[i], y[i]+13, labels[i], fontsize=14,\n alpha=0.5)\n elif labels[i] in ['Barnstable']:\n plt.text(0.2*Tstar[i], y[i]+30, labels[i], fontsize=14,\n alpha=0.5)\n elif labels[i] in ['Sarawak']:\n plt.text(0.4*Tstar[i], y[i]+4, labels[i], fontsize=14,\n alpha=0.5)\n else:\n plt.text(0.65*Tstar[i], y[i]+4, labels[i], fontsize=14,\n alpha=0.5)\n #otherwise mstdiff\n else:\n if labels[i] in ['<NAME>']:\n plt.text(0.35*Tstar[i], y[i]+0.003, labels[i], fontsize=14,\n alpha=0.5)\n elif labels[i] in ['Barnstable']:\n plt.text(0.2*Tstar[i], y[i]+0.0005, labels[i], fontsize=14,\n alpha=0.5)\n elif labels[i] in ['Orinoco']:\n plt.text(0.65*Tstar[i], y[i]+0.0008, labels[i], fontsize=14,\n alpha=0.5)\n elif labels[i] in ['Yenisei', 'Mackenzie']:\n plt.text(0.5*Tstar[i], y[i]+0.0005, labels[i], fontsize=14,\n alpha=0.5)\n else:\n plt.text(0.65*Tstar[i], y[i]+0.0005, labels[i], fontsize=14,\n alpha=0.5)\n plt.tick_params()\n\n # binning\n bins = np.logspace(-1, 4, 6)\n biny = np.zeros(bins.shape[0] - 1)\n binarg = np.searchsorted(bins, Tstar)\n for i in range(len(y)):\n biny[binarg[i]-1] += y[i]\n biny /= np.unique(binarg, return_counts=True)[1]\n plt.plot(10**((np.log10(bins[:-1])+np.log10(bins[1:]))/2),\n biny, '--', color='g')\n '''for i in range(len(bins)-1):\n plt.plot([bins[i], bins[i+1]], [biny[i], biny[i]], '-',\n color='g', alpha=0.2)\n plt.plot([bins[i], bins[i]], [biny[i]-0.0025, biny[i]+0.0025], '-',\n color='g', alpha=0.2)\n plt.plot([bins[i+1], bins[i+1]], [biny[i]-0.0025, biny[i]+0.0025], '-',\n color='g', alpha=0.2)'''\n\ndef align(file='wbridges', plot=False):\n \"\"\" Finds the amount that the peak of data measurements and simulated delta\n statistics differ\n not used in paper\n \"\"\"\n from scipy.interpolate import interp1d\n\n strengths = np.append([1e-10], np.append(np.logspace(-2, 3, 21), 1e5))\n with open('jun20rivers/'+file+'.p','rb') as f:\n stats = pickle.load(f)\n avg = np.append([0],\n np.append([np.median(stats[s]) for s in strengths[1:-1]], 0))\n with open('shp/'+file+'.p', 'rb') as f:\n y = np.array(pickle.load(f))\n\n strengths *= 2\n deltasT = np.array(list(T.values()))\n def error(x):\n f = interp1d(x[0]*strengths, x[1]*avg)\n return np.linalg.norm(f(deltasT) - y, ord=2)\n\n from scipy.optimize import minimize\n res = minimize(error, x0=[1,1])\n if plot:\n plt.plot(res.x[0]*strengths, res.x[1]*avg, '--', color='tab:green')\n print(res.x)\n\ndef linearStatsAnalysis(file='mstdiff'):\n \"\"\" fits two lines to the left and right sides of the real datapoints when\n their statistic values are shuffled (but T* values kept the same)\n Does this 5e4 times\n Prints what fraction have left slope and right slope at least as strong\n as the real data\n Returns all the slopes\n \"\"\"\n x = np.log10(list(T.values()))\n y = np.array(getStat(file))\n\n shuffles = 50000\n peaks = np.arange(6, 15)\n\n left = np.zeros((shuffles, len(peaks)))\n right = np.zeros((shuffles, len(peaks)))\n\n for i in range(shuffles):\n if i % 10000 == 0:\n print(i)\n y = np.random.permutation(y)\n for j in range(len(peaks)):\n left[i,j] = np.linalg.lstsq(\n np.stack((np.ones(peaks[j]), x[:peaks[j]]), axis=1),\n y[:peaks[j]], rcond=None)[0][1]\n right[i,j] = np.linalg.lstsq(\n np.stack((np.ones(len(x)-peaks[j]), x[peaks[j]:]), axis=1),\n y[peaks[j]:], rcond=None)[0][1]\n\n x = np.log10(np.sort(list(T.values())))\n y = np.array(getStat('wbridges'))\n for p in np.arange(6, 15):\n leftslope = np.linalg.lstsq(\n np.stack((np.ones(p), x[:p]), axis=1), y[:p],\n 
rcond=None)[0][1]\n rightslope = np.linalg.lstsq(\n np.stack((np.ones(len(x)-p), x[p:]), axis=1), y[p:],\n rcond=None)[0][1]\n print(leftslope, rightslope)\n print(np.sum(np.logical_and(\n left[:,p-6] > leftslope, right[:,p-6] < rightslope)))\n\n return left, right\n\ndef quadraticStatsAnalysis(file='mstdiff'):\n \"\"\" Randomizes data as above and finds what fraction of the 5e4 samples\n have a trend as strong as the data. If the original data has signed\n curvature C and peak at P, the fit to randomized data must have\n peak p at P/2 < p < 2P and curvature greater in magnitude than C with\n the same sign\n \"\"\"\n x = np.log10(np.sort(list(T.values())))\n y = np.array(getStat(file))\n\n shuffles = 50000\n\n # curvature\n c = np.zeros(shuffles)\n\n # peak location\n p = np.zeros(shuffles)\n\n for i in range(shuffles):\n if i % 10000 == 0:\n print(i)\n y = np.random.permutation(y)\n sol = np.linalg.lstsq(\n np.stack((np.ones(len(x)), x, x**2), axis=1), y, rcond=None)[0]\n\n c[i] = sol[2]\n p[i] = -sol[1]/(2*sol[2])\n\n # fit to the true (unshuffled) data, using the same statistic\n x = np.log10(list(T.values()))\n y = np.array(getStat(file))\n\n sol = np.linalg.lstsq(\n np.stack((np.ones(len(x)), x, x**2), axis=1), y, rcond=None)[0]\n peak = -sol[1]/(2*sol[2])\n print(sol[2], peak)\n print(np.sum((c < sol[2]) * \\\n np.logical_and(p > peak/2, p < peak*2)))\n\n return c, p\n\ndef absoluteValueStatsAnalysis(file='mstdiff'):\n \"\"\" Randomizes data as above and checks which of the fits of a function\n a|x-b| + cx + d have coefficient a more negative than for the true data,\n c more positive than for the true data,\n and b between 0 and 2 (so 1 < peak T* < 100)\n \"\"\"\n from scipy.optimize import curve_fit\n def f(x, a, b, c, d):\n return a*np.abs(x-b) + c*x + d\n\n x = np.log10(list(T.values()))\n y = np.array(getStat(file))\n\n opt, _ = curve_fit(f, x, y, p0=[0,0,0,0.05], bounds=(-2, 2))\n print(opt)\n\n shuffles = 50000\n\n avec = np.zeros(shuffles)\n bvec = np.zeros(shuffles)\n cvec = np.zeros(shuffles)\n dvec = np.zeros(shuffles)\n\n for i in range(shuffles):\n y = np.random.permutation(y)\n try:\n (avec[i], bvec[i], cvec[i], dvec[i]), _ = \\\n curve_fit(f, x, y, p0=[0,0,0,0.5])\n except RuntimeError:\n avec[i] = np.average(avec[:i])\n bvec[i] = np.average(bvec[:i])\n cvec[i] = np.average(cvec[:i])\n dvec[i] = np.average(dvec[:i])\n\n print(np.sum((avec < opt[0]) & (cvec > opt[2]) & \\\n np.logical_and(bvec > 0, bvec < 2)))\n\n ax1 = plt.subplot(141)\n plt.hist(avec)\n ax2 = plt.subplot(142)\n plt.hist(bvec)\n ax3 = plt.subplot(143)\n plt.hist(cvec)\n ax4 = plt.subplot(144)\n plt.hist(dvec)\n plt.show()\n\n return avec, bvec, cvec\n\nif __name__ == '__main__':\n # quick sanity check that a simulated delta loads; uncomment an analysis to run it\n a = xy.DeltaNetwork.load('jun20rivers/0.02_0.15_90_tri_34.p')\n #linearStatsAnalysis()\n #quadraticStatsAnalysis()\n #absoluteValueStatsAnalysis()\n", "id": "5654914", "language": "Python", "matching_score": 2.5075392723083496, "max_stars_count": 0, "path": "simulate.py" }, { "content": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Fri May 4 13:35:17 2018\n\n<NAME> 2017\n<NAME> 2019\n\"\"\"\nimport numpy as np\nimport scipy.io\nimport matplotlib.pyplot as plt\nimport networkx as nx\nimport xylem as xy\nfrom xylem import PialNetwork as pi\nfrom xylem import DeltaNetwork as de\nfrom shapely.geometry import Polygon, Point\nimport pickle\nimport cmath\nfrom scipy.stats import ks_2samp\nimport csv\n\nimport warnings\nimport time\nimport gc\n\ndef s(theta):\n if theta <= np.pi :\n return theta\n else:\n return theta - 2*np.pi\n\nclass AnalysisClass(object):\n\n def data_to_pial(self, filename, dataind, 
rescale=True):\n #build a AnalysisClass object from a data_file\n f = scipy.io.loadmat(filename)\n self.vertices = f['G'][0][0][0][0,0]#number of vertices in the graph\n self.edges = f['G'][0][0][1][0,0]#number of edges\n x = f['G'][0][0][2][0]\n y = f['G'][0][0][3][0]\n vertices_type = f['G'][0][0][4][0]\n self.penetrating_arterioles = np.where(vertices_type==2)[0]\n self.Adj = f['G'][0][0][5] #adjacency matrix\n\n #rescale data to scalebars (so x,y are in mm)\n if rescale:\n imported = []\n with open('scaling_claire.csv') as f:\n filereader = csv.reader(f, delimiter=',')\n for row in filereader:\n imported.append(row)\n scalingdata = np.array(imported[dataind]).astype('int')\n node1 = scalingdata[3]; node2 = scalingdata[4]\n a = (x[node1],y[node1])\n b = (x[node2],y[node2])\n dist = np.sqrt( (a[0]-b[0])**2+(a[1]-b[1])**2 )\n x = x - a[0]\n y = y - a[1]\n x = x/(dist*scalingdata[1]/scalingdata[2])\n y = y/(dist*scalingdata[1]/scalingdata[2])\n x = x - min(x); y = max(y) - y\n\n x = x.tolist()\n y = y.tolist()\n positions = zip(x,y)\n self.pos = dict(zip(range(self.vertices),positions)) #useful to plot\n rows, cols = np.where(self.Adj.todense() == 1)\n edges = zip(rows.tolist(), cols.tolist()) # contain every pair of vertices connected with an edge :(n_1,n_2) but also (n_2,n_1)\n self.G = nx.Graph() #create a graph\n self.G.add_edges_from(edges) #build the graph by adding the edge. Each edge appear twice : (n1,n2) and (n2,n1)\n for n in self.G.nodes() : #adding the position of the nodes\n self.G.node[n]['x'] = x[n]\n self.G.node[n]['y'] = y[n]\n self.sources = None\n\n def simulation_to_pial(self, xylem):\n #build a AnalysisClass object from a xylem object\n self.penetrating_arterioles = xylem.sinks #list of the indexes of the pa\n self.edges = xylem.bonds #number of edges\n self.G = xylem.to_networkx() #a graph\n self.vertices = xylem.verts #number of vertices\n x_pos = []\n y_pos = []\n for n in self.G.nodes() :\n x_pos.append(self.G.node[n]['x'])\n y_pos.append(self.G.node[n]['y'])\n positions = zip(x_pos,y_pos)\n self.pos = dict(zip(self.G.nodes(),positions))\n self.sources = xylem.sources\n self.sigma_rescaled = xylem.sigma\n\n def plot_data(self):\n #fig = plt.figure()\n #fig, ax = plt.subplots(figsize=(10, 10))\n #nx.draw_networkx(self.G, self.pos, with_labels=False,node_size=4,node_color='blue')\n #nx.draw_networkx_nodes(self.G, self.pos, nodelist=list(self.penetrating_arterioles), node_color='y',alpha=0.8,node_size=14)\n '''x_art=[self.pos[n][0] for n in self.penetrating_arterioles]\n y_art=[self.pos[n][1] for n in self.penetrating_arterioles]\n plt.plot(x_art,y_art,marker='.',color='y',linestyle='none',alpha=0.5,markersize=8)'''\n plt.axis('off')\n for pair in self.G.edges():\n x = [self.pos[pair[0]][0], self.pos[pair[1]][0] ]\n y = [self.pos[pair[0]][1], self.pos[pair[1]][1] ]\n plt.plot(x, y, 'b', alpha=1-sum(y)/2/8000)\n for sink in self.penetrating_arterioles:\n plt.plot([self.pos[sink][0]], [self.pos[sink][1]], 'y.', markersize=10,\n alpha=1-self.pos[sink][1]/8000)\n\n def plot_in_color(self,node_list,color):\n #to color the node_list of your choice in the color of your choice\n x = [self.pos[n][0] for n in node_list]\n y = [self.pos[n][1] for n in node_list]\n plt.plot(x,y,marker='.',color=color,linestyle='none')\n\n def remove_trees(self):\n #return a copy of the AnalysisClass object without the tree-part.\n G2 = self.G.copy()\n node_list = np.array(G2.nodes)\n PAs2 = list(self.penetrating_arterioles.copy())\n\n #Remove any loops (self edges)\n for i in node_list:\n if 
G2.has_edge(i,i):\n G2.remove_edges_from([(i,i)])\n\n A = nx.adjacency_matrix(G2,node_list,weight=None)\n B = A.sum(axis=1)\n nodes_to_remove = node_list[np.where(B<=1)[0]]\n pos2 = dict(self.pos)\n while len(nodes_to_remove) > 0:\n for n in nodes_to_remove:\n pos2.pop(n,None)\n if n in PAs2:\n PAs2.remove(n)\n G2.remove_nodes_from(nodes_to_remove)\n A = nx.adjacency_matrix(G2,node_list,weight=None)\n B = A.sum(axis=1)\n #nodes_to_remove= list(np.where(B==1)[0])\n nodes_to_remove = node_list[np.where(B==1)[0]]\n new_pial = AnalysisClass()\n new_pial.G = G2\n new_pial.pos = pos2\n new_pial.penetrating_arterioles = PAs2\n new_pial.sources = None\n return (new_pial)\n\n def length_loop(self,cycle):\n c = [self.pos[n] for n in cycle]\n polygon = Polygon(c)\n return polygon.length\n\n def area_loop(self,cycle):\n c = [self.pos[n] for n in cycle]\n polygon = Polygon(c)\n return polygon.area\n\n def loop_not_minimal(self,cycle):\n #returns False if a cycle is minimal (does not contain anything inside),\n #True if the cycle is not minimal\n c = [self.pos[n] for n in cycle]\n polygon = Polygon(c)\n for n in self.G.nodes() :\n point = Point((self.G.node[n]['x'],self.G.node[n]['y']))\n if polygon.contains(point):\n return True\n return False\n\n def find_left_edge(self,edge):\n '''The AnalysisClass must be tree-free first by using the function remove_trees'''\n node = edge[1]\n neighbors = list(self.G.neighbors(node))\n neighbors.remove(edge[0])\n\n z = complex(self.G.node[node]['x']-self.G.node[edge[0]]['x'],self.G.node[node]['y']-self.G.node[edge[0]]['y'])\n z2 = [s(cmath.phase(complex(self.G.node[n]['x']-self.G.node[node]['x'],self.G.node[n]['y']-self.G.node[node]['y'])/z)) for n in neighbors]\n i = np.argmax(z2)\n\n left_edge = (node,neighbors[i])\n return left_edge\n\n def find_left_loop(self,edge,Bonds):\n #find a minimal loop, by starting from an edge (orientated) and turning left\n cycle = []\n cycle_nodes = []\n\n cycle.append(edge)\n cycle_nodes.append(edge[0])\n Bonds.remove(edge)\n\n first_node = edge[0]\n last_node = edge[1]\n\n while last_node != first_node:\n cycle_nodes.append(last_node)\n edge = self.find_left_edge(edge)\n last_node = edge[1]\n cycle.append(edge)\n Bonds.remove(edge)\n\n return(cycle,cycle_nodes)\n\n def find_all_minimal_loops(self):\n '''self has to be tree-free by using remove_trees first'''\n cycles = []\n cycles_nodes = []\n Bonds = []\n for edge in self.G.edges():\n Bonds.append(edge)\n Bonds.append(tuple(reversed(edge)))\n while len(Bonds)>0 :\n first = Bonds[0]\n result = self.find_left_loop(first,Bonds)\n cycles.append(result[0])\n cycles_nodes.append(result[1])\n dico = dict()\n for i in range(len(cycles_nodes)):\n if set(cycles_nodes[i]) not in dico.values():\n dico[i] = set(cycles_nodes[i])\n cycles = [cycles[i] for i in dico.keys()]\n self.cycles_edges = cycles\n self.cycles_nodes = [cycles_nodes[i] for i in dico.keys()]\n #print(len(self.cycles_nodes))\n i = 0\n ''' Because the outer contour of the network remains\n even though it is not a minimal loop, we have to remove it'''\n while i<len(self.cycles_nodes):\n if self.loop_not_minimal(self.cycles_nodes[i]):\n self.cycles_nodes.pop(i)\n self.cycles_edges.pop(i)\n else :\n i += 1\n\n def count_pa_per_loop(self):\n '''returns a list containing the number of penetrating arterioles on each\n loop, meaning either it is a node of the cycle or it is attached\n to a tree inside a cycle. 
If a pa belongs to n loops, it counts for\n 1/n for each loop.'''\n cycles = self.cycles_nodes\n stats = np.zeros(len(cycles))\n\n polygons = [Polygon([self.pos[n] for n in cycle]) for cycle in\n self.cycles_nodes]\n for pa in self.penetrating_arterioles:\n l = 0 #number of loops containing pa\n m = [] #indices of loops containing pa\n A = None #is pa inside a loop (True) or part of the loop (false)?\n for k in range(len(cycles)):\n if A != False :\n point = Point((self.G.node[pa]['x'],self.G.node[pa]['y']))\n polygon = polygons[k]\n if polygon.contains(point):\n A = True\n l = 1\n m = [k]\n break\n if A != True :\n if pa in cycles[k]:\n l += 1\n m.append(k)\n A = False\n for p in m:\n stats[p] += 1/l\n return stats\n\n def compute_tree_length_per_loop(self):\n G2 = self.G.copy()\n node_list = np.array(G2.nodes)\n\n #Remove any loops (self edges)\n for i in node_list:\n if G2.has_edge(i,i):\n G2.remove_edges_from([(i,i)])\n\n A = nx.adjacency_matrix(G2,node_list,weight=None)\n B = A.sum(axis=1)\n #Position in adj mx is not necessarily node label in graph, take indices of node_list where degree==1 in A\n extremities = node_list[np.where(B==1)[0]]\n\n num_cycles = len(self.cycles_nodes)\n\n L = [[] for i in range(num_cycles)]\n\n #first we find the loop in which the extremities are located\n #we know the rest of the tree will be located in the same loop\n\n polygons = []\n for i in range(num_cycles):\n polygons.append(Polygon([self.pos[n] for n in self.cycles_nodes[i]]))\n\n S = False\n\n for i in range(num_cycles):\n poly = polygons[i]\n for n in extremities:\n point = Point((self.G.node[n]['x'], self.G.node[n]['y']))\n if poly.contains(point):\n S = True\n L[i].append(n)\n\n '''rainbow = plt.get_cmap('rainbow')\n nx.draw_networkx(G2, self.pos, node_size=5, with_labels=False,font_size=5,node_color='r')\n nx.draw_networkx_nodes(G2, self.pos, nodelist=[1140], node_color=rainbow(0),node_size=100)\n nx.draw_networkx_nodes(G2, self.pos, nodelist=L[14], node_color=rainbow(0.5),node_size=50)\n\n x,y = polygons[14].exterior.xy\n plt.plot(x,y,)\n plt.show()'''\n\n\n '''for n in extremities:\n for i in range(num_cycles) :\n poly = polygons[i]\n point = Point((self.G.node[n]['x'],self.G.node[n]['y']))\n if poly.contains(point):\n S=True #means there is at least one cycle with a tree inside\n if n not in L[i]:\n L[i].append(n)\n\n break '''\n\n Length = np.zeros(num_cycles)\n\n while S:\n #L[k] contains the nodes with one neighbor inside cycle k\n L_next=[[] for i in range(num_cycles)]\n used_nodes = []\n\n for i in range(num_cycles):\n for k in L[i]:\n k_next = next(G2.neighbors(k))\n\n #Calculate length of edge\n z = complex(self.G.node[k]['x'] - self.G.node[k_next]['x'],\n self.G.node[k]['y'] - self.G.node[k_next]['y'])\n dist = abs(z)\n Length[i] += dist\n\n #Handle any trees that stretch across loops by ignoring any future attempts to remove k_next (see MDA303)\n if k_next not in used_nodes:\n used_nodes.append(k_next)\n L_next[i].append(k_next)\n G2.remove_node(k)\n else: pass\n\n\n L[i] = []\n\n S = False\n reused_nodes_bool = False\n\n node_list = np.array(G2.nodes)\n\n A = nx.adjacency_matrix(G2,node_list,weight=None)\n B = A.sum(axis=1)\n\n extremities = node_list[np.where(B==1)[0]]\n\n for i in range(num_cycles):\n for k_next in L_next[i]:\n if k_next in extremities:\n if k_next not in L[i]:\n L[i].append(k_next)\n S = True\n\n '''for i in range(num_cycles):\n for k_next in L_next[i]:\n if B[k_next] == 1:\n L[i].append(k_next)\n S=True'''\n\n return(Length)\n\n def 
compute_tree_ratio_per_loop(self):\n #tree_ratio of a loop = length of the trees inside this loop/perimeter of this loop\n #return ratio = list of the tree ratios\n Ratio = self.compute_tree_length_per_loop()\n for i in range(len(self.cycles_nodes)):\n perimeter = self.length_loop(self.cycles_nodes[i])\n Ratio[i] /= perimeter\n return Ratio\n\n def compute_numPAs_to_loop_area_per_loop(self):\n Ratio = self.count_pa_per_loop()\n for i in range(len(self.cycles_nodes)):\n area = self.area_loop(self.cycles_nodes[i])\n Ratio[i] /= area\n return Ratio\n\n def compute_tree_length_to_loop_area_per_loop(self):\n Ratio = self.compute_tree_length_per_loop()\n for i in range(len(self.cycles_nodes)):\n area = self.area_loop(self.cycles_nodes[i])\n Ratio[i] /= area\n return Ratio\n\n def loop_perimeters(self):\n perimeters = []\n for i in range(len(self.cycles_nodes)):\n p = self.length_loop(self.cycles_nodes[i])\n perimeters.append(p)\n return perimeters\n\n def loop_areas(self):\n areas = []\n for i in range(len(self.cycles_nodes)):\n a = self.area_loop(self.cycles_nodes[i])\n areas.append(a)\n return areas\n\n def find_pa_inside_loop(self,cycle):\n #does this include the PAs on the loop itself? In the data they are not on the loop, but in xylem they are\n c=[self.pos[n] for n in cycle]\n polygon=Polygon(c)\n L=[]\n for pa in self.penetrating_arterioles :\n point = Point(self.pos[pa])\n if polygon.contains(point):\n L.append(pa)\n return (L)\n\n################################################################################\n\ndef meshplotter_old(sigma_list, c_list, data, title='', cbarlabel='',\n logbool=False, graphplot=False, save_folder=''):\n sigmas = np.array(sigma_list)\n sigma_avgs = (sigmas[:-1] + sigmas[1:])/2\n sigma_bounds = [0, *sigma_avgs, sigma_list[-1]+1]\n\n cs = np.array(c_list)\n c_avgs = (cs[:-1] + cs[1:])/2\n c_bounds = [0, *c_avgs, c_list[-1]+1]\n\n xbounds, ybounds = np.meshgrid(sigma_bounds, c_bounds)\n\n if graphplot:\n fig = plt.figure(figsize=(20,10))\n grid = plt.GridSpec(2, 4, hspace=0.2, wspace=0.2)\n else:\n fig = plt.figure(figsize=(10,10))\n grid = plt.GridSpec(2, 2, hspace=0.2, wspace=0.2)\n\n\n main_ax = fig.add_subplot(grid[:,:2])\n\n if logbool:\n mesh = plt.pcolormesh(xbounds,ybounds,np.log10(data))\n else:\n mesh = plt.pcolormesh(xbounds,ybounds,data)\n plt.yscale('log')\n plt.title(title, fontsize=20)\n plt.xlim([0,sigma_list[-1]])\n plt.ylim([min(c_list)-1e-2, max(c_list)])\n\n plt.xlabel(r'$\sigma$', fontsize=20)\n plt.ylabel('c', fontsize=20)\n\n cbar = plt.colorbar()\n cbar.ax.get_yaxis().labelpad = 15\n if cbarlabel == '':\n cbarlabel = title\n if logbool:\n cbar.set_label('log_10 of ' + cbarlabel, rotation=270)\n else:\n cbar.set_label(cbarlabel, rotation=270)\n\n x,y = np.meshgrid(sigma_list,c_list)\n plt.plot(x, y, 'k.', markersize=2)\n if graphplot:\n pairs = np.array([[0,-2],[-2,-2],[0,0],[-2,0]])\n for pair in zip([[0,2],[0,3],[1,2],[1,3]],pairs):\n ax = fig.add_subplot(grid[pair[0][0], pair[0][1]])\n ax.set_axis_off()\n try:\n picklefilename = save_folder + \"/pial_c%0.2f_w%0.2f%d.obj\" % \\\n (c_list[pair[1][1]],sigma_list[pair[1][0]], 1)\n with open(picklefilename, 'rb') as f:\n netw = pickle.load(f)\n netw.plot_data()\n except: pass\n\n plt.title(r'$\sigma = %0.2f$, $c = %0.2f$' %\n (sigma_list[pair[1][0]],c_list[pair[1][1]]))\n\n return mesh\n\ndef meshplotter(sigmas, cs, data, logbool=False, graphplot=False,\n savefolder='', vmin=None, vmax=None,):\n sigma_avgs = (sigmas[:-1] + sigmas[1:])/2\n sigma_bounds = [0, *sigma_avgs, sigmas[-1]+1]\n\n c_avgs = 
(cs[:-1] + cs[1:])/2\n c_bounds = [0, *c_avgs, cs[-1]+10]\n\n xbounds, ybounds = np.meshgrid(sigma_bounds, c_bounds)\n\n if graphplot:\n fig = plt.figure(figsize=(20,10))\n grid = plt.GridSpec(2, 4, hspace=0.2, wspace=0.2)\n else:\n fig = plt.figure(figsize=(10,10))\n grid = plt.GridSpec(2, 2, hspace=0.2, wspace=0.2)\n\n plt.axis('off')\n if graphplot:\n pairs = np.array([[1,-4],[-2,-4],[1,1],[-2,1]])\n\n for pair in zip([[0,0],[0,1],[1,0],[1,1]],pairs):\n ax = fig.add_subplot(grid[pair[0][0], pair[0][1]])\n ax.set_axis_off()\n pname = \"%s/s%0.2f_c%0.2f.obj\" % (savefolder,\n sigmas[pair[1][0]], cs[pair[1][1]])\n try:\n with open(pname, 'rb') as f:\n a = pickle.load(f)\n a.plot()\n except FileNotFoundError:\n a = make_pial(sigmas[pair[1][0]], cs[pair[1][1]], n_sources=10)\n with open(pname, 'wb') as f:\n pickle.dump(a, f)\n a.plot()\n\n plt.title(r'$\sigma = %0.2f$, $c = %0.2f$' %\n (sigmas[pair[1][0]], cs[pair[1][1]]))\n\n main_ax = fig.add_subplot(grid[:,-2:])\n plt.axis('on')\n\n if graphplot:\n plt.plot(sigmas[pairs[:,0]], cs[pairs[:,1]], 'rx', ms=13)\n\n if logbool:\n mesh = plt.pcolormesh(xbounds,ybounds,np.log10(data),\n vmin=vmin, vmax=vmax)\n else:\n mesh = plt.pcolormesh(xbounds,ybounds,data, vmin=vmin, vmax=vmax)\n plt.yscale('log')\n #plt.title(title, fontsize=20)\n plt.xlim([0,sigmas[-1]])\n plt.ylim([min(cs)-1e-2, max(cs)])\n\n plt.xlabel(r'$\sigma$', fontsize=20)\n plt.ylabel('c', fontsize=20)\n\n cbar = plt.colorbar()\n cbar.ax.get_yaxis().labelpad = 15\n if logbool:\n cbar.set_label(r'$\log_{10}$')\n '''# For labeling the colorbar, with optional argument 'title'\n if cbarlabel == '':\n cbarlabel = title\n if logbool:\n cbar.set_label('log_10 of ' + cbarlabel, rotation=270)\n else:\n cbar.set_label(cbarlabel, rotation=270)'''\n\n # enable to plot dots at each tested point\n #x,y = np.meshgrid(sigmas,cs)\n #plt.plot(x, y, 'k.', markersize=1)\n\n return mesh\n\ndef meshplotter_inv(sigmas, cs, data, logbool=False, graphplot=False,\n savefolder='', vmin=None, vmax=None,):\n sigma_avgs = (sigmas[:-1] + sigmas[1:])/2\n sigma_bounds = [0, *sigma_avgs, sigmas[-1]+1]\n\n cinvs = 1/cs[::-1]\n c_avgs = (cinvs[:-1] + cinvs[1:])/2\n c_bounds = [0, *c_avgs, np.amax(cinvs)+20]\n\n xbounds, ybounds = np.meshgrid(sigma_bounds, c_bounds)\n\n if graphplot:\n fig = plt.figure(figsize=(20,10))\n grid = plt.GridSpec(2, 4, hspace=0.2, wspace=0.2)\n else:\n fig = plt.figure(figsize=(10,10))\n grid = plt.GridSpec(2, 2, hspace=0.2, wspace=0.2)\n\n plt.axis('off')\n if graphplot:\n pairs = np.array([[1,1],[-2,1],[1,-4],[-2,-4]])\n\n for pair in zip([[0,0],[0,1],[1,0],[1,1]],pairs):\n ax = fig.add_subplot(grid[pair[0][0], pair[0][1]])\n ax.set_axis_off()\n pname = \"%s/s%0.2f_c%0.2f.obj\" % (savefolder,\n sigmas[pair[1][0]], cs[pair[1][1]])\n try:\n with open(pname, 'rb') as f:\n a = pickle.load(f)\n a.plot()\n except FileNotFoundError:\n a = pi.make(sigmas[pair[1][0]], cs[pair[1][1]], n_sources=10)\n a.simulate()\n with open(pname, 'wb') as f:\n pickle.dump(a, f)\n a.plot()\n\n plt.title(r'$\sigma = %0.2f$, $1/c = %0.2f$' %\n (sigmas[pair[1][0]], 1/cs[pair[1][1]]))\n\n main_ax = fig.add_subplot(grid[:,-2:])\n plt.axis('on')\n\n if graphplot:\n plt.plot(sigmas[pairs[:,0]], 1/cs[pairs[:,1]], 'rx', ms=13)\n\n if logbool:\n mesh = plt.pcolormesh(xbounds,ybounds,np.log10(data[::-1,:]),\n vmin=vmin, vmax=vmax)\n else:\n mesh = plt.pcolormesh(xbounds,ybounds,data[::-1,:], vmin=vmin, vmax=vmax)\n plt.yscale('log')\n #plt.title(title, fontsize=20)\n plt.xlim([0,sigmas[-1]+0.15])\n plt.ylim([min(cinvs)+1e-2, max(cinvs)+10])\n\n 
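# y axis shows the inverse strength 1/c (the relative fluctuation strength)\n 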
plt.xlabel('Fluctuation width scale $\\sigma$', fontsize=20)\n plt.ylabel('Relative fluctuation strength', fontsize=20)\n\n cbar = plt.colorbar()\n cbar.ax.get_yaxis().labelpad = 15\n if logbool:\n cbar.set_label('$\\log_{10}$(EMD of tree length/loop perimeter)', fontsize=18)\n '''# For labeling the colorbar, with optional argument 'title'\n if cbarlabel == '':\n cbarlabel = title\n if logbool:\n cbar.set_label('log_10 of ' + cbarlabel, rotation=270)\n else:\n cbar.set_label(cbarlabel, rotation=270)'''\n\n # enable to plot dots at each tested point\n #x,y = np.meshgrid(sigmas,cs)\n #plt.plot(x, y, 'k.', markersize=1)\n\n return mesh\n\ndef rolling_average(x,y, n=80):\n def moving_average(a, n=3):\n ret = np.cumsum(a, dtype=float)\n ret[n:] = ret[n:] - ret[:-n]\n return ret[n - 1:] / n\n i = np.argsort(x)\n x = x[i]\n y = y[i]\n return moving_average(x, n=n), moving_average(y, n=n)\n\n################################################################################\n#Old methods using the AnalysisClass analysis class\n\ndef analyze(xylem, n, save_folder):\n #n is just an index for the xylem object\n #transform xylem into a pial object, save the pial object\n #display the network and save the fig\n #return and save 2 lists w and h : w[i] tree ratio of cycle i,\n #h[i] number of pa of cycle i\n pial = AnalysisClass()\n pial.simulation_to_pial(xylem)\n\n new_pial = pial.remove_trees()\n new_pial.find_all_minimal_loops()\n pial.cycles_nodes = new_pial.cycles_nodes\n\n h = pial.count_pa_per_loop()\n w = pial.compute_tree_ratio_per_loop()\n x = pial.compute_numPAs_to_loop_area_per_loop()\n p = pial.compute_tree_length_to_loop_area_per_loop()\n\n np.savez(save_folder + '/c%0.2f_w%0.2f_%d' % (xylem.cst, xylem.sigma_ratio, n), h=h,w=w,x=x,p=p)\n\n '''fig, ax = plt.subplots(figsize=(10, 10))\n pial.plot()\n plt.savefig('network.png')'''\n\n #to plot and save the histogram\n '''fig2, ax = plt.subplots(figsize=(10, 10))\n h1=plt.hist(h, bins=np.arange(17), density=True, facecolor='dodgerblue')\n plt.xlabel('number of penetrating arterioles per loop')\n plt.ylabel('density')\n plt.title(r'Histogram of number of pa per loop: $\\mu=100$, $\\sigma=15$')\n plt.savefig('h1.png')'''\n\n #to plot and save the histogram\n '''fig3, ax = plt.subplots(figsize=(10, 10))\n h2=plt.hist(w, bins=20,facecolor='dodgerblue')\n plt.xlabel('tree ratio per loop')\n plt.ylabel('density')\n plt.title(r'Histogram tree ratio per loop: $\\mu=100$, $\\sigma=15$')\n plt.savefig('h2.png')\n print(h,w)'''\n\n return (h,w,x,p)\n\ndef analyze_data(filepath, dataind):\n #take a data file ( .mat) and does the statistics\n pial = AnalysisClass()\n pial.data_to_pial(filepath, dataind)\n\n '''filehandler = open(\"pial_c\"+str(xylem.cst)+\"_w\"+str(xylem.sigma_ratio)+\n str(n)+\".obj\",\"wb\")\n pickle.dump(pial,filehandler)\n filehandler.close()'''\n\n '''fig, ax = plt.subplots(figsize=(10, 10))\n pial.plot()\n plt.savefig(filepath[:-4]+'.png')'''\n\n new_pial=pial.remove_trees()\n new_pial.find_all_minimal_loops()\n pial.cycles_nodes=new_pial.cycles_nodes\n h = pial.count_pa_per_loop()\n\n #np.save('pa_per_loop'.npy',h)\n '''fig2, ax = plt.subplots(figsize=(10, 10))\n plt.hist(h, bins=np.arange(17), density=True, facecolor='dodgerblue')\n plt.xlabel('number of penetrating arterioles per loop')\n plt.ylabel('density')\n plt.title(r'Histogram of number of pa per loop')\n plt.savefig(file+'histo_pa.png')'''\n\n w = pial.compute_tree_ratio_per_loop()\n\n #np.save('w_c'+str(xylem.cst)+'_w'+str(xylem.sigma_ratio)+str(n)+'.npy',h)\n '''fig3, ax = 
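# The cumsum trick inside rolling_average computes an n-point moving average
# in O(len(a)): cumsum[i] - cumsum[i-n] is the sum of the last n samples, so
# dividing by n gives the window mean. A quick standalone check of that
# identity:
import numpy as np

a = np.arange(10, dtype=float)
n = 3
ret = np.cumsum(a)
ret[n:] = ret[n:] - ret[:-n]
windowed = ret[n - 1:] / n
assert np.allclose(windowed,
                   [a[i:i + n].mean() for i in range(len(a) - n + 1)])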
plt.subplots(figsize=(10, 10))
    plt.hist(w, bins=20,facecolor='dodgerblue')
    plt.xlabel('tree ratio per loop')
    plt.ylabel('density')
    plt.title(r'Histogram tree ratio per loop')
    plt.savefig(file+'histo_tree_ratio.png')'''

    x = pial.compute_numPAs_to_loop_area_per_loop()

    p = pial.compute_tree_length_to_loop_area_per_loop()

    return (h, w, x, p)

def analyze_several_data_files(L, plot=False):
    H_pa = np.array([])
    W_tree = np.array([])
    X_tree = np.array([])
    P_tree = np.array([])

    i = 1
    for file in L:
        print(file)
        res = analyze_data(file, i)
        H_pa = np.append(H_pa, res[0])
        W_tree = np.append(W_tree, res[1])
        X_tree = np.append(X_tree, res[2])
        P_tree = np.append(P_tree, res[3])
        i += 1
    #np.save('data_control_pa_per_loop.npy',H_pa)
    #np.save('data_control_tree_ratio_per_loop.npy',W_tree)

    print("Avg number of PAs per loop:", np.mean(H_pa),
          "\nAvg ratio of tree length in loop to loop perimeter", np.mean(W_tree),
          "\nAvg ratio of tree length in loop to loop area", np.mean(P_tree))

    if plot:
        # Histogram: number of PAs per loop
        fig3, ax = plt.subplots(figsize=(10, 10))
        plt.hist(H_pa, bins=20, facecolor='dodgerblue')
        plt.xlabel('Number of PAs per loop')
        plt.ylabel('Frequency')
        plt.xlim([0,75])
        plt.title(r'Distribution of the number of PAs per loop')
        plt.text(20, 140, 'Average number of PAs in a loop \n'+'$<X_{{pa}}>=${:.1f} ± {:.1f}'.format(np.mean(H_pa),np.std(H_pa)),
                 bbox={'facecolor':'lightblue', 'alpha':0.5, 'pad':10})
        plt.savefig('raw_pa_counts.png')

        # Histogram: tree length to loop perimeter ratio per loop
        fig3, ax = plt.subplots(figsize=(10, 10))
        plt.hist(W_tree, bins=25, facecolor='darkred')
        plt.xlabel('Ratio of tree length inside loop to loop perimeter [unitless]')
        plt.ylabel('Frequency')
        plt.title(r'Tree length to loop perimeter ratios')
        plt.text(0.5, 50, 'Average tree ratio per loop : \n'+'$<F_{{tree}}>=${:.3f} ± {:.2f}\nTotal number of loops: {}'.format(np.mean(W_tree),np.std(W_tree),len(W_tree)),
                 bbox={'facecolor':'lightsalmon', 'alpha':0.5, 'pad':10})
        plt.savefig('raw_perimeter_ratios.png')

        # Histogram: PA density per loop; the text box reports statistics of
        # the pruned sample that is actually plotted
        pruned = X_tree[np.nonzero(X_tree < 80)]

        fig3, ax = plt.subplots(figsize=(10, 10))
        plt.hist(pruned, bins=20, facecolor='g')
        plt.xlabel('Density of PAs in loop [1/mm^2]')
        plt.ylabel('Frequency')
        plt.title(r'PA density per loop (PAs/loop area)')
        plt.text(1, 40, 'Average PA count to area ratio per loop : \n'+'$<F_{{tree}}>=${:.2E} ± {:.2E}\nTotal number of loops: {}'.format(np.mean(pruned),np.std(pruned),len(pruned)),
                 bbox={'facecolor':'lightgreen', 'alpha':0.5, 'pad':10})
        plt.savefig('raw_PA_densities.png')

        # Histogram: tree length to loop area ratio per loop (the text box
        # previously reported the PA-density sample by mistake)
        fig3, ax = plt.subplots(figsize=(10, 10))
        plt.hist(P_tree, bins=30, facecolor='goldenrod')
        plt.xlabel('Ratio of tree length inside loop to loop area [1/mm]')
        plt.ylabel('Frequency')
        plt.title(r'Tree length to loop area ratios')
        plt.text(2.5, 30, 'Average tree to area ratio per loop : \n'+'$<F_{{tree}}>=${:.2E} ± {:.2E}\nTotal number of loops: {}'.format(np.mean(P_tree),np.std(P_tree),len(P_tree)),
                 bbox={'facecolor':'wheat', 'alpha':0.5, 'pad':10})
        plt.savefig('raw_area_ratios.png')

    return H_pa, W_tree, X_tree, P_tree

def study(save_folder, sigma_list, c_list, start_n, end_n, min_sigma=0,
          c_bounds=(0, 1000)):
    with warnings.catch_warnings():
        warnings.simplefilter('ignore')
        for n in range(start_n, end_n):
            for sigma in sigma_list:
                for c in c_list:
                    if sigma >= min_sigma and c_bounds[0] <= c <= c_bounds[1]:
                        xylem = 
make_xylem(sigma, c)

                        pial = AnalysisClass()
                        pial.simulation_to_pial(xylem)

                        filehandler = open(save_folder + "/pial_c%0.2f_w%0.2f%d.obj" % \
                            (xylem.cst, xylem.sigma_ratio, n), "wb")
                        pickle.dump(pial, filehandler)
                        filehandler.close()

                        try:
                            analyze(xylem, n, save_folder)
                        except Exception:
                            pass

                        gc.collect()

def batch_analysis(save_folder, sigma_list, c_list, max_n, logbool=False,
                   plot=False):
    x, y = np.meshgrid(sigma_list, c_list)
    h_avg = np.zeros(np.shape(y))
    w_avg = np.zeros(np.shape(y))
    x_avg = np.zeros(np.shape(y))
    p_avg = np.zeros(np.shape(y))
    loop_area_avg = np.zeros(np.shape(y))
    n_loops_avg = np.zeros(np.shape(y))
    for sigma_i in range(len(sigma_list)):
        for c_i in range(len(c_list)):
            n_loops = 0
            num_file_successes = 0
            for n in range(max_n):
                npzfilename = save_folder + '/c%0.2f_w%0.2f_%d.npz' % \
                    (c_list[c_i], sigma_list[sigma_i], n)
                picklefilename = save_folder + "/pial_c%0.2f_w%0.2f%d.obj" % \
                    (c_list[c_i], sigma_list[sigma_i], n)
                try:
                    loaded = np.load(npzfilename)
                    h_avg[c_i, sigma_i] += np.sum(loaded['h'])
                    w_avg[c_i, sigma_i] += np.sum(loaded['w'])
                    x_avg[c_i, sigma_i] += np.sum(loaded['x'])
                    p_avg[c_i, sigma_i] += np.sum(loaded['p'])
                    loop_area_avg[c_i, sigma_i] += np.sum(np.nan_to_num(loaded['h']/loaded['x']))
                    n_loops += len(loaded['h'])
                    num_file_successes += 1
                    n_loops_avg[c_i, sigma_i] += len(loaded['h'])
                except FileNotFoundError:
                    pass
            if num_file_successes > 0:
                n_loops_avg[c_i, sigma_i] /= num_file_successes
            if n_loops != 0:
                h_avg[c_i, sigma_i] /= n_loops
                w_avg[c_i, sigma_i] /= n_loops
                x_avg[c_i, sigma_i] /= n_loops
                p_avg[c_i, sigma_i] /= n_loops
                loop_area_avg[c_i, sigma_i] /= n_loops

    sigmas = np.array(sigma_list)
    sigma_avgs = (sigmas[:-1] + sigmas[1:])/2
    sigma_bounds = [0, *sigma_avgs, sigma_list[-1]+1]

    cs = np.array(c_list)
    c_avgs = (cs[:-1] + cs[1:])/2
    c_bounds = [0, *c_avgs, c_list[-1]+1]

    xbounds, ybounds = np.meshgrid(sigma_bounds, c_bounds)

    names = ['Average number of loops in simulation', 'Average sinks per loop',
             'tree length over loop perimeter', 'PAs per area (density)',
             'tree length over loop area', 'loop area']
    datas = [n_loops_avg, h_avg, w_avg, x_avg, p_avg, loop_area_avg]
    if plot:
        for data_ind in range(len(datas)):
            # meshplotter_old accepts the title/cbarlabel/save_folder
            # keywords used here; the newer meshplotter does not.
            meshplotter_old(sigma_list, c_list, np.log(datas[data_ind]),
                            title=names[data_ind], cbarlabel=names[data_ind],
                            logbool=logbool, save_folder=save_folder,
                            graphplot=True)

    return h_avg, w_avg, x_avg, p_avg, loop_area_avg, n_loops_avg

def compare_to_data(save_folder, sigma_list, c_list, max_n, expdatafiles=None,
                    logbool=True, plot=True):
    metrics = ['h','w','x','p']
    labels = ['PAs per loop', 'tree length over loop perimeter',
              'PAs per area (density)', 'tree length over loop area']
    data = {}
    for i in range(len(metrics)):
        data[metrics[i]] = []
        for c_i in range(len(c_list)):
            
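# batch_analysis accumulates per-loop sums over many runs and normalizes by
# the total loop count per (c, sigma) cell. A compact sketch of the same
# accumulate-then-normalize step; np.divide with a `where` mask skips empty
# cells instead of emitting 0/0 warnings. (Toy arrays, not the real data.)
import numpy as np

sums = np.array([[3.0, 0.0], [4.0, 9.0]])    # summed statistic per cell
counts = np.array([[3, 0], [2, 3]])          # loops seen per cell
avgs = np.divide(sums, counts, out=np.zeros_like(sums), where=counts > 0)
# avgs -> [[1., 0.], [2., 3.]]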
data[metrics[i]].append([])
            for sigma_i in range(len(sigma_list)):
                data[metrics[i]][c_i].append([])

    for c_i in range(len(c_list)):
        for sigma_i in range(len(sigma_list)):
            for n in range(max_n):
                npzfilename = save_folder + '/c%0.2f_w%0.2f_%d.npz' % \
                    (c_list[c_i], sigma_list[sigma_i], n)
                picklefilename = save_folder + "/pial_c%0.2f_w%0.2f%d.obj" % \
                    (c_list[c_i], sigma_list[sigma_i], n)
                # initialize before the try block so a missing first file
                # cannot leave loadbool undefined
                loadbool = False
                try:
                    loaded = np.load(npzfilename)
                    loadbool = True
                except FileNotFoundError:
                    pass
                if loadbool:
                    for i in range(len(metrics)):
                        data[metrics[i]][c_i][sigma_i] = np.append(
                            data[metrics[i]][c_i][sigma_i], loaded[metrics[i]])
                    '''try:
                        with open(picklefilename, 'rb') as f:
                            pial = pickle.load(f)
                        data['numPAs'][c_i][sigma_i] = np.append(data['numPAs'][c_i][sigma_i], len(pial.penetrating_arterioles))
                        print(c_i, sigma_i)
                    except: pass'''

    ci = 7
    sigi = 13

    plt.hist(data[metrics[1]][ci][sigi], bins=16)
    plt.xlabel('tree length / loop perimeter')
    plt.ylabel('frequency')
    plt.title('c=%f, sigma=%f' % (c_list[ci], sigma_list[sigi]))
    plt.text(0.5, 7, 'N = ' + str(len(data[metrics[0]][ci][sigi])))
    plt.show()

    processed = {}
    for i in range(len(metrics)):
        processed[metrics[i]] = np.zeros((len(c_list), len(sigma_list)))

    if expdatafiles is None:
        basic_entropy = True
    else:
        stats = analyze_several_data_files(expdatafiles, plot=False)

        expdensities = stats[2]
        #stats[2] = expdensities[np.logical_and(expdensities < 45, expdensities > 1e-2)]
        rho = expdensities.mean()
        lengthtomm = 1

    for i in range(len(metrics)):
        for c_i in range(len(c_list)):
            for sigma_i in range(len(sigma_list)):
                if len(data[metrics[i]][c_i][sigma_i]) > 0:
                    processed[metrics[i]][c_i, sigma_i] = ks_2samp(
                        data[metrics[i]][c_i][sigma_i], stats[i])[1]

    names = ['PAs per loop', 'tree length over loop perimeter',
             'PAs per area (density)', 'tree length over loop area']

    for data_ind in range(len(metrics)):
        # meshplotter_old takes the title/cbarlabel/save_folder keywords;
        # the newer meshplotter does not.
        meshplotter_old(sigma_list, c_list, 1-processed[metrics[data_ind]],
                        title='1-pval_'+names[data_ind],
                        cbarlabel='1-pval of '+names[data_ind],
                        logbool=True, save_folder='detailed-random')

################################################################################
# New methods

def river_batch(strengths, n, savefolder):
    for _ in range(n):
        for s in strengths:
            try:
                a = make_river(s, density=65, basin_fraction=0.08,
                               shape='square', n_sinks=25, n_sources=1,
                               basins='triangle')

                nloops = a.n_cycles()
                perloop = a.count_per_loop(type='basins')
                ps = a.loop_perimeters()
                qhull_ps = a.loop_qhull_perimeters()
                areas = a.loop_areas()
                qhull_areas = a.loop_qhull_areas()
                trees = a.tree_lengths()

                x = np.array([np.zeros(nloops),
                              perloop, trees/ps,
                              ps, qhull_ps,
                              areas, qhull_areas,
                              ps/qhull_ps, areas/qhull_areas])
                x[np.isnan(x)] = 0

                try:
                    results = np.load('%s/%0.2f.npy' % (savefolder, s))
                    results = np.append(results, x.T, axis=0)
                except FileNotFoundError:
                    results = x.T

                results[0,0] += 1
                np.save('%s/%0.2f.npy' % (savefolder, s), results)
            except ZeroDivisionError:
                print('...................................Simulation failed')
                continue
            except AssertionError:
                print('...................................Loop search failed')
                continue
            except Exception as e:
                print(e)
                continue

    return results

def pial_batch(widths, strengths, n, savefolder, n_sources=10):
    try:
        with open(savefolder + '/simcounts.p', 'rb') as f:
            simcounts = pickle.load(f)
    except FileNotFoundError:
        simcounts = {}

    for sigma in widths:
        if sigma not 
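# compare_to_data scores each (sigma, c) cell with the two-sample
# Kolmogorov-Smirnov p-value between simulated and experimental loop
# statistics; 1 - p is then plotted so the cells that disagree most stand
# out. Minimal usage sketch with synthetic samples (not the real data):
import numpy as np
from scipy.stats import ks_2samp

sim = np.random.normal(0.0, 1.0, 400)
exp = np.random.normal(0.3, 1.0, 300)
stat, pval = ks_2samp(sim, exp)   # small pval -> distributions differ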
in simcounts:\n simcounts[sigma] = {}\n for c in strengths:\n if c not in simcounts[sigma]:\n simcounts[sigma][c] = 0\n\n for _ in range(n):\n for sigma in widths:\n for c in strengths:\n try:\n a = pi.make_pial(sigma, c, density=65, sink_fraction=0.1,\n n_sources=n_sources)\n\n perloop = a.count_per_loop()\n ps = a.loop_perimeters()\n qhull_ps = a.loop_qhull_perimeters()\n areas = a.loop_areas()\n qhull_areas = a.loop_qhull_areas()\n trees = a.tree_lengths()\n\n x = np.array([perloop, trees/ps,\n ps, qhull_ps,\n areas, qhull_areas,\n ps/qhull_ps, areas/qhull_areas])\n #x[np.isnan(x)] = 0\n\n try:\n results = np.load('%s/s%0.2f_c%0.2f.npy' %\n (savefolder,sigma,c), allow_pickle=True)\n results = np.append(results, x.T, axis=0)\n except FileNotFoundError:\n results = x.T\n\n np.save('%s/s%0.2f_c%0.2f.npy' % (savefolder,sigma,c),\n results)\n\n simcounts[sigma][c] += 1\n\n print(simcounts)\n\n with open(savefolder+'/simcounts.p', 'wb') as f:\n pickle.dump(simcounts, f)\n\n except ZeroDivisionError:\n print('..................................Simulation failed')\n continue\n except AssertionError:\n print('.................................Loop search failed')\n continue\n '''except Exception as e:\n print(e)\n continue'''\n\n return results\n\ndef pial_data_dists(files):\n i = 1\n for file in files:\n a = pi.pial_xylem(file, i)\n\n t = time.time()\n perloop = a.count_per_loop()\n ps = a.loop_perimeters()\n qhull_ps = a.loop_qhull_perimeters()\n areas = a.loop_areas()\n qhull_areas = a.loop_qhull_areas()\n trees = a.tree_lengths()\n\n x = np.array([perloop, trees/ps,\n ps, qhull_ps,\n areas, qhull_areas,\n ps/qhull_ps, areas/qhull_areas])\n\n if i == 1:\n results = x.T\n else:\n results = np.append(results, x.T, axis=0)\n\n i += 1\n\n np.save('pial_dists.npy', results)\n\ndef pial_data_circles(file, i):\n from shapely.geometry import MultiLineString, LinearRing\n\n a = pi.pial_xylem(file,i)\n a.remove_trees()\n coords = [a.LEAF.Vertex[edge, :] for edge in a.LEAF.Bond]\n lines = MultiLineString(coords)\n\n xmax = max(a.LEAF.Vertex[:,0])\n ymax = max(a.LEAF.Vertex[:,1])\n\n x = np.linspace(0.2*xmax, 0.8*xmax, 10)\n y = np.linspace(0.2*ymax, 0.8*ymax, 5)\n data = np.zeros((len(x), len(y)))\n for i in range(len(x)):\n for j in range(len(y)):\n angles = np.linspace(0, 2*np.pi, 100, endpoint=False)\n circ = np.zeros((len(angles),2))\n circ[:,0] = np.cos(angles) + x[i]\n circ[:,1] = np.sin(angles) + y[j]\n\n intersections= lines.intersection(LinearRing(circ))\n try:\n data[i,j] = len(intersections)\n except TypeError:\n data[i,j] = 1\n\n return data.flatten()\n\ndef pial_multidata_circles(files):\n data = np.array([])\n i = 1\n for file in files:\n a = pial_data_circles(file, i)\n data = np.append(data, a)\n i += 1\n\n m = np.mean(data)\n s = np.std(data)\n\n plt.suptitle('Network backbone points intersecting distributed circles')\n plt.title('Mean: %0.2f, St. dev.: %0.2f' % (m, s))\n\n plt.hist(data.flatten(), bins=np.arange(20))\n plt.show()\n\ndef pial_data_sigma(file, i):\n a = pi.pial_xylem(file, i)\n dists = a.vert_distances_sqr(verts=a.sinks)\n np.fill_diagonal(dists,100)\n return np.amin(dists, axis=0)\n\ndef pial_multidata_sigma(files):\n data = np.array([])\n i = 1\n for file in files:\n data = np.append(data, pial_data_sigma(file, i))\n i += 1\n m = np.mean(data)\n s = np.std(data)\n\n plt.suptitle('Minimum distances between PAs')\n plt.title('Mean: %0.4f, St. 
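# pial_data_circles probes the mesh size of the loopy backbone by counting
# where it crosses rings of unit radius. A sketch of the same shapely
# construction on a toy segment set; in Shapely 2.x multi-part results
# expose their members via .geoms, which the hasattr check below handles.
import numpy as np
from shapely.geometry import MultiLineString, LinearRing

lines = MultiLineString([[(0, -2), (0, 2)], [(-2, 1), (2, 1)]])
angles = np.linspace(0, 2*np.pi, 100, endpoint=False)
ring = LinearRing(np.column_stack([np.cos(angles), np.sin(angles)]))
inter = lines.intersection(ring)
n_crossings = len(inter.geoms) if hasattr(inter, 'geoms') else 1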
dev.: %0.4f' % (m, s))\n\n plt.hist(data, bins=40)\n plt.xlim([0,0.5])\n plt.show()\n\nif __name__ == \"__main__\":\n #river_batch(np.linspace(0,3,31), 100, 'riverdata4.npy')\n\n pial_files = ['MDA101L_20170520_144817.mat',\n 'MDA105L_20170522_105334.mat',\n 'MDA106L_20170522_110804.mat',\n 'MDA302L_20170522_134110.mat',\n 'MDA303L_20170522_135724.mat',\n 'MDA304L_20170522_140157.mat',\n 'MDA305L_20170522_141712.mat',\n 'MDA401L_20170522_112005.mat',\n 'MDA402L_20170522_113536.mat',\n 'MDA403L_20170522_114900.mat',\n 'MDA404L_20170522_142801.mat',\n 'MDA405L_20170522_143707.mat',\n 'MDA407L_20170522_120231.mat',\n 'MDA503L_20170522_145147.mat',\n 'MDA601L_20170522_150013.mat',\n 'MDA702L_20170522_121314.mat',\n 'MDA704L_20170522_151851.mat',]\n\n pial_data_dists(pial_files)\n\n '''sigma_list = [0.1 ,0.2, 0.5, 1, 2, 5, 10]\n c_list = [0.1 ,0.2, 0.5, 1, 2, 5, 10, 20, 50]\n #study('3source', sigma_list, c_list, 10, 15)\n #batch_analysis('3source', sigma_list, c_list, 10)'''\n\n '''sigma_list = np.linspace(0.1, 5, 10)\n c_list = np.logspace(-1.5, 1.5, 8)\n #study('loglin3source', sigma_list, c_list, 1, 5)\n batch_analysis('loglin3source', sigma_list, c_list, 5, logbool=True)'''\n\n '''sigma_list = np.linspace(0.01, 5, 15)\n c_list = np.logspace(-1.5, 2, 15)\n study('detailed', sigma_list, c_list, 5, 10)\n #batch_analysis('detailed', sigma_list, c_list, 5, logbool=False)'''\n\n sigma_list = np.linspace(0.01, 5, 15)\n c_list = np.logspace(-1.5, 2, 15)\n #study('detailed-random', sigma_list, c_list, 14, 25, min_sigma=2,c_bounds=[0.5,9])\n #batch_analysis('detailed-random', sigma_list, c_list, 25, logbool=False, plot=True,)\n #batch_analysis('detailed-random', sigma_list, c_list, 22, plot=True,)\n\n #analyze_several_data_files(all_data_files, plot=False)\n\n #compare_to_data('detailed-random', sigma_list, c_list, 25, expdatafiles=all_data_files)\n", "id": "9970611", "language": "Python", "matching_score": 6.832843780517578, "max_stars_count": 0, "path": "analyze.py" }, { "content": "#!/usr/bin/env python\n# Math and linear algebra\nimport numpy as np\nimport scipy.sparse\n\n# Base object\nimport network\n\n# Setting up LEAF object\nimport LEAFclass as LFCLSS\nimport matplotlib.path as pltth\nfrom scipy.spatial import Voronoi, voronoi_plot_2d, ConvexHull\n\n# Useful for statistics\nfrom shapely.geometry import Polygon, Point, MultiPoint\nimport networkx as nx\n\n# Plotting\nimport matplotlib.pyplot as plt\nfrom matplotlib.collections import LineCollection\nfrom matplotlib import cm\nfrom matplotlib.colors import LogNorm, Normalize\nfrom matplotlib.patches import Polygon as pltPolygon\n\n# General\nimport time\nimport warnings\nimport pickle\n\nplt.rcParams.update({\n \"text.usetex\": True,\n \"font.family\": \"serif\",\n \"font.serif\": [\"Computer Modern Roman\"],\n \"font.size\": 16\n})\n\n\"\"\"\n xylem.py\n\n Contains classes for the optimization of Xylem networks.\n\n <NAME> 2012\n <NAME> 2019\n\"\"\"\n\nclass NetworkSuite(network.VascularNetwork):\n \"\"\"\n An expansion of the network.VascularNetwork class with functionality\n for simulation and analysis\n \"\"\"\n # Initialization and creation\n def __init__(self, LEAF, LSHP, size, C0=None,\n inputs='multi_sources', n_sources=3, sourceinds=None,\n outputs='line', n_sinks=None, sink_fraction=None, sinkinds=None,):\n \"\"\"Initialization:\n LEAF: Topology object describing where nodes are in the xy plane\n LSHP: LeafShape object giving the border of the network\n size: corresponds to the density of points in the network. 
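# pial_data_sigma estimates PA spacing by masking the diagonal of a pairwise
# (squared, judging by the name vert_distances_sqr) distance matrix and
# taking column minima. The same nearest-neighbour query sketched with a
# KD-tree, which returns true distances directly; k=2 because the nearest
# hit of each point is the point itself.
import numpy as np
from scipy.spatial import cKDTree

pts = np.random.random((50, 2))     # stand-in for PA positions
d, _ = cKDTree(pts).query(pts, k=2)
nearest = d[:, 1]                   # distance to nearest other PA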
the exact
            number is proportional to the square root of total nodes
            sigma: width of Gaussian fluctuations, if simulating vasculature
            cst: corresponds to strength of Gaussian fluctuations for
                vascular systems, or to the maximum fraction of output flow
                that each basin takes in or puts out for a geography
                simulation
            gamma: parameter related to the scaling law of the system being
                simulated
            C0: initial conditions, array of conductivities to begin with. If
                None, then random conductivities are used
            inputs: see fluctuation_ensemble for possible options
            outputs: see fluctuation_ensemble for possible options
            n_sinks: number of sinks
            sink_fraction: if n_sinks is not explicitly specified, this is
                the fraction of nodes that will become sinks
        """

        # essential
        super(NetworkSuite, self).__init__(LEAF, LSHP, size, C0=C0)

        self.weights_mode = 'animal'

        poly = Polygon(self.LSHP.polyedge)
        self.perimeter = poly.length
        self.area = poly.area

        self.set_sources(inputs, n_sources, sourceinds)
        self.set_sinks(outputs, n_sinks, sink_fraction, sinkinds)

        self.char_length = np.sqrt(self.area/self.n_sinks)

    @staticmethod
    # FIXME: 'semicircle' shape
    def make_LSHP(shape):
        """ Return a LSHP object with given shape.
        Possible shapes are
        'hexagon', 'circle', 'square', 'triangle'.
        """
        # Initialization of the network
        LSHP = LFCLSS.LeafShape(shape, np.pi/3)

        # Form appropriate lattice boundaries
        if shape == 'hexagon':
            theta = np.arange(0., 7.) * np.pi/3.
            LSHP.polyedge = np.array([np.cos(theta), np.sin(theta)]).T
        elif shape == 'circle' or shape == 0:
            theta = np.linspace(0, 2*np.pi, 100)
            LSHP.polyedge = np.array([np.cos(theta), np.sin(theta)]).T
        elif shape == 'semicircle':
            pass
        elif shape == 'square':
            LSHP.polyedge = np.array([[-1,-1], [1, -1], [1, 1], [-1, 1],
                                      [-1, -1]])
        elif shape == 'triangle':
            LSHP.polyedge = np.array([[-1, -1], [1, -1], [0, 1], [-1,-1]])
        elif shape == 'invtriangle':
            LSHP.polyedge = np.array([[-1, -1], [1, -1], [0, 1], [-1,-1]])
            LSHP.polyedge[:,1] *= -1
        elif shape == 'strip':
            x = 0.5
            LSHP.polyedge = np.array([[-x,-1], [x, -1], [x, 1], [-x, 1],
                                      [-x, -1]])
        elif shape == 'wide':
            x = 2
            LSHP.polyedge = np.array([[-x,-1], [x, -1], [x, 1], [-x, 1],
                                      [-x, -1]])
        elif shape == 'sine':
            amplitude = 0.3
            w = 0.5
            x = amplitude*np.sin(3*np.pi*np.linspace(0,1,20))
            y = np.linspace(-1,1,20)

            LSHP.polyedge = np.zeros((41,2))
            LSHP.polyedge[:20,0] = x + w
            LSHP.polyedge[:20,1] = y[::-1]
            LSHP.polyedge[20:40,0] = x - w
            LSHP.polyedge[20:40,1] = y
            LSHP.polyedge[40,:] = [x[0]+w, y[-1]]

        elif isinstance(shape, float):
            theta = np.linspace(0, 2*np.pi, 100)
            b = np.sqrt(1 - shape**2)
            LSHP.polyedge = np.array([b*np.cos(theta), np.sin(theta)]).T
        else:
            # 'assert TypeError, ...' always passed silently; raise instead
            raise TypeError('Type %s is not supported' % shape)

        return LSHP

    @staticmethod
    def make_LEAF(leafName, density, lattice, LSHP, yplot=False, angle=np.pi/3.,
                  noise=0.0, zoom_factor=1.0, shapeplot=False, stats=False,
                  trimming_percentile=100):
        """ Create a new network with given name, size, lattice type etc.
        noise is added as a percentage of mean bond length
        to the triangular and square lattices
        Parameters:
            leafName: LEAF class name
            density: proportional to square root of number of nodes
            lattice: how nodes are distributed in the plane
            LSHP: leaf shape (LeafShape class) instance
            yplot: option to plot the Voronoi tessellation
            angle: only used for lattice = 'triangle'
            noise: determines how much nodes will differ from the lattice
            zoom_factor: 
scale factor applied to the node coordinates
                (useful when loading a lattice from a file)
            shapeplot: option to plot leaf shape in its own plot
            stats: prints initial number of nodes and bonds
            trimming_percentile: bond length percentile above which bonds are
                removed. Critical for the square leafshape so there are no
                long edges along the leaf shape boundary.
        """
        def verts_plt_path(vertex):
            """ Returns a matplotlib Path object describing the polygon defined
            by vertices.
            """
            # Set up polygon
            verts = np.zeros((vertex.shape[0] + 1, vertex.shape[1]))
            verts[:-1,:] = vertex
            #verts[-1,:] = cycle.coords[0,:]

            codes = pltth.Path.LINETO*np.ones(verts.shape[0])
            codes[0] = pltth.Path.MOVETO
            codes[-1] = pltth.Path.CLOSEPOLY

            return pltth.Path(verts, codes)

        def replace_nan_by_avg(ar):
            # replace all NaNs by the average of the finite entries
            avg = ar[np.isfinite(ar)].mean()
            ar[np.isnan(ar)] = avg

            return ar

        def polygon_area(coords):
            """ Return the area of a closed polygon (shoelace formula)
            """
            Xs = coords[:,0]
            Ys = coords[:,1]

            # Ignore orientation
            return 0.5*abs(sum(Xs[:-1]*Ys[1:] - Xs[1:]*Ys[:-1]))

        LEAF = LFCLSS.Topology(leafName, lattice)

        if lattice == 'yjunc':
            Mnei = 2
            X = np.linspace(-1.5, 1.5, num=density)
            Y = np.zeros(density)

            LEAF.height = X[1] - X[0]

            Y2 = np.arange(LEAF.height, 1, LEAF.height)
            X2 = X[int(len(X)/3)]*np.ones(len(Y2))

            maxlength = LEAF.height*1.01
            VertexM = np.zeros((density + len(Y2), 2))
            VertexM[:, 0] = np.concatenate((X, X2))
            VertexM[:, 1] = np.concatenate((Y, Y2))

        elif lattice == 'xjunc':
            Mnei = 2
            X = np.linspace(-1.5, 1.5, num=density)
            Y = np.zeros(density)

            LEAF.height = X[1] - X[0]

            Y2 = np.arange(LEAF.height, 1, LEAF.height)
            X2 = X[int(len(X)/3)]*np.ones(len(Y2))

            Y3 = np.arange(-LEAF.height, -1, -LEAF.height)
            X3 = X[int(len(X)/3)]*np.ones(len(Y3))

            maxlength = LEAF.height*1.01
            VertexM = np.zeros((density + len(Y2) + len(Y3), 2))
            VertexM[:, 0] = np.concatenate((X, X2, X3))
            VertexM[:, 1] = np.concatenate((Y, Y2, Y3))

        elif lattice == 'hjunc':
            X = np.linspace(-1.5, 1.5, num=density)
            Y = np.zeros(density)

            LEAF.height = X[1] - X[0]

            Y2 = np.arange(LEAF.height, 1, LEAF.height)
            X2 = X[int(len(X)/3)]*np.ones(len(Y2))

            Y3 = np.arange(-LEAF.height, -1, -LEAF.height)
            X3 = X[int(len(X)/3)]*np.ones(len(Y3))

            Y4 = np.arange(LEAF.height, 1, LEAF.height)
            X4 = X[int(len(X)/3) + 4]*np.ones(len(Y4))

            Y5 = np.arange(-LEAF.height, -1, -LEAF.height)
            X5 = X[int(len(X)/3) + 4]*np.ones(len(Y5))

            maxlength = LEAF.height*1.01
            VertexM = np.zeros((density + len(Y2) + len(Y3) + len(Y4) + len(Y5), 2))
            VertexM[:, 0] = np.concatenate((X, X2, X3, X4, X5))
            VertexM[:, 1] = np.concatenate((Y, Y2, Y3, Y4, Y5))

        # Generate Lattice
        elif lattice == 'random':
            # We generate a lattice from a Delaunay triangulation
            # of random points on the plane
            n_points = int(0.5*density**2)
            VertexM = np.random.random((n_points, 2))*2 + np.array([-1,-1])

            LEAF.height = max(VertexM[:,1]) - min(VertexM[:,1])

            maxlength = None

        elif lattice == 'triangle':
            x, y = np.meshgrid(np.linspace(-1,1,int(np.sqrt(density))),
                               np.linspace(-1,1,int(np.sqrt(density))))
            x[::2, :] += (x[0,1] - x[0,0])/2

            if noise > 0.0:
                # move positions around randomly
                x += noise*3.2/density*(2*np.random.random(x.shape) - 1)
                y += noise*3.2/density*(2*np.random.random(y.shape) - 1)

            # allocate VertexM before filling it (it was previously used
            # before assignment)
            VertexM = np.zeros((x.size, 2))
            VertexM[:,0] = x.flatten()
            VertexM[:,1] = y.flatten()

        elif lattice == 'line':
            X = np.linspace(-1.5, 1.5, num=density)
            Y = np.zeros(density)

            LEAF.height = X[1] - X[0]

            maxlength = 
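# For the 'random' lattice the bond list is exactly the Delaunay edge list:
# scipy's Voronoi exposes it as ridge_points, since each Voronoi ridge
# separates two generating points and those pairs are Delaunay neighbours.
# Standalone sketch:
import numpy as np
from scipy.spatial import Voronoi

pts = np.random.random((20, 2)) * 2 - 1
vor = Voronoi(pts)
bonds = vor.ridge_points.copy()          # (n_bonds, 2) node index pairs
lens = np.linalg.norm(pts[bonds[:, 0]] - pts[bonds[:, 1]], axis=1)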
LEAF.height*1.01\n VertexM = np.zeros((density, 2))\n VertexM[:, 0] = X\n VertexM[:, 1] = Y\n\n elif lattice == 'square':\n x = np.linspace(-1, 1, density)\n y = np.linspace(-1, 1, density)\n\n maxlength = (x[1] - x[0])*(1.01 + 2*noise)\n\n x, y = [a.flatten() for a in np.meshgrid(x,y)]\n\n if noise > 0.0:\n # move positions around randomly\n x += noise*3.2/density*(2*np.random.random(x.shape) - 1)\n y += noise*3.2/density*(2*np.random.random(y.shape) - 1)\n\n VertexM = np.array([x, y]).T\n\n elif lattice == 'rect':\n x = np.linspace(0, 2.5, density)\n y = np.linspace(-1.05, 1.05, 2*density)\n\n maxlength = (x[1] - x[0])*1.01\n\n X, Y = np.meshgrid(x, y)\n\n X = np.reshape(X, (2*density**2, 1))\n Y = np.reshape(Y, (2*density**2, 1))\n\n x = X[:,0]\n y = Y[:,0]\n\n VertexM = np.array([x, y]).T\n\n else:\n # load lattice from text file\n VertexM = np.loadtxt(lattice, delimiter=',')\n n_points = VertexM.shape[0]\n\n VertexM *= 2.42\n VertexM += np.array([1.2, 0])\n\n LEAF.height = max(VertexM[:,1]) - min(VertexM[:,1])\n\n VertexM *= zoom_factor\n\n maxlength = None\n\n #VertexM[:,0] -= min(VertexM[:,0]);\n #VertexM[:,1] -= np.mean(VertexM[:,1]);\n\n xyleaf = LSHP.polyedge.T\n\n # change com of leafshape to mid node of\n # network if leafshape is a circle.\n '''if LSHP.comment == 'circle' or LSHP.comment == 'hexagon':\n com = VertexM.mean(axis=0)\n central_i = np.argmin(np.linalg.norm(VertexM - com, axis=1))\n central = VertexM[central_i]\n\n lshape = xyleaf.T\n lshape -= lshape[:-1,:].mean(axis=0)\n lshape += central\n\n xyleaf = lshape.T'''\n\n # remove vertices that are outside of the shape\n\n # Voronoi tesselation gives bonds directly\n vor = Voronoi(VertexM)\n BondM = vor.ridge_points.copy()\n\n # nxutils are deprecated\n path = verts_plt_path(xyleaf.T)\n Kall = path.contains_points(VertexM)\n\n orig_indices = np.where(Kall)[0]\n RestVertices = np.where(np.logical_not(Kall))[0]\n VertexM = VertexM[Kall,:]\n\n # remove all bonds that connect to removed vertices\n BondM = BondM[Kall[BondM[:,0]] & Kall[BondM[:,1]], :]\n\n # update indices\n\n # this map is the inverse of orig_indices\n #index_map = -np.ones(Kall.shape)\n #index_map[orig_indices] = arange(orig_indices.shape[0])\n\n # equivalent to the above but shorter\n new_indices = np.cumsum(Kall) - 1\n\n BondM[:,0] = new_indices[BondM[:,0]]\n BondM[:,1] = new_indices[BondM[:,1]]\n\n #remove outer higher length\n vecX = np.zeros(BondM.shape)\n vecX[:,0] = VertexM[BondM[:,0],0]\n vecX[:,1] = VertexM[BondM[:,1],0]\n vecY = np.zeros(BondM.shape)\n vecY[:,0] = VertexM[BondM[:,0],1]\n vecY[:,1] = VertexM[BondM[:,1],1]\n\n lens = np.sqrt(np.diff(vecX)**2 + np.diff(vecY)**2)\n if maxlength == None:\n maxlength = np.percentile(lens, trimming_percentile)\n\n K = (lens <= maxlength);\n\n BondM = BondM[np.squeeze(K),:].copy();\n\n Np2 = VertexM.shape[0]; #actual number of nodes\n LBondsM = BondM.shape[0]\n if stats:\n print('Number of initial nodes: %d' % Np2)\n print('Number of initial bonds: %d' % LBondsM)\n #BondM = sort(BondM, axis=1)\n\n #construct neighbor list\n # We never need this and it's buggy!\n #NeighM = None#neighborsF(BondM,Mnei,Np2)\n\n #figure out which bond belongs to which Voronoi ridge\n ridge_lens = np.zeros(BondM.shape[0])\n for i, (u, v) in enumerate(BondM):\n u, v = orig_indices[u], orig_indices[v]\n\n ridge_inds = np.where(np.all(vor.ridge_points == [u, v], axis=1))[0]\n\n if ridge_inds.size == 0:\n ridge_lens[i] = np.nan\n print( \"Error: triangulation bond not in original voronoi tesselation\")\n continue\n\n ridge_ind = 
ridge_inds[0]\n\n # find length of Voronoi ridge\n ridge_verts = vor.ridge_vertices[ridge_ind]\n\n if -1 in ridge_verts:\n # one is infinity, length is undefined\n ridge_lens[i] = np.nan\n else:\n ridge_lens[i] = np.linalg.norm(vor.vertices[ridge_verts[0]]\n - vor.vertices[ridge_verts[1]])\n\n ridge_lens = replace_nan_by_avg(ridge_lens)\n\n # figure out what the area of each Voronoi cell is\n cell_areas = np.zeros(VertexM.shape[0])\n for i in range(VertexM.shape[0]):\n region = vor.point_region[orig_indices[i]]\n region_verts = vor.regions[region]\n\n if -1 in region_verts:\n cell_areas[i] = np.nan\n else:\n cell_verts = vor.vertices[region_verts]\n # make polygon closed\n cell_verts = np.vstack((cell_verts, cell_verts[0,:]))\n cell_areas[i] = polygon_area(cell_verts)\n\n cell_areas = replace_nan_by_avg(cell_areas)\n\n # find leftmost vertex and make it the zeroth one\n tempm = min(VertexM[:,0]);\n imin = np.argmin(VertexM[:,0]);\n Kleft = np.nonzero(abs(tempm -VertexM[:,0]) < 1e-6)\n isortleft = np.argsort(VertexM[Kleft[0],1]);\n\n mid_elem = isortleft[int(len(isortleft)/2.0)]\n imin = [Kleft[0][mid_elem]]\n\n # swap vertices\n VertexM[imin,:], VertexM[0, :] = VertexM[0,:], VertexM[imin,:]\n cell_areas[imin], cell_areas[0] = cell_areas[0], cell_areas[imin]\n\n # swap Bonds\n zero_entries = (BondM == 0)\n min_entries = (BondM == imin[0])\n\n BondM[zero_entries] = imin[0]\n BondM[min_entries] = 0\n\n # sort Bonds\n BondM.sort(axis=1)\n\n #set structure\n LEAF.Vertex = VertexM\n LEAF.RestVertices = RestVertices\n LEAF.Bond = BondM\n LEAF.RidgeLengths = ridge_lens\n LEAF.CellAreas = cell_areas\n LEAF.Voronoi = vor\n\n #plot (optional)\n if yplot:\n voronoi_plot_2d(vor)\n plt.plot(xyleaf[0,:], xyleaf[1,:])\n\n plt.show()\n\n return LEAF\n\n # simulation\n def flow_flux_en_weights(self):\n \"\"\" Return the weights for flows, fluxes, and energies\n \"\"\"\n if self.weights_mode == 'plant':\n self.flow_wts = self.intersection_lens/self.bond_lens\n self.flux_wts = 1./self.bond_lens\n self.en_wts = self.intersection_lens/self.bond_lens\n self.cost_wts = self.intersection_lens*self.bond_lens\n elif self.weights_mode == 'animal':\n self.flow_wts = 1./self.bond_lens\n self.flux_wts = 1./self.bond_lens\n self.en_wts = 1./self.bond_lens\n self.cost_wts = self.bond_lens\n elif self.weights_mode == 'none':\n # Use this if C0 is specified and already includes the lengths\n self.flow_wts = 1.0\n self.flux_wts = 1.0\n self.en_wts = 1.0\n self.cost_wts = 1.0\n else:\n print ('Warning: neither plant nor animal, using unit weights')\n self.flow_wts = 1.0\n self.flux_wts = 1.0\n self.en_wts = 1.0\n self.cost_wts = 1.0\n\n def Q2_avg_vector(self, C, ds):\n CM = self.C_matrix_sparse(C)\n G = self.G_matrix_sparse(CM)[1:,1:].tocsc()\n\n if ds.shape[1] > 1:\n Qsqr = np.sum((C[:,np.newaxis]*self.I_mat_red.dot(\n scipy.sparse.linalg.spsolve(G, ds[1:,:])))**2, axis=1)\n else:\n Qsqr = (C*self.I_mat_red.dot(\n scipy.sparse.linalg.spsolve(G, ds[1:,:])))**2\n\n Qsqr /= ds.shape[1]\n\n return Qsqr\n\n def set_sources(self, inputs, n_sources, sourceinds):\n self.sources = []\n self.n_sources = n_sources\n\n self.inputs = inputs\n if self.inputs == 'none':\n self.n_sources = 0\n self.sources = []\n return self.sources\n if sourceinds is not None:\n self.sources = sourceinds\n self.n_sources = len(self.sources)\n return self.sources\n\n if self.inputs == 'center':\n A = np.array([[0,0]])\n self.n_sources = 1\n elif self.inputs == 'upper_corners':\n x = [-1, 1]\n y = [1, 1]\n A = np.array([x,y]).T\n elif self.inputs == 
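# The Voronoi cell areas above come from the shoelace formula implemented in
# polygon_area: A = (1/2) |sum_i (x_i * y_{i+1} - x_{i+1} * y_i)| over a
# closed vertex list. Quick check on a unit square (closed polygon, area 1):
import numpy as np

sq = np.array([[0, 0], [1, 0], [1, 1], [0, 1], [0, 0]], dtype=float)
Xs, Ys = sq[:, 0], sq[:, 1]
area = 0.5 * abs(np.sum(Xs[:-1] * Ys[1:] - Xs[1:] * Ys[:-1]))
assert np.isclose(area, 1.0)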
'multi_sources': #sources around circle perimeter\n x = [np.cos(a*2*np.pi/n_sources) for a in\n np.arange(0,self.n_sources)]\n y = [np.sin(a*2*np.pi/n_sources) for a in\n np.arange(0,self.n_sources)]\n A = np.array([x,y]).T\n elif self.inputs == 'line_source':\n deltax = 2/(n_sources+1)\n x = [deltax*(i-(n_sources-1)/2) for i in\n range(self.n_sources)]\n y = np.ones_like(x)*max(self.LEAF.Vertex[:,1])\n A = np.array([x,y]).T\n elif self.inputs == 'line_source_wide':\n x = [2*(1+i)/(n_sources+1)-1 for i in range(n_sources)]\n y = np.ones_like(x)*max(self.LEAF.Vertex[:,1])\n A = np.array([x,y]).T\n else:\n raise ValueError('Given source term not implemented')\n\n for i in range(self.n_sources):\n distsq = (self.LEAF.Vertex[:,0] - A[i,0])**2 + \\\n (self.LEAF.Vertex[:,1] - A[i,1])**2\n self.sources.append(np.argmin(distsq))\n\n def set_sinks(self, outputs, n_sinks, sink_fraction, sinkinds):\n self.outputs = outputs\n if sinkinds is not None:\n self.sinks = sinkinds\n self.n_sinks = len(self.sinks)\n return self.sinks\n\n self.n_sinks = n_sinks\n if sink_fraction != None:\n self.n_sinks = int(sink_fraction*self.verts)\n\n self.sinks = []\n if self.outputs == 'random':\n \"\"\"Randomly distributed sinks with Gaussian fluctuations\"\"\"\n\n p = np.ones(self.verts)\n p[self.sources] = 0\n p /= self.verts - self.n_sources #Uniform probability for sinks\n self.sinks = np.random.choice(self.verts, size=self.n_sinks, p=p,\n replace=False)\n return self.sinks\n if self.outputs == 'grid':\n \"\"\"Sinks chosen closest to a grid\"\"\"\n\n r = self.char_length\n x = np.arange(-1,1,r)\n #y = np.arange(-1,1,r)\n X_all = np.zeros((len(x)**2,2))\n x0 = []\n y0 = []\n #this double for loop can be much more efficient using np.meshgrid\n for i in range(len(x)):\n for j in range(len(x)):\n x0.append(x[i])\n y0.append(x[j])\n X_all[:,0] = np.array(x0)\n X_all[:,1] = np.array(y0)\n A = np.array([X_all[i,:] for i in range(len(x0)) if\n (X_all[i,0]**2+X_all[i,1]**2)<(1.40)**2])\n A[:,0] += 1.45\n self.sinks = []\n elif self.outputs == 'line':\n x = [2*(1+i)/(self.n_sinks+1)-1 for i in range(self.n_sinks)]\n y = np.ones_like(x)*min(self.LEAF.Vertex[:,1])\n A = np.array([x,y]).T\n self.sinks = []\n elif self.outputs == 'semicircle':\n assert self.LSHP.comment == 'circle' or self.LSHP.comment == \\\n 'ellipse', 'semicircle requires circular leaf shape (LSHP)'\n x = [np.sin((np.pi/self.n_sinks)*(i-(self.n_sinks-1)/2))\n for i in range(self.n_sinks)]\n y = [-np.cos((np.pi/self.n_sinks)*(i-(self.n_sinks-1)/2))\n for i in range(self.n_sinks)]\n A = np.array([x,y]).T\n self.sinks = []\n elif self.outputs == 'circle':\n assert self.LSHP.comment == 'circle' or \\\n isinstance(self.LSHP.comment, float), \\\n 'semicircle requires circular leaf shape (LSHP)'\n x = [np.sqrt(1-self.LSHP.comment**2) * \\\n np.sin((2*np.pi/self.n_sinks) * \\\n (i-(self.n_sinks-1))) for i in range(self.n_sinks)]\n y = [np.cos((2*np.pi/self.n_sinks)*(i-(self.n_sinks-1)))\n for i in range(self.n_sinks)]\n A = np.array([x,y]).T\n self.sinks = []\n elif self.outputs == 'outer_spaced':\n s = self.perimeter / n_sinks\n A = np.zeros((n_sinks, 2))\n A[0,:] = self.LSHP.polyedge[0,:]\n n = 1 #next node to check\n for i in range(1, n_sinks):\n if np.linalg.norm(self.LSHP.polyedge[n,:] - A[i-1,:]) < s:\n d = np.linalg.norm(self.LSHP.polyedge[n,:] - A[i-1,:])\n n += 1\n while d + np.linalg.norm(self.LSHP.polyedge[n,:] - \\\n self.LSHP.polyedge[n-1,:]) < s:\n d += np.linalg.norm(self.LSHP.polyedge[n,:] - \\\n self.LSHP.polyedge[n-1,:])\n n += 1\n t = 
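# Source and sink placement snaps each ideal coordinate to the nearest
# lattice vertex via an argmin over squared distances, as in the loops
# above. A vectorized equivalent for a whole target array A of shape (m, 2)
# against vertex positions V of shape (n, 2):
import numpy as np

def snap_to_vertices(A, V):
    d2 = ((V[None, :, :] - A[:, None, :]) ** 2).sum(axis=2)   # (m, n)
    return d2.argmin(axis=1)                                  # vertex indices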
self.LSHP.polyedge[n,:] - self.LSHP.polyedge[n-1,:]
                    A[i,:] = self.LSHP.polyedge[n-1,:] + (s-d) * \
                        (t/np.linalg.norm(t))
                else:
                    t = self.LSHP.polyedge[n,:] - self.LSHP.polyedge[n-1,:]
                    A[i,:] = A[i-1,:] + (t/np.linalg.norm(t))
        elif self.outputs == 'invtriangle':
            x = np.linspace(-1, 1, self.n_sinks)
            y = np.abs(-2*x) - 0.95
            A = np.array([x,y]).T
            self.sinks = []

        poly = Polygon(self.LSHP.polyedge)
        A = A[[poly.contains(Point(A[n,:])) for n in range(A.shape[0])],:]

        for i in range(A.shape[0]):
            distsq = (self.LEAF.Vertex[:,0] - A[i,0])**2 + \
                (self.LEAF.Vertex[:,1] - A[i,1])**2
            sink = np.argmin(distsq)
            if sink not in self.sources and sink not in self.sinks:
                self.sinks.append(sink)
                self.LEAF.Vertex[sink,:] = A[i,:]

        self.n_sinks = len(self.sinks)

    def simulate_base(self, dt=0.1, timesteps=1e10, converge_en=1e-10,
            plot=False, plot_interval=10, movie_dir='movie', entropy=False):
        """ Simulates the xylem network as a dynamical system
        by integrating dC/dt = f(Q) - C + driving

        Parameters:
            dt: integration time step
            timesteps: number of time steps to integrate
            converge_en: stopping criterion for the relative
                change in energy
            plot: whether to plot the network dynamics
        """

        if plot:
            fig, ax = plt.subplots(1)

        self.ds = self.fluctuation_ensemble()
        self.flow_flux_en_weights()
        q_power = 1./(self.gamma + 1)

        en_last = 0
        k = 0 # frame counter for plotting
        for i in range(1, int(timesteps)):
            # Calculate average of squared currents for BCs given in self.ds
            Q2 = self.Q2_avg_vector(self.C*self.flux_wts, self.ds)

            if entropy:
                if i == 1:
                    self.entropy = np.array([self.graph_entropy()])
                else:
                    self.entropy = np.append(self.entropy,
                                             [self.graph_entropy()], axis=0)

            # Adaptation equation
            self.C += dt*(Q2**q_power - self.C)

            # dissipation
            nonz = self.C > 1e-8
            en = sum(Q2[nonz]/(self.en_wts[nonz]*self.C[nonz]))

            if plot and i % plot_interval == 0:
                print('Frame: %d' % k)

                # make video
                fig.clear()
                ax = fig.add_subplot(111)

                plt.axis('off')
                self.plot(drawspecial=False)
                #self.plot(showscale=True, drawspecial=False)
                '''ax.set_title('$t=%0.1f, \sigma=%0.1f, c=%0.1f$' % (i*dt,
                    self.sigma_ratio, self.cst), fontsize=14)'''
                ax.set_title('$t=%0.1f$' % (i*dt), fontsize=18)
                fig.savefig(movie_dir + '/%05d.png' % k, dpi=300)

                self.drawspecial()
                fig.savefig(movie_dir + '/a%05d.png' % k, dpi=300)

                k += 1

            if i > 1000:
                plot = False

            # break when energy converges; i starts at 1, and the warnings
            # filter silences the divide-by-zero on the first comparison,
            # where en_last is still 0
            if en_last == 0:
                with warnings.catch_warnings():
                    warnings.simplefilter('ignore')
                    delta_en = abs((en - en_last)/en_last)
            else:
                delta_en = abs((en - en_last)/en_last)
            if delta_en < converge_en:
                print("Converged")
                break

            en_last = en

            if np.any(np.isnan(self.C)):
                print("Error in simulation \nEnding simulation")
                break # something is wrong and we need to stop

        self.energy_value = en
        #self.entropy_value = mean(self.flow_entropies(C))
        #self.edge_entropy_value = mean(self.edge_entropies(C))
        #self.cycle_number = self.cycles(C)
        #self.cut_bond_energy_value = self.cut_bond_energy(C)
        #self.normal_energy_value = self.energy(C)

        self.time_steps = i
        print('Time steps: ', self.time_steps)

    # loop statistics
    def find_cycles_backup(self):
        """Finds something close to the minimal cycle basis of the network.
        Avoid using.
        Note: this is not necessarily the most efficient implementation.
        The method that <NAME> wrote which 
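# The adaptation loop in simulate_base is explicit Euler on
#     dC/dt = (<Q^2>)**(1/(gamma+1)) - C,
# with <Q^2> recomputed from the current conductances each step. A toy
# version on two parallel pipes carrying a unit total current (gamma = 0.5
# assumed here); the positive feedback drives one pipe to take all the flow:
import numpy as np

C, gamma, dt = np.array([0.6, 0.4]), 0.5, 0.1
for _ in range(2000):
    Q = C / C.sum()                        # current split of a unit flow
    C += dt * ((Q**2) ** (1.0 / (gamma + 1)) - C)
# C -> approximately [1, 0]: the slightly thicker pipe wins.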
prunes all trees from the\n graph and finds loops by always taking the path of the leftmost\n edge can be around 10x faster. However, using nx.cycle_basis\n into nx.minimum_cycle_basis:\n 1) is easier to use\n 2) requires substantially less code and fewer libraries\n 3) is fairly intuitive as a change of basis\n Returns:\n self.cs: a list containing the lists of nodes in each cycle\n \"\"\"\n if hasattr(self, 'cycles'):\n return self.cycles\n else:\n self.to_networkx()\n\n #Find indices of nodes in the cycle basis, not necessarily minimal\n cycles = nx.cycle_basis(self.G)\n keep = np.unique([node for cycle in cycles for node in cycle])\n\n #Get rid of every non-cycle node\n G2 = self.G.copy()\n G2.remove_nodes_from(np.delete(np.arange(self.verts), keep))\n\n #Find the minimum cycle basis\n cycles = nx.minimum_cycle_basis(G2)\n\n #Order the nodes in each cycle\n for i in range(len(cycles)):\n new_cycle = [cycles[i][0]]\n\n searching = True\n while searching:\n neighbors = self.G.neighbors(new_cycle[-1])\n for neighbor in neighbors:\n if neighbor in cycles[i] and neighbor not in new_cycle:\n new_cycle.append(neighbor)\n break\n\n if len(new_cycle) == len(cycles[i]):\n searching = False\n\n cycles[i] = new_cycle\n\n self.cycles = cycles\n return self.cycles\n\n def find_cycles(self, thr=1e-6):\n \"\"\" A fast algorithm for finding the regions of a spatial network,\n where a region is an area enclosed by a loop that contains no\n other nodes of edges. The algorithm retains all nodes in a cycle\n basis of G, removes isolated loops (where all nodes have degree 2),\n splits the graph into connected component subgraphs, then finds all\n loops in each subgraph. Loops containing regions are found using a\n leftmost edge search starting from nodes of degree > 2. 
Nodes of\n degree > 2 on the outside of the subgraph will yield the entire\n exterior of the subgraph by a leftmost edge search when going\n clockwise, so this is done first to avoid clockwise searching when\n starting from a node on the exterior of the subgraph.\n Returns:\n self.cycles: a list of lists of nodes in each cycle (ordered, not\n oriented either cw or ccw)\n \"\"\"\n if hasattr(self, 'cycles'):\n return self.cycles\n else:\n def find_left_node(G, start, end):\n neighbors = list(G.neighbors(end))\n neighbors.remove(start)\n\n if len(neighbors) == 1:\n return neighbors[0]\n else:\n orig = (self.LEAF.Vertex[end,:] - \\\n self.LEAF.Vertex[start,:]).dot(np.array([1,1j]))\n\n neighs = (self.LEAF.Vertex[neighbors,:] - \\\n self.LEAF.Vertex[end,:]).dot(np.array([1,1j]))\n\n angles = np.angle(neighs/orig)\n return neighbors[np.argmax(angles)]\n\n def find_loop(G, start, end):\n loop = [start, end]\n while loop[-1] != loop[0]:\n loop.append(find_left_node(G, loop[-2], loop[-1]))\n return loop\n\n def find_exterior(sg):\n hull = ConvexHull(self.LEAF.Vertex[sg.nodes(),:])\n hullverts = np.array(sg.nodes())[hull.vertices]\n hullstarts = [n for n in hullverts if sg.degree(n) == 2]\n\n if len(hullstarts) == 0:\n raise IndexError('loop has no exterior nodes of deg 2')\n\n neighbors = list(sg.neighbors(hullstarts[0]))\n #cross product negative if sequence is going clockwise\n a = self.LEAF.Vertex[hullstarts[0],:] - \\\n self.LEAF.Vertex[neighbors[0],:]\n b = self.LEAF.Vertex[neighbors[1],:] - \\\n self.LEAF.Vertex[hullstarts[0],:]\n if np.cross(a,b) < 0: #order is clockwise\n outer = [neighbors[0], hullstarts[0], neighbors[1]]\n else:\n outer = [neighbors[1], hullstarts[0], neighbors[0]]\n\n #Find clockwise oriented loop around the entire subgraph\n next = find_left_node(sg, outer[-2], outer[-1])\n while next != outer[0]:\n outer.append(next)\n next = find_left_node(sg, outer[-2], outer[-1])\n return outer\n\n self.to_networkx(thr=thr)\n\n #Find indices of nodes in the cycle basis, not necessarily minimal\n\n cycles = nx.cycle_basis(self.G)\n if len(cycles) == 0:\n self.cycles = []\n return self.cycles\n keep = [node for cycle in cycles for node in cycle]\n\n #Get rid of every non-cycle node\n G2 = self.G.copy()\n G2.remove_nodes_from(np.delete(np.arange(self.verts),\n np.unique(keep)))\n\n ''' This algorithm fails when a single node is shared between\n two exterior loops, as in:\n /\\/\\\n \\/\\/\n '''\n\n self.cycles = []\n components = list(nx.connected_components(G2))\n\n #Find isolated cycles first, remove them from G2 before proceeding\n for component in components:\n if np.all([G2.degree(n) == 2 for n in component]):\n start = list(component)[0]\n loop = find_loop(G2, start, next(G2.neighbors(start)))\n self.cycles.append(loop)\n G2.remove_nodes_from(component)\n components = list(nx.connected_components(G2))\n\n while len(components) > 0:\n #find nodes on the exterior of the graph using a convex hull)\n sg = G2.subgraph(components[0])\n ext = find_exterior(sg)\n\n #start at an exterior node of degree 3, find a loop\n start = [n for n in ext if sg.degree(n) > 2][0]\n startinext = ext.index(start)\n if startinext == 0: end = ext[-1]\n else: end = ext[startinext-1]\n\n loop = find_loop(sg, start, end)\n self.cycles.append(loop)\n\n #remove every exterior node and edge that is part of that loop\n remnodes = [n for n in loop if n in ext and G2.degree(n) == 2]\n extedges = [(ext[i], ext[i+1]) for i in range(len(ext)-1)]\n extedges.append((ext[-1], ext[0]))\n loopedges = [(loop[i+1], loop[i]) 
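# find_left_node performs the turn selection of a counter-clockwise
# ("leftmost edge") face walk: directions are encoded as complex numbers and
# the neighbour whose turn angle relative to the incoming direction is
# largest wins. Standalone sketch with explicit coordinates:
import numpy as np

def leftmost(prev_xy, cur_xy, neighbor_xys):
    orig = complex(*(cur_xy - prev_xy))
    turns = [np.angle(complex(*(n - cur_xy)) / orig) for n in neighbor_xys]
    return int(np.argmax(turns))           # index of the leftmost neighbour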
for i in range(len(loop)-1)]\n loopedges.append((loop[0], loop[-1]))\n remedges = [e for e in loopedges if e in extedges]\n\n G2.remove_nodes_from(remnodes)\n G2.remove_edges_from(remedges)\n\n #trim any trees that appear\n degs = list(G2.degree(G2.nodes()))\n degcheck = [d[1] == 1 for d in degs]\n while np.any(degcheck):\n G2.remove_nodes_from([degs[i][0] for i in np.where(degcheck)[0]])\n degs = list(G2.degree(G2.nodes()))\n degcheck = [d[1] == 1 for d in degs]\n\n #check for isolated loops\n components = list(nx.connected_components(G2))\n for component in components:\n if np.all([G2.degree(n) == 2 for n in component]):\n start = list(component)[0]\n loop = find_loop(G2, start, next(G2.neighbors(start)))\n self.cycles.append(loop)\n G2.remove_nodes_from(component)\n components = list(nx.connected_components(G2))\n\n return self.cycles\n\n '''for component in components:\n #find nodes on the exterior of the graph using a convex hull)\n sg = G2.subgraph(component)\n t = time.time()\n hull = ConvexHull(self.LEAF.Vertex[sg.nodes(),:])\n hullverts = np.array(sg.nodes())[hull.vertices]\n print(time.time()-t)\n hullstarts = [n for n in hullverts if sg.degree(n) == 2]\n\n #unable to handle loops with no convex hull points of deg 2\n if len(hullstarts) == 0:\n print('exterior loop search failed')\n raise IndexError('loop has no exterior nodes of deg 2')\n\n neighbors = list(sg.neighbors(hullstarts[0]))\n #cross product negative if sequence is going clockwise\n a = self.LEAF.Vertex[hullstarts[0],:] - \\\n self.LEAF.Vertex[neighbors[0],:]\n b = self.LEAF.Vertex[neighbors[1],:] - \\\n self.LEAF.Vertex[hullstarts[0],:]\n if np.cross(a,b) < 0: #order is clockwise\n outer = [neighbors[0], hullstarts[0], neighbors[1]]\n else:\n outer = [neighbors[1], hullstarts[0], neighbors[0]]\n\n #Find clockwise oriented loop around the entire subgraph\n next = find_left_node(sg, outer[-2], outer[-1])\n while next != outer[0]:\n outer.append(next)\n next = find_left_node(sg, outer[-2], outer[-1])\n\n #Begin loop searches from nodes of degree > 2\n starts = [n for n in sg.nodes() if sg.degree(n) > 2]\n for start in starts:\n neighbors = list(sg.neighbors(start))\n\n #if start is on the outer loop, remove the clockwise search\n if start in outer:\n i = outer.index(start)\n if i != len(outer)-1:\n neighbors.remove(outer[i+1])\n elif i == len(outer)-1:\n neighbors.remove(outer[0])\n\n for n in neighbors:\n c = [start, n]\n next = find_left_node(sg, c[-2], c[-1])\n while next != c[0]:\n c.append(next)\n next = find_left_node(sg, c[-2], c[-1])\n\n #Assert that the loop contains no nodes\n poly = Polygon(self.LEAF.Vertex[c,:])\n try:\n assert not np.any([poly.contains(Point(\n self.LEAF.Vertex[n,:])) for n in sg.nodes()])\n if set(c) not in cyclesets:\n cyclesets.append(set(c))\n new_cycles.append(c)\n except AssertionError:\n print(\"overlapping lines made loop search fail\")\n\n self.cycles = new_cycles\n return self.cycles'''\n\n def n_cycles(self, thr=1e-6):\n if hasattr(self, 'cycles'):\n return len(self.cycles)\n else:\n self.to_networkx(thr=thr)\n cycles = nx.cycle_basis(self.G)\n return len(cycles)\n\n def count_per_loop(self, type='sinks', thr=1e-4):\n \"\"\"Counts the number of special nodes (sources, sinks, etc) inside\n each loop\n \"\"\"\n self.find_cycles(thr=thr)\n\n counts = np.zeros(len(self.cycles))\n polygons = [Polygon(self.LEAF.Vertex[cycle,:]) for cycle in self.cycles]\n if type == 'sinks':\n inds = self.sinks\n elif type == 'basins':\n inds = self.basins\n elif type == 'source':\n inds = 
self.sources\n else:\n raise TypeError(type + ' not supported')\n\n for i in inds:\n loops = []\n inloop = False\n for p in range(len(polygons)):\n if polygons[p].intersects(Point(self.LEAF.Vertex[i,:])):\n loops.append(p)\n inloop = True\n if inloop:\n counts[loops] += 1/len(loops)\n\n return counts\n\n def tree_lengths(self):\n cycles = self.find_cycles()\n if len(cycles) == 0:\n return None\n polygons = [Polygon(self.LEAF.Vertex[cycle,:]) for cycle in cycles]\n lengths = np.zeros(len(cycles))\n\n G2 = self.G.copy()\n\n #Find end nodes, assert no isolated points\n node_list = np.array(G2.nodes())\n degs = np.sum(nx.adjacency_matrix(G2, node_list, weight=None),\n axis=1)\n assert not np.any(degs == 0)\n assert nx.number_of_selfloops(G2) == 0\n\n ends = node_list[np.where(degs == 1)[0]]\n\n while len(ends) > 0:\n for i in ends:\n for p in range(len(polygons)):\n if polygons[p].intersects(Point(self.LEAF.Vertex[i,:])):\n lengths[p] += np.linalg.norm(self.LEAF.Vertex[i,:] -\n self.LEAF.Vertex[next(G2.neighbors(i)),:])\n break\n\n G2.remove_nodes_from(ends)\n\n node_list = np.array(G2.nodes())\n\n degs = np.sum(nx.adjacency_matrix(G2, node_list, weight=None),\n axis=1)\n ends = node_list[np.where(degs == 1)[0]]\n\n return lengths\n\n def loop_perimeters(self):\n self.find_cycles()\n perimeters = [Polygon(self.LEAF.Vertex[cycle,:]).length for cycle in\n self.cycles]\n return np.array(perimeters)\n\n def loop_qhull_perimeters(self):\n self.find_cycles()\n lengths = [Polygon(self.LEAF.Vertex[cycle,:]).convex_hull.length for\n cycle in self.cycles]\n return np.array(lengths)\n\n def loop_areas(self, thr=1e-4):\n self.find_cycles(thr=thr)\n areas = [Polygon(self.LEAF.Vertex[cycle,:]).area for cycle in\n self.cycles]\n return np.array(areas)\n\n def loop_qhull_areas(self):\n self.find_cycles()\n areas = [Polygon(self.LEAF.Vertex[cycle,:]).convex_hull.area for cycle\n in self.cycles]\n return np.array(areas)\n\n def loop_resistance(self,):\n self.find_cycles()\n CM = self.C_matrix(self.C)\n res = []\n for cycle in self.cycles:\n r = 0\n for i in range(len(cycle)-1):\n r += 1/CM[cycle[i], cycle[i+1]]\n res.append(r)\n return np.array(res)\n\n def loop_avg_edge_resistance(self,):\n res = self.loop_resistance()\n lens = np.array([len(cycle) for cycle in self.cycles])\n return res / lens\n\n def loop_max_edge_resistance(self,):\n self.find_cycles()\n CM = self.C_matrix(self.C)\n mres = np.zeros(len(self.cycles))\n for c in range(len(self.cycles)):\n for i in range(len(self.cycles[c])-1):\n if (1/CM[self.cycles[c][i], self.cycles[c][i+1]]) > mres[c]:\n mres[c] = 1/CM[self.cycles[c][i], self.cycles[c][i+1]]\n return mres\n\n def resistance_distance_mx(self, thr=0):\n #invR = scipy.sparse.linalg.inv(self.R_matrix_sparse(CM))\n G = self.G_matrix_sparse(self.C_matrix_sparse(self.C))\n invR = np.linalg.pinv(G.todense())\n\n Reff = np.zeros(invR.shape)\n i, j = np.triu_indices(self.verts)\n Reff[i,j] = invR[i,i] + invR[j,j] - invR[i,j] - invR[j,i]\n\n Reff += Reff.T\n x = np.amin(Reff[Reff > 0])\n\n Reff[Reff < x] = x\n return Reff\n\n def resistance_distances(self,):\n Reff = self.resistance_distance_mx()\n return Reff[self.sources, self.sinks], \\\n Reff[np.ix_(self.basins, self.sinks)]\n\n def remove_trees(self, thr=1e-4):\n \"\"\" Warning: this trashes the xylem object it's used on\n \"\"\"\n degs = self.degrees(thr=thr)\n\n while np.sum(degs == 1) > 0:\n rem = np.argwhere(degs == 1)\n self.LEAF.Vertex = np.delete(self.LEAF.Vertex, rem, axis=0)\n\n edges = np.argwhere(np.any(np.isin(self.LEAF.Bond, 
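# resistance_distance_mx uses the standard identity
#     R_eff(i, j) = L+_(ii) + L+_(jj) - L+_(ij) - L+_(ji),
# where L+ is the Moore-Penrose pseudoinverse of the weighted graph
# Laplacian. Check on a triangle of unit conductances, where each pair sees
# a direct edge in parallel with a two-edge path: R_eff = 1*2/(1+2) = 2/3.
import numpy as np

L = np.array([[ 2, -1, -1],
              [-1,  2, -1],
              [-1, -1,  2]], dtype=float)
Lp = np.linalg.pinv(L)
Reff01 = Lp[0, 0] + Lp[1, 1] - Lp[0, 1] - Lp[1, 0]
assert np.isclose(Reff01, 2/3)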
rem),axis=1))\n self.LEAF.Bond = np.delete(self.LEAF.Bond, edges, axis=0)\n\n for r in rem[::-1]:\n self.LEAF.Bond = np.where(self.LEAF.Bond > r, self.LEAF.Bond-1,\n self.LEAF.Bond)\n\n self.C = np.delete(self.C, edges)\n\n self.verts = self.LEAF.Vertex.shape[0]\n self.bonds = self.LEAF.Bond.shape[0]\n\n degs = self.degrees(thr=thr)\n\n def remove_trees_nx(self, thr=1e-4):\n self.to_networkx(thr=thr)\n\n G2 = self.G.copy()\n node_list = np.array(G2.nodes)\n degrees = np.sum(nx.adjacency_matrix(G2, node_list, weight=None),\n axis=1)\n remove = node_list[np.where(degrees <= 1)[0]]\n\n while len(remove) > 0:\n G2.remove_nodes_from(remove)\n node_list = np.array(G2.nodes)\n degrees = np.sum(nx.adjacency_matrix(G2, node_list,\n weight=None), axis=1)\n remove = node_list[np.where(degrees <= 1)[0]]\n\n self.G = G2\n\n def tree_fraction(self, len_attr='length', thr=1e-4):\n \"\"\" Return the ratio of edges that are part of a cycle to all edges\n \"\"\"\n G = self.to_networkx(thr=thr)\n\n cy = nx.minimum_cycle_basis(G)\n cy_edges = []\n for cycle in cy:\n for i in range(len(cycle) - 1):\n cy_edges.append(tuple(sorted((cycle[i], cycle[i+1]))))\n\n cy_edges.append(tuple(sorted((cycle[0], cycle[-1]))))\n\n cy_edges = set(cy_edges)\n\n cycle_lengths = [G[u][v][len_attr] for u, v in cy_edges]\n all_lengths = [d[len_attr] for u, v, d in G.edges(data=True)]\n\n return sum(cycle_lengths), sum(all_lengths)\n\n def path_edges(self, start=None, end=None, dir=False):\n ''' Count the number of paths from start to end(s) each edge is\n involved in\n '''\n if start is None:\n start = self.sources[0]\n if end is None:\n end = self.sinks\n if dir:\n G = self.to_networkx(graph='dir')\n else:\n G = self.to_networkx()\n paths = nx.algorithms.all_simple_paths(G, start, end)\n counts = np.zeros(self.LEAF.Bond.shape[0])\n\n ''' This way of counting edges can fail in the very narrow case that\n a path goes around a loop that is closed by a single remaining edge, as\n below:\n start ----->------v\n --> | |\n end -----<------v\n\n This is just going to be one that I have to live with for now\n '''\n for path in paths:\n counts += np.all(np.isin(self.LEAF.Bond, path), axis=1)\n return counts\n\n def path_nodes(self, start=None, end=None, thr=1e-4, dir=False):\n ''' Count the number of paths from start that end at end(s)\n '''\n if start is None:\n start = self.sources[0]\n if end is None:\n end = self.sinks\n if dir:\n G = self.to_networkx(thr=thr, graph='dir')\n else:\n G = self.to_networkx(thr=thr, )\n try:\n paths = nx.algorithms.all_simple_paths(G, start, end)\n except nx.exception.NodeNotFound:\n paths = []\n counts = np.zeros(len(end))\n for path in paths:\n counts[np.where(end == path[-1])] += 1\n return counts[counts > 0]\n\n def smooth(self, thr=1e-4):\n #self.to_networkx(thr=thr, graph='multi')\n assert type(self.G) == nx.classes.multigraph.MultiGraph\n\n pos = nx.get_node_attributes(self.G, 'pos')\n\n c = nx.get_edge_attributes(self.G, 'conductivity')\n l = nx.get_edge_attributes(self.G, 'length')\n w = {e: c[e]**(self.gamma/2)*l[e] for e in self.G.edges(keys=True)}\n nx.set_edge_attributes(self.G, w, 'area')\n\n for n in [x[0] for x in self.G.degree() if x[1] == 2]:\n neigh = [x for x in self.G[n]]\n if len(neigh) == 1:\n continue\n dic = self.G[n]\n # compute effective conductance of smoothed edge\n l = dic[neigh[0]][0]['length'] + dic[neigh[1]][0]['length']\n #l = np.linalg.norm(\n # self.G.nodes[neigh[0]]['pos'] - self.G.nodes[neigh[1]]['pos'])\n c = 
l/(dic[neigh[0]][0]['length']/dic[neigh[0]][0]['conductivity']+\\\n dic[neigh[1]][0]['length']/dic[neigh[1]][0]['conductivity'])\n a = dic[neigh[0]][0]['area'] + dic[neigh[1]][0]['area']\n self.G.add_edge(*neigh, conductivity=c, weight=l, length=l, area=a)\n self.G.remove_edge(n, neigh[0])\n self.G.remove_edge(n, neigh[1])\n self.G.remove_nodes_from(\n [x[0] for x in self.G.degree() if x[1] == 0])\n\n #currently not working\n def top_number_alternative_paths(self, thr=1e-4):\n \"\"\"\n Computes the number of alternative paths (Nap) in the combinatorics sense\n from the Apex to each of the shoreline outlets.\n \"\"\"\n apexid = self.sources[0]\n outlets = np.array(self.sinks)\n\n A = self.adjacency_matrix_asym(thr=thr).T\n epsilon = 10**-15\n\n # To compute Nap we need to find the null space of L==I*-A', where I* is\n # the Identity matrix with zeros for the diagonal entries that correspond\n # to the outlets.\n D = np.ones((A.shape[0],1))\n D[outlets] = 0\n L = np.diag(np.squeeze(D)) - A\n d, v = np.linalg.eig(L)\n d = np.abs(d)\n null_space_v = np.where(np.logical_and(d < epsilon, d > -epsilon))[0]\n print(len(null_space_v))\n print(len(self.sinks))\n\n # Renormalize eigenvectors of the null space to have one at the outlet entry\n vN = np.abs(v[:, null_space_v])\n paths = np.empty((len(null_space_v),2))\n for i in range(vN.shape[1]):\n I = np.where(vN[outlets, i] > epsilon)[0]\n print(I)\n print(vN[outlets[I], i])\n vN[:,i] = vN[:,i] / vN[outlets[I], i]\n paths[i,0] = outlets[I]\n paths[i,1] = vN[apexid, i]\n\n return paths\n\n # plotting\n def plot(self, style='pipes', thr=1e-4, drawspecial=True, nodelabels=False,\n c=[0,0,0], showscale=False, showbounds=False, magn=8, alpha=False,\n ds=None, cmap='plasma', p=None, v=None, ax=None):\n \"\"\"Plot the network after simulating to draw it as a graph or as\n a network with conductances.\n Parameters:\n thr: Threshold conductance. 
Real edges are those with C > thr\n        style:\n        c: 3-element list specifying pipe color in CMY format\n        \"\"\"\n        if ax is None:\n            ax = plt.gca()\n\n        if style == 'pipes':\n            self.plot_conductivities_raw(magn=magn,\n                process=lambda x:\n                    (x/x.max())**(0.5*self.gamma),\n                col=np.array(c),\n                alpha=alpha,\n                ax=ax)\n        elif style == 'loops':\n            self.plot('pipes',\n                      drawspecial=drawspecial,\n                      nodelabels=nodelabels,\n                      showscale=showscale,\n                      showbounds=showbounds,\n                      magn=magn,\n                      alpha=alpha,\n                      ax=ax)\n            self.drawloops(thr=thr, ax=ax, c=c)\n        elif style == 'sticks':\n            G = self.to_networkx(thr=thr)\n            pos = nx.get_node_attributes(G, 'pos')\n            nx.draw_networkx(G, pos, with_labels=False, node_size=6,\n                             node_color='black', ax=ax)\n        elif style == 'arrows':\n            G = self.to_networkx(thr=thr, graph='dir')\n            pos = nx.get_node_attributes(G, 'pos')\n            nx.draw_networkx(G, pos, with_labels=False, node_size=6,\n                             node_color='black', ax=ax)\n        elif style == 'paths':\n            npaths = self.path_edges(dir=True)\n            c = plt.get_cmap('magma')\n            self.plot(c=c(npaths/max(npaths))[:,:-1],\n                      drawspecial=False, alpha=False, ax=ax)\n\n            norm = Normalize(vmin=0,vmax=max(npaths))\n            sm = plt.cm.ScalarMappable(cmap=c, norm=norm)\n            sm.set_array([])\n            plt.colorbar(sm, label='Number of paths containing link')\n        elif style == 'flows':\n            if ds is None:\n                self.ds = self.fluctuation_ensemble()\n                ds = self.ds[:,0]\n            self.flow_flux_en_weights()\n\n            CM = self.C_matrix_sparse(self.C)\n            G = self.G_matrix_sparse(CM)[1:,1:].tocsc()\n\n            p = scipy.sparse.linalg.spsolve(G, ds[1:])\n            Q = (self.C*self.flux_wts)*self.I_mat_red.dot(p)\n\n            start = np.where(Q <= 0, self.LEAF.Bond[:,0], self.LEAF.Bond[:,1])\n            end = np.where(Q > 0, self.LEAF.Bond[:, 0], self.LEAF.Bond[:,1])\n\n            x = self.LEAF.Vertex[start,0]\n            y = self.LEAF.Vertex[start,1]\n            u = self.LEAF.Vertex[end,0] - self.LEAF.Vertex[start,0]\n            v = self.LEAF.Vertex[end,1] - self.LEAF.Vertex[start,1]\n\n            colors = np.zeros((len(self.LEAF.Bond[:,0]),4))\n            colors[:,:3] = c\n            colors[:,3] = (abs(Q) > thr).astype('int')\n\n            linewidths = abs(Q)/100\n            head_widths = 0.02*np.ones_like(Q)\n            head_lengths = np.where(1.5*head_widths > (u**2+v**2)**0.5,\n                                    (u**2+v**2)**0.5,\n                                    1.5*head_widths)\n\n            fast = np.nonzero(np.arange(len(x))*colors[:,3])[0]\n            for f in fast:\n                # draw directly on the target axes; plt.arrow takes no ax kwarg\n                ax.arrow(x[f], y[f], u[f], v[f], width=linewidths[f],\n                         head_width=head_widths[f],\n                         head_length=head_lengths[f],\n                         length_includes_head=True, color=colors[f,:],\n                         zorder=5)\n        elif style == 'pressure':\n            if p is None:\n                if ds is None:\n                    self.ds = self.fluctuation_ensemble()\n                    ds = self.ds[:,0]\n                CM = self.C_matrix_sparse(self.C)\n                G = self.G_matrix_sparse(CM)[1:,1:].tocsc()\n\n                ds = self.fluctuation_ensemble()\n                p = scipy.sparse.linalg.spsolve(G, ds[1:])\n                p -= min(p)\n                p /= max(p)\n            if v is None:\n                v = (np.amin(p), np.amax(p))\n\n            from matplotlib.collections import PolyCollection\n            cmap = cm.get_cmap(cmap)\n            facecolors = cmap((p-v[0])/v[1])\n\n            #plt.scatter(self.LEAF.Vertex[1:,0], self.LEAF.Vertex[1:,1],\n            #            c=facecolors)\n\n            vor = self.LEAF.Voronoi\n\n            rem = self.LEAF.RestVertices\n\n            polys = []\n            plottinginds = np.delete(np.arange(vor.points.shape[0]),\n                                     rem)\n            #plottinginds = np.arange(vor.points.shape[0])\n            ##problem = np.where(np.all(vor.points == self.LEAF.Vertex[0,:],\n            #                   axis=1))[0]\n            plottinginds = np.delete(plottinginds, 0)\n\n            for regionind in vor.point_region[plottinginds]:\n                region = np.array(vor.regions[regionind])\n                if -1 not in region:\n                    polys.append(vor.vertices[region,:])\n                else:\n                    a = region[region >= 0]\n                    polys.append(vor.vertices[a,:])\n\n            
ax.add_collection(PolyCollection(polys,\n                                             facecolors=facecolors,\n                                             alpha=1))\n\n        if drawspecial:\n            self.drawspecial(ax=ax)\n\n        if nodelabels:\n            bonds = self.LEAF.Bond[self.C > thr, :]\n            nodes = np.unique(bonds.flatten())\n            for n in nodes:\n                ax.text(self.LEAF.Vertex[n,0], self.LEAF.Vertex[n,1], str(n))\n\n        if showscale:\n            if self.LSHP.comment == 'circle' or self.LSHP.comment == 'hexagon':\n                x = [-0.8, -0.8 + self.sigma]\n                ax.plot(x, [-1.05]*2, 'k', lw=2)\n                ax.set_ylim([-1.08,1])\n                ax.text(np.mean(x) - 0.03, -1.14, '$\sigma$', fontsize=20)\n            elif self.LSHP.comment == 'square' or \\\n                    self.LSHP.comment == 'triangle':\n                x = [-0.8, -0.8 + self.sigma]\n                ax.plot(x, [1.1]*2, 'k', lw=2)\n                ax.text(np.mean(x) - 0.03, 1.04, '$\sigma$')\n        else:\n            self.scale_plt_figure(ax=ax)\n\n        if showbounds:\n            ax.plot(self.LSHP.polyedge[:,0], self.LSHP.polyedge[:,1], 'b',\n                    alpha=0.4)\n\n    def drawloops(self, thr=1e-6, ax=None, c=[0,0,1]):\n        self.find_cycles(thr=thr)\n        if ax is None:\n            ax = plt.gca()\n        for cycle in self.cycles:\n            #facecolor = np.random.rand(3)\n            #if np.all(facecolor > 0.5):\n            #    facecolor[np.random.choice(3)] = np.random.rand()*0.5\n            p = pltPolygon(self.LEAF.Vertex[cycle,:], facecolor=c,\n                           alpha=0.5)\n            ax.add_patch(p)\n\n    def drawspecial(self, ax=None):\n        \"\"\"Plot the nodes with specified boundary conditions\"\"\"\n        if ax is None:\n            ax = plt.gca()\n        ax.plot(self.LEAF.Vertex[self.sinks,0],\n                self.LEAF.Vertex[self.sinks,1], 'y.', alpha=1,\n                markersize=12)\n        if self.sources is not None:\n            ax.plot(self.LEAF.Vertex[self.sources,0],\n                    self.LEAF.Vertex[self.sources,1], 'r.', alpha=1,\n                    markersize=15)\n\n    def color_plot(self, mode='current'):\n        CM = self.C_matrix_sparse(self.C)\n        G = self.G_matrix_sparse(CM)[1:,1:].tocsc()\n\n        if mode == 'current':\n            C = self.C.copy() * self.flux_wts\n            colors = np.abs(np.sum((C[:,np.newaxis]*self.I_mat_red.dot(\n                scipy.sparse.linalg.spsolve(G, self.ds[1:,:]))), axis=1) / self.ds.shape[1])\n        elif mode == 'pressure':\n            colors = np.abs(np.sum((self.I_mat_red.dot(\n                scipy.sparse.linalg.spsolve(G, self.ds[1:,:]))), axis=1) / self.ds.shape[1])\n\n        colors = colors[np.where(abs(colors) < 5)]\n\n        colors -= min(colors)\n\n        colors /= max(colors)\n\n        rainbow = cm.get_cmap('Greys')\n\n        xs, ys = self.bond_coords()\n        conds = (self.C.copy() / max(self.C))**0.25\n\n        colors = rainbow(colors)\n        #colors[:,3] = conds\n\n        segs = np.array([np.array([xs[bond], ys[bond]]).T for bond in range(len(ys))])\n        '''#Thresholding to plot only lines with C > thr\n        a = segs[np.where(self.C.copy()>thr)]\n        acond = conds[np.where(self.C.copy()>thr)]\n        acolors = colors[np.where(self.C.copy()>thr)]'''\n\n        ax = plt.gca()\n\n        line_segments = LineCollection(segs,\n                                       linewidths=0.2,\n                                       colors=colors,\n                                       capstyle='round',\n                                       cmap='rainbow')\n        ax.add_collection(line_segments)\n\n        line_segments = LineCollection(segs,\n                                       linewidths=conds*10,\n                                       colors=colors,\n                                       capstyle='round',\n                                       cmap='rainbow')\n        ax.add_collection(line_segments)\n\n        self.scale_plt_figure(ax=ax)\n\n        self.drawspecial()\n\n    #storage\n    def to_networkx(self, thr=1e-4, graph=None):\n        \"\"\" Return a NetworkX graph representing this network\n        \"\"\"\n        graphs = {None: nx.classes.graph.Graph,\n                  'dir': nx.classes.digraph.DiGraph,\n                  'multi': nx.classes.multigraph.MultiGraph}\n        # reuse the cached graph only if it is already of the requested type\n        if hasattr(self, 'G') and graphs[graph] == type(self.G):\n            return self.G\n\n        if not hasattr(self, 'flow_wts'):\n            self.flow_flux_en_weights()\n\n        if graph == 'dir':\n            G = nx.DiGraph()\n\n            self.ds = self.fluctuation_ensemble()\n            self.flow_flux_en_weights()\n\n            CM = 
self.C_matrix_sparse(self.C)\n L = self.G_matrix_sparse(CM)[1:,1:].tocsc()\n\n # choose ensemble mode 1 just for simplicity\n # p = scipy.sparse.linalg.spsolve(L, self.ds[1:,20])\n # Q = (self.C*self.flux_wts)*self.I_mat_red.dot(p)\n # self.LEAF.Bond[Q >= 0, :] = self.LEAF.Bond[Q >= 0, ::-1]\n elif graph == 'multi':\n G = nx.MultiGraph()\n else:\n G = nx.Graph()\n\n eds = self.C > thr\n\n # Remove sinks for loop counting\n if self.system == 'delta':\n remove = np.all(np.isin(self.LEAF.Bond, self.sinks), axis=1)\n eds = np.logical_and(eds, np.logical_not(remove))\n\n for (a, b), c, w, l in zip(self.LEAF.Bond[eds,:],\n self.C[eds], self.flow_wts[eds], self.bond_lens[eds]):\n G.add_edge(a, b, conductivity=c, weight=1./w, length=l)\n G.nodes[a]['pos'] = self.LEAF.Vertex[a,:]\n G.nodes[b]['pos'] = self.LEAF.Vertex[b,:]\n\n if nx.number_of_selfloops(G) > 0:\n G.remove_edges_from(nx.selfloop_edges(G))\n\n self.G = G\n return self.G\n\n def from_networkx(self,): #not done yet\n \"\"\" Not implemented yet \"\"\"\n return None\n\n def save(self, name):\n \"\"\" Returns a dict that can be used to reconstruct the\n XylemNetwork object using the from_dict method but contains only\n bare bones data to save memory.\n \"\"\"\n data = {}\n\n data['type'] = self.system\n data['constructor_params'] = self.constructor_params\n data['attributes'] = {\n 'C': self.C,\n }\n\n if name[-2:] != '.p':\n name += '.p'\n\n with open(name, 'wb') as f:\n pickle.dump(data, f)\n\n return data\n\nclass DeltaNetwork(NetworkSuite):\n \"\"\" An object built upon the NetworkSuite object for the simulation of\n river deltas and marshes\n \"\"\"\n # initialization and simulation\n def __init__(self, LEAF, LSHP, size, cst, C0=None,\n fluctuations='river',\n inputs='line_source_wide', n_sources=1, sourceinds=None,\n outputs='line', n_sinks=25, sink_fraction=None, sinkinds=None,\n basins='random', basin_fraction=None, basininds=None):\n\n super(DeltaNetwork, self).__init__(LEAF, LSHP, size,\n C0=C0,\n inputs=inputs, n_sources=n_sources, sourceinds=sourceinds,\n outputs=outputs, n_sinks=n_sinks, sink_fraction=sink_fraction,\n sinkinds=sinkinds)\n\n self.system = 'delta'\n self.fluctuations = fluctuations\n\n self.gamma = 2/3 #0.666 for rivers, from exp data\n self.cst = cst\n\n self.set_basins(basins, basin_fraction, basininds)\n\n self.constructor_params = {\n 'LEAF': LEAF,\n 'LSHP': LSHP,\n 'size': size,\n 'cst': cst,\n 'C0': C0,\n 'fluctuations': self.fluctuations,\n 'sources': self.sources,\n 'sinks': self.sinks,\n 'basins': self.basins,\n 'basin_fraction': basin_fraction\n }\n\n def __repr__(self):\n return 'Geography object with T = ' + str(self.cst)\n\n def set_basins(self, basins, basin_fraction, basininds):\n if basin_fraction == None and basininds == None:\n self.basins = []\n return self.basins\n\n if basins == 'generate':\n bondnum = np.bincount(self.LEAF.Bond.flatten())\n ends = np.where(bondnum == 1)[0]\n self.basins = [x for x in ends if x not in self.sinks and\n x not in self.sources]\n return self.basins\n\n if basininds is not None:\n self.basins = basininds\n return self.basins\n\n n_basins = int(basin_fraction*self.verts)\n if basins == 'random':\n #random basins\n p = np.ones(self.verts)\n p[self.sources] = 0\n p[self.sinks] = 0\n p /= np.sum(p)\n self.basins = np.random.choice(self.verts,\n size=n_basins, p=p, replace=False)\n return self.basins\n if basins == 'square' or basins == 'triangle':\n extended_basins = int(n_basins*4/self.area)\n # Lattice between +/-0.98 to limit intersecting lines on\n # outside 
region of graph\n x, y = np.meshgrid(\n np.linspace(-0.98,0.98,int(np.sqrt(extended_basins))),\n np.linspace(-0.98,0.98,int(np.sqrt(extended_basins))))\n\n if basins == 'triangle':\n x[::2, :] += (x[0,1] - x[0,0])/2\n\n A = np.array([x.flatten(),y.flatten()]).T\n elif basins == 'linear':\n #this feature hasn't been tested!\n #basins distributed in y by to linear power law distribution\n x = np.linspace(-1,1, n_basins)\n y = -2 * np.random.power(2, size=n_basins) + 1\n\n A = np.array([x,y]).T\n\n self.basins = []\n poly = Polygon(self.LSHP.polyedge)\n for i in range(A.shape[0]):\n if poly.intersects(Point(A[i,:])):\n distsq = (self.LEAF.Vertex[:,0] - A[i,0])**2 + \\\n (self.LEAF.Vertex[:,1] - A[i,1])**2\n bas = np.argmin(distsq)\n if bas not in self.sources and bas not in self.sinks \\\n and bas not in self.basins:\n self.LEAF.Vertex[bas,:] = A[i,:]\n self.basins.append(bas)\n\n self.bond_lens = self.bond_lengths()\n scale = self.bond_lens.max()\n\n self.bond_lens /= scale\n\n def fluctuation_ensemble(self, fluctuations=None):\n resolution = 30\n\n ds = np.zeros((self.verts, resolution))\n\n nonsinks = np.delete(range(self.verts), self.sinks)\n ds[nonsinks, :] = 10**(-7)\n\n if fluctuations == None:\n fluctuations = self.fluctuations\n\n if fluctuations == 'river':\n ''' Note: you only need to do half the sin period since the\n other half has the same information'''\n\n tides = np.cos(np.linspace(0, np.pi, resolution, endpoint=True))\n\n ''' # dec19rivers, jan20rivers, may20rivers\n ds[self.sources,:] = 1/self.n_sources\n ds[self.basins,:] = self.cst*tides/len(self.basins)\n ds[self.sinks,:] = -np.sum(ds,axis=0)/self.n_sinks\n\n ds /= -1*np.sum(ds.clip(min=0),axis=0) # normalize inputs'''\n\n # jun20rivers\n ds[self.sources,:] = 1/self.n_sources\n ds[self.basins,:] = (self.cst) * tides / len(self.basins)\n ds /= np.sum(ds[:,0])\n ds[self.sinks, :] = -np.sum(ds, axis=0) / self.n_sinks\n elif fluctuations == 'marsh':\n '''#Basins fluctuating at spatially-random phase\n basins = np.random.permutation(self.basins)\n rows = np.tile(np.linspace(0, 2*np.pi, resolution, endpoint=False),\n (len(self.basins),1))\n cols = np.tile(np.linspace(0, 2*np.pi, len(self.basins),\n endpoint=False)[:,np.newaxis], (1,resolution))\n ds[self.basins, :] = np.sin(cols+rows)'''\n\n '''L = max(self.LSHP.polyedge[:,1]) - min(self.LSHP.polyedge[:,1])\n y = self.LEAF.Vertex[self.basins,1]\n\n phi = np.pi\n\n cols = np.tile(phi*y[:,np.newaxis]/L, (1,resolution))\n rows = np.tile(np.linspace(0, phi, resolution,\n endpoint=False), (len(self.basins),1))\n\n ds[self.basins,:] = np.sin(cols+rows)\n ds[regular_nodes, :] = 10**(-7)\n ds[self.sinks, :] -= ds.sum(axis=0)/len(self.sinks)'''\n\n tides = np.cos(np.linspace(0, np.pi, resolution, endpoint=True))\n\n inds = np.random.permutation(len(self.basins))\n rows = np.tile(np.linspace(0, 2*np.pi, resolution, endpoint=False),\n (len(self.basins),1))\n cols = np.tile(np.linspace(0, 2*np.pi, len(self.basins),\n endpoint=False)[:,np.newaxis], (1,resolution))\n\n ds[self.sources,:] = 1/self.n_sources\n ds[self.basins,:] = (self.cst) * tides / len(self.basins) + \\\n self.noise*self.cst*np.cos(rows+cols)[inds,:]/len(self.basins)\n ds /= np.sum(ds[:,0])\n ds[self.sinks, :] = -np.sum(ds, axis=0) / self.n_sinks\n\n elif fluctuations == 'entropy':\n ''' The first column is source only, remaining columns are basins\n only.\n '''\n tides = np.cos(np.linspace(0, np.pi, resolution-1, endpoint=True))\n\n ds[self.sources, 0] = 1/self.n_sources\n ds[self.basins, 0] = 10**(-7)\n ds[self.sources, 
1:] = 10**(-7)\n ds[self.basins, 1:] = (self.cst) * tides / len(self.basins)\n ds[self.sinks, :] = -np.sum(ds, axis=0) / self.n_sinks\n\n '''ds[self.sources, 1:] = 10**(-7)\n ds[self.basins, 1:] = np.cos(np.linspace(0, np.pi, resolution-1,\n endpoint=True))\n ds[self.sinks, :] = -np.sum(ds,axis=0)/self.n_sinks\n\n ds /= np.sum(ds.clip(min=0),axis=0)'''\n\n #print(np.sum(ds, axis=0))\n\n return ds\n\n @classmethod\n def load(cls, name):\n \"\"\" Takes a data dict containing only the\n bare bones data of the network and turns it into\n a fully fledged XylemNetwork object.\n \"\"\"\n\n if name[-2:] != '.p':\n name += '.p'\n with open(name, 'rb') as f:\n saved = pickle.load(f)\n\n assert saved['type'] == 'delta', 'must be loading a delta network'\n\n data = saved['constructor_params']\n\n de = cls(data['LEAF'], data['LSHP'],\n data['size'], data['cst'],\n C0=data['C0'], fluctuations=data['fluctuations'],\n sourceinds=data['sources'],\n sinkinds=data['sinks'],\n basininds=data['basins'], basin_fraction=data['basin_fraction'],)\n\n de.__dict__.update(saved['attributes'])\n\n return de\n\n @classmethod\n def make_river(cls, c, density=65, basin_fraction=0.15, shape='square',\n basins='triangle', n_sources=1, n_sinks=29):\n if shape == 'circle':\n trimming_percentile = 100\n outputs = 'semicircle'\n inputs = 'line_source'\n else:\n outputs = 'line'\n trimming_percentile = 99\n if shape in ['square', 'sq', 'strip', 'sine']:\n inputs = 'line_source_wide'\n elif shape == 'triangle' or shape == 'tri':\n inputs = 'line_source'\n else:\n inputs = 'line_source'\n if shape == 'invtriangle':\n outputs = 'invtriangle'\n LSHP = NetworkSuite.make_LSHP(shape)\n LEAF = NetworkSuite.make_LEAF('River', density, 'random', LSHP,\n trimming_percentile=trimming_percentile)\n delta = cls(LEAF, LSHP, density, c,\n fluctuations='river',\n inputs=inputs, n_sources=n_sources,\n outputs=outputs, n_sinks=n_sinks,\n basins=basins, basin_fraction=basin_fraction)\n\n return delta\n\n @classmethod\n def make_marsh(cls, c, noise, density=65, basin_fraction=0.15,\n shape='square', basins='triangle', n_sources=1, n_sinks=29):\n assert noise >= 0 and noise <= 1\n if shape == 'circle':\n trimming_percentile = 100\n outputs = 'semicircle'\n inputs = 'line_source'\n else:\n outputs = 'line'\n trimming_percentile = 99\n if shape in ['square', 'sq', 'strip', 'sine']:\n inputs = 'line_source_wide'\n elif shape == 'triangle' or shape == 'tri':\n inputs = 'line_source'\n else:\n inputs = 'line_source'\n if shape == 'invtriangle':\n outputs = 'invtriangle'\n LSHP = NetworkSuite.make_LSHP(shape)\n LEAF = NetworkSuite.make_LEAF('Marsh', density, 'random',\n LSHP, trimming_percentile=trimming_percentile)\n\n marsh = cls(LEAF, LSHP, density, c,\n fluctuations='marsh',\n inputs=inputs, n_sources=n_sources,\n outputs=outputs, n_sinks=n_sinks,\n basins=basins, basin_fraction=basin_fraction)\n\n marsh.noise = noise\n\n return marsh\n\n def simulate(self, plot=False, movie_dir=None, entropy=False):\n print('\\nGeography simulation with T=%0.2f' % self.cst)\n initialtime = time.time()\n print('Number of sinks: %d' % len(self.sinks))\n print('Number of basins: %d' % len(self.basins))\n\n self.simulate_base(plot=plot, movie_dir=movie_dir, plot_interval=1,\n entropy=entropy, timesteps=1e5)\n\n print('Simulation complete')\n print('Runtime: ' + str(round((time.time()-initialtime)/60, 2)) + \\\n ' minutes')\n\n # processing\n def remove_trees_nx(self, thr=1e-4):\n if not hasattr(self, 'G'):\n self.to_networkx(thr=thr)\n\n G2 = self.G.copy()\n node_list = 
np.array(G2.nodes)\n degrees = np.sum(nx.adjacency_matrix(G2, node_list, weight=None),\n axis=1)\n remove = node_list[np.where(degrees <= 1)[0]]\n remove = remove[np.logical_not(np.isin(\n remove, np.append(self.sources, self.sinks)))]\n\n while len(remove) > 0:\n G2.remove_nodes_from(remove)\n node_list = np.array(G2.nodes)\n degrees = np.sum(nx.adjacency_matrix(G2, node_list,\n weight=None), axis=1)\n remove = node_list[np.where(degrees <= 1)[0]]\n remove = remove[np.logical_not(np.isin(\n remove, np.append(self.sources, self.sinks)))]\n if self.sinks[0] in remove:\n assert False\n\n self.G = G2\n\n def thin(self, thr=1e-4, basethr=1e-8):\n \"\"\" Trims internal trees\n \"\"\"\n self.remove_trees_nx(thr=basethr)\n\n cs = nx.get_edge_attributes(self.G, 'conductivity')\n self.G.remove_edges_from([key for key in cs.keys() if cs[key] < thr])\n self.G.remove_nodes_from([n[0] for n in self.G.degree if n[1] == 0])\n\n return self.G\n\n # statistics\n def flow_change(self, thr=1e-4):\n self.flow_flux_en_weights()\n C = self.C*self.flux_wts\n ds = self.fluctuation_ensemble()\n\n CM = self.C_matrix_sparse(C)\n G = self.G_matrix_sparse(CM)[1:,1:].tocsc()\n Q = C[:,np.newaxis]*self.I_mat_red.dot(\n scipy.sparse.linalg.spsolve(G, ds[1:,[0,-1]]))\n\n filter = self.C > thr\n tot = np.sum(filter)\n\n change = np.where(Q[filter,0] > 0,1,-1)*np.where(Q[filter,1] > 0,1,-1)\n return np.sum(change-1)/-2/tot\n\n def pressure_covar(self, mode='euclidean'):\n \"\"\"Calculates covariances in pressure between basins of geography\n simulations by subtracting the mean pressure from all basins\n and calculating all covariances from there\n Parameters:\n mode: either 'euclidean' to return euclidean distance between\n basins or 'ydist' to return y-axis distance between\n basins\n Returns:\n x: list of distances between basins (according to mode)\n y: list of covariances between basins corresponding to list x\n \"\"\"\n self.ds = self.fluctuation_ensemble()\n CM = self.C_matrix_sparse(self.C)\n G = self.G_matrix_sparse(CM)[1:,1:].tocsc()\n\n p = np.zeros((self.verts, self.ds.shape[1]))\n p[1:,:] = scipy.sparse.linalg.spsolve(G, self.ds[1:,:])\n print(np.amax(p)-np.amin(p))\n\n basinsp = p[self.basins,:]\n basinsp -= np.mean(basinsp, axis=0) #subtract off average pressures\n pick = np.random.choice(basinsp.shape[0], 1000, replace=False)\n pick = np.sort(pick)[::-1]\n basinsp = basinsp[pick, :]\n c = np.cov(basinsp)\n\n #can calculate exactly how many elements in x and y based on # of\n #basins but I've been too lazy to do this so far\n u1, u2 = np.triu_indices(c.shape[0], k=1)\n b = np.array(self.basins)[pick]\n\n #x = np.linalg.norm(self.LEAF.Vertex[b[u1], :] - \\\n # self.LEAF.Vertex[b[u2], :], axis=1)\n x = np.abs(self.LEAF.Vertex[b[u1], 0] - self.LEAF.Vertex[b[u2], 0])\n y = c[u1, u2]\n #pt = np.random.choice(np.where((x > 1.5) & (y > 6e3))[0])\n\n plt.figure(figsize=(10,8))\n\n plt.plot(x, y, '.')\n #plt.plot(x[pt], y[pt], '^r')\n #print(x[pt], y[pt])\n\n #self.plot('pipes')\n #plt.plot(self.LEAF.Vertex[[b[u1[pt]], b[u2[pt]]], 0], self.LEAF.Vertex[[b[u1[pt]], b[u2[pt]]], 1], '^r')\n #plt.show()\n\n return np.around(np.array(x), decimals=4), np.array(y)\n\n def binned_pressure_covar(self, mode='euclidean', return_original=False):\n x, y = self.pressure_covar(mode=mode)\n xred = np.unique(x)\n yred = np.zeros(len(xred))\n for i in range(len(xred)):\n yred[i] = np.sum(y[x == xred[i]]) / np.sum(x == xred[i])\n return x, y, xred, yred\n\n def sliding_covar(self, mode='euclidean', resolution=40):\n x, y = 
self.pressure_covar(mode=mode)\n centers = np.linspace(0, max(x), resolution+2)\n deltax = centers[1] - centers[0]\n xred = np.zeros(resolution)\n yred = np.zeros(resolution)\n for i in range(resolution):\n xred[i] = centers[i+1]\n yvals = y[np.where((x > centers[i]-deltax) &\n (x < centers[i]+deltax))[0]]\n yred[i] = np.sum(yvals) / len(yvals)\n return x, y, xred, yred\n\n def pressure_suite(self, k=7, thr=0, plot=False, mode=None, n=[491]):\n \"\"\" k is the column of self.ds that we are looking at\n \"\"\"\n self.ds = self.fluctuation_ensemble()\n\n CM = self.C_matrix_sparse(self.C)\n G = self.G_matrix_sparse(CM)[1:,1:].tocsc()\n p = np.zeros((self.verts, self.ds.shape[1]))\n p[1:,:] = scipy.sparse.linalg.spsolve(G, self.ds[1:,:])\n basinsp = p[self.basins,:]\n\n p = np.concatenate((p, p[:,::-1]), axis=1)\n plt.plot(p[self.sources[0],:])\n plt.plot(p[self.basins[0],:])\n plt.plot(p[self.basins[50],:])\n plt.show()\n\n for i in n:\n self.plot(style='sticks')\n\n plt.plot(self.LEAF.Vertex[i,0], self.LEAF.Vertex[i,1], '^r', ms=10)\n\n inds = np.delete(np.arange(Reff.shape[1]), i)\n c = np.delete(Reff[i,:], i)\n\n plt.scatter(self.LEAF.Vertex[inds,0], self.LEAF.Vertex[inds,1],\n c=c, cmap=vir, norm=LogNorm(vmin=vmin, vmax=np.amax(Reff)), zorder=-1)\n #plt.colorbar()\n plt.show()\n\n x = np.linalg.norm(self.LEAF.Vertex[u1, :] - \\\n self.LEAF.Vertex[u2,:], axis=1)\n\n def pressure_suite_new(self, k=7, thr=0, plot=False, mode=None):\n \"\"\" k is the column of self.ds that we are looking at\n \"\"\"\n\n CM = self.C_matrix_sparse(self.C) #Conductivity matrix\n\n ResMat = scipy.sparse.coo_matrix((1/self.C, (self.LEAF.Bond[:,0],\n self.LEAF.Bond[:,1])), shape=(self.verts, self.verts))\n ResMat = ResMat + ResMat.T;\n ResLap = scipy.sparse.spdiags(ResMat.sum(axis=0), [0], ResMat.shape[0],\n ResMat.shape[0], format='coo') - ResMat\n invR = scipy.sparse.linalg.inv(ResLap)\n\n G = self.G_matrix_sparse(CM).tocsc()\n #p = scipy.sparse.linalg.spsolve(G, self.ds[:,k])\n\n invG = scipy.sparse.linalg.inv(G)\n\n if thr == 0:\n u1, u2 = np.triu_indices(n=invR.shape[0],\n m=invR.shape[1], k=1)\n if mode == 'basins':\n testpts = self.basins\n u = np.array(np.triu_indices(n=invR.shape[0],\n m=invR.shape[1], k=1)).T\n u = u[np.all(np.isin(u, testpts), axis=1), :]\n u1 = u[:,0]\n u2 = u[:,1]\n else:\n testpts = np.unique(self.LEAF.Bond[self.C > thr, :])\n u = np.array(np.triu_indices(n=invR.shape[0],\n m=invR.shape[1], k=1)).T\n u = u[np.all(np.isin(u, testpts), axis=1), :]\n u1 = u[:,0]\n u2 = u[:,1]\n\n Reff = invR[u1,u1] + invR[u2,u2] - invR[u1,u2] - \\\n invR[u2,u1]\n Ceff = invG[u1,u1] + invG[u2,u2] - invG[u1,u2] - invG[u2,u1]\n\n Reff[Reff < 0] = 1e-25\n\n self.plot(style='sticks', drawspecial=False)\n\n plt.plot(self.LEAF.Vertex[self.sources[0],0], self.LEAF.Vertex[self.sources[0],1], '^r', ms=10)\n\n plt.scatter(self.LEAF.Vertex[testpts,0], self.LEAF.Vertex[testpts,1],\n c=Reff[self.sources[0], :], cmap=vir, norm=LogNorm(vmin=vmin, vmax=np.amax(Reff)), zorder=-1)\n #plt.colorbar()\n plt.show()\n\n x = np.linalg.norm(self.LEAF.Vertex[u1, :] - \\\n self.LEAF.Vertex[u2,:], axis=1)\n #dp = np.abs(p[u1, k] - p[u2, k])\n #Q = dp/Reff\n #Q2 = dp/Ceff\n\n if plot:\n plt.hexbin(x, Reff.tolist()[0], yscale='log')\n plt.show()\n\n def graph_entropy(self):\n \"\"\" Entropy of the graph at time k based on the description given in:\n DOI: 10.1080/0305215512331328259\n \"\"\"\n ds = self.fluctuation_ensemble('entropy')\n\n # Calculate S0\n I = self.sources + self.basins + self.sinks\n p0 = ds[I,:].clip(min=0)\n\n with 
warnings.catch_warnings():\n warnings.simplefilter(\"ignore\")\n S0 = -1*np.sum(p0*np.nan_to_num(np.log(p0)),axis=0)\n\n # Calculate Pn\n CM = self.C_matrix_sparse(self.C)\n G = self.G_matrix_sparse(CM)[1:,1:].tocsc()\n\n Q = self.C[:,np.newaxis]*self.I_mat_red.dot(\n scipy.sparse.linalg.spsolve(G, ds[1:,:]))\n\n PS = np.zeros(Q.shape[1])\n for j in [0,1]:\n \"\"\" Cheat by making an extra row for the 'super sink' to make\n calculations easier\"\"\"\n nodeQ = scipy.sparse.csc_matrix(\n (Q[:,j], (self.LEAF.Bond[:,0], self.LEAF.Bond[:,1])),\n shape=(self.verts+1, self.verts+1)\n )\n with warnings.catch_warnings():\n warnings.simplefilter(\"ignore\")\n nodeQ[self.verts, self.sinks] = ds[self.sinks, j]\n nodeQ[self.verts, self.basins] = ds[self.basins, j]\n nodeQ -= nodeQ.T\n nodeQ[nodeQ < 0] = 0\n T = np.squeeze(np.asarray(np.sum(nodeQ, axis=0)))[:-1]\n p = np.asarray(nodeQ[:, np.where(T != 0)[0]] / T[T != 0])\n '''logp = p\n logp[logp != 0] = np.log(logp[logp != 0])\n print(logp)'''\n with warnings.catch_warnings():\n warnings.simplefilter(\"ignore\")\n S = -1*np.sum(p*np.nan_to_num(np.log(p)), axis=0)\n\n PS[j] = np.sum(T[T != 0]*S)\n\n return S0[[0,1]] + PS[[0,1]]\n\n def flow_entropy(self, Q_matrix):\n \"\"\" calculate the flow entropy (path entropy) from\n the flow matrix according to\n\n doi:10.1103/PhysRevLett.104.048703\n \"\"\"\n #t0 = time.time()\n # only positive flows\n Qpos = Q_matrix.copy()\n Qpos[Qpos < 0] = 0\n\n # normalize by total outflows\n outflows = Qpos.sum(axis=1)\n P_matrix = Qpos/outflows[:,np.newaxis]\n P_matrix[np.logical_not(np.isfinite(P_matrix))] = 0\n\n # total outflow at each node\n P_i = Qpos.sum(axis=0)\n\n # local entropy\n logs = np.log2(P_matrix)\n logs[np.logical_not(np.isfinite(logs))] = 0\n\n S_i = np.sum(P_matrix*logs, axis=1)\n\n #t1 = time.time()\n\n #print \"time:\", t1 - t0\n\n return -np.sum(P_i*S_i)\n\n def flow_entropies(self, C):\n \"\"\" Calculate the mean flow entropy for\n injecting currents at all nodes and removing it at the\n source.\n \"\"\"\n #t0 = time.time()\n self.flow_flux_en_weights()\n C_weighted = C*self.flow_wts\n CM = self.C_matrix_sparse(C_weighted)\n G = self.G_matrix_sparse(CM)[1:,1:]\n\n # construct RHS for flows between sink and one\n # particular node\n #t1 = time.time()\n rhs = np.eye(G.shape[0]) # if result is dense then\n # faster if rhs is also dense\n Qs = C_weighted[:,np.newaxis]*self.I_mat_red.dot(scipy.sparse.linalg.spsolve(G, rhs))\n\n ents = []\n for i in range(self.verts - 1):\n Qm = np.zeros((self.verts, self.verts))\n\n for (a, b), Q in zip(self.LEAF.Bond, Qs[:,i]):\n Qm[a,b] = Q\n Qm[b,a] = -Q\n\n ents.append(self.flow_entropy(Qm))\n #t1 = time.time()\n\n #print \"Time\", t1-t0\n\n return np.array(ents)\n\n def bridges(self, thr=1e-4, smooth=False, weight=True):\n \"\"\" Returns fraction of channel area found in bridges.\n Subtract from 1 to get fraction of channel area found in loops.\n \"\"\"\n\n self.thin(thr=thr)\n if smooth:\n self.smooth(thr=thr)\n\n conductivity = nx.get_edge_attributes(self.G, 'conductivity')\n length = nx.get_edge_attributes(self.G, 'length')\n\n edges = list(nx.bridges(self.G))\n\n lb = np.array([length[e] for e in edges])\n l = np.array([length[e] for e in self.G.edges()])\n\n if weight == True:\n cb = np.array([conductivity[e] for e in edges])\n c = np.array([conductivity[e] for e in self.G.edges()])\n return np.sum(cb**(self.gamma/2) * lb) / \\\n np.sum(c**(self.gamma/2) * l)\n elif weight == 'new':\n cb = np.array([conductivity[e] for e in edges])\n return 
np.sum(cb**(self.gamma/2) * lb) / self.total_area()\n        else:\n            return np.sum(lb) / np.sum(l)\n\n    def total_area(self, thr=1e-8):\n        keep = np.where(self.C >= thr)\n        return np.sum(self.bond_lens[keep]*self.C[keep]**(self.gamma/2))\n\n    def loop_ranking(self):\n        self.find_cycles(thr=1e-4)\n        x = np.array([np.median(self.C[c]) for c in self.cycles])\n        rank = np.argsort(x)[::-1]\n        # need at least six cycles before the sixth-ranked one exists\n        if len(rank) < 6:\n            return 1e-4\n        return np.median(self.C[self.cycles[rank[5]]])\n\n    def mstdiff(self, thr=1e-4, weight=True):\n        self.to_networkx(thr=thr, graph='multi')\n        self.thin(thr=thr)\n        self.smooth(thr=thr)\n\n        if weight:\n            tree = nx.maximum_spanning_tree(self.G, weight='area')\n            a = nx.get_edge_attributes(tree, 'area')\n            fulla = nx.get_edge_attributes(self.G, 'area')\n        else:\n            tree = nx.maximum_spanning_tree(self.G, weight='length')\n            a = nx.get_edge_attributes(tree, 'length')\n            fulla = nx.get_edge_attributes(self.G, 'length')\n\n        mstarea = np.sum([a[e] for e in tree.edges(keys=True)])\n        totalarea = np.sum([fulla[e] for e in self.G.edges(keys=True)])\n        return 1 - mstarea / totalarea\n\n    # plotting\n    def drawspecial(self, ax=None):\n        if ax is None:\n            ax = plt.gca()\n        ax.plot(self.LEAF.Vertex[self.sinks,0],\n                self.LEAF.Vertex[self.sinks,1], 'y.', alpha=1,\n                markersize=12)\n        if self.sources is not None:\n            ax.plot(self.LEAF.Vertex[self.sources,0],\n                    self.LEAF.Vertex[self.sources,1], 'r.', alpha=1,\n                    markersize=15)\n        ax.plot(self.LEAF.Vertex[self.basins,0],\n                self.LEAF.Vertex[self.basins,1], 'c.', alpha=0.5,\n                markersize=18)\n\n    def ensembleplot(self, dir):\n        self.ds = self.fluctuation_ensemble()\n\n        CM = self.C_matrix_sparse(self.C)\n        G = self.G_matrix_sparse(CM)[1:,1:].tocsc()\n        p = scipy.sparse.linalg.spsolve(G, self.ds[1:,:])\n        v = (np.amin(p), np.amax(p)-np.amin(p))\n        p = np.concatenate((p, p[:,::-1]), axis=1)\n\n        t = np.linspace(0, 2*np.pi, p.shape[1])\n        tides = np.cos(t)\n        t *= p.shape[1]/2/np.pi\n\n        fig = plt.figure(figsize=(15,10))\n        grid = plt.GridSpec(4,5, hspace=0.05, wspace=0.1, left=0.1, right=0.9)\n        for i in range(p.shape[1]):\n            print(i)\n\n            if self.fluctuations == 'river':\n                fig.add_subplot(grid[1:3,0])\n                plt.plot(t, tides*self.cst)\n                plt.plot([t[i]]*2, [min([-self.cst,-1]), max([self.cst,1])],\n                         'k')\n                plt.plot([0,p.shape[1]], [0,0], 'k--')\n                plt.xlabel('Ensemble state')\n                plt.ylabel('Total flow through basins relative to river input')\n\n            fig.add_subplot(grid[:,0:])\n            self.plot(style='pressure', cmap='cividis', p=p[:,i], v=v,\n                      drawspecial=False)\n            self.plot(style='pipes', drawspecial=False)\n            plt.colorbar(cm.ScalarMappable(norm=Normalize(vmin=0, vmax=1),\n                         cmap='cividis'), ax=plt.gca(),\n                         label='Fraction of max potential')\n            plt.axis('off')\n\n            plt.savefig(dir+'/%05d.png' % i)\n            plt.clf()\n\ndef crop_LEAF(LEAF, C0, lengths, x=None, y=None):\n    if x is None:\n        x = [np.amin(LEAF.Vertex[:,0]), np.amax(LEAF.Vertex[:,0])]\n    if y is None:\n        y = [np.amin(LEAF.Vertex[:,1]), np.amax(LEAF.Vertex[:,1])]\n\n    keep = np.where(((LEAF.Vertex[:, 0] >= x[0]) & (LEAF.Vertex[:, 0] <= x[1]))\n        & ((LEAF.Vertex[:, 1] >= y[0]) & (LEAF.Vertex[:, 1] <= y[1])))[0]\n\n    aLEAF = LFCLSS.Topology(LEAF.comment+'_cropped', 'data')\n    aLEAF.Bond = LEAF.Bond[np.all(np.isin(LEAF.Bond, keep), axis=1), :]\n\n    x = 0\n    for i in keep:\n        aLEAF.Bond[aLEAF.Bond == i] = x\n        x += 1\n\n    aLEAF.Vertex = LEAF.Vertex[keep,:]\n    aLEAF.Vertex -= np.amin(LEAF.Vertex, axis=0)\n\n    C0 = C0[np.all(np.isin(LEAF.Bond,keep), axis=1)]\n    lengths = 
lengths[np.all(np.isin(LEAF.Bond,keep), axis=1)]\n\n return aLEAF, C0, lengths\n\ndef rotate_LEAF(LEAF, angle=-1*np.pi/2):\n R = np.array([[np.cos(angle), -1*np.sin(angle)],\n [np.sin(angle), np.cos(angle)]])\n LEAF.Vertex = (R.dot(LEAF.Vertex.T)).T\n return LEAF\n\ndef remove_nodes(LEAF, LSHP, C0, lengths, nodes):\n LEAF.Vertex = np.delete(LEAF.Vertex, nodes, axis=0)\n rembonds = np.where(np.any(np.isin(LEAF.Bond, nodes), axis=1))[0]\n LEAF.Bond = np.delete(LEAF.Bond, rembonds, axis=0)\n C0 = np.delete(C0, rembonds)\n lengths = np.delete(lengths, rembonds)\n\n un = np.delete(np.arange(np.amax(LEAF.Bond)+1), nodes)\n conv = np.zeros(np.amax(LEAF.Bond) + 1)\n conv[un] = np.arange(len(un))\n LEAF.Bond[:, 0] = conv[LEAF.Bond[:, 0]]\n LEAF.Bond[:, 1] = conv[LEAF.Bond[:, 1]]\n\n hull = ConvexHull(LEAF.Vertex)\n LSHP.polyedge = LEAF.Vertex[hull.vertices, :]\n return LEAF, LSHP, C0, lengths\n\ndef remove_edges(LEAF, LSHP, C0, lengths, edges):\n def remove_edge(LEAF, LSHP, C0, lengths, edge):\n rembonds = np.where(np.all(np.isin(LEAF.Bond, edge), axis=1))[0]\n LEAF.Bond = np.delete(LEAF.Bond, rembonds, axis=0)\n C0 = np.delete(C0, rembonds)\n lengths = np.delete(lengths, rembonds)\n return LEAF, LSHP, C0, lengths\n\n for e in edges:\n LEAF, LSHP, C0, lengths = remove_edge(LEAF, LSHP, C0, lengths, e)\n return LEAF, LSHP, C0, lengths\n\ndef read_gbd(filename='shp/gbmd/gangesNetmod.shp', plot=False, sourceinds=[0],\n sinks=None, crop=False, x=[1e5, 2.5e5], y=[0, 1.5e5]):\n import geopandas as gpd\n shapefile = gpd.read_file(filename)\n\n if filename == 'shp/gbmd/gangesNetmod.shp':\n sinks = [8699, 8749, 8793, 8794, 8798, 8797, 8791, 8792, 8784, 8760,\n 8653, 8591, 8538, 8470, 8160, 8000, 7865, 7963, 8234, 8235,\n 8294, 8046, 8089, 7741, 7485,]\n\n if plot:\n shapefile.plot()\n plt.show()\n\n LEAF = LFCLSS.Topology('gbmd', 'data')\n\n LEAF.Bond = np.array([shapefile['FROM_NODE'],shapefile['TO_NODE']]).T - 1\n\n # Some indices are skipped over in shapefile, so this corrects for that\n un = np.sort(np.unique(LEAF.Bond)) # un = unique\n conv = np.zeros(np.amax(LEAF.Bond)+1) #conv = conversion array\n conv[un] = np.arange(len(un))\n LEAF.Bond[:,0] = conv[LEAF.Bond[:,0]]\n LEAF.Bond[:,1] = conv[LEAF.Bond[:,1]]\n\n LEAF.Vertex = np.zeros((np.amax(LEAF.Bond)+1,2))\n lengths = np.zeros(LEAF.Bond.shape[0])\n for i in range(len(shapefile['geometry'])):\n lengths[i] = shapefile['geometry'][i].length\n c = np.array(shapefile['geometry'][i].coords)\n p = LEAF.Bond[i,:]\n try:\n LEAF.Vertex[p,:] = c[[0,-1],:]\n except: print(p)\n\n # Make (0,0) at the bottom left\n LEAF.Vertex -= np.amin(LEAF.Vertex,axis=0)\n\n C0 = shapefile['Width']**3 #* 9.81/(100**2*0.004*shapefile['SHAPE_Leng'])\n C0 /= np.amax(C0[np.where(np.any(LEAF.Bond == sourceinds[0], axis=1))[0]])\n\n if crop:\n LEAF, C0, lengths = crop_LEAF(LEAF, C0, lengths, x=x, y=y)\n sinks = [0]\n\n LEAF.RidgeLengths = np.ones(LEAF.Bond.shape[0])\n LEAF.CellAreas = np.ones(LEAF.Bond.shape[0])\n\n LSHP = LFCLSS.LeafShape('from data', 0)\n hull = ConvexHull(LEAF.Vertex)\n LSHP.polyedge = LEAF.Vertex[hull.vertices,:]\n\n delta = DeltaNetwork(LEAF, LSHP, 0, 0, C0=C0,\n fluctuations='river', basins='generate',\n sourceinds=sourceinds, sinkinds=sinks)\n\n delta.lengths = lengths\n return delta\n\ndef read_deltas(delta='Colville', file='shp/DeltasNets_CIMPY', crop=False):\n from scipy.io import loadmat\n a = loadmat(file)\n\n LEAF = LFCLSS.Topology(delta, 'data')\n\n A = a[delta]['AdjW'][0][0]\n\n print(A)\n inds = np.nonzero(A)\n LEAF.Bond = np.array(inds).T\n LEAF.Bond 
= LEAF.Bond[:,::-1]\n C0 = A[inds]\n\n x = a[delta]['nx'][0][0][:,0]\n y = a[delta]['ny'][0][0][:,0]\n LEAF.Vertex = np.vstack((x,y)).T\n\n for n in range(LEAF.Vertex.shape[0]):\n if n == 0:\n continue\n prev = np.where(LEAF.Bond[:,1] == n)\n loc = np.where(LEAF.Bond[:,0] == n)\n C0[loc] *= np.amax(prev)\n\n C0 **= 3\n\n LEAF = rotate_LEAF(LEAF)\n if delta == 'Mossy':\n LEAF = rotate_LEAF(LEAF)\n elif delta == 'Parana':\n LEAF = rotate_LEAF(LEAF, angle=np.pi/8)\n\n #Make (0,0) at the bottom left\n LEAF.Vertex -= np.amin(LEAF.Vertex,axis=0)\n\n if crop:\n LEAF, C0 = crop_LEAF(LEAF, C0, x=x, y=y)\n sinks = [0]\n\n # correct conductivities here\n\n LEAF.RidgeLengths = np.ones(LEAF.Bond.shape[0])\n LEAF.CellAreas = np.ones(LEAF.Bond.shape[0])\n\n LSHP = LFCLSS.LeafShape('from data', 0)\n hull = ConvexHull(LEAF.Vertex)\n LSHP.polyedge = LEAF.Vertex[hull.vertices,:]\n\n sinks = {\n 'Mossy': [58, 57, 56, 55, 54, 53, 49, 43, 42, 40, 39, 33, 32, 28,\n 27, 25, 19, 18, 13],\n 'Colville': [40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 64, 65, 66, 68,\n 79, 80, 101, 102, 103, 104]\n }\n\n return DeltaNetwork(LEAF, LSHP, 0, 0, C0=C0,\n fluctuations='river', basins='generate',\n sourceinds=[0], sinkinds=sinks[delta])\n\ndef recover_json(file):\n return DeltaNetwork.load('deltadata/'+file)\n\nT = {'St Clair': 1.5E-01,\n 'Mississippi': 5.4E-01,\n 'Wax': 9.5E-01,\n 'Mossy': 5.6E-01,\n 'Kolyma': 5.0E-01,\n 'Colville': 4.7E-01,\n 'Apalachicola': 5.9E+00,\n 'Mackenzie': 3.5E+00,\n 'Orinoco': 1.4E+01,\n 'Yenisei': 2.5E+00,\n 'Lena': 2.9E+01,\n 'Yukon': 7.5E+00,\n 'Betsiboka': 8.1E+01,\n 'Irrawaddy': 5.0E+02,\n 'GBM': 1.2E+02,\n 'Rajang': 2.3E+02,\n 'Niger': 3.2E+02,\n 'Sarawak': 2.5E+03,\n 'Ras Isa': 6.0E+03,\n 'Barnstable': 5.5E+03\n }\ndeltas = list(T.keys())\n\ndef read_json(file='Irrawaddy', crop=False, convex=True):\n try: return recover_json(file)\n except FileNotFoundError: pass\n if file in ['Ras Isa', 'Sarawak']:\n return file\n\n import pandas as pd\n import json\n n = pd.DataFrame(\n json.load(open('shp/'+file+'/'+file+'_nodes.json', 'r'))['features'])\n e = pd.DataFrame(\n json.load(open('shp/'+file+'/'+file+'_links.json', 'r'))['features'])\n\n LEAF = LFCLSS.Topology(file, 'data')\n\n start = [eval(x['conn'])[0] for x in e['properties']]\n fin = [eval(x['conn'])[1] for x in e['properties']]\n LEAF.Bond = np.array([start, fin]).T\n\n un = np.array([x['id'] for x in n['properties']])\n conv = np.zeros(np.amax(LEAF.Bond) + 1)\n conv[un] = np.arange(len(un))\n LEAF.Bond[:, 0] = conv[LEAF.Bond[:, 0]]\n LEAF.Bond[:, 1] = conv[LEAF.Bond[:, 1]]\n\n if file == 'Mississippi':\n LEAF.Bond = LEAF.Bond[1:, :]\n\n x = [x['coordinates'][0] for x in n['geometry']]\n y = [x['coordinates'][1] for x in n['geometry']]\n\n LEAF.Vertex = np.array([x, y]).T\n LEAF.Vertex -= np.amin(LEAF.Vertex, axis=0)\n LEAF.Vertex *= 1e-3\n\n try: w = np.array([eval(x['wid']) for x in e['properties']])\n except KeyError: w = np.ones(LEAF.Bond.shape[0])\n # length = np.array([eval(x['len_adj']) for x in e['properties']])\n C0 = w**3 #* 9.8/0.004/1e2**2\n try: lengths = np.array([float(x['len']) for x in e['properties']])\n except:\n lengths = np.linalg.norm(LEAF.Vertex[LEAF.Bond[:,0],:] - \\\n LEAF.Vertex[LEAF.Bond[:,1],:], axis=1)\n\n if file == 'Mississippi':\n C0 = C0[1:]\n lengths = lengths[1:]\n\n if file == 'Niger':\n LEAF, C0, lengths = crop_LEAF(\n LEAF, C0, lengths, x=[-1, 115], y=[-1, 1e3])\n LEAF.Vertex -= np.amin(LEAF.Vertex, axis=0)\n\n if file == 'Yenisei':\n LEAF, C0, lengths = crop_LEAF(\n LEAF, C0, lengths, x=[-1, 80], y=[20, 
180])\n\n    if file == 'Yukon':\n        LEAF, C0, lengths = crop_LEAF(\n            LEAF, C0, lengths, x=[-1, 1e3], y=[36.75, 1e3])\n\n    if file == 'Colville':\n        LEAF, C0, lengths = crop_LEAF(\n            LEAF, C0, lengths, x=[-1, 1e3], y=[8, 1e3])\n\n    if file == 'Apalachicola':\n        LEAF, C0, lengths = crop_LEAF(LEAF, C0, lengths, y=[0,15])\n\n    LEAF.Vertex -= np.amin(LEAF.Vertex, axis=0)\n\n    sources = {'<NAME>': 0,\n               'Mississippi': 7,\n               'Wax': 18,\n               'Mossy': 31,\n               'Kolyma': 17,\n               'Colville': 97,\n               'Apalachicola': 0,\n               'Mackenzie': 593,\n               'Orinoco': 196,\n               'Yenisei': 336,\n               'Lena': 1898,\n               'Yukon': 268,\n               'Betsiboka': 56,\n               'Irrawaddy': 288,\n               'GBM': 373,\n               'Rajang': 49,\n               'Niger': 124,\n               'Sarawak': 0,\n               '<NAME>': 0,\n               'Barnstable': 37}\n\n    #sinks = {x: [1] for x in deltas}\n    with open('shp/sinks.p','rb') as f:\n        sinks = {x[0]: x[1] for x in zip(deltas, pickle.load(f))}\n\n    for d in deltas:\n        try: sinks[d].remove(sources[d])\n        except ValueError: pass\n\n    firstbondind = np.where(np.any(LEAF.Bond == sources[file], axis=1))\n    cscale = np.amax(C0[firstbondind])\n    if file == 'Wax':\n        cscale = 1000**3\n    C0 /= cscale\n    lengths /= cscale**(1/3)\n\n    LEAF.RidgeLengths = np.ones(LEAF.Bond.shape[0])\n    LEAF.CellAreas = np.ones(LEAF.Bond.shape[0])\n\n    LSHP = LFCLSS.LeafShape('from data', 0)\n    if convex:\n        hull = ConvexHull(LEAF.Vertex)\n        LSHP.polyedge = LEAF.Vertex[hull.vertices, :]\n    else:\n        import alphashape\n        alpha = 0.95 * alphashape.optimizealpha(LEAF.Vertex)\n        hull = alphashape.alphashape(LEAF.Vertex, alpha)\n        LSHP.polyedge = np.array(hull.exterior.coords.xy).T\n\n    if sinks is None:\n        sinks = [LEAF.Vertex.shape[0] - 1]\n\n    if file == 'St Clair':\n        edges = [(1,65)]\n        LEAF, LSHP, C0, lengths = \\\n            remove_edges(LEAF, LSHP, C0, lengths, edges)\n\n    if file == 'Barnstable':\n        nodes = [6, 4, 2, 1, 0, 5, 8, 10, 7, 11, 13, 15, 12, 16, 17, 14, 26,\n                 31, ]\n        edges = [(31,35), (35, 41), (41,48), (48, 55), ]\n        LEAF, LSHP, C0, lengths = \\\n            remove_edges(LEAF, LSHP, C0, lengths, edges)\n        LEAF, LSHP, C0, lengths = \\\n            remove_nodes(LEAF, LSHP, C0, lengths, nodes)\n\n    delta = DeltaNetwork(LEAF, LSHP, 0, 0, C0=C0,\n                         fluctuations='river', basins='generate',\n                         sourceinds=[sources[file]], sinkinds=sinks[file])\n\n    delta.bond_lens = lengths\n\n    return delta\n\ndef newjson(file):\n    if file in ['<NAME>', 'Sarawak']:\n        return file\n\n    import pandas as pd\n    import json\n    n = pd.DataFrame(\n        json.load(open('shp/'+file+'/'+file+'_nodes.json', 'r'))['features'])\n    e = pd.DataFrame(\n        json.load(open('shp/'+file+'/'+file+'_links.json', 'r'))['features'])\n\n    LEAF = LFCLSS.Topology(file, 'data')\n\n    start = [eval(x['conn'])[0] for x in e['properties']]\n    fin = [eval(x['conn'])[1] for x in e['properties']]\n    simpleBond = np.array([start, fin]).T\n\n    un = np.array([x['id'] for x in n['properties']])\n    conv = np.zeros(np.amax(simpleBond) + 1)\n    conv[un] = np.arange(len(un))\n    simpleBond[:, 0] = conv[simpleBond[:, 0]]\n    simpleBond[:, 1] = conv[simpleBond[:, 1]]\n\n    x = [x['coordinates'][0] for x in n['geometry']]\n    y = [x['coordinates'][1] for x in n['geometry']]\n\n    LEAF.Vertex = np.array([x, y]).T\n\n    try: w = np.array([eval(x['wid']) for x in e['properties']])\n    except KeyError: w = np.ones(simpleBond.shape[0])  # LEAF.Bond not built yet\n    simpleC0 = w**3 #* 9.8/0.004/1e2**2\n\n    skip = 10\n    if file == 'Barnstable':\n        rembonds = [(31,35), (35, 41), (41,48), (48, 55), ]\n        remnodes = [6, 4, 2, 1, 0, 5, 8, 10, 7, 11, 13, 15, 12, 16, 17, 14, 26,\n                    31, ]\n    else:\n        rembonds = []\n        remnodes = []\n    startedlist = False\n    for i in np.arange(simpleBond.shape[0]):\n        start, end = simpleBond[i,:]\n        if 
(start, end) in rembonds:\n            continue\n        if start in remnodes or end in remnodes:\n            continue\n\n        nextnewnode = LEAF.Vertex.shape[0]\n        xy = np.array(e['geometry'][i]['coordinates'])\n        if xy.shape[0] == 2:\n            # initialize the arrays on the first edge, otherwise append\n            if not startedlist:\n                LEAF.Bond = np.array([[start, end]])\n                C0 = np.array([simpleC0[i]])\n                startedlist = True\n            else:\n                LEAF.Bond = np.append(LEAF.Bond, [[start, end]], axis=0)\n                C0 = np.append(C0, [simpleC0[i]])\n            continue\n        if xy.shape[0] >= 3*skip:\n            xy = xy[::skip,:]\n\n        newbonds = np.zeros((xy.shape[0]-2, 2), dtype='int')\n\n        newbonds[0,0] = start\n        newbonds[-1,1] = end\n\n        newbonds[1:,0] = np.arange(nextnewnode, nextnewnode+newbonds.shape[0]-1)\n        newbonds[:-1,1] = np.arange(nextnewnode, nextnewnode+newbonds.shape[0]-1)\n\n        if not startedlist:\n            LEAF.Bond = newbonds\n            C0 = np.repeat(simpleC0[i], newbonds.shape[0])\n            startedlist = True\n        else:\n            LEAF.Bond = np.append(LEAF.Bond, newbonds, axis=0)\n            C0 = np.append(C0, np.repeat(simpleC0[i], newbonds.shape[0]))\n        LEAF.Vertex = np.append(LEAF.Vertex, xy[1:-1], axis=0)\n\n    LEAF.Vertex -= np.amin(LEAF.Vertex, axis=0)\n    LEAF.Vertex *= 1e-3\n\n    lengths = np.linalg.norm(LEAF.Vertex[LEAF.Bond[:,0],:] - \\\n        LEAF.Vertex[LEAF.Bond[:,1],:], axis=1)\n\n    if file == 'Mississippi':\n        LEAF.Bond = LEAF.Bond[1:, :]\n\n    if file == 'Mississippi':\n        C0 = C0[1:]\n        lengths = lengths[1:]\n\n    if file == 'Niger':\n        LEAF, C0, lengths = crop_LEAF(\n            LEAF, C0, lengths, x=[-1, 115], y=[-1, 1e3])\n        LEAF.Vertex -= np.amin(LEAF.Vertex, axis=0)\n\n    if file == 'Yenisei':\n        LEAF, C0, lengths = crop_LEAF(\n            LEAF, C0, lengths, x=[-1, 80], y=[20, 180])\n        LEAF.Vertex -= np.amin(LEAF.Vertex, axis=0)\n\n    if file == 'Yukon':\n        LEAF, C0, lengths = crop_LEAF(\n            LEAF, C0, lengths, x=[-1, 1e3], y=[36.75, 1e3])\n        LEAF.Vertex -= np.amin(LEAF.Vertex, axis=0)\n\n    if file == 'Colville':\n        LEAF, C0, lengths = crop_LEAF(\n            LEAF, C0, lengths, x=[-1, 1e3], y=[8, 1e3])\n        LEAF.Vertex -= np.amin(LEAF.Vertex, axis=0)\n\n    if file == 'Apalachicola':\n        LEAF, C0, lengths = crop_LEAF(LEAF, C0, lengths, y=[0,13.5])\n\n    sources = [1, 7, 52, 31, 17, 97, 20, 593, 196, 336, 1898, 268, 56, 289, 49, 0, 124, 37]\n    sources = {x[0]: [x[1]] for x in zip(deltas,sources)}\n\n    with open('shp/sinks.p','rb') as f:\n        sinks = {x[0]: x[1] for x in zip(deltas,pickle.load(f))}\n    sinks['Wax'] = [10]\n    #b = sinks['Niger']\n    #sinks['Barnstable'] = b\n    #sinks['Niger'] = [1]\n\n    sinks['Barnstable'] = [27]\n    sources['Barnstable'] = [28]\n\n    for d in deltas:\n        # sources values are one-element lists here, so remove the node id\n        try: sinks[d].remove(sources[d][0])\n        except ValueError: pass\n        except KeyError: pass\n\n    firstbondind = np.where(np.any(LEAF.Bond == sources[file], axis=1))\n    cscale = np.amax(C0[firstbondind])\n    if file == 'Wax':\n        cscale = 1000**3\n    C0 /= cscale\n    lengths /= cscale**(1/3)\n\n    LEAF.RidgeLengths = np.ones(LEAF.Bond.shape[0])\n    LEAF.CellAreas = np.ones(LEAF.Bond.shape[0])\n\n    LSHP = LFCLSS.LeafShape('from data', 0)\n\n    hull = ConvexHull(LEAF.Vertex)\n    LSHP.polyedge = LEAF.Vertex[hull.vertices, :]\n\n    if sinks is None:\n        sinks = [LEAF.Vertex.shape[0] - 1]\n\n    delta = DeltaNetwork(LEAF, LSHP, 0, 0, C0=C0,\n                         fluctuations='river', basins='generate',\n                         sourceinds=sources[file], sinkinds=sinks[file])\n\n    delta.bond_lens = lengths\n\n    return delta\n\ndef getsinks():\n    sinks = []\n    for k in deltas:\n        print(k)\n        a = read_json(k)\n        if isinstance(a, str):\n            sinks.append([])\n            continue\n\n        ax = plt.subplot(111)\n        a.to_networkx(thr=1e-10)\n\n        pos = nx.get_node_attributes(a.G, 'pos')\n        nx.draw_networkx(a.G, pos, with_labels=False, node_size=6,\n                         node_color='black', ax=ax)\n        x = [d[0] for d in a.G.degree if d[1]==1]\n        sinks.append(x)\n        #SG = a.G.subgraph(x)\n        #nx.draw_networkx(SG, pos, with_labels=True, 
node_size=6,\n        #                 node_color='black', ax=ax)\n        #a.drawspecial()\n        #plt.show()\n        plt.clf()\n\n    with open('shp/sinks.p','wb') as f:\n        pickle.dump(sinks, f)\ndef getsources():\n    for k in deltas:\n        print(k)\n        a = read_json(k)\n        if isinstance(a, str):\n            continue\n        a.to_networkx(thr=1e-10)\n\n        pos = nx.get_node_attributes(a.G, 'pos')\n        nx.draw_networkx_labels(a.G, pos)\n        x = [d[0] for d in a.G.degree if d[1]==1]\n        #SG = a.G.subgraph(x)\n        #nx.draw_networkx(SG, pos, with_labels=True)\n        a.plot(alpha=True)\n        plt.show()\n\nif __name__ == '__main__':\n    \"\"\" Example code \"\"\"\n\n    # Load and show one of the deltas\n    a = read_json('Lena')\n    a.plot()\n    plt.show()\n\n\n    # simulate a delta\n    a = DeltaNetwork.make_river(1, density=80, shape='triangle')\n    a.simulate()\n    a.plot()\n    plt.show()\n", "id": "10780883", "language": "Python", "matching_score": 9.12685775756836, "max_stars_count": 0, "path": "xylem.py" }, { "content": "import numpy as np\nfrom scipy.spatial import Voronoi, voronoi_plot_2d\nimport LEAFclass as LFCLSS\nimport matplotlib.pyplot as plt\nimport matplotlib.path as pltth\n\n\n#INCLUDED FUNCTIONS\n# Create_networkFO\n# leafshape\n# neighborsF\n# refineF\n# displayNetworkF\n\ndef polygon_area(coords):\n    \"\"\" Return the area of a closed polygon\n    \"\"\"\n    Xs = coords[:,0]\n    Ys = coords[:,1]\n\n    # Ignore orientation\n    return 0.5*abs(sum(Xs[:-1]*Ys[1:] - Xs[1:]*Ys[:-1]))\n\ndef replace_nan_by_avg(ar):\n    # replace all nans by the average of the rest\n    avg = ar[np.isfinite(ar)].mean()\n    ar[np.isnan(ar)] = avg\n\n    return ar\n\ndef Create_networkFO(leafName, density, lattice, LSHP, yplot=False, \\\n    angle=np.pi/3., noise=0.0, zoom_factor=1.0, shapeplot=False, stats=False,\n    trimming_percentile=100, ):\n    \"\"\" Create a new network with given name, size, lattice type etc...\n    noise is added as a percentage of mean bond length\n    to the triangular and square lattices\n    Parameters:\n        leafName: LEAF class name\n        density: Proportional to square root of number of nodes\n        lattice: how nodes are distributed in the plane\n        LSHP: Leaf shape (Topology class) instance\n        yplot: option to plot Voronoi tesselation\n        angle: only used for lattice = 'triangle'\n        noise: determines how much nodes will differ from the lattice\n        zoom_factor: ??\n        shapeplot: option to plot leaf shape in its own plot\n        stats: prints initial number of nodes and bonds\n        trimming_percentile: bond length percentile above which the bonds are\n
Critical for square leafshape so there are not long\n edges along the leaf shape boundary.\n \"\"\"\n def verts_plt_path(vertex):\n \"\"\" Returns a matplotlib Path object describing the polygon defined\n by vertices.\n \"\"\"\n # Set up polygon\n verts = np.zeros((vertex.shape[0] + 1, vertex.shape[1]))\n verts[:-1,:] = vertex\n #verts[-1,:] = cycle.coords[0,:]\n\n codes = pltth.Path.LINETO*np.ones(verts.shape[0])\n codes[0] = pltth.Path.MOVETO\n codes[-1] = pltth.Path.CLOSEPOLY\n\n return pltth.Path(verts, codes)\n\n LEAF = LFCLSS.Topology(leafName, lattice)\n\n if lattice == 'yjunc':\n Mnei = 2\n X = np.linspace(-1.5, 1.5, num=density)\n Y = np.zeros(density)\n\n LEAF.height = X[1] - X[0]\n\n Y2 = np.arange(LEAF.height, 1, LEAF.height)\n X2 = X[int(len(X)/3)]*np.ones(len(Y2))\n\n maxlength = LEAF.height*1.01\n VertexM = np.zeros((density + len(Y2), 2))\n VertexM[:, 0] = np.concatenate((X, X2))\n VertexM[:, 1] = np.concatenate((Y, Y2))\n\n elif lattice == 'xjunc':\n Mnei = 2\n X = linspace(-1.5, 1.5, num=density)\n Y = np.zeros(density)\n\n LEAF.height = X[1] - X[0]\n\n Y2 = arange(LEAF.height, 1, LEAF.height)\n X2 = X[len(X)/3]*np.ones(len(Y2))\n\n Y3 = arange(-LEAF.height, -1, -LEAF.height)\n X3 = X[len(X)/3]*np.ones(len(Y3))\n\n maxlength = LEAF.height*1.01\n VertexM = np.zeros((density + len(Y2) + len(Y3), 2))\n VertexM[:, 0] = concatenate((X, X2, X3))\n VertexM[:, 1] = concatenate((Y, Y2, Y3))\n\n elif lattice == 'hjunc':\n Mnei = 2\n X = linspace(-1.5, 1.5, num=density)\n Y = np.zeros(density)\n\n LEAF.height = X[1] - X[0]\n\n Y2 = arange(LEAF.height, 1, LEAF.height)\n X2 = X[len(X)/3]*np.ones(len(Y2))\n\n Y3 = arange(-LEAF.height, -1, -LEAF.height)\n X3 = X[len(X)/3]*np.ones(len(Y3))\n\n Y4 = arange(LEAF.height, 1, LEAF.height)\n X4 = X[len(X)/3 + 4]*np.ones(len(Y4))\n\n Y5 = arange(-LEAF.height, -1, -LEAF.height)\n X5 = X[len(X)/3 + 4]*np.ones(len(Y5))\n\n maxlength = LEAF.height*1.01\n VertexM = np.zeros((density + len(Y2) + len(Y3) + len(Y4) + len(Y5), 2))\n VertexM[:, 0] = concatenate((X, X2, X3, X4, X5))\n VertexM[:, 1] = concatenate((Y, Y2, Y3, Y4, Y5))\n\n # Generate Lattice\n elif lattice == 'random':\n \"\"\" We generate a lattice from Delaunay triangulation\n of random points on the plane\n \"\"\"\n n_points = int(0.5*density**2)\n VertexM = np.random.random((n_points, 2))*2 + np.array([-1,-1])\n\n LEAF.height = max(VertexM[:,1]) - min(VertexM[:,1])\n\n maxlength = None\n\n elif lattice == 'triangle':\n x, y = np.meshgrid(np.linspace(-1,1,int(np.sqrt(density))),\n np.linspace(-1,1,int(np.sqrt(density))))\n x[::2, :] += (x[0,1] - x[0,0])/2\n\n if noise > 0.0:\n # move positions around randomly\n x += noise*3.2/density*(2*np.random.random(x.shape) - 1)\n y += noise*3.2/density*(2*np.random.random(y.shape) - 1)\n\n VertexM[:,0] = x.flatten()\n VertexM[:,1] = y.flatten()\n\n \"\"\"Mnei = 6;\n nrows = density + 2;\n ncolu = density + 2;\n\n X = np.zeros(nrows*ncolu);\n Y = np.zeros(nrows*ncolu);\n\n awidth = 3./nrows; #advice: adjust the spacing according to nrows\n aheight = np.sin(angle)*awidth\n\n Xmax = awidth/2. * ncolu\n X[0] = -Xmax\n Xmin = -Xmax\n Ymax = aheight/2. 
* nrows\n Y[0] = -Ymax\n Ymin = -Ymax\n\n LEAF.height = aheight;\n\n c=0;\n for nr in range(nrows):\n if np.mod(nr,2)==0:\n for nc in range(ncolu):\n\n Y[c]=Y[0]+((nr)*aheight);\n X[c]=X[0]+((nc)*awidth);\n c=c+1;\n ##end\n else:\n for nc in range(ncolu-1) :\n\n Y[c]=Y[0]+((nr)*aheight);\n X[c]=X[0]+((nc+1./2.)*awidth);\n c=c+1;\n ##end\n #last value of c here reflectc total length\n X[c-nrows+1:c]=X[c-1:c-nrows:-1].copy();#X(c-nrows+2:c)=fliplr(X(c-nrows+2:c));\n Y[c-nrows+1:c]=Y[c-1:c-nrows:-1].copy();#Y(c-nrows+2:c)=fliplr(Y(c-nrows+2:c));\n ##end\n ##end\n\n X = X[:c]\n Y = Y[:c]\n\n if noise > 0.0:\n # move positions around randomly\n X += noise*awidth*(2*np.random.random(X.shape) - 1)\n Y += noise*awidth*(2*np.random.random(Y.shape) - 1)\n\n # maximum bond length we allow in this network\n maxlength = awidth*(1.01 + 2*noise);\n\n VertexM= np.zeros((c,2))\n del c\n VertexM[:,0]= X\n VertexM[:,1]= Y\"\"\"\n\n elif lattice == 'line':\n Mnei = 2\n X = linspace(-1.5, 1.5, num=density)\n Y = np.zeros(density)\n\n LEAF.height = X[1] - X[0]\n\n maxlength = LEAF.height*1.01\n VertexM = np.zeros((density, 2))\n VertexM[:, 0] = X\n VertexM[:, 1] = Y\n\n elif lattice == 'square':\n x = np.linspace(-1, 1, density)\n y = np.linspace(-1, 1, density)\n Nnodes_y = y.size\n\n maxlength = (x[1] - x[0])*(1.01 + 2*noise)\n\n x, y = [a.flatten() for a in np.meshgrid(x,y)]\n\n if noise > 0.0:\n # move positions around randomly\n x += noise*3.2/density*(2*np.random.random(x.shape) - 1)\n y += noise*3.2/density*(2*np.random.random(y.shape) - 1)\n\n VertexM = np.array([x, y]).T\n\n elif lattice == 'rect':\n Mnei = 4\n\n x = linspace(0, 2.5, density)\n y = linspace(-1.05, 1.05, 2*density)\n\n maxlength = (x[1] - x[0])*1.01\n\n X, Y = meshgrid(x, y)\n\n X = reshape(X, (2*density**2, 1))\n Y = reshape(Y, (2*density**2, 1))\n\n x = X[:,0]\n y = Y[:,0]\n\n VertexM = np.array([x, y]).T\n\n else:\n # load lattice from text file\n VertexM = loadtxt(lattice, delimiter=',')\n n_points = VertexM.shape[0]\n\n VertexM *= 2.42\n VertexM += np.array([1.2, 0])\n\n LEAF.height = max(VertexM[:,1]) - min(VertexM[:,1])\n\n VertexM *= zoom_factor\n\n maxlength = None\n\n #VertexM[:,0] -= min(VertexM[:,0]);\n #VertexM[:,1] -= np.mean(VertexM[:,1]);\n\n xyleaf = leafshape(LSHP, shapeplot);\n\n # change com of leafshape to mid node of\n # network if leafshape is a circle.\n '''if LSHP.comment == 'circle' or LSHP.comment == 'hexagon':\n com = VertexM.mean(axis=0)\n central_i = np.argmin(np.linalg.norm(VertexM - com, axis=1))\n central = VertexM[central_i]\n\n lshape = xyleaf.T\n lshape -= lshape[:-1,:].mean(axis=0)\n lshape += central\n\n xyleaf = lshape.T'''\n\n # remove vertices that are outside of the shape\n\n # Voronoi tesselation gives bonds directly\n vor = Voronoi(VertexM)\n BondM = vor.ridge_points.copy()\n\n # nxutils are deprecated\n path = verts_plt_path(xyleaf.T)\n Kall = path.contains_points(VertexM)\n\n orig_indices = np.where(Kall)[0]\n RestVertices = np.where(np.logical_not(Kall))[0]\n VertexM = VertexM[Kall,:]\n\n # remove all bonds that connect to removed vertices\n BondM = BondM[Kall[BondM[:,0]] & Kall[BondM[:,1]], :]\n\n # update indices\n\n # this map is the inverse of orig_indices\n #index_map = -np.ones(Kall.shape)\n #index_map[orig_indices] = arange(orig_indices.shape[0])\n\n # equivalent to the above but shorter\n new_indices = np.cumsum(Kall) - 1\n\n BondM[:,0] = new_indices[BondM[:,0]]\n BondM[:,1] = new_indices[BondM[:,1]]\n\n #remove outer higher length\n vecX = np.zeros(BondM.shape)\n vecX[:,0] = 
VertexM[BondM[:,0],0]\n vecX[:,1] = VertexM[BondM[:,1],0]\n vecY = np.zeros(BondM.shape)\n vecY[:,0] = VertexM[BondM[:,0],1]\n vecY[:,1] = VertexM[BondM[:,1],1]\n\n lens = np.sqrt(np.diff(vecX)**2 + np.diff(vecY)**2)\n if maxlength == None:\n maxlength = np.percentile(lens, trimming_percentile)\n\n K = (lens <= maxlength);\n\n BondM = BondM[np.squeeze(K),:].copy();\n\n Np2 = VertexM.shape[0]; #actual number of nodes\n LBondsM = BondM.shape[0]\n if stats:\n print('Number of initial nodes: %d' % Np2)\n print('Number of initial bonds: %d' % LBondsM)\n #BondM = sort(BondM, axis=1)\n\n #construct neighbor list\n # We never need this and it's buggy!\n #NeighM = None#neighborsF(BondM,Mnei,Np2)\n\n #figure out which bond belongs to which Voronoi ridge\n ridge_lens = np.zeros(BondM.shape[0])\n for i, (u, v) in enumerate(BondM):\n u, v = orig_indices[u], orig_indices[v]\n\n ridge_inds = np.where(np.all(vor.ridge_points == [u, v], axis=1))[0]\n\n if ridge_inds.size == 0:\n ridge_lens[i] = nan\n print( \"Error: triangulation bond not in original voronoi tesselation\")\n continue\n\n ridge_ind = ridge_inds[0]\n\n # find length of Voronoi ridge\n ridge_verts = vor.ridge_vertices[ridge_ind]\n\n if -1 in ridge_verts:\n # one is infinity, length is undefined\n ridge_lens[i] = np.nan\n else:\n ridge_lens[i] = np.linalg.norm(vor.vertices[ridge_verts[0]]\n - vor.vertices[ridge_verts[1]])\n\n ridge_lens = replace_nan_by_avg(ridge_lens)\n\n # figure out what the area of each Voronoi cell is\n cell_areas = np.zeros(VertexM.shape[0])\n for i in range(VertexM.shape[0]):\n region = vor.point_region[orig_indices[i]]\n region_verts = vor.regions[region]\n\n if -1 in region_verts:\n cell_areas[i] = np.nan\n else:\n cell_verts = vor.vertices[region_verts]\n # make polygon closed\n cell_verts = np.vstack((cell_verts, cell_verts[0,:]))\n cell_areas[i] = polygon_area(cell_verts)\n\n cell_areas = replace_nan_by_avg(cell_areas)\n\n # find leftmost vertex and make it the zeroth one\n tempm = min(VertexM[:,0]);\n imin = np.argmin(VertexM[:,0]);\n Kleft = np.nonzero(abs(tempm -VertexM[:,0]) < 1e-6)\n isortleft = np.argsort(VertexM[Kleft[0],1]);\n\n mid_elem = isortleft[int(len(isortleft)/2.0)]\n imin = [Kleft[0][mid_elem]]\n\n # swap vertices\n VertexM[imin,:], VertexM[0, :] = VertexM[0,:], VertexM[imin,:]\n cell_areas[imin], cell_areas[0] = cell_areas[0], cell_areas[imin]\n\n # swap Bonds\n zero_entries = (BondM == 0)\n min_entries = (BondM == imin[0])\n\n BondM[zero_entries] = imin[0]\n BondM[min_entries] = 0\n\n # sort Bonds\n BondM.sort(axis=1)\n\n #set structure\n LEAF.Vertex = VertexM\n LEAF.RestVertices = RestVertices\n LEAF.Bond = BondM\n LEAF.RidgeLengths = ridge_lens\n LEAF.CellAreas = cell_areas\n LEAF.Voronoi = vor\n\n #plot (optional)\n if yplot:\n voronoi_plot_2d(vor)\n plt.plot(xyleaf[0,:], xyleaf[1,:])\n\n plt.show()\n\n return LEAF\n\n## --------------------------------------------------------\n\ndef leafshape(LSHP, plot=False):\n\n #theta = linspace(0,np.pi, round(np.pi/0.01));\n\n ##width = LSHP.width ;\n ##nsharp = LSHP.nsharp; %small-->sharp leaves\n ##basewidth = LSHP.basewidth;\n ##baselength = LSHP.baselength;\n ##aspectr = LSHP.aspectr; %large->long leaves\n ##polyedge = LSHP.polyedge;\n ##q = LSHP.q;\n\n if len(LSHP.polyedge)==0:\n print ('does not work for python if polyedge is empty')\n ## r = (1.-LSHP.aspectr*abs(2.*(theta-np.pi/2.)/np.pi))*exp(-(abs((mod(LSHP.q*theta,np.pi))-np.pi/2.)/LSHP.width)**nsharp );\n ##\n ## [x,y] = pol2cart(theta,r);\n ## y = y+LSHP.baselength;\n ##\n ## y = [0 y 0];\n 
## x = [LSHP.basewidth x -LSHP.basewidth];\n else:\n x = LSHP.polyedge[0,:]\n y = LSHP.polyedge[1,:]\n\n #end\n\n if plot:\n plt.plot(x,y)\n plt.show()\n\n xy = np.vstack((x,y))\n\n return xy\n\n## --------------------------------------------------------\n\"\"\"\nfing neighbors from bonds, or bonds from neighbors\n\"\"\"\ndef neighborsF(InputM,Mnei,Np2):\n\n\n if InputM.shape[1]==2:\n\n BondM = InputM.copy()\n NeighM = np.zeros((Np2,Mnei))\n for jc in xrange(Np2):\n K1 = nonzero(jc == BondM[:,0]);\n K2 = nonzero(jc == BondM[:,1]);\n\n neigh0 = hstack((BondM[K1[0],1], BondM[K2[0],0]))\n if len(neigh0)!=Mnei:\n neigh = hstack((neigh0, -1*np.ones(Mnei-len(neigh0)) ))\n else:\n neigh = neigh0\n #end\n NeighM[jc,:] = neigh\n #end\n OutputM = NeighM.copy()\n else:\n NeighM = InputM.copy()\n BondM=[];\n\n for jc in xrange(len(NeighM)):\n neigh = NeighM[jc,:];\n neigh = neigh[neigh>jc];\n dBondM = hstack((tile(jc,(len(neigh),1)), neigh));\n BondM = np.vstack((BondM, dBondM))\n #end\n BondM = sort(BondM,axis=1);\n OutputM = BondM;\n\n\n return OutputM\n\n## --------------------------------------------------------\n\"\"\"\n% subdivide network\n%DOES NOT WORK WITH PERIODIC BC\n\"\"\"\ndef refineF(LEAF,vectorConductivity,gam,condcheck):\n\n LEAF2 = LFCLSS.Topology(LEAF.comment+'refined')\n #initial network\n BondM = LEAF.Bond;\n VertexM = LEAF.Vertex;\n NeighM = LEAF.Neigh;\n\n Mnei = NeighM.shape[1];\n Np2 = len(VertexM);\n LBond = len(BondM)\n\n if condcheck=='areole':\n print ('python not yet coded for areole')\n #Nsources sources are in the end\n ## NSources = LEAF.NSources ;\n ## Ksource = Np2-(NSources-1:-1:0);\n ## Kcon = cell(NSources,1);\n ## K = [];\n ## for js = 1:length(Ksource)\n ## [Kr nc] = find(BondM==Ksource(js));\n ## tmp = BondM(Kr,:);tmp(tmp == Ksource(js)) = [];\n ## Kcon{js} = tmp';\n ## K = [K; Kr];\n ## end\n ## LBEnd = length(K) ;\n else:\n LBEnd = 0;\n NSources = 0;\n #end\n\n #%%%%%%%BODY\n ## indices = BondM[1:end-LBEnd,1)+(BondM(1:end-LBEnd,2)-1)*Np2;\n ## condvec = LEAF.Conductivity(indices);\n condvec = vectorConductivity[:BondM.shape[0]-LBEnd].copy()\n\n #each bond in body will now be split to 2\n le = BondM.shape[0]-LBEnd;\n eBond = np.zeros((LBond-LBEnd+le,2))\n eBond[:,0] = hstack((BondM[0:LBond-LBEnd,0], arange(Np2-NSources, le+Np2-NSources)))\n eBond[:,1] = hstack((BondM[0:LBond-LBEnd,1], arange(Np2-NSources, le+Np2-NSources)))\n econdvec = hstack((condvec, condvec));\n\n\n #one new point per bond\n dVertex = np.zeros((LBond-LBEnd,2))\n dVertex[:,0] = (VertexM[BondM[:LBond-LBEnd,0], 0] + VertexM[BondM[:LBond-LBEnd,1], 0] )/2.\n dVertex[:,1] = (VertexM[BondM[:LBond-LBEnd,0], 1] + VertexM[BondM[:LBond-LBEnd,1], 1] )/2.;\n VertexN = np.vstack((VertexM[:Np2-NSources,:], dVertex)); #will add Sources later\n\n #find connectivity\n #dum, BondN, FacetMp, dum = MtDLN.delaunay(VertexN[:,0],VertexN[:,1])\n triang = Triangulation(VertexM[:,0], VertexM[:,1])\n BondN = triang.edges\n\n #sort BondN\n BondN = sort(BondN, axis=1)\n\n tempdv =np.sqrt( ( VertexN[BondN[:,0],0]-VertexN[BondN[:,1],0] )**2 + \\\n ( VertexN[BondN[:,0],1]-VertexN[BondN[:,1],1] )**2);\n\n maxlength = 1.0001* min(tempdv);\n\n #remove outer higher length bonds\n vecX = np.zeros(BondN.shape)\n vecX[:,0] = VertexN[BondN[:,0],0]\n vecX[:,1] = VertexN[BondN[:,1],0]\n vecY = np.zeros(BondN.shape)\n vecY[:,0] = VertexN[BondN[:,0],1]\n vecY[:,1] = VertexN[BondN[:,1],1]\n K = np.sqrt(np.diff(vecX)**2+np.diff(vecY)**2)<=maxlength;\n\n BondN = BondN[np.squeeze(K),:].copy();\n\n Np2N = VertexN.shape[0]; #actual number 
of nodes\n LBondsN = BondN.shape[0]\n BondN = np.sort(BondN, axis=1)\n\n\n #construct neighbor list\n #NeighM = neighborsF(BondM,Mnei,Np2);\n #-----------------------\n\n\n\n\n #%New conductivity\n #% [max(eBond(:)) Np2N NSources]\n CResN = np.zeros((Np2N,Np2N));\n # integer index tuples (zip objects cannot be used as fancy indices in Python 3)\n indicesN = tuple(eBond.astype(int).T)\n\n CResN[indicesN] = econdvec\n indicesA = tuple(BondN.T)\n\n temp = -np.log(np.random.rand(len(BondN)));\n CResN[indicesA] = CResN[indicesA] + temp/max(temp) * scipy.stats.gmean(econdvec[econdvec!=0])+0;\n\n CResN = CResN+CResN.T\n CResN = CResN/np.sum(CResN**gam)**(1/gam);\n vectorConductivityNew = CResN[tuple(BondN.T)]\n\n if condcheck == 'areole':\n print('not set up for areole')\n ## %%%%%%%EDGE\n ## VertexN = [VertexN; VertexM(Np2-NSources+1:Np2,:)];\n ## CResN = [CResN np.zeros(size(CResN,1),NSources)];\n ## CResN = [CResN; np.zeros(NSources,size(CResN,2))];\n ##\n ## mxK = Mnei;\n ## for js =1:NSources\n ## BondM1 = [Kcon{js} (Np2N+js)*np.ones(length(Kcon{js}),1)];\n ## BondN = [BondN; BondM1];\n ##\n ## CKmC1 = max(CResN(BondM1(:,1),:),[],2);\n ## CResN(Kcon{js}, Np2N+js) = CKmC1;\n ## mxK = max(mxK, length(Kcon{js}));\n ## end\n ##\n ##\n ## % assignin('base', 'CResN', CResN);\n ## % assignin('base', 'CKmC1', CKmC1);\n ##\n ##\n ##\n ##\n ##\n ## CResN = CResN+CResN';\n ## CResN = CResN/sum(sum(CResN.^gam))^(1/gam);\n ##\n ## Np2N = length(VertexN); %actual number of nodes NEW\n ##\n ## NeighN = neighborsF(BondN,mxK,Np2N);\n ## %%%%%%%%%\n ##\n ##\n ## %set structure\n ## LEAF2.Vertex = VertexN;\n ## LEAF2.Neigh = NeighN;\n ## LEAF2.Bond = BondN;\n ## LEAF2.Conductivity = CResN;\n ## LEAF2.gamma = gam;\n\n else:\n NeighN = neighborsF(BondN,Mnei,Np2N);\n LEAF2.Vertex = VertexN;\n LEAF2.Neigh = NeighN;\n LEAF2.Bond = BondN;\n LEAF2.gamma = gam;\n\n\n #end\n\n\n return LEAF2, vectorConductivityNew\n\n##------------------------------------------------------------\n\"\"\"\n% displayNetworkF.m VERSION V.0.0\n% <NAME>, Dec 2010\n\n% This function plots the widths of each link\n\n% INPUT: TOPOL: structure with the following fields\n% x: vertex coordinate list\n% fld: diffusion coeff of every bond\n\n% magn: magnification of plot\n% funchnd: function handle that rescales d for plotting\n% funchcolor\n% domiffing\n\n% OUTPUT: plot CIRCUIT\n\"\"\"\n\ndef displayNetworkF(*varargin):\n #print 'TOPOL, magnitude, funchnd, plotted field'\n\n TOPOL = varargin[0];\n # defaults\n magn = 1;\n funchnd = lambda x: x/max(x)\n fld = np.ones(len(TOPOL.Bond));\n funchcolor= funchnd;\n domiffing =0;\n dorotate = 0;\n Col = np.array([1.,1.,1.]);\n concentrations = None\n currents = None\n\n\n numelVarargin = len(varargin);\n if numelVarargin ==1:\n pass\n elif numelVarargin ==2:\n magn = varargin[1];\n elif numelVarargin ==3:\n magn = varargin[1];\n funchnd = varargin[2];\n elif numelVarargin ==4:\n magn = varargin[1];\n funchnd = varargin[2];\n fld = varargin[3];\n elif numelVarargin ==5:\n magn = varargin[1];\n funchnd = varargin[2];\n fld = varargin[3];\n funchcolor= varargin[4];\n elif numelVarargin ==6:\n magn = varargin[1];\n funchnd = varargin[2];\n fld = varargin[3];\n funchcolor= varargin[4];\n domiffing =varargin[5];\n elif numelVarargin ==7:\n magn = varargin[1];\n funchnd = varargin[2];\n fld = varargin[3];\n funchcolor= varargin[4];\n domiffing =varargin[5];\n dorotate = varargin[6];\n elif numelVarargin ==8:\n magn = varargin[1];\n funchnd = varargin[2];\n fld = varargin[3];\n funchcolor= varargin[4];\n domiffing =varargin[5];\n dorotate = varargin[6];\n Col = varargin[7];\n elif numelVarargin == 9:\n magn = 
varargin[1];\n funchnd = varargin[2];\n fld = varargin[3];\n funchcolor= varargin[4];\n domiffing =varargin[5];\n dorotate = varargin[6];\n Col = varargin[7];\n concentrations = varargin[8]\n elif numelVarargin == 10:\n magn = varargin[1];\n funchnd = varargin[2];\n fld = varargin[3];\n funchcolor= varargin[4];\n domiffing =varargin[5];\n dorotate = varargin[6];\n Col = varargin[7];\n concentrations = varargin[8]\n currents = varargin[9]\n\n if not(funchnd):\n funchnd = lambda x: x/max(x)\n\n\n if not(funchcolor):\n funchcolor = funchnd;\n\n if len(fld)==0:\n fld = TOPOL.vectorConductivity;\n\n x = TOPOL.Vertex.copy();\n if dorotate:\n xtemp = x.copy()\n x[:,1] = xtemp[:,0].copy()\n x[:,0] = xtemp[:,1].copy()\n del xtemp\n\n Bonds = TOPOL.Bond ;\n\n if len(fld)!=len(Bonds):\n print ('Hm, check vConductivity versus LEAF')\n\n pd = funchnd(fld);\n pdCol = funchcolor(fld);\n\n if concentrations is None:\n for i in range(len(Bonds)):\n\n if pd[i]!=0:\n dx = np.hstack((x[Bonds[i,0],0] , x[Bonds[i,1],0] ))\n dy = np.hstack((x[Bonds[i,0],1] , x[Bonds[i,1],1] ))\n plt.plot(dx, dy,linewidth=magn*pd[i], color = tuple( Col*(1. -pdCol[i])))\n\n if domiffing:\n dx = np.hstack((x[Bonds[i,0],0] , x[Bonds[i,1],0] ))\n dy = np.hstack((x[Bonds[i,0],1] , x[Bonds[i,1],1] ))\n\n plt.plot(dx, dy, marker='.',markersize=2*magn*pd[i],color = tuple( Col*(1. -pdCol[i])))\n\n if currents is not None:\n # Arrows indicating current direction\n if currents[Bonds[i,0], Bonds[i,1]] > 0:\n plt.arrow(x[Bonds[i,1],0], x[Bonds[i,1],1], \\\n x[Bonds[i,0],0]-x[Bonds[i,1],0], x[Bonds[i,0],1]-x[Bonds[i,1],1])\n elif currents[Bonds[i,0], Bonds[i,1]] < 0:\n plt.arrow(x[Bonds[i,0],0], x[Bonds[i,0],1], \\\n x[Bonds[i,1],0]-x[Bonds[i,0],0], x[Bonds[i,1],1]-x[Bonds[i,0],1])\n\n\n # Plot concentrations\n else:\n plt.scatter(x[:,0], x[:,1], c=concentrations, s=70, cmap=plt.cm.jet)\n plt.colorbar()\n\n for i in range(len(Bonds)):\n if currents is not None:\n # Arrows indicating current direction\n if currents[Bonds[i,0], Bonds[i,1]] > 1e-12:\n plt.arrow(x[Bonds[i,1],0], x[Bonds[i,1],1], \\\n (x[Bonds[i,0],0]-x[Bonds[i,1],0])/2, (x[Bonds[i,0],1]-x[Bonds[i,1],1])/2, \\\n linewidth=abs(currents[Bonds[i,0], Bonds[i,1]])**0.5, head_width=0.025)\n elif currents[Bonds[i,0], Bonds[i,1]] < -1e-12:\n plt.arrow(x[Bonds[i,0],0], x[Bonds[i,0],1], \\\n (x[Bonds[i,1],0]-x[Bonds[i,0],0])/2, (x[Bonds[i,1],1]-x[Bonds[i,0],1])/2, \\\n linewidth=abs(currents[Bonds[i,0], Bonds[i,1]])**0.5, head_width=0.025)\n\n\n ax = plt.gca()\n ax.set_aspect('equal')\n ax.set_ylim([-1.3,1.3])\n\n\n return 0\n", "id": "1349886", "language": "Python", "matching_score": 3.8865268230438232, "max_stars_count": 0, "path": "InitFunctions.py" }, { "content": "import time\nimport numpy as np\nfrom matplotlib.collections import LineCollection\nimport scipy.optimize\nfrom scipy.interpolate import griddata\nimport matplotlib.cm as cm\nimport matplotlib.pyplot as plt\ntry:\n from itertools import izip as zip\nexcept ImportError: # will be 3.x series\n pass\nimport networkx as nx\n\ndef plotter(fn):\n \"\"\"\n A decorator that cleans the matplotlib figure given as\n the keyword argument 'figure' of the decorated class method,\n calls the drawing function,\n and then asks matplotlib to draw.\n If no kwarg 'figure' is provided, a new figure is made.\n If the kwarg 'filename' is provided, plotting is done\n to the given file.\n \"\"\"\n def wrapped(*args, **kwargs):\n if 'figure' in kwargs:\n plt.figure(num=kwargs['figure'])\n del kwargs['figure']\n else:\n plt.figure()\n\n if 'filename' in kwargs:\n
filename = kwargs['filename']\n del kwargs['filename']\n else:\n filename = None\n\n plt.clf()\n plt.cla()\n\n ret = fn(*args, **kwargs)\n\n plt.title(args[0].param_str())\n\n if filename != None:\n plt.savefig(filename)\n else:\n plt.draw()\n plt.show()\n\n return ret\n\n return wrapped\n\nclass VascularNetwork(object):\n def __init__(self, LEAF, LSHP, size, C0=None, print_stats=False):\n \"\"\"\n Constructs a new vascular network from the given\n Topology instance LEAF.\n A vascular network is defined as a collection of\n vertices and bonds between them, which can have a\n given conductivity.\n If no conductivites are provided, they are set to zero.\n This is essentially a wrapper for easier access to the\n parameters.\n\n size is supposed to be the network size parameter.\n C0 is a vector of initial conductivities.\n\n if print_stats is True, prints out basic network\n statistics such as number of bonds and nodes\n \"\"\"\n super(VascularNetwork, self).__init__()\n\n # Geometric parameters\n self.LEAF = LEAF\n self.LSHP = LSHP\n\n self.bonds = LEAF.Bond.shape[0]\n self.verts = LEAF.Vertex.shape[0]\n\n # normalize all length scales\n self.bond_lens = self.bond_lengths()\n scale = self.bond_lens.max()\n\n self.bond_lens /= scale\n self.intersection_lens = self.LEAF.RidgeLengths/scale\n self.cell_areas = self.LEAF.CellAreas/scale**2\n\n # Initialize these variables so the can be used for caching\n self.bond_dists_sqr = None\n self.vert_dists_sqr = None\n\n # Computational buffers/results\n # self.Q = np.zeros((self.verts, self.verts))\n # self.q = np.zeros(self.verts)\n # self.CM = np.zeros((self.verts, self.verts))\n # self.p = np.zeros(self.verts)\n # self.G = np.zeros((self.verts, self.verts))\n # self.DpM = np.zeros((self.verts, self.verts))\n\n # save the bonds that neighbor the sinks\n '''self.ind_sink0 = [i for i in range(self.bonds) \\\n if self.LEAF.Bond[i,0] == 0]\n self.ind_sink1 = [i for i in range(self.bonds) \\\n if self.LEAF.Bond[i,1] == 0]'''\n\n self.size = size\n\n self.I_mat = self.incidence_matrix()\n self.I_mat_red = self.I_mat[:,1:]\n #self.triangles = self.triangle_list()\n #self.triang_mat = self.triangle_matrix()\n\n if C0 is not None:\n if len(C0) == self.bonds:\n self.C = C0\n else:\n raise Exception(\"Initial conductivities have wrong dimension!\")\n else:\n self.C = -np.log(np.random.random(self.bonds))\n\n if print_stats:\n print (\"---\")\n print (\"Initialized network.\")\n print (\"Vertices: {}\".format(self.verts))\n print (\"Bonds: {}\".format(self.bonds))\n print (\"---\")\n\n def incidence_matrix(self):\n \"\"\" Construct a sparse oriented incidence matrix from\n the Bond list\n \"\"\"\n nodes = list(self.LEAF.Bond[:,0]) + list(self.LEAF.Bond[:,1])\n edges = 2*np.array(range(self.bonds)).tolist()\n data = self.bonds*[1] + self.bonds*[-1]\n\n I_mat = scipy.sparse.coo_matrix((data, (edges, nodes)),\n shape=(self.bonds, self.verts))\n\n return I_mat.tocsc()\n\n def triangle_list(self):\n \"\"\"\n Return a list of triangles in the network\n \"\"\"\n triangles = set()\n blist = [list(x) for x in list(self.LEAF.Bond)]\n\n for u, v in blist:\n for w in range(self.verts):\n if sorted([u, w]) in blist and sorted([v, w]) in blist:\n triangles.add(tuple(sorted([u, w, v])))\n\n return list(triangles)\n\n def triangle_matrix(self):\n \"\"\" Construct and return the triangle matrix which has\n one row for each triangle and verts columns.\n For each triangle, all entries the correspond to the\n triangle vertices are 1, the rest 0.\n \"\"\"\n triangs = 
self.triangle_list()\n\n tri = np.repeat(range(len(triangs)), 3)\n nodes = np.array(triangs).flatten()\n\n data = ones(len(nodes))\n\n return scipy.sparse.coo_matrix((data, (tri, nodes)),\n shape=(len(triangs), self.verts)).tocsc()\n\n def C_matrix(self, C):\n \"\"\"\n Constructs the conductivity matrix from the conductivity vector,\n or equivalently any M_{ij} from a list of values\n on the bonds.\n \"\"\"\n # Make Cond into matrix\n CRes = scipy.sparse.coo_matrix((C, (self.LEAF.Bond[:,0],\n self.LEAF.Bond[:,1])), shape=(self.verts, self.verts))\n CRes = CRes.todense().copy()\n CRes = np.array(CRes + CRes.T)\n\n return CRes\n\n def C_matrix_sparse(self, C):\n \"\"\"\n Constructs the conductivity matrix from the conductivity vector,\n or equivalently any M_{ij} from a list of values\n on the bonds.\n \"\"\"\n # Make Cond into matrix\n CRes = scipy.sparse.coo_matrix((C, (self.LEAF.Bond[:,0],\n self.LEAF.Bond[:,1])), shape=(self.verts, self.verts))\n CRes = CRes + CRes.T;\n\n return CRes\n\n def C_matrix_asym(self, C):\n \"\"\"\n Constructs the conductivity matrix from the conductivity vector,\n or equivalently any M_{ij} from a list of values\n on the bonds.\n \"\"\"\n # Make Cond into matrix\n CRes = scipy.sparse.coo_matrix((C[:self.bonds],\n (self.LEAF.Bond[:,0], self.LEAF.Bond[:,1])), \\\n shape=(self.verts, self.verts))\n\n CRes2 = scipy.sparse.coo_matrix((C[self.bonds:],\n (self.LEAF.Bond[:,1], self.LEAF.Bond[:,0])), \\\n shape=(self.verts, self.verts))\n\n return np.array((CRes + CRes2).todense())\n\n def Q_matrix(self, CM, p):\n \"\"\"\n Constructs the Q matrix from the C matrix and\n the p vector via fast np operations.\n\n We have Q_{ij} = C_{ij} (p_j - p_i).\n \"\"\"\n\n DpM = np.repeat(p[np.newaxis, :], self.verts, axis=0)\n tmp = DpM - DpM.T\n DpM = tmp\n\n return CM*DpM\n\n def Q_matrix_asym(self, CM, p):\n \"\"\"\n Constructs the Q matrix from the C matrix and\n the p vector via fast np operations.\n\n We have Q_{ij} = C_{ij} p_j - C_{ji} p_i.\n \"\"\"\n #print (CM*p).flatten().nonzero()\n #print (CM*p[:,np.newaxis]).flatten().nonzero()\n CMT = CM.T.copy()\n Q = (2*CM + CMT)*p - (CM + 2*CMT)*p[:,np.newaxis]\n\n #Q = CM*p - CMT*p[:,np.newaxis]\n return Q\n\n def Q_vector(self, C, p):\n Q = C*(p[self.LEAF.Bond[:,0]] - p[self.LEAF.Bond[:,1]])\n return Q\n\n def Q_vector_asym(self, C, p):\n Q = C[:self.bonds]*p[self.LEAF.Bond[:,1]] - \\\n C[self.bonds:]*p[self.LEAF.Bond[:,0]]\n return concatenate((Q, -Q))\n\n def G_matrix(self, CM):\n \"\"\"\n Constructs the G matrix from the conductivity matrix\n \"\"\"\n tvec = np.sum(CM, axis=1)\n\n return diag(tvec, 0) - CM\n\n def G_matrix_sparse(self, CM):\n \"\"\"\n Constructs the G matrix from the conductivity matrix\n \"\"\"\n tvec = CM.sum(axis=0)\n\n return (scipy.sparse.spdiags(tvec, [0], tvec.size, tvec.size,\n format='coo') - CM)\n\n def R_matrix_sparse(self, CM):\n \"\"\"Returns resistances matrix, for resistance distance\"\"\"\n CM = CM.power(-1)\n tvec = CM.sum(axis=0)\n\n return (scipy.sparse.spdiags(tvec, [0], tvec.size, tvec.size,\n format='coo') - CM)\n\n def adjacency_matrix(self, thr=1e-10):\n \"\"\"\n Returns the unweighted adjacency matrix.\n conductivities smaller than threshold are discarded\n \"\"\"\n CM = self.C_matrix(self.C)\n\n return (CM > thr).astype(int)\n\n def adjacency_matrix_asym(self, thr=1e-10):\n \"\"\"\n Returns the unweighted adjacency matrix for the directed graph\n defined by self.LEAF.Bond. 
A_vu = 1 if there is a bond (uv)\n \"\"\"\n return scipy.sparse.coo_matrix((np.ones(self.LEAF.Bond.shape[0]),\n (self.LEAF.Bond[:,1], self.LEAF.Bond[:,0])),\n shape=(self.verts, self.verts))\n\n def laplacian_matrix(self):\n \"\"\"\n Returns the unweighted Laplacian matrix.\n Uses adjacency_matrix\n \"\"\"\n\n A = self.adjacency_matrix()\n\n return diag(np.sum(A, axis=0)) - A\n\n def effective_gamma(self):\n \"\"\"\n Calculates the effective scaling exponent gamma\n by least squares fitting the scaling relation.\n The exponent is defined by\n\n C \\sim Q^{2/(1+gamma)}.\n\n returns\n gamma: the (approximate) effective scaling exponent\n\n If the approximate scaling exponent cannot be calculated\n because there is no data (for whatever reason), returns\n zero.\n \"\"\"\n Qs = log(self.Q.copy()**2).flatten()\n Cs = log(self.C_matrix(self.C)).flatten()\n\n keep = logical_and(logical_and(logical_and(isfinite(Qs),\n isfinite(Cs)), Cs > -10), Qs > -10)\n Qk = Qs[keep]\n Ck = Cs[keep]\n\n if Qk.shape[0] == 0:\n return 0\n\n A = vstack([Qk, ones(len(Qk))]).T\n\n x, res, rank, s = linalg.lstsq(A, Ck)\n\n #print \"lstsq residuum:\", res[0]\n #plt.figure()\n #plt.plot(Qk, Ck, 'o')\n #plt.show()\n #raw_input()\n\n return 1.0/x[0] - 1.0\n\n def topology_data(self):\n \"\"\"\n Computes and returns\n\n number of topological vertices\n Strahler number\n bifurcation ratios\n ramification matrix\n\n We numerically check whether the network is a tree\n (actually, only if it has cycles). If it is not,\n None's are returned.\n \"\"\"\n A = self.adjacency_matrix()\n\n if not is_acyclic(A):\n return None, None, None, None\n\n orders = np.zeros(self.verts)\n biorders = np.zeros((self.verts, self.verts))\n\n S = strahler_traversal(A, orders, biorders)\n\n topol_n = np.sum(orders)\n\n max_ord = max(where(orders != 0)[0])\n\n bif = orders[1:max_ord]/orders[2:(max_ord+1)]\n\n ram = biorders[:(max_ord + 1),:(max_ord + 1)]\n\n di = diag_indices(max_ord + 1)\n diag_entries = np.zeros(max_ord + 1)\n diag_entries[1:] = diag(ram)[:-1]\n ram[di] = diag_entries\n ram[1:,:] /= orders[1:max_ord+1,None]\n\n return topol_n, S, bif, ram[1:,:]\n\n def degree_distribution(self):\n \"\"\"\n Calculates the degree distribution\n associated with the network graph.\n \"\"\"\n A = self.adjacency_matrix()\n\n D = np.sum(A, axis=0)\n\n l = max(D)\n dist = np.zeros(l)\n\n # We ignore non-connected lattice points\n for i in xrange(1, l):\n dist[i] = (D == i).sum()\n\n return dist/np.sum(dist)\n\n def degrees(self, thr=1e-4):\n \"\"\" Return the vector of unweighted degrees of the network\n \"\"\"\n # find threshold if network is a tree\n #thresh = sorted(self.C, reverse=True)[-self.verts+1]\n D = self.adjacency_matrix(thr=thr).sum(axis=0)\n\n return D\n\n def mean_degree(self):\n \"\"\" Return the mean degree of the network\n \"\"\"\n # find threshold if network is a tree\n #thresh = sorted(self.C, reverse=True)[-self.verts+1]\n D = self.adjacency_matrix(threshold=1e-8).sum(axis=0)\n\n return D.mean()\n\n def std_degree(self):\n \"\"\" Return the standard deviation of the degree distribution\n \"\"\"\n D = self.adjacency_matrix(threshold=1e-8).sum(axis=0)\n\n return D.std(ddof=1)\n\n def mean_weighted_degree(self):\n \"\"\" Return the mean weighted degree, where\n the weight is given by the conductivity.\n \"\"\"\n CM = self.C_matrix(self.C)\n D = CM.sum(axis=0)\n\n return D.mean()\n\n def branch_lengths(self):\n \"\"\" Return the distribution of branch lengths,\n where we count the lengths between two nodes in the\n tree of degree > 2.\n 
\"\"\"\n G = nx.from_np_matrix(self.adjacency_matrix(threshold=1e-8))\n\n branch_lengths = []\n current_branch = 1\n for (u, v) in nx.dfs_edges(G, source=0):\n if G.degree(v) == 2:\n current_branch += 1\n else:\n branch_lengths.append(current_branch)\n current_branch = 1\n\n return np.array(branch_lengths)\n\n def bond_neighbors(self):\n \"\"\" Returns a list that for each bond contains\n an np.array of indices of nodes next to that particular bond.\n \"\"\"\n ns = []\n for b in self.LEAF.Bond:\n # Find left/right node neighbors\n b_l_ns = [c[0] for c in self.LEAF.Bond if not np.array_equal(b, c)\\\n and (c[1] == b[0] or c[1] == b[1])]\n b_n_ns = [c[1] for c in self.LEAF.Bond if not np.array_equal(b, c)\\\n and (c[0] == b[0] or c[0] == b[1])]\n\n ns.append(np.array(list(set(b_l_ns + b_n_ns))))\n\n return ns\n\n def bond_neighbor_indices(self):\n \"\"\" Returns a list that for each bond contains\n an np.array of indices of bonds next to that particular bond.\n \"\"\"\n ns = []\n for b in self.LEAF.Bond:\n # Find left/right node neighbors\n b_ns = [i for i in xrange(self.bonds) if \\\n len(set(b).intersection(self.LEAF.Bond[i])) == 1]\n\n ns.append(np.array(b_ns))\n\n return ns\n\n def bond_coords(self):\n \"\"\"\n Returns lists of the bonds' coordinates for plotting\n \"\"\"\n\n a = np.arange(self.bonds)\n\n xs = np.array([self.LEAF.Vertex[self.LEAF.Bond[a,0],0],\n self.LEAF.Vertex[self.LEAF.Bond[a,1],0]]).T\n\n ys = np.array([self.LEAF.Vertex[self.LEAF.Bond[a,0],1],\n self.LEAF.Vertex[self.LEAF.Bond[a,1],1]]).T\n\n return xs, ys\n\n def bond_distances_sqr(self):\n \"\"\"\n Returns the matrix d_ij containing the Euclidean distances\n squared between the midpoints of bonds i and j.\n \"\"\"\n\n if self.bond_dists_sqr == None:\n py = 0.5*(self.LEAF.Vertex[self.LEAF.Bond[:,0],1] + \\\n self.LEAF.Vertex[self.LEAF.Bond[:,1],1])\n px = 0.5*(self.LEAF.Vertex[self.LEAF.Bond[:,0],0] + \\\n self.LEAF.Vertex[self.LEAF.Bond[:,1],0])\n\n dx = np.repeat(px[np.newaxis, :], \\\n self.bonds, axis=0) - \\\n np.repeat(px[:, np.newaxis], self.bonds, axis=1)\n\n dy = np.repeat(py[np.newaxis, :], \\\n self.bonds, axis=0) - \\\n np.repeat(py[:, np.newaxis], self.bonds, axis=1)\n\n self.bond_dists_sqr = dx*dx + dy*dy\n\n return self.bond_dists_sqr\n\n def vert_distances_sqr(self, verts=None):\n \"\"\"\n Returns the matrix d_ij containing the Euclidean distances\n squared between the vertices i and j.\n \"\"\"\n if verts is None:\n px = self.LEAF.Vertex[:,0]\n py = self.LEAF.Vertex[:,1]\n else:\n px = self.LEAF.Vertex[verts,0]\n py = self.LEAF.Vertex[verts,1]\n\n dx = np.repeat(px[np.newaxis, :], \\\n len(px), axis=0) - \\\n np.repeat(px[:, np.newaxis], len(px), axis=1)\n\n dy = np.repeat(py[np.newaxis, :], \\\n len(py), axis=0) - \\\n np.repeat(py[:, np.newaxis], len(py), axis=1)\n\n if verts is None:\n self.vert_dists_sqr = dx*dx + dy*dy\n return self.vert_dists_sqr\n else:\n return dx*dx + dy*dy\n\n def bond_lengths(self, normalize=False):\n \"\"\"\n Returns a vector containing the bond lengths.\n If normalize is True, will normalize to the smallest\n length.\n \"\"\"\n ls = np.linalg.norm(self.LEAF.Vertex[self.LEAF.Bond[:,0],:] - \\\n self.LEAF.Vertex[self.LEAF.Bond[:,1],:], axis=1)\n\n if normalize:\n ls = ls/min(ls[ls>0])\n\n return ls\n\n def scale_plt_figure(self, ax=None):\n \"\"\"\n Sets the correct scales for the current\n matplotlib figure\n \"\"\"\n if ax == None:\n ax = plt.axes()\n\n ax.set_aspect('equal')\n\n try:\n ax.set_ylim([min(self.LEAF.Vertex[:,1]) - 0.05, \\\n max(self.LEAF.Vertex[:,1]) + 
0.05])\n ax.set_xlim([min(self.LEAF.Vertex[:,0]) - 0.05, \\\n max(self.LEAF.Vertex[:,0]) + 0.05])\n except:\n print ('error scaling axis')\n ax.set_ylim([-1.3, 1.3])\n ax.set_xlim([-1.3, 1.3])\n\n def plot_conductivities_raw(self, process=lambda x: (x/np.amax(x))**0.25, \\\n magn=2, col=np.array([0,0,0]), ax=None, scale_axis=True,\n rescale=False, alpha=True):\n if ax == None:\n ax = plt.gca()\n\n xs, ys = self.bond_coords()\n conds = process(self.C.copy())\n\n if rescale:\n # rescale all coordinates to lie inside [0,1]^2\n xs -= xs.min()\n xs /= xs.max()\n\n ys -= ys.min()\n ys /= ys.max()\n\n colors = np.zeros((len(ys),4))\n colors[:,:3] = col*np.ones((len(ys),3))\n if alpha:\n colors[:,3] = conds\n else:\n colors[:,3] = 1\n\n segs = np.array([np.array([xs[bond], ys[bond]]).T for bond in range(len(ys))])\n '''#Thresholding to plot only lines with C > thr\n a = segs[np.where(self.C.copy()>thr)]\n acond = conds[np.where(self.C.copy()>thr)]\n acolors = colors[np.where(self.C.copy()>thr)]'''\n\n line_segments = LineCollection(segs,\n linewidths=conds*magn,\n colors=colors,\n capstyle='round')\n ax.add_collection(line_segments)\n\n if scale_axis:\n self.scale_plt_figure(ax=ax)\n\n def plot_conductivities_red(self, process=lambda x: x, magn=2,\n ax=None, rescale=False):\n \"\"\" Plot conductivities as red lines orthogonal\n to the edges\n \"\"\"\n col = np.array([1., 0., 0.])\n if ax == None:\n ax = plt.gca()\n\n xs, ys = self.bond_coords()\n conds = process(self.C.copy())\n\n xs = np.array(xs)\n ys = np.array(ys)\n if rescale:\n # rescale all coordinates to lie inside [0,1]^2\n xs -= xs.min()\n xs /= xs.max()\n\n ys -= ys.min()\n ys /= ys.max()\n\n\n for i in range(self.bonds):\n alpha = 1 if conds[i] > 1e-1 else conds[i]\n color = tuple(list((1. - conds[i])*col) + [alpha])\n\n # rotate by 90 degrees\n dx = xs[i][1] - xs[i][0]\n dy = ys[i][1] - ys[i][0]\n\n dxx = 0.5*(dx + dy)\n dyy = 0.5*(dy - dx)\n\n xx = [xs[i][0] + dxx, xs[i][1] - dxx]\n yy = [ys[i][0] + dyy, ys[i][1] - dyy]\n\n # make half as long\n xx = [3./4*xx[0] + 1./4*xx[1], 3./4*xx[1] + 1./4*xx[0]]\n yy = [3./4*yy[0] + 1./4*yy[1], 3./4*yy[1] + 1./4*yy[0]]\n\n ax.plot(xx, yy, linewidth=magn*conds[i], \\\n color=color)\n\n self.scale_plt_figure(ax=ax)\n\n def plot_conductivities_asym_raw(self, process=lambda x: (x/amax(x))**0.25, \\\n magn=2, col=np.array([1., 1., 1.])):\n \"\"\"\n Plots the leaf network to the current Matplotlib figure.\n It won't call matplotlib's draw, you have to do that yourself!\n\n process is a function used to process the conductivity vector.\n magn sets a magnification factor for vein thickness\n col allows you to specify a color for the conductivities.\n\n This is a re-implementation of the old displayNetworkF.\n\n Usage example:\n\n plt.figure(5)\n netw.plot_conductivities(process=lambda x: (x/max(x))**(0.5))\n plt.title(\"Concentrations\")\n plt.draw()\n\n Use the plot_conductivities method if you're lazy!\n \"\"\"\n xs, ys = self.bond_coords()\n conds = process(self.C.copy())\n\n for i in range(self.bonds):\n alpha = 1 if conds[i] > 1e-1 else conds[i]\n color = tuple(list((1. - conds[i])*col) + [alpha])\n\n x = xs[i].copy()\n y = ys[i].copy()\n\n x[0] = 0.5*(x[0] + x[1])\n y[0] = 0.5*(y[0] + y[1])\n\n plt.plot(x, y,\n linewidth=magn*conds[i], color=color)\n\n for i in range(self.bonds):\n alpha = 1 if conds[i+self.bonds] > 1e-1 else conds[i+self.bonds]\n color = tuple(list((1. 
- conds[i+self.bonds])*col) + [alpha])\n\n x = xs[i].copy()\n y = ys[i].copy()\n\n x[1] = 0.5*(x[0] + x[1])\n y[1] = 0.5*(y[0] + y[1])\n\n plt.plot(x, y,\n linewidth=magn*conds[i+self.bonds], color=color)\n\n self.scale_plt_figure()\n\n def plot_node_topol_raw(self, qty, process=lambda x: x):\n \"\"\"\n Plots qty as a function of node topological distance from the root\n node.\n \"\"\"\n A = self.adjacency_matrix()\n ords = float('inf')*np.ones(self.verts)\n order_nodes(A, ords)\n\n n = int(max(ords[np.where(np.isfinite(ords))]))\n\n qty = process(qty)\n\n vals = [ qty[np.where(ords == i)].mean() for i in np.xrange(n + 1) ]\n stds = [ qty[np.where(ords == i)].std() for i in np.xrange(n + 1) ]\n\n plt.xlim(-0.1, n + 0.1)\n plt.errorbar(np.arange(n + 1), vals, yerr=stds)\n\n def plot_bond_topol_raw(self, qty, process=lambda x: x):\n \"\"\"\n Plots qty as a function of bond topological\n distance from the root node\n \"\"\"\n A = self.adjacency_matrix()\n ords = float('inf')*ones(self.bonds)\n order_bonds(A, ords, self.LEAF.Bond)\n\n n = int(max(ords[where(isfinite(ords))]))\n\n qty = process(qty)\n\n vals = [ qty[where(ords == i)].mean() for i in xrange(n + 1) ]\n stds = [ qty[where(ords == i)].std() for i in xrange(n + 1) ]\n\n plt.xlim(-0.1, n + 0.1)\n plt.errorbar(arange(n + 1), vals, yerr=stds)\n\n @plotter\n def plot_conductivities_topol(self, figure=None, title=\"\"\"Conductivities vs\\\n topological distance\"\"\"):\n self.plot_bond_topol_raw(self.C)\n plt.title(title)\n\n @plotter\n def plot_conductivities(self, figure=None, title=\"\", \\\n proc_pow=0.25, ticks=True):\n \"\"\"\n Plots the vascular network to given matplotlib figure\n with some reasonable default values\n \"\"\"\n self.plot_conductivities_raw(process=lambda x: (x/amax(x))**proc_pow, \\\n magn=4)\n plt.title(title)\n ax = plt.gca()\n ax.get_xaxis().set_visible(False)\n ax.get_yaxis().set_visible(False)\n\n if not ticks:\n plt.xticks([])\n plt.yticks([])\n\n @plotter\n def plot_conductivities_line(self, figure=None, title=\"Conductivities\",\n fit=False, area_power=0.5):\n \"\"\"\n Plots the conductivities as a 1d plot (useful for pine needle).\n\n fit: True/False whether there should be attempted to fit the\n law A(1 - x/L)**B to the conductivities.\n will return (A, B, L)\n area_power: C**area_power is proportional to cross-sectional\n area\n \"\"\"\n if fit:\n size = np.sum(self.C**area_power > 1e-6)\n claw = self.line_fit(area_power)\n\n self.path_plot_raw(self.C_matrix(self.C**area_power))\n\n plt.title(\"Area distributions along tree paths\")\n\n if fit:\n x = linspace(0, size + 1, num=512)\n #plt.plot(x, claw[0]*(1 - x/claw[2])**claw[1], \\\n # label=\"Fitted a(1-x/l)^b\", linewidth=3)\n plt.legend()\n\n return claw\n\n def plot_currents_raw(self, currents, \\\n process=lambda x: x, thresh=1e-6, \\\n head_width=0.025, linewidth_magn=1):\n \"\"\"\n Plots the currents to the current figure, with maximum\n freedom in options.\n\n process: processes the current matrix again\n thresh: the threshold for currents\n head_width: the arrows' head width\n linewidth_magn: a magnification factor for the\n line widths\n \"\"\"\n currents = process(currents)\n x = self.LEAF.Vertex.copy()\n\n # Draw arrows\n for i in range(self.bonds):\n if currents[self.LEAF.Bond[i,0], self.LEAF.Bond[i,1]] > thresh:\n plt.arrow(x[self.LEAF.Bond[i,1],0], x[self.LEAF.Bond[i,1],1], \\\n (x[self.LEAF.Bond[i,0],0] - x[self.LEAF.Bond[i,1],0])/2, \\\n (x[self.LEAF.Bond[i,0],1] - x[self.LEAF.Bond[i,1],1])/2, \\\n 
linewidth=linewidth_magn*currents[self.LEAF.Bond[i,0], \\\n self.LEAF.Bond[i,1]], head_width=head_width)\n elif currents[self.LEAF.Bond[i,0], self.LEAF.Bond[i,1]] < -thresh:\n plt.arrow(x[self.LEAF.Bond[i,0],0], x[self.LEAF.Bond[i,0],1], \\\n (x[self.LEAF.Bond[i,1],0] - x[self.LEAF.Bond[i,0],0])/2, \\\n (x[self.LEAF.Bond[i,1],1] - x[self.LEAF.Bond[i,0],1])/2, \\\n linewidth=linewidth_magn*currents[self.LEAF.Bond[i,0], \\\n self.LEAF.Bond[i,1]], head_width=head_width)\n\n self.scale_plt_figure()\n\n def plot_current_vector_raw(self, currents, head_width=0.025,\n linewidth_magn=1, thresh=1e-6):\n x = self.LEAF.Vertex.copy()\n currents = 2*currents/currents.max()\n # Draw arrows\n for i in range(self.bonds):\n if currents[i] > thresh:\n plt.arrow(x[self.LEAF.Bond[i,1],0], x[self.LEAF.Bond[i,1],1], \\\n (x[self.LEAF.Bond[i,0],0] - x[self.LEAF.Bond[i,1],0])/2, \\\n (x[self.LEAF.Bond[i,0],1] - x[self.LEAF.Bond[i,1],1])/2, \\\n linewidth=linewidth_magn*currents[i], head_width=head_width)\n elif currents[i] < -thresh:\n plt.arrow(x[self.LEAF.Bond[i,0],0], x[self.LEAF.Bond[i,0],1], \\\n (x[self.LEAF.Bond[i,1],0] - x[self.LEAF.Bond[i,0],0])/2, \\\n (x[self.LEAF.Bond[i,1],1] - x[self.LEAF.Bond[i,0],1])/2, \\\n linewidth=linewidth_magn*currents[i], head_width=head_width)\n\n self.scale_plt_figure()\n\n\n @plotter\n def plot_currents(self, title=\"Currents\", magn=2, hw=0.025):\n \"\"\"\n Plots the currents to the given figure,\n with some reasonable defaults.\n \"\"\"\n self.plot_currents_raw(self.Q.copy(), \\\n process=lambda x: x/np.amax(abs(x)), \\\n linewidth_magn=magn, head_width=hw)\n plt.title(title)\n\n @plotter\n def plot_node_qty(self, v, include_zero=False):\n self.plot_node_qty_raw(v, include_zero=include_zero)\n\n def plot_node_qty_raw(self, v, include_zero=False, ax=None,\n colorbar=True):\n if include_zero:\n data = zip(v, range(len(v)))\n else:\n data = zip(v, range(1, len(v)+1))\n\n if ax is None:\n ax = plt.gca()\n\n xs = []\n ys = []\n for d, i in data:\n xs.append(self.LEAF.Vertex[i,0])\n ys.append(self.LEAF.Vertex[i,1])\n\n sc = ax.scatter(xs, ys, c=v, s=70, zorder=10, cmap='summer')\n\n if colorbar:\n plt.colorbar(sc)\n\n def plot_node_qty_mesh_raw(self, v, ax=None, colorbar=True, cax=None,\n colorbar_label=\"\", zorder=-15, vmax=None, cmap='summer',\n rescale=False):\n \"\"\" Plot the node qty using pcolormesh.\n We choose a zorder=-15 by default so the rasterization threshold\n can be chosen appropriately.\n \"\"\"\n if ax is None:\n ax = plt.gca()\n\n data = zip(v, range(len(v)))\n\n xs = []\n ys = []\n\n if rescale:\n xmin_inside = self.LEAF.Vertex[:,0].min()\n xmax_inside = self.LEAF.Vertex[:,0].max()\n ymin_inside = self.LEAF.Vertex[:,1].min()\n ymax_inside = self.LEAF.Vertex[:,1].max()\n\n dx = xmax_inside - xmin_inside\n dy = ymax_inside - ymin_inside\n\n for d, i in data:\n xs.append((self.LEAF.Vertex[i,0] - xmin_inside)/dx)\n ys.append((self.LEAF.Vertex[i,1] - ymin_inside)/dy)\n\n # NOTE: LEAF.RestVertices holds indices of the removed vertices,\n # not coordinates; this indexing assumes coordinates were stashed.\n xs.extend((self.LEAF.RestVertices[:,0] - xmin_inside)/dx)\n ys.extend((self.LEAF.RestVertices[:,1] - ymin_inside)/dy)\n else:\n for d, i in data:\n xs.append(self.LEAF.Vertex[i,0])\n ys.append(self.LEAF.Vertex[i,1])\n\n xs.extend(self.LEAF.RestVertices[:,0])\n ys.extend(self.LEAF.RestVertices[:,1])\n\n xs = np.array(xs)\n ys = np.array(ys)\n\n xmin = xs.min()\n xmax = xs.max()\n ymin = ys.min()\n ymax = ys.max()\n\n v = np.concatenate((v, -np.ones(self.LEAF.RestVertices.shape[0])))\n\n #if rescale:\n # xs -= xmin_inside\n # xs /= xmax_inside\n # ys -= ymin_inside\n # ys /= ymax_inside\n #print xs.max()\n 
#print xmax_inside\n\n # interpolate on grid\n X, Y = mgrid[xmin:xmax:500j, ymin:ymax:500j]\n C = griddata((xs, ys), v, (X, Y), method='nearest')\n\n # plot image\n cm.get_cmap(cmap).set_under('white')\n sc = ax.pcolormesh(X, Y, C, cmap=cmap, vmin=0, vmax=vmax,\n zorder=zorder)\n\n if colorbar:\n if cax == None:\n cb = plt.colorbar(sc, ax=ax, label=colorbar_label)\n else:\n cb = plt.colorbar(sc, cax=cax, label=colorbar_label)\n cb.ax.tick_params(axis='x', direction='in', labeltop='on')\n\n return sc\n\n def path_plot_raw(self, matrix, label=None):\n \"\"\"\n Plots the values in matrix (depending on bonds)\n along the tree paths.\n \"\"\"\n paths = []\n tree_paths(self.CM.copy(), paths)\n\n for p in paths:\n pvals = [matrix[p[i], p[i+1]] for i in xrange(len(p)-1)]\n if label != None:\n plt.plot(pvals, linewidth=3, label=label)\n else:\n plt.plot(pvals, linewidth=3)\n\n def path_plot_vert_raw(self, vector, label=None):\n \"\"\"\n Plots the values in vector (depending on vertices)\n along the tree paths.\n \"\"\"\n paths = []\n tree_paths(self.CM.copy(), paths)\n\n for p in paths:\n if label != None:\n plt.plot(vector[np.array(p)], linewidth=3, label=label)\n else:\n plt.plot(vector[np.array(p)], linewidth=3)\n\n\n def line_fit(self, area_power):\n \"\"\"\n Fits the law area = A(1-z/L)^B to the line of conductivities\n where area = conductivity^area_power\n \"\"\"\n size = np.sum(self.C**area_power > 1e-3)\n try:\n R = fit_circle_law(self.C**area_power, size)[0]\n except:\n R = None\n\n return R\n\n def plot_lattice_raw(self):\n \"\"\"\n Plots the lattice points.\n \"\"\"\n plt.plot(self.LEAF.Vertex[:,0], self.LEAF.Vertex[:,1], \"*\")\n self.scale_plt_figure()\n\n @plotter\n def plot_lattice(self, title=\"Lattice points\"):\n \"\"\"\n Plots the lattice points\n \"\"\"\n self.plot_lattice_raw()\n plt.title(title)\n\n def param_str(self):\n \"\"\"\n Returns a string containing the network's parameters\n \"\"\"\n return \"gam: {}\".format(self.gamma)\n\n def __repr__(self):\n \"\"\"\n Returns a string representation of the vascular network\n \"\"\"\n return \"\\nSize parameter: \" + str(self.size)\n", "id": "8435051", "language": "Python", "matching_score": 2.19584584236145, "max_stars_count": 0, "path": "network.py" }, { "content": "import numpy as np\nimport warnings\n\ndef stat(a, file, thr=1e-6):\n if a in ['<NAME>', 'Sarawak']:\n return 0\n if file == 'wbridges':\n \"\"\" Returns fraction of weighted channel length contained in loops.\n Channels are weighted by their width, so the statistic is\n the channel surface area of all deltaic channels that comprises\n loops\n \"\"\"\n with warnings.catch_warnings(record=True) as w:\n x = 1-a.bridges(weight=True, smooth=False, thr=thr)\n if len(w) > 0:\n x = 0\n delattr(a, 'G')\n elif file == 'bridges':\n \"\"\" Returns fraction of channel length (unweighted) contained in loops.\n \"\"\"\n x = 1-a.bridges(weight=False, smooth=True, thr=thr)\n delattr(a, 'G')\n elif file == 'nloops':\n \"\"\" returns the number of loops in the network\n \"\"\"\n x = a.n_cycles(thr=thr)#/a.area\n delattr(a, 'G')\n elif file == 'mstdiff':\n \"\"\" Returns the fraction of channel area (as weighted in wbridges)\n that has to be removed to make the minimum spanning tree.\n mstdiff = Minimum Spanning Tree DIFFerence\n \"\"\"\n x = a.mstdiff(thr=thr)\n delattr(a, 'G')\n elif file == 'mstdiffl':\n \"\"\" returns fraction of channel length removed to make minimum spanning\n tree. 
To mstdiff as bridges is to wbridges\n \"\"\"\n x = a.mstdiff(thr=thr, weight=False)\n delattr(a, 'G')\n elif file == 'resdist':\n \"\"\" Returns two lists. The first one of resistance distances from\n river apex (there can only be one for this method) to the sinks,\n the second from all of the tidal nodes to the sinks\n \"\"\"\n x, y = a.resistance_distances()\n elif file == 'loopareas':\n \"\"\" Delta plane area encompassed by loops divided by delta convex hull\n area\n \"\"\"\n x = np.sum(a.loop_areas(thr=thr)) / a.area\n delattr(a, 'G')\n delattr(a, 'cycles')\n elif file == 'pathnodes':\n \"\"\" returns a list of the number of paths from the source node to each\n of the sink nodes. slow\n \"\"\"\n x = a.path_nodes()\n elif file == 'flowchange':\n \"\"\" fraction of channels that change flow direction over tidal ensemble\n \"\"\"\n x = a.flow_change()\n return x\n", "id": "10611112", "language": "Python", "matching_score": 0.42138031125068665, "max_stars_count": 0, "path": "stats.py" } ]
2.507539
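The `VascularNetwork` class in the record above builds both an oriented incidence matrix (`incidence_matrix`) and a weighted Laplacian (`G_matrix`) from the same bond list. The two are related by L = Eᵀ diag(C) E, which is a handy sanity check when porting this code. Below is a minimal self-contained sketch of that identity; the toy bond list and conductivities are invented for illustration, only the construction pattern mirrors the class:

```python
import numpy as np
import scipy.sparse as sp

# Toy network: 4 nodes in a cycle, one conductivity per bond
# (two-column edge list, mirroring LEAF.Bond's layout).
bonds = np.array([[0, 1], [1, 2], [2, 3], [3, 0]])
C = np.array([1.0, 2.0, 0.5, 4.0])
n_bonds, n_verts = bonds.shape[0], 4

# Oriented incidence matrix: +1 at each bond's first node, -1 at its second.
rows = np.repeat(np.arange(n_bonds), 2)
cols = bonds.flatten()
data = np.tile([1, -1], n_bonds)
E = sp.coo_matrix((data, (rows, cols)), shape=(n_bonds, n_verts)).tocsc()

# Weighted Laplacian two ways: E^T diag(C) E vs. degree-minus-adjacency.
L_incidence = (E.T @ sp.diags(C) @ E).toarray()

A = sp.coo_matrix((C, (bonds[:, 0], bonds[:, 1])), shape=(n_verts, n_verts))
A = (A + A.T).toarray()
L_adjacency = np.diag(A.sum(axis=0)) - A

assert np.allclose(L_incidence, L_adjacency)
```

This is the same object `G_matrix_sparse` assembles from `C_matrix_sparse`, so either path can be used to set up the Kirchhoff system.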
twissmueller
[ { "content": "class TimestampEvent:\n def __init__(self, timestamp):\n self.timestamp = timestamp", "id": "6501296", "language": "Python", "matching_score": 2.006699323654175, "max_stars_count": 16, "path": "python-tutorial/producer/TimestampEvent.py" }, { "content": "from kafka import KafkaConsumer\nimport json\n\nfrom TimestampEvent import TimestampEvent\n\nconsumer = KafkaConsumer('timestamp', bootstrap_servers='localhost:9092', value_deserializer=lambda x: json.loads(x.decode('utf-8')))\n\nfor message in consumer:\n timestampEvent = TimestampEvent(**(message.value))\n print(\"Received: \" + timestampEvent.timestamp)\n", "id": "12836277", "language": "Python", "matching_score": 0, "max_stars_count": 16, "path": "python-tutorial/consumer/main.py" }, { "content": "from selenium import webdriver\nfrom selenium.common.exceptions import NoSuchElementException\nfrom selenium.webdriver.common.by import By\nimport time\n\nPRODUCT = \"https://www.saturn.de/de/product/_sony-playstation%C2%AE5-2661938.html\"\n# PRODUCT = \"https://www.saturn.de/de/product/_sony-dualsense™-2681392.html\"\nUSERNAME = \"<EMAIL>\"\nPASSWORD = \"<PASSWORD>\"\n\n\ndef click_button(button_text):\n try:\n element = driver.find_element(By.XPATH, f'//button[text()=\"{button_text}\"]')\n driver.execute_script(\"arguments[0].click();\", element)\n time.sleep(3)\n return True\n except NoSuchElementException:\n return False\n\n\ndef login_mms(username, password):\n driver.find_element_by_id(\"mms-login-form__email\").send_keys(username)\n driver.find_element_by_id(\"mms-login-form__password\").send_keys(password)\n driver.find_element_by_id(\"mms-login-form__login-button\").click()\n time.sleep(3)\n\n\ndriver = webdriver.Chrome()\ndriver.get(PRODUCT)\nwhile True:\n click_button(\"Alle zulassen\")\n if click_button(\"In den Warenkorb\"):\n break\n driver.refresh()\nclick_button(\"Nein, danke\")\nclick_button(\"Zum Warenkorb\")\nclick_button(\"Zur Kasse gehen\")\nlogin_mms(USERNAME, PASSWORD)\nclick_button(\"Weiter\")\nclick_button(\"Fortfahren und bezahlen\")\n", "id": "1786242", "language": "Python", "matching_score": 0, "max_stars_count": 2, "path": "app.py" } ]
0
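The `python-tutorial` excerpt above includes the consumer and the producer's `TimestampEvent` class, but not the producer's entry point. A producer consistent with the consumer's JSON deserializer might look like the sketch below; this is a guess at the missing `producer/main.py`, not the repository's actual code, with the topic name and bootstrap server taken from the consumer:

```python
from kafka import KafkaProducer
import json
import time
from datetime import datetime

from TimestampEvent import TimestampEvent

# Serialize events as UTF-8 JSON so the consumer's json.loads(...) round-trips them.
producer = KafkaProducer(bootstrap_servers='localhost:9092',
                         value_serializer=lambda v: json.dumps(v).encode('utf-8'))

while True:
    event = TimestampEvent(datetime.now().isoformat())
    # vars(event) -> {'timestamp': ...}, matching TimestampEvent(**message.value)
    # on the consumer side.
    producer.send('timestamp', vars(event))
    producer.flush()
    time.sleep(1)
```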
Mendeley
[ { "content": "from mendeley import Mendeley\nimport yaml\nimport os\n\n# Get the DOI to look up\nimport argparse\nparser = argparse.ArgumentParser()\nparser.add_argument(\"doi\", help=\"Searches the Mendeley catalogue for this DOI\")\nargs = parser.parse_args()\n\nconfig_file = 'config.yml'\n\nconfig = {}\n\nif os.path.isfile(config_file): \n with open('config.yml') as f:\n config = yaml.load(f)\nelse:\n config['clientId'] = os.environ.get('MENDELEY_CLIENT_ID')\n config['clientSecret'] = os.environ.get('MENDELEY_CLIENT_SECRET')\n\nmendeley = Mendeley(config['clientId'], config['clientSecret'])\nsession = mendeley.start_client_credentials_flow().authenticate()\n\ndoi = args.doi\n\ndoc = session.catalog.by_identifier(doi=doi, view='stats')\nprint '\"%s\" has %s readers.' % (doc.title, doc.reader_count)\n", "id": "6671503", "language": "Python", "matching_score": 0, "max_stars_count": 19, "path": "mendeley-catalog.py" } ]
0
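One gap in `mendeley-catalog.py` above is that when neither `config.yml` nor the environment variables are present, `config['clientId']` is `None` and the failure only surfaces deep inside the OAuth flow. A small guard (hypothetical; the script does not currently include it) keeps the error actionable:

```python
import sys

def require_credentials(config):
    # Fail fast with a clear message instead of an opaque OAuth error.
    missing = [k for k in ('clientId', 'clientSecret') if not config.get(k)]
    if missing:
        sys.exit('Missing Mendeley credentials: %s '
                 '(provide config.yml or set MENDELEY_CLIENT_ID/MENDELEY_CLIENT_SECRET)'
                 % ', '.join(missing))

# Usage, just before constructing Mendeley(...):
# require_credentials(config)
```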
PierreKimbanziR
[ { "content": "from django.db import models\nfrom django.db.models.lookups import StartsWith\n\n# Create your models here.\nclass TeamSeasonStats(models.Model):\n team_name = models.CharField(max_length=200, primary_key=True)\n owner = models.ForeignKey(\n \"auth.User\",\n related_name=\"teams\",\n on_delete=models.CASCADE,\n )\n number_of_players = models.IntegerField()\n mean_age_of_players = models.FloatField()\n possession = models.FloatField()\n matches_played = models.IntegerField()\n starts = models.IntegerField()\n minutes_played = models.IntegerField()\n minutes_played_by_90 = models.FloatField()\n\n goals_scored = models.IntegerField()\n assists = models.IntegerField()\n non_penalty_goals = models.IntegerField()\n penalty_goals = models.IntegerField()\n penalty_attempted = models.IntegerField()\n yellow_cards = models.IntegerField()\n red_cards = models.IntegerField()\n\n goals_per_90 = models.FloatField()\n assists_per_90 = models.FloatField()\n gls_asts_per_90 = models.FloatField()\n non_penalty_goals_per_90 = models.FloatField()\n non_penalty_goals_ast_per_90 = models.FloatField()\n\n xG = models.FloatField()\n npxG = models.FloatField()\n xA = models.FloatField()\n npxG_xA = models.FloatField()\n\n xG_per_90 = models.FloatField()\n xA_per_90 = models.FloatField()\n xG_xA_per_90 = models.FloatField()\n npxG_per_90 = models.FloatField()\n npxG_xA_per_90 = models.FloatField()\n\n def __str__(self):\n return self.team_name\n\n class Meta:\n ordering = (\"team_name\",)\n\n\n# Create your models here.\nclass PlayerSeasonStats(models.Model):\n\n team_name = models.ForeignKey(\n \"TeamSeasonStats\",\n on_delete=models.CASCADE,\n related_name=\"players\",\n blank=True,\n null=True,\n )\n\n player_name = models.CharField(\n max_length=200,\n primary_key=True,\n )\n owner = models.ForeignKey(\"auth.User\", related_name=\"+\", on_delete=models.CASCADE)\n nation = models.CharField(max_length=200)\n position = models.CharField(max_length=200)\n age = models.IntegerField()\n matches_played = models.IntegerField()\n starts = models.IntegerField()\n minutes_played = models.IntegerField()\n minutes_played_by_90 = models.FloatField()\n\n goals_scored = models.IntegerField()\n assists = models.IntegerField()\n non_penalty_goals = models.IntegerField()\n penalty_goals = models.IntegerField()\n penalty_attempted = models.IntegerField()\n yellow_cards = models.IntegerField()\n red_cards = models.IntegerField()\n\n goals_per_90 = models.FloatField()\n assists_per_90 = models.FloatField()\n gls_asts_per_90 = models.FloatField()\n non_penalty_goals_per_90 = models.FloatField()\n non_penalty_goals_ast_per_90 = models.FloatField()\n\n xG = models.FloatField()\n npxG = models.FloatField()\n xA = models.FloatField()\n npxG_xA = models.FloatField()\n\n xG_per_90 = models.FloatField()\n xA_per_90 = models.FloatField()\n xG_xA_per_90 = models.FloatField()\n npxG_per_90 = models.FloatField()\n npxG_xA_per_90 = models.FloatField()\n\n def __str__(self):\n return self.player_name\n\n def create_player_name(self):\n return f\"{self.first_name}_{self.last_name}\"\n\n class Meta:\n ordering = (\"player_name\",)\n", "id": "2007789", "language": "Python", "matching_score": 5.772402286529541, "max_stars_count": 0, "path": "soccer_api/season_stats/models.py" }, { "content": "# Generated by Django 3.2.5 on 2021-08-06 11:08\n\nfrom django.conf import settings\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n 
migrations.swappable_dependency(settings.AUTH_USER_MODEL),\n ]\n\n operations = [\n migrations.CreateModel(\n name='TeamSeasonStats',\n fields=[\n ('team_name', models.CharField(max_length=200, primary_key=True, serialize=False)),\n ('number_of_players', models.IntegerField()),\n ('mean_age_of_players', models.FloatField()),\n ('possession', models.FloatField()),\n ('matches_played', models.IntegerField()),\n ('starts', models.IntegerField()),\n ('minutes_played', models.IntegerField()),\n ('minutes_played_by_90', models.FloatField()),\n ('goals_scored', models.IntegerField()),\n ('assists', models.IntegerField()),\n ('non_penalty_goals', models.IntegerField()),\n ('penalty_goals', models.IntegerField()),\n ('penalty_attempted', models.IntegerField()),\n ('yellow_cards', models.IntegerField()),\n ('red_cards', models.IntegerField()),\n ('goals_per_90', models.FloatField()),\n ('assists_per_90', models.FloatField()),\n ('gls_asts_per_90', models.FloatField()),\n ('non_penalty_goals_per_90', models.FloatField()),\n ('non_penalty_goals_ast_per_90', models.FloatField()),\n ('xG', models.FloatField()),\n ('npxG', models.FloatField()),\n ('xA', models.FloatField()),\n ('npxG_xA', models.FloatField()),\n ('xG_per_90', models.FloatField()),\n ('xA_per_90', models.FloatField()),\n ('xG_xA_per_90', models.FloatField()),\n ('npxG_per_90', models.FloatField()),\n ('npxG_xA_per_90', models.FloatField()),\n ('owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='teams', to=settings.AUTH_USER_MODEL)),\n ],\n options={\n 'ordering': ('team_name',),\n },\n ),\n migrations.CreateModel(\n name='PlayerSeasonStats',\n fields=[\n ('player_name', models.CharField(max_length=200, primary_key=True, serialize=False)),\n ('nation', models.CharField(max_length=200)),\n ('position', models.CharField(max_length=200)),\n ('age', models.IntegerField()),\n ('matches_played', models.IntegerField()),\n ('starts', models.IntegerField()),\n ('minutes_played', models.IntegerField()),\n ('minutes_played_by_90', models.FloatField()),\n ('goals_scored', models.IntegerField()),\n ('assists', models.IntegerField()),\n ('non_penalty_goals', models.IntegerField()),\n ('penalty_goals', models.IntegerField()),\n ('penalty_attempted', models.IntegerField()),\n ('yellow_cards', models.IntegerField()),\n ('red_cards', models.IntegerField()),\n ('goals_per_90', models.FloatField()),\n ('assists_per_90', models.FloatField()),\n ('gls_asts_per_90', models.FloatField()),\n ('non_penalty_goals_per_90', models.FloatField()),\n ('non_penalty_goals_ast_per_90', models.FloatField()),\n ('xG', models.FloatField()),\n ('npxG', models.FloatField()),\n ('xA', models.FloatField()),\n ('npxG_xA', models.FloatField()),\n ('xG_per_90', models.FloatField()),\n ('xA_per_90', models.FloatField()),\n ('xG_xA_per_90', models.FloatField()),\n ('npxG_per_90', models.FloatField()),\n ('npxG_xA_per_90', models.FloatField()),\n ('owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL)),\n ('team_name', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='players', to='season_stats.teamseasonstats')),\n ],\n options={\n 'ordering': ('player_name',),\n },\n ),\n ]\n", "id": "10290333", "language": "Python", "matching_score": 5.763786792755127, "max_stars_count": 0, "path": "soccer_api/season_stats/migrations/0001_initial.py" }, { "content": "from django.utils.http import urlencode\nfrom django.urls import reverse\nfrom 
rest_framework import status\nfrom rest_framework.test import APITestCase\nfrom season_stats.models import PlayerSeasonStats, TeamSeasonStats\nfrom faker import Faker\nfrom season_stats import views\nfrom django.contrib.auth.models import User\n\n\nclass TeamsSeasonStatsTests(APITestCase):\n\n def create_user(self):\n fk = Faker()\n username = fk.first_name() + fk.last_name()\n email = f'{<EMAIL>'\n password = \"<PASSWORD>\"\n User.objects.create_user(username, email, password)\n self.client.login(username=username, password=password)\n\n def post_team_stat(self, team_name):\n url = reverse(views.TeamSeasonStatsList.name)\n data = {\n \"team_name\": team_name,\n \"players\": [],\n \"number_of_players\": 0,\n \"mean_age_of_players\": 0,\n \"possession\": 48.6,\n \"matches_played\": 38,\n \"starts\": 38,\n \"minutes_played\": 366,\n \"minutes_played_by_90\": 346.9,\n \"goals_scored\": 33,\n \"assists\": 33,\n \"non_penalty_goals\": 4,\n \"penalty_goals\": 4,\n \"penalty_attempted\": 3,\n \"yellow_cards\": 3,\n \"red_cards\": 3,\n \"goals_per_90\": 3.8,\n \"assists_per_90\": 3.9,\n \"gls_asts_per_90\": 3.0,\n \"non_penalty_goals_per_90\": 3.0,\n \"non_penalty_goals_ast_per_90\": 7.9,\n \"xG\": 3.0,\n \"npxG\": 3.0,\n \"xA\": 78.0,\n \"npxG_xA\": 3.0,\n \"xG_per_90\": 3.0,\n \"xA_per_90\": 78.0,\n \"xG_xA_per_90\": 78.0,\n \"npxG_per_90\": 78.0,\n \"npxG_xA_per_90\": 78.0,\n }\n self.create_user()\n response = self.client.post(url, data, format=\"json\")\n return response\n\n def test_post_team_name(self):\n team_name = \"FCpierre\"\n\n response = self.post_team_stat(team_name)\n assert response.status_code == status.HTTP_201_CREATED\n\n def test_post_and_get_single_team_season_stats(self):\n \"\"\"\n Ensure we can create a new team_stats instance and then retrieve it\n \"\"\"\n new_team_name = \"Fc Soignies\"\n response = self.post_team_stat(new_team_name)\n print(\"Pk {0}\".format(TeamSeasonStats.objects.get().team_name))\n assert response.status_code == status.HTTP_201_CREATED\n assert TeamSeasonStats.objects.count() == 1\n assert TeamSeasonStats.objects.get().team_name == new_team_name\n\n def test_post_existing_team_name(self):\n \"\"\"\n Ensure we cannot create multiple teams with the same name\n \"\"\"\n new_team_name = \"FC bggggg\"\n response1 = self.post_team_stat(new_team_name)\n assert response1.status_code == status.HTTP_201_CREATED\n\n response2 = self.post_team_stat(new_team_name)\n assert response2.status_code == status.HTTP_400_BAD_REQUEST\n\n def test_filtering_team_name(self):\n \"\"\"\n Ensure we can filter teams by team_name\n \"\"\"\n team_name1 = \"FC beauxgosses\"\n team_name2 = \"FC chaoros\"\n self.post_team_stat(team_name1)\n self.post_team_stat(team_name2)\n filter_by_name = {\"team_name\": team_name1}\n url = \"{0}?{1}\".format(\n reverse(views.TeamSeasonStatsList.name), urlencode(filter_by_name)\n )\n print(url)\n response = self.client.get(url, format=\"json\")\n print(response)\n assert response.status_code == status.HTTP_200_OK\n # Make sure we receive only one element in the response\n assert response.data[\"count\"] == 1\n assert response.data[\"results\"][0][\"team_name\"] == team_name1\n\n def test_retrieving_teams_collection(self):\n new_team_name = \"Grabuge RFC\"\n self.post_team_stat(new_team_name)\n url = reverse(views.TeamSeasonStatsList.name)\n response = self.client.get(\n url,\n format=\"json\",\n )\n assert response.status_code == status.HTTP_200_OK\n assert response.data[\"results\"][0][\"team_name\"] == new_team_name\n\n def test_update_team_data(self):\n new_team_name = \"<NAME>\"\n response = self.post_team_stat(new_team_name)\n url = reverse(\n views.TeamSeasonStatsDetail.name, args=[response.data[\"team_name\"]]\n )\n print(url)\n updated_team_name = \"<NAME>222\"\n data = {\n \"team_name\": updated_team_name,\n \"players\": [],\n \"number_of_players\": 0,\n \"mean_age_of_players\": 0,\n \"possession\": 48.6,\n \"matches_played\": 38,\n \"starts\": 38,\n \"minutes_played\": 366,\n \"minutes_played_by_90\": 346.9,\n \"goals_scored\": 33,\n \"assists\": 33,\n \"non_penalty_goals\": 4,\n \"penalty_goals\": 4,\n \"penalty_attempted\": 3,\n \"yellow_cards\": 3,\n \"red_cards\": 3,\n \"goals_per_90\": 3.8,\n \"assists_per_90\": 3.9,\n \"gls_asts_per_90\": 3.0,\n \"non_penalty_goals_per_90\": 3.0,\n \"non_penalty_goals_ast_per_90\": 7.9,\n \"xG\": 3.0,\n \"npxG\": 3.0,\n \"xA\": 78.0,\n \"npxG_xA\": 3.0,\n \"xG_per_90\": 3.0,\n \"xA_per_90\": 78.0,\n \"xG_xA_per_90\": 78.0,\n \"npxG_per_90\": 78.0,\n \"npxG_xA_per_90\": 78.0,\n }\n patch_response = self.client.patch(url, data, format=\"json\")\n assert patch_response.status_code == status.HTTP_200_OK\n assert patch_response.data[\"team_name\"] == updated_team_name\n\n def test_get_team_by_name(self):\n new_team_name = \"FC Tagada\"\n response = self.post_team_stat(new_team_name)\n url = reverse(\n views.TeamSeasonStatsDetail.name, args=[response.data[\"team_name\"]]\n )\n get_response = self.client.get(url, format=\"json\")\n assert get_response.status_code == status.HTTP_200_OK\n assert get_response.data[\"team_name\"] == new_team_name\n\n\nclass PlayerSeasonStatsTests(APITestCase):\n\n def create_user(self):\n fk = Faker()\n username = fk.first_name() + fk.last_name()\n email = f'{<EMAIL>'\n password = \"<PASSWORD>\"\n User.objects.create_user(username, email, password)\n self.client.login(username=username, password=password)\n\n def post_player_stats(self, player_name):\n url = reverse(views.PlayerSeasonsStatsList.name)\n data = {\n \"player_name\": player_name,\n \"team_name\": \"\",\n \"nation\": \"ar ARG\",\n \"position\": \"GK\",\n \"age\": 27,\n \"matches_played\": 38,\n \"starts\": 38,\n \"minutes_played\": 3420,\n \"minutes_played_by_90\": 38.0,\n \"goals_scored\": 0,\n \"assists\": 0,\n \"non_penalty_goals\": 0,\n \"penalty_goals\": 0,\n \"penalty_attempted\": 0,\n \"yellow_cards\": 1,\n \"red_cards\": 0,\n \"goals_per_90\": 0.0,\n \"assists_per_90\": 0.0,\n \"gls_asts_per_90\": 0.0,\n \"non_penalty_goals_per_90\": 0.0,\n \"non_penalty_goals_ast_per_90\": 0.0,\n \"xG\": 0.0,\n \"npxG\": 0.0,\n \"xA\": 0.2,\n \"npxG_xA\": 0.2,\n \"xG_per_90\": 0.0,\n \"xA_per_90\": 0.01,\n \"xG_xA_per_90\": 0.01,\n \"npxG_per_90\": 0.0,\n \"npxG_xA_per_90\": 0.01\n }\n self.create_user()\n response = self.client.post(url, data, format=\"json\")\n return response\n\n def test_post_player(self):\n player_name = \"FCpierre\"\n response = self.post_player_stats(player_name)\n assert response.status_code == status.HTTP_201_CREATED\n\n def test_post_and_get_single_player_stats(self):\n \"\"\"\n Ensure we can create a new player_stats instance and then retrieve it\n \"\"\"\n player_name = \"Jean_Luc_Duduche\"\n response = self.post_player_stats(player_name)\n print(\"Pk {0}\".format(PlayerSeasonStats.objects.get().team_name))\n assert response.status_code == status.HTTP_201_CREATED\n assert PlayerSeasonStats.objects.count() == 1\n assert PlayerSeasonStats.objects.get().player_name == player_name\n\n def
test_post_existing_player_name(self):\n \"\"\"\n Ensure we cannot create multiple players with the same name\n \"\"\"\n player_name = \"axel_foley\"\n response1 = self.post_player_stats(player_name)\n assert response1.status_code == status.HTTP_201_CREATED\n\n response2 = self.post_player_stats(player_name)\n assert response2.status_code == status.HTTP_400_BAD_REQUEST\n\n def test_filtering_player_name(self):\n \"\"\"\n Ensure we can filter players by player_name\n \"\"\"\n player_name1 = \"Freacois_Venant\"\n player_name2 = \"Jean_Tand\"\n self.post_player_stats(player_name1)\n self.post_player_stats(player_name2)\n filter_by_name = {\"player_name\": player_name1}\n url = \"{0}?{1}\".format(\n reverse(views.PlayerSeasonsStatsList.name), urlencode(filter_by_name)\n )\n print(url)\n response = self.client.get(url, format=\"json\")\n print(response)\n assert response.status_code == status.HTTP_200_OK\n # Make sure we receive only one element in the response\n assert response.data[\"count\"] == 1\n assert response.data[\"results\"][0][\"player_name\"] == player_name1\n\n def test_retrieving_players_collection(self):\n player_name = \"Gregory_Lebel\"\n self.post_player_stats(player_name)\n url = reverse(views.PlayerSeasonsStatsList.name)\n response = self.client.get(\n url,\n format=\"json\",\n )\n assert response.status_code == status.HTTP_200_OK\n assert response.data[\"results\"][0][\"player_name\"] == player_name\n\n def test_update_player_data(self):\n player_name = \"Cristiano_Ronaldo\"\n response = self.post_player_stats(player_name)\n url = reverse(\n views.PlayerSeasonStatsDetail.name, args=[response.data[\"player_name\"]]\n )\n print(url)\n updated_player_name = \"Cristiano_Ronaldo_Jr\"\n data = {\n \"player_name\": updated_player_name,\n \"team_name\": \"\",\n \"nation\": \"ar ARG\",\n \"position\": \"GK\",\n \"age\": 27,\n \"matches_played\": 38,\n \"starts\": 38,\n \"minutes_played\": 3420,\n \"minutes_played_by_90\": 38.0,\n \"goals_scored\": 0,\n \"assists\": 0,\n \"non_penalty_goals\": 0,\n \"penalty_goals\": 0,\n \"penalty_attempted\": 0,\n \"yellow_cards\": 1,\n \"red_cards\": 0,\n \"goals_per_90\": 0.0,\n \"assists_per_90\": 0.0,\n \"gls_asts_per_90\": 0.0,\n \"non_penalty_goals_per_90\": 0.0,\n \"non_penalty_goals_ast_per_90\": 0.0,\n \"xG\": 0.0,\n \"npxG\": 0.0,\n \"xA\": 0.2,\n \"npxG_xA\": 0.2,\n \"xG_per_90\": 0.0,\n \"xA_per_90\": 0.01,\n \"xG_xA_per_90\": 0.01,\n \"npxG_per_90\": 0.0,\n \"npxG_xA_per_90\": 0.01\n }\n patch_response = self.client.patch(url, data, format=\"json\")\n assert patch_response.status_code == status.HTTP_200_OK\n assert patch_response.data[\"player_name\"] == updated_player_name\n\n def test_get_player_by_name(self):\n player_name = \"Frederic_Lemaitre\"\n response = self.post_player_stats(player_name)\n url = reverse(\n views.PlayerSeasonStatsDetail.name, args=[response.data[\"player_name\"]]\n )\n get_response = self.client.get(url, format=\"json\")\n assert get_response.status_code == status.HTTP_200_OK\n assert get_response.data[\"player_name\"] == player_name\n", "id": "10855232", "language": "Python", "matching_score": 1.3281195163726807, "max_stars_count": 0, "path": "soccer_api/season_stats/tests.py" }, { "content": "from rest_framework.pagination import LimitOffsetPagination\n\n\nclass LimitOffsetPaginationWithUpperBound(LimitOffsetPagination):\n # Set the maximum limit value to 8\n max_limit = 8\n", "id": "10456539", "language": "Python", "matching_score": 0, "max_stars_count": 0, "path": "soccer_api/season_stats/custompagination.py" }, { "content": "from django.urls import re_path\nfrom season_stats import views\n\nurlpatterns = [\n re_path(\n r\"^teams/$\",\n views.TeamSeasonStatsList.as_view(),\n name=views.TeamSeasonStatsList.name,\n ),\n re_path(\n r\"^teams/(?P<pk>[\\w\\s]+)$\",\n views.TeamSeasonStatsDetail.as_view(),\n name=views.TeamSeasonStatsDetail.name,\n ),\n re_path(\n r\"^players/$\",\n views.PlayerSeasonsStatsList.as_view(),\n name=views.PlayerSeasonsStatsList.name,\n ),\n re_path(\n r\"^players/(?P<pk>[-\\w\\s]+)$\",\n views.PlayerSeasonStatsDetail.as_view(),\n name=views.PlayerSeasonStatsDetail.name,\n ),\n re_path(r\"^$\", views.SeasonStatsRoot.as_view(), name=views.SeasonStatsRoot.name),\n]\n", "id": "1648819", "language": "Python", "matching_score": 1.7132585048675537, "max_stars_count": 0, "path": "soccer_api/season_stats/urls.py" }, { "content": "from django.apps import AppConfig\n\n\nclass SeasonStatsConfig(AppConfig):\n default_auto_field = 'django.db.models.BigAutoField'\n name = 'season_stats'\n", "id": "10087606", "language": "Python", "matching_score": 0.4433786869049072, "max_stars_count": 0, "path": "soccer_api/season_stats/apps.py" }, { "content": "from rest_framework import serializers\nfrom season_stats.models import PlayerSeasonStats, TeamSeasonStats\nfrom django.contrib.auth.models import User\n\n\nclass UserTeamSerializer(serializers.ModelSerializer):\n class Meta:\n model = TeamSeasonStats\n fields = (\"team_name\",)\n\n\nclass UserPlayerSerializer(serializers.ModelSerializer):\n class Meta:\n model = PlayerSeasonStats\n fields = (\"player_name\",)\n\n\nclass UserSerializer(serializers.ModelSerializer):\n teams = UserTeamSerializer(many=True, read_only=True)\n players = UserPlayerSerializer(many=True, read_only=True)\n\n class Meta:\n model = User\n fields = (\"pk\", \"username\", \"teams\", \"players\")\n\n\nclass TeamSeasonStatsSerializer(serializers.ModelSerializer):\n players = serializers.StringRelatedField(many=True)\n owner = serializers.ReadOnlyField(source=\"owner.username\")\n\n class Meta:\n model = TeamSeasonStats\n fields = \"__all__\"\n\n\nclass PlayerSeasonStatsSerializer(serializers.ModelSerializer):\n team_name = serializers.SlugRelatedField(\n queryset=TeamSeasonStats.objects.all(), slug_field=\"team_name\", allow_null=True\n )\n owner = serializers.ReadOnlyField(source=\"owner.username\")\n\n class Meta:\n model = PlayerSeasonStats\n fields = \"__all__\"\n", "id": "9947186", "language": "Python", "matching_score": 3.7213032245635986, "max_stars_count": 0, "path": "soccer_api/season_stats/serializers.py" }, { "content": "from rest_framework import generics, permissions\nfrom rest_framework.response import Response\nfrom rest_framework.reverse import reverse\nfrom rest_framework.throttling import ScopedRateThrottle\n\nfrom season_stats.models import PlayerSeasonStats, TeamSeasonStats\nfrom season_stats.serializers import PlayerSeasonStatsSerializer, TeamSeasonStatsSerializer\nfrom season_stats import custompermissions\n\n\nclass TeamSeasonStatsList(generics.ListCreateAPIView):\n queryset = TeamSeasonStats.objects.all()\n serializer_class = TeamSeasonStatsSerializer\n name = \"teams-season-stats-list\"\n filterset_fields = (\"team_name\",)\n search_fields = (\"team_name\",)\n ordering_fields = (\"team_name\",)\n permission_classes = (\n permissions.IsAuthenticatedOrReadOnly,\n custompermissions.IsCurrentUserOwnerOrReadOnly,\n )\n throttle_scope = \"teams\"\n throttle_classes = (ScopedRateThrottle,)\n\n def perform_create(self, serializer):\n serializer.save(owner=self.request.user)\n\n\nclass TeamSeasonStatsDetail(generics.RetrieveUpdateDestroyAPIView):\n queryset = TeamSeasonStats.objects.all()\n serializer_class = TeamSeasonStatsSerializer\n name = \"teams-season-stats-detail\"\n permission_classes = (\n permissions.IsAuthenticatedOrReadOnly,\n custompermissions.IsCurrentUserOwnerOrReadOnly,\n )\n throttle_scope = \"teams\"\n throttle_classes = (ScopedRateThrottle,)\n\n\nclass PlayerSeasonsStatsList(generics.ListCreateAPIView):\n queryset = PlayerSeasonStats.objects.all()\n serializer_class = PlayerSeasonStatsSerializer\n name = \"players-season-stats-list\"\n filterset_fields = (\"player_name\", \"team_name\")\n ordering_fields = (\"player_name\",)\n search_fields = (\"player_name\",)\n permission_classes = (\n permissions.IsAuthenticatedOrReadOnly,\n custompermissions.IsCurrentUserOwnerOrReadOnly,\n )\n throttle_scope = \"players\"\n throttle_classes = (ScopedRateThrottle,)\n\n def perform_create(self, serializer):\n serializer.save(owner=self.request.user)\n\n\nclass PlayerSeasonStatsDetail(generics.RetrieveUpdateDestroyAPIView):\n queryset = PlayerSeasonStats.objects.all()\n serializer_class = PlayerSeasonStatsSerializer\n name = \"players-season-stats-detail\"\n permission_classes = (\n permissions.IsAuthenticatedOrReadOnly,\n custompermissions.IsCurrentUserOwnerOrReadOnly,\n )\n throttle_scope = \"players\"\n throttle_classes = (ScopedRateThrottle,)\n\n\nclass SeasonStatsRoot(generics.GenericAPIView):\n name = \"season-stats-root\"\n\n def get(self, request, *args, **kwargs):\n return Response(\n {\n \"teams\": reverse(TeamSeasonStatsList.name, request=request),\n \"players\": reverse(PlayerSeasonsStatsList.name, request=request),\n }\n )\n", "id": "2957005", "language": "Python", "matching_score": 2.616757869720459, "max_stars_count": 0, "path": "soccer_api/season_stats/views.py" }
2.165008
miott
[ { "content": "#!/usr/bin/env python\n\n#python\nimport unittest\nfrom unittest.mock import Mock\n\n# Genie package\nfrom genie.conf.tests import TestCase\nfrom genie.conf import Genie\nfrom genie.conf.base import Testbed, Device, Link, Interface\n\n# Genie XBu_shared\nfrom genie.libs.conf.static_routing.static_routing import StaticRouting\n\n\nclass test_static_routing(TestCase):\n\n\n def test_static_routing_with_interface_cfg(self):\n Genie.testbed = testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxr')\n\n static_routing = StaticRouting()\n static_routing.interface = 'GigabitEthernet0/1'\n static_routing.vrf = 'VRF1'\n static_routing.af = 'ipv4'\n static_routing.route = '10.2.1.0/24'\n\n static_routing.device_attr[dev1].vrf_attr[static_routing.vrf].address_family_attr[static_routing.af].route_attr[\n static_routing.route].interface_attr[static_routing.interface].if_nexthop = '192.168.1.2'\n\n static_routing.device_attr[dev1].vrf_attr[static_routing.vrf].address_family_attr[static_routing.af].route_attr[\n static_routing.route].interface_attr[static_routing.interface].if_preference = 2\n\n self.assertIs(static_routing.testbed, testbed)\n\n dev1.add_feature(static_routing)\n\n cfgs = static_routing.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n self.maxDiff = None\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join(\n ['router static',\n ' vrf VRF1',\n ' address-family ipv4 unicast',\n ' 10.2.1.0/24 GigabitEthernet0/1 192.168.1.2 2',\n ' exit',\n ' exit',\n ' exit',\n\n ]))\n\n static_routing_ipv6 = StaticRouting()\n static_routing_ipv6.interface = 'GigabitEthernet0/0/0/0'\n static_routing_ipv6.af = 'ipv6'\n static_routing_ipv6.route = '2001:1:1:1::1/128'\n static_routing_ipv6.vrf = 'default'\n static_routing_ipv6.device_attr[dev1].vrf_attr[static_routing_ipv6.vrf].address_family_attr[static_routing_ipv6.af].route_attr[\n static_routing_ipv6.route].interface_attr[static_routing_ipv6.interface].if_nexthop = '2001:20:1:2::1'\n\n self.assertIs(static_routing_ipv6.testbed, testbed)\n\n dev1.add_feature(static_routing_ipv6)\n\n cfgs = static_routing_ipv6.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n self.maxDiff = None\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join(\n ['router static',\n ' address-family ipv6 unicast',\n ' 2001:1:1:1::1/128 GigabitEthernet0/0/0/0 2001:20:1:2::1',\n ' exit',\n ' exit',\n\n ]))\n\n def test_static_routing_vrf_default_with_interface_cfg(self):\n Genie.testbed = testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxr')\n\n static_routing = StaticRouting()\n static_routing.interface = 'GigabitEthernet0/0/0/0'\n static_routing.vrf = 'default'\n static_routing.af = 'ipv4'\n static_routing.route = '1.1.1.1/32'\n\n static_routing.device_attr[dev1].vrf_attr[static_routing.vrf].address_family_attr[static_routing.af].route_attr[\n static_routing.route].interface_attr[static_routing.interface]\n\n self.assertIs(static_routing.testbed, testbed)\n\n dev1.add_feature(static_routing)\n\n cfgs = static_routing.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n self.maxDiff = None\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join(\n ['router static',\n ' address-family ipv4 unicast',\n ' 1.1.1.1/32 GigabitEthernet0/0/0/0',\n ' exit',\n ' exit',\n ]))\n\n static_routing = StaticRouting()\n static_routing.interface = 'GigabitEthernet0/0/0/0'\n static_routing.af = 'ipv4'\n static_routing.route = '10.2.1.0/24'\n 
static_routing.vrf = 'default'\n static_routing.device_attr[dev1].vrf_attr[static_routing.vrf].address_family_attr[static_routing.af].route_attr[\n static_routing.route].interface_attr[static_routing.interface].if_nexthop = '192.168.1.2'\n\n static_routing.device_attr[dev1].vrf_attr[static_routing.vrf].address_family_attr[static_routing.af].route_attr[\n static_routing.route].interface_attr[static_routing.interface].if_preference = 2\n\n self.assertIs(static_routing.testbed, testbed)\n\n dev1.add_feature(static_routing)\n\n cfgs = static_routing.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n self.maxDiff = None\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join(\n ['router static',\n ' address-family ipv4 unicast',\n ' 10.2.1.0/24 GigabitEthernet0/0/0/0 192.168.1.2 2',\n ' exit',\n ' exit',\n ]))\n\n def test_static_routing_without_interface_cfg(self):\n Genie.testbed = testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxr')\n\n static_routing = StaticRouting()\n static_routing.interface = 'GigabitEthernet0/1'\n static_routing.vrf = 'VRF1'\n static_routing.af = 'ipv4'\n\n\n static_routing.route = '{}'.format('10.2.1.0/24')\n\n static_routing.device_attr[dev1].vrf_attr[static_routing.vrf].address_family_attr[static_routing.af].route_attr[\n static_routing.route].next_hop_attr['192.168.1.2'].preference = 3\n\n self.assertIs(static_routing.testbed, testbed)\n\n dev1.add_feature(static_routing)\n\n cfgs = static_routing.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n self.maxDiff = None\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join(\n ['router static',\n ' vrf VRF1',\n ' address-family ipv4 unicast',\n ' 10.2.1.0/24 192.168.1.2 3',\n ' exit',\n ' exit',\n ' exit',\n ]))\n\n def test_static_routing_ipv6_without_interface_cfg(self):\n Genie.testbed = testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxr')\n\n static_routing = StaticRouting()\n static_routing.vrf = 'default'\n static_routing.af = 'ipv6'\n\n static_routing.route = '{}'.format('2001:3:3:3::3/128')\n\n static_routing.device_attr[dev1].vrf_attr[static_routing.vrf].address_family_attr[static_routing.af].route_attr[\n static_routing.route].next_hop_attr['2001:20:2:3::3'].preference = 3\n\n self.assertIs(static_routing.testbed, testbed)\n\n dev1.add_feature(static_routing)\n\n cfgs = static_routing.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n self.maxDiff = None\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join(\n ['router static',\n ' address-family ipv6 unicast',\n ' 2001:3:3:3::3/128 2001:20:2:3::3 3',\n ' exit',\n ' exit',\n ]))\n\n def test_static_routing_ipv4_ipv6_with_vrf_interface_cfg(self):\n Genie.testbed = testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxr')\n\n static_routing = StaticRouting()\n static_routing.interface = 'Null0'\n static_routing.vrf = 'VRF1'\n static_routing.af = 'ipv4'\n static_routing.route = '1.1.1.1/32'\n\n static_routing.device_attr[dev1].vrf_attr[static_routing.vrf].address_family_attr[static_routing.af].route_attr[\n static_routing.route].interface_attr[static_routing.interface].if_preference = 99\n\n self.assertIs(static_routing.testbed, testbed)\n\n dev1.add_feature(static_routing)\n\n cfgs = static_routing.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n self.maxDiff = None\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join(\n ['router static',\n ' vrf VRF1',\n ' 
address-family ipv4 unicast',\n ' 1.1.1.1/32 Null0 99',\n ' exit',\n ' exit',\n ' exit',\n ]))\n\n static_routing_6 = StaticRouting()\n static_routing_6.interface = 'Null0'\n static_routing_6.vrf = 'VRF1'\n static_routing_6.af = 'ipv6'\n static_routing_6.route = '2001:1:1:1::1/128'\n\n static_routing_6.device_attr[dev1].vrf_attr[static_routing_6.vrf].address_family_attr[static_routing_6.af].route_attr[\n static_routing_6.route].interface_attr[static_routing_6.interface].if_preference = 99\n\n self.assertIs(static_routing_6.testbed, testbed)\n\n dev1.add_feature(static_routing_6)\n\n cfgs = static_routing_6.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n self.maxDiff = None\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join(\n ['router static',\n ' vrf VRF1',\n ' address-family ipv6 unicast',\n ' 2001:1:1:1::1/128 Null0 99',\n ' exit',\n ' exit',\n ' exit',\n\n ]))\n\n\n def test_static_routing_uncfg(self):\n Genie.testbed = testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxr')\n\n static_routing = StaticRouting()\n static_routing.vrf = 'VRF1'\n static_routing.af = 'ipv4'\n static_routing.route = '{}'.format('10.2.1.0/24')\n\n static_routing.device_attr[dev1].vrf_attr[static_routing.vrf].address_family_attr[static_routing.af].route_attr[\n static_routing.route].next_hop_attr[None]\n\n dev1.add_feature(static_routing)\n\n un_cfgs = static_routing.build_unconfig(apply=False)\n self.assertCountEqual(un_cfgs.keys(), [dev1.name])\n self.maxDiff = None\n self.assertMultiLineEqual(str(un_cfgs[dev1.name]), '\\n'.join(\n ['router static',\n ' vrf VRF1',\n ' address-family ipv4 unicast',\n ' no 10.2.1.0/24',\n ' exit',\n ' exit',\n ' exit',\n ]))\n\nif __name__ == '__main__':\n unittest.main()\n", "id": "8132358", "language": "Python", "matching_score": 6.123589992523193, "max_stars_count": 0, "path": "pkgs/conf-pkg/src/genie/libs/conf/static_routing/iosxr/tests/test_static_routing.py" }, { "content": "#!/usr/bin/env python\n\n#python\nimport unittest\nfrom unittest.mock import Mock\n\n# Genie package\nfrom genie.conf.tests import TestCase\nfrom genie.conf import Genie\nfrom genie.conf.base import Testbed, Device, Link, Interface\n\n# Genie XBu_shared\nfrom genie.libs.conf.static_routing.static_routing import StaticRouting\n\n\nclass test_static_routing(TestCase):\n\n def test_static_routing_with_interface_cfg(self):\n Genie.testbed = testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxe')\n\n static_routing = StaticRouting()\n\n static_routing.device_attr[dev1].vrf_attr['VRF1'].address_family_attr['ipv4'].route_attr[\n '10.2.1.0/24'].interface_attr['GigabitEthernet0/1'].if_nexthop = '192.168.1.2'\n\n static_routing.device_attr[dev1].vrf_attr['VRF1'].address_family_attr['ipv4'].route_attr[\n '10.2.1.0/24'].interface_attr['GigabitEthernet0/1'].if_preference = 2\n\n self.assertIs(static_routing.testbed, testbed)\n\n dev1.add_feature(static_routing)\n\n cfgs = static_routing.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n self.maxDiff = None\n self.assertEqual(str(cfgs[dev1.name]), '\\n'.join(\n ['ip route vrf VRF1 10.2.1.0 255.255.255.0 GigabitEthernet0/1 192.168.1.2 2'\n ]))\n\n static_routing_4 = StaticRouting()\n\n static_routing_4.device_attr[dev1].vrf_attr['default'].address_family_attr['ipv4'].route_attr[\n '3.3.3.3/32'].interface_attr['GigabitEthernet0/3']\n\n self.assertIs(static_routing_4.testbed, testbed)\n\n dev1.add_feature(static_routing_4)\n\n cfgs = 
static_routing_4.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n self.maxDiff = None\n self.assertEqual(str(cfgs[dev1.name]), '\\n'.join(\n ['ip route 3.3.3.3 255.255.255.255 GigabitEthernet0/3'\n ]))\n\n\n def test_static_routing_without_interface_cfg(self):\n Genie.testbed = testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxe')\n\n static_routing = StaticRouting()\n\n static_routing.device_attr[dev1].vrf_attr['VRF1'].address_family_attr['ipv4'].route_attr[\n '10.2.1.0/24'].next_hop_attr['192.168.1.2'].preference = 3\n\n self.assertIs(static_routing.testbed, testbed)\n\n dev1.add_feature(static_routing)\n\n cfgs = static_routing.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n self.maxDiff = None\n self.assertEqual(str(cfgs[dev1.name]), '\\n'.join(\n ['ip route vrf VRF1 10.2.1.0 255.255.255.0 192.168.1.2 3'\n ]))\n\n def test_static_routing_ipv6_without_interface_cfg(self):\n Genie.testbed = testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxe')\n\n static_routing_6 = StaticRouting()\n\n static_routing_6.device_attr[dev1].vrf_attr['VRF1'].address_family_attr['ipv6'].route_attr[\n '2001:3:3:3::3/128'].next_hop_attr['2001:10:1:2::2'].preference = 3\n\n self.assertIs(static_routing_6.testbed, testbed)\n\n dev1.add_feature(static_routing_6)\n\n cfgs_6 = static_routing_6.build_config(apply=False)\n self.assertCountEqual(cfgs_6.keys(), [dev1.name])\n self.maxDiff = None\n self.assertEqual(str(cfgs_6[dev1.name]), '\\n'.join(\n ['ipv6 route vrf VRF1 2001:3:3:3::3/128 2001:10:1:2::2 3'\n ]))\n\n def test_static_routing_ipv6_with_interface_tag_cfg(self):\n Genie.testbed = testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxe')\n\n static_routing = StaticRouting()\n\n static_routing.device_attr[dev1].vrf_attr['default'].address_family_attr['ipv6'].route_attr[\n '2001:2:2:2::2/128'].interface_attr['GigabitEthernet0/0'].if_nexthop = '2001:10:1:2::2'\n\n static_routing.device_attr[dev1].vrf_attr['default'].address_family_attr['ipv6'].route_attr[\n '2001:2:2:2::2/128'].interface_attr['GigabitEthernet0/0'].if_tag = 100\n\n static_routing.device_attr[dev1].vrf_attr['default'].address_family_attr['ipv6'].route_attr[\n '2001:2:2:2::2/128'].interface_attr['GigabitEthernet0/0'].if_track = 1\n static_routing.device_attr[dev1].vrf_attr['default'].address_family_attr['ipv6'].route_attr[\n '2001:2:2:2::2/128'].interface_attr['GigabitEthernet0/0'].if_preference = 11\n\n self.assertIs(static_routing.testbed, testbed)\n\n dev1.add_feature(static_routing)\n\n cfgs = static_routing.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n self.maxDiff = None\n self.assertEqual(str(cfgs[dev1.name]), '\\n'.join(\n ['ipv6 route 2001:2:2:2::2/128 GigabitEthernet0/0 2001:10:1:2::2 11 tag 100 track 1'\n ]))\n\n def test_static_routing_ipv6_with_interface_cfg(self):\n Genie.testbed = testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxe')\n\n static_routing = StaticRouting()\n interface = 'GigabitEthernet0/1'\n vrf = 'VRF1'\n af = 'ipv6'\n\n route = '{}'.format('2001:2:2:2::2/128')\n\n static_routing.device_attr[dev1].vrf_attr[vrf].address_family_attr[af].route_attr[\n route].interface_attr[interface].if_nexthop = '2001:20:1:2::2'\n\n self.assertIs(static_routing.testbed, testbed)\n\n dev1.add_feature(static_routing)\n\n cfgs = static_routing.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n self.maxDiff = None\n 
self.assertEqual(str(cfgs[dev1.name]), '\\n'.join(\n ['ipv6 route vrf VRF1 2001:2:2:2::2/128 GigabitEthernet0/1 2001:20:1:2::2'\n ]))\n\n static_routing_2 = StaticRouting()\n\n static_routing_2.device_attr[dev1].vrf_attr['default'].address_family_attr['ipv6'].route_attr[\n '2001:2:2:2::2/128'].interface_attr['Null0'].if_preference = 5\n\n\n self.assertIs(static_routing_2.testbed, testbed)\n\n dev1.add_feature(static_routing_2)\n\n cfgs = static_routing_2.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n self.maxDiff = None\n self.assertEqual(str(cfgs[dev1.name]), '\\n'.join(\n ['ipv6 route 2001:2:2:2::2/128 Null0 5'\n ]))\n\n def test_static_routing_uncfg(self):\n Genie.testbed = testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxe')\n\n\n static_routing_2 = StaticRouting()\n\n static_routing_2.device_attr[dev1].vrf_attr['default'].address_family_attr['ipv4'].route_attr[\n '10.2.1.0/24'].next_hop_attr['192.168.3.11']\n\n dev1.add_feature(static_routing_2)\n\n un_cfgs = static_routing_2.build_unconfig(apply=False)\n self.assertCountEqual(un_cfgs.keys(), [dev1.name])\n self.maxDiff = None\n self.assertEqual(str(un_cfgs[dev1.name]), '\\n'.join(\n ['no ip route 10.2.1.0 255.255.255.0 192.168.3.11'\n ]))\n\n\nif __name__ == '__main__':\n unittest.main()\n", "id": "1969258", "language": "Python", "matching_score": 2.1926257610321045, "max_stars_count": 0, "path": "pkgs/conf-pkg/src/genie/libs/conf/static_routing/iosxe/tests/test_static_routing.py" }, { "content": "#!/usr/bin/env python\n\n#python\nimport unittest\nfrom unittest.mock import Mock\n\n# Genie package\nfrom genie.conf.tests import TestCase\nfrom genie.conf import Genie\nfrom genie.conf.base import Testbed, Device, Link, Interface\n\n# Genie XBu_shared\nfrom genie.libs.conf.vxlan.vxlan import Vxlan\n\n\nclass test_vxlan(TestCase):\n\n def test_vxlan_enable_cfg(self):\n Genie.testbed = testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='nxos')\n\n vxlan = Vxlan()\n vxlan.device_attr[dev1].enabled = True\n\n self.assertIs(vxlan.testbed, testbed)\n dev1.add_feature(vxlan)\n\n cfgs = vxlan.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n self.maxDiff = None\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join(\n ['feature nv overlay',\n 'feature vn-segment-vlan-based',\n 'nv overlay evpn',\n ]))\n\n un_cfgs = vxlan.build_unconfig(apply=False)\n self.assertCountEqual(un_cfgs.keys(), [dev1.name])\n self.maxDiff = None\n self.assertEqual(str(un_cfgs[dev1.name]), '\\n'.join(\n ['no feature nv overlay',\n 'no feature vn-segment-vlan-based',\n 'no nv overlay evpn',\n ]))\n\n def test_trm_cfg(self):\n Genie.testbed = testbed = Testbed()\n dev1 = Device(testbed=testbed, name='R2', os='nxos')\n\n vxlan = Vxlan()\n vxlan.device_attr[dev1].enabled_ngmvpn = True\n vxlan.device_attr[dev1].advertise_evpn_multicast = True\n\n self.assertIs(vxlan.testbed, testbed)\n dev1.add_feature(vxlan)\n\n cfgs = vxlan.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n self.maxDiff = None\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join(\n ['feature ngmvpn',\n 'advertise evpn multicast',\n ]))\n\n un_cfgs = vxlan.build_unconfig(apply=False)\n self.assertCountEqual(un_cfgs.keys(), [dev1.name])\n self.maxDiff = None\n self.assertEqual(str(un_cfgs[dev1.name]), '\\n'.join(\n ['no feature ngmvpn',\n 'no advertise evpn multicast',\n ]))\n\n def test_vxlan_basic_cfg(self):\n Genie.testbed = testbed = Testbed()\n dev1 = 
Device(testbed=testbed, name='PE1', os='nxos')\n\n vxlan = Vxlan()\n vxlan.device_attr[dev1].enabled_nv_overlay = True\n vxlan.device_attr[dev1].enabled_vn_segment_vlan_based = True\n vxlan.device_attr[dev1].enabled_nv_overlay_evpn = True\n vxlan.device_attr[dev1].fabric_fwd_anycast_gw_mac = '0002.0002.0002'\n\n self.assertIs(vxlan.testbed, testbed)\n dev1.add_feature(vxlan)\n cfgs = vxlan.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n self.maxDiff = None\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join(\n ['feature nv overlay',\n 'feature vn-segment-vlan-based',\n 'nv overlay evpn',\n 'fabric forwarding anycast-gateway-mac 0002.0002.0002',\n ]))\n\n un_cfgs = vxlan.build_unconfig(apply=False)\n self.assertCountEqual(un_cfgs.keys(), [dev1.name])\n self.maxDiff = None\n self.assertEqual(str(un_cfgs[dev1.name]), '\\n'.join(\n ['no feature nv overlay',\n 'no feature vn-segment-vlan-based',\n 'no nv overlay evpn',\n ]))\n\n def test_vxlan_fabric_forwarding_cfg(self):\n Genie.testbed = testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='nxos')\n\n vxlan = Vxlan()\n vxlan.device_attr[dev1].fabric_fwd_anycast_gw_mac = '0002.0002.0002'\n\n self.assertIs(vxlan.testbed, testbed)\n dev1.add_feature(vxlan)\n cfgs = vxlan.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n self.maxDiff = None\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join(\n ['fabric forwarding anycast-gateway-mac 0002.0002.0002',\n ]))\n\n un_cfgs = vxlan.build_unconfig(apply=False)\n self.assertCountEqual(un_cfgs.keys(), [dev1.name])\n self.maxDiff = None\n self.assertEqual(str(un_cfgs[dev1.name]), '\\n'.join(\n ['no fabric forwarding anycast-gateway-mac 0002.0002.0002',\n ]))\n\n def test_vxlan_evpn_msite(self):\n Genie.testbed = testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='nxos')\n\n vxlan = Vxlan()\n vxlan.device_attr[dev1].evpn_msite_attr[11111].evpn_msite_bgw_delay_restore_time = 30\n self.assertIs(vxlan.testbed, testbed)\n dev1.add_feature(vxlan)\n\n cfgs = vxlan.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n self.maxDiff = None\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join(\n ['evpn multisite border-gateway 11111',\n ' delay-restore time 30',\n ' exit',\n ]))\n\n uncfgs = vxlan.build_unconfig(apply=False)\n self.assertCountEqual(uncfgs.keys(), [dev1.name])\n self.maxDiff = None\n self.assertMultiLineEqual(str(uncfgs[dev1.name]), '\\n'.join(\n ['no evpn multisite border-gateway 11111'\n ]))\n # uncfg with attributes\n uncfgs_1 = vxlan.build_unconfig(apply=False,\n attributes={'device_attr': {\n dev1 : {\n 'evpn_msite_attr': {\n '*': {\n 'evpn_msite_bgw_delay_restore_time': None\n }}}}})\n self.assertMultiLineEqual(str(uncfgs_1[dev1.name]), '\\n'.join([\n 'evpn multisite border-gateway 11111',\n ' no delay-restore time 30',\n ' exit',\n ]))\n\n def test_vxlan_evpn(self):\n Genie.testbed = testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='nxos')\n vxlan = Vxlan()\n vxlan.device_attr[dev1].evpn_attr[None].vni_attr[11].evpn_vni_rd = 'auto'\n self.assertIs(vxlan.testbed, testbed)\n dev1.add_feature(vxlan)\n\n cfgs = vxlan.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n self.maxDiff = None\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join(\n ['evpn',\n ' vni 11 l2',\n ' rd auto',\n ' exit',\n ' exit',\n ]))\n\n un_cfgs = vxlan.build_unconfig(apply=False)\n self.assertCountEqual(un_cfgs.keys(), [dev1.name])\n 
self.maxDiff = None\n self.assertEqual(str(un_cfgs[dev1.name]), '\\n'.join(\n ['no evpn',\n ]))\n\n un_cfgs_partial = vxlan.build_unconfig(apply=False,attributes={'device_attr':{\n dev1.name :{\n 'evpn_attr':{\n '*':{\n 'vni_attr':{\n '*':{\n 'evpn_vni_rd':None\n }}\n }}}}})\n self.assertCountEqual(un_cfgs_partial.keys(), [dev1.name])\n self.maxDiff = None\n self.assertEqual(str(un_cfgs_partial[dev1.name]), '\\n'.join(\n ['evpn',\n ' vni 11 l2',\n ' no rd auto',\n ' exit',\n ' exit',\n ]))\n un_cfgs_partial_2 = vxlan.build_unconfig(apply=False, attributes={'device_attr': {\n dev1.name: {\n 'evpn_attr': {\n '*': {\n 'vni_attr':{\n '*':None\n }\n }\n }}}})\n self.assertCountEqual(un_cfgs_partial_2.keys(), [dev1.name])\n self.maxDiff = None\n self.assertEqual(str(un_cfgs_partial_2[dev1.name]), '\\n'.join(\n ['evpn',\n ' no vni 11 l2',\n ' exit',\n ]))\n\n def test_vxlan_evpn_vni_route_type(self):\n Genie.testbed = testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='nxos')\n\n vxlan = Vxlan()\n vxlan.device_attr[dev1].evpn_attr[None].vni_attr[11].route_target_attr['auto'].evpn_vni_rt_type = 'both'\n self.assertIs(vxlan.testbed, testbed)\n dev1.add_feature(vxlan)\n\n cfgs = vxlan.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n self.maxDiff = None\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join(\n ['evpn',\n ' vni 11 l2',\n ' route-target both auto',\n ' exit',\n ' exit',\n ]))\n\n un_cfgs_partial = vxlan.build_unconfig(apply=False, attributes={'device_attr': {\n dev1.name: {\n 'evpn_attr': {\n '*': {\n 'vni_attr': {\n '*': {\n 'route_target_attr':{\n \"*\":None\n }}}}}}}})\n self.assertCountEqual(un_cfgs_partial.keys(), [dev1.name])\n self.maxDiff = None\n self.assertEqual(str(un_cfgs_partial[dev1.name]), '\\n'.join(\n ['evpn',\n ' vni 11 l2',\n ' no route-target both auto',\n ' exit',\n ' exit',\n ]))\n\n\nif __name__ == '__main__':\n unittest.main()\n", "id": "7011477", "language": "Python", "matching_score": 4.292030334472656, "max_stars_count": 0, "path": "pkgs/conf-pkg/src/genie/libs/conf/vxlan/nxos/tests/test_vxlan.py" }, { "content": "#!/usr/bin/env python\n\n#python\nimport unittest\n# Genie package\nfrom genie.conf.tests import TestCase\nfrom genie.conf import Genie\nfrom genie.conf.base import Testbed, Device, Link, Interface\n\n# Genie XBu_shared\nfrom genie.libs.conf.nd.nd import Nd\n\n\nclass test_nd(TestCase):\n\n def test_nd_with_interface_cfg(self):\n Genie.testbed = testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='nxos')\n nd = Nd()\n nd.interface = 'Ethernet0/1'\n\n nd.device_attr[dev1].interface_attr[nd.interface].if_ra_interval = 201\n nd.device_attr[dev1].interface_attr[nd.interface].if_ra_lifetime = 1802\n nd.device_attr[dev1].interface_attr[nd.interface].if_ra_suppress = True\n\n self.assertIs(nd.testbed, testbed)\n dev1.add_feature(nd)\n cfgs = nd.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n self.maxDiff = None\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join(\n ['interface Ethernet0/1',\n ' ipv6 nd ra-interval 201',\n ' ipv6 nd ra-lifetime 1802',\n ' ipv6 nd suppress-ra',\n ' exit',\n ]))\n uncfg = nd.build_unconfig(apply=False, attributes={\n 'device_attr':\n { dev1.name:\n {'interface_attr': {\n '*': {\n 'if_ra_interval': None,\n 'if_ra_lifetime': None,\n 'if_ra_suppress': False,\n }}}}})\n\n self.assertCountEqual(uncfg.keys(), [dev1.name])\n self.maxDiff = None\n self.assertMultiLineEqual(str(uncfg[dev1.name]), '\\n'.join(\n ['interface 
Ethernet0/1',\n ' no ipv6 nd ra-interval 201',\n ' no ipv6 nd ra-lifetime 1802',\n ' no ipv6 nd suppress-ra',\n ' exit',\n ]))\n\n uncfg_intf = nd.build_unconfig(apply=False)\n self.maxDiff = None\n self.assertEqual(str(uncfg_intf[dev1.name]), '\\n'.join(\n ['no interface Ethernet0/1',\n ]))\n\n def test_nd_neighbor_cfg(self):\n Genie.testbed = testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='nxos')\n\n nd = Nd()\n nd.interface = 'Ethernet1/1'\n nd.ip = 'fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b'\n\n nd.device_attr[dev1].interface_attr[nd.interface].neighbor_attr[nd.ip].link_layer_address = 'aabb.beef.cccc'\n self.assertIs(nd.testbed, testbed)\n dev1.add_feature(nd)\n\n cfgs = nd.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n self.maxDiff = None\n self.assertEqual(str(cfgs[dev1.name]), '\\n'.join(\n ['interface Ethernet1/1',\n ' ipv6 neighbor fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b aabb.beef.cccc',\n ' exit'\n ]))\n uncfg = nd.build_unconfig(apply=False, attributes={\n 'device_attr':\n {dev1.name:\n {'interface_attr': {\n '*': { 'neighbor_attr':{\n '*':None}}}}}})\n self.assertCountEqual(uncfg.keys(), [dev1.name])\n self.maxDiff = None\n self.assertEqual(str(uncfg[dev1.name]), '\\n'.join(\n ['interface Ethernet1/1',\n ' no ipv6 neighbor fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b aabb.beef.cccc',\n ' exit',\n ]))\nif __name__ == '__main__':\n unittest.main()\n", "id": "1421185", "language": "Python", "matching_score": 2.3907923698425293, "max_stars_count": 0, "path": "pkgs/conf-pkg/src/genie/libs/conf/nd/nxos/tests/test_nd.py" }, { "content": "#!/usr/bin/env python\n\n# import python\nimport unittest\n\n# import genie\nfrom genie.conf.tests import TestCase\nfrom genie.conf import Genie\nfrom genie.conf.base import Testbed, Device, Interface\n\n# import genie.libs\nfrom genie.libs.conf.arp import Arp\n\nclass test_arp(TestCase):\n def test_arp_cfg(self):\n Genie.testbed = testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxe')\n\n arp = Arp()\n arp.device_attr[dev1].max_entries = 100\n\n self.assertIs(arp.testbed, testbed)\n dev1.add_feature(arp)\n\n cfgs = arp.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n self.maxDiff = None\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join(\n ['arp entries interface-limit 100',\n ]))\n\n un_cfgs = arp.build_unconfig(apply=False)\n self.assertCountEqual(un_cfgs.keys(), [dev1.name])\n self.maxDiff = None\n self.assertEqual(str(un_cfgs[dev1.name]), '\\n'.join(\n ['no arp entries interface-limit 100']))\n\n\n def test_arp_sub_interface_cfg(self):\n Genie.testbed = testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxe')\n arp = Arp()\n intf= 'GigabitEthernet2'\n arp.device_attr[dev1].interface_attr[intf].if_proxy_enable = True\n arp.device_attr[dev1].interface_attr[intf].if_local_proxy_enable = True\n arp.device_attr[dev1].interface_attr[intf].if_expire_time = 10\n\n self.assertIs(arp.testbed, testbed)\n dev1.add_feature(arp)\n\n cfg_all = arp.build_config(apply=False)\n self.assertCountEqual(cfg_all.keys(), [dev1.name])\n self.maxDiff = None\n self.assertMultiLineEqual(str(cfg_all[dev1.name]), '\\n'.join(\n ['interface GigabitEthernet2',\n ' ip proxy-arp',\n ' ip local-proxy-arp',\n ' arp timeout 10',\n ' exit'\n ]))\n\n un_cfg_all = arp.build_unconfig(apply=False)\n self.assertCountEqual(un_cfg_all.keys(), [dev1.name])\n self.maxDiff = None\n self.assertEqual(str(un_cfg_all[dev1.name]), '\\n'.join(\n ['interface 
GigabitEthernet2',\n ' no ip proxy-arp',\n ' no ip local-proxy-arp',\n ' no arp timeout 10',\n ' exit'\n ]))\n\n def test_static_arp_cfg(self):\n Genie.testbed = testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxe')\n arp = Arp()\n intf = 'GigabitEthernet2'\n arp.device_attr[dev1].interface_attr[intf].static_arp_attr['10.10.10.10'].if_static_mac_address ='aaaa.bbbb.cccc'\n arp.device_attr[dev1].interface_attr[intf].static_arp_attr['10.10.10.10'].if_static_encap_type ='arpa'\n\n self.assertIs(arp.testbed, testbed)\n dev1.add_feature(arp)\n\n cfg_all = arp.build_config(apply=False)\n self.assertCountEqual(cfg_all.keys(), [dev1.name])\n self.maxDiff = None\n self.assertMultiLineEqual(str(cfg_all[dev1.name]), '\\n'.join(\n ['arp 10.10.10.10 aaaa.bbbb.cccc arpa']))\n\n un_cfg_all = arp.build_unconfig(apply=False)\n self.assertCountEqual(un_cfg_all.keys(), [dev1.name])\n self.maxDiff = None\n self.assertEqual(str(un_cfg_all[dev1.name]), '\\n'.join(\n ['no arp 10.10.10.10 aaaa.bbbb.cccc arpa']))\n\n\n def test_static_arp_with_vrf_cfg(self):\n Genie.testbed = testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxe')\n intf = 'GigabitEthernet2'\n\n arp2 = Arp()\n arp2.device_attr[dev1].interface_attr[intf].static_arp_attr[\n '10.10.10.10'].if_static_mac_address = 'aaaa.bbbb.cccc'\n arp2.device_attr[dev1].interface_attr[intf].static_arp_attr['10.10.10.10'].if_static_encap_type = 'arpa'\n arp2.device_attr[dev1].interface_attr[intf].static_arp_attr['10.10.10.10'].if_static_vrf = 'VRF1'\n arp2.device_attr[dev1].interface_attr[intf].static_arp_attr['10.10.10.10'].if_static_alias = True\n self.assertIs(arp2.testbed, testbed)\n dev1.add_feature(arp2)\n\n cfg = arp2.build_config(apply=False)\n self.assertCountEqual(cfg.keys(), [dev1.name])\n self.maxDiff = None\n self.assertMultiLineEqual(str(cfg[dev1.name]), '\\n'.join(\n ['arp vrf VRF1 10.10.10.10 aaaa.bbbb.cccc arpa alias']))\n\n un_cfg = arp2.build_unconfig(apply=False)\n self.assertCountEqual(un_cfg.keys(), [dev1.name])\n self.maxDiff = None\n self.assertEqual(str(un_cfg[dev1.name]), '\\n'.join(\n ['no arp vrf VRF1 10.10.10.10 aaaa.bbbb.cccc arpa alias']))\n\nif __name__ == '__main__':\n unittest.main()\n\n", "id": "6806403", "language": "Python", "matching_score": 1.8090940713882446, "max_stars_count": 0, "path": "pkgs/conf-pkg/src/genie/libs/conf/arp/iosxe/tests/test_arp.py" }, { "content": "#!/usr/bin/env python\n\nimport unittest\nfrom unittest.mock import Mock\n\nfrom genie.conf import Genie\nfrom genie.conf.base import Testbed, Device, Link, Interface\n\nfrom genie.libs.conf.isis import Isis\n\n\nclass test_isis(unittest.TestCase):\n\n def setUp(self):\n\n testbed = Genie.testbed = Testbed()\n self.dev1 = Device(testbed=testbed, name='PE1', os='iosxr')\n self.dev2 = Device(testbed=testbed, name='PE2', os='iosxr')\n self.intf1 = Interface(name='GigabitEthernet0/0/0/1',device=self.dev1)\n self.intf2 = Interface(name='GigabitEthernet0/0/0/2',device=self.dev2)\n\n self.link = Link(name='1_2_1',testbed=testbed)\n self.link.connect_interface(interface=self.intf1)\n self.link.connect_interface(interface=self.intf2)\n\n def test_init(self):\n\n isis = Isis(\"core\")\n self.assertCountEqual(isis.devices, [])\n self.assertCountEqual(isis.interfaces, [])\n self.assertCountEqual(isis.links, [])\n\n self.link.add_feature(isis)\n for intf_iter in self.link.interfaces:\n intf_iter.add_feature(isis)\n self.assertCountEqual(isis._devices_with_feature(), [self.dev1, self.dev2])\n 
self.assertCountEqual(isis._interfaces_with_feature(), [self.intf1, self.intf2])\n self.assertCountEqual(isis._links_with_feature(), [self.link])\n self.assertCountEqual(isis.devices, [self.dev1, self.dev2])\n self.assertCountEqual(isis.interfaces, [self.intf1, self.intf2])\n self.assertCountEqual(isis.links, [self.link])\n\n def test_IsisGlobal(self):\n\n isis = Isis(\"core\")\n self.link.add_feature(isis)\n\n self.assertCountEqual(isis.devices, [self.dev1, self.dev2])\n self.assertCountEqual(isis.interfaces, [self.intf1, self.intf2])\n self.assertCountEqual(isis.links, [self.link])\n\n # Global ISIS config\n isis.nsr = True\n isis.nsf_lifetime = 5\n\n # override default for both devices\n isis.is_type = Isis.IsType.level_1\n isis.device_attr['PE1'].is_type = Isis.IsType.level_1_2\n isis.device_attr['PE2'].is_type = Isis.IsType.level_2\n\n # use no default\n isis.device_attr['PE1'].nsf = Isis.Nsf.ietf\n isis.device_attr['PE2'].nsf = Isis.Nsf.cisco\n\n # override default for one device\n isis.lsp_mtu = 1400\n isis.device_attr['PE1'].lsp_mtu = 1500\n val = 1\n isis.device_attr['PE1'].net_id = \"00.0000.0000.000{}.00\".format(val)\n val += 1\n isis.device_attr['PE2'].net_id = \"00.0000.0000.000{}.00\".format(val)\n\n cfg1 = isis.build_config(apply=False)\n #print(\"\\nPE1 config\\n\" + str(cfg1['PE1']))\n #print(\"\\nPE2 config\\n\" + str(cfg1['PE2']))\n\n self.assertCountEqual(cfg1.keys(), ['PE1', 'PE2'])\n self.assertMultiLineEqual(str(cfg1['PE1']), '\\n'.join([\n 'router isis core',\n ' is-type level-1-2',\n ' lsp-mtu 1500',\n ' net 00.0000.0000.0001.00',\n ' nsf ietf',\n ' nsf lifetime 5',\n ' nsr',\n ' address-family ipv4 unicast',\n ' exit',\n ' interface GigabitEthernet0/0/0/1',\n ' address-family ipv4 unicast',\n ' exit',\n ' exit',\n ' exit',\n ]))\n self.assertMultiLineEqual(str(cfg1['PE2']), '\\n'.join([\n 'router isis core',\n ' is-type level-2-only',\n ' lsp-mtu 1400',\n ' net 00.0000.0000.0002.00',\n ' nsf cisco',\n ' nsf lifetime 5',\n ' nsr',\n ' address-family ipv4 unicast',\n ' exit',\n ' interface GigabitEthernet0/0/0/2',\n ' address-family ipv4 unicast',\n ' exit',\n ' exit',\n ' exit',\n ]))\n\n def test_IsisPerAddrFamily(self):\n\n isis = Isis(\"core\")\n self.link.add_feature(isis)\n\n # per address family ISIS\n isis.device_attr['PE1'].address_family_attr['ipv4 unicast'].metric_style = \"wide\"\n isis.device_attr['PE1'].address_family_attr['ipv4 unicast'].mpls_te_level = Isis.IsType.level_1\n isis.device_attr['PE1'].address_family_attr['ipv4 unicast'].mpls_te_rtrid = self.intf1\n isis.device_attr['PE1'].address_family_attr['ipv4 unicast'].redistribute_connected = True \n\n isis.device_attr['PE2'].address_family_attr['ipv4 unicast'].metric_style = \"narrow\"\n isis.device_attr['PE2'].address_family_attr['ipv4 unicast'].mpls_te_level = Isis.IsType.level_2\n isis.device_attr['PE2'].address_family_attr['ipv4 unicast'].mpls_te_rtrid = self.intf2\n\n cfg1 = isis.build_config(apply=False)\n #print(\"\\nPE1 config\\n\" + str(cfg1['PE1']))\n #print(\"\\nPE2 config\\n\" + str(cfg1['PE2']))\n self.assertMultiLineEqual(str(cfg1['PE1']), '\\n'.join([\n 'router isis core',\n ' net 47.6B8D.854F.FFFF.4F2D.4CD8.00',\n ' address-family ipv4 unicast',\n ' metric-style wide',\n ' mpls traffic-eng level-1',\n ' mpls traffic-eng router-id GigabitEthernet0/0/0/1',\n ' redistribute connected',\n ' exit',\n ' interface GigabitEthernet0/0/0/1',\n ' address-family ipv4 unicast',\n ' exit',\n ' exit',\n ' exit',\n ]))\n self.assertMultiLineEqual(str(cfg1['PE2']), '\\n'.join([\n 'router isis 
core',\n ' net 47.6B8D.854F.FFFF.D624.1D62.00',\n ' address-family ipv4 unicast',\n ' metric-style narrow',\n ' mpls traffic-eng level-2-only',\n ' mpls traffic-eng router-id GigabitEthernet0/0/0/2',\n ' exit',\n ' interface GigabitEthernet0/0/0/2',\n ' address-family ipv4 unicast',\n ' exit',\n ' exit',\n ' exit',\n ]))\n\n def test_IsisPerInterface(self):\n\n isis = Isis(\"core\")\n self.link.add_feature(isis)\n for intf_iter in self.link.interfaces:\n intf_iter.add_feature(isis)\n\n # per interface ISIS\n isis.device_attr['PE1'].interface_attr[self.intf1].passive = True\n isis.device_attr['PE1'].interface_attr[self.intf1].shutdown = True\n\n isis.device_attr['PE2'].interface_attr[self.intf2].point_to_point = True\n\n cfg1 = isis.build_config(apply=False)\n #print(\"\\nPE1 config\\n\" + str(cfg1['PE1']))\n #print(\"\\nPE2 config\\n\" + str(cfg1['PE2']))\n self.assertMultiLineEqual(str(cfg1['PE1']), '\\n'.join([\n 'router isis core',\n ' net 47.6B8D.854F.FFFF.4F2D.4CD8.00',\n ' address-family ipv4 unicast',\n ' exit',\n ' interface GigabitEthernet0/0/0/1',\n ' passive',\n ' shutdown',\n ' address-family ipv4 unicast',\n ' exit',\n ' exit',\n ' exit',\n ]))\n self.assertMultiLineEqual(str(cfg1['PE2']), '\\n'.join([\n 'router isis core',\n ' net 47.6B8D.854F.FFFF.D624.1D62.00',\n ' address-family ipv4 unicast',\n ' exit',\n ' interface GigabitEthernet0/0/0/2',\n ' point-to-point',\n ' address-family ipv4 unicast',\n ' exit',\n ' exit',\n ' exit',\n ]))\n\n def test_IsisPerInterfacePerAddressFamily(self):\n\n # No attributes defined yet\n if 0:\n # per interface per address family ISIS\n isis.device_attr['PE1'].interface_attr[self.intf1].address_family_attr['ipv4 unicast'].attr = \"value\"\n\n isis.device_attr['PE1'].interface_attr[self.intf1].address_family_attr['ipv4 unicast'].attr = \"value\"\n\n cfg1 = isis.build_config(apply=False)\n self.assertMultiLineEqual(str(cfg1['PE1']), '\\n'.join([\n 'router isis 100',\n ]))\n self.assertMultiLineEqual(str(cfg1['PE2']), '\\n'.join([\n 'router isis 100',\n ]))\n \n def test_NET(self):\n\n isis = Isis(\"core\")\n\n self.assertEqual(isis.system_id, None)\n self.assertEqual(isis.device_attr[self.dev1].system_id, 'FFFF.4F2D.4CD8') # \"PE1\"\n self.assertEqual(isis.device_attr[self.dev2].system_id, 'FFFF.D624.1D62') # \"PE2\"\n isis.system_id = '0000.0000.0001'\n self.assertEqual(isis.device_attr[self.dev1].system_id, '0000.0000.0001')\n self.assertEqual(isis.device_attr[self.dev2].system_id, '0000.0000.0001')\n del isis.system_id\n self.assertEqual(isis.system_id, None)\n self.assertEqual(isis.device_attr[self.dev1].system_id, 'FFFF.4F2D.4CD8') # \"PE1\"\n self.assertEqual(isis.device_attr[self.dev2].system_id, 'FFFF.D624.1D62') # \"PE2\"\n\n self.assertEqual(isis.area_address, '47.6B8D.854F') # \"core\"\n self.assertEqual(isis.device_attr[self.dev1].area_address, '47.6B8D.854F') # \"core\"\n self.assertEqual(isis.device_attr[self.dev2].area_address, '47.6B8D.854F') # \"core\"\n isis.area_address = '47.0000.0000'\n self.assertEqual(isis.area_address, '47.0000.0000')\n self.assertEqual(isis.device_attr[self.dev1].area_address, '47.0000.0000')\n self.assertEqual(isis.device_attr[self.dev2].area_address, '47.0000.0000')\n isis.area_address = None\n self.assertEqual(isis.area_address, None)\n self.assertEqual(isis.device_attr[self.dev1].area_address, '47.4F2D.4CD8') # \"PE1\"\n self.assertEqual(isis.device_attr[self.dev2].area_address, '47.D624.1D62') # \"PE2\"\n del isis.area_address\n self.assertEqual(isis.area_address, '47.6B8D.854F') # \"core\"\n 
self.assertEqual(isis.device_attr[self.dev1].area_address, '47.6B8D.854F') # \"core\"\n self.assertEqual(isis.device_attr[self.dev2].area_address, '47.6B8D.854F') # \"core\"\n\n self.assertEqual(isis.device_attr[self.dev1].net_id, '47.6B8D.854F.FFFF.4F2D.4CD8.00') # \"core\".\"PE1\".00\n self.assertEqual(isis.device_attr[self.dev2].net_id, '47.6B8D.854F.FFFF.D624.1D62.00') # \"core\".\"PE2\".00\n\nif __name__ == '__main__':\n unittest.main()\n\n", "id": "12625548", "language": "Python", "matching_score": 5.203156471252441, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/isis/tests/test_isis.py" }, { "content": "#!/usr/bin/env python\n\nimport unittest\nfrom unittest.mock import Mock\n\nfrom genie.conf import Genie\nfrom genie.conf.base import Testbed, Device, Link, Interface\n\nfrom genie.libs.conf.isis import Isis\nfrom genie.libs.conf.address_family import AddressFamily, AddressFamilySubAttributes\n\n\n\nclass test_isis(unittest.TestCase):\n\n def setUp(self):\n pass\n\n testbed = Genie.testbed = Testbed()\n self.dev1 = Device(testbed=testbed, name='PE1', os='iosxe')\n self.dev2 = Device(testbed=testbed, name='PE2', os='iosxe')\n self.intf1 = Interface(name='GigabitEthernet0/0/1',device=self.dev1)\n self.intf2 = Interface(name='GigabitEthernet0/0/2',device=self.dev2)\n\n self.link = Link(name='1_2_1',testbed=testbed)\n self.link.connect_interface(interface=self.intf1)\n self.link.connect_interface(interface=self.intf2)\n self.assertSetEqual(\n set(self.link.find_interfaces()),\n set([self.intf1, self.intf2]))\n self.assertSetEqual(\n set(self.dev1.find_interfaces()),\n set([self.intf1]))\n self.assertSetEqual(\n set(self.dev2.find_interfaces()),\n set([self.intf2]))\n\n isis = Isis(\"core\")\n self.assertSetEqual(set(isis.devices), set([]))\n self.assertSetEqual(set(isis.links), set([]))\n\n self.link.add_feature(isis)\n for intf_iter in self.link.interfaces:\n intf_iter.add_feature(isis)\n self.assertCountEqual(isis.devices, [self.dev1, self.dev2])\n self.assertSetEqual(set(isis.links), set([self.link]))\n self.assertSetEqual(set(isis.interfaces), set([self.intf1, self.intf2]))\n\n \n def test_IsisGlobal(self):\n isis = Isis(\"core\")\n self.link.add_feature(isis)\n\n # Global ISIS config\n isis.nsr = True\n\n # override default for both devices\n isis.is_type = Isis.IsType.level_1\n isis.device_attr['PE1'].is_type = Isis.IsType.level_1_2\n isis.device_attr['PE2'].is_type = Isis.IsType.level_2\n\n # use no default\n isis.device_attr['PE1'].nsf = Isis.Nsf.ietf\n isis.device_attr['PE2'].nsf = Isis.Nsf.cisco\n\n # override default for one device\n val = 1\n isis.device_attr['PE1'].net_id = \"00.0000.0000.000{}.00\".format(val)\n val += 1\n isis.device_attr['PE2'].net_id = \"00.0000.0000.000{}.00\".format(val)\n\n cfg1 = isis.build_config(apply=False)\n\n self.assertCountEqual(cfg1.keys(), ['PE1', 'PE2'])\n self.assertMultiLineEqual(str(cfg1['PE1']), '\\n'.join([\n 'router isis core',\n ' is-type level-1-2',\n ' nsf ietf',\n ' nsr',\n ' net 00.0000.0000.0001.00',\n ' exit',\n 'interface GigabitEthernet0/0/1',\n ' ip router isis core',\n ' exit',\n ]))\n self.assertMultiLineEqual(str(cfg1['PE2']), '\\n'.join([\n 'router isis core',\n ' is-type level-2-only',\n ' nsf cisco',\n ' nsr',\n ' net 00.0000.0000.0002.00',\n ' exit',\n 'interface GigabitEthernet0/0/2',\n ' ip router isis core',\n ' exit',\n ]))\n\n\n uncfg1 = isis.build_unconfig(apply=False)\n\n self.assertCountEqual(uncfg1.keys(), ['PE1', 'PE2'])\n self.assertMultiLineEqual(str(uncfg1['PE1']), '\\n'.join([\n 'no 
router isis core',\n 'interface GigabitEthernet0/0/1',\n ' no ip router isis core',\n ' exit',\n ]))\n self.assertMultiLineEqual(str(uncfg1['PE2']), '\\n'.join([\n 'no router isis core',\n 'interface GigabitEthernet0/0/2',\n ' no ip router isis core',\n ' exit',\n ]))\n\n partial_cfg1 = isis.build_config(apply=False,attributes='device_attr__PE1__nsf')\n self.assertCountEqual(partial_cfg1.keys(), ['PE1'])\n self.assertMultiLineEqual(str(partial_cfg1['PE1']), '\\n'.join([\n 'router isis core',\n ' nsf ietf',\n ' exit',\n ]))\n\n\n def test_IsisPerAddrFamily(self):\n\n isis = Isis(\"core\")\n self.link.add_feature(isis)\n\n val = 1\n isis.device_attr['PE1'].net_id = \"00.0000.0000.000{}.00\".format(val)\n val += 1\n isis.device_attr['PE2'].net_id = \"00.0000.0000.000{}.00\".format(val)\n\n # per address family ISIS\n isis.device_attr['PE1'].address_family_attr['ipv4 unicast'].metric_style = \"wide\"\n isis.device_attr['PE1'].address_family_attr['ipv4 unicast'].mpls_te_level = Isis.IsType.level_1\n isis.device_attr['PE1'].address_family_attr['ipv4 unicast'].mpls_te_rtrid = self.intf1\n\n isis.device_attr['PE2'].address_family_attr['ipv4 unicast'].metric_style = \"narrow\"\n isis.device_attr['PE2'].address_family_attr['ipv4 unicast'].mpls_te_level = Isis.IsType.level_2\n isis.device_attr['PE2'].address_family_attr['ipv4 unicast'].mpls_te_rtrid = self.intf2\n\n cfg1 = isis.build_config(apply=False)\n\n self.assertMultiLineEqual(str(cfg1['PE1']), '\\n'.join([\n 'router isis core',\n ' net 00.0000.0000.0001.00',\n ' metric-style wide',\n ' mpls traffic-eng level-1',\n ' mpls traffic-eng router-id GigabitEthernet0/0/1',\n ' exit',\n 'interface GigabitEthernet0/0/1',\n ' ip router isis core',\n ' exit',\n ]))\n self.assertMultiLineEqual(str(cfg1['PE2']), '\\n'.join([\n 'router isis core',\n ' net 00.0000.0000.0002.00',\n ' metric-style narrow',\n ' mpls traffic-eng level-2',\n ' mpls traffic-eng router-id GigabitEthernet0/0/2',\n ' exit',\n 'interface GigabitEthernet0/0/2',\n ' ip router isis core',\n ' exit',\n ]))\n\n isis.address_families = set([AddressFamily.ipv4_unicast, AddressFamily.ipv6_unicast])\n isis.device_attr['PE1'].address_family_attr['ipv6 unicast'].metric_style = \"wide\"\n isis.device_attr['PE2'].address_family_attr['ipv6 unicast'].metric_style = \"narrow\"\n\n cfg2 = isis.build_config(apply=False)\n\n self.assertMultiLineEqual(str(cfg2['PE1']), '\\n'.join([\n 'router isis core',\n ' net 00.0000.0000.0001.00',\n ' metric-style wide',\n ' mpls traffic-eng level-1',\n ' mpls traffic-eng router-id GigabitEthernet0/0/1',\n ' address-family ipv6 unicast',\n ' metric-style wide',\n ' exit',\n ' exit',\n 'interface GigabitEthernet0/0/1',\n ' ip router isis core',\n ' ipv6 router isis core',\n ' exit',\n ]))\n\n self.assertMultiLineEqual(str(cfg2['PE2']), '\\n'.join([\n 'router isis core',\n ' net 00.0000.0000.0002.00',\n ' metric-style narrow',\n ' mpls traffic-eng level-2',\n ' mpls traffic-eng router-id GigabitEthernet0/0/2',\n ' address-family ipv6 unicast',\n ' metric-style narrow',\n ' exit',\n ' exit',\n 'interface GigabitEthernet0/0/2',\n ' ip router isis core',\n ' ipv6 router isis core',\n ' exit',\n ]))\n\n uncfg2 = isis.build_unconfig(apply=False)\n self.assertMultiLineEqual(str(uncfg2['PE1']), '\\n'.join([\n 'no router isis core',\n 'interface GigabitEthernet0/0/1',\n ' no ip router isis core',\n ' no ipv6 router isis core',\n ' exit',\n ]))\n\n self.assertMultiLineEqual(str(uncfg2['PE2']), '\\n'.join([\n 'no router isis core',\n 'interface GigabitEthernet0/0/2',\n ' no ip 
router isis core',\n ' no ipv6 router isis core',\n ' exit',\n ]))\n\n partial_uncfg2 = isis.build_unconfig(apply=False,attributes='device_attr__*__address_family_attr__*')\n\n self.assertMultiLineEqual(str(partial_uncfg2['PE1']), '\\n'.join([\n 'router isis core',\n ' no metric-style wide',\n ' no mpls traffic-eng level-1',\n ' no mpls traffic-eng router-id GigabitEthernet0/0/1',\n ' no address-family ipv4 unicast',\n ' no address-family ipv6 unicast',\n ' exit',\n ]))\n\n self.assertMultiLineEqual(str(partial_uncfg2['PE2']), '\\n'.join([\n 'router isis core',\n ' no metric-style narrow',\n ' no mpls traffic-eng level-2',\n ' no mpls traffic-eng router-id GigabitEthernet0/0/2',\n ' no address-family ipv4 unicast',\n ' no address-family ipv6 unicast',\n ' exit',\n ]))\n\n partial_cfg21 = isis.build_config(apply=False,attributes='device_attr__PE1__address_family_attr__ipv4 unicast__metric_style')\n self.assertMultiLineEqual(str(partial_cfg21['PE1']), '\\n'.join([\n 'router isis core',\n ' metric-style wide',\n ' exit',\n ]))\n\n\n partial_uncfg21 = isis.build_unconfig(apply=False,attributes='device_attr__PE1__address_family_attr__ipv4 unicast__metric_style')\n self.assertMultiLineEqual(str(partial_uncfg21['PE1']), '\\n'.join([\n 'router isis core',\n ' no metric-style wide',\n ' exit',\n ]))\n\n partial_cfg22 = isis.build_config(apply=False,attributes='device_attr__PE1__address_family_attr__ipv6 unicast__metric_style')\n self.assertMultiLineEqual(str(partial_cfg22['PE1']), '\\n'.join([\n 'router isis core',\n ' address-family ipv6 unicast',\n ' metric-style wide',\n ' exit',\n ' exit',\n ]))\n\n partial_uncfg22 = isis.build_unconfig(apply=False,attributes='device_attr__PE1__address_family_attr__ipv6 unicast__metric_style')\n self.assertMultiLineEqual(str(partial_uncfg22['PE1']), '\\n'.join([\n 'router isis core',\n ' address-family ipv6 unicast',\n ' no metric-style wide',\n ' exit',\n ' exit',\n ]))\n\n def test_IsisPerInterface(self):\n\n isis = Isis(\"core\")\n self.link.add_feature(isis)\n for intf_iter in self.link.interfaces:\n intf_iter.add_feature(isis)\n\n val = 1\n isis.device_attr['PE1'].net_id = \"00.0000.0000.000{}.00\".format(val)\n val += 1\n isis.device_attr['PE2'].net_id = \"00.0000.0000.000{}.00\".format(val)\n\n # per interface ISIS\n isis.device_attr['PE1'].interface_attr[self.intf1].passive = True\n isis.device_attr['PE1'].interface_attr[self.intf1].metric = 20\n isis.device_attr['PE2'].interface_attr[self.intf2].point_to_point = True\n\n cfg1 = isis.build_config(apply=False)\n\n self.assertMultiLineEqual(str(cfg1['PE1']), '\\n'.join([\n 'router isis core',\n ' net 00.0000.0000.0001.00',\n ' passive-interface GigabitEthernet0/0/1',\n ' exit',\n 'interface GigabitEthernet0/0/1',\n ' ip router isis core',\n ' isis metric 20',\n ' exit',\n ]))\n self.assertMultiLineEqual(str(cfg1['PE2']), '\\n'.join([\n 'router isis core',\n ' net 00.0000.0000.0002.00',\n ' exit',\n 'interface GigabitEthernet0/0/2',\n ' ip router isis core',\n ' isis network point-to-point',\n ' exit',\n ]))\n\n isis.address_families = set([AddressFamily.ipv4_unicast, AddressFamily.ipv6_unicast])\n\n cfg2 = isis.build_config(apply=False)\n\n self.assertMultiLineEqual(str(cfg2['PE1']), '\\n'.join([\n 'router isis core',\n ' net 00.0000.0000.0001.00',\n ' passive-interface GigabitEthernet0/0/1',\n ' exit',\n 'interface GigabitEthernet0/0/1',\n ' ip router isis core',\n ' ipv6 router isis core',\n ' isis metric 20',\n ' exit',\n ]))\n self.assertMultiLineEqual(str(cfg2['PE2']), '\\n'.join([\n 'router isis 
core',\n ' net 00.0000.0000.0002.00',\n ' exit',\n 'interface GigabitEthernet0/0/2',\n ' ip router isis core',\n ' ipv6 router isis core',\n ' isis network point-to-point',\n ' exit',\n ]))\n\n partial_cfg21 = isis.build_config(apply=False,attributes='device_attr__*__interface_attr__*__metric')\n self.assertMultiLineEqual(str(partial_cfg21['PE1']), '\\n'.join([\n 'interface GigabitEthernet0/0/1',\n ' isis metric 20',\n ' exit',\n ]))\n\n partial_cfg22 = isis.build_config(apply=False,attributes='device_attr__*__interface_attr__*__address_family_attr__ipv6 unicast')\n self.assertMultiLineEqual(str(partial_cfg22['PE1']), '\\n'.join([\n 'interface GigabitEthernet0/0/1',\n ' ipv6 router isis core',\n ' exit',\n ]))\n\nif __name__ == '__main__':\n unittest.main()\n\n", "id": "12245599", "language": "Python", "matching_score": 1.9611517190933228, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/isis/iosxe/tests/test_isis.py" }, { "content": "#!/usr/bin/env python\n\nimport unittest,re\nfrom unittest.mock import Mock\n\nfrom genie.conf import Genie\nfrom genie.conf.base import Testbed, Device, Link, Interface\n\nfrom genie.libs.conf.te import Te, Srlg\n\nif 0:\n print(\"\\n PE1 CONFIG\\n\" + str(out['PE1']))\n print(\"\\n PE2 CONFIG\\n\" + str(out['PE2']))\n\nclass test_te(unittest.TestCase):\n\n def setUp(self):\n self.tb = Genie.testbed = Testbed()\n self.dev1 = Device(testbed=self.tb, name='PE1', os='iosxr')\n self.dev2 = Device(testbed=self.tb, name='PE2', os='iosxr')\n self.i1 = Interface(name='GigabitEthernet0/0/0/1',device=self.dev1)\n self.i2 = Interface(name='GigabitEthernet0/0/0/2',device=self.dev2)\n self.i3 = Interface(name='GigabitEthernet0/0/0/3',device=self.dev1)\n self.i4 = Interface(name='GigabitEthernet0/0/0/4',device=self.dev2)\n self.i5 = Interface(name='GigabitEthernet0/0/0/5',device=self.dev1)\n self.i6 = Interface(name='GigabitEthernet0/0/0/6',device=self.dev2)\n self.i7 = Interface(name='GigabitEthernet0/0/0/7',device=self.dev1)\n self.i8 = Interface(name='GigabitEthernet0/0/0/8',device=self.dev2)\n self.link = Link(name='1_2_1',testbed=self.tb)\n self.link.connect_interface(interface=self.i1)\n self.link.connect_interface(interface=self.i2)\n self.link2 = Link(name='1_2_2',testbed=self.tb)\n self.link2.connect_interface(interface=self.i3)\n self.link2.connect_interface(interface=self.i4)\n self.link3 = Link(name='1_2_3',testbed=self.tb)\n self.link3.connect_interface(interface=self.i5)\n self.link3.connect_interface(interface=self.i6)\n self.link4 = Link(name='1_2_4',testbed=self.tb)\n self.link4.connect_interface(interface=self.i7)\n self.link4.connect_interface(interface=self.i8)\n self.assertSetEqual(\n set(self.link.find_interfaces()),\n set([self.i1, self.i2]))\n self.assertSetEqual(\n set(self.dev1.find_interfaces()),\n set([self.i1, self.i3, self.i5, self.i7]))\n self.assertSetEqual(\n set(self.dev2.find_interfaces()),\n set([self.i2, self.i4, self.i6, self.i8]))\n \n\n def test_MplsTe(self):\n\n te = Te()\n self.assertSetEqual(set(te.devices), set([]))\n self.assertSetEqual(set(te.links), set([]))\n\n if 0:\n print(\"before\")\n print(te.devices)\n print(te.links)\n print(te.interfaces)\n\n self.link.add_feature(te)\n self.link2.add_feature(te)\n self.link3.add_feature(te)\n self.link4.add_feature(te)\n self.assertCountEqual(te.devices, [self.dev1, self.dev2])\n self.assertSetEqual(set(te.links), set([self.link, self.link2, self.link3, self.link4]))\n self.assertSetEqual(set(te.interfaces), set([self.i1, self.i2, self.i3, self.i4, self.i5, self.i6, 
self.i7, self.i8]))\n\n if 0:\n print(\"after\")\n print(te.links)\n print(te.devices)\n print(te.interfaces)\n\n \n te.log_events_preemption = True\n te.log_events_frr_protection = True\n te.device_attr['PE1'].log_events_frr_protection_type = 'backup-tunnel'\n te.device_attr['PE2'].log_events_frr_protection_type = 'primary-lsp'\n te.device_attr['PE2'].log_events_frr_protection_primary_lsp_type = 'active-state'\n te.srlg_admin_weight = 20000\n te.backup_auto_tun_tunid_min = 210\n te.backup_auto_tun_tunid_max = 600\n te.auto_tun_backup_affinity_ignore = True\n te.affinity_map_val_dict['RED'] = \"0x1\"\n te.soft_preempt_timeout = 5\n te.reoptimize_delay_cleanup = 10\n te.reoptimize_delay_install = 1\n te.flooding_threshold_up = 1\n te.flooding_threshold_down = 1\n\n te.p2mp_auto_tun_tunid_min = 100\n te.p2mp_auto_tun_tunid_max = 200\n te.device_attr['PE1'].p2mp_auto_tun_tunid_min = 200\n te.device_attr['PE1'].p2mp_auto_tun_tunid_max = 300\n te.auto_tun_backup_affinity_ignore = True\n te.auto_tun_backup_timers_rem_unused = 100\n te.auto_tun_backup_attr_set = \"backup\"\n\n \n te.device_attr['PE1'].interface_attr[self.i1].auto_tun_backup_exclude_srlg = True\n te.device_attr['PE1'].interface_attr[self.i3].auto_tun_backup_exclude_srlg = True\n te.device_attr['PE1'].interface_attr[self.i3].auto_tun_backup_exclude_srlg_type = 'preferred'\n te.device_attr['PE1'].interface_attr[self.i5].auto_tun_backup_exclude_srlg = True\n te.device_attr['PE1'].interface_attr[self.i5].auto_tun_backup_exclude_srlg_type = 'weighted'\n te.device_attr['PE1'].affinity_map_val_dict = {}\n te.device_attr['PE1'].affinity_map_val_dict['RED'] = \"0x2\"\n\n te.device_attr['PE2'].affinity_map_bitpos_dict = {}\n te.device_attr['PE2'].affinity_map_bitpos_dict['BLUE'] = 94\n te.device_attr['PE2'].affinity_map_bitpos_dict['EDGE'] = 27\n te.device_attr['PE2'].affinity_map_bitpos_dict['PINK'] = 95\n te.device_attr['PE2'].affinity_map_bitpos_dict['GREEN'] = 91\n te.device_attr['PE2'].affinity_map_bitpos_dict['METRO'] = 29\n \n out = te.build_config(apply=False)\n self.maxDiff = None\n self.assertCountEqual(out.keys(), ['PE1', 'PE2'])\n self.assertMultiLineEqual(str(out['PE1']), '\\n'.join([\n 'mpls traffic-eng',\n ' auto-tunnel backup affinity ignore',\n ' auto-tunnel backup timers removal unused 100',\n ' auto-tunnel backup tunnel-id min 210 max 600',\n ' auto-tunnel p2mp tunnel-id min 200 max 300',\n ' logging events preemption',\n ' logging events frr-protection backup-tunnel',\n ' reoptimize timers delay cleanup 10',\n ' reoptimize timers delay installation 1',\n ' flooding threshold up 1 down 1',\n ' affinity-map RED 0x2',\n ' srlg admin-weight 20000',\n ' soft-preemption timeout 5',\n ' interface GigabitEthernet0/0/0/1',\n ' auto-tunnel backup attribute-set backup',\n ' auto-tunnel backup exclude srlg',\n ' exit',\n ' interface GigabitEthernet0/0/0/3',\n ' auto-tunnel backup attribute-set backup',\n ' auto-tunnel backup exclude srlg preferred',\n ' exit',\n ' interface GigabitEthernet0/0/0/5',\n ' auto-tunnel backup attribute-set backup',\n ' auto-tunnel backup exclude srlg weighted',\n ' exit',\n ' interface GigabitEthernet0/0/0/7',\n ' auto-tunnel backup attribute-set backup',\n ' exit',\n ' exit',\n ]))\n \n if 1:\n self.assertMultiLineEqual(str(out['PE2']), '\\n'.join([\n 'mpls traffic-eng',\n ' auto-tunnel backup affinity ignore',\n ' auto-tunnel backup timers removal unused 100',\n ' auto-tunnel backup tunnel-id min 210 max 600',\n ' auto-tunnel p2mp tunnel-id min 100 max 200',\n ' logging events preemption',\n ' logging 
events frr-protection primary-lsp active-state',\n ' reoptimize timers delay cleanup 10',\n ' reoptimize timers delay installation 1',\n ' flooding threshold up 1 down 1',\n ' affinity-map BLUE bit-position 94',\n ' affinity-map EDGE bit-position 27',\n ' affinity-map GREEN bit-position 91',\n ' affinity-map METRO bit-position 29',\n ' affinity-map PINK bit-position 95',\n ' affinity-map RED 0x1',\n ' srlg admin-weight 20000',\n ' soft-preemption timeout 5',\n ' interface GigabitEthernet0/0/0/2',\n ' auto-tunnel backup attribute-set backup',\n ' exit',\n ' interface GigabitEthernet0/0/0/4',\n ' auto-tunnel backup attribute-set backup',\n ' exit',\n ' interface GigabitEthernet0/0/0/6',\n ' auto-tunnel backup attribute-set backup',\n ' exit',\n ' interface GigabitEthernet0/0/0/8',\n ' auto-tunnel backup attribute-set backup',\n ' exit',\n ' exit',\n ]))\n\n def test_UnnumInterfaces(self):\n # Test unnum interface output\n te = Te()\n self.link.add_feature(te)\n\n te.ipv4_unnum_interfaces = {self.i1, self.i2, self.i3, self.i4}\n\n out = te.build_config(apply=False)\n self.assertRegex(str(out['PE1']), 'ipv4 unnumbered mpls traffic-eng GigabitEthernet0/0/0/1')\n self.assertRegex(str(out['PE1']), 'ipv4 unnumbered mpls traffic-eng GigabitEthernet0/0/0/3')\n \n self.assertRegex(str(out['PE2']), 'ipv4 unnumbered mpls traffic-eng GigabitEthernet0/0/0/2')\n self.assertRegex(str(out['PE2']), 'ipv4 unnumbered mpls traffic-eng GigabitEthernet0/0/0/4')\n \n \n def test_Srlg(self):\n srlg = Srlg()\n self.dev1.add_feature(srlg)\n self.dev2.add_feature(srlg)\n srlg.name_value_dict['R13'] = 10\n srlg.name_value_dict['R11'] = 20\n srlg.name_value_dict['R23'] = 30\n srlg.name_value_dict['R25'] = 40\n srlg.name_value_dict['R34'] = 50\n \n # if per-device dict is not initialized, base class dict will be over-written\n srlg.device_attr['PE1'].name_value_dict = {}\n srlg.device_attr['PE1'].name_value_dict['R13'] = 10\n srlg.device_attr['PE1'].name_value_dict['R11'] = 20\n srlg.device_attr['PE1'].name_value_dict['R23'] = 30\n srlg.device_attr['PE1'].name_value_dict['R25'] = 40\n srlg.device_attr['PE1'].name_value_dict['R34'] = 50\n srlg.device_attr['PE1'].name_value_dict['R35'] = 60\n srlg.device_attr['PE1'].name_value_dict['R45'] = 70\n \n srlg.device_attr['PE1'].interface_attr[self.i1].intf_name = 'R13'\n srlg.device_attr['PE1'].interface_attr[self.i3].intf_name = 'R11'\n srlg.device_attr['PE1'].interface_attr[self.i5].intf_name = 'R23'\n \n out = srlg.build_config(apply=False)\n self.assertCountEqual(out.keys(), ['PE1', 'PE2'])\n self.assertMultiLineEqual(str(out['PE1']), '\\n'.join([\n 'srlg',\n ' name R11 value 20',\n ' name R13 value 10',\n ' name R23 value 30',\n ' name R25 value 40',\n ' name R34 value 50',\n ' name R35 value 60',\n ' name R45 value 70',\n ' interface GigabitEthernet0/0/0/1',\n ' name R13',\n ' exit',\n ' interface GigabitEthernet0/0/0/3',\n ' name R11',\n ' exit',\n ' interface GigabitEthernet0/0/0/5',\n ' name R23',\n ' exit',\n ' exit',\n ]))\n self.assertMultiLineEqual(str(out['PE2']), '\\n'.join([\n 'srlg',\n ' name R11 value 20',\n ' name R13 value 10',\n ' name R23 value 30',\n ' name R25 value 40',\n ' name R34 value 50',\n ' exit',\n ]))\n \n\n\nif __name__ == '__main__':\n unittest.main() \n \n", "id": "10120216", "language": "Python", "matching_score": 4.386000633239746, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/te/tests/test_te.py" }, { "content": "#!/usr/bin/env python\n\nimport unittest,re\nfrom unittest.mock import Mock\n\nfrom genie.conf import 
Genie\nfrom genie.conf.base import Testbed, Device, Link, Interface\n\nfrom genie.libs.conf.te import Te, Srlg\n\nclass test_te(unittest.TestCase):\n\n def setUp(self):\n self.tb = Genie.testbed = Testbed()\n self.dev1 = Device(testbed=self.tb, name='PE1', os='iosxe')\n self.dev2 = Device(testbed=self.tb, name='PE2', os='iosxe')\n self.i1 = Interface(name='GigabitEthernet0/0/1',device=self.dev1)\n self.i2 = Interface(name='GigabitEthernet0/0/2',device=self.dev2)\n self.i3 = Interface(name='GigabitEthernet0/0/3',device=self.dev1)\n self.i4 = Interface(name='GigabitEthernet0/0/4',device=self.dev2)\n self.i5 = Interface(name='GigabitEthernet0/0/5',device=self.dev1)\n self.i6 = Interface(name='GigabitEthernet0/0/6',device=self.dev2)\n self.i7 = Interface(name='GigabitEthernet0/0/7',device=self.dev1)\n self.i8 = Interface(name='GigabitEthernet0/0/8',device=self.dev2)\n self.link = Link(name='1_2_1',testbed=self.tb)\n self.link.connect_interface(interface=self.i1)\n self.link.connect_interface(interface=self.i2)\n self.link2 = Link(name='1_2_2',testbed=self.tb)\n self.link2.connect_interface(interface=self.i3)\n self.link2.connect_interface(interface=self.i4)\n self.link3 = Link(name='1_2_3',testbed=self.tb)\n self.link3.connect_interface(interface=self.i5)\n self.link3.connect_interface(interface=self.i6)\n self.link4 = Link(name='1_2_4',testbed=self.tb)\n self.link4.connect_interface(interface=self.i7)\n self.link4.connect_interface(interface=self.i8)\n self.assertSetEqual(\n set(self.link.find_interfaces()),\n set([self.i1, self.i2]))\n self.assertSetEqual(\n set(self.dev1.find_interfaces()),\n set([self.i1, self.i3, self.i5, self.i7]))\n self.assertSetEqual(\n set(self.dev2.find_interfaces()),\n set([self.i2, self.i4, self.i6, self.i8]))\n \n\n def test_MplsTe(self):\n\n te = Te()\n self.assertSetEqual(set(te.devices), set([]))\n self.assertSetEqual(set(te.links), set([]))\n\n\n self.link.add_feature(te)\n self.link2.add_feature(te)\n self.link3.add_feature(te)\n self.link4.add_feature(te)\n self.assertCountEqual(te.devices, [self.dev1, self.dev2])\n self.assertSetEqual(set(te.links), set([self.link, self.link2, self.link3, self.link4]))\n self.assertSetEqual(set(te.interfaces), set([self.i1, self.i2, self.i3, self.i4, self.i5, self.i6, self.i7, self.i8]))\n\n\n out = te.build_config(apply=False)\n\n self.assertMultiLineEqual(str(out['PE1']), '\\n'.join([\n 'mpls traffic-eng tunnels',\n 'interface GigabitEthernet0/0/1',\n ' mpls traffic-eng tunnels',\n ' exit',\n 'interface GigabitEthernet0/0/3',\n ' mpls traffic-eng tunnels',\n ' exit',\n 'interface GigabitEthernet0/0/5',\n ' mpls traffic-eng tunnels',\n ' exit',\n 'interface GigabitEthernet0/0/7',\n ' mpls traffic-eng tunnels',\n ' exit',\n ]))\n \n self.assertMultiLineEqual(str(out['PE2']), '\\n'.join([\n 'mpls traffic-eng tunnels',\n 'interface GigabitEthernet0/0/2',\n ' mpls traffic-eng tunnels',\n ' exit',\n 'interface GigabitEthernet0/0/4',\n ' mpls traffic-eng tunnels',\n ' exit',\n 'interface GigabitEthernet0/0/6',\n ' mpls traffic-eng tunnels',\n ' exit',\n 'interface GigabitEthernet0/0/8',\n ' mpls traffic-eng tunnels',\n ' exit',\n ]))\n\n\n out = te.build_unconfig(apply=False)\n\n\n self.assertMultiLineEqual(str(out['PE1']), '\\n'.join([\n 'default mpls traffic-eng tunnels',\n 'interface GigabitEthernet0/0/1',\n ' default mpls traffic-eng tunnels',\n ' exit',\n 'interface GigabitEthernet0/0/3',\n ' default mpls traffic-eng tunnels',\n ' exit',\n 'interface GigabitEthernet0/0/5',\n ' default mpls traffic-eng tunnels',\n ' 
exit',\n 'interface GigabitEthernet0/0/7',\n ' default mpls traffic-eng tunnels',\n ' exit',\n ]))\n \n self.assertMultiLineEqual(str(out['PE2']), '\\n'.join([\n 'default mpls traffic-eng tunnels',\n 'interface GigabitEthernet0/0/2',\n ' default mpls traffic-eng tunnels',\n ' exit',\n 'interface GigabitEthernet0/0/4',\n ' default mpls traffic-eng tunnels',\n ' exit',\n 'interface GigabitEthernet0/0/6',\n ' default mpls traffic-eng tunnels',\n ' exit',\n 'interface GigabitEthernet0/0/8',\n ' default mpls traffic-eng tunnels',\n ' exit',\n ]))\n\n\nif __name__ == '__main__':\n unittest.main() \n \n", "id": "3792424", "language": "Python", "matching_score": 0.7758832573890686, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/te/iosxe/tests/test_te.py" }, { "content": "#!/usr/bin/env python\n\nimport unittest\nfrom unittest.mock import Mock\n\nfrom genie.conf import Genie\nfrom genie.conf.base import Testbed, Device, Link, Interface\n\nfrom genie.libs.conf.l2vpn import Xconnect\nfrom genie.libs.conf.bgp import RouteTarget\n\nclass test_xconnect(unittest.TestCase):\n\n def test_init(self):\n\n testbed = Genie.testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxr')\n intf1 = Interface(device=dev1, name='GigabitEthernet0/0/0/1')\n intf2 = Interface(device=dev1, name='GigabitEthernet0/0/0/2')\n dev2 = Device(testbed=testbed, name='PE2', os='iosxr')\n intf3 = Interface(device=dev2, name='GigabitEthernet0/0/0/3')\n intf4 = Interface(device=dev2, name='GigabitEthernet0/0/0/4')\n link1 = Link(testbed=testbed, name='link1', interfaces=(intf1, intf3))\n link2 = Link(testbed=testbed, name='link2', interfaces=(intf2, intf4))\n\n with self.assertRaises(TypeError):\n xc1 = Xconnect()\n\n with self.assertRaises(TypeError):\n xc1 = Xconnect(group_name='bg1')\n\n xc1 = Xconnect(name='xc1', group_name='bg1')\n self.assertIs(xc1.xconnect_type, Xconnect.Type.p2p)\n self.assertEqual(xc1.name, 'xc1')\n self.assertEqual(xc1.group_name, 'bg1')\n\n xc1 = Xconnect(name='xc1')\n self.assertEqual(xc1.name, 'xc1')\n self.assertEqual(xc1.group_name, 'xc1g')\n\n self.assertCountEqual(xc1.devices, [])\n self.assertCountEqual(xc1.interfaces, [])\n self.assertCountEqual(xc1.segments, [])\n self.assertCountEqual(xc1.link.interfaces, [])\n\n dev1.add_feature(xc1)\n self.assertCountEqual(xc1.devices, [dev1])\n self.assertCountEqual(xc1.interfaces, [])\n self.assertCountEqual(xc1.segments, [])\n self.assertCountEqual(xc1.link.interfaces, [])\n\n cfgs = xc1.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join([\n 'l2vpn',\n ' xconnect group xc1g',\n ' p2p xc1',\n ' exit',\n ' exit',\n ' exit',\n ]))\n\n #xc1.add_interface(intf1)\n intf1.l2transport.enabled = True\n #self.assertCountEqual(xc1.interfaces, [intf1])\n #self.assertCountEqual(xc1.devices, [dev1])\n #self.assertCountEqual(xc1.segments, [intf1])\n #self.assertCountEqual(xc1.link.interfaces, [intf3])\n #self.assertCountEqual(xc1.device_attr[dev1].interfaces, [intf1])\n #self.assertCountEqual(xc1.device_attr[dev2].interfaces, [])\n #self.assertCountEqual(xc1.device_attr[dev1].segments, [intf1])\n self.assertCountEqual(xc1.device_attr[dev2].segments, [])\n\n cfgs = xc1.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n if False:\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join([\n 'l2vpn',\n ' xconnect group xc1g',\n ' p2p xc1',\n ' interface GigabitEthernet0/0/0/1',\n ' exit',\n ' exit',\n ' exit',\n ]))\n\n 
dev2.add_feature(xc1) \n xc1.xconnect_type = Xconnect.Type.mp2mp\n xc1.autodiscovery_bgp.enabled = True\n xc1.autodiscovery_bgp.signaling_protocol_bgp.enabled = True \n xc1.autodiscovery_bgp.export_route_targets = [RouteTarget.ImportExport('1.1.1.1:1')]\n xc1.autodiscovery_bgp.import_route_targets = [RouteTarget.ImportExport('1.1.1.1:1')]\n xc1.autodiscovery_bgp.rd = '1000:1'\n xc1.device_attr[dev1].vpn_id = 100\n xc1.device_attr[dev2].vpn_id = 101 \n\n ce_id1 = 1001\n xc1.device_attr[dev1].autodiscovery_bgp.signaling_protocol_bgp.add_ce_id(ce_id1)\n xc1.device_attr[dev1].autodiscovery_bgp.signaling_protocol_bgp.ce_attr[ce_id1].add_interface(intf1)\n ce_id2 = 1000\n xc1.device_attr[dev2].autodiscovery_bgp.signaling_protocol_bgp.add_ce_id(ce_id2)\n xc1.device_attr[dev2].autodiscovery_bgp.signaling_protocol_bgp.ce_attr[ce_id2].add_interface(intf2)\n\n\n xc1.device_attr[dev1].autodiscovery_bgp.signaling_protocol_bgp.ce_attr[ce_id1].interface_attr[intf1].remote_ce_id = ce_id2\n xc1.device_attr[dev2].autodiscovery_bgp.signaling_protocol_bgp.ce_attr[ce_id2].interface_attr[intf2].remote_ce_id = ce_id1\n cfgs = xc1.build_config(apply=False)\n # TODO print(cfgs)\n\nif __name__ == '__main__':\n unittest.main()\n\n", "id": "12845903", "language": "Python", "matching_score": 6.1072001457214355, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/l2vpn/tests/test_xconnect.py" }, { "content": "#!/usr/bin/env python\n\nimport unittest\nfrom unittest.mock import Mock\n\nfrom genie.conf import Genie\nfrom genie.conf.base import Testbed, Device, Link, Interface\n\nfrom genie.libs.conf.l2vpn import Xconnect\nfrom genie.libs.conf.bgp import RouteTarget\n\nclass test_xconnect(unittest.TestCase):\n\n def test_init(self):\n\n testbed = Genie.testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxe')\n intf1 = Interface(device=dev1, name='GigabitEthernet0/0/1')\n intf2 = Interface(device=dev1, name='GigabitEthernet0/0/2')\n dev2 = Device(testbed=testbed, name='PE2', os='iosxe')\n intf3 = Interface(device=dev2, name='GigabitEthernet0/0/3')\n intf4 = Interface(device=dev2, name='GigabitEthernet0/0/4')\n link1 = Link(testbed=testbed, name='link1', interfaces=(intf1, intf3))\n link2 = Link(testbed=testbed, name='link2', interfaces=(intf2, intf4))\n\n with self.assertRaises(TypeError):\n xc1 = Xconnect()\n\n xc1 = Xconnect(name='xc1')\n self.assertIs(xc1.xconnect_type, Xconnect.Type.p2p)\n self.assertEqual(xc1.name, 'xc1')\n\n self.assertCountEqual(xc1.devices, [])\n self.assertCountEqual(xc1.interfaces, [])\n self.assertCountEqual(xc1.segments, [])\n self.assertCountEqual(xc1.link.interfaces, [])\n\n dev1.add_feature(xc1)\n self.assertCountEqual(xc1.devices, [dev1])\n self.assertCountEqual(xc1.interfaces, [])\n self.assertCountEqual(xc1.segments, [])\n self.assertCountEqual(xc1.link.interfaces, [])\n\n cfgs = xc1.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join([\n 'l2vpn xconnect context xc1',\n ' exit',\n ]))\n\n efp = Interface(device=dev1, name='GigabitEthernet0/0/1.20',service_instance=20)\n efp.encapsulation = 'dot1q'\n efp.rewrite_ingress = 'pop 1 symmetric'\n\n xc1.add_segment(efp)\n self.assertCountEqual(xc1.interfaces, [efp])\n self.assertCountEqual(xc1.devices, [dev1])\n self.assertCountEqual(xc1.segments, [efp])\n self.assertCountEqual(xc1.device_attr[dev1].interfaces, [efp])\n self.assertCountEqual(xc1.device_attr[dev2].interfaces, [])\n 
self.assertCountEqual(xc1.device_attr[dev1].segments, [efp])\n self.assertCountEqual(xc1.device_attr[dev2].segments, [])\n\n cfgs = xc1.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join([\n 'l2vpn xconnect context xc1',\n ' member GigabitEthernet0/0/1 service-instance 20',\n ' exit',\n ]))\n\nif __name__ == '__main__':\n unittest.main()\n\n", "id": "2083829", "language": "Python", "matching_score": 2.9820306301116943, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/l2vpn/iosxe/tests/test_xconnect.py" }, { "content": "#!/usr/bin/env python\n\nimport unittest\nimport unittest.mock\nfrom unittest.mock import Mock, MagicMock\n\nfrom genie.conf.tests import TestCase\nfrom genie.conf import Genie\nfrom genie.conf.base import Testbed, Device, Link, Interface\n\nfrom genie.libs.conf.l2vpn import BridgeDomain, Xconnect, Vfi, Pseudowire\nfrom genie.libs.conf.l2vpn import PseudowireNeighbor, PseudowireIPNeighbor, PseudowireIPv4Neighbor\n\n\nclass test_pseudowire(TestCase):\n\n def test_init(self):\n\n testbed = Genie.testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxe')\n lo1 = Interface(device=dev1, name='Loopback0', ipv4='172.16.17.32/32')\n intf1 = Interface(device=dev1, name='GigabitEthernet0/0/1', ipv4='10.1.0.1/24')\n intf2 = Interface(device=dev1, name='GigabitEthernet0/0/2', ipv4='10.2.0.1/24')\n dev2 = Device(testbed=testbed, name='PE2', os='iosxe')\n lo2 = Interface(device=dev2, name='Loopback0', ipv4='172.16.17.32/32')\n intf3 = Interface(device=dev2, name='GigabitEthernet0/0/3', ipv4='10.1.0.2/24')\n intf4 = Interface(device=dev2, name='GigabitEthernet0/0/4', ipv4='10.2.0.2/24')\n link1 = Link(testbed=testbed, name='link1', interfaces=(intf1, intf3))\n link2 = Link(testbed=testbed, name='link2', interfaces=(intf2, intf4))\n dev3 = Device(testbed=testbed, name='PE3', os='iosxe')\n\n container = Mock()\n container.add_pseudowire = MagicMock()\n nbr1 = PseudowireNeighbor(container=container, device=dev1, ip='1.2.3.4')\n nbr2 = PseudowireNeighbor(container=container, device=dev2, ip='1.2.3.4')\n nbr3 = PseudowireNeighbor(container=container, device=dev3, ip='1.2.3.4')\n\n with self.assertRaises(TypeError):\n pw1 = Pseudowire()\n with self.assertRaises(TypeError):\n pw1 = Pseudowire(pw_id=1)\n with self.assertRaises(ValueError):\n pw1 = Pseudowire(neighbors=())\n with self.assertRaises(ValueError):\n pw1 = Pseudowire(neighbors=(nbr1,))\n with self.assertRaises(ValueError):\n pw1 = Pseudowire(neighbors=(nbr1, nbr1))\n with self.assertRaises(ValueError):\n pw1 = Pseudowire(neighbors=(nbr1, nbr2, nbr3))\n with self.assertRaises(ValueError):\n pw1 = Pseudowire(neighbors=(nbr1, intf2))\n\n pw1 = Pseudowire(neighbors=(nbr1, nbr2))\n self.assertCountEqual(pw1.neighbors, [nbr1, nbr2])\n self.assertEqual(nbr1.pw_id, None)\n self.assertEqual(nbr2.pw_id, None)\n container.add_pseudowire.assert_has_calls([\n unittest.mock.call(pw1),\n unittest.mock.call(pw1),\n ])\n container.add_pseudowire.reset_mock()\n\n pw1 = Pseudowire(neighbors=(nbr1, nbr2), pw_id=1)\n self.assertCountEqual(pw1.neighbors, [nbr1, nbr2])\n self.assertEqual(nbr1.pw_id, 1)\n self.assertEqual(nbr2.pw_id, 1)\n container.add_pseudowire.assert_has_calls([\n unittest.mock.call(pw1),\n unittest.mock.call(pw1),\n ])\n container.add_pseudowire.reset_mock()\n\n def test_init_vpls(self):\n\n testbed = Genie.testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxe')\n lo1 = Interface(device=dev1, 
name='Loopback0', ipv4='172.16.17.32/32')\n intf1 = Interface(device=dev1, name='GigabitEthernet0/0/1', ipv4='10.1.0.1/24')\n intf2 = Interface(device=dev1, name='GigabitEthernet0/0/2', ipv4='10.2.0.1/24')\n dev2 = Device(testbed=testbed, name='PE2', os='iosxe')\n lo2 = Interface(device=dev2, name='Loopback0', ipv4='172.16.17.32/32')\n intf3 = Interface(device=dev2, name='GigabitEthernet0/0/3', ipv4='10.1.0.2/24')\n intf4 = Interface(device=dev2, name='GigabitEthernet0/0/4', ipv4='10.2.0.2/24')\n link1 = Link(testbed=testbed, name='link1', interfaces=(intf1, intf3))\n link2 = Link(testbed=testbed, name='link2', interfaces=(intf2, intf4))\n dev3 = Device(testbed=testbed, name='PE3', os='iosxe')\n\n bd1 = BridgeDomain(name='200')\n dev1.add_feature(bd1)\n dev2.add_feature(bd1)\n\n self.assertCountEqual(bd1.pseudowires, [])\n nbr1 = bd1.create_pseudowire_neighbor(device=dev1, ip=lo2.ipv4.ip)\n nbr2 = bd1.device_attr[dev2].create_pseudowire_neighbor(ip=lo1.ipv4.ip)\n pw1 = Pseudowire(neighbors=(nbr1, nbr2), pw_id=1)\n self.assertCountEqual(bd1.pseudowires, [pw1])\n self.assertCountEqual(bd1.pseudowire_neighbors, [nbr1, nbr2])\n self.assertCountEqual(bd1.device_attr[dev1].pseudowires, [pw1])\n self.assertCountEqual(bd1.device_attr[dev1].pseudowire_neighbors, [nbr1])\n self.assertCountEqual(bd1.device_attr[dev2].pseudowires, [pw1])\n self.assertCountEqual(bd1.device_attr[dev2].pseudowire_neighbors, [nbr2])\n self.assertCountEqual(bd1.device_attr[dev3].pseudowires, [])\n self.assertCountEqual(bd1.device_attr[dev3].pseudowire_neighbors, [])\n self.assertCountEqual(bd1.segments, [pw1])\n self.assertCountEqual(bd1.device_attr[dev1].segments, [pw1])\n self.assertCountEqual(bd1.device_attr[dev2].segments, [pw1])\n self.assertCountEqual(bd1.device_attr[dev3].segments, [])\n\n cfgs = bd1.build_config(apply=False)\n self.assertMultiLineDictEqual(\n cfgs,\n {\n dev1.name: '\\n'.join([\n 'bridge-domain 200',\n ' exit',\n ]),\n dev2.name: '\\n'.join([\n 'bridge-domain 200',\n ' exit',\n ]),\n })\n\n def test_init_vpls_vfi(self):\n\n testbed = Genie.testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxe')\n lo1 = Interface(device=dev1, name='Loopback0', ipv4='172.16.17.32/32')\n intf1 = Interface(device=dev1, name='GigabitEthernet0/0/1', ipv4='10.1.0.1/24')\n intf2 = Interface(device=dev1, name='GigabitEthernet0/0/2', ipv4='10.2.0.1/24')\n dev2 = Device(testbed=testbed, name='PE2', os='iosxe')\n lo2 = Interface(device=dev2, name='Loopback0', ipv4='172.16.17.32/32')\n intf3 = Interface(device=dev2, name='GigabitEthernet0/0/3', ipv4='10.1.0.2/24')\n intf4 = Interface(device=dev2, name='GigabitEthernet0/0/4', ipv4='10.2.0.2/24')\n link1 = Link(testbed=testbed, name='link1', interfaces=(intf1, intf3))\n link2 = Link(testbed=testbed, name='link2', interfaces=(intf2, intf4))\n dev3 = Device(testbed=testbed, name='PE3', os='iosxe')\n\n bd1 = BridgeDomain(name='200')\n dev1.add_feature(bd1)\n dev2.add_feature(bd1)\n\n vfi1 = Vfi(name='vfi1', device=dev1)\n vfi1.vpn_id = 1\n bd1.add_vfi(vfi1)\n\n self.assertCountEqual(bd1.segments, [vfi1])\n self.assertCountEqual(bd1.device_attr[dev1].segments, [vfi1])\n self.assertCountEqual(bd1.device_attr[dev2].segments, [])\n self.assertCountEqual(bd1.device_attr[dev3].segments, [])\n\n self.assertCountEqual(bd1.pseudowires, [])\n nbr1 = vfi1.create_pseudowire_neighbor(ip=lo2.ipv4.ip)\n nbr2 = bd1.device_attr[dev2].create_pseudowire_neighbor(ip=lo1.ipv4.ip)\n pw1 = Pseudowire(neighbors=(nbr1, nbr2), pw_id=1)\n\n self.assertCountEqual(vfi1.pseudowires, [pw1])\n 
self.assertCountEqual(vfi1.pseudowire_neighbors, [nbr1])\n self.assertCountEqual(vfi1.segments, [pw1])\n\n self.assertCountEqual(bd1.pseudowires, [pw1])\n self.assertCountEqual(bd1.pseudowire_neighbors, [nbr2])\n self.assertCountEqual(bd1.device_attr[dev1].pseudowires, [])\n self.assertCountEqual(bd1.device_attr[dev1].pseudowire_neighbors, [])\n self.assertCountEqual(bd1.device_attr[dev2].pseudowires, [pw1])\n self.assertCountEqual(bd1.device_attr[dev2].pseudowire_neighbors, [nbr2])\n self.assertCountEqual(bd1.device_attr[dev3].pseudowires, [])\n self.assertCountEqual(bd1.device_attr[dev3].pseudowire_neighbors, [])\n self.assertCountEqual(bd1.segments, [vfi1, pw1])\n self.assertCountEqual(bd1.device_attr[dev1].segments, [vfi1])\n self.assertCountEqual(bd1.device_attr[dev2].segments, [pw1])\n self.assertCountEqual(bd1.device_attr[dev3].segments, [])\n\n cfgs = bd1.build_config(apply=False)\n #print(cfgs['PE1'])\n #print(cfgs['PE2'])\n self.assertMultiLineDictEqual(\n cfgs,\n {\n dev1.name: '\\n'.join([\n 'l2vpn vfi context vfi1',\n ' vpn id 1',\n ' member 172.16.17.32 1 encapsulation mpls',\n ' exit',\n 'bridge-domain 200',\n ' member vfi vfi1',\n ' exit',\n ]),\n dev2.name: '\\n'.join([\n 'bridge-domain 200',\n ' exit',\n ]),\n })\n\n def test_init_vpws(self):\n\n testbed = Genie.testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxr')\n lo1 = Interface(device=dev1, name='Loopback0', ipv4='172.16.17.32/32')\n intf1 = Interface(device=dev1, name='GigabitEthernet0/0/0/1', ipv4='10.1.0.1/24')\n intf2 = Interface(device=dev1, name='GigabitEthernet0/0/0/2', ipv4='10.2.0.1/24')\n dev2 = Device(testbed=testbed, name='PE2', os='iosxr')\n lo2 = Interface(device=dev2, name='Loopback0', ipv4='172.16.17.32/32')\n intf3 = Interface(device=dev2, name='GigabitEthernet0/0/0/3', ipv4='10.1.0.2/24')\n intf4 = Interface(device=dev2, name='GigabitEthernet0/0/0/4', ipv4='10.2.0.2/24')\n link1 = Link(testbed=testbed, name='link1', interfaces=(intf1, intf3))\n link2 = Link(testbed=testbed, name='link2', interfaces=(intf2, intf4))\n dev3 = Device(testbed=testbed, name='PE3', os='iosxr')\n\n xc1 = Xconnect(name='xc1')\n dev1.add_feature(xc1)\n dev2.add_feature(xc1)\n\n self.assertCountEqual(xc1.pseudowires, [])\n nbr1 = xc1.create_pseudowire_neighbor(device=dev1, ip=lo2.ipv4.ip)\n nbr2 = xc1.device_attr[dev2].create_pseudowire_neighbor(ip=lo1.ipv4.ip)\n pw1 = Pseudowire(neighbors=(nbr1, nbr2), pw_id=1)\n self.assertCountEqual(xc1.pseudowires, [pw1])\n self.assertCountEqual(xc1.device_attr[dev1].pseudowires, [pw1])\n self.assertCountEqual(xc1.device_attr[dev2].pseudowires, [pw1])\n self.assertCountEqual(xc1.device_attr[dev3].pseudowires, [])\n self.assertCountEqual(xc1.segments, [pw1])\n self.assertCountEqual(xc1.device_attr[dev1].segments, [pw1])\n self.assertCountEqual(xc1.device_attr[dev2].segments, [pw1])\n self.assertCountEqual(xc1.device_attr[dev3].segments, [])\n\n cfgs = xc1.build_config(apply=False)\n self.assertMultiLineDictEqual(\n cfgs,\n {\n dev1.name: '\\n'.join([\n 'l2vpn',\n ' xconnect group xc1g',\n ' p2p xc1',\n ' neighbor ipv4 172.16.17.32 pw-id 1',\n ' exit',\n ' exit',\n ' exit',\n ' exit',\n ]),\n dev2.name: '\\n'.join([\n 'l2vpn',\n ' xconnect group xc1g',\n ' p2p xc1',\n ' neighbor ipv4 172.16.17.32 pw-id 1',\n ' exit',\n ' exit',\n ' exit',\n ' exit',\n ]),\n })\n\nif __name__ == '__main__':\n unittest.main()\n\n", "id": "3094520", "language": "Python", "matching_score": 4.464391708374023, "max_stars_count": 0, "path": 
"pkgs/conf-pkg/src/genie/libs/conf/l2vpn/iosxe/tests/test_pseudowire.py" }, { "content": "#!/usr/bin/env python\n\nimport unittest\nfrom unittest.mock import Mock\n\nfrom genie.conf import Genie\nfrom genie.conf.base import Testbed, Device, Link, Interface\n\nfrom genie.libs.conf.l2vpn import BridgeDomain\n\n\nclass test_bridge_domain(unittest.TestCase):\n\n def test_init(self):\n\n testbed = Genie.testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxr')\n intf1 = Interface(device=dev1, name='GigabitEthernet0/0/0/1')\n intf2 = Interface(device=dev1, name='GigabitEthernet0/0/0/2')\n dev2 = Device(testbed=testbed, name='PE2', os='iosxr')\n intf3 = Interface(device=dev2, name='GigabitEthernet0/0/0/3')\n intf4 = Interface(device=dev2, name='GigabitEthernet0/0/0/4')\n link1 = Link(testbed=testbed, name='link1', interfaces=(intf1, intf3))\n link2 = Link(testbed=testbed, name='link2', interfaces=(intf2, intf4))\n\n with self.assertRaises(TypeError):\n bd1 = BridgeDomain()\n\n with self.assertRaises(TypeError):\n bd1 = BridgeDomain(group_name='bg1')\n\n bd1 = BridgeDomain(name='bd1', group_name='mybg1')\n self.assertEqual(bd1.name, 'bd1')\n self.assertEqual(bd1.group_name, 'mybg1')\n\n bd1 = BridgeDomain(name='bd1')\n self.assertEqual(bd1.name, 'bd1')\n self.assertEqual(bd1.group_name, 'bd1g')\n\n self.assertCountEqual(bd1.devices, [])\n self.assertCountEqual(bd1.interfaces, [])\n self.assertCountEqual(bd1.vnis, [])\n self.assertCountEqual(bd1.vfis, [])\n self.assertCountEqual(bd1.evis, [])\n self.assertCountEqual(bd1.segments, [])\n self.assertCountEqual(bd1.link.interfaces, [])\n\n dev1.add_feature(bd1)\n self.assertCountEqual(bd1.devices, [dev1])\n self.assertCountEqual(bd1.interfaces, [])\n self.assertCountEqual(bd1.vnis, [])\n self.assertCountEqual(bd1.vfis, [])\n self.assertCountEqual(bd1.evis, [])\n self.assertCountEqual(bd1.segments, [])\n self.assertCountEqual(bd1.link.interfaces, [])\n\n cfgs = bd1.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join([\n 'l2vpn',\n ' bridge group bd1g',\n ' bridge-domain bd1',\n ' exit',\n ' exit',\n ' exit',\n ]))\n\n bd1.add_interface(intf1)\n intf1.l2transport.enabled = True\n self.assertCountEqual(bd1.interfaces, [intf1])\n self.assertCountEqual(bd1.devices, [dev1])\n self.assertCountEqual(bd1.vnis, [])\n self.assertCountEqual(bd1.vfis, [])\n self.assertCountEqual(bd1.evis, [])\n self.assertCountEqual(bd1.segments, [intf1])\n # Links under Genie Interface object is deprecated\n # Placed the below workaround to bypass the Unittest (commented out)\n # self.assertCountEqual(bd1.link.interfaces, [intf3])\n self.assertCountEqual(bd1.device_attr[dev1].interfaces, [intf1])\n self.assertCountEqual(bd1.device_attr[dev2].interfaces, [])\n self.assertCountEqual(bd1.device_attr[dev1].segments, [intf1])\n self.assertCountEqual(bd1.device_attr[dev2].segments, [])\n\n cfgs = bd1.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join([\n 'l2vpn',\n ' bridge group bd1g',\n ' bridge-domain bd1',\n ' interface GigabitEthernet0/0/0/1',\n ' exit',\n ' exit',\n ' exit',\n ' exit',\n ]))\n\nif __name__ == '__main__':\n unittest.main()\n\n", "id": "1334165", "language": "Python", "matching_score": 5.106074810028076, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/l2vpn/tests/test_bridge_domain.py" }, { "content": "#!/usr/bin/env python\n\nimport unittest\nfrom unittest.mock 
import Mock\n\nfrom genie.conf import Genie\nfrom genie.conf.base import Testbed, Device, Link, Interface\n\nfrom genie.libs.conf.l2vpn import BridgeDomain\n\n\nclass test_bridge_domain(unittest.TestCase):\n\n def test_init(self):\n\n testbed = Genie.testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxe')\n intf1 = Interface(device=dev1, name='GigabitEthernet0/0/1')\n intf2 = Interface(device=dev1, name='GigabitEthernet0/0/2')\n dev2 = Device(testbed=testbed, name='PE2', os='iosxe')\n intf3 = Interface(device=dev2, name='GigabitEthernet0/0/3')\n intf4 = Interface(device=dev2, name='GigabitEthernet0/0/4')\n link1 = Link(testbed=testbed, name='link1', interfaces=(intf1, intf3))\n link2 = Link(testbed=testbed, name='link2', interfaces=(intf2, intf4))\n\n bd1 = BridgeDomain(name='200')\n self.assertEqual(bd1.name, '200')\n\n self.assertCountEqual(bd1.devices, [])\n self.assertCountEqual(bd1.interfaces, [])\n self.assertCountEqual(bd1.vnis, [])\n self.assertCountEqual(bd1.vfis, [])\n self.assertCountEqual(bd1.evis, [])\n self.assertCountEqual(bd1.segments, [])\n self.assertCountEqual(bd1.link.interfaces, [])\n\n dev1.add_feature(bd1)\n self.assertCountEqual(bd1.devices, [dev1])\n self.assertCountEqual(bd1.interfaces, [])\n self.assertCountEqual(bd1.vnis, [])\n self.assertCountEqual(bd1.vfis, [])\n self.assertCountEqual(bd1.evis, [])\n self.assertCountEqual(bd1.segments, [])\n self.assertCountEqual(bd1.link.interfaces, [])\n\n cfgs = bd1.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join([\n 'bridge-domain 200',\n ' exit',\n ]))\n\n efp = Interface(device=dev1, name='GigabitEthernet0/0/1.20',service_instance=20)\n efp.encapsulation = 'dot1q'\n efp.rewrite_ingress = 'pop 1 symmetric'\n\n bd1.add_segment(efp)\n self.assertCountEqual(bd1.interfaces, [efp])\n self.assertCountEqual(bd1.devices, [dev1])\n self.assertCountEqual(bd1.vnis, [])\n self.assertCountEqual(bd1.vfis, [])\n self.assertCountEqual(bd1.evis, [])\n self.assertCountEqual(bd1.segments, [efp])\n self.assertCountEqual(bd1.device_attr[dev1].interfaces, [efp])\n self.assertCountEqual(bd1.device_attr[dev2].interfaces, [])\n self.assertCountEqual(bd1.device_attr[dev1].segments, [efp])\n self.assertCountEqual(bd1.device_attr[dev2].segments, [])\n\n\n cfgs = bd1.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join([\n 'bridge-domain 200',\n ' member GigabitEthernet0/0/1 service-instance 20',\n ' exit',\n ]))\n\nif __name__ == '__main__':\n unittest.main()\n\n", "id": "4204551", "language": "Python", "matching_score": 2.078294277191162, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/l2vpn/iosxe/tests/test_bridge_domain.py" }, { "content": "#!/usr/bin/env python\n\nimport collections\nimport types\nimport unittest\nfrom unittest.mock import Mock\n\nfrom genie.utils.cisco_collections import typedset\n\nfrom genie.conf import Genie\nfrom genie.conf.base import Testbed, Device, Link, Interface\n\nfrom genie.libs.conf.evpn import Evpn, Evi\n\n\nclass test_evi(unittest.TestCase):\n\n def assertDictEqual(self, d1, d2, *args, **kwargs):\n d1_modified = {key:str(value) for key, value in d1.items()}\n return super().assertDictEqual(d1_modified, d2, *args, **kwargs)\n\n def test_init(self):\n\n testbed = Genie.testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxr')\n intf1 = Interface(device=dev1, 
name='GigabitEthernet0/0/0/1')\n intf2 = Interface(device=dev1, name='GigabitEthernet0/0/0/2')\n dev2 = Device(testbed=testbed, name='PE2', os='iosxr')\n intf3 = Interface(device=dev2, name='GigabitEthernet0/0/0/3')\n intf4 = Interface(device=dev2, name='GigabitEthernet0/0/0/4')\n\n with self.assertRaises(TypeError):\n evi1 = Evi()\n\n with self.assertRaises(TypeError):\n evi1 = Evi(device=dev1)\n\n with self.assertRaises(TypeError):\n evi1 = Evi(evi_id=1)\n\n with self.assertRaises(AssertionError):\n evi1 = Evi(device=dev1, evi_id=1)\n\n evpn = Evpn()\n self.assertSetEqual(evpn.evis, set())\n\n dev2.add_feature(evpn)\n\n with self.assertRaises(AssertionError):\n evi1 = Evi(device=dev1, evi_id=1)\n\n dev1.add_feature(evpn)\n evi1 = Evi(device=dev1, evi_id=1)\n self.assertIs(evi1.testbed, testbed)\n self.assertIsInstance(evpn.evis, typedset)\n self.assertSetEqual(evpn.evis, set([evi1]))\n self.assertIs(type(evpn.device_attr[dev1].evis), types.GeneratorType)\n self.assertCountEqual(evpn.device_attr[dev1].evis, [evi1])\n self.assertCountEqual(evpn.device_attr[dev2].evis, [])\n\n self.assertIsNotNone(evi1.bgp)\n with self.assertRaises(AttributeError):\n evi1.bgp = None\n\n self.assertIsNotNone(evi1.load_balancing)\n with self.assertRaises(AttributeError):\n evi1.load_balancing = None\n\n self.assertFalse(evi1.load_balancing.enabled)\n evpn.load_balancing.enabled = True\n self.assertTrue(evi1.load_balancing.enabled)\n with self.assertRaises(AttributeError):\n del evi1.load_balancing.enabled\n evi1.load_balancing.enabled = False\n self.assertFalse(evi1.load_balancing.enabled)\n del evi1.load_balancing.enabled\n self.assertTrue(evi1.load_balancing.enabled)\n\n cfgs = evpn.build_config(apply=False)\n self.assertDictEqual(cfgs, {\n dev1.name: '\\n'.join([\n 'evpn',\n ' evi 1',\n ' load-balancing',\n ' exit',\n ' exit',\n ' load-balancing',\n ' exit',\n ' exit',\n ]),\n dev2.name: '\\n'.join([\n 'evpn',\n ' load-balancing',\n ' exit',\n ' exit',\n ]),\n })\n\n dev2.remove_feature(evpn)\n cfgs = evpn.build_config(apply=False)\n self.assertDictEqual(cfgs, {\n dev1.name: '\\n'.join([\n 'evpn',\n ' evi 1',\n ' load-balancing',\n ' exit',\n ' exit',\n ' load-balancing',\n ' exit',\n ' exit',\n ]),\n })\n\n evi1.load_balancing.enabled = False\n cfgs = evpn.build_config(apply=False)\n self.assertDictEqual(cfgs, {\n dev1.name: '\\n'.join([\n 'evpn',\n ' evi 1',\n ' exit',\n ' load-balancing',\n ' exit',\n ' exit',\n ]),\n })\n\n # XXXJST\n # cfg = evi1.build_config(apply=False)\n # self.assertMultiLineEqual(cfg, '\\n'.join([\n # 'evpn',\n # ' evi 1',\n # ' exit',\n # ' exit',\n # ]))\n\n cfg = evi1.build_config(apply=False)\n self.assertMultiLineEqual(str(cfg), '\\n'.join([\n 'evi 1',\n ' exit',\n ]))\n\nif __name__ == '__main__':\n unittest.main()\n\n", "id": "9738462", "language": "Python", "matching_score": 4.327888488769531, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/evpn/tests/test_evi.py" }, { "content": "#!/usr/bin/env python\n\nimport unittest\nfrom unittest.mock import Mock\n\nfrom genie.conf import Genie\nfrom genie.conf.base import Testbed, Device, Link, Interface\n\nfrom genie.libs.conf.l2vpn import IccpGroup\n\n\nclass test_iccp_group(unittest.TestCase):\n\n def test_init(self):\n\n testbed = Genie.testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxr')\n intf1 = Interface(device=dev1, name='GigabitEthernet0/0/0/1')\n intf2 = Interface(device=dev1, name='GigabitEthernet0/0/0/2')\n dev2 = Device(testbed=testbed, name='PE2', os='iosxr')\n intf3 = 
Interface(device=dev2, name='GigabitEthernet0/0/0/3')\n intf4 = Interface(device=dev2, name='GigabitEthernet0/0/0/4')\n link1 = Link(testbed=testbed, name='link1', interfaces=(intf1, intf3))\n link2 = Link(testbed=testbed, name='link2', interfaces=(intf2, intf4))\n\n with self.assertRaises(TypeError):\n grp1 = IccpGroup()\n\n grp1 = IccpGroup(group_id=1)\n self.assertEqual(grp1.group_id, 1)\n\n self.assertCountEqual(grp1.devices, [])\n self.assertCountEqual(grp1.interfaces, [])\n\n dev1.add_feature(grp1)\n self.assertCountEqual(grp1.devices, [dev1])\n self.assertCountEqual(grp1.interfaces, [])\n\n cfgs = grp1.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join([\n 'redundancy',\n ' iccp group 1',\n ' exit',\n ' exit',\n ]))\n\n grp1.add_interface(intf1)\n self.assertCountEqual(grp1.interfaces, [intf1])\n self.assertCountEqual(grp1.devices, [dev1])\n self.assertCountEqual(grp1.device_attr[dev1].interfaces, [intf1])\n self.assertCountEqual(grp1.device_attr[dev2].interfaces, [])\n\n cfgs = grp1.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join([\n 'redundancy',\n ' iccp group 1',\n ' interface GigabitEthernet0/0/0/1',\n ' exit',\n ' exit',\n ' exit',\n ]))\n\n dev2.add_feature(grp1)\n grp1.add_interface(intf2)\n grp1.add_interface(intf3)\n grp1.mac_flush = 'stp-tcn'\n grp1.device_attr[dev1].recovery_delay = 100\n grp1.device_attr[intf1.device].interface_attr[intf1].primary_vlan = 1\n grp1.device_attr[intf1.device].interface_attr[intf1].mac_flush = None\n\n self.assertCountEqual(grp1.interfaces, [intf1, intf2, intf3])\n self.assertCountEqual(grp1.devices, [dev1, dev2])\n self.assertCountEqual(grp1.device_attr[dev1].interfaces, [intf1, intf2])\n self.assertCountEqual(grp1.device_attr[dev2].interfaces, [intf3])\n\n cfgs = grp1.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name, dev2.name])\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join([\n 'redundancy',\n ' iccp group 1',\n ' interface GigabitEthernet0/0/0/1',\n ' primary vlan 1',\n ' recovery delay 100',\n ' exit',\n ' interface GigabitEthernet0/0/0/2',\n ' mac-flush stp-tcn',\n ' recovery delay 100',\n ' exit',\n ' exit',\n ' exit',\n ]))\n self.assertMultiLineEqual(str(cfgs[dev2.name]), '\\n'.join([\n 'redundancy',\n ' iccp group 1',\n ' interface GigabitEthernet0/0/0/3',\n ' mac-flush stp-tcn',\n ' exit',\n ' exit',\n ' exit',\n ]))\n\nif __name__ == '__main__':\n unittest.main()\n\n", "id": "6917886", "language": "Python", "matching_score": 3.453484296798706, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/l2vpn/tests/test_iccp_group.py" }, { "content": "#!/usr/bin/env python\n#python\nimport unittest\nfrom unittest.mock import Mock\nimport itertools\n\n# Genie package\nfrom genie.conf.tests import TestCase\nfrom genie.conf import Genie\nfrom genie.conf.base import Testbed, Device, Link, Interface\n\n# Genie XBu_shared \nfrom genie.libs.conf.vlan import Vlan\n\n\nclass test_vlan(TestCase):\n\n def test_vlan_interface_configuration(self):\n testbed = Genie.testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxr')\n dev2 = Device(testbed=testbed, name='PE2', os='iosxr')\n intf1 = Interface(name='GigabitEthernet0/0/1',device=dev1)\n intf2 = Interface(name='GigabitEthernet0/0/2',device=dev2)\n intf3 = Interface(name='GigabitEthernet0/0/3',device=dev1)\n link = Link(name='1_2_1',testbed=testbed)\n 
link.connect_interface(interface=intf1)\n link.connect_interface(interface=intf2)\n vlan = Vlan()\n link.add_feature(vlan)\n\n vlan.device_attr[dev1]\n vlan.device_attr[dev2]\n vlan.device_attr[dev1].interface_attr[intf1]\n vlan.device_attr[dev1].interface_attr[intf1].eth_encap_type1 = 'dot1q'\n vlan.device_attr[dev1].interface_attr[intf1].eth_encap_val1 = 2\n vlan.device_attr[dev1].interface_attr[intf1].eth_encap_type2 = 'second-dot1q'\n vlan.device_attr[dev1].interface_attr[intf1].eth_encap_val2 = 5\n\n cfg1 = vlan.build_config(apply=False)\n self.assertCountEqual(cfg1.keys(), ['PE1', 'PE2'])\n\n self.assertMultiLineEqual(\n str(cfg1['PE1']),\n '\\n'.join([\n 'interface GigabitEthernet0/0/1',\n ' encapsulation dot1q 2 second-dot1q 5',\n ' exit',\n ]))\n\n def test_basic_uncfg(self):\n testbed = Genie.testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxr')\n dev2 = Device(testbed=testbed, name='PE2', os='iosxr')\n intf1 = Interface(name='GigabitEthernet0/0/1',device=dev1)\n intf2 = Interface(name='GigabitEthernet0/0/2',device=dev2)\n intf3 = Interface(name='GigabitEthernet0/0/3',device=dev1)\n link = Link(name='1_2_1',testbed=testbed)\n link.connect_interface(interface=intf1)\n link.connect_interface(interface=intf2)\n vlan = Vlan()\n link.add_feature(vlan)\n\n # Defining attributes section\n vlan.device_attr[dev1]\n vlan.device_attr[dev2]\n vlan.device_attr[dev1].interface_attr[intf1]\n vlan.device_attr[dev1].interface_attr[intf1].eth_encap_type1 = 'dot1q'\n vlan.device_attr[dev1].interface_attr[intf1].eth_encap_val1 = 2\n vlan.device_attr[dev1].interface_attr[intf1].eth_encap_type2 = 'second-dot1q'\n vlan.device_attr[dev1].interface_attr[intf1].eth_encap_val2 = 5\n\n # Unconfig testing\n # Set a mock\n dev1.cli = Mock()\n dev1.configure = Mock()\n dev2.cli = Mock()\n dev2.configure = Mock()\n dev1.add_feature(vlan)\n dev2.add_feature(vlan)\n # Mock config\n\n output = vlan.build_config(apply=True)\n\n uncfg = vlan.build_unconfig(apply=False)\n self.assertMultiLineEqual(\n str(uncfg['PE1']),\n '\\n'.join([\n 'interface GigabitEthernet0/0/1',\n ' no encapsulation dot1q 2 second-dot1q 5',\n ' exit',\n ]))\n\n all_vlan_interface_uncfg = vlan.build_unconfig(apply=False, \n attributes={'device_attr':\\\n {'*':{'interface_attr':'*'}}})\n self.assertCountEqual(all_vlan_interface_uncfg.keys(), ['PE1', 'PE2'])\n self.assertMultiLineEqual(\n str(all_vlan_interface_uncfg['PE1']),\n '\\n'.join([\n 'interface GigabitEthernet0/0/1',\n ' no encapsulation dot1q 2 second-dot1q 5',\n ' exit',\n ]))\n\n partial_vlan_interface_uncfg = vlan.build_unconfig(apply=False, \n attributes={'device_attr':\\\n {'*':{'interface_attr':\\\n {'*':\"eth_encap_type1\"}}}})\n self.assertCountEqual(partial_vlan_interface_uncfg.keys(), ['PE1', 'PE2'])\n self.assertMultiLineEqual(\n str(partial_vlan_interface_uncfg['PE1']),\n '\\n'.join([\n 'interface GigabitEthernet0/0/1',\n ' no encapsulation dot1q 2 second-dot1q 5',\n ' exit',\n ]))\n\nif __name__ == '__main__':\n unittest.main()\n", "id": "791039", "language": "Python", "matching_score": 4.801156997680664, "max_stars_count": 0, "path": "pkgs/conf-pkg/src/genie/libs/conf/vlan/iosxr/tests/test_vlan.py" }, { "content": "#!/usr/bin/env python\n\n# python\nimport unittest\nfrom unittest.mock import Mock\n\n# Genie package\nfrom genie.conf.tests import TestCase\nfrom genie.conf import Genie\nfrom genie.conf.base import Testbed, Device, Interface\n\n\nclass test_interface(TestCase):\n\n def test_ethernet(self):\n testbed = Testbed()\n Genie.testbed = 
Testbed()\n dev1 = Device(name='PE1', os='ios')\n intf1 = Interface(device=dev1, name='GigabitEthernet0/0/1')\n\n # Defining attributes section\n intf1.description = 'test desc'\n intf1.enabled = True\n\n # Check config\n cfg = intf1.build_config(apply=False)\n self.assertMultiLineEqual(str(cfg), '\\n'.join([\n 'interface GigabitEthernet0/0/1',\n ' description test desc',\n ' no shutdown',\n ' exit',\n ]))\n\n # Check unconfig without attributes\n uncfg = intf1.build_unconfig(apply=False)\n self.assertMultiLineEqual(str(uncfg), '\\n'.join([\n 'default interface GigabitEthernet0/0/1',\n ]))\n\n\nif __name__ == '__main__':\n unittest.main()\n\n", "id": "1360893", "language": "Python", "matching_score": 0.8134688138961792, "max_stars_count": 0, "path": "pkgs/conf-pkg/src/genie/libs/conf/interface/ios/tests/test_interface.py" }, { "content": "#!/usr/bin/env python\n\nimport collections\nimport types\nimport unittest\nfrom unittest.mock import Mock\n\nfrom genie.conf import Genie\nfrom genie.conf.base import Testbed, Device, Link, Interface\n\nfrom genie.libs.conf.evpn import ESI\nfrom genie.libs.conf.base import MAC, IPv4Address\n\n\nclass test_esi(unittest.TestCase):\n\n def test_init(self):\n\n esi = ESI(0)\n self.assertEqual(esi.type, 0)\n self.assertTupleEqual(esi.bytes, (0, 0, 0, 0, 0, 0, 0, 0, 0))\n self.assertEqual(esi.value, 0)\n self.assertEqual(str(esi), '00:00:00:00:00:00:00:00:00')\n self.assertEqual(esi.dotted, '00.00.00.00.00.00.00.00.00')\n\n esi = ESI(0, type=1)\n self.assertEqual(esi.type, 1)\n self.assertTupleEqual(esi.bytes, (0, 0, 0, 0, 0, 0, 0, 0, 0))\n self.assertEqual(esi.value, 0)\n self.assertEqual(str(esi), '00:00:00:00:00:00:00:00:00')\n self.assertEqual(esi.dotted, '00.00.00.00.00.00.00.00.00')\n\n esi = ESI('::')\n self.assertEqual(esi.type, 0)\n self.assertTupleEqual(esi.bytes, (0, 0, 0, 0, 0, 0, 0, 0, 0))\n self.assertEqual(esi.value, 0)\n self.assertEqual(str(esi), '00:00:00:00:00:00:00:00:00')\n self.assertEqual(esi.dotted, '00.00.00.00.00.00.00.00.00')\n\n esi = ESI('::1')\n self.assertEqual(esi.type, 0)\n self.assertTupleEqual(esi.bytes, (0, 0, 0, 0, 0, 0, 0, 0, 1))\n self.assertEqual(esi.value, 0x000000000000000001)\n self.assertEqual(str(esi), '00:00:00:00:00:00:00:00:01')\n self.assertEqual(esi.dotted, '00.00.00.00.00.00.00.00.01')\n\n esi = ESI('1::')\n self.assertEqual(esi.type, 0)\n self.assertTupleEqual(esi.bytes, (1, 0, 0, 0, 0, 0, 0, 0, 0))\n self.assertEqual(esi.value, 0x010000000000000000)\n self.assertEqual(str(esi), '01:00:00:00:00:00:00:00:00')\n self.assertEqual(esi.dotted, '01.00.00.00.00.00.00.00.00')\n\n esi = ESI('1:2::9')\n self.assertEqual(esi.type, 0)\n self.assertTupleEqual(esi.bytes, (1, 2, 0, 0, 0, 0, 0, 0, 9))\n self.assertEqual(esi.value, 0x010200000000000009)\n self.assertEqual(str(esi), '01:02:00:00:00:00:00:00:09')\n self.assertEqual(esi.dotted, '01.02.00.00.00.00.00.00.09')\n\n esi = ESI('01:02:03:04:05:06:07:08:09')\n self.assertEqual(esi.type, 0)\n self.assertTupleEqual(esi.bytes, (1, 2, 3, 4, 5, 6, 7, 8, 9))\n self.assertEqual(esi.value, 0x010203040506070809)\n self.assertEqual(str(esi), '01:02:03:04:05:06:07:08:09')\n self.assertEqual(esi.dotted, '01.02.03.04.05.06.07.08.09')\n\n esi = ESI('01.02.03.04.05.06.07.08.09')\n self.assertEqual(esi.type, 0)\n self.assertTupleEqual(esi.bytes, (1, 2, 3, 4, 5, 6, 7, 8, 9))\n self.assertEqual(esi.value, 0x010203040506070809)\n self.assertEqual(str(esi), '01:02:03:04:05:06:07:08:09')\n self.assertEqual(esi.dotted, '01.02.03.04.05.06.07.08.09')\n\n esi = 
ESI('01.02.03.04.05.06.07.08.09.10')\n self.assertEqual(esi.type, 1)\n self.assertTupleEqual(esi.bytes, (2, 3, 4, 5, 6, 7, 8, 9, 16))\n self.assertEqual(esi.value, 0x020304050607080910)\n self.assertEqual(str(esi), '02:03:04:05:06:07:08:09:10')\n self.assertEqual(esi.dotted, '02.03.04.05.06.07.08.09.10')\n\n esi = ESI((1, 2, 3, 4, 5, 6, 7, 8, 9))\n self.assertEqual(esi.type, 0)\n self.assertTupleEqual(esi.bytes, (1, 2, 3, 4, 5, 6, 7, 8, 9))\n self.assertEqual(esi.value, 0x010203040506070809)\n self.assertEqual(str(esi), '01:02:03:04:05:06:07:08:09')\n self.assertEqual(esi.dotted, '01.02.03.04.05.06.07.08.09')\n\n esi = ESI((1, 2, 3, 4, 5, 6, 7, 8, 9, 10))\n self.assertEqual(esi.type, 1)\n self.assertTupleEqual(esi.bytes, (2, 3, 4, 5, 6, 7, 8, 9, 10))\n self.assertEqual(esi.value, 0x02030405060708090A)\n self.assertEqual(str(esi), '02:03:04:05:06:07:08:09:0a')\n self.assertEqual(esi.dotted, '02.03.04.05.06.07.08.09.0a')\n\n esi = ESI(0)\n esi = ESI(0xFFFFFFFFFFFFFFFFFF)\n with self.assertRaises(ValueError):\n esi = ESI(-1)\n with self.assertRaises(ValueError):\n esi = ESI(0x1FFFFFFFFFFFFFFFFFF)\n\n esi = ESI('::')\n esi = ESI('::9')\n esi = ESI('::2:3:4:5:6:7:8:9')\n esi = ESI('fdf8:f53e:61e4::18::')\n esi = ESI('fdf8:f53e:61e4::18:8:9')\n with self.assertRaises(ValueError):\n esi = ESI('1:2:3:4:5:6:7:8:9:10:11')\n with self.assertRaises(ValueError):\n esi = ESI('1.2.3.4.5.6.7.8.9.10.11')\n with self.assertRaises(ValueError):\n esi = ESI('fdf8:f53e:61e4::18')\n with self.assertRaises(ValueError):\n esi = ESI('1.2.3.4.5.6.7.8')\n with self.assertRaises(ValueError):\n esi = ESI('::1:2:3:4:5:6:7:8:9')\n with self.assertRaises(ValueError):\n esi = ESI('1:2:3:4:5:6:7:8:9::')\n with self.assertRaises(ValueError):\n esi = ESI('fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b:7:8:9')\n with self.assertRaises(ValueError):\n esi = ESI('1:2:3:4:::6:7:8:9')\n with self.assertRaises(ValueError):\n esi = ESI('fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b::7:8:9')\n with self.assertRaises(ValueError):\n esi = ESI('::1ff')\n\n esi = ESI((1, 2, 3, 4, 5, 6, 7, 8, 9))\n with self.assertRaises(TypeError):\n esi = ESI((1, 2, 3, 4, 5, 6, 7, 8))\n\n esi = ESI(0, type=0)\n esi = ESI(0, type=5)\n with self.assertRaises(ValueError):\n esi = ESI(0, type=-1)\n with self.assertRaises(ValueError):\n esi = ESI(0, type=6)\n\n esi1 = ESI((1, 2, 3, 4, 5, 6, 7, 8, 9), type=0)\n esi2 = ESI((1, 2, 3, 4, 5, 6, 7, 8, 9), type=0)\n esi3 = ESI((1, 2, 3, 4, 5, 6, 7, 8, 9), type=1)\n esi4 = ESI((1, 2, 3, 4, 5, 6, 7, 8, 10), type=1)\n self.assertEqual(esi1, esi2)\n self.assertNotEqual(esi1, esi3)\n self.assertNotEqual(esi2, esi3)\n self.assertNotEqual(esi3, esi4)\n self.assertLess(esi1, esi3)\n self.assertLess(esi1, esi4)\n self.assertLessEqual(esi1, esi3)\n self.assertLessEqual(esi1, esi2)\n self.assertLessEqual(esi1, esi4)\n self.assertGreater(esi3, esi1)\n self.assertGreater(esi4, esi3)\n self.assertGreater(esi4, esi1)\n self.assertGreaterEqual(esi3, esi1)\n self.assertGreaterEqual(esi4, esi1)\n self.assertGreaterEqual(esi2, esi1)\n\n def test_create(self):\n\n esi = ESI.create_type_0('1:2:3:4:5:6:7:8:9')\n self.assertEqual(esi, ESI((0, 1, 2, 3, 4, 5, 6, 7, 8, 9)))\n self.assertEqual(esi.value, 0x010203040506070809)\n\n esi = ESI.create_type_1('1:2:3:4:5:6', 0x0708)\n self.assertEqual(esi, ESI((1, 1, 2, 3, 4, 5, 6, 7, 8, 0)))\n self.assertEqual(esi.system_mac, MAC('1:2:3:4:5:6'))\n self.assertEqual(esi.port_key, 0x0708)\n\n esi = ESI.create_type_2('1:2:3:4:5:6', 0x0708)\n self.assertEqual(esi, ESI((2, 1, 2, 3, 4, 5, 6, 7, 8, 0)))\n 
self.assertEqual(esi.root_bridge_mac, MAC('1:2:3:4:5:6'))\n self.assertEqual(esi.root_bridge_priority, 0x0708)\n\n esi = ESI.create_type_3('1:2:3:4:5:6', 0x070809)\n self.assertEqual(esi, ESI((3, 1, 2, 3, 4, 5, 6, 7, 8, 9)))\n self.assertEqual(esi.system_mac, MAC('1:2:3:4:5:6'))\n self.assertEqual(esi.local_discriminator, 0x070809)\n\n esi = ESI.create_type_4('1.2.3.4', 0x05060708)\n self.assertEqual(esi, ESI((4, 1, 2, 3, 4, 5, 6, 7, 8, 0)))\n self.assertEqual(esi.router_id, IPv4Address('1.2.3.4'))\n self.assertEqual(esi.local_discriminator, 0x05060708)\n\n esi = ESI.create_type_5(0x01020304, 0x05060708)\n self.assertEqual(esi, ESI((5, 1, 2, 3, 4, 5, 6, 7, 8, 0)))\n self.assertEqual(esi.asn, 0x01020304)\n self.assertEqual(esi.local_discriminator, 0x05060708)\n\nif __name__ == '__main__':\n unittest.main()\n\n", "id": "11926106", "language": "Python", "matching_score": 2.9451937675476074, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/evpn/tests/test_esi.py" }, { "content": "#!/usr/bin/env python\n\nimport collections\nimport types\nimport unittest\nfrom unittest.mock import Mock\nfrom ipaddress import IPv4Address\n\nfrom genie.conf import Genie\n\nfrom genie.libs.conf.bgp import RouteTarget\n\n\nclass test_route_target(unittest.TestCase):\n\n def test_init(self):\n\n rt = RouteTarget('1:0')\n self.assertEqual(rt.type, rt.Type.ASN2_index)\n self.assertTupleEqual(rt.fields, (rt.Type.ASN2_index, 1, 0))\n self.assertTupleEqual(rt.bytes, (0, 0, 1, 0, 0, 0, 0))\n self.assertTupleEqual(rt.value_bytes, (0, 1, 0, 0, 0, 0))\n self.assertTupleEqual(rt.value_words, (1, 0, 0))\n self.assertEqual(rt.dotted, '1:0')\n self.assertEqual(rt.dotted_hex3words, '0001.0000.0000')\n self.assertEqual(str(rt), '1:0')\n self.assertEqual(format(rt, ''), '1:0')\n self.assertEqual(format(rt, 'd:d'), '1:0')\n self.assertEqual(format(rt, 'd.d:d'), '1:0')\n self.assertEqual(format(rt, 'x.x.x'), '0001.0000.0000')\n self.assertEqual(rt, RouteTarget(rt.fields))\n\n rt = RouteTarget('65536:0')\n self.assertEqual(rt.type, rt.Type.ASN4_index)\n self.assertTupleEqual(rt.fields, (rt.Type.ASN4_index, 65536, 0))\n self.assertEqual(rt.dotted, '1.0:0')\n self.assertEqual(rt.dotted_hex3words, '0001.0000.0000')\n self.assertEqual(str(rt), '65536:0')\n self.assertEqual(format(rt, ''), '65536:0')\n self.assertEqual(format(rt, 'd:d'), '65536:0')\n self.assertEqual(format(rt, 'd.d:d'), '1.0:0')\n self.assertEqual(format(rt, 'x.x.x'), '0001.0000.0000')\n self.assertEqual(rt, RouteTarget(rt.fields))\n\n rt = RouteTarget('1.0:0')\n self.assertEqual(rt.type, rt.Type.ASN4_index)\n self.assertTupleEqual(rt.fields, (rt.Type.ASN4_index, 65536, 0))\n self.assertEqual(str(rt), '65536:0')\n self.assertEqual(rt.dotted, '1.0:0')\n self.assertEqual(rt.dotted_hex3words, '0001.0000.0000')\n self.assertEqual(rt, RouteTarget(rt.fields))\n\n rt = RouteTarget('1.2.3.4:0')\n self.assertEqual(rt.type, rt.Type.IPv4Address_index)\n self.assertTupleEqual(rt.fields, (rt.Type.IPv4Address_index, IPv4Address('1.2.3.4'), 0))\n self.assertEqual(str(rt), '1.2.3.4:0')\n self.assertEqual(rt.dotted, '1.2.3.4:0')\n self.assertEqual(rt.dotted_hex3words, '0102.0304.0000')\n self.assertEqual(rt, RouteTarget(rt.fields))\n\n with self.assertRaises(ValueError):\n format(rt, 'blah')\n\n #with self.assertRaises(ValueError):\n # rt = RouteTarget('0:0')\n with self.assertRaises(ValueError):\n rt = RouteTarget('5000000000:0')\n with self.assertRaises(ValueError):\n rt = RouteTarget('1:5000000000')\n with self.assertRaises(ValueError):\n rt = 
RouteTarget('65536:65536')\n with self.assertRaises(ValueError):\n rt = RouteTarget('65536.0:65535')\n with self.assertRaises(ValueError):\n rt = RouteTarget('0.65536:65535')\n with self.assertRaises(ValueError):\n rt = RouteTarget('1.2.3.4:65536')\n\n rt1 = RouteTarget('1:0')\n rt2 = RouteTarget('1:1')\n rt3 = RouteTarget('1.2.3.4:0')\n rt4 = RouteTarget('1.2.3.4:1')\n rt5 = RouteTarget('65536:0')\n rt6 = RouteTarget('65536:1')\n self.assertEqual(rt1, rt1)\n self.assertNotEqual(rt1, rt2)\n self.assertNotEqual(rt2, rt3)\n self.assertNotEqual(rt3, rt4)\n self.assertNotEqual(rt4, rt5)\n self.assertNotEqual(rt5, rt6)\n self.assertLess(rt1, rt2)\n self.assertLess(rt2, rt3)\n self.assertLess(rt3, rt4)\n self.assertLess(rt4, rt5)\n self.assertLess(rt5, rt6)\n\nif __name__ == '__main__':\n unittest.main()\n\n", "id": "7398199", "language": "Python", "matching_score": 1.5243453979492188, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/base/tests/test_route_target.py" }, { "content": "#!/usr/bin/env python\n\nimport collections\nimport types\nimport unittest\nfrom unittest.mock import Mock\n\nfrom genie.conf import Genie\n\nfrom genie.libs.conf.vrf import VpnId\n\n\nclass test_vpn_id(unittest.TestCase):\n\n def test_init(self):\n\n vpn_id = VpnId('0:0')\n self.assertEqual(vpn_id.format, vpn_id.Format.OUI_VPN_index)\n self.assertTupleEqual(vpn_id.parts, (0, 0))\n self.assertEqual(str(vpn_id), '0:0')\n self.assertEqual(vpn_id, VpnId(vpn_id.parts))\n\n vpn_id = VpnId('ffffff:ffffffff')\n self.assertEqual(vpn_id.format, vpn_id.Format.OUI_VPN_index)\n self.assertTupleEqual(vpn_id.parts, (0xffffff, 0xffffffff))\n self.assertEqual(str(vpn_id), 'ffffff:ffffffff')\n self.assertEqual(vpn_id, VpnId(vpn_id.parts))\n\n with self.assertRaises(ValueError):\n vpn_id = VpnId('fffffff:0')\n with self.assertRaises(ValueError):\n vpn_id = VpnId('0:fffffffff')\n\n vpn_id1 = VpnId('0:0')\n vpn_id2 = VpnId('ffffff:ffffffff')\n self.assertEqual(vpn_id1, vpn_id1)\n self.assertNotEqual(vpn_id1, vpn_id2)\n self.assertLess(vpn_id1, vpn_id2)\n\nif __name__ == '__main__':\n unittest.main()\n\n", "id": "7445192", "language": "Python", "matching_score": 1.567775845527649, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/vrf/tests/test_vpn_id.py" }, { "content": "from .vpn_id import *\nfrom .vrf import *\n", "id": "5463117", "language": "Python", "matching_score": 0.12193331122398376, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/vrf/__init__.py" }, { "content": "#!/usr/bin/env python\n\n# Python\nimport unittest\nfrom unittest.mock import Mock\n\n# Genie package\nfrom genie.conf.tests import TestCase\nfrom genie.conf import Genie\nfrom genie.conf.base import Testbed, Device, Link, Interface\n\n# Genie Conf\nfrom genie.libs.conf.vrf import Vrf\nfrom genie.libs.conf.interface import Interface\nfrom genie.libs.conf.ospf import Ospf\nfrom genie.libs.conf.ospf.gracefulrestart import GracefulRestart\nfrom genie.libs.conf.ospf.stubrouter import StubRouter\nfrom genie.libs.conf.ospf.areanetwork import AreaNetwork\nfrom genie.libs.conf.ospf.arearange import AreaRange\nfrom genie.libs.conf.ospf.interfacestaticneighbor import InterfaceStaticNeighbor\n\n\nclass test_ospf(TestCase):\n\n def test_ospf_config1(self):\n\n # For failures\n self.maxDiff = None\n\n # Set testbed\n Genie.testbed = testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxe')\n\n # Create VRF objects\n vrf0 = Vrf('default')\n vrf1 = Vrf('VRF1')\n vrf2 = Vrf('VRF2')\n\n # Create Interface object\n 
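# (keyed further down into area_attr['0'].interface_attr[intf1], which is\n        # what emits the per-interface 'ip ospf ...' lines asserted at the end)\n        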
intf1 = Interface(name='GigabitEthernet1',device=dev1)\n\n # Create OSPF object\n ospf1 = Ospf()\n\n # Add OSPF configurations to vrf default\n ospf1.device_attr[dev1].vrf_attr[vrf0].instance = '30'\n ospf1.device_attr[dev1].vrf_attr[vrf0].enable = True\n ospf1.device_attr[dev1].vrf_attr[vrf0].router_id = '3.3.3.3'\n ospf1.device_attr[dev1].vrf_attr[vrf0].pref_all = 115\n ospf1.device_attr[dev1].vrf_attr[vrf0].nsr_enable = True\n ospf1.device_attr[dev1].vrf_attr[vrf0].ldp_autoconfig = True\n ospf1.device_attr[dev1].vrf_attr[vrf0].ldp_auto_config_area_id = '0.0.0.0'\n ospf1.device_attr[dev1].vrf_attr[vrf0].ldp_igp_sync = True\n ospf1.device_attr[dev1].vrf_attr[vrf0].redist_bgp_id = 100\n ospf1.device_attr[dev1].vrf_attr[vrf0].redist_bgp_metric = 555\n ospf1.device_attr[dev1].vrf_attr[vrf0].redist_bgp_metric_type = '1'\n ospf1.device_attr[dev1].vrf_attr[vrf0].redist_bgp_nssa_only = True\n ospf1.device_attr[dev1].vrf_attr[vrf0].redist_bgp_route_map = 'test'\n ospf1.device_attr[dev1].vrf_attr[vrf0].redist_bgp_subnets = True\n ospf1.device_attr[dev1].vrf_attr[vrf0].redist_bgp_tag = 12\n ospf1.device_attr[dev1].vrf_attr[vrf0].redist_connected = True\n ospf1.device_attr[dev1].vrf_attr[vrf0].redist_connected_metric = 12\n ospf1.device_attr[dev1].vrf_attr[vrf0].redist_connected_route_policy = 'test'\n ospf1.device_attr[dev1].vrf_attr[vrf0].redist_static = True\n ospf1.device_attr[dev1].vrf_attr[vrf0].redist_static_metric = 12\n ospf1.device_attr[dev1].vrf_attr[vrf0].redist_static_route_policy = 'test'\n ospf1.device_attr[dev1].vrf_attr[vrf0].redist_isis = 'ABC'\n ospf1.device_attr[dev1].vrf_attr[vrf0].redist_isis_metric = 12\n ospf1.device_attr[dev1].vrf_attr[vrf0].redist_isis_route_policy = 'test'\n ospf1.device_attr[dev1].vrf_attr[vrf0].redist_max_prefix = 12\n ospf1.device_attr[dev1].vrf_attr[vrf0].redist_max_prefix_thld = 10\n ospf1.device_attr[dev1].vrf_attr[vrf0].redist_max_prefix_warn_only = True\n ospf1.device_attr[dev1].vrf_attr[vrf0].bfd_enable = True\n ospf1.device_attr[dev1].vrf_attr[vrf0].bfd_strict_mode = True\n ospf1.device_attr[dev1].vrf_attr[vrf0].log_adjacency_changes = True\n ospf1.device_attr[dev1].vrf_attr[vrf0].log_adjacency_changes_detail = True\n ospf1.device_attr[dev1].vrf_attr[vrf0].adjacency_stagger_initial_number = 10\n ospf1.device_attr[dev1].vrf_attr[vrf0].adjacency_stagger_maximum_number = 100\n ospf1.device_attr[dev1].vrf_attr[vrf0].auto_cost_enable = True\n ospf1.device_attr[dev1].vrf_attr[vrf0].spf_paths = 15\n ospf1.device_attr[dev1].vrf_attr[vrf0].spf_start = 600\n ospf1.device_attr[dev1].vrf_attr[vrf0].spf_hold = 700\n ospf1.device_attr[dev1].vrf_attr[vrf0].spf_maximum = 800\n ospf1.device_attr[dev1].vrf_attr[vrf0].spf_lsa_start = 600\n ospf1.device_attr[dev1].vrf_attr[vrf0].spf_lsa_hold = 700\n ospf1.device_attr[dev1].vrf_attr[vrf0].spf_lsa_maximum = 800\n ospf1.device_attr[dev1].vrf_attr[vrf0].db_ctrl_max_lsa = 123\n ospf1.device_attr[dev1].vrf_attr[vrf0].default_originate = True\n ospf1.device_attr[dev1].vrf_attr[vrf0].default_originate_always = True\n ospf1.device_attr[dev1].vrf_attr[vrf0].area_attr['0'].area_te_enable = True\n ospf1.device_attr[dev1].vrf_attr[vrf0].area_attr['0'].interface_attr[intf1].if_admin_control = True\n ospf1.device_attr[dev1].vrf_attr[vrf0].area_attr['0'].interface_attr[intf1].if_cost = 10\n ospf1.device_attr[dev1].vrf_attr[vrf0].area_attr['0'].interface_attr[intf1].if_type = 'point-to-point'\n ospf1.device_attr[dev1].vrf_attr[vrf0].area_attr['0'].interface_attr[intf1].if_passive = True\n 
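# Each if_* attribute in this block maps to one interface-mode command;\n        # the rendered 'interface GigabitEthernet1' stanza is asserted verbatim\n        # at the bottom of this test.\n        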
ospf1.device_attr[dev1].vrf_attr[vrf0].area_attr['0'].interface_attr[intf1].if_demand_circuit = True\n ospf1.device_attr[dev1].vrf_attr[vrf0].area_attr['0'].interface_attr[intf1].if_priority = 110\n ospf1.device_attr[dev1].vrf_attr[vrf0].area_attr['0'].interface_attr[intf1].if_bfd_enable = True\n ospf1.device_attr[dev1].vrf_attr[vrf0].area_attr['0'].interface_attr[intf1].if_bfd_interval = 999\n ospf1.device_attr[dev1].vrf_attr[vrf0].area_attr['0'].interface_attr[intf1].if_bfd_min_interval = 999\n ospf1.device_attr[dev1].vrf_attr[vrf0].area_attr['0'].interface_attr[intf1].if_bfd_multiplier = 7\n ospf1.device_attr[dev1].vrf_attr[vrf0].area_attr['0'].interface_attr[intf1].if_hello_interval = 50\n ospf1.device_attr[dev1].vrf_attr[vrf0].area_attr['0'].interface_attr[intf1].if_dead_interval = 60\n ospf1.device_attr[dev1].vrf_attr[vrf0].area_attr['0'].interface_attr[intf1].if_retransmit_interval = 70\n ospf1.device_attr[dev1].vrf_attr[vrf0].area_attr['0'].interface_attr[intf1].if_lls = True\n ospf1.device_attr[dev1].vrf_attr[vrf0].area_attr['0'].interface_attr[intf1].if_ttl_sec_enable = True\n ospf1.device_attr[dev1].vrf_attr[vrf0].area_attr['0'].interface_attr[intf1].if_ttl_sec_hops = 25\n ospf1.device_attr[dev1].vrf_attr[vrf0].area_attr['0'].interface_attr[intf1].if_auth_trailer_key_chain = 'montreal'\n ospf1.device_attr[dev1].vrf_attr[vrf0].area_attr['0'].interface_attr[intf1].if_auth_trailer_key_crypto_algorithm = 'md5'\n ospf1.device_attr[dev1].vrf_attr[vrf0].area_attr['0'].interface_attr[intf1].if_auth_trailer_key = 'quebec'\n ospf1.device_attr[dev1].vrf_attr[vrf0].area_attr['0'].interface_attr[intf1].if_mtu_ignore = True\n ospf1.device_attr[dev1].vrf_attr[vrf0].area_attr['0'].interface_attr[intf1].if_prefix_suppression = True\n # Add interface static neighbor configuration to OSPF\n static_nbr1 = InterfaceStaticNeighbor(device=dev1)\n static_nbr1.if_static_neighbor = '10.10.10.10'\n static_nbr1.if_static_cost = 20\n static_nbr1.if_static_poll_interval = 60\n static_nbr1.if_static_priority = 110\n ospf1.device_attr[dev1].vrf_attr[vrf0].area_attr['0'].interface_attr[intf1].add_staticnbr_key(static_nbr1)\n static_nbr2 = InterfaceStaticNeighbor(device=dev1)\n static_nbr2.if_static_neighbor = '20.20.20.20'\n static_nbr2.if_static_cost = 30\n static_nbr2.if_static_poll_interval = 120\n static_nbr2.if_static_priority = 113\n ospf1.device_attr[dev1].vrf_attr[vrf0].area_attr['0'].interface_attr[intf1].add_staticnbr_key(static_nbr2)\n static_nbr3 = InterfaceStaticNeighbor(device=dev1)\n static_nbr3.if_static_neighbor = '30.30.30.30'\n static_nbr3.if_static_cost = 40\n static_nbr3.if_static_poll_interval = 150\n static_nbr3.if_static_priority = 115\n ospf1.device_attr[dev1].vrf_attr[vrf0].area_attr['0'].interface_attr[intf1].add_staticnbr_key(static_nbr3)\n\n # Add OSPF configurations to vrf VRF1\n ospf1.device_attr[dev1].vrf_attr[vrf1].instance = '10'\n ospf1.device_attr[dev1].vrf_attr[vrf1].enable = True\n ospf1.device_attr[dev1].vrf_attr[vrf1].router_id = '1.1.1.1'\n ospf1.device_attr[dev1].vrf_attr[vrf1].pref_intra_area = 112\n ospf1.device_attr[dev1].vrf_attr[vrf1].pref_inter_area = 113\n ospf1.device_attr[dev1].vrf_attr[vrf1].pref_external = 114\n ospf1.device_attr[dev1].vrf_attr[vrf1].auto_cost_enable = True\n ospf1.device_attr[dev1].vrf_attr[vrf1].auto_cost_reference_bandwidth = 60\n ospf1.device_attr[dev1].vrf_attr[vrf1].auto_cost_bandwidth_unit = 'gbps'\n ospf1.device_attr[dev1].vrf_attr[vrf1].area_attr['1'].area_te_enable = True\n 
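# 'stub' renders as 'area 1 stub' plus the default-cost line in the\n        # expected CLI; the NSSA variant is exercised under VRF2 further down.\n        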
ospf1.device_attr[dev1].vrf_attr[vrf1].area_attr['1'].area_type = 'stub'\n ospf1.device_attr[dev1].vrf_attr[vrf1].area_attr['1'].default_cost = 1111\n ospf1.device_attr[dev1].vrf_attr[vrf1].area_attr['1'].virtual_link_attr['OSPF_VL0'].vl_router_id = '7.7.7.7'\n ospf1.device_attr[dev1].vrf_attr[vrf1].area_attr['1'].virtual_link_attr['OSPF_VL0'].vl_hello_interval = 55\n ospf1.device_attr[dev1].vrf_attr[vrf1].area_attr['1'].virtual_link_attr['OSPF_VL0'].vl_dead_interval = 65\n ospf1.device_attr[dev1].vrf_attr[vrf1].area_attr['1'].virtual_link_attr['OSPF_VL0'].vl_retransmit_interval = 75\n ospf1.device_attr[dev1].vrf_attr[vrf1].area_attr['1'].virtual_link_attr['OSPF_VL0'].vl_transmit_delay = 85\n ospf1.device_attr[dev1].vrf_attr[vrf1].area_attr['1'].virtual_link_attr['OSPF_VL0'].vl_ttl_sec_hops = 167\n ospf1.device_attr[dev1].vrf_attr[vrf1].area_attr['1'].virtual_link_attr['OSPF_VL0'].vl_auth_trailer_key_chain = 'ottawa'\n ospf1.device_attr[dev1].vrf_attr[vrf1].area_attr['1'].virtual_link_attr['OSPF_VL0'].vl_auth_trailer_key_crypto_algorithm = 'simple'\n ospf1.device_attr[dev1].vrf_attr[vrf1].area_attr['1'].virtual_link_attr['OSPF_VL0'].vl_auth_trailer_key = 'anything'\n ospf1.device_attr[dev1].vrf_attr[vrf1].area_attr['1'].sham_link_attr['OSPF_SL0'].sl_local_id = '11.11.11.11'\n ospf1.device_attr[dev1].vrf_attr[vrf1].area_attr['1'].sham_link_attr['OSPF_SL0'].sl_remote_id = '12.12.12.12'\n ospf1.device_attr[dev1].vrf_attr[vrf1].area_attr['1'].sham_link_attr['OSPF_SL0'].sl_ttl_sec_hops = 10\n ospf1.device_attr[dev1].vrf_attr[vrf1].area_attr['1'].sham_link_attr['OSPF_SL1'].sl_local_id = '15.15.15.15'\n ospf1.device_attr[dev1].vrf_attr[vrf1].area_attr['1'].sham_link_attr['OSPF_SL1'].sl_remote_id = '16.16.16.16'\n ospf1.device_attr[dev1].vrf_attr[vrf1].area_attr['1'].sham_link_attr['OSPF_SL1'].sl_cost = 50\n \n # Add OSPF configurations to vrf VRF2\n ospf1.device_attr[dev1].vrf_attr[vrf2].instance = '20'\n ospf1.device_attr[dev1].vrf_attr[vrf2].enable = True\n ospf1.device_attr[dev1].vrf_attr[vrf2].router_id = '2.2.2.2'\n ospf1.device_attr[dev1].vrf_attr[vrf2].auto_cost_enable = False\n ospf1.device_attr[dev1].vrf_attr[vrf2].area_attr['2'].area_te_enable = True\n ospf1.device_attr[dev1].vrf_attr[vrf2].area_attr['2'].area_type = 'nssa'\n ospf1.device_attr[dev1].vrf_attr[vrf2].area_attr['2'].summary = False\n ospf1.device_attr[dev1].vrf_attr[vrf2].area_attr['2'].default_cost = 1111\n ospf1.device_attr[dev1].vrf_attr[vrf2].area_attr['2'].virtual_link_attr['OSPF_VL1'].vl_router_id = '8.8.8.8'\n ospf1.device_attr[dev1].vrf_attr[vrf2].area_attr['2'].virtual_link_attr['OSPF_VL1'].vl_hello_interval = 56\n ospf1.device_attr[dev1].vrf_attr[vrf2].area_attr['2'].virtual_link_attr['OSPF_VL1'].vl_dead_interval = 66\n ospf1.device_attr[dev1].vrf_attr[vrf2].area_attr['2'].virtual_link_attr['OSPF_VL1'].vl_retransmit_interval = 76\n ospf1.device_attr[dev1].vrf_attr[vrf2].area_attr['2'].virtual_link_attr['OSPF_VL1'].vl_transmit_delay = 86\n ospf1.device_attr[dev1].vrf_attr[vrf2].area_attr['2'].virtual_link_attr['OSPF_VL1'].vl_ttl_sec_hops = 168\n ospf1.device_attr[dev1].vrf_attr[vrf2].area_attr['2'].virtual_link_attr['OSPF_VL1'].vl_auth_trailer_key_chain = 'toronto'\n ospf1.device_attr[dev1].vrf_attr[vrf2].area_attr['2'].virtual_link_attr['OSPF_VL1'].vl_auth_trailer_key_crypto_algorithm = 'md5'\n ospf1.device_attr[dev1].vrf_attr[vrf2].area_attr['2'].virtual_link_attr['OSPF_VL1'].vl_auth_trailer_key = 'anything'\n # Add graceful restart configuration to OSPF\n gr1 = GracefulRestart(device=dev1)\n gr1.gr_enable = 
True\n gr1.gr_type = 'cisco'\n gr1.gr_helper_enable = False\n ospf1.device_attr[dev1].vrf_attr[vrf2].add_gr_key(gr1)\n gr2 = GracefulRestart(device=dev1)\n gr2.gr_enable = True\n gr2.gr_type = 'ietf'\n gr2.gr_helper_strict_lsa_checking = True\n ospf1.device_attr[dev1].vrf_attr[vrf2].add_gr_key(gr2)\n gr3 = GracefulRestart(device=dev1)\n gr3.gr_enable = True\n gr3.gr_type = 'ietf'\n gr3.gr_restart_interval = 50\n ospf1.device_attr[dev1].vrf_attr[vrf2].add_gr_key(gr3)\n # Add stub router configuration to OSPF\n sr1 = StubRouter(device=dev1)\n sr1.stub_router_always = True\n sr1.stub_router_include_stub = True\n sr1.stub_router_summary_lsa = True\n sr1.stub_router_external_lsa = True\n ospf1.device_attr[dev1].vrf_attr[vrf2].add_sr_key(sr1)\n sr2 = StubRouter(device=dev1)\n sr2.stub_router_on_startup = 50\n sr2.stub_router_include_stub = True\n sr2.stub_router_summary_lsa = True\n sr2.stub_router_external_lsa = True\n ospf1.device_attr[dev1].vrf_attr[vrf2].add_sr_key(sr2)\n # Add area network configuration to OSPF\n an1 = AreaNetwork(device=dev1)\n an1.area_network = '192.168.1.0'\n an1.area_network_wildcard = '0.0.0.0'\n ospf1.device_attr[dev1].vrf_attr[vrf2].area_attr['2'].add_areanetwork_key(an1)\n an2 = AreaNetwork(device=dev1)\n an2.area_network = '192.168.1.1'\n an2.area_network_wildcard = '0.0.0.255'\n ospf1.device_attr[dev1].vrf_attr[vrf2].area_attr['2'].add_areanetwork_key(an2)\n # Add area range configuration to OSPF\n ar1 = AreaRange(device=dev1)\n ar1.area_range_prefix = '1.1.1.1/24'\n ar1.area_range_advertise = False\n ar1.area_range_cost = 10\n ospf1.device_attr[dev1].vrf_attr[vrf2].area_attr['2'].add_arearange_key(ar1)\n ar2 = AreaRange(device=dev1)\n ar2.area_range_prefix = '2.2.2.2 255.255.255.255'\n ar2.area_range_advertise = True\n ospf1.device_attr[dev1].vrf_attr[vrf2].area_attr['2'].add_arearange_key(ar2)\n\n # Add OSPF to the device\n dev1.add_feature(ospf1)\n \n # Build config\n cfgs = ospf1.build_config(apply=False)\n\n # Check config strings built correctly\n self.assertMultiLineEqual(\n str(cfgs[dev1.name]),\n '\\n'.join([\n 'router ospf 30',\n ' no shutdown',\n ' router-id 3.3.3.3',\n ' distance 115',\n ' nsr',\n ' mpls ldp autoconfig area 0.0.0.0',\n ' mpls ldp sync',\n ' redistribute bgp 100 metric 555 metric-type 1 subnets nssa-only tag 12 route-map test',\n ' redistribute connected metric 12 route-map test',\n ' redistribute static metric 12 route-map test',\n ' redistribute isis ABC metric 12 route-map test',\n ' redistribute maximum-prefix 12 10 warning-only',\n ' bfd all-interfaces strict-mode',\n ' log-adjacency-changes detail',\n ' adjacency stagger 10 100',\n ' auto-cost',\n ' maximum-paths 15',\n ' timers throttle spf 600 700 800',\n ' timers throttle lsa 600 700 800',\n ' max-lsa 123',\n ' default-information originate always',\n ' mpls traffic-eng area 0',\n ' passive-interface GigabitEthernet1',\n ' neighbor 10.10.10.10 cost 20 poll-interval 60 priority 110',\n ' neighbor 20.20.20.20 cost 30 poll-interval 120 priority 113',\n ' neighbor 30.30.30.30 cost 40 poll-interval 150 priority 115',\n ' exit',\n 'router ospf 10 vrf VRF1',\n ' no shutdown',\n ' router-id 1.1.1.1',\n ' distance ospf intra-area 112 inter-area 113 external 114',\n ' auto-cost reference-bandwidth 60000',\n ' mpls traffic-eng area 1',\n ' area 1 stub',\n ' area 1 default-cost 1111',\n ' area 1 virtual-link 7.7.7.7',\n ' area 1 virtual-link 7.7.7.7 hello-interval 55',\n ' area 1 virtual-link 7.7.7.7 dead-interval 65',\n ' area 1 virtual-link 7.7.7.7 retransmit-interval 75',\n ' area 1 
virtual-link 7.7.7.7 transmit-delay 85',\n ' area 1 virtual-link 7.7.7.7 ttl-security hops 167',\n ' area 1 virtual-link 7.7.7.7 authentication key-chain ottawa',\n ' area 1 virtual-link 7.7.7.7 authentication',\n ' area 1 virtual-link 7.7.7.7 authentication-key anything',\n ' area 1 sham-link 11.11.11.11 12.12.12.12',\n ' area 1 sham-link 11.11.11.11 12.12.12.12 ttl-security hops 10',\n ' area 1 sham-link 15.15.15.15 16.16.16.16',\n ' area 1 sham-link 15.15.15.15 16.16.16.16 cost 50',\n ' exit',\n 'router ospf 20 vrf VRF2',\n ' no shutdown',\n ' router-id 2.2.2.2',\n ' nsf cisco helper disable',\n ' nsf ietf restart-interval 50',\n ' nsf ietf helper strict-lsa-checking',\n ' no auto-cost',\n ' max-metric router-lsa external-lsa include-stub summary-lsa on-startup 50',\n ' max-metric router-lsa external-lsa include-stub summary-lsa',\n ' network 192.168.1.0 0.0.0.0 area 2',\n ' network 192.168.1.1 0.0.0.255 area 2',\n ' mpls traffic-eng area 2',\n ' area 2 nssa no-summary',\n ' area 2 default-cost 1111',\n ' area 2 range 1.1.1.1 255.255.255.0 not-advertise cost 10',\n ' area 2 range 2.2.2.2 255.255.255.255 advertise',\n ' area 2 virtual-link 8.8.8.8',\n ' area 2 virtual-link 8.8.8.8 hello-interval 56',\n ' area 2 virtual-link 8.8.8.8 dead-interval 66',\n ' area 2 virtual-link 8.8.8.8 retransmit-interval 76',\n ' area 2 virtual-link 8.8.8.8 transmit-delay 86',\n ' area 2 virtual-link 8.8.8.8 ttl-security hops 168',\n ' area 2 virtual-link 8.8.8.8 authentication key-chain toronto',\n ' area 2 virtual-link 8.8.8.8 authentication message-digest',\n ' area 2 virtual-link 8.8.8.8 message-digest-key 1 md5 anything',\n ' exit',\n 'interface GigabitEthernet1',\n ' ip ospf 30 area 0',\n ' ip ospf cost 10',\n ' ip ospf network point-to-point',\n ' ip ospf demand-circuit',\n ' ip ospf priority 110',\n ' ip ospf bfd',\n ' bfd interval 999 min_rx 999 multiplier 7',\n ' ip ospf hello-interval 50',\n ' ip ospf dead-interval 60',\n ' ip ospf retransmit-interval 70',\n ' ip ospf lls',\n ' ip ospf ttl-security hops 25',\n ' ip ospf authentication key-chain montreal',\n ' ip ospf authentication message-digest',\n ' ip ospf message-digest-key 1 md5 quebec',\n ' ip ospf mtu-ignore',\n ' ip ospf prefix-suppression',\n ' exit',\n ]))\n\n # Unconfig\n ospf_uncfg = ospf1.build_unconfig(apply=False)\n\n # Check unconfig strings built correctly\n self.assertMultiLineEqual(\n str(ospf_uncfg[dev1.name]),\n '\\n'.join([\n 'no router ospf 30',\n 'no router ospf 10 vrf VRF1',\n 'no router ospf 20 vrf VRF2',\n ]))\n\n\n def test_ospf_config2(self):\n\n # For failures\n self.maxDiff = None\n\n # Set testbed\n Genie.testbed = testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxe')\n\n # Create VRF objects\n vrf0 = Vrf('default')\n # Create OSPF object\n ospf1 = Ospf()\n\n # Add OSPF configurations to vrf default\n ospf1.device_attr[dev1].vrf_attr[vrf0].instance = '1'\n ospf1.device_attr[dev1].vrf_attr[vrf0].enable = True\n ospf1.device_attr[dev1].vrf_attr[vrf0].router_id = '1.1.1.1'\n ospf1.device_attr[dev1].vrf_attr[vrf0].pref_all = 115\n\n # Add OSPF to the device\n dev1.add_feature(ospf1)\n \n # Build config\n cfgs = ospf1.build_config(apply=False)\n\n # Check config strings built correctly\n self.assertMultiLineEqual(\n str(cfgs[dev1.name]),\n '\\n'.join([\n 'router ospf 1',\n ' no shutdown',\n ' router-id 1.1.1.1',\n ' distance 115',\n ' exit',\n ]))\n\n # Unconfigure router-id\n ospf_uncfg = ospf1.build_unconfig(apply=False, attributes={\n 'device_attr': {\n dev1.name: 
'vrf_attr__default__router_id',\n        }})\n\n        # Check unconfig strings built correctly\n        self.assertMultiLineEqual(\n            str(ospf_uncfg[dev1.name]),\n            '\\n'.join([\n                'router ospf 1',\n                ' no router-id 1.1.1.1',\n                ' exit',\n            ]))\n\n\nif __name__ == '__main__':\n    unittest.main()\n", "id": "3627604", "language": "Python", "matching_score": 5.995461463928223, "max_stars_count": 0, "path": "pkgs/conf-pkg/src/genie/libs/conf/ospf/iosxe/tests/test_ospf.py" }, { "content": "\n__all__ = (\n    'Ospf',\n)\n\n# Python\nfrom enum import Enum\n\n# Genie\nfrom genie.utils.cisco_collections import typedset\nfrom genie.decorator import managedattribute\nfrom genie.conf.base.config import CliConfig\nfrom genie.conf.base.base import DeviceFeature, InterfaceFeature, LinkFeature\n\nfrom genie.libs.conf.base import Routing\nfrom genie.libs.conf.vrf import Vrf, VrfSubAttributes\nfrom genie.conf.base.attributes import DeviceSubAttributes, SubAttributesDict,\\\n                                       AttributesHelper, KeyedSubAttributes,\\\n                                       InterfaceSubAttributes\n\n# Multi-line config classes\nfrom .arearange import AreaRange\nfrom .stubrouter import StubRouter\nfrom .areanetwork import AreaNetwork\nfrom .gracefulrestart import GracefulRestart\nfrom .interfacestaticneighbor import InterfaceStaticNeighbor\n\n# OSPF Hierarchy\n# --------------\n# Ospf\n#  +- DeviceAttributes\n#      +- VrfAttributes\n#          +- AreaAttributes\n#              +- VirtualLinkAttributes\n#              +- ShamLinkAttributes\n#              +- InterfaceAttributes\n\n\nclass Ospf(Routing, DeviceFeature, LinkFeature):\n\n    def __init__(self, *args, **kwargs):\n        super().__init__(*args, **kwargs)\n\n    # ==========================================================================\n    #                           CONF CLASS STRUCTURE\n    # ==========================================================================\n    \n    # +- DeviceAttributes\n    class DeviceAttributes(DeviceSubAttributes):\n\n        # +- DeviceAttributes\n        #   +- VrfAttributes\n        class VrfAttributes(VrfSubAttributes):\n\n            # Graceful Restart multi-line configs\n            gr_keys = managedattribute(\n                name='gr_keys',\n                finit=typedset(managedattribute.test_isinstance(GracefulRestart)).copy,\n                type=typedset(managedattribute.test_isinstance(GracefulRestart))._from_iterable,\n                doc='A `set` of GracefulRestart keys objects')\n\n            def add_gr_key(self, gr_key):\n                self.gr_keys.add(gr_key)\n\n            def remove_gr_key(self, gr_key):\n                gr_key._device = None\n                try:\n                    self.gr_keys.remove(gr_key)\n                except:\n                    pass\n\n            # Stub Router multi-line configs\n            sr_keys = managedattribute(\n                name='sr_keys',\n                finit=typedset(managedattribute.test_isinstance(StubRouter)).copy,\n                type=typedset(managedattribute.test_isinstance(StubRouter))._from_iterable,\n                doc='A `set` of StubRouter keys objects')\n\n            def add_sr_key(self, sr_key):\n                self.sr_keys.add(sr_key)\n\n            def remove_sr_key(self, sr_key):\n                sr_key._device = None\n                try:\n                    self.sr_keys.remove(sr_key)\n                except:\n                    pass\n\n            # +- DeviceAttributes\n            #   +- VrfAttributes\n            #       +- AreaAttributes\n            class AreaAttributes(KeyedSubAttributes):\n                \n                def __init__(self, parent, key):\n                    self.area = key\n                    super().__init__(parent)\n\n                # Area Network multi-line configs\n                areanetwork_keys = managedattribute(\n                    name='areanetwork_keys',\n                    finit=typedset(managedattribute.test_isinstance(AreaNetwork)).copy,\n                    type=typedset(managedattribute.test_isinstance(AreaNetwork))._from_iterable,\n                    doc='A `set` of AreaNetwork keys objects')\n\n                def add_areanetwork_key(self, areanetwork_key):\n                    self.areanetwork_keys.add(areanetwork_key)\n\n                def remove_areanetwork_key(self, areanetwork_key):\n                    areanetwork_key._device = None\n                    try:\n                    
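# discard quietly if the key was never added, mirroring the other\n                        # remove_* helpers above\n                        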
self.areanetwork_keys.remove(areanetwork_key)\n except:\n pass\n\n # Area Range multi-line configs\n arearange_keys = managedattribute(\n name='arearange_keys',\n finit=typedset(managedattribute.test_isinstance(AreaRange)).copy,\n type=typedset(managedattribute.test_isinstance(AreaRange))._from_iterable,\n doc='A `set` of AreaRange keys objects')\n\n def add_arearange_key(self, arearange_key):\n self.arearange_keys.add(arearange_key)\n\n def remove_arearange_key(self, arearange_key):\n arearange_key._device = None\n try:\n self.arearange_keys.remove(arearange_key)\n except:\n pass\n\n # +- DeviceAttributes\n # +- VrfAttributes\n # +- AreaAttributes\n # +- VirtualLinkAttributes\n class VirtualLinkAttributes(KeyedSubAttributes):\n\n def __init__(self, parent, key):\n self.vlink = key\n super().__init__(parent)\n\n virtual_link_attr = managedattribute(\n name='virtual_link_attr',\n read_only=True,\n doc=VirtualLinkAttributes.__doc__)\n\n @virtual_link_attr.initter\n def virtual_link_attr(self):\n return SubAttributesDict(self.VirtualLinkAttributes, parent=self)\n\n # +- DeviceAttributes\n # +- VrfAttributes\n # +- AreaAttributes\n # +- ShamLinkAttributes\n class ShamLinkAttributes(KeyedSubAttributes):\n\n def __init__(self, parent, key):\n self.vlink = key\n super().__init__(parent)\n\n sham_link_attr = managedattribute(\n name='sham_link_attr',\n read_only=True,\n doc=ShamLinkAttributes.__doc__)\n\n @sham_link_attr.initter\n def sham_link_attr(self):\n return SubAttributesDict(self.ShamLinkAttributes, parent=self)\n\n # +- DeviceAttributes\n # +- VrfAttributes\n # +- AreaAttributes\n # +- InterfaceAttributes\n class InterfaceAttributes(InterfaceSubAttributes):\n\n # Interface Static Neighbor multi-line configs\n intf_staticnbr_keys = managedattribute(\n name='intf_staticnbr_keys',\n finit=typedset(managedattribute.test_isinstance(InterfaceStaticNeighbor)).copy,\n type=typedset(managedattribute.test_isinstance(InterfaceStaticNeighbor))._from_iterable,\n doc='A `set` of InterfaceStaticNeighbor keys objects')\n\n def add_staticnbr_key(self, intf_staticnbr_key):\n self.intf_staticnbr_keys.add(intf_staticnbr_key)\n\n def remove_staticnbr_key(self, intf_staticnbr_key):\n intf_staticnbr_key._device = None\n try:\n self.intf_staticnbr_keys.remove(intf_staticnbr_key)\n except:\n pass\n\n interface_attr = managedattribute(\n name='interface_attr',\n read_only=True,\n doc=InterfaceAttributes.__doc__)\n\n @interface_attr.initter\n def interface_attr(self):\n return SubAttributesDict(self.InterfaceAttributes, parent=self)\n\n\n area_attr = managedattribute(\n name='area_attr',\n read_only=True,\n doc=AreaAttributes.__doc__)\n\n @area_attr.initter\n def area_attr(self):\n return SubAttributesDict(self.AreaAttributes, parent=self)\n\n\n vrf_attr = managedattribute(\n name='vrf_attr',\n read_only=True,\n doc=VrfAttributes.__doc__)\n\n @vrf_attr.initter\n def vrf_attr(self):\n return SubAttributesDict(self.VrfAttributes, parent=self)\n\n device_attr = managedattribute(\n name='device_attr',\n read_only=True,\n doc=DeviceAttributes.__doc__)\n\n @device_attr.initter\n def device_attr(self):\n return SubAttributesDict(self.DeviceAttributes, parent=self)\n\n # ==========================================================================\n # GLOBAL ENUM TYPES\n # ==========================================================================\n\n class REDIST_BGP_METRIC_TYPE(Enum):\n type_one = '1'\n type_two = '2'\n\n class AUTO_COST_BANDWIDTH_UNIT(Enum):\n mbps = 'mbps'\n gbps = 'gbps'\n\n class AREA_TYPE(Enum):\n 
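# values double as the CLI keywords in 'area <id> stub|nssa';\n        # 'normal' emits no area-type line (see the platform tests)\n        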
normal = 'normal'\n stub = 'stub'\n nssa = 'nssa'\n\n class AUTH_CRYPTO_ALGORITHM(Enum):\n simple = 'simple'\n md5 = 'md5'\n\n class INTF_TYPE(Enum):\n broadcast = 'broadcast'\n non_broadcast = 'non-broadcast'\n point_to_multipoint = 'point-to-multipoint'\n point_to_point = 'point-to-point'\n\n # ==========================================================================\n # MANAGED ATTRIBUTES\n # ==========================================================================\n \n # enabled\n enabled = managedattribute(\n name='enabled',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n # ==========================================================================\n # +- DeviceAttributes\n # +- VrfAttributes\n # ==========================================================================\n\n # instance\n instance = managedattribute(\n name='instance',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n # vrf - N/A ?\n vrf = managedattribute(\n name='vrf',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n # enable\n enable = managedattribute(\n name='enable',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n # router_id\n router_id = managedattribute(\n name='router_id',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n # pref_all\n pref_all = managedattribute(\n name='pref_all',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # pref_intra_area\n pref_intra_area = managedattribute(\n name='pref_intra_area',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # pref_inter_area\n pref_inter_area = managedattribute(\n name='pref_inter_area',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # pref_internal\n pref_internal = managedattribute(\n name='pref_internal',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # pref_external\n pref_external = managedattribute(\n name='pref_external',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # nsr_enable\n nsr_enable = managedattribute(\n name='nsr_enable',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n # ldp_autoconfig\n ldp_autoconfig = managedattribute(\n name='ldp_autoconfig',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n # ldp_auto_config_area_id\n ldp_auto_config_area_id = managedattribute(\n name='ldp_auto_config_area_id',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n # ldp_igp_sync\n ldp_igp_sync = managedattribute(\n name='ldp_igp_sync',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n # redist_bgp_id\n redist_bgp_id = managedattribute(\n name='redist_bgp_id',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # redist_bgp_metric\n redist_bgp_metric = managedattribute(\n name='redist_bgp_metric',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # redist_bgp_metric_type\n redist_bgp_metric_type = managedattribute(\n name='redist_bgp_metric_type',\n default=REDIST_BGP_METRIC_TYPE.type_one,\n type=(None, REDIST_BGP_METRIC_TYPE))\n\n # redist_bgp_nssa_only\n redist_bgp_nssa_only = managedattribute(\n name='redist_bgp_nssa_only',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n # redist_bgp_route_map\n redist_bgp_route_map = managedattribute(\n name='redist_bgp_route_map',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n # redist_bgp_subnets\n redist_bgp_subnets = 
managedattribute(\n name='redist_bgp_subnets',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n # redist_bgp_tag\n redist_bgp_tag = managedattribute(\n name='redist_bgp_tag',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # redist_bgp_lsa_type_summary \n redist_bgp_lsa_type_summary = managedattribute(\n name='redist_bgp_lsa_type_summary',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n # redist_bgp_preserve_med\n redist_bgp_preserve_med = managedattribute(\n name='redist_bgp_preserve_med',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n # redist_connected\n redist_connected = managedattribute(\n name='redist_connected',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n # redist_connected_metric\n redist_connected_metric = managedattribute(\n name='redist_connected_metric',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # redist_connected_route_policy\n redist_connected_route_policy = managedattribute(\n name='redist_connected_route_policy',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n # redist_static\n redist_static = managedattribute(\n name='redist_static',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n # redist_static_metric\n redist_static_metric = managedattribute(\n name='redist_static_metric',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # redist_static_route_policy\n redist_static_route_policy = managedattribute(\n name='redist_static_route_policy',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n # redist_isis\n redist_isis = managedattribute(\n name='redist_isis',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n # redist_isis_metric\n redist_isis_metric = managedattribute(\n name='redist_isis_metric',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # redist_isis_route_policy\n redist_isis_route_policy = managedattribute(\n name='redist_isis_route_policy',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n # redist_max_prefix\n redist_max_prefix = managedattribute(\n name='redist_max_prefix',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # redist_max_prefix_thld\n redist_max_prefix_thld = managedattribute(\n name='redist_max_prefix_thld',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # redist_max_prefix_warn_only\n redist_max_prefix_warn_only = managedattribute(\n name='redist_max_prefix_warn_only',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n # bfd_enable\n bfd_enable = managedattribute(\n name='bfd_enable',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n # bfd_strict_mode\n bfd_strict_mode = managedattribute(\n name='bfd_strict_mode',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n # te_router_id\n te_router_id = managedattribute(\n name='te_router_id',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n # log_adjacency_changes\n log_adjacency_changes = managedattribute(\n name='log_adjacency_changes',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n # log_adjacency_changes_detail\n log_adjacency_changes_detail = managedattribute(\n name='log_adjacency_changes_detail',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n # adjacency_stagger_initial_number\n adjacency_stagger_initial_number = managedattribute(\n 
name='adjacency_stagger_initial_number',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # adjacency_stagger_maximum_number\n adjacency_stagger_maximum_number = managedattribute(\n name='adjacency_stagger_maximum_number',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # adjacency_stagger_disable\n adjacency_stagger_disable = managedattribute(\n name='adjacency_stagger_disable',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n # adjacency_stagger_no_initial_limit\n adjacency_stagger_no_initial_limit = managedattribute(\n name='adjacency_stagger_no_initial_limit',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n # auto_cost_enable\n auto_cost_enable = managedattribute(\n name='auto_cost_enable',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n # auto_cost_reference_bandwidth\n auto_cost_reference_bandwidth = managedattribute(\n name='auto_cost_reference_bandwidth',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # auto_cost_bandwidth_unit\n auto_cost_bandwidth_unit = managedattribute(\n name='auto_cost_bandwidth_unit',\n default=None,\n type=(None, AUTO_COST_BANDWIDTH_UNIT))\n\n # maximum_interfaces\n maximum_interfaces = managedattribute(\n name='maximum_interfaces',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # spf_paths\n spf_paths = managedattribute(\n name='spf_paths',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # spf_start\n spf_start = managedattribute(\n name='spf_start',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # spf_hold\n spf_hold = managedattribute(\n name='spf_hold',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # spf_maximum\n spf_maximum = managedattribute(\n name='spf_maximum',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # spf_lsa_start\n spf_lsa_start = managedattribute(\n name='spf_lsa_start',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # spf_lsa_hold\n spf_lsa_hold = managedattribute(\n name='spf_lsa_hold',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # spf_lsa_maximum\n spf_lsa_maximum = managedattribute(\n name='spf_lsa_maximum',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # db_ctrl_max_lsa\n db_ctrl_max_lsa = managedattribute(\n name='db_ctrl_max_lsa',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # default_originate\n default_originate = managedattribute(\n name='default_originate',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n # default_originate_always\n default_originate_always = managedattribute(\n name='default_originate_always',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n # ==========================================================================\n # +- DeviceAttributes\n # +- VrfAttributes\n # +- AreaAttributes\n # ==========================================================================\n \n # area_id\n area_id = managedattribute(\n name='area_id',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n # area_te_enable\n area_te_enable = managedattribute(\n name='area_te_enable',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n # area_bfd_enable\n area_bfd_enable = managedattribute(\n name='area_bfd_enable',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n # area_bfd_min_interval\n 
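# (presumably the BFD minimum tx/rx interval in milliseconds; the unit\n    # is not stated in this module)\n    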
area_bfd_min_interval = managedattribute(\n name='area_bfd_min_interval',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # area_bfd_multiplier\n area_bfd_multiplier = managedattribute(\n name='area_bfd_multiplier',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # area_passive\n area_passive = managedattribute(\n name='area_passive',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n # area_mtu_ignore\n area_mtu_ignore = managedattribute(\n name='area_mtu_ignore',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n # area_demand_cirtuit\n area_demand_cirtuit = managedattribute(\n name='area_demand_cirtuit',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n # area_external_out\n area_external_out = managedattribute(\n name='area_external_out',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n # area_flood_reduction\n area_flood_reduction = managedattribute(\n name='area_flood_reduction',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n # area_link_down_fast_detect\n area_link_down_fast_detect = managedattribute(\n name='area_link_down_fast_detect',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n # area_ldp_auto_config\n area_ldp_auto_config = managedattribute(\n name='area_ldp_auto_config',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n # area_ldp_sync\n area_ldp_sync = managedattribute(\n name='area_ldp_sync',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n # area_ldp_sync_igp_shortcuts\n area_ldp_sync_igp_shortcuts = managedattribute(\n name='area_ldp_sync_igp_shortcuts',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n # area_type\n area_type = managedattribute(\n name='area_type',\n default=AREA_TYPE.normal,\n type=(None, AREA_TYPE))\n\n # summary\n summary = managedattribute(\n name='summary',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n # default_cost\n default_cost = managedattribute(\n name='default_cost',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # ==========================================================================\n # +- DeviceAttributes\n # +- VrfAttributes\n # +- AreaAttributes\n # +- VirtualLinkAttributes\n # ==========================================================================\n\n # vl_router_id\n vl_router_id = managedattribute(\n name='vl_router_id',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n # vl_hello_interval\n vl_hello_interval = managedattribute(\n name='vl_hello_interval',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # vl_dead_interval\n vl_dead_interval = managedattribute(\n name='vl_dead_interval',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # vl_retransmit_interval\n vl_retransmit_interval = managedattribute(\n name='vl_retransmit_interval',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # vl_transmit_delay\n vl_transmit_delay = managedattribute(\n name='vl_transmit_delay',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # vl_ttl_sec_hops\n vl_ttl_sec_hops = managedattribute(\n name='vl_ttl_sec_hops',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # vl_auth_trailer_key_chain\n vl_auth_trailer_key_chain = managedattribute(\n name='vl_auth_trailer_key_chain',\n default=None,\n type=(None, 
managedattribute.test_istype(str)))\n\n # vl_auth_trailer_key\n vl_auth_trailer_key = managedattribute(\n name='vl_auth_trailer_key',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n # vl_auth_trailer_key_crypto_algorithm\n vl_auth_trailer_key_crypto_algorithm = managedattribute(\n name='vl_auth_trailer_key_crypto_algorithm',\n default=None,\n type=(None, AUTH_CRYPTO_ALGORITHM))\n\n # ==========================================================================\n # +- DeviceAttributes\n # +- VrfAttributes\n # +- AreaAttributes\n # +- ShamLinkAttributes\n # ==========================================================================\n\n # sl_local_id\n sl_local_id = managedattribute(\n name='sl_local_id',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n # sl_remote_id\n sl_remote_id = managedattribute(\n name='sl_remote_id',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n # sl_hello_interval\n sl_hello_interval = managedattribute(\n name='sl_hello_interval',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # sl_dead_interval\n sl_dead_interval = managedattribute(\n name='sl_dead_interval',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # sl_retransmit_interval\n sl_retransmit_interval = managedattribute(\n name='sl_retransmit_interval',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # sl_transmit_delay\n sl_transmit_delay = managedattribute(\n name='sl_transmit_delay',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # sl_ttl_sec_hops\n sl_ttl_sec_hops = managedattribute(\n name='sl_ttl_sec_hops',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # sl_cost\n sl_cost = managedattribute(\n name='sl_cost',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # sl_auth_trailer_key_chain\n sl_auth_trailer_key_chain = managedattribute(\n name='sl_auth_trailer_key_chain',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n # sl_auth_trailer_key\n sl_auth_trailer_key = managedattribute(\n name='sl_auth_trailer_key',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n # sl_auth_trailer_key_crypto_algorithm\n sl_auth_trailer_key_crypto_algorithm = managedattribute(\n name='sl_auth_trailer_key_crypto_algorithm',\n default=None,\n type=(None, AUTH_CRYPTO_ALGORITHM))\n \n\n # ==========================================================================\n # +- DeviceAttributes\n # +- VrfAttributes\n # +- AreaAttributes\n # +- InterfaceAttributes\n # ==========================================================================\n\n # if_name - Attribute key\n\n # if_admin_control\n if_admin_control = managedattribute(\n name='if_admin_control',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n # if_cost\n if_cost = managedattribute(\n name='if_cost',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # if_type\n if_type = managedattribute(\n name='if_type',\n default=None,\n type=(None, INTF_TYPE))\n\n # if_passive\n if_passive = managedattribute(\n name='if_passive',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n # if_demand_circuit\n if_demand_circuit = managedattribute(\n name='if_demand_circuit',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n # if_priority\n if_priority = managedattribute(\n name='if_priority',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # if_bfd_enable\n 
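# (rendered as 'ip ospf bfd' in the iosxe test earlier in this dump;\n    # each platform builder decides the exact CLI)\n    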
if_bfd_enable = managedattribute(\n name='if_bfd_enable',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n # if_bfd_min_interval\n if_bfd_min_interval = managedattribute(\n name='if_bfd_min_interval',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # if_bfd_multiplier\n if_bfd_multiplier = managedattribute(\n name='if_bfd_multiplier',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # if_bfd_interval\n if_bfd_interval = managedattribute(\n name='if_bfd_interval',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # if_hello_interval\n if_hello_interval = managedattribute(\n name='if_hello_interval',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # if_dead_interval\n if_dead_interval = managedattribute(\n name='if_dead_interval',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # if_retransmit_interval\n if_retransmit_interval = managedattribute(\n name='if_retransmit_interval',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # if_lls\n if_lls = managedattribute(\n name='if_lls',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n # if_ttl_sec_enable\n if_ttl_sec_enable = managedattribute(\n name='if_ttl_sec_enable',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n # if_ttl_sec_hops\n if_ttl_sec_hops = managedattribute(\n name='if_ttl_sec_hops',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # if_auth_trailer_key_chain\n if_auth_trailer_key_chain = managedattribute(\n name='if_auth_trailer_key_chain',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n # if_auth_trailer_key\n if_auth_trailer_key = managedattribute(\n name='if_auth_trailer_key',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n # if_auth_trailer_key_crypto_algorithm\n if_auth_trailer_key_crypto_algorithm = managedattribute(\n name='if_auth_trailer_key_crypto_algorithm',\n default=None,\n type=(None, AUTH_CRYPTO_ALGORITHM))\n\n # if_mtu_ignore\n if_mtu_ignore = managedattribute(\n name='if_mtu_ignore',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n # if_prefix_suppression\n if_prefix_suppression = managedattribute(\n name='if_prefix_suppression',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n\n # ==========================================================================\n # BUILD_CONFIG & BUILD_UNCONFIG\n # ==========================================================================\n\n def build_config(self, devices=None, apply=True, attributes=None,\n **kwargs):\n cfgs = {}\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n\n if devices is None:\n devices = self.devices\n devices = set(devices)\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr',\n keys=devices, sort=True):\n cfgs[key] = sub.build_config(apply=False, attributes=attributes2)\n\n if apply:\n self.testbed.config_on_devices(cfgs, fail_invalid=True)\n else:\n return cfgs\n\n\n def build_unconfig(self, devices=None, apply=True, attributes=None,\n **kwargs):\n cfgs = {}\n attributes = AttributesHelper(self, attributes)\n\n if devices is None:\n devices = self.devices\n devices = set(devices)\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr',\n keys=devices, sort=True):\n cfgs[key] = sub.build_unconfig(apply=False, attributes=attributes2)\n\n if apply:\n self.testbed.config_on_devices(cfgs, 
fail_invalid=True)\n        else:\n            return cfgs\n\n\n", "id": "6567042", "language": "Python", "matching_score": 7.01358699798584, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/ospf/ospf.py" }, { "content": "''' \nOSPF Genie Conf Object Implementation for IOSXR - CLI.\n'''\n\n# Python\nfrom abc import ABC\n\n# Genie\nfrom genie.conf.base.attributes import AttributesHelper\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.config import CliConfig\n\n# Ospf\n# +- DeviceAttributes\n#   +- VrfAttributes\n#     +- AreaAttributes\n#       +- VirtualLinkAttributes\n#       +- ShamLinkAttributes\n#       +- InterfaceAttributes\n\n\nclass Ospf(ABC):\n\n    class DeviceAttributes(ABC):\n\n        def build_config(self, apply=True, attributes=None, unconfig=False,\n                         **kwargs):\n            assert not kwargs, kwargs\n            attributes = AttributesHelper(self, attributes)\n            configurations = CliConfigBuilder(unconfig=unconfig)\n\n            # +- DeviceAttributes\n            #   +- VrfAttributes\n            for sub, attributes2 in attributes.mapping_values('vrf_attr', \n                                                              sort=True, \n                                                              keys=self.vrf_attr):\n                configurations.append_block(\n                    sub.build_config(apply=False,\n                                     attributes=attributes2,\n                                     unconfig=unconfig))\n\n            if apply:\n                if configurations:\n                    self.device.configure(configurations)\n            else:\n                return CliConfig(device=self.device, unconfig=unconfig,\n                                 cli_config=configurations)\n\n        def build_unconfig(self, apply=True, attributes=None, **kwargs):\n            return self.build_config(apply=apply, attributes=attributes,\n                                     unconfig=True, **kwargs)\n\n        # +- DeviceAttributes\n        #   +- VrfAttributes\n        class VrfAttributes(ABC):\n\n            def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n                assert not apply\n                attributes = AttributesHelper(self, attributes)\n                configurations = CliConfigBuilder(unconfig=unconfig)\n\n                # router ospf 1\n                # router ospf 1 vrf VRF1\n                with configurations.submode_context(\n                    attributes.format('router ospf {instance} vrf {vrf_name}', force=True) if self.vrf_name != 'default' else \\\n                    attributes.format('router ospf {instance}', force=True)):\n\n                    if unconfig and attributes.iswildcard:\n                        configurations.submode_unconfig()\n\n                    # router ospf 1\n                    #   router-id 1.1.1.1\n                    if attributes.value('router_id'):\n                        configurations.append_line(attributes.format('router-id {router_id}'))\n\n                    # router ospf 1\n                    #   distance 110\n                    if attributes.value('pref_all'):\n                        configurations.append_line(attributes.format('distance {pref_all}'))\n\n                    # router ospf 1\n                    #   distance ospf inter-area 30\n                    #   distance ospf intra-area 40\n                    #   distance ospf external 50\n                    #   distance ospf inter-area 30 intra-area 40\n                    #   distance ospf intra-area 40 external 50\n                    #   distance ospf inter-area 30 external 50\n                    #   distance ospf inter-area 30 intra-area 40 external 50\n                    if attributes.value('pref_intra_area') or \\\n                        attributes.value('pref_inter_area') or \\\n                        attributes.value('pref_external'):\n\n                        # distance ospf\n                        dist_cfg_str = 'distance ospf'\n\n                        # If internal, overwrite intra with inter\n                        if attributes.value('pref_internal'):\n                            attributes.value('pref_intra_area').value = attributes.value('pref_inter_area').value\n\n                        # + intra-area {pref_intra_area}\n                        if attributes.value('pref_intra_area'):\n                            dist_cfg_str += ' intra-area {pref_intra_area}'\n                        \n                        # + inter-area {pref_inter_area}\n                        if attributes.value('pref_inter_area'):\n                            dist_cfg_str += ' inter-area {pref_inter_area}'\n                        \n                        # + external {pref_external}\n                        if attributes.value('pref_external'):\n                            dist_cfg_str += ' external {pref_external}'\n\n                        configurations.append_line(attributes.format(dist_cfg_str))\n\n                    # router ospf 1\n                    #   nsr\n                    #   nsr 
disable\n                    if attributes.value('nsr_enable') is True and self.vrf_name == 'default':\n                        configurations.append_line(attributes.format('nsr'))\n                    elif attributes.value('nsr_enable') is False and self.vrf_name == 'default':\n                        configurations.append_line(attributes.format('nsr disable'))\n\n                    # GracefulRestart attributes config\n                    for gr_key, attributes2 in attributes.sequence_values('gr_keys', sort=True):\n                        if unconfig:\n                            configurations.append_block(gr_key.build_unconfig(\n                                apply=False, attributes=attributes2, **kwargs))\n                        else:\n                            configurations.append_block(gr_key.build_config(\n                                apply=False, attributes=attributes2, **kwargs))\n\n                    # router ospf 1\n                    #   mpls ldp auto-config\n                    if attributes.value('ldp_autoconfig') and self.vrf_name == 'default':\n                        configurations.append_line(attributes.format('mpls ldp auto-config'))\n\n                    # router ospf 1\n                    #   mpls ldp sync\n                    if attributes.value('ldp_igp_sync'):\n                        configurations.append_line(attributes.format('mpls ldp sync'))\n\n                    # router ospf 1\n                    #   redistribute bgp 100\n                    #   redistribute bgp 100 lsa-type summary\n                    #   redistribute bgp 100 lsa-type summary metric 10\n                    #   redistribute bgp 100 lsa-type summary metric 10 metric-type 2\n                    #   redistribute bgp 100 lsa-type summary metric 10 metric-type 2 nssa-only\n                    #   redistribute bgp 100 lsa-type summary metric 10 metric-type 2 nssa-only preserve-med\n                    #   redistribute bgp 100 lsa-type summary metric 10 metric-type 2 nssa-only preserve-med tag 24\n                    #   redistribute bgp 100 lsa-type summary metric 10 metric-type 2 nssa-only preserve-med tag 24 route-policy BGP_TO_OSPF\n                    if attributes.value('redist_bgp_id'):\n\n                        # redistribute bgp {redist_bgp_id}\n                        redist_bgp_str = 'redistribute bgp {redist_bgp_id}'\n\n                        # + lsa-type summary\n                        if attributes.value('redist_bgp_lsa_type_summary'):\n                            redist_bgp_str += ' lsa-type summary'\n\n                        # + metric {redist_bgp_metric}\n                        if attributes.value('redist_bgp_metric'):\n                            redist_bgp_str += ' metric {redist_bgp_metric}'\n\n                        # + metric-type {redist_bgp_metric_type}\n                        if attributes.value('redist_bgp_metric_type'):\n                            redist_type = attributes.value('redist_bgp_metric_type').value\n                            redist_bgp_str += ' metric-type {}'.format(redist_type)\n\n                        # + nssa-only\n                        if attributes.value('redist_bgp_nssa_only'):\n                            redist_bgp_str += ' nssa-only'\n\n                        # + preserve-med\n                        if attributes.value('redist_bgp_preserve_med'):\n                            redist_bgp_str += ' preserve-med'\n\n                        # + tag {redist_bgp_tag}\n                        if attributes.value('redist_bgp_tag'):\n                            redist_bgp_str += ' tag {redist_bgp_tag}'\n\n                        # + route-policy {redist_bgp_route_map}\n                        if attributes.value('redist_bgp_route_map'):\n                            redist_bgp_str += ' route-policy {redist_bgp_route_map}'\n\n                        configurations.append_line(attributes.format(redist_bgp_str))\n\n                    # router ospf 1\n                    #   redistribute connected\n                    #   redistribute connected metric 10\n                    #   redistribute connected metric 10 route-policy BGP_TO_OSPF\n                    if attributes.value('redist_connected'):\n\n                        # redistribute connected\n                        redist_connected_str = 'redistribute connected'\n\n                        # + metric {redist_connected_metric}\n                        if attributes.value('redist_connected_metric'):\n                            redist_connected_str += ' metric {redist_connected_metric}'\n\n                        # + route-policy {redist_connected_route_policy}\n                        if attributes.value('redist_connected_route_policy'):\n                            redist_connected_str += ' route-policy {redist_connected_route_policy}'\n\n                        configurations.append_line(attributes.format(redist_connected_str))\n\n                    # router ospf 1\n                    #   redistribute static\n                    #   redistribute static metric 10\n                    #   redistribute static metric 10 route-policy BGP_TO_OSPF\n                    if attributes.value('redist_static'):\n\n                        # redistribute 
static\n redist_static_str = 'redistribute static'\n\n # + metric {redist_static_metric}\n if attributes.value('redist_static_metric'):\n redist_static_str += ' metric {redist_static_metric}'\n\n # + route-policy {redist_static_route_policy}\n if attributes.value('redist_static_route_policy'):\n redist_static_str += ' route-policy {redist_static_route_policy}'\n\n configurations.append_line(attributes.format(redist_static_str))\n\n # router ospf 1\n # redistribute isis ABC\n # redistribute isis ABC metric 10\n # redistribute isis ABC metric 10 route-policy test\n if attributes.value('redist_isis'):\n\n # redistribute isis {redist_isis}\n redist_isis_str = 'redistribute isis {redist_isis}'\n\n # + metric {redist_isis_metric}\n if attributes.value('redist_isis_metric'):\n redist_isis_str += ' metric {redist_isis_metric}'\n\n # + route-policy {redist_isis_route_policy}\n if attributes.value('redist_isis_route_policy'):\n redist_isis_str += ' route-policy {redist_isis_route_policy}'\n\n configurations.append_line(attributes.format(redist_isis_str))\n\n # router ospf 1\n # maximum redistributed-prefixes 10\n # maximum redistributed-prefixes 10 50\n # maximum redistributed-prefixes 10 50 warning-only\n if attributes.value('redist_max_prefix'):\n\n # maximum redistributed-prefixes {redist_max_prefix}\n redist_maxpfx_str = 'maximum redistributed-prefixes {redist_max_prefix}'\n\n # + {redist_max_prefix_thld}\n if attributes.value('redist_max_prefix_thld'):\n redist_maxpfx_str += ' {redist_max_prefix_thld}'\n\n # + warning-only\n if attributes.value('redist_max_prefix_warn_only'):\n redist_maxpfx_str += ' warning-only'\n\n configurations.append_line(attributes.format(redist_maxpfx_str))\n\n # router ospf 1\n # bfd fast-detect\n # bfd fast-detect strict-mode\n if attributes.value('bfd_enable'):\n\n # bfd all-interfaces\n bfd_str = 'bfd fast-detect'\n\n if attributes.value('bfd_strict_mode'):\n bfd_str += ' strict-mode'\n\n configurations.append_line(attributes.format(bfd_str))\n\n # router ospf 1\n # mpls traffic-eng router-id Loopback0\n if attributes.value('te_router_id') and self.vrf_name == 'default':\n configurations.append_line(attributes.format('mpls traffic-eng router-id {te_router_id}'))\n\n # router ospf 1\n # log adjacency changes\n # log adjacency changes detail\n if attributes.value('log_adjacency_changes'):\n\n # log adjacency changes\n log_str = 'log adjacency changes'\n\n # + detail\n if attributes.value('log_adjacency_changes_detail'):\n log_str += ' detail'\n \n configurations.append_line(attributes.format(log_str))\n\n # router ospf 1\n # adjacency stagger 563 1625\n if attributes.value('adjacency_stagger_initial_number') and\\\n attributes.value('adjacency_stagger_maximum_number'):\n configurations.append_line(attributes.format(\n 'adjacency stagger {adjacency_stagger_initial_number} {adjacency_stagger_maximum_number}'))\n\n # router ospf 1\n # no auto-cost disable\n # auto-cost reference-bandwidth 60000\n if attributes.value('auto_cost_enable') is False:\n configurations.append_line(attributes.format('auto-cost disable'))\n elif attributes.value('auto_cost_enable') is True and \\\n attributes.value('auto_cost_reference_bandwidth'):\n\n # auto-cost\n auto_cost_str = 'auto-cost reference-bandwidth'\n\n # Calculate bandwidth based on unit type\n if attributes.value('auto_cost_bandwidth_unit') and \\\n attributes.value('auto_cost_bandwidth_unit').value == 'gbps':\n bandwidth = str(attributes.value('auto_cost_reference_bandwidth') * 1000)\n else:\n bandwidth = 
attributes.value('auto_cost_reference_bandwidth')\n auto_cost_str += ' {}'.format(bandwidth)\n\n configurations.append_line(attributes.format(auto_cost_str))\n\n # router ospf 1\n # maximum paths 30\n if attributes.value('spf_paths'):\n configurations.append_line(attributes.format('maximum paths {spf_paths}'))\n\n # router ospf 1\n # maximum interfaces 123\n if attributes.value('maximum_interfaces'):\n configurations.append_line(attributes.format('maximum interfaces {maximum_interfaces}'))\n\n # router ospf 1\n # timers throttle spf 5000 10000 20000\n if attributes.value('spf_start'):\n\n # timers throttle spf {spf_start}\n throttle_str = 'timers throttle spf {spf_start}'\n\n # + {spf_hold}\n if attributes.value('spf_hold'):\n throttle_str += ' {spf_hold}'\n\n # + {spf_maximum}\n if attributes.value('spf_maximum'):\n throttle_str += ' {spf_maximum}'\n\n configurations.append_line(attributes.format(throttle_str))\n\n # router ospf 1\n # timers throttle lsa all 5000 10000 20000\n if attributes.value('spf_lsa_start'):\n\n # timers throttle {spf_lsa_start}\n throttle_lsa = 'timers throttle lsa all {spf_lsa_start}'\n\n # + {spf_lsa_hold}\n if attributes.value('spf_lsa_hold'):\n throttle_lsa += ' {spf_lsa_hold}'\n\n # + {spf_lsa_maximum}\n if attributes.value('spf_lsa_maximum'):\n throttle_lsa += ' {spf_lsa_maximum}'\n\n configurations.append_line(attributes.format(throttle_lsa))\n\n # router ospf 1\n # max-lsa 56666666\n if attributes.value('db_ctrl_max_lsa'):\n configurations.append_line(attributes.format('max-lsa {db_ctrl_max_lsa}'))\n\n # StubRouter attributes config\n for sr_key, attributes2 in attributes.sequence_values('sr_keys', sort=True):\n if unconfig:\n configurations.append_block(sr_key.build_unconfig(\n apply=False, attributes=attributes2, **kwargs))\n else:\n configurations.append_block(sr_key.build_config(\n apply=False, attributes=attributes2, **kwargs))\n\n # router ospf 1\n # default-information originate always\n if attributes.value('default_originate'):\n \n # + default-information originate\n default_originate_str = 'default-information originate'\n\n # + always\n if attributes.value('default_originate_always'):\n default_originate_str += ' always'\n\n configurations.append_line(attributes.format(default_originate_str))\n\n # +- DeviceAttributes\n # +- VrfAttributes\n # +- AreaAttributes\n for sub, attributes2 in attributes.mapping_values('area_attr', \n sort=True, \n keys=self.area_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n # +- DeviceAttributes\n # +- VrfAttributes\n # +- AreaAttributes\n class AreaAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # router ospf 1\n # area 0.0.0.0\n with configurations.submode_context(attributes.format('area {area_id}', force=True)):\n\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n # router ospf 1\n # area 0.0.0.0\n # mpls traffic-eng\n if attributes.value('area_te_enable') and self.vrf_name == 'default':\n configurations.append_line(attributes.format('mpls traffic-eng'))\n\n # router ospf 1\n # area 0.0.0.0\n # bfd fast-detect\n # bfd fast-detect disable\n if 
attributes.value('area_bfd_enable') is True:\n                        configurations.append_line(attributes.format('bfd fast-detect'))\n                    elif attributes.value('area_bfd_enable') is False:\n                        configurations.append_line(attributes.format('bfd fast-detect disable'))\n\n                    # router ospf 1\n                    #   area 0.0.0.0\n                    #     bfd minimum-interval {area_bfd_min_interval}\n                    if attributes.value('area_bfd_min_interval'):\n                        configurations.append_line(attributes.format('bfd minimum-interval {area_bfd_min_interval}'))\n\n                    # router ospf 1\n                    #   area 0.0.0.0\n                    #     bfd multiplier {area_bfd_multiplier}\n                    if attributes.value('area_bfd_multiplier'):\n                        configurations.append_line(attributes.format('bfd multiplier {area_bfd_multiplier}'))\n\n                    # router ospf 1\n                    #   area 0.0.0.0\n                    #     passive enable\n                    #     passive disable\n                    if attributes.value('area_passive') is True:\n                        configurations.append_line(attributes.format('passive enable'))\n                    elif attributes.value('area_passive') is False:\n                        configurations.append_line(attributes.format('passive disable'))\n\n                    # router ospf 1\n                    #   area 0.0.0.0\n                    #     mtu-ignore enable\n                    #     mtu-ignore disable\n                    if attributes.value('area_mtu_ignore') is True:\n                        configurations.append_line(attributes.format('mtu-ignore enable'))\n                    elif attributes.value('area_mtu_ignore') is False:\n                        configurations.append_line(attributes.format('mtu-ignore disable'))\n\n                    # router ospf 1\n                    #   area 0.0.0.0\n                    #     demand-circuit enable\n                    #     demand-circuit disable\n                    if attributes.value('area_demand_cirtuit') is True:\n                        configurations.append_line(attributes.format('demand-circuit enable'))\n                    elif attributes.value('area_demand_cirtuit') is False:\n                        configurations.append_line(attributes.format('demand-circuit disable'))\n\n                    # router ospf 1\n                    #   area 0.0.0.0\n                    #     external-out enable\n                    #     external-out disable\n                    if attributes.value('area_external_out') is True:\n                        configurations.append_line(attributes.format('external-out enable'))\n                    elif attributes.value('area_external_out') is False:\n                        configurations.append_line(attributes.format('external-out disable'))\n\n                    # router ospf 1\n                    #   area 0.0.0.0\n                    #     flood-reduction enable\n                    #     flood-reduction disable\n                    if attributes.value('area_flood_reduction') is True:\n                        configurations.append_line(attributes.format('flood-reduction enable'))\n                    elif attributes.value('area_flood_reduction') is False:\n                        configurations.append_line(attributes.format('flood-reduction disable'))\n\n                    # router ospf 1\n                    #   area 0.0.0.0\n                    #     link-down fast-detect\n                    #     link-down fast-detect disable\n                    if attributes.value('area_link_down_fast_detect') is True:\n                        configurations.append_line(attributes.format('link-down fast-detect'))\n                    elif attributes.value('area_link_down_fast_detect') is False:\n                        configurations.append_line(attributes.format('link-down fast-detect disable'))\n\n                    # router ospf 1\n                    #   area 0.0.0.0\n                    #     mpls ldp auto-config\n                    if attributes.value('area_ldp_auto_config') and self.vrf_name == 'default':\n                        configurations.append_line(attributes.format('mpls ldp auto-config'))\n\n                    # router ospf 1\n                    #   area 0.0.0.0\n                    #     mpls ldp sync\n                    #     mpls ldp sync disable\n                    if attributes.value('area_ldp_sync') is True:\n                        configurations.append_line(attributes.format('mpls ldp sync'))\n                    elif attributes.value('area_ldp_sync') is False:\n                        configurations.append_line(attributes.format('mpls ldp sync disable'))\n\n                    # router ospf 1\n                    #   area 0.0.0.0\n                    #     mpls ldp sync-igp-shortcuts\n                    if attributes.value('area_ldp_sync_igp_shortcuts') and self.vrf_name == 'default':\n                        configurations.append_line(attributes.format('mpls ldp sync-igp-shortcuts'))\n\n                    # router ospf 1\n                    #   area 0.0.0.0\n                    #     stub\n                    #     nssa\n                    #     stub 
no-summary\n # nssa no-summary\n if attributes.value('area_type').value != 'normal':\n # stub\n # nssa\n atype = attributes.value('area_type').value\n type_str = ' {}'.format(atype)\n\n # + no-summary\n if attributes.value('summary') is False:\n type_str += ' no-summary'\n\n configurations.append_line(attributes.format(type_str))\n\n # router ospf 1\n # area 0.0.0.0\n # default-cost 100\n if attributes.value('default_cost'):\n configurations.append_line(attributes.format('default-cost {default_cost}'))\n\n # AreaRange attributes config\n for arearange_key, attributes2 in attributes.sequence_values('arearange_keys', sort=True):\n if unconfig:\n configurations.append_block(arearange_key.build_unconfig(\n apply=False, attributes=attributes2, **kwargs))\n else:\n configurations.append_block(arearange_key.build_config(\n apply=False, attributes=attributes2, **kwargs))\n\n # +- DeviceAttributes\n # +- VrfAttributes\n # +- AreaAttributes\n # +- VirtualLinkAttributes\n for sub, attributes2 in attributes.mapping_values('virtual_link_attr', \n sort=True, \n keys=self.virtual_link_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n\n # +- DeviceAttributes\n # +- VrfAttributes\n # +- AreaAttributes\n # +- ShamLinkAttributes\n for sub, attributes2 in attributes.mapping_values('sham_link_attr', \n sort=True, \n keys=self.sham_link_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n\n # +- DeviceAttributes\n # +- VrfAttributes\n # +- AreaAttributes\n # +- InterfaceAttributes\n #configurations = CliConfigBuilder(unconfig=unconfig)\n for sub, attributes2 in attributes.mapping_values('interface_attr', \n sort=True, \n keys=self.interface_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n # +- DeviceAttributes\n # +- VrfAttributes\n # +- AreaAttributes\n # +- VirtualLinkAttributes\n class VirtualLinkAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # router ospf 1\n # area 0.0.0.0\n # virtual-link 7.7.7.7\n with configurations.submode_context(\n attributes.format('virtual-link {vl_router_id}', force=True)):\n\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n # router ospf 1\n # area 0.0.0.0\n # virtual-link 7.7.7.7\n # hello-interval 55\n if attributes.value('vl_hello_interval'):\n configurations.append_line(attributes.format('hello-interval {vl_hello_interval}'))\n\n # router ospf 1\n # area 0.0.0.0\n # virtual-link 7.7.7.7\n # dead-interval 55\n if attributes.value('vl_dead_interval'):\n configurations.append_line(attributes.format('dead-interval {vl_dead_interval}'))\n\n # router ospf 1\n # area 0.0.0.0\n # virtual-link 7.7.7.7\n # retransmit-interval 55\n if attributes.value('vl_retransmit_interval'):\n configurations.append_line(attributes.format('retransmit-interval {vl_retransmit_interval}'))\n\n # router ospf 1\n # area 0.0.0.0\n # virtual-link 7.7.7.7\n # transmit-delay 55\n if attributes.value('vl_transmit_delay'):\n configurations.append_line(attributes.format('transmit-delay {vl_transmit_delay}'))\n\n 
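# --- Illustrative aside (not part of the original sources) ---
# A minimal sketch of how these nested conf classes are typically driven from
# a script. It is shown as comments because the exact constructor and testbed
# wiring details are assumptions here; only the general device_attr/vrf_attr
# attribute pattern and build_config(apply=False) flow are taken from the
# surrounding code.
#
#   from genie.conf.base import Testbed, Device          # assumed imports
#   from genie.libs.conf.ospf import Ospf
#
#   tb = Testbed()                                       # hypothetical testbed
#   dev = Device(testbed=tb, name='R1', os='iosxr')      # hypothetical device
#   ospf = Ospf()
#   ospf.device_attr[dev].vrf_attr['default'].instance = '1'       # fills {instance}
#   ospf.device_attr[dev].vrf_attr['default'].router_id = '1.1.1.1'
#   dev.add_feature(ospf)
#
#   # apply=False walks DeviceAttributes -> VrfAttributes -> AreaAttributes ...
#   # and returns the assembled CLI instead of pushing it to the device:
#   print(ospf.build_config(apply=False))
#   # expected shape (assumed):
#   #   router ospf 1
#   #    router-id 1.1.1.1
# --- End of aside ---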
# router ospf 1\n # area 0.0.0.0\n # virtual-link 7.7.7.7\n # authentication message-digest keychain HAHA\n if attributes.value('vl_auth_trailer_key_chain'):\n configurations.append_line(attributes.format('authentication message-digest keychain {vl_auth_trailer_key_chain}'))\n\n # router ospf 1\n # area 0.0.0.0\n # virtual-link 7.7.7.7\n # authentication\n # authentication-key ABC\n # authentication message-digest\n # message-digest-key 1 md5 XYZ\n if attributes.value('vl_auth_trailer_key_crypto_algorithm'):\n\n auth_type = attributes.value('vl_auth_trailer_key_crypto_algorithm').value\n\n # authentication\n if auth_type == 'simple':\n configurations.append_line(attributes.format('authentication'))\n\n # authentication-key ABC\n if attributes.value('vl_auth_trailer_key'):\n configurations.append_line(attributes.format('authentication-key {vl_auth_trailer_key}'))\n\n # authentication message-digest\n elif auth_type == 'md5':\n configurations.append_line(attributes.format('authentication message-digest'))\n\n # message-digest-key 1 md5 XYZ\n if attributes.value('vl_auth_trailer_key'):\n configurations.append_line(attributes.format('message-digest-key 1 md5 {vl_auth_trailer_key}'))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n # +- DeviceAttributes\n # +- VrfAttributes\n # +- AreaAttributes\n # +- ShamLinkAttributes\n class ShamLinkAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # router ospf 1\n # area 0.0.0.0\n # sham-link 7.7.7.7 8.8.8.8\n with configurations.submode_context(\n attributes.format('sham-link {sl_local_id} {sl_remote_id}', force=True)):\n\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n # router ospf 1\n # area 0.0.0.0\n # sham-link 7.7.7.7 8.8.8.8\n # hello-interval 55\n if attributes.value('sl_hello_interval'):\n configurations.append_line(attributes.format('hello-interval {sl_hello_interval}'))\n\n # router ospf 1\n # area 0.0.0.0\n # sham-link 7.7.7.7 8.8.8.8\n # dead-interval 55\n if attributes.value('sl_dead_interval'):\n configurations.append_line(attributes.format('dead-interval {sl_dead_interval}'))\n\n # router ospf 1\n # area 0.0.0.0\n # sham-link 7.7.7.7 8.8.8.8\n # retransmit-interval 55\n if attributes.value('sl_retransmit_interval'):\n configurations.append_line(attributes.format('retransmit-interval {sl_retransmit_interval}'))\n\n # router ospf 1\n # area 0.0.0.0\n # sham-link 7.7.7.7 8.8.8.8\n # transmit-delay 55\n if attributes.value('sl_transmit_delay'):\n configurations.append_line(attributes.format('transmit-delay {sl_transmit_delay}'))\n\n # router ospf 1\n # area 0.0.0.0\n # sham-link 7.7.7.7 8.8.8.8\n # authentication message-digest keychain \"ottawa\"\n if attributes.value('sl_auth_trailer_key_chain'):\n configurations.append_line(attributes.format('authentication message-digest keychain {sl_auth_trailer_key_chain}'))\n\n # router ospf 1\n # area 0.0.0.0\n # sham-link 7.7.7.7 8.8.8.8\n # authentication\n # authentication-key ABC\n # authentication message-digest\n # message-digest-key 1 md5 XYZ\n if attributes.value('sl_auth_trailer_key_crypto_algorithm'):\n\n auth_type = attributes.value('sl_auth_trailer_key_crypto_algorithm').value\n\n # authentication\n if auth_type == 'simple':\n 
configurations.append_line(attributes.format('authentication'))\n\n # authentication-key ABC\n if attributes.value('sl_auth_trailer_key'):\n configurations.append_line(attributes.format('authentication-key {sl_auth_trailer_key}'))\n\n # authentication message-digest\n elif auth_type == 'md5':\n configurations.append_line(attributes.format('authentication message-digest'))\n\n # message-digest-key 1 md5 XYZ\n if attributes.value('sl_auth_trailer_key'):\n configurations.append_line(attributes.format('message-digest-key 1 md5 {sl_auth_trailer_key}'))\n\n # router ospf 1\n # area 0.0.0.0\n # sham-link 7.7.7.7 8.8.8.8\n # cost 10\n if attributes.value('sl_cost'):\n configurations.append_line(attributes.format('cost {sl_cost}'))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n # +- DeviceAttributes\n # +- VrfAttributes\n # +- AreaAttributes\n # +- InterfaceAttributes\n class InterfaceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # router ospf 1\n # area 0.0.0.0\n # interface GigabitEthernet1\n with configurations.submode_context(\n attributes.format('interface {interface_name}', force=True)):\n\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n # router ospf 1\n # area 0.0.0.0\n # interface GigabitEthernet1\n # cost 100\n if attributes.value('if_cost'):\n configurations.append_line(attributes.format('cost {if_cost}'))\n\n # InterfaceStaticNeighbor attributes config\n for intf_staticnbr_key, attributes2 in attributes.sequence_values('intf_staticnbr_keys', sort=True):\n if unconfig:\n configurations.append_block(intf_staticnbr_key.build_unconfig(\n apply=False, attributes=attributes2, **kwargs))\n else:\n configurations.append_block(intf_staticnbr_key.build_config(\n apply=False, attributes=attributes2, **kwargs))\n\n # router ospf 1\n # area 0.0.0.0\n # interface GigabitEthernet1\n # network point-to-point\n if attributes.value('if_type'):\n iftype = attributes.value('if_type').value\n configurations.append_line(attributes.format('network {}'.format(iftype)))\n\n # router ospf 1\n # area 0.0.0.0\n # interface GigabitEthernet1\n # passive\n if attributes.value('if_passive'):\n configurations.append_line(attributes.format('passive'))\n\n # router ospf 1\n # area 0.0.0.0\n # interface GigabitEthernet1\n # demand-circuit\n if attributes.value('if_demand_circuit'):\n configurations.append_line(attributes.format('demand-circuit'))\n\n # router ospf 1\n # area 0.0.0.0\n # interface GigabitEthernet1\n # priority 100\n if attributes.value('if_priority'):\n configurations.append_line(attributes.format('priority {if_priority}'))\n\n # router ospf 1\n # area 0.0.0.0\n # interface GigabitEthernet1\n # bfd fast-detect\n if attributes.value('if_bfd_enable'):\n configurations.append_line(attributes.format('bfd fast-detect'))\n\n # router ospf 1\n # area 0.0.0.0\n # interface GigabitEthernet1\n # bfd minimum-interval 50\n if attributes.value('if_bfd_interval'):\n configurations.append_line(attributes.format('bfd minimum-interval {if_bfd_interval}'))\n\n # router ospf 1\n # area 0.0.0.0\n # interface GigabitEthernet1\n # bfd multiplier 7\n if attributes.value('if_bfd_multiplier'):\n configurations.append_line(attributes.format('bfd multiplier 
{if_bfd_multiplier}'))\n\n                    # router ospf 1\n                    #   area 0.0.0.0\n                    #     interface GigabitEthernet1\n                    #       hello-interval 10\n                    if attributes.value('if_hello_interval'):\n                        configurations.append_line(attributes.format('hello-interval {if_hello_interval}'))\n\n                    # router ospf 1\n                    #   area 0.0.0.0\n                    #     interface GigabitEthernet1\n                    #       dead-interval 10\n                    if attributes.value('if_dead_interval'):\n                        configurations.append_line(attributes.format('dead-interval {if_dead_interval}'))\n\n                    # router ospf 1\n                    #   area 0.0.0.0\n                    #     interface GigabitEthernet1\n                    #       retransmit-interval 10\n                    if attributes.value('if_retransmit_interval'):\n                        configurations.append_line(attributes.format('retransmit-interval {if_retransmit_interval}'))\n\n                    # router ospf 1\n                    #   area 0.0.0.0\n                    #     interface GigabitEthernet1\n                    #       security ttl\n                    #       security ttl hops 50\n                    if attributes.value('if_ttl_sec_enable'):\n\n                        # security ttl\n                        ttl_str = 'security ttl'\n\n                        # + hops 50\n                        if attributes.value('if_ttl_sec_hops'):\n                            ttl_str += ' hops {if_ttl_sec_hops}'\n\n                        configurations.append_line(attributes.format(ttl_str))\n\n                    # router ospf 1\n                    #   area 0.0.0.0\n                    #     interface GigabitEthernet1\n                    #       authentication message-digest keychain ottawa\n                    if attributes.value('if_auth_trailer_key_chain'):\n                        configurations.append_line(attributes.format('authentication message-digest keychain {if_auth_trailer_key_chain}'))\n\n                    # router ospf 1\n                    #   area 0.0.0.0\n                    #     interface GigabitEthernet1\n                    #       authentication\n                    #       authentication-key ABC\n                    #       authentication message-digest\n                    #       message-digest-key 1 md5 XYZ\n                    if attributes.value('if_auth_trailer_key_crypto_algorithm'):\n\n                        auth_type = attributes.value('if_auth_trailer_key_crypto_algorithm').value\n\n                        # authentication\n                        if auth_type == 'simple':\n                            configurations.append_line(attributes.format('authentication'))\n\n                            # authentication-key ABC\n                            if attributes.value('if_auth_trailer_key'):\n                                configurations.append_line(attributes.format('authentication-key {if_auth_trailer_key}'))\n\n                        # authentication message-digest\n                        elif auth_type == 'md5':\n                            configurations.append_line(attributes.format('authentication message-digest'))\n\n                            # message-digest-key 1 md5 anything\n                            if attributes.value('if_auth_trailer_key'):\n                                configurations.append_line(attributes.format('message-digest-key 1 md5 {if_auth_trailer_key}'))\n\n                    # router ospf 1\n                    #   area 0.0.0.0\n                    #     interface GigabitEthernet1\n                    #       mtu-ignore\n                    if attributes.value('if_mtu_ignore'):\n                        configurations.append_line(attributes.format('mtu-ignore'))\n\n                    # router ospf 1\n                    #   area 0.0.0.0\n                    #     interface GigabitEthernet1\n                    #       prefix-suppression\n                    if attributes.value('if_prefix_suppression'):\n                        configurations.append_line(attributes.format('prefix-suppression'))\n\n                return str(configurations)\n\n            def build_unconfig(self, apply=True, attributes=None, **kwargs):\n                return self.build_config(apply=apply, attributes=attributes,\n                                         unconfig=True, **kwargs)\n\n", "id": "1574577", "language": "Python", "matching_score": 9.39747428894043, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/ospf/iosxr/ospf.py" }, { "content": "''' \nOSPF Genie Conf Object Implementation for IOSXE - CLI.\n'''\n\n# Python\nfrom abc import ABC\n\n# Genie\nfrom genie.conf.base.attributes import AttributesHelper\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.config import CliConfig\n\n# Ospf\n# +- DeviceAttributes\n#     +- VrfAttributes\n#         +- AreaAttributes\n#             +- VirtualLinkAttributes\n#             +- ShamLinkAttributes\n#             +- InterfaceAttributes\n\n\nclass Ospf(ABC):\n\n    class DeviceAttributes(ABC):\n\n        def build_config(self, apply=True, 
attributes=None, unconfig=False,\n                         **kwargs):\n            assert not kwargs, kwargs\n            attributes = AttributesHelper(self, attributes)\n            configurations = CliConfigBuilder(unconfig=unconfig)\n            # Separate CliConfigBuilder for InterfaceAttribute configuration\n            self.interface_config = CliConfigBuilder(unconfig=unconfig)\n\n            # +- DeviceAttributes\n            #     +- VrfAttributes\n            for sub, attributes2 in attributes.mapping_values('vrf_attr', \n                                                              sort=True, \n                                                              keys=self.vrf_attr):\n                configurations.append_block(\n                    sub.build_config(apply=False,\n                                     attributes=attributes2,\n                                     unconfig=unconfig))\n\n            # Add InterfaceAttribute configuration\n            configurations.append_block(self.interface_config)\n\n            if apply:\n                if configurations:\n                    self.device.configure(configurations)\n            else:\n                return CliConfig(device=self.device, unconfig=unconfig,\n                                 cli_config=configurations)\n\n        def build_unconfig(self, apply=True, attributes=None, **kwargs):\n            return self.build_config(apply=apply, attributes=attributes,\n                                     unconfig=True, **kwargs)\n\n        # +- DeviceAttributes\n        #     +- VrfAttributes\n        class VrfAttributes(ABC):\n\n            def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n                assert not apply\n                attributes = AttributesHelper(self, attributes)\n                configurations = CliConfigBuilder(unconfig=unconfig)\n                self.interface_router_configurations = CliConfigBuilder(unconfig=unconfig)\n\n                # router ospf 1\n                # router ospf 1 vrf VRF1\n                with configurations.submode_context(\n                    attributes.format('router ospf {instance} vrf {vrf_name}', force=True) if self.vrf_name != 'default' else \\\n                    attributes.format('router ospf {instance}', force=True)):\n\n                    if unconfig and attributes.iswildcard:\n                        configurations.submode_unconfig()\n\n                    # router ospf 1\n                    #   shutdown/no shutdown\n                    if attributes.value('enable') is True:\n                        configurations.append_line(attributes.format('no shutdown'))\n                    elif attributes.value('enable') is False:\n                        configurations.append_line(attributes.format('shutdown'))\n\n                    # router ospf 1\n                    #   router-id 1.1.1.1\n                    if attributes.value('router_id'):\n                        configurations.append_line(attributes.format('router-id {router_id}'))\n\n                    # router ospf 1\n                    #   distance 110\n                    if attributes.value('pref_all'):\n                        configurations.append_line(attributes.format('distance {pref_all}'))\n\n                    # router ospf 1\n                    #   distance ospf inter-area 30 intra-area 40 external 50\n                    if attributes.value('pref_intra_area') or \\\n                       attributes.value('pref_inter_area') or \\\n                       attributes.value('pref_external'):\n\n                        # distance ospf\n                        dist_cfg_str = 'distance ospf'\n\n                        # If internal, overwrite intra with inter\n                        if attributes.value('pref_internal'):\n                            attributes.value('pref_intra_area').value = attributes.value('pref_inter_area').value\n\n                        # + intra-area {pref_intra_area}\n                        if attributes.value('pref_intra_area'):\n                            dist_cfg_str += ' intra-area {pref_intra_area}'\n                        \n                        # + inter-area {pref_inter_area}\n                        if attributes.value('pref_inter_area'):\n                            dist_cfg_str += ' inter-area {pref_inter_area}'\n                        \n                        # + external {pref_external}\n                        if attributes.value('pref_external'):\n                            dist_cfg_str += ' external {pref_external}'\n\n                        configurations.append_line(attributes.format(dist_cfg_str))\n\n                    # router ospf 1\n                    #   nsr\n                    if attributes.value('nsr_enable'):\n                        configurations.append_line(attributes.format('nsr'))\n\n                    # GracefulRestart attributes config\n                    for gr_key, attributes2 in attributes.sequence_values('gr_keys', sort=True):\n                        if unconfig:\n                            configurations.append_block(gr_key.build_unconfig(\n                                apply=False, attributes=attributes2, **kwargs))\n                        else:\n                            configurations.append_block(gr_key.build_config(\n                                apply=False, 
attributes=attributes2, **kwargs))\n \n # router ospf 1\n # mpls ldp autoconfig area 0.0.0.0\n if attributes.value('ldp_autoconfig'):\n \n # mpls ldp autoconfig\n ldp_str = 'mpls ldp autoconfig'\n\n # + area {ldp_auto_config_area_id}\n if attributes.value('ldp_auto_config_area_id'):\n ldp_str += ' area {ldp_auto_config_area_id}'\n\n configurations.append_line(attributes.format(ldp_str))\n\n # router ospf 1\n # mpls ldp sync\n if attributes.value('ldp_igp_sync'):\n configurations.append_line(attributes.format('mpls ldp sync'))\n\n # router ospf 1\n # redistribute bgp 100 metric 10 metric-type 1 subnets nssa-only tag 5 route-map ottawa\n if attributes.value('redist_bgp_id'):\n\n # redistribute bgp {redist_bgp_id}\n redist_bgp_str = 'redistribute bgp {redist_bgp_id}'\n\n # + metric {redist_bgp_metric}\n if attributes.value('redist_bgp_metric'):\n redist_bgp_str += ' metric {redist_bgp_metric}'\n\n # + metric-type {redist_bgp_metric_type}\n if attributes.value('redist_bgp_metric_type'):\n redist_type = attributes.value('redist_bgp_metric_type').value\n redist_bgp_str += ' metric-type {}'.format(redist_type)\n\n # + subnets\n if attributes.value('redist_bgp_subnets'):\n redist_bgp_str += ' subnets'\n\n # + nssa-only\n if attributes.value('redist_bgp_nssa_only'):\n redist_bgp_str += ' nssa-only'\n\n # + tag {redist_bgp_tag}\n if attributes.value('redist_bgp_tag'):\n redist_bgp_str += ' tag {redist_bgp_tag}'\n\n # + route-map {redist_bgp_route_map}\n if attributes.value('redist_bgp_route_map'):\n redist_bgp_str += ' route-map {redist_bgp_route_map}'\n\n configurations.append_line(attributes.format(redist_bgp_str))\n\n # router ospf 1\n # redistribute connected metric 10 route-map toronto\n if attributes.value('redist_connected'):\n\n # redistribute connected\n redist_connected_str = 'redistribute connected'\n\n # + metric {redist_connected_metric}\n if attributes.value('redist_connected_metric'):\n redist_connected_str += ' metric {redist_connected_metric}'\n\n # + route-map {redist_connected_route_policy}\n if attributes.value('redist_connected_route_policy'):\n redist_connected_str += ' route-map {redist_connected_route_policy}'\n\n configurations.append_line(attributes.format(redist_connected_str))\n\n # router ospf 1\n # redistribute static metric 10 route-map montreal\n if attributes.value('redist_static'):\n\n # redistribute static\n redist_static_str = 'redistribute static'\n\n # + metric {redist_static_metric}\n if attributes.value('redist_static_metric'):\n redist_static_str += ' metric {redist_static_metric}'\n\n # + route-map {redist_static_route_policy}\n if attributes.value('redist_static_route_policy'):\n redist_static_str += ' route-map {redist_static_route_policy}'\n\n configurations.append_line(attributes.format(redist_static_str))\n\n # router ospf 1\n # redistribute isis metric 10 route-map test\n if attributes.value('redist_isis'):\n\n # redistribute isis {redist_isis}\n redist_isis_str = 'redistribute isis {redist_isis}'\n\n # + metric {redist_isis_metric}\n if attributes.value('redist_isis_metric'):\n redist_isis_str += ' metric {redist_isis_metric}'\n\n # + route-map {redist_isis_route_policy}\n if attributes.value('redist_isis_route_policy'):\n redist_isis_str += ' route-map {redist_isis_route_policy}'\n\n configurations.append_line(attributes.format(redist_isis_str))\n\n # router ospf 1\n # redistribute maximum-prefix 10 50 warning-only\n if attributes.value('redist_max_prefix'):\n\n # redistribute maximum-prefix {redist_max_prefix}\n redist_maxpfx_str = 'redistribute 
maximum-prefix {redist_max_prefix}'\n\n                        # + {redist_max_prefix_thld}\n                        if attributes.value('redist_max_prefix_thld'):\n                            redist_maxpfx_str += ' {redist_max_prefix_thld}'\n\n                        # + warning-only\n                        if attributes.value('redist_max_prefix_warn_only'):\n                            redist_maxpfx_str += ' warning-only'\n\n                        configurations.append_line(attributes.format(redist_maxpfx_str))\n\n                    # router ospf 1\n                    #   bfd all-interfaces strict-mode\n                    if attributes.value('bfd_enable'):\n\n                        # bfd all-interfaces\n                        bfd_str = 'bfd all-interfaces'\n\n                        if attributes.value('bfd_strict_mode'):\n                            bfd_str += ' strict-mode'\n\n                        configurations.append_line(attributes.format(bfd_str))\n\n                    # router ospf 1\n                    #   mpls traffic-eng router-id Loopback0\n                    if attributes.value('te_router_id'):\n                        configurations.append_line(attributes.format('mpls traffic-eng router-id {te_router_id}'))\n\n                    # router ospf 1\n                    #   log-adjacency-changes\n                    #   log-adjacency-changes detail\n                    if attributes.value('log_adjacency_changes'):\n\n                        # log-adjacency-changes\n                        log_str = 'log-adjacency-changes'\n\n                        # + detail\n                        if attributes.value('log_adjacency_changes_detail'):\n                            log_str += ' detail'\n                        \n                        configurations.append_line(attributes.format(log_str))\n\n                    # router ospf 1\n                    #   adjacency stagger 563 1625\n                    if attributes.value('adjacency_stagger_initial_number'):\n                        \n                        # adjacency stagger {adjacency_stagger_initial_number}\n                        stagger_str = 'adjacency stagger {adjacency_stagger_initial_number}'\n\n                        # + {adjacency_stagger_maximum_number}\n                        if attributes.value('adjacency_stagger_maximum_number'):\n                            stagger_str += ' {adjacency_stagger_maximum_number}'\n\n                        configurations.append_line(attributes.format(stagger_str))\n\n                    # router ospf 1\n                    #   auto-cost\n                    #   auto-cost reference-bandwidth 60000\n                    if attributes.value('auto_cost_enable') is False:\n                        configurations.append_line(attributes.format('no auto-cost'))\n                    elif attributes.value('auto_cost_enable') is True:\n\n                        # auto-cost\n                        auto_cost_str = 'auto-cost'\n\n                        # + reference-bandwidth\n                        if attributes.value('auto_cost_reference_bandwidth'):\n                            auto_cost_str += ' reference-bandwidth'\n                            # Calculate bandwidth based on unit type\n                            if attributes.value('auto_cost_bandwidth_unit') and \\\n                               attributes.value('auto_cost_bandwidth_unit').value == 'gbps':\n                                bandwidth = str(attributes.value('auto_cost_reference_bandwidth') * 1000)\n                            else:\n                                bandwidth = attributes.value('auto_cost_reference_bandwidth')\n                            auto_cost_str += ' {}'.format(bandwidth)\n\n                        configurations.append_line(attributes.format(auto_cost_str))\n\n                    # router ospf 1\n                    #   maximum-paths 15\n                    if attributes.value('spf_paths'):\n                        configurations.append_line(attributes.format('maximum-paths {spf_paths}'))\n\n                    # router ospf 1\n                    #   timers throttle spf 5000 10000 20000\n                    if attributes.value('spf_start'):\n\n                        # timers throttle spf {spf_start}\n                        throttle_str = 'timers throttle spf {spf_start}'\n\n                        # + {spf_hold}\n                        if attributes.value('spf_hold'):\n                            throttle_str += ' {spf_hold}'\n\n                        # + {spf_maximum}\n                        if attributes.value('spf_maximum'):\n                            throttle_str += ' {spf_maximum}'\n\n                        configurations.append_line(attributes.format(throttle_str))\n\n                    # router ospf 1\n                    #   timers throttle lsa 5000 10000 20000\n                    if attributes.value('spf_lsa_start'):\n\n                        # timers throttle {spf_lsa_start}\n                        throttle_lsa = 'timers throttle lsa {spf_lsa_start}'\n\n                        # + {spf_lsa_hold}\n                        if attributes.value('spf_lsa_hold'):\n                            throttle_lsa += ' {spf_lsa_hold}'\n\n                        # + {spf_lsa_maximum}\n                        if attributes.value('spf_lsa_maximum'):\n                            throttle_lsa += ' {spf_lsa_maximum}'\n\n                        configurations.append_line(attributes.format(throttle_lsa))\n\n                    # router ospf 1\n                    #   max-lsa 56666666\n                    if attributes.value('db_ctrl_max_lsa'):\n                        
configurations.append_line(attributes.format('max-lsa {db_ctrl_max_lsa}'))\n\n # StubRouter attributes config\n for sr_key, attributes2 in attributes.sequence_values('sr_keys', sort=True):\n if unconfig:\n configurations.append_block(sr_key.build_unconfig(\n apply=False, attributes=attributes2, **kwargs))\n else:\n configurations.append_block(sr_key.build_config(\n apply=False, attributes=attributes2, **kwargs))\n\n # router ospf 1\n # default-information originate always\n if attributes.value('default_originate'):\n \n # + default-information originate\n default_originate_str = 'default-information originate'\n\n # + always\n if attributes.value('default_originate_always'):\n default_originate_str += ' always'\n\n configurations.append_line(attributes.format(default_originate_str))\n\n # +- DeviceAttributes\n # +- VrfAttributes\n # +- AreaAttributes\n for sub, attributes2 in attributes.mapping_values('area_attr', \n sort=True, \n keys=self.area_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n \n # Add interface configurations under router submode\n configurations.append_block(self.interface_router_configurations)\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n # +- DeviceAttributes\n # +- VrfAttributes\n # +- AreaAttributes\n class AreaAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # AreaNetwork attributes config\n for areanetwork_key, attributes2 in attributes.sequence_values('areanetwork_keys', sort=True):\n kwargs = {'area':self.area}\n if unconfig:\n configurations.append_block(areanetwork_key.build_unconfig(\n apply=False, attributes=attributes2, **kwargs))\n else:\n configurations.append_block(areanetwork_key.build_config(\n apply=False, attributes=attributes2, **kwargs))\n\n # router ospf 1\n # mpls traffic-eng area 2\n if attributes.value('area_te_enable'):\n configurations.append_line(attributes.format('mpls traffic-eng area {area}'))\n\n # router ospf 1\n # area 2 stub\n # area 2 nssa\n # area 2 stub no-summary\n # area 2 nssa no-summary\n if attributes.value('area_type').value != 'normal':\n # area 2 stub\n # area 2 nssa\n type_str = 'area {area}'\n atype = attributes.value('area_type').value\n type_str += ' {}'.format(atype)\n\n # + summary\n if attributes.value('summary') is False:\n type_str += ' no-summary'\n\n configurations.append_line(attributes.format(type_str))\n\n # router ospf 1\n # area 2 default-cost 100\n if attributes.value('default_cost'):\n configurations.append_line(attributes.format('area {area} default-cost {default_cost}'))\n\n # AreaRange attributes config\n for arearange_key, attributes2 in attributes.sequence_values('arearange_keys', sort=True):\n kwargs = {'area':self.area}\n if unconfig:\n configurations.append_block(arearange_key.build_unconfig(\n apply=False, attributes=attributes2, **kwargs))\n else:\n configurations.append_block(arearange_key.build_config(\n apply=False, attributes=attributes2, **kwargs))\n\n # +- DeviceAttributes\n # +- VrfAttributes\n # +- AreaAttributes\n # +- VirtualLinkAttributes\n for sub, attributes2 in attributes.mapping_values('virtual_link_attr', \n sort=True, \n keys=self.virtual_link_attr):\n configurations.append_block(\n 
sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n\n # +- DeviceAttributes\n # +- VrfAttributes\n # +- AreaAttributes\n # +- ShamLinkAttributes\n for sub, attributes2 in attributes.mapping_values('sham_link_attr', \n sort=True, \n keys=self.sham_link_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n\n # +- DeviceAttributes\n # +- VrfAttributes\n # +- AreaAttributes\n # +- InterfaceAttributes\n interface_config = CliConfigBuilder(unconfig=unconfig)\n for sub, attributes2 in attributes.mapping_values('interface_attr', \n sort=True, \n keys=self.interface_attr):\n interface_config.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n\n self.parent.parent.interface_config.append_block(interface_config)\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n # +- DeviceAttributes\n # +- VrfAttributes\n # +- AreaAttributes\n # +- VirtualLinkAttributes\n class VirtualLinkAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # router ospf 1\n # area 2 virtual-link 7.7.7.7\n if attributes.value('vl_router_id'):\n configurations.append_line(attributes.format('area {area} virtual-link {vl_router_id}'))\n\n # router ospf 1\n # area 2 virtual-link 7.7.7.7 hello-interval 55\n if attributes.value('vl_hello_interval'):\n configurations.append_line(attributes.format('area {area} virtual-link {vl_router_id} hello-interval {vl_hello_interval}'))\n\n # router ospf 1\n # area 2 virtual-link 7.7.7.7 dead-interval 55\n if attributes.value('vl_dead_interval'):\n configurations.append_line(attributes.format('area {area} virtual-link {vl_router_id} dead-interval {vl_dead_interval}'))\n\n # router ospf 1\n # area 2 virtual-link 7.7.7.7 retransmit-interval 55\n if attributes.value('vl_retransmit_interval'):\n configurations.append_line(attributes.format('area {area} virtual-link {vl_router_id} retransmit-interval {vl_retransmit_interval}'))\n\n # router ospf 1\n # area 2 virtual-link 7.7.7.7 transmit-delay 55\n if attributes.value('vl_transmit_delay'):\n configurations.append_line(attributes.format('area {area} virtual-link {vl_router_id} transmit-delay {vl_transmit_delay}'))\n\n # router ospf 1\n # area 2 virtual-link 7.7.7.7 ttl-security hops 163\n if attributes.value('vl_ttl_sec_hops'):\n configurations.append_line(attributes.format('area {area} virtual-link {vl_router_id} ttl-security hops {vl_ttl_sec_hops}'))\n\n # router ospf 1\n # area 2 virtual_link 7.7.7.7 authentication key-chain ottawa\n if attributes.value('vl_auth_trailer_key_chain'):\n configurations.append_line(attributes.format('area {area} virtual-link {vl_router_id} authentication key-chain {vl_auth_trailer_key_chain}'))\n\n # router ospf 1\n # area 2 virtual-link 7.7.7.7 authentication\n # area 2 virtual-link 7.7.7.7 authentication-key anything\n # area 2 virtual_link 7.7.7.7 authentication message-digest\n # area 2 virtual_link 7.7.7.7 message-digest-key 1 md5 anything\n if attributes.value('vl_auth_trailer_key_crypto_algorithm'):\n\n auth_type = attributes.value('vl_auth_trailer_key_crypto_algorithm').value\n\n # area 2 virtual-link 7.7.7.7 authentication\n if auth_type == 'simple':\n 
configurations.append_line(attributes.format('area {area} virtual-link {vl_router_id} authentication'))\n\n # area 2 virtual-link 7.7.7.7 authentication-key anything\n if attributes.value('vl_auth_trailer_key'):\n configurations.append_line(attributes.format('area {area} virtual-link {vl_router_id} authentication-key {vl_auth_trailer_key}'))\n\n # area 2 virtual_link 7.7.7.7 authentication message-digest\n elif auth_type == 'md5':\n configurations.append_line(attributes.format('area {area} virtual-link {vl_router_id} authentication message-digest'))\n\n # area 2 virtual_link 7.7.7.7 message-digest-key 1 md5 anything\n if attributes.value('vl_auth_trailer_key'):\n configurations.append_line(attributes.format('area {area} virtual-link {vl_router_id} message-digest-key 1 md5 {vl_auth_trailer_key}'))\n\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n # +- DeviceAttributes\n # +- VrfAttributes\n # +- AreaAttributes\n # +- ShamLinkAttributes\n class ShamLinkAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # router ospf 1\n # area 2 sham-link 1.1.1.1 2.2.2.2\n if attributes.value('sl_local_id') and attributes.value('sl_remote_id'):\n configurations.append_line(attributes.format('area {area} sham-link {sl_local_id} {sl_remote_id}'))\n\n # router ospf 1\n # area 2 sham-link 1.1.1.1 2.2.2.2 ttl-security hops 163\n if attributes.value('sl_ttl_sec_hops'):\n configurations.append_line(attributes.format('area {area} sham-link {sl_local_id} {sl_remote_id} ttl-security hops {sl_ttl_sec_hops}'))\n\n # router ospf 1\n # area 2 sham-link 1.1.1.1 2.2.2.2 cost 10\n if attributes.value('sl_cost'):\n configurations.append_line(attributes.format('area {area} sham-link {sl_local_id} {sl_remote_id} cost {sl_cost}'))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n # +- DeviceAttributes\n # +- VrfAttributes\n # +- AreaAttributes\n # +- InterfaceAttributes\n class InterfaceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n intf_rtr_cfgs = CliConfigBuilder(unconfig=unconfig)\n\n # passive-interface GigabitEthernet1\n if attributes.value('if_passive'):\n intf_rtr_cfgs.append_line(attributes.format('passive-interface {interface_name}'))\n\n # InterfaceStaticNeighbor attributes config\n for intf_staticnbr_key, attributes2 in attributes.sequence_values('intf_staticnbr_keys', sort=True):\n if unconfig:\n intf_rtr_cfgs.append_block(intf_staticnbr_key.build_unconfig(\n apply=False, attributes=attributes2, **kwargs))\n else:\n intf_rtr_cfgs.append_block(intf_staticnbr_key.build_config(\n apply=False, attributes=attributes2, **kwargs))\n\n # Add intf_rtr_cfgs to VrfAttributes\n self.parent.parent.interface_router_configurations.append_block(intf_rtr_cfgs)\n\n # interface GigabitEthernet1\n with configurations.submode_context(\n attributes.format('interface {interface_name}', force=True)):\n\n # interface GigabitEthernet1\n # ip ospf 1 area 2\n if attributes.value('if_admin_control'):\n 
configurations.append_line(attributes.format('ip ospf {instance} area {area}'))\n\n # interface GigabitEthernet1\n # ip ospf cost 100\n if attributes.value('if_cost'):\n configurations.append_line(attributes.format('ip ospf cost {if_cost}'))\n\n # interface GigabitEthernet1\n # ip ospf network point-to-point\n if attributes.value('if_type'):\n iftype = attributes.value('if_type').value\n configurations.append_line(attributes.format('ip ospf network {}'.format(iftype)))\n\n # interface GigabitEthernet1\n # ip ospf demand-circuit\n if attributes.value('if_demand_circuit'):\n configurations.append_line(attributes.format('ip ospf demand-circuit'))\n\n # interface GigabitEthernet1\n # ip ospf priority 100\n if attributes.value('if_priority'):\n configurations.append_line(attributes.format('ip ospf priority {if_priority}'))\n\n # interface GigabitEthernet1\n # ip ospf bfd\n if attributes.value('if_bfd_enable'):\n configurations.append_line(attributes.format('ip ospf bfd'))\n\n # interface GigabitEthernet1\n # bfd interval 50 min_rx 60 multiplier 2\n if attributes.value('if_bfd_interval'):\n\n # bfd interval [if_bfd_interval]\n bfd_str = 'bfd interval {if_bfd_interval}'\n\n # + min_rx {if_bfd_min_interval}\n if attributes.value('if_bfd_min_interval'):\n bfd_str += ' min_rx {if_bfd_min_interval}'\n\n # + multiplier {if_bfd_multiplier}\n if attributes.value('if_bfd_multiplier'):\n bfd_str += ' multiplier {if_bfd_multiplier}'\n\n configurations.append_line(attributes.format(bfd_str))\n\n # interface GigabitEthernet1\n # ip ospf hello-interval 10\n if attributes.value('if_hello_interval'):\n configurations.append_line(attributes.format('ip ospf hello-interval {if_hello_interval}'))\n\n # interface GigabitEthernet1\n # ip ospf dead-interval 10\n if attributes.value('if_dead_interval'):\n configurations.append_line(attributes.format('ip ospf dead-interval {if_dead_interval}'))\n\n # interface GigabitEthernet1\n # ip ospf retransmit-interval 10\n if attributes.value('if_retransmit_interval'):\n configurations.append_line(attributes.format('ip ospf retransmit-interval {if_retransmit_interval}'))\n\n # interface GigabitEthernet1\n # ip ospf lls\n # ip ospf lls disable\n if attributes.value('if_lls') is True:\n configurations.append_line(attributes.format('ip ospf lls'))\n elif attributes.value('if_lls') is False:\n configurations.append_line(attributes.format('ip ospf lls disable'))\n\n # interface GigabitEthernet1\n # ip ospf ttl-security hops 50\n if attributes.value('if_ttl_sec_enable'):\n\n # ip ospf ttl-security\n ttl_str = 'ip ospf ttl-security'\n\n # + hops 50\n if attributes.value('if_ttl_sec_hops'):\n ttl_str += ' hops {if_ttl_sec_hops}'\n\n configurations.append_line(attributes.format(ttl_str))\n\n # interface GigabitEthernet1\n # ip ospf authentication key-chain ottawa\n if attributes.value('if_auth_trailer_key_chain'):\n configurations.append_line(attributes.format('ip ospf authentication key-chain {if_auth_trailer_key_chain}'))\n\n # interface GigabitEthernet1\n # ip ospf authentication\n # ip ospf authentication-key anything\n # ip ospf authentication message-digest\n # ip ospf message-digest-key 1 md5 anything\n if attributes.value('if_auth_trailer_key_crypto_algorithm'):\n\n auth_type = attributes.value('if_auth_trailer_key_crypto_algorithm').value\n\n # ip ospf authentication\n if auth_type == 'simple':\n configurations.append_line(attributes.format('ip ospf authentication'))\n\n # ip ospf authentication-key anything\n if attributes.value('if_auth_trailer_key'):\n 
configurations.append_line(attributes.format('ip ospf authentication-key {if_auth_trailer_key}'))\n\n                    # ip ospf authentication message-digest\n                    elif auth_type == 'md5':\n                        configurations.append_line(attributes.format('ip ospf authentication message-digest'))\n\n                        # ip ospf message-digest-key 1 md5 anything\n                        if attributes.value('if_auth_trailer_key'):\n                            configurations.append_line(attributes.format('ip ospf message-digest-key 1 md5 {if_auth_trailer_key}'))\n\n                # interface GigabitEthernet1\n                #   ip ospf mtu-ignore\n                if attributes.value('if_mtu_ignore'):\n                    configurations.append_line(attributes.format('ip ospf mtu-ignore'))\n\n                # interface GigabitEthernet1\n                #   ip ospf prefix-suppression\n                if attributes.value('if_prefix_suppression'):\n                    configurations.append_line(attributes.format('ip ospf prefix-suppression'))\n\n                return str(configurations)\n\n            def build_unconfig(self, apply=True, attributes=None, **kwargs):\n                return self.build_config(apply=apply, attributes=attributes,\n                                         unconfig=True, **kwargs)\n\n", "id": "1048698", "language": "Python", "matching_score": 8.826650619506836, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/ospf/iosxe/ospf.py" }, { "content": "''' \nOSPF Genie Conf Object Implementation for NXOS - CLI.\n'''\n\n# Python\nfrom abc import ABC\n\n# Genie\nfrom genie.conf.base.attributes import AttributesHelper\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.config import CliConfig\n\n# Ospf\n# +- DeviceAttributes\n#     +- VrfAttributes\n#         +- AreaAttributes\n#             +- VirtualLinkAttributes\n#             +- ShamLinkAttributes\n#             +- InterfaceAttributes\n\n\nclass Ospf(ABC):\n\n    class DeviceAttributes(ABC):\n\n        def build_config(self, apply=True, attributes=None, unconfig=False,\n                         **kwargs):\n            assert not kwargs, kwargs\n            attributes = AttributesHelper(self, attributes)\n            configurations = CliConfigBuilder(unconfig=unconfig)\n            # Separate CliConfigBuilder for InterfaceAttribute configuration\n            self.interface_config = CliConfigBuilder(unconfig=unconfig)\n\n            # feature ospf\n            if attributes.value('enabled'):\n                if unconfig is False:\n                    configurations.append_line(\n                        attributes.format('feature ospf'))\n\n                # Make sure that only enabled was provided in attributes\n                # If wildcard, then delete everything\n                elif unconfig is True and\\\n                     attributes.attributes == {'enabled': {True: None}} or \\\n                     attributes.iswildcard:\n                    configurations.append_line('no feature ospf', raw=True)\n                    if apply:\n                        if configurations:\n                            self.device.configure(configurations)\n                    else:\n                        return CliConfig(device=self.device, unconfig=unconfig,\n                                         cli_config=configurations)\n\n            # +- DeviceAttributes\n            #     +- VrfAttributes\n            for sub, attributes2 in attributes.mapping_values('vrf_attr', \n                                                              sort=True, \n                                                              keys=self.vrf_attr):\n                configurations.append_block(\n                    sub.build_config(apply=False,\n                                     attributes=attributes2,\n                                     unconfig=unconfig))\n\n            # Add InterfaceAttribute configuration\n            configurations.append_block(self.interface_config)\n\n            if apply:\n                if configurations:\n                    self.device.configure(configurations)\n            else:\n                return CliConfig(device=self.device, unconfig=unconfig,\n                                 cli_config=configurations)\n\n        def build_unconfig(self, apply=True, attributes=None, **kwargs):\n            return self.build_config(apply=apply, attributes=attributes,\n                                     unconfig=True, **kwargs)\n\n        # +- DeviceAttributes\n        #     +- VrfAttributes\n        class VrfAttributes(ABC):\n\n            def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n                assert not apply\n                attributes = AttributesHelper(self, attributes)\n                configurations = 
CliConfigBuilder(unconfig=unconfig)\n\n # router ospf 1\n with configurations.submode_context(\n attributes.format('router ospf {instance}', force=True)):\n\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n # router ospf 1\n # vrf VRF1\n if self.vrf_name != 'default':\n with configurations.submode_context(\n attributes.format('vrf {vrf_name}', force=True) ):\n\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n # router ospf 1\n # vrf VRF1\n # shutdown/no shutdown\n if attributes.value('enable') is False:\n configurations.append_line(attributes.format('shutdown'))\n elif attributes.value('enable'):\n configurations.append_line(attributes.format('no shutdown'))\n\n # router ospf 1\n # vrf VRF1\n # router-id 1.1.1.1\n if attributes.value('router_id'):\n configurations.append_line(attributes.format('router-id {router_id}'))\n\n # router ospf 1\n # distance 110\n if attributes.value('pref_all'):\n configurations.append_line(attributes.format('distance {pref_all}'))\n\n # GracefulRestart attributes config\n for gr_key, attributes2 in attributes.sequence_values('gr_keys', sort=True):\n if unconfig:\n configurations.append_block(gr_key.build_unconfig(\n apply=False, attributes=attributes2, **kwargs))\n else:\n configurations.append_block(gr_key.build_config(\n apply=False, attributes=attributes2, **kwargs))\n \n # router ospf 1\n # mpls ldp autoconfig area 0.0.0.0\n if attributes.value('ldp_autoconfig') and \\\n attributes.value('ldp_auto_config_area_id'):\n configurations.append_line(attributes.format(\n 'mpls ldp autoconfig area {ldp_auto_config_area_id}'))\n\n # router ospf 1\n # mpls ldp sync\n if attributes.value('ldp_igp_sync'):\n configurations.append_line(attributes.format('mpls ldp sync'))\n\n # router ospf 1\n # redistribute bgp 100 route-map ottawa\n if attributes.value('redist_bgp_id') and \\\n attributes.value('redist_bgp_route_map'):\n configurations.append_line(attributes.format(\n 'redistribute bgp {redist_bgp_id} route-map {redist_bgp_route_map}'))\n\n # router ospf 1\n # redistribute direct route-map toronto\n if attributes.value('redist_connected') and \\\n attributes.value('redist_connected_route_policy'):\n configurations.append_line(attributes.format(\n 'redistribute direct route-map {redist_connected_route_policy}'))\n\n # router ospf 1\n # redistribute static route-map montreal\n if attributes.value('redist_static') and \\\n attributes.value('redist_static_route_policy'):\n configurations.append_line(attributes.format(\n 'redistribute static route-map {redist_static_route_policy}'))\n\n # router ospf 1\n # redistribute isis ABC route-map nowhere\n if attributes.value('redist_isis') and \\\n attributes.value('redist_isis_route_policy'):\n configurations.append_line(attributes.format(\n 'redistribute isis {redist_isis} route-map {redist_isis_route_policy}'))\n\n # router ospf 1\n # redistribute maximum-prefix 123 10 warning-only\n if attributes.value('redist_max_prefix'):\n\n # redistribute maximum-prefix {redist_max_prefix}\n redist_maxpfx_str = 'redistribute maximum-prefix {redist_max_prefix}'\n\n # + {redist_max_prefix_thld}\n if attributes.value('redist_max_prefix_thld'):\n redist_maxpfx_str += ' {redist_max_prefix_thld}'\n\n # + warning-only\n if attributes.value('redist_max_prefix_warn_only'):\n redist_maxpfx_str += ' warning-only'\n\n configurations.append_line(attributes.format(redist_maxpfx_str))\n\n # router ospf 1\n # bfd\n if attributes.value('bfd_enable'):\n 
configurations.append_line(attributes.format('bfd'))\n\n # router ospf 1\n # mpls traffic-eng router-id Loopback0\n if attributes.value('te_router_id'):\n configurations.append_line(attributes.format('mpls traffic-eng router-id {te_router_id}'))\n\n # router ospf 1\n # log-adjacency-changes\n # log-adjacency-changes detail\n if attributes.value('log_adjacency_changes'):\n\n # log-adjacency-changes\n log_str = 'log-adjacency-changes'\n\n # + detail\n if attributes.value('log_adjacency_changes_detail'):\n log_str += ' detail'\n \n configurations.append_line(attributes.format(log_str))\n\n # router ospf 1\n # auto-cost reference-bandwidth 60000\n # auto-cost reference-bandwidth 60000 mbps\n # auto-cost reference-bandwidth 60000 gbps\n if attributes.value('auto_cost_enable') is False:\n configurations.append_line(attributes.format('no auto-cost reference-bandwidth'))\n elif attributes.value('auto_cost_enable') is True and \\\n attributes.value('auto_cost_reference_bandwidth'):\n\n # auto-cost reference-bandwidth {auto_cost_reference_bandwidth}\n auto_cost_str = 'auto-cost reference-bandwidth {auto_cost_reference_bandwidth}'\n\n # + {auto_cost_bandwidth_unit}\n if attributes.value('auto_cost_bandwidth_unit'):\n unit = attributes.value('auto_cost_bandwidth_unit').value\n auto_cost_str += ' {}'.format(unit)\n\n configurations.append_line(attributes.format(auto_cost_str))\n\n # router ospf 1\n # maximum-paths 15\n if attributes.value('spf_paths'):\n configurations.append_line(attributes.format('maximum-paths {spf_paths}'))\n\n # router ospf 1\n # timers throttle spf 5000 10000 20000\n if attributes.value('spf_start'):\n\n # timers throttle spf {spf_start}\n throttle_str = 'timers throttle spf {spf_start}'\n\n # + {spf_hold}\n if attributes.value('spf_hold'):\n throttle_str += ' {spf_hold}'\n\n # + {spf_maximum}\n if attributes.value('spf_maximum'):\n throttle_str += ' {spf_maximum}'\n\n configurations.append_line(attributes.format(throttle_str))\n\n # router ospf 1\n # timers throttle lsa 5000 10000 20000\n if attributes.value('spf_lsa_start'):\n\n # timers throttle {spf_lsa_start}\n throttle_lsa = 'timers throttle lsa {spf_lsa_start}'\n\n # + {spf_lsa_hold}\n if attributes.value('spf_lsa_hold'):\n throttle_lsa += ' {spf_lsa_hold}'\n\n # + {spf_lsa_maximum}\n if attributes.value('spf_lsa_maximum'):\n throttle_lsa += ' {spf_lsa_maximum}'\n\n configurations.append_line(attributes.format(throttle_lsa))\n\n # router ospf 1\n # max-lsa 56666666\n if attributes.value('db_ctrl_max_lsa'):\n configurations.append_line(attributes.format('max-lsa {db_ctrl_max_lsa}'))\n\n # StubRouter attributes config\n for sr_key, attributes2 in attributes.sequence_values('sr_keys', sort=True):\n if unconfig:\n configurations.append_block(sr_key.build_unconfig(\n apply=False, attributes=attributes2, **kwargs))\n else:\n configurations.append_block(sr_key.build_config(\n apply=False, attributes=attributes2, **kwargs))\n\n # router ospf 1\n # default-information originate always\n if attributes.value('default_originate'):\n \n # + default-information originate\n default_originate_str = 'default-information originate'\n\n # + always\n if attributes.value('default_originate_always'):\n default_originate_str += ' always'\n\n configurations.append_line(attributes.format(default_originate_str))\n\n # +- DeviceAttributes\n # +- VrfAttributes\n # +- AreaAttributes\n for sub, attributes2 in attributes.mapping_values('area_attr', \n sort=True, \n keys=self.area_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n 
attributes=attributes2,\n unconfig=unconfig))\n else:\n # router ospf 1\n # shutdown/no shutdown\n if attributes.value('enable') is False:\n configurations.append_line(attributes.format('shutdown'))\n elif attributes.value('enable'):\n configurations.append_line(attributes.format('no shutdown'))\n\n # router ospf 1\n # router-id 1.1.1.1\n if attributes.value('router_id'):\n configurations.append_line(attributes.format('router-id {router_id}'))\n\n # router ospf 1\n # distance 110\n if attributes.value('pref_all'):\n configurations.append_line(attributes.format('distance {pref_all}'))\n\n # GracefulRestart attributes config\n for gr_key, attributes2 in attributes.sequence_values('gr_keys', sort=True):\n # Skip Cisco-proprietary graceful-restart entries ('gr_type' is assumed\n # to be the type attribute exposed by the key)\n gr_type = attributes2.value('gr_type')\n if gr_type is not None and gr_type.value == 'cisco':\n continue\n if unconfig:\n configurations.append_block(gr_key.build_unconfig(\n apply=False, attributes=attributes2, **kwargs))\n else:\n configurations.append_block(gr_key.build_config(\n apply=False, attributes=attributes2, **kwargs))\n\n # router ospf 1\n # mpls ldp autoconfig area 0.0.0.0\n if attributes.value('ldp_autoconfig') and \\\n attributes.value('ldp_auto_config_area_id'):\n configurations.append_line(attributes.format(\n 'mpls ldp autoconfig area {ldp_auto_config_area_id}'))\n\n # router ospf 1\n # mpls ldp sync\n if attributes.value('ldp_igp_sync'):\n configurations.append_line(attributes.format('mpls ldp sync'))\n\n # router ospf 1\n # redistribute bgp 100 route-map ottawa\n if attributes.value('redist_bgp_id') and \\\n attributes.value('redist_bgp_route_map'):\n configurations.append_line(attributes.format(\n 'redistribute bgp {redist_bgp_id} route-map {redist_bgp_route_map}'))\n\n # router ospf 1\n # redistribute direct route-map toronto\n if attributes.value('redist_connected') and \\\n attributes.value('redist_connected_route_policy'):\n configurations.append_line(attributes.format(\n 'redistribute direct route-map {redist_connected_route_policy}'))\n\n # router ospf 1\n # redistribute static route-map montreal\n if attributes.value('redist_static') and \\\n attributes.value('redist_static_route_policy'):\n configurations.append_line(attributes.format(\n 'redistribute static route-map {redist_static_route_policy}'))\n\n # router ospf 1\n # redistribute isis ABC route-map nowhere\n if attributes.value('redist_isis') and \\\n attributes.value('redist_isis_route_policy'):\n configurations.append_line(attributes.format(\n 'redistribute isis {redist_isis} route-map {redist_isis_route_policy}'))\n\n # router ospf 1\n # redistribute maximum-prefix 123 10 warning-only\n if attributes.value('redist_max_prefix'):\n\n # redistribute maximum-prefix {redist_max_prefix}\n redist_maxpfx_str = 'redistribute maximum-prefix {redist_max_prefix}'\n\n # + {redist_max_prefix_thld}\n if attributes.value('redist_max_prefix_thld'):\n redist_maxpfx_str += ' {redist_max_prefix_thld}'\n\n # + warning-only\n if attributes.value('redist_max_prefix_warn_only'):\n redist_maxpfx_str += ' warning-only'\n\n configurations.append_line(attributes.format(redist_maxpfx_str))\n\n # router ospf 1\n # bfd\n if attributes.value('bfd_enable'):\n configurations.append_line(attributes.format('bfd'))\n\n # router ospf 1\n # mpls traffic-eng router-id Loopback0\n if attributes.value('te_router_id'):\n configurations.append_line(attributes.format('mpls traffic-eng router-id {te_router_id}'))\n\n # router ospf 1\n # log-adjacency-changes\n # log-adjacency-changes detail\n if attributes.value('log_adjacency_changes'):\n\n # log-adjacency-changes\n log_str = 
'log-adjacency-changes'\n\n # + detail\n if attributes.value('log_adjacency_changes_detail'):\n log_str += ' detail'\n \n configurations.append_line(attributes.format(log_str))\n\n # router ospf 1\n # auto-cost reference-bandwidth 60000\n # auto-cost reference-bandwidth 60000 mbps\n # auto-cost reference-bandwidth 60000 gbps\n if attributes.value('auto_cost_enable') is False:\n configurations.append_line(attributes.format('no auto-cost reference-bandwidth'))\n elif attributes.value('auto_cost_enable') is True and \\\n attributes.value('auto_cost_reference_bandwidth'):\n\n # auto-cost reference-bandwidth {auto_cost_reference_bandwidth}\n auto_cost_str = 'auto-cost reference-bandwidth {auto_cost_reference_bandwidth}'\n\n # + {auto_cost_bandwidth_unit}\n if attributes.value('auto_cost_bandwidth_unit'):\n unit = attributes.value('auto_cost_bandwidth_unit').value\n auto_cost_str += ' {}'.format(unit)\n\n configurations.append_line(attributes.format(auto_cost_str))\n\n # router ospf 1\n # maximum-paths 15\n if attributes.value('spf_paths'):\n configurations.append_line(attributes.format('maximum-paths {spf_paths}'))\n\n # router ospf 1\n # timers throttle spf 5000 10000 20000\n if attributes.value('spf_start'):\n\n # timers throttle spf {spf_start}\n throttle_str = 'timers throttle spf {spf_start}'\n\n # + {spf_hold}\n if attributes.value('spf_hold'):\n throttle_str += ' {spf_hold}'\n\n # + {spf_maximum}\n if attributes.value('spf_maximum'):\n throttle_str += ' {spf_maximum}'\n\n configurations.append_line(attributes.format(throttle_str))\n\n # router ospf 1\n # timers throttle lsa 5000 10000 20000\n if attributes.value('spf_lsa_start'):\n\n # timers throttle {spf_lsa_start}\n throttle_lsa = 'timers throttle lsa {spf_lsa_start}'\n\n # + {spf_lsa_hold}\n if attributes.value('spf_lsa_hold'):\n throttle_lsa += ' {spf_lsa_hold}'\n\n # + {spf_lsa_maximum}\n if attributes.value('spf_lsa_maximum'):\n throttle_lsa += ' {spf_lsa_maximum}'\n\n configurations.append_line(attributes.format(throttle_lsa))\n\n # router ospf 1\n # max-lsa 56666666\n if attributes.value('db_ctrl_max_lsa'):\n configurations.append_line(attributes.format('max-lsa {db_ctrl_max_lsa}'))\n\n # StubRouter attributes config\n for sr_key, attributes2 in attributes.sequence_values('sr_keys', sort=True):\n if unconfig:\n configurations.append_block(sr_key.build_unconfig(\n apply=False, attributes=attributes2, **kwargs))\n else:\n configurations.append_block(sr_key.build_config(\n apply=False, attributes=attributes2, **kwargs))\n\n # router ospf 1\n # default-information originate always\n if attributes.value('default_originate'):\n \n # + default-information originate\n default_originate_str = 'default-information originate'\n\n # + always\n if attributes.value('default_originate_always'):\n default_originate_str += ' always'\n\n configurations.append_line(attributes.format(default_originate_str))\n\n # +- DeviceAttributes\n # +- VrfAttributes\n # +- AreaAttributes\n for sub, attributes2 in attributes.mapping_values('area_attr', \n sort=True, \n keys=self.area_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n # +- DeviceAttributes\n # +- VrfAttributes\n # +- AreaAttributes\n class AreaAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False, 
**kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # router ospf 1\n # mpls traffic-eng area 2\n if attributes.value('area_te_enable') and self.vrf_name == 'default':\n configurations.append_line(attributes.format('mpls traffic-eng area {area}'))\n\n # router ospf 1\n # area 2 stub\n # area 2 nssa\n # area 2 stub no-summary\n # area 2 nssa no-summary\n if attributes.value('area_type').value != 'normal':\n # area 2 stub\n # area 2 nssa\n type_str = 'area {area}'\n atype = attributes.value('area_type').value\n type_str += ' {}'.format(atype)\n\n # + no-summary\n if attributes.value('summary') is False:\n type_str += ' no-summary'\n\n configurations.append_line(attributes.format(type_str))\n\n # router ospf 1\n # area 2 default-cost 100\n if attributes.value('default_cost'):\n configurations.append_line(attributes.format('area {area} default-cost {default_cost}'))\n\n # AreaRange attributes config\n for arearange_key, attributes2 in attributes.sequence_values('arearange_keys', sort=True):\n kwargs = {'area':self.area}\n if unconfig:\n configurations.append_block(arearange_key.build_unconfig(\n apply=False, attributes=attributes2, **kwargs))\n else:\n configurations.append_block(arearange_key.build_config(\n apply=False, attributes=attributes2, **kwargs))\n\n # +- DeviceAttributes\n # +- VrfAttributes\n # +- AreaAttributes\n # +- VirtualLinkAttributes\n for sub, attributes2 in attributes.mapping_values('virtual_link_attr', \n sort=True, \n keys=self.virtual_link_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n\n # +- DeviceAttributes\n # +- VrfAttributes\n # +- AreaAttributes\n # +- ShamLinkAttributes\n for sub, attributes2 in attributes.mapping_values('sham_link_attr', \n sort=True, \n keys=self.sham_link_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n\n # +- DeviceAttributes\n # +- VrfAttributes\n # +- AreaAttributes\n # +- InterfaceAttributes\n interface_config = CliConfigBuilder(unconfig=unconfig)\n for sub, attributes2 in attributes.mapping_values('interface_attr', \n sort=True, \n keys=self.interface_attr):\n interface_config.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n\n self.parent.parent.interface_config.append_block(interface_config)\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n # +- DeviceAttributes\n # +- VrfAttributes\n # +- AreaAttributes\n # +- VirtualLinkAttributes\n class VirtualLinkAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # router ospf 1\n # area 2 virtual-link 7.7.7.7\n if attributes.value('vl_router_id') and \\\n attributes.value('area_type') != 'stub' and \\\n attributes.value('area_type') != 'nssa':\n with configurations.submode_context(\n attributes.format('area {area} virtual-link {vl_router_id}', force=True)):\n\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n # router ospf 1\n # area 2 virtual-link 7.7.7.7\n # hello-interval 55\n if attributes.value('vl_hello_interval'):\n configurations.append_line(attributes.format('hello-interval 
{vl_hello_interval}'))\n\n # router ospf 1\n # area 2 virtual-link 7.7.7.7\n # dead-interval 55\n if attributes.value('vl_dead_interval'):\n configurations.append_line(attributes.format('dead-interval {vl_dead_interval}'))\n\n # router ospf 1\n # area 2 virtual-link 7.7.7.7\n # retransmit-interval 55\n if attributes.value('vl_retransmit_interval'):\n configurations.append_line(attributes.format('retransmit-interval {vl_retransmit_interval}'))\n\n # router ospf 1\n # area 2 virtual-link 7.7.7.7\n # transmit-delay 55\n if attributes.value('vl_transmit_delay'):\n configurations.append_line(attributes.format('transmit-delay {vl_transmit_delay}'))\n\n # router ospf 1\n # area 2 virtual-link 7.7.7.7\n # authentication key-chain \"ottawa\"\n if attributes.value('vl_auth_trailer_key_chain'):\n configurations.append_line(attributes.format('authentication key-chain {vl_auth_trailer_key_chain}'))\n\n # router ospf 1\n # area 2 virtual-link 7.7.7.7\n # authentication\n # authentication-key anything\n # authentication message-digest\n # message-digest-key 1 md5 anything\n if attributes.value('vl_auth_trailer_key_crypto_algorithm'):\n\n auth_type = attributes.value('vl_auth_trailer_key_crypto_algorithm').value\n\n # area 2 virtual-link 7.7.7.7 authentication\n if auth_type == 'simple':\n configurations.append_line(attributes.format('authentication'))\n\n # area 2 virtual-link 7.7.7.7 authentication-key anything\n if attributes.value('vl_auth_trailer_key'):\n configurations.append_line(attributes.format('authentication-key {vl_auth_trailer_key}'))\n\n # area 2 virtual-link 7.7.7.7 authentication message-digest\n elif auth_type == 'md5':\n configurations.append_line(attributes.format('authentication message-digest'))\n\n # area 2 virtual-link 7.7.7.7 message-digest-key 1 md5 anything\n if attributes.value('vl_auth_trailer_key'):\n configurations.append_line(attributes.format('message-digest-key 1 md5 {vl_auth_trailer_key}'))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n # +- DeviceAttributes\n # +- VrfAttributes\n # +- AreaAttributes\n # +- ShamLinkAttributes\n class ShamLinkAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # router ospf 1\n # area 2 sham-link 1.1.1.1 2.2.2.2\n if self.vrf_name != 'default' and attributes.value('sl_local_id') and attributes.value('sl_remote_id'):\n with configurations.submode_context(\n attributes.format('area {area} sham-link {sl_local_id} {sl_remote_id}', force=True)):\n\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n # router ospf 1\n # area 2 sham-link 1.1.1.1 2.2.2.2\n # hello-interval 55\n if attributes.value('sl_hello_interval'):\n configurations.append_line(attributes.format('hello-interval {sl_hello_interval}'))\n\n # router ospf 1\n # area 2 sham-link 1.1.1.1 2.2.2.2\n # dead-interval 55\n if attributes.value('sl_dead_interval'):\n configurations.append_line(attributes.format('dead-interval {sl_dead_interval}'))\n\n # router ospf 1\n # area 2 sham-link 1.1.1.1 2.2.2.2\n # retransmit-interval 55\n if attributes.value('sl_retransmit_interval'):\n configurations.append_line(attributes.format('retransmit-interval {sl_retransmit_interval}'))\n\n # router ospf 1\n # area 2 sham-link 1.1.1.1 2.2.2.2\n # 
transmit-delay 55\n if attributes.value('sl_transmit_delay'):\n configurations.append_line(attributes.format('transmit-delay {sl_transmit_delay}'))\n\n # router ospf 1\n # area 2 sham-link 1.1.1.1 2.2.2.2\n # authentication key-chain \"ottawa\"\n if attributes.value('sl_auth_trailer_key_chain'):\n configurations.append_line(attributes.format('authentication key-chain {sl_auth_trailer_key_chain}'))\n\n # router ospf 1\n # area 2 sham-link 1.1.1.1 2.2.2.2\n # authentication\n # authentication-key anything\n # authentication message-digest\n # message-digest-key 1 md5 anything\n if attributes.value('sl_auth_trailer_key_crypto_algorithm'):\n\n auth_type = attributes.value('sl_auth_trailer_key_crypto_algorithm').value\n\n # area 2 sham-link 1.1.1.1 2.2.2.2 authentication\n if auth_type == 'simple':\n configurations.append_line(attributes.format('authentication'))\n\n # area 2 sham-link 1.1.1.1 2.2.2.2 authentication-key anything\n if attributes.value('sl_auth_trailer_key'):\n configurations.append_line(attributes.format('authentication-key {sl_auth_trailer_key}'))\n\n # area 2 sham-link 1.1.1.1 2.2.2.2 authentication message-digest\n elif auth_type == 'md5':\n configurations.append_line(attributes.format('authentication message-digest'))\n\n # area 2 sham-link 1.1.1.1 2.2.2.2 message-digest-key 1 md5 anything\n if attributes.value('sl_auth_trailer_key'):\n configurations.append_line(attributes.format('message-digest-key 1 md5 {sl_auth_trailer_key}'))\n\n # router ospf 1\n # area 2 sham-link 1.1.1.1 2.2.2.2\n # cost 10\n if attributes.value('sl_cost'):\n configurations.append_line(attributes.format('cost {sl_cost}'))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n # +- DeviceAttributes\n # +- VrfAttributes\n # +- AreaAttributes\n # +- InterfaceAttributes\n class InterfaceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # interface GigabitEthernet1\n with configurations.submode_context(\n attributes.format('interface {interface_name}', force=True)):\n\n # interface GigabitEthernet1\n # ip router ospf 1 area 2\n if attributes.value('if_admin_control'):\n configurations.append_line(attributes.format('ip router ospf {instance} area {area}'))\n\n # interface GigabitEthernet1\n # ip ospf cost 100\n if attributes.value('if_cost'):\n configurations.append_line(attributes.format('ip ospf cost {if_cost}'))\n\n # interface GigabitEthernet1\n # ip ospf network point-to-point\n if attributes.value('if_type'):\n iftype = attributes.value('if_type').value\n # Only 'broadcast' and 'point-to-point' are acceptable interface types here\n if iftype == 'broadcast' or iftype == 'point-to-point':\n configurations.append_line(attributes.format('ip ospf network {}'.format(iftype)))\n\n # interface GigabitEthernet1\n # ip ospf passive-interface\n if attributes.value('if_passive'):\n configurations.append_line(attributes.format('ip ospf passive-interface'))\n\n # interface GigabitEthernet1\n # ip ospf priority 100\n if attributes.value('if_priority'):\n configurations.append_line(attributes.format('ip ospf priority {if_priority}'))\n\n # interface GigabitEthernet1\n # ip ospf bfd\n if attributes.value('if_bfd_enable'):\n configurations.append_line(attributes.format('ip ospf bfd'))\n\n # interface GigabitEthernet1\n # bfd interval 50 min_rx 60 multiplier 2\n if 
attributes.value('if_bfd_interval'):\n\n # bfd interval [if_bfd_interval]\n bfd_str = 'bfd interval {if_bfd_interval}'\n\n # + min_rx {if_bfd_min_interval}\n if attributes.value('if_bfd_min_interval'):\n bfd_str += ' min_rx {if_bfd_min_interval}'\n\n # + multiplier {if_bfd_multiplier}\n if attributes.value('if_bfd_multiplier'):\n bfd_str += ' multiplier {if_bfd_multiplier}'\n\n configurations.append_line(attributes.format(bfd_str))\n\n # interface GigabitEthernet1\n # ip ospf hello-interval 10\n if attributes.value('if_hello_interval'):\n configurations.append_line(attributes.format('ip ospf hello-interval {if_hello_interval}'))\n\n # interface GigabitEthernet1\n # ip ospf dead-interval 10\n if attributes.value('if_dead_interval'):\n configurations.append_line(attributes.format('ip ospf dead-interval {if_dead_interval}'))\n\n # interface GigabitEthernet1\n # ip ospf retransmit-interval 10\n if attributes.value('if_retransmit_interval'):\n configurations.append_line(attributes.format('ip ospf retransmit-interval {if_retransmit_interval}'))\n\n # interface GigabitEthernet1\n # ip ospf authentication key-chain ottawa\n if attributes.value('if_auth_trailer_key_chain'):\n configurations.append_line(attributes.format('ip ospf authentication key-chain {if_auth_trailer_key_chain}'))\n\n # interface GigabitEthernet1\n # ip ospf authentication\n # ip ospf authentication-key anything\n # ip ospf authentication message-digest\n # ip ospf message-digest-key 1 md5 anything\n if attributes.value('if_auth_trailer_key_crypto_algorithm'):\n\n auth_type = attributes.value('if_auth_trailer_key_crypto_algorithm').value\n\n # ip ospf authentication\n if auth_type == 'simple':\n configurations.append_line(attributes.format('ip ospf authentication'))\n\n # ip ospf authentication-key anything\n if attributes.value('if_auth_trailer_key'):\n configurations.append_line(attributes.format('ip ospf authentication-key {if_auth_trailer_key}'))\n\n # ip ospf authentication message-digest\n elif auth_type == 'md5':\n configurations.append_line(attributes.format('ip ospf authentication message-digest'))\n\n # ip ospf message-digest-key 1 md5 anything\n if attributes.value('if_auth_trailer_key'):\n configurations.append_line(attributes.format('ip ospf message-digest-key 1 md5 {if_auth_trailer_key}'))\n\n # interface GigabitEthernet1\n # ip ospf mtu-ignore\n if attributes.value('if_mtu_ignore'):\n configurations.append_line(attributes.format('ip ospf mtu-ignore'))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n", "id": "8638443", "language": "Python", "matching_score": 2.443107843399048, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/ospf/nxos/ospf.py" }, { "content": "\nfrom abc import ABC\nimport warnings\n\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning,\\\n AttributesHelper\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.config import CliConfig\n\n\nclass Evi(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not kwargs, kwargs\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n with configurations.submode_context(\n attributes.format('evi {evi_id}', force=True)):\n\n # iosxr: evpn / evi 1 / advertise-mac\n if attributes.value('advertise_mac'):\n configurations.append_line('advertise-mac')\n\n # 
iosxr: evpn / evi 1 / control-word-disable\n if attributes.value('control_word_disable'):\n configurations.append_line('control-word-disable')\n\n sub, attributes2 = attributes.namespace('bgp')\n if sub is not None:\n configurations.append_block(\n sub.build_config(apply=False, attributes=attributes2, unconfig=unconfig))\n\n sub, attributes2 = attributes.namespace('load_balancing')\n if sub is not None:\n configurations.append_block(\n sub.build_config(apply=False, attributes=attributes2, unconfig=unconfig))\n\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True)\n\n class BgpAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # iosxr: evpn / evi 1 / bgp (config-evpn-evi-bgp)\n if attributes.value('enabled', force=True):\n with configurations.submode_context('bgp'):\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n # iosxr: evpn / evi 1 / bgp / rd 100:200000\n # iosxr: evpn / evi 1 / bgp / rd 65536:200\n # iosxr: evpn / evi 1 / bgp / rd 1.2.3.4:1\n configurations.append_line(\n attributes.format('rd {rd}'))\n\n # iosxr: evpn / evi 1 / bgp / route-target 100:200000\n # iosxr: evpn / evi 1 / bgp / route-target 100:200000 stitching\n # iosxr: evpn / evi 1 / bgp / route-target 65536:200\n # iosxr: evpn / evi 1 / bgp / route-target 65536:200 stitching\n # iosxr: evpn / evi 1 / bgp / route-target 1.2.3.4:1\n # iosxr: evpn / evi 1 / bgp / route-target 1.2.3.4:1 stitching\n both_route_targets = set(self.export_route_targets) & set(self.import_route_targets)\n\n # iosxr: evpn / evi 1 / bgp / route-target export 100:200000\n # iosxr: evpn / evi 1 / bgp / route-target export 100:200000 stitching\n # iosxr: evpn / evi 1 / bgp / route-target export 65536:200\n # iosxr: evpn / evi 1 / bgp / route-target export 65536:200 stitching\n # iosxr: evpn / evi 1 / bgp / route-target export 1.2.3.4:1\n # iosxr: evpn / evi 1 / bgp / route-target export 1.2.3.4:1 stitching\n for v, attributes2 in attributes.sequence_values('export_route_targets', sort=True):\n if v in both_route_targets:\n cfg = 'route-target {}'.format(v.route_target)\n else:\n cfg = 'route-target export {}'.format(v.route_target)\n if v.stitching:\n cfg += ' stitching'\n configurations.append_line(cfg)\n\n # iosxr: evpn / evi 1 / bgp / route-target import 100:200000\n # iosxr: evpn / evi 1 / bgp / route-target import 100:200000 stitching\n # iosxr: evpn / evi 1 / bgp / route-target import 65536:200\n # iosxr: evpn / evi 1 / bgp / route-target import 65536:200 stitching\n # iosxr: evpn / evi 1 / bgp / route-target import 1.2.3.4:1\n # iosxr: evpn / evi 1 / bgp / route-target import 1.2.3.4:1 stitching\n for v, attributes2 in attributes.sequence_values('import_route_targets', sort=True):\n if v in both_route_targets:\n continue # Already done above\n cfg = 'route-target import {}'.format(v.route_target)\n if v.stitching:\n cfg += ' stitching'\n configurations.append_line(cfg)\n\n # iosxr: evpn / evi 1 / bgp / route-target export none\n if attributes.value('export_route_target_none'):\n if attributes.value('import_route_target_none', force=True):\n configurations.append_line('route-target none')\n else:\n 
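# Only the export direction is negated here; the import-only case is\n # handled by the 'import_route_target_none' branch just below.\n 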
configurations.append_line('route-target export none')\n\n # iosxr: evpn / evi 1 / bgp / route-target import none\n if attributes.value('import_route_target_none'):\n if attributes.value('export_route_target_none', force=True):\n pass # Already done above\n else:\n configurations.append_line('route-target import none')\n\n # iosxr: evpn / evi 1 / bgp / table-policy <rtepol>\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True)\n\n class LoadBalancingAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # iosxr: evpn / evi 1 / load-balancing (config-evpn-evi-lb)\n if attributes.value('enabled', force=True):\n with configurations.submode_context('load-balancing'):\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n # iosxr: evpn / evi 1 / load-balancing / flow-label static\n if attributes.value('flow_label_static'):\n configurations.append_line('flow-label static')\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True)\n\n", "id": "11728114", "language": "Python", "matching_score": 4.503549575805664, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/evpn/iosxr/evi.py" }, { "content": "\nfrom abc import ABC\nimport warnings\n\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning,\\\n AttributesHelper\nfrom genie.conf.base.cli import CliConfigBuilder\n\n\nclass Evi(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not kwargs, kwargs\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n with configurations.submode_context(\n attributes.format('l2vpn evpn instance {evi_id} {evi_mode}', force=True)):\n\n sub, attributes2 = attributes.namespace('bgp')\n if sub is not None:\n configurations.append_block(\n sub.build_config(apply=False, attributes=attributes2, unconfig=unconfig))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True)\n\n class BgpAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n if attributes.value('enabled', force=True):\n\n configurations.append_line(\n attributes.format('rd {rd}'))\n\n both_route_targets = set(self.export_route_targets) & set(self.import_route_targets)\n\n for v, attributes2 in attributes.sequence_values('export_route_targets', sort=True):\n if v in both_route_targets:\n cfg = 'route-target {}'.format(v.route_target)\n else:\n cfg = 'route-target export {}'.format(v.route_target)\n configurations.append_line(cfg)\n\n for v, attributes2 in attributes.sequence_values('import_route_targets', sort=True):\n if v in both_route_targets:\n continue # Already done above\n cfg = 'route-target import {}'.format(v.route_target)\n configurations.append_line(cfg)\n\n if attributes.value('auto_route_target') is not None:\n if 
attributes.value('auto_route_target'):\n configurations.append_line('auto-route-target')\n else:\n configurations.append_line('no auto-route-target', unconfig_cmd = 'auto-route-target')\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True)\n\n class LoadBalancingAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True)\n\n", "id": "5960869", "language": "Python", "matching_score": 4.8495659828186035, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/evpn/iosxe/evi.py" }, { "content": "\"\"\"Implement Nexus (nxos) Specific Configurations for Evpn objects.\n\"\"\"\n\n# Table of contents:\n# class Evpn:\n# class InterfaceAttributes:\n# def build_config/build_unconfig:\n# class EthernetSegmentAttributes:\n# def build_config/build_unconfig:\n# class BgpAttributes:\n# def build_config/build_unconfig:\n# class DeviceAttributes:\n# def build_config/build_unconfig:\n\nfrom abc import ABC\nimport warnings\n\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning, AttributesHelper\nfrom genie.conf.base.cli import CliConfigBuilder\nimport genie.conf.base.interface\nfrom genie.conf.base.config import CliConfig\n\n\nclass Evpn(ABC):\n\n class VniAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # nxos: evpn / vni 4096 l2 (config-evpn-evi)\n with configurations.submode_context(attributes.format('vni {vni.vni_id} l2', force=True)):\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n ns, attributes2 = attributes.namespace('ethernet_segment')\n if ns is not None:\n configurations.append_block(ns.build_config(apply=False, attributes=attributes2, unconfig=unconfig))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True)\n\n class EthernetSegmentAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n ns, attributes2 = attributes.namespace('bgp')\n if ns is not None:\n configurations.append_block(ns.build_config(apply=False, attributes=attributes2, unconfig=unconfig))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True)\n\n class BgpAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n if attributes.value('enabled', force=True):\n\n # nxos: evpn / vni 4096 l2 / bgp\n if attributes.value('enabled'):\n configurations.append_line('bgp')\n\n # nxos: evpn / vni 4096 l2 / rd auto\n # nxos: evpn / vni 4096 l2 / rd 100:200000\n # nxos: evpn / vni 4096 l2 / rd 
100000:200\n # nxos: evpn / vni 4096 l2 / rd 1.2.3.4:1\n configurations.append_line(attributes.format('rd {rd}'))\n\n # nxos: evpn / vni 4096 l2 / route-target both auto\n # nxos: evpn / vni 4096 l2 / route-target both 100:200000\n # nxos: evpn / vni 4096 l2 / route-target both 100000:200\n # nxos: evpn / vni 4096 l2 / route-target both 1.2.3.4:1\n both_route_targets = set(self.export_route_targets) & set(self.import_route_targets)\n\n # nxos: evpn / vni 4096 l2 / route-target export auto # XXXJST how does this match none in IOS-XR?\n # nxos: evpn / vni 4096 l2 / route-target export 100:200000\n # nxos: evpn / vni 4096 l2 / route-target export 100000:200\n # nxos: evpn / vni 4096 l2 / route-target export 1.2.3.4:1\n for v, attributes2 in attributes.sequence_values('export_route_targets'):\n if v == 'auto':\n cfg = 'route-target {} {}'.format(\n 'both' if v in both_route_targets else 'export',\n v)\n else:\n cfg = 'route-target {} {}'.format(\n 'both' if v in both_route_targets else 'export',\n v.route_target)\n if v.stitching:\n warnings.warn(\n 'export bgp route-target stitching',\n UnsupportedAttributeWarning)\n configurations.append_line(cfg)\n\n # nxos: evpn / vni 4096 l2 / route-target import auto\n # nxos: evpn / vni 4096 l2 / route-target import 100:200000\n # nxos: evpn / vni 4096 l2 / route-target import 100000:200\n # nxos: evpn / vni 4096 l2 / route-target import 1.2.3.4:1\n for v, attributes2 in attributes.sequence_values('import_route_targets'):\n if v == 'auto':\n cfg = 'route-target import {}'.format(v)\n else:\n cfg = 'route-target import {}'.format(v.route_target)\n if v.stitching:\n warnings.warn(\n 'import bgp route-target stitching',\n UnsupportedAttributeWarning)\n configurations.append_line(cfg)\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True)\n\n class DeviceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False):\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # nxos: evpn esi multihoming\n\n # nxos: evpn (config-evpn)\n with configurations.submode_context('evpn'):\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n # nxos: evpn / vni 4096 l2 (config-evpn-evi)\n for sub, attributes2 in attributes.mapping_values('vni_attr', keys=self.vnis, sort=True):\n configurations.append_block(sub.build_config(apply=False, attributes=attributes2, unconfig=unconfig))\n\n if apply:\n if configurations:\n self.device.configure(configurations, fail_invalid=True)\n else:\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations, fail_invalid=True)\n\n def build_unconfig(self, apply=True, attributes=None):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True)\n\n", "id": "10192818", "language": "Python", "matching_score": 4.462716102600098, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/evpn/nxos/evpn.py" }, { "content": "\"\"\"Implement IOS-XR (iosxr) Specific Configurations for Evpn objects.\n\"\"\"\n\n# Table of contents:\n# class Evpn:\n# class InterfaceAttributes:\n# def build_config/build_unconfig:\n# class EthernetSegmentAttributes:\n# def build_config/build_unconfig:\n# class BgpAttributes:\n# def build_config/build_unconfig:\n# class PseudowireNeighborAttributes:\n# def build_config/build_unconfig:\n# class EthernetSegmentAttributes:\n# def 
build_config/build_unconfig:\n# class BgpAttributes:\n# def build_config/build_unconfig:\n# class VfiAttributes:\n# def build_config/build_unconfig:\n# class EthernetSegmentAttributes:\n# def build_config/build_unconfig:\n# class BgpAttributes:\n# def build_config/build_unconfig:\n# class DeviceAttributes:\n# def build_config/build_unconfig:\n# class BgpAttributes:\n# def build_config/build_unconfig:\n# class LoadBalancingAttributes:\n# def build_config/build_unconfig:\n\nfrom abc import ABC\nimport warnings\n\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning, AttributesHelper\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.config import CliConfig\n\n\nclass Evpn(ABC):\n\n class InterfaceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # iosxr: evpn / interface Bundle-Ether1 (config-evpn-ac)\n with configurations.submode_context(attributes.format('interface {interface_name}', force=True)):\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n # iosxr: evpn / interface Bundle-Ether1 / ethernet-segment (config-evpn-ac-es)\n ns, attributes2 = attributes.namespace('ethernet_segment')\n if ns is not None:\n configurations.append_block(ns.build_config(apply=False, attributes=attributes2, unconfig=unconfig, **kwargs))\n\n # iosxr: evpn / interface Bundle-Ether1 / mac-flush mvrp\n configurations.append_line(attributes.format('mac-flush {mac_flush}'))\n\n # iosxr: evpn / interface Bundle-Ether1 / timers (config-evpn-ac-timers)\n with configurations.submode_context('timers', cancel_empty=True):\n\n # iosxr: evpn / interface Bundle-Ether1 / timers / recovery 20\n configurations.append_line(attributes.format('recovery {recovery_timer}', inherited=False))\n\n if attributes.value('peering_timer', inherited=False) is not None:\n warnings.warn(\n 'evpn interface peering_timer',\n UnsupportedAttributeWarning)\n\n return str(configurations)\n\n def build_unconfig(self, *args, **kwargs):\n return self.build_config(*args, unconfig=True, **kwargs)\n\n class EthernetSegmentAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # iosxr: evpn / interface Bundle-Ether1 / ethernet-segment (config-evpn-ac-es)\n with configurations.submode_context('ethernet-segment'):\n if not attributes.value('enabled', force=True):\n configurations.submode_cancel()\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n # iosxr: evpn / interface Bundle-Ether1 / ethernet-segment / backbone-source-mac aaaa.bbbb.cccc\n configurations.append_line(attributes.format('backbone-source-mac {backbone_source_mac}'))\n\n # iosxr: evpn / interface Bundle-Ether1 / ethernet-segment / bgp route-target aaaa.bbbb.cccc\n ns, attributes2 = attributes.namespace('bgp')\n if ns is not None:\n configurations.append_block(ns.build_config(apply=False, attributes=attributes2, unconfig=unconfig, **kwargs))\n\n # iosxr: evpn / interface Bundle-Ether1 / ethernet-segment / force single-homed\n if attributes.value('force_single_homed'):\n configurations.append_line('force single-homed')\n\n # iosxr: evpn / interface Bundle-Ether1 / ethernet-segment / identifier type 0 00.11.22.33.44.55.66.77.88\n 
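# Renders e.g. 'identifier type 0 00.11.22.33.44.55.66.77.88' (as in the comment above);\n # 'esi.dotted' is assumed to be the dotted form of the ESI value octets.\n 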
configurations.append_line(attributes.format('identifier type {esi.type} {esi.dotted}'))\n\n # iosxr: evpn / interface Bundle-Ether1 / ethernet-segment / load-balancing-mode single-active\n configurations.append_line(attributes.format('load-balancing-mode {load_balancing_mode}'))\n\n # iosxr: evpn / interface Bundle-Ether1 / ethernet-segment / service-carving manual (config-evpn-ac-es-vlan-man)\n # iosxr: evpn / interface Bundle-Ether1 / ethernet-segment / service-carving manual / primary someword secondary someword2\n\n return str(configurations)\n\n def build_unconfig(self, *args, **kwargs):\n return self.build_config(*args, unconfig=True, **kwargs)\n\n class BgpAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n if attributes.value('enabled', force=True):\n\n # iosxr: evpn / interface Bundle-Ether1 / ethernet-segment / bgp route-target aaaa.bbbb.cccc\n configurations.append_line(attributes.format('bgp route-target {import_route_target}'))\n\n return str(configurations)\n\n def build_unconfig(self, *args, **kwargs):\n return self.build_config(*args, unconfig=True, **kwargs)\n\n class PseudowireNeighborAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # virtual neighbor 70.70.70.70 pw-id 17300005\n with configurations.submode_context(attributes.format('virtual neighbor {ip} pw-id {pw_id}', force=True)):\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n # iosxr: evpn / virtual neighbor 172.16.58.3 pw-id 17300005 / ethernet-segment (config-evpn-ac-es)\n ns, attributes2 = attributes.namespace('ethernet_segment')\n if ns is not None:\n configurations.append_block(ns.build_config(apply=False, attributes=attributes2, unconfig=unconfig, **kwargs))\n\n return str(configurations)\n\n def build_unconfig(self, *args, **kwargs):\n return self.build_config(*args, unconfig=True, **kwargs)\n\n class EthernetSegmentAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # iosxr: evpn / virtual neighbor 172.16.58.3 pw-id 17300005 / ethernet-segment (config-evpn-ac-es)\n with configurations.submode_context('ethernet-segment'):\n if not attributes.value('enabled', force=True):\n configurations.submode_cancel()\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n # iosxr: evpn / virtual neighbor 172.16.58.3 pw-id 17300005 / ethernet-segment / bgp route-target aaaa.bbbb.cccc\n ns, attributes2 = attributes.namespace('bgp')\n if ns is not None:\n configurations.append_block(ns.build_config(apply=False, attributes=attributes2, unconfig=unconfig, **kwargs))\n\n # iosxr: evpn / virtual neighbor 172.16.58.3 pw-id 17300005 / ethernet-segment / identifier type 0 00.11.22.33.44.55.66.77.88\n configurations.append_line(attributes.format('identifier type {esi.type} {esi.dotted}'))\n\n return str(configurations)\n\n def build_unconfig(self, *args, **kwargs):\n return self.build_config(*args, unconfig=True, **kwargs)\n\n class BgpAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n assert not apply\n attributes = 
AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n if attributes.value('enabled', force=True):\n\n # iosxr: evpn / interface Bundle-Ether1 / ethernet-segment / bgp route-target aaaa.bbbb.cccc\n configurations.append_line(attributes.format('bgp route-target {import_route_target}'))\n\n return str(configurations)\n\n def build_unconfig(self, *args, **kwargs):\n return self.build_config(*args, unconfig=True, **kwargs)\n\n class VfiAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # virtual vfi ac-vfi-5\n with configurations.submode_context(attributes.format('virtual vfi {vfi_name}', force=True)):\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n # iosxr: evpn / virtual vfi ac-vfi-5 / ethernet-segment (config-evpn-ac-es)\n ns, attributes2 = attributes.namespace('ethernet_segment')\n if ns is not None:\n configurations.append_block(ns.build_config(apply=False, attributes=attributes2, unconfig=unconfig, **kwargs))\n\n return str(configurations)\n\n def build_unconfig(self, *args, **kwargs):\n return self.build_config(*args, unconfig=True, **kwargs)\n\n class EthernetSegmentAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # iosxr: evpn / virtual vfi ac-vfi-5 / ethernet-segment (config-evpn-ac-es)\n with configurations.submode_context('ethernet-segment'):\n if not attributes.value('enabled', force=True):\n configurations.submode_cancel()\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n # iosxr: evpn / virtual vfi ac-vfi-5 / ethernet-segment / bgp route-target aaaa.bbbb.cccc\n ns, attributes2 = attributes.namespace('bgp')\n if ns is not None:\n configurations.append_block(ns.build_config(apply=False, attributes=attributes2, unconfig=unconfig, **kwargs))\n\n # iosxr: evpn / virtual vfi ac-vfi-5 / ethernet-segment / identifier type 0 00.11.22.33.44.55.66.77.88\n configurations.append_line(attributes.format('identifier type {esi.type} {esi.dotted}'))\n\n return str(configurations)\n\n def build_unconfig(self, *args, **kwargs):\n return self.build_config(*args, unconfig=True, **kwargs)\n\n class BgpAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n if attributes.value('enabled', force=True):\n\n # iosxr: evpn / interface Bundle-Ether1 / ethernet-segment / bgp route-target aaaa.bbbb.cccc\n configurations.append_line(attributes.format('bgp route-target {import_route_target}'))\n\n return str(configurations)\n\n def build_unconfig(self, *args, **kwargs):\n return self.build_config(*args, unconfig=True, **kwargs)\n\n class DeviceAttributes(ABC):\n\n def build_config(self, interfaces=None,\n apply=True, attributes=None, unconfig=False, **kwargs):\n # assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n if interfaces is None:\n interfaces = set(self.interfaces)\n else:\n interfaces = set(self.interfaces).intersection(interfaces)\n\n # iosxr: evpn (config-evpn)\n with configurations.submode_context('evpn'):\n if unconfig and 
attributes.iswildcard:\n configurations.submode_unconfig()\n\n # iosxr: evpn / bgp (config-evpn-bgp)\n ns, attributes2 = attributes.namespace('bgp')\n if ns is not None:\n configurations.append_block(ns.build_config(apply=False, attributes=attributes2, unconfig=unconfig, **kwargs))\n\n # iosxr: evpn / evi 1 (config-evpn-evi)\n for evi, attributes2 in attributes.sequence_values('evis', sort=True):\n if unconfig:\n configurations.append_block(evi.build_unconfig(apply=False, attributes=attributes2))\n else:\n configurations.append_block(evi.build_config(apply=False, attributes=attributes2))\n\n # iosxr: evpn / interface Bundle-Ether1 (config-evpn-ac)\n for sub, attributes2 in attributes.mapping_values('interface_attr', keys=interfaces, sort=True):\n configurations.append_block(sub.build_config(apply=False, attributes=attributes2, unconfig=unconfig, **kwargs))\n\n # iosxr: evpn / virtual neighbor 172.16.58.3 pw-id 17300005\n for sub, attributes2 in attributes.mapping_values('pw_neighbor_attr', sort=True):\n configurations.append_block(sub.build_config(apply=False, attributes=attributes2, unconfig=unconfig, **kwargs))\n\n # iosxr: evpn / virtual vfi ac-vfi-5\n for sub, attributes2 in attributes.mapping_values('vfi_attr', sort=True):\n configurations.append_block(sub.build_config(apply=False, attributes=attributes2, unconfig=unconfig, **kwargs))\n\n # iosxr: evpn / timers (config-evpn-timers)\n with configurations.submode_context('timers', cancel_empty=True):\n\n # iosxr: evpn / timers / recovery 20\n configurations.append_line(attributes.format('recovery {recovery_timer}'))\n\n # iosxr: evpn / timers / peering <0-300>\n configurations.append_line(attributes.format('peering {peering_timer}'))\n\n # iosxr: evpn / source interface Loopback0\n configurations.append_line(attributes.format('source interface {source_interface.name}'))\n\n # iosxr: evpn / load-balancing (config-evpn-lb)\n ns, attributes2 = attributes.namespace('load_balancing')\n if ns is not None:\n configurations.append_block(ns.build_config(apply=False, attributes=attributes2, unconfig=unconfig, **kwargs))\n\n # iosxr: evpn / bgp (config-evpn-bgp)\n ns, attributes2 = attributes.namespace('bgp')\n if ns is not None:\n configurations.append_block(ns.build_config(apply=False, attributes=attributes2, unconfig=unconfig, **kwargs))\n\n if apply:\n if configurations:\n self.device.configure(configurations, fail_invalid=True)\n else:\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations, fail_invalid=True)\n\n def build_unconfig(self, *args, **kwargs):\n return self.build_config(*args, unconfig=True, **kwargs)\n\n class BgpAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # iosxr: evpn / bgp (config-evpn-bgp)\n with configurations.submode_context('bgp'):\n if not attributes.value('enabled', force=True):\n configurations.submode_cancel()\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n # iosxr: evpn / bgp / rd 100:200000\n # iosxr: evpn / bgp / rd 65536:200\n # iosxr: evpn / bgp / rd 1.2.3.4:1\n configurations.append_line(attributes.format('rd {rd}'))\n\n return str(configurations)\n\n def build_unconfig(self, *args, **kwargs):\n return self.build_config(*args, unconfig=True, **kwargs)\n\n class LoadBalancingAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False, 
**kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # iosxr: evpn / load-balancing (config-evpn-lb)\n with configurations.submode_context('load-balancing'):\n if not attributes.value('enabled', force=True):\n configurations.submode_cancel()\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n # iosxr: evpn / load-balancing / flow-label static\n if attributes.value('flow_label_static'):\n configurations.append_line('flow-label static')\n\n return str(configurations)\n\n def build_unconfig(self, *args, **kwargs):\n return self.build_config(*args, unconfig=True, **kwargs)\n\n", "id": "710365", "language": "Python", "matching_score": 5.600644111633301, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/evpn/iosxr/evpn.py" }, { "content": "# Table of contents:\n# class Evpn:\n# class InterfaceAttributes:\n# def build_config/build_unconfig:\n# class EthernetSegmentAttributes:\n# def build_config/build_unconfig:\n# class BgpAttributes:\n# def build_config/build_unconfig:\n# class DeviceAttributes:\n# def build_config/build_unconfig:\n# class BgpAttributes:\n# def build_config/build_unconfig:\n# class LoadBalancingAttributes:\n# def build_config/build_unconfig:\n\nfrom abc import ABC\nimport warnings\n\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning, AttributesHelper\nfrom genie.conf.base.cli import CliConfigBuilder\n\nclass Evpn(ABC):\n\n class DeviceAttributes(ABC):\n\n def build_config(self, interfaces=None,\n apply=True, attributes=None, unconfig=False, **kwargs):\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n if interfaces is None:\n interfaces = set(self.interfaces)\n else:\n interfaces = set(self.interfaces).intersection(interfaces)\n\n with configurations.submode_context('l2vpn evpn'):\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n # arp flooding-suppression disable (only emitted when the attribute is explicitly False)\n if attributes.value('arp_flooding_suppression') is not None:\n if attributes.value('arp_flooding_suppression') is False:\n configurations.append_line('arp flooding-suppression disable')\n\n configurations.append_line(attributes.format('replication-type {replication_type}'))\n configurations.append_line(attributes.format('mpls label mode {label_mode}'))\n\n for evi, attributes2 in attributes.sequence_values('evis', sort=True):\n if unconfig:\n configurations.append_block(evi.build_unconfig(apply=False, attributes=attributes2))\n else:\n configurations.append_block(evi.build_config(apply=False, attributes=attributes2))\n\n if apply:\n if configurations:\n self.device.configure(configurations, fail_invalid=True)\n else:\n return str(configurations)\n\n def build_unconfig(self, *args, **kwargs):\n return self.build_config(*args, unconfig=True, **kwargs)\n\n class BgpAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n return str(configurations)\n\n def build_unconfig(self, *args, **kwargs):\n return self.build_config(*args, unconfig=True, **kwargs)\n\n class LoadBalancingAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n return str(configurations)\n\n def 
build_unconfig(self, *args, **kwargs):\n return self.build_config(*args, unconfig=True, **kwargs)\n\n class InterfaceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n return str(configurations)\n\n def build_unconfig(self, *args, **kwargs):\n return self.build_config(*args, unconfig=True, **kwargs)\n\n class EthernetSegmentAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n return str(configurations)\n\n def build_unconfig(self, *args, **kwargs):\n return self.build_config(*args, unconfig=True, **kwargs)\n\n class BgpAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n return str(configurations)\n\n def build_unconfig(self, *args, **kwargs):\n return self.build_config(*args, unconfig=True, **kwargs)\n\n", "id": "4073348", "language": "Python", "matching_score": 1.428326964378357, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/evpn/iosxe/evpn.py" }, { "content": "''' \nBGP Genie Ops Object for IOSXR - YANG.\n'''\n\n# Genie\nfrom genie.ops.base import Context\nfrom genie.libs.ops.bgp.iosxr.bgp import Bgp as BgpOpsCli\n\n# Parser\nfrom genie.libs.parser.iosxr import show_bgp\n\n\nclass Bgp(BgpOpsCli):\n '''BGP Genie Ops Object'''\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n \n # ShowBgpInstanceProcessDetail\n self.context_manager[show_bgp.ShowBgpInstanceProcessDetail] = [Context.yang, Context.cli]\n\n # ShowBgpInstanceNeighborsDetail\n self.context_manager[show_bgp.ShowBgpInstanceNeighborsDetail] = [Context.yang, Context.cli]", "id": "10995628", "language": "Python", "matching_score": 3.553679943084717, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/bgp/iosxr/yang/bgp.py" }, { "content": "''' \nBGP Genie Ops Object for NXOS - YANG.\n'''\n\n# Genie\nfrom genie.ops.base import Context\nfrom genie.libs.ops.bgp.nxos.bgp import Bgp as BgpOpsCli\n\n# Parser\nfrom genie.libs.parser.nxos import show_bgp\n\n\nclass Bgp(BgpOpsCli):\n '''BGP Genie Ops Object'''\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n \n # ShowBgpProcessVrfAll\n self.context_manager[show_bgp.ShowBgpProcessVrfAll] = [Context.yang, Context.cli]\n\n # ShowBgpVrfAllNeighbors\n self.context_manager[show_bgp.ShowBgpVrfAllNeighbors] = [Context.yang, Context.cli]", "id": "10852151", "language": "Python", "matching_score": 2.0933473110198975, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/bgp/nxos/yang/bgp.py" }, { "content": "from genie.ops.base import Context\nfrom genie.libs.ops.ospf.nxos.ospf import Ospf as b_ospf\nfrom genie.libs.parser.nxos import show_ospf\n\nclass Ospf(b_ospf):\n '''Ospf Ops Object'''\n\n # To keep short names\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self.context_manager[show_ospf.ShowIpOspfVrfAll] = Context.yang\n self.context_manager[show_ospf.ShowIpOspfInterfaceVrfAll] = Context.yang\n self.context_manager[show_ospf.ShowIpOspfDatabase] = Context.yang\n self.context_manager[show_ospf.ShowIpOspfNeighborsDetailVrfAll] = Context.yang\n", "id": "3860284", 
"language": "Python", "matching_score": 2.740900754928589, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/ospf/nxos/yang/ospf.py" }, { "content": "from genie.ops.base import Context\nfrom genie.libs.ops.ospf.iosxe.ospf import Ospf as b_ospf\nfrom genie.libs.parser.iosxe import show_ospf\n\nclass Ospf(b_ospf):\n '''Ospf Ops Object'''\n\n # To keep short names\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self.context_manager[show_ospf.ShowIpOspf] = Context.yang\n # Rest use cli as their info cannot be retrieve via yang at the moment\n", "id": "4125294", "language": "Python", "matching_score": 0.18107910454273224, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/ospf/iosxe/yang/ospf.py" }, { "content": "# Python\nimport logging\n\n# Abstract\nfrom genie.abstract import Lookup\n\n# Parser\nfrom genie.libs import parser\nfrom genie.metaparser.util.exceptions import SchemaEmptyParserError\n\nlog = logging.getLogger(__name__)\n\ndef save_device_information(device, **kwargs):\n '''Save running configuration to startup configuration'''\n\n # Check if device is VDC\n try:\n output = device.parse('show vdc current-vdc')\n except Exception as e:\n raise Exception(\"Unable to execute 'show vdc current-vdc' to check \"\n \"if device is VDC\") from e\n\n # Check if device is VDC\n if 'current_vdc' in output and output['current_vdc']['id'] != '1':\n cmd = 'copy running-config startup-config'\n else:\n cmd = 'copy running-config startup-config vdc-all'\n\n # Copy boot variables\n try:\n device.execute(cmd)\n except Exception as e:\n raise Exception(\"Unable to save running-config to startup-config\") from e\n\n\ndef get_default_dir(device):\n \"\"\" Get the default directory of this device\n\n Args:\n Mandatory:\n device (`obj`) : Device object.\n\n Returns:\n default_dir (`str`): Default directory of the system\n\n Raises:\n Exception\n\n Example:\n >>> get_default_dir(device=device)\n \"\"\"\n\n try:\n lookup = Lookup.from_device(device)\n parsed_dict = lookup.parser.show_platform.Dir(device=device).parse()\n default_dir = parsed_dict['dir']\n except SchemaEmptyParserError as e:\n raise Exception(\"No output when executing 'dir' command\") from e\n except Exception as e:\n raise Exception(\"Unable to execute 'dir' command\") from e\n\n # Return default_dir to caller\n log.info(\"Default directory on '{d}' is '{dir}'\".format(d=device.name,\n dir=default_dir))\n return default_dir\n", "id": "3861501", "language": "Python", "matching_score": 4.103691577911377, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/libs/abstracted_libs/nxos/subsection.py" }, { "content": "# Python\nimport logging\nfrom os import path\n\n# Abstract\nfrom genie.abstract import Lookup\n\n# Parser\nfrom genie.libs import parser\nfrom genie.metaparser.util.exceptions import SchemaEmptyParserError\n\nlog = logging.getLogger(__name__)\n\ndef save_device_information(device, **kwargs):\n \"\"\"Install the commit packages. 
This is for IOSXR devices.\n\n    Args:\n        Mandatory:\n        device (`obj`) : Device object.\n\n    Returns:\n        True: Result is PASSED\n        False: Result is PASSX\n\n\n    Raises:\n        None\n\n    Example:\n        >>> save_device_information(device=Device())\n    \"\"\"\n\n    # Check that the config-register is 0x2;\n    # if not, configure 0x2:\n    # RP/0/RSP1/CPU0:PE1#admin config-register 0x2\n\n    if device.is_ha:\n        conn = device.active\n    else:\n        conn = device\n\n    # Install commit (when there are packages to bring up features)\n    # from admin prompt\n    conn.admin_execute('install commit')\n\n\ndef get_default_dir(device):\n    \"\"\" Get the default directory of this device\n\n        Args:\n            Mandatory:\n            device (`obj`) : Device object.\n\n        Returns:\n            default_dir (`str`): Default directory of the system\n\n        Raises:\n            Exception\n\n        Example:\n            >>> get_default_dir(device=device)\n    \"\"\"\n\n    try:\n        lookup = Lookup.from_device(device)\n        parsed_dict = lookup.parser.show_platform.Dir(device=device).parse()\n        if \":\" in parsed_dict['dir']['dir_name']:\n            default_dir = parsed_dict['dir']['dir_name']\n        else:\n            default_dir = ''\n    except SchemaEmptyParserError as e:\n        raise Exception(\"No output when executing 'dir' command\") from e\n    except Exception as e:\n        raise Exception(\"Unable to execute 'dir' command\") from e\n\n    # Return default_dir to caller\n    log.info(\"Default directory on '{d}' is '{dir}'\".format(d=device.name,\n                                                            dir=default_dir))\n    return default_dir", "id": "3268566", "language": "Python", "matching_score": 2.5109050273895264, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/libs/abstracted_libs/iosxr/subsection.py" }, { "content": "\n# python\nimport re\nimport logging\n\n# pyats\nfrom ats import aetest\n\nlog = logging.getLogger(__name__)\n\ndef save_device_information(device, **kwargs):\n    \"\"\"Show version to print information users are interested in\n\n    Args:\n        Mandatory:\n        device (`obj`) : Device object.\n\n    Returns:\n        True: Result is PASSED\n\n\n    Raises:\n        None\n\n    Example:\n        >>> save_device_information(device=Device())\n    \"\"\"\n\n    # bypass the section\n    log.info('Junos device does not have bootvar.')\n\n\ndef get_default_dir(device):\n    \"\"\" Get the default directory of this device\n\n        Args:\n            Mandatory:\n            device (`obj`) : Device object.\n\n        Returns:\n            default_dir (`str`): Default directory of the system\n\n        Raises:\n            None\n\n        Example:\n            >>> get_default_dir(device=device)\n    \"\"\"\n\n    out = re.search(r'(\/\S+)', device.execute('file list'))\n    if out:\n        default_dir = out.groups()[0]\n    else:\n        default_dir = ''\n\n    # Return default_dir to caller\n    log.info(\"Default directory on '{d}' is '{dir}'\".format(d=device.name,\n                                                            dir=default_dir))\n    return default_dir\n\n\ndef configure_replace(device, file_location, timeout=60):\n    device.configure('load override {}'.format(file_location), timeout=timeout)", "id": "10679612", "language": "Python", "matching_score": 4.840592861175537, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/libs/abstracted_libs/junos/subsection.py" }, { "content": "\n# python\nimport logging\nlog = logging.getLogger(__name__)\n\n\ndef save_device_information(device, **kwargs):\n    \"\"\"Show version to print information users are interested in\n\n    Args:\n        Mandatory:\n        device (`obj`) : Device object.\n\n    Returns:\n        True: Result is PASSED\n\n\n    Raises:\n        None\n\n    Example:\n        >>> save_device_information(device=Device())\n    \"\"\"\n\n    # bypass the section\n    log.info('Linux device does not have bootvar.')\n\n\ndef get_default_dir(device):\n    \"\"\" Get the default directory of this device\n\n        Args:\n            
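All of these per-OS get_default_dir variants hang off the same abstraction hook: Lookup.from_device resolves the OS-appropriate show_platform.Dir parser at call time. A condensed caller-side sketch mirroring the NXOS variant above; the device object is assumed to be an already-connected pyATS device, and the helper name default_dir_of is illustrative:

from genie.abstract import Lookup

def default_dir_of(device):
    # Resolve the OS-specific parser package for this device
    lookup = Lookup.from_device(device)
    # Run the 'dir' parser; the parsed structure differs per OS
    # (NXOS reads parsed['dir'] directly, IOSXR nests it under
    # ['dir']['dir_name'] as the variants above show)
    parsed = lookup.parser.show_platform.Dir(device=device).parse()
    return parsed['dir']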
Mandatory:\n device (`obj`) : Device object.\n\n Returns:\n default_dir (`str`): Default directory of the system\n\n Raises:\n None\n\n Example:\n >>> get_default_dir(device=device)\n \"\"\"\n # bypass the section\n log.info('Linux device does not have default directory.')\n", "id": "11265678", "language": "Python", "matching_score": 0.011936627328395844, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/libs/abstracted_libs/linux/subsection.py" }, { "content": "class VlanOutput(object):\r\n\r\n showEthernetTags = {'interface': \r\n {'Gi0/0/0/1': \r\n {'sub_interface': \r\n {'Gi0/0/0/1.501': \r\n {'vlan_id': {'501': {'layer': 'L3', 'mtu': '1518', 'status': 'Up', 'outer_encapsulation_type': 'dot1Q'}}}}}, \r\n 'Gi0/0/0/0': \r\n {'sub_interface': \r\n {'Gi0/0/0/0.503': \r\n {'vlan_id': \r\n {'503': \r\n {'layer': 'L3', 'mtu': '1518', 'status': 'Up', 'outer_encapsulation_type': 'dot1Q'}}}, \r\n 'Gi0/0/0/0.504': \r\n {'vlan_id': \r\n {'504': \r\n {'layer': 'L3', 'mtu': '1518', 'status': 'Up', 'outer_encapsulation_type': 'dot1Q'}}}, \r\n 'Gi0/0/0/0.501': \r\n {'vlan_id': \r\n {'4': \r\n {'outer_encapsulation_type': 'dot1ad', 'inner_encapsulation_vlan_id': '5', 'status': 'Up', 'inner_encapsulation_type': 'dot1Q', 'mtu': '1522', 'layer': 'L3'}}}, \r\n 'Gi0/0/0/0.511': \r\n {'vlan_id': \r\n {'511': \r\n {'layer': 'L3', 'mtu': '1518', 'status': 'Up', 'outer_encapsulation_type': 'dot1Q'}}}, \r\n 'Gi0/0/0/0.505': \r\n {'vlan_id': \r\n {'505': \r\n {'layer': 'L3', 'mtu': '1518', 'status': 'Up', 'outer_encapsulation_type': 'dot1Q'}}}, \r\n 'Gi0/0/0/0.510': \r\n {'vlan_id': \r\n {'510': \r\n {'layer': 'L3', 'mtu': '1518', 'status': 'Up', 'outer_encapsulation_type': 'dot1Q'}}}, \r\n 'Gi0/0/0/0.502': \r\n {'vlan_id': \r\n {'502': \r\n {'layer': 'L3', 'mtu': '1518', 'status': 'Up', 'outer_encapsulation_type': 'dot1Q'}}}}\r\n }\r\n }\r\n }\r\n\r\n showEthernetTagsempty = {}\r\n\r\n vlan_all = {'501': \r\n {'sub_interface': 'Gi0/0/0/1.501', 'ethernet_encapsulation_type': 'dot1Q'}, \r\n '503': \r\n {'sub_interface': 'Gi0/0/0/0.503', 'ethernet_encapsulation_type': 'dot1Q'}, \r\n '504': \r\n {'sub_interface': 'Gi0/0/0/0.504', 'ethernet_encapsulation_type': 'dot1Q'}, \r\n '4': \r\n {'sub_interface': 'Gi0/0/0/0.501', 'ethernet_encapsulation_type': 'dot1ad', 'inner_encapsulation_vlan_id': '5', 'inner_encapsulation_type': 'dot1Q'},\r\n '511': \r\n {'sub_interface': 'Gi0/0/0/0.511', 'ethernet_encapsulation_type': 'dot1Q'}, \r\n '505': \r\n {'sub_interface': 'Gi0/0/0/0.505', 'ethernet_encapsulation_type': 'dot1Q'}, \r\n '510': \r\n {'sub_interface': 'Gi0/0/0/0.510', 'ethernet_encapsulation_type': 'dot1Q'}, \r\n '502': \r\n {'sub_interface': 'Gi0/0/0/0.502', 'ethernet_encapsulation_type': 'dot1Q'} \r\n }\r\n\r\n vlan_all_empty = {}", "id": "10115477", "language": "Python", "matching_score": 0.47038158774375916, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/vlan/iosxr/tests/vlan_output.py" }, { "content": "'''\n Vlan Genie Ops Object Outputs for NXOS.\n'''\n\nclass VlanOutput(object):\n # 'show vlan' output\n showVlan = {\n 'vlans':{\n '1':{\n 'vlan_id': '1',\n 'name': 'default',\n 'state': 'active',\n 'interfaces': ['Port-channel4', 'Port-channel100', 'Ethernet1/2',\n 'Ethernet1/3', 'Ethernet1/4', 'Ethernet1/5',\n 'Ethernet1/6', 'Ethernet1/7', 'Ethernet1/8',\n 'Ethernet1/9', 'Ethernet1/10', 'Ethernet1/11',\n 'Ethernet1/12', 'Ethernet1/13', 'Ethernet1/14',\n 'Ethernet2/1','Ethernet2/2','Ethernet2/3','Ethernet2/4',\n 'Ethernet2/5','Ethernet2/6'],\n 'mode': 'ce',\n 'type': 'enet'\n },\n 
'2': {\n 'vlan_id': '2',\n 'name': 'VLAN0002',\n 'state': 'active',\n 'interfaces': ['Port-channel4', 'Port-channel100', 'Ethernet1/7',\n 'Ethernet1/8','Ethernet1/28'],\n 'mode': 'ce',\n 'type': 'enet'\n },\n\n '3': {\n 'vlan_id': '3',\n 'name': 'VLAN0003',\n 'state': 'active',\n 'interfaces': ['Port-channel4', 'Port-channel100', 'Ethernet1/7',\n 'Ethernet1/8', 'Ethernet1/28'],\n 'mode': 'ce',\n 'type': 'enet'\n },\n },\n }\n\n showFeature = {\n 'feature':{\n 'bash-shell':\n {'instance':\n {'1':\n {'state': 'disabled', }}},\n 'bgp':\n {'instance':\n {'1':\n {'state': 'enabled', }}},\n 'eigrp':\n {'instance':\n {'1':\n {'state': 'enabled', },\n '2':\n {'state': 'enabled',\n 'running': 'no', },\n '3':\n {'state': 'enabled',\n 'running': 'no', },\n '4':\n {'state': 'enabled',\n 'running': 'no', }, }},\n 'interface-vlan':{\n 'instance':{\n '1':{\n 'state': 'enabled',\n }\n }\n },\n 'vnseg_vlan': {\n 'instance':\n {'1':\n {'state': 'enabled', }\n }\n },\n }\n }\n\n showVlanIdVnSegment = {\n 'vlans': {\n '1': {\n 'vlan_id': '1',\n 'vn_segment_id': 5010,\n },\n '2': {\n 'vlan_id': '2',\n 'vn_segment_id': 5020,\n },\n },\n }\n\n showIgmp = \\\n {\n 'global_configuration': {\n 'enabled': 'enabled',\n 'v1v2_report_suppression': 'enabled',\n 'v3_report_suppression': 'disabled',\n 'link_local_groups_suppression': 'enabled',\n 'vpc_multicast_optimization': 'disabled',\n },\n 'vlans': {\n '1': { # configuration_vlan_id\n 'ip_igmp_snooping': 'enabled',\n 'lookup_mode': 'ip',\n 'switch_querier': 'disabled',\n 'igmp_explicit_tracking': 'enabled',\n 'v2_fast_leave': 'disabled',\n 'router_ports_count': 1,\n 'groups_count': 0,\n 'vlan_vpc_function': 'enabled',\n 'active_ports': ['Po20', 'Po30'],\n 'report_flooding': 'disabled',\n 'report_flooding_interfaces': 'n/a',\n 'group_address_for_proxy_leaves': 'no',\n },\n '2': { # configuration_vlan_id\n 'ip_igmp_snooping': 'enabled',\n 'lookup_mode': 'ip',\n 'igmp_querier': {\n 'address': '10.51.1.1',\n 'version': 2,\n 'interval': 125,\n 'last_member_query_interval': 1,\n 'robustness': 2,\n },\n 'switch_querier': 'disabled',\n 'igmp_explicit_tracking': 'enabled',\n 'v2_fast_leave': 'disabled',\n 'router_ports_count': 2,\n 'groups_count': 0,\n 'vlan_vpc_function': 'enabled',\n 'active_ports': ['Po20', 'Po30'],\n 'report_flooding': 'disabled',\n 'report_flooding_interfaces': 'n/a',\n 'group_address_for_proxy_leaves': 'no',\n },\n '3': { # configuration_vlan_id\n 'ip_igmp_snooping': 'enabled',\n 'lookup_mode': 'ip',\n 'switch_querier': 'disabled',\n 'igmp_explicit_tracking': 'enabled',\n 'v2_fast_leave': 'disabled',\n 'router_ports_count': 1,\n 'groups_count': 0,\n 'vlan_vpc_function': 'enabled',\n 'active_ports': ['Po20', 'Po30'],\n 'report_flooding': 'disabled',\n 'report_flooding_interfaces': 'n/a',\n 'group_address_for_proxy_leaves': 'no',\n },\n },\n }\n vlanOpsOutput = \\\n {'vlans':{\n 'interface_vlan_enabled': True,\n 'vn_segment_vlan_based_enabled': True,\n '1':{\n 'vlan_id': '1',\n 'name': 'default',\n 'state': 'active',\n 'shutdown': False,\n 'interfaces': ['Port-channel4', 'Port-channel100', 'Ethernet1/2',\n 'Ethernet1/3', 'Ethernet1/4', 'Ethernet1/5',\n 'Ethernet1/6', 'Ethernet1/7', 'Ethernet1/8',\n 'Ethernet1/9', 'Ethernet1/10', 'Ethernet1/11',\n 'Ethernet1/12', 'Ethernet1/13', 'Ethernet1/14',\n 'Ethernet2/1','Ethernet2/2','Ethernet2/3','Ethernet2/4',\n 'Ethernet2/5','Ethernet2/6'],\n 'mode': 'ce',\n 'vn_segment_id' : 5010,\n },\n '2': {\n 'vlan_id': '2',\n 'name': 'VLAN0002',\n 'state': 'active',\n 'shutdown': False,\n 'interfaces': ['Port-channel4', 
'Port-channel100', 'Ethernet1/7',\n 'Ethernet1/8','Ethernet1/28'],\n 'mode': 'ce',\n 'vn_segment_id': 5020,\n },\n '3': {\n 'vlan_id': '3',\n 'name': 'VLAN0003',\n 'state': 'active',\n 'shutdown': False,\n 'interfaces': ['Port-channel4', 'Port-channel100', 'Ethernet1/7',\n 'Ethernet1/8', 'Ethernet1/28'],\n 'mode': 'ce',\n\n },\n 'configuration': {\n '1': {\n 'ip_igmp_snooping': 'enabled'\n },\n '2': {\n 'ip_igmp_snooping': 'enabled'\n },\n '3': {\n 'ip_igmp_snooping': 'enabled'\n },\n }\n },\n }\n\n\n showVlanOld = {'vlan_id':\n {'108':\n {'vl_mode': 'CE', 'status': 'active', 'name': 'VLAN0108', 'ports': None, 'vlan_type': 'enet'},\n '105':\n {'vl_mode': 'CE', 'status': 'active', 'name': 'VLAN0105', 'ports': None, 'vlan_type': 'enet'},\n '110':\n {'vl_mode': 'CE', 'status': 'active', 'name': 'VLAN0110', 'ports': None, 'vlan_type': 'enet'},\n '100':\n {'vl_mode': 'CE', 'status': 'active', 'name': 'VLAN0100', 'ports': None, 'vlan_type': 'enet'},\n '101':\n {'vl_mode': 'CE', 'status': 'active', 'name': 'VLAN0101', 'ports': None, 'vlan_type': 'enet'},\n '1':\n {'vl_mode': 'CE', 'status': 'active', 'name': 'default', 'ports': 'Eth3/1, Eth3/2, Eth3/3, Eth3/4, Eth3/5, Eth3/6, Eth3/7, Eth3/8, Eth3/9, Eth3/10, Eth3/11, Eth3/12, Eth3/13, Eth3/14, Eth3/15, Eth3/16, Eth3/17, Eth3/18, Eth3/19, Eth3/20, Eth3/21, Eth3/22, Eth3/23, Eth3/24, Eth3/25, Eth3/26, Eth3/27, Eth3/28, Eth3/29, Eth3/30, Eth3/31, Eth3/32, Eth3/33, Eth3/34, Eth3/35, Eth3/36, Eth3/37, Eth3/38, Eth3/39, Eth3/40, Eth3/41, Eth3/42, Eth3/43, Eth3/44, Eth3/45, Eth3/46, Eth3/47, Eth3/48', 'vlan_type': 'enet'},\n '103':\n {'vl_mode': 'CE', 'status': 'active', 'name': 'VLAN0103', 'ports': None, 'vlan_type': 'enet'},\n '102':\n {'vl_mode': 'CE', 'status': 'active', 'name': 'VLAN0102', 'ports': None, 'vlan_type': 'enet'},\n '23':\n {'vl_mode': 'CE', 'status': 'active', 'name': 'VLAN0023', 'ports': 'Eth6/24', 'vlan_type': 'enet'},\n '109':\n {'vl_mode': 'CE', 'status': 'active', 'name': 'VLAN0109', 'ports': None, 'vlan_type': 'enet'},\n '106':\n {'vl_mode': 'CE', 'status': 'active', 'name': 'VLAN0106', 'ports': None, 'vlan_type': 'enet'},\n '104':\n {'vl_mode': 'CE', 'status': 'active', 'name': 'VLAN0104', 'ports': None, 'vlan_type': 'enet'},\n '107':\n {'vl_mode': 'CE', 'status': 'active', 'name': 'VLAN0107', 'ports': None, 'vlan_type': 'enet'}\n }\n }\n\n showVlanInternalInfo = {'vlan_id':\n {'3':\n {'vlan_configuration': True},\n '8':\n {'vlan_configuration': True},\n '108':\n {'vlan_configuration': True},\n '5':\n {'vlan_configuration': True}\n }\n }\n\n showVlanAccessMap = {'access_map_id':\n {'map_id_tag':\n {'access_map_sequence':\n {'10':\n {'access_map_action_value': 'forward', 'access_map_match_protocol_value': 'foo', 'access_map_match_protocol': 'ip'}\n }\n }\n }\n }\n\n showVlanFilter = {'vlan_id':\n {'100':\n {'access_map_tag': 'map_id_tag'}\n }\n }\n\n showIpInterfaceBriefPipeVlan = {'interface':\n {'Vlan100':\n {'vlan_id':\n {'100':\n {'interface_status': 'protocol-down/link-down/admin-up', 'ip_address': '192.168.234.1'}\n }\n },\n 'Vlan101':\n {'vlan_id':\n {'101':\n {'interface_status': 'protocol-down/link-down/admin-up', 'ip_address': '192.168.151.1'}\n }\n }\n }\n }\n\n # Case without using 'vlan_id' as the structure header\n # ----------------------------------------------------\n # When we call VlanOutput.vlan_all it automatically exclude the dictionary duplicate key's values.\n vlan_all = {\n \"interface_vlan\": {\n \"Vlan100\":{\n \"vlan_id\":{\n \"100\": {\n \"ip_address\": \"192.168.234.1\"\n }\n }\n },\n \"Vlan101\":{\n 
\"vlan_id\":{\n \"101\": {\n \"ip_address\": \"192.168.151.1\"\n }\n }\n }\n },\n '108':\n {'vl_mode': 'CE', 'status': 'active', 'name': 'VLAN0108', 'members': None, 'vlan_type': 'enet', 'vlan_configuration': True},\n '105':\n {'vl_mode': 'CE', 'status': 'active', 'name': 'VLAN0105', 'members': None, 'vlan_type': 'enet'},\n '110':\n {'vl_mode': 'CE', 'status': 'active', 'name': 'VLAN0110', 'members': None, 'vlan_type': 'enet'},\n '100':\n {'access_map_sequence':\n {'10':\n {'access_map_action_value': 'forward', 'access_map_match_protocol_value': 'foo', 'access_map_match_protocol': 'ip'}\n }, 'vl_mode': 'CE', 'status': 'active', 'name': 'VLAN0100', 'members': None, 'vlan_type': 'enet'},\n '101':\n {'vl_mode': 'CE', 'status': 'active', 'name': 'VLAN0101', 'members': None, 'vlan_type': 'enet'},\n '1':\n {'vl_mode': 'CE', 'status': 'active', 'name': 'default', 'members': 'Eth3/1, Eth3/2, Eth3/3, Eth3/4, Eth3/5, Eth3/6, Eth3/7, Eth3/8, Eth3/9, Eth3/10, Eth3/11, Eth3/12, Eth3/13, Eth3/14, Eth3/15, Eth3/16, Eth3/17, Eth3/18, Eth3/19, Eth3/20, Eth3/21, Eth3/22, Eth3/23, Eth3/24, Eth3/25, Eth3/26, Eth3/27, Eth3/28, Eth3/29, Eth3/30, Eth3/31, Eth3/32, Eth3/33, Eth3/34, Eth3/35, Eth3/36, Eth3/37, Eth3/38, Eth3/39, Eth3/40, Eth3/41, Eth3/42, Eth3/43, Eth3/44, Eth3/45, Eth3/46, Eth3/47, Eth3/48', 'vlan_type': 'enet'},\n '103':\n {'vl_mode': 'CE', 'status': 'active', 'name': 'VLAN0103', 'members': None, 'vlan_type': 'enet'},\n '102':\n {'vl_mode': 'CE', 'status': 'active', 'name': 'VLAN0102', 'members': None, 'vlan_type': 'enet'},\n '23':\n {'vl_mode': 'CE', 'status': 'active', 'name': 'VLAN0023', 'members': 'Eth6/24', 'vlan_type': 'enet'},\n '109':\n {'vl_mode': 'CE', 'status': 'active', 'name': 'VLAN0109', 'members': None, 'vlan_type': 'enet'},\n '106':\n {'vl_mode': 'CE', 'status': 'active', 'name': 'VLAN0106', 'members': None, 'vlan_type': 'enet'},\n '104':\n {'vl_mode': 'CE', 'status': 'active', 'name': 'VLAN0104', 'members': None, 'vlan_type': 'enet'},\n '107':\n {'vl_mode': 'CE', 'status': 'active', 'name': 'VLAN0107', 'members': None, 'vlan_type': 'enet'},\n '3':\n {'vlan_configuration': True},\n '8':\n {'vlan_configuration': True},\n '5':\n {'vlan_configuration': True}\n }\n\n vlan_all_empty_switchport = {'1': {'members': 'Eth3/1, Eth3/2, Eth3/3, Eth3/4, Eth3/5, Eth3/6, Eth3/7, '\n 'Eth3/8, Eth3/9, Eth3/10, Eth3/11, Eth3/12, Eth3/13, '\n 'Eth3/14, Eth3/15, Eth3/16, Eth3/17, Eth3/18, Eth3/19, '\n 'Eth3/20, Eth3/21, Eth3/22, Eth3/23, Eth3/24, Eth3/25, '\n 'Eth3/26, Eth3/27, Eth3/28, Eth3/29, Eth3/30, Eth3/31, '\n 'Eth3/32, Eth3/33, Eth3/34, Eth3/35, Eth3/36, Eth3/37, '\n 'Eth3/38, Eth3/39, Eth3/40, Eth3/41, Eth3/42, Eth3/43, '\n 'Eth3/44, Eth3/45, Eth3/46, Eth3/47, Eth3/48',\n 'name': 'default',\n 'status': 'active',\n 'vl_mode': 'CE',\n 'vlan_type': 'enet'},\n '100': {\"vl_mode\": \"CE\",\n \"members\": None,\n \"access_map_sequence\": {\n \"10\": {\n \"access_map_action_value\": \"forward\",\n \"access_map_match_protocol_value\": \"foo\",\n \"access_map_match_protocol\": \"ip\"\n }\n },\n \"status\": \"active\",\n \"name\": \"VLAN0100\",\n \"vlan_type\": \"enet\"},\n '101': {'members': None,\n 'name': 'VLAN0101',\n 'status': 'active',\n 'vl_mode': 'CE',\n 'vlan_type': 'enet'},\n '102': {'members': None,\n 'name': 'VLAN0102',\n 'status': 'active',\n 'vl_mode': 'CE',\n 'vlan_type': 'enet'},\n '103': {'members': None,\n 'name': 'VLAN0103',\n 'status': 'active',\n 'vl_mode': 'CE',\n 'vlan_type': 'enet'},\n '104': {'members': None,\n 'name': 'VLAN0104',\n 'status': 'active',\n 'vl_mode': 'CE',\n 
'vlan_type': 'enet'},\n '105': {'members': None,\n 'name': 'VLAN0105',\n 'status': 'active',\n 'vl_mode': 'CE',\n 'vlan_type': 'enet'},\n '106': {'members': None,\n 'name': 'VLAN0106',\n 'status': 'active',\n 'vl_mode': 'CE',\n 'vlan_type': 'enet'},\n '107': {'members': None,\n 'name': 'VLAN0107',\n 'status': 'active',\n 'vl_mode': 'CE',\n 'vlan_type': 'enet'},\n '108': {'members': None,\n 'name': 'VLAN0108',\n 'status': 'active',\n 'vl_mode': 'CE',\n 'vlan_configuration': True,\n 'vlan_type': 'enet'},\n '109': {'members': None,\n 'name': 'VLAN0109',\n 'status': 'active',\n 'vl_mode': 'CE',\n 'vlan_type': 'enet'},\n '110': {'members': None,\n 'name': 'VLAN0110',\n 'status': 'active',\n 'vl_mode': 'CE',\n 'vlan_type': 'enet'},\n '23': {'members': 'Eth6/24',\n 'name': 'VLAN0023',\n 'status': 'active',\n 'vl_mode': 'CE',\n 'vlan_type': 'enet'},\n '3': {'vlan_configuration': True},\n '5': {'vlan_configuration': True},\n '8': {'vlan_configuration': True},\n \"interface_vlan\": {\n \"Vlan100\":{\n \"vlan_id\":{\n \"100\": {\n \"ip_address\": \"192.168.234.1\"\n }\n }\n },\n \"Vlan101\":{\n \"vlan_id\":{\n \"101\": {\n \"ip_address\": \"192.168.151.1\"\n }\n }\n }\n }\n }\n", "id": "2527173", "language": "Python", "matching_score": 3.2962794303894043, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/vlan/nxos/tests/vlan_output.py" }, { "content": "# Python\nimport unittest\n\n# Ats\nfrom ats.topology import Device\nfrom unittest.mock import Mock\n\n# genie-libs\nfrom genie.libs.ops.vlan.nxos.vlan import Vlan\nfrom genie.libs.ops.vlan.nxos.tests.vlan_output import VlanOutput\n\nfrom genie.libs.parser.nxos.show_vlan import ShowVlan, \\\n ShowVlanIdVnSegment,\\\n ShowVlanInternalInfo, \\\n ShowVlanFilter, \\\n ShowVlanAccessMap\nfrom genie.libs.parser.nxos.show_feature import ShowFeature\nfrom genie.libs.parser.nxos.show_igmp import ShowIpIgmpSnooping\n\n\nfrom genie.libs.parser.nxos.show_interface import ShowIpInterfaceBriefPipeVlan\n\n\n\nclass test_new_vlan_all(unittest.TestCase):\n\n def setUp(self):\n self.device = Device(name='aDevice')\n self.device.os = 'nxos'\n self.device.mapping={}\n self.device.mapping['cli']='cli'\n self.device.connectionmgr.connections['cli'] = '5'\n\n def test_new_vlan_full(self):\n f = Vlan(device=self.device)\n # Get 'show vlan' output\n f.maker.outputs[ShowVlan] = {'': VlanOutput.showVlan}\n f.maker.outputs[ShowFeature] = {'': VlanOutput.showFeature}\n f.maker.outputs[ShowIpIgmpSnooping] = {'': VlanOutput.showIgmp}\n f.maker.outputs[ShowVlanIdVnSegment] = {'': VlanOutput.showVlanIdVnSegment}\n self.device.execute = Mock()\n # Learn the feature\n f.learn()\n\n self.maxDiff = None\n self.assertEqual(f.info, VlanOutput.vlanOpsOutput)\n\n def test_new_vlan_selective_attribute(self):\n f = Vlan(device=self.device)\n # Get 'show vlan' output\n f.maker.outputs[ShowVlan] = {'': VlanOutput.showVlan}\n f.maker.outputs[ShowFeature] = {'': VlanOutput.showFeature}\n f.maker.outputs[ShowIpIgmpSnooping] = {'': VlanOutput.showIgmp}\n f.maker.outputs[ShowVlanIdVnSegment] = {'': VlanOutput.showVlanIdVnSegment}\n\n # Learn the feature\n f.learn()\n # Check match\n self.assertEqual('VLAN0002', f.info['vlans']['2']['name'])\n # Check does not match\n self.assertNotEqual(1, f.info['vlans']['2']['vlan_id'])\n\n\n def test_new_vlan_missing_attributes(self):\n f = Vlan(device=self.device)\n # Get 'show vlan' output\n f.maker.outputs[ShowVlan] = {'': VlanOutput.showVlan}\n f.maker.outputs[ShowFeature] = {'': VlanOutput.showFeature}\n f.maker.outputs[ShowIpIgmpSnooping] = {'': 
VlanOutput.showIgmp}\n f.maker.outputs[ShowVlanIdVnSegment] = {'': VlanOutput.showVlanIdVnSegment}\n # Learn the feature\n f.learn()\n\n with self.assertRaises(KeyError):\n interfaces = f.info['vn_segment_id']\n\n def test_new_vlan_empty_output(self):\n self.maxDiff = None\n f = Vlan(device=self.device)\n # Get outputs\n f.maker.outputs[ShowVlan] = {'': {}}\n f.maker.outputs[ShowFeature] = {'': {}}\n f.maker.outputs[ShowIpIgmpSnooping] = {'': {}}\n f.maker.outputs[ShowVlanIdVnSegment] = {'': {}}\n\n # Learn the feature\n f.learn()\n\n # Check no attribute not found\n with self.assertRaises(AttributeError):\n f.info['vlans']\n\nif __name__ == '__main__':\n unittest.main()\n\n", "id": "5353713", "language": "Python", "matching_score": 2.367521286010742, "max_stars_count": 0, "path": "pkgs/ops-pkg/src/genie/libs/ops/vlan/nxos/tests/test_vlan.py" }, { "content": "# Python\nimport unittest\n\n# ATS\nfrom ats.topology import Device\n\n# Genie\nfrom genie.libs.ops.route_policy.iosxe.route_policy import RoutePolicy\nfrom genie.libs.ops.route_policy.iosxe.tests.route_policy_output import \\\n RoutePolicyOutput\n\n# iosxe show_route_map\nfrom genie.libs.parser.iosxe.show_route_map import ShowRouteMapAll\n\n\nclass test_route_policy(unittest.TestCase):\n\n def setUp(self):\n self.device = Device(name='aDevice')\n self.device.os = 'iosxe'\n self.device.custom['abstraction'] = {'order':['os']}\n self.device.mapping={}\n\n def test_sample(self):\n\n f = RoutePolicy(device=self.device)\n\n f.maker.outputs[ShowRouteMapAll] = \\\n {'':RoutePolicyOutput.showRouteMapAll}\n\n f.learn()\n\n self.assertEqual(f.info, RoutePolicyOutput.RoutePolicy['info'])\n\n def test_missing_attributes(self):\n f = RoutePolicy(device=self.device)\n\n f.maker.outputs[ShowRouteMapAll] = \\\n {'':RoutePolicyOutput.showRouteMapAll}\n\n f.learn()\n with self.assertRaises(KeyError):\n vlan_access_map_value=(f.info['test']['statements']\\\n ['20']['actions']['clause']['True'])\n\n def test_ignored(self):\n\n f = RoutePolicy(device=self.device)\n g = RoutePolicy(device=self.device)\n\n\n f.maker.outputs[ShowRouteMapAll] = \\\n {'':RoutePolicyOutput.showRouteMapAll}\n\n g.maker.outputs[ShowRouteMapAll] = \\\n {'':RoutePolicyOutput.showRouteMapAll}\n\n f.learn()\n g.learn()\n\n f.s = 2\n\n self.assertNotEqual(f,g)\n # Verify diff now\n diff = f.diff(g)\n sorted_diff = str(diff)\n sorted_result = ('+s: 2')\n self.assertEqual(sorted_diff,sorted_result)\n\n def test_selective_attribute(self):\n\n f = RoutePolicy(device=self.device, attributes=['info[(.*)][statements]'])\n\n f.maker.outputs[ShowRouteMapAll] = \\\n {'':RoutePolicyOutput.showRouteMapAll}\n\n f.learn()\n\n self.assertIn('10', f.info['test']['statements'])\n self.assertNotIn('20', f.info['test']['statements'])\n\nif __name__ == '__main__':\n unittest.main()\n", "id": "1887395", "language": "Python", "matching_score": 2.3806917667388916, "max_stars_count": 0, "path": "pkgs/ops-pkg/src/genie/libs/ops/route_policy/iosxe/tests/test_route_policy.py" }, { "content": "'''\nRoute_policy Genie Ops Object for IOS - CLI.\n'''\nfrom ..iosxe.route_policy import RoutePolicy as RoutePolicyXE\n\nclass RoutePolicy(RoutePolicyXE):\n pass", "id": "3813881", "language": "Python", "matching_score": 1.5427242517471313, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/route_policy/ios/route_policy.py" }, { "content": "from .route_policy import *\n", "id": "12367843", "language": "Python", "matching_score": 0.3913397490978241, "max_stars_count": 94, "path": 
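The ops unit tests above all follow one offline recipe: key maker.outputs with every parser class the ops object would normally run, hand each one canned parser output, then learn() never touches a device. A condensed sketch of that recipe, assuming the same fixtures and device setup the tests use (empty dicts stand in for "device returned nothing"):

from ats.topology import Device
from genie.libs.ops.vlan.nxos.vlan import Vlan
from genie.libs.parser.nxos.show_vlan import ShowVlan, ShowVlanIdVnSegment
from genie.libs.parser.nxos.show_feature import ShowFeature
from genie.libs.parser.nxos.show_igmp import ShowIpIgmpSnooping

device = Device(name='aDevice')
device.os = 'nxos'
device.mapping = {'cli': 'cli'}
device.connectionmgr.connections['cli'] = '5'

f = Vlan(device=device)
# Stub every parser learn() would invoke; keys are the parser classes
for parser_cls in (ShowVlan, ShowFeature, ShowIpIgmpSnooping,
                   ShowVlanIdVnSegment):
    f.maker.outputs[parser_cls] = {'': {}}
f.learn()  # runs entirely against the stubbed outputs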
"pkgs/conf-pkg/src/genie/libs/conf/route_policy/__init__.py" }, { "content": "__all__ = (\n 'Redistribution',\n)\n\nfrom genie.decorator import managedattribute\n\nfrom .routing import Routing\n\n\ndef _defer_route_policy_type(value):\n # Avoid the cyclic dependency by deferring the route_policy type\n # transformations\n from genie.libs.conf.route_policy import RoutePolicy\n transforms = (\n None,\n managedattribute.test_isinstance(RoutePolicy))\n # Cache for speed\n Redistribution.route_policy = Redistribution.route_policy.copy(\n type=transforms)\n return managedattribute._transform(value, transforms)\n\n\nclass Redistribution(object):\n\n protocol = managedattribute(\n name='protocol',\n type=(\n managedattribute.test_in((\n 'connected',\n 'subscriber',\n )),\n managedattribute.test_isinstance(Routing)))\n\n metric = managedattribute(\n name='metric',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n route_policy = managedattribute(\n name='route_policy',\n default=None,\n type=_defer_route_policy_type)\n\n def __init__(self, protocol, **kwargs):\n\n if not kwargs and isinstance(protocol, Redistribution):\n # copy constructor\n kwargs = vars(protocol)\n else:\n self.protocol = protocol\n\n for k, v in kwargs.items():\n setattr(self, k, v)\n\n", "id": "9247702", "language": "Python", "matching_score": 1.8080205917358398, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/base/redistribution_attr.py" }, { "content": "\n__all__ = (\n 'RoutePolicy',\n)\n\nimport operator\nimport fnmatch\n\nfrom enum import Enum\n\nfrom ipaddress import IPv4Address, IPv6Address\n\nfrom genie.decorator import managedattribute\nfrom genie.conf.base import DeviceFeature, Interface\nfrom genie.conf.base.attributes import SubAttributes, SubAttributesDict,\\\n AttributesInheriter, AttributesHelper, KeyedSubAttributes, \\\n DeviceSubAttributes\n\nfrom genie.libs.conf.base import ip_address, ip_network\nfrom genie.libs.conf.community_set import CommunitySet\n\n\ndef _identity(value):\n return value\n\n\nclass RoutePolicyAttributes(object):\n\n custom_config_cli = managedattribute(\n name='custom_config_cli',\n finit=str,\n type=managedattribute.test_istype(str))\n\n conditions = managedattribute(\n name='conditions',\n finit=list,\n # Cyclic dependency -- set later\n #type=managedattribute.test_list_of((\n # managedattribute.test_isinstance(RoutePolicyCondition),\n #)),\n )\n\n set_label_index = managedattribute(\n name='label_index',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc='The \"set label-index\" option')\n\n set_community = managedattribute(\n name='set_community',\n default=None,\n type=(None, managedattribute.test_istype(CommunitySet),\n managedattribute.test_istype(list)),\n doc='The \"set community\" option')\n\n set_nexthop = managedattribute(\n name='nexthop',\n default=None,\n type=(None, ip_address),\n doc='The \"set next-hop\" option')\n\n pass_on = managedattribute(\n name='pass_on',\n default=None,\n type=(None, managedattribute.test_istype(bool)),\n doc='The \"pass\" option: Pass this route for further processing')\n\n drop_on = managedattribute(\n name='drop_on',\n default=None,\n type=(None, managedattribute.test_istype(bool)),\n doc='The \"drop\" option: Reject this route with no further processing')\n\n # ==== Statement section ===================\n policy_definition = managedattribute(\n name='policy_definition',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc='The route-policy name')\n\n statement_name = 
managedattribute(\n name='statement_name',\n default=None,\n type=(None,\n managedattribute.test_istype(str),\n managedattribute.test_istype(int)),\n doc='The route-policy statement name')\n\n description = managedattribute(\n name='description',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n class ROUTE_DISPOSITION(Enum):\n permit = 'permit'\n deny = 'deny'\n\n route_disposition = managedattribute(\n name='route_disposition',\n default='permit',\n type=(None, ROUTE_DISPOSITION),\n doc='Route Disposition Enum value')\n\n match_med_eq = managedattribute(\n name='match_med_eq',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n match_nexthop_in = managedattribute(\n name='match_nexthop_in',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n match_nexthop_in_v6 = managedattribute(\n name='match_nexthop_in_v6',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n match_local_pref_eq = managedattribute(\n name='match_local_pref_eq',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n class MATCH_ROUTE_TYPE(Enum):\n internal = 'internal'\n external = 'external'\n\n match_route_type = managedattribute(\n name='match_route_type',\n default=None,\n type=(None, MATCH_ROUTE_TYPE))\n\n match_community_list = managedattribute(\n name='match_community_list',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n match_ext_community_list = managedattribute(\n name='match_ext_community_list',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n # ==== XR Specific ===================\n\n class MATCH_ORIGIN_EQ(Enum):\n igp = 'igp'\n egp = 'egp'\n incomplete = 'incomplete'\n\n match_origin_eq = managedattribute(\n name='match_origin_eq',\n default=None,\n type=(None, MATCH_ORIGIN_EQ))\n\n class MATCH_EXT_COMMUNITY_LIST_TYPE(Enum):\n soo = 'soo'\n rt = 'rt'\n\n match_ext_community_list_type = managedattribute(\n name='match_ext_community_list_type ',\n default=None,\n type=(None, MATCH_EXT_COMMUNITY_LIST_TYPE))\n\n match_as_path_length = managedattribute(\n name='match_as_path_length',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n class MATCH_AS_PATH_LENGTH_OPER(Enum):\n eq = 'eq'\n ge = 'ge'\n le = 'le'\n\n match_as_path_length_oper = managedattribute(\n name='match_as_path_length_oper',\n default=None,\n type=(None, MATCH_AS_PATH_LENGTH_OPER))\n\n area_eq = managedattribute(\n name='area_eq',\n default=None,\n type=(None, managedattribute.test_istype(int), IPv4Address))\n\n class SET_EXT_COMMUNITY_DELETE_TYPE(Enum):\n soo = 'soo'\n rt = 'rt'\n\n set_ext_community_delete_type = managedattribute(\n name='set_ext_community_delete_type ',\n default=None,\n type=(None, SET_EXT_COMMUNITY_DELETE_TYPE))\n\n class ACTIONS(Enum):\n rppass = 'pass'\n done = 'done'\n drop = 'drop'\n\n actions = managedattribute(\n name='actions',\n default=None,\n type=(None, ACTIONS))\n\n # =======================\n\n match_as_path_list = managedattribute(\n name='match_as_path_list',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n class MATCH_LEVEL_EQ(Enum):\n level_1 = 'level-1'\n level_2 = 'level-2'\n level_1_2 = 'level-1-2'\n\n match_level_eq = managedattribute(\n name='match_level_eq',\n default=None,\n type=(None, MATCH_LEVEL_EQ))\n\n match_interface = managedattribute(\n name='match_interface',\n default=None,\n type=(None, managedattribute.test_istype(str), Interface))\n\n match_prefix_list = managedattribute(\n name='match_prefix_list',\n default=None,\n 
type=(None, managedattribute.test_istype(str)))\n\n match_prefix_list_v6 = managedattribute(\n name='match_prefix_list_v6',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n match_tag_list = managedattribute(\n name='match_tag_list',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n class SET_ROUTE_ORIGIN(Enum):\n igp = 'igp'\n egp = 'egp'\n incomplete = 'incomplete'\n\n set_route_origin = managedattribute(\n name='set_route_origin',\n default=None,\n type=(None, SET_ROUTE_ORIGIN))\n\n set_local_pref = managedattribute(\n name='set_local_pref',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n set_next_hop = managedattribute(\n name='set_next_hop',\n default=None,\n type=(None, managedattribute.test_istype(str), IPv4Address))\n\n set_next_hop_v6 = managedattribute(\n name='set_next_hop_v6',\n default=None,\n type=(None, managedattribute.test_istype(str), IPv6Address))\n\n set_next_hop_self = managedattribute(\n name='set_next_hop_self',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n set_med = managedattribute(\n name='set_med',\n default=None,\n type=(None,\n managedattribute.test_istype(int),\n managedattribute.test_istype(str)))\n\n set_as_path_prepend = managedattribute(\n name='set_as_path_prepend',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n set_as_path_prepend_n = managedattribute(\n name='set_as_path_prepend_n',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n set_community_no_export = managedattribute(\n name='set_community_no_export',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n set_community_no_advertise = managedattribute(\n name='set_community_no_advertise',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n set_community_additive = managedattribute(\n name='set_community_additive',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n set_community_delete = managedattribute(\n name='set_community_delete',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n set_ext_community_rt = managedattribute(\n name='set_ext_community_rt',\n default=None,\n type=(None, managedattribute.test_istype(list)))\n\n set_ext_community_rt_additive = managedattribute(\n name='set_ext_community_rt_additive',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n set_ext_community_soo = managedattribute(\n name='set_ext_community_soo',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n set_ext_community_soo_additive = managedattribute(\n name='set_ext_community_soo_additive',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n set_ext_community_vpn = managedattribute(\n name='set_ext_community_vpn',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n set_ext_community_vpn_additive = managedattribute(\n name='set_ext_community_vpn_additive',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n set_ext_community_delete = managedattribute(\n name='set_ext_community_delete',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n class SET_LEVEL(Enum):\n level_1 = 'level-1'\n level_2 = 'level-2'\n level_1_2 = 'level-1-2'\n\n set_level = managedattribute(\n name='set_level',\n default=None,\n type=(None, SET_LEVEL))\n\n class SET_METRIC_TYPE(Enum):\n internal = 'internal'\n external = 'external'\n\n set_metric_type = managedattribute(\n name='set_metric_type',\n default=None,\n type=(None, 
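The Enum-valued attributes declared here (ROUTE_DISPOSITION, MATCH_ROUTE_TYPE, SET_LEVEL, SET_METRIC_TYPE, ...) are rendered into CLI through their .value, which is why the device builders later format templates like '{route_disposition.value}' and '{set_level.value}'. A tiny self-contained illustration of that rendering step:

from enum import Enum


class ROUTE_DISPOSITION(Enum):
    permit = 'permit'
    deny = 'deny'


line = 'route-map {name} {rd.value} {seq}'.format(
    name='rpl1', rd=ROUTE_DISPOSITION.permit, seq='10')
assert line == 'route-map rpl1 permit 10'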
SET_METRIC_TYPE))\n\n set_metric = managedattribute(\n name='set_metric',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n class SET_OSPF_METRIC_TYPE(Enum):\n type_1 = 'type-1'\n type_2 = 'type-2'\n\n set_ospf_metric_type = managedattribute(\n name='set_ospf_metric_type',\n default=None,\n type=(None, SET_OSPF_METRIC_TYPE))\n\n set_ospf_metric = managedattribute(\n name='set_ospf_metric',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n set_tag = managedattribute(\n name='set_tag',\n default=None,\n type=(None, managedattribute.test_istype(int), IPv4Address))\n\n set_weight = managedattribute(\n name='set_weight',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n def rpl_apply_attributes(self, obj, *, setattr=setattr, getattr=getattr):\n '''Apply RoutePolicyAttributes rules to an object.\n\n It is best to apply device-specific rules from a RoutePolicy instead::\n\n rpl.device_attr[device].rpl_apply_attributes(obj, ...)\n\n Returns:\n True: pass -- explicit\n False: drop\n None: undetermined\n '''\n implicit_pass_on = None\n if self.custom_config_cli:\n setattr(obj, 'custom_config_cli', self.custom_config_cli)\n implicit_pass_on = True\n for cond in self.conditions:\n if cond.rpl_test_condition(obj, getattr=getattr):\n sub_pass_on = cond.if_attr.rpl_apply_attributes(obj,\n setattr=setattr, getattr=getattr)\n if sub_pass_on is not None:\n if sub_pass_on:\n implicit_pass_on = True\n else:\n return False\n else:\n sub_pass_on = cond.else_attr.rpl_apply_attributes(obj,\n setattr=setattr, getattr=getattr)\n if sub_pass_on is not None:\n if sub_pass_on:\n implicit_pass_on = True\n else:\n return False\n if self.set_nexthop is not None:\n setattr(obj, 'nexthop', self.set_nexthop)\n implicit_pass_on = True\n if self.set_label_index is not None:\n setattr(obj, 'label_index', self.set_label_index)\n implicit_pass_on = True\n if self.set_community is not None:\n setattr(obj, 'community', self.set_community)\n implicit_pass_on = True\n if self.pass_on:\n assert not self.drop_on\n setattr(obj, 'pass', True)\n return True\n elif self.drop_on:\n setattr(obj, 'drop', True)\n return False\n else:\n return implicit_pass_on\n\n\nclass RoutePolicyCondition(object):\n\n def op_contains(a, vb):\n try:\n vb = ip_address(vb)\n except ValueError:\n pass\n else:\n # b-ip in (a-ip|networks...)\n return any(vb in ip_network(va) for va in a)\n try:\n vb = ip_network(vb)\n except ValueError:\n pass\n else:\n # b-net in (a-ip|networks...)\n return any(vb == ip_network(va) for va in a)\n return vb in a\n\n def op_matches_any(a, vb):\n if isinstance(a, CommunitySet):\n a = a.communities\n sb = str(vb)\n return any(\n fnmatch.fnmatchcase(sb, a)\n if isinstance(a , str)\n else vb == a)\n\n op = managedattribute(\n name='op',\n type=managedattribute.test_in((\n op_contains,\n op_matches_any,\n )))\n\n operands = managedattribute(\n name='operands',\n type=managedattribute.test_tuple_of(_identity))\n\n if_attr = managedattribute(\n name='if_attr',\n finit=RoutePolicyAttributes,\n type=managedattribute.test_istype(RoutePolicyAttributes))\n\n else_attr = managedattribute(\n name='else_attr',\n finit=RoutePolicyAttributes,\n type=managedattribute.test_istype(RoutePolicyAttributes))\n\n def __init__(self, op, *operands):\n self.op = op\n self.operands = operands\n super().__init__()\n\n def rpl_test_condition(self, obj, *, getattr=getattr):\n if self.op in (\n RoutePolicyCondition.op_contains,\n RoutePolicyCondition.op_matches_any,\n ):\n a, b = self.operands\n return 
self.op(a, getattr(obj, b))\n        else:\n            raise NotImplementedError(self.op)\n\n# Cyclic dependency -- set later\nRoutePolicyAttributes.conditions = RoutePolicyAttributes.conditions.copy(\n    type=managedattribute.test_list_of((\n        managedattribute.test_isinstance(RoutePolicyCondition),\n    )),\n)\n\nclass RoutePolicyMixin(DeviceFeature):\n\n    pass\n\n\nclass RoutePolicy(RoutePolicyAttributes, RoutePolicyMixin, DeviceFeature):\n\n    Condition = RoutePolicyCondition\n\n    name = managedattribute(\n        name='name',\n        type=managedattribute.test_istype(str))\n\n    def rpl_apply_attributes(self, obj, **kwargs):\n        '''Apply RoutePolicyAttributes rules to an object.\n\n        It is best to apply device-specific rules using::\n\n            rpl.device_attr[device].rpl_apply_attributes(obj, ...)\n\n        Returns:\n            True: pass -- implicit or explicit\n            False: drop\n        '''\n        pass_on = super().rpl_apply_attributes(obj, **kwargs)\n        return False if pass_on is None else pass_on\n\n    custom_unconfig_cli = managedattribute(\n        name='custom_unconfig_cli',\n        finit=str,\n        type=managedattribute.test_istype(str))\n\n    class DeviceAttributes(DeviceSubAttributes):\n\n        def rpl_apply_attributes(self, obj, **kwargs):\n            '''Apply device-specific RoutePolicyAttributes rules to an object.\n\n            Returns:\n                True: pass -- implicit or explicit\n                False: drop\n            '''\n            pass_on = RoutePolicyAttributes.rpl_apply_attributes(self, obj, **kwargs)\n            return False if pass_on is None else pass_on\n\n        class StatementAttributes(KeyedSubAttributes):\n            def __init__(self, parent, key):\n                self.statement_name = key\n                super().__init__(parent)\n\n        statement_attr = managedattribute(\n            name='statement_attr',\n            read_only=True,\n            doc=StatementAttributes.__doc__)\n\n        @statement_attr.initter\n        def statement_attr(self):\n            return SubAttributesDict(self.StatementAttributes, parent=self)\n\n    device_attr = managedattribute(\n        name='device_attr',\n        read_only=True,\n        doc=DeviceAttributes.__doc__)\n\n    @device_attr.initter\n    def device_attr(self):\n        return SubAttributesDict(self.DeviceAttributes, parent=self)\n\n    def __init__(self, name=None, policy_definition=None, *args, **kwargs):\n        if name:\n            self.name = name\n        if policy_definition:\n            self.policy_definition = policy_definition\n        # Make sure at least one was populated:\n        if not name and not policy_definition:\n            raise TypeError(\"__init__() requires either 'name' or \"\n                            \"'policy_definition' to be provided\")\n        if 'route_disposition' in kwargs:\n            self.route_disposition = kwargs['route_disposition']\n        super().__init__(*args, **kwargs)\n\n    def build_config(self, devices=None, apply=True, attributes=None,\n                     **kwargs):\n        cfgs = {}\n        assert not kwargs, kwargs\n        attributes = AttributesHelper(self, attributes)\n\n        if devices is None:\n            devices = self.devices\n        devices = set(devices)\n\n        for key, sub, attributes2 in attributes.mapping_items(\n                'device_attr',\n                keys=devices, sort=True):\n            cfgs[key] = sub.build_config(apply=False, attributes=attributes2)\n\n        cfgs = {key: value for key, value in cfgs.items() if value}\n        if apply:\n            self.testbed.config_on_devices(cfgs, fail_invalid=True)\n        else:\n            return cfgs\n\n    def build_unconfig(self, devices=None, apply=True, attributes=None,\n                       **kwargs):\n        cfgs = {}\n        assert not kwargs, kwargs\n        attributes = AttributesHelper(self, attributes)\n\n        if devices is None:\n            devices = self.devices\n        devices = set(devices)\n\n        for key, sub, attributes2 in attributes.mapping_items(\n                'device_attr',\n                keys=devices, sort=True):\n            cfgs[key] = sub.build_unconfig(apply=False, attributes=attributes2)\n\n        cfgs = {key: value for key, 
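rpl_apply_attributes is deliberately tri-state at the RoutePolicyAttributes level (True = explicit or implicit pass, False = drop, None = undetermined), and the RoutePolicy and DeviceAttributes wrappers above collapse None to False, so an empty policy drops by default. A small sketch of that contract under the same testbed setup the unit tests use; the Route stand-in class is illustrative:

from genie.conf import Genie
from genie.conf.base import Testbed
from genie.libs.conf.route_policy import RoutePolicy

Genie.testbed = Testbed()


class Route(object):
    '''Bare stand-in for an object a policy gets applied to'''


rpl = RoutePolicy(name='demo')
route = Route()

# Empty policy: attributes-level result is None, collapsed to drop
assert rpl.rpl_apply_attributes(route) is False

rpl.pass_on = True  # explicit "pass"
assert rpl.rpl_apply_attributes(route) is True
assert getattr(route, 'pass') is True  # the 'pass' flag set on the object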
value in cfgs.items() if value}\n if apply:\n self.testbed.config_on_devices(cfgs, fail_invalid=True)\n else:\n return cfgs\n\n", "id": "8559249", "language": "Python", "matching_score": 3.919853448867798, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/route_policy/route_policy.py" }, { "content": "# import python\nimport unittest\nimport unittest.mock\nfrom unittest.mock import Mock\n\n# import genie\nfrom genie.conf.tests import TestCase\nfrom genie.conf import Genie\nfrom genie.conf.base import Testbed, Device, Interface\nfrom genie.libs.conf.route_policy import RoutePolicy\n\n\nclass test_route_policy(TestCase):\n\n def test_basic_cfg(self):\n\n Genie.testbed = testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxe')\n dev2 = Device(testbed=testbed, name='PE2', os='iosxe')\n\n rpl1 = RoutePolicy(policy_definition='rpl1',\n route_disposition='permit')\n dev1.add_feature(rpl1)\n\n rpl1.device_attr[dev1].statement_attr['10']\n rpl1.device_attr[dev1].statement_attr['10'].match_med_eq = 5\n rpl1.device_attr[dev1].statement_attr['10'].description = 'test'\n rpl1.device_attr[dev1].statement_attr['10'].match_nexthop_in = '10'\n rpl1.device_attr[dev1].statement_attr['10'].match_nexthop_in_v6 = '30'\n rpl1.device_attr[dev1].statement_attr['10'].match_local_pref_eq = 40\n rpl1.device_attr[dev1].statement_attr['10'].match_route_type = 'internal'\n rpl1.device_attr[dev1].statement_attr['10'].match_community_list = \\\n 'community-list1'\n rpl1.device_attr[dev1].statement_attr['10'].match_ext_community_list = \\\n 'community-list2'\n rpl1.device_attr[dev1].statement_attr['10'].match_as_path_list = \\\n '100'\n rpl1.device_attr[dev1].statement_attr['10'].match_level_eq = \\\n 'level-1-2'\n rpl1.device_attr[dev1].statement_attr['10'].match_interface = \\\n 'GigabitEthernet3/0/1'\n rpl1.device_attr[dev1].statement_attr['10'].match_prefix_list = \\\n 'prefixlist1'\n rpl1.device_attr[dev1].statement_attr['10'].match_prefix_list_v6 = \\\n 'prefixlist1v6'\n rpl1.device_attr[dev1].statement_attr['10'].match_tag_list = \\\n 'match_tag_list'\n rpl1.device_attr[dev1].statement_attr['10'].set_route_origin = \\\n 'igp'\n rpl1.device_attr[dev1].statement_attr['10'].set_local_pref = \\\n 100\n rpl1.device_attr[dev1].statement_attr['10'].set_next_hop = \\\n '1.1.1.1'\n rpl1.device_attr[dev1].statement_attr['10'].set_next_hop_v6 = \\\n '2001:db8:1::1'\n rpl1.device_attr[dev1].statement_attr['10'].set_next_hop_self = \\\n True\n rpl1.device_attr[dev1].statement_attr['10'].set_med = \\\n '100'\n rpl1.device_attr[dev1].statement_attr['10'].set_as_path_prepend = \\\n '100'\n rpl1.device_attr[dev1].statement_attr['10'].set_as_path_prepend_n = \\\n 3\n rpl1.device_attr[dev1].statement_attr['10'].set_community = \\\n ['100:100', '200:200']\n rpl1.device_attr[dev1].statement_attr['10'].set_community_no_export = \\\n True\n rpl1.device_attr[dev1].statement_attr['10'].set_community_no_advertise = \\\n True\n rpl1.device_attr[dev1].statement_attr['10'].set_community_additive = \\\n True\n rpl1.device_attr[dev1].statement_attr['10'].set_community_delete = \\\n 'communit_list'\n rpl1.device_attr[dev1].statement_attr['10'].set_ext_community_rt = \\\n ['100:10']\n rpl1.device_attr[dev1].statement_attr['10'].set_ext_community_rt_additive = \\\n True\n rpl1.device_attr[dev1].statement_attr['10'].set_ext_community_soo = \\\n '100:10'\n rpl1.device_attr[dev1].statement_attr['10'].set_ext_community_vpn = \\\n '100:10'\n rpl1.device_attr[dev1].statement_attr['10'].set_ext_community_delete = \\\n 
'community_list'\n rpl1.device_attr[dev1].statement_attr['10'].set_level = \\\n 'level-1'\n rpl1.device_attr[dev1].statement_attr['10'].set_metric_type = \\\n 'internal'\n rpl1.device_attr[dev1].statement_attr['10'].set_metric = \\\n 30\n rpl1.device_attr[dev1].statement_attr['10'].set_ospf_metric_type = \\\n 'type-1'\n rpl1.device_attr[dev1].statement_attr['10'].set_ospf_metric = \\\n 200\n rpl1.device_attr[dev1].statement_attr['10'].set_tag = \\\n 111\n rpl1.device_attr[dev1].statement_attr['10'].set_weight = \\\n 100\n\n self.maxDiff = None\n cfgs = rpl1.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join(\n ['route-map rpl1 permit 10',\n ' description test',\n ' match metric 5',\n ' match ip next-hop prefix-list 10',\n ' match ipv6 next-hop prefix-list 30',\n ' match local-preference 40',\n ' match route-type internal',\n ' match community community-list1',\n ' match extcommunity community-list2',\n ' match as-path 100',\n ' match route-type level-1',\n ' match route-type level-2',\n ' match interface GigabitEthernet3/0/1',\n ' match ip address prefix-list prefixlist1',\n ' match ipv6 address prefix-list prefixlist1v6',\n ' match tag list match_tag_list',\n ' set origin igp',\n ' set local-preference 100',\n ' set ip next-hop 1.1.1.1',\n ' set ipv6 next-hop 2001:db8:1::1',\n ' set ip next-hop self',\n ' set metric 100',\n \" set community 100:100 200:200 no-export no-advertise additive\",\n ' set comm-list communit_list delete',\n \" set extcommunity rt 100:10 additive\",\n ' set extcommunity soo 100:10',\n ' set extcommunity vpn-distinguisher 100:10',\n ' set extcomm-list community_list delete',\n ' set level level-1',\n ' set metric-type internal',\n ' set metric 30',\n ' set metric-type type-1',\n ' set metric 200',\n ' set tag 111',\n ' set weight 100',\n ' exit'\n ]))\n\n rpl2 = RoutePolicy(policy_definition='rpl2',\n route_disposition='deny')\n dev2.add_feature(rpl2)\n\n rpl2.device_attr[dev2].statement_attr['20']\n rpl2.device_attr[dev2].statement_attr['20'].set_metric_type = \\\n 'internal'\n\n cfgs = rpl2.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev2.name])\n self.assertMultiLineEqual(str(cfgs[dev2.name]), '\\n'.join(\n ['route-map rpl2 deny 20',\n ' set metric-type internal',\n ' exit'\n ]))\n\n uncfg1 = rpl1.build_unconfig(apply=False)\n self.assertCountEqual(uncfg1.keys(), [dev1.name])\n self.assertMultiLineEqual(\n str(uncfg1[dev1.name]),\n '\\n'.join([\n 'no route-map rpl1 permit 10'\n ]))\n\n partial_uncfg1 = rpl1.build_unconfig(\n apply=False,\n attributes={'device_attr':{'*':{'statement_attr':\\\n {'*':\"match_med_eq\"}}}})\n\n self.assertCountEqual(partial_uncfg1.keys(), [dev1.name])\n self.assertMultiLineEqual(\n str(partial_uncfg1[dev1.name]),\n '\\n'.join([\n 'route-map rpl1 permit 10',\n ' no match metric 5',\n ' exit'\n ]))\n\n\nif __name__ == '__main__':\n unittest.main()\n\n", "id": "5612580", "language": "Python", "matching_score": 3.7857916355133057, "max_stars_count": 0, "path": "pkgs/conf-pkg/src/genie/libs/conf/route_policy/iosxe/tests/test_route_policy.py" }, { "content": "''' \nRoutePolicy Genie Ops Object Outputs for NXOS.\n'''\n\n\nclass RoutePolicyOutput(object):\n\n showRouteMap = {'BGPPeers': \n {'description': 'test15',\n 'statements':\n {'10': \n {'actions': \n {'clause': True,\n 'route_disposition': 'permit'},\n 'conditions': {'match_as_number_list': 'list1,list2'}\n }\n }\n },\n 'bgp-to-rib':\n {'statements': \n {'10': \n {'actions': \n 
{'clause': True,\n 'route_disposition': 'permit',\n 'set_distance': 10},\n 'conditions': \n {'match_community_list': '100'}\n }\n }\n },\n 'eigrp-distance': \n {'statements': \n {'10': \n {'actions': \n {'clause': True,\n 'route_disposition': 'permit',\n 'set_distance': 10},\n 'conditions': \n {'match_nexthop_in_v6': 'ipv6-nexthop'}\n }\n }\n },\n 'eigrp-filter': \n {'statements': \n {'10': \n {'actions': \n {'clause': True,\n 'route_disposition': 'permit'},\n 'conditions': \n {'match_nexthop_in': 'ipv4-nexthop'}\n }\n }\n },\n 'foo': \n {'statements': \n {'10': \n {'actions': \n {'clause': True,\n 'route_disposition': 'permit',\n 'set_distance': 150},\n 'conditions': \n {'match_route_type': 'inter-area'}\n },\n '20': \n {'actions': \n {'clause': True,\n 'route_disposition': 'permit',\n 'set_distance': 200},\n 'conditions': \n {'match_route_type': 'external'}\n }\n }\n },\n 'isis-distance': \n {'statements': \n {'10': \n {'actions': \n {'clause': True,\n 'route_disposition': 'permit',\n 'set_distance': 10},\n 'conditions': \n {'match_interface': 'Ethernet1/1'}\n }\n }\n },\n 'isis-filter': \n {'statements': \n {'10': \n {'actions': \n {'clause': True,\n 'route_disposition': 'permit'},\n 'conditions': \n {'match_interface': 'Ethernet1/1'}\n }\n }\n },\n 'metric-range': \n {'statements': \n {'10': \n {'actions': \n {'clause': True,\n 'route_disposition': 'permit',\n 'set_tag': 5},\n 'conditions': {'match_med_eq': 50}\n }\n }\n },\n 'pbr-sample': \n {'statements': \n {'10': \n {'actions': \n {'clause': True,\n 'route_disposition': 'permit',\n 'set_community': '100:1',\n 'set_community_additive': True,\n 'set_community_no_advertise': True,\n 'set_community_no_export': True,\n 'set_next_hop': '192.168.1.1'},\n 'conditions': {}\n }\n }\n },\n 'setrrnh': \n {'statements': \n {'10': \n {'actions': \n {'clause': True,\n 'route_disposition': 'permit',\n 'set_community': '100:1',\n 'set_community_additive': True,\n 'set_community_no_advertise': True,\n 'set_community_no_export': True},\n 'conditions': {}\n }\n }\n },\n 'setrrnhv6': \n {'statements': \n {'10': \n {'actions': \n {'clause': True,\n 'route_disposition': 'permit',\n 'set_community': '100:1',\n 'set_community_additive': True,\n 'set_community_no_advertise': True,\n 'set_community_no_export': True,\n 'set_ext_community_rt': '100:10',\n 'set_ext_community_rt_additive': True},\n 'conditions': {}\n }\n }\n },\n 'test': \n {'statements': \n {'10': \n {'actions': \n {'clause': True,\n 'route_disposition': 'permit',\n 'set_as_path_group': ['10',\n '10',\n '10'],\n 'set_as_path_prepend': '10',\n 'set_as_path_prepend_repeat_n': 3,\n 'set_community': '100:1',\n 'set_community_additive': True,\n 'set_community_delete': 'test',\n 'set_community_no_advertise': True,\n 'set_community_no_export': True,\n 'set_ext_community_delete': 'cisco',\n 'set_ext_community_rt': '100:10',\n 'set_level': 'level-1',\n 'set_local_pref': 20,\n 'set_med': 100,\n 'set_metric_type': 'external',\n 'set_next_hop': '10.64.4.4',\n 'set_next_hop_v6': '2001:db8:1::1',\n 'set_route_origin': 'igp',\n 'set_tag': 30,\n 'set_weight': 40},\n 'conditions': \n {'match_as_path_list': 'aspathlist1',\n 'match_community_list': 'test3 '\n 'ipv6 '\n 'next-hop',\n 'match_ext_community_list': 'testing',\n 'match_interface': 'Ethernet2/2',\n 'match_med_eq': 20,\n 'match_nexthop_in': 'test',\n 'match_nexthop_in_v6': 'test2',\n 'match_prefix_list': 'test-test',\n 'match_prefix_list_v6': 'test-test',\n 'match_route_type': 'level-1 '\n 'level-2'}\n }\n }\n }\n }\n\n RoutePolicy = {'info':\n 
{'BGPPeers':\n {'description': 'test15',\n 'statements':\n {'10': \n {'actions': \n {'route_disposition': 'permit'},\n 'conditions': {'match_as_number_list': 'list1,list2'}\n }\n }\n },\n 'bgp-to-rib': \n {'statements':\n {'10': \n {'actions': \n {'route_disposition': 'permit',\n 'set_distance': 10},\n 'conditions': \n {'match_community_list': '100'}\n }\n }\n },\n 'eigrp-distance': \n {'statements':\n {'10': \n {'actions': \n {'route_disposition': 'permit',\n 'set_distance': 10},\n 'conditions': \n {'match_nexthop_in_v6': 'ipv6-nexthop'}\n }\n }\n },\n 'eigrp-filter': \n {'statements':\n {'10': \n {'actions': \n {'route_disposition': 'permit'},\n 'conditions': \n {'match_nexthop_in': 'ipv4-nexthop'}\n }\n }\n },\n 'foo': \n {'statements':\n {'10': \n {'actions': \n {'route_disposition': 'permit',\n 'set_distance': 150},\n 'conditions': \n {'match_route_type': 'inter-area'}\n },\n '20': \n {'actions': \n {'route_disposition': 'permit',\n 'set_distance': 200},\n 'conditions': \n {'match_route_type': 'external'}\n }\n }\n },\n 'isis-distance': \n {'statements':\n {'10': \n {'actions': \n {'route_disposition': 'permit',\n 'set_distance': 10},\n 'conditions': \n {'match_interface': 'Ethernet1/1'}\n }\n }\n },\n 'isis-filter': \n {'statements':\n {'10': \n {'actions': \n {'route_disposition': 'permit'},\n 'conditions': \n {'match_interface': 'Ethernet1/1'}\n }\n }\n },\n 'metric-range': \n {'statements':\n {'10': \n {'actions': \n {'route_disposition': 'permit',\n 'set_tag': 5},\n 'conditions': {'match_med_eq': 50}\n }\n }\n },\n 'pbr-sample': \n {'statements':\n {'10': \n {'actions': \n {'route_disposition': 'permit',\n 'set_community': '100:1',\n 'set_community_additive': True,\n 'set_community_no_advertise': True,\n 'set_community_no_export': True,\n 'set_next_hop': '192.168.1.1'},\n 'conditions': {}\n }\n }\n },\n 'setrrnh': \n {'statements':\n {'10': \n {'actions': \n {'route_disposition': 'permit',\n 'set_community': '100:1',\n 'set_community_additive': True,\n 'set_community_no_advertise': True,\n 'set_community_no_export': True},\n 'conditions': {}\n }\n }\n },\n 'setrrnhv6': \n {'statements':\n {'10': \n {'actions': \n {'route_disposition': 'permit',\n 'set_community': '100:1',\n 'set_community_additive': True,\n 'set_community_no_advertise': True,\n 'set_community_no_export': True,\n 'set_ext_community_rt': '100:10',\n 'set_ext_community_rt_additive': True},\n 'conditions': {}\n }\n }\n },\n 'test': \n {'statements':\n {'10': \n {'actions': \n {'route_disposition': 'permit',\n 'set_as_path_group': ['10',\n '10',\n '10'],\n 'set_as_path_prepend': '10',\n 'set_as_path_prepend_repeat_n': 3,\n 'set_community': '100:1',\n 'set_community_additive': True,\n 'set_community_delete': 'test',\n 'set_community_no_advertise': True,\n 'set_community_no_export': True,\n 'set_ext_community_delete': 'cisco',\n 'set_ext_community_rt': '100:10',\n 'set_level': 'level-1',\n 'set_local_pref': 20,\n 'set_med': 100,\n 'set_metric_type': 'external',\n 'set_next_hop': '10.64.4.4',\n 'set_next_hop_v6': '2001:db8:1::1',\n 'set_tag': 30,\n 'set_weight': 40},\n 'conditions': \n {'match_as_path_list': 'aspathlist1',\n 'match_community_list': 'test3 '\n 'ipv6 '\n 'next-hop',\n 'match_ext_community_list': 'testing',\n 'match_interface': 'Ethernet2/2',\n 'match_med_eq': 20,\n 'match_nexthop_in': 'test',\n 'match_nexthop_in_v6': 'test2',\n 'match_prefix_list': 'test-test',\n 'match_prefix_list_v6': 'test-test',\n 'match_route_type': 'level-1 '\n 'level-2'}\n }\n }\n }\n }\n }\n", "id": "2317407", "language": "Python", 
"matching_score": 4.054409980773926, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/route_policy/nxos/tests/route_policy_output.py" }, { "content": "# import python\nfrom abc import ABC\n\n# import genie\nfrom genie.conf.base.attributes import AttributesHelper\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.config import CliConfig\n\n# Structure Hierarchy:\n# +--Class RoutePolicy:\n# +--Class DeviceAttributes\n# +--Class StatementAttributes\n\n\nclass RoutePolicy(ABC):\n\n class DeviceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n for sub, attributes2 in attributes.mapping_values(\n 'statement_attr', keys=self.statement_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig,\n **kwargs))\n\n if apply:\n if configurations:\n self.device.configure(configurations)\n else:\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n class StatementAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n with configurations.submode_context(\n attributes.format('route-map {policy_definition}'\n ' {route_disposition.value} {statement_name}',\n force=True)):\n if unconfig and attributes.iswildcard:\n # Never reached!\n configurations.submode_unconfig()\n\n # nxos: description <description>\n if attributes.value('description'):\n configurations.append_line(\n attributes.format('description {description}'))\n\n # nxos: match metric <match_med_eq>\n if attributes.value('match_med_eq'):\n configurations.append_line(\n attributes.format('match metric {match_med_eq}'))\n\n # nxos: match ip next-hop prefix-list <match_nexthop_in>\n if attributes.value('match_nexthop_in'):\n configurations.append_line(\n attributes.format('match ip next-hop prefix-list '\n '{match_nexthop_in}'))\n\n # nxos: match ipv6 next-hop prefix-list\n # <match_nexthop_in_v6>\n if attributes.value('match_nexthop_in_v6'):\n configurations.append_line(\n attributes.format('match ipv6 next-hop '\n 'prefix-list {match_nexthop_in_v6}'))\n\n # nxos: match route-type <match_route_type>\n if attributes.value('match_route_type'):\n configurations.append_line(\n attributes.format('match route-type '\n '{match_route_type.value}'))\n\n # nxos: match community <match_community_list>\n if attributes.value('match_community_list'):\n configurations.append_line(\n attributes.format('match community '\n '{match_community_list}'))\n\n # nxos: match extcommunity <match_ext_community_list>\n if attributes.value('match_ext_community_list'):\n configurations.append_line(\n attributes.format('match extcommunity '\n '{match_ext_community_list}'))\n\n # nxos: match as-path <match_as_path_list>\n if attributes.value('match_as_path_list'):\n configurations.append_line(\n attributes.format('match as-path '\n '{match_as_path_list}'))\n\n # nxos: match route-type <match_level_eq>\n if attributes.value('match_level_eq'):\n if 'level_1_2' in attributes.value('match_level_eq'):\n configurations.append_line(\n 
attributes.format('match route-type '\n                                          'level-1'))\n                    configurations.append_line(\n                        attributes.format('match route-type '\n                                          'level-2'))\n                else:\n                    configurations.append_line(\n                        attributes.format('match route-type '\n                                          '{match_level_eq.value}'))\n\n            # nxos: match interface <match_interface>\n            if attributes.value('match_interface'):\n                configurations.append_line(\n                    attributes.format('match interface '\n                                      '{match_interface}'))\n\n            # nxos: match ip address prefix-list <match_prefix_list>\n            if attributes.value('match_prefix_list'):\n                configurations.append_line(\n                    attributes.format('match ip address prefix-list '\n                                      '{match_prefix_list}'))\n\n            # nxos: match ipv6 address prefix-list\n            # <match_prefix_list_v6>\n            if attributes.value('match_prefix_list_v6'):\n                configurations.append_line(\n                    attributes.format('match ipv6 address prefix-list '\n                                      '{match_prefix_list_v6}'))\n\n            # nxos: set origin <set_route_origin>\n            if attributes.value('set_route_origin'):\n                configurations.append_line(\n                    attributes.format('set origin '\n                                      '{set_route_origin.value}'))\n\n            # nxos: set local-preference <set_local_pref>\n            if attributes.value('set_local_pref'):\n                configurations.append_line(\n                    attributes.format('set local-preference '\n                                      '{set_local_pref}'))\n\n            # nxos: set ip next-hop <set_next_hop>\n            if attributes.value('set_next_hop') or \\\n               attributes.value('set_nexthop'):\n                configurations.append_line(\n                    attributes.format('set ip next-hop '\n                                      '{set_next_hop}'))\n\n            # nxos: set ipv6 next-hop <set_next_hop_v6>\n            if attributes.value('set_next_hop_v6'):\n                configurations.append_line(\n                    attributes.format('set ipv6 next-hop '\n                                      '{set_next_hop_v6}'))\n\n            # nxos: set metric <set_med>\n            if attributes.value('set_med'):\n                configurations.append_line(\n                    attributes.format('set metric '\n                                      '{set_med}'))\n\n            # nxos: set as-path prepend <set_as_path_prepend>\n            # nxos: set as-path prepend <set_as_path_prepend>*\n            # {set_as_path_prepend_repeat_n}\n            if attributes.value('set_as_path_prepend'):\n                if attributes.value('set_as_path_prepend_repeat_n'):\n                    configurations.append_line(\n                        attributes.format('set as-path prepend '\n                                          '{set_as_path_prepend}* '\n                                          '{set_as_path_prepend_repeat_n}'))\n                else:\n                    configurations.append_line(\n                        attributes.format('set as-path prepend '\n                                          '{set_as_path_prepend}'))\n\n            # nxos: set community <set_community> [no-export]\n            # [no-advertise] [additive]\n            if attributes.value('set_community'):\n                cfg = attributes.format('set community {set_community}',\n                                        force=True)\n                v1 = attributes.value('set_community_no_export')\n                if v1 is not None:\n                    cfg += ' no-export'\n                v2 = attributes.value('set_community_no_advertise')\n                if v2 is not None:\n                    cfg += ' no-advertise'\n                v3 = attributes.value('set_community_additive')\n                if v3 is not None:\n                    cfg += ' additive'\n\n                configurations.append_line(cfg)\n\n            # nxos: set comm-list <set_community_delete> delete\n            if attributes.value('set_community_delete'):\n                configurations.append_line(\n                    attributes.format('set comm-list '\n                                      '{set_community_delete} delete'))\n\n            # nxos: set extcommunity rt <set_ext_community_rt>\n            # [additive]\n            if attributes.value('set_ext_community_rt'):\n                cfg = 'set extcommunity rt {set_ext_community_rt}'\n                if attributes.value(\n                    'set_ext_community_rt_additive') == True:\n                    cfg += ' additive'\n\n                configurations.append_line(attributes.format(cfg))\n\n            # nxos: set extcomm-list <set_ext_community_delete> delete\n            if attributes.value('set_ext_community_delete'):\n                configurations.append_line(\n                    attributes.format('set extcomm-list '\n                                      '{set_ext_community_delete} delete'))\n\n            # nxos: set level 
<set_level>\n if attributes.value('set_level'):\n configurations.append_line(\n attributes.format('set level {set_level.value}'))\n\n # nxos: set metric-type <set_metric_type>\n if attributes.value('set_metric_type'):\n configurations.append_line(\n attributes.format('set metric-type '\n '{set_metric_type.value}'))\n\n # nxos: set metric <set_metric>\n if attributes.value('set_metric'):\n configurations.append_line(\n attributes.format('set metric '\n '{set_metric}'))\n\n # nxos: set metric-type <set_ospf_metric_type>\n if attributes.value('set_ospf_metric_type'):\n configurations.append_line(\n attributes.format('set metric-type '\n '{set_ospf_metric_type.value}'))\n\n # nxos: set metric <set_ospf_metric>\n if attributes.value('set_ospf_metric'):\n configurations.append_line(\n attributes.format('set metric '\n '{set_ospf_metric}'))\n\n # nxos: set tag <set_tag>\n if attributes.value('set_tag'):\n configurations.append_line(\n attributes.format('set tag '\n '{set_tag}'))\n\n # nxos: set weight <set_weight>\n if attributes.value('set_weight'):\n configurations.append_line(\n attributes.format('set weight '\n '{set_weight}'))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n", "id": "11648185", "language": "Python", "matching_score": 5.907039165496826, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/route_policy/nxos/route_policy.py" }, { "content": "\"\"\"Implement IOS-XR (iosxr) Specific Configurations for RoutePolicy objects.\n\"\"\"\n\n# Structure Hierarchy:\n# +--Class RoutePolicy:\n# +--Class DeviceAttributes\n# +--Class StatementAttributes\n\n# import python\nimport operator\nimport warnings\nfrom abc import ABC\n\n# import genie\nfrom genie.conf.base.attributes import AttributesHelper\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.config import CliConfig\nfrom genie.libs.conf.community_set import CommunitySet\n\n# import route_policy\nfrom ..route_policy import RoutePolicyCondition\n\n\nclass UnsupportedSelectiveRoutePolicyConfig(UserWarning):\n '''Warning class for Unsupported Selective RoutePolicy Configuration.'''\n pass\n\n\ndef _build_condition_string(base, cond, attributes):\n\n if cond.op is RoutePolicyCondition.op_contains:\n a, b = cond.operands\n return '{} in ({})'.format(\n b,\n ', '.join('{}'.format(va) for va in a),\n )\n\n elif cond.op is RoutePolicyCondition.op_matches_any:\n a, b = cond.operands\n if isinstance(a, CommunitySet):\n return '{} matches-any {}'.format(\n b,\n a.name,\n )\n else:\n return '{} matches-any ({})'.format(\n b,\n ', '.join('{}'.format(va) for va in a),\n )\n\n else:\n raise NotImplementedError(cond.op)\n\n\ndef _build_condition_cli(base, cond, attributes):\n configurations = CliConfigBuilder()\n\n str = _build_condition_string(base, cond, attributes=attributes)\n assert str\n\n with configurations.submode_context('if {} then'.format(str), exit_cmd=None):\n if_attr, attributes2 = attributes.namespace('if_attr')\n configurations.append_block(_build_attributes_cli(base, if_attr, attributes=attributes2))\n if not configurations:\n configurations.append_line('done')\n\n with configurations.submode_context('else', cancel_empty=True, exit_cmd=None):\n else_attr, attributes2 = attributes.namespace('else_attr')\n configurations.append_block(_build_attributes_cli(base, else_attr, attributes=attributes2))\n\n configurations.append_line('endif')\n\n return 
configurations\n\n\ndef _build_attributes_cli(base, attrobj, attributes):\n    configurations = CliConfigBuilder()\n\n    for cond, attributes2 in attributes.sequence_values('conditions'):\n        configurations.append_block(_build_condition_cli(base, cond, attributes=attributes2))\n\n    configurations.append_block(\n        attributes.format('{custom_config_cli}'))\n\n    # iosxr: route-policy <rtepol> / set label-index ...\n    configurations.append_line(attributes.format('set label-index {set_label_index}'))\n\n    # iosxr: route-policy <rtepol> / set community ...\n    v = attributes.value('set_community')\n    if v is not None:\n        if isinstance(v, CommunitySet):\n            v = v.name\n        configurations.append_line('set community {}'.format(v))\n\n    # iosxr: route-policy <rtepol> / set next-hop ...\n    configurations.append_line(attributes.format('set next-hop {set_nexthop}'))\n\n    # iosxr: route-policy <rtepol> / pass\n    if attributes.value('pass_on'):\n        configurations.append_line('pass')\n\n    # iosxr: route-policy <rtepol> / drop\n    if attributes.value('drop_on'):\n        configurations.append_line('drop')\n\n    return configurations\n\n\nclass RoutePolicy(ABC):\n\n    class DeviceAttributes(ABC):\n\n        def build_config(self, apply=True, attributes=None, unconfig=False,\n                         **kwargs):\n            '''IOS-XR RoutePolicy configuration.\n\n            Note:\n                Selective configuration is not supported on IOS-XR; the whole\n                route-policy is always removed and re-configured.\n            '''\n            assert not kwargs, kwargs\n            attributes = AttributesHelper(self, attributes)\n            if not attributes.iswildcard:\n                warnings.warn('IOS-XR does not support selective route-policy'\n                              ' configuration.',\n                              UnsupportedSelectiveRoutePolicyConfig)\n                attributes = AttributesHelper(self)\n            configurations = CliConfigBuilder()\n\n            # First remove any existing to avoid CLI warnings\n            if False:\n                # Actually, a commit is needed to avoid the warning, so don't even bother!\n                configurations.append_line(attributes.format(\n                    'no route-policy {name}', force=True))\n\n            # iosxr: route-policy <rtepol> (config-rpl)\n            with configurations.submode_context(\n                    attributes.format('route-policy {name}', force=True),\n                    exit_cmd='end-policy'):\n\n                configurations.append_block(_build_attributes_cli(self,\n                                                                   self,\n                                                                   attributes))\n\n                # Initializing variables\n                need_end_if = False\n                need_elseif = False\n\n                for sub, attributes2 in attributes.mapping_values(\n                    'statement_attr', keys=self.statement_attr, sort=True):\n                    if not unconfig:\n                        configurations.append_block(\n                            sub.build_config(apply=False,\n                                             attributes=attributes2,\n                                             unconfig=unconfig,\n                                             need_elseif=need_elseif,\n                                             exit_cmd=None,\n                                             **kwargs))\n\n                        # Checking if 'if' or 'elseif' will be added to the\n                        # config\n                        if len(self.statement_attr) > 1:\n                            need_elseif = True\n\n                        # Means configuration contains if/else statements\n                        if sub.conditional_keys:\n                            need_end_if = True\n\n                    # Case of unconfig is handled separately as in XR we can't\n                    # unconfig attributes. 
The whole route-policy needs to be\n                    # unconfigured.\n                    else:\n                        configurations.append_block(\n                            sub.build_unconfig(apply=False,\n                                               attributes=attributes2,\n                                               unconfig=unconfig,\n                                               need_elseif=need_elseif,\n                                               exit_cmd=None,\n                                               **kwargs))\n\n            if self.policy_definition and not unconfig:\n                # Case of if/else statements\n                if need_end_if:\n                    configurations.append_line(attributes.format(' endif'))\n                configurations.append_line(attributes.format(' end-policy'))\n                configurations.append_line(attributes.format(' exit'))\n\n            if apply:\n                if configurations:\n                    self.device.configure(configurations, fail_invalid=True)\n            else:\n                return CliConfig(device=self.device, unconfig=False,\n                                 cli_config=configurations)\n\n        def build_unconfig(self, apply=True, attributes=None, unconfig=True,\n                           **kwargs):\n            '''IOS-XR RoutePolicy unconfiguration.\n\n            Note:\n                Selective unconfiguration is not supported on IOS-XR; the whole\n                route-policy is always removed.\n            '''\n\n            try:\n                self.name\n                # Sebastien's block of code\n                # -------------------------\n                assert not kwargs, kwargs\n                attributes = AttributesHelper(self, attributes)\n                if not attributes.iswildcard:\n                    warnings.warn('IOS-XR does not support selective'\n                                  ' route-policy unconfiguration.',\n                                  UnsupportedSelectiveRoutePolicyConfig)\n                    attributes = AttributesHelper(self)\n                configurations = CliConfigBuilder()\n\n                configurations.append_line(attributes.format(\n                    'no route-policy {name}', force=True))\n\n                if apply:\n                    if configurations:\n                        self.device.configure(configurations,\n                                              fail_invalid=True)\n                else:\n                    return CliConfig(device=self.device, unconfig=True,\n                                     cli_config=configurations,\n                                     fail_invalid=True)\n            except AttributeError:\n                return self.build_config(apply=apply, attributes=attributes,\n                                         unconfig=True, **kwargs)\n\n    class StatementAttributes(ABC):\n\n        def build_config(self, apply=True, attributes=None, unconfig=False,\n                         need_elseif=False, exit_cmd=None, **kwargs):\n            assert not apply\n            attributes = AttributesHelper(self, attributes)\n            configurations = CliConfigBuilder(unconfig=unconfig)\n\n            self.conditional_keys = {}\n\n            def config_assembly(self, need_elseif=False,\n                                unconfig=False, indent_count=0, **kwargs):\n\n                new_configurations = \\\n                    CliConfigBuilder(unconfig=unconfig)\n\n                # iosxr: if/elif med eq <match_med_eq>\n                if attributes.value('match_med_eq'):\n                    self.conditional_keys['match_med_eq'] = \\\n                        attributes.format('med eq {match_med_eq}')\n\n                # iosxr: if/elif origin is <match_origin_eq>\n                if attributes.value('match_origin_eq'):\n                    self.conditional_keys['match_origin_eq'] = \\\n                        attributes.format('origin is '\n                                          '{match_origin_eq.value}')\n\n                # iosxr: if/elif nexthop in <match_nexthop_in>\n                if attributes.value('match_nexthop_in'):\n                    self.conditional_keys['match_nexthop_in'] = \\\n                        attributes.format(\n                            'next-hop in {match_nexthop_in}')\n\n                # iosxr: if/elif nexthop in <match_nexthop_in_v6>\n                if attributes.value('match_nexthop_in_v6'):\n                    self.conditional_keys['match_nexthop_in_v6'] = \\\n                        attributes.format('next-hop in '\n                                          '{match_nexthop_in_v6}')\n\n                # iosxr: if/elif local-preference eq\n                # <match_local_pref_eq>\n                if attributes.value('match_local_pref_eq'):\n                    self.conditional_keys['match_local_pref_eq'] = \\\n                        attributes.format('local-preference eq '\n                                          '{match_local_pref_eq}')\n\n                # iosxr: if/elif community matches-any\n                # <match_community_list>\n                if attributes.value('match_community_list'):\n                    self.conditional_keys['match_community_list'] = \\\n                        attributes.format('community matches-any '\n                                          '{match_community_list}')\n\n                # iosxr: if/elif extcommunity\n                # <match_ext_community_list_type>\n                # 
matches-any <match_ext_community_list>\n                if attributes.value('match_ext_community_list') and \\\n                   attributes.value('match_ext_community_list_type'):\n                    self.conditional_keys['match_ext_community_list'] = \\\n                        attributes.format('extcommunity '\n                                          '{match_ext_community_list_type.value} '\n                                          'matches-any {match_ext_community_list}')\n\n                # iosxr: if/elif as-path in <match_as_path_list>\n                if attributes.value('match_as_path_list'):\n                    self.conditional_keys['match_as_path_list'] = \\\n                        attributes.format('as-path in '\n                                          '{match_as_path_list}')\n\n                # iosxr: if/elif as-path length\n                # <match_as_path_length_oper> <match_as_path_length>\n                if attributes.value('match_as_path_length_oper') and \\\n                   attributes.value('match_as_path_length'):\n                    self.conditional_keys['match_as_path_length'] = \\\n                        attributes.format('as-path length '\n                                          '{match_as_path_length_oper.value} '\n                                          '{match_as_path_length}')\n\n                # iosxr: if/elif route-type is <match_level_eq>\n                if attributes.value('match_level_eq'):\n                    if 'level_1_2' in attributes.value(\n                        'match_level_eq'):\n                        self.conditional_keys['match_level_eq'] = \\\n                            attributes.format(\n                                'route-type is interarea')\n                    else:\n                        self.conditional_keys['match_level_eq'] = \\\n                            attributes.format('route-type is '\n                                              '{match_level_eq.value}')\n\n                # iosxr: if/elif ospf-area is <area_eq>\n                if attributes.value('area_eq'):\n                    self.conditional_keys['area_eq'] = \\\n                        attributes.format('ospf-area is '\n                                          '{area_eq}')\n\n                # iosxr: if/elif destination in <match_prefix_list>\n                if attributes.value('match_prefix_list'):\n                    self.conditional_keys['match_prefix_list'] = \\\n                        attributes.format('destination in '\n                                          '{match_prefix_list}')\n\n                # iosxr: if/elif destination in <match_prefix_list_v6>\n                if attributes.value('match_prefix_list_v6'):\n                    self.conditional_keys['match_prefix_list_v6'] = \\\n                        attributes.format('destination in '\n                                          '{match_prefix_list_v6}')\n\n                # iosxr: if/elif tag in <match_tag_list>\n                if attributes.value('match_tag_list'):\n                    self.conditional_keys['match_tag_list'] = \\\n                        attributes.format('tag in '\n                                          '{match_tag_list}')\n\n                ## Here we construct the config line\n                if self.conditional_keys:\n                    if need_elseif:\n                        conditional_cfg = attributes.format(' elseif',\n                                                            force=True)\n                    else:\n                        conditional_cfg = attributes.format('if',\n                                                            force=True)\n                    for index, key in enumerate(\n                        sorted(self.conditional_keys.keys())):\n                        conditional_cfg += ' {}'.format(\n                            self.conditional_keys[key])\n                        if index < (len(self.conditional_keys)-1):\n                            conditional_cfg += ' and'\n                        else:\n                            conditional_cfg += ' then'\n\n                    new_configurations.append_line(conditional_cfg)\n                else:\n                    # No spaces should precede the configuration\n                    # if there are no if/else statements\n                    indent_count = 0\n\n                # Counting the spaces to precede the configuration\n                # as per the if/else state.\n                spaces = ' ' * indent_count\n\n                # iosxr: # <statement_name>\n                if attributes.value('statement_name'):\n                    new_configurations.append_line(\n                        attributes.format(spaces+'# {statement_name}'))\n\n                # iosxr: # <description>\n                if attributes.value('description'):\n                    new_configurations.append_line(\n                        attributes.format(spaces+'# {description}'))\n\n                # iosxr: set origin <set_route_origin>\n                if attributes.value('set_route_origin'):\n                    new_configurations.append_line(\n                        attributes.format(spaces+'set origin '\n                                          '{set_route_origin}'))\n\n                # iosxr: set local-preference <set_local_pref>\n                if attributes.value('set_local_pref'):\n                    new_configurations.append_line(\n                        attributes.format(\n                            spaces+'set local-preference '\n                            '{set_local_pref}'))\n\n                # iosxr: set next-hop <set_next_hop>\n                if 
attributes.value('set_next_hop') or \\\n attributes.value('set_nexthop'):\n new_configurations.append_line(\n attributes.format(spaces+'set next-hop '\n '{set_next_hop}'))\n\n # iosxr: set next-hop <set_next_hop_v6>\n if attributes.value('set_next_hop_v6'):\n new_configurations.append_line(\n attributes.format(spaces+'set next-hop '\n '{set_next_hop_v6}'))\n\n # iosxr: set next-hop self\n if attributes.value('set_next_hop_self'):\n new_configurations.append_line(\n attributes.format(spaces+'set next-hop self'))\n\n # iosxr: set med <set_med>\n if attributes.value('set_med'):\n new_configurations.append_line(\n attributes.format(spaces+'set med {set_med}'))\n\n # iosxr: prepend as-path <set_as_path_prepend>\n # <set_as_path_prepend_repeat_n>\n if attributes.value('set_as_path_prepend') and \\\n attributes.value('set_as_path_prepend_repeat_n'):\n new_configurations.append_line(\n attributes.format(spaces+'prepend as-path '\n '{set_as_path_prepend} '\n '{set_as_path_prepend_repeat_n}'))\n\n # iosxr: set community (<set_community>, no-export,\n # no-advertise) additive\n if attributes.value('set_community'):\n cfg = attributes.format(spaces+'set community '\n '({set_community}', force=True)\n v1 = attributes.value('set_community_no_export')\n if v1 is not None:\n cfg += ' ,no-export'\n v2 = attributes.value('set_community_no_advertise')\n if v2 is not None:\n cfg += ' ,no-advertise'\n cfg += ')'\n v3 = attributes.value('set_community_additive')\n if v3 is not None:\n cfg += ' additive'\n\n new_configurations.append_line(cfg)\n\n # iosxr: delete community in <set_community_delete>\n if attributes.value('set_community_delete'):\n new_configurations.append_line(\n attributes.format(spaces+'delete community in '\n '{set_community_delete}'))\n\n # iosxr: set extcommunity rt (<set_ext_community_rt>)\n # [additive]\n if attributes.value('set_ext_community_rt'):\n if attributes.value(\n 'set_ext_community_rt_additive'):\n new_configurations.append_line(\n attributes.format(\n spaces+'set extcommunity rt '\n '({set_ext_community_rt}) additive'))\n else:\n new_configurations.append_line(\n attributes.format(\n spaces+'set extcommunity rt '\n '({set_ext_community_rt})'))\n\n # iosxr: set extcommunity soo (<set_ext_community_soo>)\n # [additive]\n if attributes.value('set_ext_community_soo'):\n if attributes.value(\n 'set_ext_community_soo_additive'):\n new_configurations.append_line(\n attributes.format(\n spaces+'set extcommunity soo '\n '({set_ext_community_soo}) additive'))\n else:\n new_configurations.append_line(\n attributes.format(\n spaces+'set extcommunity soo '\n '({set_ext_community_soo})'))\n\n # iosxr: set extcommunity vpn (<set_ext_community_vpn>)\n # [additive]\n if attributes.value('set_ext_community_vpn'):\n if attributes.value(\n 'set_ext_community_vpn_additive'):\n new_configurations.append_line(\n attributes.format(\n spaces+'set extcommunity vpn '\n '({set_ext_community_vpn}) additive'))\n else:\n new_configurations.append_line(\n attributes.format(\n spaces+'set extcommunity vpn '\n '({set_ext_community_vpn})'))\n\n # iosxr: delete extcommunity\n # <set_ext_community_delete_type>\n # <set_ext_community_delete>\n if attributes.value('set_community_delete') and \\\n attributes.value('set_ext_community_delete_type'):\n new_configurations.append_line(\n attributes.format(spaces+'delete extcommunity '\n '{set_ext_community_delete_type.value} '\n '{set_community_delete}'))\n\n # iosxr: set level <set_level>\n if attributes.value('set_level'):\n new_configurations.append_line(\n 
attributes.format(spaces+'set level '\n                                          '{set_level}'))\n\n                # iosxr: set metric-type <set_metric_type>\n                if attributes.value('set_metric_type'):\n                    new_configurations.append_line(\n                        attributes.format(spaces+'set metric-type '\n                                          '{set_metric_type}'))\n\n                # iosxr: set isis-metric <set_metric>\n                if attributes.value('set_metric'):\n                    new_configurations.append_line(\n                        attributes.format(spaces+'set isis-metric '\n                                          '{set_metric}'))\n\n                # iosxr: set metric-type <set_ospf_metric_type>\n                if attributes.value('set_ospf_metric_type'):\n                    new_configurations.append_line(\n                        attributes.format(spaces+'set metric-type '\n                                          '{set_ospf_metric_type}'))\n\n                # iosxr: set ospf-metric <set_ospf_metric>\n                if attributes.value('set_ospf_metric'):\n                    new_configurations.append_line(\n                        attributes.format(spaces+'set ospf-metric '\n                                          '{set_ospf_metric}'))\n\n                # iosxr: set tag <set_tag>\n                if attributes.value('set_tag'):\n                    new_configurations.append_line(\n                        attributes.format(spaces+'set tag '\n                                          '{set_tag}'))\n\n                # iosxr: set weight <set_weight>\n                if attributes.value('set_weight'):\n                    new_configurations.append_line(\n                        attributes.format(spaces+'set weight '\n                                          '{set_weight}'))\n\n                # iosxr: pass|done|drop\n                if attributes.value('actions'):\n                    new_configurations.append_line(\n                        attributes.format(spaces+'{actions.value}'))\n\n                return str(new_configurations)\n\n            if need_elseif:\n                configurations.append_block(config_assembly(self,\n                    need_elseif=need_elseif, unconfig=unconfig,\n                    indent_count=2))\n            else:\n                with configurations.submode_context(\n                    attributes.format('route-policy {policy_definition}',\n                                      force=True), exit_cmd=None):\n                    if unconfig and attributes.iswildcard:\n                        # Never reached!\n                        configurations.submode_unconfig()\n\n                    configurations.append_block(config_assembly(self,\n                        need_elseif=need_elseif, unconfig=unconfig,\n                        indent_count=1))\n\n            return str(configurations)\n\n        def build_unconfig(self, apply=True, attributes=None, **kwargs):\n\n            attributes = AttributesHelper(self, attributes)\n            configurations = CliConfigBuilder()\n            configurations.append_line(attributes.format(\n                'no route-policy {policy_definition}', force=True))\n\n            return str(configurations)\n\n", "id": "1122903", "language": "Python", "matching_score": 5.329113483428955, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/route_policy/iosxr/route_policy.py" }, { "content": "\"\"\"Implement IOS-XR (iosxr) Specific Configurations for CommunitySet objects.\n\"\"\"\n\n# Table of contents:\n#     class CommunitySet:\n#         class DeviceAttributes:\n#             def build_config/build_unconfig:\n\nfrom abc import ABC\nimport operator\nimport warnings\n\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning,\\\n    AttributesHelper\nfrom genie.conf.base.cli import CliConfigBuilder\n\n\nclass UnsupportedSelectiveCommunitySetConfig(UserWarning):\n    '''Warning class for Unsupported Selective CommunitySet Configuration.'''\n    pass\n\n\nclass CommunitySet(ABC):\n\n    class DeviceAttributes(ABC):\n\n        def build_config(self, apply=True, attributes=None, **kwargs):\n            '''IOS-XR CommunitySet configuration.\n\n            Note:\n                Selective configuration is not supported on IOS-XR; the whole\n                community-set is always removed and re-configured.\n            '''\n            assert not kwargs, kwargs\n            attributes = AttributesHelper(self, attributes)\n            if not attributes.iswildcard:\n                warnings.warn('IOS-XR does not support selective community-set'\n                              ' configuration.',\n                              UnsupportedSelectiveCommunitySetConfig)\n                attributes = AttributesHelper(self)\n            configurations = CliConfigBuilder()\n\n            # First remove any existing to avoid CLI warnings\n            if False:\n                # Actually, a commit is needed to 
avoid the warning, so don't even bother!\n                configurations.append_line(attributes.format(\n                    'no community-set {name}', force=True))\n\n            # iosxr: community-set <cs> (config-comm)\n            with configurations.submode_context(\n                    attributes.format('community-set {name}', force=True),\n                    exit_cmd='end-set'):\n\n                for icomm, community in enumerate(self.communities):\n                    last = icomm == (len(self.communities) - 1)\n                    # *                  Wildcard (any community or part thereof) \n                    # <0-65535>          16-bit half-community number \n                    # [                  Left bracket to begin range \n                    # accept-own         Accept-Own (BGP well-known community) \n                    # dfa-regex          DFA style regular expression \n                    # graceful-shutdown  Graceful Shutdown (BGP well-known community) \n                    # internet           Internet (BGP well-known community) \n                    # ios-regex          Traditional IOS style regular expression \n                    # local-AS           Do not send outside local AS (BGP well-known community) \n                    # no-advertise       Do not advertise to any peer (BGP well-known community) \n                    # no-export          Do not export to next AS (BGP well-known community) \n                    # private-as         Match within BGP private AS range [64512..65534] \n                    configurations.append_line('{}{}'.format(\n                        community,\n                        '' if last else ','))\n\n            if apply:\n                if configurations:\n                    self.device.configure(configurations, fail_invalid=True)\n            else:\n                return str(configurations)\n\n        def build_unconfig(self, apply=True, attributes=None, **kwargs):\n            '''IOS-XR CommunitySet unconfiguration.\n\n            Note:\n                Selective unconfiguration is not supported on IOS-XR; the whole\n                community-set is always removed.\n            '''\n            assert not kwargs, kwargs\n            attributes = AttributesHelper(self, attributes)\n            if not attributes.iswildcard:\n                warnings.warn('IOS-XR does not support selective community-set'\n                              ' unconfiguration.',\n                              UnsupportedSelectiveCommunitySetConfig)\n                attributes = AttributesHelper(self)\n            configurations = CliConfigBuilder()\n\n            configurations.append_line(attributes.format(\n                'no community-set {name}', force=True))\n\n            if apply:\n                if configurations:\n                    self.device.configure(configurations, fail_invalid=True)\n            else:\n                return str(configurations)\n\n\n", "id": "7856728", "language": "Python", "matching_score": 2.93835186958313, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/community_set/iosxr/community_set.py" }, { "content": "\nfrom abc import ABC\nimport warnings\nimport contextlib\n\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning,\\\n    AttributesHelper\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.config import CliConfig\n\n\nclass Vni(ABC):\n\n    def build_config(self, apply=True, attributes=None, unconfig=False,\n                     contained=False, **kwargs):\n        assert not kwargs, kwargs\n        attributes = AttributesHelper(self, attributes)\n        configurations = CliConfigBuilder(unconfig=unconfig)\n\n        # iosxr: interface nve1 (config-nve)\n        submode_stack = contextlib.ExitStack()\n        if not contained and self.nve:\n            submode_stack.enter_context(\n                self.nve._build_config_create_interface_submode_context(configurations))\n\n        # iosxr: interface nve1 / member vni 1 (config-nve-vni)\n        # iosxr: interface nve1 / member vni 1-2 (config-nve-vni)\n        with configurations.submode_context(attributes.format('member vni {vni_id}', force=True)):\n            if unconfig and attributes.iswildcard:\n                configurations.submode_unconfig()\n\n            # iosxr: interface nve1 / member vni 1 / host-reachability protocol bgp\n            configurations.append_line(attributes.format('host-reachability protocol {host_reachability_protocol}'))\n\n            # iosxr: interface nve1 / member vni 1 / mcast-group 1.2.3.4\n            # iosxr: interface nve1 / member 
vni 1 / mcast-group 1.2.3.4 1.2.3.4\n configurations.append_line(attributes.format('mcast-group {mcast_group}'))\n\n # iosxr: interface nve1 / member vni 1 / vrf someword\n configurations.append_line(attributes.format('vrf {vrf.name}'))\n\n # iosxr: interface nve1 / member vni 1 / load-balance ...\n configurations.append_line(attributes.format('load-balance {load_balance}'))\n\n submode_stack.close()\n if apply:\n if configurations:\n self.device.configure(str(configurations), fail_invalid=True)\n else:\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations, fail_invalid=True)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n", "id": "6545784", "language": "Python", "matching_score": 3.3593008518218994, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/evpn/iosxr/vni.py" }, { "content": "\nfrom abc import ABC\nimport warnings\nimport contextlib\n\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning,\\\n AttributesHelper\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.config import CliConfig\n\nfrom genie.libs.conf.l2vpn.pseudowire import PseudowireNeighbor,\\\n PseudowireIPv4Neighbor\n\n\nclass Vfi(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # iosxe: l2vpn vfi context {name}\n with configurations.submode_context(attributes.format('l2vpn vfi context {name}', force=True)):\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n # iosxr: l2vpn vfi context {name} / vpn id {vpn_id}\n assert self.vpn_id is not None\n configurations.append_line(attributes.format('vpn id {vpn_id}'))\n\n sub, attributes2 = attributes.namespace('autodiscovery_bgp')\n if sub is not None:\n configurations.append_block(\n sub.build_config(apply=False, attributes=attributes2,\n unconfig=unconfig))\n\n for sub, attributes2 in attributes.mapping_values('neighbor_attr', keys=self.pseudowire_neighbors, sort=True):\n configurations.append_block(\n str(sub.build_config(apply=False, attributes=attributes2, unconfig=unconfig)))\n\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n class AutodiscoveryBgpAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n if attributes.value('enabled'):\n\n sub, attributes2 = attributes.namespace('signaling_protocol_ldp')\n if sub is not None:\n configurations.append_block(\n sub.build_config(apply=False, attributes=attributes2, unconfig=unconfig))\n\n sub, attributes2 = attributes.namespace('signaling_protocol_bgp')\n if sub is not None:\n configurations.append_block(\n sub.build_config(apply=False, attributes=attributes2, unconfig=unconfig))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n class SignalingProtocolBgpAttributes(ABC):\n\n def 
build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n with configurations.submode_context('autodiscovery bgp signaling bgp'):\n if not attributes.value('enabled', force=True):\n configurations.submode_cancel()\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n class SignalingProtocolLdpAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n with configurations.submode_context('autodiscovery bgp signaling ldp'):\n if not attributes.value('enabled', force=True):\n configurations.submode_cancel()\n\n configurations.append_line(attributes.format('vpls-id {vpls_id}'))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n class MulticastP2mpAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n class TransportRsvpTeAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n class NeighborAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n if isinstance(self.neighbor, PseudowireIPv4Neighbor):\n assert self.ip is not None\n if attributes.value('pw_id'):\n configurations.append_line(attributes.format('member {ip} {pw_id} encapsulation mpls',force_ip = True))\n else:\n configurations.append_line(attributes.format('member {ip} encapsulation mpls',force_ip = True))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n", "id": "12746612", "language": "Python", "matching_score": 3.897372245788574, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/l2vpn/iosxe/vfi.py" }, { "content": "\"\"\"\nImplement NXOS Specific Configurations for Nd objects.\n\"\"\"\n\n# Table of contents:\n# class Nd:\n# class DeviceAttributes:\n# class InterfaceAttributes:\n# class NeighborAttributes:\n\n# Python\nfrom abc import ABC\n# Genie package\nfrom genie.conf.base.attributes import AttributesHelper\nfrom genie.conf.base.config import CliConfig\nfrom 
genie.conf.base.cli import CliConfigBuilder\n\nclass Nd(ABC):\n\n class DeviceAttributes(ABC):\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # InterfaceAttributes\n with configurations.submode_context(attributes.format(\n 'interface {interface}', force=True)):\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n for sub, attributes2 in attributes.mapping_values('interface_attr',\n sort=True,\n keys=self.interface_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n\n if apply:\n if configurations:\n self.device.configure(configurations)\n else:\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n class InterfaceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n if attributes.value('if_ra_interval'):\n configurations.append_line(\n attributes.format('ipv6 nd ra-interval {if_ra_interval}'))\n if attributes.value('if_ra_lifetime'):\n configurations.append_line(\n attributes.format('ipv6 nd ra-lifetime {if_ra_lifetime}'))\n if attributes.value('if_ra_suppress') == True:\n configurations.append_line(\n attributes.format('ipv6 nd suppress-ra'))\n\n # NeighborAttributes\n for sub, attributes2 in attributes.mapping_values('neighbor_attr',\n sort=True,\n keys=self.neighbor_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n\n if apply:\n if configurations:\n self.device.configure(configurations)\n else:\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n class NeighborAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n if attributes.value('ip') and attributes.value('link_layer_address'):\n configurations.append_line(attributes.format('ipv6 neighbor {ip} {link_layer_address}'))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n", "id": "2201441", "language": "Python", "matching_score": 2.958789587020874, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/nd/nxos/nd.py" }, { "content": "\"\"\"Implement IOSXR (iosxr) Specific Configurations for Vlan objects.\n\"\"\"\n\n# Table of contents:\n# class Vlan:\n# class DeviceAttributes:\n# class AccessMapAttributes:\n# class VlanConfigurationAttributes:\n# class InterfaceAttributes:\n\n# Python\nfrom abc import ABC\n\n# Genie package\nfrom genie.conf.base.attributes import AttributesHelper\nfrom genie.conf.base import Interface\nfrom genie.conf.base.config import CliConfig\nfrom genie.conf.base.cli import 
CliConfigBuilder\n\n\nclass Vlan(ABC):\n\n class DeviceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n for sub, attributes2 in attributes.mapping_values(\n 'access_map_attr',\n keys=self.access_map_attr):\n configurations.append_block(\n sub.build_config(apply=False, attributes=attributes2,\n unconfig=unconfig, **kwargs))\n\n for sub, attributes2 in attributes.mapping_values(\n 'vlan_configuration_attr',\n keys=self.vlan_configuration_attr):\n configurations.append_block(\n sub.build_config(apply=False, attributes=attributes2,\n unconfig=unconfig, **kwargs))\n\n for sub, attributes2 in attributes.mapping_values(\n 'interface_attr',\n keys=self.interface_attr.keys()):\n configurations.append_block(\n sub.build_config(apply=False, attributes=attributes2,\n unconfig=unconfig, **kwargs))\n\n if apply:\n if configurations:\n self.device.configure(str(configurations),\n fail_invalid=True)\n else:\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations, fail_invalid=True)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n class AccessMapAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n class VlanConfigurationAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n class InterfaceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n\n if attributes.value('eth_encap_type1'):\n self.interface.eth_encap_type1 = \\\n attributes.value('eth_encap_type1')\n\n if attributes.value('eth_encap_val1'):\n self.interface.eth_encap_val1 = \\\n attributes.value('eth_encap_val1')\n\n if attributes.value('eth_encap_type2'):\n self.interface.eth_encap_type2 = \\\n attributes.value('eth_encap_type2')\n\n if attributes.value('eth_encap_val2'):\n self.interface.eth_encap_val2 = \\\n attributes.value('eth_encap_val2')\n\n if unconfig:\n if attributes.attributes is not None:\n configurations = \\\n self.interface.build_unconfig(\n apply=False,\n attributes=attributes.attributes)\n else:\n configurations = \\\n self.interface.build_unconfig(\n apply=False,\n attributes={'eth_encap_type1': None,\n 'eth_encap_val1': None,\n 'eth_encap_type2': None,\n 'eth_encap_val2': None})\n else:\n # self.interface.build_config always calls cliconfig method\n # A fix is needed in Genie infrastructure to fix that\n # context abstraction issue.\n configurations = self.interface.build_config(apply=False)\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, 
attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n", "id": "8425961", "language": "Python", "matching_score": 2.6620326042175293, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/vlan/iosxr/vlan.py" }, { "content": "\"\"\"\nImplement IOSXE (iosxe) Specific Configurations for Static Route objects.\n\"\"\"\n\n# Table of contents:\n# class StaticRouting:\n# class DeviceAttributes:\n# class VrfAttributes:\n# class AddressFamilyAttributes:\n# class RouteAttributes:\n# class InterfaceAttributes:\n# class NextHopAttributes:\n\n# Python\nfrom abc import ABC\nfrom netaddr import IPNetwork\n# Genie package\nfrom genie.conf.base.attributes import AttributesHelper\nfrom genie.conf.base.config import CliConfig\nfrom genie.conf.base.cli import CliConfigBuilder\n\nclass StaticRouting(ABC):\n\n class DeviceAttributes(ABC):\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # VrfAttributes\n for sub, attributes2 in attributes.mapping_values('vrf_attr',\n sort=True,\n keys=self.vrf_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n\n if apply:\n if configurations:\n self.device.configure(configurations)\n else:\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n class VrfAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # AddressFamilyAttributes\n for sub, attributes2 in attributes.mapping_values('address_family_attr',\n sort=True,\n keys=self.address_family_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n\n if apply:\n if configurations:\n self.device.configure(configurations)\n else:\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n class AddressFamilyAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n self.vrf = self.parent.vrf\n\n for sub, attributes2 in attributes.mapping_values('route_attr',\n sort=True,\n keys=self.route_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n\n if apply:\n if configurations:\n self.device.configure(configurations)\n else:\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n # RouteAttributes\n class RouteAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not kwargs, kwargs\n attributes = 
AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n self.vrf = self.parent.vrf\n self.af = self.parent.af\n\n # InterfaceAttributes\n for sub, attributes2 in attributes.mapping_values('interface_attr',\n sort=True,\n keys=self.interface_attr):\n\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n # NextHopAttributes\n for sub, attributes2 in attributes.mapping_values('next_hop_attr',\n sort=True,\n keys=self.next_hop_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n\n if apply:\n if configurations:\n self.device.configure(configurations)\n else:\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n class InterfaceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n self.vrf = self.parent.vrf\n self.af = self.parent.af\n self.route = self.parent.route\n\n if attributes.value('af'):\n af = 'ip' if attributes.value('af').value == 'ipv4' else 'ipv6'\n join_all = \"{} route\".format(af)\n\n if attributes.value('vrf') and 'default' not in attributes.value('vrf'):\n join_all += \" vrf {}\".format(attributes.value('vrf'))\n\n if attributes.value('route'):\n if 'ipv6' in attributes.value('af').value:\n join_all += \" {}\".format(attributes.value('route'))\n else:\n if '/' in attributes.value('route'):\n network_netmask = IPNetwork(attributes.value('route'))\n network = str(network_netmask.network)\n netmask = str(network_netmask.netmask)\n join_all += \" {} {}\".format(network,netmask)\n else:\n join_all += \" {}\".format(attributes.value('route'))\n\n if attributes.value('interface'):\n join_all += \" {}\".format(attributes.value('interface'))\n\n if attributes.value('if_nexthop'):\n join_all += ' {}'.format(attributes.value('if_nexthop'))\n\n if attributes.value('if_preference'):\n join_all += ' {}'.format(attributes.value('if_preference'))\n\n if attributes.value('if_tag'):\n join_all += \" tag {}\".format(attributes.value('if_tag'))\n\n if attributes.value('if_track'):\n join_all += \" track {}\".format(attributes.value('if_track'))\n\n configurations.append_line(join_all)\n\n if apply:\n if configurations:\n self.device.configure(configurations)\n else:\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n class NextHopAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n self.vrf = self.parent.vrf\n self.af = self.parent.af\n self.route = self.parent.route\n\n if attributes.value('af'):\n af = 'ip' if attributes.value('af').value == 'ipv4' else 'ipv6'\n join_all = \"{} route\".format(af)\n\n if attributes.value('vrf') and 'default' not in attributes.value('vrf'):\n join_all += \" vrf {}\".format(attributes.value('vrf'))\n\n if 
attributes.value('route'):\n                    if 'ipv6' in attributes.value('af').value:\n                        join_all += \" {}\".format(attributes.value('route'))\n                    else:\n                        if '/' in attributes.value('route'):\n                            network_netmask = IPNetwork(attributes.value('route'))\n                            network = str(network_netmask.network)\n                            netmask = str(network_netmask.netmask)\n                            join_all += \" {} {}\".format(network,netmask)\n                        else:\n                            join_all += \" {}\".format(attributes.value('route'))\n\n                if attributes.value('nexthop'):\n                    join_all += ' {}'.format(attributes.value('nexthop'))\n\n                if attributes.value('preference'):\n                    join_all += ' {}'.format(attributes.value('preference'))\n\n                if attributes.value('tag'):\n                    join_all += \" tag {}\".format(attributes.value('tag'))\n\n                if attributes.value('track'):\n                    join_all += \" track {}\".format(attributes.value('track'))\n\n                configurations.append_line(join_all)\n\n            if apply:\n                if configurations:\n                    self.device.configure(configurations)\n            else:\n                return CliConfig(device=self.device, unconfig=unconfig,\n                                 cli_config=configurations)\n\n            def build_unconfig(self, apply=True, attributes=None, **kwargs):\n                return self.build_config(apply=apply,\n                                         attributes=attributes,\n                                         unconfig=True, **kwargs)\n\n", "id": "6884722", "language": "Python", "matching_score": 3.253472328186035, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/static_routing/iosxe/static_routing.py" }, { "content": "\"\"\"\nImplement IOSXE (iosxe) Specific Configurations for Route objects.\n\"\"\"\n\n# Routing Hierarchy\n# -----------------\n# Routing\n#  +- DeviceAttributes\n\n# Python\nfrom abc import ABC\n\n# import genie\nfrom genie.conf.base.config import CliConfig\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning,\\\n    AttributesHelper\n\n\nclass Routing(ABC):\n\n    class DeviceAttributes(ABC):\n\n        def build_config(self, apply=True, attributes=None, unconfig=False,\n                         **kwargs):\n            assert not kwargs, kwargs\n            attributes = AttributesHelper(self, attributes)\n            configurations = CliConfigBuilder(unconfig=unconfig)\n\n            cfg_line = []\n            unconfig_line = []\n\n            # enabled\n            if attributes.value('enabled'):\n                if unconfig is False:\n                    configurations.append_line(attributes.format('ip routing'))\n                    configurations.append_line(attributes.format('ipv6 unicast routing'))\n\n                # Make sure that only enabled was provided in attributes\n                # If wildcard, then delete everything\n                elif unconfig is True and\\\n                     attributes.attributes == {'enabled': {True: None}} or \\\n                     attributes.iswildcard:\n                    configurations.append_line('no ip routing', raw=True)\n                    configurations.append_line('no ipv6 unicast routing', raw=True)\n                    if apply:\n                        if configurations:\n                            self.device.configure(configurations)\n                    else:\n                        return CliConfig(device=self.device, unconfig=unconfig,\n                                         cli_config=configurations)\n\n            # enabled_ip_routing\n            elif attributes.value('enabled_ip_routing'):\n                cfg_line.append('ip routing')\n                unconfig_line.append('no ip routing')\n            \n            # enabled_ipv6_unicast_routing\n            elif attributes.value('enabled_ipv6_unicast_routing'):\n                cfg_line.append('ipv6 unicast routing')\n                unconfig_line.append('no ipv6 unicast routing')\n\n            if cfg_line:\n                if unconfig is False:\n                    configurations.append_line('\\n'.join(cfg_line))\n                elif unconfig is True:\n                    configurations.append_line('\\n'.join(unconfig_line), raw=True)\n                if apply:\n                    if configurations:\n                        self.device.configure(configurations)\n                else:\n                    return CliConfig(device=self.device, unconfig=unconfig,\n                                     cli_config=configurations)\n\n            if apply:\n                if configurations:\n                    
self.device.configure(configurations)\n else:\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply,\n attributes=attributes,\n unconfig=True, **kwargs)\n\n", "id": "11074256", "language": "Python", "matching_score": 1.8404982089996338, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/routing/iosxe/routing.py" }, { "content": "'''\r\nIOSXE specific configurations for IPv6Addr feature object.\r\n'''\r\n\r\n# Python\r\nfrom abc import ABC\r\n\r\n# Genie\r\nfrom genie.conf.base.cli import CliConfigBuilder\r\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning,\\\r\n AttributesHelper\r\n\r\n\r\nclass IPv6Addr(ABC):\r\n\r\n def build_config(self, apply=True, attributes=None, unconfig=False,\r\n **kwargs):\r\n assert not kwargs, kwargs\r\n assert not apply\r\n attributes = AttributesHelper(self, attributes)\r\n configurations = CliConfigBuilder(unconfig=unconfig)\r\n\r\n # ===================================\r\n # ipv6\r\n # ipv6_prefix_length\r\n # ipv6_anycast\r\n # ipv6_eui_64\r\n # ===================================\r\n\r\n # ipv6 address <ipv6prefix> [ anycast | eui-64 ]\r\n if attributes.value('ipv6') and attributes.value('ipv6_prefix_length'):\r\n\r\n cmd = 'ipv6 address {ipv6}/{ipv6_prefix_length}'.\\\r\n format(ipv6=attributes.value('ipv6'),\r\n ipv6_prefix_length=attributes.value('ipv6_prefix_length'))\r\n\r\n if attributes.value('ipv6_eui_64'):\r\n cmd += ' eui-64'\r\n elif attributes.value('ipv6_anycast'):\r\n cmd += ' anycast'\r\n\r\n configurations.append_line(cmd)\r\n\r\n return str(configurations)\r\n\r\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\r\n return self.build_config(apply=apply, attributes=attributes,\r\n unconfig=True)\r\n", "id": "9021096", "language": "Python", "matching_score": 1.2437883615493774, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/interface/iosxe/ipv6addr.py" }, { "content": "'''\nNXOS specific configurations for static rp feature object.\n'''\n\n# Python\nimport warnings\nfrom abc import ABC\n\n# Genie\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning,\\\n AttributesHelper\n\n\nclass RPAddressGroup(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # ===================================\n # static_rp_address\n # static_rp_group_list\n # static_rp_prefix_list\n # static_rp_route_map\n # static_rp_policy\n # static_rp_bidir\n # ===================================\n\n if attributes.value('static_rp_address'):\n # ip/ipv6 pim rp-address <static_rp_address>\n static_str = '{ip} pim rp-address'.format(ip=kwargs['ip_type'])\n static_str += ' {static_rp_address}'\n\n # group-list {static_rp_group_list} |\n # route-map {static_rp_route_map} |\n # prefix-list {static_rp_prefix_list}\n if attributes.value('static_rp_group_list'):\n static_str += ' group-list {static_rp_group_list}'\n elif attributes.value('static_rp_route_map'):\n static_str += ' route-map {static_rp_route_map}'\n elif attributes.value('static_rp_prefix_list'):\n static_str += ' prefix-list {static_rp_prefix_list}'\n else:\n configurations.append_line(\n attributes.format(static_str))\n static_str = ''\n\n if static_str:\n # bidir\n if 
attributes.value('static_rp_bidir'):\n static_str += ' bidir'\n\n # bidir |\n # override |\n # bidir override\n if attributes.value('static_rp_override') and \\\n kwargs['ip_type'] == 'ipv6':\n static_str += ' override'\n\n configurations.append_line(\n attributes.format(static_str))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n", "id": "3992833", "language": "Python", "matching_score": 1.9720361232757568, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/pim/nxos/rp_address.py" }, { "content": "'''\nIOSXE specific configurations for MldGroup feature object.\n'''\n\n# Python\nimport warnings\nfrom abc import ABC\n\n# Genie\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning,\\\n AttributesHelper\n\n\nclass MldGroup(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # ===================================\n # join_group\n # join_group_source_addr\n # static_group\n # static_group_source_addr\n # ===================================\n\n if attributes.value('join_group'):\n\n cmd_str = 'ipv6 mld join-group {join_group}'\n\n # build up configuration string\n if attributes.value('join_group_source_addr'):\n cmd_str += ' {join_group_source_addr}'\n\n configurations.append_line(attributes.format(cmd_str))\n\n elif attributes.value('static_group'):\n\n cmd_str = 'ipv6 mld static-group {static_group}'\n \n # build up configuration string\n if attributes.value('static_group_source_addr'):\n cmd_str += ' {static_group_source_addr}'\n\n configurations.append_line(attributes.format(cmd_str))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n", "id": "7545026", "language": "Python", "matching_score": 3.361937999725342, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/mld/iosxe/mld_group.py" }, { "content": "'''\nIOSXE specific configurations for Ssm feature object.\n'''\n\n# Python\nimport warnings\nfrom abc import ABC\n\n# Genie\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning,\\\n AttributesHelper\n\n\nclass Ssm(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # ===================================\n # ssm_group_policy\n # ssm_source_addr\n # ===================================\n\n # get vrf info\n vrf = kwargs['vrf']\n\n if vrf == 'default':\n cmd_str = 'ipv6 mld'\n else:\n cmd_str = 'ipv6 mld vrf {}'.format(vrf)\n\n if attributes.value('ssm_group_policy') and \\\n attributes.value('ssm_source_addr'):\n\n cmd_str += ' ssm-map static {ssm_group_policy} {ssm_source_addr}'\n configurations.append_line(attributes.format(cmd_str))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n", "id": "7485309", "language": "Python", "matching_score": 3.5055999755859375, "max_stars_count": 94, "path": 
"pkgs/conf-pkg/src/genie/libs/conf/mld/iosxe/ssm.py" }, { "content": "'''\nNXOS specific configurations for Ssm feature object.\n'''\n\n# Python\nimport warnings\nfrom abc import ABC\n\n# Genie\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning,\\\n AttributesHelper\n\n\nclass Ssm(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # ===================================\n # ssm_group_range\n # ssm_source_addr\n # ===================================\n\n if attributes.value('ssm_group_range') and \\\n attributes.value('ssm_source_addr'):\n \n configurations.append_line(\n attributes.format('ip igmp ssm-translate '\n '{ssm_group_range} {ssm_source_addr}'))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n", "id": "10510629", "language": "Python", "matching_score": 1.1352211236953735, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/igmp/nxos/ssm.py" }, { "content": "from .igmp import *\nfrom .igmp_group import *\nfrom .ssm import *\n", "id": "7581878", "language": "Python", "matching_score": 0.8947348594665527, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/igmp/__init__.py" }, { "content": "'''\nIGMP Genie Ops Object for IOS - CLI.\n'''\nfrom ..iosxe.igmp import Igmp as IgmpXE\n\nclass Igmp(IgmpXE):\n pass", "id": "12123265", "language": "Python", "matching_score": 1.3853024244308472, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/igmp/ios/igmp.py" }, { "content": "''' \nInterface Genie Ops Object for IOS - CLI.\n'''\n\nimport re\n\n# iosxe interface ops\nfrom ..iosxe.interface import Interface as InterfaceXE\n\n\nclass Interface(InterfaceXE):\n '''Interface Genie Ops Object'''\n pass", "id": "3058527", "language": "Python", "matching_score": 1.3363111019134521, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/interface/ios/interface.py" }, { "content": "'''\nVrf Genie Ops Object for IOS - CLI.\n'''\nfrom ..iosxe.vrf import Vrf as Vrfs\n\nclass Vrf(Vrfs):\n pass", "id": "3567892", "language": "Python", "matching_score": 0.34498268365859985, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/vrf/ios/vrf.py" }, { "content": "'''\nLag Genie Ops Object for IOS - CLI.\n'''\nfrom ..iosxe.lag import Lag as LagXE\n\nclass Lag(LagXE):\n pass", "id": "11708293", "language": "Python", "matching_score": 0.8718468546867371, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/lag/ios/lag.py" }, { "content": "'''\nBGP Genie Ops Object for IOS - CLI.\n'''\nfrom ..iosxe.bgp import Bgp as BgpXE\n\nclass Bgp(BgpXE):\n pass", "id": "2348689", "language": "Python", "matching_score": 0.7268499732017517, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/bgp/ios/bgp.py" }, { "content": "'''\nACL Genie Ops Object for IOS - CLI.\n'''\nfrom ..iosxe.acl import Acl as AclXE\n\nclass Acl(AclXE):\n pass", "id": "6599745", "language": "Python", "matching_score": 1, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/acl/ios/acl.py" }, { "content": "from .acl import *\n", "id": "10067050", "language": "Python", "matching_score": 0.886210024356842, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/acl/__init__.py" 
}, { "content": "#!/usr/bin/env python\n\n# Python\nimport unittest\nfrom unittest.mock import Mock\n\n# Genie\nfrom genie.conf.tests import TestCase\nfrom genie.conf import Genie\nfrom genie.conf.base import Testbed, Device, Link, Interface\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning\n\n# Stp\nfrom genie.libs.conf.acl import Acl\n\n\nclass test_acl(TestCase):\n\n def setUp(self):\n \n # Set Genie Tb\n testbed = Testbed()\n Genie.testbed = testbed\n \n # Device\n self.dev1 = Device(name='PE1', testbed=testbed, os='iosxe')\n\n def test_acl_full_config(self):\n\n # For failures\n self.maxDiff = None\n \n # Pim object\n acl = Acl()\n self.dev1.add_feature(acl)\n\n # ipv4\n acl.device_attr[self.dev1].acl_attr['ipv4_acl']\\\n .acl_type = 'ipv4-acl-type'\n acl.device_attr[self.dev1].acl_attr['ipv4_acl']\\\n .ace_attr['20'].actions_forwarding = 'permit'\n acl.device_attr[self.dev1].acl_attr['ipv4_acl']\\\n .ace_attr['20'].protocol = 'ip'\n acl.device_attr[self.dev1].acl_attr['ipv4_acl']\\\n .ace_attr['20'].src = 'any'\n acl.device_attr[self.dev1].acl_attr['ipv4_acl']\\\n .ace_attr['20'].dst = 'any'\n\n acl.device_attr[self.dev1].acl_attr['ipv4_acl']\\\n .ace_attr['10'].actions_forwarding = 'deny'\n acl.device_attr[self.dev1].acl_attr['ipv4_acl']\\\n .ace_attr['10'].protocol = 'tcp'\n acl.device_attr[self.dev1].acl_attr['ipv4_acl']\\\n .ace_attr['10'].src = '1.1.1.1 255.255.255.0'\n acl.device_attr[self.dev1].acl_attr['ipv4_acl']\\\n .ace_attr['10'].src_operator = 'eq'\n acl.device_attr[self.dev1].acl_attr['ipv4_acl']\\\n .ace_attr['10'].src_port = '37 32 www'\n acl.device_attr[self.dev1].acl_attr['ipv4_acl']\\\n .ace_attr['10'].dst = 'host 2.2.2.2'\n acl.device_attr[self.dev1].acl_attr['ipv4_acl']\\\n .ace_attr['10'].dst_operator = 'lt'\n acl.device_attr[self.dev1].acl_attr['ipv4_acl']\\\n .ace_attr['10'].dst_port = '20'\n acl.device_attr[self.dev1].acl_attr['ipv4_acl']\\\n .ace_attr['10'].established = True\n acl.device_attr[self.dev1].acl_attr['ipv4_acl']\\\n .ace_attr['10'].precedence = 'flash'\n acl.device_attr[self.dev1].acl_attr['ipv4_acl']\\\n .ace_attr['10'].option = 'ssr'\n acl.device_attr[self.dev1].acl_attr['ipv4_acl']\\\n .ace_attr['10'].actions_logging = 'log-syslog'\n acl.device_attr[self.dev1].acl_attr['ipv4_acl']\\\n .interface_attr['GigabitEthernet2/0/15'].if_in = True\n\n # ipv6\n acl.device_attr[self.dev1].acl_attr['ipv6_acl']\\\n .acl_type = 'ipv6-acl-type'\n acl.device_attr[self.dev1].acl_attr['ipv6_acl']\\\n .ace_attr['20'].actions_forwarding = 'permit'\n acl.device_attr[self.dev1].acl_attr['ipv6_acl']\\\n .ace_attr['20'].protocol = 'ipv6'\n acl.device_attr[self.dev1].acl_attr['ipv6_acl']\\\n .ace_attr['20'].src = 'any'\n acl.device_attr[self.dev1].acl_attr['ipv6_acl']\\\n .ace_attr['20'].dst = 'any'\n acl.device_attr[self.dev1].acl_attr['ipv6_acl']\\\n .ace_attr['20'].dscp = 'cs7'\n acl.device_attr[self.dev1].acl_attr['ipv6_acl']\\\n .ace_attr['20'].actions_logging = 'log-syslog'\n\n acl.device_attr[self.dev1].acl_attr['ipv6_acl']\\\n .ace_attr['10'].actions_forwarding = 'deny'\n acl.device_attr[self.dev1].acl_attr['ipv6_acl']\\\n .ace_attr['10'].protocol = 'tcp'\n acl.device_attr[self.dev1].acl_attr['ipv6_acl']\\\n .ace_attr['10'].src = 'any'\n acl.device_attr[self.dev1].acl_attr['ipv6_acl']\\\n .ace_attr['10'].src_operator = 'eq'\n acl.device_attr[self.dev1].acl_attr['ipv6_acl']\\\n .ace_attr['10'].src_port = 'www 8443'\n acl.device_attr[self.dev1].acl_attr['ipv6_acl']\\\n .ace_attr['10'].dst = 'host 2001:2::2'\n 
acl.device_attr[self.dev1].acl_attr['ipv6_acl']\\\n .interface_attr['GigabitEthernet2/0/15'].if_in = True\n\n # mac\n acl.device_attr[self.dev1].acl_attr['mac_acl']\\\n .acl_type = 'eth-acl-type'\n acl.device_attr[self.dev1].acl_attr['mac_acl']\\\n .ace_attr['20'].actions_forwarding = 'permit'\n acl.device_attr[self.dev1].acl_attr['mac_acl']\\\n .ace_attr['20'].src = 'host aaaa.aaaa.aaaa'\n acl.device_attr[self.dev1].acl_attr['mac_acl']\\\n .ace_attr['20'].dst = 'host bbbb.bbbb.bbbb'\n acl.device_attr[self.dev1].acl_attr['mac_acl']\\\n .ace_attr['20'].ether_type = 'aarp'\n\n acl.device_attr[self.dev1].acl_attr['mac_acl']\\\n .ace_attr['10'].actions_forwarding = 'deny'\n acl.device_attr[self.dev1].acl_attr['mac_acl']\\\n .ace_attr['10'].src = 'host 0000.0000.0000'\n acl.device_attr[self.dev1].acl_attr['mac_acl']\\\n .ace_attr['10'].dst = 'host 0000.0000.0000'\n\n acl.device_attr[self.dev1].acl_attr['mac_acl']\\\n .interface_attr['GigabitEthernet2/0/15'].if_in = True\n\n cfgs = acl.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[self.dev1.name]), '\\n'.\\\n join([\n 'ip access-list extended ipv4_acl',\n ' 10 deny tcp 1.1.1.1 255.255.255.0 eq 37 32 www host 2.2.2.2 lt 20 option ssr precedence flash established log',\n ' 20 permit ip any any',\n ' exit',\n 'interface GigabitEthernet2/0/15',\n ' ip access-group ipv4_acl in',\n ' exit',\n 'ipv6 access-list ipv6_acl',\n ' sequence 10 deny tcp any eq www 8443 host 2001:2::2',\n ' sequence 20 permit ipv6 any any dscp cs7 log',\n ' exit',\n 'interface GigabitEthernet2/0/15',\n ' ipv6 traffic-filter ipv6_acl in',\n ' exit',\n 'mac access-list extended mac_acl',\n ' deny host 0000.0000.0000 host 0000.0000.0000',\n ' permit host aaaa.aaaa.aaaa host bbbb.bbbb.bbbb aarp',\n ' exit',\n 'interface GigabitEthernet2/0/15',\n ' mac access-group mac_acl in',\n ' exit',\n \n ]))\n\n cfgs = acl.build_unconfig(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[self.dev1.name]), '\\n'.\\\n join([\n 'no ip access-list extended ipv4_acl',\n 'interface GigabitEthernet2/0/15',\n ' no ip access-group ipv4_acl in',\n ' exit',\n 'no ipv6 access-list ipv6_acl',\n 'interface GigabitEthernet2/0/15',\n ' no ipv6 traffic-filter ipv6_acl in',\n ' exit',\n 'no mac access-list extended mac_acl',\n 'interface GigabitEthernet2/0/15',\n ' no mac access-group mac_acl in',\n ' exit',\n ]))\n\n # uncfg with attributes\n cfgs = acl.build_unconfig(apply=False,\n attributes={'device_attr': {\n self.dev1: {\n 'acl_attr': {\n 'ipv4_acl': {\n 'acl_type': None,\n 'ace_attr': {\n '20': {\n 'protocol': None,\n 'actions_forwarding': None,\n 'src': None,\n 'dst': None\n }\n }\n },\n 'ipv6_acl': {\n 'acl_type': None,\n 'ace_attr': {\n '20': {\n 'protocol': None,\n 'actions_forwarding': None,\n 'src': None,\n 'dst': None,\n 'dscp': None,\n 'actions_logging': None\n }\n }\n },\n 'mac_acl': {\n 'acl_type': None,\n 'ace_attr': {\n '10': {\n 'actions_forwarding': None,\n 'src': None,\n 'dst': None,\n }\n }\n },\n },}}})\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[self.dev1.name]), '\\n'.\\\n join([\n 'ip access-list extended ipv4_acl',\n ' no 20 permit ip any any',\n ' exit',\n 'ipv6 access-list ipv6_acl',\n ' no sequence 20 permit ipv6 any any dscp cs7 log',\n ' exit',\n 'mac access-list extended mac_acl',\n ' no deny host 0000.0000.0000 host 0000.0000.0000',\n ' exit',\n ])) \n\nif __name__ == '__main__':\n unittest.main()\n", "id": "549417", "language": "Python", "matching_score": 
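# The nested attributes mapping passed to build_unconfig() above mirrors the
# conf-object hierarchy (device_attr -> acl_attr -> ace_attr), with None at a
# leaf meaning "unconfigure this attribute". A minimal illustration of the
# shape, using a string device key purely for readability:
attributes = {
    'device_attr': {
        'PE1': {                       # the test passes the Device object itself
            'acl_attr': {
                'ipv4_acl': {
                    'ace_attr': {
                        '20': {'src': None, 'dst': None},
                    },
                },
            },
        },
    },
}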
3.6799519062042236, "max_stars_count": 0, "path": "pkgs/conf-pkg/src/genie/libs/conf/acl/iosxe/tests/test_acl.py" }, { "content": "\n# import python\nfrom abc import ABC\n\n# import genie\nfrom genie.conf.base.config import CliConfig\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning, \\\n AttributesHelper\n\n\nclass Acl(ABC):\n\n class DeviceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n \n # acl attributes\n for sub, attributes2 in attributes.mapping_values('acl_attr',\n sort=True, keys=self.acl_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n\n if apply:\n if configurations:\n self.device.configure(configurations)\n else:\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n class AclAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n # acl_type\n # 'ipv4-acl-type','ipv6-acl-type','eth-acl-type\n if attributes.value('acl_type'):\n if 'ipv4-acl-type' in attributes.value('acl_type'):\n self.cmd = 'ip access-list extended '\n elif 'ipv6-acl-type' in attributes.value('acl_type'):\n self.cmd = 'ipv6 access-list '\n elif 'eth-acl-type' in attributes.value('acl_type'):\n self.cmd = 'mac access-list extended '\n else:\n self.cmd = None\n else:\n self.cmd = None\n\n with configurations.submode_context(\n attributes.format('{cmd}{name}'.format(\n cmd=self.cmd, name=self.acl_name), force=True)):\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n self.acl_type = self.parent.acl_type\n\n # ace attributes\n for sub, attributes2 in attributes.mapping_values('ace_attr',\n sort=True, keys=self.ace_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n # interface attributes\n for sub, attributes2 in attributes.mapping_values('interface_attr',\n sort=True, keys=self.interface_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n\n class AceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None,\n unconfig=False, **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n self.acl_type = self.parent.acl_type\n\n # [<seq>] {deny|permit} <protocol> <src> [<src_operator> <src_port>]\n # <dst> [<dst_operator> <dst_port>] [option <option>] \n # [precedence <precedence>] [established] [log] [ttl <ttl_operator> <ttl>]\n if 'ipv4' in self.parent.acl_type:\n cmd = '{seq} {actions_forwarding} {protocol} {src}' if self.seq else \\\n '{actions_forwarding} {protocol} {src}'\n if attributes.value('src_operator') and 
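# Restating the template-assembly technique used by AceAttributes above in
# isolation: optional fragments are appended to the base command before a
# single format() call. All values below are invented for illustration.
cmd = '{seq} {actions_forwarding} {protocol} {src}'
cmd += ' {dst}'
cmd += ' {dst_operator} {dst_port}'     # appended only when both are set
print(cmd.format(seq='10', actions_forwarding='deny', protocol='tcp',
                 src='any', dst='host 2.2.2.2',
                 dst_operator='lt', dst_port='20'))
# -> 10 deny tcp any host 2.2.2.2 lt 20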
attributes.value('src_port'):\n cmd += ' {src_operator} {src_port}'\n cmd += ' {dst}'\n if attributes.value('dst_operator') and attributes.value('dst_port'):\n cmd += ' {dst_operator} {dst_port}'\n if attributes.value('option'):\n cmd += ' option {option}'\n if attributes.value('precedence'):\n cmd += ' precedence {precedence}'\n if attributes.value('established'):\n cmd += ' established'\n if attributes.value('actions_logging') and \\\n 'syslog' in attributes.value('actions_logging'):\n cmd += ' log'\n if attributes.value('ttl_operator') and attributes.value('ttl'):\n cmd += ' ttl {ttl_operator} {ttl}'\n\n # [sequence <seq>] {permit|deny} [<protocol>] <src>\n # [<src_operator> <src_port>] <dst> [<dst_operator> <dst_port>]\n # [dscp <dscp>] [established] [log]\n elif 'ipv6' in self.parent.acl_type:\n cmd = 'sequence {seq} {actions_forwarding} {protocol} {src}' if self.seq else \\\n '{actions_forwarding} {protocol} {src}'\n if attributes.value('src_operator') and attributes.value('src_port'):\n cmd += ' {src_operator} {src_port}'\n cmd += ' {dst}'\n if attributes.value('dst_operator') and attributes.value('dst_port'):\n cmd += ' {dst_operator} {dst_port}'\n if attributes.value('dscp'):\n cmd += ' dscp {dscp}'\n if attributes.value('established'):\n cmd += ' established'\n if attributes.value('actions_logging') and \\\n 'syslog' in attributes.value('actions_logging'):\n cmd += ' log'\n elif 'eth' in self.parent.acl_type:\n # {permit|deny} <src> <dst> [<ether_type>]\n cmd = '{actions_forwarding} {src} {dst}'\n if attributes.value('ether_type'):\n cmd += ' {ether_type}'\n\n # append the line configuration\n configurations.append_line(attributes.format(cmd, force=True),\n unconfig_cmd=attributes.format('no ' + cmd, force=True))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None,\n **kwargs):\n return self.build_config(apply=apply,\n attributes=attributes,\n unconfig=True, **kwargs)\n\n\n class InterfaceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None,\n unconfig=False, **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n self.acl_name = self.parent.acl_name\n\n with configurations.submode_context(\n attributes.format('interface {interface_id}', force=True)): \n \n if attributes.value('if_in') and 'ipv4' in self.parent.acl_type:\n configurations.append_line(\n attributes.format('ip access-group {acl_name} in', force=True))\n elif attributes.value('if_in') and 'ipv6' in self.parent.acl_type:\n configurations.append_line(\n attributes.format('ipv6 traffic-filter {acl_name} in', force=True))\n elif attributes.value('if_in') and 'eth' in self.parent.acl_type:\n configurations.append_line(\n attributes.format('mac access-group {acl_name} in', force=True))\n\n if attributes.value('if_out') and 'ipv4' in self.parent.acl_type:\n configurations.append_line(\n attributes.format('ip access-group {acl_name} out', force=True))\n elif attributes.value('if_out') and 'ipv6' in self.parent.acl_type:\n configurations.append_line(\n attributes.format('ipv6 traffic-filter {acl_name} out', force=True))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None,\n **kwargs):\n return self.build_config(apply=apply,\n attributes=attributes,\n unconfig=True, **kwargs)", "id": "6762816", "language": "Python", "matching_score": 4.555123805999756, "max_stars_count": 94, "path": 
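# The interface-binding logic above selects the CLI verb from the ACL type; a
# compact restatement of that mapping (templates copied from the code,
# direction handling simplified):
binding = {
    'ipv4': 'ip access-group {name} {direction}',
    'ipv6': 'ipv6 traffic-filter {name} {direction}',
    'eth':  'mac access-group {name} {direction}',
}
print(binding['ipv6'].format(name='ipv6_acl', direction='in'))
# -> ipv6 traffic-filter ipv6_acl in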
"pkgs/conf-pkg/src/genie/libs/conf/acl/iosxe/acl.py" }, { "content": "\n__all__ = (\n 'Acl',\n)\n# import genie\nfrom genie.decorator import managedattribute\nfrom genie.conf.base import ConfigurableBase\nfrom genie.conf.base.config import CliConfig\nfrom genie.conf.base.base import DeviceFeature, InterfaceFeature\nfrom genie.conf.base.attributes import DeviceSubAttributes,\\\n SubAttributesDict,\\\n AttributesHelper, \\\n KeyedSubAttributes\n# import genie.libs\nfrom genie.conf.base.attributes import InterfaceSubAttributes\n\n\n# Structure\n# Acl\n# +- DeviceAttributes\n# +- AclAttributes\n# +- AceAttributes\n# +- InterfaceAttributes\n\n\nclass Acl(DeviceFeature):\n\n # device attributes\n acl_type = managedattribute(\n name='acl_type',\n default=None,\n type=(None, managedattribute.test_in(['ipv4-acl-type','ipv6-acl-type','eth-acl-type'])))\n\n acl_name = managedattribute(\n name='acl_name',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n seq = managedattribute(\n name='seq',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n actions_forwarding = managedattribute(\n name='actions_forwarding',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n protocol = managedattribute(\n name='protocol',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n src = managedattribute(\n name='src',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n src_operator = managedattribute(\n name='src_operator',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n src_port = managedattribute(\n name='src_port',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n dst = managedattribute(\n name='dst',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n dst_operator = managedattribute(\n name='dst_operator',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n dst_port = managedattribute(\n name='dst_port',\n default=None,\n type=(None, managedattribute.test_istype(str),\n managedattribute.test_istype(str)))\n\n option = managedattribute(\n name='option',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n precedence = managedattribute(\n name='precedence',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n dscp = managedattribute(\n name='dscp',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n established = managedattribute(\n name='established',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n actions_logging = managedattribute(\n name='actions_logging',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n ttl = managedattribute(\n name='ttl',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n ttl_operator = managedattribute(\n name='ttl_operator',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n ether_type = managedattribute(\n name='ether_type',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n interface_id = managedattribute(\n name='interface_id',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n if_in = managedattribute(\n name='if_in',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n if_out = managedattribute(\n name='if_out',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n\n class DeviceAttributes(DeviceSubAttributes):\n\n\n class AclAttributes(KeyedSubAttributes):\n\n def __init__(self, parent, key):\n self.acl_name = key\n 
super().__init__(parent)\n \n class InterfaceAttributes(InterfaceSubAttributes):\n\n def __init__(self, parent, key):\n self.interface_id = key\n super().__init__(parent, key)\n \n \n interface_attr = managedattribute(\n name='interface_attr',\n read_only=True,\n doc=InterfaceAttributes.__doc__)\n\n @interface_attr.initter\n def interface_attr(self):\n return SubAttributesDict(\n self.InterfaceAttributes, parent=self)\n\n \n class AceAttributes(KeyedSubAttributes):\n\n def __init__(self, parent, key):\n self.seq = key\n super().__init__(parent)\n \n \n ace_attr = managedattribute(\n name='ace_attr',\n read_only=True,\n doc=AceAttributes.__doc__)\n\n @ace_attr.initter\n def ace_attr(self):\n return SubAttributesDict(\n self.AceAttributes, parent=self)\n\n acl_attr = managedattribute(\n name='acl_attr',\n read_only=True,\n doc=AclAttributes.__doc__)\n\n @acl_attr.initter\n def acl_attr(self):\n return SubAttributesDict(\n self.AclAttributes, parent=self)\n\n device_attr = managedattribute(\n name='device_attr',\n read_only=True,\n doc=DeviceAttributes.__doc__)\n\n @device_attr.initter\n def device_attr(self):\n return SubAttributesDict(self.DeviceAttributes, parent=self)\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n def build_config(self, devices=None, apply=True, attributes=None,\n **kwargs):\n cfgs = {}\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n\n if devices is None:\n devices = self.devices\n devices = set(devices)\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr',\n keys=devices, sort=True):\n cfgs[key] = sub.build_config(apply=False, attributes=attributes2)\n\n if apply:\n self.testbed.config_on_devices(cfgs, fail_invalid=True)\n else:\n return cfgs\n\n def build_unconfig(self, devices=None, apply=True, attributes=None,\n **kwargs):\n cfgs = {}\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n\n if devices is None:\n devices = self.devices\n devices = set(devices)\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr',\n keys=devices, sort=True):\n cfgs[key] = sub.build_unconfig(apply=False, attributes=attributes2)\n\n if apply:\n self.testbed.config_on_devices(cfgs, fail_invalid=True)\n else:\n return cfgs\n", "id": "6408002", "language": "Python", "matching_score": 3.9330430030822754, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/acl/acl.py" }, { "content": "\n__all__ = (\n 'Stp',\n )\n\n# import python\nimport operator\n\n# import genie\nfrom genie.decorator import managedattribute\nfrom genie.conf.base.config import CliConfig\nfrom genie.conf.base.base import DeviceFeature, InterfaceFeature\nfrom genie.conf.base.attributes import DeviceSubAttributes,\\\n SubAttributesDict,\\\n AttributesHelper, \\\n KeyedSubAttributes\n# import genie.libs\nfrom genie.conf.base.attributes import InterfaceSubAttributes\n\n\n# Structure\n# Stp\n# +- Device\n# +- Mode\n# +- Pvst\n# | +- Vlan\n# | | +- Interface\n# | +- Interface\n# +- Pvrstag\n# | +- Interface\n# | +- Vlan\n# +- Pvstag\n# | +- Interface\n# | +- Vlan\n# +- Mst\n# | +- Instance\n# | +- Interface\n# | +- Interface\n# +- Mstag\n# +- Interface\n# +- Instance\n\nclass Stp(DeviceFeature, InterfaceFeature):\n\n # callable to check regexp\n @staticmethod\n def test_isregexp(reg):\n '''Create a transformation function that allows only an object\n contained in the specified reg.\n\n Use with the managedattribute 'type' argument to accept only an object\n contained in the specified 
reg (where `value in reg`)\n\n Upon success, the resulting transformation function returns the value\n unchanged.\n\n Args:\n reg: Any reg, such as an regexp pattern ('\\d+')\n\n Example:\n\n attr = managedattribute(\n name='attr',\n type=managedattribute.test_in({1, 2, 3}))\n\n attr = managedattribute(\n name='attr',\n type=managedattribute.test_in(range(10)))\n '''\n\n msg = 'Not string like %r.' % (reg,)\n import re\n\n def f(value):\n if not re.search(reg, value):\n raise ValueError(msg)\n return value\n\n return f\n\n # callable to check regexp\n @staticmethod\n def test_isincrements_in_range(base, container):\n '''Create a transformation function that allows only an object\n in increments of base number, and in a range of numbers\n\n Args:\n base: Any integer, such as 16, 4096\n\n Example:\n\n attr = managedattribute(\n name='attr',\n type=managedattribute.test_isincrements(16))\n '''\n\n msg = 'Not in increments of %r.' % (base,)\n\n def f(value):\n if value not in container:\n raise ValueError('Not in %r.' % (container,))\n\n if value%base:\n raise ValueError(msg)\n return value\n\n return f\n\n # add method to managedattribute\n managedattribute.test_isregexp = test_isregexp\n managedattribute.test_isincrements_in_range = test_isincrements_in_range\n\n # device attributes\n bridge_assurance = managedattribute(\n name='bridge_assurance',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n etherchannel_misconfig_guard = managedattribute(\n name='etherchannel_misconfig_guard',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n bpduguard_timeout_recovery = managedattribute(\n name='bpduguard_timeout_recovery',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n loop_guard = managedattribute(\n name='loop_guard',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n bpdu_guard = managedattribute(\n name='bpdu_guard',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n bpdu_filter = managedattribute(\n name='bpdu_filter',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n hold_count = managedattribute(\n name='hold_count',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # mode mst attributes\n mst_domain = managedattribute(\n name='mst_domain',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n m_max_hop = managedattribute(\n name='m_max_hop',\n default=None,\n type=(None, managedattribute.test_in(range(1, 256))))\n\n m_hello_time = managedattribute(\n name='m_hello_time',\n default=None,\n type=(None, managedattribute.test_in(range(1, 11))))\n\n m_max_age = managedattribute(\n name='m_max_age',\n default=None,\n type=(None, managedattribute.test_in(range(6, 41))))\n\n m_forwarding_delay = managedattribute(\n name='m_forwarding_delay',\n default=None,\n type=(None, managedattribute.test_in(range(4, 31))))\n\n mst_id = managedattribute(\n name='mst_id',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n \n m_vlans = managedattribute(\n name='m_vlans',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n \n m_name = managedattribute(\n name='m_name',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n m_revision = managedattribute(\n name='m_revision',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n m_bridge_priority = managedattribute(\n name='m_bridge_priority',\n default=None,\n type=(None, managedattribute.test_isincrements_in_range(\n base=4096, 
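# The increments validator above in standalone form: a value passes only if it
# falls in the container and is an exact multiple of the base — e.g. bridge
# priorities, which must be multiples of 4096 in 0..61440.
def in_increments(value, base=4096, container=range(0, 61441)):
    if value not in container:
        raise ValueError('Not in %r.' % (container,))
    if value % base:
        raise ValueError('Not in increments of %r.' % (base,))
    return value

print(in_increments(8192))   # ok: multiple of 4096 within range
# in_increments(8000)        # would raise: not a multiple of 4096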
container=range(0, 61441))))\n\n m_inst_if_cost = managedattribute(\n name='m_inst_if_cost',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n m_inst_if_port_priority = managedattribute(\n name='m_inst_if_port_priority',\n default=None,\n type=(None, managedattribute.test_isincrements_in_range(\n base=16, container=range(0, 241))))\n \n m_if_edge_port = managedattribute(\n name='m_if_edge_port',\n default=None,\n type=(None, managedattribute.test_in(['edge_enable','edge_disable','edge_auto'])))\n \n m_if_link_type = managedattribute(\n name='m_if_link_type',\n default=None,\n type=(None, managedattribute.test_in(['p2p','shared','auto'])))\n \n m_if_guard = managedattribute(\n name='m_if_guard',\n default=None,\n type=(None, managedattribute.test_in(['root','loop', 'none'])))\n\n m_if_bpdu_guard = managedattribute(\n name='m_if_bpdu_guard',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n m_if_bpdu_filter = managedattribute(\n name='m_if_bpdu_filter',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n m_if_hello_time = managedattribute(\n name='m_if_hello_time',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # mode mstag attributes\n mag_domain = managedattribute(\n name='mag_domain',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n mag_if_name = managedattribute(\n name='mag_if_name',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n mag_if_revision = managedattribute(\n name='mag_if_revision',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n mag_if_bridge_id = managedattribute(\n name='mag_if_bridge_id',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n \n mag_id = managedattribute(\n name='mag_id',\n default=None,\n type=(None, managedattribute.test_in(range(0, 4095))))\n\n mag_if_root_id = managedattribute(\n name='mag_if_root_id',\n default=None,\n type=(None, managedattribute.test_isregexp('\\w+\\.\\w+\\.\\w+')))\n \n mag_if_vlans = managedattribute(\n name='mag_if_vlans',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n \n mag_if_priority = managedattribute(\n name='mag_if_priority',\n default=None,\n type=(None, managedattribute.test_in(range(0, 61441))))\n\n mag_if_root_priority = managedattribute(\n name='mag_if_root_priority',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # attribtues for pvst mode\n pvst_id = managedattribute(\n name='pvst_id',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n p_max_age = managedattribute(\n name='p_max_age',\n default=None,\n type=(None, managedattribute.test_in(range(6, 41))))\n\n p_hold_count = managedattribute(\n name='p_hold_count',\n default=None,\n type=(None, managedattribute.test_in(range(1, 11))))\n\n p_forwarding_delay = managedattribute(\n name='p_forwarding_delay',\n default=None,\n type=(None, managedattribute.test_in(range(4, 31))))\n\n vlan_id = managedattribute(\n name='vlan_id',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n v_hello_time = managedattribute(\n name='v_hello_time',\n default=None,\n type=(None, managedattribute.test_in(range(1, 11))))\n\n v_max_age = managedattribute(\n name='v_max_age',\n default=None,\n type=(None, managedattribute.test_in(range(6, 41))))\n\n v_forwarding_delay = managedattribute(\n name='v_forwarding_delay',\n default=None,\n type=(None, managedattribute.test_in(range(4, 31))))\n\n v_bridge_priority = managedattribute(\n 
name='v_bridge_priority',\n default=None,\n type=(None, managedattribute.test_isincrements_in_range(\n base=4096, container=range(0, 61441))))\n\n v_interface = managedattribute(\n name='v_interface',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n \n v_if_cost = managedattribute(\n name='v_if_cost',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n v_if_port_priority = managedattribute(\n name='v_if_port_priority',\n default=None,\n type=(None, managedattribute.test_isincrements_in_range(\n base=16, container=range(0, 241))))\n\n p_if_edge_port = managedattribute(\n name='p_if_edge_port',\n default=None,\n type=(None, managedattribute.test_in(['edge_enable','edge_disable','edge_auto'])))\n\n p_if_link_type = managedattribute(\n name='p_if_link_type',\n default=None,\n type=(None, managedattribute.test_in(['p2p','shared','auto'])))\n\n p_if_guard = managedattribute(\n name='p_if_guard',\n default=None,\n type=(None, managedattribute.test_in(['root','loop','none'])))\n\n p_if_bpdu_guard = managedattribute(\n name='p_if_bpdu_guard',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n p_if_bpdu_filter = managedattribute(\n name='p_if_bpdu_filter',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n p_if_hello_time = managedattribute(\n name='p_if_hello_time',\n default=None,\n type=(None, managedattribute.test_in([1, 2])))\n\n # attributes for mode pvrstag\n prag_domain = managedattribute(\n name='prag_domain',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n prag_if_v_root_priority = managedattribute(\n name='prag_if_v_root_priority',\n default=None,\n type=(None, managedattribute.test_in(range(0, 61441))))\n\n prag_if_v_root_id = managedattribute(\n name='prag_if_v_root_id',\n default=None,\n type=(None, managedattribute.test_isregexp('\\w+\\.\\w+\\.\\w+')))\n \n prag_if_v_root_cost = managedattribute(\n name='prag_if_v_root_cost',\n default=None,\n type=(None, managedattribute.test_in(range(0, 4294967296))))\n \n prag_if_v_priority = managedattribute(\n name='prag_if_v_priority',\n default=None,\n type=(None, managedattribute.test_in(range(0, 61441))))\n\n prag_if_v_bridge_id = managedattribute(\n name='prag_if_v_bridge_id',\n default=None,\n type=(None, managedattribute.test_isregexp('\\w+\\.\\w+\\.\\w+')))\n\n prag_if_v_port_priority = managedattribute(\n name='prag_if_v_port_priority',\n default=None,\n type=(None, managedattribute.test_in(range(0, 241))))\n\n prag_if_v_max_age = managedattribute(\n name='prag_if_v_max_age',\n default=None,\n type=(None, managedattribute.test_in(range(6, 41))))\n\n prag_if_v_hello_time = managedattribute(\n name='prag_if_v_hello_time',\n default=None,\n type=(None, managedattribute.test_in([1,2])))\n\n # attributes for mode pvstag\n pag_domain = managedattribute(\n name='pag_domain',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n pag_if_v_root_priority = managedattribute(\n name='pag_if_v_root_priority',\n default=None,\n type=(None, managedattribute.test_in(range(0, 65536))))\n\n pag_if_v_root_id = managedattribute(\n name='pag_if_v_root_id',\n default=None,\n type=(None, managedattribute.test_isregexp('\\w+\\.\\w+\\.\\w+')))\n \n pag_if_v_root_cost = managedattribute(\n name='pag_if_v_root_cost',\n default=None,\n type=(None, managedattribute.test_in(range(0, 4294967296))))\n \n pag_if_v_priority = managedattribute(\n name='pag_if_v_priority',\n default=None,\n type=(None, managedattribute.test_in(range(0, 65536))))\n\n 
pag_if_v_bridge_id = managedattribute(\n name='pag_if_v_bridge_id',\n default=None,\n type=(None, managedattribute.test_isregexp('\\w+\\.\\w+\\.\\w+')))\n\n pag_if_v_port_priority = managedattribute(\n name='pag_if_v_port_priority',\n default=None,\n type=(None, managedattribute.test_in(range(0, 256))))\n\n pag_if_v_max_age = managedattribute(\n name='pag_if_v_max_age',\n default=None,\n type=(None, managedattribute.test_in(range(6, 41))))\n\n pag_if_v_hello_time = managedattribute(\n name='pag_if_v_hello_time',\n default=None,\n type=(None, managedattribute.test_in([1,2])))\n\n\n class DeviceAttributes(DeviceSubAttributes):\n\n class ModeAttributes(KeyedSubAttributes):\n def __init__(self, parent, key):\n self.mode = key\n super().__init__(parent)\n\n mode = property(operator.attrgetter('_mode'))\n\n @mode.setter\n def mode(self, d):\n assert d in ['mstp', 'mstag', 'pvst', 'rapid-pvst', 'pvrstag', 'pvstag'], \\\n \"should be 'mstp', 'mstag', 'pvst', 'rapid-pvst', 'pvrstag', 'pvstag' \"\n self._mode = d\n\n # ---------------\n # mode MST\n # ---------------\n class MstAttributes(KeyedSubAttributes):\n def __init__(self, parent, key):\n self.mst_domain = key\n super().__init__(parent)\n\n # +- Mst\n # | +- Interface\n class InterfaceAttributes(InterfaceSubAttributes):\n pass\n\n interface_attr = managedattribute(\n name='interface_attr',\n read_only=True,\n doc=InterfaceAttributes.__doc__)\n\n @interface_attr.initter\n def interface_attr(self):\n return SubAttributesDict(self.InterfaceAttributes, parent=self)\n\n # +- Mst\n # | +- Instance\n # | +- Interface\n class InstanceAttributes(KeyedSubAttributes):\n def __init__(self, parent, key):\n self.mst_id = key\n super().__init__(parent)\n\n class InterfaceAttributes(InterfaceSubAttributes):\n pass\n\n interface_attr = managedattribute(\n name='interface_attr',\n read_only=True,\n doc=InterfaceAttributes.__doc__)\n\n @interface_attr.initter\n def interface_attr(self):\n return SubAttributesDict(self.InterfaceAttributes, parent=self)\n\n instance_attr = managedattribute(\n name='instance_attr',\n read_only=True,\n doc=InstanceAttributes.__doc__)\n\n @instance_attr.initter\n def instance_attr(self):\n return SubAttributesDict(self.InstanceAttributes, parent=self)\n\n mst_attr = managedattribute(\n name='mst_attr',\n read_only=True,\n doc=MstAttributes.__doc__)\n\n @mst_attr.initter\n def mst_attr(self):\n return SubAttributesDict(self.MstAttributes, parent=self)\n\n # ---------------\n # mode Mstag\n # ---------------\n class MstagAttributes(KeyedSubAttributes):\n def __init__(self, parent, key):\n self.mag_domain = key\n super().__init__(parent)\n\n # +- Mstag\n # | +- Interface\n # | +- Instance\n class InterfaceAttributes(InterfaceSubAttributes):\n\n class InstanceAttributes(KeyedSubAttributes):\n def __init__(self, parent, key):\n self.mag_id = key\n super().__init__(parent)\n\n instance_attr = managedattribute(\n name='instance_attr',\n read_only=True,\n doc=InstanceAttributes.__doc__)\n\n @instance_attr.initter\n def instance_attr(self):\n return SubAttributesDict(self.InstanceAttributes, parent=self)\n\n interface_attr = managedattribute(\n name='interface_attr',\n read_only=True,\n doc=InterfaceAttributes.__doc__)\n\n @interface_attr.initter\n def interface_attr(self):\n return SubAttributesDict(self.InterfaceAttributes, parent=self)\n\n mstag_attr = managedattribute(\n name='mstag_attr',\n read_only=True,\n doc=MstagAttributes.__doc__)\n\n @mstag_attr.initter\n def mstag_attr(self):\n return SubAttributesDict(self.MstagAttributes, 
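# The mode validation above uses the attrgetter-property idiom: a read-only
# property built from operator.attrgetter, extended with a checking setter.
# The same idiom in self-contained form (mode list trimmed for brevity):
import operator

class ModeHolder:
    mode = property(operator.attrgetter('_mode'))

    @mode.setter
    def mode(self, d):
        assert d in ('mstp', 'pvst', 'rapid-pvst'), 'unsupported STP mode'
        self._mode = d

h = ModeHolder()
h.mode = 'mstp'
print(h.mode)   # -> mstp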
parent=self)\n\n\n # ---------------\n # mode Pvst\n # ---------------\n class PvstAttributes(KeyedSubAttributes):\n def __init__(self, parent, key):\n self.pvst_id = key\n super().__init__(parent)\n\n # +- Pvst\n # | +- Interface\n class InterfaceAttributes(InterfaceSubAttributes):\n pass\n\n interface_attr = managedattribute(\n name='interface_attr',\n read_only=True,\n doc=InterfaceAttributes.__doc__)\n\n @interface_attr.initter\n def interface_attr(self):\n return SubAttributesDict(self.InterfaceAttributes, parent=self)\n\n # +- Pvst\n # | +- Vlan\n # | +- Interface\n class VlanAttributes(KeyedSubAttributes):\n def __init__(self, parent, key):\n self.vlan = key\n super().__init__(parent)\n\n class InterfaceAttributes(InterfaceSubAttributes):\n pass\n\n interface_attr = managedattribute(\n name='interface_attr',\n read_only=True,\n doc=InterfaceAttributes.__doc__)\n\n @interface_attr.initter\n def interface_attr(self):\n return SubAttributesDict(self.InterfaceAttributes, parent=self)\n\n vlan_attr = managedattribute(\n name='vlan_attr',\n read_only=True,\n doc=VlanAttributes.__doc__)\n\n @vlan_attr.initter\n def vlan_attr(self):\n return SubAttributesDict(self.VlanAttributes, parent=self)\n\n pvst_attr = managedattribute(\n name='pvst_attr',\n read_only=True,\n doc=PvstAttributes.__doc__)\n\n @pvst_attr.initter\n def pvst_attr(self):\n return SubAttributesDict(self.PvstAttributes, parent=self)\n\n\n # ---------------\n # mode Pvrstag\n # ---------------\n class PvrstagAttributes(KeyedSubAttributes):\n def __init__(self, parent, key):\n self.prag_domain = key\n super().__init__(parent)\n\n # +- Pvrstag\n # | +- Interface\n # | +- Vlan\n class InterfaceAttributes(InterfaceSubAttributes):\n \n class VlanAttributes(KeyedSubAttributes):\n def __init__(self, parent, key):\n self.prag_vlan = key\n super().__init__(parent)\n\n vlan_attr = managedattribute(\n name='vlan_attr',\n read_only=True,\n doc=VlanAttributes.__doc__)\n\n @vlan_attr.initter\n def vlan_attr(self):\n return SubAttributesDict(self.VlanAttributes, parent=self)\n\n interface_attr = managedattribute(\n name='interface_attr',\n read_only=True,\n doc=InterfaceAttributes.__doc__)\n\n @interface_attr.initter\n def interface_attr(self):\n return SubAttributesDict(self.InterfaceAttributes, parent=self)\n\n pvrstag_attr = managedattribute(\n name='pvrstag_attr',\n read_only=True,\n doc=PvrstagAttributes.__doc__)\n\n @pvrstag_attr.initter\n def pvrstag_attr(self):\n return SubAttributesDict(self.PvrstagAttributes, parent=self)\n\n # ---------------\n # mode Pvstag\n # ---------------\n class PvstagAttributes(PvrstagAttributes):\n def __init__(self, parent, key):\n self.pag_domain = key\n super().__init__(parent)\n\n # +- Pvstag\n # | +- Interface\n # | +- Vlan\n\n pvstag_attr = managedattribute(\n name='pvstag_attr',\n read_only=True,\n doc=PvrstagAttributes.__doc__)\n\n @pvstag_attr.initter\n def pvstag_attr(self):\n return SubAttributesDict(self.PvstagAttributes, parent=self)\n\n\n mode_attr = managedattribute(\n name='mode_attr',\n read_only=True,\n doc=ModeAttributes.__doc__)\n\n @mode_attr.initter\n def mode_attr(self):\n return SubAttributesDict(self.ModeAttributes, parent=self)\n\n device_attr = managedattribute(\n name='device_attr',\n read_only=True,\n doc=DeviceAttributes.__doc__)\n\n @device_attr.initter\n def device_attr(self):\n return SubAttributesDict(self.DeviceAttributes, parent=self)\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n def build_config(self, devices=None, apply=True, 
attributes=None,\n **kwargs):\n cfgs = {}\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n\n if devices is None:\n devices = self.devices\n devices = set(devices)\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr',\n keys=devices, sort=True):\n cfgs[key] = sub.build_config(apply=False, attributes=attributes2)\n\n if apply:\n self.testbed.config_on_devices(cfgs, fail_invalid=True)\n else:\n return cfgs\n\n def build_unconfig(self, devices=None, apply=True, attributes=None,\n **kwargs):\n cfgs = {}\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n\n if devices is None:\n devices = self.devices\n devices = set(devices)\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr',\n keys=devices, sort=True):\n cfgs[key] = sub.build_unconfig(apply=False, attributes=attributes2)\n\n if apply:\n self.testbed.config_on_devices(cfgs, fail_invalid=True)\n else:\n return cfgs\n", "id": "10587026", "language": "Python", "matching_score": 3.527113199234009, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/stp/stp.py" }, { "content": "\n__all__ = (\n 'Lisp',\n)\n\n# Python\nfrom enum import Enum\n\n# Genie\nfrom genie.utils.cisco_collections import typedset\nfrom genie.decorator import managedattribute\nfrom genie.conf.base import ConfigurableBase\nfrom genie.conf.base.config import CliConfig\nfrom genie.conf.base.base import DeviceFeature, InterfaceFeature, LinkFeature\n\nfrom genie.libs.conf.base import Routing\nfrom genie.libs.conf.vrf import Vrf, VrfSubAttributes\nfrom genie.conf.base.attributes import DeviceSubAttributes, SubAttributesDict,\\\n AttributesHelper, KeyedSubAttributes,\\\n InterfaceSubAttributes\n\n\n# LISP Hierarchy\n# --------------\n# Lisp\n# +- DeviceAttributes\n# +- InterfaceAttributes\n# | +- MobilityDynamicEidAttributes\n# +- RouterInstanceAttributes\n# +- LocatorSetAttributes\n# | +- InterfaceAttributes\n# | +- InterfacdTypeAttributes\n# +- ServiceAttributes\n# | +- ItrMrAttributes\n# | +- EtrMsAttributes\n# | +- ProxyItrAttributes\n# +- InstanceAttributes\n# | +- DynamicEidAttributes\n# | +- DbMappingAttributes\n# | +- ServiceAttributes\n# | +- DbMappingAttributes\n# | +- UsePetrAttributes\n# | +- MapCacheAttributes\n# +- SiteAttributes\n# | +- InstanceIdAttributes\n# | +- EidRecordAttributes\n# +- ExtranetAttributes\n# +- InstanceIdAttributes\n# +- EidRecordProviderAttributes\n# +- EidRecordSubscriberAttributes\n\n\n# ==========================================================================\n# GLOBAL ENUM TYPES\n# ==========================================================================\n\nclass ENCAP(Enum):\n lisp = 'lisp'\n vxlan = 'vxlan'\n\nclass ETR_AUTH_KEY_TYPE(Enum):\n none = None\n sha1 = 'hmac-sha-1-96'\n sha2 = 'hmac-sha-256-128'\n\n\nclass Lisp(Routing, DeviceFeature):\n\n # ==========================================================================\n # CONF CLASS STRUCTURE\n # ==========================================================================\n\n # +- DeviceAttributes\n class DeviceAttributes(DeviceSubAttributes):\n\n # +- DeviceAttributes\n # +- InterfaceAttributes\n class InterfaceAttributes(InterfaceSubAttributes):\n\n # +- DeviceAttributes\n # +- InterfaceAttributes\n # +- MobilityDynamicEidAttributes\n class MobilityDynamicEidAttributes(KeyedSubAttributes):\n\n def __init__(self, parent, key):\n self.if_mobility_dynamic_eid_name = key\n super().__init__(parent)\n\n mobility_dynamic_eid_attr = managedattribute(\n 
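# Every Keyed* class in the LISP hierarchy above repeats the same two-line
# __init__: stash the key under a feature-specific name, then delegate to the
# parent. A generic standalone version of that pattern:
class KeyedChild:
    def __init__(self, parent, key):
        self.parent = parent
        self.key = key

class MobilityDynamicEid(KeyedChild):
    def __init__(self, parent, key):
        self.if_mobility_dynamic_eid_name = key   # feature-specific alias
        super().__init__(parent, key)

child = MobilityDynamicEid(parent=None, key='dyn-eid-1')
print(child.if_mobility_dynamic_eid_name)   # -> dyn-eid-1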
name='mobility_dynamic_eid_attr',\n \tread_only=True,\n \tdoc=MobilityDynamicEidAttributes.__doc__)\n\n @mobility_dynamic_eid_attr.initter\n def mobility_dynamic_eid_attr(self):\n return SubAttributesDict(self.MobilityDynamicEidAttributes, parent=self)\n\n intf_attr = managedattribute(\n name='intf_attr',\n read_only=True,\n doc=InterfaceAttributes.__doc__)\n\n @intf_attr.initter\n def intf_attr(self):\n return SubAttributesDict(self.InterfaceAttributes, parent=self)\n\n # +- DeviceAttributes\n # +- RouterInstanceAttributes\n class RouterInstanceAttributes(KeyedSubAttributes):\n\n def __init__(self, parent, key):\n self.lisp_router_instance_id = key\n super().__init__(parent)\n\n # +- DeviceAttributes\n # +- RouterInstanceAttributes\n #\t +- LocatorSetAttributes\n class LocatorSetAttributes(KeyedSubAttributes):\n\n def __init__(self, parent, key):\n self.locator_set_name = key\n super().__init__(parent)\n\n # +- DeviceAttributes\n # +- RouterInstanceAttributes\n # +- LocatorSetAttributes\n # +- InterfaceAttributes\n class InterfaceAttributes(KeyedSubAttributes):\n\n def __init__(self, parent, key):\n self.ls_interface = key\n super().__init__(parent)\n\n # +- DeviceAttributes\n # +- RouterInstanceAttributes\n # +- LocatorSetAttributes\n # +- InterfaceAttributes\n # +- InterfaceTypeAttributes\n class InterfaceTypeAttributes(KeyedSubAttributes):\n\n def __init__(self, parent, key):\n assert key in ['ipv4', 'ipv6', 'ethernet'],\\\n \"'{key}' is not supported for locator_set_intf_type_attr, only 'ipv4' and 'ipv6' are supported\".format(key=key)\n self.ls_interface_type = key\n super().__init__(parent)\n\n locator_set_intf_type_attr = managedattribute(\n name='locator_set_intf_type_attr',\n read_only=True,\n doc=InterfaceTypeAttributes.__doc__)\n\n @locator_set_intf_type_attr.initter\n def locator_set_intf_type_attr(self):\n return SubAttributesDict(self.InterfaceTypeAttributes, parent=self)\n\n locator_set_intf_attr = managedattribute(\n name='InterfaceAttributes',\n read_only=True,\n doc=InterfaceAttributes.__doc__)\n\n @locator_set_intf_attr.initter\n def locator_set_intf_attr(self):\n return SubAttributesDict(self.InterfaceAttributes, parent=self)\n\n locator_set_attr = managedattribute(\n name='locator_set_attr',\n read_only=True,\n doc=LocatorSetAttributes.__doc__)\n\n @locator_set_attr.initter\n def locator_set_attr(self):\n return SubAttributesDict(self.LocatorSetAttributes, parent=self)\n\n # +- DeviceAttributes\n # +- RouterInstanceAttributes\n # +- ServiceAttributes\n class ServiceAttributes(KeyedSubAttributes):\n\n def __init__(self, parent, key):\n assert key in ['ipv4', 'ipv6', 'ethernet'],\\\n \"'{key}' is not supported for service_attr, only 'ipv4', 'ipv6' and 'ethernet' are supported\".format(key=key)\n self.service = key\n super().__init__(parent)\n\n # +- DeviceAttributes\n # +- RouterInstanceAttributes\n # +- ServiceAttributes\n # +- ItrMrAttributes\n class ItrMrAttributes(KeyedSubAttributes):\n\n def __init__(self, parent, key):\n self.itr_map_resolver = key\n super().__init__(parent)\n\n itr_mr_attr = managedattribute(\n name='itr_mr_attr',\n read_only=True,\n doc=ItrMrAttributes.__doc__)\n\n @itr_mr_attr.initter\n def itr_mr_attr(self):\n return SubAttributesDict(self.ItrMrAttributes, parent=self)\n\n # +- DeviceAttributes\n # +- RouterInstanceAttributes\n # +- ServiceAttributes\n # +- EtrMsAttributes\n class EtrMsAttributes(KeyedSubAttributes):\n\n def __init__(self, parent, key):\n self.etr_map_server = key\n super().__init__(parent)\n\n etr_ms_attr = 
managedattribute(\n name='etr_ms_attr',\n read_only=True,\n doc=EtrMsAttributes.__doc__)\n\n @etr_ms_attr.initter\n def etr_ms_attr(self):\n return SubAttributesDict(self.EtrMsAttributes, parent=self)\n\n # +- DeviceAttributes\n # +- RouterInstanceAttributes\n # +- ServiceAttributes\n # +- ProxyItrAttributes\n class ProxyItrAttributes(KeyedSubAttributes):\n\n def __init__(self, parent, key):\n self.proxy_itr = key\n super().__init__(parent)\n\n proxy_attr = managedattribute(\n name='proxy_attr',\n read_only=True,\n doc=ProxyItrAttributes.__doc__)\n\n @proxy_attr.initter\n def proxy_attr(self):\n return SubAttributesDict(self.ProxyItrAttributes, parent=self)\n\n service_attr = managedattribute(\n name='service_attr',\n read_only=True,\n doc=ServiceAttributes.__doc__)\n\n @service_attr.initter\n def service_attr(self):\n return SubAttributesDict(self.ServiceAttributes, parent=self)\n\n # +- DeviceAttributes\n # +- RouterInstanceAttributes\n # +- InstanceAttributes\n class InstanceAttributes(KeyedSubAttributes):\n\n def __init__(self, parent, key):\n self.instance_id = key\n super().__init__(parent)\n\n # +- DeviceAttributes\n # +- RouterInstanceAttributes\n # +- InstanceAttributes\n # +- DynamicEidAttributes\n class DynamicEidAttributes(KeyedSubAttributes):\n\n def __init__(self, parent, key):\n self.inst_dyn_eid = key\n super().__init__(parent)\n\n # +- DeviceAttributes\n # +- RouterInstanceAttributes\n # +- InstanceAttributes\n # +- DynamicEidAttributes\n # +- DbMappingAttributes\n class DbMappingAttributes(KeyedSubAttributes):\n\n def __init__(self, parent, key):\n self.etr_dyn_eid_id = key\n super().__init__(parent)\n\n db_mapping_attr = managedattribute(\n name='db_mapping_attr',\n read_only=True,\n doc=DbMappingAttributes.__doc__)\n\n @db_mapping_attr.initter\n def db_mapping_attr(self):\n return SubAttributesDict(self.DbMappingAttributes, parent=self)\n\n dynamic_eid_attr = managedattribute(\n name='dynamic_eid_attr',\n read_only=True,\n doc=DynamicEidAttributes.__doc__)\n\n @dynamic_eid_attr.initter\n def dynamic_eid_attr(self):\n return SubAttributesDict(self.DynamicEidAttributes, parent=self)\n\n # +- DeviceAttributes\n # +- RouterInstanceAttributes\n # +- InstanceAttributes\n # +- ServiceAttributes\n class ServiceAttributes(KeyedSubAttributes):\n\n def __init__(self, parent, key):\n self.inst_service = key\n super().__init__(parent)\n\n # +- DeviceAttributes\n # +- RouterInstanceAttributes\n # +- InstanceAttributes\n # +- ServiceAttributes\n # +- DbMappingAttributes\n class DbMappingAttributes(KeyedSubAttributes):\n\n def __init__(self, parent, key):\n self.etr_eid_id = key\n super().__init__(parent)\n\n service_db_mapping_attr = managedattribute(\n name='service_db_mapping_attr',\n read_only=True,\n doc=DbMappingAttributes.__doc__)\n\n @service_db_mapping_attr.initter\n def service_db_mapping_attr(self):\n return SubAttributesDict(self.DbMappingAttributes, parent=self)\n\n # +- DeviceAttributes\n # +- RouterInstanceAttributes\n # +- InstanceAttributes\n # +- ServiceAttributes\n # +- UsePetrAttributes\n class UsePetrAttributes(KeyedSubAttributes):\n\n def __init__(self, parent, key):\n self.etr_use_petr = key\n super().__init__(parent)\n\n use_petr_attr = managedattribute(\n name='use_petr_attr',\n read_only=True,\n doc=UsePetrAttributes.__doc__)\n\n @use_petr_attr.initter\n def use_petr_attr(self):\n return SubAttributesDict(self.UsePetrAttributes, parent=self)\n\n # +- DeviceAttributes\n # +- RouterInstanceAttributes\n # +- InstanceAttributes\n # +- ServiceAttributes\n # +- 
MapCacheAttributes\n class MapCacheAttributes(KeyedSubAttributes):\n\n def __init__(self, parent, key):\n self.itr_mc_id = key\n super().__init__(parent)\n\n map_cache_attr = managedattribute(\n name='map_cache_attr',\n read_only=True,\n doc=MapCacheAttributes.__doc__)\n\n @map_cache_attr.initter\n def map_cache_attr(self):\n return SubAttributesDict(self.MapCacheAttributes, parent=self)\n\n inst_service_attr = managedattribute(\n name='inst_service_attr',\n read_only=True,\n doc=ServiceAttributes.__doc__)\n\n @inst_service_attr.initter\n def inst_service_attr(self):\n return SubAttributesDict(self.ServiceAttributes, parent=self)\n\n instance_id_attr = managedattribute(\n name='instance_id_attr',\n read_only=True,\n doc=InstanceAttributes.__doc__)\n\n @instance_id_attr.initter\n def instance_id_attr(self):\n return SubAttributesDict(self.InstanceAttributes, parent=self)\n\n # +- DeviceAttributes\n # +- RouterInstanceAttributes\n # +- SiteAttributes\n class SiteAttributes(KeyedSubAttributes):\n\n def __init__(self, parent, key):\n self.ms_site_id = key\n super().__init__(parent)\n\n # +- DeviceAttributes\n # +- RouterInstanceAttributes\n # +- SiteAttributes\n # +- InstanceIdAttributes\n class InstanceIdAttributes(KeyedSubAttributes):\n\n def __init__(self, parent, key):\n self.site_inst_id = key\n super().__init__(parent)\n\n # +- DeviceAttributes\n # +- RouterInstanceAttributes\n # +- SiteAttributes\n # +- InstanceIdAttributes\n # +- EidRecordAttributes\n class EidRecordAttributes(KeyedSubAttributes):\n\n def __init__(self, parent, key):\n self.ms_eid_id = key\n super().__init__(parent)\n\n eid_record_attr = managedattribute(\n name='eid_record_attr',\n read_only=True,\n doc=EidRecordAttributes.__doc__)\n\n @eid_record_attr.initter\n def eid_record_attr(self):\n return SubAttributesDict(self.EidRecordAttributes, parent=self)\n\n site_inst_id_attr = managedattribute(\n name='site_inst_id_attr',\n read_only=True,\n doc=InstanceIdAttributes.__doc__)\n\n @site_inst_id_attr.initter\n def site_inst_id_attr(self):\n return SubAttributesDict(self.InstanceIdAttributes, parent=self)\n\n site_attr = managedattribute(\n name='site_attr',\n read_only=True,\n doc=SiteAttributes.__doc__)\n\n @site_attr.initter\n def site_attr(self):\n return SubAttributesDict(self.SiteAttributes, parent=self)\n\n # +- DeviceAttributes\n # +- RouterInstanceAttributes\n # +- ExtranetAttributes\n class ExtranetAttributes(KeyedSubAttributes):\n\n def __init__(self, parent, key):\n self.ms_extranet = key\n super().__init__(parent)\n\n # +- DeviceAttributes\n # +- RouterInstanceAttributes\n # +- ExtranetAttributes\n # +- InstanceIdAttributes\n class InstanceIdAttributes(KeyedSubAttributes):\n\n def __init__(self, parent, key):\n self.extranet_inst_id = key\n super().__init__(parent)\n\n # +- DeviceAttributes\n # +- RouterInstanceAttributes\n # +- ExtranetAttributes\n # +- InstanceIdAttributes\n # +- EidRecordProviderAttributes\n class EidRecordProviderAttributes(KeyedSubAttributes):\n\n def __init__(self, parent, key):\n self.ms_extranet_provider_eid = key\n super().__init__(parent)\n\n eid_record_provider_attr = managedattribute(\n name='eid_record_provider_attr',\n read_only=True,\n doc=EidRecordProviderAttributes.__doc__)\n\n @eid_record_provider_attr.initter\n def eid_record_provider_attr(self):\n return SubAttributesDict(self.EidRecordProviderAttributes, parent=self)\n\n # +- DeviceAttributes\n # +- RouterInstanceAttributes\n # +- ExtranetAttributes\n # +- InstanceIdAttributes\n # +- EidRecordSubscriberAttributes\n class 
EidRecordSubscriberAttributes(KeyedSubAttributes):\n\n def __init__(self, parent, key):\n self.ms_extranet_subscriber_eid = key\n super().__init__(parent)\n\n eid_record_subscriber_attr = managedattribute(\n name='eid_record_subscriber_attr',\n read_only=True,\n doc=EidRecordSubscriberAttributes.__doc__)\n\n @eid_record_subscriber_attr.initter\n def eid_record_subscriber_attr(self):\n return SubAttributesDict(self.EidRecordSubscriberAttributes, parent=self)\n\n extranet_inst_id_attr = managedattribute(\n name='extranet_inst_id_attr',\n read_only=True,\n doc=InstanceIdAttributes.__doc__)\n\n @extranet_inst_id_attr.initter\n def extranet_inst_id_attr(self):\n return SubAttributesDict(self.InstanceIdAttributes, parent=self)\n\n extranet_attr = managedattribute(\n name='extranet_attr',\n read_only=True,\n doc=ExtranetAttributes.__doc__)\n\n @extranet_attr.initter\n def extranet_attr(self):\n return SubAttributesDict(self.ExtranetAttributes, parent=self)\n\n router_instance_attr = managedattribute(\n name='router_instance_attr',\n read_only=True,\n doc=RouterInstanceAttributes.__doc__)\n\n @router_instance_attr.initter\n def router_instance_attr(self):\n return SubAttributesDict(self.RouterInstanceAttributes, parent=self)\n\n device_attr = managedattribute(\n name='device_attr',\n read_only=True,\n doc=DeviceAttributes.__doc__)\n\n @device_attr.initter\n def device_attr(self):\n return SubAttributesDict(self.DeviceAttributes, parent=self)\n\n\n # ==========================================================================\n # MANAGED ATTRIBUTES\n # ==========================================================================\n\n # enabled\n enabled = managedattribute(\n name='enabled',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n # ==========================================================================\n # +- DeviceAttributes\n # +- InterfaceAttributes\n # ==========================================================================\n\n # if_mobility_liveness_test_disabled\n if_mobility_liveness_test_disabled = managedattribute(\n name='if_mobility_liveness_test_disabled',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n # ==========================================================================\n # +- DeviceAttributes\n # +- RouterInstanceAttributes\n # +- LocatorSetAttributes\n # +- InterfaceAttributes\n # +- InterfaceTypeAttributes\n # ==========================================================================\n\n # ls_priority\n ls_priority = managedattribute(\n name='ls_priority',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # ls_weight\n ls_weight = managedattribute(\n name='ls_weight',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # ==========================================================================\n # +- DeviceAttributes\n # +- RouterInstanceAttributes\n # +- ServiceAttributes\n # ==========================================================================\n\n # itr_enabled\n itr_enabled = managedattribute(\n name='itr_enabled',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n # etr_enabled\n etr_enabled = managedattribute(\n name='etr_enabled',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n # ms_enabled\n ms_enabled = managedattribute(\n name='ms_enabled',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n # mr_enabled\n mr_enabled = managedattribute(\n name='mr_enabled',\n default=None,\n type=(None, 
managedattribute.test_istype(bool)))\n\n # proxy_etr_enabled\n proxy_etr_enabled = managedattribute(\n name='proxy_etr_enabled',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n # locator_vrf\n locator_vrf = managedattribute(\n name='locator_vrf',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n # encapsulation\n encapsulation = managedattribute(\n name='encapsulation',\n default=ENCAP.lisp,\n type=(None, ENCAP))\n\n # ==========================================================================\n # +- DeviceAttributes\n # +- RouterInstanceAttributes\n # +- ServiceAttributes\n # +- EtrMsAttributes\n # ==========================================================================\n\n # etr_auth_key\n etr_auth_key = managedattribute(\n name='etr_auth_key',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n # etr_auth_key_type\n etr_auth_key_type = managedattribute(\n name='etr_auth_key_type',\n default=ETR_AUTH_KEY_TYPE.none,\n type=(None, ETR_AUTH_KEY_TYPE))\n\n # etr_proxy_reply\n etr_proxy_reply = managedattribute(\n name='etr_proxy_reply',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n # ==========================================================================\n # +- DeviceAttributes\n # +- RouterInstanceAttributes\n # +- InstanceAttributes\n # +- DynamicEidAttributes\n # +- DbMappingAttributes\n # ==========================================================================\n\n # etr_dyn_eid_rlocs\n etr_dyn_eid_rlocs = managedattribute(\n name='etr_dyn_eid_rlocs',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n # etr_dyn_eid_loopback_address\n etr_dyn_eid_loopback_address = managedattribute(\n name='etr_dyn_eid_loopback_address',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n # etr_dyn_eid_priority\n etr_dyn_eid_priority = managedattribute(\n name='etr_dyn_eid_priority',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # etr_dyn_eid_weight\n etr_dyn_eid_weight = managedattribute(\n name='etr_dyn_eid_weight',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # ==========================================================================\n # +- DeviceAttributes\n # +- RouterInstanceAttributes\n # +- InstanceAttributes\n # +- ServiceAttributes\n # ==========================================================================\n\n # etr_eid_vrf\n etr_eid_vrf = managedattribute(\n name='etr_eid_vrf',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n # ==========================================================================\n # +- DeviceAttributes\n # +- RouterInstanceAttributes\n # +- InstanceAttributes\n # +- ServiceAttributes\n # +- DbMappingAttributes\n # ==========================================================================\n\n # etr_eid_rlocs\n etr_eid_rlocs = managedattribute(\n name='etr_eid_rlocs',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n # etr_eid_loopback_address\n etr_eid_loopback_address = managedattribute(\n name='etr_eid_loopback_address',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n # etr_eid_priority\n etr_eid_priority = managedattribute(\n name='etr_eid_priority',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # etr_eid_weight\n etr_eid_weight = managedattribute(\n name='etr_eid_weight',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # 
==========================================================================\n # +- DeviceAttributes\n # +- RouterInstanceAttributes\n # +- InstanceAttributes\n # +- ServiceAttributes\n # +- UsePetrAttributes\n # ==========================================================================\n\n # etr_use_petr_priority\n etr_use_petr_priority = managedattribute(\n name='etr_use_petr_priority',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # etr_use_petr_weight\n etr_use_petr_weight = managedattribute(\n name='etr_use_petr_weight',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # ==========================================================================\n # +- DeviceAttributes\n # +- RouterInstanceAttributes\n # +- InstanceAttributes\n # +- ServiceAttributes\n # +- MapCacheAttributes\n # ==========================================================================\n\n # itr_mc_map_request\n itr_mc_map_request = managedattribute(\n name='itr_mc_map_request',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n # ==========================================================================\n # +- DeviceAttributes\n # +- RouterInstanceAttributes\n # +- SiteAttributes\n # ==========================================================================\n\n # ms_site_auth_key\n ms_site_auth_key = managedattribute(\n name='ms_site_auth_key',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n # ==========================================================================\n # +- DeviceAttributes\n # +- RouterInstanceAttributes\n # +- SiteAttributes\n # +- InstanceIdAttributes\n # +- EidRecordAttributes\n # ==========================================================================\n\n # ms_eid_accept_more_specifics\n ms_eid_accept_more_specifics = managedattribute(\n name='ms_eid_accept_more_specifics',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n\n # ==========================================================================\n # BUILD_CONFIG & BUILD_UNCONFIG\n # ==========================================================================\n\n def build_config(self, devices=None, apply=True, attributes=None,\n **kwargs):\n cfgs = {}\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n\n if devices is None:\n devices = self.devices\n devices = set(devices)\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr',\n keys=devices, sort=True):\n cfgs[key] = sub.build_config(apply=False, attributes=attributes2)\n\n if apply:\n self.testbed.config_on_devices(cfgs, fail_invalid=True)\n else:\n return cfgs\n\n\n def build_unconfig(self, devices=None, apply=True, attributes=None,\n **kwargs):\n cfgs = {}\n attributes = AttributesHelper(self, attributes)\n\n if devices is None:\n devices = self.devices\n devices = set(devices)\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr',\n keys=devices, sort=True):\n cfgs[key] = sub.build_unconfig(apply=False, attributes=attributes2)\n\n if apply:\n self.testbed.config_on_devices(cfgs, fail_invalid=True)\n else:\n return cfgs\n", "id": "5492595", "language": "Python", "matching_score": 5.510098934173584, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/lisp/lisp.py" }, { "content": "\n__all__ = (\n 'Routing',\n)\n\n# Python\nfrom enum import Enum\n\n# Genie\nfrom genie.decorator import managedattribute\nfrom genie.conf.base.config import CliConfig\nfrom genie.conf.base.base import DeviceFeature, 
LinkFeature\nfrom genie.conf.base.attributes import DeviceSubAttributes, SubAttributesDict,\\\n AttributesHelper\n\n# Routing Hierarchy\n# -----------------\n# Routing\n# +- DeviceAttributes\n\n\nclass Routing(DeviceFeature, LinkFeature):\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n # ==========================================================================\n # CONF CLASS STRUCTURE\n # ==========================================================================\n\n # +- DeviceAttributes\n class DeviceAttributes(DeviceSubAttributes):\n pass\n\n device_attr = managedattribute(\n name='device_attr',\n read_only=True,\n doc=DeviceAttributes.__doc__)\n\n @device_attr.initter\n def device_attr(self):\n return SubAttributesDict(self.DeviceAttributes, parent=self)\n\n # ==========================================================================\n # MANAGED ATTRIBUTES\n # ==========================================================================\n \n # enabled\n enabled = managedattribute(\n name='enabled',\n default=None,\n type=managedattribute.test_istype(bool),\n doc='Enable both ip routing and ipv6 unicast routing')\n\n # enabled_ipv6_unicast_routing\n enabled_ipv6_unicast_routing = managedattribute(\n name='enabled_ipv6_unicast_routing',\n default=None,\n type=managedattribute.test_istype(bool),\n doc='Enable ipv6 unicast routing')\n\n # enabled_ip_routing\n enabled_ip_routing = managedattribute(\n name='enabled_ip_routing',\n default=None,\n type=managedattribute.test_istype(bool),\n doc='Enable ip routing')\n\n # ==========================================================================\n # BUILD_CONFIG & BUILD_UNCONFIG\n # ==========================================================================\n\n def build_config(self, devices=None, apply=True, attributes=None,\n **kwargs):\n cfgs = {}\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n\n if devices is None:\n devices = self.devices\n devices = set(devices)\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr',\n keys=devices, sort=True):\n cfgs[key] = sub.build_config(apply=False, attributes=attributes2)\n\n if apply:\n self.testbed.config_on_devices(cfgs, fail_invalid=True)\n else:\n return cfgs\n\n\n def build_unconfig(self, devices=None, apply=True, attributes=None,\n **kwargs):\n cfgs = {}\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n\n if devices is None:\n devices = self.devices\n devices = set(devices)\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr',\n keys=devices, sort=True):\n cfgs[key] = sub.build_unconfig(apply=False, attributes=attributes2)\n\n if apply:\n self.testbed.config_on_devices(cfgs, fail_invalid=True)\n else:\n return cfgs\n\n", "id": "8293870", "language": "Python", "matching_score": 4.300717353820801, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/routing/routing.py" }, { "content": "from enum import Enum\n\n# Genie\nfrom genie.decorator import managedattribute\nfrom genie.conf.base.config import CliConfig\nfrom genie.conf.base.base import DeviceFeature\nfrom genie.conf.base.attributes import DeviceSubAttributes,\\\n SubAttributesDict,\\\n AttributesHelper, \\\n KeyedSubAttributes\n\n\n# Prefix_list\n# +-- DeviceAttributes\n# +-- PrefixAttributes\n# +-- MaxLengthRangeAttributes\n\n\nclass PrefixList(DeviceFeature):\n\n def __init__(self, prefix_set_name, *args, **kwargs):\n self.name = prefix_set_name\n super().__init__(*args, **kwargs)\n\n\n class 
DeviceAttributes(DeviceSubAttributes):\n\n\n class PrefixAttributes(KeyedSubAttributes):\n def __init__(self, parent, key):\n self.prefix = key\n super().__init__(parent)\n\n\n class MaxLengthRangeAttributes(KeyedSubAttributes):\n def __init__(self, parent, key):\n self.maxlength_range = key\n super().__init__(parent)\n\n maxlength_range_attr = managedattribute(\n name='maxlength_range_attr',\n read_only=True,\n doc=MaxLengthRangeAttributes.__doc__)\n\n @maxlength_range_attr.initter\n def maxlength_range_attr(self):\n return SubAttributesDict(self.MaxLengthRangeAttributes, parent=self)\n\n prefix_attr = managedattribute(\n name='prefix_attr',\n read_only=True,\n doc=PrefixAttributes.__doc__)\n\n @prefix_attr.initter\n def prefix_attr(self):\n return SubAttributesDict(self.PrefixAttributes, parent=self)\n\n device_attr = managedattribute(\n name='device_attr',\n read_only=True,\n doc=DeviceAttributes.__doc__)\n\n @device_attr.initter\n def device_attr(self):\n return SubAttributesDict(self.DeviceAttributes, parent=self)\n\n # ==================== Prefix-list attributes ==================== \n\n # protocol\n class PROTOCOL(Enum):\n ipv4 = 'ipv4'\n ipv6 = 'ipv6'\n\n protocol = managedattribute(\n name='protocol',\n default=None,\n type=(None, PROTOCOL),\n doc='ipv4 or ipv6')\n\n def build_config(self, devices=None, apply=True, attributes=None,\n unconfig=False):\n cfgs = {}\n attributes = AttributesHelper(self, attributes)\n if devices is None:\n devices = self.devices\n devices = set(devices)\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr', sort=True):\n cfgs[key] = sub.build_config(apply=False, attributes=attributes2)\n if apply:\n for device_name, cfg in sorted(cfgs.items()):\n self.testbed.config_on_devices(cfg, fail_invalid=True)\n else:\n return cfgs\n\n def build_unconfig(self, devices=None, apply=True, attributes=None):\n cfgs = {}\n attributes = AttributesHelper(self, attributes)\n if devices is None:\n devices = self.devices\n devices = set(devices)\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr', sort=True):\n cfgs[key] = sub.build_unconfig(apply=False, attributes=attributes2)\n\n if apply:\n for device_name, cfg in sorted(cfgs.items()):\n self.testbed.config_on_devices(cfg, fail_invalid=True)\n else:\n return cfgs\n\n", "id": "11767971", "language": "Python", "matching_score": 2.887747287750244, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/prefix_list/prefix_list.py" }, { "content": "'''\nNXOS Genie Conf using CLI for prefix-list.\n'''\n\n# Python\nimport warnings\nfrom abc import ABC\n\n# Genie\nfrom genie.conf.base.config import CliConfig\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning,\\\n AttributesHelper\n\n\nclass PrefixList(ABC):\n\n class DeviceAttributes(ABC):\n\n def build_config(self, devices=None, apply=True, attributes=None,\n unconfig=False, **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # loop over all prefixes\n for sub, attributes2 in attributes.mapping_values(\n 'prefix_attr', keys=self.prefix_attr.keys(), sort=True):\n configurations.append_block(sub.build_config(apply=False,\n attributes=attributes2, unconfig=unconfig, **kwargs))\n\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, 
attributes=attributes,\n unconfig=True, **kwargs)\n\n class PrefixAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None,\n unconfig=False, **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # loop over all maxlength_range\n for sub, attributes2 in attributes.mapping_values(\n 'maxlength_range_attr', sort=True,\n keys=self.maxlength_range_attr.keys()):\n configurations.append_block(sub.build_config(apply=False,\n attributes=attributes2, unconfig=unconfig, **kwargs))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n class MaxLengthRangeAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None,\n unconfig=False, **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # <protocol> prefix-list <prefix_set_name> permit <prefix> [ge length | le length]\n if attributes.value('protocol'):\n ip = attributes.value('protocol').value\n cfg_str = '{}'.format(ip) if ip == 'ipv6' else 'ip'\n else:\n return str(configurations)\n\n # prefix-list <prefix_set_name>\n cfg_str += ' prefix-list {}'.format(self.name)\n\n # prefix <prefix> \n cfg_str += ' permit {prefix}'.format(prefix=self.prefix)\n\n if not attributes.value('maxlength_range_attr'):\n configurations.append_line(cfg_str)\n return str(configurations)\n\n # get range edge value from the maxlength_range\n [min_val, max_val] = self.maxlength_range.split('..')\n min_val = int(min_val)\n max_val = int(max_val)\n\n # get mask of prefix to compare\n mask = int(self.prefix.split('/')[1])\n\n # compare with range edge values\n if mask == min_val:\n if min_val < max_val:\n cfg_str += ' le {}'.format(max_val)\n elif mask < min_val:\n if max_val == 32 or max_val == 128:\n cfg_str += ' ge {}'.format(min_val)\n else:\n cfg_str += ' ge {a} le {b}'.format(a=min_val, b=max_val)\n\n\n configurations.append_line(cfg_str)\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n", "id": "12297883", "language": "Python", "matching_score": 3.247791051864624, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/prefix_list/nxos/prefix_list.py" }, { "content": "#!/usr/bin/env python\n\n'''\nIOSXE unit tests for Genie prefix-list conf using CLI.\n'''\n\n# Python\nimport re\nimport unittest\nfrom unittest.mock import Mock\n\n# Genie\nfrom genie.conf import Genie\nfrom genie.conf.tests import TestCase\nfrom genie.conf.base import Testbed, Device\nfrom genie.libs.conf.prefix_list.prefix_list import PrefixList\n\n\nclass test_prefix_list(TestCase):\n\n def setUp(self):\n # Set Genie Tb\n testbed = Testbed()\n Genie.testbed = testbed\n # Device\n self.dev1 = Device(name='PE1', testbed=testbed, os='iosxe')\n\n def test_cli_config_v4(self): \n # prefix-list conf\n prefix_conf = PrefixList(prefix_set_name='test')\n self.dev1.add_feature(prefix_conf)\n # Apply configuration\n prefix_conf.device_attr[self.dev1].prefix_attr['192.168.3.11/8'].\\\n maxlength_range_attr['8..8'].protocol = 'ipv4'\n prefix_conf.device_attr[self.dev1].prefix_attr['192.168.3.11/8'].\\\n maxlength_range_attr['8..16'].protocol = 'ipv4'\n prefix_conf.device_attr[self.dev1].prefix_attr['192.168.127.12/8'].\\\n 
maxlength_range_attr['16..24'].protocol = 'ipv4'\n prefix_conf.device_attr[self.dev1].prefix_attr['172.16.17.32/8'].\\\n maxlength_range_attr['16..32'].protocol = 'ipv4'\n prefix_conf.device_attr[self.dev1].prefix_attr['2001:DB8:4::/64']\\\n .maxlength_range_attr['65..98'].protocol = 'ipv6'\n\n # Build config\n cfgs = prefix_conf.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'ipv6 prefix-list test permit 2001:DB8:4::/64 ge 65 le 98',\n 'ip prefix-list test permit 192.168.3.11/8 le 16',\n 'ip prefix-list test permit 192.168.3.11/8',\n 'ip prefix-list test permit 192.168.127.12/8 ge 16 le 24',\n 'ip prefix-list test permit 172.16.17.32/8 ge 16',\n ]))\n\n # Build unconfig\n cfgs = prefix_conf.build_unconfig(apply=False)\n\n # Check config correctly unconfigured\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'no ipv6 prefix-list test permit 2001:DB8:4::/64 ge 65 le 98',\n 'no ip prefix-list test permit 192.168.3.11/8 le 16',\n 'no ip prefix-list test permit 192.168.3.11/8',\n 'no ip prefix-list test permit 192.168.127.12/8 ge 16 le 24',\n 'no ip prefix-list test permit 172.16.17.32/8 ge 16',\n ]))\n\n def test_cli_config_v6(self):\n # prefix-list conf\n prefix_conf_v6 = PrefixList(prefix_set_name='test6')\n prefix_conf_v6.device_attr[self.dev1].prefix_attr['2001:DB8:1::/64']\\\n .maxlength_range_attr['64..64'].protocol = 'ipv6'\n prefix_conf_v6.device_attr[self.dev1].prefix_attr['2001:DB8:2::/64']\\\n .maxlength_range_attr['65..128'].protocol = 'ipv6'\n prefix_conf_v6.device_attr[self.dev1].prefix_attr['2001:DB8:3::/64']\\\n .maxlength_range_attr['64..128'].protocol = 'ipv6'\n prefix_conf_v6.device_attr[self.dev1].prefix_attr['2001:DB8:4::/64']\\\n .maxlength_range_attr['65..98'].protocol = 'ipv6'\n\n # Build config\n cfgs = prefix_conf_v6.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'ipv6 prefix-list test6 permit 2001:DB8:1::/64',\n 'ipv6 prefix-list test6 permit 2001:DB8:2::/64 ge 65',\n 'ipv6 prefix-list test6 permit 2001:DB8:3::/64 le 128',\n 'ipv6 prefix-list test6 permit 2001:DB8:4::/64 ge 65 le 98',\n ]))\n\n # Build unconfig\n cfgs = prefix_conf_v6.build_unconfig(apply=False)\n \n # Check config correctly unconfigured\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'no ipv6 prefix-list test6 permit 2001:DB8:1::/64',\n 'no ipv6 prefix-list test6 permit 2001:DB8:2::/64 ge 65',\n 'no ipv6 prefix-list test6 permit 2001:DB8:3::/64 le 128',\n 'no ipv6 prefix-list test6 permit 2001:DB8:4::/64 ge 65 le 98',\n ]))\n\n\nif __name__ == '__main__':\n unittest.main()\n\n", "id": "12695842", "language": "Python", "matching_score": 2.065082550048828, "max_stars_count": 0, "path": "pkgs/conf-pkg/src/genie/libs/conf/prefix_list/iosxe/tests/test_prefix_list.py" }, { "content": "'''\nPrefix_list Genie Ops Object for IOS - CLI.\n'''\nfrom ..iosxe.prefix_list import PrefixList as PrefixListXE\n\nclass PrefixList(PrefixListXE):\n pass", "id": "8885939", "language": "Python", "matching_score": 1.8201947212219238, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/prefix_list/ios/prefix_list.py" }, { "content": "# Genie\nfrom genie.ops.base import Base\n\nclass PrefixList(Base):\n exclude = []", "id": "10216867", "language": "Python", "matching_score": 1.3854084014892578, "max_stars_count": 0, "path": "pkgs/ops-pkg/src/genie/libs/ops/prefix_list/prefix_list.py" }, 
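The prefix_list records above all follow the same genie conf pattern: a PrefixList feature keyed per device, per prefix, and per max-length range, rendered to CLI by build_config. What follows is a minimal usage sketch mirroring the iosxe unit test above; the testbed and the 'PE1' device are illustrative, not part of the dataset:

    from genie.conf import Genie
    from genie.conf.base import Testbed, Device
    from genie.libs.conf.prefix_list.prefix_list import PrefixList

    # Illustrative testbed/device, set up exactly as in the unit test above
    Genie.testbed = testbed = Testbed()
    dev = Device(name='PE1', testbed=testbed, os='iosxe')

    # Keys descend DeviceAttributes -> PrefixAttributes -> MaxLengthRangeAttributes
    pl = PrefixList(prefix_set_name='test')
    dev.add_feature(pl)
    pl.device_attr[dev].prefix_attr['192.168.3.11/8']\
        .maxlength_range_attr['8..16'].protocol = 'ipv4'

    # mask (8) equals the range minimum and minimum < maximum, so only 'le 16' is emitted
    cfgs = pl.build_config(apply=False)
    assert str(cfgs[dev.name]) == 'ip prefix-list test permit 192.168.3.11/8 le 16'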
{ "content": "# Genie\nfrom genie.ops.base import Base\n\n\nclass Acl(Base):\n exclude = []", "id": "11175642", "language": "Python", "matching_score": 1.3589321374893188, "max_stars_count": 0, "path": "pkgs/ops-pkg/src/genie/libs/ops/acl/acl.py" }, { "content": "from genie.ops.base import Base\n\nclass Dot1X(Base):\n exclude = []", "id": "9911334", "language": "Python", "matching_score": 1.3425464630126953, "max_stars_count": 0, "path": "pkgs/ops-pkg/src/genie/libs/ops/dot1x/dot1x.py" }, { "content": "'''\nDot1x Genie Ops Object for IOS - CLI.\n'''\nfrom ..iosxe.dot1x import Dot1X as Dot1XXE\n\nclass Dot1X(Dot1XXE):\n pass", "id": "8648151", "language": "Python", "matching_score": 1, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/dot1x/ios/dot1x.py" }, { "content": "from .dot1x import *\n", "id": "9415023", "language": "Python", "matching_score": 0.6091669201850891, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/dot1x/__init__.py" }, { "content": "#!/usr/bin/env python\n\n# Python\nimport unittest\nfrom unittest.mock import Mock\n\n# Genie\nfrom genie.conf.tests import TestCase\nfrom genie.conf import Genie\nfrom genie.conf.base import Testbed, Device\n\n# Stp\nfrom genie.libs.conf.dot1x import Dot1x\n\n\nclass test_dot1x(TestCase):\n\n def setUp(self):\n \n # Set Genie Tb\n testbed = Testbed()\n Genie.testbed = testbed\n \n # Device\n self.dev1 = Device(name='PE1', testbed=testbed, os='iosxe')\n\n def test_dot1x_full_config(self):\n\n # For failures\n self.maxDiff = None\n \n # Pim object\n dot1x = Dot1x()\n self.dev1.add_feature(dot1x)\n\n # device_attr\n dot1x.device_attr[self.dev1].system_auth_control = True\n dot1x.device_attr[self.dev1].supplicant_force_mcast = True\n\n # credentials_attr\n dot1x.device_attr[self.dev1].credentials_attr['switch4']\\\n .credential_username = 'switch4'\n dot1x.device_attr[self.dev1].credentials_attr['switch4']\\\n .credential_pwd_type = '0'\n dot1x.device_attr[self.dev1].credentials_attr['switch4']\\\n .credential_secret = 'cisco'\n\n # interface_attr\n dot1x.device_attr[self.dev1].interface_attr['GigabitEthernet1/0/9']\\\n .if_pae = 'supplicant'\n dot1x.device_attr[self.dev1].interface_attr['GigabitEthernet1/0/9']\\\n .if_supplicant_eap_profile = 'EAP-METH'\n dot1x.device_attr[self.dev1].interface_attr['GigabitEthernet1/0/9']\\\n .if_credentials = 'switch4'\n dot1x.device_attr[self.dev1].interface_attr['GigabitEthernet1/0/9']\\\n .if_closed = True\n dot1x.device_attr[self.dev1].interface_attr['GigabitEthernet1/0/9']\\\n .if_port_control = 'auto'\n dot1x.device_attr[self.dev1].interface_attr['GigabitEthernet1/0/9']\\\n .if_host_mode = 'single-host'\n\n cfgs = dot1x.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[self.dev1.name]), '\\n'.\\\n join([\n 'dot1x system-auth-control',\n 'dot1x supplicant force-multicast',\n 'dot1x credential switch4',\n ' username switch4',\n ' password <PASSWORD>',\n ' exit',\n 'interface GigabitEthernet1/0/9',\n ' dot1x pae supplicant',\n ' dot1x supplicant eap profile EAP-METH',\n ' dot1x credentials switch4',\n ' access-session port-control auto',\n ' access-session host-mode single-host',\n ' access-session closed',\n ' exit',\n ]))\n\n cfgs = dot1x.build_unconfig(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[self.dev1.name]), '\\n'.\\\n join([\n 'no dot1x system-auth-control',\n 'no dot1x supplicant force-multicast',\n 'no dot1x credential switch4',\n 'interface GigabitEthernet1/0/9',\n ' no dot1x 
pae supplicant',\n ' no dot1x supplicant eap profile EAP-METH',\n ' no dot1x credentials switch4',\n ' no access-session port-control auto',\n ' no access-session host-mode single-host',\n ' no access-session closed',\n ' exit',\n ]))\n\n # uncfg with attributes\n cfgs = dot1x.build_unconfig(apply=False,\n attributes={'device_attr': {\n self.dev1: {\n 'system_auth_control': None,\n 'credentials_attr': {\n 'switch4': {\n 'credential_username': None\n }\n },\n 'interface_attr': {\n 'GigabitEthernet1/0/9': {\n 'if_supplicant_eap_profile': None,\n }\n },}}})\n \n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[self.dev1.name]), '\\n'.\\\n join([\n 'no dot1x system-auth-control',\n 'dot1x credential switch4',\n ' no username switch4',\n ' exit',\n 'interface GigabitEthernet1/0/9',\n ' no dot1x supplicant eap profile EAP-METH',\n ' exit',\n ])) \n\nif __name__ == '__main__':\n unittest.main()\n", "id": "1388521", "language": "Python", "matching_score": 3.9203743934631348, "max_stars_count": 0, "path": "pkgs/conf-pkg/src/genie/libs/conf/dot1x/iosxe/tests/test_dot1x.py" }, { "content": "\n# import python\nfrom abc import ABC\n\n# import genie\nfrom genie.conf.base.config import CliConfig\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.attributes import AttributesHelper\n\n\nclass Dot1x(ABC):\n\n class DeviceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # dot1x system-auth-control\n if attributes.value('system_auth_control'):\n configurations.append_line('dot1x system-auth-control')\n\n # dot1x supplicant force-multicast\n if attributes.value('supplicant_force_mcast'):\n configurations.append_line('dot1x supplicant force-multicast')\n \n # credentials attributes\n for sub, attributes2 in attributes.mapping_values('credentials_attr',\n sort=True, keys=self.credentials_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n \n # interfaces attributes\n for sub, attributes2 in attributes.mapping_values('interface_attr',\n sort=True, keys=self.interface_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n\n if apply:\n if configurations:\n self.device.configure(configurations)\n else:\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n\n class CredentialsAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None,\n unconfig=False, **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n with configurations.submode_context(\n attributes.format('dot1x credential {credential_profile}', force=True)): \n \n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n # username <credential_username>\n configurations.append_line(\n attributes.format('username {credential_username}'))\n\n # password [<credential_pwd_type>] <credential_secret>\n if attributes.value('credential_pwd_type'):\n configurations.append_line(attributes.format(\n 'password {credential_pwd_type} {credential_secret}'))\n else:\n 
configurations.append_line(\n attributes.format('password {credential_secret}'))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None,\n **kwargs):\n return self.build_config(apply=apply,\n attributes=attributes,\n unconfig=True, **kwargs)\n\n\n class InterfaceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None,\n unconfig=False, **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n with configurations.submode_context(\n attributes.format('interface {interface_id}', force=True)):\n\n # dot1x pae {if_pae}\n configurations.append_line(\n attributes.format('dot1x pae {if_pae}'))\n\n # dot1x authenticator eap profile <if_authen_eap_profile>\n configurations.append_line(\n attributes.format(\n 'dot1x authenticator eap profile {if_authen_eap_profile}'))\n\n # dot1x supplicant eap profile <if_supplicant_eap_profile>\n configurations.append_line(\n attributes.format(\n 'dot1x supplicant eap profile {if_supplicant_eap_profile}'))\n\n # dot1x credentials <if_credentials>\n configurations.append_line(\n attributes.format('dot1x credentials {if_credentials}'))\n\n # access-session port-control <if_port_control>\n configurations.append_line(\n attributes.format('access-session port-control {if_port_control}'))\n\n # access-session host-mode <if_host_mode>\n configurations.append_line(\n attributes.format('access-session host-mode {if_host_mode}'))\n\n # access-session closed\n if attributes.value('if_closed'):\n configurations.append_line(\n attributes.format('access-session closed'))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None,\n **kwargs):\n return self.build_config(apply=apply,\n attributes=attributes,\n unconfig=True, **kwargs)", "id": "2493287", "language": "Python", "matching_score": 4.633955955505371, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/dot1x/iosxe/dot1x.py" }, { "content": "\n__all__ = (\n 'Dot1x',\n)\n# import genie\nfrom genie.decorator import managedattribute\nfrom genie.conf.base.base import DeviceFeature\nfrom genie.conf.base.attributes import DeviceSubAttributes,\\\n SubAttributesDict,\\\n AttributesHelper, \\\n KeyedSubAttributes\n# import genie.libs\nfrom genie.conf.base.attributes import InterfaceSubAttributes\n\n\n# Structure\n# Dot1x\n# +- DeviceAttributes\n# +- CredentialsAttributes\n# +- InterfaceAttributes\n\n\nclass Dot1x(DeviceFeature):\n\n # Device Attributes\n enabled = managedattribute(\n name='enabled',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n system_auth_control = managedattribute(\n name='system_auth_control',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n supplicant_force_mcast = managedattribute(\n name='supplicant_force_mcast',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n # Credentials Attributes\n credential_profile = managedattribute(\n name='credential_profile',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n credential_username = managedattribute(\n name='credential_username',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n credential_pwd_type = managedattribute(\n name='credential_pwd_type',\n default=None,\n type=(None, managedattribute.test_in(['0','7'])))\n\n credential_secret = managedattribute(\n name='credential_secret',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n # 
Interfaces Attributes\n if_pae = managedattribute(\n name='if_pae',\n default=None,\n type=(None, managedattribute.test_in(['authenticator','supplicant','both'])))\n\n if_authen_eap_profile = managedattribute(\n name='if_authen_eap_profile',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n if_supplicant_eap_profile = managedattribute(\n name='if_supplicant_eap_profile',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n if_credentials = managedattribute(\n name='if_credentials',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n if_closed = managedattribute(\n name='if_closed',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n if_port_control = managedattribute(\n name='if_port_control',\n default=None,\n type=(None, managedattribute.test_in(['auto','force-authorized','force-unauthorized'])))\n\n if_host_mode = managedattribute(\n name='if_host_mode',\n default=None,\n type=(None, managedattribute.test_in(['multi-auth','multi-domain','multi-host','single-host'])))\n\n\n class DeviceAttributes(DeviceSubAttributes):\n \n class InterfaceAttributes(InterfaceSubAttributes):\n\n def __init__(self, parent, key):\n self.interface_id = key\n super().__init__(parent, key)\n \n \n interface_attr = managedattribute(\n name='interface_attr',\n read_only=True,\n doc=InterfaceAttributes.__doc__)\n\n @interface_attr.initter\n def interface_attr(self):\n return SubAttributesDict(\n self.InterfaceAttributes, parent=self)\n\n \n class CredentialsAttributes(KeyedSubAttributes):\n\n def __init__(self, parent, key):\n self.credential_profile = key\n super().__init__(parent) \n \n credentials_attr = managedattribute(\n name='credentials_attr',\n read_only=True,\n doc=CredentialsAttributes.__doc__)\n\n @credentials_attr.initter\n def credentials_attr(self):\n return SubAttributesDict(\n self.CredentialsAttributes, parent=self)\n\n device_attr = managedattribute(\n name='device_attr',\n read_only=True,\n doc=DeviceAttributes.__doc__)\n\n @device_attr.initter\n def device_attr(self):\n return SubAttributesDict(self.DeviceAttributes, parent=self)\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n def build_config(self, devices=None, apply=True, attributes=None,\n **kwargs):\n cfgs = {}\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n\n if devices is None:\n devices = self.devices\n devices = set(devices)\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr',\n keys=devices, sort=True):\n cfgs[key] = sub.build_config(apply=False, attributes=attributes2)\n\n if apply:\n self.testbed.config_on_devices(cfgs, fail_invalid=True)\n else:\n return cfgs\n\n def build_unconfig(self, devices=None, apply=True, attributes=None,\n **kwargs):\n cfgs = {}\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n\n if devices is None:\n devices = self.devices\n devices = set(devices)\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr',\n keys=devices, sort=True):\n cfgs[key] = sub.build_unconfig(apply=False, attributes=attributes2)\n\n if apply:\n self.testbed.config_on_devices(cfgs, fail_invalid=True)\n else:\n return cfgs\n", "id": "1800107", "language": "Python", "matching_score": 3.5215585231781006, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/dot1x/dot1x.py" }, { "content": "# Python\nimport functools\nfrom enum import Enum\n\n# Ats\n\n# Genie package\nfrom genie.decorator import managedattribute\nfrom 
genie.conf.base import Base, \\\n DeviceFeature, \\\n LinkFeature, \\\n Interface\nimport genie.conf.base.attributes\nfrom genie.conf.base.attributes import SubAttributes, \\\n SubAttributesDict, \\\n AttributesHelper, \\\n KeyedSubAttributes\n\n# Genie Xbu_shared\nimport genie.libs.conf.interface\nfrom genie.libs.conf.base.feature import consolidate_feature_args\n\n__all__ = (\n 'Vlan',\n )\n\n# Table of contents:\n# class Vlan:\n# class DeviceAttributes:\n# class AccessMapAttributes:\n# class VlanConfigurationAttributes:\n# def build_config/build_unconfig\n\n # Vlan\n # +-- DeviceAttributes\n # +-- VlanAttributes\n # +-- VlanConfigAttributes\n\n\nclass Vlan(DeviceFeature, LinkFeature):\n\n @property\n def interfaces(self):\n interfaces = set()\n interfaces.update(*[link.interfaces for link in self.links])\n return frozenset(interfaces)\n\n def __init__(self, vlan_id=None, vlan=None, *args, **kwargs):\n if vlan_id: # old structure\n self.vlan_id = int(vlan_id)\n if vlan: # new structure\n self.vlan = vlan\n super().__init__(*args, **kwargs)\n\n\n vlan_id = managedattribute(\n name='vlan_id',\n default=None,\n type=managedattribute.test_istype(int),\n doc='A single-tagged VLAN')\n\n shutdown = managedattribute(\n name='shutdown',\n default=None,\n type=(None, managedattribute.test_istype(bool)),\n doc='Shutdown VLAN switching')\n\n class Media(Enum):\n enet = 'enet'\n fddi = 'fddi'\n\n media = managedattribute(\n name='media',\n default=None,\n type=(None, Media),\n doc='Vlan media type')\n\n name = managedattribute(\n name='name',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc='Vlan name')\n\n are = managedattribute(\n name='are',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc='Maximum number of All Route Explorer hops for this VLAN')\n\n bridge = managedattribute(\n name='bridge',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc='''Value of the bridge number for FDDI Net or\n Token Ring Net type VLANs''')\n\n bridge_type = managedattribute(\n name='bridge_type',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc='Vlan bridge type')\n\n stp_type = managedattribute(\n name='stp_type',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc='Spanning tree type of the VLAN')\n\n ste = managedattribute(\n name='ste',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc='Maximum number of Spanning Tree Explorer hops for this VLAN')\n\n class Status(Enum):\n ACTIVE = 'active'\n SUSPENDED = 'suspend'\n\n status = managedattribute(\n name='Status',\n default=None,\n type=(None, Status),\n doc='Vlan state')\n\n class Tpid(Enum):\n DEFAULT = 'TPID_0x8100'\n QNQ = 'TPID_0x8A88'\n ALTERANTE = 'TPID_0x9100'\n ALTERANTE2 = 'TPID_0X9200'\n\n tpid = managedattribute(\n name='tpid',\n default=None,\n type=(None, Tpid),\n doc='''Tag protocol identifier field (TPID)\n that is accepted on the VLAN''')\n\n class Backupcrf(Enum):\n enable = 'enable'\n disable = 'disable'\n\n backupcrf = managedattribute(\n name='backupcrf',\n default=None,\n type=(None, Backupcrf),\n doc='Backup CRF mode of the VLAN')\n\n parent_id = managedattribute(\n name='parent_id',\n default=None,\n type=(None, managedattribute.test_isinstance(int)),\n doc='ID number of the Parent VLAN of FDDI or Token Ring type VLANs')\n\n tb_vlan1 = managedattribute(\n name='tb_vlan1',\n default=None,\n type=(None, managedattribute.test_isinstance(int)),\n doc='ID number of the first translational VLAN for this VLAN')\n\n 
tb_vlan2 = managedattribute(\n name='tb_vlan2',\n default=None,\n type=(None, managedattribute.test_isinstance(int)),\n doc='ID number of the second translational VLAN for this VLAN')\n\n said = managedattribute(\n name='said',\n default=None,\n type=(None, managedattribute.test_isinstance(int)),\n doc='IEEE 802.10 SAID')\n\n ring = managedattribute(\n name='ring',\n default=None,\n type=(None, managedattribute.test_isinstance(int)),\n doc='Ring number of FDDI or Token Ring type VLANs')\n\n dot1q_tag_native = managedattribute(\n name='dot1q_tag_native',\n default=None,\n type=(None, managedattribute.test_istype(bool)),\n doc='Tag native vlan')\n\n accounting_type = managedattribute(\n name='accounting_type',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc='Input/Output accounting packets')\n\n group_name = managedattribute(\n name='group_name',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc='Vlan group name')\n\n configuration_id_list = managedattribute(\n name='configuration_id_list',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc='Vlan id list')\n\n group_id_list = managedattribute(\n name='group_id_list',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc='List of vlans in this group')\n\n private_vlan_type = managedattribute(\n name='private_vlan_type',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc='Configure a private VLAN')\n\n private_vlan_association_action = managedattribute(\n name='private_vlan_association_action',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc='Vlan private association action, add/remove')\n\n private_vlan_association_ids = managedattribute(\n name='private_vlan_association_ids',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc='VLAN IDs of the private VLANs to be configured')\n\n remote_span = managedattribute(\n name='remote_span',\n default=None,\n type=(None, managedattribute.test_istype(bool)),\n doc='Configure as Remote SPAN VLAN')\n\n access_map_action = managedattribute(\n name='access_map_action',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc='Vlan access-map action value, Drop packets/Forward packets')\n\n access_map_sequence = managedattribute(\n name='access_map_sequence',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc='Sequence to insert to/delete from existing vlan access-map entry')\n\n datalink_flow_monitor = managedattribute(\n name='datalink_flow_monitor',\n default=None,\n type=(None, managedattribute.test_istype(bool)),\n doc='Apply a Flow Monitor for vlan NetFlow configuration commands')\n\n redirect_interface = managedattribute(\n name='redirect_interface',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc='Redirect matched packets to the specified interface(s)')\n\n access_map_match = managedattribute(\n name='access_map_match',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc='Access-list match type, IP/IPV6/Mac')\n\n access_list = managedattribute(\n name='access_list',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc='Access-list name')\n\n # ============NXOS specific===========================\n egress_load_balance = managedattribute(\n name='egress_load_balance',\n default=None,\n type=(None, managedattribute.test_istype(bool)),\n doc='Load balancing on the egress interface')\n\n # ========================XE and NX new Structure=================\n 
vlan = managedattribute(\n name='vlan',\n default=None,\n type=managedattribute.test_istype(str),\n doc='A VLAN id')\n\n class State(Enum):\n ACTIVE = 'active'\n SUSPEND = 'suspended'\n UNSUPPORT = 'unsupport'\n SHUTDOWN = 'shutdown'\n\n state = managedattribute(\n name = 'state',\n default = None,\n type = (None, State),\n doc = 'Obtain vlan state')\n\n # ====================================================\n # NXOS specific managed attributes for new structure\n # ====================================================\n\n # enabled\n enabled = managedattribute(\n name='enabled',\n default=None,\n type=(None, managedattribute.test_istype(bool)),\n doc='Enable feature interface-vlan and feature vn-segment-vlan-based')\n\n # enabled_interface_vlan\n enabled_interface_vlan = managedattribute(\n name='enabled_interface_vlan',\n default=None,\n type=(None, managedattribute.test_istype(bool)),\n doc='Enable feature interface-vlan')\n\n # enabled_vn_segment_vlan_based\n enabled_vn_segment_vlan_based = managedattribute(\n name='enabled_vn_segment_vlan_based',\n default=None,\n type=(None, managedattribute.test_istype(bool)),\n doc='Enable feature vn-segment-vlan-based')\n\n vn_segment_id = managedattribute(\n name='vn_segment_id',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc='Segment id')\n\n class Mode(Enum):\n CE = 'ce'\n FABRICPATH = 'fabricpath'\n\n mode = managedattribute(\n name='mode',\n default=None,\n type=(None, Mode),\n doc='Vlan mode')\n\n config_vlan_id = managedattribute(\n name='config_vlan_id',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc='Configuration vlan id')\n\n ip_igmp_snooping = managedattribute(\n name='ip_igmp_snooping',\n default=None,\n type=(None, managedattribute.test_istype(bool)),\n doc='IGMP Snooping information for the vlan')\n\n # =============================================\n # Device attributes\n # =============================================\n class DeviceAttributes(genie.conf.base.attributes.DeviceSubAttributes):\n\n @property\n def interfaces(self):\n device = self.device\n interfaces = set(self.parent.interfaces)\n interfaces = {intf for intf in interfaces if intf.device is device}\n return frozenset(interfaces)\n\n class AccessMapAttributes(KeyedSubAttributes):\n def __init__(self, parent, key):\n self.access_map_id = key\n super().__init__(parent=parent)\n\n access_map_attr = managedattribute(\n name='access_map_attr',\n read_only=True,\n doc=AccessMapAttributes.__doc__)\n\n @access_map_attr.initter\n def access_map_attr(self):\n return SubAttributesDict(self.AccessMapAttributes, parent=self)\n\n # added for new vlan structure\n class VlanAttributes(KeyedSubAttributes):\n def __init__(self, parent, key):\n self.vlan = key\n super().__init__(parent)\n\n vlan_attr = managedattribute(\n name='vlan_attr',\n read_only=True,\n doc=VlanAttributes.__doc__)\n\n @vlan_attr.initter\n def vlan_attr(self):\n return SubAttributesDict(self.VlanAttributes, parent=self)\n\n class VlanConfigAttributes(KeyedSubAttributes):\n def __init__(self, parent, key):\n self.vlan = key\n super().__init__(parent)\n\n config_vlan_attr = managedattribute(\n name='config_vlan_attr',\n read_only=True,\n doc=VlanConfigAttributes.__doc__)\n\n @config_vlan_attr.initter\n def config_vlan_attr(self):\n return SubAttributesDict(self.VlanConfigAttributes, parent=self)\n\n # added for old vlan structure\n class VlanConfigurationAttributes(KeyedSubAttributes):\n\n def __init__(self, parent, key):\n self.vlan_configuration_id = key\n 
super().__init__(parent=parent)\n\n vlan_configuration_attr = managedattribute(\n name='vlan_configuration_attr',\n read_only=True,\n doc=VlanConfigurationAttributes.__doc__)\n\n @vlan_configuration_attr.initter\n def vlan_configuration_attr(self):\n return SubAttributesDict(self.VlanConfigurationAttributes,\n parent=self)\n\n class InterfaceAttributes(genie.conf.base.attributes.\n InterfaceSubAttributes):\n # Fix parent recursion\n @property\n def parent(self):\n return self._device_attr\n\n @property\n def device_name(self):\n return self._device_attr.device_name\n\n # Fix parent recursion\n @property\n def device(self):\n return self._device_attr.device\n\n def __init__(self, parent, key, **kwargs):\n self._device_attr = parent\n super().__init__(parent=None, key=key, **kwargs)\n\n interface_attr = managedattribute(\n name='interface_attr',\n read_only=True,\n doc=InterfaceAttributes.__doc__)\n\n @interface_attr.initter\n def interface_attr(self):\n return SubAttributesDict(self.InterfaceAttributes, parent=self)\n\n device_attr = managedattribute(\n name='device_attr',\n read_only=True,\n doc=DeviceAttributes.__doc__)\n\n @device_attr.initter\n def device_attr(self):\n return SubAttributesDict(self.DeviceAttributes, parent=self)\n\n # =========================================================\n # build_config\n # =========================================================\n def build_config(self, devices=None, interfaces=None, links=None,\n apply=True, attributes=None, **kwargs):\n attributes = AttributesHelper(self, attributes)\n cfgs = {}\n\n devices, interfaces, links = \\\n consolidate_feature_args(self, devices, interfaces, links)\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr',\n keys=devices, sort=True):\n cfgs[key] = sub.build_config(apply=False, attributes=attributes2)\n if apply:\n for device_name, cfg in sorted(cfgs.items()):\n self.testbed.config_on_devices(cfg, fail_invalid=True)\n else:\n return cfgs\n\n def build_unconfig(self, devices=None, interfaces=None, links=None,\n apply=True, attributes=None, **kwargs):\n attributes = AttributesHelper(self, attributes)\n\n cfgs = {}\n\n devices, interfaces, links = \\\n consolidate_feature_args(self, devices, interfaces, links)\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr',\n keys=devices, sort=True):\n cfgs[key] = sub.build_unconfig(apply=False, attributes=attributes2)\n\n if apply:\n for device_name, cfg in sorted(cfgs.items()):\n self.testbed.config_on_devices(cfg, fail_invalid=True)\n else:\n return cfgs\n\n", "id": "6730755", "language": "Python", "matching_score": 5.373142242431641, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/vlan/vlan.py" }, { "content": "\n__all__ = (\n 'Fdb',\n)\n# import genie\nfrom genie.decorator import managedattribute\nfrom genie.conf.base.base import DeviceFeature, InterfaceFeature\nfrom genie.conf.base.attributes import DeviceSubAttributes,\\\n SubAttributesDict,\\\n AttributesHelper, \\\n KeyedSubAttributes ,\\\n InterfaceSubAttributes\n\n\n# Structure\n# Fdb\n# +- DeviceAttributes\n# +- VlanAttributes\n# +- MacAddressAttributes\n# +- InterfaceAttributes\n\nclass Fdb(DeviceFeature, InterfaceFeature):\n\n # device attributes\n mac_learning = managedattribute(\n name='mac_learning',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n mac_aging_time = managedattribute(\n name='mac_aging_time',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n maximum_entries = managedattribute(\n 
name='maximum_entries',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # VlanAttributes\n vlan_mac_learning = managedattribute(\n name='vlan_mac_learning',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n vlan_mac_aging_time = managedattribute(\n name='vlan_mac_aging_time',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n vlan_maximum_entries = managedattribute(\n name='vlan_maximum_entries',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # MacAddressAttributes\n mac_address = managedattribute(\n name='mac_address',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n # InterfaceAttributes\n drop = managedattribute(\n name='drop',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n interface = managedattribute(\n name='interface',\n default=None,\n type=(None, managedattribute.test_istype(list)))\n\n class DeviceAttributes(DeviceSubAttributes):\n\n class VlanAttributes(KeyedSubAttributes):\n\n def __init__(self, parent, key):\n self.vlan_id = key\n super().__init__(parent)\n\n\n class MacAddressAttributes(KeyedSubAttributes):\n def __init__(self, parent, key):\n self.mac_address = key\n super().__init__(parent)\n\n mac_address_attr = managedattribute(\n name='mac_address_attr',\n read_only=True,\n doc=MacAddressAttributes.__doc__)\n\n @mac_address_attr.initter\n def mac_address_attr(self):\n return SubAttributesDict(self.MacAddressAttributes, parent=self)\n\n\n vlan_attr = managedattribute(\n name='vlan_attr',\n read_only=True,\n doc=VlanAttributes.__doc__)\n\n @vlan_attr.initter\n def vlan_attr(self):\n return SubAttributesDict(self.VlanAttributes, parent=self)\n\n device_attr = managedattribute(\n name='device_attr',\n read_only=True,\n doc=DeviceAttributes.__doc__)\n\n @device_attr.initter\n def device_attr(self):\n return SubAttributesDict(self.DeviceAttributes, parent=self)\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n def build_config(self, devices=None, apply=True, attributes=None,\n **kwargs):\n cfgs = {}\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n\n if devices is None:\n devices = self.devices\n devices = set(devices)\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr',\n keys=devices, sort=True):\n cfgs[key] = sub.build_config(apply=False, attributes=attributes2)\n\n if apply:\n self.testbed.config_on_devices(cfgs, fail_invalid=True)\n else:\n return cfgs\n\n def build_unconfig(self, devices=None, apply=True, attributes=None,\n **kwargs):\n cfgs = {}\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n\n if devices is None:\n devices = self.devices\n devices = set(devices)\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr',\n keys=devices, sort=True):\n cfgs[key] = sub.build_unconfig(apply=False, attributes=attributes2)\n\n if apply:\n self.testbed.config_on_devices(cfgs, fail_invalid=True)\n else:\n return cfgs\n", "id": "903042", "language": "Python", "matching_score": 2.0004260540008545, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/fdb/fdb.py" }, { "content": "'''\nDevice class for devices with iosxr OS.\n'''\n\n__all__ = (\n 'Device',\n)\n\nfrom enum import Enum\nimport logging\nimport re\n\nfrom genie.decorator import managedattribute\nfrom genie.conf.base.attributes import AttributesHelper\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.config import 
CliConfig\n\nimport genie.libs.conf.device\nimport genie.libs.conf.device.cisco\n\nlogger = logging.getLogger(__name__)\n\n\nclass Device(genie.libs.conf.device.cisco.Device):\n '''Device class for devices with iosxr OS'''\n\n def learn_interface_mac_addresses(self):\n\n cmd = 'show interfaces | include \"line protocol\\\\|Hardware is\"'\n cmd = re.escape(cmd) # XXXJST TODO Csccon bug!\n out = self.execute(cmd)\n interface = None\n for line in out.splitlines():\n line = line.rstrip()\n m = re.match(r'(?i)^(?P<interface>\S+) is .* line protocol is', line)\n if m:\n interface = self.interfaces[m.group('interface')]\n # GigabitEthernet0/0/0/0 is administratively down, line protocol is administratively down\n continue\n if interface:\n # (?i) must lead the pattern; both alternatives need the (?P<mac>...) group used below\n m = re.match(r'(?i)^ +Hardware is +[^,]+, +address is +(?P<mac>[A-Fa-f0-9.:-]+)(?: \\(bia (?P<bi_mac>[A-Fa-f0-9.:-]+)\\))?', line) \\\n or re.match(r'(?i)^ +address: +(?P<mac>[A-Fa-f0-9.:-]+)(?: \\(bia (?P<bi_mac>[A-Fa-f0-9.:-]+)\\))?', line)\n if m:\n # IOS, IOS-XR:\n # Hardware is GigabitEthernet, address is 6c9c.ed74.06e8 (bia 6c9c.ed74.06e8)\n # Hardware is C6k 1000Mb 802.3, address is 0012.8020.de00 (bia 0012.8020.de00)\n # Hardware is Bridge-Group Virtual Interface, address is 02a0.0964.e808\n # NX-OS:\n # address: 547f.eefd.a96a (bia 547f.eefd.a96a)\n mac = m.group('mac')\n bi_mac = m.group('bi_mac')\n try:\n if bi_mac:\n interface.burnin_mac_address = bi_mac\n if mac != bi_mac:\n interface.mac_address = mac\n except AttributeError:\n # Ok, attribute may be read-only, such as for SubInterface\n pass\n continue\n\n def build_config(self, apply=True, attributes=None):\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder()\n\n # TODO\n # \"logging console disable\"\n # \"logging monitor disable\"\n # \"logging buffered [expr 10 * 1024 * 1024]\"\n\n configurations.append_block(\n super().build_config(apply=False, attributes=attributes))\n\n # TODO\n # switch -regexp -- [enaTbGetTestDeviceParam $router -platform] {\n # {^ncs4\\d\\d\\d$} -\n # {^ncs5\\d\\d\\d$} -\n # {^ncs6\\d\\d\\d$} -\n # {^ng$} -\n # {^crs-ng$} -\n # {^enxr$} {\n # #exception choice not supported on these\n # continue\n # }\n #\n # default {\n # enaGetTftpServerInfo arr_tftp_info -router $router -default_sub_dir \"hfr-mpls\" ;# XXXJST /hfr-mpls ???\n # if { [info exists arr_tftp_info(tftp_addr)] } {\n # lappend cfgs($router) \\\n # \"exception choice [incr choice] compress on filepath\\\n # tftp://$arr_tftp_info(tftp_addr)/$arr_tftp_info(sub_dir)\"\n # }\n # if { $img_type eq \"ena\" } {\n # lappend cfgs($router) \\\n # \"exception choice [incr choice] compress on filepath disk0:\"\n # } else {\n # switch -regexp -- [enaTbGetTestDeviceParam $router -platform] {\n # {^xrvr$} {\n # lappend cfgs($router) \\\n # \"exception choice [incr choice] compress on filepath disk0:\"\n # }\n # {^xrv9\\d\\d\\d$} {\n # lappend cfgs($router) \\\n # \"exception choice [incr choice] filepath harddisk:\"\n # }\n # default {\n # lappend cfgs($router) \\\n # \"exception choice [incr choice] compress on filepath harddisk:\"\n # }\n # }\n # }\n # }\n # }\n\n # nodename\n if attributes.value('nodename'):\n configurations.append_line(\n attributes.format('hostname {nodename}'))\n\n if apply:\n if configurations:\n self.configure(str(configurations), fail_invalid=True)\n else:\n # Return configuration\n return CliConfig(device=self, unconfig=False,\n cli_config=configurations, fail_invalid=True)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n attributes = 
AttributesHelper(self, attributes)\n configurations = CliConfigBuilder()\n\n configurations.append_block(\n super().build_unconfig(apply=False, attributes=attributes,\n **kwargs))\n\n if apply:\n if configurations:\n self.configure(str(configurations), fail_invalid=True)\n else:\n # Return configuration\n return CliConfig(device=self, unconfig=True,\n cli_config=configurations, fail_invalid=True)\n\n def get_connected_node(self, connection=None):\n try:\n return self.connected_node\n except AttributeError:\n pass\n if not connection:\n connectionmgr = self.connectionmgr\n connection = connectionmgr.connections[connectionmgr.default_alias]\n output = connection.execute('')\n\n # '\\rRP/0/0/CPU0:'\n m = re.match(r'^RP/(?P<node>\d+(?:/(?:RP|RSP|CPU)?\d+)+):',\n output.strip())\n if not m:\n raise ValueError(\n 'Cannot determine {} connected node from prompt {}'.format(\n self.name,\n re.escape(output)))\n node = m.group('node')\n\n return node\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n", "id": "3023473", "language": "Python", "matching_score": 4.30896520614624, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/device/iosxr/device.py" }, { "content": "'''\n Generic Device class for Cisco-based devices.\n'''\n\n__all__ = (\n 'Device',\n)\n\nimport re\nimport logging\nlogger = logging.getLogger(__name__)\n\nfrom genie.decorator import managedattribute\nfrom genie.conf.base.attributes import AttributesHelper\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.config import CliConfig\n\nimport genie.libs.conf.device\nfrom genie.libs.conf.base.ipaddress import ip_address, IPv4Network\n\ndebug_clean_config = False\n\n\nclass Device(genie.libs.conf.device.Device):\n '''Base Device class for Cisco devices'''\n\n role = genie.libs.conf.device.Device.role.copy(\n default=genie.libs.conf.device.Device.Role.router)\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n def build_unconfig(self, apply=True, attributes=None,\n **kwargs):\n\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=True)\n\n configurations.append_block(\n super().build_unconfig(apply=False, attributes=attributes,\n **kwargs))\n\n if apply:\n if configurations:\n self.configure(str(configurations),\n # fail_invalid=True, -- best effort?\n )\n else:\n # Return configuration\n return CliConfig(device=self, unconfig=True,\n cli_config=configurations, fail_invalid=True)\n\n", "id": "5535269", "language": "Python", "matching_score": 1.1625865697860718, "max_stars_count": 0, "path": "pkgs/conf-pkg/src/genie/libs/conf/device/cisco/device.py" }, { "content": "from genie.conf.base.config import RestConfig\nfrom ..bgp import Bgp as BgpCli\n\nclass Bgp(BgpCli):\n class DeviceAttributes(BgpCli.DeviceAttributes):\n\n def build_config(self, apply=True, unconfig=False,\n **kwargs):\n\n # Get the cli output from Cli BGP\n output = BgpCli.DeviceAttributes.build_config(self=self,\n apply=False,\n unconfig=unconfig,\n **kwargs)\n\n # Get the straight cli from the Config object\n output = '\\n'.join(output.cli_config)\n\n # Add the necessary lines in front and at the end\n output = '{pri}\\n{output}\\n{show}'.format(\\\n output=output,\n pri='configure private sandbox',\n show='show configuration session nx-api rest')\n\n # Limitation: if feature bgp is not applied,\n # the conversion tool doesn't work\n if unconfig is False:\n self.device.configure('feature bgp')\n\n if apply:\n out = 
RestConfig(device=self.device, unconfig=unconfig,\n cli_payload=output, dn='api/mo/sys.json',\n partition='nx-api rest')\n out.apply()\n else:\n return RestConfig(device=self.device, unconfig=unconfig,\n cli_payload=output, dn='api/mo/sys.json',\n partition='nx-api rest')\n", "id": "230354", "language": "Python", "matching_score": 3.937077283859253, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/bgp/nxos/rest/bgp.py" }, { "content": "from genie.conf.base.config import RestConfig\nfrom ..interface import Interface as InterfaceCli\n\n\nclass Interface(InterfaceCli):\n def build_config(self, apply=True, unconfig=False,\n **kwargs):\n\n # Get the cli output from Cli Interface\n output = InterfaceCli.build_config(self=self,\n apply=False,\n unconfig=unconfig,\n **kwargs)\n\n # Get the straight cli from the Config object\n output = '\\n'.join(output.cli_config)\n\n # Add the necessary lines in front and at the end\n output = '{pri}\\n{output}\\n{show}'.format(\\\n output=output,\n pri='configure private sandbox',\n show='show configuration session nx-api rest')\n\n if apply:\n out = RestConfig(device=self.device, unconfig=unconfig,\n cli_payload=output, dn='api/mo/sys.json',\n partition='nx-api rest')\n out.apply()\n else:\n return RestConfig(device=self.device, unconfig=unconfig,\n cli_payload=output, dn='api/mo/sys.json',\n partition='nx-api rest')", "id": "2308355", "language": "Python", "matching_score": 0.3013571500778198, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/interface/nxos/rest/interface.py" }, { "content": "''' \nOSPF Genie Conf Object Implementation for IOSXR:\n - StubRouter multi-line configuration implementation for IOSXR - CLI\n'''\n\n# Python\nfrom abc import ABC\n\n# Genie\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.attributes import AttributesHelper\n\n\nclass StubRouter(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # router ospf 1\n # max-metric router-lsa include-stub summary-lsa external-lsa\n # max-metric router-lsa on-startup 50 include-stub summary-lsa external-lsa\n # max-metric router-lsa on-switchover 66 include-stub summary-lsa external-lsa\n if attributes.value('stub_router_always') or \\\n attributes.value('stub_router_on_startup') or\\\n attributes.value('stub_router_on_switchover'):\n \n # max-metric router-lsa\n sr_str = 'max-metric router-lsa'\n\n # + on-startup {stub_router_on_startup}\n # + on-switchover {stub_router_on_switchover}\n if attributes.value('stub_router_on_startup'):\n sr_str += ' on-startup {stub_router_on_startup}'\n elif attributes.value('stub_router_on_switchover'):\n sr_str += ' on-switchover {stub_router_on_switchover}'\n\n # + include-stub\n if attributes.value('stub_router_include_stub'):\n sr_str += ' include-stub'\n\n # + summary-lsa\n if attributes.value('stub_router_summary_lsa'):\n sr_str += ' summary-lsa'\n\n # + external-lsa\n if attributes.value('stub_router_external_lsa'):\n sr_str += ' external-lsa'\n\n configurations.append_line(attributes.format(sr_str))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)", "id": "670826", "language": "Python", "matching_score": 1.566440224647522, "max_stars_count": 94, "path": 
"pkgs/conf-pkg/src/genie/libs/conf/ospf/iosxr/stubrouter.py" }, { "content": "''' \nOSPF Genie Conf Object Implementation for IOSXE:\n - AreaNetwork multi-line configuration implementation for IOSXE - CLI\n'''\n\n# Python\nimport warnings\nfrom abc import ABC\n\n# Genie\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.attributes import AttributesHelper\n\n\nclass AreaNetwork(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # Get area information\n area = kwargs['area']\n\n # router ospf 1\n # network 192.168.1.0 0.0.0.0 area 2\n # network 192.168.1.1 1.1.1.1 area 3\n if attributes.value('area_network') and attributes.value('area_network_wildcard'):\n\n # network 192.168.1.0 0.0.0.0\n an_str = 'network {area_network} {area_network_wildcard}'\n\n # + area 2\n an_str += ' area {}'.format(area)\n \n configurations.append_line(attributes.format(an_str))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)", "id": "592057", "language": "Python", "matching_score": 2.0955166816711426, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/ospf/iosxe/areanetwork.py" }, { "content": "''' \nOSPF Genie Conf Object Implementation for NXOS:\n - AreaRange multi-line configuration implementation for NXOS - CLI\n'''\n\n# Python\nimport re\nimport warnings\nfrom abc import ABC\nfrom netaddr import IPNetwork\n\n# Genie\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.attributes import AttributesHelper\n\n\nclass AreaRange(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # Get area information\n area = kwargs['area']\n\n # router ospf 1\n # area 2 range 192.168.1.0 255.255.255.0 cost 10\n # area 2 range 192.168.1.0 255.255.255.0 advertise cost 10\n # area 2 range 192.168.1.0 255.255.255.0 not-advertise cost 10 \n if attributes.value('area_range_prefix'):\n\n # area {area}\n ar_str = 'area {}'.format(area)\n\n # + range {area_range_prefix}\n if re.search(\"\\/\", attributes.value('area_range_prefix')):\n range_val = IPNetwork(attributes.value('area_range_prefix'))\n prefix = str(range_val.ip)\n netmask = str(range_val.netmask)\n ar_str += ' range {} {}'.format(prefix, netmask)\n else:\n ar_str += ' range {area_range_prefix}'\n\n # + advertise\n # + not-advertise\n if attributes.value('area_range_advertise') is True:\n ar_str += ' advertise'\n elif attributes.value('area_range_advertise') is False:\n ar_str += ' not-advertise'\n\n # + cost {area_range_cost}\n if attributes.value('area_range_cost'):\n ar_str += ' cost {area_range_cost}'\n \n configurations.append_line(attributes.format(ar_str))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)", "id": "12507747", "language": "Python", "matching_score": 4.335984230041504, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/ospf/nxos/arearange.py" }, { "content": "''' \nOSPF Genie Conf Object Implementation for IOSXR:\n - AreaRange multi-line configuration implementation for IOSXR - CLI\n'''\n\n# Python\nimport re\nfrom abc 
import ABC\nfrom netaddr import IPNetwork\n\n# Genie\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.attributes import AttributesHelper\n\n\nclass AreaRange(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # router ospf 1\n # range 192.168.1.0 255.255.255.0\n # range 192.168.1.0 255.255.255.0 advertise\n # range 192.168.1.0 255.255.255.0 not-advertise\n if attributes.value('area_range_prefix'):\n\n # + range {area_range_prefix}\n if re.search(\"\\/\", attributes.value('area_range_prefix')):\n range_val = IPNetwork(attributes.value('area_range_prefix'))\n prefix = str(range_val.ip)\n netmask = str(range_val.netmask)\n ar_str = ' range {} {}'.format(prefix, netmask)\n else:\n ar_str = ' range {area_range_prefix}'\n\n # + advertise\n # + not-advertise\n if attributes.value('area_range_advertise') is True:\n ar_str += ' advertise'\n elif attributes.value('area_range_advertise') is False:\n ar_str += ' not-advertise'\n\n configurations.append_line(attributes.format(ar_str))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)", "id": "6600333", "language": "Python", "matching_score": 2.081923723220825, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/ospf/iosxr/arearange.py" }, { "content": "''' \nOSPF Genie Conf Object Implementation for IOSXE:\n - GracefulRestart multi-line configuration implementation for IOSXE - CLI\n'''\n\n# Python\nimport warnings\nfrom abc import ABC\n\n# Genie\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.attributes import AttributesHelper\n\n\nclass GracefulRestart(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # router ospf 1\n # nsf cisco helper\n # nsf cisco helper disable\n # nsf ietf helper\n # nsf ietf helper disable\n # nsf ietf helper strict-lsa-checking\n # nsf ietf restart-interval 50\n if attributes.value('gr_enable'):\n \n # nsf\n gr_str = 'nsf'\n\n # + {gr_type}\n if attributes.value('gr_type'):\n grtype = attributes.value('gr_type').value\n gr_str += ' {}'.format(grtype)\n\n # + helper\n # + helper disable\n # + helper strict-lsa-checking\n # + restart-interval {gr_restart_interval}\n if attributes.value('gr_helper_enable') is True:\n gr_str += ' helper'\n elif attributes.value('gr_helper_enable') is False:\n gr_str += ' helper disable'\n elif attributes.value('gr_helper_strict_lsa_checking') and grtype == 'ietf':\n gr_str += ' helper strict-lsa-checking'\n elif attributes.value('gr_restart_interval') and grtype == 'ietf':\n gr_str += ' restart-interval {gr_restart_interval}'\n\n configurations.append_line(attributes.format(gr_str))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)", "id": "961941", "language": "Python", "matching_score": 4.965299606323242, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/ospf/iosxe/gracefulrestart.py" }, { "content": "''' \nOSPF Genie Conf Object Implementation for IOSXR:\n - GracefulRestart multi-line 
configuration implementation for IOSXR - CLI\n'''\n\n# Python\nfrom abc import ABC\n\n# Genie\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.attributes import AttributesHelper\n\n\nclass GracefulRestart(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # router ospf 1\n # nsf cisco\n # nsf ietf\n # nsf ietf helper disable\n # nsf interval 500\n if attributes.value('gr_enable'):\n \n # nsf\n gr_str = 'nsf'\n\n # + {gr_type}\n if attributes.value('gr_type').value == 'cisco':\n gr_str += ' cisco'\n elif attributes.value('gr_type').value == 'ietf':\n gr_str += ' ietf'\n # + helper disable\n if attributes.value('gr_helper_enable') is False:\n gr_str += ' helper disable'\n elif attributes.value('gr_restart_interval'):\n gr_str += ' interval {gr_restart_interval}'\n\n configurations.append_line(attributes.format(gr_str))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)", "id": "1050157", "language": "Python", "matching_score": 2.271620750427246, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/ospf/iosxr/gracefulrestart.py" }, { "content": "''' \nOSPF Genie Conf Object Implementation for IOSXE:\n - InterfaceStaticNeighbor multi-line configuration implementation for IOSXE - CLI\n'''\n\n# Python\nimport warnings\nfrom abc import ABC\n\n# Genie\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.attributes import AttributesHelper\n\n\nclass InterfaceStaticNeighbor(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # interface GigabitEthernet1\n # neighbor 10.10.10.10 cost 100 poll-interval 66 priority 12\n if attributes.value('if_static_neighbor'):\n \n # neighbor 10.10.10.10 \n intf_cfg_str = 'neighbor {if_static_neighbor}'\n\n # + cost {if_static_cost}\n if attributes.value('if_static_cost'):\n intf_cfg_str += ' cost {if_static_cost}'\n\n # + poll-interval {if_static_poll_interval}\n if attributes.value('if_static_poll_interval'):\n intf_cfg_str += ' poll-interval {if_static_poll_interval}'\n\n # + priority {if_static_priority}\n if attributes.value('if_static_priority'):\n intf_cfg_str += ' priority {if_static_priority}'\n\n configurations.append_line(attributes.format(intf_cfg_str))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)", "id": "6066120", "language": "Python", "matching_score": 3.0999109745025635, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/ospf/iosxe/interfacestaticneighbor.py" }, { "content": "__all__ = (\n 'InterfaceStaticNeighbor'\n)\n\n# Python\nimport weakref\nimport functools\n\n# Genie\nfrom genie.decorator import managedattribute\nfrom genie.conf.base import ConfigurableBase\n\n\n@functools.total_ordering\nclass InterfaceStaticNeighbor(ConfigurableBase):\n\n @property\n def testbed(self):\n return self.device.testbed\n\n @property\n def device(self):\n return self._device()\n\n # ==========================================================================\n # MANAGED 
ATTRIBUTES\n # ==========================================================================\n\n # +- DeviceAttributes\n # +- VrfAttributes\n # +- AreaAttributes\n # +- InterfaceAttributes\n\n # if_static_neighbor\n if_static_neighbor = managedattribute(\n name='if_static_neighbor',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n # if_static_cost\n if_static_cost = managedattribute(\n name='if_static_cost',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # if_static_poll_interval\n if_static_poll_interval = managedattribute(\n name='if_static_poll_interval',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # if_static_priority\n if_static_priority = managedattribute(\n name='if_static_priority',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n # ==========================================================================\n\n # Overload __eq__\n def __eq__(self, other):\n if not isinstance(other, InterfaceStaticNeighbor):\n return NotImplemented\n\n return (self.if_static_neighbor,\n self.if_static_cost,\n self.if_static_poll_interval,\n self.if_static_priority,\n self.device) == \\\n (other.if_static_neighbor,\n other.if_static_cost,\n other.if_static_poll_interval,\n other.if_static_priority,\n other.device)\n\n # Overload __lt__\n def __lt__(self, other):\n if not isinstance(other, InterfaceStaticNeighbor):\n return NotImplemented\n\n str1 = '{} {} {} {}'.format(self.if_static_neighbor,\\\n self.if_static_cost,\\\n self.if_static_poll_interval,\\\n self.if_static_priority)\n str2 = '{} {} {} {}'.format(other.if_static_neighbor,\\\n other.if_static_cost,\\\n other.if_static_poll_interval,\\\n other.if_static_priority)\n return str1 < str2\n\n # Overload __hash__\n def __hash__(self):\n return hash((self.if_static_neighbor,\n self.if_static_cost,\n self.if_static_poll_interval,\n self.if_static_priority,\n self.device))\n\n # Overload __repr__\n def __repr__(self):\n return '%s object at 0x%x with static neighbor %s' % (\n self.__class__.__name__,\n id(self),\n self.if_static_neighbor)\n\n\n def __init__(self, device, *args, **kwargs):\n self._device = weakref.ref(device)\n super().__init__(*args, **kwargs)", "id": "9523752", "language": "Python", "matching_score": 0.8455725908279419, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/ospf/interfacestaticneighbor.py" }, { "content": "'''\n Generic Device class for TGEN-based devices.\n'''\n\n__all__ = (\n 'Device',\n)\n\nimport contextlib\nimport logging\nlogger = logging.getLogger(__name__)\n\nfrom genie.decorator import managedattribute\n\nimport genie.libs.conf.device\nfrom genie.libs.conf.stream.stream import Stream\n\nclass Device(genie.libs.conf.device.Device):\n '''Base Device class for TGEN devices'''\n\n role = genie.libs.conf.device.Device.role.copy(\n default=genie.libs.conf.device.Device.Role.tgen)\n\n @property\n def tgen_port_interfaces(self):\n from genie.libs.conf.interface.tgen import PhysicalInterface\n def testPhysicalInterface(intf):\n return isinstance(intf, PhysicalInterface)\n return self.find_interfaces(\n # XXXJST This is supposed to be self_, not callable_!!\n callable_=testPhysicalInterface)\n\n streams = managedattribute(\n name='streams',\n finit=set,\n read_only=True)
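\n\n # Illustrative lookup (hypothetical handle; filters by object state):\n # active_streams = tgen.find_streams(obj_state='active')\n def find_streams(self, *rs, iterable=None, count=None,\n cls=Stream, obj_state='active', **kwargs):\n '''Find Stream objects from Device object or from a provided 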
iterable'''\n if iterable is None:\n iterable = self.streams\n\n return self._find_objects(*rs, iterable=iterable, count=count, cls=cls,\n obj_state=obj_state, **kwargs)\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n def restart_traffic(self, **kwargs):\n raise NotImplementedError\n\n def start_traffic(self, **kwargs):\n raise NotImplementedError\n\n def stop_traffic(self, **kwargs):\n raise NotImplementedError\n\n def traffic_control(self, **kwargs):\n raise NotImplementedError\n\n def is_traffic_running(self, **kwargs):\n raise NotImplementedError\n\n def get_stream_stats(self, streams=None, **kwargs):\n raise NotImplementedError\n\n def get_stream_resolved_mac_addresses(self, streams=None):\n raise NotImplementedError\n\n def start_emulation(self, **kwargs):\n raise NotImplementedError\n\n def stop_emulation(self, **kwargs):\n raise NotImplementedError\n\n @contextlib.contextmanager\n def defer_apply_context(self):\n '''A context during which low-level apply calls are deferred.\n\n Implementation is Vendor-specific and may not be available, in such\n cases, this is a no-op.\n '''\n yield # no-op\n\n", "id": "3482987", "language": "Python", "matching_score": 2.7478508949279785, "max_stars_count": 0, "path": "pkgs/conf-pkg/src/genie/libs/conf/device/tgen/device.py" }, { "content": "'''\n Generic Interface classes for TGEN devices.\n'''\n\n__all__ = (\n 'Interface',\n 'PhysicalInterface',\n 'EmulatedInterface',\n 'VirtualInterface',\n 'SubInterface',\n)\n\nimport abc\n\nfrom genie.decorator import managedattribute\n\nimport genie.libs.conf.interface\n\n\nclass Interface(genie.libs.conf.interface.Interface):\n '''Base Interface class for TGEN devices'''\n\n @property\n def streams_tx(self):\n for stream in self.device.streams:\n if stream.source_tgen_interface is self:\n yield stream\n\n @property\n def streams_rx(self):\n for stream in self.device.streams:\n if self in stream.destination_tgen_interfaces:\n yield stream\n\n @abc.abstractmethod\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n\nclass PhysicalInterface(Interface,\n genie.libs.conf.interface.PhysicalInterface):\n '''Class for physical TGEN interfaces/ports'''\n\n tgen_port_configured = managedattribute(\n name='tgen_port_configured',\n default=False,\n type=managedattribute.test_istype(bool))\n\n @property\n def tgen_interface(self):\n '''Return the physical TGEN interface (self)'''\n return self\n\n @abc.abstractmethod\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n\nclass EmulatedInterface(Interface,\n genie.libs.conf.interface.EmulatedInterface):\n '''Class for emulated TGEN interfaces'''\n\n @property\n def tgen_interface(self):\n '''Return the physical TGEN interface (from emulated device)'''\n return self.device.tgen_interface\n\n @abc.abstractmethod\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n\nclass VirtualInterface(Interface,\n genie.libs.conf.interface.VirtualInterface):\n '''Class for virtual TGEN interfaces'''\n\n @abc.abstractmethod\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n\nclass SubInterface(VirtualInterface,\n genie.libs.conf.interface.SubInterface):\n '''Class for TGEN sub-interfaces\n\n Typically, TGEN sub-interfaces are purely logical and only meant for\n symmetry with sub-interfaces of normal devices as they do not have a state\n of their own and their configuration is done through their parent\n interface.\n '''\n\n def build_config(self, 
apply=True, attributes=None, **kwargs):\n '''SubInterface build_config of TGEN devices does nothing.\n Configuration is done through their parent_interface.\n '''\n return '' # no CLI\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n '''SubInterface build_unconfig of TGEN devices does nothing.\n Configuration is done through their parent_interface.\n '''\n return '' # no CLI\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n", "id": "9433913", "language": "Python", "matching_score": 1.8101460933685303, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/interface/tgen/interface.py" }, { "content": "'''\n Generic Interface classes for HLTAPI-based TGEN devices.\n'''\n\n__all__ = (\n 'Interface',\n 'PhysicalInterface',\n 'EthernetInterface',\n 'PosInterface',\n 'AtmInterface',\n 'EmulatedInterface',\n 'VirtualInterface',\n 'SubInterface',\n)\n\nimport abc\nfrom enum import Enum\n\nfrom genie.decorator import managedattribute\nfrom genie.conf.base.attributes import AttributesHelper\n\nimport genie.libs.conf.interface\nimport genie.libs.conf.interface.tgen\n\n\nclass Interface(genie.libs.conf.interface.tgen.Interface):\n '''Base Interface class for HLTAPI-based TGEN devices'''\n\n @abc.abstractmethod\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n\nclass PhysicalInterface(Interface,\n genie.libs.conf.interface.tgen.PhysicalInterface):\n '''Class for physical HLTAPI-based TGEN interfaces/ports'''\n\n class InterfaceMode(Enum):\n ethernet = 'ethernet'\n atm = 'atm'\n pos_hdlc = 'pos_hdlc'\n fr = 'fr'\n pos_ppp = 'pos_ppp'\n\n intf_mode = managedattribute(\n name='intf_mode',\n default=InterfaceMode.ethernet,\n type=InterfaceMode)\n\n tgen_port_handle = managedattribute(\n name='tgen_port_handle',\n doc='''The port handle, as understood by HLTAPI/low-level vendor APIs.\n\n If the HLTAPI connection sets this value on the interface\n object, this value will be returned.\n\n Otherwise, the Interface's name is used.\n ''')\n\n tgen_handle = managedattribute(\n name='tgen_handle',\n default=None,\n doc='''The logical interface configuration handle, as understood by\n HLTAPI/low-level vendor APIs.''')\n\n @tgen_port_handle.defaulter\n def tgen_port_handle(self):\n try:\n return self.tgen_port_handle\n except AttributeError:\n pass\n\n return self.name\n\n class PhysicalMode(Enum):\n fiber = gbic = 'fiber'\n copper = rj45 = 'copper'\n sfp = 'sfp'\n\n phy_mode = managedattribute(\n name='phy_mode',\n default=None,\n type=(None, PhysicalMode))\n\n class OperationalMode(Enum):\n normal = 'normal'\n loopback = 'loopback'\n\n op_mode = managedattribute(\n name='op_mode',\n default=OperationalMode.normal,\n type=OperationalMode)\n\n @property\n def layer2_peer_interfaces(self):\n '''Get the list of layer2 peer interfaces, the one(s) physically\n connected and from which layer2 protocol is chosen.'''\n # TODO find appropriate peer...\n # - from link of type [iflink ifmesh ctrlink]\n # - if multiple peers (broadcast), return all\n # **NOTE** \n # Links under Genie Interface object is deprecated\n # Placed the below workaround to bypass the Unittest (commented out)\n # for link in self.links:\n # if self.link.obj_state != 'active':\n # continue\n for interface in self.link.interfaces:\n if interface.obj_state != 'active':\n continue\n if interface.device is not self.device:\n yield interface\n\n gateway_interface = managedattribute(\n name='gateway_interface',\n type=(None, managedattribute.test_isinstance(\n 
genie.libs.conf.interface.Interface)))\n\n @gateway_interface.defaulter\n def gateway_interface(self):\n # TODO find appropriate peer...\n # - in priority from link of type [xc bd otnxc],\n # then [iflink ifmesh ctrlink], then other types.\n # - if multiple peers (broadcast), take the first.\n # **NOTE** \n # Links under Genie Interface object is deprecated\n # Placed the below workaround to bypass the Unittest (commented out)\n # for link in self.links:\n # if self.link.obj_state != 'active':\n # continue\n for interface in self.link.interfaces:\n if interface.obj_state != 'active':\n continue\n if interface.device is not self.device:\n return interface\n\n @property\n def gateway_ipv4(self):\n gw_ip = None\n gw_intf = self.gateway_interface\n if gw_intf is not None and gw_intf.ipv4:\n return gw_intf.ipv4.ip\n if self.ipv4:\n # Find a linked interface on the same IP network\n for gw_intf in (intf for intf in self.link.interfaces):\n if gw_intf is not self \\\n and gw_intf.ipv4 \\\n and gw_intf.ipv4.network == self.ipv4.network:\n return gw_intf.ipv4\n # Pick a dummy IP on the same network\n for gw_ip in self.ipv4.network:\n if gw_ip != self.ipv4.ip:\n return gw_ip\n return None\n\n @property\n def gateway_ipv6(self):\n gw_ip = None\n gw_intf = self.gateway_interface\n if gw_intf is not None and gw_intf.ipv6:\n return gw_intf.ipv6.ip\n if self.ipv6:\n # Find a linked interface on the same IP network\n for gw_intf in (intf for intf in self.link.interfaces):\n if gw_intf is not self \\\n and gw_intf.ipv6 \\\n and gw_intf.ipv6.network == self.ipv6.network:\n return gw_intf.ipv6\n # Pick a dummy IP on the same network\n for gw_ip in self.ipv6.network:\n if gw_ip != self.ipv6.ip:\n return gw_ip\n return None\n\n def _build_interface_config_hltkwargs(self, attributes=None,\n unconfig=False):\n attributes = AttributesHelper(self, attributes)\n\n hltkwargs = None\n\n if unconfig:\n\n if self.tgen_port_configured:\n hltkwargs = {}\n hltkwargs['port_handle'] = self.tgen_port_handle\n hltkwargs['mode'] = 'destroy'\n\n else:\n\n hltkwargs = {}\n hltkwargs['mode'] = mode = 'modify' \\\n if self.tgen_port_configured else 'config'\n hltkwargs['port_handle'] = self.tgen_port_handle\n # hltkwargs['aps'] = TODO\n # hltkwargs['aps_arch'] = TODO\n # hltkwargs['aps_channel'] = TODO\n # hltkwargs['aps_request_1_1'] = TODO\n # hltkwargs['aps_request_1_n'] = TODO\n # hltkwargs['aps_switch_mode'] = TODO\n hltkwargs.update(attributes.format_dict({\n 'phy_mode': '{phy_mode.value}'}))\n hltkwargs.update(attributes.format_dict({\n 'intf_mode': '{intf_mode.value}'}))\n if self.intf_mode is self.InterfaceMode.ethernet:\n hltkwargs.update(attributes.format_dict({\n 'autonegotiation': '{auto_negotiation:d}'}))\n # speed and duplex may also be used with auto_negotiation to\n # limit possibilities.\n hltkwargs.update(attributes.format_dict({\n 'speed': '{speed}'}))\n hltkwargs.update(attributes.format_dict({\n 'duplex': '{duplex}'}))\n elif self.intf_mode is self.InterfaceMode.atm:\n # hltkwargs['speed'] = TODO\n # hltkwargs['atm_enable_coset'] = TODO\n # hltkwargs['atm_enable_pattern_matching'] = TODO\n # hltkwargs['atm_encapsulation'] = TODO\n # hltkwargs['atm_filler_cell'] = TODO\n # hltkwargs['atm_interface_type'] = TODO\n # hltkwargs['atm_packet_decode_mode'] = TODO\n # hltkwargs['atm_reassembly_timeout'] = TODO\n pass\n elif self.intf_mode in (\n self.InterfaceMode.pos_hdlc,\n self.InterfaceMode.fr,\n self.InterfaceMode.pos_ppp,\n ):\n # hltkwargs['speed'] = TODO\n hltkwargs.update(attributes.format_dict({\n 
'tx_scrambling': '{tx_scrambling:d}'}))\n hltkwargs.update(attributes.format_dict({\n 'rx_scrambling': '{rx_scrambling:d}'}))\n else:\n raise ValueError(\n 'Unsupported intf_mode %r' % (self.intf_mode,))\n\n hltkwargs.update(attributes.format_dict({\n 'op_mode': '{op_mode.value}'}))\n if self.op_mode is self.OperationalMode.loopback:\n pass\n elif self.op_mode is self.OperationalMode.normal:\n if self.intf_mode is self.InterfaceMode.ethernet:\n hltkwargs.update(attributes.format_dict({\n 'src_mac_addr': '{mac_address}'}))\n if self.mac_address is not None:\n pass # hltkwargs['src_mac_addr_step'] = TODO\n if self.ipv4 is not None:\n # hltkwargs['arp_cache_retrieve'] = TODO\n # hltkwargs['arp_req_retries'] = TODO\n # hltkwargs['arp_req_timer'] = TODO\n hltkwargs.update(attributes.format_dict({\n 'arp_send_req': '1'}))\n if self.eth_encap_val1 is not None:\n hltkwargs['vlan'] = 1\n hltkwargs['vlan_id'] = self.eth_encap_val1\n # TODO\n # if { [set count [enaTbGetInterfaceParam $vIntf -count]] > 1 } {\n # hltkwargs['vlan_id_mode'] = \"increment\"\n # hltkwargs['vlan_id_step'] = [expr {\n # [enaTbGetInterfaceParam $vIntf -instance 1 -eth-encap-val1] -\n # $vlan\n # }]\n # hltkwargs['vlan_id_count'] = $count\n # } else {\n hltkwargs['vlan_id_mode'] = \"fixed\"\n # }\n # hltkwargs['vlan_user_priority'] = TODO\n if self.eth_encap_val2 is not None:\n hltkwargs['vlan_id_inner'] = self.eth_encap_val2\n # if { [set count [enaTbGetInterfaceParam $vIntf -count]] > 1 } {\n # hltkwargs['vlan_id_inner_mode'] = \"increment\"\n # hltkwargs['vlan_id_inner_step'] = [expr {\n # [enaTbGetInterfaceParam $vIntf -instance 1 -eth-encap-val2] -\n # $vlan\n # }]\n # hltkwargs['vlan_id_inner_count'] = $count\n # } else {\n hltkwargs['vlan_id_inner_mode'] = \"fixed\"\n # }\n else:\n hltkwargs['vlan'] = 0\n elif self.intf_mode is self.InterfaceMode.atm:\n pass\n elif self.intf_mode in (\n self.InterfaceMode.pos_hdlc,\n self.InterfaceMode.fr,\n self.InterfaceMode.pos_ppp,\n ):\n pass\n else:\n raise ValueError(\n 'Unsupported intf_mode %r' % (self.intf_mode,))\n\n if self.ipv4:\n hltkwargs['intf_ip_addr'] = self.ipv4.ip\n hltkwargs['netmask'] = self.ipv4.netmask\n gw_ip = self.gateway_ipv4\n if gw_ip:\n hltkwargs['gateway'] = gw_ip\n else:\n hltkwargs['intf_ip_addr'] = '0.0.0.0'\n hltkwargs['netmask'] = '255.255.255.0'\n hltkwargs['gateway'] = '0.0.0.0'\n\n if self.ipv6:\n hltkwargs['ipv6_intf_addr'] = self.ipv6.ip\n hltkwargs['ipv6_prefix_length'] = \\\n self.ipv6.network.prefixlen\n gw_ip = self.gateway_ipv6\n if gw_ip:\n hltkwargs['ipv6_gateway'] = gw_ip\n else:\n # hltkwargs['ipv6_intf_addr'] = '::'\n # hltkwargs['ipv6_prefix_length'] = 112\n # hltkwargs['ipv6_gateway'] = '::'\n pass\n\n # hltkwargs['auto_line_rdi'] = TODO\n # hltkwargs['auto_line_rei'] = TODO\n # hltkwargs['auto_path_rdi'] = TODO\n # hltkwargs['auto_path_rei'] = TODO\n # hltkwargs['clocksource'] = TODO\n # hltkwargs['collision_exponent'] = TODO\n # hltkwargs['control_plane_mtu'] = TODO\n # hltkwargs['crlf_path_trace'] = TODO\n # hltkwargs['data_integrity'] = TODO\n # hltkwargs['dst_mac_addr'] = TODO\n # hltkwargs['enforce_mtu_on_rx'] = TODO\n # hltkwargs['ether_pause_mode'] = TODO\n # hltkwargs['framing'] = TODO\n # hltkwargs['gre_checksum_enable'] = TODO\n # hltkwargs['gre_dst_ip_addr'] = TODO\n # hltkwargs['gre_ip_addr'] = TODO\n # hltkwargs['gre_ip_prefix_length'] = TODO\n # hltkwargs['gre_ipv6_addr'] = TODO\n # hltkwargs['gre_ipv6_prefix_length'] = TODO\n # hltkwargs['gre_key_enable'] = TODO\n # hltkwargs['gre_key_in'] = TODO\n # 
hltkwargs['gre_key_out'] = TODO\n # hltkwargs['gre_seq_enable'] = TODO\n # hltkwargs['ignore_pause_frames'] = TODO\n # hltkwargs['internal_ppm_adjust'] = TODO\n # hltkwargs['interpacket_gap'] = TODO\n # hltkwargs['lais_lrdi_threshold'] = TODO\n # hltkwargs['line_ais'] = TODO\n # hltkwargs['line_bip24'] = TODO\n # hltkwargs['line_bip384'] = TODO\n # hltkwargs['line_bip96'] = TODO\n # hltkwargs['line_rdi'] = TODO\n # hltkwargs['line_rei'] = TODO\n # hltkwargs['line_type'] = TODO\n # hltkwargs['long_lof_wait'] = TODO\n # hltkwargs['output_enable'] = TODO\n # hltkwargs['path_ais'] = TODO\n # hltkwargs['path_bip8'] = TODO\n # hltkwargs['path_rdi'] = TODO\n # hltkwargs['path_rei'] = TODO\n # hltkwargs['path_type'] = TODO\n # hltkwargs['pause_length'] = TODO\n # hltkwargs['port_setup_mode'] = TODO\n # hltkwargs['prdi_threshold'] = TODO\n # hltkwargs['rpr_hec_seed'] = TODO\n # hltkwargs['rx_c2'] = TODO\n # hltkwargs['rx_enhanced_prdi'] = TODO\n # hltkwargs['rx_equalization'] = TODO\n # hltkwargs['rx_fcs'] = TODO\n # hltkwargs['rx_hec'] = TODO\n # hltkwargs['section_bip8'] = TODO\n # hltkwargs['section_unequip'] = TODO\n # hltkwargs['signal_fail_ber'] = TODO\n # hltkwargs['src_mac_addr'] = TODO\n # hltkwargs['ss_bits_pointer_interp'] = TODO\n # hltkwargs['static_atm_header_encapsulation'] = TODO\n # hltkwargs['static_atm_range_count'] = TODO\n # hltkwargs['static_dlci_count_mode'] = TODO\n # hltkwargs['static_dlci_repeat_count'] = TODO\n # hltkwargs['static_dlci_repeat_count_step'] = TODO\n # hltkwargs['static_dlci_value'] = TODO\n # hltkwargs['static_dlci_value_step'] = TODO\n # hltkwargs['static_enable'] = TODO\n # hltkwargs['static_fr_range_count'] = TODO\n # hltkwargs['static_indirect'] = TODO\n # hltkwargs['static_intf_handle'] = TODO\n # hltkwargs['static_ip_dst_addr'] = TODO\n # hltkwargs['static_ip_dst_count'] = TODO\n # hltkwargs['static_ip_dst_count_step'] = TODO\n # hltkwargs['static_ip_dst_increment'] = TODO\n # hltkwargs['static_ip_dst_increment_step'] = TODO\n # hltkwargs['static_ip_dst_prefix_len'] = TODO\n # hltkwargs['static_ip_dst_prefix_len_step'] = TODO\n # hltkwargs['static_ip_dst_range_step'] = TODO\n # hltkwargs['static_ip_range_count'] = TODO\n # hltkwargs['static_l3_protocol'] = TODO\n # hltkwargs['static_lan_range_count'] = TODO\n # hltkwargs['static_mac_dst'] = TODO\n # hltkwargs['static_mac_dst_count'] = TODO\n # hltkwargs['static_mac_dst_count_step'] = TODO\n # hltkwargs['static_mac_dst_mode'] = TODO\n # hltkwargs['static_mac_dst_step'] = TODO\n # hltkwargs['static_pvc_count'] = TODO\n # hltkwargs['static_pvc_count_step'] = TODO\n # hltkwargs['static_range_per_spoke'] = TODO\n # hltkwargs['static_site_id'] = TODO\n # hltkwargs['static_site_id_enable'] = TODO\n # hltkwargs['static_site_id_step'] = TODO\n # hltkwargs['static_vci'] = TODO\n # hltkwargs['static_vci_increment'] = TODO\n # hltkwargs['static_vci_increment_step'] = TODO\n # hltkwargs['static_vci_step'] = TODO\n # hltkwargs['static_vlan_enable'] = TODO\n # hltkwargs['static_vlan_id'] = TODO\n # hltkwargs['static_vlan_id_mode'] = TODO\n # hltkwargs['static_vlan_id_step'] = TODO\n # hltkwargs['static_vpi'] = TODO\n # hltkwargs['static_vpi_increment'] = TODO\n # hltkwargs['static_vpi_increment_step'] = TODO\n # hltkwargs['static_vpi_step'] = TODO\n # hltkwargs['transmit_clock_source'] = TODO\n # hltkwargs['transmit_mode'] = TODO\n # hltkwargs['tx_c2'] = TODO\n # hltkwargs['tx_enhanced_prdi'] = TODO\n # hltkwargs['tx_fcs'] = TODO\n # hltkwargs['tx_k2'] = TODO\n # hltkwargs['tx_preemphasis_main_tap'] = TODO\n # 
hltkwargs['tx_preemphasis_post_tap'] = TODO\n # hltkwargs['tx_s1'] = TODO\n\n return hltkwargs\n\n @abc.abstractmethod\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n\nclass EthernetInterface(PhysicalInterface,\n genie.libs.conf.interface.EthernetInterface):\n '''Class for physical ethernet HLTAPI-based TGEN interfaces/ports'''\n\n intf_mode = PhysicalInterface.intf_mode.copy(\n default=PhysicalInterface.InterfaceMode.ethernet)\n\n # Because not all vendors agree on the default, make it False as defined in\n # the Cisco HLTAPI spec.\n auto_negotiation = genie.libs.conf.interface.EthernetInterface.auto_negotiation.copy(\n default=False)\n\n # Restrict duplex to only HLTAPI-allowed strings\n duplex = genie.libs.conf.interface.EthernetInterface.duplex.copy(\n type=(None, managedattribute.test_in((\n 'full',\n 'half',\n ))))\n\n # Restrict speed to only HLTAPI-allowed strings\n speed = genie.libs.conf.interface.EthernetInterface.speed.copy(\n type=(None, managedattribute.test_in((\n 'ether10',\n 'ether100',\n 'ether1000',\n 'ether10000',\n 'ether40Gig',\n 'ether100Gig',\n 'ether10000lan', # Ixia\n 'ether40000lan', # Ixia\n 'ether100000lan', # Ixia\n ))))\n\n @abc.abstractmethod # XXXJST TODO\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n\nclass AtmInterface(PhysicalInterface,\n genie.libs.conf.interface.AtmInterface):\n '''Class for physical ATM HLTAPI-based TGEN interfaces/ports'''\n\n intf_mode = PhysicalInterface.intf_mode.copy(\n default=PhysicalInterface.InterfaceMode.atm)\n\n @abc.abstractmethod # XXXJST TODO\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n\nclass PosInterface(PhysicalInterface,\n genie.libs.conf.interface.PosInterface):\n '''Class for physical POS HLTAPI-based TGEN interfaces/ports'''\n\n intf_mode = PhysicalInterface.intf_mode.copy(\n default=PhysicalInterface.InterfaceMode.pos_hdlc)\n\n tx_scrambling = managedattribute(\n name='tx_scrambling',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n rx_scrambling = managedattribute(\n name='rx_scrambling',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n @abc.abstractmethod # XXXJST TODO\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n\nclass EmulatedInterface(Interface,\n genie.libs.conf.interface.tgen.EmulatedInterface):\n '''Class for emulated HLTAPI-based TGEN interfaces'''\n\n @abc.abstractmethod\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n\nclass VirtualInterface(Interface,\n genie.libs.conf.interface.tgen.VirtualInterface):\n '''Class for virtual HLTAPI-based TGEN interfaces'''\n\n @abc.abstractmethod\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n\nclass SubInterface(VirtualInterface,\n genie.libs.conf.interface.tgen.SubInterface):\n '''Class for HLTAPI-based TGEN sub-interfaces'''\n\n @abc.abstractmethod\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n\n", "id": "3423938", "language": "Python", "matching_score": 5.933353424072266, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/interface/hltapi/interface.py" }, { "content": "'''\nDevice class for HLTAPI devices with ixia OS.\n'''\n\n__all__ = (\n 'Device',\n 'EmulatedDevice',\n)\n\nfrom enum import Enum\nimport logging\n\ntry:\n from ats.tcl import tclstr\n import ats.tcl\nexcept Exception:\n pass\n\nfrom genie.decorator import managedattribute\n\nfrom genie.libs.conf.device.hltapi import 
Device as HltapiDevice\nimport genie.libs.conf.device\nimport genie.libs.conf.interface.hltapi\nfrom genie.libs.conf.stream import Stream\nfrom genie.libs.conf.base import IPv4Address, IPv6Address, MAC\n\nlogger = logging.getLogger(__name__)\n#logger.setLevel(logging.DEBUG)\n\n\nclass Device(HltapiDevice):\n '''Device class for HLTAPI devices with ixia OS'''\n\n class Hltapi(HltapiDevice.Hltapi):\n '''Hltapi class customized for Ixia.\n \n The following APIs are also provided by the HLTAPI connection:\n - ixNet\n '''\n\n def traffic_config(self, **kwargs):\n if 'name' in kwargs:\n assert '.' not in kwargs['name'], \\\n 'Ixia stream names are used as stream IDs in Tcl keyed ' \\\n 'lists and should not contain \".\" characters: {}' \\\n .format(kwargs['name'])\n # Supports frame_size or l3_length\n try:\n kwargs['frame_size'] = kwargs.pop('packet_len')\n except KeyError:\n pass\n try:\n kwargs['frame_size'] = kwargs.pop('l2_length')\n except KeyError:\n pass\n # IPv4/IPv6 steps have to be in IP format\n try:\n kwargs['ip_src_step'] = str(IPv4Address(kwargs['ip_src_step']))\n except KeyError:\n pass\n try:\n kwargs['ip_dst_step'] = str(IPv4Address(kwargs['ip_dst_step']))\n except KeyError:\n pass\n try:\n kwargs['ipv6_src_step'] = str(IPv6Address(kwargs['ipv6_src_step']))\n except KeyError:\n pass\n try:\n kwargs['ipv6_dst_step'] = str(IPv6Address(kwargs['ipv6_dst_step']))\n except KeyError:\n pass\n # Legacy API accepts MAC steps can be in either MAC or integer format and internally converts to integer.\n # ixnetwork_540 API accepts only MAC steps in MAC format.\n try:\n kwargs['mac_src_step'] = str(MAC(kwargs['mac_src_step']))\n except KeyError:\n pass\n try:\n kwargs['mac_dst_step'] = str(MAC(kwargs['mac_dst_step']))\n except KeyError:\n pass\n try:\n kwargs['mac_src2_step'] = str(MAC(kwargs['mac_src2_step']))\n except KeyError:\n pass\n try:\n kwargs['mac_dst2_step'] = str(MAC(kwargs['mac_dst2_step']))\n except KeyError:\n pass\n try:\n kwargs['arp_src_hw_step'] = str(MAC(kwargs['arp_src_hw_step']))\n except KeyError:\n pass\n try:\n kwargs['arp_dst_hw_step'] = str(MAC(kwargs['arp_dst_hw_step']))\n except KeyError:\n pass\n # Ixia HLTAPI does not support l3_protocol=none, not sending it is equivalent.\n if kwargs.get('l3_protocol', None) == 'none':\n del kwargs['l3_protocol']\n # Ixia HLTAPI does not support l4_protocol=none, not sending it is equivalent.\n if kwargs.get('l4_protocol', None) == 'none':\n del kwargs['l4_protocol']\n # Ixia HLTAPI does not support mac_discovery_gw\n kwargs.pop('mac_discovery_gw', None)\n kwargs.pop('mac_discovery_gw_count', None)\n kwargs.pop('mac_discovery_gw_step', None)\n # Enable tracking\n if kwargs.get('mode', None) == 'create':\n kwargs.setdefault('track_by', 'traffic_item')\n # Extra Ixia options for MPLS\n if 'mpls_labels' in kwargs:\n kwargs.setdefault('mpls_labels_mode', 'fixed')\n kwargs.setdefault('mpls', 'enable')\n # Extra Ixia vlan toggle\n if kwargs.get('mode', None) == 'create' and 'vlan_id' in kwargs:\n kwargs.setdefault('vlan', 'enable')\n\n # -type is Agilent-specific. 
Default should be \"stream\"; Anything else is not supported.\n if kwargs.get('type', None) == 'stream':\n del kwargs['type']\n # -dut_type is Agilent-specific.\n kwargs.pop('dut_type', None)\n # Ixia uses -port_handle2 instead of -dest_port_list (for unidirectional streams)\n if 'dest_port_list' in kwargs:\n kwargs.setdefault('port_handle2', kwargs.pop('dest_port_list'))\n\n hltkl = self.pyats_connection.traffic_config(**kwargs)\n\n return hltkl\n\n def traffic_control(self, **kwargs):\n\n # TODO\n # if {\n # ![info exists opts(traffic_generator)] &&\n # [info exists ::enaTgnUtils::traffic_generator] &&\n # $::enaTgnUtils::traffic_generator ne \"\"\n # } {\n # set opts(traffic_generator) $::enaTgnUtils::traffic_generator\n # }\n\n # -port_handle is mandatory if -traffic_generator is \"ixos\", default to all anyway\n if 'port_handle' not in kwargs:\n kwargs['port_handle'] = self.device.all_port_handles\n\n hltkl = self.pyats_connection.traffic_control(**kwargs)\n\n return hltkl\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n\nclass EmulatedDevice(genie.libs.conf.device.EmulatedDevice):\n\n def __init__(self, name, *, tgen_interface, **kwargs):\n super().__init__(name=name, tgen_interface=tgen_interface, **kwargs)\n\n def build_config(self, *, apply=True, **kwargs):\n\n if self.tgen_handle is not None:\n # TODO support modifying values\n logger.warn('%r: Nothing to do (already configured).', self)\n return ''\n\n assert self.tgen_port_handle\n\n tgen_device = self.tgen_device\n hltapi = tgen_device.hltapi\n tcl = hltapi.tcl\n\n emulated_interface = self.emulated_interface\n emulated_loopback = self.emulated_loopback\n\n from genie.libs.conf.interface import EthernetInterface\n\n intf_kwargs = {}\n intf_kwargs['port_handle'] = self.tgen_port_handle\n\n # -count NUMERIC\n # DEFAULT 1\n # -check_opposite_ip_version CHOICES 0 1\n # DEFAULT 1\n # -override_existence_check CHOICES 0 1\n # -override_tracking CHOICES 0 1\n\n if emulated_interface and emulated_interface.ipv4:\n intf_kwargs['ipv4_address'] = emulated_interface.ipv4.ip\n # -ipv4_address_step IPV4\n intf_kwargs['ipv4_prefix_length'] = emulated_interface.ipv4.network.prefixlen\n # -ipv4_prefix_length NUMERIC\n if isinstance(emulated_interface, EthernetInterface):\n # -check_gateway_exists CHOICES 0 1\n # DEFAULT 0\n intf_kwargs['gateway_address'] = self.gateway_ipv4\n # -gateway_address_step IPV4\n\n if emulated_interface and emulated_interface.ipv6:\n intf_kwargs['ipv6_address'] = emulated_interface.ipv6.ip\n intf_kwargs['ipv6_prefix_length'] = emulated_interface.ipv6.network.prefixlen\n # -ipv6_address_step IPV6\n if isinstance(emulated_interface, EthernetInterface):\n intf_kwargs['ipv6_gateway'] = self.gateway_ipv6\n # -ipv6_gateway_step IPV6\n # -target_link_layer_address CHOICES 0 1\n\n intf_kwargs['loopback_count'] = 0\n if emulated_loopback and emulated_loopback.ipv4:\n intf_kwargs['loopback_count'] = 1\n intf_kwargs['loopback_ipv4_address'] = emulated_loopback.ipv4.ip\n # -loopback_ipv4_address_outside_step IPV4\n # -loopback_ipv4_address_step IPV4\n intf_kwargs['loopback_ipv4_prefix_length'] = emulated_loopback.ipv4.network.prefixlen\n if emulated_loopback and emulated_loopback.ipv6:\n intf_kwargs['loopback_count'] = 1\n intf_kwargs['loopback_ipv6_address'] = emulated_loopback.ipv6.ip\n # -loopback_ipv6_address_outside_step IPV6\n # -loopback_ipv6_address_step IPV6\n intf_kwargs['loopback_ipv6_prefix_length'] = emulated_loopback.ipv6.network.prefixlen\n\n if isinstance(emulated_interface, 
EthernetInterface):\n intf_kwargs['mac_address'] = emulated_interface.mac_address or self.tgen_interface.mac_address or '00:00:01:00:00:01'\n # -mac_address_step\n\n if emulated_interface.mtu is not None:\n intf_kwargs['mtu'] = emulated_interface.mtu\n\n # -atm_encapsulation CHOICES VccMuxIPV4Routed\n # CHOICES VccMuxIPV6Routed\n # CHOICES VccMuxBridgedEthernetFCS\n # CHOICES VccMuxBridgedEthernetNoFCS\n # CHOICES LLCRoutedCLIP\n # CHOICES LLCBridgedEthernetFCS\n # CHOICES LLCBridgedEthernetNoFCS\n # CHOICES VccMuxMPLSRouted\n # CHOICES VccMuxPPPoA\n # CHOICES LLCNLPIDRouted\n # CHOICES LLCPPPoA\n # -atm_vci RANGE 0-65535\n # -atm_vci_step RANGE 0-65535\n # -atm_vpi RANGE 0-255\n # -atm_vpi_step RANGE 0-255\n\n # -gre_count NUMERIC\n # DEFAULT 1\n # -gre_ipv4_address IPV4\n # -gre_ipv4_prefix_length NUMERIC\n # -gre_ipv4_address_step IPV4\n # -gre_ipv4_address_outside_connected_reset CHOICES 0 1\n # DEFAULT 1\n # -gre_ipv4_address_outside_connected_step IPV4\n # -gre_ipv4_address_outside_loopback_step IPV4\n # -gre_ipv6_address IPV6\n # -gre_ipv6_prefix_length NUMERIC\n # -gre_ipv6_address_step IPV6\n # -gre_ipv6_address_outside_connected_reset CHOICES 0 1\n # DEFAULT 1\n # -gre_ipv6_address_outside_connected_step IPV6\n # -gre_ipv6_address_outside_loopback_step IPV6\n # -gre_dst_ip_address IP\n # -gre_dst_ip_address_step IP\n # -gre_dst_ip_address_reset CHOICES 0 1\n # DEFAULT 1\n # -gre_dst_ip_address_outside_connected_step IP\n # -gre_dst_ip_address_outside_loopback_step IP\n # -gre_src_ip_address CHOICES connected routed\n # DEFAULT connected\n # -gre_checksum_enable CHOICES 0 1\n # -gre_seq_enable CHOICES 0 1\n # -gre_key_enable CHOICES 0 1\n # -gre_key_in NUMERIC\n # -gre_key_in_step NUMERIC\n # -gre_key_out NUMERIC\n # -gre_key_out_step NUMERIC\n\n intf_kwargs['vlan_enabled'] = 0\n if emulated_interface.eth_encap_val1 is not None:\n intf_kwargs['vlan_enabled'] = 1\n intf_kwargs['vlan_id'] = emulated_interface.eth_encap_val1 # REGEXP ^[0-9]{1,4}(,[0-9]{1,4})*$\n intf_kwargs['vlan_id_mode'] = 'fixed' # REGEXP ^(fixed|increment)(,(fixed|increment))*$\n # -vlan_id_step REGEXP ^[0-9]{1,4}(,[0-9]{1,4})*$\n # -vlan_tpid REGEXP ^0x[0-9a-fA-F]+(,0x[0-9a-fA-F]+)*$\n # -vlan_user_priority REGEXP ^[0-7](,[0-7])*$\n # -vlan_user_priority_step REGEXP ^[0-7](,[0-7])*$\n\n kl = tcl.cast_keyed_list(\n hltapi.ixNetworkProtocolIntfCfg(**intf_kwargs),\n item_cast=tclstr)\n if int(kl.get('status', 0)) == 0:\n raise ValueError(kl.get('log', 'Unknown message'))\n\n self.tgen_handle = tcl.cast_list(\n kl.connected_interfaces,\n item_cast=tclstr)\n\n # TODO self.name\n # TODO emulated_interface.ipv6_link_local\n\n return ''\n\n def build_unconfig(self, **kwargs):\n if self.tgen_handle is None:\n logger.warning('%r: Nothing to do (no tgen_handle).', self)\n return ''\n\n tgen_device = self.tgen_device\n hltapi = tgen_device.hltapi\n #tcl = hltapi.tcl\n\n try:\n hltapi.ixNetworkRemove(self.tgen_handle)\n hltapi.ixNetworkCommit()\n finally:\n self.tgen_handle = None\n\n return ''
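\n\n # Note: '<intf>|dummy|ProtocolIntf' below mirrors the handle format the\n # ixia HLTAPI uses internally for protocol interfaces (assumed convention).\n @property\n def _ix_hltapi_interface_handle(self):\n if self.tgen_handle:\n return tuple(\n '{}|dummy|ProtocolIntf'.format(ix_connected_interface)\n for ix_connected_interface in self.tgen_handle)\n", "id": "8397646", "language": "Python", "matching_score": 6.051332473754883, "max_stars_count": 0, "path": "pkgs/conf-pkg/src/genie/libs/conf/device/ixia/device.py" }, { "content": "'''\nDevice class for HLTAPI devices with pagent OS.\n'''\n\n__all__ = (\n 'Device',\n)\n\nfrom enum import Enum\nimport collections.abc\nimport 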
logging\nlogger = logging.getLogger(__name__)\n\ntry:\n from ats.tcl import tclstr, TclCommand\nexcept Exception:\n pass\n\nfrom genie.decorator import managedattribute\n\nfrom genie.libs.conf.device.hltapi import Device as HltapiDevice\nimport genie.libs.conf.interface.hltapi\nimport genie.libs.conf.device.ios\nfrom genie.libs.conf.stream import Stream\nfrom genie.libs.conf.base import IPv4Address, IPv6Address, MAC\n\n\nclass Device(HltapiDevice, genie.libs.conf.device.ios.Device):\n '''Device class for HLTAPI devices with pagent OS'''\n\n class Hltapi(HltapiDevice.Hltapi):\n '''Hltapi class customized for Pagent.'''\n\n def interface_config(self, **kwargs):\n\n # Pagent does not support -arp_send_req\n kwargs.pop('arp_send_req', None)\n # Pagent does not support -intf_mode\n kwargs.pop('intf_mode', None)\n # Pagent does not support -op_mode; Unset if \"normal\"\n if kwargs.get('op_mode', None) == 'normal':\n kwargs.pop('op_mode')\n # Pagent does not support -vlan; Unset if false\n if self.tcl.cast_boolean(kwargs.get('vlan', True)) is False:\n kwargs.pop('vlan')\n\n hltkl = self.pyats_connection.interface_config(**kwargs)\n\n return hltkl\n\n def traffic_config(self, **kwargs):\n\n # Supports l3_length\n try:\n kwargs['l2_length'] = kwargs.pop('frame_size')\n except KeyError:\n pass\n try:\n kwargs['l2_length'] = kwargs.pop('packet_len')\n except KeyError:\n pass\n if 'l2_length' in kwargs or 'l2_length_min' in kwargs:\n l2_encap = kwargs.get('l2_encap', '')\n if l2_encap in (\n 'ethernet_ii',\n 'ethernet_ii_vlan',\n 'ethernet_ii_unicast_mpls',\n 'ethernet_ii_multicast_mpls',\n 'ethernet_ii_vlan_unicast_mpls',\n 'ethernet_ii_vlan_multicast_mpls',\n 'ethernet_ii_pppoe',\n 'ethernet_ii_vlan_pppoe',\n 'ethernet_ii_qinq_pppoe',\n ):\n # L2 = ETH(14) [VLAN(n*4)] [MPLS(n*4)] [PPPoE(6) PPP(2)] L3 FCS(4)\n l2_hdr_len = 18 # ETH(14) ... 
FCS(4)\n if 'vlan' in l2_encap or 'qinq' in l2_encap:\n if 'vlan_id2' in kwargs or 'vlan_id_outer' in kwargs:\n l2_hdr_len += 8 # VLAN(2*4)\n else:\n l2_hdr_len += 4 # VLAN(4)\n if 'mpls' in l2_encap:\n l2_hdr_len += len(self.tcl.cast_list(kwargs['mpls_labels'])) * 4 # MPLS(n*4)\n if 'pppoe' in l2_encap:\n l2_hdr_len += 8 # PPPoE(6) PPP(2)\n elif l2_encap in (\n 'ethernet_mac_in_mac',\n 'atm_snap',\n 'atm_snap_802.3snap',\n 'atm_snap_802.3snap_nofcs',\n 'atm_snap_ethernet_ii',\n 'atm_snap_ppp',\n 'atm_snap_pppoe',\n 'atm_llcsnap',\n 'atm_vc_mux',\n 'atm_vc_mux_802.3snap',\n 'atm_vc_mux_802.3snap_nofcs',\n 'atm_vc_mux_ethernet_ii',\n 'atm_vc_mux_ppp',\n 'atm_vc_mux_pppoe',\n 'atm_mpls',\n 'hdlc_unicast',\n 'hdlc_broadcast',\n 'hdlc_unicast_mpls',\n 'hdlc_multicast_mpls',\n 'ppp_link',\n 'cisco_framerelay',\n 'ietf_framerelay',\n 'eth',\n 'raw_l2',\n ):\n # TODO\n l2_hdr_len = 18\n else:\n # TODO\n l2_hdr_len = 18\n try:\n kwargs['l3_length'] = int(kwargs['l2_length']) - l2_hdr_len\n except KeyError:\n pass\n try:\n kwargs['l3_length_min'] = int(kwargs['l2_length_min']) - l2_hdr_len\n except KeyError:\n pass\n try:\n kwargs['l3_length_max'] = int(kwargs['l2_length_max']) - l2_hdr_len\n except KeyError:\n pass\n try:\n kwargs['l3_length_step'] = int(kwargs['l2_length_step'])\n except KeyError:\n pass\n kwargs.pop('l2_length', None)\n kwargs.pop('l2_length_min', None)\n kwargs.pop('l2_length_max', None)\n kwargs.pop('l2_length_step', None)\n\n # IPv4/IPv6 steps have to be in IP format\n try:\n kwargs['ip_src_step'] = str(IPv4Address(kwargs['ip_src_step']))\n except KeyError:\n pass\n try:\n kwargs['ip_dst_step'] = str(IPv4Address(kwargs['ip_dst_step']))\n except KeyError:\n pass\n try:\n kwargs['ipv6_src_step'] = str(IPv6Address(kwargs['ipv6_src_step']))\n except KeyError:\n pass\n try:\n kwargs['ipv6_dst_step'] = str(IPv6Address(kwargs['ipv6_dst_step']))\n except KeyError:\n pass\n # MAC steps have to be in MAC format\n try:\n kwargs['mac_src_step'] = str(MAC(kwargs['mac_src_step']))\n except KeyError:\n pass\n try:\n kwargs['mac_dst_step'] = str(MAC(kwargs['mac_dst_step']))\n except KeyError:\n pass\n try:\n kwargs['mac_src2_step'] = str(MAC(kwargs['mac_src2_step']))\n except KeyError:\n pass\n try:\n kwargs['mac_dst2_step'] = str(MAC(kwargs['mac_dst2_step']))\n except KeyError:\n pass\n # Pagent HLTAPI does not support l3_protocol=none, not sending it is equivalent.\n if kwargs.get('l3_protocol', None) == 'none':\n del kwargs['l3_protocol']\n # Pagent HLTAPI does not support l4_protocol=none, not sending it is equivalent.\n if kwargs.get('l4_protocol', None) == 'none':\n del kwargs['l4_protocol']\n # Pagent does not support -mac_discovery_gw\n kwargs.pop('mac_discovery_gw', None)\n # Pagent does not support -name\n kwargs.pop('name', None)\n # Pagent only supports -mpls_labels in this format: <label>,<cos>,<bottom>,<ttl>\n if 'mpls_labels' in kwargs:\n mpls_labels = self.tcl.cast_list(kwargs['mpls_labels'], item_cast=tclstr)\n for i, mpls_label in enumerate(mpls_labels):\n try:\n mpls_label = int(mpls_label)\n except ValueError:\n continue\n else:\n mpls_label = '{mpls_label},{cos},{bottom},{ttl}'.format(\n mpls_label=mpls_label,\n cos=0,\n bottom=int(i == len(mpls_labels) - 1),\n ttl=0)\n mpls_labels[i] = mpls_label\n # store the converted labels back into kwargs\n kwargs['mpls_labels'] = mpls_labels
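\n # e.g. a single label 16 is rendered as '16,0,1,0' (label,cos,bottom,ttl)\n # Pagent does not support -gateway and -ipv6_gateway\n kwargs.pop('gateway', None)\n kwargs.pop('ipv6_gateway', None)\n # Pagent does not support -mpls_labels_mode, support is equivalent to \"fixed\"\n if kwargs.get('mpls_labels_mode', None) == 'fixed':\n del 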
kwargs['mpls_labels_mode']\n\n # -type is Agilent-specific. Default should be \"stream\"; Anything else is not supported.\n if kwargs.get('type', None) == 'stream':\n del kwargs['type']\n # -dut_type is Agilent-specific.\n kwargs.pop('dut_type', None)\n # Pagent does not support -dest_port_list\n kwargs.pop('dest_port_list', None)\n\n hltkl = self.pyats_connection.traffic_config(**kwargs)\n\n if kwargs.get('mode', None) == 'remove' \\\n and 'port_handle' in kwargs \\\n and 'stream_id' in kwargs:\n pagent_stream_ids_var = self.tcl.vars.byref('::Pagent::_Tgn_Info', array_index=kwargs['port_handle'])\n if pagent_stream_ids_var.exists():\n # Workaround a bug in Pagent where -mode remove does not\n # \"forget\" the stream IDs associated with a port\n pagent_stream_ids = self.tcl.cast_list(pagent_stream_ids_var.get_obj(), item_cast=tclstr)\n kwarg_stream_ids = set(self.tcl.cast_list(kwargs['stream_id'], item_cast=tclstr))\n pagent_stream_ids = [stream_id\n for stream_id in pagent_stream_ids\n if stream_id not in kwarg_stream_ids]\n if pagent_stream_ids:\n pagent_stream_ids_var.set(pagent_stream_ids)\n else:\n # If left empty, commands such as traffic_control -action\n # poll may return failure.\n pagent_stream_ids_var.unset()\n\n return hltkl\n\n def traffic_stats(self, **kwargs):\n\n if 'streams' in kwargs:\n streams_ids = self.tcl.cast_list(kwargs['streams'], item_cast=tclstr)\n Pagent_GET_STREAMS_STATS_proc = TclCommand(\n '::Pagent::GET_STREAMS_STATS',\n tcl=self.tcl)\n if len(streams_ids) > 1 \\\n and Pagent_GET_STREAMS_STATS_proc.exists() \\\n and 'lsearch $stream_id $streams' in Pagent_GET_STREAMS_STATS_proc.proc_body():\n # Due to a bug in ::Pagent::GET_STREAMS_STATS where stream\n # IDs are incorrectly matched (wrong order of [lsearch]\n # parameters), revert to getting all the streams on all or\n # only the specified ports\n del kwargs['streams']\n\n hltkl = self.pyats_connection.traffic_stats(**kwargs)\n\n return hltkl\n\n def traffic_control(self, **kwargs):\n\n hltkl = self.pyats_connection.traffic_control(**kwargs)\n\n if kwargs.get('action', None) == 'poll' \\\n and 'stopped' not in hltkl:\n # -------------------------- Keyed List: hltkl -------------------------\n # status = 0\n # log = {No traffic streams defined in Ethernet0/3 of SEB2-PT}\n # SEB2-PT_Ethernet0/1\n # stopped = 0\n # SEB2-PT_Ethernet0/3\n # stopped = 0\n # ----------------------------------------------------------------------\n stopped = True\n for k, v in hltkl.items():\n if not isinstance(v, collections.abc.Mapping):\n continue\n if 'stopped' not in v:\n continue\n stopped = self.tcl.cast_boolean(v['stopped'])\n if not stopped:\n break\n stopped = 1 if stopped else 0\n logger.debug('Pagent: setting hltkl stopped = %r', stopped)\n hltkl['stopped'] = stopped\n\n return hltkl\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n", "id": "7568309", "language": "Python", "matching_score": 3.9744699001312256, "max_stars_count": 0, "path": "pkgs/conf-pkg/src/genie/libs/conf/device/pagent/device.py" }, { "content": "'''\nDevice class for HLTAPI devices with agilent OS.\n'''\n\n__all__ = (\n 'Device',\n)\n\nfrom enum import Enum\nimport logging\nlogger = logging.getLogger(__name__)\n\nfrom genie.decorator import managedattribute\n\nfrom genie.libs.conf.device.hltapi import Device as HltapiDevice\nimport genie.libs.conf.interface.hltapi\nfrom genie.libs.conf.stream import Stream\n\n\nclass Device(HltapiDevice):\n '''Device class for HLTAPI devices with agilent OS'''\n\n class 
Hltapi(HltapiDevice.Hltapi):\n        '''Hltapi class customized for Agilent.'''\n\n        def traffic_control(self, **kwargs):\n\n            # Optional arg, but fails to stop with port_handle (at least it\n            # does for HLTAPI 2.25, 2.27 and 2.28)\n            kwargs.pop('port_handle', None)\n\n            hltkl = self.pyats_connection.traffic_control(**kwargs)\n\n            return hltkl\n\n    def __init__(self, *args, **kwargs):\n        super().__init__(*args, **kwargs)\n\n", "id": "7761375", "language": "Python", "matching_score": 0.7157682180404663, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/device/agilent/device.py" }, { "content": "'''\nDevice class for Ixia traffic generator device\n\nRequirements:\n    * IxOS/IxVM 8.42 or higher\n    * [Either] IxNetwork Windows API Server version 8.42 or higher\n    * [Either] IxNetwork Linux API Server version 8.42 or higher\n'''\n\n# Genie Devices\nfrom genie.libs.conf.device.cisco import Device as CiscoDevice\n\n\nclass Device(CiscoDevice):\n    '''Device class for Ixia traffic generator device\n    __init__ instantiates a single connection instance.'''\n\n    def __init__(self, *args, **kwargs):\n\n        super().__init__(*args, **kwargs)\n", "id": "3563725", "language": "Python", "matching_score": 4.311882972717285, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/device/ixiarestpy/device.py" }, { "content": "'''\nDevice class for Ixia traffic generator device\n\nRequirements:\n    * IxOS/IxVM 7.40 or higher\n    * IxNetwork EA version 7.40 or higher\n'''\n\n# Genie Devices\nfrom genie.libs.conf.device.cisco import Device as CiscoDevice\n\n\nclass Device(CiscoDevice):\n    '''Device class for Ixia traffic generator device\n    __init__ instantiates a single connection instance.'''\n\n    def __init__(self, *args, **kwargs):\n\n        super().__init__(*args, **kwargs)\n", "id": "3627695", "language": "Python", "matching_score": 0.5581804513931274, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/device/ixianative/device.py" }, { "content": "\nimport re\nimport functools\n\nfrom ats.topology import Device as ATSDevice\n\nfrom genie.utils.cisco_collections import OrderedSet\n\ntry:\n    from ats.tcl.internal import DictionaryCompare\nexcept Exception:\n    pass\n\n\nclass Device(ATSDevice):\n\n    @classmethod\n    def validate_name(cls, name):\n        # TODO Support other types\n        if re.match(r'^(?:R\\d+|TGEN)$', name):\n            return name\n        raise ValueError('Not a valid %s name: %r'\n                         % (cls.__name__, name))\n\n    def __init__(self, name, **kwargs):\n        self.validate_name(name)\n        # Parameters\n        kwargs.setdefault('label', [])\n        kwargs.setdefault('role', [])\n        kwargs.setdefault('draw_rank', None)\n        kwargs.setdefault('draw_cluster', None)\n        # Constraints\n        kwargs.setdefault('type', None)\n        kwargs.setdefault('match_name', None)\n        kwargs.setdefault('platform', None)\n        kwargs.setdefault('tgen_platform', None)\n        kwargs.setdefault('os', None)\n        kwargs.setdefault('multinode_requested', None)\n        kwargs.setdefault('predicates', None)\n        super().__init__(name=name, **kwargs)\n\n    @property\n    def device_name(self):\n        return self.name\n\n    @property\n    def interface_names(self):\n        '''OrderedSet of interface names (R#I#) on this device.'''\n        sRIs = [intf.interface_name for intf in self.interfaces.values()]\n        sRIs = OrderedSet(sorted(sRIs, key=functools.cmp_to_key(DictionaryCompare)))\n        return sRIs\n\n    @property\n    def next_interface_name(self):\n        I = len(self.interfaces) + 1\n        while True:\n            sRI = '%sI%d' % (self.name, I)\n            if sRI not in self.interfaces:\n                return sRI\n
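            # name already in use; try the next index\n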
"Python", "matching_score": 3.03039813041687, "max_stars_count": 0, "path": "pkgs/conf-pkg/src/genie/libs/conf/topology_mapper/device.py" }, { "content": "\nimport re\n\nfrom ats.topology import Interface as ATSInterface\n\n\nclass Interface(ATSInterface):\n\n @classmethod\n def validate_name(cls, name):\n if re.match(r'^(?:I\\d+)$', name):\n return name\n raise ValueError('Not a valid %s \"I#\" name: %r'\n % (cls.__class__.__name__, name))\n\n @classmethod\n def validate_name_RI(cls, name):\n if re.match(r'^(?:R\\d+|TGEN)(?:I\\d+)$', name):\n return name\n raise ValueError('Not a valid %s \"R#I#\" name: %r'\n % (cls.__class__.__name__, name))\n\n def __init__(self, name, **kwargs):\n self.validate_name_RI(name)\n # Parameters\n kwargs.setdefault('label', [])\n # Constraints\n #kwargs.setdefault('router', None)\n kwargs.setdefault('match_name', None)\n kwargs.setdefault('type', None)\n #kwargs.setdefault('engine', None)\n kwargs.setdefault('product_id', None)\n #kwargs.setdefault('diff_slot', None)\n #kwargs.setdefault('same_slot', None)\n kwargs.setdefault('predicates', None)\n super().__init__(name=name, **kwargs)\n\n @property\n def interface_name(self):\n return self.name # R#I#\n\n @property\n def device_name(self):\n return self.device.device_name # R#\n\n# vim: ft=python ts=8 sw=4 et\n", "id": "11615493", "language": "Python", "matching_score": 0.8924273252487183, "max_stars_count": 0, "path": "pkgs/conf-pkg/src/genie/libs/conf/topology_mapper/interface.py" }, { "content": "# Python\r\nimport logging\r\nimport time\r\nfrom os.path import basename\r\n\r\n# Genie\r\nfrom genie.harness.utils import connect_device\r\nfrom genie.metaparser.util.exceptions import SchemaEmptyParserError\r\n\r\n# PyATS\r\nfrom ats.utils.fileutils import FileUtils\r\nfrom ats.utils.fileutils import FileUtils\r\nfrom pyats.aetest.steps import Steps\r\n\r\n# Unicon\r\nfrom unicon.core.errors import SubCommandFailure\r\nfrom unicon.core.errors import ConnectionError\r\n\r\n# ISSU\r\nfrom genie.libs.sdk.apis.iosxe.issu.verify import (\r\n is_issu_in_state,\r\n is_issu_terminal_state_reached_on_slot,\r\n is_issu_rollback_timer_in_state,\r\n)\r\n\r\n# PLATFORM\r\nfrom genie.libs.sdk.apis.iosxe.platform.get import get_platform_standby_rp\r\nfrom genie.libs.sdk.apis.iosxe.platform.verify import is_platform_slot_in_state\r\n\r\n# UTILS\r\nfrom genie.libs.sdk.apis.utils import reconnect_device\r\n\r\nlog = logging.getLogger(__name__)\r\n\r\n\r\ndef prepare_issu(device, image, path, address, protocol=\"tftp\", disks=None):\r\n \"\"\" Prepare image and check device before starting issu process\r\n Args:\r\n device ('obj'): Device object\r\n image ('str'): Image name\r\n path ('str'): Path on dsetr\r\n protocol ('str'): Protocol to be used on copying image to device\r\n address ('str'): Address of server from where image will be copied.\r\n disks ('list'): List of disks where image will be copied\r\n Raises:\r\n Exception: Failed preparing ISSU image\r\n Returns:\r\n None\r\n \"\"\"\r\n\r\n if disks is None:\r\n disks = [\"bootflash:\", \"stby-bootflash:\"]\r\n\r\n for disk in disks:\r\n try:\r\n copy_issu_image_to_disk(\r\n device=device,\r\n disk=disk,\r\n path=path,\r\n protocol=protocol,\r\n address=address,\r\n image=image,\r\n )\r\n except Exception as e:\r\n raise Exception(e)\r\n\r\n\r\ndef copy_issu_image_to_disk(\r\n device, disk, path, address, image, protocol=\"tftp\"\r\n):\r\n \"\"\" Copy image from a server to disk\r\n Args:\r\n device ('obj'): Device object\r\n disk ('str'): Disk name\r\n address ('str'): 
Server address\r\n path ('str'): Path on server\r\n protocol ('str'): Transfer protocol\r\n image ('str'): Image name\r\n Raises:\r\n Exception: Failed copying ISSU image to disk\r\n Returns:\r\n None\r\n \"\"\"\r\n\r\n from_url = \"{protocol}://{address}//{path}/{image}\".format(\r\n protocol=protocol, address=address, path=path, image=image\r\n )\r\n\r\n filetransfer = FileUtils.from_device(device)\r\n\r\n filetransfer.copyfile(\r\n source=from_url, destination=disk, device=device, timeout_seconds=\"600\"\r\n )\r\n\r\n output = device.execute(\r\n \"dir {disk}{image}\".format(disk=disk, image=basename(image))\r\n )\r\n if \"Error\" not in output:\r\n log.info(\"Copied ISSU image to '{disk}'\".format(disk=disk))\r\n else:\r\n raise Exception(\r\n \"Unable to copy ISSU image to '{disk}'\".format(disk=disk)\r\n )\r\n\r\n\r\ndef perform_issu(device, image, disk, steps=Steps()):\r\n \"\"\" Execute ISSU on device\r\n Args:\r\n device ('obj'): Device object\r\n image ('str'): Image name on disk\r\n disk ('str'): Disk where is located image\r\n Raise:\r\n None\r\n Returns:\r\n None\r\n \"\"\"\r\n\r\n with steps.start(\"Command 'issu loadversion'\") as step:\r\n\r\n slot_number = get_platform_standby_rp(device=device)\r\n\r\n if not slot_number:\r\n raise ValueError(\"Could not retrieve standby rp slot number\")\r\n\r\n # Load version\r\n standby_slot = \"R{}\".format(slot_number)\r\n try:\r\n issu_loadversion(\r\n device=device, standby_slot=slot_number, disk=disk, image=image\r\n )\r\n except Exception:\r\n step.failed(\"Unable to execute 'issu loadversion'\")\r\n\r\n with steps.start(\"Command 'issu runversion'\") as step:\r\n\r\n if not is_platform_slot_in_state(\r\n device=device, slot=standby_slot, state=\"ok, standby\"\r\n ):\r\n step.failed(\r\n \"Slot {slot} is not in 'ok, standby' state\".format(\r\n slot=standby_slot\r\n )\r\n )\r\n\r\n if not is_issu_terminal_state_reached_on_slot(\r\n device=device, slot=standby_slot\r\n ):\r\n step.failed(\r\n \"Slot {slot} has not reached terminal state\".format(\r\n slot=standby_slot\r\n )\r\n )\r\n\r\n # Run version\r\n try:\r\n issu_runversion(device=device)\r\n except (Exception, ConnectionError) as e:\r\n step.failed(e)\r\n\r\n with steps.start(\"Command 'issu acceptversion'\") as step:\r\n\r\n in_state = is_issu_in_state(\r\n device=device, slot=standby_slot, expected_state=\"runversion\"\r\n )\r\n\r\n if not in_state:\r\n step.failed(\"Issu is not in state 'runversion'\")\r\n\r\n # Accept version\r\n try:\r\n issu_acceptversion(device=device)\r\n except Exception as e:\r\n step.failed(e)\r\n\r\n with steps.start(\r\n \"Save running-configuration to startup-configuration\"\r\n ) as step:\r\n\r\n filetransfer = FileUtils.from_device(device)\r\n filetransfer.copyconfiguration(\r\n source=\"running-config\",\r\n destination=\"startup-config\",\r\n device=device,\r\n )\r\n\r\n with steps.start(\"Command 'issu commitversion'\") as step:\r\n\r\n in_state = is_issu_in_state(\r\n device=device, slot=standby_slot, expected_state=\"acceptversion\"\r\n )\r\n\r\n if not in_state:\r\n step.failed(\"Issu is not in state 'acceptversion'\")\r\n\r\n in_state = is_issu_rollback_timer_in_state(\r\n device=device, slot=standby_slot, expected_state=\"inactive\"\r\n )\r\n\r\n if not in_state:\r\n step.failed(\"Issu rollback timer is not 'inactive'\")\r\n\r\n # Commit version\r\n try:\r\n issu_commitversion(device=device)\r\n except Exception as e:\r\n step.failed(e)\r\n\r\n with steps.start(\"Reload standby slot\") as step:\r\n\r\n slot_number = 
get_platform_standby_rp(device=device)\r\n\r\n        if not slot_number:\r\n            raise ValueError(\"Could not retrieve standby rp slot number\")\r\n\r\n        standby_slot = \"R{}\".format(slot_number)\r\n        try:\r\n            reload_issu_slot(device=device, slot=standby_slot)\r\n        except Exception as e:\r\n            step.failed(e)\r\n\r\n\r\ndef issu_loadversion(device, standby_slot, disk, image, timeout=1200):\r\n    \"\"\" Execute issu loadversion command on device\r\n        Args:\r\n            device ('obj'): Device object\r\n            standby_slot ('int'): Standby slot number\r\n            disk ('str'): Disk name\r\n            image ('str'): Image name\r\n            timeout ('int'): Time out in seconds\r\n        Raise:\r\n            Exception: Failed to load version on device\r\n        Returns:\r\n            None\r\n    \"\"\"\r\n    log.info(\"Loading version on slot {slot}\".format(slot=standby_slot))\r\n\r\n    try:\r\n        output = device.execute(\r\n            \"issu loadversion rp {srp} file {disk}{image}\".format(\r\n                srp=standby_slot, disk=disk, image=image\r\n            ),\r\n            timeout=timeout,\r\n        )\r\n    except Exception as e:\r\n        raise Exception(\"Unable to execute 'issu loadversion'\")\r\n\r\n    if \"FAILED\" in output:\r\n        device.execute(\"issu abortversion\", timeout=timeout)\r\n        raise Exception(\"Unable to execute 'issu loadversion'\")\r\n\r\n\r\ndef issu_runversion(device, timeout=300):\r\n    \"\"\" Execute issu runversion on device\r\n        Args:\r\n            device ('obj'): Device object\r\n            timeout ('int'): Timeout in seconds\r\n        Raise:\r\n            Exception: Failed to reconnect to device\r\n        Returns:\r\n            None\r\n    \"\"\"\r\n    log.info(\"Running version\")\r\n    try:\r\n        output = device.execute(\"issu runversion\", timeout=timeout)\r\n    except SubCommandFailure:\r\n        # Timeout Unicon SubCommandFailure expected\r\n        # Wait a bit as the device is booting with the ISSU upgrade image\r\n        time.sleep(timeout)\r\n\r\n    log.info(\"Reconnecting device\")\r\n    try:\r\n        reconnect_device(device=device)\r\n    except Exception as e:\r\n        log.error(\"Failed to reconnect to device {dev}\".format(dev=device.name))\r\n        raise ConnectionError(\r\n            \"Failed to connect to device {dev}\".format(dev=device.name)\r\n        )\r\n\r\n\r\ndef issu_acceptversion(device, timeout=300):\r\n    \"\"\" Execute issu acceptversion on device\r\n        Args:\r\n            device ('obj'): Device object\r\n            timeout ('int'): Timeout in seconds\r\n        Raise:\r\n            Exception: Failed executing 'issu acceptversion' command\r\n        Returns:\r\n            None\r\n    \"\"\"\r\n\r\n    try:\r\n        output = device.execute(\"issu acceptversion\", timeout=timeout)\r\n    except Exception as e:\r\n        raise Exception(\"Unable to execute 'issu acceptversion'\")\r\n\r\n    if \"FAILED\" in output:\r\n        log.error(\r\n            \"Failed executing command 'issu acceptversion'. \"\r\n            \"Aborting ISSU\"\r\n        )\r\n        device.execute(\"issu abortversion\", timeout=timeout)\r\n        raise Exception(\"Unable to execute 'issu acceptversion'\")\r\n\r\n\r\ndef issu_commitversion(device, timeout=1200):\r\n    \"\"\" Execute issu commitversion on device\r\n        Args:\r\n            device ('obj'): Device object\r\n            timeout ('int'): Timeout in seconds\r\n        Raise:\r\n            Exception: Failed executing 'issu commitversion' command\r\n        Returns:\r\n            None\r\n    \"\"\"\r\n    log.info(\"Committing version\")\r\n
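    # If the commit fails, 'issu abortversion' is issued below to abandon the upgrade\r\n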
abortversion\", timeout=timeout)\r\n raise Exception(\"Unable to execute 'issu commitversion'\")\r\n\r\n\r\ndef reload_issu_slot(device, slot, wait_time=60, timeout=1200):\r\n \"\"\" Reload slot on device\r\n Args:\r\n device ('obj'): Device object\r\n slot ('str'): Slot to be reloaded\r\n wait_time ('int'): Time to wait in seconds after slot reload\r\n Raise:\r\n Exception\r\n \"\"\"\r\n log.info(\"Reloading slot {slot}\".format(slot=slot))\r\n try:\r\n output = device.execute(\r\n \"hw-module slot {slot} reload\".format(slot=slot), timeout=timeout\r\n )\r\n except Exception as e:\r\n raise Exception(\r\n \"Unable to reload slot {slot} on device {dev}\".format(\r\n slot=slot, dev=device.name\r\n )\r\n )\r\n\r\n log.info(\"Sleeping for {sec} seconds\".format(sec=wait_time))\r\n time.sleep(wait_time)\r\n\r\n\r\ndef downgrade_issu_image_on_router(\r\n device, upgraded_image, downgrade_image, disk=\"bootflash:\", timeout=500\r\n):\r\n \"\"\" Execute software downgrade on router\r\n Args:\r\n device ('obj'): Device object\r\n upgraded_image ('str'): Name of current installed image\r\n downgrade_image ('str'): Name of image to be used in downgrade\r\n disk ('str'): Disk name \r\n timeout ('int'): Timeout in seconds\r\n Raise:\r\n SubCommandFailure: Failed downgrading image on device\r\n ConnectionError: Failed reconnecting to device\r\n Returns:\r\n None\r\n\r\n \"\"\"\r\n\r\n commands = (\r\n \"no boot system {disk}{upgraded_image}\\n\"\r\n \"boot system {disk}{downgrade_image}\".format(\r\n downgrade_image=downgrade_image,\r\n disk=disk,\r\n upgraded_image=upgraded_image,\r\n )\r\n )\r\n\r\n slot_number = get_platform_standby_rp(device=device)\r\n\r\n if not slot_number:\r\n raise ValueError(\"Could not retrieve standby rp slot number\")\r\n\r\n standby_slot = \"R{}\".format(slot_number)\r\n\r\n if not is_platform_slot_in_state(\r\n device=device, slot=standby_slot, state=\"ok, standby\"\r\n ):\r\n raise ValueError(\r\n \"Slot {slot} is not in 'ok, standby' state\".format(\r\n slot=standby_slot\r\n )\r\n )\r\n\r\n log.info(\"Applying configuration to device {dev}\".format(dev=device.name))\r\n try:\r\n device.configure(commands)\r\n except SubCommandFailure as e:\r\n raise SubCommandFailure(\r\n \"Could not apply the following configuratin on \"\r\n \"device {dev}:\\n{config}\".format(dev=device.name, config=commands)\r\n )\r\n\r\n log.info(\"Saving changes on device {dev}\".format(dev=device.name))\r\n try:\r\n device.execute(\"write memory\", timeout=timeout)\r\n except SubCommandFailure as e:\r\n raise SubCommandFailure(\r\n \"Could not execute command 'write memory' on \"\r\n \"device {dev}\".format(dev=device.name)\r\n )\r\n\r\n log.info(\"Reloading device {dev}\".format(dev=device.name))\r\n try:\r\n device.reload(timeout=timeout)\r\n except SubCommandFailure as e:\r\n # Timeout Unicon SubCommandFailure expected\r\n pass\r\n\r\n log.info(\r\n \"Waiting {sec} seconds before reconnecting to device\".format(\r\n sec=timeout\r\n )\r\n )\r\n\r\n time.sleep(timeout)\r\n\r\n log.info(\"Reconnecting device\")\r\n try:\r\n reconnect_device(device=device)\r\n except Exception as e:\r\n raise ConnectionError(\r\n \"Failed to connect to device {dev}\".format(dev=device.name)\r\n )\r\n", "id": "10102837", "language": "Python", "matching_score": 3.7778944969177246, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/issu/configure.py" }, { "content": "# Python\r\nimport logging\r\nfrom os.path import getsize\r\n\r\n# ATS\r\nfrom ats.utils.objects import R, find\r\n\r\n# 
# Genie\r\nfrom genie.utils.timeout import Timeout\r\nfrom genie.metaparser.util.exceptions import SchemaEmptyParserError\r\n\r\n# ISSU\r\nfrom genie.libs.sdk.apis.iosxe.issu.get import get_issu_free_space_on_disk\r\n\r\nlog = logging.getLogger(__name__)\r\n\r\n\r\ndef is_issu_terminal_state_reached_on_slot(\r\n    device, slot, max_time=1200, interval=120\r\n):\r\n    \"\"\" Verify if a slot has reached its terminal state\r\n        Args:\r\n            device ('obj'): Device object\r\n            slot ('str'): Slot to check\r\n            max_time ('int'): Max time checking\r\n            interval ('int'): Interval of checking\r\n        Raise:\r\n            None\r\n        Return:\r\n            True\r\n            False\r\n    \"\"\"\r\n    log.info(\"Verifying terminal state of slot {slot}\".format(slot=slot))\r\n\r\n    timeout = Timeout(max_time=max_time, interval=interval)\r\n\r\n    while timeout.iterate():\r\n        try:\r\n            output = device.parse(\"show issu state detail\")\r\n        except SchemaEmptyParserError as e:\r\n            timeout.sleep()\r\n            continue\r\n\r\n        reached = (\r\n            output[\"slot\"].get(slot, {}).get(\"terminal_state_reached\", False)\r\n        )\r\n        if reached:\r\n            return True\r\n\r\n        timeout.sleep()\r\n\r\n    return False\r\n\r\n\r\ndef is_issu_in_state(device, slot, expected_state, max_time=1200, interval=30):\r\n    \"\"\" Verify if ISSU is in state for a specific slot\r\n        Args:\r\n            device ('obj'): Device object\r\n            slot ('str'): Slot for which we need to check ISSU state\r\n            expected_state ('str'): Acceptable ISSU states are:\r\n                - loadversion\r\n                - runversion\r\n                - acceptversion\r\n                - commitversion\r\n            max_time ('int'): Max time checking issu state\r\n            interval ('int'): Interval of checking\r\n        Raise:\r\n            None\r\n        Return:\r\n            True\r\n            False\r\n    \"\"\"\r\n\r\n    assert expected_state in [\r\n        \"loadversion\",\r\n        \"runversion\",\r\n        \"acceptversion\",\r\n        \"commitversion\",\r\n    ]\r\n\r\n    rs = R([\"slot\", slot, \"last_operation\", expected_state])\r\n\r\n    timeout = Timeout(max_time=max_time, interval=interval)\r\n\r\n    while timeout.iterate():\r\n        try:\r\n            output = device.parse(\"show issu state detail\")\r\n        except SchemaEmptyParserError:\r\n            timeout.sleep()\r\n            continue\r\n\r\n        ret = find([output], rs, filter_=False, all_keys=True)\r\n        if ret:\r\n            return True\r\n\r\n        timeout.sleep()\r\n\r\n    return False\r\n\r\n\r\ndef is_issu_rollback_timer_in_state(\r\n    device, slot, expected_state, max_time=120, interval=30\r\n):\r\n    \"\"\" Verify if issu rollback timer is in state\r\n        Args:\r\n            device ('obj'): Device object\r\n            slot ('str'): Slot name\r\n            expected_state ('str'): Expected state ('active', 'inactive')\r\n            max_time ('int'): Max time checking\r\n            interval ('int'): Check interval\r\n        Return:\r\n            True\r\n            False\r\n        Raise:\r\n            None\r\n\r\n    \"\"\"\r\n\r\n    assert expected_state in [\"active\", \"inactive\"]\r\n    log.info(\r\n        \"Verifying roll back timer state on slot {slot}\".format(slot=slot)\r\n    )\r\n\r\n    timeout = Timeout(max_time=max_time, interval=interval)\r\n\r\n    while timeout.iterate():\r\n        try:\r\n            output = device.parse(\"show issu rollback-timer\")\r\n        except SchemaEmptyParserError:\r\n            timeout.sleep()\r\n            continue\r\n\r\n        if (\r\n            output\r\n            and \"rollback_timer_state\" in output\r\n            and output[\"rollback_timer_state\"] == expected_state\r\n        ):\r\n            return True\r\n\r\n        timeout.sleep()\r\n\r\n    return False\r\n", "id": "5676995", "language": "Python", "matching_score": 2.480879306793213, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/issu/verify.py" }, { "content": "# Python\r\nimport logging\r\n\r\n# Genie\r\nfrom genie.metaparser.util.exceptions import SchemaEmptyParserError\r\nfrom genie.utils.timeout import Timeout\r\n\r\n# REDUNDANCY\r\nfrom genie.libs.sdk.apis.iosxe.redundancy.get import (\r\n    get_redundancy_operational_state,\r\n)\r\n\r\nlog = logging.getLogger(__name__)\r\n\r\n\r\ndef is_redundancy_state_in_state(\r\n    device, expected_state, max_time=500, check_interval=5, output=\"\"\r\n):\r\n    \"\"\" Verify if redundancy state matches the expected state\r\n        Args:\r\n            device ('obj'): Device object\r\n            output ('dict'): Parsed output of show redundancy state\r\n            expected_state ('str'): Expected state\r\n            max_time ('int'): Max time in seconds to check redundancy state\r\n            check_interval ('int'): Interval in seconds between each check\r\n        Returns:\r\n            True\r\n            False\r\n        Raises:\r\n            None\r\n    \"\"\"\r\n\r\n    if output:\r\n        redundancy_state = output.get(\"redundancy_state\")\r\n    else:\r\n        redundancy_state = get_redundancy_operational_state(device=device)\r\n\r\n
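    # Poll the state, refreshing it each iteration, until it matches or the timer expires\r\n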
    timeout = Timeout(max_time, check_interval)\r\n    while timeout.iterate():\r\n\r\n        if redundancy_state == expected_state:\r\n            log.info(\r\n                \"Redundancy state on device {dev} is {state}\".format(\r\n                    dev=device.name, state=redundancy_state\r\n                )\r\n            )\r\n            return True\r\n\r\n        log.info(\r\n            \"Redundancy state in device {dev} is {state}.\\n\"\r\n            \"Expected {expected_state}\".format(\r\n                dev=device.name,\r\n                state=redundancy_state,\r\n                expected_state=expected_state,\r\n            )\r\n        )\r\n\r\n        redundancy_state = get_redundancy_operational_state(device=device)\r\n\r\n        timeout.sleep()\r\n\r\n    log.error(\r\n        \"Redundancy state in device {dev} is {state}.\\n\"\r\n        \"Expected {expected_state}\".format(\r\n            dev=device.name,\r\n            state=redundancy_state,\r\n            expected_state=expected_state,\r\n        )\r\n    )\r\n\r\n    return False\r\n", "id": "3383986", "language": "Python", "matching_score": 2.583301544189453, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/redundancy/verify.py" }, { "content": "# Python\r\nimport logging\r\n\r\n# Genie\r\nfrom genie.metaparser.util.exceptions import SchemaEmptyParserError\r\n\r\nlog = logging.getLogger(__name__)\r\n\r\n\r\ndef get_redundancy_operational_state(device):\r\n    \"\"\" Get redundancy states of device\r\n        Args:\r\n            device ('obj'): Device object\r\n        Returns:\r\n            String: Redundancy state\r\n            None\r\n        Raises:\r\n            None\r\n    \"\"\"\r\n\r\n    try:\r\n        output = device.parse(\"show redundancy states\")\r\n    except SchemaEmptyParserError:\r\n        log.info(\r\n            \"Command 'show redundancy states' has \"\r\n            \"not returned any results\"\r\n        )\r\n        return None\r\n\r\n    redundancy_state = output.get(\"redundancy_state\", None)\r\n\r\n    if redundancy_state:\r\n        log.info(\r\n            \"Found redundancy state {state} on device {dev}\".format(\r\n                state=redundancy_state, dev=device.name\r\n            )\r\n        )\r\n    else:\r\n        log.info(\r\n            \"Could not find any redundancy state on device {dev}\".format(\r\n                dev=device.name\r\n            )\r\n        )\r\n\r\n    return redundancy_state\r\n", "id": "878677", "language": "Python", "matching_score": 0.8885486125946045, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/redundancy/get.py" }, { "content": "'''IOSXR implementation of InitExecCommands class'''\n\n\nclass InitExecCommands(object):\n    \"\"\"InitExecCommands class\n\n    `InitExecCommands` class provides the functionality to retrieve the\n    platform specific initiation executed commands.\n\n    \"\"\"\n\n    def get_commands(self):\n        \"\"\"Method to return the list of the platform specific initiation\n        executed commands\n\n        Args:\n            None\n\n        Returns:\n            `list`: a `list` of the initiation commands\n\n        Examples:\n            # Creating an instance of InitExecCommands\n            >>> init_exec_commands_instance = Lookup.from_device(device).sdk.\\\n                libs.abstracted_libs.init_exec_commands.InitExecCommands()\n\n            # Calling get_commands method\n            >>> init_exec_commands = init_exec_commands_instance.get_commands()\n            ['term length 0', 'term width 0']\n\n        \"\"\"\n\n        # Platform specific initialization commands\n        init_commands = ['term length 0', 'term width 0']\n\n        return init_commands", "id": "4323765", "language": "Python", "matching_score": 1.8619271516799927, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/libs/abstracted_libs/iosxr/init_exec_commands.py" }, { "content": "'''Junos implementation of ConfigureSnapshotCommand class'''\n\n\nclass ConfigureSnapshotCommand(object):\n    \"\"\"ConfigureSnapshotCommand class\n\n    `ConfigureSnapshotCommand` class provides the functionality to\n    retrieve the platform specific configurations command.\n\n    \"\"\"\n\n    @property\n    def command(self):\n        \"\"\"Method to return the configurations fetch command.\n\n        Args:\n            None\n\n        Returns:\n            `str`: a `str` of the command\n\n        Examples:\n            # Creating an instance of ConfigureSnapshotCommand\n            >>> configure_snapshot_command = Lookup.from_device(device).sdk.\\\n                libs.abstracted_libs.configure_snapshot_command.ConfigureSnapshotCommand()\n\n            # Accessing the command property\n            >>> configure_snapshot_command = configure_snapshot_command.command\n            'show configuration | display set'\n\n        \"\"\"\n        # return Junos default command\n        return 'show configuration | display set'\n", "id": "1228353", "language": "Python", "matching_score": 0.9588857889175415, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/libs/abstracted_libs/junos/configure_snapshot_command.py" }, { "content": "'''Junos implementation of ManagementInterface class'''\n\n# parser\nfrom genie.libs.parser.junos.show_interface import ShowInterfacesTerse\nfrom genie.metaparser.util.exceptions import SchemaEmptyParserError\n\n# ManagementInterface\nfrom ..management_interface import ManagementInterface as ManagementInterface_main\n\n\nclass ManagementInterface(ManagementInterface_main):\n    \"\"\"ManagementInterface class\n\n    `ManagementInterface` class provides the functionality to retrieve the given ip\n    address corresponding interface name.\n\n    Args:\n        `None`\n\n    Returns:\n        an `interface_name` string\n\n    Examples:\n        # Creating an instance of ManagementInterface\n        management_interface_instance = lookup.sdk.libs.abstracted_libs.\\\n            management_interface.ManagementInterface()\n\n        >>> management_interface_instance = lookup.sdk.libs.\\\n            abstracted_libs.management_interface.ManagementInterface()\n        <genie.libs.sdk.libs.abstracted_libs.nxos.management_interface.\\\n        ManagementInterface object at 0xf5a73a2c>\n\n    \"\"\"\n\n    def get_interface_name(self, device, ipaddress):\n        \"\"\"Method to return the ip address corresponding interface name\n\n        Args:\n            device (`Device`): Genie device object\n            ipaddress (`str`): connection ip address\n\n        Returns:\n            `str`: a `str` of the interface name\n\n        Examples:\n            >>> management_interface_instance.\\\n                get_interface_name(device, ipaddress)\n\n        \"\"\"\n\n        # Create parser object\n        parser_obj = ShowInterfacesTerse(device=device)\n
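        # fxp0 is the Junos management interface, so only its entries are parsed\n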
        try:\n            parsed_output = parser_obj.parse(interface='fxp0')\n        except SchemaEmptyParserError:\n            # We are looping over all the ips provided in the testbed yaml file\n            # Show command output will be empty in some cases.\n            return None\n\n        # Get the corresponding interface name\n        for intf in parsed_output.keys():\n            # Parser structure only has one interface\n            if parsed_output.get(intf, {}).get('protocol', {}).get('inet', {}).keys() or \\\n               parsed_output.get(intf, 
{}).get('protocol', {}).get('inet6', {}).keys():\n                return intf\n", "id": "11930491", "language": "Python", "matching_score": 5.694829940795898, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/libs/abstracted_libs/junos/management_interface.py" }, { "content": "'''ManagementInterface class'''\n\n# metaparser\nfrom genie.metaparser.util.exceptions import SchemaEmptyParserError\n\n\nclass ManagementInterface(object):\n    \"\"\"ManagementInterface class\n\n    `ManagementInterface` class provides the functionality to retrieve the given ip\n    address corresponding interface name.\n\n    Args:\n        `None`\n\n    Returns:\n        an `interface_name` string\n\n    Examples:\n        # Creating an instance of ManagementInterface\n        management_interface_instance = lookup.sdk.libs.abstracted_libs.\\\n            management_interface.ManagementInterface()\n\n        >>> management_interface_instance = lookup.sdk.libs.\\\n            abstracted_libs.management_interface.ManagementInterface()\n        <genie.libs.sdk.libs.abstracted_libs.nxos.management_interface.\\\n        ManagementInterface object at 0xf5a73a2c>\n\n    \"\"\"\n\n    def get_interface_name(self, device, ipaddress, parser_obj=None):\n        \"\"\"Method to return the ip address corresponding interface name\n\n        Args:\n            ipaddress (`str`): connection ip address\n            parser_obj (`obj`): Os specific parser object\n\n        Returns:\n            `str`: a `str` of the interface name\n\n        Examples:\n            >>> management_interface_instance.\\\n                get_interface_name(ipaddress, parser_obj)\n\n        \"\"\"\n\n        # Calling parser\n        try:\n            parsed_output = parser_obj.parse(ip=ipaddress)\n        except SchemaEmptyParserError:\n            # We are looping over all the ips provided in the testbed yaml file\n            # Show command output will be empty in some cases.\n            return None\n\n        # Get the corresponding interface name\n        for intf in parsed_output['interface'].keys():\n            # Parser structure only has one interface\n            interface_name = intf\n\n        return interface_name", "id": "6562483", "language": "Python", "matching_score": 1.2848491668701172, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/libs/abstracted_libs/management_interface.py" }, { "content": "import os\nimport sys\nimport yaml\nimport shutil\nimport logging\nimport inspect\nimport argparse\nimport importlib\nfrom ats.log.utils import banner\nfrom collections import OrderedDict\nfrom pathlib import Path\n\nfrom genie.metaparser import MetaParser\nfrom genie.metaparser.util import merge_dict\n\nlogging.basicConfig(stream=sys.stdout, level=logging.INFO)\nlog = logging.getLogger(__name__)\n\nEXCLUDE_PARSERS = ['ShowDir', 'ShowBgpPolicyStatistics', 'ShowBgpVrfAllNeighborsRoutes',\n                   'ShowBgpPolicyStatisticsNeighbor', 'ShowBgpVrfAllNeighborsAdvertisedRoutes',\n                   'ShowBgpVrfAllNeighborsReceivedRoutes', 'ShowShowBgpVrfAllNeighborsRoutes',\n                   'ShowIpOspfVrfAll', 'ShowIpOspfNeighborsDetailVrfAll', 'ShowIpOspfInterfaceVrfAll',\n                   'ShowIpOspfDatabase', 'ShowIpRipNeighborVrfAll', 'ShowIpRipStatistics', 'ShowIpRipVrfAll',\n                   'ShowIpRipInterfaceVrfAll', 'ShowIpRipRouteVrfAll', 'ShowIpRpf', 'ShowIpv6Rpf',\n                   'ShowIpv6RipInterfaceVrfAll', 'ShowIpv6RipNeighborVrfAll', 'ShowIpv6RipStatistics',\n                   'ShowIpv6RipVrfAll', 'ShowRunRip', 'ShowSystemInternalSysmgrServiceName',\n                   'ShowIpIgmpSsmMapping', 'ShowIpv6MldSsmMap', 'ShowPimRp', 'ShowPimNeighbor',\n                   'ShowIpOspf', 'ShowBgpAllNeighborsAdvertisedRoutes', 'ShowRunningConfigInterface',\n                   'ShowBgpAllNeighborsPolicy', 'ShowBgpAllNeighborsReceivedRoutes',\n                   'ShowBgpAllNeighborsRoutes', 'ShowBgpInstanceNeighborsAdvertisedRoutes',\n                   'ShowBgpInstanceNeighborsReceivedRoutes', 
'ShowBgpInstanceNeighborsRoutes',\n 'ShowL2VpnXconnectMp2mpDetail', 'ShowL2vpnForwardingProtectionMainInterface',\n 'ShowL2vpnForwardingBridgeDomainMacAddress', 'ShowOspfVrfAllInclusiveLinksParser',\n 'ShowOspfVrfAllInclusiveDatabaseParser', 'ShowIpOspfDatabaseDetailParser',\n 'ShowIpOspfDatabaseParser', 'ShowIpOspfLinksParser', 'ShowBgpSessions', 'ShowEvpnEvi',\n 'ShowEvpnEviDetail', 'ShowL2VpnXconnectSummary', 'ShowL2VpnXconnectBrief',\n 'ShowL2VpnXconnectDetail', 'ShowL2VpnBridgeDomain', 'ShowL2VpnBridgeDomainBrief',\n 'ShowL2VpnBridgeDomainDetail', 'ShowL2VpnBridgeDomainSummary', 'ShowEvpnEthernetSegment',\n 'ShowIpRouteWord', 'ShowIpv6RouteWord', 'ShowInterfacesCounters','ShowPowerInlineInterface',\n 'ShowProcessesCpuSorted','ShowVlanOld', 'ShowIpMsdpPolicyStatisticsSaPolicyIn',\n 'ShowIpMsdpPolicyStatisticsSaPolicyInOut', 'ShowIpMsdpPolicyStatisticsSaPolicyOut',\n 'ShowBgpL2vpnEvpnWord', 'ShowL2routeEvpnMacEvi', 'ShowMacAddressTableVni',\n 'ShowIpInterfaceBriefPipeIp', 'ShowLispExtranet', 'ShowBgpPolicyStatisticsParser']\n\nEXCLUDE_DEVICES = ['Verify_BgpOpenconfigYang_yang',\n 'Verify_BgpProcessVrfAll_yang',\n 'Verify_BgpVrfAllNeighbors_yang_vrf_default',\n 'Verify_BgpInstanceNeighborsDetail_yang_vrf_type_all',\n 'Verify_BgpInstanceNeighborsDetail_yang_vrf_type_vrf_af_type_ipv4_unicast',\n 'Verify_BgpInstanceNeighborsDetail_yang_vrf_type_vrf_af_type_ipv6_unicast',\n 'Verify_BgpInstanceProcessDetail_yang_vrf_type_all',\n 'Verify_BgpInstanceProcessDetail_yang_vrf_type_vrf_af_type_ipv4_unicast',\n 'Verify_BgpInstanceProcessDetail_yang_vrf_type_vrf_af_type_ipv6_unicast',\n 'Verify_EthernetTags_yang',\n 'Verify_BgpPolicyStatisticsParser_xml',\n 'Verify_IpInterfaceBrief_yang', 'Verify_StandbyAll_yang',\n 'Verify_IpInterfaceBriefPipeVlan_yang',\n 'Verify_NveInterface']\n\nCONTEXTS = ['cli', 'yang', 'xml', 'rest']\nOSES = ['iosxe', 'ios', 'iosxr', 'nxos', 'junos']\nYAMLS = os.path.join(os.environ['VIRTUAL_ENV'], 'genie_yamls')\n\nclass CreateVerificationDataFiles(object):\n def __init__(self):\n # No idea\n # Make sure we got a virtual_env\n assert 'VIRTUAL_ENV' in os.environ\n self.parsers = {}\n\n def find_all_parsers(self):\n # User should have pypi/genieparser location to use the the\n # verification_generator\n # Can't access parsers under projects in the new structured design\n path = ['pypi', 'genieparser', 'src', 'genie', 'libs', 'parser']\n root_dir = os.path.join(os.environ['VIRTUAL_ENV'],\n 'pypi',\n 'genieparser')\n\n for dirname, subdir, files in os.walk(root_dir):\n # dirName is how we categorize them - Right now only support os\n log.debug('Directory: {dirname}'.format(dirname=dirname))\n\n # For each file\n for file in files:\n # Safe assumption ?\n # Check class within, if no Schema in name, assume its a parser\n filename = Path(file)\n dirname = os.path.basename(dirname)\n if not filename.suffix == '.py' or\\\n filename.name == '__init__.py' or\\\n dirname not in OSES or\\\n dirname == 'tests' or\\\n dirname in path:\n continue\n\n log.debug('File: {file}'.format(file=file))\n pythonpath = '{p}.{d}.{f}'.format(p='.'.join(path),\n d=dirname,\n f=filename.stem)\n module = importlib.import_module(pythonpath)\n for name, obj in inspect.getmembers(module):\n\n # skip internal attributes\n # ------------------------\n # assuming anything starting with _, including __\n if name.startswith('_'):\n continue\n # skip non-class items\n # --------------------\n if not inspect.isclass(obj):\n continue\n\n # Only get the parsers\n\n if not issubclass(obj, MetaParser):\n continue\n\n if 
obj is MetaParser:\n continue\n\n # Assumption!\n if 'schema' in name.lower():\n continue\n\n if name in EXCLUDE_PARSERS:\n continue\n\n # Rest is good\n if dirname not in self.parsers:\n self.parsers[dirname] = {}\n if filename.stem not in self.parsers[dirname]:\n self.parsers[dirname][filename.stem] = []\n\n self.parsers[dirname][filename.stem].append(obj)\n\n def create_yaml_files(self, datafile):\n # Load existing Yaml file (If any)\n # Load main verifications datafiles\n #main_file = OrderedDict()\n main_file = {}\n nxos_file = {'extends': '%CALLABLE{genie.libs.sdk.genie_yamls.datafile(verification)}'}\n iosxe_file = {'extends': '%CALLABLE{genie.libs.sdk.genie_yamls.datafile(verification)}'}\n ios_file = {'extends': '%CALLABLE{genie.libs.sdk.genie_yamls.datafile(verification)}'}\n iosxr_file = {'extends': '%CALLABLE{genie.libs.sdk.genie_yamls.datafile(verification)}'}\n junos_file = {'extends': '%CALLABLE{genie.libs.sdk.genie_yamls.datafile(verification)}'}\n nxos = []\n iosxe = []\n ios = []\n iosxr = []\n junos = []\n ios = []\n # Load the file\n with open(datafile, 'r') as f:\n parser_yaml = yaml.safe_load(f)\n\n main_yaml = os.path.join(YAMLS, 'verification_datafile.yaml')\n with open(main_yaml, 'r') as f:\n content = yaml.safe_load(f)\n\n nxos_yaml = os.path.join(YAMLS, 'nxos', 'verification_datafile_nxos.yaml')\n with open(nxos_yaml, 'r') as f:\n nxos_content = yaml.safe_load(f)\n\n iosxe_yaml = os.path.join(YAMLS, 'iosxe', 'verification_datafile_iosxe.yaml')\n with open(iosxe_yaml, 'r') as f:\n iosxe_content = yaml.safe_load(f)\n\n ios_yaml = os.path.join(YAMLS, 'ios', 'verification_datafile_ios.yaml')\n with open(ios_yaml, 'r') as f:\n ios_content = yaml.safe_load(f)\n\n iosxr_yaml = os.path.join(YAMLS, 'iosxr', 'verification_datafile_xr.yaml')\n with open(iosxr_yaml, 'r') as f:\n iosxr_content = yaml.safe_load(f)\n\n junos_yaml = os.path.join(YAMLS, 'junos', 'verification_datafile_junos.yaml')\n with open(junos_yaml, 'r') as f:\n junos_content = yaml.safe_load(f)\n\n # All parser should be in this verification datafile\n for osx in self.parsers:\n if osx == 'nxos':\n os_yaml = nxos_content\n os_file = nxos_file\n triggers = nxos\n elif osx == 'iosxe':\n os_yaml = iosxe_content\n os_file = iosxe_file\n triggers = iosxe\n elif osx == 'ios':\n os_yaml = ios_content\n os_file = ios_file\n triggers = ios\n elif osx == 'iosxr':\n os_yaml = iosxr_content\n os_file = iosxr_file\n triggers = iosxr\n elif osx == 'junos':\n os_yaml = junos_content\n os_file = junos_file\n triggers = junos\n\n for file in self.parsers[osx]:\n for parser in self.parsers[osx][file]:\n # Check which context exists\n for context in CONTEXTS:\n if not hasattr(parser, context):\n continue\n\n parser_name = parser.__name__\n # Verification name\n verification_name = 'Verify_{p}'.format(p=parser_name.replace('Show', ''))\n if context != 'cli':\n verification_name = '{v}_{c}'.format(v=verification_name, c=context)\n\n values = []\n if parser_name in parser_yaml:\n\n # initial index number\n index_num = None\n # For all of the combination, add it\n # Make the lists ready to go\n for key, items in sorted(parser_yaml[parser_name].items(), reverse=True):\n\n if isinstance(items, dict):\n if key not in parser.__module__:\n continue\n for ky, val in sorted(items.items(), reverse=True):\n count = 0\n if ky == 'zos':\n try:\n index_num = val.index(osx)\n except:\n values.append(None)\n break\n continue\n\n if index_num is not None:\n val = val[index_num]\n\n for item in val:\n if item == '' or item is None:\n count += 
1\n continue\n try:\n values[count].extend([ky, val[count]])\n except IndexError:\n values.append([ky, val[count]])\n count += 1\n\n else:\n count = 0\n if key == 'zos':\n try:\n index_num = items.index(osx)\n except:\n values.append(None)\n break\n continue\n\n if index_num is not None:\n items = items[index_num]\n\n for item in items:\n if item == '' or item is None:\n count += 1\n continue\n try:\n values[count].extend([key, items[count]])\n except IndexError:\n values.append([key, items[count]])\n count += 1\n else:\n values.append(None)\n\n for value in values:\n\n if value is not None:\n veri_name = '{v}_{e}'.format(v=verification_name,\n e='_'.join(value).replace(' ', '_'))\n else:\n veri_name = verification_name\n\n main_file[veri_name] = {}\n main_file[veri_name]['source'] = {'class':'genie.harness.base.Template'}\n main_file[veri_name]['context'] = context\n main_file[veri_name]['cmd'] = {}\n main_file[veri_name]['cmd']['pkg'] = 'genie.libs.parser'\n main_file[veri_name]['cmd']['class'] = '{f}.{p}'.format(f=file, p=parser.__name__)\n\n os_file[veri_name] = {}\n if veri_name not in EXCLUDE_DEVICES:\n os_file[veri_name]['devices'] = ['uut']\n\n if value is not None:\n for i in range(0,len(value),2):\n if value[i+1] != 'default':\n if 'parameters' not in os_file[veri_name]:\n os_file[veri_name]['parameters'] = {}\n os_file[veri_name]['parameters'][value[i]] = value[i+1]\n\n if veri_name in content:\n # Good already exists\n # Do not copy source and cmd\n # But keep the rest\n try:\n del content[veri_name]['source']\n except:\n pass\n try:\n del content[veri_name]['cmd']\n except:\n pass\n merge_dict(main_file[veri_name], content[veri_name])\n\n if veri_name in os_yaml:\n merge_dict(os_file[veri_name], os_yaml[veri_name])\n triggers.append(veri_name)\n\n # Create the files\n with open('verification_datafile.yaml', 'w') as f:\n yaml.dump(main_file, f, default_flow_style=False)\n\n self.clean_up('nxos')\n with open('nxos/verification_datafile_nxos.yaml', 'w') as f:\n yaml.dump(nxos_file, f, default_flow_style=False)\n\n self.clean_up('iosxe')\n with open('iosxe/verification_datafile_iosxe.yaml', 'w') as f:\n yaml.dump(iosxe_file, f, default_flow_style=False)\n\n self.clean_up('ios')\n with open('ios/verification_datafile_ios.yaml', 'w') as f:\n yaml.dump(ios_file, f, default_flow_style=False)\n\n self.clean_up('iosxr')\n with open('iosxr/verification_datafile_xr.yaml', 'w') as f:\n yaml.dump(iosxr_file, f, default_flow_style=False)\n\n self.clean_up('junos')\n with open('junos/verification_datafile_junos.yaml', 'w') as f:\n yaml.dump(junos_file, f, default_flow_style=False)\n\n log.info(banner('nxos'))\n log.info('\\n'.join(nxos))\n\n log.info(banner('iosxe'))\n log.info('\\n'.join(iosxe))\n\n log.info(banner('ios'))\n log.info('\\n'.join(ios))\n\n log.info(banner('iosxr'))\n log.info('\\n'.join(iosxr))\n\n log.info(banner('junos'))\n log.info('\\n'.join(junos))\n \n return main_file\n\n def clean_up(self, dir):\n if os.path.isdir(dir):\n shutil.rmtree(dir)\n os.makedirs(dir)\n\n\nif __name__ == '__main__':\n\n parser = argparse.ArgumentParser(description='')\n parser.add_argument('-datafile',\n metavar='FILE',\n type=str,\n default=None,\n help='File containing parser information')\n custom_args = parser.parse_known_args()[0]\n\n cv = CreateVerificationDataFiles()\n cv.find_all_parsers()\n mail_file = cv.create_yaml_files(custom_args.datafile)\n", "id": "2006584", "language": "Python", "matching_score": 3.8774518966674805, "max_stars_count": 0, "path": 
"pkgs/sdk-pkg/src/genie/libs/sdk/genie_yamls/tools/verification_generator/datafiles_maker.py" }, { "content": "import os\nimport sys\nimport yaml\nimport json\nimport pathlib\nimport logging\nimport inspect\nimport argparse\nimport importlib\nIGNORE_DIR = ['.git', '__pycache__', 'template', 'tests']\nIGNORE_FILE = ['__init__.py', 'base.py', 'common.py']\n\nlogging.basicConfig(stream=sys.stdout, level=logging.INFO)\nlog = logging.getLogger(__name__)\n\nclass CreateApiDoc(object):\n def __init__(self, datafile):\n assert 'VIRTUAL_ENV' in os.environ\n with open(datafile, 'r') as f:\n self.datafile = yaml.safe_load(f)\n self.output = {}\n\n def _expand(self, name):\n if '$env(VIRTUAL_ENV)' in name:\n # Replace '$env(VIRTUAL_ENV)' with the actual value\n return name.replace('$env(VIRTUAL_ENV)', os.environ['VIRTUAL_ENV'])\n return name\n\n def _find_ops(self, mod, tokens):\n for name, obj in inspect.getmembers(mod):\n # starts with _ are ignored\n if name.startswith('_'):\n continue\n # ignore the imported functions\n if inspect.isclass(obj) and obj.__module__ == mod.__name__:\n sub_dict = self.output.setdefault(mod.__name__.split('.')[-1], {})\n for token in tokens:\n if token not in sub_dict:\n sub_dict[token] = {}\n sub_dict = sub_dict[token]\n sub_dict['module_name'] = self._get_mod_name(mod)\n sub_dict['doc'] = obj.__doc__\n sub_dict['uid'] = name.lower()\n sub_dict['class_name'] = name\n line = inspect.getsourcelines(obj)[-1]\n\n temp_url = mod.__file__.replace(os.path.join(os.environ['VIRTUAL_ENV'], 'pypi', 'genielibs') + '/', '')\n style = self.root['url']['style']\n\n if style == 'bitbucket':\n url = '{p}{t}#{l}'.format(p=self.root['url']['link'], t=temp_url,\n l=line)\n elif style == 'github':\n\n url = self.root['url']['link'].format(\n branch=self.root['url']['branch'])\n url = '{p}{t}#L{l}'.format(p=url, t=temp_url, l=line)\n\n sub_dict['url'] = url\n\n def _get_mod_name(self, mod):\n mod_name = []\n name_list = mod.__name__.replace(self.root['root'], '').split('.')\n # if directory is abstracted\n for i, e in enumerate(name_list):\n if not hasattr(importlib.import_module(self.root['root'] + '.'.join(name_list[0:i+1])), '__abstract_token'):\n mod_name.append(e)\n return '.'.join(mod_name)[1:]\n\n def _add_ops(self, item, tokens):\n module_path = self.root['root'] + str(item).rsplit('.', 1)[0]. 
\\\n            replace(self.module_loc, '').replace('/', '.')\n        mod = importlib.import_module(module_path)\n        self._find_ops(mod, tokens)\n\n    def _is_abstract_dir(self, dir):\n        mod = str(dir).replace(self.module_loc, '').replace('/', '.')\n        return hasattr(importlib.import_module(mod, package=self.root['root']), '__abstract_token')\n\n    def _recursive_find(self, item, token):\n        for item in item.iterdir():\n            if item.is_dir():\n                if item.name in IGNORE_DIR:\n                    # Ignore\n                    continue\n                elif self._is_abstract_dir(item.as_posix()):\n                    self._recursive_find(item, token + [item.name])\n                else:\n                    self._recursive_find(item, token)\n            elif item.is_file():\n                if item.name in IGNORE_FILE or item.suffix != '.py':\n                    continue\n                # Then add it to the self.datafile\n                self._add_ops(item, token)\n\n    def find_all_apis(self):\n        if 'root_directories' not in self.datafile:\n            return {}\n\n        for name, values in self.datafile['root_directories'].items():\n            log.info(\"Learning '{name}'\".format(name=name))\n\n            # Figure out location of package so you can walk it\n            self.root = values\n            self.module_loc = importlib.import_module(self.root['root']).__path__[0]\n\n            # Walk all files in there and go through the APIs\n            self._recursive_find(pathlib.Path(self.module_loc), [])\n\n\nif __name__ == '__main__':\n\n    parser = argparse.ArgumentParser(description='')\n    parser.add_argument('-datafile',\n                        metavar='FILE',\n                        type=str,\n                        default=None,\n                        help='File containing directory information')\n    parser.add_argument('-save_location',\n                        metavar='FILE',\n                        type=str,\n                        default=None,\n                        help='Location to save the output file')\n    custom_args = parser.parse_known_args()[0]\n    apiDoc = CreateApiDoc(custom_args.datafile)\n    apiDoc.find_all_apis()\n    output = json.dumps(apiDoc.output)\n    os.makedirs(os.path.dirname(custom_args.save_location), exist_ok=True)\n    with open(custom_args.save_location, 'w+') as f:\n        f.write(output)", "id": "6873606", "language": "Python", "matching_score": 3.600632905960083, "max_stars_count": 0, "path": "pkgs/ops-pkg/ops_generator/ops.py" }, { "content": "import yaml\nimport argparse\nfrom genie.utils.diff import Diff\n\nif __name__ == '__main__':\n\n    parser = argparse.ArgumentParser(description='')\n    parser.add_argument('-original',\n                        metavar='FILE',\n                        type=str,\n                        default=None,\n                        help='File containing original information')\n\n    parser.add_argument('-new',\n                        metavar='FILE',\n                        type=str,\n                        default=None,\n                        help='File containing new information')\n    custom_args = parser.parse_known_args()[0]\n\n    with open(custom_args.original, 'r') as f:\n        original = f.read()\n\n    with open(custom_args.new, 'r') as f:\n        new = f.read()\n\n    new = yaml.safe_load(new)\n    original = yaml.safe_load(original)\n\n    diff = Diff(original, new)\n    diff.findDiff()\n    print(diff)\n\n\n", "id": "960046", "language": "Python", "matching_score": 0.8637972474098206, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/genie_yamls/tools/verification_generator/differ.py" }, { "content": "import os\nfrom pyats.utils.fileutils import FileUtils\n\n\nclass TcpDump(object):\n    '''\n    Run TcpDump command to a linux server\n    Can save pcap file if pcap_file is provided\n    If a local_dir is provided, it will copy back the pcap file to this\n    directory\n\n    Example:\n        from genie.libs.sdk.libs.utils.tcpdump import TcpDump\n\n        # Local dir should be runinfo as we need to keep all those pcap\n        # files with the testcase.\n        pcap_file = testcasename.pcap\n        # local_dir = runinfo/pcap directory\n        t = TcpDump(d, pcap_file = '/tmp/output23.pcap',\n                    local_dir='/Users/jeaubin/pcap')\n        d.connect()\n\n        
t.send('tcpdump -i any udp port 514 -vn')\n # wait some time\n t.stop()\n x = t.parse()\n x[0]['IP']['dst']\n '''\n def __init__(self, device, pcap_file=None, protocol='scp', local_dir=None):\n self.device = device\n self.protocol = protocol\n self.pcap_file = pcap_file\n self.local_dir = local_dir\n\n def send(self, cmd):\n cmd = '{c}'.format(c=cmd)\n if self.pcap_file:\n cmd = '{c} -w {pf}'.format(c=cmd, pf=self.pcap_file)\n self.device.send(cmd+'\\n')\n\n def stop(self):\n # Get buffer\n output = self.device.expect(\".*\")\n\n # send cntrl+c\n self.device.send('\\x03')\n # Bring back the pcap file if any\n if self.pcap_file and self.local_dir:\n # Copy it back to local host\n\n # Find server\n servers = self.device.testbed.servers\n\n # Check if there is a self.protocol server\n if self.protocol not in servers:\n raise Exception(\"'{p}' server missing in the testbed \"\n \"yaml file\".format(p=self.protocol))\n\n # Find ip\n ip = servers[self.protocol]['address']\n port = servers[self.protocol].get('custom', {}).get('port', 22)\n local_file = os.path.join(self.local_dir,\n os.path.basename(self.pcap_file))\n\n # Create directory if doesnt exists\n os.makedirs(self.local_dir, exist_ok=True)\n with FileUtils(testbed=self.device.testbed) as futils:\n futils.get_child(self.protocol)\n futils.children[self.protocol].SSH_DEFAULT_PORT = port\n futils.copyfile(\n source = '{p}://{i}/{path}'.format(p=self.protocol, i=ip,\n path=self.pcap_file),\n destination = self.local_dir)\n return output\n\n def parse(self):\n try:\n from scapy.all import rdpcap\n except ImportError:\n raise ImportError('scapy is not installed, please install it by running: '\n 'pip install scapy') from None\n\n local_file = os.path.join(self.local_dir,\n os.path.basename(self.pcap_file))\n # Make sure it is not of size 0\n if os.path.isfile(local_file) and os.stat(local_file).st_size:\n return rdpcap(local_file)\n", "id": "2900664", "language": "Python", "matching_score": 1.7422350645065308, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/libs/utils/tcpdump.py" }, { "content": "from ats.utils.fileutils import FileUtils\n\ndef show_tech(device, features, vrf='management'):\n if not hasattr(device, 'filetransfer_attributes'):\n raise Exception('No information about file transfer on the device')\n\n address = device.filetransfer_attributes['server_address']\n protocol = device.filetransfer_attributes['protocol']\n path = device.filetransfer_attributes.get('path', '')\n\n for feature in features:\n file_ = '{p}_{f}'.format(p=path, f=feature)\n url = '{p}://{address}/{file}'.format(p=protocol,\n address=address, file=file_)\n device.execute('show tech-support {f} > {u} vrf {v}'.format(f=feature,\n u=url,\n v=vrf))\n", "id": "4730866", "language": "Python", "matching_score": 0.31934040784835815, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/libs/abstracted_libs/nxos/show_tech.py" }, { "content": "\"\"\"Common verify functions for SNMP\"\"\"\r\n\r\n# Python\r\nimport logging\r\n\r\n# Genie\r\nfrom pyats.async_ import pcall\r\n\r\n# SNMP\r\nfrom genie.libs.sdk.apis.iosxe.snmp.get import get_snmp_cli_dict, get_snmp_dict\r\n\r\nlog = logging.getLogger(__name__)\r\n\r\n\r\ndef verify_cli_and_snmp_cpu_memory(\r\n device,\r\n snmp_device,\r\n community,\r\n ip_address,\r\n oid,\r\n id_slot_map,\r\n snmp_map,\r\n version,\r\n load_tolerance=0,\r\n memory_tolerance=500,\r\n):\r\n \"\"\" Verify CPU and Memory usage information from \r\n CLI and SNMP are equivalent\r\n\r\n Args:\r\n device (`obj`): Device 
object\r\n snmp_device (`obj`): SNMP device\r\n community (`str`): Community name\r\n ip_address (`str`): IP address\r\n oid (`str`): Oid code\r\n id_slot_map (`dict`): Id-slot mapping\r\n ex: {\"1\": \"sip1\", \"7\": \"rp0\", \"9\": \"esp0\"}\r\n snmp_map (`dict`): SNMP-CLI mapping\r\n ex: {'12': 'used', '13': 'free', '24': '1_min',\r\n '25': '5_min', '26': '15_min', '27': 'committed'}\r\n version (`str`): SNMP version\r\n load_tolerance (`int`): Tolerance for load information\r\n memory_tolerance (`int`): Tolerance for memory information\r\n Returns:\r\n result (`bool`): Verified result\r\n Raises:\r\n None\r\n \"\"\"\r\n result = True\r\n try:\r\n cli_dict, snmp_dict = pcall(\r\n [get_snmp_cli_dict, get_snmp_dict],\r\n iargs=[\r\n [device],\r\n [\r\n snmp_device,\r\n community,\r\n ip_address,\r\n oid,\r\n id_slot_map,\r\n snmp_map,\r\n version,\r\n ],\r\n ],\r\n )\r\n except Exception as e:\r\n log.error(\r\n \"Failed to get CPU and Memory information \"\r\n \"from CLI and SNMP:\\n{}\".format(e)\r\n )\r\n return False\r\n\r\n if not snmp_dict or not cli_dict:\r\n log.error(\r\n \"Failed to get CPU and Memory information \"\r\n \"from CLI and SNMP\"\r\n )\r\n return False\r\n\r\n for slot, data in snmp_dict.items():\r\n if slot in cli_dict:\r\n for key, value in data.items():\r\n if \"min\" in key:\r\n cli_value = (\r\n cli_dict.get(slot, {})\r\n .get(\"load_average\", {})\r\n .get(key, 0)\r\n * 100\r\n )\r\n if abs(value - cli_value) > load_tolerance:\r\n log.error(\r\n \"Load average {} of {} didn't match:\\n\"\r\n \"SNMP value: {} CLI value: {}\\n \".format(\r\n key, slot, value, cli_value\r\n )\r\n )\r\n result = False\r\n else:\r\n log.info(\r\n \"Load average {} of {} matched:\\n\"\r\n \"SNMP value: {} CLI value: {}\\n \".format(\r\n key, slot, value, cli_value\r\n )\r\n )\r\n else:\r\n cli_value = (\r\n cli_dict.get(slot, {}).get(\"memory\", {}).get(key, 0)\r\n )\r\n if abs(value - cli_value) > memory_tolerance:\r\n log.error(\r\n \"Memory {} of {} did't match:\\n\"\r\n \"SNMP value: {} CLI value: {}\\n \".format(\r\n key, slot, value, cli_value\r\n )\r\n )\r\n result = False\r\n else:\r\n log.info(\r\n \"Memory {} of {} matched:\\n\"\r\n \"SNMP value: {} CLI value: {}\\n \".format(\r\n key, slot, value, cli_value\r\n )\r\n )\r\n else:\r\n log.error(\"Slot {} is not in parsed CLI output\".format(slot))\r\n result = False\r\n\r\n return result\r\n\r\n\r\ndef is_snmp_message_received(message, server, output):\r\n \"\"\" Verify if a message was received in snmp server\r\n Args:\r\n output ('obj'): Tcpdump output\r\n server ('str'): Syslog server address\r\n message ('str'): Message to be verified in Syslog server\r\n Returns:\r\n True\r\n False\r\n Raises:\r\n None\r\n \"\"\"\r\n\r\n for packet in output:\r\n if packet.dst == server:\r\n try:\r\n varbindlist = (\r\n packet.getlayer(\"IP\")\r\n .getlayer(\"UDP\")\r\n .getlayer(\"SNMP\")\r\n .getlayer(\"SNMPtrapv1\")\r\n .varbindlist\r\n )\r\n\r\n except AttributeError:\r\n continue\r\n\r\n for item in varbindlist:\r\n try:\r\n output = item.value.val.decode()\r\n except (AttributeError, UnicodeDecodeError):\r\n continue\r\n\r\n if message in output:\r\n log.info(\r\n \"Message '{message}' has been found in SNMP \"\r\n \"server {ip}\".format(message=message, ip=server)\r\n )\r\n log.info(\"Packet details\")\r\n log.info(packet.show(dump=True))\r\n return True\r\n\r\n log.error(\r\n \"Message '{message}' has not been found in SNMP \"\r\n \"server {ip}\".format(message=message, ip=server)\r\n )\r\n\r\n return False\r\n", "id": 
"5169616", "language": "Python", "matching_score": 3.9498534202575684, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/snmp/verify.py" }, { "content": "\"\"\"Common get info functions for SNMP\"\"\"\n\n# Python\nimport re\nimport logging\n\n# Genie\nfrom genie.metaparser.util.exceptions import SchemaEmptyParserError\n\nlog = logging.getLogger(__name__)\n\n\ndef get_snmp_snmpwalk(\n device, community, ip_address, oid, version=\"2c\", option=None\n):\n \"\"\" Get snmpwalk output from SNMP device\n\n Args:\n device (`obj`): SNMP device\n community (`str`): Community name\n ip_address (`str`): IP address\n oid (`str`): Oid code\n version (`str`): SNMP version\n option (`str`): Optional command\n Returns:\n out (`str`): Executed output of SNMP command\n Raises:\n None\n \"\"\"\n if option:\n cmd = \"snmpwalk -v {version} -c {community} {ip_address} {oid} {option}\".format(\n version=version,\n community=community,\n ip_address=ip_address,\n oid=oid,\n option=option,\n )\n else:\n cmd = \"snmpwalk -v {version} -c {community} {ip_address} {oid}\".format(\n version=version,\n community=community,\n ip_address=ip_address,\n oid=oid,\n )\n\n return device.execute(cmd)\n\n\ndef get_snmp_id_slot_map(device, community, ip_address, oids, version=\"2c\"):\n \"\"\" Get id-slot mapping from SNMP server\n\n Args:\n device (`obj`): SNMP device\n community (`str`): Community name\n ip_address (`str`): IP address\n oids (`list`): Oid codes\n version (`str`): SNMP version\n Returns:\n id_slot_map (`dict`): Id slot mapping\n ex: {\"1\": \"sip1\", \"7\": \"rp0\", \"9\": \"esp0\"}\n Raises:\n None\n \"\"\"\n sub_map1 = {}\n pids = []\n out1 = get_snmp_snmpwalk(device, community, ip_address, oids[0], version)\n\n # SNMPv2-SMI::enterprises.10.69.109.1.10.4.1.2.9 = INTEGER: 9036\n p = re.compile(r\"\\.(?P<sid>[\\d]+) += +.*: +(?P<pid>[\\d]+)\")\n found1 = p.findall(out1)\n\n for item in found1:\n sub_map1.update({item[0]: item[1]})\n pids.append(item[1])\n\n sub_map2 = {}\n option = \"| grep -E '{}'\".format(\"|\".join(pids))\n out2 = get_snmp_snmpwalk(\n device, community, ip_address, oids[1], version, option\n )\n\n # SNMPv2-SMI::mib-10.106.1.1.1.1.7.7031 = STRING: \"cpu R0/0\"\n p2 = re.compile(\n r'(?P<pid>{}).* += +.*: +\"cpu +(?P<slot>[\\w]+)\\/'.format(\n \"|\".join(pids)\n )\n )\n found2 = p2.findall(out2)\n\n for item in found2:\n if re.match(r\"^\\d\", item[1]):\n slot = \"sip\" + item[1]\n else:\n slot = item[1].replace(\"F\", \"esp\").replace(\"R\", \"rp\")\n sub_map2.update({item[0]: slot.lower()})\n\n id_slot_map = {}\n for sid, pid in sub_map1.items():\n slot = sub_map2.get(pid)\n id_slot_map.update({sid: slot})\n\n return id_slot_map\n\n\ndef get_snmp_dict(\n snmp_device,\n community,\n ip_address,\n oid,\n id_slot_map,\n snmp_map,\n version=\"2c\",\n):\n \"\"\" Get CPU and memory usage information from SNMP device\n\n Args:\n snmp_device (`obj`): SNMP device\n community (`str`): Community name\n ip_address (`str`): IP address\n oid (`str`): Oid code\n id_slot_map (`dict`): Id-slot mapping\n ex: {\"1\": \"sip1\", \"7\": \"rp0\", \"9\": \"esp0\"}\n snmp_map (`dict`): SNMP-CLI mapping\n ex: {'12': 'used', '13': 'free', '24': '1_min',\n '25': '5_min', '26': '15_min', '27': 'committed'}\n version (`str`): SNMP version\n Returns:\n snmp_dict (`dict`): Information dictionary\n ex: {\"sip0\": {\n \"used\": 575640,\n \"free\": 389036,\n \"1_min\": 3,\n \"5_min\": 4,\n \"15_min\": 0,\n \"committed\": 869368}}\n \"\"\"\n out = get_snmp_snmpwalk(snmp_device, community, ip_address, oid, 
version)\n\n    snmp_dict = {}\n\n    for key, slot in id_slot_map.items():\n        slot_dict = snmp_dict.setdefault(slot, {})\n\n        # SNMPv2-SMI::enterprises.10.69.109.1.10.4.1.12.1 = Gauge32: 465360\n        p = re.compile(\n            r\"(?P<lid>[\\d]+)\\.{} += +.*: +(?P<value>[\\d]+)\".format(key)\n        )\n        found = p.finditer(out)\n\n        for item in found:\n            lid = item.groups()[0]\n            value = item.groups()[1]\n            if lid in snmp_map:\n                slot_dict.update({snmp_map[lid]: int(value)})\n\n    return snmp_dict\n\n\ndef get_snmp_cli_dict(device):\n    \"\"\" Get CPU and memory usage information from CLI\n\n    Args:\n        device (`obj`): Device object\n    Returns:\n        out (`dict`): Information dictionary\n            ex: {\"sip0\": {\n                \"load_average\": {\n                    \"status\": \"healthy\",\n                    \"1_min\": 0.07,\n                    \"5_min\": 0.02,\n                    \"15_min\": 0.0\n                },\n                \"memory\": {\n                    \"status\": \"healthy\",\n                    \"total\": 964676,\n                    \"used\": 575896,\n                    \"used_percentage\": 60,\n                    \"free\": 388780,\n                    \"free_percentage\": 40,\n                    \"committed\": 869972,\n                    \"committed_percentage\": 90\n                },\n                \"cpu\": {\n                    \"0\": {\n                        \"user\": 3.4,\n                        \"system\": 0.8,\n                        \"nice_process\": 0.0,\n                        \"idle\": 95.69,\n                        \"irq\": 0.0,\n                        \"sirq\": 0.1,\n                        \"waiting\": 0.0\n                    }\n                }}}\n    Raises:\n        None\n    \"\"\"\n    try:\n        out = device.parse(\n            \"show platform software status control-processor brief\"\n        )\n    except SchemaEmptyParserError:\n        return {}\n\n    return out[\"slot\"]\n", "id": "5453626", "language": "Python", "matching_score": 1.1931877136230469, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/snmp/get.py" }, { "content": "class PlatformOutput(object):\n showVersion = {'platform':\n {'reason': 'Reset Requested by CLI command reload',\n 'system_version': '6.2(6)',\n 'os': 'NX-OS',\n 'hardware': \n {'bootflash': '2007040',\n 'chassis': '(\"Supervisor Module-2\")',\n 'cpu': 'Intel(R) Xeon(R)',\n 'device_name': 'PE1',\n 'memory': '32938744',\n 'model': 'Nexus7000 C7009',\n 'processor_board_id': 'JAF1708AAKL',\n 'slots': '9'}, \n 'kernel_uptime': \n {'days': '0',\n 'hours': '0',\n 'minutes': '53',\n 'seconds': '5'},\n 'software': \n {'bios': 'version 2.12.0',\n 'bios_compile_time': '05/29/2013',\n 'kickstart': 'version 8.1(1) [build 8.1(0.129)] [gdb]',\n 'kickstart_compile_time': '4/30/2017 23:00:00 [04/15/2017 ''04:34:05]',\n 'kickstart_image_file': 'slot0:///n7000-s2-kickstart.10.81.0.129.gbin',\n 'system_version': 'version 8.1(1) [build 8.1(0.129)] [gdb]',\n 'system_compile_time': '4/30/2017 23:00:00 [04/15/2017 ''06:43:41]',\n 'system_image_file': 'slot0:///n7000-s2-dk10.172.16.31.10.gbin'}\n }\n }\n\n\n showInventory = {'name':\n {'Chassis':\n {'description': 'Nexus7000 C7009 (9 Slot) Chassis ',\n 'pid': 'N7K-C7009',\n 'slot': 'None',\n 'vid': 'V01',\n 'serial_number': 'JAF1704ARQG'},\n 'Slot 1':\n {'description': 'Supervisor Module-2',\n 'pid': 'N7K-SUP2',\n 'slot': '1',\n 'vid': 'V01',\n 'serial_number': 'JAF1708AGTH'},\n 'Slot 2':\n {'description': 'Supervisor Module-2',\n 'pid': 'N7K-SUP2',\n 'slot': '2',\n 'vid': 'V01',\n 'serial_number': 'JAF1708AGQH'},\n 'Slot 3':\n {'description': '1/10 Gbps Ethernet Module',\n 'pid': 'N7K-F248XP-25E',\n 'slot': '3',\n 'vid': 'V01',\n 'serial_number': 'JAF1717AAND'},\n 'Slot 4':\n {'description': '10/40 Gbps Ethernet Module',\n 'pid': 'N7K-F312FQ-25',\n 'slot': '4',\n 'vid': 'V01',\n 'serial_number': 'JAE18120FLU'},\n 'Slot 33':\n {'description': 'Nexus7000 C7009 (9 Slot) Chassis Power Supply',\n 'pid': 'N7K-AC-6.0KW',\n 'slot': '33',\n 'vid': 'V03',\n 'serial_number': 'DTM171300QB'},\n 'Slot 35':\n {'description': 'Nexus7000 C7009 (9 Slot) Chassis 
Fan Module',\n 'pid': 'N7K-C7009-FAN',\n 'slot': '35',\n 'vid': 'V01',\n 'serial_number': 'JAF1702AEBE'}\n }\n }\n\n showInstallActive = {'boot_images':\n {'kickstart_image': 'slot0:/n7000-s2-kickstart.8.3.0.CV.0.658.gbin',\n 'system_image': 'slot0:/n7000-s2-dk10.34.3.0.CV.0.658.gbin'},\n 'active_packages':\n {'active_package_module_0':\n {'active_package_name': 'n7700-s2-dk10.1.2.0.D1.1.CSCuo7721.bin'},\n 'active_package_module_3': \n {'active_package_name': 'n7700-s2-dk10.1.2.0.D1.1.CSCuo7721.bin'}\n }\n }\n\n showSystemRedundancyStatus = {'redundancy_mode':\n {'administrative': 'HA',\n 'operational': 'HA'},\n 'supervisor_1':\n {'redundancy_state': 'Active',\n 'supervisor_state': 'Active',\n 'internal_state':'Active with HA standby'},\n 'supervisor_2':\n {'redundancy_state': 'Standby',\n 'supervisor_state': 'HA standby',\n 'internal_state':'HA standby'},\n }\n\n showRedundancyStatus = {'redundancy_mode':\n {'administrative': 'HA',\n 'operational': 'HA'},\n 'supervisor_1':\n {'redundancy_state': 'Active',\n 'supervisor_state': 'Active',\n 'internal_state':'Active with HA standby'},\n 'supervisor_2':\n {'redundancy_state': 'Standby',\n 'supervisor_state': 'HA standby',\n 'internal_state':'HA standby'},\n 'system_start_time': 'Fri Apr 21 01:53:24 2017',\n 'system_uptime': '0 days, 7 hours, 57 minutes, 30 seconds',\n 'kernel_uptime': '0 days, 8 hours, 0 minutes, 56 seconds',\n 'active_supervisor_time': '0 days, 7 hours, 57 minutes, 30 seconds'}\n\n showBoot = {'current_boot_variable':\n {'sup_number':\n {'sup-1':\n {'kickstart_variable': 'slot0:/n7000-s2-kickstart.8.3.0.CV.0.658.gbin',\n 'system_variable': 'slot0:/n7000-s2-dk10.34.3.0.CV.0.658.gbin',\n 'boot_poap':'Disabled'},\n 'sup-2':\n {'kickstart_variable': 'slot0:/n7000-s2-kickstart.8.3.0.CV.0.658.gbin',\n 'system_variable': 'slot0:/n7000-s2-dk10.34.3.0.CV.0.658.gbin',\n 'boot_poap':'Disabled'}\n }\n },\n 'next_reload_boot_variable':\n {'sup_number':\n {'sup-1':\n {'kickstart_variable': 'slot0:/n7000-s2-kickstart.8.3.0.CV.0.658.gbin',\n 'system_variable': 'slot0:/n7000-s2-dk10.34.3.0.CV.0.658.gbin',\n 'boot_poap':'Disabled'},\n 'sup-2':\n {'kickstart_variable': 'slot0:/n7000-s2-kickstart.8.3.0.CV.0.658.gbin',\n 'system_variable': 'slot0:/n7000-s2-dk10.34.3.0.CV.0.658.gbin',\n 'boot_poap':'Disabled'}\n }\n }\n }\n\n showBootEmpty = {}\n\n showModule = {'slot':\n {'rp':\n {'1':\n {'Supervisor Module-2':\n {'ports': '0',\n 'model': 'N7K-SUP2',\n 'status': 'active',\n 'software': '8.3(0)CV(0.658)',\n 'hardware': '1.0',\n 'mac_address': '84-78-ac-0f-c4-cd to 84-78-ac-0f-c4-df',\n 'serial_number': 'JAF1708AGTH',\n 'online_diag_status': 'Pass'}\n },\n '2':\n {'Supervisor Module-2':\n {'ports': '0',\n 'model': 'N7K-SUP2',\n 'status': 'ha-standby',\n 'software': '8.3(0)CV(0.658)',\n 'hardware': '1.0',\n 'mac_address': '84-78-ac-0f-b9-00 to 84-78-ac-0f-b9-12',\n 'serial_number': 'JAF1708AGQH',\n 'online_diag_status': 'Pass'}\n }\n },\n 'lc':\n {'3':\n {'1/10 Gbps Ethernet Module':\n {'ports': '48',\n 'model': 'N7K-F248XP-25E',\n 'status': 'ok',\n 'software': '8.3(0)CV(0.658)',\n 'hardware': '1.0',\n 'mac_address': '84-78-ac-18-dd-30 to 84-78-ac-18-dd-63',\n 'serial_number': 'JAF1717AAND',\n 'online_diag_status': 'Pass'}\n },\n '4':\n {'10/40 Gbps Ethernet Module':\n {'ports': '12',\n 'model': 'N7K-F312FQ-25',\n 'status': 'ok',\n 'software': '8.3(0)CV(0.658)',\n 'hardware': '1.0',\n 'mac_address': '54-4a-00-ad-19-40 to 54-4a-00-ad-19-7b',\n 'serial_number': 'JAE18120FLU',\n 'online_diag_status': 'Pass'}\n },\n '6':\n {'10 Gbps Ethernet XL 
Module':\n {'ports': '32',\n 'model': 'N7K-M132XP-12L',\n 'status': 'ok',\n 'software': '8.3(0)CV(0.658)',\n 'hardware': '2.0',\n 'mac_address': 'bc-16-65-54-af-64 to bc-16-65-54-af-87',\n 'serial_number': 'JAF1719AHMB',\n 'online_diag_status': 'Pass'}\n },\n '7':\n {'10 Gbps Ethernet Module':\n {'ports': '24',\n 'model': 'N7K-M224XP-23L',\n 'status': 'ok',\n 'software': '8.3(0)CV(0.658)',\n 'hardware': '1.0',\n 'mac_address': 'd8-67-d9-0e-91-c8 to d8-67-d9-0e-91-e3',\n 'serial_number': 'JAF1641APPF',\n 'online_diag_status': 'Pass'}\n },\n '8':\n {'10/100/1000 Mbps Ethernet XL Module':\n {'ports': '48',\n 'model': 'N7K-M148GT-11L',\n 'status': 'ok',\n 'software': '8.3(0)CV(0.658)',\n 'hardware': '2.1',\n 'mac_address': 'bc-16-65-3a-b8-d0 to bc-16-65-3a-b9-03',\n 'serial_number': 'JAF1717BEAT',\n 'online_diag_status': 'Pass'}\n }\n }\n },\n 'xbar':\n {'1':\n {'ports': '0',\n 'module_type': 'Fabric Module 2',\n 'model': 'N7K-C7009-FAB-2',\n 'status': 'ok',\n 'software': 'NA',\n 'hardware': '3.1',\n 'mac_address': 'NA',\n 'serial_number': 'JAF1705AEEF'},\n '2':\n {'ports': '0',\n 'module_type': 'Fabric Module 2',\n 'model': 'N7K-C7009-FAB-2',\n 'status': 'ok',\n 'software': 'NA',\n 'hardware': '3.1',\n 'mac_address': 'NA',\n 'serial_number': 'JAF1705BFBM'},\n '3':\n {'ports': '0',\n 'module_type': 'Fabric Module 2',\n 'model': 'N7K-C7009-FAB-2',\n 'status': 'ok',\n 'software': 'NA',\n 'hardware': '3.1',\n 'mac_address': 'NA',\n 'serial_number': 'JAF1705AELK'},\n '4':\n {'ports': '0',\n 'module_type': 'Fabric Module 2',\n 'model': 'N7K-C7009-FAB-2',\n 'status': 'ok',\n 'software': 'NA',\n 'hardware': '3.1',\n 'mac_address': 'NA',\n 'serial_number': 'JAF1705BFCF'},\n '5':\n {'ports': '0',\n 'module_type': 'Fabric Module 2',\n 'model': 'N7K-C7009-FAB-2',\n 'status': 'ok',\n 'software': 'NA',\n 'hardware': '3.1',\n 'mac_address': 'NA',\n 'serial_number': 'JAF1704APQH'}\n }\n }\n\n directory = {'files':\n {'.patch/': \n {'size': '4096', 'date': 'Apr 20 2017', 'time': '10:23:05'},\n '20170202_074746_poap_7537_init.log': \n {'size': '1398', 'date': 'Feb 02 2017', 'time': '00:48:18'},\n 'ethpm_act_logs.log': \n {'size': '251599', 'date': 'Mar 15 2017', 'time': '10:35:50'},\n 'ethpm_im_tech.log': \n {'size': '1171318', 'date': 'Mar 15 2017', 'time': '10:35:55'},\n 'ethpm_mts_details.log': \n {'size': '3837', 'date': 'Mar 15 2017', 'time': '10:35:50'},\n 'ethpm_syslogs.log': \n {'size': '81257', 'date': 'Mar 15 2017', 'time': '10:35:50'},\n 'ethpm_tech.log': \n {'size': '3930383', 'date': 'Mar 15 2017', 'time': '10:35:55'},\n 'fault-management-logs/': \n {'size': '24576', 'date': 'Apr 21 2017', 'time': '04:18:28'},\n 'lost+found/': \n {'size': '4096', 'date': 'Nov 23 2016', 'time': '08:25:40'},\n 'n7000-s2-debug-sh.10.81.0.125.gbin': \n {'size': '4073830', 'date': 'Apr 20 2017', 'time': '10:19:08'},\n 'virtual-instance-stby-sync/': \n {'size': '4096', 'date': 'Apr 20 2017', 'time': '10:28:55'}\n },\n 'dir': 'bootflash:',\n 'disk_used_space': '108449792',\n 'disk_free_space': '1674481664',\n 'disk_total_space': '1782931456'\n }\n\n showVdcDetail = {'vdc':\n {'1':\n {'name': 'PE1',\n 'state': 'active',\n 'mac_address': '84:78:ac:5a:86:c1',\n 'ha_policy': 'RELOAD',\n 'dual_sup_ha_policy': 'SWITCHOVER',\n 'boot_order': '1',\n 'cpu_share': '5',\n 'cpu_share_percentage': '33%',\n 'create_time': 'Fri Apr 28 03:36:26 2017',\n 'reload_count': '0',\n 'uptime': '0 day(s), 10 hour(s), 35 minute(s), 47 second(s)',\n 'restart_count': '1',\n 'restart_time': 'Fri Apr 28 03:36:26 2017',\n 'type': 'Ethernet',\n 
'supported_linecards': 'f3'},\n '2':\n {'name': 'PE2',\n 'state': 'active',\n 'mac_address': '84:78:ac:5a:86:c2',\n 'ha_policy': 'RESTART',\n 'dual_sup_ha_policy': 'SWITCHOVER',\n 'boot_order': '1',\n 'cpu_share': '5',\n 'cpu_share_percentage': '33%',\n 'create_time': 'Fri Apr 28 03:48:01 2017',\n 'reload_count': '0',\n 'uptime': '0 day(s), 10 hour(s), 25 minute(s), 2 second(s)',\n 'restart_count': '1',\n 'restart_time': 'Fri Apr 28 03:48:01 2017',\n 'type': 'Ethernet',\n 'supported_linecards': 'f3'},\n '3':\n {'name': 'CORE',\n 'state': 'active',\n 'mac_address': '84:78:ac:5a:86:c3',\n 'ha_policy': 'RESTART',\n 'dual_sup_ha_policy': 'SWITCHOVER',\n 'boot_order': '1',\n 'cpu_share': '5',\n 'cpu_share_percentage': '33%',\n 'create_time': 'Fri Apr 28 03:49:33 2017',\n 'reload_count': '0',\n 'uptime': '0 day(s), 10 hour(s), 23 minute(s), 39 second(s)',\n 'restart_count': '1',\n 'restart_time': 'Fri Apr 28 03:49:33 2017',\n 'type': 'Ethernet',\n 'supported_linecards': 'f3'}\n }\n }\n\n showVdcCurrent = {'current_vdc':\n {'id': '1',\n 'name': 'PE1'}\n }\n\n showVdcMembershipStatus = {'virtual_device':\n {'0':\n {'membership':\n {'Unallocated':\n {'Eth3/1':\n {'vd_ms_name': 'Eth3/1',\n 'vd_ms_status': 'OK',\n 'vd_ms_type': 'Ethernet'},\n 'Eth3/2':\n {'vd_ms_name': 'Eth3/2',\n 'vd_ms_status': 'OK',\n 'vd_ms_type': 'Ethernet'}\n }\n }\n },\n '1':\n {'membership':\n {'PE1':\n {'Eth4/5':\n {'vd_ms_name': 'Eth4/5',\n 'vd_ms_status': 'OK',\n 'vd_ms_type': 'Ethernet'},\n 'Eth4/6':\n {'vd_ms_name': 'Eth4/6',\n 'vd_ms_status': 'OK',\n 'vd_ms_type': 'Ethernet'}\n }\n }\n },\n '2':\n {'membership':\n {'PE2':\n {'Eth4/3':\n {'vd_ms_name': 'Eth4/3',\n 'vd_ms_status': 'OK',\n 'vd_ms_type': 'Ethernet'},\n 'Eth4/4':\n {'vd_ms_name': 'Eth4/4',\n 'vd_ms_status': 'OK',\n 'vd_ms_type': 'Ethernet'}\n }\n }\n },\n '3':\n {'membership':\n {'CORE':\n {'Eth4/1':\n {'vd_ms_name': 'Eth4/1',\n 'vd_ms_status': 'OK',\n 'vd_ms_type': 'Ethernet'},\n 'Eth4/2(b)':\n {'vd_ms_name': 'Eth4/2(b)',\n 'vd_ms_status': 'OK',\n 'vd_ms_type': 'Ethernet'}\n }\n }\n }\n }\n }\n\n slot = {\n \"oc\": {\n \"1\": {\n \"ports\": \"0\",\n \"module_type\": \"Fabric Module 2\",\n \"model\": \"N7K-C7009-FAB-2\",\n \"status\": \"ok\",\n \"software\": \"NA\",\n \"hardware\": \"3.1\",\n \"mac_address\": \"NA\",\n \"serial_number\": \"JAF1705AEEF\",\n \"state\": \"ok\",\n \"name\": \"Fabric Module 2\"\n },\n \"2\": {\n \"ports\": \"0\",\n \"module_type\": \"Fabric Module 2\",\n \"model\": \"N7K-C7009-FAB-2\",\n \"status\": \"ok\",\n \"software\": \"NA\",\n \"hardware\": \"3.1\",\n \"mac_address\": \"NA\",\n \"serial_number\": \"JAF1705BFBM\",\n \"state\": \"ok\",\n \"name\": \"Fabric Module 2\"\n },\n \"3\": {\n \"ports\": \"0\",\n \"module_type\": \"Fabric Module 2\",\n \"model\": \"N7K-C7009-FAB-2\",\n \"status\": \"ok\",\n \"software\": \"NA\",\n \"hardware\": \"3.1\",\n \"mac_address\": \"NA\",\n \"serial_number\": \"JAF1705AELK\",\n \"state\": \"ok\",\n \"name\": \"Fabric Module 2\"\n },\n \"4\": {\n \"ports\": \"0\",\n \"module_type\": \"Fabric Module 2\",\n \"model\": \"N7K-C7009-FAB-2\",\n \"status\": \"ok\",\n \"software\": \"NA\",\n \"hardware\": \"3.1\",\n \"mac_address\": \"NA\",\n \"serial_number\": \"JAF1705BFCF\",\n \"state\": \"ok\",\n \"name\": \"Fabric Module 2\"\n },\n \"5\": {\n \"ports\": \"0\",\n \"module_type\": \"Fabric Module 2\",\n \"model\": \"N7K-C7009-FAB-2\",\n \"status\": \"ok\",\n \"software\": \"NA\",\n \"hardware\": \"3.1\",\n \"mac_address\": \"NA\",\n \"serial_number\": \"JAF1704APQH\",\n \"state\": \"ok\",\n 
\"name\": \"Fabric Module 2\"\n },\n \"35\": {\n \"name\": \"Nexus7000 C7009 (9 Slot) Chassis Fan Module\",\n \"sn\": \"JAF1702AEBE\"\n },\n \"33\": {\n \"name\": \"Nexus7000 C7009 (9 Slot) Chassis Power Supply\",\n \"sn\": \"DTM171300QB\"\n }\n },\n \"rp\": {\n \"2\": {\n \"name\": \"N7K-SUP2\",\n \"state\": \"ha-standby\",\n \"sn\": \"JAF1708AGQH\",\n \"redundancy_state\": \"ha-standby\",\n \"rp_boot_image\": \"slot0:/n7000-s2-dk10.34.3.0.CV.0.658.gbin\",\n \"rp_kickstart_boot_image\": \"slot0:/n7000-s2-kickstart.8.3.0.CV.0.658.gbin\",\n \"rp_uptime\": 28650\n },\n \"1\": {\n \"name\": \"N7K-SUP2\",\n \"state\": \"active\",\n \"sn\": \"JAF1708AGTH\",\n \"redundancy_state\": \"active\",\n \"rp_boot_image\": \"slot0:/n7000-s2-dk10.34.3.0.CV.0.658.gbin\",\n \"rp_kickstart_boot_image\": \"slot0:/n7000-s2-kickstart.8.3.0.CV.0.658.gbin\",\n \"rp_uptime\": 28650\n }\n },\n \"lc\": {\n \"4\": {\n \"name\": \"10/40 Gbps Ethernet Module\",\n \"sn\": \"JAE18120FLU\",\n \"state\": \"ok\"\n },\n \"3\": {\n \"name\": \"1/10 Gbps Ethernet Module\",\n \"sn\": \"JAF1717AAND\",\n \"state\": \"ok\"\n }\n }\n }\n virtual_device = {'2': \n {'vd_name': 'PE2', 'membership': {'Eth4/4': {'status': 'OK', 'type': 'Ethernet'}, 'Eth4/3': {'status': 'OK', 'type': 'Ethernet'}}, 'vd_status': 'active'}, \n '3': \n {'vd_name': 'CORE', 'membership': {'Eth4/1': {'status': 'OK', 'type': 'Ethernet'}, 'Eth4/2(b)': {'status': 'OK', 'type': 'Ethernet'}}, 'vd_status': 'active'}, \n '1': \n {'vd_name': 'PE1', 'membership': {'Eth4/5': {'status': 'OK', 'type': 'Ethernet'}, 'Eth4/6': {'status': 'OK', 'type': 'Ethernet'}}, 'vd_status': 'active'}, \n '0': \n {'membership': {'Eth3/2': {'status': 'OK', 'type': 'Ethernet'}, 'Eth3/1': {'status': 'OK', 'type': 'Ethernet'}}}\n }\n\n platform_all = {'attributes': None,\n # 'callables': {},\n 'chassis': 'Nexus7000 C7009 (9 Slot) Chassis',\n 'chassis_sn': 'JAF1704ARQG',\n 'connections': None,\n 'context_manager': {},\n # 'device': <Device aDevice at 0xf7111f2c>,\n # 'diff_ignore': deque(['maker', 'callables', 'device']),\n 'dir': 'bootflash:',\n 'disk_free_space': '1674481664',\n 'disk_total_space': '1782931456',\n 'disk_used_space': '108449792',\n 'image': 'slot0:///n7000-s2-dk10.34.1.0.129.gbin',\n 'installed_packages': 'n7700-s2-dk10.1.2.0.D1.1.CSCuo7721.bin',\n 'kickstart_image': 'slot0:///n7000-s2-kickstart.10.81.0.129.gbin',\n 'kickstart_version': 'version 8.1(1) [build 8.1(0.129)] [gdb]',\n 'main_mem': '32938744',\n # 'maker': <genie.ops.base.maker.Maker object at 0xf712414c>,\n 'os': 'NX-OS',\n 'rtr_type': 'Nexus7000 C7009',\n 'rp_uptime': 28650,\n 'slot': {'lc': {'3': {'name': '1/10 Gbps Ethernet Module',\n 'sn': 'JAF1717AAND',\n 'state': 'ok'},\n '4': {'name': '10/40 Gbps Ethernet Module',\n 'sn': 'JAE18120FLU',\n 'state': 'ok'}},\n 'oc': {'33': {'name': 'Nexus7000 C7009 (9 Slot) Chassis Power '\n 'Supply',\n 'sn': 'DTM171300QB'},\n '35': {'name': 'Nexus7000 C7009 (9 Slot) Chassis Fan '\n 'Module',\n 'sn': 'JAF1702AEBE'}},\n 'rp': {'1': {'name': 'N7K-SUP2',\n 'redundancy_state': 'active',\n 'rp_boot_image': 'slot0:/n7000-s2-dk10.34.3.0.CV.0.658.gbin',\n 'rp_kickstart_boot_image': 'slot0:/n7000-s2-kickstart.8.3.0.CV.0.658.gbin',\n 'rp_uptime': 28650,\n 'sn': 'JAF1708AGTH',\n 'state': 'active'},\n '2': {'name': 'N7K-SUP2',\n 'redundancy_state': 'ha-standby',\n 'rp_boot_image': 'slot0:/n7000-s2-dk10.34.3.0.CV.0.658.gbin',\n 'rp_kickstart_boot_image': 'slot0:/n7000-s2-kickstart.8.3.0.CV.0.658.gbin',\n 'rp_uptime': 28650,\n 'sn': 'JAF1708AGQH',\n 'state': 'ha-standby'}}},\n 'version': 
'version 8.1(1) [build 8.1(0.129)] [gdb]',\n 'virtual_device': {'0': {'membership': {'Eth3/1': {'status': 'OK',\n 'type': 'Ethernet'},\n 'Eth3/2': {'status': 'OK',\n 'type': 'Ethernet'}}},\n '1': {'membership': {'Eth4/5': {'status': 'OK',\n 'type': 'Ethernet'},\n 'Eth4/6': {'status': 'OK',\n 'type': 'Ethernet'}},\n 'vd_name': 'PE1',\n 'vd_status': 'active'},\n '2': {'membership': {'Eth4/3': {'status': 'OK',\n 'type': 'Ethernet'},\n 'Eth4/4': {'status': 'OK',\n 'type': 'Ethernet'}},\n 'vd_name': 'PE2',\n 'vd_status': 'active'},\n '3': {'membership': {'Eth4/1': {'status': 'OK',\n 'type': 'Ethernet'},\n 'Eth4/2(b)': {'status': 'OK',\n 'type': 'Ethernet'}},\n 'vd_name': 'CORE',\n 'vd_status': 'active'}}}\n", "id": "3413194", "language": "Python", "matching_score": 3.771660089492798, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/platform/nxos/tests/platform_output.py" }, { "content": "# Python\nimport unittest\n\n# ATS\nfrom ats.topology import Device\n\n# Genie\nfrom genie.ops.base import Base\nfrom genie.ops.base.maker import Maker\nfrom genie.libs.ops.platform.iosxr.platform import Platform\nfrom genie.libs.ops.platform.iosxr.tests.platform_output import PlatformOutput\n\n# Parser\nfrom genie.libs.parser.iosxr.show_platform import ShowVersion, ShowSdrDetail,\\\n ShowPlatform, ShowPlatformVm,\\\n ShowInstallActiveSummary, ShowInventory,\\\n ShowRedundancySummary, AdminShowDiagChassis,\\\n ShowRedundancy, Dir\n\n\nclass test_platform(unittest.TestCase):\n\n def setUp(self):\n self.device = Device(name='aDevice')\n self.device.os = 'iosxr'\n self.device.mapping={}\n self.device.mapping['cli']='cli'\n self.device.connectionmgr.connections['cli'] = '5'\n\n def test_all_attributes(self):\n self.maxDiff = None\n p = Platform(device=self.device)\n # Get 'show version' output\n p.maker.outputs[ShowVersion] = {'':PlatformOutput.showVersionOutput}\n # Get 'show sdr detail' output\n p.maker.outputs[ShowSdrDetail] = {'':PlatformOutput.showSdrDetailOutput}\n # Get 'show platform' output\n p.maker.outputs[ShowPlatform] = {'':PlatformOutput.showPlatformOutput}\n # Get 'show platform vm' output\n p.maker.outputs[ShowPlatformVm] = {'':PlatformOutput.showPlatformVmOutput}\n # Get 'show install active summary' output\n p.maker.outputs[ShowInstallActiveSummary] = \\\n {'':PlatformOutput.showInstallActiveSummaryOutput}\n # Get 'show inventory' output\n p.maker.outputs[ShowInventory] = {'':PlatformOutput.showInventoryOutput}\n # Get 'show redundancy summary' output\n p.maker.outputs[ShowRedundancySummary] = \\\n {'':PlatformOutput.showRedundancySummaryOutput}\n # Get 'show redundancy' output\n p.maker.outputs[ShowRedundancy] = \\\n {'':PlatformOutput.showRedundancyOutput}\n # Get 'admin show diag chassis' output\n p.maker.outputs[AdminShowDiagChassis] = \\\n {'':PlatformOutput.adminShowDiagChassisOutput}\n # Get 'dir' output\n p.maker.outputs[Dir] = {'':PlatformOutput.dirOutput}\n # Learn the feature\n p.learn()\n\n # Check all match\n self.assertEqual(p.chassis, 'ASR 9006 4 Line Card Slot Chassis with V2 AC PEM')\n self.assertEqual(p.chassis_sn, 'FOX1810G8LR')\n self.assertEqual(p.config_register, '0x1922')\n dir_value = {\n 'dir_name': 'disk0a:/usr',\n 'total_bytes': '2562719744 bytes',\n 'total_free_bytes': '1918621184 bytes'}\n self.assertEqual(p.dir, dir_value)\n self.assertEqual(p.image, 'disk0:asr9k-os-mbi-6.1.4.10I/0x100305/mbiasr9k-rsp3.vm')\n packages = ['disk0:asr9k-mini-px-6.1.21.15I',\n 'disk0:asr9k-mpls-px-6.1.21.15I',\n 'disk0:asr9k-mcast-px-6.1.21.15I',\n 
'disk0:asr9k-mgbl-px-6.1.21.15I']\n self.assertEqual(p.installed_packages, packages)\n self.assertEqual(p.main_mem, '6291456K')\n self.assertEqual(p.os, 'IOSXR')\n self.assertEqual(p.rtr_type, 'ASR9K')\n self.assertEqual(p.sdr_owner, 'Owner')\n self.assertEqual(p.version, '6.1.4.10I')\n self.assertEqual(p.rp_uptime, 480)\n slots = {\n 'lc': {\n '0/0': {\n 'config_state': 'PWR,NSHUT,MON',\n 'name': 'A9K-MOD80-SE',\n 'state': 'IOS XR RUN',\n 'subslot': {\n '0': {\n 'config_state': 'PWR,NSHUT,MON',\n 'name': 'A9K-MPA-20X1GE',\n 'redundancy_state': 'None',\n 'state': 'OK'},\n '1': {\n 'config_state': 'PWR,NSHUT,MON',\n 'name': 'A9K-MQA-20X2GE',\n 'redundancy_state': 'None',\n 'state': 'OK'},\n '2': {\n 'config_state': 'PWR,NSHUT,MON',\n 'name': 'A9K-MRA-20X3GE',\n 'redundancy_state': 'None',\n 'state': 'OK'}}}},\n 'rp': {\n '0/RSP0': {\n 'config_state': 'PWR,NSHUT,MON',\n 'name': 'A9K-RSP440-TR',\n 'redundancy_state': 'Active',\n 'state': 'IOS XR RUN'},\n '0/RSP1': {\n 'config_state': 'PWR,NSHUT,MON',\n 'name': 'A9K-RSP440-TR',\n 'redundancy_state': 'Standby',\n 'state': 'IOS XR RUN'},\n 'rp_config_register': '0x1922'}}\n self.assertEqual(p.slot, slots)\n virtual_device_dict = {\n 0: {\n 'membership': {\n '0/0/CPU0': {\n 'vd_ms_partner_name': 'NONE',\n 'vd_ms_red_state': 'Not-known',\n 'vd_ms_status': 'IOS '\n 'XR '\n 'RUN',\n 'vd_ms_type': 'LC'},\n '0/RSP0/CPU0': {\n 'vd_ms_partner_name': '0/RSP1/CPU0',\n 'vd_ms_red_state': 'Primary',\n 'vd_ms_status': 'IOS '\n 'XR '\n 'RUN',\n 'vd_ms_type': 'RP'},\n '0/RSP1/CPU0': {\n 'vd_ms_partner_name': '0/RSP0/CPU0',\n 'vd_ms_red_state': 'Backup',\n 'vd_ms_status': 'IOS '\n 'XR '\n 'RUN',\n 'vd_ms_type': 'RP'}},\n 'vd_dSDRsc_nod': '0/RSP0/CPU0',\n 'vd_dSDRsc_partner_node': '0/RSP1/CPU0',\n 'vd_mac_addr': 'a80c.0d5f.ab17',\n 'vd_name': 'Owner',\n 'vd_primary_node1': '0/RSP0/CPU0',\n 'vd_primary_node2': '0/RSP1/CPU0'}}\n self.assertEqual(p.virtual_device, virtual_device_dict)\n\n\n def test_missing_attributes(self):\n self.maxDiff = None\n p = Platform(device=self.device)\n # Get 'show version' output\n p.maker.outputs[ShowVersion] = {'':PlatformOutput.showVersionOutput}\n # Get 'show sdr detail' output\n p.maker.outputs[ShowSdrDetail] = {'':PlatformOutput.showSdrDetailOutput}\n # Get 'show platform' output\n p.maker.outputs[ShowPlatform] = {'':PlatformOutput.showPlatformOutput}\n # Get 'show platform vm' output\n p.maker.outputs[ShowPlatformVm] = {'':PlatformOutput.showPlatformVmOutput}\n # Get 'show install active summary' output\n p.maker.outputs[ShowInstallActiveSummary] = \\\n {'':PlatformOutput.showInstallActiveSummaryOutput}\n # Get 'show inventory' output\n p.maker.outputs[ShowInventory] = {'':PlatformOutput.showInventoryOutput}\n # Get 'show redundancy summary' output\n p.maker.outputs[ShowRedundancySummary] = \\\n {'':PlatformOutput.showRedundancySummaryOutput}\n # Get 'show redundancy' output\n p.maker.outputs[ShowRedundancy] = \\\n {'':PlatformOutput.showRedundancyOutput}\n # Get 'admin show diag chassis' output\n p.maker.outputs[AdminShowDiagChassis] = \\\n {'':PlatformOutput.adminShowDiagChassisOutput}\n # Get 'dir' output\n p.maker.outputs[Dir] = {'':PlatformOutput.dirOutput}\n # Learn the feature\n p.learn()\n\n # Check attribute not found\n with self.assertRaises(AttributeError):\n platform_type=(p.module)\n\n\n def test_selective_attribute(self):\n self.maxDiff = None\n p = Platform(device=self.device)\n # Get 'show version' output\n p.maker.outputs[ShowVersion] = {'':PlatformOutput.showVersionOutput}\n # Get 'show sdr detail' output\n 
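# NOTE: maker.outputs appears to map each parser class to canned data\n # keyed by the parser's call arguments ('' = called with no arguments),\n # so learn() consumes these instead of contacting a live device\n # (assumed Genie Maker mock convention, inferred from the tests above)\n 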
p.maker.outputs[ShowSdrDetail] = {'':PlatformOutput.showSdrDetailOutput}\n # Get 'show platform' output\n p.maker.outputs[ShowPlatform] = {'':PlatformOutput.showPlatformOutput}\n # Get 'show platform vm' output\n p.maker.outputs[ShowPlatformVm] = {'':PlatformOutput.showPlatformVmOutput}\n # Get 'show install active summary' output\n p.maker.outputs[ShowInstallActiveSummary] = \\\n {'':PlatformOutput.showInstallActiveSummaryOutput}\n # Get 'show inventory' output\n p.maker.outputs[ShowInventory] = {'':PlatformOutput.showInventoryOutput}\n # Get 'show redundancy summary' output\n p.maker.outputs[ShowRedundancySummary] = \\\n {'':PlatformOutput.showRedundancySummaryOutput}\n # Get 'show redundancy' output\n p.maker.outputs[ShowRedundancy] = \\\n {'':PlatformOutput.showRedundancyOutput}\n # Get 'admin show diag chassis' output\n p.maker.outputs[AdminShowDiagChassis] = \\\n {'':PlatformOutput.adminShowDiagChassisOutput}\n # Get 'dir' output\n p.maker.outputs[Dir] = {'':PlatformOutput.dirOutput}\n # Learn the feature\n p.learn()\n\n # Check selective attribute value\n self.assertIn('IOSXR', p.os)\n self.assertNotIn('IOSXE', p.os)\n\n\n def test_ignored(self):\n self.maxDiff = None\n \n p1 = Platform(device=self.device)\n # Get 'show version' output\n p1.maker.outputs[ShowVersion] = {'':PlatformOutput.showVersionOutput}\n # Get 'show sdr detail' output\n p1.maker.outputs[ShowSdrDetail] = {'':PlatformOutput.showSdrDetailOutput}\n # Get 'show platform' output\n p1.maker.outputs[ShowPlatform] = {'':PlatformOutput.showPlatformOutput}\n # Get 'show platform vm' output\n p1.maker.outputs[ShowPlatformVm] = {'':PlatformOutput.showPlatformVmOutput}\n # Get 'show install active summary' output\n p1.maker.outputs[ShowInstallActiveSummary] = \\\n {'':PlatformOutput.showInstallActiveSummaryOutput}\n # Get 'show inventory' output\n p1.maker.outputs[ShowInventory] = {'':PlatformOutput.showInventoryOutput}\n # Get 'show redundancy summary' output\n p1.maker.outputs[ShowRedundancySummary] = \\\n {'':PlatformOutput.showRedundancySummaryOutput}\n # Get 'show redundancy' output\n p1.maker.outputs[ShowRedundancy] = \\\n {'':PlatformOutput.showRedundancyOutput}\n # Get 'admin show diag chassis' output\n p1.maker.outputs[AdminShowDiagChassis] = \\\n {'':PlatformOutput.adminShowDiagChassisOutput}\n # Get 'dir' output\n p1.maker.outputs[Dir] = {'':PlatformOutput.dirOutput}\n\n p2 = Platform(device=self.device)\n # Get 'show version' output\n p2.maker.outputs[ShowVersion] = {'':PlatformOutput.showVersionOutput}\n # Get 'show sdr detail' output\n p2.maker.outputs[ShowSdrDetail] = {'':PlatformOutput.showSdrDetailOutput}\n # Get 'show platform' output\n p2.maker.outputs[ShowPlatform] = {'':PlatformOutput.showPlatformOutput}\n # Get 'show platform vm' output\n p2.maker.outputs[ShowPlatformVm] = {'':PlatformOutput.showPlatformVmOutput}\n # Get 'show install active summary' output\n p2.maker.outputs[ShowInstallActiveSummary] = \\\n {'':PlatformOutput.showInstallActiveSummaryOutput}\n # Get 'show inventory' output\n p2.maker.outputs[ShowInventory] = {'':PlatformOutput.showInventoryOutput}\n # Get 'show redundancy summary' output\n p2.maker.outputs[ShowRedundancySummary] = \\\n {'':PlatformOutput.showRedundancySummaryOutput}\n # Get 'show redundancy' output\n p2.maker.outputs[ShowRedundancy] = \\\n {'':PlatformOutput.showRedundancyOutput}\n # Get 'admin show diag chassis' output\n p2.maker.outputs[AdminShowDiagChassis] = \\\n {'':PlatformOutput.adminShowDiagChassisOutput}\n # Get 'dir' output\n p2.maker.outputs[Dir] = 
{'':PlatformOutput.dirOutput}\n \n # Learn the feature\n p1.learn()\n p2.learn()\n\n p1.diff_ignore.append('[chassis_sn][JAF1704ARQG]')\n\n self.assertNotEqual(p1.__dict__['diff_ignore'],p2.__dict__['diff_ignore'])\n\n\nif __name__ == '__main__':\n unittest.main()\n\n# vim: ft=python et sw=4\n", "id": "5616750", "language": "Python", "matching_score": 7.430522918701172, "max_stars_count": 0, "path": "pkgs/ops-pkg/src/genie/libs/ops/platform/iosxr/tests/test_platform.py" }, { "content": "class PlatformOutput(object):\n\n # 'show version' output\n showVersionOutput = {\n 'chassis_detail': 'ASR 9006 4 Line Card Slot Chassis with V2 AC PEM',\n 'config_register': '0x1922',\n 'image': 'disk0:asr9k-os-mbi-6.1.4.10I/0x100305/mbiasr9k-rsp3.vm',\n 'main_mem': 'cisco ASR9K Series (Intel 686 F6M14S4) processor with 6291456K '\n 'bytes of memory.',\n 'operating_system': 'IOSXR',\n 'processor': 'Intel 686 F6M14S4',\n 'processor_memory_bytes': '6291456K',\n 'device_family': 'ASR9K',\n 'rp_config_register': '0x1922',\n 'software_version': '6.1.4.10I',\n 'uptime': '5 hours, 14 minutes'}\n\n # 'show sdr detail' output\n showSdrDetailOutput = {\n 'sdr_id': {\n 0: {\n 'dsdrsc_node': '0/RSP0/CPU0',\n 'dsdrsc_partner_node': '0/RSP1/CPU0',\n 'mac_address': 'a80c.0d5f.ab17',\n 'membership': {\n '0/0/CPU0': {\n 'node_status': 'IOS XR RUN',\n 'partner_name': 'NONE',\n 'red_state': 'Not-known',\n 'type': 'LC'},\n '0/RSP0/CPU0': {\n 'node_status': 'IOS XR RUN',\n 'partner_name': '0/RSP1/CPU0',\n 'red_state': 'Primary',\n 'type': 'RP'},\n '0/RSP1/CPU0': {\n 'node_status': 'IOS XR RUN',\n 'partner_name': '0/RSP0/CPU0',\n 'red_state': 'Backup',\n 'type': 'RP'}},\n 'primary_node1': '0/RSP0/CPU0',\n 'primary_node2': '0/RSP1/CPU0',\n 'sdr_name': 'Owner'}}}\n\n # 'show platform' output\n showPlatformOutput = {\n 'slot': {\n 'lc': {\n '0/0': {\n 'config_state': 'PWR,NSHUT,MON',\n 'name': 'A9K-MOD80-SE',\n 'state': 'IOS XR RUN',\n 'subslot': {\n '0': {\n 'config_state': 'PWR,NSHUT,MON',\n 'name': 'A9K-MPA-20X1GE',\n 'redundancy_state': 'None',\n 'state': 'OK'},\n '1': {\n 'config_state': 'PWR,NSHUT,MON',\n 'name': 'A9K-MQA-20X2GE',\n 'redundancy_state': 'None',\n 'state': 'OK'},\n '2': {\n 'config_state': 'PWR,NSHUT,MON',\n 'name': 'A9K-MRA-20X3GE',\n 'redundancy_state': 'None',\n 'state': 'OK'}}}},\n 'rp': {\n '0/RSP0': {\n 'config_state': 'PWR,NSHUT,MON',\n 'name': 'A9K-RSP440-TR',\n 'redundancy_state': 'Active',\n 'state': 'IOS XR RUN'},\n '0/RSP1': {\n 'config_state': 'PWR,NSHUT,MON',\n 'name': 'A9K-RSP440-TR',\n 'redundancy_state': 'Standby',\n 'state': 'IOS XR RUN'}}}}\n\n # 'show platform vm' output\n showPlatformVmOutput = {\n 'node': {\n '0/0/CPU0': {\n 'ip_address': '192.0.0.6',\n 'partner_name': 'NONE',\n 'sw_status': 'FINAL Band',\n 'type': 'LC (ACTIVE)'},\n '0/RP0/CPU0': {\n 'ip_address': '192.0.0.4',\n 'partner_name': 'NONE',\n 'sw_status': 'FINAL Band',\n 'type': 'RP (ACTIVE)'}}}\n\n # 'show install active summary'\n showInstallActiveSummaryOutput = {\n 'active_packages': ['disk0:asr9k-mini-px-6.1.21.15I',\n 'disk0:asr9k-mpls-px-6.1.21.15I',\n 'disk0:asr9k-mcast-px-6.1.21.15I',\n 'disk0:asr9k-mgbl-px-6.1.21.15I'],\n 'sdr': 'Owner'}\n\n # 'show inventory' output\n showInventoryOutput = {\n 'module_name': {\n 'module 0/RSP0/CPU0': {\n 'descr': 'ASR9K Route Switch '\n 'Processor with 440G/slot '\n 'Fabric and 6GB',\n 'pid': 'A9K-RSP440-TR',\n 'sn': 'FOC1808NEND',\n 'vid': 'V05'},\n 'module 0/RSP1/CPU0': {\n 'descr': 'ASR9K Route Switch '\n 'Processor with 440G/slot '\n 'Fabric and 6GB',\n 'pid': 'A9K-MPA-20X1GE',\n 
'sn': 'FOC1811N49J',\n 'vid': 'V02'},\n 'module mau 0/0/0/0': {\n 'descr': 'Unknown or Unsupported '\n 'CPAK Module',\n 'pid': 'GLC-T',\n 'sn': '00000MTC160107LP',\n 'vid': 'N/A'},\n 'module mau 0/0/0/1': {\n 'descr': 'Unknown or Unsupported '\n 'CPAK Module',\n 'pid': 'GLC-T',\n 'sn': '00000MTC17150731',\n 'vid': 'N/A'}}}\n\n # 'admin show diag chassis' output\n adminShowDiagChassisOutput = {\n 'chassis_feature': 'V2 AC PEM',\n 'clei': 'IPMUP00BRB',\n 'desc': 'ASR 9006 4 Line Card Slot Chassis with V2 AC PEM',\n 'device_family': 'ASR',\n 'device_series': 9006,\n 'num_line_cards': 4,\n 'pid': 'ASR-9006-AC-V2',\n 'rack_num': 0,\n 'sn': 'FOX1810G8LR',\n 'top_assy_num': '68-4235-02',\n 'vid': 'V02'}\n\n # 'show redundancy summary' output\n showRedundancySummaryOutput = {\n \"redundancy_communication\": True,\n 'node': {\n '0/RSP0/CPU0(A)': {\n 'node_detail': 'Node Not Ready, NSR: Not '\n 'Configured',\n 'standby_node': '0/RSP1/CPU0(S)',\n 'type': 'active'},\n '0/RSP0/CPU0(P)': {\n 'backup_node': '0/RSP1/CPU0(B)',\n 'node_detail': 'Proc Group Not Ready, NSR: '\n 'Ready',\n 'standby_node': '0/RSP1/CPU0(B)',\n 'type': 'primary'}}}\n\n # 'show redundancy' output\n showRedundancyOutput = {\n 'node': {\n '0/RSP0/CPU0': {\n 'group': {\n 'central-services': {\n 'backup': 'N/A',\n 'primary': '0/RSP0/CPU0',\n 'status': 'Not '\n 'Ready'},\n 'dlrsc': {\n 'backup': 'N/A',\n 'primary': '0/RSP0/CPU0',\n 'status': 'Not Ready'},\n 'dsc': {\n 'backup': 'N/A',\n 'primary': '0/RSP0/CPU0',\n 'status': 'Not Ready'},\n 'mcast-routing': {\n 'backup': 'N/A',\n 'primary': '0/RSP0/CPU0',\n 'status': 'Not '\n 'Ready'},\n 'netmgmt': {\n 'backup': 'N/A',\n 'primary': '0/RSP0/CPU0',\n 'status': 'Not Ready'},\n 'v4-routing': {\n 'backup': 'N/A',\n 'primary': '0/RSP0/CPU0',\n 'status': 'Not Ready'},\n 'v6-routing': {\n 'backup': 'N/A',\n 'primary': '0/RSP0/CPU0',\n 'status': 'Not Ready'}},\n 'last_reload_timestamp': 'Thu Apr 27 02:14:12 '\n '2017',\n 'last_switchover_timepstamp': 'Thu Apr 27 '\n '03:29:57 2017',\n 'node_uptime': '8 minutes',\n 'node_uptime_in_seconds': 480,\n 'node_uptime_timestamp': 'Thu Apr 27 03:22:37 '\n '2017',\n 'primary_rmf_state': 'not ready',\n 'primary_rmf_state_reason': 'Backup is not '\n 'Present',\n 'reload_cause': 'Initiating switch-over',\n 'role': 'ACTIVE',\n 'time_since_last_reload': '1 hour, 16 minutes ago',\n 'time_since_last_switchover': '1 minute ago',\n 'valid_partner': ''}}}\n\n # 'dir:' output\n dirOutput = {\n 'dir': {\n 'dir_name': 'disk0a:/usr',\n 'total_bytes': '2562719744 bytes',\n 'total_free_bytes': '1918621184 bytes'}}\n\n # Platform Ops Object final output\n platformOpsOutput = {\n 'attributes': None,\n \"redundancy_communication\": True,\n #'callables': {},\n 'chassis': 'ASR 9006 4 Line Card Slot Chassis with V2 AC PEM',\n 'chassis_sn': 'FOX1810G8LR',\n 'config_register': '0x1922',\n 'connections': None,\n #'context_manager': {},\n #'device': <Device aDevice at 0xf6fcf8ac>,\n #'diff_ignore': deque(['maker', 'callables', 'device']),\n 'image': 'disk0:asr9k-os-mbi-6.1.4.10I/0x100305/mbiasr9k-rsp3.vm',\n 'installed_packages': ['disk0:asr9k-mini-px-6.1.21.15I',\n 'disk0:asr9k-mpls-px-6.1.21.15I',\n 'disk0:asr9k-mcast-px-6.1.21.15I',\n 'disk0:asr9k-mgbl-px-6.1.21.15I'],\n 'main_mem': '6291456K',\n #'maker': <genie.ops.base.maker.Maker object at 0xf6fcfacc>,\n 'os': 'IOSXR',\n 'rtr_type': 'ASR9K',\n 'sdr_owner': 'Owner',\n 'rp_uptime': 480,\n 'slot': {\n 'lc': {\n '0/0': {\n 'config_state': 'PWR,NSHUT,MON',\n 'name': 'A9K-MOD80-SE',\n 'state': 'IOS XR RUN',\n 'subslot': {\n '0': 
{\n 'config_state': 'PWR,NSHUT,MON',\n 'name': 'A9K-MPA-20X1GE',\n 'redundancy_state': 'None',\n 'state': 'OK'},\n '1': {\n 'config_state': 'PWR,NSHUT,MON',\n 'name': 'A9K-MQA-20X2GE',\n 'redundancy_state': 'None',\n 'state': 'OK'},\n '2': {\n 'config_state': 'PWR,NSHUT,MON',\n 'name': 'A9K-MRA-20X3GE',\n 'redundancy_state': 'None',\n 'state': 'OK'}}}},\n 'rp': {\n '0/RSP0': {\n 'config_state': 'PWR,NSHUT,MON',\n 'name': 'A9K-RSP440-TR',\n 'redundancy_state': 'Active',\n 'state': 'IOS XR RUN'},\n '0/RSP1': {\n 'config_state': 'PWR,NSHUT,MON',\n 'name': 'A9K-RSP440-TR',\n 'redundancy_state': 'Standby',\n 'state': 'IOS XR RUN'},\n 'rp_config_register': '0x1922'}},\n 'version': '6.1.4.10I',\n 'virtual_device': {\n 0: {\n 'membership': {\n '0/0/CPU0': {\n 'vd_ms_partner_name': 'NONE',\n 'vd_ms_red_state': 'Not-known',\n 'vd_ms_status': 'IOS '\n 'XR '\n 'RUN',\n 'vd_ms_type': 'LC'},\n '0/RSP0/CPU0': {\n 'vd_ms_partner_name': '0/RSP1/CPU0',\n 'vd_ms_red_state': 'Primary',\n 'vd_ms_status': 'IOS '\n 'XR '\n 'RUN',\n 'vd_ms_type': 'RP'},\n '0/RSP1/CPU0': {\n 'vd_ms_partner_name': '0/RSP0/CPU0',\n 'vd_ms_red_state': 'Backup',\n 'vd_ms_status': 'IOS '\n 'XR '\n 'RUN',\n 'vd_ms_type': 'RP'}},\n 'vd_dSDRsc_nod': '0/RSP0/CPU0',\n 'vd_dSDRsc_partner_node': '0/RSP1/CPU0',\n 'vd_mac_addr': 'a80c.0d5f.ab17',\n 'vd_name': 'Owner',\n 'vd_primary_node1': '0/RSP0/CPU0',\n 'vd_primary_node2': '0/RSP1/CPU0'}}}\n\n\n# vim: ft=python et sw=4\n", "id": "12172428", "language": "Python", "matching_score": 5.117885589599609, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/platform/iosxr/tests/platform_output.py" }, { "content": "''' \nPlatform Genie Ops Object for IOSXR.\n'''\n\n# super class\nfrom genie.libs.ops.platform.platform import Platform as SuperPlatform\n\n# Parser\nfrom genie.libs.parser.iosxr.show_platform import ShowVersion, ShowSdrDetail,\\\n ShowPlatform, ShowPlatformVm,\\\n ShowInstallActiveSummary, ShowInventory,\\\n ShowRedundancySummary, AdminShowDiagChassis,\\\n ShowRedundancy, Dir\n\n\nclass Platform(SuperPlatform):\n '''Platform Genie Ops Object'''\n\n def get_active_uptime(self, item):\n for node in item:\n if 'active' in item[node]['role'].lower():\n return item[node]['node_uptime_in_seconds']\n \n def learn(self):\n '''Learn Platform Ops'''\n\n # ================\n # DeviceAttributes\n # ================\n\n # chassis\n self.add_leaf(cmd=ShowVersion,\n src='[chassis_detail]',\n dest='[chassis]')\n\n # chassis_sn\n self.add_leaf(cmd=AdminShowDiagChassis,\n src='[sn]',\n dest='[chassis_sn]')\n\n\n # rtr_type\n self.add_leaf(cmd=ShowVersion,\n src='[device_family]',\n dest='[rtr_type]')\n\n # os\n self.add_leaf(cmd=ShowVersion,\n src='[operating_system]',\n dest='[os]')\n\n # version\n self.add_leaf(cmd=ShowVersion,\n src='[software_version]',\n dest='[version]')\n\n # image\n self.add_leaf(cmd=ShowVersion,\n src='[image]',\n dest='[image]')\n\n # installed_packages\n self.add_leaf(cmd=ShowInstallActiveSummary,\n src='[active_packages]',\n dest='[installed_packages]')\n\n # config_register\n self.add_leaf(cmd=ShowVersion,\n src='[config_register]',\n dest='[config_register]')\n\n # main_mem\n self.add_leaf(cmd=ShowVersion,\n src='[processor_memory_bytes]',\n dest='[main_mem]')\n\n # dir\n self.add_leaf(cmd=Dir,\n src='[dir]',\n dest='[dir]')\n\n # sdr_owner\n self.add_leaf(cmd=ShowInstallActiveSummary,\n src='[sdr]',\n dest='[sdr_owner]')\n \n # =======================\n # VirtualDeviceAttributes\n # =======================\n\n # vd_id == sdr_id\n # vd_name == sdr_name\n 
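# NOTE: each add_leaf below maps a parser output path (src) into the\n # ops structure (dest); the (?P<...>) named regex groups carry the\n # matched keys from src to dest (assumed standard Genie Maker mapping\n # behavior, inferred from how these src/dest expressions are written)\n 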
self.add_leaf(cmd=ShowSdrDetail,\n src='[sdr_id][(?P<sdr_id>.*)][sdr_name][(?P<sdr_name>.*)]',\n dest='[virtual_device][(?P<sdr_id>.*)][vd_name][(?P<sdr_name>.*)]')\n\n # vd_dSDRsc_nod\n self.add_leaf(cmd=ShowSdrDetail,\n src='[sdr_id][(?P<sdr_id>.*)][dsdrsc_node][(?P<dsdrsc_node>.*)]',\n dest='[virtual_device][(?P<sdr_id>.*)][vd_dSDRsc_nod][(?P<dsdrsc_node>.*)]')\n\n # vd_dSDRsc_partner_node\n self.add_leaf(cmd=ShowSdrDetail,\n src='[sdr_id][(?P<sdr_id>.*)][dsdrsc_partner_node][(?P<dsdrsc_partner_node>.*)]',\n dest='[virtual_device][(?P<sdr_id>.*)][vd_dSDRsc_partner_node][(?P<dsdrsc_partner_node>.*)]')\n\n # vd_primary_node1\n self.add_leaf(cmd=ShowSdrDetail,\n src='[sdr_id][(?P<sdr_id>.*)][primary_node1][(?P<primary_node1>.*)]',\n dest='[virtual_device][(?P<sdr_id>.*)][vd_primary_node1][(?P<primary_node1>.*)]')\n\n # vd_primary_node2\n self.add_leaf(cmd=ShowSdrDetail,\n src='[sdr_id][(?P<sdr_id>.*)][primary_node2][(?P<primary_node2>.*)]',\n dest='[virtual_device][(?P<sdr_id>.*)][vd_primary_node2][(?P<primary_node2>.*)]')\n\n # vd_mac_addr\n self.add_leaf(cmd=ShowSdrDetail,\n src='[sdr_id][(?P<sdr_id>.*)][mac_address][(?P<mac_address>.*)]',\n dest='[virtual_device][(?P<sdr_id>.*)][vd_mac_addr][(?P<mac_address>.*)]')\n\n # ====================\n # MembershipAttributes\n # ====================\n\n # vd_ms_name == node_name\n # vd_ms_type == type\n self.add_leaf(cmd=ShowSdrDetail,\n src='[sdr_id][(?P<sdr_id>.*)][membership][(?P<node_name>.*)][type][(?P<type>.*)]',\n dest='[virtual_device][(?P<sdr_id>.*)][membership][(?P<node_name>.*)][vd_ms_type][(?P<type>.*)]')\n\n # vd_ms_status\n self.add_leaf(cmd=ShowSdrDetail,\n src='[sdr_id][(?P<sdr_id>.*)][membership][(?P<node_name>.*)][node_status][(?P<node_status>.*)]',\n dest='[virtual_device][(?P<sdr_id>.*)][membership][(?P<node_name>.*)][vd_ms_status][(?P<node_status>.*)]')\n\n # vd_ms_red_state\n self.add_leaf(cmd=ShowSdrDetail,\n src='[sdr_id][(?P<sdr_id>.*)][membership][(?P<node_name>.*)][red_state][(?P<red_state>.*)]',\n dest='[virtual_device][(?P<sdr_id>.*)][membership][(?P<node_name>.*)][vd_ms_red_state][(?P<red_state>.*)]')\n\n # vd_ms_partner_name\n self.add_leaf(cmd=ShowSdrDetail,\n src='[sdr_id][(?P<sdr_id>.*)][membership][(?P<node_name>.*)][partner_name][(?P<partner_name>.*)]',\n dest='[virtual_device][(?P<sdr_id>.*)][membership][(?P<node_name>.*)][vd_ms_partner_name][(?P<partner_name>.*)]')\n\n # ==============\n # SlotAttributes\n # ==============\n\n # card_name\n self.add_leaf(cmd=ShowPlatform,\n src='[slot][(?P<slot_type>.*)][(?P<slot_name>.*)][name][(?P<card_name>.*)]',\n dest='[slot][(?P<slot_type>.*)][(?P<slot_name>.*)][name][(?P<card_name>.*)]')\n\n # state\n self.add_leaf(cmd=ShowPlatform,\n src='[slot][(?P<slot_type>.*)][(?P<slot_name>.*)][state][(?P<state>.*)]',\n dest='[slot][(?P<slot_type>.*)][(?P<slot_name>.*)][state][(?P<state>.*)]')\n\n # config_state\n self.add_leaf(cmd=ShowPlatform,\n src='[slot][(?P<slot_type>.*)][(?P<slot_name>.*)][config_state][(?P<config_state>.*)]',\n dest='[slot][(?P<slot_type>.*)][(?P<slot_name>.*)][config_state][(?P<config_state>.*)]')\n\n # redundancy_state\n self.add_leaf(cmd=ShowPlatform,\n src='[slot][(?P<slot_type>.*)][(?P<slot_name>.*)][redundancy_state][(?P<redundancy_state>.*)]',\n dest='[slot][(?P<slot_type>.*)][(?P<slot_name>.*)][redundancy_state][(?P<redundancy_state>.*)]')\n\n # subslot\n self.add_leaf(cmd=ShowPlatform,\n src='[slot][(?P<slot_type>.*)][(?P<slot_name>.*)][subslot][(?P<subslot>.*)]',\n 
dest='[slot][(?P<slot_type>.*)][(?P<slot_name>.*)][subslot][(?P<subslot>.*)]')\n\n # rp_config_register\n self.add_leaf(cmd=ShowVersion,\n src='[rp_config_register][(?P<rp_config_register>.*)]',\n dest='[slot][rp][rp_config_register][(?P<rp_config_register>.*)]')\n\n # ==============\n # Redundancy\n # ==============\n\n # redundancy_communication\n self.add_leaf(cmd=ShowRedundancySummary,\n src='[redundancy_communication]',\n dest='[redundancy_communication]')\n\n # rp_uptime\n self.add_leaf(cmd=ShowRedundancy,\n src='[node]',\n dest='[rp_uptime]',\n action=self.get_active_uptime)\n\n # Make Ops object\n self.make(final_call=True)\n\n# vim: ft=python et sw=4\n", "id": "1843611", "language": "Python", "matching_score": 2.03239107131958, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/platform/iosxr/platform.py" }, { "content": "''' \nLAG Genie Ops Object for IOSXR - CLI.\n'''\n# Genie\nfrom genie.ops.base import Base\nfrom genie.ops.base import Context\n\n# Parser\nfrom genie.libs.parser.iosxr.show_lag import ShowLacpSystemId, \\\n ShowBundle, \\\n ShowLacp\n\nclass Lag(Base):\n '''LAG Genie Ops Object'''\n\n def toInt(self, item):\n # 0x0001\n return int(item, 0)\n\n def getPortNum(self, item):\n # 0x8000,0x0002\n port_num = item.split(',')[1]\n return int(port_num, 0)\n\n def getPortPriority(self, item):\n # 0x8000,0x0002\n priority = item.split(',')[0]\n return int(priority, 0)\n\n def formatMAC(self, item):\n # 0x8000,00-0c-86-5e-68-23\n mac = item.split(',')[1].strip()\n mac = mac.replace('-', '').lower()\n mac = \".\".join([\"%s\" % (mac[i:i+4]) for i in range(0, 12, 4)])\n return mac\n\n def getBundled(self, item):\n return True if item.lower() == 'active' else False\n\n def setProtocol(self, item):\n return 'lacp'\n\n def learn(self):\n '''Learn lag Ops'''\n \n ########################################################################\n # info\n ########################################################################\n # enabled N/A\n # system_priority \n # interfaces \n # interface\n # name\n # interval N/A\n # lacp_mode \n # lacp_max_bundle \n # lacp_min_bundle \n # system_id_mac \n # system_priority \n # bundle_id \n # protocol \n # oper_status\n # members \n # member_interface \n # bundle_id \n # bundled \n # interface\n # activity N/A\n # non_silent N/A\n # force N/A\n # timeout N/A\n # synchronization\n # aggregatable\n # collecting\n # distributing\n # system_id \n # oper_key\n # partner_id\n # partner_key\n # port_num\n # partner_port_num\n # lacp_port_priority \n # pagp_port_priority N/A\n # age N/A\n # counters N/A\n\n # ----------- system_priority ------------\n self.add_leaf(cmd=ShowLacpSystemId,\n src='[system_priority]',\n dest='info[system_priority]')\n\n # ----------- interfaces ------------\n intf_src = '[interfaces][(?P<intf>.*)]'\n intf_dst = 'info[interfaces][(?P<intf>.*)]'\n\n for key in ['name', 'bundle_id', 'oper_status']:\n self.add_leaf(cmd=ShowBundle,\n src=intf_src + '[{}]'.format(key),\n dest=intf_dst + '[{}]'.format(key))\n\n self.add_leaf(cmd=ShowBundle, \n src=intf_src + '[mac_address]',\n dest=intf_dst + '[system_id_mac]')\n\n self.add_leaf(cmd=ShowBundle, \n src=intf_src + '[max_active_link]',\n dest=intf_dst + '[lacp_max_bundle]')\n\n self.add_leaf(cmd=ShowBundle, \n src=intf_src + '[min_active_link]',\n dest=intf_dst + '[lacp_min_bundle]')\n\n self.add_leaf(cmd=ShowLacp, \n src=intf_src + '[lacp_mode]',\n dest=intf_dst + '[lacp_mode]')\n\n self.add_leaf(cmd=ShowLacp,\n src=intf_src + '[lacp_mode]',\n dest=intf_dst + '[protocol]',\n 
action=self.setProtocol)\n self.make()\n \n # system_priority\n if hasattr(self, 'info') and 'system_priority' in self.info and 'interfaces' in self.info:\n system_priority = self.info['system_priority']\n for interface, intf_dict in self.info['interfaces'].items():\n intf_dict['system_priority'] = system_priority\n \n\n # ----------- members ------------\n mem_src = intf_src + '[port][(?P<mem>.*)]'\n mem_dst = intf_dst + '[members][(?P<mem>.*)]'\n\n for key in ['interface', 'synchronization', 'aggregatable', 'collecting', 'distributing', 'bundle_id']:\n self.add_leaf(cmd=ShowLacp,\n src=mem_src + '[{}]'.format(key),\n dest=mem_dst + '[{}]'.format(key))\n\n # bundled\n self.add_leaf(cmd=ShowBundle,\n src=mem_src + '[state]',\n dest=mem_dst + '[bundled]',\n action=self.getBundled)\n\n self.add_leaf(cmd=ShowLacp,\n src=mem_src + '[system_id]',\n dest=mem_dst + '[system_id]',\n action=self.formatMAC)\n\n self.add_leaf(cmd=ShowLacp,\n src=mem_src + '[key]',\n dest=mem_dst + '[oper_key]',\n action=self.toInt)\n\n self.add_leaf(cmd=ShowLacp,\n src=mem_src + '[port_id]',\n dest=mem_dst + '[port_num]',\n action=self.getPortNum)\n\n self.add_leaf(cmd=ShowLacp,\n src=mem_src + '[port_id]',\n dest=mem_dst + '[lacp_port_priority]',\n action=self.getPortPriority)\n\n # ----------- partner ------------\n partner_src = mem_src + '[partner]'\n self.add_leaf(cmd=ShowLacp,\n src=partner_src + '[system_id]',\n dest=mem_dst + '[partner_id]',\n action=self.formatMAC)\n\n self.add_leaf(cmd=ShowLacp,\n src=partner_src + '[key]',\n dest=mem_dst + '[partner_key]',\n action=self.toInt)\n\n self.add_leaf(cmd=ShowLacp,\n src=partner_src + '[port_id]',\n dest=mem_dst + '[partner_port_num]',\n action=self.getPortNum) \n\n # make to write in cache\n self.make(final_call=True)\n \n", "id": "10473196", "language": "Python", "matching_score": 4.309900760650635, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/lag/iosxr/lag.py" }, { "content": "'''LAG Genie Ops Object Outputs for IOSXR.'''\n\n\nclass LagOutput(object):\n\n ShowLacpSystemId = {\n \"system_priority\": 100,\n \"system_id_mac\": \"00-1b-0c-10-5a-26\"\n }\n\n ShowBundle = {\n \"interfaces\": {\n \"Bundle-Ether1\": {\n \"name\": \"Bundle-Ether1\",\n \"bundle_id\": 1,\n \"oper_status\": \"up\",\n \"local_links\": {\n \"active\": 2,\n \"standby\": 0,\n \"configured\": 2\n },\n \"local_bandwidth_kbps\": {\n \"effective\": 2000000,\n \"available\": 2000000\n },\n \"mac_address\": \"001b.0c10.5a25\",\n \"mac_address_source\": \"Chassis pool\",\n \"inter_chassis_link\": \"No\",\n \"min_active_link\": 1,\n \"min_active_bw_kbps\": 1,\n \"max_active_link\": 8,\n \"wait_while_timer_ms\": 2000,\n \"load_balance\": {\n \"link_order_signaling\": \"Not configured\",\n \"hash_type\": \"Default\",\n \"locality_threshold\": \"None\"\n },\n \"lacp\": {\n \"lacp\": \"Operational\",\n \"flap_suppression_timer\": \"Off\",\n \"cisco_extensions\": \"Disabled\",\n \"non_revertive\": \"Disabled\"\n },\n \"mlacp\": {\n \"mlacp\": \"Not configured\"\n },\n \"ipv4_bfd\": {\n \"ipv4_bfd\": \"Not configured\"\n },\n \"ipv6_bfd\": {\n \"ipv6_bfd\": \"Not configured\"\n },\n \"port\": {\n \"GigabitEthernet0/0/0/0\": {\n \"interface\": \"GigabitEthernet0/0/0/0\",\n \"device\": \"Local\",\n \"state\": \"Active\",\n \"port_id\": \"0x000a, 0x0001\",\n \"bw_kbps\": 1000000,\n \"link_state\": \"Active\"\n },\n \"GigabitEthernet0/0/0/1\": {\n \"interface\": \"GigabitEthernet0/0/0/1\",\n \"device\": \"Local\",\n \"state\": \"Active\",\n \"port_id\": \"0x8000, 0x0002\",\n \"bw_kbps\": 
1000000,\n \"link_state\": \"Active\"\n }\n }\n },\n \"Bundle-Ether2\": {\n \"name\": \"Bundle-Ether2\",\n \"bundle_id\": 2,\n \"oper_status\": \"up\",\n \"local_links\": {\n \"active\": 2,\n \"standby\": 1,\n \"configured\": 3\n },\n \"local_bandwidth_kbps\": {\n \"effective\": 2000000,\n \"available\": 2000000\n },\n \"mac_address\": \"001b.0c10.5a24\",\n \"mac_address_source\": \"Chassis pool\",\n \"inter_chassis_link\": \"No\",\n \"min_active_link\": 2,\n \"min_active_bw_kbps\": 1,\n \"max_active_link\": 2,\n \"wait_while_timer_ms\": 2000,\n \"load_balance\": {\n \"link_order_signaling\": \"Not configured\",\n \"hash_type\": \"Default\",\n \"locality_threshold\": \"None\"\n },\n \"lacp\": {\n \"lacp\": \"Operational\",\n \"flap_suppression_timer\": \"Off\",\n \"cisco_extensions\": \"Disabled\",\n \"non_revertive\": \"Disabled\"\n },\n \"mlacp\": {\n \"mlacp\": \"Not configured\"\n },\n \"ipv4_bfd\": {\n \"ipv4_bfd\": \"Not configured\"\n },\n \"ipv6_bfd\": {\n \"ipv6_bfd\": \"Not configured\"\n },\n \"port\": {\n \"GigabitEthernet0/0/0/2\": {\n \"interface\": \"GigabitEthernet0/0/0/2\",\n \"device\": \"Local\",\n \"state\": \"Standby\",\n \"port_id\": \"0x8000, 0x0005\",\n \"bw_kbps\": 1000000,\n \"link_state\": \"Standby due to maximum-active links configuration\"\n },\n \"GigabitEthernet0/0/0/3\": {\n \"interface\": \"GigabitEthernet0/0/0/3\",\n \"device\": \"Local\",\n \"state\": \"Active\",\n \"port_id\": \"0x8000, 0x0004\",\n \"bw_kbps\": 1000000,\n \"link_state\": \"Active\"\n },\n \"GigabitEthernet0/0/0/4\": {\n \"interface\": \"GigabitEthernet0/0/0/4\",\n \"device\": \"Local\",\n \"state\": \"Active\",\n \"port_id\": \"0x8000, 0x0003\",\n \"bw_kbps\": 1000000,\n \"link_state\": \"Active\"\n }\n }\n }\n }\n }\n\n ShowLacp = {\n \"interfaces\": {\n \"Bundle-Ether1\": {\n \"name\": \"Bundle-Ether1\",\n \"bundle_id\": 1,\n \"lacp_mode\": \"active\",\n \"port\": {\n \"GigabitEthernet0/0/0/0\": {\n \"interface\": \"GigabitEthernet0/0/0/0\",\n \"bundle_id\": 1,\n \"rate\": 30,\n \"state\": \"ascdA---\",\n \"port_id\": \"0x000a,0x0001\",\n \"key\": \"0x0001\",\n \"system_id\": \"0x0064,00-1b-0c-10-5a-26\",\n \"aggregatable\": True,\n \"synchronization\": \"in_sync\",\n \"collecting\": True,\n \"distributing\": True,\n \"partner\": {\n \"rate\": 30,\n \"state\": \"ascdA---\",\n \"port_id\": \"0x000a,0x0001\",\n \"key\": \"0x0001\",\n \"system_id\": \"0x8000,00-0c-86-5e-68-23\",\n \"aggregatable\": True,\n \"synchronization\": \"in_sync\",\n \"collecting\": True,\n \"distributing\": True\n },\n \"receive\": \"Current\",\n \"period\": \"Slow\",\n \"selection\": \"Selected\",\n \"mux\": \"Distrib\",\n \"a_churn\": \"None\",\n \"p_churn\": \"None\"\n },\n \"GigabitEthernet0/0/0/1\": {\n \"interface\": \"GigabitEthernet0/0/0/1\",\n \"bundle_id\": 1,\n \"rate\": 30,\n \"state\": \"ascdA---\",\n \"port_id\": \"0x8000,0x0002\",\n \"key\": \"0x0001\",\n \"system_id\": \"0x0064,00-1b-0c-10-5a-26\",\n \"aggregatable\": True,\n \"synchronization\": \"in_sync\",\n \"collecting\": True,\n \"distributing\": True,\n \"partner\": {\n \"rate\": 30,\n \"state\": \"ascdA---\",\n \"port_id\": \"0x8000,0x0005\",\n \"key\": \"0x0001\",\n \"system_id\": \"0x8000,00-0c-86-5e-68-23\",\n \"aggregatable\": True,\n \"synchronization\": \"in_sync\",\n \"collecting\": True,\n \"distributing\": True\n },\n \"receive\": \"Current\",\n \"period\": \"Slow\",\n \"selection\": \"Selected\",\n \"mux\": \"Distrib\",\n \"a_churn\": \"None\",\n \"p_churn\": \"None\"\n }\n }\n },\n \"Bundle-Ether2\": {\n \"name\": 
\"Bundle-Ether2\",\n \"bundle_id\": 2,\n \"lacp_mode\": \"active\",\n \"port\": {\n \"GigabitEthernet0/0/0/2\": {\n \"interface\": \"GigabitEthernet0/0/0/2\",\n \"bundle_id\": 2,\n \"rate\": 30,\n \"state\": \"a---A---\",\n \"port_id\": \"0x8000,0x0005\",\n \"key\": \"0x0002\",\n \"system_id\": \"0x0064,00-1b-0c-10-5a-26\",\n \"aggregatable\": True,\n \"synchronization\": \"out_sync\",\n \"collecting\": False,\n \"distributing\": False,\n \"partner\": {\n \"rate\": 30,\n \"state\": \"as--A---\",\n \"port_id\": \"0x8000,0x0004\",\n \"key\": \"0x0002\",\n \"system_id\": \"0x8000,00-0c-86-5e-68-23\",\n \"aggregatable\": True,\n \"synchronization\": \"in_sync\",\n \"collecting\": False,\n \"distributing\": False\n },\n \"receive\": \"Current\",\n \"period\": \"Slow\",\n \"selection\": \"Standby\",\n \"mux\": \"Waiting\",\n \"a_churn\": \"Churn\",\n \"p_churn\": \"None\"\n },\n \"GigabitEthernet0/0/0/3\": {\n \"interface\": \"GigabitEthernet0/0/0/3\",\n \"bundle_id\": 2,\n \"rate\": 30,\n \"state\": \"ascdA---\",\n \"port_id\": \"0x8000,0x0004\",\n \"key\": \"0x0002\",\n \"system_id\": \"0x0064,00-1b-0c-10-5a-26\",\n \"aggregatable\": True,\n \"synchronization\": \"in_sync\",\n \"collecting\": True,\n \"distributing\": True,\n \"partner\": {\n \"rate\": 30,\n \"state\": \"ascdA---\",\n \"port_id\": \"0x8000,0x0003\",\n \"key\": \"0x0002\",\n \"system_id\": \"0x8000,00-0c-86-5e-68-23\",\n \"aggregatable\": True,\n \"synchronization\": \"in_sync\",\n \"collecting\": True,\n \"distributing\": True\n },\n \"receive\": \"Current\",\n \"period\": \"Slow\",\n \"selection\": \"Selected\",\n \"mux\": \"Distrib\",\n \"a_churn\": \"None\",\n \"p_churn\": \"None\"\n },\n \"GigabitEthernet0/0/0/4\": {\n \"interface\": \"GigabitEthernet0/0/0/4\",\n \"bundle_id\": 2,\n \"rate\": 30,\n \"state\": \"ascdA---\",\n \"port_id\": \"0x8000,0x0003\",\n \"key\": \"0x0002\",\n \"system_id\": \"0x0064,00-1b-0c-10-5a-26\",\n \"aggregatable\": True,\n \"synchronization\": \"in_sync\",\n \"collecting\": True,\n \"distributing\": True,\n \"partner\": {\n \"rate\": 30,\n \"state\": \"ascdA---\",\n \"port_id\": \"0x8000,0x0002\",\n \"key\": \"0x0002\",\n \"system_id\": \"0x8000,00-0c-86-5e-68-23\",\n \"aggregatable\": True,\n \"synchronization\": \"in_sync\",\n \"collecting\": True,\n \"distributing\": True\n },\n \"receive\": \"Current\",\n \"period\": \"Slow\",\n \"selection\": \"Selected\",\n \"mux\": \"Distrib\",\n \"a_churn\": \"None\",\n \"p_churn\": \"None\"\n }\n }\n }\n }\n }\n\n LagOpsOutput = {\n \"system_priority\": 100,\n \"interfaces\": {\n \"Bundle-Ether2\": {\n \"name\": \"Bundle-Ether2\",\n \"bundle_id\": 2,\n \"oper_status\": \"up\",\n \"system_id_mac\": \"001b.0c10.5a24\",\n \"system_priority\": 100,\n \"lacp_max_bundle\": 2,\n \"lacp_min_bundle\": 2,\n \"lacp_mode\": \"active\",\n \"protocol\": \"lacp\",\n \"members\": {\n \"GigabitEthernet0/0/0/4\": {\n \"interface\": \"GigabitEthernet0/0/0/4\",\n \"synchronization\": \"in_sync\",\n \"aggregatable\": True,\n \"collecting\": True,\n \"distributing\": True,\n \"bundle_id\": 2,\n \"bundled\": True,\n \"system_id\": \"001b.0c10.5a26\",\n \"oper_key\": 2,\n \"port_num\": 3,\n \"lacp_port_priority\": 32768,\n \"partner_id\": \"000c.865e.6823\",\n \"partner_key\": 2,\n \"partner_port_num\": 2\n },\n \"GigabitEthernet0/0/0/3\": {\n \"interface\": \"GigabitEthernet0/0/0/3\",\n \"synchronization\": \"in_sync\",\n \"aggregatable\": True,\n \"collecting\": True,\n \"distributing\": True,\n \"bundle_id\": 2,\n \"bundled\": True,\n \"system_id\": \"001b.0c10.5a26\",\n 
\"oper_key\": 2,\n \"port_num\": 4,\n \"lacp_port_priority\": 32768,\n \"partner_id\": \"000c.865e.6823\",\n \"partner_key\": 2,\n \"partner_port_num\": 3\n },\n \"GigabitEthernet0/0/0/2\": {\n \"interface\": \"GigabitEthernet0/0/0/2\",\n \"synchronization\": \"out_sync\",\n \"aggregatable\": True,\n \"collecting\": False,\n \"distributing\": False,\n \"bundle_id\": 2,\n \"bundled\": False,\n \"system_id\": \"001b.0c10.5a26\",\n \"oper_key\": 2,\n \"port_num\": 5,\n \"lacp_port_priority\": 32768,\n \"partner_id\": \"000c.865e.6823\",\n \"partner_key\": 2,\n \"partner_port_num\": 4\n }\n }\n },\n \"Bundle-Ether1\": {\n \"name\": \"Bundle-Ether1\",\n \"bundle_id\": 1,\n \"oper_status\": \"up\",\n \"system_id_mac\": \"001b.0c10.5a25\",\n \"system_priority\": 100,\n \"lacp_max_bundle\": 8,\n \"lacp_min_bundle\": 1,\n \"lacp_mode\": \"active\",\n \"protocol\": \"lacp\",\n \"members\": {\n \"GigabitEthernet0/0/0/1\": {\n \"interface\": \"GigabitEthernet0/0/0/1\",\n \"synchronization\": \"in_sync\",\n \"aggregatable\": True,\n \"collecting\": True,\n \"distributing\": True,\n \"bundle_id\": 1,\n \"bundled\": True,\n \"system_id\": \"001b.0c10.5a26\",\n \"oper_key\": 1,\n \"port_num\": 2,\n \"lacp_port_priority\": 32768,\n \"partner_id\": \"000c.865e.6823\",\n \"partner_key\": 1,\n \"partner_port_num\": 5\n },\n \"GigabitEthernet0/0/0/0\": {\n \"interface\": \"GigabitEthernet0/0/0/0\",\n \"synchronization\": \"in_sync\",\n \"aggregatable\": True,\n \"collecting\": True,\n \"distributing\": True,\n \"bundle_id\": 1,\n \"bundled\": True,\n \"system_id\": \"001b.0c10.5a26\",\n \"oper_key\": 1,\n \"port_num\": 1,\n \"lacp_port_priority\": 10,\n \"partner_id\": \"000c.865e.6823\",\n \"partner_key\": 1,\n \"partner_port_num\": 1\n }\n }\n }\n }\n }\n\n", "id": "8920544", "language": "Python", "matching_score": 2.3940958976745605, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/lag/iosxr/tests/lag_output.py" }, { "content": "'''Lag Genie Ops Object Outputs for IOSXE.'''\n\n\nclass LagOutput(object):\n\n ShowLacpSysId = {\n 'system_id_mac': '001e.49af.8c00',\n 'system_priority': 32768,\n }\n\n ShowLacpCounters = {\n 'interfaces': {\n 'Port-channel1': {\n 'name': 'Port-channel1',\n 'protocol': 'lacp',\n 'members': {\n 'GigabitEthernet2': {\n 'interface': 'GigabitEthernet2',\n 'counters': {\n 'lacp_in_pkts': 22,\n 'lacp_out_pkts': 27,\n 'lacp_errors': 0,\n 'marker_in_pkts': 0,\n 'marker_out_pkts': 0,\n 'marker_response_in_pkts': 0,\n 'marker_response_out_pkts': 0,\n },\n },\n 'GigabitEthernet3': {\n 'interface': 'GigabitEthernet3',\n 'counters': {\n 'lacp_in_pkts': 21,\n 'lacp_out_pkts': 24,\n 'lacp_errors': 0,\n 'marker_in_pkts': 0,\n 'marker_out_pkts': 0,\n 'marker_response_in_pkts': 0,\n 'marker_response_out_pkts': 0,\n },\n },\n }\n },\n 'Port-channel2': {\n 'name': 'Port-channel2',\n 'protocol': 'lacp',\n 'members': {\n 'GigabitEthernet4': {\n 'interface': 'GigabitEthernet4',\n 'counters': {\n 'lacp_in_pkts': 31,\n 'lacp_out_pkts': 24,\n 'lacp_errors': 0,\n 'marker_in_pkts': 0,\n 'marker_out_pkts': 0,\n 'marker_response_in_pkts': 0,\n 'marker_response_out_pkts': 0,\n },\n },\n 'GigabitEthernet5': {\n 'interface': 'GigabitEthernet5',\n 'counters': {\n 'lacp_in_pkts': 10,\n 'lacp_out_pkts': 14,\n 'lacp_errors': 0,\n 'marker_in_pkts': 0,\n 'marker_out_pkts': 0,\n 'marker_response_in_pkts': 0,\n 'marker_response_out_pkts': 0,\n },\n },\n 'GigabitEthernet6': {\n 'interface': 'GigabitEthernet6',\n 'counters': {\n 'lacp_in_pkts': 11,\n 'lacp_out_pkts': 13,\n 'lacp_errors': 0,\n 
'marker_in_pkts': 0,\n 'marker_out_pkts': 0,\n 'marker_response_in_pkts': 0,\n 'marker_response_out_pkts': 0,\n },\n },\n },\n },\n },\n }\n\n ShowEtherchannelSummary = {\n 'number_of_lag_in_use': 2,\n 'number_of_aggregators': 2,\n 'interfaces': {\n 'Port-channel1': {\n 'name': 'Port-channel1',\n 'bundle_id': 1,\n 'protocol': 'lacp',\n 'flags': 'RU',\n 'oper_status': 'up',\n 'members': {\n 'GigabitEthernet2': {\n 'interface': 'GigabitEthernet2',\n 'flags': 'bndl',\n 'bundled': True,\n 'port_channel': {\n \"port_channel_member\": True,\n \"port_channel_int\": \"Port-channel1\"\n },\n },\n 'GigabitEthernet3': {\n 'interface': 'GigabitEthernet3',\n 'flags': 'bndl',\n 'bundled': True,\n 'port_channel': {\n \"port_channel_member\": True,\n \"port_channel_int\": \"Port-channel1\"\n },\n },\n },\n 'port_channel': {\n 'port_channel_member': True,\n 'port_channel_member_intfs': ['GigabitEthernet2', 'GigabitEthernet3'],\n }\n },\n 'Port-channel2': {\n 'name': 'Port-channel2',\n 'bundle_id': 2,\n 'protocol': 'lacp',\n 'flags': 'RU',\n 'oper_status': 'up',\n 'members': {\n 'GigabitEthernet4': {\n 'interface': 'GigabitEthernet4',\n 'flags': 'bndl',\n 'bundled': True,\n 'port_channel': {\n \"port_channel_member\": True,\n \"port_channel_int\": \"Port-channel2\"\n },\n },\n 'GigabitEthernet5': {\n 'interface': 'GigabitEthernet5',\n 'flags': 'hot-sby',\n 'bundled': False,\n 'port_channel': {\n \"port_channel_member\": True,\n \"port_channel_int\": \"Port-channel2\"\n },\n },\n 'GigabitEthernet6': {\n 'interface': 'GigabitEthernet6',\n 'flags': 'bndl',\n 'bundled': True,\n 'port_channel': {\n \"port_channel_member\": True,\n \"port_channel_int\": \"Port-channel2\"\n },\n },\n },\n 'port_channel': {\n 'port_channel_member': True,\n 'port_channel_member_intfs': ['GigabitEthernet4', 'GigabitEthernet5', 'GigabitEthernet6'],\n }\n },\n },\n }\n\n ShowLacpInternal = {\n 'interfaces': {\n 'Port-channel1': {\n 'name': 'Port-channel1',\n 'protocol': 'lacp',\n 'members': {\n 'GigabitEthernet2': {\n 'interface': 'GigabitEthernet2',\n 'oper_key': 1,\n 'admin_key': 1,\n 'port_num': 1,\n 'lacp_port_priority': 32768,\n 'flags': 'SA',\n 'activity': 'auto',\n 'state': 'bndl',\n 'bundled': True,\n 'port_state': 61,\n },\n 'GigabitEthernet3': {\n 'interface': 'GigabitEthernet3',\n 'oper_key': 1,\n 'admin_key': 1,\n 'port_num': 1,\n 'lacp_port_priority': 32768,\n 'flags': 'SA',\n 'activity': 'auto',\n 'state': 'bndl',\n 'bundled': True,\n 'port_state': 61,\n },\n },\n },\n 'Port-channel2': {\n 'name': 'Port-channel2',\n 'protocol': 'lacp',\n 'members': {\n 'GigabitEthernet4': {\n 'interface': 'GigabitEthernet4',\n 'oper_key': 2,\n 'admin_key': 2,\n 'port_num': 1,\n 'lacp_port_priority': 32768,\n 'flags': 'SA',\n 'state': 'bndl',\n 'activity': 'auto',\n 'bundled': True,\n 'port_state': 61,\n },\n 'GigabitEthernet5': {\n 'interface': 'GigabitEthernet5',\n 'oper_key': 2,\n 'admin_key': 2,\n 'port_num': 1,\n 'lacp_port_priority': 32768,\n 'flags': 'SA',\n 'activity': 'auto',\n 'state': 'bndl',\n 'bundled': True,\n 'port_state': 61,\n },\n 'GigabitEthernet6': {\n 'interface': 'GigabitEthernet6',\n 'oper_key': 2,\n 'admin_key': 2,\n 'port_num': 1,\n 'lacp_port_priority': 32768,\n 'flags': 'SA',\n 'activity': 'auto',\n 'state': 'bndl',\n 'bundled': True,\n 'port_state': 61,\n },\n },\n },\n }\n }\n\n ShowLacpNeighbor = {\n 'interfaces': {\n 'Port-channel1': {\n 'name': 'Port-channel1',\n 'protocol': 'lacp',\n 'members': {\n 'GigabitEthernet2': {\n 'interface': 'GigabitEthernet2',\n 'oper_key': 1,\n 'admin_key': 0,\n 'port_num': 1,\n 
'lacp_port_priority': 32768,\n 'flags': 'SA',\n 'activity' : 'active',\n 'partner_id': '001e.49e6.bc00',\n 'age': 25,\n 'port_state': 61\n },\n 'GigabitEthernet3': {\n 'interface': 'GigabitEthernet3',\n 'oper_key': 1,\n 'admin_key': 0,\n 'port_num': 1,\n 'lacp_port_priority': 32768,\n 'flags': 'SA',\n 'activity': 'active',\n 'port_state': 61,\n 'partner_id': '001e.49e6.bc00',\n 'age': 19,\n },\n },\n },\n 'Port-channel2': {\n 'name': 'Port-channel2',\n 'protocol': 'lacp',\n 'members': {\n 'GigabitEthernet4': {\n 'interface': 'GigabitEthernet4',\n 'oper_key': 2,\n 'admin_key': 0,\n 'port_num': 1,\n 'lacp_port_priority': 32768,\n 'flags': 'SP',\n 'port_state': 60,\n 'activity': 'passive',\n 'partner_id': '001e.49e6.bc00',\n 'age': 15,\n },\n 'GigabitEthernet5': {\n 'interface': 'GigabitEthernet5',\n 'oper_key': 2,\n 'admin_key': 0,\n 'port_num': 1,\n 'lacp_port_priority': 32768,\n 'flags': 'SP',\n 'port_state': 60,\n 'activity': 'passive',\n 'partner_id': '001e.49e6.bc00',\n 'age': 1\n },\n 'GigabitEthernet6': {\n 'interface': 'GigabitEthernet6',\n 'oper_key': 2,\n 'admin_key': 0,\n 'port_num': 1,\n 'lacp_port_priority': 32768,\n 'flags': 'SP',\n 'port_state': 60,\n 'activity': 'passive',\n 'partner_id': '001e.49e6.bc00',\n 'age': 0\n },\n },\n },\n }\n }\n\n ShowPagpCounters = {\n 'interfaces': {\n 'Port-channel1': {\n 'name': 'Port-channel1',\n 'protocol': 'pagp',\n 'members': {\n 'GigabitEthernet0/1': {\n 'interface': 'GigabitEthernet0/1',\n 'counters': {\n 'information_in_pkts': 52,\n 'information_out_pkts': 60,\n 'pagp_errors': 0,\n 'flush_in_pkts': 0,\n 'flush_out_pkts': 0,\n },\n },\n 'GigabitEthernet0/2': {\n 'interface': 'GigabitEthernet0/2',\n 'counters': {\n 'information_in_pkts': 52,\n 'information_out_pkts': 59,\n 'pagp_errors': 0,\n 'flush_in_pkts': 0,\n 'flush_out_pkts': 0,\n },\n },\n },\n },\n 'Port-channel2': {\n 'name': 'Port-channel2',\n 'protocol': 'pagp',\n 'members': {\n 'GigabitEthernet0/3': {\n 'interface': 'GigabitEthernet0/3',\n 'counters': {\n 'information_in_pkts': 11,\n 'information_out_pkts': 21,\n 'pagp_errors': 0,\n 'flush_in_pkts': 0,\n 'flush_out_pkts': 0,\n },\n },\n 'GigabitEthernet1/0': {\n 'interface': 'GigabitEthernet1/0',\n 'counters': {\n 'information_in_pkts': 11,\n 'information_out_pkts': 19,\n 'pagp_errors': 0,\n 'flush_in_pkts': 0,\n 'flush_out_pkts': 0,\n },\n },\n 'GigabitEthernet1/1': {\n 'interface': 'GigabitEthernet1/1',\n 'counters': {\n 'information_in_pkts': 10,\n 'information_out_pkts': 19,\n 'pagp_errors': 0,\n 'flush_in_pkts': 0,\n 'flush_out_pkts': 0,\n },\n },\n\n },\n },\n }\n }\n\n ShowPagpNeighbor = {\n \"interfaces\": {\n \"Port-channel14\": {\n \"members\": {\n \"GigabitEthernet1/0/7\": {\n \"age\": 22,\n \"flags\": \"SC\",\n \"group_cap\": \"E0001\",\n \"interface\": \"GigabitEthernet1/0/7\",\n \"partner_id\": \"ecbd.1d09.5680\",\n \"partner_name\": \"R4\",\n \"partner_port\": \"GigabitEthernet1/0/7\"\n },\n \"GigabitEthernet1/0/8\": {\n \"age\": 16,\n \"flags\": \"SC\",\n \"group_cap\": \"E0001\",\n \"interface\": \"GigabitEthernet1/0/8\",\n \"partner_id\": \"ecbd.1d09.5680\",\n \"partner_name\": \"R4\",\n \"partner_port\": \"GigabitEthernet1/0/8\"\n },\n \"GigabitEthernet1/0/9\": {\n \"age\": 18,\n \"flags\": \"SC\",\n \"group_cap\": \"E0001\",\n \"interface\": \"GigabitEthernet1/0/9\",\n \"partner_id\": \"ecbd.1d09.5680\",\n \"partner_name\": \"R4\",\n \"partner_port\": \"GigabitEthernet1/0/9\"\n }\n },\n \"name\": \"Port-channel14\",\n \"protocol\": \"pagp\"\n }\n }\n }\n\n ShowPagpInternal = {\n 'interfaces': {\n 
'Port-channel1': {\n 'name': 'Port-channel1',\n 'protocol': 'pagp',\n 'members': {\n 'GigabitEthernet0/1': {\n 'interface': 'GigabitEthernet0/1',\n 'group_ifindex': 8,\n 'partner_count': 1,\n 'hello_interval': 30,\n 'timers': 'H',\n 'pagp_port_priority': 128,\n 'flags': 'SC',\n 'state': 'U6/S7',\n 'learn_method': 'any',\n },\n 'GigabitEthernet0/2': {\n 'interface': 'GigabitEthernet0/2',\n 'group_ifindex': 8,\n 'partner_count': 1,\n 'hello_interval': 30,\n 'timers': 'H',\n 'pagp_port_priority': 128,\n 'flags': 'SC',\n 'state': 'U6/S7',\n 'learn_method': 'any',\n },\n },\n },\n 'Port-channel2': {\n 'name': 'Port-channel2',\n 'protocol': 'pagp',\n 'members': {\n 'GigabitEthernet0/3': {\n 'interface': 'GigabitEthernet0/3',\n 'group_ifindex': 11,\n 'partner_count': 1,\n 'hello_interval': 30,\n 'timers': 'H',\n 'pagp_port_priority': 128,\n 'flags': 'SC',\n 'state': 'U6/S7',\n 'learn_method': 'any',\n },\n 'GigabitEthernet1/0': {\n 'interface': 'GigabitEthernet1/0',\n 'group_ifindex': 11,\n 'partner_count': 1,\n 'hello_interval': 30,\n 'timers': 'H',\n 'pagp_port_priority': 128,\n 'flags': 'SC',\n 'state': 'U6/S7',\n 'learn_method': 'any',\n },\n 'GigabitEthernet1/1': {\n 'interface': 'GigabitEthernet1/1',\n 'group_ifindex': 11,\n 'partner_count': 1,\n 'hello_interval': 30,\n 'timers': 'H',\n 'pagp_port_priority': 128,\n 'flags': 'SC',\n 'state': 'U6/S7',\n 'learn_method': 'any',\n },\n },\n },\n }\n }\n\n Lag_info = {\n \"system_priority\": 32768,\n \"interfaces\": {\n \"Port-channel1\": {\n \"name\": \"Port-channel1\",\n \"oper_status\": \"up\",\n \"bundle_id\": 1,\n \"protocol\": \"lacp\",\n \"members\": {\n \"GigabitEthernet2\": {\n \"lacp_port_priority\": 32768,\n \"counters\": {\n \"lacp_in_pkts\": 22,\n \"lacp_errors\": 0,\n \"lacp_out_pkts\": 27\n },\n \"interface\": \"GigabitEthernet2\",\n \"bundled\": True,\n \"port_num\": 1,\n \"activity\": \"active\",\n \"partner_id\": \"001e.49e6.bc00\",\n \"bundle_id\": 1,\n \"age\": 25,\n \"oper_key\": 1\n },\n \"GigabitEthernet3\": {\n \"lacp_port_priority\": 32768,\n \"counters\": {\n \"lacp_in_pkts\": 21,\n \"lacp_errors\": 0,\n \"lacp_out_pkts\": 24\n },\n \"interface\": \"GigabitEthernet3\",\n \"bundled\": True,\n \"port_num\": 1,\n \"activity\": \"active\",\n \"partner_id\": \"001e.49e6.bc00\",\n \"bundle_id\": 1,\n \"age\": 19,\n \"oper_key\": 1\n },\n \"GigabitEthernet0/2\": {\n \"bundle_id\": 1,\n \"pagp_port_priority\": 128\n },\n \"GigabitEthernet0/1\": {\n \"bundle_id\": 1,\n \"pagp_port_priority\": 128\n }\n }\n },\n \"Port-channel2\": {\n \"name\": \"Port-channel2\",\n \"oper_status\": \"up\",\n \"bundle_id\": 2,\n \"protocol\": \"lacp\",\n \"members\": {\n \"GigabitEthernet5\": {\n \"lacp_port_priority\": 32768,\n \"counters\": {\n \"lacp_in_pkts\": 10,\n \"lacp_errors\": 0,\n \"lacp_out_pkts\": 14\n },\n \"interface\": \"GigabitEthernet5\",\n \"bundled\": False,\n \"port_num\": 1,\n \"activity\": \"passive\",\n \"partner_id\": \"001e.49e6.bc00\",\n \"bundle_id\": 2,\n \"age\": 1,\n \"oper_key\": 2\n },\n \"GigabitEthernet4\": {\n \"lacp_port_priority\": 32768,\n \"counters\": {\n \"lacp_in_pkts\": 31,\n \"lacp_errors\": 0,\n \"lacp_out_pkts\": 24\n },\n \"interface\": \"GigabitEthernet4\",\n \"bundled\": True,\n \"port_num\": 1,\n \"activity\": \"passive\",\n \"partner_id\": \"001e.49e6.bc00\",\n \"bundle_id\": 2,\n \"age\": 15,\n \"oper_key\": 2\n },\n \"GigabitEthernet1/1\": {\n \"bundle_id\": 2,\n \"pagp_port_priority\": 128\n },\n \"GigabitEthernet6\": {\n \"lacp_port_priority\": 32768,\n \"counters\": {\n \"lacp_in_pkts\": 
11,\n \"lacp_errors\": 0,\n \"lacp_out_pkts\": 13\n },\n \"interface\": \"GigabitEthernet6\",\n \"bundled\": True,\n \"port_num\": 1,\n \"activity\": \"passive\",\n \"partner_id\": \"001e.49e6.bc00\",\n \"bundle_id\": 2,\n \"age\": 0,\n \"oper_key\": 2\n },\n \"GigabitEthernet0/3\": {\n \"bundle_id\": 2,\n \"pagp_port_priority\": 128\n },\n \"GigabitEthernet1/0\": {\n \"bundle_id\": 2,\n \"pagp_port_priority\": 128\n }\n }\n },\n \"Port-channel14\": {\n \"members\": {\n \"GigabitEthernet1/0/7\": {\n \"partner_id\": \"ecbd.1d09.5680\",\n \"age\": 22\n },\n \"GigabitEthernet1/0/9\": {\n \"partner_id\": \"ecbd.1d09.5680\",\n \"age\": 18\n },\n \"GigabitEthernet1/0/8\": {\n \"partner_id\": \"ecbd.1d09.5680\",\n \"age\": 16\n }\n }\n }\n }\n }\n", "id": "10036238", "language": "Python", "matching_score": 2.7519595623016357, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/lag/iosxe/tests/lag_output.py" }, { "content": "# Genie\r\nfrom genie.ops.base import Base\r\n\r\n\r\nclass Lag(Base):\r\n exclude = ['age',\r\n 'lacp_in_pkts',\r\n 'lacp_out_pkts']", "id": "218726", "language": "Python", "matching_score": 1.2028131484985352, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/lag/lag.py" }, { "content": "from genie.ops.base import Base\n\nclass Fdb(Base):\n exclude = []", "id": "12481308", "language": "Python", "matching_score": 1.5090572834014893, "max_stars_count": 0, "path": "pkgs/ops-pkg/src/genie/libs/ops/fdb/fdb.py" }, { "content": "from genie.ops.base import Base\n\nclass Lisp(Base):\n exclude = []", "id": "2598308", "language": "Python", "matching_score": 0.9351776242256165, "max_stars_count": 0, "path": "pkgs/ops-pkg/src/genie/libs/ops/lisp/lisp.py" }, { "content": "# Genie\r\nfrom genie.ops.base import Base\r\n\r\n\r\nclass Routing(Base):\r\n exclude = ['updated']", "id": "3282274", "language": "Python", "matching_score": 1.8072595596313477, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/routing/routing.py" }, { "content": "# Genie\nfrom genie.ops.base import Base\n\nclass StaticRouting(Base):\n exclude = []", "id": "10039063", "language": "Python", "matching_score": 2.0779335498809814, "max_stars_count": 0, "path": "pkgs/ops-pkg/src/genie/libs/ops/static_routing/static_routing.py" }, { "content": "'''\nStatic_routing Genie Ops Object for IOS - CLI\n'''\nfrom ..iosxe.static_routing import StaticRouting as StaticRoutingXE\n\nclass StaticRouting(StaticRoutingXE):\n pass", "id": "2974991", "language": "Python", "matching_score": 2.4415173530578613, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/static_routing/ios/static_routing.py" }, { "content": "'''\nRouting Genie Ops Object for IOS - CLI.\n'''\nfrom ..iosxe.routing import Routing as RoutingXE\n\nclass Routing(RoutingXE):\n pass", "id": "59031", "language": "Python", "matching_score": 0.698357105255127, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/routing/ios/routing.py" }, { "content": "'''\nOSPF Genie Ops Object for IOS - CLI\n'''\nfrom ..iosxe.ospf import Ospf as OspfXE\n\nclass Ospf(OspfXE):\n pass", "id": "506783", "language": "Python", "matching_score": 1, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/ospf/ios/ospf.py" }, { "content": "from .ospf import *\n", "id": "12425905", "language": "Python", "matching_score": 0, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/ospf/__init__.py" }, { "content": "from .interface import *\n\n# Enable abstraction using this directory name as the abstraction token\ntry:\n from 
genie import abstract\n abstract.declare_token(__name__)\nexcept Exception as e:\n import warnings\n warnings.warn('Could not declare abstraction token: ' + str(e))\n", "id": "11902447", "language": "Python", "matching_score": 1.2561768293380737, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/interface/iosxe/__init__.py" }, { "content": "from genie import abstract\r\n\r\nabstract.declare_package(__name__)\r\n", "id": "8010797", "language": "Python", "matching_score": 0.0013241204433143139, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/__init__.py" }, { "content": "'''\nFDB Genie Ops Object for IOS - CLI.\n'''\nfrom ..iosxe.fdb import Fdb as FdbXE\n\nclass Fdb(FdbXE):\n pass", "id": "9367097", "language": "Python", "matching_score": 1, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/fdb/ios/fdb.py" }, { "content": "from .fdb import *\n", "id": "11562059", "language": "Python", "matching_score": 0, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/fdb/__init__.py" }, { "content": "'''Implementation for vlan modify triggers'''\n\nfrom genie.libs.sdk.triggers.modify.modify import TriggerModify\n", "id": "5246530", "language": "Python", "matching_score": 1.7026959657669067, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/modify/vlan/modify.py" }, { "content": "'''Implementation for vxlan modify triggers'''\n\n# import genie.libs\nfrom genie.libs.sdk.triggers.modify.modify import TriggerModify", "id": "12383507", "language": "Python", "matching_score": 1.9883930683135986, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/modify/vxlan/modify.py" }, { "content": "'''Implementation for Dot1x modify triggers'''\n\n# import genie.libs\nfrom genie.libs.sdk.triggers.modify.modify import TriggerModify\n\nclass TriggerModifyDot1xUserCredential(TriggerModify):\n pass", "id": "1591966", "language": "Python", "matching_score": 2.010864019393921, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/modify/dot1x/modify.py" }, { "content": "'''Implementation for spanning tree modify triggers'''\n\n# import genie.libs\nfrom genie.libs.sdk.triggers.modify.modify import TriggerModify\n\n\nclass TriggerModifyPvstDesgToRoot(TriggerModify):\n pass\n\n\nclass TriggerModifyRapidPvstDesgToRoot(TriggerModifyPvstDesgToRoot):\n pass", "id": "3285419", "language": "Python", "matching_score": 1.8914445638656616, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/modify/stp/modify.py" }, { "content": "'''Implementation for Igmp modify triggers'''\n\n# Genie Libs\nfrom genie.libs.sdk.triggers.modify.modify import TriggerModify\n\n\nclass TriggerModifyIgmpVersion(TriggerModify):\n\tpass\n\n", "id": "2755076", "language": "Python", "matching_score": 1.9057762622833252, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/modify/mld/modify.py" }, { "content": "'''Implementation for Pim modify triggers'''\n\n# Genie Libs\nfrom genie.libs.sdk.triggers.modify.modify import TriggerModify\n\n\nclass TriggerModifyPimNeighborFilter(TriggerModify):\n\tpass\n", "id": "8521960", "language": "Python", "matching_score": 0.47296708822250366, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/modify/pim/modify.py" }, { "content": "'''Implementation for routing addremove triggers'''\n\n# import genie.libs\nfrom genie.libs.sdk.triggers.addremove.addremove import TriggerAddRemove\n\n\nclass 
TriggerAddRemoveTrunkEtherchannelLacp(TriggerAddRemove):\n pass\n\nclass TriggerAddRemoveAccessEtherchannelPagp(TriggerAddRemoveTrunkEtherchannelLacp):\n pass\n\nclass TriggerAddRemoveL3EtherchannelPagp(TriggerAddRemoveTrunkEtherchannelLacp):\n pass", "id": "3040615", "language": "Python", "matching_score": 2.3953258991241455, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/addremove/lag/addremove.py" }, { "content": "'''Implementation for routing addremove triggers'''\n\n# import genie.libs\nfrom genie.libs.sdk.triggers.addremove.addremove import TriggerAddRemove\n\n\nclass TriggerAddRemoveEthernetMacAcl(TriggerAddRemove):\n pass\n\nclass TriggerAddRemoveEthernetIpAclPermit(TriggerAddRemove):\n pass\n\nclass TriggerAddRemoveEthernetIpAclDeny(TriggerAddRemove):\n pass\n\nclass TriggerAddRemoveVlanIpAclPermit(TriggerAddRemove):\n pass\n", "id": "8417837", "language": "Python", "matching_score": 2.2566399574279785, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/addremove/acl/addremove.py" }, { "content": "'''Implementation for routing addremove triggers'''\n\n# import genie.libs\nfrom genie.libs.sdk.triggers.addremove.addremove import TriggerAddRemove\n\n\nclass TriggerAddRemoveIpv4StaticRoutes(TriggerAddRemove):\n pass\n\n\nclass TriggerAddRemoveIpv6StaticRoutes(TriggerAddRemoveIpv4StaticRoutes):\n pass", "id": "4067122", "language": "Python", "matching_score": 2.6059796810150146, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/addremove/routing/addremove.py" }, { "content": "'''Implementation for IGMP addremove triggers'''\n\n# Genie Libs\nfrom genie.libs.sdk.triggers.addremove.addremove import TriggerAddRemove\n\n\nclass TriggerAddRemoveIgmpEnable(TriggerAddRemove):\n\tpass\n\n\nclass TriggerAddRemoveIgmpVersion(TriggerAddRemove):\n\tpass\n\n\nclass TriggerAddRemoveIgmpJoinGroup(TriggerAddRemove):\n\tpass\n\n\nclass TriggerAddRemoveIgmpStaticGroup(TriggerAddRemove):\n\tpass\n\n", "id": "8123384", "language": "Python", "matching_score": 3.0308997631073, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/addremove/igmp/addremove.py" }, { "content": "'''Implementation for MLD addremove triggers'''\n\n# Genie Libs\nfrom genie.libs.sdk.triggers.addremove.addremove import TriggerAddRemove\n\n\nclass TriggerAddRemoveMldEnable(TriggerAddRemove):\n\tpass\n\n\nclass TriggerAddRemoveMldVersion(TriggerAddRemove):\n\tpass\n\n\nclass TriggerAddRemoveMldJoinGroup(TriggerAddRemove):\n\tpass\n\n\nclass TriggerAddRemoveMldStaticGroup(TriggerAddRemove):\n\tpass\n\n", "id": "6263377", "language": "Python", "matching_score": 2.7209935188293457, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/addremove/mld/addremove.py" }, { "content": "'''Implementation for vxlan addremove triggers'''\n\n# Genie Libs\nfrom genie.libs.sdk.triggers.addremove.addremove import TriggerAddRemove\n\n", "id": "6064346", "language": "Python", "matching_score": 2.7571752071380615, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/addremove/vxlan/addremove.py" }, { "content": "'''Implementation for MSDP addremove triggers'''\n\n# Genie Libs\nfrom genie.libs.sdk.triggers.addremove.addremove import TriggerAddRemove\n\n\nclass TriggerAddRemoveMsdpPeer(TriggerAddRemove):\n\tpass\n\n\nclass TriggerAddRemoveMsdpOriginatorId(TriggerAddRemove):\n\tpass\n\n\nclass TriggerAddRemoteMsdpSaFilterIn(TriggerAddRemove):\n\tpass\n\n\nclass TriggerAddRemoveMsdpSaFilterOut(TriggerAddRemove):\n\tpass\n\nclass 
TriggerAddRemoveMsdpSaLimit(TriggerAddRemove):\n\tpass\n\n\nclass TriggerAddRemoveMsdpMeshGroup(TriggerAddRemove):\n\tpass\n\n\nclass TriggerAddRemoveMsdpKeepaliveHoldtime(TriggerAddRemove):\n\tpass\n\n\nclass TriggerAddRemoveMsdpReconnectInterval(TriggerAddRemove):\n\tpass\n\nclass TriggerAddRemoveMsdpDescription(TriggerAddRemove):\n\tpass\n", "id": "6155737", "language": "Python", "matching_score": 3.1555488109588623, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/addremove/msdp/addremove.py" }, { "content": "'''Implementation for Pim addremove triggers'''\n\n# Genie Libs\nfrom genie.libs.sdk.triggers.addremove.addremove import TriggerAddRemove\n\n\nclass TriggerAddRemovePimNeighborFilter(TriggerAddRemove):\n\tpass\n", "id": "12030271", "language": "Python", "matching_score": 0.35594460368156433, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/addremove/pim/addremove.py" },
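Most of the add/remove and modify trigger modules captured above are deliberately empty subclasses. The pattern gives every feature and OS a stable, datafile-addressable class name while the generic learn/configure/verify/restore flow is inherited from the shared base class. A minimal sketch of that pattern, using a hypothetical feature name rather than any class from the files above:

from genie.libs.sdk.triggers.addremove.addremove import TriggerAddRemove

class TriggerAddRemoveMyFeatureConfig(TriggerAddRemove):
    # The empty body is intentional: trigger datafiles reference this
    # class by its dotted name, and every step (learn, configure,
    # verify, restore) comes from TriggerAddRemove.
    pass

Triggers that need feature-specific behavior attach a Mapping instead of leaving the body empty, as the BGP triggers in the entries that follow do.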
{ "content": "'''\nIOSXE Implementation for BGP ShutNoShut triggers\n'''\n\n# import python\nfrom collections import OrderedDict\n\n# Genie Libs\nfrom genie.libs.sdk.libs.utils.mapping import Mapping\nfrom genie.libs.sdk.triggers.shutnoshut.shutnoshut import TriggerShutNoShut\n\n\n# Which key to exclude for BGP Ops comparison\nbgp_exclude = ['maker', 'bgp_session_transport', 'route_refresh',\n               'bgp_negotiated_capabilities', 'notifications', 'capability',\n               'keepalives', 'total', 'total_bytes', 'up_time', 'last_reset',\n               'bgp_negotiated_keepalive_timers', 'updates', 'opens',\n               'bgp_table_version', 'holdtime', 'keepalive_interval',\n               'distance_internal_as', 'routing_table_version',\n               'total_memory']\n\n\nclass TriggerShutNoShutBgpNeighbors(TriggerShutNoShut):\n    \"\"\"Shut and unshut the dynamically learned BGP neighbor(s).\"\"\"\n\n    __description__ = \"\"\"Shut and unshut the dynamically learned BGP neighbor(s).\n\n    trigger_datafile:\n        Mandatory:\n            timeout:\n                max_time (`int`): Maximum wait time for the trigger,\n                                in second. Default: 180\n                interval (`int`): Wait time between iteration when looping is needed,\n                                in second. Default: 15\n        Optional:\n            tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n                                 restored to the reference rate,\n                                 in second. Default: 60\n            tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n                               in second. Default: 10\n            timeout_recovery:\n                Buffer recovery timeout make sure devices are recovered at the end\n                of the trigger execution. Used when previous timeouts have been exhausted.\n\n                max_time (`int`): Maximum wait time for the last step of the trigger,\n                                in second. Default: 180\n                interval (`int`): Wait time between iteration when looping is needed,\n                                in second. Default: 15\n            static:\n                The keys below are dynamically learnt by default.\n                However, they can also be set to a custom value when provided in the trigger datafile.\n\n                instance: `str`\n                vrf: `str`\n                neighbor: `str`\n                bgp_id: `int`\n\n                (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n                      OR\n                      interface: 'Ethernet1/1/1' (Specific value)\n    steps:\n        1. Learn BGP Ops object and verify it has established state, otherwise, SKIP the trigger.\n        2. Shut the BGP neighbor(s) learned from step 1 with BGP Conf object\n        3. Verify the state of learned neighbor(s)\n           from step 2 is \"down\"\n        4. Unshut the BGP neighbor(s)\n        5. Learn BGP Ops again and verify it is the same as the Ops in step 1\n\n    \"\"\"\n\n    mapping = Mapping(\\\n                requirements={\\\n                    'ops.bgp.bgp.Bgp': {\n                        'requirements':[\\\n                            ['info', 'instance', '(?P<instance>.*)',\n                             'vrf', '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)',\n                             'session_state', '(?P<established>[E|e]stablished)'],\n                            ['info', 'instance', '(?P<instance>.*)', 'bgp_id', '(?P<bgp_id>.*)']],\n                        'all_keys':True, \n                        'kwargs':\n                            {'attributes':['info']},\n                        'exclude': bgp_exclude}},\n                config_info={\\\n                    'conf.bgp.Bgp': {\n                        'requirements':[\\\n                            ['device_attr', '{uut}', 'vrf_attr', '(?P<vrf>.*)',\n                             'neighbor_attr', '(?P<neighbor>.*)', 'nbr_shutdown', True]],\n                        'verify_conf':False,\n                        'kwargs':{'mandatory':{'bgp_id': '(?P<bgp_id>.*)'}}}},\n                verify_ops={\\\n                    'ops.bgp.bgp.Bgp': {\n                        'requirements':[\\\n                            ['info', 'instance', '(?P<instance>.*)', 'vrf',\n                             '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)',\n                             'session_state', '(?P<idle>[I|i]dle)'],\n                            ['info', 'instance', '(?P<instance>.*)', 'vrf',\n                             '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)',\n                             'shutdown', True]],\n                        'kwargs':\n                            {'attributes':['info']},\n                        'exclude': bgp_exclude}},\n                num_values={'instance':1, 'vrf':1, 'neighbor': 'all'})\n", "id": "2865812", "language": "Python", "matching_score": 6.761823654174805, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/shutnoshut/bgp/iosxe/shutnoshut.py" }, { "content": "'''Implementation for bgp addremove triggers'''\n\n# Genie Libs\nfrom genie.libs.sdk.libs.utils.mapping import Mapping\nfrom genie.libs.sdk.triggers.addremove.addremove import TriggerAddRemove\n\n# Which key to exclude for BGP Ops comparison\nbgp_exclude = ['maker', 'bgp_session_transport', 'route_refresh',\n               'bgp_negotiated_capabilities', 'notifications', 'capability',\n               'keepalives', 'total', 'total_bytes', 'up_time', 'last_reset',\n               'bgp_negotiated_keepalive_timers', 'updates', 'opens',\n               'bgp_table_version', 'holdtime', 'keepalive_interval',\n               'distance_internal_as', 'distance_extern_as', 'totals',\n               'reset_reason', 'holdtime', 'keepalive_interval']\n\nroute_map_exclude = ['maker']\n\nbgp_exclude_keepalive = ['maker', 'bgp_session_transport', 'route_refresh',\n                         'bgp_negotiated_capabilities', 'notifications', 'last_reset',\n                         'keepalives', 'total', 'total_bytes', 'up_time',\n                         'bgp_negotiated_keepalive_timers', 'updates', 'opens',\n                         'bgp_table_version', 'holdtime', 'keepalive_interval',\n                         'route_reflector_client', 'capability',\n                         'distance_internal_as', 'bgp_neighbor_counters', 'memory_usage',\n                         'total_entries', 'routing_table_version', 'total_memory',\n                         'path', 'prefixes', 'cluster_id']\n\nclass TriggerAddRemoveBgpNeighborRoutemapPrefixlist(TriggerAddRemove):\n    \"\"\"Apply the inbound dynamically learned \"prefix-list\" route-map\n    to the dynamically learned BGP neighbor(s), and remove the\n    added route-map configurations.\n    \"\"\"\n\n    __description__ = \"\"\"Apply the inbound dynamically learned \"prefix-list\" route-map\n    to the dynamically learned BGP neighbor(s), and remove the\n    added route-map configurations.\n\n    trigger_datafile:\n        Mandatory:\n            timeout: \n                max_time (`int`): Maximum wait time for the trigger,\n                                in second. Default: 180\n                interval (`int`): Wait time between iterations when looping is needed,\n                                in second. 
Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery: \n Buffer recovery timeout make sure devices are recovered at the end\n of the trigger execution. Used when previous timeouts have been exhausted.\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iterations when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n instance: `str`\n vrf: `str`\n address_family: `str`\n neighbor: `str`\n policy_definition: `str`\n statements: `str`\n bgp_id: `int`\n bgp_table_version: `int`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn BGP Ops object and store the BGP neighbor(s) if has any,\n otherwise, SKIP the trigger. Learn RoutePolicy Ops object\n to store the route-map name with prefix-list configured if has any,\n otherwise, SKIP the trigger\n 2. Save the current device configurations through \"method\" which user uses\n 3. Add the configuration of the inbound route-map (step 1) to learned\n BGP neighbor(s) from step 1 with BGP Conf object\n 4. Verify the BGP neighbor(s) from step 3 has inbound route-map (step 1) configured\n 5. Remove the route-map (step 1) configurations from the learned\n BGP neighbor(s) from step 1\n 6. Recover the device configurations to the one in step 2\n 7. 
Learn BGP Ops again and verify it is the same as the Ops in step 1\n\n \"\"\"\n mapping = Mapping(requirements={'ops.route_policy.route_policy.RoutePolicy':{\n 'requirements':[['info', '(?P<policy_definition>.*)',\n 'statements', '(?P<statements>.*)',\n 'conditions',\n 'match_prefix_list', '(?P<match_prefix_list>.*)']],\n 'exclude': route_map_exclude},\n 'ops.bgp.bgp.Bgp':{\n 'requirements':[['info', 'instance', '(?P<instance>.*)',\n 'vrf', '(?P<vrf>.*)',\n 'neighbor', '(?P<neighbor>.*)',\n 'address_family', '(?P<address_family>.*)',\n 'bgp_table_version', '(?P<bgp_table_version>.*)'],\n ['info', 'instance', '(?P<instance>.*)', 'bgp_id', '(?P<bgp_id>.*)']],\n 'all_keys': True,\n 'kwargs':{'attributes':['info']},\n 'exclude': bgp_exclude}},\n config_info={'conf.bgp.Bgp':{\n 'requirements':[['device_attr', '{uut}',\n 'vrf_attr', '(?P<vrf>.*)',\n 'neighbor_attr','(?P<neighbor>.*)',\n 'address_family_attr', '(?P<address_family>.*)',\n 'nbr_af_route_map_name_in', '(?P<policy_definition>.*)']],\n 'verify_conf':False,\n 'kwargs':{'mandatory':{'bgp_id': '(?P<bgp_id>.*)'}}}},\n verify_ops={'ops.bgp.bgp.Bgp':{\n 'requirements': [['info', 'instance', '(?P<instance>.*)',\n 'vrf', '(?P<vrf>.*)',\n 'neighbor', '(?P<neighbor>.*)',\n 'address_family', '(?P<address_family>.*)',\n 'route_map_name_in', '(?P<policy_definition>.*)']],\n 'kwargs':{'attributes':['info']},\n 'exclude': bgp_exclude}},\n num_values={'policy_definition':1, 'statements':'all', 'match_prefix_list':1, 'instance':1, 'vrf':1, 'address_family':1, 'neighbor':1, 'bgp_id':1, 'bgp_table_version':1})\n\n\nclass TriggerAddRemoveBgpNeighborRoutemapWeight(TriggerAddRemove):\n \"\"\"Apply the inbound dynamically learned \"weight\" route-map\n to the dynamically learned BGP neighbor(s), and remove the\n added route-map configurations.\n \"\"\"\n\n __description__ = \"\"\"Apply the inbound dynamically learned \"weight\" route-map\n to the dynamically learned BGP neighbor(s), and remove the\n added route-map configurations.\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iterations when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery: \n Buffer recovery timeout when the previous timeout has been exhausted,\n to make sure the devices are recovered before ending the trigger\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iterations when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n instance: `str`\n vrf: `str`\n address_family: `str`\n neighbor: `str`\n policy_definition: `str`\n statements: `str`\n set_weight: `int`\n bgp_id: `int`\n bgp_table_version: `int`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. 
Learn BGP Ops object and store the BGP neighbor(s) if has any,\n otherwise, SKIP the trigger. Learn RoutePolicy Ops object\n to store the route-map name with weight configured if has any,\n otherwise, SKIP the trigger\n 2. Save the current device configurations through \"method\" which user uses\n 3. Add the configuration of the inbound route-map (step 1) to learned\n BGP neighbor(s) from step 1 with BGP Conf object\n 4. Verify the BGP neighbor(s) from step 3 has inbound route-map (step 1) configured\n 5. Remove the route-map (step 1) configurations from the learned BGP\n neighbor(s) from step 1\n 6. Recover the device configurations to the one in step 2\n 7. Learn BGP Ops again and verify it is the same as the Ops in step 1\n\n \"\"\"\n mapping = Mapping(requirements={'ops.route_policy.route_policy.RoutePolicy':{\n 'requirements':[['info', '(?P<policy_definition>.*)',\n 'statements', '(?P<statements>.*)',\n 'actions',\n 'set_weight', '(?P<set_weight>.*)']],\n 'exclude': route_map_exclude},\n 'ops.bgp.bgp.Bgp':{\n 'requirements':[['info', 'instance', '(?P<instance>.*)',\n 'vrf', '(?P<vrf>.*)',\n 'neighbor', '(?P<neighbor>.*)',\n 'address_family', '(?P<address_family>.*)',\n 'bgp_table_version', '(?P<bgp_table_version>.*)'],\n ['info', 'instance', '(?P<instance>.*)', 'bgp_id', '(?P<bgp_id>.*)']],\n 'all_keys': True,\n 'kwargs':{'attributes':['info']},\n 'exclude': bgp_exclude}},\n config_info={'conf.bgp.Bgp':{\n 'requirements':[['device_attr', '{uut}',\n 'vrf_attr', '(?P<vrf>.*)',\n 'neighbor_attr','(?P<neighbor>.*)',\n 'address_family_attr', '(?P<address_family>.*)',\n 'nbr_af_route_map_name_in', '(?P<policy_definition>.*)']],\n 'verify_conf':False,\n 'kwargs':{'mandatory':{'bgp_id': '(?P<bgp_id>.*)'}}}},\n verify_ops={'ops.bgp.bgp.Bgp':{\n 'requirements': [['info', 'instance', '(?P<instance>.*)',\n 'vrf', '(?P<vrf>.*)',\n 'neighbor', '(?P<neighbor>.*)',\n 'address_family', '(?P<address_family>.*)',\n 'route_map_name_in', '(?P<policy_definition>.*)']],\n 'kwargs':{'attributes':['info']},\n 'exclude': bgp_exclude}},\n num_values={'policy_definition':1, 'statements':'all', 'set_weight':1, 'instance':1, 'vrf':1, 'address_family':1, 'neighbor':1, 'bgp_id':1, 'bgp_table_version':1})\n\n", "id": "955597", "language": "Python", "matching_score": 6.4263997077941895, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/addremove/bgp/addremove.py" }, { "content": "'''IOSXE Implementation for bgp modify triggers'''\n\n# import python\nimport time\n\n# Genie Libs\nfrom genie.libs.sdk.libs.utils.mapping import Mapping\nfrom genie.libs.sdk.triggers.modify.modify import TriggerModify\n\n# Which key to exclude for BGP Ops comparison\nbgp_exclude = ['maker', 'bgp_session_transport', 'route_refresh',\n 'bgp_negotiated_capabilities', 'notifications', 'last_reset',\n 'keepalives', 'total', 'total_bytes', 'up_time',\n 'bgp_negotiated_keepalive_timers', 'updates', 'opens',\n 'bgp_table_version', 'holdtime', 'keepalive_interval',\n 'route_reflector_client', 'capability',\n 'distance_internal_as', 'bgp_neighbor_counters', 'memory_usage',\n 'total_entries', 'routing_table_version', 'total_memory',\n 'path', 'prefixes', 'cluster_id']\n\n\nclass TriggerModifyBgpNeighborAsn(TriggerModify):\n \"\"\"Modify the neighbor remote_as configured under BGP and then restore the\n configuration by reapplying the whole running configuration\"\"\"\n\n __description__ = \"\"\"Modify the neighbor remote_as configured under BGP and then restore the\n configuration by reapplying the whole running configuration\n\n 
trigger_datafile:\n Mandatory Arguments:\n timeout:\n max_time (`int`): Maximum wait time for the trigger in seconds.\n Default: 180\n interval (`int`): Wait time between iteration when looping is\n needed in seconds. Default: 15\n method (`str`): Method to recover the device configuration.\n Supported methods:\n 'checkpoint': Rollback the configuration\n using checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config\n file on disk (iosxr)\n Optional Arguments:\n tgn_timeout (`int`): Maximum wait time for all traffic streams to be\n restored to the reference rate in seconds.\n Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic\n is resumed in seconds. Default: 10\n timeout_recovery:\n Buffer recovery timeout make sure devices are recovered at the\n end of the trigger execution. Used when previous timeouts have\n been exhausted.\n max_time (`int`): Maximum wait time for the last step of the\n trigger in seconds. Default: 180\n interval (`int`): Wait time between iteration when looping is\n needed in seconds. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n instance: `str`\n vrf: `str`\n neighbor: `str`\n remote_as: `int`\n bgp_id: `int`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n Steps:\n 1. Learn BGP Ops configured on device. SKIP the trigger if there\n is no BGP configured on the device.\n 2. Save the current device configurations using \"method\" specified.\n 3. Modify the remote-as learned in step 1 using Genie BGP Conf.\n 4. Verify the change to remote_as config under BGP is\n reflected in device configuration.\n 5. Restore the device configuration to the original configuration saved\n in step 2.\n 6. 
Learn BGP Ops again and verify it is the same as the Ops in step 1.\n \"\"\"\n\n # Mapping of Information between Ops and Conf\n # Also permit to dictates which key to verify\n mapping = Mapping(requirements={'ops.bgp.bgp.Bgp':{\n 'requirements':[['info', 'instance', '(?P<instance>.*)',\n 'vrf', '(?P<vrf>.*)', 'neighbor',\n '(?P<neighbor>.*)', 'remote_as',\n '(?P<remote_as>.*)'],\n ['info', 'instance', '(?P<instance>.*)', 'vrf',\n '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)',\n 'session_state', 'established'],\n ['info', 'instance', '(?P<instance>.*)', 'bgp_id', '(?P<bgp_id>.*)']],\n 'all_keys':True,\n 'kwargs':{'attributes':['info']},\n 'exclude': bgp_exclude}},\n config_info={'conf.bgp.Bgp':{\n 'requirements':[['device_attr', '{uut}', 'vrf_attr', '(?P<vrf>.*)',\n 'neighbor_attr','(?P<neighbor>.*)', 'nbr_remote_as',\n 88]],\n 'verify_conf':False,\n 'kwargs':{'mandatory':{'bgp_id': '(?P<bgp_id>.*)'}}}},\n verify_ops={'ops.bgp.bgp.Bgp':{\n 'requirements':[['info', 'instance', '(?P<instance>.*)', 'vrf',\n '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)',\n 'remote_as', 88],\n ['info', 'instance', '(?P<instance>.*)', 'vrf',\n '(?P<vrf>.*)', 'neighbor',\n '(?P<neighbor>.*)', 'session_state', 'idle']],\n 'kwargs':{'attributes':['info']},\n 'exclude': bgp_exclude}},\n num_values={'vrf':1, 'instance':1, 'neighbor':1})\n\n\nclass TriggerModifyBgpNeighborCluster(TriggerModify):\n \"\"\"Modify the cluster id configured under BGP and then restore the\n configuration by reapplying the whole running configuration\"\"\"\n\n __description__ = \"\"\"Modify the cluster id configured under BGP and then restore the\n configuration by reapplying the whole running configuration\n\n trigger_datafile:\n Mandatory Arguments:\n timeout:\n max_time (`int`): Maximum wait time for the trigger in seconds.\n Default: 180\n interval (`int`): Wait time between iteration when looping is\n needed in seconds. Default: 15\n method (`str`): Method to recover the device configuration.\n Supported methods:\n 'checkpoint': Rollback the configuration\n using checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config\n file on disk (iosxr)\n Optional Arguments:\n tgn_timeout (`int`): Maximum wait time for all traffic streams to be\n restored to the reference rate in seconds.\n Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic\n is resumed in seconds. Default: 10\n timeout_recovery:\n Buffer recovery timeout make sure devices are recovered at the\n end of the trigger execution. Used when previous timeouts have\n been exhausted.\n max_time (`int`): Maximum wait time for the last step of the\n trigger in seconds. Default: 180\n interval (`int`): Wait time between iteration when looping is\n needed in seconds. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n instance: `str`\n cluster_id: `str`\n bgp_id: `int`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n Steps:\n 1. Learn BGP Ops configured on device. SKIP the trigger if there\n is no BGP configured on the device.\n 2. Save the current device configurations using \"method\" specified.\n 3. Modify the cluster id learned in step 1 using Genie BGP Conf.\n 4. Verify the change to cluster id config under BGP is\n reflected in device configuration.\n 5. Restore the device configuration to the original configuration saved\n in step 2.\n 6. 
Learn BGP Ops again and verify it is the same as the Ops in step 1.\n \"\"\"\n # Mapping of Information between Ops and Conf\n # Also permit to dictates which key to verify\n mapping = Mapping(requirements={'ops.bgp.bgp.Bgp':{\n 'requirements':[['info', 'instance', '(?P<instance>.*)',\n 'vrf', 'default', 'cluster_id', '(?P<cluster_id>.*)'],\n ['info', 'instance', '(?P<instance>.*)', 'bgp_id', '(?P<bgp_id>.*)']],\n 'all_keys':True,\n 'kwargs':{'attributes':['info']},\n 'exclude': bgp_exclude}},\n config_info={'conf.bgp.Bgp':{\n 'requirements':[['device_attr', '{uut}', 'vrf_attr', 'default',\n 'cluster_id', '1.0.0.1']],\n 'verify_conf':False,\n 'kwargs':{'mandatory':{'bgp_id': '(?P<bgp_id>.*)'}}}},\n verify_ops={'ops.bgp.bgp.Bgp':{\n 'requirements':[['info', 'instance', '(?P<instance>.*)', 'vrf',\n 'default', 'cluster_id', '1.0.0.1']],\n 'kwargs':{'attributes':['info']},\n 'exclude': bgp_exclude}},\n num_values={'instance':1, 'neighbor':1, 'address_family':1})\n\n\nclass TriggerModifyBgpNeighborRoutemapIn(TriggerModify):\n \"\"\"Modify the neighbor inbound route map configured under BGP and then restore the\n configuration by reapplying the whole running configuration\"\"\"\n\n __description__ = \"\"\"Modify the neighbor inbound route map configured under BGP and then restore the\n configuration by reapplying the whole running configuration\n\n trigger_datafile:\n Mandatory Arguments:\n timeout:\n max_time (`int`): Maximum wait time for the trigger in seconds.\n Default: 180\n interval (`int`): Wait time between iteration when looping is\n needed in seconds. Default: 15\n method (`str`): Method to recover the device configuration.\n Supported methods:\n 'checkpoint': Rollback the configuration\n using checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config\n file on disk (iosxr)\n Optional Arguments:\n tgn_timeout (`int`): Maximum wait time for all traffic streams to be\n restored to the reference rate in seconds.\n Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic\n is resumed in seconds. Default: 10\n timeout_recovery:\n Buffer recovery timeout make sure devices are recovered at the\n end of the trigger execution. Used when previous timeouts have\n been exhausted.\n max_time (`int`): Maximum wait time for the last step of the\n trigger in seconds. Default: 180\n interval (`int`): Wait time between iteration when looping is\n needed in seconds. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n instance: `str`\n vrf: `vrf`\n neighbor: `str`\n address_family: `str`\n route_map: `str`\n bgp_id: `int`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n Steps:\n 1. Learn BGP Ops configured on device. SKIP the trigger if there\n is no BGP configured on the device.\n 2. Save the current device configurations using \"method\" specified.\n 3. Modify the inbound route map learned in step 1 using Genie BGP Conf.\n 4. Verify the change to inbound route map config under BGP is\n reflected in device configuration.\n 5. Restore the device configuration to the original configuration saved\n in step 2.\n 6. 
Learn BGP Ops again and verify it is the same as the Ops in step 1.\n    \"\"\"\n\n    # Create a name for route map in\n    new_name = 'bgp_' + time.ctime().replace(' ', '_').replace(':', '_')\n\n    # Mapping of Information between Ops and Conf\n    # Also permit to dictates which key to verify\n    mapping = Mapping(requirements={'ops.bgp.bgp.Bgp':{\n                                        'requirements':[['info', 'instance', '(?P<instance>.*)', 'vrf',\n                                                         '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>(?!:).*)',\n                                                         'address_family', '(?P<address_family>.*)',\n                                                         'route_map_name_in', '(?P<route_map>.*)'],\n                                                        ['info', 'instance', '(?P<instance>.*)', 'bgp_id', '(?P<bgp_id>.*)']],\n                                        'all_keys':True,\n                                        'kwargs':{'attributes':['info']},\n                                        'exclude': bgp_exclude}},\n                      config_info={'conf.bgp.Bgp':{\n                                     'requirements':[['device_attr', '{uut}', 'vrf_attr', '(?P<vrf>.*)',\n                                                      'neighbor_attr','(?P<neighbor>.*)', 'address_family_attr',\n                                                      '(?P<address_family>.*)', 'nbr_af_route_map_name_in',\n                                                      new_name]],\n                                     'verify_conf':False,\n                                     'kwargs':{'mandatory':{'bgp_id': '(?P<bgp_id>.*)'}}}},\n                      verify_ops={'ops.bgp.bgp.Bgp':{\n                                    'requirements':[['info', 'instance', '(?P<instance>.*)', 'vrf',\n                                                     '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)',\n                                                     'address_family', '(?P<address_family>.*)',\n                                                     'route_map_name_in', new_name]],\n                                    'kwargs':{'attributes':['info']},\n                                    'exclude': bgp_exclude}},\n                      num_values={'vrf':1, 'instance':1, 'neighbor':1,\n                                  'address_family':1})\n\n\nclass TriggerModifyBgpNeighborRoutemapOut(TriggerModify):\n    \"\"\"Modify the neighbor outbound route map configured under BGP and then restore the\n    configuration by reapplying the whole running configuration\"\"\"\n\n    __description__ = \"\"\"Modify the neighbor outbound route map configured under BGP and then restore the\n    configuration by reapplying the whole running configuration\n\n    trigger_datafile:\n        Mandatory Arguments:\n            timeout:\n                max_time (`int`): Maximum wait time for the trigger in seconds.\n                                  Default: 180\n                interval (`int`): Wait time between iteration when looping is\n                                  needed in seconds. Default: 15\n                method (`str`): Method to recover the device configuration.\n                                Supported methods:\n                                    'checkpoint': Rollback the configuration\n                                                  using checkpoint (nxos),\n                                                  archive file (iosxe),\n                                                  load the saved running-config\n                                                  file on disk (iosxr)\n        Optional Arguments:\n            tgn_timeout (`int`): Maximum wait time for all traffic streams to be\n                                 restored to the reference rate in seconds.\n                                 Default: 60\n            tgn_delay (`int`): Wait time between each poll to verify if traffic\n                               is resumed in seconds. Default: 10\n            timeout_recovery:\n                Buffer recovery timeout make sure devices are recovered at the\n                end of the trigger execution. Used when previous timeouts have\n                been exhausted.\n                max_time (`int`): Maximum wait time for the last step of the\n                                  trigger in seconds. Default: 180\n                interval (`int`): Wait time between iteration when looping is\n                                  needed in seconds. Default: 15\n            static:\n                The keys below are dynamically learnt by default.\n                However, they can also be set to a custom value when provided in the trigger datafile.\n\n                instance: `str`\n                vrf: `vrf`\n                neighbor: `str`\n                address_family: `str`\n                route_map: `str`\n                bgp_id: `int`\n\n                (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n                      OR\n                      interface: 'Ethernet1/1/1' (Specific value)\n    Steps:\n        1. Learn BGP Ops configured on device. SKIP the trigger if there\n           is no BGP configured on the device.\n        2. Save the current device configurations using \"method\" specified.\n        3. Modify the outbound route map learned in step 1 using Genie BGP Conf.\n        4. 
Verify the change to outbound route map config under BGP is\n           reflected in device configuration.\n        5. Restore the device configuration to the original configuration saved\n           in step 2.\n        6. Learn BGP Ops again and verify it is the same as the Ops in step 1.\n    \"\"\"\n\n    # Create a name for route map out\n    new_name = 'bgp_' + time.ctime().replace(' ', '_').replace(':', '_')\n\n    # Mapping of Information between Ops and Conf\n    # Also permit to dictates which key to verify\n    mapping = Mapping(requirements={'ops.bgp.bgp.Bgp':{\n                                        'requirements':[['info', 'instance', '(?P<instance>.*)', 'vrf',\n                                                         '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)',\n                                                         'address_family', '(?P<address_family>.*)',\n                                                         'route_map_name_out', '(?P<route_map>.*)'],\n                                                        ['info', 'instance', '(?P<instance>.*)', 'bgp_id', '(?P<bgp_id>.*)']],\n                                        'all_keys':True,\n                                        'kwargs':{'attributes':['info']},\n                                        'exclude': bgp_exclude}},\n                      config_info={'conf.bgp.Bgp':{\n                                     'requirements':[['device_attr', '{uut}', 'vrf_attr', '(?P<vrf>.*)',\n                                                      'neighbor_attr','(?P<neighbor>.*)', 'address_family_attr',\n                                                      '(?P<address_family>.*)', 'nbr_af_route_map_name_out',\n                                                      new_name]],\n                                     'verify_conf':False,\n                                     'kwargs':{'mandatory':{'bgp_id': '(?P<bgp_id>.*)'}}}},\n                      verify_ops={'ops.bgp.bgp.Bgp':{\n                                    'requirements':[['info', 'instance', '(?P<instance>.*)', 'vrf',\n                                                     '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)',\n                                                     'address_family', '(?P<address_family>.*)',\n                                                     'route_map_name_out', new_name]],\n                                    'kwargs':{'attributes':['info']},\n                                    'exclude': bgp_exclude}},\n                      num_values={'vrf':1, 'instance':1, 'neighbor':1,\n                                  'address_family':1})\n", "id": "3767090", "language": "Python", "matching_score": 7.5939040184021, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/modify/bgp/iosxe/modify.py" },
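A recurring device in these Mapping blocks is the '(?P<name>.*)' path element: while learning, it acts as a regex that captures a key (instance, vrf, neighbor, address_family), and in config_info/verify_ops the same placeholder is filled back in with the captured value. A rough, self-contained illustration of that substitution idea (not the actual Mapping implementation):

import re

# One requirement path in the style of the entries above; named groups
# mark the keys to learn.
path = ['info', 'instance', '(?P<instance>.*)', 'vrf', '(?P<vrf>.*)',
        'neighbor', '(?P<neighbor>.*)']

def fill(path, learned):
    """Replace each (?P<key>...) placeholder with its learned value."""
    out = []
    for part in path:
        m = re.fullmatch(r'\(\?P<(\w+)>.*\)', part)
        out.append(learned[m.group(1)] if m else part)
    return out

print(fill(path, {'instance': 'default', 'vrf': 'default',
                  'neighbor': '10.0.0.2'}))
# ['info', 'instance', 'default', 'vrf', 'default', 'neighbor', '10.0.0.2']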
{ "content": "'''Implementation for bgp modify triggers'''\n\n# import python\nimport time\nimport collections\n\n# Genie Libs\nfrom genie.libs.sdk.libs.utils.mapping import Mapping\nfrom genie.libs.sdk.triggers.modify.bgp import modify\n\nclass TriggerModifyBgpNeighborRoutemapIn(modify.TriggerModifyBgpNeighborRoutemapIn):\n    \"\"\"Modify and revert the inbound route-map for dynamically learned BGP neighbor(s).\"\"\"\n\n    __description__ = \"\"\"Modify and revert the inbound route-map for dynamically learned BGP neighbor(s).\n\n    trigger_datafile:\n        Mandatory:\n            timeout:\n                max_time (`int`): Maximum wait time for the trigger,\n                                in second. Default: 180\n                interval (`int`): Wait time between iteration when looping is needed,\n                                in second. Default: 15\n                method (`str`): Method to recover the device configuration,\n                              Support methods:\n                                'checkpoint': Rollback the configuration by\n                                              checkpoint (nxos),\n                                              archive file (iosxe),\n                                              load the saved running-config file on disk (iosxr)\n        Optional:\n            tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n                                 restored to the reference rate,\n                                 in second. Default: 60\n            tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n                               in second. Default: 10\n            timeout_recovery:\n                Buffer recovery timeout make sure devices are recovered at the end\n                of the trigger execution. Used when previous timeouts have been exhausted.\n\n                max_time (`int`): Maximum wait time for the last step of the trigger,\n                                in second. Default: 180\n                interval (`int`): Wait time between iteration when looping is needed,\n                                in second. Default: 15\n            static:\n                The keys below are dynamically learnt by default.\n                However, they can also be set to a custom value when provided in the trigger datafile.\n\n                instance: `str`\n                vrf: `vrf`\n                neighbor: `str`\n                address_family: `str`\n                route_map: `str`\n                bgp_id: `int`\n\n                (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n                      OR\n                      interface: 'Ethernet1/1/1' (Specific value)\n\n    steps:\n        1. Learn BGP Ops object and store the BGP neighbor(s) which have inbound route-map\n           configured. SKIP the trigger if there is no BGP neighbor(s) found\n        2. Save the current device configurations through \"method\" which user uses\n        3. Modify the inbound route-map of the learned BGP neighbor(s) from step 1\n           with BGP Conf object\n        4. Verify the inbound route-map of learned BGP neighbor(s) from step 3\n           changes to the modified name in step 3\n        5. Recover the device configurations to the one in step 2\n        6. Learn BGP Ops again and verify it is the same as the Ops in step 1\n\n    \"\"\"\n\n    config_info = collections.OrderedDict()\n    config_info['conf.route_policy.RoutePolicy'] =\\\n        {'requirements':[],\n         'verify_conf':False,\n         'kwargs':{'mandatory':{'name': modify.TriggerModifyBgpNeighborRoutemapIn.new_name}}}\n\n    config_info['conf.bgp.Bgp'] =\\\n        {'requirements':[['device_attr', '{uut}', 'vrf_attr', '(?P<vrf>.*)',\n                          'neighbor_attr','(?P<neighbor>.*)', 'address_family_attr',\n                          '(?P<address_family>.*)', 'nbr_af_route_map_name_in',\n                          modify.TriggerModifyBgpNeighborRoutemapIn.new_name]],\n         'verify_conf':False,\n         'kwargs':{'mandatory':{'bgp_id': '(?P<bgp_id>.*)'}}}\n\n    # Mapping of Information between Ops and Conf\n    # Also permit to dictates which key to verify\n    mapping = Mapping(requirements={'ops.bgp.bgp.Bgp':{\n                                        'requirements':[['info', 'instance', '(?P<instance>.*)', 'vrf',\n                                                         '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)',\n                                                         'address_family', '(?P<address_family>.*)',\n                                                         'route_map_name_in', '(?P<route_map>.*)'],\n                                                        ['info', 'instance', '(?P<instance>.*)', 'bgp_id', '(?P<bgp_id>.*)']],\n                                        'all_keys':True, \n                                        'kwargs':{'attributes':['info']},\n                                        'exclude': modify.bgp_exclude}},\n                      config_info=config_info,\n                      verify_ops={'ops.bgp.bgp.Bgp':{\n                                    'requirements':[['info', 'instance', '(?P<instance>.*)', 'vrf',\n                                                     '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)',\n                                                     'address_family', '(?P<address_family>.*)',\n                                                     'route_map_name_in', modify.TriggerModifyBgpNeighborRoutemapIn.new_name]],\n                                    'kwargs':{'attributes':['info']},\n                                    'exclude': modify.bgp_exclude}},\n                      num_values={'vrf':1, 'instance':1, 'neighbor':1,\n                                  'address_family':1})\n\n\nclass TriggerModifyBgpNeighborRoutemapOut(modify.TriggerModifyBgpNeighborRoutemapOut):\n    \"\"\"Modify and revert the outbound route-map for dynamically learned BGP neighbor(s).\"\"\"\n\n    __description__ = \"\"\"Modify and revert the outbound route-map for dynamically learned BGP neighbor(s).\n\n    trigger_datafile:\n        Mandatory:\n            timeout:\n                max_time (`int`): Maximum wait time for the trigger,\n                                in second. Default: 180\n                interval (`int`): Wait time between iteration when looping is needed,\n                                in second. Default: 15\n                method (`str`): Method to recover the device configuration,\n                              Support methods:\n                                'checkpoint': Rollback the configuration by\n                                              checkpoint (nxos),\n                                              archive file (iosxe),\n                                              load the saved running-config file on disk (iosxr)\n        Optional:\n            tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n                                 restored to the reference rate,\n                                 in second. Default: 60\n            tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n                               in second. 
Default: 10\n            timeout_recovery:\n                Buffer recovery timeout make sure devices are recovered at the end\n                of the trigger execution. Used when previous timeouts have been exhausted.\n\n                max_time (`int`): Maximum wait time for the last step of the trigger,\n                                in second. Default: 180\n                interval (`int`): Wait time between iteration when looping is needed,\n                                in second. Default: 15\n            static:\n                The keys below are dynamically learnt by default.\n                However, they can also be set to a custom value when provided in the trigger datafile.\n\n                instance: `str`\n                vrf: `vrf`\n                neighbor: `str`\n                address_family: `str`\n                route_map: `str`\n                bgp_id: `int`\n\n                (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n                      OR\n                      interface: 'Ethernet1/1/1' (Specific value)\n\n    steps:\n        1. Learn BGP Ops object and store the BGP neighbor(s) which have inbound route-map\n           configured. SKIP the trigger if there is no BGP neighbor(s) found\n        2. Save the current device configurations through \"method\" which user uses\n        3. Modify the outbound route-map of the learned BGP neighbor(s) from step 1\n           with BGP Conf object\n        4. Verify the outbound route-map of learned BGP neighbor(s) from step 3\n           changes to the modified name in step 3\n        5. Recover the device configurations to the one in step 2\n        6. Learn BGP Ops again and verify it is the same as the Ops in step 1\n\n    \"\"\"\n\n    config_info = collections.OrderedDict()\n    config_info['conf.route_policy.RoutePolicy'] =\\\n        {'requirements':[],\n         'verify_conf':False,\n         'kwargs':{'mandatory':{'name': modify.TriggerModifyBgpNeighborRoutemapOut.new_name}}}\n\n    config_info['conf.bgp.Bgp'] =\\\n        {'requirements':[['device_attr', '{uut}', 'vrf_attr', '(?P<vrf>.*)',\n                          'neighbor_attr','(?P<neighbor>.*)', 'address_family_attr',\n                          '(?P<address_family>.*)', 'nbr_af_route_map_name_out',\n                          modify.TriggerModifyBgpNeighborRoutemapOut.new_name]],\n         'verify_conf':False,\n         'kwargs':{'mandatory':{'bgp_id': '(?P<bgp_id>.*)'}}}\n\n    # Mapping of Information between Ops and Conf\n    # Also permit to dictates which key to verify\n    mapping = Mapping(requirements={'ops.bgp.bgp.Bgp':{\n                                        'requirements':[['info', 'instance', '(?P<instance>.*)', 'vrf',\n                                                         '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)',\n                                                         'address_family', '(?P<address_family>.*)',\n                                                         'route_map_name_in', '(?P<route_map>.*)'],\n                                                        ['info', 'instance', '(?P<instance>.*)', 'bgp_id', '(?P<bgp_id>.*)']],\n                                        'all_keys':True,\n                                        'kwargs':{'attributes':['info']},\n                                        'exclude': modify.bgp_exclude}},\n                      config_info=config_info,\n                      verify_ops={'ops.bgp.bgp.Bgp':{\n                                    'requirements':[['info', 'instance', '(?P<instance>.*)', 'vrf',\n                                                     '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)',\n                                                     'address_family', '(?P<address_family>.*)',\n                                                     'route_map_name_out', modify.TriggerModifyBgpNeighborRoutemapOut.new_name]],\n                                    'kwargs':{'attributes':['info']},\n                                    'exclude': modify.bgp_exclude}},\n                      num_values={'vrf':1, 'instance':1, 'neighbor':1,\n                                  'address_family':1})\n", "id": "7938663", "language": "Python", "matching_score": 6.823016166687012, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/modify/bgp/iosxr/modify.py" },
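The NXOS unconfigconfig entry that follows imports NotExists from ats.utils.objects and uses NotExists('send_community') as a verify_ops leaf: instead of requiring a key to hold a value, the check passes only when the key is gone after the unconfigure step. A conceptual sketch of that absence check (not the ats implementation):

def key_absent(ops_info, *path):
    """Walk a learned ops dict and confirm the final key was removed,
    e.g. key_absent(bgp.info, 'instance', 'default', 'vrf', 'default',
    'neighbor', '10.0.0.2', 'address_family', 'ipv4 unicast',
    'send_community')."""
    node = ops_info
    for key in path[:-1]:
        node = node.get(key, {})
        if not isinstance(node, dict):
            # The path dead-ends early, so the leaf cannot exist.
            return True
    return path[-1] not in node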
{ "content": "'''NXOS Implementation for BGP unconfigconfig triggers'''\n\n# python\nimport logging\nimport time\nfrom functools import partial\n\nlog = logging.getLogger(__name__)\n\n# ATS\nfrom ats import aetest\nfrom ats.utils.objects import Not, NotExists\n\n# Genie Libs\nfrom genie.libs.sdk.libs.utils.mapping import Mapping, Different\nfrom genie.libs.sdk.libs.utils.triggeractions import Configure, verify_ops_or_logic\nfrom genie.libs.sdk.triggers.unconfigconfig.unconfigconfig import TriggerUnconfigConfig\n\n# ipaddress\nfrom ipaddress import IPv4Address, IPv6Address\n\n\n# Which keys to exclude for BGP Ops comparison\nbgp_exclude = ['maker', 'bgp_session_transport', 'route_refresh',\n               'bgp_negotiated_capabilities', 'notifications', 'last_reset',\n               'keepalives', 'total', 'total_bytes', 'up_time',\n               'bgp_negotiated_keepalive_timers', 'updates', 'opens',\n               'bgp_table_version', 'holdtime', 'keepalive_interval',\n               'route_reflector_client', 'capability',\n               'distance_internal_as', 'bgp_neighbor_counters', 'memory_usage',\n               'total_entries', 'routing_table_version', 'total_memory',\n               'path', 'prefixes', 'cluster_id', 'distance_extern_as']\n\ntrm_exclude = ['maker', 'keepalives', 'total', 'up_time', 'total_bytes',]\n\n\nclass TriggerUnconfigConfigBgpNeighborSendCommunity(TriggerUnconfigConfig):\n    """Unconfigure send-community under BGP and\n    reapply the whole configurations for learned BGP."""\n\n    __description__ = """Unconfigure send-community under BGP and reapply the\n    whole configurations for learned BGP\n\n    trigger_datafile:\n        Mandatory:\n            timeout:\n                max_time (`int`): Maximum wait time for the trigger,\n                                in second. Default: 180\n                interval (`int`): Wait time between iteration when looping is needed,\n                                in second. Default: 15\n            method (`str`): Method to recover the device configuration,\n                          Support methods:\n                            'checkpoint': Rollback the configuration by\n                                          checkpoint (nxos),\n                                          archive file (iosxe),\n                                          load the saved running-config file on disk (iosxr)\n        Optional:\n            tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n                                 restored to the reference rate,\n                                 in second. Default: 60\n            tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n                               in second. Default: 10\n            timeout_recovery:\n                Buffer recovery timeout to make sure devices are recovered at the end\n                of the trigger execution. Used when previous timeouts have been exhausted.\n\n                max_time (`int`): Maximum wait time for the last step of the trigger,\n                                in second. Default: 180\n                interval (`int`): Wait time between iteration when looping is needed,\n                                in second. Default: 15\n            static:\n                The keys below are dynamically learnt by default.\n                However, they can also be set to a custom value when provided in the trigger datafile.\n\n                instance: `str`\n                vrf: `str`\n                neighbor: `str`\n                address_family: `str`\n                send_community: `str`\n                bgp_id: `int`\n\n                (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n                      OR\n                      interface: 'Ethernet1/1/1' (Specific value)\n    steps:\n        1. Learn BGP Ops object and store the BGP instance(s)\n           if there is any, otherwise SKIP the trigger\n        2. Save the current device configurations through "method" which user uses\n        3. Unconfigure send-community under bgp pid from step 1\n           with BGP Conf object\n        4. Verify the send-community from step 3 no longer exists\n        5. Recover the device configurations to the one in step 2\n        6. Learn BGP Ops again and verify it is the same as the Ops in step 1\n\n    """\n\n    mapping = Mapping(requirements={'ops.bgp.bgp.Bgp':{\n                                          'requirements':[['info', 'instance', '(?P<instance>.*)','vrf', '(?P<vrf>.*)',\n                                                           'neighbor','(?P<neighbor>.*)','address_family','(?P<address_family>.*)',\n                                                           'send_community','(?P<send_community>.*)'],\n                                                          ['info', 'instance', '(?P<instance>.*)', 'bgp_id', '(?P<bgp_id>.*)']],\n                                          'all_keys':True,\n                                          'kwargs':{'attributes': ['info']},\n                                          'exclude': bgp_exclude}},\n                      config_info={'conf.bgp.Bgp':{\n                                       'requirements':[['device_attr', '{uut}', 'vrf_attr', '(?P<vrf>.*)',\n                                                        'neighbor_attr','(?P<neighbor>.*)',\n                                                        'address_family_attr','(?P<address_family>.*)', 'nbr_af_send_community','(?P<send_community>.*)']],\n                                       'verify_conf':False,\n                                       'kwargs':{'mandatory':{'bgp_id': '(?P<bgp_id>.*)'}}}},\n                      verify_ops={'ops.bgp.bgp.Bgp':{\n                                      'requirements':[['info', 'instance', '(?P<instance>.*)','vrf', '(?P<vrf>.*)',\n                                                       'neighbor','(?P<neighbor>.*)','address_family','(?P<address_family>.*)',\n                                                       NotExists('send_community')]],\n                                      'kwargs':{'attributes':['info']},\n                                      'exclude': bgp_exclude}},\n                      num_values={'bgp_id':1, 'instance':1, 'vrf':1, 'address_family':1, 'neighbor':1})\n
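All of the trigger datafiles in this file share the same `max_time`/`interval` timeout contract: a verification step is retried every `interval` seconds until it passes or `max_time` is exhausted. A minimal sketch of those semantics (`poll_until` is a hypothetical helper, not the ats/genie Timeout API):

```python
import time

def poll_until(check, max_time=180, interval=15):
    """Re-run `check` every `interval` seconds until it returns True or
    `max_time` seconds have elapsed; mirrors the max_time/interval
    semantics described in the trigger datafiles above (sketch only).
    """
    deadline = time.time() + max_time
    while True:
        if check():
            return True
        if time.time() >= deadline:
            return False
        time.sleep(interval)

# e.g. keep re-learning ops until the unconfigured key is really gone:
# poll_until(lambda: 'send_community' not in learned_nbr_af, 180, 15)
```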
\n\nclass TriggerUnconfigConfigBgpNeighborSendCommunityExtended(TriggerUnconfigConfig):\n    """Unconfigure send-community extended under a BGP neighbor and\n    reapply the whole configurations of dynamically learned BGP pid"""\n\n    __description__ = """Unconfigure send-community extended for a BGP neighbor and\n    reapply the whole configurations of dynamically learned BGP pid\n\n    trigger_datafile:\n        Mandatory:\n            timeout:\n                max_time (`int`): Maximum wait time for the trigger,\n                                in second. Default: 180\n                interval (`int`): Wait time between iteration when looping is needed,\n                                in second. Default: 15\n            method (`str`): Method to recover the device configuration,\n                          Support methods:\n                            'checkpoint': Rollback the configuration by\n                                          checkpoint (nxos),\n                                          archive file (iosxe),\n                                          load the saved running-config file on disk (iosxr)\n        Optional:\n            tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n                                 restored to the reference rate,\n                                 in second. Default: 60\n            tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n                               in second. Default: 10\n            timeout_recovery:\n                Buffer recovery timeout to make sure devices are recovered at the end\n                of the trigger execution. Used when previous timeouts have been exhausted.\n\n                max_time (`int`): Maximum wait time for the last step of the trigger,\n                                in second. Default: 180\n                interval (`int`): Wait time between iteration when looping is needed,\n                                in second. Default: 15\n            static:\n                The keys below are dynamically learnt by default.\n                However, they can also be set to a custom value when provided in the trigger datafile.\n\n                instance: `str`\n                vrf: `str`\n                neighbor: `str`\n                address_family: `str`\n                send_community: `str`\n                bgp_id: `int`\n\n                (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n                      OR\n                      interface: 'Ethernet1/1/1' (Specific value)\n    steps:\n        1. Learn BGP Ops object and store the BGP instance(s)\n           if there is any, otherwise SKIP the trigger\n        2. Save the current device configurations through "method" which user uses\n        3. Unconfigure send-community extended for a BGP neighbor of learned BGP pid from step 1\n        4. Verify the send-community extended for BGP neighbor from step 3 no longer exists\n        5. Recover the device configurations to the one in step 2\n        6. 
Learn BGP Ops again and verify it is the same as the Ops in step 1\n\n \"\"\"\n\n mapping = Mapping(requirements={'ops.bgp.bgp.Bgp':{\n 'requirements':[['info', 'instance', '(?P<instance>.*)','vrf', '(?P<vrf>.*)',\n 'neighbor','(?P<neighbor>.*)','address_family','(?P<address_family>.*)',\n 'send_community','(?P<send_community>(both|extended)+)$'],\n ['info', 'instance', '(?P<instance>.*)', 'bgp_id', '(?P<bgp_id>.*)']],\n 'all_keys':True,\n 'kwargs':{'attributes': ['info']},\n 'exclude': bgp_exclude}},\n config_info={'conf.bgp.Bgp':{\n 'requirements':[['device_attr', '{uut}', 'vrf_attr', '(?P<vrf>.*)',\n 'neighbor_attr','(?P<neighbor>.*)',\n 'address_family_attr','(?P<address_family>.*)', 'nbr_af_send_community','(?P<send_community>.*)']],\n 'verify_conf':False,\n 'kwargs':{'mandatory':{'bgp_id': '(?P<bgp_id>.*)'}}}},\n verify_ops={'ops.bgp.bgp.Bgp':{\n 'requirements':[['info', 'instance', '(?P<instance>.*)','vrf', '(?P<vrf>.*)',\n 'neighbor','(?P<neighbor>.*)','address_family','(?P<address_family>.*)',\n NotExists('send_community')]],\n 'kwargs':{'attributes':['info']},\n 'exclude': bgp_exclude}},\n num_values={'bgp_id':1, 'instance':1 , 'vrf':1, 'address_family':1, 'neighbor':1 })\n\nclass TriggerUnconfigConfigBgpNeighborSoftReconfiguration(TriggerUnconfigConfig):\n \"\"\"Unconfigure soft-reconfiguration inbound for a BGP neighbor and\n reapply the whole configurations for learned BGP pid\"\"\"\n\n __description__ = \"\"\"Unconfigure soft-reconfiguration inbound for a BGP neighbor and\n reapply the whole configurations for learned BGP pid\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery:\n Buffer recovery timeout make sure devices are recovered at the end\n of the trigger execution. Used when previous timeouts have been exhausted.\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n instance: `str`\n vrf: `str`\n neighbor: `str`\n address_family: `str`\n bgp_id: `int`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn BGP Ops object and store the BGP instance(s)\n if has any, otherwise, SKIP the trigger\n 2. Save the current device configurations through \"method\" which user uses\n 3. Unconfigure soft-reconfiguration inbound for a BGP neighbor of learned BGP pid from step 1\n 4. Verify the soft-reconfiguration for BGP neighbor from step 3 are no longer existed\n 5. Recover the device configurations to the one in step 2\n 6. 
Learn BGP Ops again and verify it is the same as the Ops in step 1\n \"\"\"\n\n mapping = Mapping(requirements={'ops.bgp.bgp.Bgp':{\n 'requirements':[['info', 'instance', '(?P<instance>.*)','vrf', '(?P<vrf>.*)',\n 'neighbor','(?P<neighbor>.*)','address_family','(?P<address_family>.*)',\n 'soft_configuration',True ],\n ['info', 'instance', '(?P<instance>.*)', 'bgp_id', '(?P<bgp_id>.*)']],\n 'all_keys':True,\n 'kwargs':{'attributes': ['info']},\n 'exclude': bgp_exclude}},\n config_info={'conf.bgp.Bgp':{\n 'requirements': [['device_attr', '{uut}', 'vrf_attr', '(?P<vrf>.*)',\n 'neighbor_attr','(?P<neighbor>.*)',\n 'address_family_attr','(?P<address_family>.*)',\n 'nbr_af_soft_reconfiguration',True]],\n 'verify_conf':False,\n 'kwargs':{'mandatory':{'bgp_id': '(?P<bgp_id>.*)'}}}},\n verify_ops={'ops.bgp.bgp.Bgp':{\n 'requirements':[['info', 'instance', '(?P<instance>.*)','vrf', '(?P<vrf>.*)',\n 'neighbor','(?P<neighbor>.*)','address_family','(?P<address_family>.*)',\n NotExists('soft_configuration')]],\n 'kwargs':{'attributes':['info']},\n 'exclude': bgp_exclude}},\n num_values={'bgp_id':1, 'instance':1 , 'vrf':1, 'neighbor':1 })\n\n\nclass TriggerUnconfigConfigBgpKeepaliveHoldtime(TriggerUnconfigConfig):\n \"\"\"Unconfigure keepalive interval and holdtime and\n reapply the whole configurations for learned BGP pid\"\"\"\n\n __description__ = \"\"\"Unconfigure keepalive interval and holdtime and\n reapply the whole configurations for learned BGP pid\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery:\n Buffer recovery timeout make sure devices are recovered at the end\n of the trigger execution. Used when previous timeouts have been exhausted.\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n instance: `str`\n vrf: `str`\n neighbor: `str`\n keepalive_interval: `int`\n holdtime: `int`\n bgp_id: `int`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn BGP Ops object and store the BGP instance(s)\n if has any, otherwise, SKIP the trigger\n 2. Save the current device configurations through \"method\" which user uses\n 3. Unconfigure keepalive interval and holdtime for learned BGP pid from step 1\n 4. Verify the keepalive interval and holdtime for BGP pid from step 3 are no longer existed\n 5. Recover the device configurations to the one in step 2\n 6. 
Learn BGP Ops again and verify it is the same as the Ops in step 1\n \"\"\"\n mapping = Mapping(\\\n requirements={\\\n 'conf.bgp.Bgp': {\n 'requirements': [\\\n [['device_attr', '{uut}', '_vrf_attr', '(?P<vrf>.*)', 'keepalive_interval', '(?P<keepalive_interval>.*)']],\n [['device_attr', '{uut}', '_vrf_attr', '(?P<vrf>.*)', 'holdtime', '(?P<holdtime>.*)']]],\n 'all_keys':True,\n 'exclude': bgp_exclude},\n 'ops.bgp.bgp.Bgp': {\n 'requirements': [\\\n ['info', 'instance', '(?P<instance>.*)', 'vrf', '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)', 'session_state', 'established'],\n ['info', 'instance', '(?P<instance>.*)', 'bgp_id', '(?P<bgp_id>.*)']],\n 'all_keys':True,\n 'kwargs': {'attributes': ['info']},\n 'exclude': bgp_exclude}},\n config_info={\\\n 'conf.bgp.Bgp': {\n 'requirements': [\\\n ['device_attr', '{uut}', 'vrf_attr', '(?P<vrf>.*)', 'keepalive_interval', '(?P<keepalive_interval>.*)'],\n ['device_attr', '{uut}', 'vrf_attr', '(?P<vrf>.*)', 'holdtime', '(?P<holdtime>.*)']],\n 'verify_conf': False,\n 'kwargs':{'mandatory':{'bgp_id': '(?P<bgp_id>.*)'}}}},\n verify_ops={\\\n 'conf.bgp.Bgp': {\n 'requirements': [\\\n ['device_attr', '{uut}', '_vrf_attr', '(?P<vrf>.*)', NotExists('keepalive_interval')],\n ['device_attr', '{uut}', '_vrf_attr', '(?P<vrf>.*)', NotExists('holdtime')]],\n 'exclude': bgp_exclude}},\n num_values={'device': 1, 'bgp_id': 1, 'vrf': 1, 'instance': 1, 'neighbor': 1})\n\n\nclass TriggerUnconfigConfigBgpFastExternalFallover(TriggerUnconfigConfig):\n \"\"\"Unconfigure fast-external-fallover and reapply the whole configurations for learned BGP pid\"\"\"\n\n __description__ = \"\"\"Unconfigure fast-external-fallover under a BGP and\n reapply the whole configurations for learned BGP pid\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery:\n Buffer recovery timeout make sure devices are recovered at the end\n of the trigger execution. Used when previous timeouts have been exhausted.\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n instance: `str`\n vrf: `str`\n neighbor: `str`\n bgp_id: `int`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn BGP Ops object and store the BGP instance(s)\n if has any, otherwise, SKIP the trigger\n 2. Save the current device configurations through \"method\" which user uses\n 3. Unconfigure fast-external-fallover under learned BGP pid from step 1\n 4. Verify the fast-external-fallover under BGP pid from step 3 are no longer existed\n 5. Recover the device configurations to the one in step 2\n 6. 
Learn BGP Ops again and verify it is the same as the Ops in step 1\n \"\"\"\n\n\n mapping = Mapping(\\\n requirements={\\\n 'conf.bgp.Bgp': {\n 'requirements': [\\\n ['device_attr', '{uut}', '_vrf_attr', '(?P<vrf>.*)', 'fast_external_fallover', True]],\n 'exclude': bgp_exclude},\n 'ops.bgp.bgp.Bgp': {\n 'requirements': [\\\n ['info', 'instance', '(?P<instance>.*)', 'vrf', '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)', 'session_state', 'established'],\n ['info', 'instance', '(?P<instance>.*)', 'bgp_id', '(?P<bgp_id>.*)']],\n 'all_keys':True,\n 'kwargs': {'attributes': ['info']},\n 'exclude': bgp_exclude}},\n config_info={\\\n 'conf.bgp.Bgp': {\n 'requirements': [\\\n ['device_attr', '{uut}', 'vrf_attr', '(?P<vrf>.*)', 'fast_external_fallover', True]],\n 'verify_conf': False,\n 'kwargs': {'mandatory': {'bgp_id': '(?P<bgp_id>.*)'}}}},\n verify_ops={\\\n 'conf.bgp.Bgp': {\n 'requirements': [\\\n ['device_attr', '{uut}', '_vrf_attr', '(?P<vrf>.*)', NotExists('fast_external_fallover')]],\n 'exclude': bgp_exclude}},\n num_values={'device': 1, 'bgp_id': 1, 'vrf': 1, 'instance': 1, 'neighbor': 1})\n\n\nclass TriggerUnconfigConfigBgpGracefulRestart(TriggerUnconfigConfig):\n \"\"\"Unconfigure graceful restart configured under BGP and then\n reapply the whole configuration of dynamically learned BGP instance(s).\"\"\"\n\n __description__ = \"\"\"Unconfigure graceful restart configured under BGP and then \n reapply the whole configuration of dynamically learned BGP instance(s).\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery:\n Buffer recovery timeout make sure devices are recovered at the end\n of the trigger execution. Used when previous timeouts have been exhausted.\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n instance: `str`\n vrf: `str`\n bgp_id: `int`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n Steps:\n 1. Learn BGP Ops object and store the BGP instance(s) if any,\n else SKIP the trigger\n 2. Save the current device configurations using the \"method\" specified\n by user in Trigger YAML.\n 3. Unconfigure the learned BGP instance(s) from step 1\n with BGP Conf object\n 4. Verify the BGP instance(s) from step 3 no longer exists\n 5. Recover the device configurations to the one in step 2\n 6. 
Learn BGP Ops again and verify it is the same as the Ops in step 1\n \"\"\"\n\n mapping = Mapping(\\\n requirements={\\\n 'ops.bgp.bgp.Bgp':{\n 'requirements':[\\\n ['info', 'instance', '(?P<instance>.*)', 'bgp_id', '(?P<bgp_id>.*)']],\n 'kwargs':{'attributes':['info']},\n 'exclude': bgp_exclude},\n 'conf.bgp.Bgp':{\n 'requirements':[\\\n ['device_attr', '{uut}', '_vrf_attr', '(?P<vrf>.*)', 'graceful_restart', True]],\n 'exclude': bgp_exclude}},\n config_info={\\\n 'conf.bgp.Bgp':{\n 'requirements':[\\\n ['device_attr', '{uut}', 'vrf_attr', '(?P<vrf>.*)', 'graceful_restart', True]],\n 'kwargs':{'mandatory':{'bgp_id': '(?P<bgp_id>.*)'}}}},\n verify_ops={\\\n 'conf.bgp.Bgp':{\n 'requirements':[\\\n ['device_attr', '{uut}', '_vrf_attr', '(?P<vrf>.*)', 'graceful_restart', False]],\n 'exclude': bgp_exclude}},\n num_values={'instance':1, 'vrf':1, 'neighbor':1})\n\n\nclass TriggerUnconfigConfigBgpNeighborDefaultOriginate(TriggerUnconfigConfig):\n \"\"\"Unconfigure default originate configured under BGP neighbor and then\n reapply the whole configuration of dynamically learned BGP instance(s).\"\"\"\n\n __description__ = \"\"\"Unconfigure default originate configured under BGP neighbor and then \n reapply the whole configuration of dynamically learned BGP instance(s).\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery:\n Buffer recovery timeout make sure devices are recovered at the end\n of the trigger execution. Used when previous timeouts have been exhausted.\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n instance: `str`\n vrf: `str`\n neighbor: `str`\n address_family: `str`\n bgp_id: `int`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n Steps:\n 1. Learn BGP Ops object and store the BGP instance(s) if any,\n else SKIP the trigger\n 2. Save the current device configurations using the \"method\" specified\n by user in Trigger YAML.\n 3. Unconfigure the learned BGP instance(s) from step 1\n with BGP Conf object\n 4. Verify the BGP instance(s) from step 3 no longer exists\n 5. Recover the device configurations to the one in step 2\n 6. 
Learn BGP Ops again and verify it is the same as the Ops in step 1\n \"\"\"\n\n mapping = Mapping(\\\n requirements={\\\n 'ops.bgp.bgp.Bgp':{\n 'requirements':[\\\n ['info', 'instance', '(?P<instance>.*)', 'vrf', '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)', 'address_family', '(?P<address_family>.*)', 'default_originate', True],\n ['info', 'instance', '(?P<instance>.*)', 'vrf', '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)', 'session_state', 'established'],\n ['info', 'instance', '(?P<instance>.*)', 'bgp_id', '(?P<bgp_id>.*)']],\n 'all_keys': True,\n 'kwargs':{'attributes':['info']},\n 'exclude': bgp_exclude}},\n config_info={\\\n 'conf.bgp.Bgp':{\n 'requirements':[\\\n ['device_attr', '{uut}', 'vrf_attr', '(?P<vrf>.*)', 'neighbor_attr', '(?P<neighbor>.*)', 'address_family_attr', '(?P<address_family>.*)', 'nbr_af_default_originate', True]],\n 'verify_conf':False,\n 'kwargs':{'mandatory':{'bgp_id': '(?P<bgp_id>.*)'}}}},\n verify_ops={\\\n 'ops.bgp.bgp.Bgp':{\n 'requirements':[\\\n ['info', 'instance', '(?P<instance>.*)', 'vrf', '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)', 'address_family', '(?P<address_family>.*)', NotExists('default_originate')]],\n 'kwargs':{'attributes':['info']},\n 'exclude': bgp_exclude}},\n num_values={'instance':1, 'vrf':1, 'neighbor':1})\n\n\nclass TriggerUnconfigConfigBgpNeighborNextHopSelf(TriggerUnconfigConfig):\n \"\"\"Unconfigure next hop self configured under BGP neighbor and then\n reapply the whole configuration of dynamically learned BGP instance(s).\"\"\"\n\n __description__ = \"\"\"Unconfigure next hop self configured under BGP neighbor and then \n reapply the whole configuration of dynamically learned BGP instance(s).\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery:\n Buffer recovery timeout make sure devices are recovered at the end\n of the trigger execution. Used when previous timeouts have been exhausted.\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n instance: `str`\n vrf: `str`\n neighbor: `str`\n address_family: `str`\n bgp_id: `int`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n Steps:\n 1. Learn BGP Ops object and store the BGP instance(s) if any,\n else SKIP the trigger\n 2. Save the current device configurations using the \"method\" specified\n by user in Trigger YAML.\n 3. Unconfigure the learned BGP instance(s) from step 1\n with BGP Conf object\n 4. Verify the BGP instance(s) from step 3 no longer exists\n 5. Recover the device configurations to the one in step 2\n 6. 
Learn BGP Ops again and verify it is the same as the Ops in step 1\n \"\"\"\n\n mapping = Mapping(\\\n requirements={\\\n 'ops.bgp.bgp.Bgp':{\n 'requirements':[\\\n ['info', 'instance', '(?P<instance>.*)', 'vrf', '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)', 'address_family', '(?P<address_family>.*)', 'next_hop_self', True],\n ['info', 'instance', '(?P<instance>.*)', 'vrf', '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)', 'session_state', 'established'],\n ['info', 'instance', '(?P<instance>.*)', 'bgp_id', '(?P<bgp_id>.*)']],\n 'all_keys': True,\n 'kwargs':{'attributes':['info']},\n 'exclude': bgp_exclude}},\n config_info={\\\n 'conf.bgp.Bgp':{\n 'requirements':[\\\n ['device_attr', '{uut}', 'vrf_attr', '(?P<vrf>.*)', 'neighbor_attr', '(?P<neighbor>.*)', 'address_family_attr', '(?P<address_family>.*)', 'nbr_af_next_hop_self', True]],\n 'verify_conf':False,\n 'kwargs':{'mandatory':{'bgp_id': '(?P<bgp_id>.*)'}}}},\n verify_ops={\\\n 'ops.bgp.bgp.Bgp':{\n 'requirements':[\\\n ['info', 'instance', '(?P<instance>.*)', 'vrf', '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)', 'address_family', '(?P<address_family>.*)', NotExists('next_hop_self')]],\n 'kwargs':{'attributes':['info']},\n 'exclude': bgp_exclude}},\n num_values={'instance':1, 'vrf':1, 'neighbor':1})\n\n\nclass TriggerUnconfigConfigBgpNeighborTransportConnectionModePassive(TriggerUnconfigConfig):\n \"\"\"Unconfigure transportation connection mode (if passive) configured under\n BGP neighbor and then reapply the whole configuration of dynamically \n learned BGP instance(s).\"\"\"\n\n __description__ = \"\"\"Unconfigure transportation connection mode (if passive) configured under \n BGP neighbor and then reapply the whole configuration of dynamically \n learned BGP instance(s).\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery:\n Buffer recovery timeout make sure devices are recovered at the end\n of the trigger execution. Used when previous timeouts have been exhausted.\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n instance: `str`\n vrf: `str`\n neighbor: `str`\n bgp_id: `int`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n Steps:\n 1. Learn BGP Ops object and store the BGP instance(s) if any,\n else SKIP the trigger\n 2. Save the current device configurations using the \"method\" specified\n by user in Trigger YAML.\n 3. Unconfigure the learned BGP instance(s) from step 1\n with BGP Conf object\n 4. Verify the BGP instance(s) from step 3 no longer exists\n 5. 
Recover the device configurations to the one in step 2\n        6. Learn BGP Ops again and verify it is the same as the Ops in step 1\n    """\n\n    mapping = Mapping(\\\n                requirements={\\\n                    'ops.bgp.bgp.Bgp':{\n                        'requirements':[\\\n                            ['info', 'instance', '(?P<instance>.*)', 'vrf', '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)', 'bgp_session_transport', 'connection', 'mode', 'passive'],\n                            ['info', 'instance', '(?P<instance>.*)', 'vrf', '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)', 'session_state', 'established'],\n                            ['info', 'instance', '(?P<instance>.*)', 'bgp_id', '(?P<bgp_id>.*)']],\n                        'all_keys': True,\n                        'kwargs':{'attributes':['info']},\n                        'exclude': bgp_exclude}},\n                config_info={\\\n                    'conf.bgp.Bgp':{\n                        'requirements':[\\\n                            ['device_attr', '{uut}', 'vrf_attr', '(?P<vrf>.*)', 'neighbor_attr', '(?P<neighbor>.*)', 'nbr_transport_connection_mode', 'passive']],\n                        'verify_conf':False,\n                        'kwargs':{'mandatory':{'bgp_id': '(?P<bgp_id>.*)'}}}},\n                verify_ops={\\\n                    'ops.bgp.bgp.Bgp':{\n                        'requirements':[\\\n                            ['info', 'instance', '(?P<instance>.*)', 'vrf', '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)', 'bgp_session_transport', 'connection', NotExists('mode')]],\n                        'kwargs':{'attributes':['info']},\n                        'exclude': bgp_exclude}},\n                num_values={'instance':1, 'vrf':1, 'neighbor':1})\n
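The verify_ops blocks in this module lean heavily on `NotExists(...)` from `ats.utils.objects` to assert that a key has disappeared from the re-learned Ops data after the unconfigure step. A rough, simplified stand-in for that idea (the real `NotExists` integrates with Mapping's regex path resolution):

```python
def assert_not_exists(ops_dict, path, missing_key):
    """Walk `path` into a nested dict and assert `missing_key` is gone.

    Simplified stand-in for the NotExists(...) requirement used in
    verify_ops throughout this file; illustration only.
    """
    node = ops_dict
    for step in path:
        node = node[step]
    assert missing_key not in node, (
        '%r still present under %s' % (missing_key, '/'.join(path)))

# After unconfiguring 'transport connection mode passive', the re-learned
# ops data should no longer carry the 'mode' key:
ops = {'info': {'instance': {'default': {'vrf': {'default': {'neighbor': {
    '10.1.1.1': {'bgp_session_transport': {'connection': {}}}}}}}}}}
assert_not_exists(
    ops,
    ['info', 'instance', 'default', 'vrf', 'default', 'neighbor',
     '10.1.1.1', 'bgp_session_transport', 'connection'],
    'mode')
```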
\n\nclass TriggerUnconfigConfigBgpNeighborPassword(TriggerUnconfigConfig):\n    """Unconfigure the password configured under BGP neighbor and then\n    reapply the whole configuration of dynamically learned BGP instance(s)."""\n\n    __description__ = """Unconfigure the password configured under BGP neighbor and then \n    reapply the whole configuration of dynamically learned BGP instance(s).\n\n    trigger_datafile:\n        Mandatory:\n            timeout:\n                max_time (`int`): Maximum wait time for the trigger,\n                                in second. Default: 180\n                interval (`int`): Wait time between iteration when looping is needed,\n                                in second. Default: 15\n            method (`str`): Method to recover the device configuration,\n                          Support methods:\n                            'checkpoint': Rollback the configuration by\n                                          checkpoint (nxos),\n                                          archive file (iosxe),\n                                          load the saved running-config file on disk (iosxr)\n        Optional:\n            tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n                                 restored to the reference rate,\n                                 in second. Default: 60\n            tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n                               in second. Default: 10\n            timeout_recovery:\n                Buffer recovery timeout to make sure devices are recovered at the end\n                of the trigger execution. Used when previous timeouts have been exhausted.\n\n                max_time (`int`): Maximum wait time for the last step of the trigger,\n                                in second. Default: 180\n                interval (`int`): Wait time between iteration when looping is needed,\n                                in second. Default: 15\n            static:\n                The keys below are dynamically learnt by default.\n                However, they can also be set to a custom value when provided in the trigger datafile.\n\n                instance: `str`\n                vrf: `str`\n                neighbor: `str`\n                password_text: `str`\n                bgp_id: `int`\n\n                (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n                      OR\n                      interface: 'Ethernet1/1/1' (Specific value)\n    Steps:\n        1. Learn BGP Ops object and store the BGP instance(s) if any,\n           else SKIP the trigger\n        2. Save the current device configurations using the "method" specified\n           by user in Trigger YAML.\n        3. Unconfigure the learned BGP instance(s) from step 1\n           with BGP Conf object\n        4. Verify the BGP instance(s) from step 3 no longer exists\n        5. Recover the device configurations to the one in step 2\n        6. Learn BGP Ops again and verify it is the same as the Ops in step 1\n    """\n\n    mapping = Mapping(\\\n                requirements={\\\n                    'ops.bgp.bgp.Bgp':{\n                        'requirements':[\\\n                            ['info', 'instance', '(?P<instance>.*)', 'vrf', '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)', 'password_text', '(?P<password_text>.*)'],\n                            ['info', 'instance', '(?P<instance>.*)', 'vrf', '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)', 'session_state', 'established'],\n                            ['info', 'instance', '(?P<instance>.*)', 'bgp_id', '(?P<bgp_id>.*)']],\n                        'all_keys': True,\n                        'kwargs':{'attributes':['info']},\n                        'exclude': bgp_exclude}},\n                config_info={\\\n                    'conf.bgp.Bgp':{\n                        'requirements':[\\\n                            ['device_attr', '{uut}', 'vrf_attr', '(?P<vrf>.*)', 'neighbor_attr', '(?P<neighbor>.*)', 'nbr_password_text', '(?P<password_text>.*)']],\n                        'verify_conf':False,\n                        'kwargs':{'mandatory':{'bgp_id': '(?P<bgp_id>.*)'}}}},\n                verify_ops={\\\n                    'ops.bgp.bgp.Bgp':{\n                        'requirements':[\\\n                            ['info', 'instance', '(?P<instance>.*)', 'vrf', '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)', NotExists('password_text')]],\n                        'kwargs':{'attributes':['info']},\n                        'exclude': bgp_exclude}},\n                num_values={'instance':1, 'vrf':1, 'neighbor':1})\n\n\nclass TriggerUnconfigConfigBgpNeighborBfd(TriggerUnconfigConfig):\n    """Unconfigure bfd configured under BGP neighbor and then\n    reapply the whole configuration of dynamically learned BGP instance(s)."""\n\n    __description__ = """Unconfigure bfd configured under BGP neighbor and then \n    reapply the whole configuration of dynamically learned BGP instance(s).\n\n    trigger_datafile:\n        Mandatory:\n            timeout:\n                max_time (`int`): Maximum wait time for the trigger,\n                                in second. Default: 180\n                interval (`int`): Wait time between iteration when looping is needed,\n                                in second. Default: 15\n            method (`str`): Method to recover the device configuration,\n                          Support methods:\n                            'checkpoint': Rollback the configuration by\n                                          checkpoint (nxos),\n                                          archive file (iosxe),\n                                          load the saved running-config file on disk (iosxr)\n        Optional:\n            tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n                                 restored to the reference rate,\n                                 in second. Default: 60\n            tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n                               in second. Default: 10\n            timeout_recovery:\n                Buffer recovery timeout to make sure devices are recovered at the end\n                of the trigger execution. Used when previous timeouts have been exhausted.\n\n                max_time (`int`): Maximum wait time for the last step of the trigger,\n                                in second. Default: 180\n                interval (`int`): Wait time between iteration when looping is needed,\n                                in second. Default: 15\n            static:\n                The keys below are dynamically learnt by default.\n                However, they can also be set to a custom value when provided in the trigger datafile.\n\n                instance: `str`\n                vrf: `str`\n                neighbor: `str`\n                bgp_id: `int`\n\n                (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n                      OR\n                      interface: 'Ethernet1/1/1' (Specific value)\n    Steps:\n        1. Learn BGP Ops object and store the BGP instance(s) if any,\n           else SKIP the trigger\n        2. Save the current device configurations using the "method" specified\n           by user in Trigger YAML.\n        3. Unconfigure the learned BGP instance(s) from step 1\n           with BGP Conf object\n        4. Verify the BGP instance(s) from step 3 no longer exists\n        5. Recover the device configurations to the one in step 2\n        6. 
Learn BGP Ops again and verify it is the same as the Ops in step 1\n \"\"\"\n\n mapping = Mapping(\\\n requirements={\\\n 'ops.bgp.bgp.Bgp':{\n 'requirements':[\\\n ['info', 'instance', '(?P<instance>.*)', 'vrf', '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)', 'fall_over_bfd', True],\n ['info', 'instance', '(?P<instance>.*)', 'vrf', '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)', 'session_state', 'established'],\n ['info', 'instance', '(?P<instance>.*)', 'bgp_id', '(?P<bgp_id>.*)']],\n 'all_keys': True,\n 'kwargs':{'attributes':['info']},\n 'exclude': bgp_exclude}},\n config_info={\\\n 'conf.bgp.Bgp':{\n 'requirements':[\\\n ['device_attr', '{uut}', 'vrf_attr', '(?P<vrf>.*)', 'neighbor_attr', '(?P<neighbor>.*)', 'nbr_fall_over_bfd', True]],\n 'verify_conf':False,\n 'kwargs':{'mandatory':{'bgp_id': '(?P<bgp_id>.*)'}}}},\n verify_ops={\\\n 'ops.bgp.bgp.Bgp':{\n 'requirements':[\\\n ['info', 'instance', '(?P<instance>.*)', 'vrf', '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)', NotExists('fall_over_bfd')]],\n 'kwargs':{'attributes':['info']},\n 'exclude': bgp_exclude}},\n num_values={'instance':1, 'vrf':1, 'neighbor':1})\n\n\nclass TriggerUnconfigConfigBgpNeighborRouteReflectorClient(TriggerUnconfigConfig):\n \"\"\"Unconfigure and reapply the whole configurations of dynamically\n learned BGP neighbor(s) route-reflector-client.\"\"\"\n\n __description__ = \"\"\"Unconfigure and reapply the whole configurations of dynamically\n learned BGP neighbor(s) route-reflector-client.\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery: \n Buffer recovery timeout when the previous timeout has been exhausted,\n to make sure the devices are recovered before ending the trigger\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n instance: `str`\n vrf: `str`\n neighbor: `str`\n address_family: `str`\n bgp_id: `int`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn BGP Ops object and store the BGP neighbor(s) with route-reflector-client\n if has any, otherwise, SKIP the trigger\n 2. Save the current device configurations through \"method\" which user uses\n 3. Unconfigure the learned BGP neighbor(s) route-reflector-client from step 1 \n with BGP Conf object\n 4. Verify the BGP vrf(s) route_distinguisher from step 3 are no longer existed\n 5. Recover the device configurations to the one in step 2\n 6. 
Learn BGP Ops again and verify it is the same as the Ops in step 1\n\n \"\"\"\n\n # configuration steps callable\n def unconfigure_route_ref(self, conf_obj, path, **kwargs):\n\n paths = self._path_population([path], kwargs['device'])\n # find position that neighbor (ip) sit\n # replace ip string to IPv4Address object\n for path in paths:\n ipv4_index_list = [path.index(val) for val in path if '.' in str(val)]\n ipv6_index_list = [path.index(val) for val in path if ':' in str(val)]\n\n for index in ipv4_index_list:\n path[index] = IPv4Address(path[index])\n for index in ipv6_index_list:\n path[index] = IPv6Address(path[index])\n\n config = '\\n'.join([str(conf_path) for conf_path in paths])\n log.info('With following configuration:\\n{c}'\n .format(c=config))\n\n Configure.conf_configure(device=kwargs['device'],\n conf=conf_obj,\n conf_structure=paths,\n unconfig=True)\n\n\n mapping = Mapping(requirements={'ops.bgp.bgp.Bgp':{\n 'requirements':[['info', 'instance', '(?P<instance>.*)',\n 'vrf', '(?P<vrf>.*)', 'neighbor',\n '(?P<neighbor>.*)', 'address_family',\n '(?P<address_family>.*)',\n 'route_reflector_client', True],\n ['info', 'instance', '(?P<instance>.*)', 'vrf',\n '(?P<vrf>.*)', 'neighbor',\n '(?P<neighbor>.*)', 'session_state', 'established'],\n ['info', 'instance', '(?P<instance>.*)', 'bgp_id', '(?P<bgp_id>.*)']],\n 'all_keys':True,\n 'kwargs':{'attributes':['info']},\n 'exclude': bgp_exclude}},\n config_info={'conf.bgp.Bgp':{\n 'requirements':[[partial(unconfigure_route_ref, path = ['device_attr', '{uut}', 'vrf_attr',\n '(?P<vrf>.*)', 'neighbor_attr',\n '(?P<neighbor>.*)', 'address_family_attr',\n '(?P<address_family>.*)',\n 'nbr_af_route_reflector_client', True])\n ]],\n 'verify_conf':False,\n 'kwargs':{'mandatory':{'bgp_id': '(?P<bgp_id>.*)'}}}},\n verify_ops={'ops.bgp.bgp.Bgp':{\n 'requirements': [['info', 'instance', '(?P<instance>.*)',\n 'vrf', '(?P<vrf>.*)', 'neighbor',\n '(?P<neighbor>.*)', 'address_family',\n '(?P<address_family>.*)',\n NotExists('route_reflector_client')]],\n 'kwargs':{'attributes':['info']},\n 'exclude': bgp_exclude}},\n num_values={'vrf':'all', 'instance':'all', 'neighbor': 'all',\n 'address_family':'all', 'rd': 'all'})\n\n\nclass TriggerUnconfigConfigBgpNeighborIpv4(TriggerUnconfigConfig):\n \"\"\"Unconfigure and reapply the whole configurations of dynamically learned BGP IPv4 neighbor(s).\"\"\"\n\n __description__ = \"\"\"Unconfigure and reapply the whole configurations of dynamically learned BGP IPv4 neighbor(s).\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery: \n Buffer recovery timeout when the previous timeout has been exhausted,\n to make sure the devices are recovered before ending the trigger\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. 
Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n instance: `str`\n vrf: `str`\n neighbor: `str`\n bgp_id: `int`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn BGP Ops object and store the BGP IPv4 neighbor(s)\n if has any, otherwise, SKIP the trigger\n 2. Save the current device configurations through \"method\" which user uses\n 3. Unconfigure the learned BGP IPv4 neighbor(s) from step 1 \n with BGP Conf object\n 4. Verify the BGP IPv4 neighbor(s) from step 3 are no longer existed\n 5. Recover the device configurations to the one in step 2\n 6. Learn BGP Ops again and verify it is the same as the Ops in step 1\n\n \"\"\"\n\n mapping = Mapping(requirements={'ops.bgp.bgp.Bgp':{\n 'requirements':[['info', 'instance', '(?P<instance>.*)',\n 'vrf', '(?P<vrf>.*)', 'neighbor',\n '(?P<neighbor>^[\\d\\.]+$)', 'session_state', 'established'],\n ['info', 'instance', '(?P<instance>.*)', 'bgp_id', '(?P<bgp_id>.*)']],\n 'all_keys':True,\n 'kwargs':{'attributes':['info']},\n 'exclude': bgp_exclude}},\n config_info={'conf.bgp.Bgp':{\n 'requirements':[['device_attr', '{uut}', 'vrf_attr', '(?P<vrf>.*)',\n 'neighbor_attr','(?P<neighbor>^[\\d\\.]+$)']],\n 'verify_conf':False,\n 'kwargs':{'mandatory':{'bgp_id': '(?P<bgp_id>.*)'}}}},\n verify_ops={'ops.bgp.bgp.Bgp':{\n 'requirements': [[partial(verify_ops_or_logic,\n requires=[['info', 'instance', '(?P<instance>.*)', 'vrf',\n '(?P<vrf>.*)', 'neighbor',\n NotExists('(?P<neighbor>.*)')],\n ['info', 'instance', '(?P<instance>.*)', 'vrf',\n '(?P<vrf>.*)', NotExists('neighbor')],\n ])\n ]],\n 'kwargs':{'attributes':['info']},\n 'exclude': bgp_exclude + ['vpnv4 unicast', 'distance_local']}},\n num_values={'vrf':'all', 'instance':'all',\n 'neighbor':'all'})\n\n\nclass TriggerUnconfigConfigBgpNeighborIpv6(TriggerUnconfigConfig):\n \"\"\"Unconfigure and reapply the whole configurations of dynamically learned BGP IPv6 neighbor(s).\"\"\"\n\n __description__ = \"\"\"Unconfigure and reapply the whole configurations of dynamically learned BGP IPv6 neighbor(s).\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery: \n Buffer recovery timeout when the previous timeout has been exhausted,\n to make sure the devices are recovered before ending the trigger\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. 
Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n instance: `str`\n vrf: `str`\n neighbor: `str`\n bgp_id: `int`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn BGP Ops object and store the BGP IPv6 neighbor(s)\n if has any, otherwise, SKIP the trigger\n 2. Save the current device configurations through \"method\" which user uses\n 3. Unconfigure the learned BGP IPv6 neighbor(s) from step 1 \n with BGP Conf object\n 4. Verify the BGP IPv6 neighbor(s) from step 3 are no longer existed\n 5. Recover the device configurations to the one in step 2\n 6. Learn BGP Ops again and verify it is the same as the Ops in step 1\n\n \"\"\"\n\n mapping = Mapping(requirements={'ops.bgp.bgp.Bgp':{\n 'requirements':[['info', 'instance', '(?P<instance>.*)',\n 'vrf', '(?P<vrf>.*)', 'neighbor',\n '(?P<neighbor>^[\\w\\:]+$)', 'session_state', 'established'],\n ['info', 'instance', '(?P<instance>.*)', 'bgp_id', '(?P<bgp_id>.*)']],\n 'all_keys':True,\n 'kwargs':{'attributes':['info']},\n 'exclude': bgp_exclude}},\n config_info={'conf.bgp.Bgp':{\n 'requirements':[['device_attr', '{uut}', 'vrf_attr', '(?P<vrf>.*)',\n 'neighbor_attr','(?P<neighbor>^[\\w\\:]+$)']],\n 'verify_conf':False,\n 'kwargs':{'mandatory':{'bgp_id': '(?P<bgp_id>.*)'}}}},\n verify_ops={'ops.bgp.bgp.Bgp':{\n 'requirements': [['info', 'instance', '(?P<instance>.*)', 'vrf',\n '(?P<vrf>.*)', 'neighbor',\n NotExists('(?P<neighbor>.*)')]],\n 'kwargs':{'attributes':['info']},\n 'exclude': bgp_exclude}},\n num_values={'vrf':'all', 'instance':'all',\n 'neighbor':'all'})\n\n\nclass TriggerUnconfigConfigBgpNeighborIbgp(TriggerUnconfigConfig):\n \"\"\"Unconfigure and reapply the whole configurations of dynamically learned iBGP neighbor(s).\"\"\"\n \n __description__ = \"\"\"Unconfigure and reapply the whole configurations of dynamically learned iBGP neighbor(s).\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery: \n Buffer recovery timeout when the previous timeout has been exhausted,\n to make sure the devices are recovered before ending the trigger\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n instance: `str`\n vrf: `str`\n neighbor: `str`\n bgp_id: `int`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn BGP Ops object and store the iBGP neighbor(s)\n if has any, otherwise, SKIP the trigger\n 2. 
Save the current device configurations through \"method\" which user uses\n 3. Unconfigure the learned iBGP neighbor(s) from step 1 \n with BGP Conf object\n 4. Verify the iBGP neighbor(s) from step 3 are no longer existed\n 5. Recover the device configurations to the one in step 2\n 6. Learn BGP Ops again and verify it is the same as the Ops in step 1\n\n \"\"\"\n\n mapping = Mapping(requirements={'ops.bgp.bgp.Bgp':{\n 'requirements':[['info', 'instance', '(?P<instance>.*)',\n 'bgp_id', '(?P<bgp_id>.*)'],\n ['info', 'instance', '(?P<instance>.*)',\n 'vrf', '(?P<vrf>.*)', 'neighbor',\n '(?P<neighbor>.*)', 'remote_as',\n '(?P<bgp_id>.*)'],\n ['info', 'instance', '(?P<instance>.*)', 'vrf',\n '(?P<vrf>.*)', 'neighbor',\n '(?P<neighbor>.*)', 'session_state', 'established']],\n 'all_keys':True,\n 'kwargs':{'attributes':['info']},\n 'exclude': bgp_exclude}},\n config_info={'conf.bgp.Bgp':{\n 'requirements':[['device_attr', '{uut}', 'vrf_attr', '(?P<vrf>.*)',\n 'neighbor_attr','(?P<neighbor>.*)']\n ],\n 'verify_conf':False,\n 'kwargs':{'mandatory':{'bgp_id': '(?P<bgp_id>.*)'}}}},\n verify_ops={'ops.bgp.bgp.Bgp':{\n 'requirements': [[partial(verify_ops_or_logic,\n requires=[['info', 'instance', '(?P<instance>.*)', 'vrf',\n '(?P<vrf>.*)', 'neighbor',\n NotExists('(?P<neighbor>.*)')],\n ['info', 'instance', '(?P<instance>.*)', 'vrf',\n '(?P<vrf>.*)', NotExists('neighbor')],\n ])\n ]],\n 'kwargs':{'attributes':['info']},\n 'exclude': bgp_exclude}},\n num_values={'vrf':1, 'instance':1,\n 'neighbor':1, 'bgp_id': 1})\n\n\nclass TriggerUnconfigConfigBgpRouterId(TriggerUnconfigConfig):\n \"\"\"Unconfigure and reapply the bgp-id of dynamically learned BGP instance(s).\"\"\"\n \n __description__ = \"\"\"Unconfigure and reapply the bgp-id of dynamically learned BGP instance(s).\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery: \n Buffer recovery timeout when the previous timeout has been exhausted,\n to make sure the devices are recovered before ending the trigger\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n instance: `str`\n vrf: `str`\n neighbor: `str`\n bgp_id: `int`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn BGP Conf object and store the BGP instance(s)\n if has bgp_id configured, otherwise, SKIP the trigger.\n And learn BGP ops object for verifying in step 4 and 6\n 2. Save the current device configurations through \"method\" which user uses\n 3. Unconfigure the learned BGP instance(s) bgp-id from step 1 \n with BGP Conf object\n 4. 
Verify the BGP instance(s) bgp-id from step 3 no longer exists\n        5. Recover the device configurations to the one in step 2\n        6. Learn BGP Ops again and verify it is the same as the Ops in step 1\n\n    """\n\n    mapping = Mapping(requirements={'ops.bgp.bgp.Bgp':{\n                                        'requirements':[['info', 'instance', '(?P<instance>.*)',\n                                                         'vrf', '(?P<vrf>.*)','router_id', '(?P<routerId>.*)'],\n                                                        ['info', 'instance', '(?P<instance>.*)', 'bgp_id', '(?P<bgp_id>.*)']],\n                                        'all_keys':True,\n                                        'kwargs':{'attributes':['info']},\n                                        'exclude': bgp_exclude},\n                                    'conf.bgp.Bgp':{\n                                        'requirements':[['device_attr', '{uut}', '_vrf_attr', '(?P<vrf>.*)',\n                                                         'router_id', '(?P<router_id>.*)']]}},\n                      config_info={'conf.bgp.Bgp':{\n                                       'requirements':[['device_attr', '{uut}', 'vrf_attr', '(?P<vrf>.*)',\n                                                        'router_id', '(?P<router_id>.*)']],\n                                       'verify_conf':False,\n                                       'kwargs':{'mandatory':{'bgp_id': '(?P<bgp_id>.*)'}}}},\n                      verify_ops={'ops.bgp.bgp.Bgp':{\n                                      'requirements': [['info', 'instance', '(?P<instance>.*)', 'vrf',\n                                                        '(?P<vrf>.*)', 'router_id', '(.*)']],  # will still pick up some loopback interface ip\n                                      'kwargs':{'attributes':['info']},\n                                      'exclude': bgp_exclude},\n                                  'conf.bgp.Bgp':{\n                                      'requirements': [['device_attr', '{uut}', '_vrf_attr',\n                                                        '(?P<vrf>.*)', NotExists('router_id')]],  # no router_id should exist in conf\n                                      'kwargs':{'attributes':['info']},\n                                      'exclude': bgp_exclude}},\n                      num_values={'vrf':'all', 'instance':'all',\n                                  'router_id':'all'})\n\n\nclass TriggerUnconfigConfigBgpNeighborVrf(TriggerUnconfigConfig):\n    """Unconfigure and reapply the whole configurations of dynamically learned BGP neighbor(s) under non-default vrf(s)."""\n\n    __description__ = """Unconfigure and reapply the whole configurations of dynamically learned BGP neighbor(s) under non-default vrf(s).\n\n    trigger_datafile:\n        Mandatory:\n            timeout: \n                max_time (`int`): Maximum wait time for the trigger,\n                                in second. Default: 180\n                interval (`int`): Wait time between iteration when looping is needed,\n                                in second. Default: 15\n            method (`str`): Method to recover the device configuration,\n                          Support methods:\n                            'checkpoint': Rollback the configuration by\n                                          checkpoint (nxos),\n                                          archive file (iosxe),\n                                          load the saved running-config file on disk (iosxr)\n        Optional:\n            tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n                                 restored to the reference rate,\n                                 in second. Default: 60\n            tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n                               in second. Default: 10\n            timeout_recovery: \n                Buffer recovery timeout when the previous timeout has been exhausted,\n                to make sure the devices are recovered before ending the trigger\n\n                max_time (`int`): Maximum wait time for the last step of the trigger,\n                                in second. Default: 180\n                interval (`int`): Wait time between iteration when looping is needed,\n                                in second. Default: 15\n            static:\n                The keys below are dynamically learnt by default.\n                However, they can also be set to a custom value when provided in the trigger datafile.\n\n                instance: `str`\n                vrf: `str`\n                neighbor: `str`\n                bgp_id: `int`\n\n                (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n                      OR\n                      interface: 'Ethernet1/1/1' (Specific value)\n    steps:\n        1. Learn BGP Ops object and store the BGP neighbor(s) under non-default vrf(s)\n           if there is any, otherwise SKIP the trigger\n        2. Save the current device configurations through "method" which user uses\n        3. Unconfigure the learned BGP vrf(s) from step 1 \n           with BGP Conf object\n        4. Verify the BGP neighbor(s) under the vrf(s) from step 3 no longer exist\n        5. Recover the device configurations to the one in step 2\n        6. 
 mapping = Mapping(requirements={'ops.bgp.bgp.Bgp':{\n 'requirements':[['info', 'instance', '(?P<instance>.*)', 'vrf',\n '(?P<vrf>^(?!default).*)', 'neighbor',\n '(?P<neighbor>.*)', 'session_state', 'established'],\n ['info', 'instance', '(?P<instance>.*)', 'bgp_id', '(?P<bgp_id>.*)']],\n 'all_keys':True,\n 'kwargs':{'attributes':['info']},\n 'exclude': bgp_exclude}},\n config_info={'conf.bgp.Bgp':{\n 'requirements':[['device_attr', '{uut}', 'vrf_attr', '(?P<vrf>^(?!default).*)']],\n 'verify_conf':False,\n 'kwargs':{'mandatory':{'bgp_id': '(?P<bgp_id>.*)'}}}},\n verify_ops={'ops.bgp.bgp.Bgp':{\n 'requirements': [[partial(verify_ops_or_logic,\n requires=[['info', 'instance', '(?P<instance>.*)', NotExists('vrf')],\n ['info', 'instance', '(?P<instance>.*)', 'vrf',\n NotExists('(?P<vrf>.*)')],\n ['info', 'instance', '(?P<instance>.*)', 'vrf',\n '(?P<vrf>.*)', NotExists('neighbor')]\n ])\n ]],\n 'kwargs':{'attributes':['info']},\n 'exclude': bgp_exclude}},\n num_values={'vrf':'all', 'instance':'all',\n 'neighbor':'all'})\n\n\nclass TriggerUnconfigConfigBgpNeighborAsOverride(TriggerUnconfigConfig):\n \"\"\"Unconfigure and reapply the whole configurations of dynamically\n learned BGP neighbor(s) as_override.\"\"\"\n\n __description__ = \"\"\"Unconfigure and reapply the whole configurations of dynamically\n learned BGP neighbor(s) as_override.\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery: \n Buffer recovery timeout when the previous timeout has been exhausted,\n to make sure the devices are recovered before ending the trigger\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n instance: `str`\n vrf: `str`\n neighbor: `str`\n address_family: `str`\n bgp_id: `int`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n\n steps:\n 1. Learn BGP Ops object and store the BGP neighbor(s)\n if has as_override enabled, otherwise, SKIP the trigger\n 2. Save the current device configurations through \"method\" which user uses\n 3. Unconfigure the learned BGP neighbor(s) as_override from step 1 \n with BGP Conf object\n 4. Verify the BGP neighbor(s) as_override from step 3 no longer exist\n 5. Recover the device configurations to the one in step 2\n 6. 
Learn BGP Ops again and verify it is the same as the Ops in step 1\n\n \"\"\"\n # configuration steps callable\n def unconfigure_route_ref(self, conf_obj, path, **kwargs):\n\n paths = self._path_population([path], kwargs['device'])\n # find the positions where the neighbor IPs sit and replace the\n # ip strings with IPv4Address/IPv6Address objects\n for path in paths:\n ipv4_index_list = [path.index(val) for val in path if '.' in str(val)]\n ipv6_index_list = [path.index(val) for val in path if ':' in str(val)]\n\n for index in ipv4_index_list:\n path[index] = IPv4Address(path[index])\n for index in ipv6_index_list:\n path[index] = IPv6Address(path[index])\n\n config = '\\n'.join([str(conf_path) for conf_path in paths])\n log.info('With following configuration:\\n{c}'\n .format(c=config))\n\n Configure.conf_configure(device=kwargs['device'],\n conf=conf_obj,\n conf_structure=paths,\n unconfig=True)\n\n mapping = Mapping(requirements={'ops.bgp.bgp.Bgp':{\n 'requirements':[['info', 'instance', '(?P<instance>.*)',\n 'vrf', '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)',\n 'address_family', '(?P<address_family>.*)',\n 'as_override', True],\n ['info', 'instance', '(?P<instance>.*)', 'vrf',\n '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)', 'session_state', 'established'],\n ['info', 'instance', '(?P<instance>.*)', 'bgp_id', '(?P<bgp_id>.*)']],\n 'all_keys':True,\n 'kwargs':{'attributes':['info']},\n 'exclude': bgp_exclude}},\n config_info={'conf.bgp.Bgp':{\n 'requirements':[[partial(unconfigure_route_ref, path = [\n 'device_attr', '{uut}', 'vrf_attr', '(?P<vrf>.*)',\n 'neighbor_attr', '(?P<neighbor>.*)', 'address_family_attr',\n '(?P<address_family>.*)', 'nbr_af_as_override', True]),\n ]],\n 'verify_conf':False,\n 'kwargs':{'mandatory':{'bgp_id': '(?P<bgp_id>.*)'}}}},\n verify_ops={'ops.bgp.bgp.Bgp':{\n 'requirements': [['info', 'instance', '(?P<instance>.*)',\n 'vrf', '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)',\n 'address_family', '(?P<address_family>.*)',\n NotExists('as_override')]],\n 'kwargs':{'attributes':['info']},\n 'exclude': bgp_exclude}},\n num_values={'vrf':'all', 'instance':'all',\n 'address_family':'all', 'neighbor': 'all'})\n\n\nclass TriggerUnconfigConfigBgpNeighborEbgp(TriggerUnconfigConfig):\n \"\"\"Unconfigure and reapply the whole configurations of dynamically learned EBGP neighbor(s).\"\"\"\n\n __description__ = \"\"\"Unconfigure and reapply the whole configurations of dynamically learned EBGP neighbor(s).\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery: \n Buffer recovery timeout when the previous timeout has been exhausted,\n to make sure the devices are recovered before ending the trigger\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. 
Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n instance: `str`\n vrf: `str`\n neighbor: `str`\n bgp_id: `int`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn BGP Ops object and store the EBGP neighbor(s)\n if has any, otherwise, SKIP the trigger\n 2. Save the current device configurations through \"method\" which user uses\n 3. Unconfigure the learned EBGP neighbor(s) from step 1 \n with BGP Conf object\n 4. Verify the EBGP neighbor(s) from step 3 no longer exist\n 5. Recover the device configurations to the one in step 2\n 6. Learn BGP Ops again and verify it is the same as the Ops in step 1\n\n \"\"\"\n\n mapping = Mapping(requirements={'ops.bgp.bgp.Bgp':{\n 'requirements':[['info', 'instance', '(?P<instance>.*)', 'vrf',\n '(?P<vrf>^(?!default).*)', 'neighbor',\n '(?P<neighbor>.*)', 'session_state', 'established'],\n ['info', 'instance', '(?P<instance>.*)', 'bgp_id', '(?P<bgp_id>.*)'],\n ['info', 'instance', '(?P<instance>.*)', 'vrf',\n '(?P<vrf>^(?!default).*)', 'neighbor',\n '(?P<neighbor>.*)', 'remote_as', Different('(?P<bgp_id>.*)')]],\n 'all_keys':True,\n 'kwargs':{'attributes':['info']},\n 'exclude': bgp_exclude}},\n config_info={'conf.bgp.Bgp':{\n 'requirements':[['device_attr', '{uut}', 'vrf_attr', '(?P<vrf>^(?!default).*)',\n 'neighbor_attr','(?P<neighbor>.*)']],\n 'verify_conf':False,\n 'kwargs':{'mandatory':{'bgp_id': '(?P<bgp_id>.*)'}}}},\n verify_ops={'ops.bgp.bgp.Bgp':{\n 'requirements': [[partial(verify_ops_or_logic,\n requires=[['info', 'instance', '(?P<instance>.*)', 'vrf',\n '(?P<vrf>.*)', 'neighbor', NotExists('(?P<neighbor>.*)')],\n ['info', 'instance', '(?P<instance>.*)', 'vrf',\n '(?P<vrf>.*)', NotExists('neighbor')]\n ])\n ]],\n 'kwargs':{'attributes':['info']},\n 'exclude': bgp_exclude + ['vpnv4 unicast']}},\n num_values={'vrf':'all', 'instance':'all',\n 'neighbor':'all'})\n\n\nclass TriggerUnconfigConfigBgpVpnRd(TriggerUnconfigConfig):\n \"\"\"Unconfigure and reapply the whole configurations of dynamically\n learned BGP vrf(s) route-distinguisher.\"\"\"\n\n __description__ = \"\"\"Unconfigure and reapply the whole configurations of dynamically\n learned BGP vrf(s) route-distinguisher.\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery: \n Buffer recovery timeout when the previous timeout has been exhausted,\n to make sure the devices are recovered before ending the trigger\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. 
Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n instance: `str`\n vrf: `str`\n neighbor: `str`\n address_family: `str`\n rd: `str`\n default_vrf: `str`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn BGP Ops object and store the BGP vrf(s) with route_distinguisher\n if has any, otherwise, SKIP the trigger\n 2. Save the current device configurations through \"method\" which user uses\n 3. Unconfigure the learned BGP vrf(s) route-distinguisher from step 1 \n with BGP Conf object\n 4. Verify the BGP vrf(s) route_distinguisher from step 3 no longer exists\n 5. Recover the device configurations to the one in step 2\n 6. Learn BGP Ops again and verify it is the same as the Ops in step 1\n\n \"\"\"\n @aetest.test\n def verify_unconfigure(self, uut, abstract, steps):\n # give the device time to settle after the RD/VRF change before\n # running the standard verification\n time.sleep(120)\n super().verify_unconfigure(uut, abstract, steps)\n\n mapping = Mapping(requirements={'ops.bgp.bgp.Bgp':{\n 'requirements':[['table', 'instance', '(?P<instance>.*)',\n 'vrf', '(?P<vrf>.*)', 'address_family',\n '(?P<address_family>.*)', 'route_distinguisher',\n '(?P<rd>.*)'],\n ['table', 'instance', '(?P<instance>.*)',\n 'vrf', '(?P<vrf>.*)', 'address_family',\n '(?P<address_family>.*)', 'default_vrf',\n '(?P<default_vrf>.*)']],\n 'kwargs':{'attributes':['table', 'info']},\n 'exclude': bgp_exclude},\n 'ops.vrf.vrf.Vrf':{\n 'requirements':[['info', 'vrfs', '(?P<default_vrf>^(?!default).*)',\n 'route_distinguisher', '(?P<rd>.*)']],\n 'kwargs':{'attributes':['info']},\n 'exclude': ['maker']}},\n config_info={'conf.vrf.Vrf':{\n 'requirements':[['device_attr', '{uut}', 'rd', '(?P<rd>.*)']],\n 'verify_conf':False,\n 'kwargs':{'mandatory':{'name': '(?P<default_vrf>.*)'}}}},\n verify_ops={'ops.bgp.bgp.Bgp':{\n 'requirements': [['table', 'instance', '(?P<instance>.*)',\n 'vrf', '(?P<vrf>.*)', 'address_family',\n '(?P<address_family>.*)', NotExists('default_vrf')]],\n 'kwargs':{'attributes':['table', 'info']},\n 'exclude': bgp_exclude + ['label_allocation_mode', 'vpnv4 unicast', 'vpnv6 unicast']},\n 'ops.vrf.vrf.Vrf':{\n 'requirements':[['info', 'vrfs', '(?P<default_vrf>.*)',\n 'route_distinguisher', '0:0']],\n 'kwargs':{'attributes':['info']},\n 'exclude': ['maker']}},\n num_values={'vrf': 'all', 'instance':1,\n 'address_family': 'all', 'rd': 1, 'default_vrf': 1})\n\n\nclass TriggerUnconfigConfigBgpL2vpnCapability(TriggerUnconfigConfig):\n \"\"\"Unconfigure and reapply the whole configurations of dynamically\n learned BGP l2vpn evpn address-family.\"\"\"\n\n __description__ = \"\"\"Unconfigure and reapply the whole configurations of dynamically\n learned BGP l2vpn evpn address-family.\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. 
Default: 10\n timeout_recovery: \n Buffer recovery timeout when the previous timeout has been exhausted,\n to make sure the devices are recovered before ending the trigger\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n instance: `str`\n vrf: `str`\n neighbor: `str`\n bgp_id: `int`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn BGP Ops object and store the BGP l2vpn evpn address-family\n if has any, otherwise, SKIP the trigger\n 2. Save the current device configurations through \"method\" which user uses\n 3. Unconfigure the learned BGP l2vpn evpn address-family from step 1 \n with BGP Conf object\n 4. Verify the BGP l2vpn evpn address-family from step 3 no longer exists\n 5. Recover the device configurations to the one in step 2\n 6. Learn BGP Ops again and verify it is the same as the Ops in step 1\n\n \"\"\"\n\n mapping = Mapping(\\\n requirements={\\\n 'ops.bgp.bgp.Bgp':{\n 'requirements':[\\\n ['info', 'instance', '(?P<instance>.*)', 'vrf', '(?P<vrf>.*)',\n 'neighbor', '(?P<neighbor>.*)', 'address_family', '(?P<address_family>^l2vpn +evpn$)',\n 'session_state', 'established'],\n ['info', 'instance', '(?P<instance>.*)', 'bgp_id', '(?P<bgp_id>.*)']],\n 'all_keys': True,\n 'kwargs':{'attributes':['info[instance][(.*)][bgp_id]',\n 'info[list_of_vrfs]',\n 'info[instance][(.*)][vrf][(.*)][neighbor]'\n '[(.*)][address_family][(.*)][session_state]']},\n 'exclude': bgp_exclude}},\n config_info={\\\n 'conf.bgp.Bgp':{\n 'requirements':[\\\n ['device_attr', '{uut}', 'vrf_attr', '(?P<vrf>.*)',\n 'neighbor_attr', '(?P<neighbor>.*)', 'address_family_attr', '(?P<address_family>.*)']],\n 'verify_conf':False,\n 'kwargs':{'mandatory':{'bgp_id': '(?P<bgp_id>.*)'}}}},\n verify_ops={\\\n 'ops.bgp.bgp.Bgp':{\n 'requirements':[[partial(verify_ops_or_logic,\n requires=[['info', 'instance', '(?P<instance>.*)', 'vrf', '(?P<vrf>.*)',\n 'neighbor', '(?P<neighbor>.*)', 'address_family',\n NotExists('(?P<address_family>.*)')],\n ['info', 'instance', '(?P<instance>.*)', 'vrf', '(?P<vrf>.*)',\n 'neighbor', NotExists('(?P<neighbor>.*)')],\n ['info', 'instance', '(?P<instance>.*)', 'vrf', '(?P<vrf>.*)',\n 'neighbor', '(?P<neighbor>.*)', 'address_family',\n NotExists('(?P<address_family>^l2vpn +evpn$)')]\n ])\n ]],\n 'kwargs':{'attributes':['info[instance][(.*)][bgp_id]',\n 'info[list_of_vrfs]',\n 'info[instance][(.*)][vrf][(.*)][neighbor]'\n '[(.*)][address_family][(.*)][session_state]']},\n 'exclude': bgp_exclude}},\n num_values={'instance':1, 'vrf':1, 'neighbor':1, 'address_family': 1})\n\n\nclass TriggerUnconfigConfigBgpAfL2vpnEvpnRewriteEvpnRtAsn(TriggerUnconfigConfig):\n \"\"\"Unconfigure and reapply the whole configurations of dynamically\n learned BGP l2vpn evpn address-family evpn rewrite-evpn-rt-asn.\"\"\"\n\n __description__ = \"\"\"Unconfigure and reapply the whole configurations of dynamically\n learned BGP l2vpn evpn address-family evpn rewrite-evpn-rt-asn.\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. 
Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery:\n Buffer recovery timeout when the previous timeout has been exhausted,\n to make sure the devices are recovered before ending the trigger\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n instance: `str`\n vrf: `str`\n neighbor: `str`\n address_family: `str`\n bgp_id: `int`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn BGP Ops object and store the BGP l2vpn evpn address-family evpn rewrite-evpn-rt-asn\n if has any, otherwise, SKIP the trigger\n 2. Save the current device configurations through \"method\" which user uses\n 3. Unconfigure the learned rewrite-evpn-rt-asn from step 1\n with BGP Conf object\n 4. Verify the evpn rewrite-evpn-rt-asn under bgp l2vpn evpn address-family from step 3\n no longer existed\n 5. Recover the device configurations to the one in step 2\n 6. Learn BGP Ops again and verify it is the same as the Ops in step 1\n\n \"\"\"\n mapping = Mapping(\\\n requirements={ \\\n 'conf.bgp.Bgp': {\n 'requirements': [ \\\n ['device_attr', '{uut}', '_vrf_attr', '(?P<vrf>.*)', '_neighbor_attr',\n '(?P<neighbor>.*)', '_address_family_attr', '(?P<af>.*)', 'nbr_af_rewrite_evpn_rt_asn', True]],\n 'exclude': bgp_exclude},\n 'ops.bgp.bgp.Bgp':{\n 'requirements':[\\\n [['info', 'instance', '(?P<instance>.*)', 'vrf', '(?P<vrf>.*)',\n 'neighbor', '(?P<neighbor>.*)', 'address_family', '(?P<address_family>^l2vpn +evpn$)',\n 'session_state', 'established']],\n [['info', 'instance', '(?P<instance>.*)', 'bgp_id', '(?P<bgp_id>.*)']],\n [['info', 'instance', '(?P<instance>.*)', 'vrf', \\\n '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)', 'remote_as', Different('(?P<bgp_id>.*)')]]],\n 'all_keys': True,\n 'kwargs':{'attributes':['info[instance][(.*)][bgp_id]',\n 'info[list_of_vrfs]',\n 'info[instance][(.*)][vrf][(.*)][neighbor]'\n '[(.*)][address_family][(.*)][session_state]',\n 'info[instance][(.*)][vrf][(.*)][neighbor]'\n '[(.*)][remote_as]']},\n 'exclude': bgp_exclude}},\n config_info={\\\n 'conf.bgp.Bgp':{\n 'requirements':[\\\n ['device_attr', '{uut}', 'vrf_attr', '(?P<vrf>.*)',\n 'neighbor_attr', '(?P<neighbor>.*)', 'address_family_attr', '(?P<address_family>.*)',\\\n 'nbr_af_rewrite_evpn_rt_asn', True]],\n 'verify_conf':False,\n 'kwargs':{'mandatory':{'bgp_id': '(?P<bgp_id>.*)'}}}},\n verify_ops={ \\\n 'conf.bgp.Bgp': {\n 'requirements': [ \\\n ['device_attr', '{uut}', '_vrf_attr', '(?P<vrf>.*)', '_neighbor_attr',\n '(?P<neighbor>.*)', '_address_family_attr', '(?P<af>.*)', NotExists('nbr_af_rewrite_evpn_rt_asn')]],\n 'exclude': bgp_exclude},\n 'ops.bgp.bgp.Bgp':{\n 'requirements':[\\\n ['info', 'instance', '(?P<instance>.*)', 'vrf', '(?P<vrf>.*)',\n 'neighbor', '(?P<neighbor>.*)', 'address_family', 
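'(?P<address_family>^l2vpn +evpn$)', 'session_state', 'established']],\n 'kwargs':{'attributes':['info[instance][(.*)][bgp_id]',\n 'info[list_of_vrfs]',\n 'info[instance][(.*)][vrf][(.*)][neighbor][(.*)][address_family][(.*)][session_state]',\n 'info[instance][(.*)][vrf][(.*)][neighbor][(.*)][remote_as]']},\n 'exclude': bgp_exclude}},\n num_values={'instance':1, 'vrf':1, 'neighbor':1, 'address_family': 1})\n\n # Note (illustrative): the verification above is two-sided. The Conf check\n # asserts that 'nbr_af_rewrite_evpn_rt_asn' is gone from the running\n # configuration (NotExists), while the Ops check asserts the l2vpn evpn\n # session is still established, i.e. removing the knob must not tear the\n # session down.\n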
\n\nclass TriggerUnconfigConfigBgpAddressFamilyIpv4Mvpn(TriggerUnconfigConfig):\n \"\"\"Unconfigure and reapply the whole configurations of dynamically\n learned BGP ipv4 mvpn address-family.\"\"\"\n\n __description__ = \"\"\"Unconfigure and reapply the whole configurations of dynamically\n learned BGP ipv4 mvpn address-family.\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery:\n Buffer recovery timeout when the previous timeout has been exhausted,\n to make sure the devices are recovered before ending the trigger\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n instance: `str`\n vrf: `str`\n neighbor: `str`\n address_family: `str`\n bgp_id: `int`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn BGP Ops object and store the BGP ipv4 mvpn address-family\n if has any, otherwise, SKIP the trigger\n 2. Save the current device configurations through \"method\" which user uses\n 3. Unconfigure the learned ipv4 mvpn address-family from step 1\n with BGP Conf object\n 4. Verify the ipv4 mvpn address-family under router bgp from step 3\n no longer exists\n 5. Recover the device configurations to the one in step 2\n 6. 
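Learn BGP Ops again and verify it is the same as the Ops in step 1\n\n \"\"\"\n # Note (illustrative): after the address-family is removed, verification is\n # done on the Conf side: every remaining address_family under the vrf must\n # match '(?!ipv4 mvpn).*', a negative lookahead that rejects 'ipv4 mvpn'.\n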
 mapping = Mapping( \\\n requirements={ \\\n 'ops.bgp.bgp.Bgp': {\n 'requirements': [ \\\n ['info', 'instance', '(?P<instance>.*)', 'vrf', '(?P<vrf>.*)', 'address_family', '(?P<af>(ipv4 mvpn))', '(?P<af_info>.*)'],\n ['info', 'instance', '(?P<instance>.*)', 'vrf', '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)',\n 'session_state', 'established'],\n ['info', 'instance', '(?P<instance>.*)', 'bgp_id', '(?P<bgp_id>.*)']],\n 'all_keys': True,\n 'kwargs': {'attributes': ['info']},\n 'exclude': trm_exclude + ['bgp_table_version','updates']}},\n config_info={ \\\n 'conf.bgp.Bgp': {\n 'requirements': [ \\\n ['device_attr', '{uut}', 'vrf_attr', '(?P<vrf>.*)', 'address_family_attr', '(?P<af>.*)']],\n 'verify_conf': False,\n 'kwargs': {'mandatory': {'bgp_id': '(?P<bgp_id>.*)'}}}},\n verify_ops={ \\\n 'conf.bgp.Bgp': {\n 'requirements': [ \\\n ['device_attr', '{uut}', '_vrf_attr', '(?P<vrf>.*)', '_address_family_attr', '(?P<af>(?!ipv4 mvpn).*)']],\n 'exclude': trm_exclude}},\n num_values={'instance': 1, 'vrf': 1, 'neighbor': 'all', 'af':1})\n\n\nclass TriggerUnconfigConfigBgpNeighborAddressFamilyIpv4Mvpn(TriggerUnconfigConfig):\n \"\"\"Unconfigure and reapply the whole configurations of dynamically\n learned ipv4 mvpn address-family under BGP neighbors.\"\"\"\n\n __description__ = \"\"\"Unconfigure and reapply the whole configurations of dynamically\n learned ipv4 mvpn address-family under BGP neighbors.\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery:\n Buffer recovery timeout when the previous timeout has been exhausted,\n to make sure the devices are recovered before ending the trigger\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n instance: `str`\n vrf: `str`\n neighbor: `str`\n bgp_id: `int`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn BGP Ops object and store the ipv4 mvpn address-family under BGP neighbors\n if has any, otherwise, SKIP the trigger\n 2. Save the current device configurations through \"method\" which user uses\n 3. Unconfigure the learned ipv4 mvpn address-family under BGP neighbors from step 1\n with BGP Conf object\n 4. Verify the ipv4 mvpn address-family under BGP neighbors from step 3\n no longer exists\n 5. Recover the device configurations to the one in step 2\n 6. 
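Learn BGP Ops again and verify it is the same as the Ops in step 1\n\n \"\"\"\n # Note (illustrative, semantics assumed from ats.utils.objects usage):\n # in the verification below, Not('ipv4 mvpn') conceptually matches any\n # remaining key except 'ipv4 mvpn', while NotExists(...) asserts that a\n # key is absent from the learned structure altogether.\n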
 mapping = Mapping( \\\n requirements={ \\\n 'ops.bgp.bgp.Bgp': {\n 'requirements': [\\\n ['info', 'instance', '(?P<instance>.*)', 'vrf', '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)',\n 'address_family', '(?P<af>(ipv4 mvpn))', '(?P<af_info>.*)'],\n ['info', 'instance', '(?P<instance>.*)', 'vrf', '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)',\n 'bgp_negotiated_capabilities', 'ipv4_mvpn', '(?P<negotiated_cap>^(advertised).*)'],\n ['info', 'instance', '(?P<instance>.*)', 'vrf', '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)',\n 'session_state', 'established'],\n ['info', 'instance', '(?P<instance>.*)', 'bgp_id', '(?P<bgp_id>.*)']],\n 'all_keys': True,\n 'kwargs': {'attributes': ['info']},\n 'exclude': trm_exclude + ['bgp_table_version','updates','capability']}},\n config_info={ \\\n 'conf.bgp.Bgp': {\n 'requirements': [ \\\n ['device_attr', '{uut}', 'vrf_attr', '(?P<vrf>.*)', 'neighbor_attr', '(?P<neighbor>.*)',\n 'address_family_attr', '(?P<af>.*)']],\n 'verify_conf': False,\n 'kwargs': {'mandatory': {'bgp_id': '(?P<bgp_id>.*)'}}}},\n verify_ops={ \\\n 'conf.bgp.Bgp': {\n 'requirements': [ \\\n ['device_attr', '{uut}', '_vrf_attr', '(?P<vrf>.*)', '_neighbor_attr',\n '(?P<neighbor>.*)', '_address_family_attr', Not('ipv4 mvpn')]],\n 'exclude': trm_exclude},\n 'ops.bgp.bgp.Bgp': {\n 'requirements': [ \\\n ['info', 'instance', '(?P<instance>.*)', 'vrf', '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)',\n 'address_family', NotExists('ipv4 mvpn'), NotExists('(.*)')],\n ['info', 'instance', '(?P<instance>.*)', 'vrf', '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)',\n 'bgp_negotiated_capabilities', 'ipv4_mvpn', Not('advertised')]],\n 'kwargs': {'attributes': ['info']},\n 'exclude': trm_exclude + ['updates','bgp_table_version','capability']}},\n num_values={'instance': 1, 'vrf': 1, 'neighbor': 1, 'af': 1, 'negotiated_cap':1})\n\n", "id": "11472952", "language": "Python", "matching_score": 8.063822746276855, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/unconfigconfig/bgp/nxos/unconfigconfig.py" }, { "content": "'''Implementation for bgp unconfigconfig triggers'''\n\nfrom ats.utils.objects import Not, NotExists\n\n# Genie Libs\nfrom genie.libs.sdk.libs.utils.mapping import Mapping\nfrom genie.libs.sdk.triggers.unconfigconfig.unconfigconfig import TriggerUnconfigConfig\n\n\n# Which key to exclude for BGP Ops comparison\nbgp_exclude = ['maker', 'bgp_session_transport', 'route_refresh',\n 'bgp_negotiated_capabilities', 'notifications', 'capability',\n 'keepalives', 'total', 'total_bytes', 'up_time', 'last_reset',\n 'bgp_negotiated_keepalive_timers', 'updates', 'opens',\n 'bgp_table_version', 'holdtime', 'keepalive_interval',\n 'distance_internal_as', 'bgp_neighbor_counters',\n 'memory_usage', 'total_entries', 'routing_table_version',\n 'total_memory', 'totals', 'distance_extern_as', 'reset_reason']\n\n\nclass TriggerUnconfigConfigBgpNeighbor(TriggerUnconfigConfig):\n \"\"\"Unconfigure and reapply the whole configurations of dynamically learned BGP neighbor(s).\"\"\"\n\n __description__ = \"\"\"Unconfigure and reapply the whole configurations of dynamically learned BGP neighbor(s).\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. 
Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery: \n Buffer recovery timeout when the previous timeout has been exhausted,\n to make sure the devices are recovered before ending the trigger\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n instance: `str`\n vrf: `str`\n neighbor: `str`\n bgp_id: `int`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn BGP Ops object and store the BGP neighbor(s)\n if has any, otherwise, SKIP the trigger\n 2. Save the current device configurations through \"method\" which user uses\n 3. Unconfigure the learned BGP neighbor(s) from step 1 \n with BGP Conf object\n 4. Verify the BGP neighbor(s) from step 3 are no longer existed\n 5. Recover the device configurations to the one in step 2\n 6. Learn BGP Ops again and verify it is the same as the Ops in step 1\n\n \"\"\"\n\n mapping = Mapping(requirements={'ops.bgp.bgp.Bgp':{\n 'requirements':[['info', 'instance', '(?P<instance>.*)',\n 'vrf', '(?P<vrf>.*)', 'neighbor',\n '(?P<neighbor>.*)', '(?P<neighbor_info>.*)'],\n ['info', 'instance', '(?P<instance>.*)', 'bgp_id', '(?P<bgp_id>.*)']],\n 'all_keys': True,\n 'kwargs':{'attributes':['info']},\n 'exclude': bgp_exclude}},\n config_info={'conf.bgp.Bgp':{\n 'requirements':[['device_attr', '{uut}', 'vrf_attr', '(?P<vrf>.*)',\n 'neighbor_attr','(?P<neighbor>.*)']],\n 'verify_conf':False,\n 'kwargs':{'mandatory':{'bgp_id': '(?P<bgp_id>.*)'}}}},\n verify_ops={'ops.bgp.bgp.Bgp':{\n 'requirements': [['info', 'instance', '(?P<instance>.*)', 'vrf',\n '(?P<vrf>.*)', NotExists('neighbor')]], # '(?P<neighbor>.*)', '(?P<neighbor_info>.*)'\n 'kwargs':{'attributes':['info']},\n 'exclude': bgp_exclude}},\n num_values={'vrf':'all', 'instance':'all',\n 'neighbor':'all'})\n\n\nclass TriggerUnconfigConfigBgp(TriggerUnconfigConfig):\n \"\"\"Unconfigure and reapply the whole configurations of dynamically learned BGP instance(s).\"\"\"\n \n __description__ = \"\"\"Unconfigure and reapply the whole configurations of dynamically learned BGP instance(s).\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. 
Default: 10\n timeout_recovery: \n Buffer recovery timeout make sure devices are recovered at the end\n of the trigger execution. Used when previous timeouts have been exhausted.\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n bgp_id: `int`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn BGP Ops object and store the BGP instance(s)\n if has any, otherwise, SKIP the trigger\n 2. Save the current device configurations through \"method\" which user uses\n 3. Unconfigure the learned BGP instance(s) from step 1 \n with BGP Conf object\n 4. Verify the BGP instance(s) from step 3 are no longer existed\n 5. Recover the device configurations to the one in step 2\n 6. Learn BGP Ops again and verify it is the same as the Ops in step 1\n\n \"\"\"\n\n mapping = Mapping(requirements={'ops.bgp.bgp.Bgp':{\n 'requirements':[['info', 'instance', '(?P<instance>.*)',\n 'bgp_id', '(?P<bgp_id>.*)']],\n 'kwargs':{'attributes':['info']},\n 'exclude': bgp_exclude}},\n config_info={'conf.bgp.Bgp':{\n 'requirements':[['device_attr', '{uut}']],\n 'verify_conf':False,\n 'kwargs':{'mandatory':{'bgp_id': '(?P<bgp_id>.*)'}}}},\n verify_ops={'ops.bgp.bgp.Bgp':{\n 'requirements': [['info', NotExists('instance')]],\n 'kwargs':{'attributes':['info']},\n 'exclude': bgp_exclude}},\n num_values={'bgp_id':'all', 'instance':'all'})\n", "id": "9458200", "language": "Python", "matching_score": 4.6220293045043945, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/unconfigconfig/bgp/unconfigconfig.py" }, { "content": "'''Implementation for interface shutnoshut triggers'''\n\n# Genie Libs\nfrom genie.libs.sdk.libs.utils.mapping import Mapping\nfrom genie.libs.sdk.triggers.shutnoshut.shutnoshut import TriggerShutNoShut\n\n# Which key to exclude for BGP Ops comparison\ninterface_exclude = ['maker', 'last_change','in_rate','in_rate_pkts',\n 'out_rate', 'out_rate_pkts', 'in_octets', 'in_errors',\n 'in_pkts', 'in_unicast_pkts', 'out_octets',\n 'out_pkts', 'out_unicast_pkts', 'out_multicast_pkts',\n 'in_multicast_pkts', 'last_clear', 'in_broadcast_pkts',\n 'out_broadcast_pkts','bandwidth','duplex_mode',\n '(Tunnel.*)', 'accounting']\n\n\nclass TriggerShutNoShutTrunkInterface(TriggerShutNoShut):\n \"\"\"Shut and unshut the dynamically learned trunk interface(s).\"\"\"\n\n __description__ = \"\"\"Shut and unshut the dynamically learned trunk interface(s).\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery: \n Buffer recovery timeout make sure devices are recovered at the end\n of the trigger execution. Used when previous timeouts have been exhausted.\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. 
Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n interface: `str`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn Interface Ops object and store the \"up\" trunk interface(s)\n if has any, otherwise, SKIP the trigger\n 2. Shut the learned trunk interface(s) from step 1 with Interface Conf object\n 3. Verify the state of learned trunk interface(s) from step 2 is \"down\"\n 4. Unshut the trunk interface(s) with Interface Conf object\n 5. Learn Interface Ops again and verify it is the same as the Ops in step 1\n\n \"\"\"\n\n # Mapping of Information between Ops and Conf\n # Also permit to dictate which key to verify\n mapping = Mapping(requirements={'ops.interface.interface.Interface':{\n 'requirements':[['info', '(?P<interface>.*)', 'switchport_mode', 'trunk'],\n ['info', '(?P<interface>.*)', 'enabled', True],\n ['info', '(?P<interface>.*)', 'port_channel',\n 'port_channel_int', '(?P<port_int>.*)'],\n ['info', '(?P<interface>.*)', 'oper_status', 'up']],\n 'all_keys': True,\n 'exclude': interface_exclude}},\n config_info={'conf.interface.Interface':{\n 'requirements':[['enabled', False]],\n 'verify_conf':False,\n 'kwargs':{'mandatory':{'name': '(?P<interface>.*)',\n 'attach': False}}}},\n verify_ops={'ops.interface.interface.Interface':{\n 'requirements':[['info', '(?P<interface>.*)', 'enabled', False],\n ['info', '(?P<interface>.*)', 'oper_status', 'down'],\n ['info', '(?P<port_int>.*)', 'enabled', False],\n ['info', '(?P<port_int>.*)', 'oper_status', 'down']],\n 'exclude': interface_exclude + ['(Vlan.*)']}},\n num_values={'interface': 1, 'port_int': 'all'})\n\n\nclass TriggerShutNoShutEthernetInterface(TriggerShutNoShut):\n \"\"\"Shut and unshut the dynamically learned Ethernet interface(s).\"\"\"\n\n __description__ = \"\"\"Shut and unshut the dynamically learned Ethernet interface(s).\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery: \n Buffer recovery timeout make sure devices are recovered at the end\n of the trigger execution. Used when previous timeouts have been exhausted.\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n interface: `str`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn Interface Ops object and store the \"up\" Ethernet interface(s)\n if has any, otherwise, SKIP the trigger\n 2. Shut the learned Ethernet interface(s) from step 1 with Interface Conf object\n 3. Verify the state of learned Ethernet interface(s) from step 2 is \"down\"\n 4. 
Unshut the Ethernet interface(s) with Interface Conf object\n 5. Learn Interface Ops again and verify it is the same as the Ops in step 1\n\n \"\"\"\n\n # Mapping of Information between Ops and Conf\n # Also permit to dictate which key to verify\n mapping = Mapping(requirements={'ops.interface.interface.Interface':{\n 'requirements':[\\\n ['info', '(?P<interface>Ethernet(\\S+))', 'oper_status', 'up'],\n ['info', '(?P<interface>.*)', 'enabled', True],\n ['info', '(?P<interface>.*)', 'port_channel', 'port_channel_member', False]],\n 'exclude': interface_exclude}},\n config_info={'conf.interface.Interface':{\n 'requirements':[['enabled', False]],\n 'verify_conf':False,\n 'kwargs':{'mandatory':{'name': '(?P<interface>.*)',\n 'attach': False}}}},\n verify_ops={'ops.interface.interface.Interface':{\n 'requirements':[\\\n ['info', '(?P<interface>.*)', 'enabled', False],\n ['info', '(?P<interface>.*)', 'oper_status', 'down'],\n ['info', '(.*)', 'enabled', False]],\n 'exclude': interface_exclude}},\n num_values={'interface': 1})\n\n\nclass TriggerShutNoShutVlanInterface(TriggerShutNoShut):\n \"\"\"Shut and unshut the dynamically learned Vlan interface(s).\"\"\"\n\n __description__ = \"\"\"Shut and unshut the dynamically learned Vlan interface(s).\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery: \n Buffer recovery timeout make sure devices are recovered at the end\n of the trigger execution. Used when previous timeouts have been exhausted.\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n interface: `str`\n mtu: `str`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn Interface Ops object and store the \"up\" Vlan interface(s)\n if has any, otherwise, SKIP the trigger\n 2. Shut the learned Vlan interface(s) from step 1 with Interface Conf object\n 3. Verify the state of learned Vlan interface(s) from step 2 is \"down\"\n 4. Unshut the Vlan interface(s) with Interface Conf object\n 5. 
Learn Interface Ops again and verify it is the same as the Ops in step 1\n \n \"\"\"\n\n # Mapping of Information between Ops and Conf\n # Also permit to dictate which key to verify\n mapping = Mapping(requirements={'ops.interface.interface.Interface':{\n 'requirements':[['info', '(?P<interface>Vlan[0-9]+)', 'mtu', '(?P<mtu>.*)'],\n ['info', '(?P<interface>.*)', 'enabled', True],\n ['info', '(?P<interface>.*)', 'oper_status', 'up']],\n 'exclude': interface_exclude}},\n config_info={'conf.interface.Interface':{\n 'requirements':[['enabled', False]],\n 'verify_conf':False,\n 'kwargs':{'mandatory':{'name': '(?P<interface>.*)',\n 'attach': False}}}},\n verify_ops={'ops.interface.interface.Interface':{\n 'requirements':[['info', '(?P<interface>.*)', 'enabled', False],\n ['info', '(?P<interface>.*)', 'oper_status', 'down']],\n 'exclude': interface_exclude}},\n num_values={'interface': 1, 'mtu': 1})\n\n\nclass TriggerShutNoShutLoopbackInterface(TriggerShutNoShut):\n \"\"\"Shut and unshut the dynamically learned Loopback interface(s).\"\"\"\n \n __description__ = \"\"\"Shut and unshut the dynamically learned Loopback interface(s).\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery: \n Buffer recovery timeout make sure devices are recovered at the end\n of the trigger execution. Used when previous timeouts have been exhausted.\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n interface: `str`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn Interface Ops object and store the \"up\" Loopback interface(s)\n if has any, otherwise, SKIP the trigger\n 2. Shut the learned Loopback interface(s) from step 1 with Interface Conf object\n 3. Verify the state of learned Loopback interface(s) from step 2 is \"down\"\n 4. Unshut the Loopback interface(s) with Interface Conf object\n 5. 
Learn Interface Ops again and verify it is the same as the Ops in step 1\n \n \"\"\"\n\n loopback_exclude = interface_exclude + ['oper_status', 'enabled', 'status']\n\n # Mapping of Information between Ops and Conf\n # Also permit to dictate which key to verify\n mapping = Mapping(requirements={'ops.interface.interface.Interface':{\n 'requirements':[['info', '(?P<interface>(Loopback|Lo|loopback)[0-9]+)', 'oper_status', 'up']],\n 'exclude': interface_exclude}},\n config_info={'conf.interface.Interface':{\n 'requirements':[['enabled', False]],\n 'verify_conf':False,\n 'kwargs':{'mandatory':{'name': '(?P<interface>.*)',\n 'attach': False}}}},\n verify_ops={'ops.interface.interface.Interface':{\n 'requirements':[['info', '(?P<interface>.*)', 'enabled', False],\n ['info', '(?P<interface>.*)', 'oper_status', 'down']],\n 'exclude': loopback_exclude}},\n num_values={'interface': 1})\n", "id": "10501905", "language": "Python", "matching_score": 6.912807464599609, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/shutnoshut/interface/shutnoshut.py" }, { "content": "'''Implementation for Interface unconfigconfig triggers'''\n\n# Genie Libs\nfrom genie.libs.sdk.libs.utils.mapping import Mapping\nfrom genie.libs.sdk.triggers.unconfigconfig.unconfigconfig import TriggerUnconfigConfig\n\n# import ats\nfrom ats import aetest\nfrom ats.utils.objects import Not, NotExists\n\n# Which key to exclude for Interface Ops comparison\ninterface_exclude = ['maker', 'last_change','in_rate','in_rate_pkts',\n 'out_rate', 'out_rate_pkts', 'in_octets',\n 'in_pkts', 'in_unicast_pkts', 'out_octets',\n 'out_pkts', 'out_unicast_pkts', 'out_multicast_pkts',\n 'in_multicast_pkts', 'last_clear', 'in_broadcast_pkts',\n 'out_broadcast_pkts', 'bandwidth', 'load_interval',\n 'port_speed', 'in_crc_errors', 'in_discards',\n 'unnumbered', '(Tunnel.*)', 'accounting']\n\n\nclass TriggerUnconfigConfigLoopbackInterface(TriggerUnconfigConfig):\n \"\"\"Unconfigure and reapply the whole configurations of dynamically\n learned Loopback interface(s).\"\"\"\n\n __description__ = \"\"\"Unconfigure and reapply the whole configurations of dynamically\n learned Loopback interface(s).\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery: \n Buffer recovery timeout make sure devices are recovered at the end\n of the trigger execution. Used when previous timeouts have been exhausted.\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n interface: `str`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n\n steps:\n 1. 
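Learn Interface Ops object and store the \"up\" Loopback interface(s)\n if has any, otherwise, SKIP the trigger\n 2. Save the current device configurations through \"method\" which user uses\n 3. Unconfigure the learned Loopback interface(s) from step 1 \n with Interface Conf object\n 4. Verify the Loopback interface(s) from step 3 no longer exist\n 5. Recover the device configurations to the one in step 2\n 6. Learn Interface Ops again and verify it is the same as the Ops in step 1\n\n \"\"\"\n\n # Note (illustrative): the interface key below is matched against\n # '(Loopback|Lo|loopback)[0-9]+', so both full and abbreviated loopback\n # names are learned, e.g. 'Loopback0' as well as 'Lo0'.\n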
 loopback_exclude = interface_exclude + ['oper_status', 'enabled', 'status']\n\n # Mapping of Information between Ops and Conf\n # Also permit to dictate which key to verify\n mapping = Mapping(requirements={'ops.interface.interface.Interface':{\n 'requirements':[['info', '(?P<interface>(Loopback|Lo|loopback)[0-9]+)', 'oper_status', 'up']],\n 'exclude': interface_exclude}},\n config_info={'conf.interface.Interface':{\n 'requirements':[],\n 'verify_conf':False,\n 'kwargs':{'mandatory':{'name': '(?P<interface>.*)',\n 'attach': False}}}},\n verify_ops={'ops.interface.interface.Interface':{\n 'requirements':[['info', NotExists('(?P<interface>.*)')]],\n 'exclude': loopback_exclude}},\n num_values={'interface':1})\n\n\nclass TriggerUnconfigConfigPhysicalTrunkInterface(TriggerUnconfigConfig):\n \"\"\"Unconfigure and reapply the whole configurations of dynamically learned\n physical (non-Loopback, non-Vlan, non-Null, non-Tunnel, non-subinterface etc.) \"trunk\" interface(s).\"\"\"\n\n __description__ = \"\"\"Unconfigure and reapply the whole configurations of dynamically learned\n physical (non-Loopback, non-Vlan, non-Null, non-Tunnel, non-subinterface etc.) \"trunk\" interface(s).\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery: \n Buffer recovery timeout make sure devices are recovered at the end\n of the trigger execution. Used when previous timeouts have been exhausted.\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n interface: `str`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn Interface Ops object and store the \"up\" physical \"trunk\" interface(s)\n if has any, otherwise, SKIP the trigger\n 2. Save the current device configurations through \"method\" which user uses\n 3. Unconfigure the learned physical interface(s) from step 1 \n with Interface Conf object\n 4. Verify the physical interface(s) from step 3 are \"down\",\n verify the configurations are all gone for the learned physical interface(s)\n 5. 
Recover the device configurations to the one in step 2\n 6. Learn Interface Ops again and verify it is the same as the Ops in step 1\n\n \"\"\"\n\n # Mapping of Information between Ops and Conf\n # Also permit to dictate which key to verify\n mapping = Mapping(requirements={'ops.interface.interface.Interface':{\n 'requirements':[['info', '(?P<interface>[E|e]thernet[\\d\\/\\.]+)',\n 'switchport_mode', 'trunk'],\n ['info', '(?P<interface>[E|e]thernet[\\d\\/\\.]+)',\n 'port_channel', 'port_channel_member', False]],\n 'exclude': interface_exclude}},\n config_info={'conf.interface.Interface':{\n 'requirements':[],\n 'verify_conf':False,\n 'kwargs':{'mandatory':{'name': '(?P<interface>.*)',\n 'attach': False}}}},\n verify_ops={'ops.interface.interface.Interface':{\n 'requirements':[['info', '(?P<interface>.*)', 'oper_status', '(.*)'],\n ['info', '(?P<interface>.*)', 'enabled', False],\n ['info', '(?P<interface>.*)', 'port_channel',\n 'port_channel_member', False],\n ['info', '(?P<interface>.*)', 'mac_address', '([\\w\\.]+)'],\n ['info', '(?P<interface>.*)', '(.*)']],\n 'exclude': interface_exclude + ['(Vlan.*)']}},\n num_values={'interface':1})\n\n\nclass TriggerUnconfigConfigVirtualTrunkInterface(TriggerUnconfigConfig):\n \"\"\"Unconfigure and reapply the whole configurations of dynamically learned\n virtual \"trunk\" (port-channel) interface(s).\"\"\"\n\n __description__ = \"\"\"Unconfigure and reapply the whole configurations of dynamically learned\n virtual \"trunk\" (port-channel) interface(s).\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery: \n Buffer recovery timeout make sure devices are recovered at the end\n of the trigger execution. Used when previous timeouts have been exhausted.\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n interface: `str`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn Interface Ops object and store the \"up\" virtual \"trunk\" interface(s)\n if has any, otherwise, SKIP the trigger\n 2. Save the current device configurations through \"method\" which user uses\n 3. Unconfigure the learned virtual interface(s) from step 1 \n with Interface Conf object\n 4. Verify the virtual interface(s) from step 3 are no longer existed\n 5. Recover the device configurations to the one in step 2\n 6. 
Learn Interface Ops again and verify it is the same as the Ops in step 1\n\n \"\"\"\n\n # Mapping of Information between Ops and Conf\n # Also permit to dictate which key to verify\n mapping = Mapping(requirements={'ops.interface.interface.Interface':{\n 'requirements':[['info', '(?P<interface>[p|P]ort-channel[\\d\\.]+)',\n 'switchport_mode', 'trunk']],\n 'exclude': interface_exclude}},\n config_info={'conf.interface.Interface':{\n 'requirements':[],\n 'verify_conf':False,\n 'kwargs':{'mandatory':{'name': '(?P<interface>.*)',\n 'attach': False}}}},\n verify_ops={'ops.interface.interface.Interface':{\n 'requirements':[['info', NotExists('(?P<interface>.*)')]],\n 'exclude': interface_exclude}},\n num_values={'interface':1})\n\n\nclass TriggerUnconfigConfigEthernetInterface(TriggerUnconfigConfig):\n \"\"\"Unconfigure and reapply the whole configurations of dynamically learned Ethernet interface(s).\"\"\"\n\n __description__ = \"\"\"Unconfigure and reapply the whole configurations of dynamically learned Ethernet interface(s).\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery: \n Buffer recovery timeout make sure devices are recovered at the end\n of the trigger execution. Used when previous timeouts have been exhausted.\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n interface: `str`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn Interface Ops object and store the \"up\" Ethernet interface(s)\n if has any, otherwise, SKIP the trigger\n 2. Save the current device configurations through \"method\" which user uses\n 3. Unconfigure the learned Ethernet interface(s) from step 1 \n with Interface Conf object\n 4. Verify the configurations are all gone for the learned Ethernet interface(s)\n 5. Recover the device configurations to the one in step 2\n 6. 
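Learn Interface Ops again and verify it is the same as the Ops in step 1\n\n \"\"\"\n\n # Note (illustrative): the helper below is used as a dynamic exclude when\n # comparing ops snapshots: any learned subinterface whose base name matches\n # a learned interface is ignored. For example (names as defined below):\n #   remove_related_subinterface('Ethernet1/1.100', 'Ethernet1/1') -> True\n #   remove_related_subinterface('Ethernet1/1', 'Ethernet1/1')     -> False\n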
Learn Interface Ops again and verify it is the same as the Ops in step 1\n\n \"\"\"\n\n\n def remove_related_subinterface(item, interface, **kwargs):\n # Easiest way is to split at the dot, and see if it exists in name\n modified_item = item.split('.')[0]\n\n # If it remained the same, dont waste time\n if item == modified_item:\n return False\n\n # See if the modified_item exists in the list of name\n if modified_item in interface:\n return True\n return False\n\n mapping = Mapping(requirements={'ops.interface.interface.Interface':{\n 'requirements':[['info', '(?P<interface>(GigabitEthernet|gigabitEthernet|Ethernet|ethernet)[0-9\\/]+$)',\n 'enabled', True],\n ['info', '(?P<interface>(GigabitEthernet|gigabitEthernet|Ethernet|ethernet)[0-9\\/]+$)',\n 'port_channel', 'port_channel_member', False],\n ['info', '(?P<interface>(GigabitEthernet|gigabitEthernet|Ethernet|ethernet)[0-9\\/]+$)',\n 'oper_status', 'up']],\n 'exclude': interface_exclude}},\n config_info={'conf.interface.Interface':{\n 'requirements':[],\n 'verify_conf':False,\n 'kwargs':{'mandatory':{'name': '(?P<interface>.*)',\n 'attach': False}}}},\n verify_ops={'ops.interface.interface.Interface':{\n 'requirements':[['info', '(?P<interface>.*)', 'vrf', 'default'],\n ['info', '(?P<interface>.*)', 'enabled', False],\n ['info', '(?P<interface>.*)', 'oper_status', 'down'],\n ['info', '(?P<interface>.*)', 'delay', '(\\d+)'],\n ['info', '(?P<interface>.*)', 'bandwidth', '(\\d+)'],\n ['info', '(?P<interface>.*)', 'encapsulation', 'encapsulation', '(\\S+)'],\n ['info', '(?P<interface>.*)', 'mac_address', '(\\S+)'],\n ['info', '(?P<interface>.*)', 'medium', '(\\S+)'],\n ['info', '(?P<interface>.*)', 'mtu', '(\\d+)'],\n ['info', '(?P<interface>.*)', 'port_channel', 'port_channel_member', False]],\n 'exclude': interface_exclude +\\\n [remove_related_subinterface, '(Vlan.*)']}},\n num_values={'interface': 1})\n\n\nclass TriggerUnconfigConfigEthernetInterfaceSub(TriggerUnconfigConfig):\n \"\"\"Unconfigure and reapply the whole configurations of dynamically learned Ethernet SubInterface(s).\"\"\"\n\n __description__ = \"\"\"Unconfigure and reapply the whole configurations of dynamically learned Ethernet SubInterface(s).\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery: \n Buffer recovery timeout make sure devices are recovered at the end\n of the trigger execution. Used when previous timeouts have been exhausted.\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. 
Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n interface: `str`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn Interface Ops object and store the \"up\" Ethernet SubInterface(s)\n if has any, otherwise, SKIP the trigger\n 2. Save the current device configurations through \"method\" which user uses\n 3. Unconfigure the learned Ethernet SubInterface(s) from step 1 \n with Interface Conf object\n 4. Verify the learned Ethernet SubInterface(s) are no longer existed\n 5. Recover the device configurations to the one in step 2\n 6. Learn Interface Ops again and verify it is the same as the Ops in step 1\n\n \"\"\"\n\n mapping = Mapping(requirements={'ops.interface.interface.Interface':{\n 'requirements':[['info', '(?P<interface>(GigabitEthernet|gigabitEthernet|Ethernet|ethernet)[0-9\\/]+\\.[0-9]+)',\n 'enabled', True],\n ['info', '(?P<interface>(GigabitEthernet|gigabitEthernet|Ethernet|ethernet)[0-9\\/]+\\.[0-9]+)',\n 'oper_status', 'up']],\n 'exclude': interface_exclude}},\n config_info={'conf.interface.Interface':{\n 'requirements':[],\n 'verify_conf':False,\n 'kwargs':{'mandatory':{'name': '(?P<interface>.*)',\n 'attach': False}}}},\n verify_ops={'ops.interface.interface.Interface':{\n 'requirements':[['info', NotExists('(?P<interface>.*)')]],\n 'exclude': interface_exclude + ['(Vlan.*)']}},\n num_values={'interface': 1})\n\n\nclass TriggerUnconfigConfigVlanInterface(TriggerUnconfigConfig):\n \"\"\"Unconfigure and reapply the whole configurations of dynamically learned Vlan interface(s).\"\"\"\n \n __description__ = \"\"\"Unconfigure and reapply the whole configurations of dynamically learned Vlan interface(s).\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery: \n Buffer recovery timeout make sure devices are recovered at the end\n of the trigger execution. Used when previous timeouts have been exhausted.\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n interface: `str`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn Interface Ops object and store the \"up\" Vlan interface(s)\n if has any, otherwise, SKIP the trigger\n 2. Save the current device configurations through \"method\" which user uses\n 3. Unconfigure the learned Vlan interface(s) from step 1 \n with Interface Conf object\n 4. 
Verify the learned Vlan interface(s) are no longer existed\n 5. Recover the device configurations to the one in step 2\n 6. Learn Interface Ops again and verify it is the same as the Ops in step 1\n\n \"\"\"\n\n vlan_exclude = interface_exclude + ['oper_status']\n\n # Mapping of Information between Ops and Conf\n # Also permit to dictate which key to verify\n mapping = Mapping(requirements={'ops.interface.interface.Interface':{\n 'requirements':[['info', '(?P<interface>(Vlan|vlan)[0-9]+)', 'oper_status', 'up']],\n 'exclude': interface_exclude}},\n config_info={'conf.interface.Interface':{\n 'requirements':[],\n 'verify_conf':False,\n 'kwargs':{'mandatory':{'name': '(?P<interface>.*)',\n 'attach': False}}}},\n verify_ops={'ops.interface.interface.Interface':{\n 'requirements':[['info', NotExists('(?P<interface>.*)')]],\n 'exclude': vlan_exclude}},\n num_values={'interface':1})\n \n", "id": "4123185", "language": "Python", "matching_score": 6.464909076690674, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/unconfigconfig/interface/unconfigconfig.py" }, { "content": "'''Implementation for interface modify triggers'''\n\n# Genie Libs\nfrom genie.libs.sdk.libs.utils.mapping import Mapping\nfrom genie.libs.sdk.triggers.modify.modify import TriggerModify\nfrom genie.libs.conf.interface import IPv4Addr, IPv6Addr\nfrom genie.libs.conf.base import IPv4Address, IPv6Address\n\n# import python\nfrom functools import partial\nfrom ats.utils.objects import Not\n\n# Which key to exclude for Interface Ops comparison\ninterface_exclude = ['maker', 'last_change','in_rate','in_rate_pkts',\n 'out_rate', 'out_rate_pkts', 'in_octets',\n 'in_pkts', 'in_unicast_pkts', 'out_octets',\n 'out_pkts', 'out_unicast_pkts', 'out_multicast_pkts',\n 'in_multicast_pkts', 'last_clear', 'in_broadcast_pkts',\n 'out_broadcast_pkts', 'in_discards', 'accounting']\n\n\nclass TriggerModifyEthernetMtu(TriggerModify):\n \"\"\"Modify and revert the mtu for dynamically learned Ethernet interface(s).\"\"\"\n\n __description__ = \"\"\"Modify and revert the mtu for dynamically learned Ethernet interface(s).\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery: \n Buffer recovery timeout make sure devices are recovered at the end\n of the trigger execution. Used when previous timeouts have been exhausted.\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n interface: `str`\n mtu: `str`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n\n steps:\n 1. 
Learn Interface Ops object and store the \"up\" Ethernet interface(s)\n if has any, otherwise, SKIP the trigger\n 2. Save the current device configurations through \"method\" which user uses\n 3. Modify the mtu of the learned Ethernet interface(s) from step 1\n with Interface Conf object\n 4. Verify the mtu of the learned Ethernet interface(s) from step 3\n changes to the modified value in step 3\n 5. Recover the device configurations to the one in step 2\n 6. Learn Interface Ops again and verify it is the same as the Ops in step 1\n\n \"\"\"\n\n # Mapping of Information between Ops and Conf\n # Also permit to dictates which key to verify\n mapping = Mapping(requirements={'ops.interface.interface.Interface':{\n 'requirements': [['info', '(?P<interface>e|Ethernet[0-9\\/\\s]+$)', 'mtu', '(?P<mtu>.*)'],\n ['info', '(?P<interface>.*)', 'enabled', True],\n ['info', '(?P<interface>.*)', 'port_channel', 'port_channel_member', False],\n ['info', '(?P<interface>.*)', 'oper_status', 'up']],\n 'exclude': interface_exclude}},\n config_info={'conf.interface.Interface':{\n 'requirements':[['mtu', 9216]],\n 'verify_conf':False,\n 'kwargs':{'mandatory':{'name': '(?P<interface>.*)',\n 'attach': False}}}},\n verify_ops={'ops.interface.interface.Interface':{\n 'requirements': [['info', '(?P<interface>.*)', 'mtu', 9216],\n ['info', '(?P<interface>.*)', 'bandwidth', '(\\d+)']],\n 'exclude': interface_exclude}},\n num_values={'interface': 1, 'mtu': 1})\n\n\nclass TriggerModifySwitchportModeTrunkToAccess(TriggerModify):\n \"\"\"Modify and revert the mode (\"trunk\" to \"access\") for dynamically learned switchport interface(s).\"\"\"\n\n __description__ = \"\"\"Modify and revert the mode (\"trunk\" to \"access\") for dynamically learned switchport interface(s).\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery: \n Buffer recovery timeout make sure devices are recovered at the end\n of the trigger execution. Used when previous timeouts have been exhausted.\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n interface: `str`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn Interface Ops object and store the \"up\" \"trunk\" interface(s)\n if has any, otherwise, SKIP the trigger\n 2. Save the current device configurations through \"method\" which user uses\n 3. Modify the mode of the learned \"trunk\" interface(s) from step 1 to \"access\"\n with Interface Conf object\n 4. Verify the mode of the learned \"trunk\" interface(s) from step 3\n changes to \"access\" in step 3\n 5. 
Recover the device configurations to the one in step 2\n 6. Learn Interface Ops again and verify it is the same as the Ops in step 1\n\n \"\"\"\n\n # Mapping of Information between Ops and Conf\n # Also permit to dictates which key to verify\n mapping = Mapping(requirements={'ops.interface.interface.Interface':{\n 'requirements':[\\\n ['info', '(?P<interface>.*)', 'switchport_mode', 'trunk'],\n ['info', '(?P<interface>.*)', 'enabled', True],\n ['info', '(?P<interface>.*)', 'oper_status', 'up'],\n ['info', '(?P<interface>.*)', 'port_channel', 'port_channel_member', False]],\n 'exclude': interface_exclude}},\n config_info={'conf.interface.Interface':{\n 'requirements':[['switchport_mode', 'access']],\n 'verify_conf':False,\n 'kwargs':{'mandatory':{'name': '(?P<interface>.*)',\n 'attach': False}}}},\n verify_ops={'ops.interface.interface.Interface':{\n 'requirements': [\\\n ['info', '(?P<interface>.*)', 'switchport_mode', 'access'],\n ['info', '(?P<interface>.*)', 'enabled', False],\n ['info', '(.*)', 'switchport_mode', '(\\w+)']],\n 'exclude': interface_exclude + ['(Vlan.*)']}},\n num_values={'interface': 1})\n\n\nclass TriggerModifyLoopbackInterfaceIp(TriggerModify):\n \"\"\"Modify and revert the ipv4 address for dynamically learned Loopback interface(s).\"\"\"\n\n __description__ = \"\"\"Modify and revert the ipv4 address for dynamically learned Loopback interface(s).\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery: \n Buffer recovery timeout make sure devices are recovered at the end\n of the trigger execution. Used when previous timeouts have been exhausted.\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n steps:\n 1. Learn Interface Ops object and store the \"up\" \"ipv4\" Loopback interface(s)\n if has any, otherwise, SKIP the trigger\n 2. Save the current device configurations through \"method\" which user uses\n 3. Modify the ipv4 address of the learned Loopback interface(s) from step 1\n with Interface Conf object\n 4. Verify the ipv4 address of the learned Loopback interface(s) from step 3\n changes to the modified value in step 3\n 5. Recover the device configurations to the one in step 2\n 6. 
Learn Interface Ops again and verify it is the same as the Ops in step 1\n\n \"\"\"\n\n # configuration steps callable\n def configure_intf_ip(self, conf_obj, ip, mask, **kwargs):\n ipv4 = IPv4Addr(device=kwargs['device'])\n ipv4.ipv4 = IPv4Address(ip)\n ipv4.prefix_length = mask\n conf_obj.add_ipv4addr(ipv4)\n conf_obj.build_config()\n\n # Mapping of Information between Ops and Conf\n # Also permits dictating which keys to verify\n mapping = Mapping(requirements={'ops.interface.interface.Interface':{\n 'requirements': [['info', '(?P<name>[l|L]oopback\\d+)',\n 'ipv4', '(?P<ip_mask>.*)', 'ip', '(?P<ip>.*)'],\n ['info', '(?P<name>[l|L]oopback\\d+)',\n 'ipv4', '(?P<ip_mask>.*)', 'prefix_length', '(?P<mask>.*)']],\n 'exclude': interface_exclude,\n 'all_keys': True}},\n config_info={'conf.interface.Interface':{\n 'requirements':[[partial(configure_intf_ip, ip='10.254.254.254',\n mask='32')]],\n 'verify_conf':False,\n 'kwargs':{'mandatory':{'name': '(?P<name>.*)',\n 'attach': False}}}},\n verify_ops={'ops.interface.interface.Interface':{\n 'requirements': [['info', '(?P<name>.*)', 'ipv4', '10.254.254.254/32',\n 'prefix_length', '32'],\n ['info', '(?P<name>.*)', 'ipv4', '10.254.254.254/32',\n 'ip', '10.254.254.254']],\n 'exclude': interface_exclude}},\n num_values={'name': 1, 'ip_mask': 1, 'ip' : 'all', 'mask': 'all'})
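\n\n\n# ---------------------------------------------------------------------------\n# Illustrative sketch only (not used by the triggers above): how a\n# 'config_info' callable such as configure_intf_ip is expected to be invoked.\n# The Mapping framework supplies the learned Conf object and the device as\n# keyword arguments at run time, while functools.partial pre-binds the new\n# address. _FakeConf, the address and the device name below are assumptions\n# made purely for this demonstration.\ndef _demo_configure_intf_ip_contract():\n class _FakeConf(object):\n \"\"\"Stand-in for conf.interface.Interface; records the calls made.\"\"\"\n def __init__(self):\n self.addrs = []\n self.built = False\n def add_ipv4addr(self, addr):\n self.addrs.append(addr)\n def build_config(self):\n self.built = True\n\n def _configure(conf_obj, ip, mask, **kwargs):\n # Mirrors the three steps of configure_intf_ip, but records the\n # (ip, mask) pair instead of building genie IPv4Addr objects.\n conf_obj.add_ipv4addr((ip, mask))\n conf_obj.build_config()\n\n bound = partial(_configure, ip='10.254.254.254', mask='32')\n conf = _FakeConf()\n bound(conf, device='R1') # framework-style invocation\n assert conf.addrs == [('10.254.254.254', '32')] and conf.built\n\n\nclass TriggerModifyLoopbackInterfaceIpv6(TriggerModify):\n \"\"\"Modify and revert the ipv6 address for dynamically learned Loopback interface(s).\"\"\"\n\n __description__ = \"\"\"Modify and revert the ipv6 address for dynamically learned Loopback interface(s).\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery: \n Buffer recovery timeout make sure devices are recovered at the end\n of the trigger execution. Used when previous timeouts have been exhausted.\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n\n steps:\n 1. Learn Interface Ops object and store the \"up\" \"ipv6\" Loopback interface(s)\n if has any, otherwise, SKIP the trigger\n 2. Save the current device configurations through \"method\" which user uses\n 3. Modify the ipv6 address of the learned Loopback interface(s) from step 1\n with Interface Conf object\n 4. Verify the ipv6 address of the learned Loopback interface(s) from step 3\n changes to the modified value in step 3\n 5. Recover the device configurations to the one in step 2\n 6. 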
Learn Interface Ops again and verify it is the same as the Ops in step 1\n\n \"\"\"\n\n # configuration steps callable\n def configure_intf_ipv6(self, conf_obj, ip, mask, **kwargs):\n\n # add old ipv6 attributes in conf_object\n # for remove the original ipv6 configurations \n ipv6 = IPv6Addr(device=kwargs['device'])\n for ipInfo in self.keys:\n if 'ip' in ipInfo:\n ipv6.ipv6 = IPv6Address(ipInfo['ip'])\n if 'mask' in ipInfo:\n ipv6.ipv6_prefix_length = ipInfo['mask']\n conf_obj.add_ipv6addr(ipv6)\n\n # remove all existing ipv6 configurations\n conf_obj.build_unconfig(attributes={'ipv6addr':None})\n # clear the used attribtues\n conf_obj.ipv6addr.clear()\n\n # configure new ipv6 address\n ipv6 = IPv6Addr(device=kwargs['device'])\n ipv6.ipv6 = IPv6Address(ip)\n ipv6.ipv6_prefix_length = mask\n conf_obj.add_ipv6addr(ipv6)\n conf_obj.build_config()\n\n # Mapping of Information between Ops and Conf\n # Also permit to dictates which key to verify\n mapping = Mapping(requirements={'ops.interface.interface.Interface':{\n 'requirements': [['info', '(?P<name>[l|L]oopback\\d+)',\n 'ipv6', '(?P<ip_mask>.*)', 'ip', '(?P<ip>.*)'],\n ['info', '(?P<name>[l|L]oopback\\d+)',\n 'ipv6', '(?P<ip_mask>.*)', 'prefix_length', '(?P<mask>.*)']],\n 'exclude': interface_exclude,\n 'all_keys': True}},\n config_info={'conf.interface.Interface':{\n 'requirements':[[partial(configure_intf_ipv6, ip='fc00:e968:6179::de52:7100:254',\n mask='64')]],\n 'verify_conf':False,\n 'kwargs':{'mandatory':{'name': '(?P<name>.*)',\n 'attach': False}}}},\n verify_ops={'ops.interface.interface.Interface':{\n 'requirements': [['info', '(?P<name>.*)', 'ipv6', 'fc00:e968:6179::de52:7100:254/64',\n 'prefix_length', '64'],\n ['info', '(?P<name>.*)', 'ipv6', 'fc00:e968:6179::de52:7100:254/64',\n 'ip', 'fc00:e968:6179::de52:7100:254'],\n ['info', '(?P<name>.*)', '(ipv6)', '([\\w\\/\\.\\:]+)']],\n 'exclude': interface_exclude}},\n num_values={'name': 1, 'ip_mask': 1, 'ip' : 'all', 'mask': 'all'})\n\n\nclass TriggerModifySviInterfaceIp(TriggerModify):\n \"\"\"Modify and revert the ipv4 address for dynamically learned Svi (Vlan) interface(s).\"\"\"\n\n __description__ = \"\"\"Modify and revert the ipv4 address for dynamically learned Svi (Vlan) interface(s).\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery: \n Buffer recovery timeout make sure devices are recovered at the end\n of the trigger execution. Used when previous timeouts have been exhausted.\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n\n steps:\n 1. Learn Interface Ops object and store the \"up\" \"ipv4\" Vlan interface(s)\n if has any, otherwise, SKIP the trigger\n 2. Save the current device configurations through \"method\" which user uses\n 3. 
Modify the ipv4 address of the learned Vlan interface(s) from step 1\n with Interface Conf object\n 4. Verify the ipv4 address of the learned Vlan interface(s) from step 3\n changes to the modified value in step 3\n 5. Recover the device configurations to the one in step 2\n 6. Learn Interface Ops again and verify it is the same as the Ops in step 1\n\n \"\"\"\n\n # configuration steps callable\n def configure_intf_ip(self, conf_obj, ip, mask, **kwargs):\n ipv4 = IPv4Addr(device=kwargs['device'])\n ipv4.ipv4 = IPv4Address(ip)\n ipv4.prefix_length = mask\n conf_obj.add_ipv4addr(ipv4)\n conf_obj.build_config()\n\n # Mapping of Information between Ops and Conf\n # Also permit to dictates which key to verify\n mapping = Mapping(requirements={'ops.interface.interface.Interface':{\n 'requirements': [\\\n ['info', '(?P<name>Vlan[0-9]+)', 'ipv4', '(?P<ip_mask>.*)', 'ip', '(?P<ip>.*)'],\n ['info', '(?P<name>Vlan[0-9]+)', 'ipv4', '(?P<ip_mask>.*)', 'prefix_length', '(?P<mask>.*)']],\n 'exclude': interface_exclude,\n 'all_keys': True}},\n config_info={'conf.interface.Interface':{\n 'requirements':[\\\n [partial(configure_intf_ip, ip='10.254.254.254', mask='24')]],\n 'verify_conf':False,\n 'kwargs':{'mandatory':{'name': '(?P<name>.*)',\n 'attach': False}}}},\n verify_ops={'ops.interface.interface.Interface':{\n 'requirements': [\\\n ['info', '(?P<name>.*)', 'ipv4', '10.254.254.254/24', 'prefix_length', '24'],\n ['info', '(?P<name>.*)', 'ipv4', '10.254.254.254/24', 'ip', '10.254.254.254']],\n 'exclude': interface_exclude}},\n num_values={'name': 1, 'ip_mask': 1, 'ip' : 'all', 'mask': 'all'})\n\n\nclass TriggerModifyVlanMtu(TriggerModify):\n \"\"\"Modify and revert the mtu for dynamically learned Vlan interface(s).\"\"\"\n \n __description__ = \"\"\"Modify and revert the mtu for dynamically learned Vlan interface(s).\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery: \n Buffer recovery timeout make sure devices are recovered at the end\n of the trigger execution. Used when previous timeouts have been exhausted.\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n\n steps:\n 1. Learn Interface Ops object and store the \"up\" Vlan interface(s)\n if has any, otherwise, SKIP the trigger\n 2. Save the current device configurations through \"method\" which user uses\n 3. Modify the mtu of the learned Vlan interface(s) from step 1\n with Interface Conf object\n 4. Verify the mtu of the learned Vlan interface(s) from step 3\n changes to the modified value in step 3\n 5. Recover the device configurations to the one in step 2\n 6. 
Learn Interface Ops again and verify it is the same as the Ops in step 1\n\n \"\"\"\n\n # Mapping of Information between Ops and Conf\n # Also permit to dictates which key to verify\n mapping = Mapping(requirements={'ops.interface.interface.Interface':{\n 'requirements': [['info', '(?P<name>(Vlan|vlan)[0-9]+)', 'mtu', '(?P<mtu>.*)'],\n ['info', '(?P<name>.*)', 'oper_status', 'up']],\n 'exclude': interface_exclude}},\n config_info={'conf.interface.Interface':{\n 'requirements':[['mtu', 9216]],\n 'verify_conf':False,\n 'kwargs':{'mandatory':{'name': '(?P<name>.*)',\n 'attach': False}}}},\n verify_ops={'ops.interface.interface.Interface':{\n 'requirements': [['info', '(?P<name>.*)', 'mtu', 9216],\n ['info', '(?P<name>.*)', 'bandwidth', '(\\d+)']],\n 'exclude': interface_exclude}},\n num_values={'name': 1, 'mtu': 1})\n", "id": "6113395", "language": "Python", "matching_score": 9.126768112182617, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/modify/interface/modify.py" }, { "content": "'''Implementation for interface modify triggers'''\n\n# Genie Libs\nfrom genie.libs.sdk.libs.utils.mapping import Mapping\nfrom genie.libs.sdk.triggers.modify.modify import TriggerModify\n\n# import python\nfrom functools import partial\n\n# Which key to exclude for Interface Ops comparison\ninterface_exclude = ['maker', 'last_change','in_rate','in_rate_pkts',\n 'out_rate', 'out_rate_pkts', 'in_octets',\n 'in_pkts', 'in_unicast_pkts', 'out_octets',\n 'out_pkts', 'out_unicast_pkts', 'out_multicast_pkts',\n 'in_multicast_pkts', 'last_clear', 'in_broadcast_pkts',\n 'out_broadcast_pkts', 'accounting']\n\n\nclass TriggerModifyEthernetMtu(TriggerModify):\n \"\"\"Modify and revert the mtu for dynamically learned Ethernet interface(s).\"\"\"\n\n __description__ = \"\"\"Modify and revert the mtu for dynamically learned Ethernet interface(s).\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery:\n Buffer recovery timeout make sure devices are recovered at the end\n of the trigger execution. Used when previous timeouts have been exhausted.\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n interface: `str`\n mtu: `str`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n\n steps:\n 1. Learn Interface Ops object and store the \"up\" Ethernet interface(s)\n if has any, otherwise, SKIP the trigger\n 2. Save the current device configurations through \"method\" which user uses\n 3. Modify the mtu of the learned Ethernet interface(s) from step 1\n with Interface Conf object\n 4. 
Verify the mtu of the learned Ethernet interface(s) from step 3\n changes to the modified value in step 3\n 5. Recover the device configurations to the one in step 2\n 6. Learn Interface Ops again and verify it is the same as the Ops in step 1\n\n \"\"\"\n\n # Mapping of Information between Ops and Conf\n # Also permit to dictates which key to verify\n mapping = Mapping(requirements={'ops.interface.interface.Interface':{\n 'requirements': [['info', '(?P<interface>\\w+Ethernet[0-9\\/]+$)', 'mtu', '(?P<mtu>.*)'],\n ['info', '(?P<interface>.*)', 'port_channel', 'port_channel_member', False],\n ['info', '(?P<interface>.*)', 'oper_status', 'up']],\n 'exclude': interface_exclude,\n 'include_management_interface': False}},\n config_info={'conf.interface.Interface':{\n 'requirements':[['mtu', 9216]],\n 'verify_conf':False,\n 'kwargs':{'mandatory':{'name': '(?P<interface>.*)',\n 'attach': False}}}},\n verify_ops={'ops.interface.interface.Interface':{\n 'requirements': [['info', '(?P<interface>.*)', 'mtu', 9216],\n ['info', '(?P<interface>.*)', 'bandwidth', '(\\d+)'],\n ['info', '(.*)', 'mtu', '(\\d+)']],\n 'exclude': interface_exclude}},\n num_values={'interface': 1, 'mtu': 1})\n\n\nclass TriggerModifySwitchportModeTrunkToAccess(TriggerModify):\n \"\"\"Modify and revert the switchport mode to access for dynamically learned interface(s).\"\"\"\n\n __description__ = \"\"\"Modify and revert the switchport mode to access for dynamically learned interface(s).\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery:\n Buffer recovery timeout make sure devices are recovered at the end\n of the trigger execution. Used when previous timeouts have been exhausted.\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n interface: `str`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n\n steps:\n 1. Learn Interface Ops object and store the \"up\" interface(s)\n if has any, otherwise, SKIP the trigger\n 2. Save the current device configurations through \"method\" which user uses\n 3. Modify the switchport mode to \"access\" of the learned interface(s) from step 1\n with Interface Conf object\n 4. Verify the switchport mode of the learned interface(s) from step 3\n changes to \"access\" in step 3\n 5. Recover the device configurations to the one in step 2\n 6. 
Learn Interface Ops again and verify it is the same as the Ops in step 1\n\n \"\"\"\n\n # Mapping of Information between Ops and Conf\n # Also permit to dictates which key to verify\n mapping = Mapping(requirements={'ops.interface.interface.Interface':{\n 'requirements':[\\\n ['info', '(?P<interface>.*)', 'switchport_mode', 'trunk'],\n ['info', '(?P<interface>.*)', 'enabled', True],\n ['info', '(?P<interface>.*)', 'oper_status', 'up'],\n ['info', '(?P<interface>.*)', 'port_channel', 'port_channel_member', False]],\n 'exclude': interface_exclude}},\n config_info={'conf.interface.Interface':{\n 'requirements':[['switchport_mode', 'access']],\n 'verify_conf':False,\n 'kwargs':{'mandatory':{'name': '(?P<interface>.*)',\n 'attach': False}}}},\n verify_ops={'ops.interface.interface.Interface':{\n 'requirements': [\\\n ['info', '(?P<interface>.*)', 'switchport_mode', '(.*access)'],\n ['info', '(?P<interface>.*)', 'switchport_enable', False]],\n 'exclude': interface_exclude + ['(Vlan.*)']}},\n num_values={'interface': 1})\n", "id": "12620009", "language": "Python", "matching_score": 2.885796308517456, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/modify/interface/iosxe/modify.py" }, { "content": "'''NXOS implementation for Msdp clear triggers'''\n\n# import genie.libs\nfrom ..clear import TriggerClearMsdpPeer as TriggerClearMsdpPeerBase, \\\n TriggerClearMsdpStatistics as TriggerClearMsdpStatisticsBase, \\\n TriggerClearMsdpPolicyStatisticsSaPolicyIn as TriggerClearMsdpPolicyStatisticsSaPolicyInBase, \\\n TriggerClearMsdpPolicyStatisticsSaPolicyOut as TriggerClearMsdpPolicyStatisticsSaPolicyOutBase, \\\n TriggerClearMsdpSaCache as TriggerClearMsdpSaCacheBase, \\\n TriggerClearMsdpRoute as TriggerClearMsdpRouteBase\n\n\nclass TriggerClearMsdpPeer(TriggerClearMsdpPeerBase):\n \"\"\"Reset msdp peer using CLI command \"clear ip msdp peer x.x.x.x [ vrf <vrf> ]\".\"\"\"\n\n __description__ = \"\"\"Reset msdp peer using CLI command \"clear ip msdp peer x.x.x.x [ vrf <vrf> ]\".\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iterations when looping is needed,\n in second. Default: 15\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n\n steps:\n 1. Learn Msdp Ops object and store the peer(s)\n if has any, otherwise, SKIP the trigger\n 2. Reset msdp peers with command \"clear ip msdp peer x.x.x.x [ vrf <vrf> ]\"\n 3. Learn Msdp Ops again, verify the elapsed_time of peer(s) is reset,\n and verify it is the same as the Ops in step 1 except the elapsed_time\n\n \"\"\"\n # These variables are NOT for user to change,\n # only specific for this trigger\n clear_cmd = ['clear ip msdp peer (?P<peer>.*) vrf (?P<vrf>.*)']\n\n\nclass TriggerClearMsdpStatistics(TriggerClearMsdpStatisticsBase):\n \"\"\"Reset msdp statistics using CLI command \"clear ip msdp statistics x.x.x.x [vrf <vrf> ]\".\"\"\"\n\n __description__ = \"\"\"Reset msdp statistics using CLI command\n \"clear ip msdp statistics x.x.x.x [vrf <vrf> ]\".\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iterations when looping is needed,\n in second. 
Default: 15\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n\n steps:\n 1. Learn Msdp Ops object and store the peer(s)\n if any exist, otherwise, SKIP the trigger\n 2. Reset msdp statistics with command \"clear ip msdp statistics x.x.x.x [vrf <vrf> ]\"\n 3. Learn Msdp Ops again, verify the discontinuity_time of peer(s) is reset,\n and verify it is the same as the Ops in step 1 except the discontinuity_time\n\n \"\"\"\n # These variables are NOT for user to change,\n # only specific for this trigger\n clear_cmd = ['clear ip msdp statistics (?P<peer>.*) vrf (?P<vrf>.*)']\n\n\nclass TriggerClearMsdpPolicyStatisticsSaPolicyIn(TriggerClearMsdpPolicyStatisticsSaPolicyInBase):\n \"\"\"Reset msdp peer statistics sa_policy in using CLI command\n \"clear ip msdp policy statistics sa-policy x.x.x.x in\".\"\"\"\n\n __description__ = \"\"\"Reset msdp peer statistics sa_policy in using CLI command\n \"clear ip msdp policy statistics sa-policy x.x.x.x in\".\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iterations when looping is needed,\n in second. Default: 15\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n\n steps:\n 1. Learn Msdp Ops object and store the peer(s) which have sa_policy \"in\" configured,\n if any exist, otherwise, SKIP the trigger\n 2. Reset the sa-policy \"in\" statistics with command \"clear ip msdp policy statistics sa-policy x.x.x.x in\"\n 3. Learn Msdp Ops again, verify it is the same as the Ops in step 1\n\n \"\"\"\n # These variables are NOT for user to change,\n # only specific for this trigger\n clear_cmd = ['clear ip msdp policy statistics sa-policy (?P<peer>.*) in']
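\n\n\n# ---------------------------------------------------------------------------\n# Illustrative sketch only: how a templated clear_cmd entry such as the one\n# above becomes a concrete CLI command. The framework substitutes the values\n# learned in step 1 for the named-group placeholders; the helper and the\n# sample peer below are assumptions made purely for this demonstration.\ndef _demo_fill_clear_cmd():\n template = 'clear ip msdp policy statistics sa-policy (?P<peer>.*) in'\n learned = {'peer': '10.1.1.1'}\n command = template\n for key, value in learned.items():\n # Each '(?P<key>.*)' placeholder is replaced by its learned value.\n command = command.replace('(?P<%s>.*)' % key, value)\n assert command == 'clear ip msdp policy statistics sa-policy 10.1.1.1 in'\n\n\nclass TriggerClearMsdpPolicyStatisticsSaPolicyOut(TriggerClearMsdpPolicyStatisticsSaPolicyOutBase):\n \"\"\"Reset msdp peer statistics sa_policy out using CLI command\n \"clear ip msdp policy statistics sa-policy x.x.x.x out\".\"\"\"\n\n __description__ = \"\"\"Reset msdp peer statistics sa_policy out using CLI command\n \"clear ip msdp policy statistics sa-policy x.x.x.x out\".\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iterations when looping is needed,\n in second. Default: 15\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n\n steps:\n 1. Learn Msdp Ops object and store the peer(s) which have sa_policy \"out\" configured,\n if any exist, otherwise, SKIP the trigger\n 2. Reset the sa-policy \"out\" statistics with command \"clear ip msdp policy statistics sa-policy x.x.x.x out\"\n 3. 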
Learn Msdp Ops again, verify it is the same as the Ops in step 1\n\n \"\"\"\n # These variables are NOT for user to change,\n # only specific for this trigger\n clear_cmd = ['clear ip msdp policy statistics sa-policy (?P<peer>.*) out']\n\n\nclass TriggerClearMsdpSaCache(TriggerClearMsdpSaCacheBase):\n \"\"\"Reset sa-cache for msdp groups using CLI command \"clear ip msdp sa-cache x.x.x.x [vrf <vrf> ]\".\"\"\"\n\n __description__ = \"\"\"Reset sa cache for msdp groups using CLI command\n \"clear ip msdp sa-cache x.x.x.x [vrf <vrf> ]\".\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iterations when looping is needed,\n in second. Default: 15\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n\n steps:\n 1. Learn Msdp Ops object and store the msdp sa-cache group(s)\n if has any, otherwise, SKIP the trigger\n 2. Reset msdp sa-cache groups with command \"clear ip msdp sa-cache x.x.x.x [vrf <vrf> ]\"\n 3. Learn Msdp Ops again, verify the uptime of group(s) is reset,\n and verify it is the same as the Ops in step 1 except the uptime\n\n \"\"\"\n # These variables are NOT for user to change,\n # only specific for this trigger\n clear_cmd = ['clear ip msdp sa-cache (?P<group>.*) vrf (?P<vrf>.*)']\n\n\nclass TriggerClearMsdpRoute(TriggerClearMsdpRouteBase):\n \"\"\"Reset sa-cache for msdp groups using CLI command \"clear ip msdp route x.x.x.x [ vrf < vrf> ]\".\"\"\"\n\n __description__ = \"\"\"Reset sa cache for msdp groups using CLI command\n \"clear ip msdp route x.x.x.x [ vrf < vrf> ]\".\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iterations when looping is needed,\n in second. Default: 15\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n\n steps:\n 1. Learn Msdp Ops object and store the msdp sa-cache group(s)\n if has any, otherwise, SKIP the trigger\n 2. Reset msdp sa-cache groups with command \"clear ip msdp route x.x.x.x [ vrf < vrf> ]\"\n 3. Learn Msdp Ops again, verify the uptime of group(s) is reset,\n and verify it is the same as the Ops in step 1 except the uptime\n\n \"\"\"\n # These variables are NOT for user to change,\n # only specific for this trigger\n clear_cmd = ['clear ip msdp route (?P<group>.*) vrf (?P<vrf>.*)']\n", "id": "6829453", "language": "Python", "matching_score": 4.201830863952637, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/clear/msdp/nxos/clear.py" }, { "content": "'''\nNXOS implementation for interface clear triggers\n'''\n\n# Genie\nfrom genie.libs.sdk.triggers.clear.interface.clear import TriggerClearCounters\n\n\nclass TriggerClearCountersInterfaceAll(TriggerClearCounters):\n \"\"\"Clear counters on all interfaces using CLI command \"clear counters interface all\".\"\"\"\n\n __description__ = \"\"\"Clear counters on all interfaces using CLI command\n \"clear counters interface all\".\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. 
Default: 180\n interval (`int`): Wait time between iterations when looping is needed,\n in second. Default: 15\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n\n steps:\n 1. Learn Interface Ops object and store the interface(s)\n if any exist, otherwise, SKIP the trigger\n 2. Clear the counters on all interfaces with command \"clear counters interface all\"\n 3. Learn Interface Ops again, verify the counters of the learned interface(s)\n are reset, and verify it is the same as the Ops in step 1\n\n \"\"\"\n clear_cmd = ['clear counters interface all']", "id": "11839517", "language": "Python", "matching_score": 4.1737284660339355, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/clear/interface/nxos/clear.py" }, { "content": "'''NXOS implementation for ospf clear triggers'''\n\n\nfrom ..clear import TriggerClearIpOspfNeighborVrfAll as TriggerClearIpOspfNeighborVrfAllBase, \\\n TriggerRestartOspf as TriggerRestartOspfBase\n\n\nclass TriggerClearIpOspfNeighborVrfAll(TriggerClearIpOspfNeighborVrfAllBase):\n \"\"\"Reset all Ospf neighbor connections using CLI command \"clear ip ospf neighbor * vrf all\".\"\"\"\n\n __description__ = \"\"\"Reset all Ospf neighbor connections using CLI command \"clear ip ospf neighbor * vrf all\".\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iterations when looping is needed,\n in second. Default: 15\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n\n steps:\n 1. Learn Ospf Ops object and store the neighbor(s)\n if any exist, otherwise, SKIP the trigger\n 2. Reset all the neighbors with command \"clear ip ospf neighbor * vrf all\"\n 3. Learn Ospf Ops again, verify the last state change of neighbor(s) is reset,\n and verify it is the same as the Ops in step 1 except the uptime\n \"\"\"\n clear_cmd = ['clear ip ospf neighbor * vrf all']
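\n\n\n# ---------------------------------------------------------------------------\n# Illustrative sketch only: the pattern used throughout these platform\n# modules. A subclass inherits the generic trigger logic and merely overrides\n# 'clear_cmd'; at run time the base implementation sends every command in\n# that list to the device. _FakeDevice and _run_clear_cmds are demo\n# stand-ins, not the real trigger machinery.\ndef _demo_clear_cmd_override():\n class _FakeDevice(object):\n def __init__(self):\n self.sent = []\n def execute(self, cmd):\n self.sent.append(cmd)\n\n def _run_clear_cmds(trigger_cls, device):\n # What the base trigger conceptually does with its clear_cmd list.\n for cmd in trigger_cls.clear_cmd:\n device.execute(cmd)\n\n device = _FakeDevice()\n _run_clear_cmds(TriggerClearIpOspfNeighborVrfAll, device)\n assert device.sent == ['clear ip ospf neighbor * vrf all']\n\n\nclass TriggerRestartOspf(TriggerRestartOspfBase):\n \"\"\"Restart Ospf instance(s) using CLI command \"restart ospf <instance>\".\"\"\"\n\n __description__ = \"\"\"Restart Ospf instance(s) using CLI command \"restart ospf <instance>\".\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iterations when looping is needed,\n in second. Default: 15\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n\n steps:\n 1. Learn Ospf Ops object and store all the neighbors\n if any exist, otherwise, SKIP the trigger\n 2. Restart Ospf instance(s) with command \"restart ospf <instance>\"\n 3. 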
Learn Ospf Ops again and verify it is the same as the Ops in step 1\n except the last state change\n \"\"\"\n clear_cmd = ['restart ospf (?P<instance>.*)']\n", "id": "4748884", "language": "Python", "matching_score": 4.372591495513916, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/clear/ospf/nxos/clear.py" }, { "content": "'''NXOS implementation for ipv6 clear triggers'''\n\n# Genie Libs\nfrom ..clear import TriggerClearIPv6NeighborVrfAll as TriggerClearIPv6NeighborVrfAllBase\n\n\nclass TriggerClearIPv6NeighborVrfAll(TriggerClearIPv6NeighborVrfAllBase):\n \"\"\"Reset all the ipv6 neighbors using CLI command \"clear ipv6 neighbor vrf all force-delete\".\"\"\"\n\n __description__ = \"\"\"Reset all the ipv6 neighbors using CLI command \"clear ipv6 neighbor vrf all force-delete\".\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iterations when looping is needed,\n in second. Default: 15\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n\n steps:\n 1. Learn Nd Ops object and store the ipv6 neighbors\n if any exist, otherwise, SKIP the trigger\n 2. Reset all the ipv6 neighbors using CLI command \"clear ipv6 neighbor vrf all force-delete\"\n 3. Learn Nd Ops again, verify the lifetime of neighbor(s) is reset,\n and verify it is the same as the Ops in step 1 except the uptime\n\n \"\"\"\n # These variables are NOT for user to change,\n # only specific for this trigger\n clear_cmd = ['clear ipv6 neighbor vrf all force-delete']
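\n\n\n# ---------------------------------------------------------------------------\n# Illustrative sketch only: the \"same as step 1 except the uptime\" checks in\n# the docstring boil down to comparing two Ops snapshots while ignoring the\n# excluded (volatile) keys. The helper and the sample snapshots below are\n# assumptions made purely for this demonstration; the real comparison is\n# performed by the trigger framework.\ndef _demo_compare_snapshots_ignoring_keys():\n def _strip(data, excluded):\n # Recursively drop the excluded keys before comparing.\n return {key: (_strip(value, excluded) if isinstance(value, dict) else value)\n for key, value in data.items() if key not in excluded}\n before = {'fe80::1': {'ip': 'fe80::1', 'age': '00:10:42'}}\n after = {'fe80::1': {'ip': 'fe80::1', 'age': '00:00:01'}}\n # Equal once the volatile key is excluded, as step 3 expects.\n assert _strip(before, {'age'}) == _strip(after, {'age'})\n", "id": "1037620", "language": "Python", "matching_score": 4.179862022399902, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/clear/ipv6/nxos/clear.py" }, { "content": "'''NXOS implementation for routing clear triggers'''\n\n# import genie.libs\nfrom ..clear import TriggerClearIpRouteVrfAll as TriggerClearRouteVrfAll, \\\n TriggerClearIpv6RouteVrfAll as TriggerClearV6RouteVrfAll, \\\n TriggerClearIpRouteVrfDefault, \\\n TriggerClearIpv6RouteVrfDefault\n\n\nclass TriggerClearIpRouteVrfAll(TriggerClearRouteVrfAll):\n \"\"\"Reset all ip route connections using CLI command \"clear ip route vrf all *\".\"\"\"\n\n __description__ = \"\"\"Reset all the ip route connections using CLI command \"clear ip route vrf all *\".\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iterations when looping is needed,\n in second. Default: 15\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n\n steps:\n 1. Learn routing Ops object and store the route IP(s)\n if any exist, otherwise, SKIP the trigger\n 2. Reset all the route connections with command \"clear ip route vrf all *\"\n 3. 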
Learn routing Ops again, verify the uptime of route(s) is reset,\n and verify it is the same as the Ops in step 1 except the uptime\n\n \"\"\"\n # These variables are NOT for user to change,\n # only specific for this trigger\n clear_cmd = ['clear ip route vrf all *']\n\n\nclass TriggerClearIpv6RouteVrfAll(TriggerClearV6RouteVrfAll):\n \"\"\"Reset all ipv6 route connections using CLI command \"clear ipv6 route vrf all *\".\"\"\"\n\n __description__ = \"\"\"Reset all the ipv6 route connections using CLI command \"clear ipv6 route vrf all *\".\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iterations when looping is needed,\n in second. Default: 15\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n\n steps:\n 1. Learn routing Ops object and store the route IPv6(s)\n if any exist, otherwise, SKIP the trigger\n 2. Reset all the route connections with command \"clear ipv6 route vrf all *\"\n 3. Learn routing Ops again, verify the uptime of route(s) is reset,\n and verify it is the same as the Ops in step 1 except the uptime\n \"\"\"\n # These variables are NOT for user to change,\n # only specific for this trigger\n clear_cmd = ['clear ipv6 route vrf all *']\n\n\nclass TriggerClearIpRoute(TriggerClearIpRouteVrfDefault):\n \"\"\"Reset vrf default ipv4 route connections using CLI command \"clear ip route *\".\"\"\"\n\n __description__ = \"\"\"Reset vrf default ip route connections using CLI command \"clear ip route *\".\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iterations when looping is needed,\n in second. Default: 15\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n\n steps:\n 1. Learn routing Ops object and store the route IP(s)\n if any exist, otherwise, SKIP the trigger\n 2. Reset vrf default route connections with command \"clear ip route *\"\n 3. Learn routing Ops again, verify the uptime of route(s) is reset,\n and verify it is the same as the Ops in step 1 except the uptime\n\n \"\"\"\n # These variables are NOT for user to change,\n # only specific for this trigger\n clear_cmd = ['clear ip route *']
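\n\n\n# ---------------------------------------------------------------------------\n# Illustrative sketch only: the classes in this module differ mainly in\n# whether the command targets every VRF or only the default VRF. The helper\n# below spells that choice out explicitly; it is a demo assumption, not part\n# of the trigger framework.\ndef _demo_select_clear_cmd(all_vrfs=True):\n # 'clear ip route vrf all *' resets routes in every VRF, while\n # 'clear ip route *' only touches the default VRF.\n return 'clear ip route vrf all *' if all_vrfs else 'clear ip route *'\n\n\nclass TriggerClearIpv6Route(TriggerClearIpv6RouteVrfDefault):\n \"\"\"Reset vrf default ipv6 route connections using CLI command \"clear ipv6 route *\".\"\"\"\n\n __description__ = \"\"\"Reset vrf default ipv6 route connections using CLI command \"clear ipv6 route *\".\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iterations when looping is needed,\n in second. Default: 15\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n\n steps:\n 1. Learn routing Ops object and store the route ipv6 address(es)\n if any exist, otherwise, SKIP the trigger\n 2. 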
Reset vrf default route connections with command \"clear ipv6 route *\"\n 3. Learn routing Ops again, verify the uptime of route(s) is reset,\n and verify it is the same as the Ops in step 1 except the uptime\n\n \"\"\"\n # These variables are NOT for user to change,\n # only specific for this trigger\n clear_cmd = ['clear ipv6 route *']\n\n\nclass TriggerClearIpRouting(TriggerClearIpRouteVrfDefault):\n \"\"\"Reset vrf default ip routing using CLI command \"clear routing ip *\".\"\"\"\n\n __description__ = \"\"\"Reset vrf default ip routing using CLI command \"clear routing ip *\".\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iterations when looping is needed,\n in second. Default: 15\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n\n steps:\n 1. Learn routing Ops object and store the route IP(s)\n if has any, otherwise, SKIP the trigger\n 2. Reset vrf default route connections with command \"clear routing ip *\"\n 3. Learn routing Ops again, verify the uptime of route(s) is reset,\n and verify it is the same as the Ops in step 1 except the uptime\n\n \"\"\"\n # These variables are NOT for user to change,\n # only specific for this trigger\n clear_cmd = ['clear routing ip *']\n\n\nclass TriggerClearRouting(TriggerClearIpRouteVrfDefault):\n \"\"\"Reset vrf default routing using CLI command \"clear routing *\".\"\"\"\n\n __description__ = \"\"\"Reset vrf default routing using CLI command \"clear routing *\".\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iterations when looping is needed,\n in second. Default: 15\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n\n steps:\n 1. Learn routing Ops object and store the route(s)\n if has any, otherwise, SKIP the trigger\n 2. Reset vrf default route connections with command \"clear routing *\"\n 3. Learn routing Ops again, verify the uptime of route(s) is reset,\n and verify it is the same as the Ops in step 1 except the uptime\n\n \"\"\"\n # These variables are NOT for user to change,\n # only specific for this trigger\n clear_cmd = ['clear routing *']\n\n\nclass TriggerClearIpv6Routing(TriggerClearIpv6RouteVrfDefault):\n \"\"\"Reset all ipv6 routing using CLI command \"clear routing ipv6 *\".\"\"\"\n\n __description__ = \"\"\"Reset all ipv6 routing using CLI command \"clear routing ipv6 *\".\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iterations when looping is needed,\n in second. Default: 15\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n\n steps:\n 1. Learn routing Ops object and store the route IP(s)\n if has any, otherwise, SKIP the trigger\n 2. 
Reset vrf default route connections with command \"clear routing ipv6 *\"\n 3. Learn routing Ops again, verify the uptime of route(s) is reset,\n and verify it is the same as the Ops in step 1 except the uptime\n\n \"\"\"\n # These variables are NOT for user to change,\n # only specific for this trigger\n clear_cmd = ['clear routing ipv6 *']\n\n\nclass TriggerClearRoutingUnicast(TriggerClearIpRouteVrfDefault):\n \"\"\"Reset vrf default unicast route connections using CLI command \"clear routing unicast *\".\"\"\"\n\n __description__ = \"\"\"Reset vrf default ip route connections using CLI command \"clear routing unicast *\".\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iterations when looping is needed,\n in second. Default: 15\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n\n steps:\n 1. Learn routing Ops object and store the route IP(s)\n if has any, otherwise, SKIP the trigger\n 2. Reset vrf default route connections with command \"clear routing unicast *\"\n 3. Learn routing Ops again, verify the uptime of route(s) is reset,\n and verify it is the same as the Ops in step 1 except the uptime\n\n \"\"\"\n # These variables are NOT for user to change,\n # only specific for this trigger\n clear_cmd = ['clear routing unicast *']\n\n\nclass TriggerClearIpRoutingUnicast(TriggerClearIpRouteVrfDefault):\n \"\"\"Reset vrf default ip unicast routing using CLI command \"clear routing ip unicast *\".\"\"\"\n\n __description__ = \"\"\"Reset vrf default ip route connections using CLI command \"clear routing ip unicast *\".\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iterations when looping is needed,\n in second. Default: 15\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n\n steps:\n 1. Learn routing Ops object and store the route IP(s)\n if has any, otherwise, SKIP the trigger\n 2. Reset vrf default route connections with command \"clear routing ip unicast *\"\n 3. Learn routing Ops again, verify the uptime of route(s) is reset,\n and verify it is the same as the Ops in step 1 except the uptime\n\n \"\"\"\n # These variables are NOT for user to change,\n # only specific for this trigger\n clear_cmd = ['clear routing ip unicast *']\n\n\nclass TriggerClearIpv6RoutingUnicast(TriggerClearIpv6RouteVrfDefault):\n \"\"\"Reset vrf default ipv6 unicast routing using CLI command \"clear routing ipv6 unicast *\".\"\"\"\n\n __description__ = \"\"\"Reset vrf default ipv6 route connections using CLI command \"clear routing ipv6 unicast *\".\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iterations when looping is needed,\n in second. Default: 15\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n\n steps:\n 1. 
Learn routing Ops object and store the route IP(s)\n if has any, otherwise, SKIP the trigger\n 2. Reset vrf default route connections with command \"clear routing ipv6 unicast *\"\n 3. Learn routing Ops again, verify the uptime of route(s) is reset,\n and verify it is the same as the Ops in step 1 except the uptime\n\n \"\"\"\n # These variables are NOT for user to change,\n # only specific for this trigger\n clear_cmd = ['clear routing ipv6 unicast *']\n", "id": "2118836", "language": "Python", "matching_score": 4.931265354156494, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/clear/routing/nxos/clear.py" }, { "content": "'''NXOS implementation for Mcast clear triggers'''\n\n# import genie.libs\nfrom ..clear import TriggerClearIpMroute as TriggerClearIpMrouteBase, \\\n TriggerClearIpv6Mroute as TriggerClearIpv6MrouteBase, \\\n TriggerClearIpMrouteVrfAll as TriggerClearIpMrouteVrfAllBase, \\\n TriggerClearIpv6MrouteVrfAll as TriggerClearIpv6MrouteVrfAllBase\n\n\nclass TriggerClearIpMroute(TriggerClearIpMrouteBase):\n \"\"\"Reset vrf default ipv4 Mroute using CLI command \"clear ip mroute *\".\"\"\"\n\n __description__ = \"\"\"Reset vrf default ip Mroute using CLI command \"clear ip mroute *\".\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iterations when looping is needed,\n in second. Default: 15\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n\n steps:\n 1. Learn routing Ops object and store the route IP(s)\n if has any, otherwise, SKIP the trigger\n 2. Reset vrf default route connections with command \"clear ip mroute *\"\n 3. Learn routing Ops again, verify the uptime of route(s) is reset,\n and verify it is the same as the Ops in step 1 except the uptime\n\n \"\"\"\n # These variables are NOT for user to change,\n # only specific for this trigger\n clear_cmd = ['clear ip mroute *']\n\n\nclass TriggerClearIpv6Mroute(TriggerClearIpv6MrouteBase):\n \"\"\"Reset vrf default ipv6 Mroute using CLI command \"clear ipv6 mroute *\".\"\"\"\n\n __description__ = \"\"\"Reset vrf default ipv6 Mroute using CLI command \"clear ipv6 mroute *\".\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iterations when looping is needed,\n in second. Default: 15\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n\n steps:\n 1. Learn routing Ops object and store the route ipv6 address(es)\n if has any, otherwise, SKIP the trigger\n 2. Reset vrf default route connections with command \"clear ipv6 mroute *\"\n 3. 
Learn routing Ops again, verify the uptime of route(s) is reset,\n and verify it is the same as the Ops in step 1 except the uptime\n\n \"\"\"\n # These variables are NOT for user to change,\n # only specific for this trigger\n clear_cmd = ['clear ipv6 mroute *']\n\n\nclass TriggerClearIpMrouteVrfAll(TriggerClearIpMrouteVrfAllBase):\n \"\"\"Reset all the vrf ipv4 Mroute using CLI command \"clear ip mroute * vrf all\".\"\"\"\n\n __description__ = \"\"\"Reset all the vrf ipv4 Mroute using CLI command \"clear ip mroute * vrf all\".\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iterations when looping is needed,\n in second. Default: 15\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n\n steps:\n 1. Learn routing Ops object and store the route IP(s)\n if has any, otherwise, SKIP the trigger\n 2. Reset all the vrf route connections with command \"clear ip mroute * vrf all\"\n 3. Learn routing Ops again, verify the uptime of route(s) is reset,\n and verify it is the same as the Ops in step 1 except the uptime\n\n \"\"\"\n # These variables are NOT for user to change,\n # only specific for this trigger\n clear_cmd = ['clear ip mroute * vrf all']\n\n\nclass TriggerClearIpv6MrouteVrfAll(TriggerClearIpv6MrouteVrfAllBase):\n \"\"\"Reset all the vrf ipv6 Mroute using CLI command \"clear ipv6 mroute * vrf all\".\"\"\"\n\n __description__ = \"\"\"Reset all the vrf ipv6 Mroute using CLI command \"clear ipv6 mroute * vrf all\".\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iterations when looping is needed,\n in second. Default: 15\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n\n steps:\n 1. Learn routing Ops object and store the route ipv6 address(es)\n if has any, otherwise, SKIP the trigger\n 2. Reset all the vrf route connections with command \"clear ipv6 mroute * vrf all\"\n 3. Learn routing Ops again, verify the uptime of route(s) is reset,\n and verify it is the same as the Ops in step 1 except the uptime\n\n \"\"\"\n # These variables are NOT for user to change,\n # only specific for this trigger\n clear_cmd = ['clear ipv6 mroute * vrf all']\n\n\nclass TriggerClearIpRoutingMulticast(TriggerClearIpMrouteBase):\n \"\"\"Reset vrf default ip multicast route connections using CLI command \"clear routing ip multicast *\".\"\"\"\n\n __description__ = \"\"\"Reset vrf default ip multicast route connections using CLI command \"clear routing ip multicast *\".\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iterations when looping is needed,\n in second. Default: 15\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n\n steps:\n 1. 
Learn routing Ops object and store the route IP(s)\n if has any, otherwise, SKIP the trigger\n 2. Reset vrf default route connections with command \"clear routing ip multicast *\"\n 3. Learn routing Ops again, verify the uptime of route(s) is reset,\n and verify it is the same as the Ops in step 1 except the uptime\n\n \"\"\"\n # These variables are NOT for user to change,\n # only specific for this trigger\n clear_cmd = ['clear routing ip multicast *']\n\n\nclass TriggerClearRoutingMulticast(TriggerClearIpMrouteBase):\n \"\"\"Reset vrf default multicast route connections using CLI command \"clear routing multicast *\".\"\"\"\n\n __description__ = \"\"\"Reset vrf default multicast route connections using CLI command \"clear routing multicast *\".\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iterations when looping is needed,\n in second. Default: 15\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n\n steps:\n 1. Learn routing Ops object and store the route IP(s)\n if has any, otherwise, SKIP the trigger\n 2. Reset vrf default route connections with command \"clear routing multicast *\"\n 3. Learn routing Ops again, verify the uptime of route(s) is reset,\n and verify it is the same as the Ops in step 1 except the uptime\n\n \"\"\"\n # These variables are NOT for user to change,\n # only specific for this trigger\n clear_cmd = ['clear routing multicast *']\n\n\nclass TriggerClearV4RouteMulticast(TriggerClearIpMrouteBase):\n \"\"\"Reset vrf default V4 multicast route using CLI command \"clear routing ipv4 multicast *\".\"\"\"\n\n __description__ = \"\"\"Reset vrf default V4 multicast route using CLI command \"clear routing ipv4 multicast *\".\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iterations when looping is needed,\n in second. Default: 15\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n\n steps:\n 1. Learn routing Ops object and store the route IP(s)\n if has any, otherwise, SKIP the trigger\n 2. Reset vrf default route connections with command \"clear routing ipv4 multicast *\"\n 3. Learn routing Ops again, verify the uptime of route(s) is reset,\n and verify it is the same as the Ops in step 1 except the uptime\n\n \"\"\"\n # These variables are NOT for user to change,\n # only specific for this trigger\n clear_cmd = ['clear routing ipv4 multicast *']\n\n\nclass TriggerClearV6RouteMulticast(TriggerClearIpv6MrouteBase):\n \"\"\"Reset vrf default V6 multicast route using CLI command \"clear routing ipv6 multicast *\".\"\"\"\n\n __description__ = \"\"\"Reset vrf default V6 multicast route using CLI command \"clear routing ipv6 multicast *\".\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iterations when looping is needed,\n in second. Default: 15\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. 
Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n\n steps:\n 1. Learn routing Ops object and store the route IP(s)\n if has any, otherwise, SKIP the trigger\n 2. Reset vrf default route connections with command \"clear routing ipv6 multicast *\"\n 3. Learn routing Ops again, verify the uptime of route(s) is reset,\n and verify it is the same as the Ops in step 1 except the uptime\n\n \"\"\"\n # These variables are NOT for user to change,\n # only specific for this trigger\n clear_cmd = ['clear routing ipv6 multicast *']", "id": "11616680", "language": "Python", "matching_score": 2.015819549560547, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/clear/mcast/nxos/clear.py" }, { "content": "'''Common implementation for routing clear triggers'''\n\n# python\nfrom functools import partial\n\n# genie libs\nfrom genie.libs.sdk.libs.utils.mapping import Mapping\nfrom genie.libs.sdk.triggers.clear.clear import TriggerClear, verify_clear_callable\nfrom genie.libs.sdk.libs.utils.triggeractions import CompareUptime\n\n# Ignore keys when doing the diff with Ops objects for save_snapshot and\n# verify_clear, it will be used for LearnPollDiff.ops_diff callable\nexclude = ['maker', 'uptime']\n\n\nclass TriggerClearIpMroute(TriggerClear):\n\n # Argument with dynamic value for verify callable\n # As verify callable can be re-used in multiple triggers\n # with different variable names. This dictionary is used to map\n # dynamic argument name to actual script argument name\n # <expected argument_name for callable>: <script argument name>\n verify_func_args={'r_obj': [['table', 'vrf', '(?P<vrf>^default$)',\n 'address_family', '(?P<af>ipv4)',\n 'multicast_group', '(?P<group>.*)',\n 'source_address', '(?P<source>.*)',\n 'uptime', '(.*)']],\n 'relation': '<',\n 'threshold_time': 'compare_time',\n 'ops': 'ops'}\n\n mapping = Mapping(requirements={'ops.mcast.mcast.Mcast':{\n 'requirements':[\\\n ['table', 'vrf', '(?P<vrf>^default$)',\n 'address_family', '(?P<af>ipv4)',\n 'multicast_group', '(?P<group>.*)',\n 'source_address', '(?P<source>.*)', 'uptime', '(?P<uptime>.*)']],\n 'kwargs':{'attributes': [\n 'table[vrf][(.*)][address_family][ipv4][multicast_group][(.*)][source_address][(.*)]']},\n 'exclude': exclude}},\n verify_ops={'ops.mcast.mcast.Mcast':{\n 'requirements':[[partial(verify_clear_callable,\n verify_func=CompareUptime.compare_uptime,\n verify_func_args=verify_func_args)]],\n 'kwargs':{'attributes': [\n 'table[vrf][(.*)][address_family][ipv4][multicast_group][(.*)][source_address][(.*)]']},\n 'exclude': exclude}},\n num_values={'vrf': 'all', 'route':'all',\n 'af':'all'})\n\n\nclass TriggerClearIpv6Mroute(TriggerClear):\n\n # Argument with dynamic value for verify callable\n # As verify callable can be re-used in multiple triggers\n # with different variable names. 
This dictionary is used to map\n # dynamic argument name to actual script argument name\n # <expected argument_name for callable>: <script argument name>\n verify_func_args={'r_obj': [['table', 'vrf', '(?P<vrf>^default$)',\n 'address_family', '(?P<af>ipv6)',\n 'multicast_group', '(?P<group>.*)',\n 'source_address', '(?P<source>.*)',\n 'uptime', '(.*)']],\n 'relation': '<',\n 'threshold_time': 'compare_time',\n 'ops': 'ops'}\n\n mapping = Mapping(requirements={'ops.mcast.mcast.Mcast':{\n 'requirements':[\\\n ['table', 'vrf', '(?P<vrf>^default$)',\n 'address_family', '(?P<af>ipv6)',\n 'multicast_group', '(?P<group>.*)',\n 'source_address', '(?P<source>.*)', 'uptime', '(?P<uptime>.*)']],\n 'kwargs':{'attributes': [\n 'table[vrf][(.*)][address_family][ipv6][multicast_group][(.*)][source_address][(.*)]']},\n 'exclude': exclude}},\n verify_ops={'ops.mcast.mcast.Mcast':{\n 'requirements':[[partial(verify_clear_callable,\n verify_func=CompareUptime.compare_uptime,\n verify_func_args=verify_func_args)]],\n 'kwargs':{'attributes': [\n 'table[vrf][(.*)][address_family][ipv6][multicast_group][(.*)][source_address][(.*)]']},\n 'exclude': exclude}},\n num_values={'vrf': 'all', 'route':'all',\n 'af':'all'})\n\n\nclass TriggerClearIpMrouteVrfAll(TriggerClear):\n\n # Argument with dynamic value for verify callable\n # As verify callable can be re-used in multiple triggers\n # with different variable names. This dictionary is used to map\n # dynamic argument name to actual script argument name\n # <expected argument_name for callable>: <script argument name>\n verify_func_args={'r_obj': [['table', 'vrf', '(?P<vrf>.*)',\n 'address_family', '(?P<af>ipv4)',\n 'multicast_group', '(?P<group>.*)',\n 'source_address', '(?P<source>.*)',\n 'uptime', '(.*)']],\n 'relation': '<',\n 'threshold_time': 'compare_time',\n 'ops': 'ops'}\n\n mapping = Mapping(requirements={'ops.mcast.mcast.Mcast':{\n 'requirements':[\\\n ['table', 'vrf', '(?P<vrf>.*)',\n 'address_family', '(?P<af>ipv4)',\n 'multicast_group', '(?P<group>.*)',\n 'source_address', '(?P<source>.*)', 'uptime', '(?P<uptime>.*)']],\n 'kwargs':{'attributes': [\n 'table[vrf][(.*)][address_family][ipv4][multicast_group][(.*)][source_address][(.*)]']},\n 'exclude': exclude}},\n verify_ops={'ops.mcast.mcast.Mcast':{\n 'requirements':[[partial(verify_clear_callable,\n verify_func=CompareUptime.compare_uptime,\n verify_func_args=verify_func_args)]],\n 'kwargs':{'attributes': [\n 'table[vrf][(.*)][address_family][ipv4][multicast_group][(.*)][source_address][(.*)]']},\n 'exclude': exclude}},\n num_values={'vrf': 'all', 'route':'all',\n 'af':'all'})\n
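\n\n# Added note (illustrative): the ``verify_ops`` entries in these mappings bind\n# ``CompareUptime.compare_uptime`` and the trigger's ``verify_func_args`` into\n# the requirement with ``functools.partial``, so that presumably only the\n# remaining runtime arguments need to be supplied when the Ops object is\n# re-learned. A minimal standalone sketch of the same binding pattern\n# (``check`` is a stand-in, not the real ``verify_clear_callable``):\n#\n#     from functools import partial\n#\n#     def check(ops, verify_func, verify_func_args):\n#         # ``ops`` arrives at call time; the other two were pre-bound\n#         return verify_func, verify_func_args\n#\n#     bound = partial(check, verify_func=CompareUptime.compare_uptime,\n#                     verify_func_args=verify_func_args)\n#     # bound(ops) now only needs the runtime ``ops`` argument\n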
\n\nclass TriggerClearIpv6MrouteVrfAll(TriggerClear):\n\n # Argument with dynamic value for verify callable\n # As verify callable can be re-used in multiple triggers\n # with different variable names. This dictionary is used to map\n # dynamic argument name to actual script argument name\n # <expected argument_name for callable>: <script argument name>\n verify_func_args={'r_obj': [['table', 'vrf', '(?P<vrf>.*)',\n 'address_family', '(?P<af>ipv6)',\n 'multicast_group', '(?P<group>.*)',\n 'source_address', '(?P<source>.*)',\n 'uptime', '(.*)']],\n 'relation': '<',\n 'threshold_time': 'compare_time',\n 'ops': 'ops'}\n\n mapping = Mapping(requirements={'ops.mcast.mcast.Mcast':{\n 'requirements':[\\\n ['table', 'vrf', '(?P<vrf>.*)',\n 'address_family', '(?P<af>ipv6)',\n 'multicast_group', '(?P<group>.*)',\n 'source_address', '(?P<source>.*)', 'uptime', '(?P<uptime>.*)']],\n 'kwargs':{'attributes': [\n 'table[vrf][(.*)][address_family][ipv6][multicast_group][(.*)][source_address][(.*)]']},\n 'exclude': exclude}},\n verify_ops={'ops.mcast.mcast.Mcast':{\n 'requirements':[[partial(verify_clear_callable,\n verify_func=CompareUptime.compare_uptime,\n verify_func_args=verify_func_args)]],\n 'kwargs':{'attributes': [\n 'table[vrf][(.*)][address_family][ipv6][multicast_group][(.*)][source_address][(.*)]']},\n 'exclude': exclude}},\n num_values={'vrf': 'all', 'route':'all',\n 'af':'all'})\n", "id": "11145156", "language": "Python", "matching_score": 5.528539180755615, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/clear/mcast/clear.py" }, { "content": "'''Common implementation for routing clear triggers'''\n\n# python\nfrom functools import partial\n\n# genie libs\nfrom genie.libs.sdk.libs.utils.mapping import Mapping\nfrom genie.libs.sdk.triggers.clear.clear import TriggerClear, verify_clear_callable\nfrom genie.libs.sdk.libs.utils.triggeractions import CompareUptime\n\n# Ignore keys when doing the diff with Ops objects for save_snapshot and\n# verify_clear, it will be used for LearnPollDiff.ops_diff callable\nexclude = ['maker','updated']\n\n\nclass TriggerClearIpRouteVrfAll(TriggerClear):\n\n # Argument with dynamic value for verify callable\n # As verify callable can be re-used in multiple triggers\n # with different variable names. 
This dictionary is used to map\n # dynamic argument name to actual script argument name\n # <expected argument_name for callable>: <script argument name>\n \n verify_func_args={'r_obj': [['info', 'vrf', '(?P<vrf>.*)',\n 'address_family', 'ipv6',\n 'routes', '(?P<route>.*)',\n 'next_hop', 'next_hop_list', '(?P<index>.*)',\n 'updated', '(.*)']],\n 'relation': '<',\n 'threshold_time': 'compare_time',\n 'ops': 'ops'}\n\n mapping = Mapping(requirements={'ops.routing.routing.Routing': {\n 'requirements': [ \\\n ['info', 'vrf', '(?P<vrf>.*)',\n 'address_family', 'ipv6',\n 'routes', '(?P<route>.*)',\n 'active', True]],\n 'kwargs': {'attributes': \\\n ['info[vrf][(.*)][address_family][ipv6][routes][(.*)]']},\n 'exclude': exclude}},\n verify_ops={'ops.routing.routing.Routing':{\n 'requirements':[[partial(verify_clear_callable,\n verify_func=CompareUptime.compare_uptime,\n verify_func_args=verify_func_args)]],\n 'kwargs':{'attributes': [\n 'info[vrf][(.*)][address_family][ipv6][routes][(.*)]']},\n 'exclude': exclude}},\n num_values={'vrf': 'all', 'route': 'all', 'af': 'all'})\n\n\nclass TriggerClearIpRouteVrfDefault(TriggerClear):\n\n # Argument with dynamic value for verify callable\n # As verify callable can be re-used in multiple triggers\n # with different variable names. This dictionary is used to map\n # dynamic argument name to actual script argument name\n # <expected argument_name for callable>: <script argument name>\n \n verify_func_args={'r_obj': [['info', 'vrf', '(?P<vrf>^default$)',\n 'address_family', 'ipv4',\n 'routes', '(?P<route>.*)',\n 'next_hop', 'next_hop_list','(?P<index>.*)',\n 'updated', '(.*)']],\n 'relation': '<',\n 'threshold_time': 'compare_time',\n 'ops': 'ops'}\n\n mapping = Mapping(requirements={'ops.routing.routing.Routing': {\n 'requirements': [ \\\n ['info', 'vrf', '(?P<vrf>^default$)',\n 'address_family', 'ipv4',\n 'routes', '(?P<route>.*)',\n 'active', True]],\n 'kwargs': {'attributes': \\\n ['info[vrf][(.*)][address_family][ipv4][routes][(.*)]']},\n 'exclude': exclude}},\n verify_ops={'ops.routing.routing.Routing':{\n 'requirements':[[partial(verify_clear_callable,\n verify_func=CompareUptime.compare_uptime,\n verify_func_args=verify_func_args)]],\n 'kwargs':{'attributes': [\n 'info[vrf][(.*)][address_family][ipv4][routes][(.*)]']},\n 'exclude': exclude}},\n num_values={'vrf': 1, 'route': 'all', 'af': 'all'})\n\n\nclass TriggerClearIpv6RouteVrfDefault(TriggerClear):\n\n # Argument with dynamic value for verify callable\n # As verify callable can be re-used in multiple triggers\n # with different variable names. 
This dictionary is used to map\n # dynamic argument name to actual script argument name\n # <expected argument_name for callable>: <script argument name>\n \n verify_func_args={'r_obj': [['info', 'vrf', '(?P<vrf>^default$)',\n 'address_family', 'ipv6',\n 'routes', '(?P<route>.*)',\n 'next_hop', 'next_hop_list','(?P<index>.*)',\n 'updated', '(.*)']],\n 'relation': '<',\n 'threshold_time': 'compare_time',\n 'ops': 'ops'}\n\n mapping = Mapping(requirements={'ops.routing.routing.Routing': {\n 'requirements': [ \\\n ['info', 'vrf', '(?P<vrf>^default$)',\n 'address_family', 'ipv6',\n 'routes', '(?P<route>.*)',\n 'active', True]],\n 'kwargs': {'attributes': \\\n ['info[vrf][(.*)][address_family][ipv6][routes][(.*)]']},\n 'exclude': exclude}},\n verify_ops={'ops.routing.routing.Routing':{\n 'requirements':[[partial(verify_clear_callable,\n verify_func=CompareUptime.compare_uptime,\n verify_func_args=verify_func_args)]],\n 'kwargs':{'attributes': [\n 'info[vrf][(.*)][address_family][ipv6][routes][(.*)]']},\n 'exclude': exclude}},\n num_values={'vrf': 1, 'route': 'all', 'af': 'all'})\n", "id": "2922156", "language": "Python", "matching_score": 5.659907341003418, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/clear/routing/clear.py" }, { "content": "'''Common implementation for ospf clear triggers'''\n\n# python\nfrom functools import partial\n\n# genie libs\nfrom genie.libs.sdk.libs.utils.mapping import Mapping\nfrom genie.libs.sdk.triggers.clear.clear import TriggerClear, verify_clear_callable\nfrom genie.libs.sdk.libs.utils.triggeractions import CompareUptime\n\n# Ignore keys when doing the diff with Ops objects for save_snapshot and\n# verify_clear, it will be used for LearnPollDiff.ops_diff callable\nexclude = ['maker', 'age', 'checksum', 'seq_num','dead_timer',\n 'last_state_change' ,'spf_runs_count','hello_timer','nbr_event_count']\n\nclass TriggerClearIpOspfNeighborVrfAll(TriggerClear):\n\n # Argument with dynamic value for verify callable\n # As verify callable can be re-used in multiple triggers\n # with different variable names. 
This dictionary is used to map\n # dynamic argument name to actual script argument name\n # <expected argument_name for callable>: <script argument name>\n\n verify_func_args={'r_obj': [['info', 'vrf', '(?P<vrf>.*)', 'address_family','(?P<af>.*)',\n 'instance', '(?P<instance>.*)', 'areas', '(?P<area>.*)',\n 'interfaces', '(?P<intf>.*)',\n 'neighbors', '(?P<neighbor>.*)',\n 'last_state_change', '(.*)']],\n 'relation': '<',\n 'threshold_time': 'compare_time',\n 'ops': 'ops'}\n\n mapping = Mapping(requirements={'ops.ospf.ospf.Ospf':{\n 'requirements': [\\\n ['info', 'vrf', '(?P<vrf>.*)', 'address_family','(?P<af>.*)',\n 'instance', '(?P<instance>.*)', 'areas','(?P<area>.*)',\n 'interfaces', '(?P<intf>.*)',\n 'neighbors', '(?P<neighbor>.*)','(.*)']],\n 'all_keys': True,\n 'kwargs': {'attributes':['info']},\n 'exclude': exclude}},\n verify_ops={'ops.ospf.ospf.Ospf':{\n 'requirements':[[partial(verify_clear_callable,\n verify_func=CompareUptime.compare_uptime,\n verify_func_args=verify_func_args)]],\n 'kwargs':{'attributes':['info']},\n 'exclude': exclude + ['link_data']}},\n num_values={'vrf':'all', 'instance':'all', 'neighbor':'all', 'intf':'all', 'area': 'all'})\n\n\nclass TriggerRestartOspf(TriggerClearIpOspfNeighborVrfAll):\n    pass", "id": "2051250", "language": "Python", "matching_score": 6.0706682205200195, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/clear/ospf/clear.py" }, { "content": "'''Common implementation for ipv6 triggers'''\n\n# python\nfrom functools import partial\n\n# genie libs\nfrom genie.libs.sdk.libs.utils.mapping import Mapping\nfrom genie.libs.sdk.triggers.clear.clear import TriggerClear, verify_clear_callable\nfrom genie.libs.sdk.libs.utils.triggeractions import CompareUptime\n\n# Ignore keys when doing the diff with Ops objects for save_snapshot and\n# verify_clear, it will be used for LearnPollDiff.ops_diff callable\nexclude = ['maker','age','active_timers']\n\nclass TriggerClearIPv6NeighborVrfAll(TriggerClear):\n\n # Argument with dynamic value for verify callable\n # As verify callable can be re-used in multiple triggers\n # with different variable names. 
This dictionary is used to map\n # dynamic argument name to actual script argument name\n # <expected argument_name for callable>: <script argument name>\n\n verify_func_args={'r_obj': [['info', 'interfaces', '(?P<interface>.*)',\n 'neighbors', '(?P<neighbor>.*)', 'age', '(.*)']],\n 'relation': '<',\n 'threshold_time': 'compare_time',\n 'ops': 'ops'}\n\n mapping = Mapping(requirements={'ops.nd.nd.Nd':{\n 'requirements':[\\\n ['info', 'interfaces', '(?P<interface>.*)',\n 'neighbors', '(?P<neighbor>.*)','(?P<nbr_contents>.*)']],\n 'kwargs': {'attributes': ['info[interfaces][(.*)][neighbors][(.*)][age]']},\n 'exclude': exclude}},\n verify_ops={'ops.nd.nd.Nd':{\n 'requirements':[[partial(verify_clear_callable,\n verify_func=CompareUptime.compare_uptime,\n verify_func_args=verify_func_args)]],\n 'kwargs':{'attributes': ['info[interfaces][(.*)][neighbors][(.*)][age]']},\n 'exclude': exclude}},\n num_values={'interface': 'all', 'neighbor': 'all'})\n", "id": "6733482", "language": "Python", "matching_score": 0.5653688907623291, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/clear/ipv6/clear.py" }, { "content": "import time\nimport logging\nfrom ats import aetest\nfrom genie.harness.base import Trigger\nfrom pprint import pprint as pp\nimport pdb\n\nlog = logging.getLogger()\n\n# Genie Libs\nfrom genie.libs.sdk.triggers.template.addremove import \\\n TriggerAddRemove as AddRemoveTemplate\n\n# Genie\nfrom genie.harness.exceptions import GenieConfigReplaceWarning\n\n# Genie Libs\nfrom genie.libs.sdk.libs.utils.mapping import Mapping\nfrom genie.libs.sdk.triggers.addremove.addremove import TriggerAddRemove\n\nimport re\nfrom genie.libs.sdk.triggers.template.unconfigconfig import \\\n TriggerUnconfigConfig as UnconfigConfigTemplate\n\nclass TriggerAddRemoveMplsLdpAutoConfig(Trigger):\n '''Add Remove mpls ldp Auto config in ospf'''\n\n @aetest.setup\n def prerequisites(self, uut):\n '''Figure out if ospf is configured'''\n output = uut.parse('show ip ospf')\n for vrf_id in output['vrf']:\n for addr_id in output['vrf'][vrf_id]['address_family']:\n for instance in output['vrf'][vrf_id]['address_family'][addr_id]['instance']:\n # record the ospf instance id learned from the parsed output\n self.ospf_id = instance\n log.info('Learned ospf instance: %s' % instance)\n if not hasattr(self, 'ospf_id'):\n self.skipped(\"No ospf is configured for \"\n \"device '{d}'\".format(d=uut.name))\n\n # Getting the mpls peer id\n output1 = uut.parse('show mpls ldp neighbor')\n for vrf_id in output1['vrf']:\n for peers in output1['vrf'][vrf_id]['peers']:\n self.peers_id = peers\n log.info('Learned mpls ldp peer: %s' % peers)\n if not hasattr(self, 'peers_id'):\n self.skipped(\"No mpls peers are configured for \"\n \"device '{d}'\".format(d=uut.name))\n\n @aetest.test\n def save_configuration(self, uut, method, abstract):\n '''Save current configuration\n\n Can be either done via TFTP or checkpoint feature (If exists for OS)\n\n Args:\n uut (`obj`): Device object.\n method (`str`): Save method from trigger datafile.\n Only accept \"local\" and \"checkpoint\"\n\n Returns:\n None\n\n Raises:\n pyATS Results\n '''\n self.lib = abstract.sdk.libs.abstracted_libs.restore.Restore()\n default_dir = getattr(self.parent, 'default_file_system', {})\n try:\n self.lib.save_configuration(uut, method, abstract, default_dir)\n except Exception as e:\n self.failed('Saving the configuration failed', from_exception=e,\n goto=['next_tc'])\n\n\n @aetest.test\n def remove(self, uut):\n '''Remove auto config under ospf'''\n uut.configure('''\\\nrouter ospf {id}\nno mpls ldp autoconfig'''.format(id=self.ospf_id))\n
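\n    # Added note: ``uut.configure`` above is sent a two-line block; rendered\n    # with an ospf id of 1 (the value is illustrative) the CLI pushed to the\n    # device is:\n    #\n    #     router ospf 1\n    #     no mpls ldp autoconfig\n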
\n @aetest.test\n def verify_remove(self, uut):\n ''' verifying the peer-id no longer exists in the mpls neighbor list'''\n output1 = uut.parse('show mpls ldp neighbor')\n remaining = []\n for vrf_id in output1['vrf']:\n for peers in output1['vrf'][vrf_id]['peers']:\n remaining.append(peers)\n log.info('peers after removing: %s' % remaining)\n if self.peers_id not in remaining:\n self.passed(\"peers id {peers_id} is not showing anymore in the \"\n \"output of the cmd, this is \"\n \"expected!\".format(peers_id=self.peers_id))\n\n @aetest.test\n def add(self, uut):\n '''Configuring auto config under ospf'''\n uut.configure('''\\\nrouter ospf {id}\nmpls ldp autoconfig'''.format(id=self.ospf_id))\n\n @aetest.test\n def verify_add(self, uut):\n ''' verifying the peer-id exists in the mpls neighbor list'''\n output1 = uut.parse('show mpls ldp neighbor')\n remaining = []\n for vrf_id in output1['vrf']:\n for peers in output1['vrf'][vrf_id]['peers']:\n remaining.append(peers)\n log.info('mpls peers after reconfigure: %s' % remaining)\n if self.peers_id in remaining:\n self.passed(\"peers id {peers_id} is showing in the \"\n \"output of the cmd, this is \"\n \"expected!\".format(peers_id=self.peers_id))\n\n @aetest.test\n def restore_configuration(self, uut, method, abstract):\n '''Rollback the configuration\n\n Can be either done via TFTP or checkpoint feature (If exists for OS)\n\n Args:\n uut (`obj`): Device object.\n method (`str`): Save method from trigger datafile.\n Only accept \"local\" and \"checkpoint\"\n\n Returns:\n None\n\n Raises:\n pyATS Results\n '''\n try:\n self.lib.restore_configuration(uut, method, abstract)\n except GenieConfigReplaceWarning as e:\n self.passx('Configure replace requires device reload')\n except Exception as e:\n self.failed('Failed to restore the configuration', from_exception=e)\n\nclass Triggermplsaddremove(Trigger):\n ''' Config and Unconfig of mpls '''\n @aetest.setup\n def prerequisites(self,uut):\n # To verify mpls: skip if ldp neighbors already exist\n output = uut.execute('show mpls ldp neighbor')\n if output:\n self.skipped('mpls ldp neighbors are there')\n\n @aetest.test\n def save_configuration(self, uut, method, abstract, steps):\n '''Save current configuration\n\n Can be either done via TFTP or checkpoint feature (If exists for OS)\n\n Args:\n uut (`obj`): Device object.\n method (`str`): Save method from trigger datafile.\n Only accept \"local\" and \"checkpoint\"\n\n Returns:\n None\n\n Raises:\n pyATS Results\n '''\n self.lib = abstract.sdk.libs.abstracted_libs.restore.Restore()\n default_dir = getattr(self.parent, 'default_file_system', {})\n try:\n self.lib.save_configuration(uut, method, abstract, default_dir)\n except Exception as e:\n self.failed('Saving the configuration failed', from_exception=e,\n goto=['next_tc'])\n\n @aetest.test\n def add_mpls(self,uut,ospf_id,isis_name):\n ''' add mpls with ospf and isis '''\n uut.configure('''\\\nrouter ospf {id}\nmpls ldp autoconfig'''.format(id=ospf_id))\n\n uut.configure('''\\\nrouter isis {is_name}\nmpls ldp autoconfig'''.format(is_name=isis_name))\n\n @aetest.test\n def Verify_addmpls(self,uut):\n ''' Verify mpls '''\n output = uut.execute('show mpls ldp neighbor')\n if output:\n self.passed(\"mpls ldp neighbors are there\")\n\n @aetest.test\n def remove_mpls(self,uut,ospf_id,isis_name):\n '''remove mpls with ospf and isis '''\n uut.configure('''\\\nrouter ospf {id}\nno mpls ldp autoconfig'''.format(id=ospf_id))\n\n uut.configure('''\\\nrouter isis {is_name}\nno mpls ldp autoconfig'''.format(is_name=isis_name))\n\n @aetest.test\n def 
Verify_removempls(self,uut):\n ''' Verify mpls '''\n output = uut.execute('show mpls ldp neighbor')\n if not output:\n self.passed(\"No neighbors for mpls and ldp\")\n\n @aetest.test\n def restore_configuration(self, uut, method, abstract, steps):\n '''Rollback the configuration\n\n Can be either done via TFTP or checkpoint feature (If exists for OS)\n\n Args:\n uut (`obj`): Device object.\n method (`str`): Save method from trigger datafile.\n Only accept \"local\" and \"checkpoint\"\n\n Returns:\n None\n\n Raises:\n pyATS Results\n '''\n try:\n self.lib.restore_configuration(uut, method, abstract)\n except GenieConfigReplaceWarning as e:\n self.passx('Configure replace requires device reload')\n except Exception as e:\n self.failed('Failed to restore the configuration', from_exception=e)\n\nclass Triggermplsexplicitnull(Trigger):\n ''' mpls explicit null'''\n @aetest.setup\n def prerequisites(self,uut):\n # To verify mpls: record the sent/received message counters\n output = uut.execute('show mpls ldp neighbor')\n match = re.search(r'.*(Msgs\s+sent)\/(rcvd)\:\s+(\d+)\/(\d+).*', output)\n self.sent_msgs = match.group(3)\n self.recvd_msgs = match.group(4)\n log.info('Msgs sent/rcvd: %s/%s' % (self.sent_msgs, self.recvd_msgs))\n# if output:\n# self.skipped('mpls ldp neighbors are there')\n\n @aetest.test\n def save_configuration(self, uut, method, abstract, steps):\n '''Save current configuration\n\n Can be either done via TFTP or checkpoint feature (If exists for OS)\n\n Args:\n uut (`obj`): Device object.\n method (`str`): Save method from trigger datafile.\n Only accept \"local\" and \"checkpoint\"\n\n Returns:\n None\n\n Raises:\n pyATS Results\n '''\n self.lib = abstract.sdk.libs.abstracted_libs.restore.Restore()\n default_dir = getattr(self.parent, 'default_file_system', {})\n try:\n self.lib.save_configuration(uut, method, abstract, default_dir)\n except Exception as e:\n self.failed('Saving the configuration failed', from_exception=e,\n goto=['next_tc'])\n \n @aetest.test\n def explictmplsnull(self,uut):\n ''' explicit mpls '''\n uut.configure('mpls ldp explicit-null')\n\n @aetest.test\n def Verify_explictmplsnull(self,uut):\n ''' Verify mpls '''\n output = uut.execute('show mpls ldp neighbor')\n match = re.search(r'.*(Msgs\s+sent)\/(rcvd)\:\s+(\d+)\/(\d+).*', output)\n beforeexp = match.group(3)\n beforeexprec = match.group(4)\n log.info('Msgs sent/rcvd: %s/%s' % (beforeexp, beforeexprec))\n if self.sent_msgs == beforeexp and self.recvd_msgs == beforeexprec:\n self.passed('Packets are same before and after mpls ldp explicit-null')\n else:\n self.failed('Packets are not same')\n\n @aetest.test\n def noexplicit_mpls(self,uut):\n '''no explicit mpls '''\n uut.configure('no mpls ldp explicit-null')\n\n @aetest.test\n def Verify_noexplicit_mpls(self,uut):\n ''' Verify mpls '''\n output = uut.execute('show mpls ldp neighbor')\n\n @aetest.test\n def restore_configuration(self, uut, method, abstract, steps):\n '''Rollback the configuration\n\n Can be either done via TFTP or checkpoint feature (If exists for OS)\n\n Args:\n uut (`obj`): Device object.\n method (`str`): Save method from trigger datafile.\n Only accept \"local\" and \"checkpoint\"\n\n Returns:\n None\n\n Raises:\n pyATS Results\n '''\n try:\n self.lib.restore_configuration(uut, method, abstract)\n except GenieConfigReplaceWarning as e:\n self.passx('Configure replace requires device reload')\n except Exception as e:\n self.failed('Failed to restore the configuration', from_exception=e)\n
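\n# Added note: the counter regex used in ``Triggermplsexplicitnull`` above\n# expects a line like the following in the CLI output (sample text\n# illustrative):\n#\n#     >>> re.search(r'.*(Msgs\s+sent)\/(rcvd)\:\s+(\d+)\/(\d+).*',\n#     ...           'Msgs sent/rcvd: 24/27').groups()\n#     ('Msgs sent', 'rcvd', '24', '27')\n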
", "id": "12255968", "language": "Python", "matching_score": 7.13770866394043, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/contrib/addremove/mpls/iosxe/addremove.py" }, { "content": "import time\nimport logging\nfrom ats import aetest\nfrom genie.harness.base import Trigger\nfrom pprint import pprint as pp\nimport pdb\nimport re\n\nlog = logging.getLogger()\n\n# Genie Libs\nfrom genie.libs.sdk.triggers.template.unconfigconfig import \\\n TriggerUnconfigConfig as UnconfigConfigTemplate\n\n# Genie\nfrom genie.harness.exceptions import GenieConfigReplaceWarning\n\nfrom ats.utils.objects import Not, NotExists\n\n# Genie Libs\nfrom genie.libs.sdk.libs.utils.mapping import Mapping\nfrom genie.libs.sdk.triggers.unconfigconfig.unconfigconfig import TriggerUnconfigConfig\n\nclass TriggerUnconfigConfigOspfInteface(Trigger):\n ''' Unconfiguring ospf under all interfaces'''\n @aetest.setup\n def prerequisites(self, uut):\n ''' Getting the ospf configured interfaces'''\n local_dict_1 = []\n output_1 = uut.parse('show ip ospf neighbor')\n for intrf in output_1['interfaces']:\n # record every interface that has an ospf neighbor\n local_dict_1.append(intrf)\n self.local_intrf_1 = local_dict_1\n log.info('Local ospf interfaces: %s' % self.local_intrf_1)\n\n @aetest.test\n def save_configuration(self, uut, method, abstract, steps):\n '''Save current configuration\n\n Can be either done via TFTP or checkpoint feature (If exists for OS)\n\n Args:\n uut (`obj`): Device object.\n method (`str`): Save method from trigger datafile.\n Only accept \"local\" and \"checkpoint\"\n\n Returns:\n None\n\n Raises:\n pyATS Results\n '''\n self.lib = abstract.sdk.libs.abstracted_libs.restore.Restore()\n default_dir = getattr(self.parent, 'default_file_system', {})\n try:\n self.lib.save_configuration(uut, method, abstract, default_dir)\n except Exception as e:\n self.failed('Saving the configuration failed', from_exception=e,\n goto=['next_tc'])\n\n @aetest.test\n def Unconfig(self,uut, ospf_id):\n ''' Unconfiguring ospf from all interfaces'''\n for i in self.local_intrf_1:\n uut.configure('''\\\ninterface {id}\nno ip ospf {o_id} area 0'''.format(id=i,o_id=ospf_id))\n\n @aetest.test\n def verify_Unconfig(self,uut):\n ''' Verifying the ospf interfaces are unconfigured'''\n try:\n output = uut.parse('show ip ospf neighbor')\n except Exception:\n self.passed('ospf configured interface {} is not showing in the output of the cmd, this is expected'.format(self.local_intrf_1))\n else:\n self.failed('Interfaces are showing for the ospf')\n\n @aetest.test\n def restore_configuration(self, uut, method, abstract, steps):\n '''Rollback the configuration\n\n Can be either done via TFTP or checkpoint feature (If exists for OS)\n\n Args:\n uut (`obj`): Device object.\n method (`str`): Save method from trigger datafile.\n Only accept \"local\" and \"checkpoint\"\n\n Returns:\n None\n\n Raises:\n pyATS Results\n '''\n try:\n self.lib.restore_configuration(uut, method, abstract)\n except GenieConfigReplaceWarning as e:\n self.passx('Configure replace requires device reload')\n except Exception as e:\n self.failed('Failed to restore the configuration', from_exception=e)\n\nclass unconfigconfigOspf(Trigger):\n ''' Config and Unconfig of Ospf '''\n @aetest.setup\n def prerequisites(self,uut,ospf_id):\n ''' Verify Ospf configured or not '''\n\n # To verify if Ospf is configured\n output = uut.parse('show ip 
ospf')\n if ospf_id not in output:\n self.skipped(\"Ospf id {id} is not showing in the \"\n \"output of the cmd, nothing to \"\n \"unconfigure!\".format(id=ospf_id))\n\n @aetest.test\n def save_configuration(self, uut, method, abstract, steps):\n '''Save current configuration\n\n Can be either done via TFTP or checkpoint feature (If exists for OS)\n\n Args:\n uut (`obj`): Device object.\n method (`str`): Save method from trigger datafile.\n Only accept \"local\" and \"checkpoint\"\n\n Returns:\n None\n\n Raises:\n pyATS Results\n '''\n self.lib = abstract.sdk.libs.abstracted_libs.restore.Restore()\n default_dir = getattr(self.parent, 'default_file_system', {})\n try:\n self.lib.save_configuration(uut, method, abstract, default_dir)\n except Exception as e:\n self.failed('Saving the configuration failed', from_exception=e,\n goto=['next_tc'])\n\n @aetest.test\n def Unconfig(self,uut,ospf_id):\n ''' Unconfigure Ospf '''\n uut.configure('no router ospf {id}'.format(id=ospf_id))\n\n @aetest.test\n def Verify_unconfig(self,uut,ospf_id):\n ''' Verify Ospf unconfig worked or not '''\n output = uut.parse('show ip ospf')\n if ospf_id not in output:\n self.passed(\"Ospf id {id} is not showing anymore in the \"\n \"output of the cmd, this is \"\n \"expected!\".format(id=ospf_id))\n\n @aetest.test\n def Config(self,uut,ospf_id):\n ''' Configure Ospf '''\n uut.configure('''\\\nrouter ospf {id}'''.format(id=ospf_id))\n\n @aetest.test\n def Verify_config(self,uut,ospf_id):\n ''' Verify Ospf config worked or not '''\n output = uut.parse('show ip ospf')\n if int(ospf_id) in output:\n self.passed(\"Ospf id {id} is showing again in the \"\n \"output of the cmd, this is \"\n \"expected!\".format(id=ospf_id))\n else:\n self.failed(\"No ospf id is there\")\n\n @aetest.test\n def restore_configuration(self, uut, method, abstract, steps):\n '''Rollback the configuration\n\n Can be either done via TFTP or checkpoint feature (If exists for OS)\n\n Args:\n uut (`obj`): Device object.\n method (`str`): Save method from trigger datafile.\n Only accept \"local\" and \"checkpoint\"\n\n Returns:\n None\n\n Raises:\n pyATS Results\n '''\n try:\n self.lib.restore_configuration(uut, method, abstract)\n except GenieConfigReplaceWarning as e:\n self.passx('Configure replace requires device reload')\n except Exception as e:\n self.failed('Failed to restore the configuration', from_exception=e)\n\nclass Triggeruserconfigentry(Trigger):\n @aetest.setup\n def prerequisites(self,uut,ospf_id):\n output = uut.parse('show ip ospf')\n if ospf_id not in output:\n self.skipped(\"Ospf id {id} is not showing in the \"\n \"output of the cmd, nothing to \"\n \"configure against!\".format(id=ospf_id))\n\n @aetest.test\n def save_configuration(self, uut, method, abstract, steps):\n '''Save current configuration\n\n Can be either done via TFTP or checkpoint feature (If exists for OS)\n\n Args:\n uut (`obj`): Device object.\n method (`str`): Save method from trigger datafile.\n Only accept \"local\" and \"checkpoint\"\n\n Returns:\n None\n\n Raises:\n pyATS Results\n '''\n self.lib = abstract.sdk.libs.abstracted_libs.restore.Restore()\n default_dir = getattr(self.parent, 'default_file_system', {})\n try:\n self.lib.save_configuration(uut, method, abstract, default_dir)\n except Exception as e:\n self.failed('Saving the configuration failed', from_exception=e,\n goto=['next_tc'])\n\n @aetest.test\n def add_config(self,uut,user_config,testbed):\n uut.configure(user_config)\n ixia_device = testbed.devices['IXIA']\n ixia_device.check_traffic_loss(loss_tolerance=
100, check_interval=30, check_iteration=2)\n\n\n @aetest.test\n def restore_configuration(self, uut, method, abstract, steps):\n '''Rollback the configuration\n\n Can be either done via TFTP or checkpoint feature (If exists for OS)\n\n Args:\n uut (`obj`): Device object.\n method (`str`): Save method from trigger datafile.\n Only accept \"local\" and \"checkpoint\"\n\n Returns:\n None\n\n Raises:\n pyATS Results\n '''\n try:\n self.lib.restore_configuration(uut, method, abstract)\n except GenieConfigReplaceWarning as e:\n self.passx('Configure replace requires device reload')\n except Exception as e:\n self.failed('Failed to restore the configuration', from_exception=e)\n\n", "id": "2730727", "language": "Python", "matching_score": 6.301148891448975, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/contrib/unconfigconfig/ospf/iosxe/unconfigconfig.py" }, { "content": "import time\nimport logging\nfrom ats import aetest\nfrom genie.harness.base import Trigger\nfrom pprint import pprint as pp\nimport pdb\nimport re\n\nlog = logging.getLogger()\n\n# Genie Libs\nfrom genie.libs.sdk.triggers.template.unconfigconfig import \\\n TriggerUnconfigConfig as UnconfigConfigTemplate\n\n# Genie\nfrom genie.harness.exceptions import GenieConfigReplaceWarning\n\nfrom ats.utils.objects import Not, NotExists\n\n# Genie Libs\nfrom genie.libs.sdk.libs.utils.mapping import Mapping\nfrom genie.libs.sdk.triggers.unconfigconfig.unconfigconfig import TriggerUnconfigConfig\n\nclass TriggerUnconfigConfigMplsStaticBindings(Trigger):\n ''' Unconfigure Configure mpls static bindings'''\n @aetest.setup\n def prerequisites(self, uut):\n ''' Getting the bindings config and local labels'''\n output = uut.execute('show run')\n self.mpls_bind = re.findall(r'.*(mpls\sstatic\sbinding\sipv4\s\d+\.\d+\.\d+\.\d+\s\d+\.\d+\.\d+\.\d+\s\d+)', output)\n local_dict_1 = []\n output1 = uut.parse('show mpls forwarding-table')\n for vrf_id in output1['vrf']:\n for local_id in output1['vrf'][vrf_id]['local_label']:\n local_dict_1.append(local_id)\n self.local_1 = local_dict_1\n log.info('local labels before unconfig: %s' % self.local_1)\n\n @aetest.test\n def save_configuration(self, uut, method, abstract, steps):\n '''Save current configuration\n\n Can be either done via TFTP or checkpoint feature (If exists for OS)\n\n Args:\n uut (`obj`): Device object.\n method (`str`): Save method from trigger datafile.\n Only accept \"local\" and \"checkpoint\"\n\n Returns:\n None\n\n Raises:\n pyATS Results\n '''\n self.lib = abstract.sdk.libs.abstracted_libs.restore.Restore()\n default_dir = getattr(self.parent, 'default_file_system', {})\n try:\n self.lib.save_configuration(uut, method, abstract, default_dir)\n except Exception as e:\n self.failed('Saving the configuration failed', from_exception=e,\n goto=['next_tc'])\n\n @aetest.test\n def Unconfig(self,uut, testbed):\n ''' unconfiguring all the mpls static bindings'''\n for i in self.mpls_bind:\n status = 'no '\n val = status + i\n log.info('mpls static bind: %s' % val)\n uut.configure(val)\n\n @aetest.test\n def verify_Unconfig(self, uut, l_id):\n ''' verifying the unconfigured labels are gone '''\n local_dict_2 = []\n output2 = uut.parse('show mpls forwarding-table detail')\n for vrf_id in output2['vrf']:\n for local_id in output2['vrf'][vrf_id]['local_label']:\n local_dict_2.append(local_id)\n self.local_2 = local_dict_2\n log.info('local labels after unconfig: %s' % self.local_2)\n\n if l_id not in self.local_2:\n self.passed(\"local label {local_lbl} is not showing in the \"\n \"output of the cmd, this is \"\n \"expected!\".format(local_lbl=l_id))\n else:\n self.failed(\"local label {local_lbl} is showing in the \"\n \"output of the cmd, this is \"\n \"unexpected!\".format(local_lbl=l_id))\n
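\n    # Added note: the ``re.findall`` pattern in ``prerequisites`` above captures\n    # full binding statements from the running config; for a config line such as\n    # the following (sample values illustrative):\n    #\n    #     mpls static binding ipv4 10.1.1.0 255.255.255.0 100\n    #\n    # it returns ['mpls static binding ipv4 10.1.1.0 255.255.255.0 100'], which\n    # is exactly the statement later prefixed with 'no ' in ``Unconfig``.\n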
\n @aetest.test\n def Config(self,uut, testbed):\n ''' re-configuring all the mpls static bindings '''\n for i in self.mpls_bind:\n log.info('mpls static bind: %s' % i)\n uut.configure(i)\n\n @aetest.test\n def verify_Config(self, uut, l_id):\n ''' verifying the reconfigured labels are showing up '''\n local_dict_3 = []\n output3 = uut.parse('show mpls forwarding-table')\n for vrf_id in output3['vrf']:\n for local_id in output3['vrf'][vrf_id]['local_label']:\n local_dict_3.append(local_id)\n self.local_3 = local_dict_3\n log.info('local labels after config: %s' % self.local_3)\n\n if l_id in self.local_3:\n self.passed(\"local label {local_lbl} is showing in the \"\n \"output of the cmd, this is \"\n \"expected!\".format(local_lbl=l_id))\n else:\n self.failed(\"local label {local_lbl} is not showing in the \"\n \"output of the cmd, this is \"\n \"unexpected!\".format(local_lbl=l_id))\n\n @aetest.test\n def restore_configuration(self, uut, method, abstract, steps, testbed):\n '''Rollback the configuration\n\n Can be either done via TFTP or checkpoint feature (If exists for OS)\n\n Args:\n uut (`obj`): Device object.\n method (`str`): Save method from trigger datafile.\n Only accept \"local\" and \"checkpoint\"\n\n Returns:\n None\n\n Raises:\n pyATS Results\n '''\n try:\n self.lib.restore_configuration(uut, method, abstract)\n except GenieConfigReplaceWarning as e:\n self.passx('Configure replace requires device reload')\n except Exception as e:\n self.failed('Failed to restore the configuration', from_exception=e)\n\n", "id": "4632036", "language": "Python", "matching_score": 2.9165048599243164, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/contrib/unconfigconfig/mpls/iosxe/unconfigconfig.py" }, { "content": "#import statements\nimport re\nimport logging\nimport time\nlog = logging.getLogger()\nfrom ats import aetest\nfrom pprint import pprint as pp\nfrom genie.harness.base import Trigger\nimport pdb\nfrom ats.utils.objects import Not, NotExists\nfrom genie.libs.sdk.triggers.template.unconfigconfig import \\\n TriggerUnconfigConfig as UnconfigConfigTemplate\n\n# Genie\nfrom genie.harness.exceptions import GenieConfigReplaceWarning\n\n# Genie Libs\nfrom genie.libs.sdk.libs.utils.mapping import Mapping\nfrom genie.libs.sdk.triggers.unconfigconfig.unconfigconfig import TriggerUnconfigConfig\n\nclass Triggerunconfigconfigcdp(Trigger):\n ''' Config and Unconfig of cdp '''\n @aetest.setup\n def prerequisites(self,uut):\n output = uut.execute('show cdp')\n cdp_status = re.search(r'(Global\s+CDP)', output)\n if not cdp_status:\n self.skipped(\"CDP is not enabled globally, nothing to unconfigure\")\n log.info(\"CDP is enabled globally\")\n\n @aetest.test\n def save_configuration(self, uut, method, abstract, steps):\n '''Save current configuration\n\n Can be either done via TFTP or checkpoint feature (If exists for OS)\n\n Args:\n uut (`obj`): Device object.\n method (`str`): Save method from trigger datafile.\n Only accept \"local\" and \"checkpoint\"\n\n Returns:\n None\n\n Raises:\n pyATS Results\n '''\n self.lib = abstract.sdk.libs.abstracted_libs.restore.Restore()\n default_dir = getattr(self.parent, 'default_file_system', {})\n try:\n self.lib.save_configuration(uut, method, abstract, default_dir)\n except Exception as e:\n self.failed('Saving the configuration failed', from_exception=e,\n goto=['next_tc'])\n
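\n    # Added note: the status check in ``prerequisites`` above relies on\n    # 'show cdp' printing a line such as \"Global CDP information:\" when cdp\n    # is enabled (sample output illustrative):\n    #\n    #     >>> re.search(r'(Global\s+CDP)', 'Global CDP information:').group(1)\n    #     'Global CDP'\n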
\n @aetest.test\n def unconfig(self,uut):\n uut.configure('no cdp run')\n\n @aetest.test\n def Verify_unconfig(self,uut):\n ''' Verify unconfig for cdp worked or not '''\n output = uut.execute('show cdp')\n if 'Global CDP' not in output:\n self.passed(\"CDP is not enabled globally\")\n else:\n self.failed(\"CDP is enabled globally\")\n\n @aetest.test\n def config(self,uut):\n uut.configure('cdp run')\n\n @aetest.test\n def Verify_config(self,uut):\n ''' Verify config for cdp worked or not '''\n output = uut.execute('show cdp')\n cdp_status = re.search(r'(Global\s+CDP)', output)\n if cdp_status:\n self.passed(\"CDP is enabled globally\")\n else:\n self.failed(\"CDP is not enabled globally\")\n\n\n @aetest.test\n def restore_configuration(self, uut, method, abstract, steps):\n '''Rollback the configuration\n\n Can be either done via TFTP or checkpoint feature (If exists for OS)\n\n Args:\n uut (`obj`): Device object.\n method (`str`): Save method from trigger datafile.\n Only accept \"local\" and \"checkpoint\"\n\n Returns:\n None\n\n Raises:\n pyATS Results\n '''\n try:\n self.lib.restore_configuration(uut, method, abstract)\n except GenieConfigReplaceWarning as e:\n self.passx('Configure replace requires device reload')\n except Exception as e:\n self.failed('Failed to restore the configuration', from_exception=e)\n", "id": "2022779", "language": "Python", "matching_score": 4.559762954711914, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/contrib/unconfigconfig/cdp/iosxe/unconfigconfig.py" }, { "content": "import time\nimport logging\nfrom ats import aetest\nfrom genie.harness.base import Trigger\nfrom pprint import pprint as pp\nimport pdb\nimport re\n\nlog = logging.getLogger()\n\n# Genie Libs\nfrom genie.libs.sdk.triggers.template.unconfigconfig import \\\n TriggerUnconfigConfig as UnconfigConfigTemplate\n\n# Genie\nfrom genie.harness.exceptions import GenieConfigReplaceWarning\n\nfrom ats.utils.objects import Not, NotExists\n\n# Genie Libs\nfrom genie.libs.sdk.libs.utils.mapping import Mapping\nfrom genie.libs.sdk.triggers.unconfigconfig.unconfigconfig import TriggerUnconfigConfig\n\nclass TriggerUnconfigConfigBfdTimer(Trigger):\n\n @aetest.setup\n def prerequisites(self, uut):\n '''checking for bfd configuration'''\n\n output = uut.parse('show bfd neighbors details')\n log.info(output)\n # pprint.pprint(output)\n
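\n        # Added sketch (illustrative): a fuller prerequisites step would act on\n        # the parsed structure instead of only logging it, e.g. skip when\n        # nothing is learned (the emptiness check is an assumption about the\n        # parser output, not verified against its schema):\n        #\n        #     if not output:\n        #         self.skipped('no bfd sessions are configured')\n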
TriggerUnconfigConfigMsdpMeshGroup(TriggerUnconfigConfig):\n\tpass\n\n\nclass TriggerUnconfigConfigMsdpOriginatorId(TriggerUnconfigConfig):\n\tpass\n\n\nclass TriggerUnconfigConfigMsdpKeepaliveHoldtime(TriggerUnconfigConfig):\n\tpass\n\n\nclass TriggerUnconfigConfigMsdpReconnectInterval(TriggerUnconfigConfig):\n\tpass\n\n\nclass TriggerUnconfigConfigMsdpDescription(TriggerUnconfigConfig):\n\tpass\n", "id": "10573819", "language": "Python", "matching_score": 2.858194351196289, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/unconfigconfig/msdp/unconfigconfig.py" }, { "content": "'''Implementation for Mld unconfigconfig triggers'''\n\n# Genie Libs\nfrom genie.libs.sdk.triggers.unconfigconfig.unconfigconfig import TriggerUnconfigConfig\n\n\nclass TriggerUnconfigConfigIgmpEnable(TriggerUnconfigConfig):\n\tpass\n\n", "id": "8977287", "language": "Python", "matching_score": 2.479588747024536, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/unconfigconfig/mld/unconfigconfig.py" }, { "content": "'''Implementation for Igmp unconfigconfig triggers'''\n\n# Genie Libs\nfrom genie.libs.sdk.triggers.unconfigconfig.unconfigconfig import TriggerUnconfigConfig\n\n\nclass TriggerUnconfigConfigPimNeighborFilter(TriggerUnconfigConfig):\n\tpass\n\n", "id": "12476906", "language": "Python", "matching_score": 3.716334104537964, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/unconfigconfig/igmp/unconfigconfig.py" }, { "content": "'''Implementation for Pim unconfigconfig triggers'''\n\n# Genie Libs\nfrom genie.libs.sdk.triggers.unconfigconfig.unconfigconfig import TriggerUnconfigConfig\n\n\nclass TriggerUnconfigConfigPimNeighborFilter(TriggerUnconfigConfig):\n\tpass\n\n", "id": "7069827", "language": "Python", "matching_score": 0.8377754092216492, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/unconfigconfig/pim/unconfigconfig.py" }, { "content": "#!/usr/bin/env python\n\n# Python\nimport unittest\nfrom unittest.mock import Mock\n\n# Genie\nfrom genie.conf.tests import TestCase\nfrom genie.conf import Genie\nfrom genie.conf.base import Testbed, Device, Link, Interface\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning\n\n# PIM\nfrom genie.libs.conf.vrf import Vrf\nfrom genie.libs.conf.pim import Pim\nfrom genie.libs.conf.pim.rp_address import RPAddressGroup\n\n\nclass test_pim(TestCase):\n\n def setUp(self):\n \n # Set Genie Tb\n testbed = Testbed()\n Genie.testbed = testbed\n \n # Device\n self.dev1 = Device(name='PE1', testbed=testbed, os='iosxe')\n\n def test_pim_full_config(self):\n\n # For failures\n self.maxDiff = None\n\n # shorten the line\n dev1 = self.dev1\n \n # Pim object\n pim = Pim()\n dev1.add_feature(pim)\n\n # Apply configuration\n\n # VRF configuration\n vrf1 = Vrf('default')\n pim.device_attr[self.dev1].vrf_attr[vrf1]\n vrf2 = Vrf('red')\n pim.device_attr[self.dev1].vrf_attr[vrf2]\n pim.device_attr[dev1].enabled_bidir = True\n\n for vrf, intf in {vrf1: 'GigabitEthernet0/0/1', vrf2: 'GigabitEthernet0/0/2'}.items():\n # == auto-rp ===\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n send_rp_announce_rp_group = '1.1.1.1'\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n send_rp_announce_scope = 20\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n send_rp_announce_group_list = 'test_list'\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n send_rp_announce_interval = 60\n 
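# the bidir flag on the next line pairs with enabled_bidir (set above);\n # on unconfig the builder emits 'no ip pim bidir-enable' and skips this line\n 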
pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n send_rp_announce_bidir = True\n\n # == auto-rp discovery===\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n send_rp_discovery_intf = intf\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n send_rp_discovery_scope = 20\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n send_rp_discovery_interval = 1000\n\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n autorp_listener = True\n\n # == bsr candidate ===\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n bsr_candidate_interface = intf\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n bsr_candidate_hash_mask_length = 20\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n bsr_candidate_priority = 50\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n bsr_candidate_accept_rp_acl = 190\n\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv6'].\\\n bsr_candidate_address = '2001:DB8:1:1::1'\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv6'].\\\n bsr_candidate_hash_mask_length = 126\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv6'].\\\n bsr_candidate_priority = 20\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n bsr_candidate_accept_rp_acl = 190\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n scope = True\n\n # == bsr rp-candidate ===\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n bsr_rp_candidate_interface = 'Loopback0'\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n bsr_rp_candidate_group_list = '11'\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n bsr_rp_candidate_priority = 10\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n bsr_rp_candidate_interval = 60\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n bsr_rp_candidate_bidir = True\n\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv6'].\\\n bsr_rp_candidate_address = '2001:DB8:2:2::2'\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv6'].\\\n bsr_rp_candidate_group_list = 'fc00:db20:35b:7399::5/64'\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv6'].\\\n bsr_rp_candidate_priority = 10\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv6'].\\\n bsr_rp_candidate_interval = 60\n\n # == static RP ===\n rp1 = RPAddressGroup(device=self.dev1)\n rp1.static_rp_address = '1.1.1.1'\n rp1.static_rp_group_list = '10'\n rp1.static_rp_override = True\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].add_static_rp(rp1)\n\n\n rp2 = RPAddressGroup(device=self.dev1)\n rp2.static_rp_address = '2001:db8:1:1::1'\n rp2.static_rp_group_list = 'fc00:db20:35b:7399::5/64'\n rp2.static_rp_bidir = True\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv6'].add_static_rp(rp2)\n\n # == static rp register ===\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n accept_register = 'regist_name'\n\n # ipv6\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv6'].\\\n accept_register = 'regist_map_v6'\n\n # log-neighbor-changes\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n log_neighbor_changes = True\n # ipv6 is not supported\n\n # register_source\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n register_source = intf\n # not ipv6 supported\n 
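# (note: iosxe does emit 'ipv6 pim register-source', as asserted below)\n 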
pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv6'].\\\n register_source = 'Loopback0'\n\n # == sg-expiry-timer ==\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n sg_expiry_timer = 182\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n sg_expiry_timer_sg_list = 'sg_name'\n\n # == spt-threshold ==\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n spt_switch_infinity = 0\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n spt_switch_policy = 'abcde'\n\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv6'].\\\n spt_switch_infinity = 'infinity'\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv6'].\\\n spt_switch_policy = 'abcdef'\n\n\n # == interface ==\n intf = 'Loopback0'\n # ---- mode ----------\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n interface_attr[intf].mode = 'dense-mode'\n\n # ---- jp-policy ----------\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n interface_attr[intf].boundary = 'test'\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n interface_attr[intf].boundary_filter_autorp = True\n \n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv6'].\\\n interface_attr[intf].boundary = 'test'\n\n # ---- border ----------\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n interface_attr[intf].bsr_border = True\n \n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv6'].\\\n interface_attr[intf].bsr_border = True\n\n # ---- hello-interval ----------\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n interface_attr[intf].hello_interval_msec = 30000\n \n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv6'].\\\n interface_attr[intf].hello_interval = 3000\n\n # ---- dr-priority ----------\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n interface_attr[intf].dr_priority = 777\n \n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv6'].\\\n interface_attr[intf].dr_priority = 777\n\n # ---- neighbor-policy ----------\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n interface_attr[intf].neighbor_filter = 'pim_neighbor_policy'\n \n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv6'].\\\n interface_attr[intf].neighbor_filter = 'pim_neighbor_policy'\n \n # Build config\n cfgs = pim.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.\\\n join([\n 'ip pim bidir-enable',\n 'ip pim send-rp-announce 1.1.1.1 scope 20 group-list test_list interval 60 bidir',\n 'ip pim send-rp-discovery GigabitEthernet0/0/1 scope 20 interval 1000',\n 'ip pim autorp listener',\n 'ip pim bsr-candidate GigabitEthernet0/0/1 20 50 accept-rp-candidate 190',\n 'ip pim rp-candidate Loopback0 group-list 11 interval 60 priority 10 bidir',\n 'ip pim accept-register list regist_name',\n 'ip pim log-neighbor-changes',\n 'ip pim register-source GigabitEthernet0/0/1',\n 'ip pim sparse sg-expiry-timer 182 sg-list sg_name',\n 'ip pim spt-threshold 0 group-list abcde',\n 'ip pim rp-address 1.1.1.1 10 override',\n 'interface Loopback0',\n ' ip pim dense-mode',\n ' ip multicast boundary test filter-autorp',\n ' ip pim bsr-border',\n ' ip pim dr-priority 777',\n ' ip pim query-interval 30000 msec',\n ' ip pim neighbor-filter pim_neighbor_policy',\n ' exit',\n 'ipv6 pim bsr candidate rp 2001:DB8:2:2::2 group-list fc00:db20:35b:7399::5/64 interval 60 priority 
10',\n 'ipv6 pim accept-register list regist_map_v6',\n 'ipv6 pim register-source Loopback0',\n 'ipv6 pim spt-threshold infinity group-list abcdef',\n 'ipv6 pim rp-address 2001:db8:1:1::1 fc00:db20:35b:7399::5/64 bidir',\n 'interface Loopback0',\n ' ipv6 multicast boundary block source',\n ' ipv6 pim bsr border',\n ' ipv6 pim dr-priority 777',\n ' ipv6 pim hello-interval 3000',\n ' exit',\n 'ipv6 pim neighbor-filter list pim_neighbor_policy',\n 'ip pim vrf red send-rp-announce 1.1.1.1 scope 20 group-list test_list interval 60 bidir',\n 'ip pim vrf red send-rp-discovery GigabitEthernet0/0/2 scope 20 interval 1000',\n 'ip pim autorp listener',\n 'ip pim vrf red bsr-candidate GigabitEthernet0/0/2 20 50 accept-rp-candidate 190',\n 'ip pim vrf red rp-candidate Loopback0 group-list 11 interval 60 priority 10 bidir',\n 'ip pim accept-register list regist_name',\n 'ip pim log-neighbor-changes',\n 'ip pim vrf red register-source GigabitEthernet0/0/2',\n 'ip pim vrf red sparse sg-expiry-timer 182 sg-list sg_name',\n 'ip pim vrf red spt-threshold 0 group-list abcde',\n 'ip pim vrf red rp-address 1.1.1.1 10 override',\n 'interface Loopback0',\n ' ip pim dense-mode',\n ' ip multicast boundary test filter-autorp',\n ' ip pim bsr-border',\n ' ip pim dr-priority 777',\n ' ip pim query-interval 30000 msec',\n ' ip pim neighbor-filter pim_neighbor_policy',\n ' exit',\n 'ipv6 pim vrf red bsr candidate rp 2001:DB8:2:2::2 group-list fc00:db20:35b:7399::5/64 interval 60 priority 10',\n 'ipv6 pim vrf red accept-register list regist_map_v6',\n 'ipv6 pim vrf red register-source Loopback0',\n 'ipv6 pim vrf red spt-threshold infinity group-list abcdef',\n 'ipv6 pim vrf red rp-address 2001:db8:1:1::1 ff1e:abcd:def1::0/64 bidir',\n 'interface Loopback0',\n ' ipv6 multicast boundary block source',\n ' ipv6 pim bsr border',\n ' ipv6 pim dr-priority 777',\n ' ipv6 pim hello-interval 3000',\n ' exit',\n 'ipv6 pim vrf red neighbor-filter list pim_neighbor_policy',\n ]))\n\n cfgs = pim.build_unconfig(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.\\\n join([\n 'no ip pim bidir-enable',\n 'no ip pim send-rp-discovery GigabitEthernet0/0/1 scope 20 interval 1000',\n 'no ip pim autorp listener',\n 'no ip pim bsr-candidate GigabitEthernet0/0/1 20 50 accept-rp-candidate 190',\n 'no ip pim accept-register list regist_name',\n 'no ip pim log-neighbor-changes',\n 'no ip pim register-source GigabitEthernet0/0/1',\n 'no ip pim sparse sg-expiry-timer 182 sg-list sg_name',\n 'no ip pim spt-threshold 0 group-list abcde',\n 'no ip pim rp-address 1.1.1.1 10 override',\n 'interface Loopback0',\n ' no ip pim dense-mode',\n ' no ip multicast boundary test filter-autorp',\n ' no ip pim bsr-border',\n ' no ip pim dr-priority 777',\n ' no ip pim query-interval 30000 msec',\n ' no ip pim neighbor-filter pim_neighbor_policy',\n ' exit',\n 'no ipv6 pim bsr candidate rp 2001:DB8:2:2::2 group-list fc00:db20:35b:7399::5/64 interval 60 priority 10',\n 'no ipv6 pim accept-register list regist_map_v6',\n 'no ipv6 pim register-source Loopback0',\n 'no ipv6 pim spt-threshold infinity group-list abcdef',\n 'no ipv6 pim rp-address 2001:db8:1:1::1 fc00:db20:35b:7399::5/64 bidir',\n 'interface Loopback0',\n ' no ipv6 multicast boundary block source',\n ' no ipv6 pim bsr border',\n ' no ipv6 pim dr-priority 777',\n ' no ipv6 pim hello-interval 3000',\n ' exit',\n 'no ipv6 pim neighbor-filter list pim_neighbor_policy',\n 'no ip pim vrf red send-rp-discovery GigabitEthernet0/0/2 scope 20 interval 1000',\n 
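# the vrf-red block below still uses global-form lines: the builder never\n # vrf-scopes autorp listener, accept-register or log-neighbor-changes\n 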
'no ip pim autorp listener',\n 'no ip pim vrf red bsr-candidate GigabitEthernet0/0/2 20 50 accept-rp-candidate 190',\n 'no ip pim accept-register list regist_name',\n 'no ip pim log-neighbor-changes',\n 'no ip pim vrf red register-source GigabitEthernet0/0/2',\n 'no ip pim vrf red sparse sg-expiry-timer 182 sg-list sg_name',\n 'no ip pim vrf red spt-threshold 0 group-list abcde',\n 'no ip pim vrf red rp-address 1.1.1.1 10 override',\n 'interface Loopback0',\n ' no ip pim dense-mode',\n ' no ip multicast boundary test filter-autorp',\n ' no ip pim bsr-border',\n ' no ip pim dr-priority 777',\n ' no ip pim query-interval 30000 msec',\n ' no ip pim neighbor-filter pim_neighbor_policy',\n ' exit',\n 'no ipv6 pim vrf red bsr candidate rp 2001:DB8:2:2::2 group-list fc00:db20:35b:7399::5/64 interval 60 priority 10',\n 'no ipv6 pim vrf red accept-register list regist_map_v6',\n 'no ipv6 pim vrf red register-source Loopback0',\n 'no ipv6 pim vrf red spt-threshold infinity group-list abcdef',\n 'no ipv6 pim vrf red rp-address 2001:db8:1:1::1 fc00:db20:35b:7399::5/64 bidir',\n 'interface Loopback0',\n ' no ipv6 multicast boundary block source',\n ' no ipv6 pim bsr border',\n ' no ipv6 pim dr-priority 777',\n ' no ipv6 pim hello-interval 3000',\n ' exit',\n 'no ipv6 pim vrf red neighbor-filter list pim_neighbor_policy',\n ]))\n\n # uncfg with attributes\n # pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n # interface_attr[intf].mode = 'dense-mode'\n cfgs = pim.build_unconfig(apply=False,\n attributes={'device_attr': {\n self.dev1: {\n 'vrf_attr': {\n 'default': {\n 'address_family_attr': {\n 'ipv6': {\n 'register_source': None\n }\n }\n },\n 'red': {\n 'address_family_attr': {\n 'ipv4': {\n 'interface_attr': {\n intf: {\n 'mode': None\n }\n }\n }\n } \n }\n }\n }}})\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.\\\n join([\n 'no ipv6 pim register-source Loopback0',\n 'interface Loopback0',\n ' no ip pim dense-mode',\n ' exit',\n ]))\n\n \n def test_multiple_pim_static_rp_config(self):\n\n # For failures\n self.maxDiff = None\n\n # shorten the line\n dev1 = self.dev1\n\n # VRF configuration\n vrf = Vrf('default')\n\n # == bsr static rp ===\n # -- bsr static rp intf --\n # Pim object\n pim = Pim()\n dev1.add_feature(pim)\n\n rp1 = RPAddressGroup(device=self.dev1)\n rp1.static_rp_address = '2.2.2.2'\n rp2 = RPAddressGroup(device=self.dev1)\n rp2.static_rp_address = '3.3.3.3'\n rp2.static_rp_group_list = 'rp_group_list'\n rp3 = RPAddressGroup(device=self.dev1)\n rp3.static_rp_address = '4.4.4.4'\n rp3.static_rp_group_list = 'rp_group_list'\n rp3.static_rp_override = True\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].add_static_rp(rp1)\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].add_static_rp(rp2)\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].add_static_rp(rp3)\n\n # Build config\n cfgs = pim.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.\\\n join([\n 'ip pim rp-address 2.2.2.2',\n 'ip pim rp-address 3.3.3.3 rp_group_list',\n 'ip pim rp-address 4.4.4.4 rp_group_list override',\n ]))\n\n \n\nif __name__ == '__main__':\n unittest.main()\n", "id": "8990968", "language": "Python", "matching_score": 6.0586724281311035, "max_stars_count": 0, "path": "pkgs/conf-pkg/src/genie/libs/conf/pim/iosxe/tests/test_pim.py" }, { "content": "#!/usr/bin/env python\n\n# Python\nimport unittest\nfrom unittest.mock import Mock\n\n# 
Genie\nfrom genie.conf.tests import TestCase\nfrom genie.conf import Genie\nfrom genie.conf.base import Testbed, Device, Link, Interface\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning\n\n# PIM\nfrom genie.libs.conf.vrf import Vrf\nfrom genie.libs.conf.pim import Pim\nfrom genie.libs.conf.pim.rp_address import RPAddressGroup\n\n\noutputs = {}\n\ndef mapper(key):\n return outputs[key]\n\n\nclass test_pim(TestCase):\n\n def setUp(self):\n \n # Set Genie Tb\n testbed = Testbed()\n Genie.testbed = testbed\n \n # Device\n self.dev1 = Device(name='PE1', testbed=testbed, os='nxos')\n\n def test_pim_full_config(self):\n\n # For failures\n self.maxDiff = None\n\n # shorten the line\n dev1 = self.dev1\n \n # Pim object\n pim = Pim()\n dev1.add_feature(pim)\n\n # Apply configuration\n pim.device_attr[dev1].enabled = True\n\n # VRF configuration\n vrf1 = Vrf('default')\n pim.device_attr[self.dev1].vrf_attr[vrf1]\n vrf2 = Vrf('red')\n pim.device_attr[self.dev1].vrf_attr[vrf2]\n\n for vrf, intf in {vrf1: 'Ethernet1/1', vrf2: 'Ethernet2/1'}.items():\n # == auto-rp ===\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n auto_rp = True\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n send_rp_announce_rp_group = '1.1.1.1'\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n send_rp_announce_route_map = 'test'\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n send_rp_announce_scope = 20\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n send_rp_announce_interval = 60\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n send_rp_announce_bidir = True\n\n # == auto-rp discovery===\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n auto_rp_discovery = True\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n send_rp_discovery_intf = intf\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n send_rp_discovery_scope = 20\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n autorp_listener = True\n\n # == bsr candidate ===\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n bsr_candidate_interface = intf\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n bsr_candidate_hash_mask_length = 20\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n bsr_candidate_priority = 20\n\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv6'].\\\n bsr_candidate_interface = intf\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv6'].\\\n bsr_candidate_hash_mask_length = 20\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv6'].\\\n bsr_candidate_priority = 20\n\n # == bsr rp-candidate ===\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n bsr_rp_candidate_interface = intf\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n bsr_rp_candidate_group_list = '192.168.127.12/24'\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n bsr_rp_candidate_route_map = 'test'\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n bsr_rp_candidate_prefix_list = 'LLAL'\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n bsr_rp_candidate_priority = 10\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n bsr_rp_candidate_interval = 60\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n bsr_rp_candidate_bidir = True\n 
pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n bsr_rp_candidate_priority = 10\n\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv6'].\\\n bsr_rp_candidate_interface = intf\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv6'].\\\n bsr_rp_candidate_group_list = 'fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b/64'\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv6'].\\\n bsr_rp_candidate_route_map = 'test'\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv6'].\\\n bsr_rp_candidate_prefix_list = 'LLAL'\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv6'].\\\n bsr_rp_candidate_priority = 10\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv6'].\\\n bsr_rp_candidate_interval = 60\n\n # == static RP ===\n rp1 = RPAddressGroup(device=self.dev1)\n rp1.static_rp_address = '1.1.1.1'\n rp1.static_rp_group_list = '192.168.127.12/24'\n rp1.static_rp_route_map = 'test'\n rp1.static_rp_prefix_list = 'LLAL'\n rp1.static_rp_bidir = True\n rp1.static_rp_override = True\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].add_static_rp(rp1)\n\n\n rp2 = RPAddressGroup(device=self.dev1)\n rp2.static_rp_address = '2001:db8:1:1::1'\n rp2.static_rp_group_list = 'fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b/64'\n rp2.static_rp_route_map = 'test'\n rp2.static_rp_prefix_list = 'LLAL'\n rp2.static_rp_override = True\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv6'].add_static_rp(rp2)\n\n # == static rp register ===\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n accept_register = 'regist_name'\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n accept_register_prefix_list = 'test'\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv6'].\\\n accept_register = 'regist_name'\n # not ipv6 supported\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv6'].\\\n accept_register_prefix_list = 'test'\n\n # log-neighbor-changes\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n log_neighbor_changes = True\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv6'].\\\n log_neighbor_changes = True\n\n # register_source\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n register_source = intf\n # not ipv6 supported\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv6'].\\\n register_source = intf\n\n # == sg-expiry-timer ==\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n sg_expiry_timer = 182\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n sg_expiry_timer_infinity = True\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n sg_expiry_timer_sg_list = 'sg_name'\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n sg_expiry_timer_prefix_list = 'prefix_name'\n\n # == spt-threshold ==\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n spt_switch_infinity = 'infinity'\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n spt_switch_policy = 'abcde'\n\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv6'].\\\n spt_switch_infinity = 'infinity'\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv6'].\\\n spt_switch_policy = 'abcde'\n\n\n # == interface ==\n # ---- mode ----------\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n interface_attr[intf].mode = 'sparse-mode'\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n 
interface_attr[intf].boundary = 'abc'\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n interface_attr[intf].boundary_in = True\n \n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv6'].\\\n interface_attr[intf].mode = 'sparse-mode'\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv6'].\\\n interface_attr[intf].boundary = 'abc'\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv6'].\\\n interface_attr[intf].boundary_in = True\n\n # ---- jp-policy ----------\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n interface_attr[intf].boundary = 'test'\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n interface_attr[intf].boundary_in = True\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n interface_attr[intf].boundary_out = True\n \n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv6'].\\\n interface_attr[intf].boundary = 'test'\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv6'].\\\n interface_attr[intf].boundary_in = True\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv6'].\\\n interface_attr[intf].boundary_out = True\n\n # ---- border ----------\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n interface_attr[intf].bsr_border = True\n \n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv6'].\\\n interface_attr[intf].bsr_border = True\n\n # ---- hello-interval ----------\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n interface_attr[intf].hello_interval = 30000\n \n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv6'].\\\n interface_attr[intf].hello_interval = 30000\n\n # ---- dr-priority ----------\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n interface_attr[intf].dr_priority = 777\n \n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv6'].\\\n interface_attr[intf].dr_priority = 777\n\n # ---- neighbor-policy ----------\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n interface_attr[intf].neighbor_filter = 'pim_neighbor_policy'\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n interface_attr[intf].neighbor_filter_prefix_list = 'test'\n \n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv6'].\\\n interface_attr[intf].neighbor_filter = 'pim_neighbor_policy'\n # not supported\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv6'].\\\n interface_attr[intf].neighbor_filter_prefix_list = 'test'\n \n # Build config\n cfgs = pim.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.\\\n join([\n 'feature pim',\n 'feature pim6',\n 'ip pim auto-rp rp-candidate 1.1.1.1 route-map test interval 60 scope 20 bidir',\n 'ip pim auto-rp mapping-agent Ethernet1/1 scope 20',\n 'ip pim auto-rp forward listen',\n 'ip pim bsr forward listen',\n 'ip pim bsr-candidate Ethernet1/1 hash-len 20 priority 20',\n 'ip pim bsr forward listen',\n 'ip pim rp-candidate Ethernet1/1 group-list 192.168.127.12/24 priority 10 interval 60 bidir',\n 'ip pim register-policy regist_name',\n 'ip pim register-policy prefix-list test',\n 'ip pim log-neighbor-changes',\n 'ip pim register-source Ethernet1/1',\n 'ip pim sg-expiry-timer 182 prefix-list prefix_name',\n 'ip pim sg-expiry-timer 182 sg-list sg_name',\n 'ip pim sg-expiry-timer infinity prefix-list prefix_name',\n 'ip pim sg-expiry-timer infinity sg-list sg_name',\n 'ip pim spt-threshold infinity 
group-list abcde',\n 'ip pim rp-address 1.1.1.1 group-list 192.168.127.12/24 bidir',\n 'interface Ethernet1/1',\n ' ip pim sparse-mode',\n ' ip pim jp-policy test in',\n ' ip pim jp-policy test out',\n ' ip pim border',\n ' ip pim hello-interval 30000',\n ' ip pim dr-priority 777',\n ' ip pim neighbor-policy pim_neighbor_policy',\n ' ip pim neighbor-policy prefix-list test',\n ' exit',\n 'ipv6 pim bsr forward listen',\n 'ipv6 pim bsr-candidate Ethernet1/1 hash-len 20 priority 20',\n 'ipv6 pim bsr forward listen',\n 'ipv6 pim rp-candidate Ethernet1/1 group-list fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b/64 priority 10 interval 60',\n 'ipv6 pim register-policy regist_name',\n 'ipv6 pim log-neighbor-changes',\n 'ipv6 pim spt-threshold infinity group-list abcde',\n 'ipv6 pim rp-address 2001:db8:1:1::1 group-list fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b/64 override',\n 'interface Ethernet1/1',\n ' ipv6 pim sparse-mode',\n ' ipv6 pim jp-policy test in',\n ' ipv6 pim jp-policy test out',\n ' ipv6 pim border',\n ' ipv6 pim hello-interval 30000',\n ' ipv6 pim dr-priority 777',\n ' ipv6 pim neighbor-policy pim_neighbor_policy',\n ' exit',\n 'vrf context red',\n ' ip pim auto-rp rp-candidate 1.1.1.1 route-map test interval 60 scope 20 bidir',\n ' ip pim auto-rp mapping-agent Ethernet2/1 scope 20',\n ' ip pim auto-rp forward listen',\n ' ip pim bsr forward listen',\n ' ip pim bsr-candidate Ethernet2/1 hash-len 20 priority 20',\n ' ip pim bsr forward listen',\n ' ip pim rp-candidate Ethernet2/1 group-list 192.168.127.12/24 priority 10 interval 60 bidir',\n ' ip pim register-policy regist_name',\n ' ip pim register-policy prefix-list test',\n ' ip pim log-neighbor-changes',\n ' ip pim register-source Ethernet2/1',\n ' ip pim sg-expiry-timer 182 prefix-list prefix_name',\n ' ip pim sg-expiry-timer 182 sg-list sg_name',\n ' ip pim sg-expiry-timer infinity prefix-list prefix_name',\n ' ip pim sg-expiry-timer infinity sg-list sg_name',\n ' ip pim spt-threshold infinity group-list abcde',\n ' ip pim rp-address 1.1.1.1 group-list 192.168.127.12/24 bidir',\n ' exit',\n 'interface Ethernet2/1',\n ' ip pim sparse-mode',\n ' ip pim jp-policy test in',\n ' ip pim jp-policy test out',\n ' ip pim border',\n ' ip pim hello-interval 30000',\n ' ip pim dr-priority 777',\n ' ip pim neighbor-policy pim_neighbor_policy',\n ' ip pim neighbor-policy prefix-list test',\n ' exit',\n 'vrf context red',\n ' ipv6 pim bsr forward listen',\n ' ipv6 pim bsr-candidate Ethernet2/1 hash-len 20 priority 20',\n ' ipv6 pim bsr forward listen',\n ' ipv6 pim rp-candidate Ethernet2/1 group-list fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b/64 priority 10 interval 60',\n ' ipv6 pim register-policy regist_name',\n ' ipv6 pim log-neighbor-changes',\n ' ipv6 pim spt-threshold infinity group-list abcde',\n ' ipv6 pim rp-address 2001:db8:1:1::1 group-list fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b/64 override',\n ' exit',\n 'interface Ethernet2/1',\n ' ipv6 pim sparse-mode',\n ' ipv6 pim jp-policy test in',\n ' ipv6 pim jp-policy test out',\n ' ipv6 pim border',\n ' ipv6 pim hello-interval 30000',\n ' ipv6 pim dr-priority 777',\n ' ipv6 pim neighbor-policy pim_neighbor_policy',\n ' exit',\n ]))\n\n cfgs = pim.build_unconfig(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.\\\n join([\n 'no feature pim',\n 'no feature pim6',\n ]))\n\n \n cfgs = pim.build_unconfig(apply=False,\n attributes={'device_attr': {\n self.dev1: {\n 'vrf_attr': {\n 'default': {\n 'address_family_attr': {\n 'ipv4': {\n 
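# a None leaf in the attributes dict means 'unconfigure just this attribute'\n 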
'register_source': None\n }\n }\n },\n 'red': {\n 'address_family_attr': {\n 'ipv6': {\n 'interface_attr': {\n 'Ethernet2/1': {\n 'mode': None\n }\n }\n }\n } \n }\n }\n }}})\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.\\\n join([\n 'no ip pim register-source Ethernet1/1',\n 'interface Ethernet2/1',\n ' no ipv6 pim sparse-mode',\n ' exit',\n ]))\n\n def test_pim_auto_rp_config(self):\n\n # For failures\n self.maxDiff = None\n\n # shorten the line\n dev1 = self.dev1\n\n # VRF configuration\n vrf = Vrf('default')\n\n\n # == auto-rp lack of information ===\n # Pim object\n pim = Pim()\n dev1.add_feature(pim)\n\n pim.device_attr[dev1].enabled_pim = True\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n send_rp = True\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n send_rp_announce_rp_group = '1.1.1.1'\n\n # Build config\n cfgs = pim.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.\\\n join([\n 'feature pim',\n ]))\n\n # == auto-rp intf group-list ===\n # Pim object\n pim = Pim()\n dev1.add_feature(pim)\n\n # Apply configuration\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n send_rp = True\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n send_rp_announce_intf = 'Ethernet1/1'\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n send_rp_announce_group_list = '192.168.127.12/24'\n\n # Build config\n cfgs = pim.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.\\\n join([\n 'ip pim send-rp-announce Ethernet1/1 group-list 192.168.127.12/24',\n ]))\n\n # == auto-rp group route-map with interval ===\n # Pim object\n pim = Pim()\n dev1.add_feature(pim)\n\n # VRF configuration\n vrf = Vrf('red')\n\n # Apply configuration\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n send_rp = True\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n send_rp_announce_rp_group = '1.1.1.1'\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n send_rp_announce_route_map = 'test'\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n send_rp_announce_interval = 30\n\n # Build config\n cfgs = pim.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.\\\n join([\n 'vrf context red',\n ' ip pim send-rp-announce 1.1.1.1 route-map test interval 30',\n ' exit',\n ]))\n\n # == auto-rp intf prefix-list with interval bidir===\n # Pim object\n pim = Pim()\n dev1.add_feature(pim)\n\n # VRF configuration\n vrf = Vrf('red')\n\n # Apply configuration\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n auto_rp = True\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n send_rp_announce_intf = 'Ethernet1/1'\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n send_rp_announce_prefix_list = 'test'\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n send_rp_announce_scope = 10\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n send_rp_announce_bidir = True\n\n # Build config\n cfgs = pim.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.\\\n join([\n 'vrf context red',\n ' ip pim auto-rp rp-candidate Ethernet1/1 prefix-list test scope 10 bidir',\n ' exit',\n 
]))\n\n\n # == auto-rp discovery===\n # Pim object\n pim = Pim()\n dev1.add_feature(pim)\n\n # VRF configuration\n vrf = Vrf('default')\n\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n send_rp_discovery = True\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n send_rp_discovery_intf = 'Ethernet1/1'\n\n # Build config\n cfgs = pim.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.\\\n join([\n 'ip pim send-rp-discovery Ethernet1/1',\n ]))\n\n def test_pim_bsr_config(self):\n\n # For failures\n self.maxDiff = None\n\n # shorten the line\n dev1 = self.dev1\n\n # VRF configuration\n vrf = Vrf('default')\n\n # == bsr rp ===\n # Pim object\n pim = Pim()\n dev1.add_feature(pim)\n\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n bsr_candidate_interface = 'Ethernet1/1'\n\n # Build config\n cfgs = pim.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.\\\n join([\n 'ip pim bsr forward listen',\n 'ip pim bsr-candidate Ethernet1/1',\n ]))\n\n # Pim object\n pim = Pim()\n dev1.add_feature(pim)\n\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n bsr_candidate_interface = 'Ethernet1/1'\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n bsr_candidate_hash_mask_length = 30\n\n # Build config\n cfgs = pim.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.\\\n join([\n 'ip pim bsr forward listen',\n 'ip pim bsr-candidate Ethernet1/1 hash-len 30',\n ]))\n\n # Pim object\n pim = Pim()\n dev1.add_feature(pim)\n\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv6'].\\\n bsr_candidate_interface = 'Ethernet1/1'\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv6'].\\\n bsr_candidate_priority = 200\n\n # Build config\n cfgs = pim.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.\\\n join([\n 'ipv6 pim bsr forward listen',\n 'ipv6 pim bsr-candidate Ethernet1/1 priority 200',\n ]))\n\n # == bsr rp intf route-map ===\n # Pim object\n pim = Pim()\n dev1.add_feature(pim)\n\n # VRF configuration\n vrf = Vrf('red')\n\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv6'].\\\n bsr_rp_candidate_interface = 'Ethernet1/1'\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv6'].\\\n bsr_rp_candidate_route_map = 'test'\n\n # Build config\n cfgs = pim.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.\\\n join([\n 'vrf context red',\n ' ipv6 pim bsr forward listen',\n ' ipv6 pim rp-candidate Ethernet1/1 route-map test',\n ' exit',\n ]))\n\n # == auto-rp intf prefix-list with interval bidir===\n # Pim object\n pim = Pim()\n dev1.add_feature(pim)\n\n # VRF configuration\n vrf = Vrf('red')\n\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n bsr_rp_candidate_interface = 'Ethernet1/1'\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n bsr_rp_candidate_prefix_list = 'LALALLA'\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n bsr_rp_candidate_interval = 60\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n bsr_rp_candidate_bidir = True\n\n # Build config\n cfgs = pim.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[dev1.name]), 
'\\n'.\\\n join([\n 'vrf context red',\n ' ip pim bsr forward listen',\n ' ip pim rp-candidate Ethernet1/1 prefix-list LALALLA interval 60 bidir',\n ' exit',\n ]))\n\n def test_pim_static_rp_config(self):\n\n # For failures\n self.maxDiff = None\n\n # shorten the line\n dev1 = self.dev1\n\n # VRF configuration\n vrf = Vrf('default')\n\n # == bsr static rp ===\n # -- bsr static rp intf --\n # Pim object\n pim = Pim()\n dev1.add_feature(pim)\n\n rp1 = RPAddressGroup(device=self.dev1)\n rp1.static_rp_address = '2.2.2.2'\n rp1.static_rp_group_list = '172.16.17.32/4'\n rp2 = RPAddressGroup(device=self.dev1)\n rp2.static_rp_address = '3.3.3.3'\n rp2.static_rp_group_list = '172.16.17.32/4'\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].add_static_rp(rp1)\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].add_static_rp(rp2)\n\n # Build config\n cfgs = pim.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.\\\n join([\n 'ip pim rp-address 2.2.2.2 group-list 172.16.17.32/4',\n 'ip pim rp-address 3.3.3.3 group-list 172.16.17.32/4',\n ]))\n\n # == bsr static rp intf route-map ===\n # Pim object\n pim = Pim()\n dev1.add_feature(pim)\n\n rp1 = RPAddressGroup(device=self.dev1)\n rp1.static_rp_address = '1.1.1.1'\n rp1.static_rp_route_map = 'test'\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].add_static_rp(rp1)\n\n # Build config\n cfgs = pim.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.\\\n join([\n 'ip pim rp-address 1.1.1.1 route-map test',\n ]))\n\n # == bsr static rp intf group-list ipv6 ===\n # Pim object\n pim = Pim()\n dev1.add_feature(pim)\n\n rp1 = RPAddressGroup(device=self.dev1)\n rp1.static_rp_address = 'fc00:e968:6179::de52:7100:1'\n rp1.static_rp_group_list = 'ff00::/8'\n rp1.static_rp_override = True\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv6'].add_static_rp(rp1)\n\n # Build config\n cfgs = pim.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.\\\n join([\n 'ipv6 pim rp-address fc00:e968:6179::de52:7100:1 group-list ff00::/8 override',\n ]))\n\n def test_pim_sg_expiry_timer_config(self):\n\n # For failures\n self.maxDiff = None\n\n # shorten the line\n dev1 = self.dev1\n\n # VRF configuration\n vrf = Vrf('default')\n\n # == sg_expiry_timer ===\n # Pim object\n pim = Pim()\n dev1.add_feature(pim)\n\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n sg_expiry_timer = 181\n\n # Build config\n cfgs = pim.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.\\\n join([\n 'ip pim sg-expiry-timer 181',\n ]))\n\n # == sg_expiry_timer_infinity ===\n # Pim object\n pim = Pim()\n dev1.add_feature(pim)\n # VRF configuration\n vrf = Vrf('blue')\n\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n sg_expiry_timer_infinity = True\n\n # Build config\n cfgs = pim.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.\\\n join([\n 'vrf context blue',\n ' ip pim sg-expiry-timer infinity',\n ' exit',\n ]))\n\n # == sg_expiry_timer sg_expiry_timer_sg_list ===\n # Pim object\n pim = Pim()\n dev1.add_feature(pim)\n # VRF configuration\n vrf = Vrf('VRF1')\n\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n sg_expiry_timer = 200\n 
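# sg-list (next line) names the policy that limits which (S,G) entries\n # the 200-second expiry timer applies to\n 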
pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n sg_expiry_timer_sg_list = 'test'\n\n # Build config\n cfgs = pim.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.\\\n join([\n 'vrf context VRF1',\n ' ip pim sg-expiry-timer 200 sg-list test',\n ' exit',\n ]))\n\n # == sg_expiry_timer_infinity sg_expiry_timer_prefix_list ===\n # Pim object\n pim = Pim()\n dev1.add_feature(pim)\n # VRF configuration\n vrf = Vrf('default')\n\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n sg_expiry_timer_infinity = True\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n sg_expiry_timer_prefix_list = 'test'\n\n # Build config\n cfgs = pim.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.\\\n join([\n 'ip pim sg-expiry-timer infinity prefix-list test',\n ]))\n\n\n def test_learn_config(self):\n\n testbed = Testbed()\n dev = Device(testbed=testbed, name='PE2', os='nxos')\n dev.custom = {'abstraction':{'order':['os'], 'context':'cli'}}\n dev.mapping={}\n dev.mapping['cli']='cli'\n # Give the device as a connection type\n # This is done in order to call the parser on the output provided\n dev.connectionmgr.connections['cli'] = dev\n\n golden_output = {'return_value': '''\n N95_2_R2# show run pim\n !Command: show running-config pim\n!Time: Wed Aug 15 14:45:52 2018\n\nversion 7.0(3)I7(3)\nfeature pim\n\nip pim bsr bsr-candidate loopback0 priority 128\nip pim rp-address 6.6.6.6 group-list 172.16.58.3/8\nip pim rp-address 6.6.6.6 group-list 172.16.58.3/24 bidir\nip pim rp-address 2192.168.3.11 group-list 192.168.127.12/8\nip pim rp-address 192.168.3.11 group-list 172.16.17.32/8\nip pim bsr rp-candidate loopback0 group-list 172.16.17.32/8 priority 128\nip pim send-rp-announce loopback0 group-list 172.16.17.32/8\nip pim send-rp-discovery loopback0\nip pim ssm range 172.16.17.32/8\nip pim anycast-rp 192.168.3.11 2.2.2.2\nip pim anycast-rp 192.168.3.11 6.6.6.6\nip pim bsr forward listen\nip pim register-source loopback0\n\nvrf context VRF1\n ip pim bsr bsr-candidate loopback11 priority 128\n ip pim rp-address 6.6.6.6 group-list 172.16.58.3/8\n ip pim rp-address 6.6.6.6 group-list 172.16.58.3/24 bidir\n ip pim rp-address 172.16.17.32 group-list 192.168.127.12/8\n ip pim rp-address 192.168.3.11 group-list 172.16.17.32/8\n ip pim bsr rp-candidate loopback11 group-list 172.16.17.32/8 priority 128\n ip pim send-rp-announce loopback11 group-list 172.16.17.32/8\n ip pim send-rp-discovery loopback11\n ip pim ssm range 172.16.17.32/8\n ip pim anycast-rp 192.168.3.11 2.2.2.2\n ip pim anycast-rp 192.168.3.11 6.6.6.6\n ip pim bsr forward listen\n ip pim register-source loopback11\n\ninterface loopback0\n ip pim sparse-mode\n '''}\n\n golden_output_vrf = '''\n N95_2_R2# show run pim | inc vrf\nvrf context VRF1\n '''\n golden_output_vrf6 = '''\n N95_2_R2# show run pim6 | inc vrf\nvrf context VRF1\n '''\n golden_output_feature = '''\n N95_2_R2# show run pim | inc feature\nfeature pim\n '''\n golden_output_feature6 = '''\n N95_2_R2# show run pim6 | inc feature\nfeature pim6\n '''\n golden_output_auto_rp = '''\n N95_2_R2# show run pim | sec '^i' | inc send-rp-announce\nip pim send-rp-announce loopback0 group-list 172.16.17.32/8\n '''\n golden_output_auto_rp_vrf = '''\n N95_2_R2# show run pim | sec VRF1 | inc send-rp-announce\n ip pim send-rp-announce loopback11 group-list 172.16.17.32/8\n '''\n\n pim = Pim()\n outputs['show running-config pim 
| inc feature'] = golden_output_feature\n outputs['show running-config pim6 | inc feature'] = golden_output_feature6\n outputs['show running-config pim | inc vrf'] = golden_output_vrf\n outputs['show running-config pim6 | inc vrf'] = golden_output_vrf6\n outputs[\"show running-config pim | sec '^i' | inc send-rp-announce\"] = golden_output_auto_rp\n outputs[\"show running-config pim | sec VRF1 | inc send-rp-announce\"] = golden_output_auto_rp_vrf\n # Return outputs above as inputs to parser when called\n dev.execute = Mock()\n dev.execute.side_effect = mapper\n\n learn = Pim.learn_config(device=dev, attributes=['pim[vrf_attr][default][address_family_attr][ipv4][send_rp_announce_intf]'])\n\n self.assertEqual(learn[0].device_attr[dev].vrf_attr['default'].address_family_attr['ipv4'].send_rp_announce_intf, 'loopback0')\n self.assertEqual(learn[0].device_attr[dev].vrf_attr['default'].address_family_attr['ipv4'].send_rp_announce_group_list, None)\n\n learn = Pim.learn_config(device=dev)\n\n self.assertEqual(learn[0].device_attr[dev].vrf_attr['default'].address_family_attr['ipv4'].send_rp_announce_intf, 'loopback0')\n self.assertEqual(learn[0].device_attr[dev].vrf_attr['default'].address_family_attr['ipv4'].send_rp_announce_group_list, '172.16.17.32/8')\n self.assertEqual(learn[0].device_attr[dev].vrf_attr['VRF1'].address_family_attr['ipv4'].send_rp_announce_intf, 'loopback11')\n self.assertEqual(learn[0].device_attr[dev].vrf_attr['VRF1'].address_family_attr['ipv4'].send_rp_announce_group_list, '172.16.17.32/8')\n\n\nif __name__ == '__main__':\n unittest.main()\n", "id": "564672", "language": "Python", "matching_score": 4.290853977203369, "max_stars_count": 0, "path": "pkgs/conf-pkg/src/genie/libs/conf/pim/nxos/tests/test_pim.py" }, { "content": "\n# import python\nfrom abc import ABC\n\n# import genie\nfrom genie.conf.base.config import CliConfig\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning, \\\n AttributesHelper\n\n\nclass Pim(ABC):\n\n class DeviceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n \n # ip pim bidir-enable\n if attributes.value('enabled_bidir'):\n configurations.append_line(\n attributes.format('ip pim bidir-enable'))\n\n # VrfAttributes\n for sub, attributes2 in attributes.mapping_values('vrf_attr',\n sort=True, keys=self.vrf_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n\n if apply:\n if configurations:\n self.device.configure(configurations)\n else:\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n class VrfAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # address_family\n for address_family_sub, address_family_attributes in \\\n attributes.mapping_values(\n 'address_family_attr', sort=True,\n keys = self.address_family_attr):\n configurations.append_block(\n address_family_sub.build_config(apply=False,\n attributes=address_family_attributes,\n 
unconfig=unconfig))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n class AddressFamilyAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None,\n unconfig=False, **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n # assign ip version according to the address_family_attr\n if hasattr(self, 'address_family'):\n if 'ipv4' in self.address_family.value:\n self.ip = 'ip'\n if 'ipv6' in self.address_family.value:\n self.ip = 'ipv6'\n\n base_cfg = '{ip} pim'.format(ip=self.ip) if self.vrf_name == 'default' else \\\n '{ip} pim vrf {vrf}'.format(ip=self.ip, vrf=self.vrf_name)\n\n\n skip_unconfig = False\n\n if self.ip == 'ip':\n # ==== PIM Auto-RP =======\n # send-rp-announce { <send_rp_announce_intf> | <send_rp_announce_rp_group> }\n if attributes.value('send_rp_announce_rp_group'):\n auto_rp_str = ' send-rp-announce {send_rp_announce_rp_group}'\n elif attributes.value('send_rp_announce_intf'):\n auto_rp_str = ' send-rp-announce {send_rp_announce_intf}'\n else:\n auto_rp_str = ''\n\n # + scope <send_rp_announce_scope>\n if auto_rp_str and attributes.value('send_rp_announce_scope'):\n auto_rp_str += ' scope {send_rp_announce_scope}'\n else:\n auto_rp_str = ''\n\n # + group-list {send_rp_announce_group_list}\n if auto_rp_str and attributes.value('send_rp_announce_group_list'):\n if attributes.value('send_rp_announce_group_list'):\n auto_rp_str += ' group-list'\\\n ' {send_rp_announce_group_list}'\n\n # + interval {send_rp_announce_interval}\n if auto_rp_str and attributes.value('send_rp_announce_interval'):\n if attributes.value('send_rp_announce_interval'):\n auto_rp_str += ' interval'\\\n ' {send_rp_announce_interval}'\n\n # + bidir\n if auto_rp_str and attributes.value('send_rp_announce_bidir'):\n if unconfig and attributes.value('enabled_bidir'):\n skip_unconfig = True\n else:\n auto_rp_str += ' bidir'\n skip_unconfig = False\n\n if auto_rp_str and not skip_unconfig:\n configurations.append_line(\n attributes.format(base_cfg + auto_rp_str))\n\n # === PIM Send-RP-Discovery ===\n # ip pim [ vrf <vrf_name> ]\n # send-rp-discovery [ <send_rp_discovery_intf> ]\n # scope <send_rp_discovery_scope>\n # [ interval <send_rp_discovery_interval> ]\n\n # <send_rp_discovery_intf>\n # [scope ,send_rp_discovery_scope>]\n if not attributes.value('send_rp_discovery_intf') and \\\n attributes.value('send_rp_discovery_scope'):\n pre_str = ' send-rp-discovery'\\\n ' scope {send_rp_discovery_scope}'\n elif attributes.value('send_rp_discovery_intf') and \\\n attributes.value('send_rp_discovery_scope'):\n pre_str = ' send-rp-discovery {send_rp_discovery_intf}'\\\n ' scope {send_rp_discovery_scope}'\n else:\n pre_str = ''\n\n if pre_str and attributes.value('send_rp_discovery_scope'):\n pre_str += ' interval {send_rp_discovery_interval}'\n\n\n if pre_str:\n configurations.append_line(\n attributes.format(base_cfg + pre_str))\n\n # ip pim autorp listener\n if attributes.value('autorp_listener'):\n configurations.append_line(\n attributes.format('ip pim autorp listener'))\n\n # ==== PIM BSR =======\n # == bsr bsr-candidate ==\n # ip/ipv6 pim [ vrf <vrf_name>] bsr-candidate <bsr_candidate_interface>\n if attributes.value('bsr_candidate_interface'):\n\n # ip/ipv6 pim bsr-candidate {bsr_candidate_interface}\n bsr_str = ' bsr-candidate '\\\n 
'{bsr_candidate_interface}' if self.ip == 'ip' else \\\n ' bsr candidate bsr'\\\n '{bsr_candidate_interface}'\n\n # + <bsr_candidate_hash_mask_length>\n if attributes.value('bsr_candidate_hash_mask_length'):\n bsr_str += ' {bsr_candidate_hash_mask_length}'\n\n # + priority <bsr_candidate_priority> | \n # <bsr_candidate_priority>\n if attributes.value('bsr_candidate_priority'):\n bsr_str += ' priority {bsr_candidate_priority}' if \\\n self.ip == 'ipv6' else \\\n ' {bsr_candidate_priority}'\n\n # + [scope] -- only for ipv6 pim\n if attributes.value('scope') and self.ip == 'ipv6':\n bsr_str += ' scope'\n\n # + accept-rp-candidate <bsr_candidate_accept_rp_acl>\n if attributes.value('bsr_candidate_accept_rp_acl'):\n bsr_str += ' accept-rp-candidate {bsr_candidate_accept_rp_acl}'\n\n configurations.append_line(\n attributes.format(base_cfg + bsr_str))\n\n # == bsr rp-candidate ==\n # ip pim rp-candidate <bsr_rp_candidate_interface>\n if attributes.value('bsr_rp_candidate_interface') and self.ip == 'ip':\n # ip pim rp-candidate {bsr_rp_candidate_interface}\n bsr_rp_str = ' rp-candidate '\\\n '{bsr_rp_candidate_interface}'\n\n # + group-list {bsr_rp_candidate_group_list}\n if attributes.value('bsr_rp_candidate_group_list'):\n bsr_rp_str += ' group-list'\\\n ' {bsr_rp_candidate_group_list}'\n\n # +interval <bsr_rp_candidate_interval>\n if attributes.value('bsr_rp_candidate_interval'):\n bsr_rp_str += ' interval {bsr_rp_candidate_interval}'\n\n # +priority <bsr_rp_candidate_priority>\n if attributes.value('bsr_rp_candidate_priority'):\n bsr_rp_str += ' priority '\\\n '{bsr_rp_candidate_priority}'\n\n # +bidir\n if attributes.value('bsr_rp_candidate_bidir'):\n if unconfig and attributes.value('enabled_bidir'):\n skip_unconfig = True\n else:\n bsr_rp_str += ' bidir'\n skip_unconfig = False\n\n if not skip_unconfig:\n configurations.append_line(\n attributes.format(base_cfg + bsr_rp_str))\n\n elif attributes.value('bsr_rp_candidate_address') and self.ip == 'ipv6':\n # ipv6 pim [ vrf <vrf_name> ] bsr candidate rp <bsr_rp_candidate_address> \n bsr_rp_str = ' bsr candidate rp '\\\n '{bsr_rp_candidate_address}'\n\n # + group-list {bsr_rp_candidate_group_list}\n if attributes.value('bsr_rp_candidate_group_list'):\n bsr_rp_str += ' group-list'\\\n ' {bsr_rp_candidate_group_list}'\n\n # +interval <bsr_rp_candidate_interval>\n if attributes.value('bsr_rp_candidate_interval'):\n bsr_rp_str += ' interval {bsr_rp_candidate_interval}'\n\n # +priority <bsr_rp_candidate_priority>\n if attributes.value('bsr_rp_candidate_priority'):\n bsr_rp_str += ' priority '\\\n '{bsr_rp_candidate_priority}'\n\n # +bidir\n if attributes.value('bsr_rp_candidate_bidir'):\n if unconfig and attributes.value('enabled_bidir'):\n skip_unconfig = True\n else:\n bsr_rp_str += ' bidir'\n skip_unconfig = False\n\n if not skip_unconfig:\n configurations.append_line(\n attributes.format(base_cfg + bsr_rp_str))\n \n # ip/ipv6 pim register-policy list <accept_register>\n if attributes.value('accept_register'):\n if self.ip == 'ip':\n configurations.append_line(\n attributes.format('ip pim accept-register list '\n '{accept_register}', force=True))\n else:\n configurations.append_line(\n attributes.format(base_cfg + ' accept-register list '\n '{accept_register}', force=True))\n\n # ip pim log-neighbor-changes\n if attributes.value('log_neighbor_changes') and self.ip == 'ip':\n configurations.append_line(\n attributes.format(\n 'ip pim log-neighbor-changes'))\n\n # ip/ipv6 pim [vrf <vrf_name>] register-source <register_source>\n if 
attributes.value('register_source'):\n configurations.append_line(\n attributes.format(base_cfg +\n ' register-source {register_source}'))\n \n # ip pim [ vrf <vrf_name> ] sparse sg-expiry-timer <sg_expiry_timer>\n if attributes.value('sg_expiry_timer') and self.ip == 'ip':\n sg_cfg = ' sparse sg-expiry-timer {sg_expiry_timer}'\n\n if attributes.value('sg_expiry_timer_sg_list'):\n sg_cfg += ' sg-list {sg_expiry_timer_sg_list}'\n configurations.append_line(\n attributes.format(base_cfg + sg_cfg))\n\n # ip/ipv6 pim spt-threshold infinity group-list\n # <spt_switch_policy>\n if attributes.value('spt_switch_infinity') and \\\n attributes.value('spt_switch_policy'):\n configurations.append_line(\n attributes.format(base_cfg + \n ' spt-threshold {spt_switch_infinity.value} '\n 'group-list {spt_switch_policy}'))\n elif attributes.value('spt_switch_infinity') and \\\n not attributes.value('spt_switch_policy'):\n configurations.append_line(\n attributes.format(base_cfg + \n ' spt-threshold {spt_switch_infinity.value}'))\n\n # Static RP address Attributes under top level config\n for groups, attributes2 in attributes.sequence_values(\n 'rp_addresses', sort=True):\n kwargs = {'ip_type': self.ip,\n 'vrf': self.vrf_name,\n 'bidir': attributes.value('enabled_bidir')}\n if unconfig:\n configurations.append_block(groups.build_unconfig(\n apply=False, attributes=attributes2, **kwargs))\n else:\n configurations.append_block(groups.build_config(\n apply=False, attributes=attributes2, **kwargs))\n\n # InterfaceAttributes\n for sub, attributes2 in attributes.mapping_values(\n 'interface_attr', keys=self.interface_attr,\n sort=True):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n \n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None,\n **kwargs):\n return self.build_config(apply=apply,\n attributes=attributes,\n unconfig=True, **kwargs)\n\n class InterfaceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None,\n unconfig=False, **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # the interface should have vrf(name = vrf_name) attached\n with configurations.submode_context(\n attributes.format('interface {interface_name}',\n force=True)):\n\n # avoid the force = True for every config line\n # assign the 'ip' attribute when unconfig \n if unconfig and attributes.attributes:\n attributes.attributes['ip'] = self.ip\n\n # interface <intf_name>\n # ip/ipv6 pim sparse-mode\n if attributes.value('mode'):\n configurations.append_line(\n attributes.format('{ip} pim {mode.value}'))\n\n # interface <intf_name>\n # ip multicast <boundary> [filter-autorp] [in|out]\n if attributes.value('boundary') and self.ip == 'ip':\n cfg_str = 'ip multicast boundary {boundary}'\n\n if attributes.value('boundary_in'):\n cfg_str += ' in'\n elif attributes.value('boundary_out'):\n cfg_str += ' out'\n elif attributes.value('boundary_filter_autorp'):\n cfg_str += ' filter-autorp'\n\n configurations.append_line(\n attributes.format(cfg_str))\n if attributes.value('boundary') and self.ip == 'ipv6':\n configurations.append_line(\n attributes.format('ipv6 multicast boundary block source'))\n\n # interface <intf_name>\n # ip/ipv6 pim bsr border\n if attributes.value('bsr_border'):\n if self.ip == 'ip':\n configurations.append_line(\n attributes.format('{ip} pim bsr-border'))\n else: \n configurations.append_line(\n 
attributes.format('{ip} pim bsr border'))\n\n # interface <intf_name>\n # ip/ipv6 pim dr-priority {dr_priority}\n if attributes.value('dr_priority'):\n configurations.append_line(\n attributes.format(\n '{ip} pim dr-priority '\n '{dr_priority}'))\n\n if self.ip == 'ip':\n # interface <intf_name>\n # ip pim query-interval <hello_interval> [msec]\n if attributes.value('hello_interval'):\n configurations.append_line(\n attributes.format(\n 'ip pim query-interval '\n '{hello_interval}'))\n elif attributes.value('hello_interval_msec'):\n configurations.append_line(\n attributes.format(\n 'ip pim query-interval '\n '{hello_interval_msec} msec'))\n\n # interface <intf_name>\n # ip pim neighbor-filter <neighbor_filter>\n if attributes.value('neighbor_filter'):\n configurations.append_line(\n attributes.format(\n 'ip pim neighbor-filter '\n '{neighbor_filter}'))\n else:\n # interface <intf_name>\n # ipv6 pim hello-interval <hello_interval>\n if attributes.value('hello_interval'):\n configurations.append_line(\n attributes.format(\n 'ipv6 pim hello-interval '\n '{hello_interval}'))\n\n # interface <intf_name>\n # ipv6 pim [ vrf <vrf_name> ] neighbor-filter list <neighbor_filter>\n # this is done in config)# mode, not under interface for IPV6\n if attributes.value('neighbor_filter') and self.ip == 'ipv6':\n cfg_str = 'ipv6 pim' if self.vrf_name == 'default' else \\\n 'ipv6 pim vrf {vrf}'.format(vrf=self.vrf_name)\n configurations.append_line(\n attributes.format(cfg_str + \n ' neighbor-filter list '\n '{neighbor_filter}'))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None,\n **kwargs):\n return self.build_config(apply=apply,\n attributes=attributes,\n unconfig=True, **kwargs)\n\n", "id": "8851033", "language": "Python", "matching_score": 6.754931449890137, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/pim/iosxe/pim.py" }, { "content": "\n# import python\nfrom abc import ABC\n\n# import genie\nfrom genie.conf.base.config import CliConfig\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning, \\\n AttributesHelper\n\n\nclass Pim(ABC):\n\n class DeviceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n cfg_line = []\n unconfig_line = []\n\n # enabled\n if attributes.value('enabled'):\n if unconfig is False:\n configurations.append_line(attributes.format('feature pim'))\n configurations.append_line(attributes.format('feature pim6'))\n\n # Make sure that only enabled was provided in attributes\n # If wildcard, then delete everything\n elif unconfig is True and\\\n attributes.attributes == {'enabled': {True: None}} or \\\n attributes.iswildcard:\n configurations.append_line('no feature pim', raw=True)\n configurations.append_line('no feature pim6', raw=True)\n if apply:\n if configurations:\n self.device.configure(configurations)\n else:\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations)\n\n # enable_pim\n elif attributes.value('enabled_pim'):\n cfg_line.append('feature pim')\n unconfig_line.append('no feature pim')\n\n # enable_pim6\n elif attributes.value('enabled_pim6'):\n cfg_line.append('feature pim6')\n unconfig_line.append('no feature pim6')\n\n if cfg_line:\n if unconfig is False:\n configurations.append_line('\\n'.join(cfg_line))\n elif unconfig is True:\n 
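# raw=True: the unconfig lines above are pre-built with their own 'no'\n # prefix, so the builder must not negate them a second time\n 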
configurations.append_line('\\n'.join(unconfig_line), raw=True)\n if apply:\n if configurations:\n self.device.configure(configurations)\n else:\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations)\n\n # VrfAttributes\n for sub, attributes2 in attributes.mapping_values('vrf_attr',\n sort=True, keys=self.vrf_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n\n if apply:\n if configurations:\n self.device.configure(configurations)\n else:\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n class VrfAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # address_family\n for address_family_sub, address_family_attributes in \\\n attributes.mapping_values(\n 'address_family_attr', sort=True,\n keys = self.address_family_attr):\n configurations.append_block(\n address_family_sub.build_config(apply=False,\n attributes=address_family_attributes,\n unconfig=unconfig))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n class AddressFamilyAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None,\n unconfig=False, **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n # assign ip version according to the address_family_attr\n if hasattr(self, 'address_family'):\n if 'ipv4' in self.address_family.value:\n self.ip = 'ip'\n if 'ipv6' in self.address_family.value:\n self.ip = 'ipv6'\n\n if unconfig:\n attributes.attributes['ip'] = self.ip\n \n with configurations.submode_context(\n None if self.vrf_name == 'default' else\n attributes.format(\n 'vrf context {vrf_name}', force=True)):\n # no configuration append if vrf is default\n if self.vrf_name != 'default' and unconfig and \\\n attributes.iswildcard:\n configurations.submode_unconfig()\n\n # ==== PIM Auto-RP =======\n if attributes.value('auto_rp') or \\\n attributes.value('send_rp'):\n\n # auto-rp rp-candidate | send-rp-announce\n if attributes.value('auto_rp'):\n auto_rp_str = 'ip pim auto-rp rp-candidate'\n elif attributes.value('send_rp'):\n auto_rp_str = 'ip pim send-rp-announce'\n\n # + {send_rp_announce_rp_group} |\n # + {send_rp_announce_intf}\n if attributes.value('send_rp_announce_rp_group'):\n auto_rp_str += ' {send_rp_announce_rp_group}'\n elif attributes.value('send_rp_announce_intf'):\n auto_rp_str += ' {send_rp_announce_intf}'\n else:\n auto_rp_str = ''\n\n # + group-list {send_rp_announce_group_list} |\n # + route-map {send_rp_announce_route_map} |\n # + prefix-list {send_rp_announce_prefix_list}\n if auto_rp_str:\n if attributes.value('send_rp_announce_group_list'):\n auto_rp_str += ' group-list'\\\n ' {send_rp_announce_group_list}'\n elif attributes.value('send_rp_announce_route_map'):\n auto_rp_str += ' route-map'\\\n ' {send_rp_announce_route_map}'\n elif attributes.value('send_rp_announce_prefix_list'):\n auto_rp_str += ' prefix-list'\\\n ' {send_rp_announce_prefix_list}'\n 
else:\n auto_rp_str = ''\n\n if auto_rp_str:\n # + interval {send_rp_announce_interval}\n if attributes.value('send_rp_announce_interval'):\n auto_rp_str += ' interval'\\\n ' {send_rp_announce_interval}'\n\n # + scope {send_rp_announce_scope}\n if attributes.value('send_rp_announce_scope'):\n auto_rp_str += ' scope {send_rp_announce_scope}'\n\n # + bidir\n if attributes.value('send_rp_announce_bidir'):\n auto_rp_str += ' bidir'\n\n configurations.append_line(\n attributes.format(auto_rp_str))\n\n # === PIM Send-RP-Discovery ===\n # ip pim auto-rp mapping-agent|send-rp-discovery\n # <send_rp_discovery_intf>\n # [scope <send_rp_discovery_scope>]\n if attributes.value('auto_rp_discovery') or \\\n attributes.value('send_rp_discovery'):\n\n # set auto-rp method\n if attributes.value('auto_rp_discovery'):\n pre_str = 'ip pim auto-rp mapping-agent'\n if attributes.value('send_rp_discovery'):\n pre_str = 'ip pim send-rp-discovery'\n\n # <send_rp_discovery_intf>\n # [scope <send_rp_discovery_scope>]\n if attributes.value('send_rp_discovery_intf') and \\\n attributes.value('send_rp_discovery_scope'):\n pre_str += ' {send_rp_discovery_intf}'\\\n ' scope {send_rp_discovery_scope}'\n\n elif attributes.value('send_rp_discovery_intf') and \\\n not attributes.value('send_rp_discovery_scope'):\n pre_str += ' {send_rp_discovery_intf}'\n\n configurations.append_line(\n attributes.format(pre_str))\n # reset it back\n pre_str = ''\n\n # ip pim auto-rp forward listen\n if attributes.value('autorp_listener'):\n configurations.append_line(\n attributes.format('ip pim auto-rp forward listen'))\n\n # ==== PIM BSR =======\n # == bsr bsr-candidate ==\n # ip/ipv6 pim bsr forward listen\n # ip/ipv6 pim [bsr] bsr-candidate <bsr_candidate_interface>\n if attributes.value('bsr_candidate_interface'):\n # ip/ipv6 pim bsr forward listen\n configurations.append_line(\n attributes.format('{ip} pim bsr forward listen'))\n\n # ip/ipv6 pim bsr-candidate {bsr_candidate_interface}\n bsr_str = '{ip} pim bsr-candidate '\\\n '{bsr_candidate_interface}'\n\n # + hash-len <bsr_candidate_hash_mask_length>\n if attributes.value('bsr_candidate_hash_mask_length'):\n bsr_str += ' hash-len '\\\n '{bsr_candidate_hash_mask_length}'\n\n # + interval <bsr_candidate_interval>\n if attributes.value('bsr_candidate_interval'):\n bsr_str += ' interval {bsr_candidate_interval}'\n\n # + priority <bsr_candidate_priority>\n if attributes.value('bsr_candidate_priority'):\n bsr_str += ' priority {bsr_candidate_priority}'\n\n configurations.append_line(\n attributes.format(bsr_str))\n\n # == bsr rp-candidate ==\n # ip/ipv6 pim bsr forward listen\n # ip/ipv6 pim [bsr] rp-candidate <bsr_rp_candidate_interface>\n if attributes.value('bsr_rp_candidate_interface'):\n # ip/ipv6 pim bsr forward listen\n configurations.append_line(\n attributes.format('{ip} pim bsr forward listen'))\n\n # ip/ipv6 pim rp-candidate {bsr_rp_candidate_interface}\n bsr_rp_str = '{ip} pim rp-candidate '\\\n '{bsr_rp_candidate_interface}'\n\n # + group-list {bsr_rp_candidate_group_list} |\n # + route-map {bsr_rp_candidate_route_map} |\n # + prefix-list {bsr_rp_candidate_prefix_list}\n if attributes.value('bsr_rp_candidate_group_list'):\n bsr_rp_str += ' group-list'\\\n ' {bsr_rp_candidate_group_list}'\n elif attributes.value('bsr_rp_candidate_route_map'):\n bsr_rp_str += ' route-map'\\\n ' {bsr_rp_candidate_route_map}'\n elif attributes.value('bsr_rp_candidate_prefix_list'):\n bsr_rp_str += ' prefix-list'\\\n ' {bsr_rp_candidate_prefix_list}'\n else:\n bsr_rp_str = ''\n\n if 
bsr_rp_str:\n # +priority <bsr_rp_candidate_priority>\n if attributes.value('bsr_rp_candidate_priority'):\n bsr_rp_str += ' priority '\\\n '{bsr_rp_candidate_priority}'\n\n # +interval <bsr_rp_candidate_interval>\n if attributes.value('bsr_rp_candidate_interval'):\n bsr_rp_str += ' interval {bsr_rp_candidate_interval}'\n\n # +bidir\n if attributes.value('bsr_rp_candidate_bidir'):\n bsr_rp_str += ' bidir'\n\n configurations.append_line(\n attributes.format(bsr_rp_str))\n \n # ip/ipv6 pim register-policy <accept_register>\n if attributes.value('accept_register'):\n configurations.append_line(\n attributes.format(\n '{ip} pim register-policy '\n '{accept_register}'))\n\n # ip pim register-policy prefix-list\n # <accept_register_prefix_list>\n if attributes.value('accept_register_prefix_list') \\\n and self.ip == 'ip':\n configurations.append_line(\n attributes.format(\n 'ip pim register-policy prefix-list '\n '{accept_register_prefix_list}'))\n\n # ip/ipv6 pim log-neighbor-changes\n if attributes.value('log_neighbor_changes'):\n configurations.append_line(\n attributes.format(\n '{ip} pim log-neighbor-changes'))\n\n # ip pim register-source <register_source>\n if attributes.value('register_source') and \\\n self.ip == 'ip':\n configurations.append_line(\n attributes.format(\n 'ip pim register-source {register_source}'))\n \n # ip pim sg-expiry-timer infinity\n if attributes.value('sg_expiry_timer_infinity') and \\\n not attributes.value('sg_expiry_timer_prefix_list') \\\n and not attributes.value('sg_expiry_timer_sg_list'):\n configurations.append_line(\n attributes.format(\n 'ip pim sg-expiry-timer infinity'))\n \n # ip pim sg-expiry-timer <sg_expiry_timer>\n if attributes.value('sg_expiry_timer') and \\\n not attributes.value('sg_expiry_timer_prefix_list') \\\n and not attributes.value('sg_expiry_timer_sg_list'):\n configurations.append_line(\n attributes.format(\n 'ip pim sg-expiry-timer {sg_expiry_timer}'))\n\n # ip pim sg-expiry-timer <sg_expiry_timer>\n # prefix-list <sg_expiry_timer_prefix_list>\n if attributes.value('sg_expiry_timer') and \\\n attributes.value('sg_expiry_timer_prefix_list'):\n configurations.append_line(\n attributes.format(\n 'ip pim sg-expiry-timer {sg_expiry_timer} '\n 'prefix-list {sg_expiry_timer_prefix_list}'))\n\n # ip pim sg-expiry-timer <sg_expiry_timer>\n # sg-list <sg_expiry_timer_sg_list>\n if attributes.value('sg_expiry_timer') and \\\n attributes.value('sg_expiry_timer_sg_list'):\n configurations.append_line(\n attributes.format(\n 'ip pim sg-expiry-timer {sg_expiry_timer} '\n 'sg-list {sg_expiry_timer_sg_list}'))\n\n # ip pim sg-expiry-timer infinity\n # prefix-list <sg_expiry_timer_prefix_list>\n if attributes.value('sg_expiry_timer_infinity') and \\\n attributes.value('sg_expiry_timer_prefix_list'):\n configurations.append_line(\n attributes.format(\n 'ip pim sg-expiry-timer infinity '\n 'prefix-list {sg_expiry_timer_prefix_list}'))\n \n # ip pim sg-expiry-timer infinity\n # sg-list <sg_expiry_timer_sg_list>\n if attributes.value('sg_expiry_timer_infinity') and \\\n attributes.value('sg_expiry_timer_sg_list'):\n configurations.append_line(\n attributes.format(\n 'ip pim sg-expiry-timer infinity '\n 'sg-list {sg_expiry_timer_sg_list}'))\n\n # ip/ipv6 pim spt-threshold infinity group-list\n # <spt_switch_policy>\n if attributes.value('spt_switch_infinity') and \\\n attributes.value('spt_switch_policy'):\n configurations.append_line(\n attributes.format(\n '{ip} pim spt-threshold {spt_switch_infinity.value} '\n 'group-list {spt_switch_policy}'))\n\n # 
ip/ipv6 pim use-shared-tree-only group-list <spt_switch_policy>\n if not attributes.value('spt_switch_infinity') and \\\n attributes.value('spt_switch_policy'):\n configurations.append_line(\n attributes.format(\n '{ip} pim use-shared-tree-only group-list'\n ' {spt_switch_policy}'))\n\n # Static RP address Attributes under top level config\n for groups, attributes2 in attributes.sequence_values(\n 'rp_addresses', sort=True):\n kwargs = {'ip_type': self.ip}\n if unconfig:\n configurations.append_block(groups.build_unconfig(\n apply=False, attributes=attributes2, **kwargs))\n else:\n configurations.append_block(groups.build_config(\n apply=False, attributes=attributes2, **kwargs))\n\n # InterfaceAttributes\n for sub, attributes2 in attributes.mapping_values(\n 'interface_attr', keys=self.interface_attr,\n sort=True):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n \n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None,\n **kwargs):\n return self.build_config(apply=apply,\n attributes=attributes,\n unconfig=True, **kwargs)\n\n class InterfaceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None,\n unconfig=False, **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # if self.vrf_name != 'default':\n # configurations.append_line('exit')\n\n # the interface should have vrf(name = vrf_name) attached\n with configurations.submode_context(\n attributes.format('interface {interface_name}',\n force=True)):\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n if unconfig:\n attributes.attributes['ip'] = self.ip\n\n # interface <intf_name>\n # ip/ipv6 pim sparse-mode\n if attributes.value('mode'):\n configurations.append_line(\n attributes.format('{ip} pim sparse-mode'))\n\n # interface <intf_name>\n # ip/ipv6 pim jp-policy <boundary> [in|out]\n if attributes.value('boundary') and \\\n attributes.value('boundary_in') and \\\n attributes.value('boundary_out'):\n configurations.append_line(\n attributes.format(\n '{ip} pim jp-policy {boundary} in'))\n configurations.append_line(\n attributes.format(\n '{ip} pim jp-policy {boundary} out'))\n elif attributes.value('boundary') and \\\n attributes.value('boundary_in') and \\\n not attributes.value('boundary_out'):\n configurations.append_line(\n attributes.format(\n '{ip} pim jp-policy {boundary} in'))\n elif attributes.value('boundary') and \\\n not attributes.value('boundary_in') and \\\n attributes.value('boundary_out'):\n configurations.append_line(\n attributes.format(\n '{ip} pim jp-policy {boundary} out'))\n elif attributes.value('boundary') and \\\n not attributes.value('boundary_in') and \\\n not attributes.value('boundary_out'):\n configurations.append_line(\n attributes.format(\n '{ip} pim jp-policy {boundary}'))\n\n # interface <intf_name>\n # ip/ipv6 pim border\n if attributes.value('bsr_border'):\n configurations.append_line(\n attributes.format('{ip} pim border'))\n\n # interface <intf_name>\n # ip/ipv6 pim hello-interval {hello_interval}\n if attributes.value('hello_interval'):\n configurations.append_line(\n attributes.format(\n '{ip} pim hello-interval '\n '{hello_interval}'))\n\n # interface <intf_name>\n # ip/ipv6 pim dr-priority {dr_priority}\n if attributes.value('dr_priority'):\n configurations.append_line(\n attributes.format(\n '{ip} pim dr-priority '\n '{dr_priority}'))\n\n # 
interface <intf_name>\n # ip/ipv6 pim neighbor-policy {neighbor_filter}\n if attributes.value('neighbor_filter'):\n configurations.append_line(\n attributes.format(\n '{ip} pim neighbor-policy '\n '{neighbor_filter}'))\n\n # interface <intf_name>\n # ip/ipv6 pim neighbor-policy prefix-list\n # <neighbor_filter_prefix_list>\n if attributes.value('neighbor_filter_prefix_list') \\\n and self.ip == 'ip':\n configurations.append_line(\n attributes.format(\n 'ip pim neighbor-policy prefix-list '\n '{neighbor_filter_prefix_list}'))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None,\n **kwargs):\n return self.build_config(apply=apply,\n attributes=attributes,\n unconfig=True, **kwargs)\n\n", "id": "8146340", "language": "Python", "matching_score": 3.7345328330993652, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/pim/nxos/pim.py" }, { "content": "\"\"\"Implement IOS-XR (iosxr) Specific Configurations for Vrf objects.\n\"\"\"\n\n# Table of contents:\n# class Vrf:\n# class DeviceAttributes:\n# def build_config/build_unconfig:\n# class AddressFamilyAttributes:\n# def build_config/build_unconfig:\n# class RouteTargetAttributes:\n# def build_config/build_unconfig:\n\nfrom abc import ABC\nimport warnings\n\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning,\\\n AttributesHelper\nfrom genie.conf.base.config import CliConfig\nfrom genie.conf.base.cli import CliConfigBuilder\n\n\nclass Vrf(ABC):\n\n class DeviceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n with configurations.submode_context(attributes.format('vrf {name}', force=True)):\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n if attributes.value('shutdown'):\n warnings.warn('vrf shutdown', UnsupportedAttributeWarning)\n\n # iosxr: vrf vrf1 / description some line data\n configurations.append_line(attributes.format('description {description}'))\n\n # iosxr: vrf vrf1 / address-family ipv4 unicast (config-vrf-af)\n for key, sub, attributes2 in attributes.mapping_items(\n 'address_family_attr', keys=self.address_family_attr, sort=True):\n configurations.append_block(\n sub.build_config(apply=False, attributes=attributes2, unconfig=unconfig))\n\n # iosxr: vrf vrf1 / fallback-vrf vrf2\n configurations.append_line(attributes.format('fallback-vrf {fallback_vrf.name}'))\n\n # iosxr: vrf vrf1 / mhost ipv4 default-interface GigabitEthernet0/0/0/0\n configurations.append_line(attributes.format('mhost ipv4 default-interface {mhost_ipv4_default_interface.name}'))\n\n # iosxr: vrf vrf1 / mhost ipv6 default-interface GigabitEthernet0/0/0/0\n configurations.append_line(attributes.format('mhost ipv6 default-interface {mhost_ipv6_default_interface.name}'))\n\n # iosxr: vrf vrf1 / mode big\n configurations.append_line(attributes.format('mode {scale_mode}'))\n\n # iosxr: vrf vrf1 / remote-route-filtering disable\n if attributes.value('remote_route_filtering') is False:\n configurations.append_line('remote-route-filtering disable')\n\n # iosxr: vrf vrf1 / vpn id 0:0\n configurations.append_line(attributes.format('vpn id {vpn_id}'))\n\n if apply:\n if configurations:\n self.device.configure(configurations, fail_invalid=True)\n else:\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations, fail_invalid=True)\n\n def build_unconfig(self, apply=True, 
attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n class AddressFamilyAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n with configurations.submode_context(attributes.format('address-family {address_family.value}', force=True)):\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n if attributes.value('export_vrf') or attributes.value('import_vrf'):\n warnings.warn('vrf export/import vrf', UnsupportedAttributeWarning)\n\n # iosxr: vrf vrf1 / address-family ipv4 unicast / export route-policy <rtepol>\n configurations.append_line(attributes.format('export route-policy {export_route_policy}'))\n\n # iosxr: vrf vrf1 / address-family ipv4 unicast / export route-target 100:200\n # iosxr: vrf vrf1 / address-family ipv4 unicast / export route-target 100:20 stitching\n for v, attributes2 in attributes.sequence_values('export_route_targets'):\n cfg = 'export route-target {}'.format(v.route_target)\n if v.stitching:\n cfg += ' stitching'\n configurations.append_line(cfg)\n\n # iosxr: vrf vrf1 / address-family ipv4 unicast / export to default-vrf route-policy <rtepol>\n configurations.append_line(attributes.format('export to default-vrf route-policy {export_to_default_vrf_route_policy.name}'))\n\n # iosxr: vrf vrf1 / address-family ipv4 unicast / export to vrf allow-imported-vpn\n # iosxr: vrf vrf1 / address-family ipv4 unicast / export to vrf allow-imported-vpn import stitching-rt\n # iosxr: vrf vrf1 / address-family ipv4 unicast / export to vrf import stitching-rt\n if attributes.value('export_to_vrf_allow_imported_vpn') or \\\n attributes.value('export_to_vrf_import_stitching_rt'):\n cfg = 'export to vrf'\n if attributes.value('export_to_vrf_allow_imported_vpn', force=True):\n cfg += ' allow-imported-vpn'\n if attributes.value('export_to_vrf_import_stitching_rt', force=True):\n cfg += ' import stitching-rt'\n configurations.append_line(cfg)\n\n # iosxr: vrf vrf1 / address-family ipv4 unicast / import from default-vrf route-policy <rtepol>\n # iosxr: vrf vrf1 / address-family ipv4 unicast / import from default-vrf route-policy <rtepol> advertise-as-vpn\n v = attributes.value('import_from_default_vrf_route_policy')\n if v is not None:\n cfg = 'import from default-vrf route-policy {}'.format(v)\n if attributes.value('import_from_default_vrf_advertise_as_vpn', force=True):\n cfg += ' advertise-as-vpn'\n configurations.append_line(cfg)\n\n # iosxr: vrf vrf1 / address-family ipv4 unicast / import route-policy <rtepol>\n configurations.append_line(attributes.format('import route-policy {import_route_policy}'))\n\n # iosxr: vrf vrf1 / address-family ipv4 unicast / import route-target 100:200\n # iosxr: vrf vrf1 / address-family ipv4 unicast / import route-target 100:20 stitching\n for v, attributes2 in attributes.sequence_values('import_route_targets'):\n cfg = 'import route-target {}'.format(v.route_target)\n if v.stitching:\n cfg += ' stitching'\n configurations.append_line(cfg)\n\n # iosxr: vrf vrf1 / address-family ipv4 unicast / maximum prefix 32\n # iosxr: vrf vrf1 / address-family ipv4 unicast / maximum prefix 32 1\n cfg = attributes.format('maximum prefix {maximum_prefix}')\n if cfg:\n if attributes.value('maximum_prefix_warning_only', force=True):\n warnings.warn('vrf maximum prefix warning-only', 
UnsupportedAttributeWarning)\n v = attributes.value('maximum_prefix_threshold', force=True)\n if v is not None:\n cfg += ' {}'.format(v)\n if attributes.value('maximum_prefix_reinstall_threshold', force=True):\n warnings.warn('vrf maximum prefix reinstall threshold', UnsupportedAttributeWarning)\n configurations.append_line(cfg)\n\n # loop over all route-target\n for sub, attributes2 in attributes.mapping_values(\n 'route_target_attr', keys=self.route_target_attr, sort=True):\n configurations.append_block(sub.build_config(apply=False,\n attributes=attributes2, unconfig=unconfig, **kwargs))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n\n class RouteTargetAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None,\n unconfig=False, **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # route-target <rt_type> <rt>\n if attributes.value('rt_type'):\n if attributes.value('rt_type').value == 'both':\n configurations.append_line(\n 'import route-target {rt}'.format(rt=self.rt))\n configurations.append_line(\n 'export route-target {rt}'.format(rt=self.rt))\n else:\n configurations.append_line(\n '{type} route-target {rt}'.format(\n rt=self.rt,\n type=attributes.value('rt_type').value))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n", "id": "11032360", "language": "Python", "matching_score": 5.845848083496094, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/vrf/iosxr/vrf.py" }, { "content": "\"\"\"Implement Nexus (nxos) Specific Configurations for Vrf objects.\n\"\"\"\n\n# Table of contents:\n# class Vrf:\n# class DeviceAttributes:\n# def build_config/build_unconfig:\n# class AddressFamilyAttributes:\n# def build_config/build_unconfig:\n# Class RouteTargetAttributes:\n\nfrom abc import ABC\nimport warnings\n\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning,\\\n AttributesHelper\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.config import CliConfig\n\n\nclass Vrf(ABC):\n\n class DeviceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # nxos: vrf context vrf1 (config-vrf)\n with configurations.submode_context(attributes.format('vrf context {name}', force=True)):\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n # nxos: vrf context vrf1 / rd 1.2:1\n v = attributes.value('rd')\n if v is not None:\n # if v != 'auto':\n # v = format(v, 'd.d:d')\n configurations.append_line('rd {}'.format(v))\n\n # nxos: vrf context vrf1 / vni 1-16777214\n if attributes.value('vni'):\n configurations.append_line(attributes.format('vni {vni}'))\n\n # nxos: vrf context vrf1 / address-family ipv4 unicast (config-vrf-af-ipv4)\n # nxos: vrf context vrf1 / address-family ipv6 unicast (config-vrf-af-ipv6)\n for key, sub, attributes2 in attributes.mapping_items(\n 'address_family_attr', keys=self.address_family_attr,\n sort=True):\n configurations.append_block(\n sub.build_config(apply=False, attributes=attributes2, unconfig=unconfig))\n\n # nxos: vrf context vrf1 / amt 
flush-routes\n if attributes.value('amt_flush_routes'):\n configurations.append_line('amt flush-routes')\n\n # nxos: vrf context vrf1 / amt pseudo-interface Ethernet1/1\n configurations.append_line(attributes.format('amt pseudo-interface {amt_pseudo_interface.name}'))\n\n # nxos: vrf context vrf1 / description some line data\n configurations.append_line(attributes.format('description {description}'))\n\n # nxos: vrf context vrf1 / ip ... -> StaticRouting/TODO\n # nxos: vrf context vrf1 / ipv6 ... -> StaticRouting/TODO\n\n # nxos: vrf context vrf1 / shutdown\n if attributes.value('shutdown'):\n configurations.append_line('shutdown')\n\n # nxos: vrf context vrf1 / vni 4096 topology 1\n\n # commented out due to import issue (this is from old configuration)\n # --- ImportError: cannot import name 'ESI'\n # for vni, attributes2 in attributes.sequence_values('vnis'):\n # configurations.append_line('vni {}'.format(vni.vni_id))\n\n if apply:\n if configurations:\n self.device.configure(configurations, fail_invalid=True)\n else:\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n class AddressFamilyAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # nxos: vrf context vrf1 / address-family ipv4 unicast (config-vrf-af-ipv4)\n # nxos: vrf context vrf1 / address-family ipv6 unicast (config-vrf-af-ipv6)\n with configurations.submode_context(attributes.format(\n 'address-family {address_family.value}', force=True)):\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n # nxos: vrf context vrf1 / address-family ipv4 unicast / export map someword\n configurations.append_line(attributes.format('export map {export_route_policy.name}'))\n\n # nxos: vrf context vrf1 / address-family ipv4 unicast / import map someword\n configurations.append_line(attributes.format('import map {import_route_policy.name}'))\n\n # nxos: vrf context vrf1 / address-family ipv4 unicast / import vrf default map someword\n # nxos: vrf context vrf1 / address-family ipv4 unicast / import vrf default 1 map someword\n v = attributes.value('import_from_default_vrf_route_policy')\n if v is not None:\n cfg = 'import vrf default'\n cfg += attributes.format(' {import_from_default_vrf_route_policy_maximum_prefixes}', force=True)\n cfg += ' map {}'.format(v)\n if attributes.value('import_from_default_vrf_advertise_as_vpn'):\n warnings.warn('import vrf default map advertise_as_vpn', UnsupportedAttributeWarning)\n configurations.append_line(cfg)\n\n # nxos: vrf context vrf1 / address-family ipv4 unicast / route-target both 1.2.3.4:1\n # nxos: vrf context vrf1 / address-family ipv4 unicast / route-target both 100:200\n both_route_targets = set(self.export_route_targets) & set(self.import_route_targets)\n\n # nxos: vrf context vrf1 / address-family ipv4 unicast / route-target export 1.2.3.4:1\n # nxos: vrf context vrf1 / address-family ipv4 unicast / route-target export 100:200\n for v, attributes2 in attributes.sequence_values('export_route_targets'):\n if v in both_route_targets:\n cfg = 'route-target both {}'.format(v.route_target)\n else:\n cfg = 'route-target export {}'.format(v.route_target)\n if v.stitching:\n cfg += ' auto 
evpn'\n configurations.append_line(cfg)\n\n # nxos: vrf context vrf1 / address-family ipv4 unicast / route-target import 1.2.3.4:1\n # nxos: vrf context vrf1 / address-family ipv4 unicast / route-target import 100:200\n for v, attributes2 in attributes.sequence_values('import_route_targets'):\n if v in both_route_targets:\n continue # done above\n else:\n cfg = 'route-target import {}'.format(v.route_target)\n if v.stitching:\n cfg += ' auto evpn'\n configurations.append_line(cfg)\n\n # nxos: vrf context vrf1 / address-family ipv4|ipv6 unicast / maximum routes 1\n # nxos: vrf context vrf1 / address-family ipv4|ipv6 unicast / maximum routes 1 1\n # nxos: vrf context vrf1 / address-family ipv4|ipv6 unicast / maximum routes 1 1 reinstall 1\n # nxos: vrf context vrf1 / address-family ipv4|ipv6 unicast / maximum routes 1 warning-only\n cfg = attributes.format('maximum routes {maximum_prefix}')\n if cfg:\n if attributes.value('maximum_prefix_warning_only', force=True):\n cfg += ' warning-only'\n else:\n v = attributes.value('maximum_prefix_threshold', force=True)\n if v is not None:\n cfg += ' {}'.format(v)\n cfg += attributes.format(' reinstall {maximum_prefix_reinstall_threshold}', force=True)\n configurations.append_line(cfg)\n\n # ---------- Genie Team latest Update --------------- #\n # import_from_global_map\n if attributes.value('import_from_global_map'):\n configurations.append_line(\n attributes.format('import vrf default map'\n ' {import_from_global_map}', force=True))\n\n # routing_table_limit_number\n if attributes.value('routing_table_limit_number') and \\\n attributes.value('alert_percent_value'):\n configurations.append_line(\n attributes.format('maximum routes {routing_table_limit_number} '\n '{alert_percent_value}'))\n elif attributes.value('routing_table_limit_number') and \\\n attributes.value('simple_alert'):\n configurations.append_line(\n attributes.format('maximum routes {routing_table_limit_number} '\n 'warning-only'))\n\n # loop over all route-target\n for sub, attributes2 in attributes.mapping_values(\n 'route_target_attr', keys=self.route_target_attr.keys(), sort=True):\n configurations.append_block(sub.build_config(apply=False,\n attributes=attributes2, unconfig=unconfig, **kwargs))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n class RouteTargetAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None,\n unconfig=False, **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n if unconfig:\n if attributes.attributes['rt_type'] == {'both': None} or\\\n attributes.attributes['rt_type'] == {'import': None} or\\\n attributes.attributes['rt_type'] == {'export': None}:\n for key, value in attributes.attributes['rt_type'].items():\n self.tmp_rt_type = key\n else:\n self.tmp_rt_type = attributes.attributes['rt_type']\n\n if not self.tmp_rt_type:\n configurations.append_line(\n 'route-target import {rt}'.format(rt=self.rt))\n configurations.append_line(\n 'route-target export {rt}'.format(rt=self.rt))\n else:\n if self.tmp_rt_type == 'both' and self.rt != \"auto\":\n configurations.append_line(\n 'route-target import {rt}'.format(rt=self.rt), raw=True)\n configurations.append_line(\n 'route-target export {rt}'.format(rt=self.rt), raw=True)\n else:\n # route-target <rt_type> <rt>\n configurations.append_line(\n 'route-target {type} 
{rt}'.format(\n rt=self.rt,\n type=self.tmp_rt_type), raw=True)\n\n # route-target <rt_type> <rt>\n if not unconfig and attributes.value('rt_type'):\n if attributes.value('rt_type').value == 'both' and self.rt != \"auto\":\n configurations.append_line(\n 'route-target import {rt}'.format(rt=self.rt))\n configurations.append_line(\n 'route-target export {rt}'.format(rt=self.rt))\n else:\n # route-target <rt_type> <rt>\n configurations.append_line(\n 'route-target {type} {rt}'.format(\n rt=self.rt,\n type=attributes.value('rt_type').value))\n\n for sub, attributes2 in attributes.mapping_values('protocol_attr',\n sort=True,\n keys=self.protocol_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n\n if apply:\n if configurations:\n self.device.configure(configurations)\n else:\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations)\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n class ProtocolAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None,\n unconfig=False, **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n # route-target <rt_type> <rt> mvpn\n if attributes.value('rt_mvpn'):\n self.protocol = 'mvpn'\n # route-target <rt_type> <rt> evpn\n if attributes.value('rt_evpn'):\n self.protocol = 'evpn'\n\n if unconfig:\n if self.protocol:\n if self.tmp_rt_type:\n configurations.append_line(\n 'route-target {rt_type} {rt} {protocol}'.format(\n rt_type=self.tmp_rt_type,\n rt=self.rt,\n protocol=self.protocol))\n\n if not unconfig and self.protocol:\n configurations.append_line(\n 'route-target {rt_type} {rt} {protocol}'.format(\n rt_type=attributes.value('rt_type').value,\n rt=self.rt,\n protocol=self.protocol))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)", "id": "9939600", "language": "Python", "matching_score": 7.162151336669922, "max_stars_count": 0, "path": "pkgs/conf-pkg/src/genie/libs/conf/vrf/nxos/vrf.py" }, { "content": "\"\"\"Implement IOSXE Specific Configurations for Vrf objects.\n\"\"\"\n\n# Table of contents:\n# class Vrf:\n# class DeviceAttributes:\n# def build_config/build_unconfig:\n# class AddressFamilyAttributes:\n# def build_config/build_unconfig:\n# Class RouteTargetAttributes:\n\nfrom abc import ABC\nimport warnings\n\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning,\\\n AttributesHelper\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.config import CliConfig\n\n\nclass Vrf(ABC):\n\n class DeviceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n assert not kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n with configurations.submode_context(attributes.format('vrf definition {name}', force=True)):\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n if attributes.value('shutdown'):\n warnings.warn('vrf shutdown', UnsupportedAttributeWarning)\n\n # iosxe: vrf definition vrf1 / vpn id 0:0\n 
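# the vpn id / description / rd lines below are emitted inside the\n # 'vrf definition {name}' submode opened above\n 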
configurations.append_line(attributes.format('vpn id {vpn_id}'))\n # iosxe: vrf definition vrf1 / description some line data\n configurations.append_line(attributes.format('description {description}'))\n configurations.append_line(attributes.format('rd {rd}'))\n\n # iosxe: vrf definition vrf1 / address-family ipv4 unicast (config-vrf-af)\n for key, sub, attributes2 in attributes.mapping_items(\n 'address_family_attr', keys=self.address_family_attr, sort=True):\n configurations.append_block(\n sub.build_config(apply=False, attributes=attributes2, unconfig=unconfig))\n\n if apply:\n if configurations:\n self.device.configure(configurations)\n else:\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n class AddressFamilyAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n assert not apply\n assert not kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n with configurations.submode_context(attributes.format(\n 'address-family {address_family.value}', force=True)):\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n # import_from_global_map\n if attributes.value('import_from_global_map'):\n configurations.append_line(\n attributes.format('import {address_family.value} '\n 'map {import_from_global_map}', force=True))\n\n # export_to_global_map\n if attributes.value('export_to_global_map'):\n configurations.append_line(\n attributes.format('export {address_family.value} '\n 'map {export_to_global_map}', force=True))\n\n # routing_table_limit_number\n if attributes.value('routing_table_limit_number') and \\\n attributes.value('alert_percent_value'):\n configurations.append_line(\n attributes.format('maximum routes {routing_table_limit_number} '\n '{alert_percent_value}'))\n elif attributes.value('routing_table_limit_number') and \\\n attributes.value('simple_alert'):\n configurations.append_line(\n attributes.format('maximum routes {routing_table_limit_number} '\n 'warning-only'))\n\n # keep old handle\n if self.address_family.value == 'ipv4 unicast':\n if attributes.value('export_route_targets'):\n for v, attributes3 in attributes.sequence_values('export_route_targets'):\n configurations.append_line('route-target export {}'.format(v.route_target))\n\n if attributes.value('import_route_targets'):\n for v, attributes3 in attributes.sequence_values('import_route_targets'):\n configurations.append_line('route-target import {}'.format(v.route_target))\n \n if attributes.value('maximum_routes'):\n configurations.append_line(attributes.format('maximum routes {maximum_routes}'))\n\n\n # loop over all route-target\n for sub, attributes2 in attributes.mapping_values(\n 'route_target_attr', keys=self.route_target_attr.keys(), sort=True):\n configurations.append_block(sub.build_config(apply=False,\n attributes=attributes2, unconfig=unconfig, **kwargs))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n class RouteTargetAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None,\n unconfig=False, **kwargs):\n assert not apply\n attributes = 
AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # route-target <rt_type> <rt>\n if attributes.value('rt_type'):\n if attributes.value('rt_type').value == 'both':\n configurations.append_line(\n 'route-target import {rt}'.format(rt=self.rt))\n configurations.append_line(\n 'route-target export {rt}'.format(rt=self.rt))\n else:\n configurations.append_line(\n 'route-target {type} {rt}'.format(\n rt=self.rt,\n type=attributes.value('rt_type').value))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n", "id": "8120781", "language": "Python", "matching_score": 4.353635787963867, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/vrf/iosxe/vrf.py" }, { "content": "\nfrom abc import ABC\nimport warnings\n\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning,\\\n AttributesHelper\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.config import CliConfig\nfrom genie.libs.conf.address_family import AddressFamily, AddressFamilySubAttributes\n\n\nclass SegmentRouting(ABC):\n\n class DeviceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # iosxe: segment-routing mpls\n with configurations.submode_context('segment-routing mpls'):\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n if attributes.value('shutdown'):\n configurations.append_line('shutdown')\n\n # iosxe: segment-routing mpls / set-attributes\n with configurations.submode_context('set-attributes',cancel_empty=True):\n for address_family,address_family_sub,address_family_attributes in \\\n attributes.mapping_items('address_family_attr', keys=self.address_families, sort=True):\n\n if address_family == AddressFamily.ipv4_unicast:\n context_cli = 'address-family ipv4'\n else:\n context_cli = address_family_attributes.format('address-family {address_family.value}', force = True)\n\n with configurations.submode_context(context_cli,cancel_empty=True):\n\n if address_family_attributes.value('sr_label_preferred'):\n configurations.append_line('sr-label-preferred')\n\n if address_family_attributes.value('explicit_null'):\n configurations.append_line('explicit-null')\n\n # iosxe: segment-routing mpls / global-block 16000 16001\n v = attributes.value('global_block')\n if v is not None:\n configurations.append_line('global-block {first} {last}'.format(\n first=v.start,\n last=v[-1]))\n\n # iosxe: segment-routing mpls / connected-prefix-sid-map\n with configurations.submode_context('connected-prefix-sid-map',cancel_empty=True):\n\n for address_family,address_family_sub,address_family_attributes in \\\n attributes.mapping_items('address_family_attr', keys=self.address_families, sort=True):\n\n if address_family == AddressFamily.ipv4_unicast:\n context_cli = 'address-family ipv4'\n else:\n context_cli = address_family_attributes.format('address-family {address_family.value}', force = True)\n\n with configurations.submode_context(context_cli,cancel_empty=True):\n\n for entry,attributes2 in address_family_attributes.sequence_values('connected_prefix_sid_map'):\n configurations.append_line(attributes2.format('{prefix} index {index} range {range}'))\n\n # iosxe: segment-routing mpls / mapping-server / 
prefix-sid-map\n if attributes.value('mapping_server'):\n with configurations.submode_context('mapping server'):\n with configurations.submode_context('prefix-sid-map',cancel_empty=True):\n\n for address_family,address_family_sub,address_family_attributes in \\\n attributes.mapping_items('address_family_attr', keys=self.address_families, sort=True):\n\n if address_family == AddressFamily.ipv4_unicast:\n context_cli = 'address-family ipv4'\n else:\n context_cli = address_family_attributes.format('address-family {address_family.value}', force = True)\n\n with configurations.submode_context(context_cli,cancel_empty=True):\n\n for entry,attributes2 in address_family_attributes.sequence_values('prefix_sid_map'):\n if attributes2.value('attach'):\n configurations.append_line(attributes2.format('{prefix} index {index} range {range} attach'))\n else:\n configurations.append_line(attributes2.format('{prefix} index {index} range {range}'))\n\n if apply:\n if configurations:\n self.device.configure(configurations, fail_invalid=True)\n else:\n #return str(configurations)\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations, fail_invalid=True)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n", "id": "3557637", "language": "Python", "matching_score": 4.075265884399414, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/segment_routing/iosxe/segment_routing.py" }, { "content": "\nfrom abc import ABC\nimport warnings\n\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning,\\\n AttributesHelper\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.config import CliConfig\n\nfrom genie.libs.conf.route_policy import RoutePolicy\n\n\nclass SegmentRouting(ABC):\n\n class DeviceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # iosxr: segment-routing (config-sr)\n with configurations.submode_context('segment-routing'):\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n # iosxr: segment-routing / global-block 16000 16001\n v = attributes.value('global_block')\n if v is not None:\n configurations.append_line('global-block {first} {last}'.format(\n first=v.start,\n last=v[-1]))\n\n if apply:\n if configurations:\n self.device.configure(configurations, fail_invalid=True)\n else:\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations, fail_invalid=True)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n # TODO\n # iosxr: segment-routing / mapping-server (config-sr-ms)\n # iosxr: segment-routing / mapping-server / prefix-sid-map (config-sr-ms-map)\n # iosxr: segment-routing / mapping-server / prefix-sid-map / address-family ipv4 (config-sr-ms-map-af)\n # iosxr: segment-routing / mapping-server / prefix-sid-map / address-family ipv4 / 172.16.17.32/24 <0-1048575>\n # iosxr: segment-routing / mapping-server / prefix-sid-map / address-family ipv4 / 172.16.17.32/24 <0-1048575> attached\n # iosxr: segment-routing / mapping-server / prefix-sid-map / address-family ipv4 / 172.16.17.32/24 <0-1048575> range <0-1048575>\n # iosxr: segment-routing / mapping-server / prefix-sid-map / 
address-family ipv4 / 172.16.17.32/24 <0-1048575> range <0-1048575> attached\n # iosxr: segment-routing / mapping-server / prefix-sid-map / address-family ipv6 (config-sr-ms-map-af)\n # iosxr: segment-routing / mapping-server / prefix-sid-map / address-family ipv6 / fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b/128 <0-1048575>\n # iosxr: segment-routing / mapping-server / prefix-sid-map / address-family ipv6 / fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b/128 <0-1048575> attached\n # iosxr: segment-routing / mapping-server / prefix-sid-map / address-family ipv6 / fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b/128 <0-1048575> range <0-1048575>\n # iosxr: segment-routing / mapping-server / prefix-sid-map / address-family ipv6 / fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b/128 <0-1048575> range <0-1048575> attached\n\n", "id": "12455033", "language": "Python", "matching_score": 1.8364046812057495, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/segment_routing/iosxr/segment_routing.py" }, { "content": "'''\r\nNXOS specific configurations for IPv4Addr feature object.\r\n'''\r\n\r\n# Python\r\nfrom abc import ABC\r\n\r\n# Genie\r\nfrom genie.conf.base.cli import CliConfigBuilder\r\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning,\\\r\n AttributesHelper\r\n\r\n\r\nclass IPv4Addr(ABC):\r\n\r\n def build_config(self, apply=True, attributes=None, unconfig=False,\r\n **kwargs):\r\n assert not kwargs, kwargs\r\n assert not apply\r\n attributes = AttributesHelper(self, attributes)\r\n configurations = CliConfigBuilder(unconfig=unconfig)\r\n\r\n # ===================================\r\n # ipv4\r\n # prefix_length\r\n # ipv4_secondary\r\n # route_tag\r\n # secondary_vrf\r\n # ===================================\r\n\r\n if attributes.value('ipv4') and attributes.value('prefix_length'):\r\n if attributes.value('ipv4_secondary'):\r\n configurations.append_line('ip address'\r\n ' {ipv4}/{prefix_length} secondary'\r\n .format(ipv4=attributes.value('ipv4'),\r\n prefix_length=attributes.value('prefix_length')))\r\n if attributes.value('tag'):\r\n configurations.append_line('ip address'\r\n ' {ipv4}/{prefix_length} secondary'\r\n ' tag {route_tag}'\r\n .format(ipv4=attributes.value('ipv4'), \r\n prefix_length=attributes.value('prefix_length'),\r\n route_tag=attributes.value('route_tag')))\r\n elif attributes.value('tag'):\r\n configurations.append_line('ip address'\r\n ' {ipv4}/{prefix_length}'\r\n ' tag {route_tag}'\r\n .format(ipv4=attributes.value('ipv4'), \r\n prefix_length=attributes.value('prefix_length'),\r\n route_tag=attributes.value('route_tag')))\r\n else:\r\n configurations.append_line('ip address'\r\n ' {ipv4}/{prefix_length}'\r\n .format(ipv4=attributes.value('ipv4'), \r\n prefix_length=attributes.value('prefix_length')))\r\n\r\n return str(configurations)\r\n\r\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\r\n return self.build_config(apply=apply, attributes=attributes,\r\n unconfig=True)\r\n", "id": "11804065", "language": "Python", "matching_score": 3.3443822860717773, "max_stars_count": 0, "path": "pkgs/conf-pkg/src/genie/libs/conf/interface/nxos/ipv4addr.py" }, { "content": "'''\r\nIOSXE specific configurations for IPv4Addr feature object.\r\n'''\r\n\r\n# Python\r\nfrom abc import ABC\r\nfrom ipaddress import IPv4Network\r\n\r\n# Genie\r\nfrom genie.conf.base.cli import CliConfigBuilder\r\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning,\\\r\n AttributesHelper\r\n\r\n\r\nclass IPv4Addr(ABC):\r\n\r\n def build_config(self, apply=True, 
attributes=None, unconfig=False,\r\n **kwargs):\r\n assert not kwargs, kwargs\r\n assert not apply\r\n attributes = AttributesHelper(self, attributes)\r\n configurations = CliConfigBuilder(unconfig=unconfig)\r\n\r\n # ===================================\r\n # ipv4\r\n # prefix_length\r\n # ipv4_secondary\r\n # secondary_vrf\r\n # ===================================\r\n\r\n if attributes.value('ipv4') and attributes.value('prefix_length'):\r\n # convert prefix_length to netmask\r\n ret = IPv4Network('1.1.1.1/{}'.format(\r\n attributes.value('prefix_length')), strict=False)\r\n mask = ret.with_netmask.split('/')[1]\r\n\r\n if attributes.value('ipv4_secondary'):\r\n configurations.append_line('ip address'\r\n ' {ipv4} {prefix_length} secondary'\r\n .format(ipv4=attributes.value('ipv4'),\r\n prefix_length=mask))\r\n if attributes.value('secondary_vrf'):\r\n configurations.append_line('ip address'\r\n ' {ipv4} {prefix_length} secondary'\r\n ' vrf {secondary_vrf}'\r\n .format(ipv4=attributes.value('ipv4'), \r\n prefix_length=mask,\r\n secondary_vrf=attributes.value('secondary_vrf')))\r\n else:\r\n configurations.append_line('ip address'\r\n ' {ipv4} {prefix_length}'\r\n .format(ipv4=attributes.value('ipv4'), \r\n prefix_length=mask))\r\n\r\n return str(configurations)\r\n\r\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\r\n return self.build_config(apply=apply, attributes=attributes,\r\n unconfig=True)\r\n", "id": "7324749", "language": "Python", "matching_score": 1.6939527988433838, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/interface/iosxe/ipv4addr.py" }, { "content": "'''\nIOSXE specific configurations for Mroute feature object.\n'''\n\n# Python\nimport warnings\nfrom abc import ABC\nfrom ipaddress import IPv4Network\n\n# Genie\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning,\\\n AttributesHelper\n\n\nclass Mroute(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n # assert not kwargs, kwargs\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # ===================================\n # mroute_address\n # mroute_prefix_mask\n # mroute_neighbor_address\n # mroute_interface_name\n # mroute_admin_distance\n # ===================================\n\n # get vrf and address_family info\n vrf = kwargs['vrf']\n af = kwargs['af_name']\n\n begin_with = {'ip': 'ip mroute', 'ipv6': 'ipv6 route'}\n\n if vrf == 'default':\n cmd_str = begin_with[af]\n else:\n cmd_str = begin_with[af] + ' vrf {}'.format(vrf)\n\n if attributes.value('mroute_address') and \\\n attributes.value('mroute_prefix_mask'):\n\n if af == 'ip':\n # convert prefix_length to netmask\n ret = IPv4Network('1.1.1.1/{}'.format(\n attributes.value('mroute_prefix_mask')), strict=False)\n mask = ret.with_netmask.split('/')[1]\n address = '{addr} {mask}'.format(addr=attributes.value('mroute_address'),\n mask=mask)\n else:\n address = '{addr}/{mask}'.format(\n addr=attributes.value('mroute_address'),\n mask=attributes.value('mroute_prefix_mask'))\n\n # build up configuration string \n if attributes.value('mroute_neighbor_address'):\n cmd_str += ' {address} {nei}'.format(\n address=address,\n nei=attributes.value('mroute_neighbor_address'))\n\n elif attributes.value('mroute_interface_name'):\n cmd_str += ' {address} {int}'.format(\n address=address,\n int=attributes.value('mroute_interface_name'))\n else:\n cmd_str = ''\n \n 
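# append the admin distance only when a route spec was built above\n 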
if attributes.value('mroute_admin_distance') and cmd_str:\n cmd_str += ' ' + str(attributes.value('mroute_admin_distance'))\n\n configurations.append_line(cmd_str)\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n", "id": "121329", "language": "Python", "matching_score": 2.812972068786621, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/mcast/iosxe/mroute.py" }, { "content": "__all__ = (\n 'Mroute',\n)\n\n# Python\nimport weakref\nimport functools\n\n# Genie\nfrom genie.decorator import managedattribute\nfrom genie.conf.base import ConfigurableBase\nfrom genie.libs.conf.base import IPv4Address, IPv6Address, IPv4Interface, IPv6Interface\n\n\n@functools.total_ordering\nclass Mroute(ConfigurableBase):\n\n @property\n def testbed(self):\n return self.device.testbed\n\n @property\n def device(self):\n return self._device()\n\n # mroute_address\n mroute_address = managedattribute(\n name='mroute_ip_address',\n default=None,\n type=(None, IPv4Address, IPv6Address),\n doc=\"Configure 'ip mroute' or 'ipv6 mroute' on the device.\")\n\n # mroute_prefix_mask\n mroute_prefix_mask = managedattribute(\n name='mroute_prefix_mask',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc=\"Configure 'ip/ipv6 mroute' prefix mask on the device.\")\n\n # mroute_neighbor_address\n mroute_neighbor_address = managedattribute(\n name='mroute_neighbor_address',\n default=None,\n type=(None, IPv4Address, IPv6Address),\n doc=\"Configure 'ip/ipv6 mroute' neighbor address on the device.\")\n \n # mroute_interface_name\n mroute_interface_name = managedattribute(\n name='mroute_interface_name',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc=\"Configure 'ip/ipv6 mroute' interface name on the device.\")\n \n # mroute_admin_distance\n mroute_admin_distance = managedattribute(\n name='mroute_admin_distance',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc=\"Configure 'ip/ipv6 mroute' admin distance on the device.\")\n\n # mroute_vrf\n mroute_vrf = managedattribute(\n name='mroute_vrf',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc=\"Configure 'ip/ipv6 mroute' VRF on the device.\")\n\n # ==========================================================================\n\n # Overload __eq__\n def __eq__(self, other):\n if not isinstance(other, Mroute):\n return NotImplemented\n \n return (self.mroute_address,\n self.mroute_prefix_mask,\n self.mroute_neighbor_address,\n self.mroute_admin_distance,\n self.mroute_vrf,\n self.mroute_interface_name,\n self.device) == \\\n (other.mroute_address,\n other.mroute_prefix_mask,\n other.mroute_neighbor_address,\n other.mroute_admin_distance,\n other.mroute_vrf,\n other.mroute_interface_name,\n other.device)\n\n # Overload __lt__\n def __lt__(self, other):\n if not isinstance(other, Mroute):\n raise NotImplementedError(\"Cannot compare '{s}' to a '{o}'\".format(s=type(self), o=type(other)))\n\n # Comparing same types (both v4 or both v6)\n if type(self.mroute_address) == type(other.mroute_address):\n return self.mroute_address < other.mroute_address\n # Comparing mismatched types\n else:\n self_addr = str(self.mroute_address)\n other_addr = str(other.mroute_address)\n return self_addr < other_addr\n \n # Overload __hash__\n def __hash__(self):\n return hash((self.mroute_address,\n self.mroute_prefix_mask,\n self.mroute_neighbor_address,\n 
self.mroute_admin_distance,\n self.mroute_vrf,\n self.mroute_interface_name,\n self.device))\n\n # Overload __repr__\n def __repr__(self):\n if isinstance(self.mroute_address, IPv6Address):\n return '%s object at 0x%x with ipv6 address %s/%s' % (\n self.__class__.__name__,\n id(self),\n self.mroute_address,\n self.mroute_prefix_mask)\n else:\n return '%s object at 0x%x with ip address %s/%s' % (\n self.__class__.__name__,\n id(self),\n self.mroute_address,\n self.mroute_prefix_mask)\n\n\n def __init__(self, device, *args, **kwargs):\n self._device = weakref.ref(device)\n super().__init__(*args, **kwargs)", "id": "7957185", "language": "Python", "matching_score": 2.7963554859161377, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/mcast/mroute.py" }, { "content": "'''\nNXOS specific configurations for Mroute feature object.\n'''\n\n# Python\nimport warnings\nfrom abc import ABC\n\n# Genie\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning,\\\n AttributesHelper\n\n\nclass Mroute(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # ===================================\n # mroute_address/mroute_prefix_mask\n # mroute_neighbor_address\n # mroute_interface_name\n # mroute_admin_distance\n # mroute_vrf\n # ===================================\n\n # Get vrf and address_family info\n vrf = kwargs['vrf']\n af_name = kwargs['af_name']\n\n if af_name == 'ipv4':\n af_key = 'ip'\n else:\n af_key = 'ipv6'\n\n if attributes.value('mroute_address') and \\\n attributes.value('mroute_prefix_mask') and \\\n attributes.value('mroute_neighbor_address') and \\\n attributes.value('mroute_admin_distance') and \\\n attributes.value('mroute_vrf'):\n configurations.append_line('{af_key} mroute'\n ' {mroute_address}/{mroute_prefix_mask}'\n ' {mroute_neighbor_address} {mroute_admin_distance}'\n ' vrf {mroute_vrf}'.format(af_key=af_key,\n mroute_address=attributes.value('mroute_address'), \n mroute_prefix_mask=attributes.value('mroute_prefix_mask'),\n mroute_neighbor_address=\\\n attributes.value('mroute_neighbor_address'),\n mroute_admin_distance=\\\n attributes.value('mroute_admin_distance'),\n mroute_vrf=attributes.value('mroute_vrf')))\n\n elif attributes.value('mroute_address') and \\\n attributes.value('mroute_prefix_mask') and \\\n attributes.value('mroute_neighbor_address') and \\\n attributes.value('mroute_admin_distance'):\n configurations.append_line('{af_key} mroute'\n ' {mroute_address}/{mroute_prefix_mask}'\n ' {mroute_neighbor_address}'\n ' {mroute_admin_distance}'.format(af_key=af_key,\n mroute_address=attributes.value('mroute_address'), \n mroute_prefix_mask=attributes.value('mroute_prefix_mask'),\n mroute_neighbor_address=\\\n attributes.value('mroute_neighbor_address'),\n mroute_admin_distance=\\\n attributes.value('mroute_admin_distance')))\n \n elif attributes.value('mroute_address') and \\\n attributes.value('mroute_prefix_mask') and \\\n attributes.value('mroute_neighbor_address'):\n configurations.append_line('{af_key} mroute'\n ' {mroute_address}/{mroute_prefix_mask}'\n ' {mroute_neighbor_address}'.format(af_key=af_key,\n mroute_address=attributes.value('mroute_address'), \n mroute_prefix_mask=\\\n attributes.value('mroute_prefix_mask'),\n mroute_neighbor_address=\\\n attributes.value('mroute_neighbor_address')))\n \n elif 
attributes.value('mroute_address') and \\\n attributes.value('mroute_prefix_mask') and \\\n attributes.value('mroute_interface_name') and \\\n attributes.value('mroute_admin_distance') and \\\n attributes.value('mroute_vrf'):\n configurations.append_line('{af_key} mroute'\n ' {mroute_address}/{mroute_prefix_mask}'\n ' {mroute_interface_name} {mroute_admin_distance}'\n ' vrf {mroute_vrf}'.format(af_key=af_key,\n mroute_address=attributes.value('mroute_address'), \n mroute_prefix_mask=attributes.value('mroute_prefix_mask'),\n mroute_interface_name=\\\n attributes.value('mroute_interface_name'),\n mroute_admin_distance=\\\n attributes.value('mroute_admin_distance'),\n mroute_vrf=attributes.value('mroute_vrf')))\n\n elif attributes.value('mroute_address') and \\\n attributes.value('mroute_prefix_mask') and \\\n attributes.value('mroute_interface_name') and \\\n attributes.value('mroute_admin_distance'):\n configurations.append_line('{af_key} mroute'\n ' {mroute_address}/{mroute_prefix_mask}'\n ' {mroute_interface_name} {mroute_admin_distance}'.\\\n format(af_key=af_key,\n mroute_address=attributes.value('mroute_address'), \n mroute_prefix_mask=attributes.value('mroute_prefix_mask'),\n mroute_interface_name=\\\n attributes.value('mroute_interface_name'),\n mroute_admin_distance=\\\n attributes.value('mroute_admin_distance')))\n\n elif attributes.value('mroute_address') and \\\n attributes.value('mroute_prefix_mask') and \\\n attributes.value('mroute_interface_name'):\n configurations.append_line('{af_key} mroute'\n ' {mroute_address}/{mroute_prefix_mask}'\n ' {mroute_interface_name}'.format(af_key=af_key,\n mroute_address=attributes.value('mroute_address'), \n mroute_prefix_mask=\\\n attributes.value('mroute_prefix_mask'),\n mroute_interface_name=\\\n attributes.value('mroute_interface_name')))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n", "id": "4463748", "language": "Python", "matching_score": 4.648331165313721, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/mcast/nxos/mroute.py" }, { "content": "'''\nIOSXR specific configurations for Mroute feature object.\n'''\n\n# Python\nimport warnings\nfrom abc import ABC\n\n# Genie\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning,\\\n AttributesHelper\n\n\nclass Mroute(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # ===================================\n # mroute_address/mroute_prefix_mask\n # mroute_interface_name\n # mroute_neighbor_address\n # ===================================\n\n if attributes.value('mroute_address') and \\\n attributes.value('mroute_prefix_mask') and \\\n attributes.value('mroute_interface_name') and \\\n attributes.value('mroute_neighbor_address'):\n\n # Final config string\n configurations.append_line(attributes.format(\n 'static-rpf {mroute_address} {mroute_prefix_mask}'\n ' {mroute_interface_name} {mroute_neighbor_address}'))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n", "id": "4608774", "language": "Python", "matching_score": 1.1990984678268433, "max_stars_count": 94, "path": 
"pkgs/conf-pkg/src/genie/libs/conf/mcast/iosxr/mroute.py" }, { "content": "\n# import python\nfrom abc import ABC\n\n# import genie\nfrom genie.conf.base.config import CliConfig\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning, \\\n AttributesHelper\n\n\nclass Lldp(ABC):\n\n class DeviceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n \n # lldp run\n if attributes.value('enabled'):\n configurations.append_line(\n attributes.format('lldp run'))\n \n # lldp timer <hello_timer>\n if attributes.value('hello_timer'):\n configurations.append_line(\n attributes.format('lldp timer {hello_timer}'))\n \n # lldp holdtime <hold_timer>\n if attributes.value('hold_timer'):\n configurations.append_line(\n attributes.format('lldp holdtime {hold_timer}'))\n \n # lldp reinit <reinit_timer>\n if attributes.value('reinit_timer'):\n configurations.append_line(\n attributes.format('lldp reinit {reinit_timer}'))\n\n # tlv select attributes\n sub, attributes2 = attributes.namespace('tlv_select_attr')\n if sub is not None:\n configurations.append_block(\n sub.build_config(apply=False, attributes=attributes2,\n unconfig=unconfig))\n\n # interface attributes\n for sub, attributes2 in attributes.mapping_values('interface_attr',\n sort=True, keys=self.interface_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n\n if apply:\n if configurations:\n self.device.configure(configurations)\n else:\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n class TlvSelectAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n \n # no lldp tlv-select port-description\n if attributes.value('suppress_tlv_port_description'):\n configurations.append_line(\n attributes.format('no lldp tlv-select port-description'),\n unconfig_cmd='lldp tlv-select port-description')\n if attributes.value('suppress_tlv_port_description') == False:\n configurations.append_line(\n attributes.format('lldp tlv-select port-description'))\n\n # no lldp tlv-select system-name\n if attributes.value('suppress_tlv_system_name'):\n configurations.append_line(\n attributes.format('no lldp tlv-select system-name'),\n unconfig_cmd='lldp tlv-select system-name')\n if attributes.value('suppress_tlv_system_name') == False:\n configurations.append_line(\n attributes.format('lldp tlv-select system-name'))\n\n # no lldp tlv-select system-description\n if attributes.value('suppress_tlv_system_description'):\n configurations.append_line(\n attributes.format('no lldp tlv-select system-description'),\n unconfig_cmd='lldp tlv-select system-description')\n if attributes.value('suppress_tlv_system_description') == False:\n configurations.append_line(\n attributes.format('lldp tlv-select system-description'))\n\n # no lldp tlv-select system-capabilities\n if attributes.value('suppress_tlv_system_capabilities'):\n configurations.append_line(\n attributes.format('no lldp 
tlv-select system-capabilities'),\n unconfig_cmd='lldp tlv-select system-capabilities')\n if attributes.value('suppress_tlv_system_capabilities') == False:\n configurations.append_line(\n attributes.format('lldp tlv-select system-capabilities'))\n\n # no lldp tlv-select port-description\n if attributes.value('suppress_tlv_management_address'):\n configurations.append_line(\n attributes.format('no lldp tlv-select management-address'),\n unconfig_cmd='lldp tlv-select management-address')\n if attributes.value('suppress_tlv_management_address') == False:\n configurations.append_line(\n attributes.format('lldp tlv-select management-address'))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n class InterfaceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None,\n unconfig=False, **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n with configurations.submode_context(\n attributes.format('interface {intf}', force=True)): \n # lldp transmit\n # lldp receive\n if attributes.value('if_enabled'):\n configurations.append_line(\n attributes.format('lldp transmit'))\n configurations.append_line(\n attributes.format('lldp receive'))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None,\n **kwargs):\n return self.build_config(apply=apply,\n attributes=attributes,\n unconfig=True, **kwargs)", "id": "3145376", "language": "Python", "matching_score": 4.624550819396973, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/lldp/iosxe/lldp.py" }, { "content": "\n__all__ = (\n 'Lldp',\n)\n\n# import genie\nfrom genie.decorator import managedattribute\nfrom genie.conf.base import ConfigurableBase\nfrom genie.conf.base.config import CliConfig\nfrom genie.conf.base.base import DeviceFeature, InterfaceFeature\nfrom genie.conf.base.attributes import DeviceSubAttributes,\\\n SubAttributesDict,\\\n AttributesHelper, \\\n KeyedSubAttributes, SubAttributes\n# import genie.libs\nfrom genie.conf.base.attributes import InterfaceSubAttributes\n\n\n# Structure\n# Lldp\n# +- Device\n# +- TlvSelectAttributes\n# +- InterfaceAttributes\n\n\nclass ConfigurableTlvNamespace(ConfigurableBase):\n\n def __init__(self, tlv):\n self._tlv = tlv\n\n _tlv = None\n\n @property\n def tlv(self):\n return self._tlv\n\n @property\n def testbed(self):\n return self.tlv.testbed\n\n @property\n def device(self):\n return self.tlv.device\n\n\nclass Lldp(DeviceFeature, InterfaceFeature):\n\n # device attributes\n enabled = managedattribute(\n name='enabled',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n hello_timer = managedattribute(\n name='hello_timer',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n hold_timer = managedattribute(\n name='hold_timer',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n reinit_timer = managedattribute(\n name='reinit_timer',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n\n # interface attributes\n interface = managedattribute(\n name='interface',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n if_enabled = managedattribute(\n name='if_enabled',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n class DeviceAttributes(DeviceSubAttributes):\n\n \n class 
InterfaceAttributes(InterfaceSubAttributes):\n\n def __init__(self, parent, key):\n self.intf = key\n super().__init__(parent, key)\n \n \n interface_attr = managedattribute(\n name='interface_attr',\n read_only=True,\n doc=InterfaceAttributes.__doc__)\n\n @interface_attr.initter\n def interface_attr(self):\n return SubAttributesDict(\n self.InterfaceAttributes, parent=self)\n\n class TlvSelectAttributes(ConfigurableTlvNamespace): \n # tlvSelect attributes\n suppress_tlv_chassis_id = managedattribute(\n name='suppress_tlv_chassis_id',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n suppress_tlv_port_id = managedattribute(\n name='suppress_tlv_port_id',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n suppress_tlv_port_description = managedattribute(\n name='suppress_tlv_port_description',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n suppress_tlv_system_name = managedattribute(\n name='suppress_tlv_system_name',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n suppress_tlv_system_description = managedattribute(\n name='suppress_tlv_system_description',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n \n suppress_tlv_system_capabilities = managedattribute(\n name='suppress_tlv_system_capabilities',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n \n suppress_tlv_management_address = managedattribute(\n name='suppress_tlv_management_address',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n tlv_select_attr = managedattribute(\n name='tlv_select_attr',\n read_only=True,\n doc=TlvSelectAttributes.__doc__)\n\n @tlv_select_attr.initter\n def tlv_select_attr(self):\n return self.TlvSelectAttributes(tlv=self)\n\n device_attr = managedattribute(\n name='device_attr',\n read_only=True,\n doc=DeviceAttributes.__doc__)\n\n @device_attr.initter\n def device_attr(self):\n return SubAttributesDict(self.DeviceAttributes, parent=self)\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n def build_config(self, devices=None, apply=True, attributes=None,\n **kwargs):\n cfgs = {}\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n\n if devices is None:\n devices = self.devices\n devices = set(devices)\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr',\n keys=devices, sort=True):\n cfgs[key] = sub.build_config(apply=False, attributes=attributes2)\n\n if apply:\n self.testbed.config_on_devices(cfgs, fail_invalid=True)\n else:\n return cfgs\n\n def build_unconfig(self, devices=None, apply=True, attributes=None,\n **kwargs):\n cfgs = {}\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n\n if devices is None:\n devices = self.devices\n devices = set(devices)\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr',\n keys=devices, sort=True):\n cfgs[key] = sub.build_unconfig(apply=False, attributes=attributes2)\n\n if apply:\n self.testbed.config_on_devices(cfgs, fail_invalid=True)\n else:\n return cfgs\n", "id": "11093285", "language": "Python", "matching_score": 3.31847882270813, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/lldp/lldp.py" }, { "content": "#!/usr/bin/env python\n\n# Python\nimport unittest\nfrom unittest.mock import Mock\n\n# Genie\nfrom genie.conf.tests import TestCase\nfrom genie.conf import Genie\nfrom genie.conf.base import Testbed, Device, Link, Interface\nfrom genie.conf.base.attributes import 
UnsupportedAttributeWarning\n\n# Lldp\nfrom genie.libs.conf.lldp import Lldp\n\n\nclass test_lldp(TestCase):\n\n    def setUp(self):\n\n        # Set Genie Tb\n        testbed = Testbed()\n        Genie.testbed = testbed\n\n        # Device\n        self.dev1 = Device(name='PE1', testbed=testbed, os='iosxe')\n\n    def test_lldp_full_config(self):\n\n        # For failures\n        self.maxDiff = None\n\n        # Lldp object\n        lldp = Lldp()\n        self.dev1.add_feature(lldp)\n\n        lldp.device_attr[self.dev1].enabled = True\n        lldp.device_attr[self.dev1].hello_timer = 20\n        lldp.device_attr[self.dev1].hold_timer = 30\n        lldp.device_attr[self.dev1].reinit_timer = 5\n\n        lldp.device_attr[self.dev1].tlv_select_attr.suppress_tlv_port_description = True\n        lldp.device_attr[self.dev1].tlv_select_attr.suppress_tlv_system_name = True\n        lldp.device_attr[self.dev1].tlv_select_attr.suppress_tlv_system_capabilities = False\n        lldp.device_attr[self.dev1].tlv_select_attr.suppress_tlv_system_description = False\n\n        lldp.device_attr[self.dev1].interface_attr['GigabitEthernet2/0/15'].if_enabled = True\n\n        cfgs = lldp.build_config(apply=False)\n\n        # Check config built correctly\n        self.assertMultiLineEqual(str(cfgs[self.dev1.name]), '\\n'.\\\n            join([\n            'lldp run',\n            'lldp timer 20',\n            'lldp holdtime 30',\n            'lldp reinit 5',\n            'no lldp tlv-select port-description',\n            'no lldp tlv-select system-name',\n            'lldp tlv-select system-description',\n            'lldp tlv-select system-capabilities',\n            'interface GigabitEthernet2/0/15',\n            ' lldp transmit',\n            ' lldp receive',\n            ' exit',\n            ]))\n\n        cfgs = lldp.build_unconfig(apply=False)\n\n        # Check unconfig built correctly\n        self.assertMultiLineEqual(str(cfgs[self.dev1.name]), '\\n'.\\\n            join([\n            'no lldp run',\n            'no lldp timer 20',\n            'no lldp holdtime 30',\n            'no lldp reinit 5',\n            'lldp tlv-select port-description',\n            'lldp tlv-select system-name',\n            'no lldp tlv-select system-description',\n            'no lldp tlv-select system-capabilities',\n            'interface GigabitEthernet2/0/15',\n            ' no lldp transmit',\n            ' no lldp receive',\n            ' exit',\n            ]))\n\n        # uncfg with attributes\n        cfgs = lldp.build_unconfig(apply=False,\n                                   attributes={'device_attr': {\n                                       self.dev1: {\n                                           'enabled': None,\n                                           'tlv_select_attr': {\n                                               'suppress_tlv_port_description': None\n                                           },\n                                           'interface_attr': {\n                                               'GigabitEthernet2/0/15': {\n                                                   'if_enabled': None\n                                               }\n                                           },}}})\n        # Check config built correctly\n        self.assertMultiLineEqual(str(cfgs[self.dev1.name]), '\\n'.\\\n            join([\n            'no lldp run',\n            'lldp tlv-select port-description',\n            'interface GigabitEthernet2/0/15',\n            ' no lldp transmit',\n            ' no lldp receive',\n            ' exit',\n            ]))\n\n\nif __name__ == '__main__':\n    unittest.main()\n", "id": "3558852", "language": "Python", "matching_score": 1.6296111345291138, "max_stars_count": 0, "path": "pkgs/conf-pkg/src/genie/libs/conf/lldp/iosxe/tests/test_lldp.py" }, { "content": "'''LLDP Genie Ops Object Outputs for NXOS.'''\r\n\r\n\r\nclass LldpOutput(object):\r\n    ShowLldpTimers = {\r\n        'hold_timer': 120,\r\n        'reinit_timer': 2,\r\n        'hello_timer': 30\r\n    }\r\n\r\n    ShowLldpAll = {\r\n        'interfaces': {\r\n            'Ethernet1/1':\r\n                {'enabled': True,\r\n                 'tx': True,\r\n                 'rx': True,\r\n                 'dcbx': True\r\n                 },\r\n            'Ethernet1/2':\r\n                {'enabled': True,\r\n                 'tx': True,\r\n                 'rx': True,\r\n                 'dcbx': False\r\n                 }\r\n        }\r\n    }\r\n\r\n    ShowLldpTlvSelect = {\r\n        'suppress_tlv_advertisement': {\r\n            'port_description': False,\r\n            'system_name': False,\r\n            'system_description': False,\r\n            'system_capabilities': False,\r\n            'management_address_v4': False,\r\n            
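# note: under 'suppress_tlv_advertisement', False means the TLV is not\r\n            # suppressed, i.e. it is advertised; True would suppress it\r\n            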
'management_address_v6': False,\r\n 'power_management': False,\r\n 'port_vlan': False,\r\n 'dcbxp': False\r\n }\r\n }\r\n\r\n ShowLldpNeighborsDetail = {\r\n 'total_entries': 2,\r\n 'interfaces': {\r\n 'Ethernet1/1': {\r\n 'port_id': {\r\n 'GigabitEthernet3': {\r\n 'neighbors': {\r\n 'R1_csr1000v.openstacklocal': {\r\n 'chassis_id': '001e.49f7.2c00',\r\n 'port_description': 'GigabitEthernet3',\r\n 'system_name': 'R1_csr1000v.openstacklocal',\r\n 'system_description': 'Cisco IOS Software [Everest], '\r\n 'Virtual XE Software ('\r\n 'X86_64_LINUX_IOSD-UNIVERSALK9-M), Version 16.6.1, RELEASE SOFTWARE (fc2)\\nTechnical Support: http://www.cisco.com/techsupport\\nCopyright (c) 1986-2017 by Cisco Systems, Inc.\\nCompiled Sat 22-Jul-17 05:51 by',\r\n 'time_remaining': 114,\r\n 'capabilities': {\r\n 'bridge': {\r\n 'name': 'bridge',\r\n 'system': True,\r\n },\r\n 'router': {\r\n 'name': 'router',\r\n 'system': True,\r\n 'enabled': True\r\n }\r\n },\r\n 'management_address_v4': '10.1.3.1',\r\n 'management_address_v6': 'not advertised',\r\n 'vlan_id': 'not advertised'\r\n }\r\n }\r\n }\r\n }\r\n },\r\n 'Ethernet1/2': {\r\n 'port_id': {\r\n 'GigabitEthernet0/0/0/1': {\r\n 'neighbors': {\r\n 'R2_xrv9000': {\r\n 'chassis_id': '000d.bd09.46fa',\r\n 'system_name': 'R2_xrv9000',\r\n 'system_description': '6.2.2, IOS-XRv 9000',\r\n 'time_remaining': 95,\r\n 'capabilities': {\r\n 'router': {\r\n 'name': 'router',\r\n 'system': True,\r\n 'enabled': True\r\n }\r\n },\r\n 'management_address_v4': '10.2.3.2',\r\n 'management_address_v6': 'not advertised',\r\n 'vlan_id': 'not advertised'\r\n }\r\n }\r\n }\r\n }\r\n }\r\n }\r\n }\r\n\r\n ShowLldpTraffic = {\r\n 'counters': {\r\n \"total_frames_received\": 209,\r\n \"total_frames_transmitted\": 349,\r\n \"total_frames_received_in_error\": 0,\r\n \"total_frames_discarded\": 0,\r\n 'total_unrecognized_tlvs': 0,\r\n 'total_entries_aged': 0\r\n }\r\n }\r\n\r\n Lldp_info = {\r\n 'hello_timer': 30,\r\n 'hold_timer': 120,\r\n 'suppress_tlv_advertisement': {\r\n 'port_description': False,\r\n 'system_name': False,\r\n 'system_description': False,\r\n 'system_capabilities': False,\r\n 'management_address': False,\r\n },\r\n 'counters': {\r\n 'frame_in': 209,\r\n 'frame_out': 349,\r\n 'frame_error_in': 0,\r\n 'frame_discard': 0,\r\n 'tlv_unknown': 0,\r\n 'entries_aged_out': 0,\r\n },\r\n 'interfaces': {\r\n 'Ethernet1/1': {\r\n 'enabled': True,\r\n 'if_name': 'Ethernet1/1',\r\n 'port_id': {\r\n 'GigabitEthernet3': {\r\n 'neighbors': {\r\n 'R1_csr1000v.openstacklocal': {\r\n 'chassis_id': '001e.49f7.2c00',\r\n 'port_id': 'GigabitEthernet3',\r\n 'port_description': 'GigabitEthernet3',\r\n 'system_name': 'R1_csr1000v.openstacklocal',\r\n 'system_description': 'Cisco IOS Software [Everest], '\r\n 'Virtual XE Software ('\r\n 'X86_64_LINUX_IOSD-UNIVERSALK9-M), Version 16.6.1, RELEASE SOFTWARE (fc2)\\nTechnical Support: http://www.cisco.com/techsupport\\nCopyright (c) 1986-2017 by Cisco Systems, Inc.\\nCompiled Sat 22-Jul-17 05:51 by',\r\n 'capabilities': {\r\n 'bridge': {\r\n 'name': 'bridge',\r\n 'system': True,\r\n },\r\n 'router': {\r\n 'name': 'router',\r\n 'system': True,\r\n 'enabled': True\r\n }\r\n },\r\n 'management_address': '10.1.3.1',\r\n 'management_address_type': 'ipv4',\r\n },\r\n },\r\n },\r\n },\r\n },\r\n 'Ethernet1/2': {\r\n 'if_name': 'Ethernet1/2',\r\n 'enabled': True,\r\n 'port_id': {\r\n 'GigabitEthernet0/0/0/1': {\r\n 'neighbors': {\r\n 'R2_xrv9000': {\r\n 'chassis_id': '000d.bd09.46fa',\r\n 'port_id': 'GigabitEthernet0/0/0/1',\r\n 'system_name': 
'R2_xrv9000',\r\n 'system_description': '6.2.2, IOS-XRv 9000',\r\n 'capabilities': {\r\n 'router': {\r\n 'name': 'router',\r\n 'system': True,\r\n 'enabled': True\r\n }\r\n },\r\n 'management_address': '10.2.3.2',\r\n 'management_address_type': 'ipv4',\r\n },\r\n },\r\n },\r\n },\r\n },\r\n\r\n }\r\n }\r\n", "id": "12811435", "language": "Python", "matching_score": 6.95326042175293, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/lldp/nxos/tests/lldp_output.py" }, { "content": "'''LLDP Genie Ops Object Outputs for IOSXR.'''\n\n\nclass LldpOutput(object):\n\n ShowLldp = {\n \"hello_timer\": 30,\n \"enabled\": True,\n \"hold_timer\": 120,\n \"status\": \"active\",\n \"reinit_delay\": 2\n }\n \n ShowLldpEntry = {\n 'interfaces': {\n 'GigabitEthernet0/0/0/0': {\n 'port_id': {\n 'GigabitEthernet2': {\n 'neighbors': {\n 'R1_csr1000v.openstacklocal': {\n 'chassis_id': '001e.49f7.2c00',\n 'port_description': 'GigabitEthernet2',\n 'system_name': 'R1_csr1000v.openstacklocal',\n 'neighbor_id': 'R1_csr1000v.openstacklocal',\n 'system_description': 'Cisco IOS Software [Everest], Virtual XE Software (X86_64_LINUX_IOSD-UNIVERSALK9-M), Version 16.6.1, RELEASE SOFTWARE (fc2)\\nTechnical Support: http://www.cisco.com/techsupport\\nCopyright (c) 1986-2017 by Cisco Systems, Inc.\\nCompiled Sat 22-Jul-17 05:51 by',\n 'time_remaining': 117,\n 'hold_time': 120,\n 'capabilities': {\n 'bridge': {\n 'system': True,\n },\n 'router': {\n 'system': True,\n 'enabled': True,\n },\n },\n 'management_address': '10.1.2.1',\n },\n },\n },\n },\n },\n 'GigabitEthernet0/0/0/1': {\n 'port_id': {\n 'Ethernet1/2': {\n 'neighbors': {\n 'R3_n9kv': {\n 'chassis_id': '5e00.8002.0009',\n 'port_description': 'Ethernet1/2',\n 'system_name': 'R3_n9kv',\n 'neighbor_id': 'R3_n9kv',\n 'system_description': 'Cisco Nexus Operating System (NX-OS) Software 7.0(3)I7(1)\\nTAC support: http://www.cisco.com/tac\\nCopyright (c) 2002-2017, Cisco Systems, Inc. All rights reserved.\\n',\n 'time_remaining': 103,\n 'hold_time': 120,\n 'capabilities': {\n 'bridge': {\n 'system': True,\n 'enabled': True,\n },\n 'router': {\n 'system': True,\n 'enabled': True,\n },\n },\n },\n },\n },\n },\n },\n },\n 'total_entries': 2,\n }\n\n ShowLldpNeighborsDetail = {\n 'interfaces': {\n 'GigabitEthernet0/0/0/0': {\n 'port_id': {\n 'GigabitEthernet2': {\n 'neighbors': {\n 'R1_csr1000v.openstacklocal': {\n 'chassis_id': '001e.49f7.2c00',\n 'port_description': 'GigabitEthernet2',\n 'system_name': 'R1_csr1000v.openstacklocal',\n 'neighbor_id': 'R1_csr1000v.openstacklocal',\n 'system_description': 'Cisco IOS Software [Everest], Virtual XE Software (X86_64_LINUX_IOSD-UNIVERSALK9-M), Version 16.6.1, RELEASE SOFTWARE (fc2)\\nTechnical Support: http://www.cisco.com/techsupport\\nCopyright (c) 1986-2017 by Cisco Systems, Inc.\\nCompiled Sat 22-Jul-17 05:51 by',\n 'time_remaining': 90,\n 'hold_time': 120,\n 'capabilities': {\n 'bridge': {\n 'system': True,\n },\n 'router': {\n 'system': True,\n 'enabled': True,\n },\n },\n 'management_address': '10.1.2.1',\n },\n },\n },\n },\n },\n 'GigabitEthernet0/0/0/1': {\n 'port_id': {\n 'Ethernet1/2': {\n 'neighbors': {\n 'R3_n9kv': {\n 'chassis_id': '5e00.8002.0009',\n 'port_description': 'Ethernet1/2',\n 'system_name': 'R3_n9kv',\n 'neighbor_id': 'R3_n9kv',\n 'system_description': 'Cisco Nexus Operating System (NX-OS) Software 7.0(3)I7(1)\\nTAC support: http://www.cisco.com/tac\\nCopyright (c) 2002-2017, Cisco Systems, Inc. 
All rights reserved.\\n',\n 'time_remaining': 106,\n 'hold_time': 120,\n 'capabilities': {\n 'bridge': {\n 'system': True,\n 'enabled': True,\n },\n 'router': {\n 'system': True,\n 'enabled': True,\n },\n },\n },\n },\n },\n },\n },\n },\n 'total_entries': 2,\n }\n\n ShowLldpTraffic = {\n \"counters\": {\n \"frame_in\": 399,\n \"frame_out\": 588,\n \"frame_error_in\": 0,\n \"frame_discard\": 0,\n \"tlv_discard\": 119,\n 'tlv_unknown': 119,\n 'entries_aged_out': 0\n }\n }\n\n ShowLldpInterface = {\n 'interfaces': {\n 'GigabitEthernet0/0/0/0': {\n 'tx': 'enabled',\n 'rx': 'enabled',\n 'tx_state': 'idle',\n 'rx_state': 'wait for frame',\n },\n 'GigabitEthernet0/0/0/1': {\n 'tx': 'enabled',\n 'rx': 'enabled',\n 'tx_state': 'idle',\n 'rx_state': 'wait for frame',\n },\n } \n }\n\n \n lldpOutput = {\n 'enabled': True,\n 'hello_timer': 30,\n 'hold_timer': 120,\n 'interfaces': {\n 'GigabitEthernet0/0/0/1': {\n 'port_id': {\n 'Ethernet1/2': {\n 'neighbors': {\n 'R3_n9kv': {\n 'neighbor_id': 'R3_n9kv',\n 'system_name': 'R3_n9kv',\n 'system_description': 'Cisco Nexus Operating System (NX-OS) Software 7.0(3)I7(1)\\nTAC support: http://www.cisco.com/tac\\nCopyright (c) 2002-2017, Cisco Systems, Inc. All rights reserved.\\n',\n 'chassis_id': '5e00.8002.0009',\n 'port_description': 'Ethernet1/2',\n 'capabilities': {\n 'router': {\n 'enabled': True,\n },\n 'bridge': {\n 'enabled': True,\n },\n },\n },\n },\n },\n },\n 'enabled': True,\n },\n 'GigabitEthernet0/0/0/0': {\n 'port_id': {\n 'GigabitEthernet2': {\n 'neighbors': {\n 'R1_csr1000v.openstacklocal': {\n 'neighbor_id': 'R1_csr1000v.openstacklocal',\n 'system_name': 'R1_csr1000v.openstacklocal',\n 'system_description': 'Cisco IOS Software [Everest], Virtual XE Software (X86_64_LINUX_IOSD-UNIVERSALK9-M), Version 16.6.1, RELEASE SOFTWARE (fc2)\\nTechnical Support: http://www.cisco.com/techsupport\\nCopyright (c) 1986-2017 by Cisco Systems, Inc.\\nCompiled Sat 22-Jul-17 05:51 by',\n 'chassis_id': '001e.49f7.2c00',\n 'port_description': 'GigabitEthernet2',\n 'management_address': '10.1.2.1',\n 'capabilities': {\n 'router': {\n 'enabled': True,\n },\n },\n },\n },\n },\n },\n 'enabled': True,\n },\n },\n 'counters': {\n 'frame_in': 399,\n 'frame_out': 588,\n 'frame_error_in': 0,\n 'frame_discard': 0,\n 'tlv_discard': 119,\n 'tlv_unknown': 119,\n 'entries_aged_out': 0,\n },\n }\n\n", "id": "8252649", "language": "Python", "matching_score": 1.727247953414917, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/lldp/iosxr/tests/lldp_output.py" }, { "content": "class HsrpOutput(object):\n\n # 'show standby all' output\n showStandbyAllOutput = {\n \"GigabitEthernet1/0/1\": {\n \"address_family\": {\n \"ipv4\": {\n \"version\": {\n 2: {\n \"groups\": {\n 0: {\n \"active_router\": \"local\",\n \"authentication\": \"5\",\n \"authentication_type\": \"MD5\",\n \"default_priority\": 100,\n \"group_number\": 0,\n \"hsrp_router_state\": \"active\",\n \"last_state_change\": \"1w0d\",\n \"local_virtual_mac_address\": \"0000.0c9f.f000\",\n \"local_virtual_mac_address_conf\": \"v2 \" \"default\",\n \"preempt\": True,\n \"preempt_min_delay\": 5,\n \"preempt_reload_delay\": 10,\n \"preempt_sync_delay\": 20,\n \"primary_ipv4_address\": {\n \"address\": \"192.168.1.254\"\n },\n \"priority\": 100,\n \"session_name\": \"hsrp-Gi1/0/1-0\",\n \"standby_ip_address\": \"192.168.1.2\",\n \"standby_router\": \"192.168.1.2\",\n \"standby_priority\": 100,\n \"standby_expires_in\": 10.624,\n \"statistics\": {\"num_state_changes\": 8},\n \"timers\": {\n \"hello_msec_flag\": 
False,\n \"hello_sec\": 5,\n \"hold_msec_flag\": False,\n \"hold_sec\": 20,\n \"next_hello_sent\": 2.848,\n },\n \"virtual_mac_address\": \"0000.0c9f.f000\",\n \"virtual_mac_address_mac_in_use\": True,\n }\n }\n }\n }\n }\n },\n \"interface\": \"GigabitEthernet1/0/1\",\n \"redirects_disable\": False,\n \"use_bia\": False,\n },\n \"GigabitEthernet1/0/2\": {\n \"address_family\": {\n \"ipv4\": {\n \"version\": {\n 1: {\n \"groups\": {\n 10: {\n \"active_router\": \"unknown\",\n \"authentication\": \"cisco123\",\n \"authentication_type\": \"MD5\",\n \"configured_priority\": 110,\n \"group_number\": 10,\n \"hsrp_router_state\": \"disabled\",\n \"local_virtual_mac_address\": \"0000.0c07.ac0a\",\n \"local_virtual_mac_address_conf\": \"v1 \" \"default\",\n \"preempt\": True,\n \"primary_ipv4_address\": {\"address\": \"unknown\"},\n \"priority\": 110,\n \"session_name\": \"hsrp-Gi1/0/2-10\",\n \"standby_ip_address\": \"unknown\",\n \"standby_router\": \"unknown\",\n \"timers\": {\n \"hello_msec_flag\": False,\n \"hello_sec\": 3,\n \"hold_msec_flag\": False,\n \"hold_sec\": 10,\n },\n \"virtual_mac_address\": \"unknown\",\n \"virtual_mac_address_mac_in_use\": False,\n }\n }\n }\n }\n }\n },\n \"interface\": \"GigabitEthernet1/0/2\",\n \"redirects_disable\": False,\n \"use_bia\": False,\n },\n \"GigabitEthernet3\": {\n \"address_family\": {\n \"ipv4\": {\n \"version\": {\n 1: {\n \"groups\": {\n 10: {\n \"active_expires_in\": 0.816,\n \"active_ip_address\": \"10.1.2.1\",\n \"active_router\": \"10.1.2.1\",\n \"active_router_priority\": 120,\n \"configured_priority\": 110,\n \"group_number\": 10,\n \"hsrp_router_state\": \"standby\",\n \"local_virtual_mac_address\": \"0000.0c07.ac0a\",\n \"local_virtual_mac_address_conf\": \"v1 \" \"default\",\n \"preempt\": True,\n \"primary_ipv4_address\": {\"address\": \"10.1.2.254\"},\n \"priority\": 110,\n \"session_name\": \"hsrp-Gi3-10\",\n \"standby_router\": \"local\",\n \"timers\": {\n \"hello_msec_flag\": False,\n \"hello_sec\": 3,\n \"hold_msec_flag\": False,\n \"hold_sec\": 10,\n \"next_hello_sent\": 2.096,\n },\n \"virtual_mac_address\": \"0050.568e.3a40\",\n \"virtual_mac_address_mac_in_use\": False,\n }\n }\n }\n }\n }\n },\n \"interface\": \"GigabitEthernet3\",\n \"redirects_disable\": False,\n \"use_bia\": False,\n },\n }\n\n showStandbyAllOutput_golden = \"\"\"\n GigabitEthernet1/0/1 - Group 0 (version 2)\n State is Active\n 8 state changes, last state change 1w0d\n Track object 1 (unknown)\n Virtual IP address is 192.168.1.254\n Active virtual MAC address is 0000.0c9f.f000 (MAC In Use)\n Local virtual MAC address is 0000.0c9f.f000 (v2 default)\n Hello time 5 sec, hold time 20 sec\n Next hello sent in 2.848 secs\n Authentication MD5, key-chain \"5\"\n Preemption enabled, delay min 5 secs, reload 10 secs, sync 20 secs\n Active router is local\n Standby router is 192.168.1.2, priority 100 (expires in 10.624 sec)\n Priority 100 (default 100)\n Group name is \"hsrp-Gi1/0/1-0\" (default)\n GigabitEthernet1/0/2 - Group 10\n State is Disabled\n Virtual IP address is unknown\n Active virtual MAC address is unknown (MAC Not In Use)\n Local virtual MAC address is 0000.0c07.ac0a (v1 default)\n Hello time 3 sec, hold time 10 sec\n Authentication MD5, key-chain \"cisco123\"\n Preemption enabled\n Active router is unknown\n Standby router is unknown\n Priority 110 (configured 110)\n Group name is \"hsrp-Gi1/0/2-10\" (default)\n GigabitEthernet3 - Group 10\n State is Standby\n 1 state change, last state change 00:00:08\n Virtual IP address is 10.1.2.254\n Active 
virtual MAC address is 0050.568e.3a40 (MAC Not In Use)\n Local virtual MAC address is 0000.0c07.ac0a (v1 default)\n Hello time 3 sec, hold time 10 sec\n Next hello sent in 2.096 secs\n Preemption enabled\n Active router is 10.1.2.1, priority 120 (expires in 0.816 sec)\n Standby router is local\n Priority 110 (configured 110)\n Group name is \"hsrp-Gi3-10\" (default)\n \"\"\"\n\n # 'show standby internal' output\n showStandbyInternalOutput = {\n \"hsrp_common_process_state\": \"not running\",\n \"hsrp_ha_state\": \"capable\",\n \"hsrp_ipv4_process_state\": \"not running\",\n \"hsrp_ipv6_process_state\": \"not running\",\n \"hsrp_timer_wheel_state\": \"running\",\n \"mac_address_table\": {\n 166: {\"group\": 10, \"interface\": \"gi2/0/3\", \"mac_address\": \"0000.0c07.ac0a\"},\n 169: {\"group\": 5, \"interface\": \"gi1/0/1\", \"mac_address\": \"0000.0c07.ac05\"},\n 172: {\"group\": 0, \"interface\": \"gi2/0/3\", \"mac_address\": \"0000.0c07.ac00\"},\n 173: {\"group\": 1, \"interface\": \"gi2/0/3\", \"mac_address\": \"0000.0c07.ac01\"},\n },\n \"msgQ_max_size\": 0,\n \"msgQ_size\": 0,\n \"v3_to_v4_transform\": \"disabled\",\n \"virtual_ip_hash_table\": {\n \"ipv6\": {\n 78: {\"group\": 20, \"interface\": \"gi1\", \"ip\": \"2001:DB8:10:1:1::254\"}\n },\n \"ipv4\": {\n 103: {\"group\": 0, \"interface\": \"gi1/0/1\", \"ip\": \"192.168.1.254\"},\n 106: {\"group\": 10, \"interface\": \"gi1/0/2\", \"ip\": \"192.168.2.254\"},\n },\n },\n }\n\n showStandbyInternalOutput_golden = \"\"\"\n HSRP common process not running\n MsgQ size 0, max 0\n HSRP IPv4 process not running\n HSRP IPv6 process not running\n HSRP Timer wheel running\n HSRP HA capable, v3 to v4 transform disabled\n\n HSRP virtual IP Hash Table (global)\n 103 192.168.1.254 Gi1/0/1 Grp 0\n 106 192.168.2.254 Gi1/0/2 Grp 10\n\n HSRP virtual IPv6 Hash Table (global)\n 78 2001:DB8:10:1:1::254 Gi1 Grp 20\n\n HSRP MAC Address Table\n 169 Gi1/0/1 0000.0c07.ac05\n Gi1/0/1 Grp 5\n 166 Gi2/0/3 0000.0c07.ac0a\n Gi2/0/3 Grp 10\n 172 Gi2/0/3 0000.0c07.ac00\n Gi2/0/3 Grp 0\n 173 Gi2/0/3 0000.0c07.ac01\n Gi2/0/3 Grp 1\n \"\"\"\n\n showStandbyDelayOutput = {\n \"GigabitEthernet1\": {\"delay\": {\"minimum_delay\": 99, \"reload_delay\": 888}}\n }\n\n showStandbyDelayOutput_golden = \"\"\"\n Interface Minimum Reload \n GigabitEthernet1 99 888 \n \"\"\"\n\n # Hsrp Ops Object final output\n hsrpOpsOutput = {\n \"GigabitEthernet1\": {\"delay\": {\"minimum_delay\": 99, \"reload_delay\": 888}},\n \"GigabitEthernet3\": {\n \"address_family\": {\n \"ipv4\": {\n \"version\": {\n 1: {\n \"groups\": {\n 10: {\n \"timers\": {\n \"hello_msec_flag\": False,\n \"hello_sec\": 3,\n \"hold_msec_flag\": False,\n \"hold_sec\": 10,\n },\n \"primary_ipv4_address\": {\"address\": \"10.1.2.254\"},\n \"priority\": 110,\n \"preempt\": True,\n \"session_name\": \"hsrp-Gi3-10\",\n \"virtual_mac_address\": \"0050.568e.3a40\",\n \"group_number\": 10,\n \"active_ip_address\": \"10.1.2.1\",\n \"hsrp_router_state\": \"standby\",\n \"active_router\": \"10.1.2.1\",\n \"standby_router\": \"local\",\n }\n }\n }\n }\n }\n },\n \"use_bia\": False,\n \"redirects_disable\": False,\n \"interface\": \"GigabitEthernet3\",\n },\n \"GigabitEthernet1/0/2\": {\n \"address_family\": {\n \"ipv4\": {\n \"version\": {\n 1: {\n \"groups\": {\n 10: {\n \"timers\": {\n \"hello_msec_flag\": False,\n \"hello_sec\": 3,\n \"hold_msec_flag\": False,\n \"hold_sec\": 10,\n },\n \"primary_ipv4_address\": {\"address\": \"unknown\"},\n \"authentication\": \"cisco123\",\n \"priority\": 110,\n \"preempt\": True,\n 
\"session_name\": \"hsrp-Gi1/0/2-10\",\n \"virtual_mac_address\": \"unknown\",\n \"group_number\": 10,\n \"standby_ip_address\": \"unknown\",\n \"hsrp_router_state\": \"disabled\",\n \"active_router\": \"unknown\",\n \"standby_router\": \"unknown\",\n }\n }\n }\n }\n }\n },\n \"use_bia\": False,\n \"redirects_disable\": False,\n \"interface\": \"GigabitEthernet1/0/2\",\n },\n \"GigabitEthernet1/0/1\": {\n \"address_family\": {\n \"ipv4\": {\n \"version\": {\n 2: {\n \"groups\": {\n 0: {\n \"timers\": {\n \"hello_msec_flag\": False,\n \"hello_sec\": 5,\n \"hold_msec_flag\": False,\n \"hold_sec\": 20,\n },\n \"primary_ipv4_address\": {\n \"address\": \"192.168.1.254\"\n },\n \"authentication\": \"5\",\n \"priority\": 100,\n \"preempt\": True,\n \"session_name\": \"hsrp-Gi1/0/1-0\",\n \"virtual_mac_address\": \"0000.0c9f.f000\",\n \"group_number\": 0,\n \"standby_ip_address\": \"192.168.1.2\",\n \"hsrp_router_state\": \"active\",\n \"active_router\": \"local\",\n \"standby_router\": \"192.168.1.2\",\n }\n }\n }\n }\n }\n },\n \"use_bia\": False,\n \"redirects_disable\": False,\n \"interface\": \"GigabitEthernet1/0/1\",\n },\n }\n", "id": "8948898", "language": "Python", "matching_score": 8.139777183532715, "max_stars_count": 0, "path": "pkgs/ops-pkg/src/genie/libs/ops/hsrp/ios/tests/hsrp_output.py" }, { "content": "class HsrpOutput(object):\n\n # 'show standby all' output\n showStandbyAllOutput = \\\n {\n 'GigabitEthernet1/0/1': {\n 'address_family': {\n 'ipv4': {\n 'version': {\n 2: {\n 'groups': {\n 0: {\n 'active_router': 'local',\n 'authentication': '5',\n 'authentication_type': 'MD5',\n 'default_priority': 100,\n 'group_number': 0,\n 'hsrp_router_state': 'active',\n 'last_state_change': '1w0d',\n 'local_virtual_mac_address': '0000.0c9f.f000',\n 'local_virtual_mac_address_conf': 'v2 '\n 'default',\n 'preempt': True,\n 'preempt_min_delay': 5,\n 'preempt_reload_delay': 10,\n 'preempt_sync_delay': 20,\n 'primary_ipv4_address': {\n 'address': '192.168.1.254'\n },\n 'priority': 100,\n 'session_name': 'hsrp-Gi1/0/1-0',\n 'standby_ip_address': 'unknown',\n 'standby_router': 'unknown',\n 'statistics': {\n 'num_state_changes': 8\n },\n 'timers': {\n 'hello_msec_flag': False,\n 'hello_sec': 5,\n 'hold_msec_flag': False,\n 'hold_sec': 20,\n 'next_hello_sent': 2.848\n },\n 'virtual_mac_address': '0000.0c9f.f000',\n 'virtual_mac_address_mac_in_use': True\n }\n }\n }\n }\n }\n },\n 'interface': 'GigabitEthernet1/0/1',\n 'redirects_disable': False,\n 'use_bia': False\n },\n 'GigabitEthernet1/0/2': {\n 'address_family': {\n 'ipv4': {\n 'version': {\n 1: {\n 'groups': {\n 10: {\n 'active_router': 'unknown',\n 'authentication': 'cisco123',\n 'authentication_type': 'MD5',\n 'configured_priority': 110,\n 'group_number': 10,\n 'hsrp_router_state': 'disabled',\n 'local_virtual_mac_address': '0000.0c07.ac0a',\n 'local_virtual_mac_address_conf': 'v1 '\n 'default',\n 'preempt': True,\n 'primary_ipv4_address': {\n 'address': 'unknown'\n },\n 'priority': 110,\n 'session_name': 'hsrp-Gi1/0/2-10',\n 'standby_ip_address': 'unknown',\n 'standby_router': 'unknown',\n 'timers': {\n 'hello_msec_flag': False,\n 'hello_sec': 3,\n 'hold_msec_flag': False,\n 'hold_sec': 10\n },\n 'virtual_mac_address': 'unknown',\n 'virtual_mac_address_mac_in_use': False\n }\n }\n }\n }\n }\n },\n 'interface': 'GigabitEthernet1/0/2',\n 'redirects_disable': False,\n 'use_bia': False\n },\n 'GigabitEthernet3': {\n 'address_family': {\n 'ipv4': {\n 'version': {\n 1: {\n 'groups': {\n 10: {\n 'active_expires_in': 0.816,\n 'active_ip_address': 
'10.1.2.1',\n 'active_router': '10.1.2.1',\n 'active_router_priority': 120,\n 'configured_priority': 110,\n 'group_number': 10,\n 'hsrp_router_state': 'standby',\n 'local_virtual_mac_address': '0000.0c07.ac0a',\n 'local_virtual_mac_address_conf': 'v1 '\n 'default',\n 'preempt': True,\n 'primary_ipv4_address': {\n 'address': '10.1.2.254'\n },\n 'priority': 110,\n 'session_name': 'hsrp-Gi3-10',\n 'standby_router': 'local',\n 'timers': {\n 'hello_msec_flag': False,\n 'hello_sec': 3,\n 'hold_msec_flag': False,\n 'hold_sec': 10,\n 'next_hello_sent': 2.096\n },\n 'virtual_mac_address': '0050.568e.3a40',\n 'virtual_mac_address_mac_in_use': False\n }\n }\n }\n }\n }\n },\n 'interface': 'GigabitEthernet3',\n 'redirects_disable': False,\n 'use_bia': False\n }\n }\n\n\n # 'show standby internal' output\n showStandbyInternalOutput = \\\n {\n 'hsrp_common_process_state': 'running',\n 'hsrp_ha_state': 'capable',\n 'hsrp_ipv4_process_state': 'running',\n 'hsrp_ipv6_process_state': 'running',\n 'hsrp_timer_wheel_state': 'running',\n 'msgQ_max_size': 3,\n 'msgQ_size': 0,\n 'v3_to_v4_transform': 'disabled',\n 'virtual_ip_hash_table': {\n 'ipv4': {\n 10: {\n 'group': 10,\n 'interface': 'gi1',\n 'ip': '10.1.1.254'\n }\n },\n 'ipv6': {\n 78: {\n 'group': 20,\n 'interface': 'gi1',\n 'ip': '2001:DB8:10:1:1::254'\n },\n 167: {\n 'group': 20,\n 'interface': 'gi1',\n 'ip': 'FE80::5:73FF:FEA0:14'\n },\n 197: {\n 'group': 50,\n 'interface': 'gi1',\n 'ip': 'FE80::5:73FF:FEA0:32'\n }\n }\n }\n }\n\n\n showStandbyDelayOutput = \\\n {\n \"GigabitEthernet1/0/1\": {\n \"delay\": {\n \"minimum_delay\": 99,\n \"reload_delay\": 888\n }\n }\n }\n\n # Hsrp Ops Object final output\n hsrpOpsOutput = \\\n {\n 'GigabitEthernet1/0/1': {\n 'address_family': {\n 'ipv4': {\n 'version': {\n 2: {\n 'groups': {\n 0: {\n 'active_router': 'local',\n 'authentication': '5',\n 'group_number': 0,\n 'hsrp_router_state': 'active',\n 'preempt': True,\n 'primary_ipv4_address': {\n 'address': '192.168.1.254'\n },\n 'priority': 100,\n 'session_name': 'hsrp-Gi1/0/1-0',\n 'standby_ip_address': 'unknown',\n 'standby_router': 'unknown',\n 'timers': {\n 'hello_msec_flag': False,\n 'hello_sec': 5,\n 'hold_msec_flag': False,\n 'hold_sec': 20\n },\n 'virtual_mac_address': '0000.0c9f.f000'\n }\n }\n }\n }\n }\n },\n 'delay': {\n 'minimum_delay': 99, 'reload_delay': 888\n },\n 'interface': 'GigabitEthernet1/0/1',\n 'redirects_disable': False,\n 'use_bia': False\n },\n 'GigabitEthernet1/0/2': {\n 'address_family': {\n 'ipv4': {\n 'version': {\n 1: {\n 'groups': {\n 10: {\n 'active_router': 'unknown',\n 'authentication': 'cisco123',\n 'group_number': 10,\n 'hsrp_router_state': 'disabled',\n 'preempt': True,\n 'primary_ipv4_address': {\n 'address': 'unknown'\n },\n 'priority': 110,\n 'session_name': 'hsrp-Gi1/0/2-10',\n 'standby_ip_address': 'unknown',\n 'standby_router': 'unknown',\n 'timers': {\n 'hello_msec_flag': False,\n 'hello_sec': 3,\n 'hold_msec_flag': False,\n 'hold_sec': 10\n },\n 'virtual_mac_address': 'unknown'\n }\n }\n }\n }\n }\n },\n 'interface': 'GigabitEthernet1/0/2',\n 'redirects_disable': False,\n 'use_bia': False\n },\n 'GigabitEthernet3': {\n 'address_family': {\n 'ipv4': {\n 'version': {\n 1: {\n 'groups': {\n 10: {\n 'active_ip_address': '10.1.2.1',\n 'active_router': '10.1.2.1',\n 'group_number': 10,\n 'hsrp_router_state': 'standby',\n 'preempt': True,\n 'primary_ipv4_address': {\n 'address': '10.1.2.254'\n },\n 'priority': 110,\n 'session_name': 'hsrp-Gi3-10',\n 'standby_router': 'local',\n 'timers': {\n 'hello_msec_flag': False,\n 
'hello_sec': 3,\n 'hold_msec_flag': False,\n 'hold_sec': 10\n },\n 'virtual_mac_address': '0050.568e.3a40'\n }\n }\n }\n }\n }\n },\n 'interface': 'GigabitEthernet3',\n 'redirects_disable': False,\n 'use_bia': False\n }\n }\n\n# vim: ft=python et sw=4\n", "id": "3413052", "language": "Python", "matching_score": 5.100157260894775, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/hsrp/iosxe/tests/hsrp_output.py" }, { "content": "class HsrpOutput(object):\n\n # 'show hsrp detail' output\n showHsrpDetailOutput = \\\n {\n 'GigabitEthernet0/0/0/0': {\n 'address_family': {\n 'ipv4': {\n 'version': {\n 1: {\n 'groups': {\n 0: {\n 'active_ip_address': 'unknown',\n 'active_router': 'unknown',\n 'bfd': {\n 'address': '10.1.1.1',\n 'interface_name': 'GigabitEthernet0/0/0/1',\n 'state': 'inactive'\n },\n 'group_number': 0,\n 'hsrp_router_state': 'init',\n 'preempt': True,\n 'priority': 100,\n 'standby_ip_address': 'unknown',\n 'standby_router': 'unknown',\n 'standby_state': 'stored',\n 'statistics': {\n 'last_coup_received': 'Never',\n 'last_coup_sent': 'Never',\n 'last_resign_received': 'Never',\n 'last_resign_sent': 'Never',\n 'last_state_change': 'never',\n 'num_state_changes': 0\n },\n 'timers': {\n 'hello_msec': 3000,\n 'hello_msec_flag': True,\n 'hold_msec': 10000,\n 'hold_msec_flag': True\n },\n 'virtual_mac_address': '0000.0c07.ac00'\n },\n 10: {\n 'active_ip_address': 'local',\n 'active_priority': 63,\n 'active_router': 'local',\n 'authentication': 'cisco123',\n 'group_number': 10,\n 'hsrp_router_state': 'active',\n 'num_of_slaves': 1,\n 'preempt': True,\n 'primary_ipv4_address': {\n 'address': '10.1.1.254'\n },\n 'priority': 63,\n 'session_name': 'group10',\n 'standby_ip_address': 'unknown',\n 'standby_router': 'unknown',\n 'standby_state': 'reserving',\n 'statistics': {\n 'last_coup_received': 'Never',\n 'last_coup_sent': 'Never',\n 'last_resign_received': 'Never',\n 'last_resign_sent': 'Never',\n 'last_state_change': '09:36:53',\n 'num_state_changes': 4\n },\n 'timers': {\n 'cfgd_hello_msec': 10000,\n 'cfgd_hold_msec': 30000,\n 'hello_msec': 10000,\n 'hello_msec_flag': True,\n 'hold_msec': 30000,\n 'hold_msec_flag': True\n },\n 'tracked_interfaces': {\n 'GigabitEthernet0/0/0/1': {\n 'interface_name': 'GigabitEthernet0/0/0/1',\n 'priority_decrement': 123\n }\n },\n 'tracked_objects': {\n '1': {\n 'object_name': '1',\n 'priority_decrement': 25\n },\n 'num_tracked_objects': 2,\n 'num_tracked_objects_up': 1\n },\n 'virtual_mac_address': '0000.0c07.ac0a'\n },\n 20: {\n 'active_ip_address': 'local',\n 'active_priority': 100,\n 'active_router': 'local',\n 'group_number': 20,\n 'hsrp_router_state': 'active',\n 'primary_ipv4_address': {\n 'address': '10.1.1.128'\n },\n 'priority': 100,\n 'standby_ip_address': 'unknown',\n 'standby_router': 'unknown',\n 'standby_state': 'reserving',\n 'statistics': {\n 'last_coup_received': 'Never',\n 'last_coup_sent': 'Never',\n 'last_resign_received': 'Never',\n 'last_resign_sent': 'Never',\n 'last_state_change': '09:37:52',\n 'num_state_changes': 4\n },\n 'timers': {\n 'cfgd_hello_msec': 111,\n 'cfgd_hold_msec': 333,\n 'hello_msec': 111,\n 'hello_msec_flag': True,\n 'hold_msec': 333,\n 'hold_msec_flag': True\n },\n 'tracked_interfaces': {\n 'GigabitEthernet0/0/0/1': {\n 'interface_name': 'GigabitEthernet0/0/0/1',\n 'priority_decrement': 251\n }\n },\n 'tracked_objects': {\n 'num_tracked_objects': 1,\n 'num_tracked_objects_up': 1\n },\n 'virtual_mac_address': '0000.0c07.ac14'\n }\n },\n 'slave_groups': {\n 30: {\n 'follow': 'group10',\n 
'group_number': 30,\n 'hsrp_router_state': 'init',\n 'primary_ipv4_address': {\n 'address': 'unknown'\n },\n 'priority': 100,\n 'standby_state': 'stored',\n 'virtual_mac_address': '0000.0c07.ac1e'\n }\n }\n }\n }\n },\n 'ipv6': {\n 'version': {\n 2: {\n 'groups': {\n 10: {\n 'active_ip_address': 'local',\n 'active_priority': 100,\n 'active_router': 'local',\n 'group_number': 10,\n 'hsrp_router_state': 'active',\n 'link_local_ipv6_address': {\n 'address': 'fe80::205:73ff:fea0:a'\n },\n 'priority': 100,\n 'standby_ip_address': 'unknown',\n 'standby_router': 'unknown',\n 'standby_state': 'reserving',\n 'statistics': {\n 'last_coup_received': 'Never',\n 'last_coup_sent': 'Never',\n 'last_resign_received': 'Never',\n 'last_resign_sent': 'Never',\n 'last_state_change': '09:37:18',\n 'num_state_changes': 4\n },\n 'timers': {\n 'hello_msec': 3000,\n 'hello_msec_flag': True,\n 'hold_msec': 10000,\n 'hold_msec_flag': True\n },\n 'virtual_mac_address': '0005.73a0.000a'\n },\n 20: {\n 'active_ip_address': 'local',\n 'active_priority': 100,\n 'active_router': 'local',\n 'group_number': 20,\n 'hsrp_router_state': 'active',\n 'link_local_ipv6_address': {\n 'address': 'fe80::205:73ff:fea0:14'\n },\n 'priority': 100,\n 'standby_ip_address': 'unknown',\n 'standby_router': 'unknown',\n 'standby_state': 'reserving',\n 'statistics': {\n 'last_coup_received': 'Never',\n 'last_coup_sent': 'Never',\n 'last_resign_received': 'Never',\n 'last_resign_sent': 'Never',\n 'last_state_change': '09:37:18',\n 'num_state_changes': 4\n },\n 'timers': {\n 'hello_msec': 3000,\n 'hello_msec_flag': True,\n 'hold_msec': 10000,\n 'hold_msec_flag': True\n },\n 'virtual_mac_address': '0005.73a0.0014'\n },\n 30: {\n 'active_ip_address': 'local',\n 'active_priority': 100,\n 'active_router': 'local',\n 'group_number': 30,\n 'hsrp_router_state': 'active',\n 'link_local_ipv6_address': {\n 'address': 'fe80::205:73ff:fea0:1e'\n },\n 'priority': 100,\n 'standby_ip_address': 'unknown',\n 'standby_router': 'unknown',\n 'standby_state': 'reserving',\n 'statistics': {\n 'last_coup_received': 'Never',\n 'last_coup_sent': 'Never',\n 'last_resign_received': 'Never',\n 'last_resign_sent': 'Never',\n 'last_state_change': '09:37:18',\n 'num_state_changes': 4\n },\n 'timers': {\n 'hello_msec': 3000,\n 'hello_msec_flag': True,\n 'hold_msec': 10000,\n 'hold_msec_flag': True\n },\n 'virtual_mac_address': '0005.73a0.001e'\n }\n }\n }\n }\n }\n },\n 'bfd': {\n 'detection_multiplier': 3,\n 'enabled': True,\n 'interval': 15\n },\n 'delay': {\n 'minimum_delay': 100,\n 'reload_delay': 1000\n },\n 'interface': 'GigabitEthernet0/0/0/0',\n 'redirects_disable': True,\n 'use_bia': False\n }\n }\n\n showHsrpDetailOutputIncomplete = \\\n {\n 'GigabitEthernet0/0/0/0': {\n 'address_family': {\n 'ipv4': {\n 'version': {\n 1: {\n 'groups': {\n 0: {\n 'active_ip_address': 'unknown',\n 'active_router': 'unknown',\n 'bfd': {\n 'address': '10.1.1.1',\n 'interface_name': 'GigabitEthernet0/0/0/1',\n 'state': 'inactive'\n },\n 'group_number': 0,\n 'hsrp_router_state': 'init',\n 'preempt': True,\n 'priority': 100,\n 'standby_ip_address': 'unknown',\n 'standby_router': 'unknown',\n 'standby_state': 'stored',\n 'statistics': {\n 'last_coup_received': 'Never',\n 'last_coup_sent': 'Never',\n 'last_resign_received': 'Never',\n 'last_resign_sent': 'Never',\n 'last_state_change': 'never',\n 'num_state_changes': 0\n },\n 'virtual_mac_address': '0000.0c07.ac00'\n },\n 10: {\n 'active_ip_address': 'local',\n 'active_priority': 63,\n 'active_router': 'local',\n 'authentication': 
'cisco123',\n 'group_number': 10,\n 'hsrp_router_state': 'active',\n 'num_of_slaves': 1,\n 'preempt': True,\n 'primary_ipv4_address': {\n 'address': '10.1.1.254'\n },\n 'priority': 63,\n 'session_name': 'group10',\n 'standby_ip_address': 'unknown',\n 'standby_router': 'unknown',\n 'standby_state': 'reserving',\n 'statistics': {\n 'last_coup_received': 'Never',\n 'last_coup_sent': 'Never',\n 'last_resign_received': 'Never',\n 'last_resign_sent': 'Never',\n 'last_state_change': '09:36:53',\n 'num_state_changes': 4\n },\n 'timers': {\n 'cfgd_hello_msec': 10000,\n 'cfgd_hold_msec': 30000,\n 'hello_msec': 10000,\n 'hello_msec_flag': True,\n 'hold_msec': 30000,\n 'hold_msec_flag': True\n },\n 'tracked_interfaces': {\n 'GigabitEthernet0/0/0/1': {\n 'interface_name': 'GigabitEthernet0/0/0/1',\n 'priority_decrement': 123\n }\n },\n 'tracked_objects': {\n '1': {\n 'object_name': '1',\n 'priority_decrement': 25\n },\n 'num_tracked_objects': 2,\n 'num_tracked_objects_up': 1\n },\n 'virtual_mac_address': '0000.0c07.ac0a'\n },\n 20: {\n 'active_ip_address': 'local',\n 'active_priority': 100,\n 'active_router': 'local',\n 'group_number': 20,\n 'hsrp_router_state': 'active',\n 'primary_ipv4_address': {\n 'address': '10.1.1.128'\n },\n 'priority': 100,\n 'standby_ip_address': 'unknown',\n 'standby_router': 'unknown',\n 'standby_state': 'reserving',\n 'statistics': {\n 'last_coup_received': 'Never',\n 'last_coup_sent': 'Never',\n 'last_resign_received': 'Never',\n 'last_resign_sent': 'Never',\n 'last_state_change': '09:37:52',\n 'num_state_changes': 4\n },\n 'timers': {\n 'cfgd_hello_msec': 111,\n 'cfgd_hold_msec': 333,\n 'hello_msec': 111,\n 'hello_msec_flag': True,\n 'hold_msec': 333,\n 'hold_msec_flag': True\n },\n 'tracked_interfaces': {\n 'GigabitEthernet0/0/0/1': {\n 'interface_name': 'GigabitEthernet0/0/0/1',\n 'priority_decrement': 251\n }\n },\n 'tracked_objects': {\n 'num_tracked_objects': 1,\n 'num_tracked_objects_up': 1\n },\n 'virtual_mac_address': '0000.0c07.ac14'\n }\n },\n 'slave_groups': {\n 30: {\n 'follow': 'group10',\n 'group_number': 30,\n 'hsrp_router_state': 'init',\n 'primary_ipv4_address': {\n 'address': 'unknown'\n },\n 'priority': 100,\n 'standby_state': 'stored',\n 'virtual_mac_address': '0000.0c07.ac1e'\n }\n }\n }\n }\n },\n 'ipv6': {\n 'version': {\n 2: {\n 'groups': {\n 10: {\n 'active_ip_address': 'local',\n 'active_priority': 100,\n 'active_router': 'local',\n 'group_number': 10,\n 'hsrp_router_state': 'active',\n 'link_local_ipv6_address': {\n 'address': 'fe80::205:73ff:fea0:a'\n },\n 'priority': 100,\n 'standby_ip_address': 'unknown',\n 'standby_router': 'unknown',\n 'standby_state': 'reserving',\n 'statistics': {\n 'last_coup_received': 'Never',\n 'last_coup_sent': 'Never',\n 'last_resign_received': 'Never',\n 'last_resign_sent': 'Never',\n 'last_state_change': '09:37:18',\n 'num_state_changes': 4\n },\n 'timers': {\n 'hello_msec': 3000,\n 'hello_msec_flag': True,\n 'hold_msec': 10000,\n 'hold_msec_flag': True\n },\n 'virtual_mac_address': '0005.73a0.000a'\n },\n 20: {\n 'active_ip_address': 'local',\n 'active_priority': 100,\n 'active_router': 'local',\n 'group_number': 20,\n 'hsrp_router_state': 'active',\n 'link_local_ipv6_address': {\n 'address': 'fe80::205:73ff:fea0:14'\n },\n 'priority': 100,\n 'standby_ip_address': 'unknown',\n 'standby_router': 'unknown',\n 'standby_state': 'reserving',\n 'statistics': {\n 'last_coup_received': 'Never',\n 'last_coup_sent': 'Never',\n 'last_resign_received': 'Never',\n 'last_resign_sent': 'Never',\n 'last_state_change': 
'09:37:18',\n 'num_state_changes': 4\n },\n 'timers': {\n 'hello_msec': 3000,\n 'hello_msec_flag': True,\n 'hold_msec': 10000,\n 'hold_msec_flag': True\n },\n 'virtual_mac_address': '0005.73a0.0014'\n },\n 30: {\n 'active_ip_address': 'local',\n 'active_priority': 100,\n 'active_router': 'local',\n 'group_number': 30,\n 'hsrp_router_state': 'active',\n 'link_local_ipv6_address': {\n 'address': 'fe80::205:73ff:fea0:1e'\n },\n 'priority': 100,\n 'standby_ip_address': 'unknown',\n 'standby_router': 'unknown',\n 'standby_state': 'reserving',\n 'statistics': {\n 'last_coup_received': 'Never',\n 'last_coup_sent': 'Never',\n 'last_resign_received': 'Never',\n 'last_resign_sent': 'Never',\n 'last_state_change': '09:37:18',\n 'num_state_changes': 4\n },\n 'timers': {\n 'hello_msec': 3000,\n 'hello_msec_flag': True,\n 'hold_msec': 10000,\n 'hold_msec_flag': True\n },\n 'virtual_mac_address': '0005.73a0.001e'\n }\n }\n }\n }\n }\n },\n 'bfd': {\n 'detection_multiplier': 3,\n 'enabled': True,\n 'interval': 15\n },\n 'delay': {\n 'minimum_delay': 100,\n 'reload_delay': 1000\n },\n 'interface': 'GigabitEthernet0/0/0/0',\n 'redirects_disable': True,\n 'use_bia': False\n }\n }\n\n # 'show hsrp summary' output\n showHsrpSummaryOutput = \\\n {\n 'address_family': {\n 'ipv4': {\n 'intf_down': 0,\n 'intf_total': 1,\n 'intf_up': 1,\n 'state': {\n 'ACTIVE': {\n 'sessions': 2,\n 'slaves': 0,\n 'total': 2\n },\n 'ALL': {\n 'sessions': 3,\n 'slaves': 1,\n 'total': 4\n },\n 'INIT': {\n 'sessions': 1,\n 'slaves': 1,\n 'total': 2\n },\n 'LEARN': {\n 'sessions': 0,\n 'slaves': 0,\n 'total': 0\n },\n 'LISTEN': {\n 'sessions': 0,\n 'slaves': 0,\n 'total': 0\n },\n 'SPEAK': {\n 'sessions': 0,\n 'slaves': 0,\n 'total': 0\n },\n 'STANDBY': {\n 'sessions': 0,\n 'slaves': 0,\n 'total': 0\n }\n },\n 'virtual_addresses_active': 3,\n 'virtual_addresses_inactive': 0,\n 'vritual_addresses_total': 3\n },\n 'ipv6': {\n 'intf_down': 0,\n 'intf_total': 1,\n 'intf_up': 1,\n 'state': {\n 'ACTIVE': {\n 'sessions': 3,\n 'slaves': 0,\n 'total': 3\n },\n 'ALL': {\n 'sessions': 3,\n 'slaves': 0,\n 'total': 3\n },\n 'INIT': {\n 'sessions': 0,\n 'slaves': 0,\n 'total': 0\n },\n 'LEARN': {\n 'sessions': 0,\n 'slaves': 0,\n 'total': 0\n },\n 'LISTEN': {\n 'sessions': 0,\n 'slaves': 0,\n 'total': 0\n },\n 'SPEAK': {\n 'sessions': 0,\n 'slaves': 0,\n 'total': 0\n },\n 'STANDBY': {\n 'sessions': 0,\n 'slaves': 0,\n 'total': 0\n }\n },\n 'virtual_addresses_active': 5,\n 'virtual_addresses_inactive': 0,\n 'vritual_addresses_total': 5\n }\n },\n 'bfd_sessions_down': 0,\n 'bfd_sessions_inactive': 1,\n 'bfd_sessions_up': 0,\n 'num_bfd_sessions': 1,\n 'num_tracked_objects': 2,\n 'tracked_objects_down': 1,\n 'tracked_objects_up': 1\n }\n\n\n # Hsrp Ops Object final output\n hsrpOpsOutput = \\\n {\n 'GigabitEthernet0/0/0/0': {\n 'address_family': {\n 'ipv4': {\n 'version': {\n 1: {\n 'groups': {\n 0: {\n 'active_ip_address': 'unknown',\n 'active_router': 'unknown',\n 'bfd': {\n 'address': '10.1.1.1',\n 'interface_name': 'GigabitEthernet0/0/0/1'\n },\n 'group_number': 0,\n 'hsrp_router_state': 'init',\n 'preempt': True,\n 'priority': 100,\n 'standby_ip_address': 'unknown',\n 'standby_router': 'unknown',\n 'timers': {\n 'hello_msec': 3000,\n 'hello_msec_flag': True,\n 'hold_msec': 10000,\n 'hold_msec_flag': True\n },\n 'virtual_mac_address': '0000.0c07.ac00'\n },\n 10: {\n 'active_ip_address': 'local',\n 'active_router': 'local',\n 'authentication': 'cisco123',\n 'group_number': 10,\n 'hsrp_router_state': 'active',\n 'preempt': True,\n 
'primary_ipv4_address': {\n 'address': '10.1.1.254'\n },\n 'priority': 63,\n 'session_name': 'group10',\n 'standby_ip_address': 'unknown',\n 'standby_router': 'unknown',\n 'timers': {\n 'hello_msec': 10000,\n 'hello_msec_flag': True,\n 'hold_msec': 30000,\n 'hold_msec_flag': True\n },\n 'tracked_interfaces': {\n 'GigabitEthernet0/0/0/1': {\n 'interface_name': 'GigabitEthernet0/0/0/1',\n 'priority_decrement': 123\n }\n },\n 'tracked_objects': {\n '1': {\n 'object_name': '1',\n 'priority_decrement': 25\n }\n },\n 'virtual_mac_address': '0000.0c07.ac0a'\n },\n 20: {\n 'active_ip_address': 'local',\n 'active_router': 'local',\n 'group_number': 20,\n 'hsrp_router_state': 'active',\n 'primary_ipv4_address': {\n 'address': '10.1.1.128'\n },\n 'priority': 100,\n 'standby_ip_address': 'unknown',\n 'standby_router': 'unknown',\n 'timers': {\n 'hello_msec': 111,\n 'hello_msec_flag': True,\n 'hold_msec': 333,\n 'hold_msec_flag': True\n },\n 'tracked_interfaces': {\n 'GigabitEthernet0/0/0/1': {\n 'interface_name': 'GigabitEthernet0/0/0/1',\n 'priority_decrement': 251\n }\n },\n 'virtual_mac_address': '0000.0c07.ac14'\n }\n },\n 'slave_groups': {\n 30: {\n 'follow': 'group10',\n 'primary_ipv4_address': {\n 'address': 'unknown'\n },\n 'virtual_mac_address': '0000.0c07.ac1e'\n }\n }\n }\n }\n },\n 'ipv6': {\n 'version': {\n 2: {\n 'groups': {\n 10: {\n 'active_ip_address': 'local',\n 'active_router': 'local',\n 'group_number': 10,\n 'hsrp_router_state': 'active',\n 'link_local_ipv6_address': {\n 'address': 'fe80::205:73ff:fea0:a'\n },\n 'priority': 100,\n 'standby_ip_address': 'unknown',\n 'standby_router': 'unknown',\n 'timers': {\n 'hello_msec': 3000,\n 'hello_msec_flag': True,\n 'hold_msec': 10000,\n 'hold_msec_flag': True\n },\n 'virtual_mac_address': '0005.73a0.000a'\n },\n 20: {\n 'active_ip_address': 'local',\n 'active_router': 'local',\n 'group_number': 20,\n 'hsrp_router_state': 'active',\n 'link_local_ipv6_address': {\n 'address': 'fe80::205:73ff:fea0:14'\n },\n 'priority': 100,\n 'standby_ip_address': 'unknown',\n 'standby_router': 'unknown',\n 'timers': {\n 'hello_msec': 3000,\n 'hello_msec_flag': True,\n 'hold_msec': 10000,\n 'hold_msec_flag': True\n },\n 'virtual_mac_address': '0005.73a0.0014'\n },\n 30: {\n 'active_ip_address': 'local',\n 'active_router': 'local',\n 'group_number': 30,\n 'hsrp_router_state': 'active',\n 'link_local_ipv6_address': {\n 'address': 'fe80::205:73ff:fea0:1e'\n },\n 'priority': 100,\n 'standby_ip_address': 'unknown',\n 'standby_router': 'unknown',\n 'timers': {\n 'hello_msec': 3000,\n 'hello_msec_flag': True,\n 'hold_msec': 10000,\n 'hold_msec_flag': True\n },\n 'virtual_mac_address': '0005.73a0.001e'\n }\n }\n }\n }\n }\n },\n 'bfd': {\n 'detection_multiplier': 3,\n 'enabled': True,\n 'interval': 15\n },\n 'delay': {\n 'minimum_delay': 100,\n 'reload_delay': 1000\n },\n 'interface': 'GigabitEthernet0/0/0/0',\n 'redirects_disable': True,\n 'use_bia': False\n }\n }\n\n# vim: ft=python et sw=4\n", "id": "12499236", "language": "Python", "matching_score": 5.635010719299316, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/hsrp/iosxr/tests/hsrp_output.py" }, { "content": "class HsrpOutput(object):\n\n # 'show hsrp all' output\n showHsrpAllOutput = {\n \"Ethernet1/3\": {\n \"address_family\": {\n \"ipv4\": {\n \"version\": {\n 2: {\n \"groups\": {\n 0: {\n \"active_priority\": 110,\n \"active_router\": \"local\",\n \"authentication\": \"cisco123\",\n \"configured_priority\": 110,\n \"hsrp_router_state\": \"active\",\n \"session_name\": 
\"hsrp-Eth1/3-0\",\n \"last_state_change\": \"00:01:43\",\n \"lower_fwd_threshold\": 0,\n \"num_state_changes\": 10,\n \"preempt\": True,\n \"priority\": 110,\n \"standby_expire\": 2.429,\n \"standby_priority\": 90,\n \"standby_router\": \"192.168.1.2\",\n \"standby_ip_address\": \"192.168.1.2\",\n \"timers\": {\n \"hello_sec\": 1,\n \"hold_sec\": 3\n },\n 'tracked_objects': {\n 1: {\n 'object_name': 1,\n 'priority_decrement': 22,\n 'status': 'UP',\n }\n },\n \"upper_fwd_threshold\": 110,\n 'primary_ipv4_address': {\n 'address': \"192.168.1.254\",\n },\n \"virtual_mac_address\": \"0000.0c9f.f000\"\n },\n 2: {\n \"active_router\": \"unknown\",\n \"authentication\": \"cisco\",\n \"configured_priority\": 1,\n \"hsrp_router_state\": \"disabled(virtual ip not cfged)\",\n \"session_name\": \"hsrp-Eth1/3-2\",\n \"last_state_change\": \"never\",\n \"lower_fwd_threshold\": 0,\n \"num_state_changes\": 0,\n \"priority\": 1,\n \"standby_router\": \"unknown\",\n \"timers\": {\n \"hello_sec\": 3,\n \"hold_sec\": 10\n },\n \"upper_fwd_threshold\": 1,\n \"virtual_mac_address\": \"0000.0c9f.f002\"\n }\n }\n }\n }\n },\n \"ipv6\": {\n \"version\": {\n 2: {\n \"groups\": {\n 2: {\n \"active_priority\": 100,\n \"active_router\": \"local\",\n \"authentication\": \"cisco\",\n \"configured_priority\": 100,\n \"hsrp_router_state\": \"active\",\n \"session_name\": \"hsrp-Eth1/3-2-V6\",\n \"last_state_change\": \"02:43:40\",\n \"lower_fwd_threshold\": 0,\n \"num_state_changes\": 2,\n \"priority\": 100,\n \"secondary_vips\": \"2001:db8:7746:fa41::1\",\n \"standby_expire\": 8.96,\n \"standby_priority\": 90,\n \"standby_router\": \"fe80::20c:29ff:fe69:14bb\",\n \"standby_ipv6_address\": \"fe80::20c:29ff:fe69:14bb\",\n \"timers\": {\n \"hello_sec\": 3,\n \"hold_sec\": 10\n },\n \"upper_fwd_threshold\": 100,\n 'link_local_ipv6_address': {\n 'address': \"fe80::5:73ff:fea0:2\",\n },\n \"virtual_mac_address\": \"0005.73a0.0002\"\n }\n }\n }\n }\n }\n },\n \"interface\": \"Ethernet1/3\",\n }\n }\n\n # 'show hsrp summary' output\n showHsrpSummaryOutput = {\n 'global_hsrp_bfd': 'enabled',\n 'intf_total': 1,\n 'nsf': 'enabled',\n 'nsf_time': 10,\n 'pkt_unknown_groups': 0,\n 'total_mts_rx': 85,\n 'stats': {\n 'total_groups': 3,\n 'active': 0,\n 'listen': 0,\n 'standby': 0,\n 'v1_ipv4': 0,\n 'v2_ipv4': 3,\n 'v2_ipv6': 0,\n 'v6_active': 0,\n 'v6_listen': 0,\n 'v6_standby': 0\n },\n 'total_packets': {\n 'rx_good': 0,\n 'tx_fail': 0,\n 'tx_pass': 0,\n }\n }\n\n showHsrpDelayOutput = {\n 'Ethernet1/3': {\n 'delay': {\n 'minimum_delay': 99,\n 'reload_delay': 888,\n }\n }\n }\n\n # Hsrp Ops Object final output\n hsrpOpsOutput = \\\n {\n 'Ethernet1/3': {\n 'address_family': {\n 'ipv4': {\n 'version': {\n 2: {\n 'groups': {\n 0: {\n 'active_router': 'local',\n 'authentication': 'cisco123',\n 'hsrp_router_state': 'active',\n 'preempt': True,\n 'primary_ipv4_address': {\n 'address': '192.168.1.254'\n },\n 'priority': 110,\n 'session_name': 'hsrp-Eth1/3-0',\n 'standby_ip_address': '192.168.1.2',\n 'standby_router': '192.168.1.2',\n 'timers': {\n 'hello_sec': 1,\n 'hold_sec': 3\n },\n 'tracked_objects': {\n 1: {\n 'object_name': 1,\n 'priority_decrement': 22\n }\n },\n 'virtual_mac_address': '0000.0c9f.f000'\n },\n 2: {\n 'active_router': 'unknown',\n 'authentication': 'cisco',\n 'hsrp_router_state': 'disabled(virtual '\n 'ip '\n 'not '\n 'cfged)',\n 'priority': 1,\n 'session_name': 'hsrp-Eth1/3-2',\n 'standby_router': 'unknown',\n 'timers': {\n 'hello_sec': 3,\n 'hold_sec': 10\n },\n 'virtual_mac_address': '0000.0c9f.f002'\n }\n }\n }\n 
}\n },\n 'ipv6': {\n 'version': {\n 2: {\n 'groups': {\n 2: {\n 'active_router': 'local',\n 'authentication': 'cisco',\n 'hsrp_router_state': 'active',\n 'link_local_ipv6_address': {\n 'address': 'fe80::5:73ff:fea0:2'\n },\n 'priority': 100,\n 'session_name': 'hsrp-Eth1/3-2-V6',\n 'standby_ipv6_address': 'fe80::20c:29ff:fe69:14bb',\n 'standby_router': 'fe80::20c:29ff:fe69:14bb',\n 'timers': {\n 'hello_sec': 3,\n 'hold_sec': 10\n },\n 'virtual_mac_address': '0005.73a0.0002'\n }\n }\n }\n }\n }\n },\n 'delay': {\n 'minimum_delay': 99, 'reload_delay': 888\n },\n 'interface': 'Ethernet1/3'\n }\n }\n\n\n\n# vim: ft=python et sw=4\n", "id": "6239470", "language": "Python", "matching_score": 0.6121413707733154, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/hsrp/nxos/tests/hsrp_output.py" }, { "content": "''' \nArp Genie Ops Object Outputs for IOS.\n'''\n\n\nclass ArpOutput(object):\n\n\tShowIpArp = {\n\t\t'interfaces': {\n\t\t\t'FastEthernet0': {\n\t\t\t\t'ipv4': {\n\t\t\t\t\t'neighbors': {\n\t\t\t\t\t\t'10.1.8.1': {\n\t\t\t\t\t\t\t'age': '79',\n\t\t\t\t\t\t\t'ip': '10.1.8.1',\n\t\t\t\t\t\t\t'link_layer_address': '0012.7f57.ac80',\n\t\t\t\t\t\t\t'origin': 'dynamic',\n\t\t\t\t\t\t\t'protocol': 'Internet',\n\t\t\t\t\t\t\t'type': 'ARPA'},\n\t\t\t\t\t\t'10.1.8.146': {\n\t\t\t\t\t\t\t'age': '-',\n\t\t\t\t\t\t\t'ip': '10.1.8.146',\n\t\t\t\t\t\t\t'link_layer_address': '843d.c638.b9b7',\n\t\t\t\t\t\t\t'origin': 'static',\n\t\t\t\t\t\t\t'protocol': 'Internet',\n\t\t\t\t\t\t\t'type': 'ARPA'}}}},\n\t\t\t'Port-channel10': {\n\t\t\t\t'ipv4': {\n\t\t\t\t\t'neighbors': {\n\t\t\t\t\t\t'10.9.1.1': {\n\t\t\t\t\t\t\t'age': '-',\n\t\t\t\t\t\t\t'ip': '10.9.1.1',\n\t\t\t\t\t\t\t'link_layer_address': '843d.c638.b9c6',\n\t\t\t\t\t\t\t'origin': 'static',\n\t\t\t\t\t\t\t'protocol': 'Internet',\n\t\t\t\t\t\t\t'type': 'ARPA'}}}},\n\t\t\t'Vlan99': {\n\t\t\t\t'ipv4': {\n\t\t\t\t\t'neighbors': {\n\t\t\t\t\t\t'10.69.1.2': {\n\t\t\t\t\t\t\t'age': '-',\n\t\t\t\t\t\t\t'ip': '10.69.1.2',\n\t\t\t\t\t\t\t'link_layer_address': '843d.c638.b9c1',\n\t\t\t\t\t\t\t'origin': 'static',\n\t\t\t\t\t\t\t'protocol': 'Internet',\n\t\t\t\t\t\t\t'type': 'ARPA'}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\tShowIpArpVrf = {\n\t\t'interfaces': {\n\t\t\t'GigabitEthernet2.390': {\n\t\t\t\t'ipv4': {\n\t\t\t\t\t'neighbors': {\n\t\t\t\t\t\t'10.12.90.1': {\n\t\t\t\t\t\t\t'age': '-',\n\t\t\t\t\t\t\t'ip': '10.12.90.1',\n\t\t\t\t\t\t\t'link_layer_address': 'fa16.3e24.787a',\n\t\t\t\t\t\t\t'origin': 'static',\n\t\t\t\t\t\t\t'protocol': 'Internet',\n\t\t\t\t\t\t\t'type': 'ARPA'},\n\t\t\t\t\t\t'10.12.90.2':\n\t\t\t\t\t\t\t{'age': '139',\n\t\t\t\t\t\t\t 'ip': '10.12.90.2',\n\t\t\t\t\t\t\t 'link_layer_address': 'fa16.3e8a.cfeb',\n\t\t\t\t\t\t\t 'origin': 'dynamic',\n\t\t\t\t\t\t\t 'protocol': 'Internet',\n\t\t\t\t\t\t\t 'type': 'ARPA'}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t},\n\t\t\t'GigabitEthernet2.410': {\n\t\t\t\t'ipv4': {\n\t\t\t\t\t'neighbors': {\n\t\t\t\t\t\t'10.12.110.1': {\n\t\t\t\t\t\t\t'age': '-',\n\t\t\t\t\t\t\t'ip': '10.12.110.1',\n\t\t\t\t\t\t\t'link_layer_address': 'fa16.3e24.787a',\n\t\t\t\t\t\t\t'origin': 'static',\n\t\t\t\t\t\t\t'protocol': 'Internet',\n\t\t\t\t\t\t\t'type': 'ARPA'}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\tShowVrf = {\n\t\t'vrf': {\n\t\t\t'VRF1': {\n\t\t\t\t'route_distinguisher': '65000:1',\n\t\t\t\t'protocols': ['ipv4', 'ipv6'],\n\t\t\t\t'interfaces': ['GigabitEthernet2.390',\n\t\t\t\t\t\t\t 'GigabitEthernet2.410'],\n\t\t\t}\n\t\t}\n\t}\n\tShowIpArpSummary = {\n\t\t'incomp_entries': 0,\n\t\t'total_entries': 
8}\n\n\tShowIpTraffic = {\n\t\t'arp_statistics': {\n\t\t\t'arp_drops_input_full': 0,\n\t\t\t'arp_in_other': 0,\n\t\t\t'arp_in_replies': 25520,\n\t\t\t'arp_in_requests': 26338,\n\t\t\t'arp_in_reverse': 42,\n\t\t\t'arp_out_proxy': 0,\n\t\t\t'arp_out_replies': 1399,\n\t\t\t'arp_out_requests': 123,\n\t\t\t'arp_out_reverse': 0},\n\t\t'bgp_statistics': {\n\t\t\t'bgp_received_keepalives': 0,\n\t\t\t'bgp_received_notifications': 0,\n\t\t\t'bgp_received_opens': 0,\n\t\t\t'bgp_received_route_refresh': 0,\n\t\t\t'bgp_received_total': 0,\n\t\t\t'bgp_received_unrecognized': 0,\n\t\t\t'bgp_received_updates': 0,\n\t\t\t'bgp_sent_keepalives': 0,\n\t\t\t'bgp_sent_notifications': 0,\n\t\t\t'bgp_sent_opens': 0,\n\t\t\t'bgp_sent_route_refresh': 0,\n\t\t\t'bgp_sent_total': 0,\n\t\t\t'bgp_sent_updates': 0},\n\t\t'eigrp_ipv4_statistics': {\n\t\t\t'eigrp_ipv4_received_total': 0,\n\t\t\t'eigrp_ipv4_sent_total': 0},\n\t\t'icmp_statistics': {\n\t\t\t'icmp_received_checksum_errors': 0,\n\t\t\t'icmp_received_echo': 43838,\n\t\t\t'icmp_received_echo_reply': 713,\n\t\t\t'icmp_received_format_errors': 0,\n\t\t\t'icmp_received_info_request': 0,\n\t\t\t'icmp_received_irdp_advertisements': 0,\n\t\t\t'icmp_received_irdp_solicitations': 0,\n\t\t\t'icmp_received_mask_replies': 0,\n\t\t\t'icmp_received_mask_requests': 0,\n\t\t\t'icmp_received_other': 0,\n\t\t\t'icmp_received_parameter': 0,\n\t\t\t'icmp_received_quench': 0,\n\t\t\t'icmp_received_redirects': 0,\n\t\t\t'icmp_received_timestamp': 0,\n\t\t\t'icmp_received_unreachable': 0,\n\t\t\t'icmp_sent_echo': 730,\n\t\t\t'icmp_sent_echo_reply': 43838,\n\t\t\t'icmp_sent_info_reply': 0,\n\t\t\t'icmp_sent_irdp_advertisements': 0,\n\t\t\t'icmp_sent_irdp_solicitations': 0,\n\t\t\t'icmp_sent_mask_replies': 0,\n\t\t\t'icmp_sent_mask_requests': 0,\n\t\t\t'icmp_sent_parameter_problem': 0,\n\t\t\t'icmp_sent_quench': 0,\n\t\t\t'icmp_sent_redirects': 0,\n\t\t\t'icmp_sent_time_exceeded': 0,\n\t\t\t'icmp_sent_timestamp': 0,\n\t\t\t'icmp_sent_unreachable': 4},\n\t\t'igmp_statistics': {\n\t\t\t'igmp_checksum_errors': '0/0',\n\t\t\t'igmp_dvmrp': '0/0',\n\t\t\t'igmp_format_errors': '0/0',\n\t\t\t'igmp_host_leaves': '0/0',\n\t\t\t'igmp_host_queries': '0/0',\n\t\t\t'igmp_host_reports': '0/0',\n\t\t\t'igmp_pim': '0/0',\n\t\t\t'igmp_total': '0/0'},\n\t\t'ip_statistics': {\n\t\t\t'ip_bcast_received': 653921,\n\t\t\t'ip_bcast_sent': 6,\n\t\t\t'ip_drop_encap_failed': 10,\n\t\t\t'ip_drop_forced_drop': 0,\n\t\t\t'ip_drop_no_adj': 0,\n\t\t\t'ip_drop_no_route': 0,\n\t\t\t'ip_drop_opts_denied': 0,\n\t\t\t'ip_drop_src_ip': 0,\n\t\t\t'ip_drop_unicast_rpf': 0,\n\t\t\t'ip_drop_unresolved': 0,\n\t\t\t'ip_frags_fragmented': 0,\n\t\t\t'ip_frags_no_fragmented': 0,\n\t\t\t'ip_frags_no_reassembled': 0,\n\t\t\t'ip_frags_reassembled': 0,\n\t\t\t'ip_frags_timeouts': 0,\n\t\t\t'ip_mcast_received': 0,\n\t\t\t'ip_mcast_sent': 0,\n\t\t\t'ip_opts_alert': 0,\n\t\t\t'ip_opts_basic_security': 0,\n\t\t\t'ip_opts_cipso': 0,\n\t\t\t'ip_opts_end': 0,\n\t\t\t'ip_opts_extended_security': 0,\n\t\t\t'ip_opts_loose_src_route': 0,\n\t\t\t'ip_opts_nop': 0,\n\t\t\t'ip_opts_other': 0,\n\t\t\t'ip_opts_record_route': 0,\n\t\t\t'ip_opts_strct_src_route': 0,\n\t\t\t'ip_opts_strm_id': 0,\n\t\t\t'ip_opts_timestamp': 0,\n\t\t\t'ip_opts_ump': 0,\n\t\t\t'ip_rcvd_bad_hop': 811,\n\t\t\t'ip_rcvd_bad_optns': 0,\n\t\t\t'ip_rcvd_checksum_errors': 0,\n\t\t\t'ip_rcvd_format_errors': 0,\n\t\t\t'ip_rcvd_local_destination': 843331,\n\t\t\t'ip_rcvd_not_gateway': 6,\n\t\t\t'ip_rcvd_sec_failures': 0,\n\t\t\t'ip_rcvd_total': 
844148,\n\t\t\t'ip_rcvd_unknwn_protocol': 0,\n\t\t\t'ip_rcvd_with_optns': 0,\n\t\t\t'ip_sent_forwarded': 0,\n\t\t\t'ip_sent_generated': 212110},\n\t\t'ospf_statistics': {\n\t\t\t'ospf_received_checksum_errors': 0,\n\t\t\t'ospf_received_database_desc': 0,\n\t\t\t'ospf_received_hello': 0,\n\t\t\t'ospf_received_link_state_req': 0,\n\t\t\t'ospf_received_lnk_st_acks': 0,\n\t\t\t'ospf_received_lnk_st_updates': 0,\n\t\t\t'ospf_received_total': 0,\n\t\t\t'ospf_sent_database_desc': 0,\n\t\t\t'ospf_sent_hello': 0,\n\t\t\t'ospf_sent_lnk_st_acks': 0,\n\t\t\t'ospf_sent_lnk_st_updates': 0,\n\t\t\t'ospf_sent_total': 0},\n\t\t'pimv2_statistics': {\n\t\t\t'pimv2_asserts': '0/0',\n\t\t\t'pimv2_bootstraps': '0/0',\n\t\t\t'pimv2_candidate_rp_advs': '0/0',\n\t\t\t'pimv2_checksum_errors': 0,\n\t\t\t'pimv2_format_errors': 0,\n\t\t\t'pimv2_grafts': '0/0',\n\t\t\t'pimv2_hellos': '0/0',\n\t\t\t'pimv2_join_prunes': '0/0',\n\t\t\t'pimv2_non_rp': 0,\n\t\t\t'pimv2_non_sm_group': 0,\n\t\t\t'pimv2_registers': '0/0',\n\t\t\t'pimv2_registers_stops': '0/0',\n\t\t\t'pimv2_state_refresh': '0/0',\n\t\t\t'pimv2_total': '0/0'},\n\t\t'tcp_statistics': {\n\t\t\t'tcp_received_checksum_errors': 0,\n\t\t\t'tcp_received_no_port': 0,\n\t\t\t'tcp_received_total': 116563,\n\t\t\t'tcp_sent_total': 139252},\n\t\t'udp_statistics': {\n\t\t\t'udp_received_no_port': 289579,\n\t\t\t'udp_received_total': 682217,\n\t\t\t'udp_received_udp_checksum_errors': 0,\n\t\t\t'udp_sent_fwd_broadcasts': 0,\n\t\t\t'udp_sent_total': 28296}\n\t\t}\n\n\tShowIpInterface = {\n\t\t\"Vlan211\": {\n\t\t\t\"sevurity_level\": \"default\",\n\t\t\t\"ip_route_cache_flags\": [\n\t\t\t\t\"CEF\",\n\t\t\t\t\"Fast\"\n\t\t\t],\n\t\t\t\"enabled\": True,\n\t\t\t\"oper_status\": \"up\",\n\t\t\t\"address_determined_by\": \"configuration file\",\n\t\t\t\"router_discovery\": False,\n\t\t\t\"ip_multicast_fast_switching\": False,\n\t\t\t\"split_horizon\": True,\n\t\t\t\"bgp_policy_mapping\": False,\n\t\t\t\"ip_output_packet_accounting\": False,\n\t\t\t\"mtu\": 1500,\n\t\t\t\"policy_routing\": False,\n\t\t\t\"local_proxy_arp\": False,\n\t\t\t\"proxy_arp\": True,\n\t\t\t\"network_address_translation\": False,\n\t\t\t\"ip_cef_switching_turbo_vector\": True,\n\t\t\t\"icmp\": {\n\t\t\t\t\"redirects\": \"always sent\",\n\t\t\t\t\"mask_replies\": \"never sent\",\n\t\t\t\t\"unreachables\": \"always sent\",\n\t\t\t},\n\t\t\t\"ipv4\": {\n\t\t\t\t\"192.168.76.1/24\": {\n\t\t\t\t\t\"prefix_length\": \"24\",\n\t\t\t\t\t\"ip\": \"192.168.76.1\",\n\t\t\t\t\t\"secondary\": False,\n\t\t\t\t\t\"broadcase_address\": \"255.255.255.255\"\n\t\t\t\t}\n\t\t\t},\n\t\t\t\"ip_access_violation_accounting\": False,\n\t\t\t\"ip_cef_switching\": True,\n\t\t\t\"unicast_routing_topologies\": {\n\t\t\t\t\"topology\": {\n\t\t\t\t\t\"base\": {\n\t\t\t\t\t\t\"status\": \"up\"\n\t\t\t\t\t}\n\t\t\t\t},\n\t\t\t},\n\t\t\t\"ip_null_turbo_vector\": True,\n\t\t\t\"probe_proxy_name_replies\": False,\n\t\t\t\"ip_fast_switching\": True,\n\t\t\t\"ip_multicast_distributed_fast_switching\": False,\n\t\t\t\"tcp_ip_header_compression\": False,\n\t\t\t\"rtp_ip_header_compression\": False,\n\t\t\t\"input_features\": [\"MCI Check\"],\n\t\t\t\"directed_broadcast_forwarding\": False,\n\t\t\t\"ip_flow_switching\": False\n\t },\n\t \"GigabitEthernet0/0\": {\n\t\t\t\"sevurity_level\": \"default\",\n\t\t\t'address_determined_by': 'setup command',\n\t\t\t\"ip_route_cache_flags\": [\n\t\t\t\t \"CEF\",\n\t\t\t\t \"Fast\"\n\t\t\t],\n\t\t\t\"enabled\": True,\n\t\t\t\"oper_status\": \"up\",\n\t\t\t\"router_discovery\": 
False,\n\t\t\t\"ip_multicast_fast_switching\": False,\n\t\t\t\"split_horizon\": True,\n\t\t\t\"bgp_policy_mapping\": False,\n\t\t\t\"ip_output_packet_accounting\": False,\n\t\t\t\"mtu\": 1500,\n\t\t\t\"policy_routing\": False,\n\t\t\t\"local_proxy_arp\": False,\n\t\t\t\"vrf\": \"Mgmt-vrf\",\n\t\t\t\"proxy_arp\": True,\n\t\t\t\"network_address_translation\": False,\n\t\t\t\"ip_cef_switching_turbo_vector\": True,\n\t\t\t\"icmp\": {\n\t\t\t\t\"redirects\": \"always sent\",\n\t\t\t\t\"mask_replies\": \"never sent\",\n\t\t\t\t\"unreachables\": \"always sent\",\n\t\t\t},\n\t\t\t\"ipv4\": {\n\t\t\t\t\"10.1.8.134/24\": {\n\t\t\t\t\t\"prefix_length\": \"24\",\n\t\t\t\t\t\"ip\": \"10.1.8.134\",\n\t\t\t\t\t\"secondary\": False,\n\t\t\t\t\t\"broadcase_address\": \"255.255.255.255\"\n\t\t\t\t}\n\t\t\t},\n\t\t\t\"ip_access_violation_accounting\": False,\n\t\t\t\"ip_cef_switching\": True,\n\t\t\t\"unicast_routing_topologies\": {\n\t\t\t\t\"topology\": {\n\t\t\t\t\t\"base\": {\n\t\t\t\t\t\t\"status\": \"up\"\n\t\t\t\t\t}\n\t\t\t\t},\n\t\t\t},\n\t\t\t\"ip_null_turbo_vector\": True,\n\t\t\t\"probe_proxy_name_replies\": False,\n\t\t\t\"ip_fast_switching\": True,\n\t\t\t\"ip_multicast_distributed_fast_switching\": False,\n\t\t\t\"tcp_ip_header_compression\": False,\n\t\t\t\"rtp_ip_header_compression\": False,\n\t\t\t\"input_features\": [\"MCI Check\"],\n\t\t\t\"directed_broadcast_forwarding\": False,\n\t\t\t\"ip_flow_switching\": False\n\t },\n\t \"GigabitEthernet2\": {\n\t\t\t\"enabled\": False,\n\t\t\t\"oper_status\": \"down\"\n\t },\n\t \"GigabitEthernet1/0/1\": {\n\t\t\t\"sevurity_level\": \"default\",\n\t\t\t'address_determined_by': 'setup command',\n\t\t\t\"ip_route_cache_flags\": [\n\t\t\t\t\"CEF\",\n\t\t\t\t\"Fast\"\n\t\t\t],\n\t\t\t\"enabled\": False,\n\t\t\t\"oper_status\": \"down\",\n\t\t\t\"router_discovery\": False,\n\t\t\t\"ip_multicast_fast_switching\": False,\n\t\t\t\"split_horizon\": True,\n\t\t\t\"bgp_policy_mapping\": False,\n\t\t\t\"ip_output_packet_accounting\": False,\n\t\t\t\"mtu\": 1500,\n\t\t\t\"policy_routing\": False,\n\t\t\t\"local_proxy_arp\": False,\n\t\t\t\"proxy_arp\": True,\n\t\t\t\"network_address_translation\": False,\n\t\t\t\"ip_cef_switching_turbo_vector\": True,\n\t\t\t\"icmp\": {\n\t\t\t\t\"redirects\": \"always sent\",\n\t\t\t\t\"mask_replies\": \"never sent\",\n\t\t\t\t\"unreachables\": \"always sent\",\n\t\t\t},\n\t\t\t\"ipv4\": {\n\t\t\t\t\"10.1.1.1/24\": {\n\t\t\t\t\t\"prefix_length\": \"24\",\n\t\t\t\t\t\"ip\": \"10.1.1.1\",\n\t\t\t\t\t\"secondary\": False,\n\t\t\t\t\t\"broadcase_address\": \"255.255.255.255\"\n\t\t\t\t},\n\t\t\t\t\"10.2.2.2/24\": {\n\t\t\t\t\t\"prefix_length\": \"24\",\n\t\t\t\t\t\"ip\": \"10.2.2.2\",\n\t\t\t\t\t\"secondary\": True\n\t\t\t\t},\n\t\t\t},\n\t\t\t\"ip_access_violation_accounting\": False,\n\t\t\t\"ip_cef_switching\": True,\n\t\t\t\"unicast_routing_topologies\": {\n\t\t\t\t\"topology\": {\n\t\t\t\t\t\"base\": {\n\t\t\t\t\t\t\"status\": \"up\"\n\t\t\t\t\t}\n\t\t\t\t},\n\t\t\t},\n\t\t\t'wccp': {\n\t\t\t\t'redirect_outbound': False,\n\t\t\t\t'redirect_inbound': False,\n\t\t\t\t'redirect_exclude': False,\n\t\t\t},\n\t\t\t\"ip_null_turbo_vector\": True,\n\t\t\t\"probe_proxy_name_replies\": False,\n\t\t\t\"ip_fast_switching\": True,\n\t\t\t\"ip_multicast_distributed_fast_switching\": False,\n\t\t\t\"tcp_ip_header_compression\": False,\n\t\t\t\"rtp_ip_header_compression\": False,\n\t\t\t\"directed_broadcast_forwarding\": False,\n\t\t\t\"ip_flow_switching\": False,\n\t\t\t\"input_features\": [\"MCI Check\", \"QoS Classification\", \"QoS 
Marking\"],\n\t\t}\n\t}\n\n\tArp_info = {\n\t\t'interfaces': {\n\t\t\t'FastEthernet0': {\n\t\t\t\t'ipv4': {\n\t\t\t\t\t'neighbors': {\n\t\t\t\t\t\t'10.1.8.1': {\n\t\t\t\t\t\t\t'ip': '10.1.8.1',\n 'link_layer_address': '0012.7f57.ac80',\n 'origin': 'dynamic'},\n \t'10.1.8.146': {\n \t\t'ip': '10.1.8.146',\n\t 'link_layer_address': '843d.c638.b9b7',\n\t 'origin': 'static'}\n\t }\n\t }\n\t },\n 'GigabitEthernet0/0': {\n \t'arp_dynamic_learning': {\n \t\t'local_proxy_enable': False,\n 'proxy_enable': True}\n },\n 'GigabitEthernet1/0/1': {\n \t'arp_dynamic_learning': {\n \t\t'local_proxy_enable': False,\n 'proxy_enable': True}\n },\n 'Port-channel10': {\n \t'ipv4': {\n \t\t'neighbors': {\n \t\t\t'10.9.1.1': {\n \t\t\t\t'ip': '10.9.1.1',\n \t'link_layer_address': '843d.c638.b9c6',\n \t'origin': 'static'}\n }\n }\n },\n 'Vlan211': {\n \t'arp_dynamic_learning': {\n \t\t'local_proxy_enable': False,\n 'proxy_enable': True}\n },\n 'Vlan99': {\n \t'ipv4': {\n \t\t'neighbors': {\n \t\t\t'10.69.1.2': {\n \t\t\t\t'ip': '10.69.1.2',\n \t'link_layer_address': '843d.c638.b9c1',\n \t'origin': 'static'}\n }\n }\n },\n\t\t\t'GigabitEthernet2.390': {\n\t\t\t\t'ipv4': {\n\t\t\t\t\t'neighbors': {\n\t\t\t\t\t\t'10.12.90.1': {\n\t\t\t\t\t\t\t'ip': '10.12.90.1',\n\t\t\t\t\t\t\t'link_layer_address': 'fa16.3e24.787a',\n\t\t\t\t\t\t\t'origin': 'static',\n\t\t\t\t\t\t},\n\t\t\t\t\t\t'10.12.90.2':\n\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t'ip': '10.12.90.2',\n\t\t\t\t\t\t\t\t'link_layer_address': 'fa16.3e8a.cfeb',\n\t\t\t\t\t\t\t\t'origin': 'dynamic',\n\t\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t},\n\t\t\t'GigabitEthernet2.410': {\n\t\t\t\t'ipv4': {\n\t\t\t\t\t'neighbors': {\n\t\t\t\t\t\t'10.12.110.1': {\n\t\t\t\t\t\t\t'ip': '10.12.110.1',\n\t\t\t\t\t\t\t'link_layer_address': 'fa16.3e24.787a',\n\t\t\t\t\t\t\t'origin': 'static',\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t},\n },\n \t\t'statistics': {'entries_total': 8,\n 'in_drops': 0,\n 'in_replies_pkts': 25520,\n 'in_requests_pkts': 26338,\n 'incomplete_total': 0,\n 'out_replies_pkts': 1399,\n 'out_requests_pkts': 123}\n \t}", "id": "4933390", "language": "Python", "matching_score": 14.446866035461426, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/arp/ios/tests/arp_output.py" }, { "content": "''' \nArp Genie Ops Object Outputs for IOSXE.\n'''\n\n\nclass ArpOutput(object):\n\n\tShowArp = {\n\t\t'interfaces': {\n\t\t\t'Vlan100': {\n\t\t\t\t'ipv4': {\n\t\t\t\t\t'neighbors': {\n\t\t\t\t\t\t'192.168.234.1': {\n\t\t\t\t\t\t\t'age': '-',\n\t\t\t\t\t\t\t'ip': '192.168.234.1',\n\t\t\t\t\t\t\t'link_layer_address': '58bf.eab6.2f51',\n\t\t\t\t\t\t\t'origin': 'static',\n\t\t\t\t\t\t\t'protocol': 'Internet',\n\t\t\t\t\t\t\t'type': 'ARPA'},\n\t\t\t\t\t\t'192.168.234.2': {'age': '29',\n\t\t\t\t\t\t\t'ip': '192.168.234.2',\n\t\t\t\t\t\t\t'link_layer_address': '3820.5672.fc51',\n\t\t\t\t\t\t\t'origin': 'dynamic',\n\t\t\t\t\t\t\t'protocol': 'Internet',\n\t\t\t\t\t\t\t'type': 'ARPA'}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t},\n\t\t\t'Vlan200': {\n\t\t\t\t'ipv4': {\n\t\t\t\t\t'neighbors': {\n\t\t\t\t\t\t'192.168.70.1': {\n\t\t\t\t\t\t\t'age': '-',\n\t\t\t\t\t\t\t'ip': '192.168.70.1',\n\t\t\t\t\t\t\t'link_layer_address': '58bf.eab6.2f62',\n\t\t\t\t\t\t\t'origin': 'static',\n\t\t\t\t\t\t\t'protocol': 'Internet',\n\t\t\t\t\t\t\t'type': 'ARPA'}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\tShowIpArp_all='''\n\t\t\tProtocol Address Age (min) Hardware Addr Type Interface\n\t\t\tInternet 192.168.234.1 - 58bf.eab6.2f51 ARPA Vlan100\n\t\t\tInternet 192.168.234.2 29 3820.5672.fc51 ARPA 
Vlan100\n\t\t\tInternet 192.168.70.1 - 58bf.eab6.2f62 ARPA Vlan200\n\t\t\t'''\n\tShowIpArp_vrf1='''\nProtocol Address Age (min) Hardware Addr Type Interface\nInternet 10.12.90.1 - fa16.3e24.787a ARPA GigabitEthernet2.390\nInternet 10.12.90.2 139 fa16.3e8a.cfeb ARPA GigabitEthernet2.390\nInternet 10.12.110.1 - fa16.3e24.787a ARPA GigabitEthernet2.410\n\t'''\n\tShowIpArp={\n 'interfaces': {\n 'GigabitEthernet2.390': {\n 'ipv4': {\n 'neighbors': {\n '10.12.90.1': {\n 'age': '-',\n 'ip': '10.12.90.1',\n 'link_layer_address': 'fa16.3e24.787a',\n 'origin': 'static',\n 'protocol': 'Internet',\n 'type': 'ARPA'},\n '10.12.90.2':\n {'age': '139',\n 'ip': '10.12.90.2',\n 'link_layer_address': 'fa16.3e8a.cfeb',\n 'origin': 'dynamic',\n 'protocol': 'Internet',\n 'type': 'ARPA'}\n }\n }\n },\n 'GigabitEthernet2.410': {\n 'ipv4': {\n 'neighbors': {\n '10.12.110.1': {\n 'age': '-',\n 'ip': '10.12.110.1',\n 'link_layer_address': 'fa16.3e24.787a',\n 'origin': 'static',\n 'protocol': 'Internet',\n 'type': 'ARPA'}\n }\n }\n }\n }\n }\n\tShowVrf={\n 'vrf': {\n 'VRF1': {\n 'route_distinguisher': '65000:1',\n 'protocols': ['ipv4', 'ipv6'],\n 'interfaces': ['GigabitEthernet2.390',\n 'GigabitEthernet2.410'],\n }\n }\n }\n\tShowIpArpSummary = {\n\t\t 'incomp_entries': 0,\n\t\t 'total_entries': 8}\n\n\tShowIpTraffic = {\n\t\t'arp_statistics': {\n\t\t\t'arp_drops_input_full': 0,\n\t\t\t'arp_in_other': 0,\n\t\t\t'arp_in_replies': 764,\n\t\t\t'arp_in_requests': 2020,\n\t\t\t'arp_in_reverse': 0,\n\t\t\t'arp_out_proxy': 2,\n\t\t\t'arp_out_replies': 126,\n\t\t\t'arp_out_requests': 29,\n\t\t\t'arp_out_reverse': 0},\n\t\t\t'ip_statistics': {\n\t\t\t\t'ip_bcast_received': 33324,\n\t\t\t'ip_bcast_sent': 5,\n\t\t\t'ip_drop_encap_failed': 8,\n\t\t\t'ip_drop_forced_drop': 0,\n\t\t\t'ip_drop_no_adj': 20,\n\t\t\t'ip_drop_no_route': 19,\n\t\t\t'ip_drop_opts_denied': 0,\n\t\t\t'ip_drop_src_ip': 0,\n\t\t\t'ip_drop_unicast_rpf': 0,\n\t\t\t'ip_drop_unresolved': 0,\n\t\t\t'ip_drop_unsupp_address': 0,\n\t\t\t'ip_frags_fragmented': 1,\n\t\t\t'ip_frags_fragments': 5,\n\t\t\t'ip_frags_invalid_hole': 0,\n\t\t\t'ip_frags_no_fragmented': 0,\n\t\t\t'ip_frags_no_reassembled': 0,\n\t\t\t'ip_frags_reassembled': 0,\n\t\t\t'ip_frags_timeouts': 0,\n\t\t\t'ip_mcast_received': 144833,\n\t\t\t'ip_mcast_sent': 66274,\n\t\t\t'ip_opts_alert': 12717,\n\t\t\t'ip_opts_basic_security': 0,\n\t\t\t'ip_opts_cipso': 0,\n\t\t\t'ip_opts_end': 0,\n\t\t\t'ip_opts_extended_security': 0,\n\t\t\t'ip_opts_ignored': 0,\n\t\t\t'ip_opts_loose_src_route': 0,\n\t\t\t'ip_opts_nop': 0,\n\t\t\t'ip_opts_other': 0,\n\t\t\t'ip_opts_record_route': 0,\n\t\t\t'ip_opts_strct_src_route': 0,\n\t\t\t'ip_opts_strm_id': 0,\n\t\t\t'ip_opts_timestamp': 0,\n\t\t\t'ip_opts_ump': 0,\n\t\t\t'ip_rcvd_bad_hop': 0,\n\t\t\t'ip_rcvd_bad_optns': 0,\n\t\t\t'ip_rcvd_checksum_errors': 0,\n\t\t\t'ip_rcvd_format_errors': 0,\n\t\t\t'ip_rcvd_local_destination': 110596,\n\t\t\t'ip_rcvd_not_gateway': 5,\n\t\t\t'ip_rcvd_sec_failures': 0,\n\t\t\t'ip_rcvd_total': 17780,\n\t\t\t'ip_rcvd_unknwn_protocol': 0,\n\t\t\t'ip_rcvd_with_optns': 12717,\n\t\t\t'ip_sent_forwarded': 1654728,\n\t\t\t'ip_sent_generated': 85543},\n\t\t'icmp_statistics': {\n\t\t\t'icmp_received_checksum_errors': 0,\n\t\t\t'icmp_received_echo': 284,\n\t\t\t'icmp_received_echo_reply': 9,\n\t\t\t'icmp_received_format_errors': 0,\n\t\t\t'icmp_received_info_replies': 0,\n\t\t\t'icmp_received_info_request': 0,\n\t\t\t'icmp_received_irdp_advertisements': 0,\n\t\t\t'icmp_received_irdp_solicitations': 0,\n\t\t\t'icmp_received_mask_replies': 
0,\n\t\t\t'icmp_received_mask_requests': 0,\n\t\t\t'icmp_received_other': 0,\n\t\t\t'icmp_received_parameter': 0,\n\t\t\t'icmp_received_quench': 0,\n\t\t\t'icmp_received_redirects': 0,\n\t\t\t'icmp_received_time_exceeded': 0,\n\t\t\t'icmp_received_timestamp': 0,\n\t\t\t'icmp_received_timestamp_replies': 0,\n\t\t\t'icmp_received_unreachable': 0,\n\t\t\t'icmp_sent_echo': 9,\n\t\t\t'icmp_sent_echo_reply': 134,\n\t\t\t'icmp_sent_info_reply': 0,\n\t\t\t'icmp_sent_irdp_advertisements': 0,\n\t\t\t'icmp_sent_irdp_solicitations': 0,\n\t\t\t'icmp_sent_mask_replies': 0,\n\t\t\t'icmp_sent_mask_requests': 0,\n\t\t\t'icmp_sent_parameter_problem': 0,\n\t\t\t'icmp_sent_quench': 0,\n\t\t\t'icmp_sent_redirects': 0,\n\t\t\t'icmp_sent_time_exceeded': 0,\n\t\t\t'icmp_sent_timestamp': 0,\n\t\t\t'icmp_sent_timestamp_replies': 0,\n\t\t\t'icmp_sent_unreachable': 14},\n\t\t'udp_statistics': {\n\t\t\t'udp_received_finput': 0,\n\t\t\t'udp_received_no_port': 15906,\n\t\t\t'udp_received_total': 62515,\n\t\t\t'udp_received_udp_checksum_errors': 0,\n\t\t\t'udp_sent_fwd_broadcasts': 0,\n\t\t\t'udp_sent_total': 41486},\n\t\t'pimv2_statistics': {\n\t\t\t'pimv2_asserts': '0/697',\n\t\t\t'pimv2_bootstraps': '2088/2438',\n\t\t\t'pimv2_candidate_rp_advs': '350/0',\n\t\t\t'pimv2_checksum_errors': 0,\n\t\t\t'pimv2_format_errors': 0,\n\t\t\t'pimv2_grafts': '0/2',\n\t\t\t'pimv2_hellos': '5011/5008',\n\t\t\t'pimv2_join_prunes': '5/712',\n\t\t\t'pimv2_non_rp': 0,\n\t\t\t'pimv2_non_sm_group': 0,\n\t\t\t'pimv2_queue_drops': 0,\n\t\t\t'pimv2_registers': '1/1',\n\t\t\t'pimv2_registers_stops': '1/1',\n\t\t\t'pimv2_state_refresh': '0/0',\n\t\t\t'pimv2_total': '7458/8859'},\n\t\t'ospf_statistics': {\n\t\t\t'ospf_received_checksum_errors': 0,\n\t\t\t'ospf_received_database_desc': 20,\n\t\t\t'ospf_received_hello': 15153,\n\t\t\t'ospf_received_link_state_req': 2,\n\t\t\t'ospf_received_lnk_st_acks': 688,\n\t\t\t'ospf_received_lnk_st_updates': 359,\n\t\t\t'ospf_received_total': 16222,\n\t\t\t'ospf_sent_database_desc': 30,\n\t\t\t'ospf_sent_hello': 8887,\n\t\t\t'ospf_sent_lnk_st_acks': 239,\n\t\t\t'ospf_sent_lnk_st_updates': 299,\n\t\t\t'ospf_sent_total': 9456,\n\t\t\t'ospf_traffic_cntrs_clear': 'never'},\n\t\t'igmp_statistics': {\n\t\t\t'igmp_checksum_errors': '0/0',\n\t\t\t'igmp_dvmrp': '0/0',\n\t\t\t'igmp_format_errors': '0/0',\n\t\t\t'igmp_host_leaves': '0/5',\n\t\t\t'igmp_host_queries': '2475/1414',\n\t\t\t'igmp_host_reports': '357/3525',\n\t\t\t'igmp_pim': '0/0',\n\t\t\t'igmp_queue_drops': 0,\n\t\t\t'igmp_total': '2832/4946'},\n\t\t'tcp_statistics': {\n\t\t\t'tcp_received_checksum_errors': 0,\n\t\t\t'tcp_received_no_port': 0,\n\t\t\t'tcp_received_total': 15396,\n\t\t\t'tcp_sent_total': 19552},\n\t\t'eigrp_ipv4_statistics': {\n\t\t\t'eigrp_ipv4_received_total': 4612,\n\t\t\t'eigrp_ipv4_sent_total': 4611},\n\t\t'bgp_statistics': {\n\t\t\t'bgp_received_keepalives': 2167,\n\t\t\t'bgp_received_notifications': 0,\n\t\t\t'bgp_received_opens': 6,\n\t\t\t'bgp_received_route_refresh': 0,\n\t\t\t'bgp_received_total': 2185,\n\t\t\t'bgp_received_unrecognized': 0,\n\t\t\t'bgp_received_updates': 12,\n\t\t\t'bgp_sent_keepalives': 2296,\n\t\t\t'bgp_sent_notifications': 2,\n\t\t\t'bgp_sent_opens': 6,\n\t\t\t'bgp_sent_route_refresh': 0,\n\t\t\t'bgp_sent_total': 2304,\n\t\t\t'bgp_sent_updates': 0},\n\t}\n\n\tShowIpInterface = {\n\t\t\"Vlan211\": {\n\t\t\t\"sevurity_level\": \"default\",\n\t\t\t\"ip_route_cache_flags\": [\n\t\t\t\t \"CEF\",\n\t\t\t\t \"Fast\"\n\t\t\t],\n\t\t\t\"enabled\": True,\n\t\t\t\"oper_status\": \"up\",\n\t\t\t\"address_determined_by\": 
\"configuration file\",\n\t\t\t\"router_discovery\": False,\n\t\t\t\"ip_multicast_fast_switching\": False,\n\t\t\t\"split_horizon\": True,\n\t\t\t\"bgp_policy_mapping\": False,\n\t\t\t\"ip_output_packet_accounting\": False,\n\t\t\t\"mtu\": 1500,\n\t\t\t\"policy_routing\": False,\n\t\t\t\"local_proxy_arp\": False,\n\t\t\t\"proxy_arp\": True,\n\t\t\t\"network_address_translation\": False,\n\t\t\t\"ip_cef_switching_turbo_vector\": True,\n\t\t\t\"icmp\": {\n\t\t\t\t\"redirects\": \"always sent\",\n\t\t\t\t\"mask_replies\": \"never sent\",\n\t\t\t\t\"unreachables\": \"always sent\",\n\t\t\t},\n\t\t\t\"ipv4\": {\n\t\t\t\t \"192.168.76.1/24\": {\n\t\t\t\t\t \"prefix_length\": \"24\",\n\t\t\t\t\t \"ip\": \"192.168.76.1\",\n\t\t\t\t\t \"secondary\": False,\n\t\t\t\t\t \"broadcase_address\": \"255.255.255.255\"\n\t\t\t\t }\n\t\t\t},\n\t\t\t\"ip_access_violation_accounting\": False,\n\t\t\t\"ip_cef_switching\": True,\n\t\t\t\"unicast_routing_topologies\": {\n\t\t\t\t \"topology\": {\n\t\t\t\t\t \"base\": {\n\t\t\t\t\t\t \"status\": \"up\"\n\t\t\t\t\t }\n\t\t\t\t },\n\t\t\t},\n\t\t\t\"ip_null_turbo_vector\": True,\n\t\t\t\"probe_proxy_name_replies\": False,\n\t\t\t\"ip_fast_switching\": True,\n\t\t\t\"ip_multicast_distributed_fast_switching\": False,\n\t\t\t\"tcp_ip_header_compression\": False,\n\t\t\t\"rtp_ip_header_compression\": False,\n\t\t\t\"input_features\": [\"MCI Check\"],\n\t\t\t\"directed_broadcast_forwarding\": False,\n\t\t\t\"ip_flow_switching\": False\n\t },\n\t \"GigabitEthernet0/0\": {\n\t\t\t\"sevurity_level\": \"default\",\n\t\t\t'address_determined_by': 'setup command',\n\t\t\t\"ip_route_cache_flags\": [\n\t\t\t\t \"CEF\",\n\t\t\t\t \"Fast\"\n\t\t\t],\n\t\t\t\"enabled\": True,\n\t\t\t\"oper_status\": \"up\",\n\t\t\t\"router_discovery\": False,\n\t\t\t\"ip_multicast_fast_switching\": False,\n\t\t\t\"split_horizon\": True,\n\t\t\t\"bgp_policy_mapping\": False,\n\t\t\t\"ip_output_packet_accounting\": False,\n\t\t\t\"mtu\": 1500,\n\t\t\t\"policy_routing\": False,\n\t\t\t\"local_proxy_arp\": False,\n\t\t\t\"vrf\": \"Mgmt-vrf\",\n\t\t\t\"proxy_arp\": True,\n\t\t\t\"network_address_translation\": False,\n\t\t\t\"ip_cef_switching_turbo_vector\": True,\n\t\t\t\"icmp\": {\n\t\t\t\t\"redirects\": \"always sent\",\n\t\t\t\t\"mask_replies\": \"never sent\",\n\t\t\t\t\"unreachables\": \"always sent\",\n\t\t\t},\n\t\t\t\"ipv4\": {\n\t\t\t\t \"10.1.8.134/24\": {\n\t\t\t\t\t \"prefix_length\": \"24\",\n\t\t\t\t\t \"ip\": \"10.1.8.134\",\n\t\t\t\t\t \"secondary\": False,\n\t\t\t\t\t \"broadcase_address\": \"255.255.255.255\"\n\t\t\t\t }\n\t\t\t},\n\t\t\t\"ip_access_violation_accounting\": False,\n\t\t\t\"ip_cef_switching\": True,\n\t\t\t\"unicast_routing_topologies\": {\n\t\t\t\t \"topology\": {\n\t\t\t\t\t \"base\": {\n\t\t\t\t\t\t \"status\": \"up\"\n\t\t\t\t\t }\n\t\t\t\t },\n\t\t\t},\n\t\t\t\"ip_null_turbo_vector\": True,\n\t\t\t\"probe_proxy_name_replies\": False,\n\t\t\t\"ip_fast_switching\": True,\n\t\t\t\"ip_multicast_distributed_fast_switching\": False,\n\t\t\t\"tcp_ip_header_compression\": False,\n\t\t\t\"rtp_ip_header_compression\": False,\n\t\t\t\"input_features\": [\"MCI Check\"],\n\t\t\t\"directed_broadcast_forwarding\": False,\n\t\t\t\"ip_flow_switching\": False\n\t },\n\t \"GigabitEthernet2\": {\n\t\t\t\"enabled\": False,\n\t\t\t\"oper_status\": \"down\"\n\t },\n\t \"GigabitEthernet1/0/1\": {\n\t\t\t\"sevurity_level\": \"default\",\n\t\t\t'address_determined_by': 'setup command',\n\t\t\t\"ip_route_cache_flags\": [\n\t\t\t\t \"CEF\",\n\t\t\t\t \"Fast\"\n\t\t\t],\n\t\t\t\"enabled\": 
False,\n\t\t\t\"oper_status\": \"down\",\n\t\t\t\"router_discovery\": False,\n\t\t\t\"ip_multicast_fast_switching\": False,\n\t\t\t\"split_horizon\": True,\n\t\t\t\"bgp_policy_mapping\": False,\n\t\t\t\"ip_output_packet_accounting\": False,\n\t\t\t\"mtu\": 1500,\n\t\t\t\"policy_routing\": False,\n\t\t\t\"local_proxy_arp\": False,\n\t\t\t\"proxy_arp\": True,\n\t\t\t\"network_address_translation\": False,\n\t\t\t\"ip_cef_switching_turbo_vector\": True,\n\t\t\t\"icmp\": {\n\t\t\t\t\"redirects\": \"always sent\",\n\t\t\t\t\"mask_replies\": \"never sent\",\n\t\t\t\t\"unreachables\": \"always sent\",\n\t\t\t},\n\t\t\t\"ipv4\": {\n\t\t\t\t \"10.1.1.1/24\": {\n\t\t\t\t\t \"prefix_length\": \"24\",\n\t\t\t\t\t \"ip\": \"10.1.1.1\",\n\t\t\t\t\t \"secondary\": False,\n\t\t\t\t\t \"broadcase_address\": \"255.255.255.255\"\n\t\t\t\t },\n\t\t\t\t \"10.2.2.2/24\": {\n\t\t\t\t\t \"prefix_length\": \"24\",\n\t\t\t\t\t \"ip\": \"10.2.2.2\",\n\t\t\t\t\t \"secondary\": True\n\t\t\t\t },\n\t\t\t},\n\t\t\t\"ip_access_violation_accounting\": False,\n\t\t\t\"ip_cef_switching\": True,\n\t\t\t\"unicast_routing_topologies\": {\n\t\t\t\t \"topology\": {\n\t\t\t\t\t \"base\": {\n\t\t\t\t\t\t \"status\": \"up\"\n\t\t\t\t\t }\n\t\t\t\t },\n\t\t\t},\n\t\t\t'wccp': {\n\t\t\t 'redirect_outbound': False,\n\t\t\t 'redirect_inbound': False,\n\t\t\t 'redirect_exclude': False,\n\t\t\t},\n\t\t\t\"ip_null_turbo_vector\": True,\n\t\t\t\"probe_proxy_name_replies\": False,\n\t\t\t\"ip_fast_switching\": True,\n\t\t\t\"ip_multicast_distributed_fast_switching\": False,\n\t\t\t\"tcp_ip_header_compression\": False,\n\t\t\t\"rtp_ip_header_compression\": False,\n\t\t\t\"directed_broadcast_forwarding\": False,\n\t\t\t\"ip_flow_switching\": False,\n\t\t\t\"input_features\": [\"MCI Check\", \"QoS Classification\", \"QoS Marking\"],\n\t\t}\n\t}\n\n\tArp_info = {\n\t\t'interfaces': {\n\t\t\t'GigabitEthernet0/0': {\n\t\t\t\t'arp_dynamic_learning': {\n\t\t\t\t\t'local_proxy_enable': False,\n\t\t\t\t\t'proxy_enable': True}\n\t\t\t},\n\t\t\t'GigabitEthernet1/0/1': {\n\t\t\t\t'arp_dynamic_learning': {\n\t\t\t\t\t'local_proxy_enable': False,\n\t\t\t\t\t'proxy_enable': True}\n\t\t\t},\n\t\t\t'Vlan100': {\n\t\t\t\t'ipv4': {\n\t\t\t\t\t'neighbors': {\n\t\t\t\t\t\t'192.168.234.1': {\n\t\t\t\t\t\t\t'ip': '192.168.234.1',\n\t\t\t\t\t\t\t'link_layer_address': '58bf.eab6.2f51',\n\t\t\t\t\t\t\t'origin': 'static'},\n\t\t\t\t\t\t'192.168.234.2': {\n\t\t\t\t\t\t\t'ip': '192.168.234.2',\n\t\t\t\t\t\t\t'link_layer_address': '3820.5672.fc51',\n\t\t\t\t\t\t\t'origin': 'dynamic'}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t},\n\t\t\t'Vlan200': {\n\t\t\t\t'ipv4': {\n\t\t\t\t\t'neighbors': {\n\t\t\t\t\t\t'192.168.70.1': {\n\t\t\t\t\t\t\t'ip': '192.168.70.1',\n\t\t\t\t\t\t\t'link_layer_address': '58bf.eab6.2f62',\n\t\t\t\t\t\t\t'origin': 'static'}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t},\n\t\t\t'Vlan211': {\n\t\t\t\t'arp_dynamic_learning': {\n\t\t\t\t\t'local_proxy_enable': False,\n\t\t\t\t\t'proxy_enable': True}\n\t\t\t},\n\t\t\t'GigabitEthernet2.390': {\n\t\t\t\t'ipv4': {\n\t\t\t\t\t'neighbors': {\n\t\t\t\t\t\t'10.12.90.1': {\n\t\t\t\t\t\t\t'ip': '10.12.90.1',\n\t\t\t\t\t\t\t'link_layer_address': 'fa16.3e24.787a',\n\t\t\t\t\t\t\t'origin': 'static',\n\t\t\t\t\t\t},\n\t\t\t\t\t\t'10.12.90.2':\n\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t 'ip': '10.12.90.2',\n\t\t\t\t\t\t\t 'link_layer_address': 'fa16.3e8a.cfeb',\n\t\t\t\t\t\t\t 'origin': 'dynamic',\n\t\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t},\n\t\t\t'GigabitEthernet2.410': {\n\t\t\t\t'ipv4': {\n\t\t\t\t\t'neighbors': {\n\t\t\t\t\t\t'10.12.110.1': 
{\n\t\t\t\t\t\t\t'ip': '10.12.110.1',\n\t\t\t\t\t\t\t'link_layer_address': 'fa16.3e24.787a',\n\t\t\t\t\t\t\t'origin': 'static',\n\t\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t},\n\t\t},\n\t\t'statistics': {\n\t\t\t'entries_total': 8,\n\t\t\t'in_drops': 0,\n\t\t\t'in_replies_pkts': 764,\n\t\t\t'in_requests_pkts': 2020,\n\t\t\t'incomplete_total': 0,\n\t\t\t'out_replies_pkts': 126,\n\t\t\t'out_requests_pkts': 29}}\n", "id": "2364580", "language": "Python", "matching_score": 5.34233283996582, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/arp/iosxe/tests/arp_output.py" }, { "content": "''' \nInterface Genie Ops Object Outputs for IOSXE.\n'''\n\n\nclass InterfaceOutput(object):\n\n ShowInterfaces = {\n \"Port-channel12\": {\n \"flow_control\": {\n \"send\": False,\n \"receive\": False\n },\n \"type\": \"EtherChannel\",\n \"counters\": {\n \"out_buffer_failure\": 0,\n \"out_underruns\": 0,\n \"in_giants\": 0,\n \"in_throttles\": 0,\n \"in_frame\": 0,\n \"in_ignored\": 0,\n \"last_clear\": \"1d23h\",\n \"out_interface_resets\": 2,\n \"in_mac_pause_frames\": 0,\n \"out_collision\": 0,\n \"rate\": {\n \"out_rate_pkts\": 0,\n \"load_interval\": 300,\n \"out_rate\": 0,\n \"in_rate\": 2000,\n \"in_rate_pkts\": 2\n },\n \"in_watchdog\": 0,\n \"out_deferred\": 0,\n \"out_mac_pause_frames\": 0,\n \"in_pkts\": 961622,\n \"in_multicast_pkts\": 4286699522,\n \"in_runts\": 0,\n \"out_unknown_protocl_drops\": 0,\n \"in_no_buffer\": 0,\n \"out_buffers_swapped\": 0,\n \"out_lost_carrier\": 0,\n \"out_errors\": 0,\n \"in_errors\": 0,\n \"in_octets\": 72614643,\n \"in_crc_errors\": 0,\n \"out_no_carrier\": 0,\n \"in_with_dribble\": 0,\n \"in_broadcast_pkts\": 944788,\n \"out_pkts\": 39281,\n \"out_late_collision\": 0,\n \"out_octets\": 6235318,\n \"in_overrun\": 0,\n \"out_babble\": 0\n },\n \"auto_negotiate\": True,\n \"phys_address\": \"0057.d228.1a02\",\n \"keepalive\": 10,\n \"output_hang\": \"never\",\n \"txload\": \"1/255\",\n \"oper_status\": \"up\",\n \"arp_type\": \"arpa\",\n \"rxload\": \"1/255\",\n \"duplex_mode\": \"full\",\n \"link_type\": \"auto\",\n \"queues\": {\n \"input_queue_size\": 0,\n \"total_output_drop\": 0,\n \"input_queue_drops\": 0,\n \"input_queue_max\": 2000,\n \"output_queue_size\": 0,\n \"input_queue_flushes\": 0,\n \"output_queue_max\": 0,\n \"queue_strategy\": \"fifo\"\n },\n \"encapsulations\": {\n \"encapsulation\": \"qinq virtual lan\",\n \"first_dot1q\": \"10\",\n \"second_dot1q\": \"20\",\n },\n \"last_input\": \"never\",\n \"last_output\": \"1d22h\",\n \"line_protocol\": \"up\",\n \"mac_address\": \"0057.d228.1a02\",\n \"connected\": True,\n \"port_channel\": {\n \"port_channel_member\": True,\n \"port_channel_member_intfs\": ['GigabitEthernet1/0/2'],\n },\n \"arp_timeout\": \"04:00:00\",\n \"bandwidth\": 1000000,\n \"port_speed\": \"1000\",\n \"enabled\": True,\n \"mtu\": 1500,\n \"delay\": 10,\n \"reliability\": \"255/255\"\n },\n \"GigabitEthernet1/0/1\": {\n \"flow_control\": {\n \"send\": False,\n \"receive\": False\n },\n \"type\": \"Gigabit Ethernet\",\n \"counters\": {\n \"out_buffer_failure\": 0,\n \"out_underruns\": 0,\n \"in_giants\": 0,\n \"in_throttles\": 0,\n \"in_frame\": 0,\n \"in_ignored\": 0,\n \"last_clear\": \"1d02h\",\n \"out_interface_resets\": 2,\n \"in_mac_pause_frames\": 0,\n \"out_collision\": 0,\n \"rate\": {\n \"out_rate_pkts\": 0,\n \"load_interval\": 30,\n \"out_rate\": 0,\n \"in_rate\": 0,\n \"in_rate_pkts\": 0\n },\n \"in_watchdog\": 0,\n \"out_deferred\": 0,\n \"out_mac_pause_frames\": 0,\n \"in_pkts\": 12127,\n 
\"in_multicast_pkts\": 4171,\n \"in_runts\": 0,\n \"out_unknown_protocl_drops\": 0,\n \"in_no_buffer\": 0,\n \"out_buffers_swapped\": 0,\n \"out_lost_carrier\": 0,\n \"out_errors\": 0,\n \"in_errors\": 0,\n \"in_octets\": 2297417,\n \"in_crc_errors\": 0,\n \"out_no_carrier\": 0,\n \"in_with_dribble\": 0,\n \"in_broadcast_pkts\": 0,\n \"out_pkts\": 12229,\n \"out_late_collision\": 0,\n \"out_octets\": 2321107,\n \"in_overrun\": 0,\n \"out_babble\": 0\n },\n \"phys_address\": \"0057.d228.1a64\",\n \"keepalive\": 10,\n \"output_hang\": \"never\",\n \"txload\": \"1/255\",\n \"description\": \"desc\",\n \"oper_status\": \"down\",\n \"arp_type\": \"arpa\",\n \"rxload\": \"1/255\",\n \"duplex_mode\": \"auto\",\n \"queues\": {\n \"input_queue_size\": 0,\n \"total_output_drop\": 0,\n \"input_queue_drops\": 0,\n \"input_queue_max\": 375,\n \"output_queue_size\": 0,\n \"input_queue_flushes\": 0,\n \"output_queue_max\": 40,\n \"queue_strategy\": \"fifo\"\n },\n \"ipv4\": {\n \"10.1.1.1/24\": {\n \"prefix_length\": \"24\",\n \"ip\": \"10.1.1.1\"\n }\n },\n \"encapsulations\": {\n \"encapsulation\": \"arpa\"\n },\n \"last_input\": \"never\",\n \"last_output\": \"04:39:18\",\n \"line_protocol\": \"down\",\n \"mac_address\": \"0057.d228.1a64\",\n \"connected\": False,\n \"port_channel\": {\n \"port_channel_member\": False\n },\n \"media_type\": \"10/100/1000BaseTX\",\n \"bandwidth\": 768,\n \"port_speed\": \"1000\",\n \"enabled\": False,\n \"arp_timeout\": \"04:00:00\",\n \"mtu\": 1500,\n \"delay\": 3330,\n \"reliability\": \"255/255\"\n },\n \"GigabitEthernet3\": {\n \"flow_control\": {\n \"send\": False,\n \"receive\": False\n },\n \"type\": \"CSR vNIC\",\n 'auto_negotiate': True,\n 'duplex_mode': 'full',\n 'link_type': 'auto',\n 'media_type': 'RJ45',\n 'port_speed': '1000',\n \"counters\": {\n \"out_buffer_failure\": 0,\n \"out_underruns\": 0,\n \"in_giants\": 0,\n \"in_throttles\": 0,\n \"in_frame\": 0,\n \"in_ignored\": 0,\n \"last_clear\": \"never\",\n \"out_interface_resets\": 1,\n \"in_mac_pause_frames\": 0,\n \"out_collision\": 0,\n \"in_crc_errors\": 0,\n \"rate\": {\n \"out_rate_pkts\": 0,\n \"load_interval\": 300,\n \"out_rate\": 0,\n \"in_rate\": 0,\n \"in_rate_pkts\": 0\n },\n \"in_watchdog\": 0,\n \"out_deferred\": 0,\n \"out_mac_pause_frames\": 0,\n \"in_pkts\": 6,\n \"in_multicast_pkts\": 0,\n \"in_runts\": 0,\n \"in_no_buffer\": 0,\n \"out_buffers_swapped\": 0,\n \"out_errors\": 0,\n \"in_errors\": 0,\n \"in_octets\": 480,\n \"out_unknown_protocl_drops\": 0,\n \"out_no_carrier\": 0,\n \"out_lost_carrier\": 0,\n \"in_broadcast_pkts\": 0,\n \"out_pkts\": 28,\n \"out_late_collision\": 0,\n \"out_octets\": 7820,\n \"in_overrun\": 0,\n \"out_babble\": 0\n },\n \"phys_address\": \"5254.0072.9b0c\",\n \"keepalive\": 10,\n \"output_hang\": \"never\",\n \"txload\": \"1/255\",\n \"reliability\": \"255/255\",\n \"arp_type\": \"arpa\",\n \"rxload\": \"1/255\",\n \"queues\": {\n \"input_queue_size\": 0,\n \"total_output_drop\": 0,\n \"input_queue_drops\": 0,\n \"input_queue_max\": 375,\n \"output_queue_size\": 0,\n \"input_queue_flushes\": 0,\n \"output_queue_max\": 40,\n \"queue_strategy\": \"fifo\"\n },\n \"ipv4\": {\n \"192.168.154.1/24\": {\n \"prefix_length\": \"24\",\n \"ip\": \"192.168.154.1\"\n },\n \"unnumbered\": {\n \"interface_ref\": \"Loopback0\"\n }\n },\n \"encapsulations\": {\n \"encapsulation\": \"arpa\"\n },\n \"last_output\": \"00:00:27\",\n \"line_protocol\": \"up\",\n \"mac_address\": \"5254.0072.9b0c\",\n \"oper_status\": \"up\",\n \"port_channel\": {\n \"port_channel_member\": 
False\n },\n \"arp_timeout\": \"04:00:00\",\n \"bandwidth\": 1000000,\n \"enabled\": True,\n \"mtu\": 1500,\n \"delay\": 10,\n \"last_input\": \"never\"\n },\n \"Loopback0\": {\n \"queues\": {\n \"input_queue_size\": 0,\n \"total_output_drop\": 0,\n \"input_queue_drops\": 0,\n \"input_queue_max\": 75,\n \"output_queue_size\": 0,\n \"input_queue_flushes\": 0,\n \"output_queue_max\": 0,\n \"queue_strategy\": \"fifo\"\n },\n \"mtu\": 1514,\n \"encapsulations\": {\n \"encapsulation\": \"loopback\"\n },\n \"last_output\": \"never\",\n \"type\": \"Loopback\",\n \"line_protocol\": \"up\",\n \"oper_status\": \"up\",\n \"keepalive\": 10,\n \"output_hang\": \"never\",\n \"txload\": \"1/255\",\n \"counters\": {\n \"out_buffer_failure\": 0,\n \"out_underruns\": 0,\n \"in_giants\": 0,\n \"in_throttles\": 0,\n \"in_frame\": 0,\n \"in_ignored\": 0,\n \"last_clear\": \"1d04h\",\n \"out_interface_resets\": 0,\n \"out_collision\": 0,\n \"rate\": {\n \"out_rate_pkts\": 0,\n \"load_interval\": 300,\n \"out_rate\": 0,\n \"in_rate\": 0,\n \"in_rate_pkts\": 0\n },\n \"in_pkts\": 0,\n \"in_multicast_pkts\": 0,\n \"in_runts\": 0,\n \"in_no_buffer\": 0,\n \"out_buffers_swapped\": 0,\n \"out_errors\": 0,\n \"in_errors\": 0,\n \"in_octets\": 0,\n \"in_crc_errors\": 0,\n \"out_unknown_protocl_drops\": 0,\n \"in_broadcast_pkts\": 0,\n \"out_pkts\": 72,\n \"out_octets\": 5760,\n \"in_overrun\": 0,\n \"in_abort\": 0\n },\n \"reliability\": \"255/255\",\n \"bandwidth\": 8000000,\n \"port_channel\": {\n \"port_channel_member\": False\n },\n \"enabled\": True,\n \"ipv4\": {\n \"192.168.154.1/24\": {\n \"prefix_length\": \"24\",\n \"ip\": \"192.168.154.1\"\n }\n },\n \"rxload\": \"1/255\",\n \"delay\": 5000,\n \"last_input\": \"1d02h\"\n },\n \"Vlan100\": {\n \"type\": \"Ethernet SVI\",\n \"counters\": {\n \"out_buffer_failure\": 0,\n \"out_underruns\": 0,\n \"in_giants\": 0,\n \"in_throttles\": 0,\n \"in_frame\": 0,\n \"in_ignored\": 0,\n \"last_clear\": \"1d04h\",\n \"out_interface_resets\": 0,\n \"rate\": {\n \"out_rate_pkts\": 0,\n \"load_interval\": 300,\n \"out_rate\": 0,\n \"in_rate\": 0,\n \"in_rate_pkts\": 0\n },\n \"in_pkts\": 50790,\n \"in_multicast_pkts\": 0,\n \"in_runts\": 0,\n \"in_no_buffer\": 0,\n \"out_buffers_swapped\": 0,\n \"out_errors\": 0,\n \"in_errors\": 0,\n \"in_octets\": 3657594,\n \"in_crc_errors\": 0,\n \"out_unknown_protocl_drops\": 0,\n \"in_broadcast_pkts\": 0,\n \"out_pkts\": 72,\n \"out_octets\": 5526,\n \"in_overrun\": 0\n },\n \"phys_address\": \"0057.d228.1a51\",\n \"queues\": {\n \"input_queue_size\": 0,\n \"total_output_drop\": 0,\n \"input_queue_drops\": 0,\n \"input_queue_max\": 375,\n \"output_queue_size\": 0,\n \"input_queue_flushes\": 0,\n \"output_queue_max\": 40,\n \"queue_strategy\": \"fifo\"\n },\n \"txload\": \"1/255\",\n \"reliability\": \"255/255\",\n \"arp_type\": \"arpa\",\n \"rxload\": \"1/255\",\n \"output_hang\": \"never\",\n \"ipv4\": {\n \"192.168.234.1/24\": {\n \"prefix_length\": \"24\",\n \"ip\": \"192.168.234.1\"\n }\n },\n \"encapsulations\": {\n \"encapsulation\": \"arpa\"\n },\n \"last_output\": \"1d03h\",\n \"line_protocol\": \"up\",\n \"mac_address\": \"0057.d228.1a51\",\n \"oper_status\": \"up\",\n \"port_channel\": {\n \"port_channel_member\": False\n },\n \"arp_timeout\": \"04:00:00\",\n \"bandwidth\": 1000000,\n \"enabled\": True,\n \"mtu\": 1500,\n \"delay\": 10,\n \"last_input\": \"never\"\n },\n \"GigabitEthernet1/0/2\": {\n \"flow_control\": {\n \"send\": False,\n \"receive\": False\n },\n \"type\": \"Gigabit Ethernet\",\n \"counters\": {\n 
\"out_buffer_failure\": 0,\n \"out_underruns\": 0,\n \"in_giants\": 0,\n \"in_throttles\": 0,\n \"in_frame\": 0,\n \"in_ignored\": 0,\n \"last_clear\": \"1d02h\",\n \"out_interface_resets\": 5,\n \"in_mac_pause_frames\": 0,\n \"out_collision\": 0,\n \"rate\": {\n \"out_rate_pkts\": 0,\n \"load_interval\": 300,\n \"out_rate\": 0,\n \"in_rate\": 3000,\n \"in_rate_pkts\": 5\n },\n \"in_watchdog\": 0,\n \"out_deferred\": 0,\n \"out_mac_pause_frames\": 0,\n \"in_pkts\": 545526,\n \"in_multicast_pkts\": 535961,\n \"in_runts\": 0,\n \"out_unknown_protocl_drops\": 0,\n \"in_no_buffer\": 0,\n \"out_buffers_swapped\": 0,\n \"out_lost_carrier\": 0,\n \"out_errors\": 0,\n \"in_errors\": 0,\n \"in_octets\": 41210298,\n \"in_crc_errors\": 0,\n \"out_no_carrier\": 0,\n \"in_with_dribble\": 0,\n \"in_broadcast_pkts\": 535961,\n \"out_pkts\": 23376,\n \"out_late_collision\": 0,\n \"out_octets\": 3642296,\n \"in_overrun\": 0,\n \"out_babble\": 0\n },\n \"phys_address\": \"0057.d228.1a02\",\n \"keepalive\": 10,\n \"output_hang\": \"never\",\n \"txload\": \"1/255\",\n \"oper_status\": \"up\",\n \"arp_type\": \"arpa\",\n \"media_type\": \"10/100/1000BaseTX\",\n \"rxload\": \"1/255\",\n \"duplex_mode\": \"full\",\n \"queues\": {\n \"input_queue_size\": 0,\n \"total_output_drop\": 0,\n \"input_queue_drops\": 0,\n \"input_queue_max\": 2000,\n \"output_queue_size\": 0,\n \"input_queue_flushes\": 0,\n \"output_queue_max\": 40,\n \"queue_strategy\": \"fifo\"\n },\n \"encapsulations\": {\n \"encapsulation\": \"arpa\"\n },\n \"last_input\": \"never\",\n \"last_output\": \"00:00:02\",\n \"line_protocol\": \"up\",\n \"mac_address\": \"0057.d228.1a02\",\n \"connected\": True,\n \"port_channel\": {\n \"port_channel_member\": False\n },\n \"arp_timeout\": \"04:00:00\",\n \"bandwidth\": 1000000,\n \"port_speed\": \"1000\",\n \"enabled\": True,\n \"mtu\": 1500,\n \"delay\": 10,\n \"reliability\": \"255/255\"\n }\n }\n\n ShowVrf = {\n 'vrf': {\n 'Mgmt-intf': {\n 'protocols': ['ipv4', 'ipv6'],\n 'interfaces': ['GigabitEthernet1/0/2'],\n },\n 'VRF1': {\n 'route_distinguisher': '65000:1',\n 'protocols': ['ipv4', 'ipv6'],\n 'interfaces': ['GigabitEthernet1/0/1'],\n }\n }\n }\n ShowInterfacesSwitchport = {\n \"GigabitEthernet1/0/1\": {\n \"switchport_mode\": \"trunk\",\n \"pruning_vlans\": \"2-1001\",\n 'operational_mode': 'trunk',\n \"switchport_enable\": True,\n \"trunk_vlans\": \"200-211\",\n \"capture_mode\": False,\n \"private_vlan\": {\n \"native_vlan_tagging\": True,\n \"encapsulation\": \"dot1q\"\n },\n \"access_vlan\": \"1\",\n \"unknown_unicast_blocked\": False,\n \"native_vlan_tagging\": True,\n \"unknown_multicast_blocked\": False,\n \"protected\": False,\n \"negotiation_of_trunk\": True,\n \"capture_vlans\": \"all\",\n \"encapsulation\": {\n \"operational_encapsulation\": \"dot1q\",\n \"native_vlan\": \"1\",\n \"administrative_encapsulation\": \"dot1q\"\n }\n },\n \"GigabitEthernet1/0/2\": {\n \"pruning_vlans\": \"2-1001\",\n \"switchport_enable\": True,\n \"unknown_multicast_blocked\": False,\n \"trunk_vlans\": \"100-110\",\n \"port_channel\": {\n \"port_channel_int\": \"Port-channel12\",\n \"port_channel_member\": True\n },\n \"access_vlan\": \"1\",\n \"operational_mode\": \"trunk\",\n \"unknown_unicast_blocked\": False,\n \"capture_mode\": False,\n \"private_vlan\": {\n \"native_vlan_tagging\": True,\n \"encapsulation\": \"dot1q\",\n \"operational\": \"10 (VLAN0010) 100 (VLAN0100)\",\n \"trunk_mappings\": \"10 (VLAN0010) 100 (VLAN0100)\"\n },\n \"encapsulation\": {\n \"operational_encapsulation\": \"dot1q\",\n 
\"native_vlan\": \"1\",\n \"administrative_encapsulation\": \"dot1q\"\n },\n \"protected\": False,\n \"native_vlan_tagging\": True,\n \"negotiation_of_trunk\": True,\n \"capture_vlans\": \"all\",\n \"switchport_mode\": \"trunk\"\n },\n \"GigabitEthernet1/0/5\": {\n \"switchport_mode\": \"static access\",\n \"pruning_vlans\": \"2-1001\",\n \"switchport_enable\": True,\n \"trunk_vlans\": \"all\",\n 'operational_mode': 'down',\n \"capture_mode\": False,\n \"private_vlan\": {\n \"native_vlan_tagging\": True,\n \"encapsulation\": \"dot1q\"\n },\n \"access_vlan\": \"1\",\n \"unknown_unicast_blocked\": False,\n \"native_vlan_tagging\": True,\n \"unknown_multicast_blocked\": False,\n \"protected\": False,\n \"negotiation_of_trunk\": False,\n \"capture_vlans\": \"all\",\n \"encapsulation\": {\n \"native_vlan\": \"1\",\n \"administrative_encapsulation\": \"dot1q\"\n }\n },\n \"GigabitEthernet1/1/1\": {\n \"switchport_enable\": True,\n \"switchport_mode\": \"dynamic auto\"\n }\n }\n\n ShowIpInterface = {\n \"Vlan100\": {\n \"sevurity_level\": \"default\",\n \"ip_route_cache_flags\": [\n \"CEF\",\n \"Fast\"\n ],\n \"enabled\": True,\n \"oper_status\": \"up\",\n \"address_determined_by\": \"configuration file\",\n \"router_discovery\": False,\n \"ip_multicast_fast_switching\": False,\n \"split_horizon\": True,\n \"bgp_policy_mapping\": False,\n \"ip_output_packet_accounting\": False,\n \"mtu\": 1500,\n \"policy_routing\": False,\n \"local_proxy_arp\": False,\n \"proxy_arp\": True,\n \"network_address_translation\": False,\n \"ip_cef_switching_turbo_vector\": True,\n \"icmp\": {\n \"redirects\": \"always sent\",\n \"mask_replies\": \"never sent\",\n \"unreachables\": \"always sent\",\n },\n \"ipv4\": {\n \"192.168.76.1/24\": {\n \"prefix_length\": \"24\",\n \"ip\": \"192.168.76.1\",\n \"secondary\": False,\n \"broadcase_address\": \"255.255.255.255\"\n }\n },\n \"ip_access_violation_accounting\": False,\n \"ip_cef_switching\": True,\n \"unicast_routing_topologies\": {\n \"topology\": {\n \"base\": {\n \"status\": \"up\"\n }\n },\n },\n \"ip_null_turbo_vector\": True,\n \"probe_proxy_name_replies\": False,\n \"ip_fast_switching\": True,\n \"ip_multicast_distributed_fast_switching\": False,\n \"tcp_ip_header_compression\": False,\n \"rtp_ip_header_compression\": False,\n \"input_features\": [\"MCI Check\"],\n \"directed_broadcast_forwarding\": False,\n \"ip_flow_switching\": False\n },\n \"GigabitEthernet0/0\": {\n \"sevurity_level\": \"default\",\n 'address_determined_by': 'setup command',\n \"ip_route_cache_flags\": [\n \"CEF\",\n \"Fast\"\n ],\n \"enabled\": True,\n \"oper_status\": \"up\",\n \"router_discovery\": False,\n \"ip_multicast_fast_switching\": False,\n \"split_horizon\": True,\n \"bgp_policy_mapping\": False,\n \"ip_output_packet_accounting\": False,\n \"mtu\": 1500,\n \"policy_routing\": False,\n \"local_proxy_arp\": False,\n \"vrf\": \"Mgmt-vrf\",\n \"proxy_arp\": True,\n \"network_address_translation\": False,\n \"ip_cef_switching_turbo_vector\": True,\n \"icmp\": {\n \"redirects\": \"always sent\",\n \"mask_replies\": \"never sent\",\n \"unreachables\": \"always sent\",\n },\n \"ipv4\": {\n \"10.1.8.134/24\": {\n \"prefix_length\": \"24\",\n \"ip\": \"10.1.8.134\",\n \"secondary\": False,\n \"broadcase_address\": \"255.255.255.255\"\n }\n },\n \"ip_access_violation_accounting\": False,\n \"ip_cef_switching\": True,\n \"unicast_routing_topologies\": {\n \"topology\": {\n \"base\": {\n \"status\": \"up\"\n }\n },\n },\n \"ip_null_turbo_vector\": True,\n \"probe_proxy_name_replies\": 
False,\n \"ip_fast_switching\": True,\n \"ip_multicast_distributed_fast_switching\": False,\n \"tcp_ip_header_compression\": False,\n \"rtp_ip_header_compression\": False,\n \"input_features\": [\"MCI Check\"],\n \"directed_broadcast_forwarding\": False,\n \"ip_flow_switching\": False\n },\n \"GigabitEthernet2\": {\n \"enabled\": False,\n \"oper_status\": \"down\"\n },\n \"GigabitEthernet1/0/1\": {\n \"sevurity_level\": \"default\",\n 'address_determined_by': 'setup command',\n \"ip_route_cache_flags\": [\n \"CEF\",\n \"Fast\"\n ],\n \"enabled\": False,\n \"oper_status\": \"down\",\n \"router_discovery\": False,\n \"ip_multicast_fast_switching\": False,\n \"split_horizon\": True,\n \"bgp_policy_mapping\": False,\n \"ip_output_packet_accounting\": False,\n \"mtu\": 1500,\n \"policy_routing\": False,\n \"local_proxy_arp\": False,\n \"proxy_arp\": True,\n \"network_address_translation\": False,\n \"ip_cef_switching_turbo_vector\": True,\n \"icmp\": {\n \"redirects\": \"always sent\",\n \"mask_replies\": \"never sent\",\n \"unreachables\": \"always sent\",\n },\n \"ipv4\": {\n \"10.1.1.1/24\": {\n \"prefix_length\": \"24\",\n \"ip\": \"10.1.1.1\",\n \"secondary\": False,\n \"broadcase_address\": \"255.255.255.255\"\n },\n \"10.2.2.2/24\": {\n \"prefix_length\": \"24\",\n \"ip\": \"10.2.2.2\",\n \"secondary\": True\n },\n },\n \"ip_access_violation_accounting\": False,\n \"ip_cef_switching\": True,\n \"unicast_routing_topologies\": {\n \"topology\": {\n \"base\": {\n \"status\": \"up\"\n }\n },\n },\n 'wccp': {\n 'redirect_outbound': False,\n 'redirect_inbound': False,\n 'redirect_exclude': False,\n },\n \"ip_null_turbo_vector\": True,\n \"probe_proxy_name_replies\": False,\n \"ip_fast_switching\": True,\n \"ip_multicast_distributed_fast_switching\": False,\n \"tcp_ip_header_compression\": False,\n \"rtp_ip_header_compression\": False,\n \"directed_broadcast_forwarding\": False,\n \"ip_flow_switching\": False,\n \"input_features\": [\"MCI Check\", \"QoS Classification\", \"QoS Marking\"],\n }\n }\n\n ShowIpv6Interface = {\n \"GigabitEthernet1/0/1\": {\n \"joined_group_addresses\": [\n \"fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b\"\n ],\n \"ipv6\": {\n \"2001:DB8:2:2::2/64\": {\n \"ip\": \"2001:DB8:2:2::2\",\n \"prefix_length\": \"64\",\n \"status\": \"tentative\"\n },\n \"2001:db8:400::1/126\": {\n \"ip\": \"2001:db8:400::1\",\n \"prefix_length\": \"126\",\n \"status\": \"tentative\"\n },\n \"2001:DB8:1:1::1/64\": {\n \"ip\": \"2001:DB8:1:1::1\",\n \"prefix_length\": \"64\",\n \"status\": \"tentative\"\n },\n \"2001:DB8:4:4:257:D2FF:FE28:1A64/64\": {\n \"ip\": \"2001:DB8:4:4:257:D2FF:FE28:1A64\",\n \"prefix_length\": \"64\",\n \"status\": \"tentative\",\n \"eui_64\": True\n },\n \"2001:DB8:3:3::3/64\": {\n \"ip\": \"2001:DB8:3:3::3\",\n \"prefix_length\": \"64\",\n \"status\": \"tentative\",\n \"anycast\": True\n },\n \"FE80::257:D2FF:FE28:1A64\": {\n \"ip\": \"FE80::257:D2FF:FE28:1A64\",\n \"status\": \"tentative\",\n \"origin\": \"link_layer\",\n },\n \"enabled\": True,\n \"nd\": {\n \"dad_attempts\": 1,\n \"ns_retransmit_interval\": 1000,\n \"dad_enabled\": True,\n \"reachable_time\": 30000,\n \"using_time\": 30000\n },\n \"icmp\": {\n \"error_messages_limited\": 100,\n \"redirects\": True,\n \"unreachables\": \"sent\"\n },\n },\n \"oper_status\": \"down\",\n \"enabled\": False,\n \"mtu\": 1500\n },\n \"Vlan211\": {\n \"joined_group_addresses\": [\n \"fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b\",\n \"fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b:FF14:1\",\n 
\"fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b:FF28:1A71\"\n ],\n \"ipv6\": {\n \"2001:10::14:1/112\": {\n \"ip\": \"2001:10::14:1\",\n \"prefix_length\": \"112\",\n \"status\": \"valid\",\n 'autoconf': {\n 'preferred_lifetime': 604711,\n 'valid_lifetime': 2591911,\n },\n },\n \"FE80::257:D2FF:FE28:1A71\": {\n \"ip\": \"FE80::257:D2FF:FE28:1A71\",\n \"status\": \"valid\",\n \"origin\": \"link_layer\",\n },\n \"enabled\": True,\n \"nd\": {\n \"dad_attempts\": 1,\n \"ns_retransmit_interval\": 1000,\n \"dad_enabled\": True,\n \"reachable_time\": 30000,\n \"using_time\": 30000\n },\n \"icmp\": {\n \"error_messages_limited\": 100,\n \"redirects\": True,\n \"unreachables\": \"sent\"\n },\n },\n \"oper_status\": \"up\",\n \"enabled\": True,\n \"autoconf\": True,\n \"mtu\": 1500\n },\n \"GigabitEthernet3\": {\n \"enabled\": True,\n \"joined_group_addresses\": [\n \"fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b\",\n \"fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b:FF1E:4F2\",\n \"fc00:db20:35b:7399::5\"\n ],\n \"ipv6\": {\n \"enabled\": False,\n \"FE80::5054:FF:FE1E:4F2\": {\n \"ip\": \"FE80::5054:FF:FE1E:4F2\",\n \"status\": \"valid\",\n \"origin\": \"link_layer\",\n },\n \"unnumbered\": {\n \"interface_ref\": \"Loopback0\",\n },\n \"nd\": {\n \"dad_attempts\": 1,\n \"reachable_time\": 30000,\n \"using_time\": 30000,\n \"dad_enabled\": True\n },\n \"icmp\": {\n \"unreachables\": \"sent\",\n \"redirects\": True,\n \"error_messages_limited\": 100\n },\n \"nd\": {\n \"dad_attempts\": 1,\n \"dad_enabled\": True,\n \"reachable_time\": 30000,\n \"using_time\": 30000,\n \"advertised_reachable_time\": 0,\n \"advertised_retransmit_interval\": 0,\n \"router_advertisements_interval\": 200,\n \"router_advertisements_live\": 1800,\n \"advertised_default_router_preference\": 'Medium',\n \"advertised_reachable_time_unspecified\": False,\n \"advertised_retransmit_interval_unspecified\": False,\n },\n },\n \"oper_status\": \"up\",\n \"mtu\": 1500,\n \"addresses_config_method\": 'stateless autoconfig',\n }\n }\n\n ShowVrfDetail = {\n \"Mgmt-vrf\": {\n \"vrf_id\": 1,\n \"interfaces\": [\n \"GigabitEthernet0/0\"\n ],\n \"interface\": {\n \"GigabitEthernet0/0\": {'vrf': 'Mgmt-vrf'}\n },\n \"address_family\": {\n \"ipv4 unicast\": {\n \"table_id\": \"0x1\"\n },\n \"ipv6 unicast\": {\n \"table_id\": \"0x1E000001\"\n }\n },\n \"flags\": \"0x0\"\n },\n \"VRF1\": {\n \"interfaces\": [\n \"GigabitEthernet1/0/2\"\n ],\n \"interface\": {\n \"GigabitEthernet1/0/2\": {'vrf': 'VRF1'}\n },\n \"address_family\": {\n \"ipv4 unicast\": {\n \"export_to_global\": {\n \"export_to_global_map\": \"export_to_global_map\",\n \"prefix_limit\": 1000\n },\n \"import_from_global\": {\n \"prefix_limit\": 1000,\n \"import_from_global_map\": \"import_from_global_map\"\n },\n \"table_id\": \"0x1\",\n \"routing_table_limit\": {\n \"routing_table_limit_action\": {\n \"enable_alert_percent\": {\n \"alert_percent_value\": 10000\n }\n }\n },\n \"route_target\": {\n \"200:1\": {\n \"rt_type\": \"both\",\n \"route_target\": \"200:1\"\n },\n \"100:1\": {\n \"rt_type\": \"both\",\n \"route_target\": \"100:1\"\n }\n }\n },\n \"ipv6 unicast\": {\n \"export_to_global\": {\n \"export_to_global_map\": \"export_to_global_map\",\n \"prefix_limit\": 1000\n },\n \"table_id\": \"0x1E000001\",\n \"routing_table_limit\": {\n \"routing_table_limit_action\": {\n \"enable_alert_percent\": {\n \"alert_percent_value\": 7000\n }\n },\n \"routing_table_limit_number\": 10000\n },\n \"route_target\": {\n \"200:1\": {\n \"rt_type\": \"import\",\n \"route_target\": \"200:1\"\n },\n \"400:1\": {\n 
\"rt_type\": \"import\",\n \"route_target\": \"400:1\"\n },\n \"300:1\": {\n \"rt_type\": \"export\",\n \"route_target\": \"300:1\"\n },\n \"100:1\": {\n \"rt_type\": \"export\",\n \"route_target\": \"100:1\"\n }\n }\n }\n },\n \"flags\": \"0x100\",\n \"route_distinguisher\": \"100:1\",\n \"vrf_id\": 1\n }\n }\n\n ShowInterfacesAccounting = {\n 'GigabitEthernet1/0/1': {\n \"accounting\": {\n \"arp\": {\n \"chars_in\": 4590030,\n \"chars_out\": 120,\n \"pkts_in\": 109280,\n \"pkts_out\": 2\n },\n \"ip\": {\n \"chars_in\": 2173570,\n \"chars_out\": 2167858,\n \"pkts_in\": 22150,\n \"pkts_out\": 22121\n },\n \"ipv6\": {\n \"chars_in\": 1944,\n \"chars_out\": 0,\n \"pkts_in\": 24,\n \"pkts_out\": 0\n },\n \"other\": {\n \"chars_in\": 5306164,\n \"chars_out\": 120,\n \"pkts_in\": 112674,\n \"pkts_out\": 2\n }\n }\n }\n }\n ShowInterfaces_gi1 = '''\n GigabitEthernet1/0/1 is administratively down, line protocol is down (disabled) \n Hardware is Gigabit Ethernet, address is 0057.d228.1a64 (bia 0057.d228.1a64)\n Description: desc\n Internet address is 10.1.1.1/24\n MTU 1500 bytes, BW 768 Kbit/sec, DLY 3330 usec, \n reliability 255/255, txload 1/255, rxload 1/255\n Encapsulation ARPA, loopback not set\n Keepalive set (10 sec)\n Auto-duplex, 1000Mb/s, media type is 10/100/1000BaseTX\n input flow-control is off, output flow-control is unsupported \n ARP type: ARPA, ARP Timeout 04:00:00\n Last input never, output 04:39:18, output hang never\n Last clearing of \"show interface\" counters 1d02h\n Input queue: 0/375/0/0 (size/max/drops/flushes); Total output drops: 0\n Queueing strategy: fifo\n Output queue: 0/40 (size/max)\n 30 second input rate 0 bits/sec, 0 packets/sec\n 30 second output rate 0 bits/sec, 0 packets/sec\n 12127 packets input, 2297417 bytes, 0 no buffer\n Received 4173 broadcasts (0 IP multicasts)\n 0 runts, 0 giants, 0 throttles \n 0 input errors, 0 CRC, 0 frame, 0 overrun, 0 ignored\n 0 watchdog, 4171 multicast, 0 pause input\n 0 input packets with dribble condition detected\n 12229 packets output, 2321107 bytes, 0 underruns\n 0 output errors, 0 collisions, 2 interface resets\n 0 unknown protocol drops\n 0 babbles, 0 late collision, 0 deferred\n 0 lost carrier, 0 no carrier, 0 pause output\n 0 output buffer failures, 0 output buffers swapped out\n '''\n\n ShowInterfacesAccounting_gi1 = '''\n GigabitEthernet1/0/1 \n Protocol Pkts In Chars In Pkts Out Chars Out\n Other 112674 5306164 2 120\n IP 22150 2173570 22121 2167858\n ARP 109280 4590030 2 120\n IPv6 24 1944 0 0\n '''\n ShowIpInterfaces_gi1 = '''\n GigabitEthernet1/0/1 is administratively down, line protocol is down\n Internet address is 10.1.1.1/24\n Broadcast address is 255.255.255.255\n Address determined by setup command\n MTU is 1500 bytes\n Helper address is not set\n Directed broadcast forwarding is disabled\n Secondary address 10.2.2.2/24\n Outgoing Common access list is not set \n Outgoing access list is not set\n Inbound Common access list is not set \n Inbound access list is not set\n Proxy ARP is enabled\n Local Proxy ARP is disabled\n Security level is default\n Split horizon is enabled\n ICMP redirects are always sent\n ICMP unreachables are always sent\n ICMP mask replies are never sent\n IP fast switching is enabled\n IP Flow switching is disabled\n IP CEF switching is enabled\n IP CEF switching turbo vector\n IP Null turbo vector\n Associated unicast routing topologies:\n Topology \"base\", operation state is UP\n IP multicast fast switching is disabled\n IP multicast distributed fast switching is disabled\n IP 
route-cache flags are Fast, CEF\n Router Discovery is disabled\n IP output packet accounting is disabled\n IP access violation accounting is disabled\n TCP/IP header compression is disabled\n RTP/IP header compression is disabled\n Probe proxy name replies are disabled\n Policy routing is disabled\n Network address translation is disabled\n BGP Policy Mapping is disabled\n Input features: QoS Classification, QoS Marking, MCI Check\n IPv4 WCCP Redirect outbound is disabled\n IPv4 WCCP Redirect inbound is disabled\n IPv4 WCCP Redirect exclude is disabled\n '''\n ShowIpv6Interfaces_gi1 = '''\n GigabitEthernet1/0/1 is administratively down, line protocol is down\n IPv6 is tentative, link-local address is FE80::257:D2FF:FE28:1A64 [TEN]\n No Virtual link-local address(es):\n Description: desc\n Global unicast address(es):\n 2001:db8:400::1, subnet is 2001:db8:400::/126 [TEN]\n 2001:DB8:1:1::1, subnet is 2001:DB8:1:1::/64 [TEN]\n 2001:DB8:2:2::2, subnet is 2001:DB8:2:2::/64 [TEN]\n 2001:DB8:3:3::3, subnet is 2001:DB8:3:3::/64 [ANY/TEN]\n 2001:DB8:4:4:257:D2FF:FE28:1A64, subnet is 2001:DB8:4:4::/64 [EUI/TEN]\n Joined group address(es):\n fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b\n MTU is 1500 bytes\n ICMP error messages limited to one every 100 milliseconds\n ICMP redirects are enabled\n ICMP unreachables are sent\n ND DAD is enabled, number of DAD attempts: 1\n ND reachable time is 30000 milliseconds (using 30000)\n ND NS retransmit interval is 1000 milliseconds\n '''\n ShowInterfacesSwitchport_gi1 ='''\n Name: Gi1/0/1\n Switchport: Enabled\n Administrative Mode: trunk\n Operational Mode: trunk\n Administrative Trunking Encapsulation: dot1q\n Operational Trunking Encapsulation: dot1q\n Negotiation of Trunking: On\n Access Mode VLAN: 1 (default)\n Trunking Native Mode VLAN: 1 (default)\n Administrative Native VLAN tagging: enabled\n Voice VLAN: none\n Administrative private-vlan host-association: none \n Administrative private-vlan mapping: none \n Administrative private-vlan trunk native VLAN: none\n Administrative private-vlan trunk Native VLAN tagging: enabled\n Administrative private-vlan trunk encapsulation: dot1q\n Administrative private-vlan trunk normal VLANs: none\n Administrative private-vlan trunk associations: none\n Administrative private-vlan trunk mappings: none\n Operational private-vlan: none\n Trunking VLANs Enabled: 200-211\n Pruning VLANs Enabled: 2-1001\n Capture Mode Disabled\n Capture VLANs Allowed: ALL\n '''\n ShowVrf_all = '''\n Name Default RD Protocols Interfaces\n Mgmt-intf <not set> ipv4,ipv6 GigabitEthernet1/0/2\n VRF1 65000:1 ipv4,ipv6 GigabitEthernet1/0/1\n\n '''\n ShowInterfaces_all = '''\n GigabitEthernet1/0/1 is administratively down, line protocol is down (disabled) \n Hardware is Gigabit Ethernet, address is 0057.d228.1a64 (bia 0057.d228.1a64)\n Description: desc\n Internet address is 10.1.1.1/24\n MTU 1500 bytes, BW 768 Kbit/sec, DLY 3330 usec, \n reliability 255/255, txload 1/255, rxload 1/255\n Encapsulation ARPA, loopback not set\n Keepalive set (10 sec)\n Auto-duplex, 1000Mb/s, media type is 10/100/1000BaseTX\n input flow-control is off, output flow-control is unsupported \n ARP type: ARPA, ARP Timeout 04:00:00\n Last input never, output 04:39:18, output hang never\n Last clearing of \"show interface\" counters 1d02h\n Input queue: 0/375/0/0 (size/max/drops/flushes); Total output drops: 0\n Queueing strategy: fifo\n Output queue: 0/40 (size/max)\n 30 second input rate 0 bits/sec, 0 packets/sec\n 30 second output rate 0 bits/sec, 0 packets/sec\n 12127 packets 
input, 2297417 bytes, 0 no buffer\n Received 4173 broadcasts (0 IP multicasts)\n 0 runts, 0 giants, 0 throttles \n 0 input errors, 0 CRC, 0 frame, 0 overrun, 0 ignored\n 0 watchdog, 4171 multicast, 0 pause input\n 0 input packets with dribble condition detected\n 12229 packets output, 2321107 bytes, 0 underruns\n 0 output errors, 0 collisions, 2 interface resets\n 0 unknown protocol drops\n 0 babbles, 0 late collision, 0 deferred\n 0 lost carrier, 0 no carrier, 0 pause output\n 0 output buffer failures, 0 output buffers swapped out\n GigabitEthernet1/0/2 is up, line protocol is up (connected) \n Hardware is Gigabit Ethernet, address is 0057.d228.1a02 (bia 0057.d228.1a02)\n MTU 1500 bytes, BW 1000000 Kbit/sec, DLY 10 usec, \n reliability 255/255, txload 1/255, rxload 1/255\n Encapsulation ARPA, loopback not set\n Keepalive set (10 sec)\n Full-duplex, 1000Mb/s, media type is 10/100/1000BaseTX\n input flow-control is off, output flow-control is unsupported \n ARP type: ARPA, ARP Timeout 04:00:00\n Last input never, output 00:00:02, output hang never\n Last clearing of \"show interface\" counters 1d02h\n Input queue: 0/2000/0/0 (size/max/drops/flushes); Total output drops: 0\n Queueing strategy: fifo\n Output queue: 0/40 (size/max)\n 5 minute input rate 3000 bits/sec, 5 packets/sec\n 5 minute output rate 0 bits/sec, 0 packets/sec\n 545526 packets input, 41210298 bytes, 0 no buffer\n Received 535996 broadcasts (535961 multicasts)\n 0 runts, 0 giants, 0 throttles \n 0 input errors, 0 CRC, 0 frame, 0 overrun, 0 ignored\n 0 watchdog, 535961 multicast, 0 pause input\n 0 input packets with dribble condition detected\n 23376 packets output, 3642296 bytes, 0 underruns\n 0 output errors, 0 collisions, 5 interface resets\n 0 unknown protocol drops\n 0 babbles, 0 late collision, 0 deferred\n 0 lost carrier, 0 no carrier, 0 pause output\n 0 output buffer failures, 0 output buffers swapped out\n '''\n ShowIpInterfaces_all = '''\n GigabitEthernet1/0/1 is administratively down, line protocol is down\n Internet address is 10.1.1.1/24\n Broadcast address is 255.255.255.255\n Address determined by setup command\n MTU is 1500 bytes\n Helper address is not set\n Directed broadcast forwarding is disabled\n Secondary address 10.2.2.2/24\n Outgoing Common access list is not set \n Outgoing access list is not set\n Inbound Common access list is not set \n Inbound access list is not set\n Proxy ARP is enabled\n Local Proxy ARP is disabled\n Security level is default\n Split horizon is enabled\n ICMP redirects are always sent\n ICMP unreachables are always sent\n ICMP mask replies are never sent\n IP fast switching is enabled\n IP Flow switching is disabled\n IP CEF switching is enabled\n IP CEF switching turbo vector\n IP Null turbo vector\n Associated unicast routing topologies:\n Topology \"base\", operation state is UP\n IP multicast fast switching is disabled\n IP multicast distributed fast switching is disabled\n IP route-cache flags are Fast, CEF\n Router Discovery is disabled\n IP output packet accounting is disabled\n IP access violation accounting is disabled\n TCP/IP header compression is disabled\n RTP/IP header compression is disabled\n Probe proxy name replies are disabled\n Policy routing is disabled\n Network address translation is disabled\n BGP Policy Mapping is disabled\n Input features: QoS Classification, QoS Marking, MCI Check\n IPv4 WCCP Redirect outbound is disabled\n IPv4 WCCP Redirect inbound is disabled\n IPv4 WCCP Redirect exclude is disabled\n\n '''\n ShowIpv6Interfaces_all = '''\n 
GigabitEthernet1/0/1 is administratively down, line protocol is down\n IPv6 is tentative, link-local address is FE80::257:D2FF:FE28:1A64 [TEN]\n No Virtual link-local address(es):\n Description: desc\n Global unicast address(es):\n 2001:db8:400::1, subnet is 2001:db8:400::/126 [TEN]\n 2001:DB8:1:1::1, subnet is 2001:DB8:1:1::/64 [TEN]\n 2001:DB8:2:2::2, subnet is 2001:DB8:2:2::/64 [TEN]\n 2001:DB8:3:3::3, subnet is 2001:DB8:3:3::/64 [ANY/TEN]\n 2001:DB8:4:4:257:D2FF:FE28:1A64, subnet is 2001:DB8:4:4::/64 [EUI/TEN]\n Joined group address(es):\n fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b\n MTU is 1500 bytes\n ICMP error messages limited to one every 100 milliseconds\n ICMP redirects are enabled\n ICMP unreachables are sent\n ND DAD is enabled, number of DAD attempts: 1\n ND reachable time is 30000 milliseconds (using 30000)\n ND NS retransmit interval is 1000 milliseconds\n '''\n ShowInterfacesAccounting_all = '''\n GigabitEthernet1/0/1\n Protocol Pkts In Chars In Pkts Out Chars Out\n Other 112674 5306164 2 120\n IP 22150 2173570 22121 2167858\n ARP 109280 4590030 2 120\n IPv6 24 1944 0 0\n GigabitEthernet1/0/2 \n Protocol Pkts In Chars In Pkts Out Chars Out\n Other 3483 741524 92 5520\n IP 11745 968690 10821 1148402\n ARP 91 5460 92 5520\n IPv6 1 70 0 0\n\n '''\n ShowInterfacesSwitchport_all ='''\n Name: Gi1/0/1\n Switchport: Enabled\n Administrative Mode: trunk\n Operational Mode: trunk\n Administrative Trunking Encapsulation: dot1q\n Operational Trunking Encapsulation: dot1q\n Negotiation of Trunking: On\n Access Mode VLAN: 1 (default)\n Trunking Native Mode VLAN: 1 (default)\n Administrative Native VLAN tagging: enabled\n Voice VLAN: none\n Administrative private-vlan host-association: none \n Administrative private-vlan mapping: none \n Administrative private-vlan trunk native VLAN: none\n Administrative private-vlan trunk Native VLAN tagging: enabled\n Administrative private-vlan trunk encapsulation: dot1q\n Administrative private-vlan trunk normal VLANs: none\n Administrative private-vlan trunk associations: none\n Administrative private-vlan trunk mappings: none\n Operational private-vlan: none\n Trunking VLANs Enabled: 200-211\n Pruning VLANs Enabled: 2-1001\n Capture Mode Disabled\n Capture VLANs Allowed: ALL\n Name: Gi1/0/2\n Switchport: Enabled\n Administrative Mode: trunk\n Operational Mode: trunk \n Administrative Trunking Encapsulation: dot1q\n Operational Trunking Encapsulation: dot1q\n Negotiation of Trunking: On\n Access Mode VLAN: 1 (default)\n Trunking Native Mode VLAN: 1 (default)\n Administrative Native VLAN tagging: enabled\n Voice VLAN: none\n Administrative private-vlan host-association: none \n Administrative private-vlan mapping: none \n Administrative private-vlan trunk native VLAN: none\n Administrative private-vlan trunk Native VLAN tagging: enabled\n Administrative private-vlan trunk encapsulation: dot1q\n Administrative private-vlan trunk normal VLANs: none\n Administrative private-vlan trunk associations: none\n Administrative private-vlan trunk mappings:\n 10 (VLAN0010) 100 (VLAN0100)\n Operational private-vlan:\n 10 (VLAN0010) 100 (VLAN0100)\n Trunking VLANs Enabled: 100-110\n Pruning VLANs Enabled: 2-1001\n Capture Mode Disabled\n Capture VLANs Allowed: ALL\n\n Protected: false\n Unknown unicast blocked: disabled\n Unknown multicast blocked: disabled\n Appliance trust: none\n '''\n ShowInterfacesAccountingCustom = {\n \"GigabitEthernet1/0/1\": {\n \"accounting\": {\n \"arp\": {\n \"chars_in\": 4590030,\n \"chars_out\": 120,\n \"pkts_in\": 109280,\n 
\"pkts_out\": 2\n },\n \"ip\": {\n \"chars_in\": 2173570,\n \"chars_out\": 2167858,\n \"pkts_in\": 22150,\n \"pkts_out\": 22121\n },\n \"ipv6\": {\n \"chars_in\": 1944,\n \"chars_out\": 0,\n \"pkts_in\": 24,\n \"pkts_out\": 0\n },\n \"other\": {\n \"chars_in\": 5306164,\n \"chars_out\": 120,\n \"pkts_in\": 112674,\n \"pkts_out\": 2\n }\n }\n }\n }\n\n interfaceOpsOutput_custom_info = {\n \"GigabitEthernet1/0/1\": {\n \"accounting\": {\n \"arp\": {\n \"chars_in\": 4590030,\n \"chars_out\": 120,\n \"pkts_in\": 109280,\n \"pkts_out\": 2\n },\n \"ip\": {\n \"chars_in\": 2173570,\n \"chars_out\": 2167858,\n \"pkts_in\": 22150,\n \"pkts_out\": 22121\n },\n \"ipv6\": {\n \"chars_in\": 1944,\n \"chars_out\": 0,\n \"pkts_in\": 24,\n \"pkts_out\": 0\n },\n \"other\": {\n \"chars_in\": 5306164,\n \"chars_out\": 120,\n \"pkts_in\": 112674,\n \"pkts_out\": 2\n }\n },\n \"type\": \"Gigabit Ethernet\",\n \"oper_status\": \"down\",\n 'access_vlan': '1',\n 'vrf': 'VRF1',\n \"ipv4\": {\n \"10.2.2.2/24\": {\n \"secondary\": True,\n \"ip\": \"10.2.2.2\",\n \"prefix_length\": \"24\"\n },\n \"10.1.1.1/24\": {\n \"secondary\": False,\n \"ip\": \"10.1.1.1\",\n \"prefix_length\": \"24\"\n }\n },\n \"mac_address\": \"0057.d228.1a64\",\n \"duplex_mode\": \"auto\",\n \"port_speed\": \"1000\",\n \"delay\": 3330,\n \"phys_address\": \"0057.d228.1a64\",\n \"port_channel\": {\n \"port_channel_member\": False\n },\n \"encapsulation\": {\n \"encapsulation\": \"arpa\",\n 'native_vlan': '1'\n },\n \"mtu\": 1500,\n \"description\": \"desc\",\n \"flow_control\": {\n \"receive\": False,\n \"send\": False\n },\n \"enabled\": False,\n \"counters\": {\n \"last_clear\": \"1d02h\",\n \"in_pkts\": 12127,\n \"out_errors\": 0,\n \"in_octets\": 2297417,\n \"out_octets\": 2321107,\n \"in_broadcast_pkts\": 0,\n \"rate\": {\n \"out_rate_pkts\": 0,\n \"out_rate\": 0,\n \"load_interval\": 30,\n \"in_rate\": 0,\n \"in_rate_pkts\": 0\n },\n \"out_pkts\": 12229,\n \"in_multicast_pkts\": 4171,\n \"in_crc_errors\": 0,\n \"in_mac_pause_frames\": 0,\n \"in_errors\": 0,\n \"out_mac_pause_frames\": 0\n },\n \"bandwidth\": 768,\n \"switchport_enable\": True,\n 'switchport_mode': 'trunk',\n 'trunk_vlans': '200-211',\n 'vlan_id': '1'\n },\n\n }\n InterfaceOpsOutput_info = {\n \"GigabitEthernet1/0/1\": {\n \"accounting\": {\n \"arp\": {\n \"chars_in\": 4590030,\n \"chars_out\": 120,\n \"pkts_in\": 109280,\n \"pkts_out\": 2\n },\n \"ip\": {\n \"chars_in\": 2173570,\n \"chars_out\": 2167858,\n \"pkts_in\": 22150,\n \"pkts_out\": 22121\n },\n \"ipv6\": {\n \"chars_in\": 1944,\n \"chars_out\": 0,\n \"pkts_in\": 24,\n \"pkts_out\": 0\n },\n \"other\": {\n \"chars_in\": 5306164,\n \"chars_out\": 120,\n \"pkts_in\": 112674,\n \"pkts_out\": 2\n }\n },\n \"type\": \"Gigabit Ethernet\",\n \"oper_status\": \"down\",\n \"ipv4\": {\n \"10.2.2.2/24\": {\n \"secondary\": True,\n \"ip\": \"10.2.2.2\",\n \"prefix_length\": \"24\"\n },\n \"10.1.1.1/24\": {\n \"secondary\": False,\n \"ip\": \"10.1.1.1\",\n \"prefix_length\": \"24\"\n }\n },\n 'vrf': 'VRF1',\n 'access_vlan': '1',\n \"mac_address\": \"0057.d228.1a64\",\n \"duplex_mode\": \"auto\",\n \"port_speed\": \"1000\",\n \"delay\": 3330,\n \"phys_address\": \"0057.d228.1a64\",\n \"port_channel\": {\n \"port_channel_member\": False\n },\n \"encapsulation\": {\n \"encapsulation\": \"arpa\",\n 'native_vlan': '1'\n },\n \"mtu\": 1500,\n \"description\": \"desc\",\n \"flow_control\": {\n \"receive\": False,\n \"send\": False\n },\n \"enabled\": False,\n \"counters\": {\n \"last_clear\": \"1d02h\",\n \"in_pkts\": 12127,\n 
\"out_errors\": 0,\n \"in_octets\": 2297417,\n \"out_octets\": 2321107,\n \"in_broadcast_pkts\": 0,\n \"rate\": {\n \"out_rate_pkts\": 0,\n \"out_rate\": 0,\n \"load_interval\": 30,\n \"in_rate\": 0,\n \"in_rate_pkts\": 0\n },\n \"out_pkts\": 12229,\n \"in_multicast_pkts\": 4171,\n \"in_crc_errors\": 0,\n \"in_mac_pause_frames\": 0,\n \"in_errors\": 0,\n \"out_mac_pause_frames\": 0\n },\n \"bandwidth\": 768,\n \"ipv6\": {\n \"FE80::257:D2FF:FE28:1A64\": {\n \"origin\": \"link_layer\",\n \"ip\": \"FE80::257:D2FF:FE28:1A64\",\n \"status\": \"tentative\"\n },\n \"2001:DB8:4:4:257:D2FF:FE28:1A64/64\": {\n \"ip\": \"2001:DB8:4:4:257:D2FF:FE28:1A64\",\n \"eui_64\": True,\n \"prefix_length\": \"64\",\n \"status\": \"tentative\"\n },\n \"2001:db8:400::1/126\": {\n \"ip\": \"2001:db8:400::1\",\n \"prefix_length\": \"126\",\n \"status\": \"tentative\"\n },\n \"2001:DB8:1:1::1/64\": {\n \"ip\": \"2001:DB8:1:1::1\",\n \"prefix_length\": \"64\",\n \"status\": \"tentative\"\n },\n \"2001:DB8:2:2::2/64\": {\n \"ip\": \"2001:DB8:2:2::2\",\n \"prefix_length\": \"64\",\n \"status\": \"tentative\"\n },\n \"2001:DB8:3:3::3/64\": {\n \"ip\": \"2001:DB8:3:3::3\",\n \"anycast\": True,\n \"prefix_length\": \"64\",\n \"status\": \"tentative\"\n }\n },\n \"switchport_enable\": True,\n 'switchport_mode': 'trunk',\n 'trunk_vlans': '200-211',\n 'vlan_id': '1',\n },\n \"GigabitEthernet1/0/2\": {\n \"accounting\": {\n \"arp\": {\n \"chars_in\": 5460,\n \"chars_out\": 5520,\n \"pkts_in\": 91,\n \"pkts_out\": 92\n },\n \"ip\": {\n \"chars_in\": 968690,\n \"chars_out\": 1148402,\n \"pkts_in\": 11745,\n \"pkts_out\": 10821\n },\n \"ipv6\": {\n \"chars_in\": 70,\n \"chars_out\": 0,\n \"pkts_in\": 1,\n \"pkts_out\": 0\n },\n \"other\": {\n \"chars_in\": 741524,\n \"chars_out\": 5520,\n \"pkts_in\": 3483,\n \"pkts_out\": 92\n }\n },\n \"type\": \"Gigabit Ethernet\",\n 'access_vlan': '1',\n \"oper_status\": \"up\",\n \"mac_address\": \"0057.d228.1a02\",\n \"duplex_mode\": \"full\",\n \"vrf\": \"Mgmt-intf\",\n \"delay\": 10,\n \"phys_address\": \"0057.d228.1a02\",\n \"port_channel\": {\n \"port_channel_member\": False\n },\n \"port_speed\": \"1000\",\n \"encapsulation\": {\n \"encapsulation\": \"arpa\",\n 'native_vlan': '1'\n },\n \"mtu\": 1500,\n \"flow_control\": {\n \"receive\": False,\n \"send\": False\n },\n \"enabled\": True,\n \"counters\": {\n \"last_clear\": \"1d02h\",\n \"in_pkts\": 545526,\n \"out_errors\": 0,\n \"in_octets\": 41210298,\n \"out_octets\": 3642296,\n \"in_broadcast_pkts\": 535961,\n \"rate\": {\n \"out_rate_pkts\": 0,\n \"out_rate\": 0,\n \"load_interval\": 300,\n \"in_rate\": 3000,\n \"in_rate_pkts\": 5\n },\n \"out_pkts\": 23376,\n \"in_multicast_pkts\": 535961,\n \"in_crc_errors\": 0,\n \"in_mac_pause_frames\": 0,\n \"in_errors\": 0,\n \"out_mac_pause_frames\": 0\n },\n 'switchport_enable': True,\n 'switchport_mode': 'trunk',\n 'trunk_vlans': '100-110',\n \"bandwidth\": 1000000,\n 'vlan_id': '1',\n },\n }\n", "id": "3361189", "language": "Python", "matching_score": 9.65522289276123, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/interface/iosxe/c3850/tests/interface_output.py" }, { "content": "''' \nInterface Genie Ops Object Outputs for IOSXR.\n'''\n\n\nclass InterfaceOutput(object):\n\n ShowInterfacesDetail = {\n 'GigabitEthernet0/0/0/0': {\n 'auto_negotiate': True,\n 'bandwidth': 768,\n 'counters': {'carrier_transitions': 0,\n 'drops': 0,\n 'in_abort': 0,\n 'in_broadcast_pkts': 0,\n 'in_crc_errors': 0,\n 'in_discards': 0,\n 'in_errors': 0,\n 'in_frame': 0,\n 'in_giants': 0,\n 
'in_ignored': 0,\n 'in_multicast_pkts': 0,\n 'in_octets': 0,\n 'in_overrun': 0,\n 'in_parity': 0,\n 'in_pkts': 0,\n 'in_runts': 0,\n 'in_throttles': 0,\n 'last_clear': 'never',\n 'out_applique': 0,\n 'out_broadcast_pkts': 0,\n 'out_buffer_failures': 0,\n 'out_buffer_swapped_out': 0,\n 'out_discards': 0,\n 'out_errors': 0,\n 'out_multicast_pkts': 0,\n 'out_octets': 0,\n 'out_pkts': 0,\n 'out_resets': 0,\n 'out_underruns': 0,\n 'rate': {'in_rate': 0,\n 'in_rate_pkts': 0,\n 'load_interval': 30,\n 'out_rate': 0,\n 'out_rate_pkts': 0}},\n 'description': 'desc',\n 'duplex_mode': 'full',\n 'enabled': False,\n 'encapsulations': {'encapsulation': 'ARPA'},\n 'flow_control': {'flow_control_receive': False,\n 'flow_control_send': False},\n 'interface_state': 0,\n 'ipv4': {'10.1.1.1/24': {'ip': '10.1.1.1',\n 'prefix_length': '24'}},\n 'last_input': 'never',\n 'last_output': 'never',\n 'line_protocol': 'administratively down',\n 'location': 'unknown',\n 'loopback_status': 'not set',\n 'mac_address': 'aaaa.bbbb.cccc',\n 'mtu': 1600,\n 'phys_address': '5254.0077.9407',\n 'port_speed': '1000Mb/s',\n 'reliability': '255/255',\n 'rxload': '0/255',\n 'txload': '0/255',\n 'types': 'GigabitEthernet'},\n 'GigabitEthernet0/0/0/0.10': {\n 'bandwidth': 768,\n 'counters': {'drops': 0,\n 'in_broadcast_pkts': 0,\n 'in_discards': 0,\n 'in_multicast_pkts': 0,\n 'in_octets': 0,\n 'in_pkts': 0,\n 'last_clear': 'never',\n 'out_broadcast_pkts': 0,\n 'out_discards': 0,\n 'out_multicast_pkts': 0,\n 'out_octets': 0,\n 'out_pkts': 0,\n 'rate': {'in_rate': 0,\n 'in_rate_pkts': 0,\n 'load_interval': 5,\n 'out_rate': 0,\n 'out_rate_pkts': 0}},\n 'enabled': False,\n 'encapsulations': {'encapsulation': '802.1Q '\n 'Virtual '\n 'LAN',\n 'first_dot1q': '10',\n 'second_dot1q': '10'},\n 'interface_state': 0,\n 'last_input': 'never',\n 'last_output': 'never',\n 'line_protocol': 'administratively down',\n 'loopback_status': 'not set',\n 'mtu': 1608,\n 'reliability': '255/255',\n 'rxload': '0/255',\n 'txload': '0/255'},\n 'GigabitEthernet0/0/0/0.20': {\n 'bandwidth': 768,\n 'counters': {'drops': 0,\n 'in_broadcast_pkts': 0,\n 'in_discards': 0,\n 'in_multicast_pkts': 0,\n 'in_octets': 0,\n 'in_pkts': 0,\n 'last_clear': 'never',\n 'out_broadcast_pkts': 0,\n 'out_discards': 0,\n 'out_multicast_pkts': 0,\n 'out_octets': 0,\n 'out_pkts': 0,\n 'rate': {'in_rate': 0,\n 'in_rate_pkts': 0,\n 'load_interval': 5,\n 'out_rate': 0,\n 'out_rate_pkts': 0}},\n 'enabled': False,\n 'encapsulations': {'encapsulation': '802.1Q '\n 'Virtual '\n 'LAN',\n 'first_dot1q': '20'},\n 'interface_state': 0,\n 'last_input': 'never',\n 'last_output': 'never',\n 'line_protocol': 'administratively down',\n 'loopback_status': 'not set',\n 'mtu': 1604,\n 'reliability': '255/255',\n 'rxload': '0/255',\n 'txload': '0/255'},\n 'MgmtEth0/0/CPU0/0': {\n 'auto_negotiate': True,\n 'bandwidth': 0,\n 'counters': {'carrier_transitions': 0,\n 'drops': 0,\n 'in_abort': 0,\n 'in_broadcast_pkts': 0,\n 'in_crc_errors': 0,\n 'in_discards': 0,\n 'in_errors': 0,\n 'in_frame': 0,\n 'in_giants': 0,\n 'in_ignored': 0,\n 'in_multicast_pkts': 0,\n 'in_octets': 0,\n 'in_overrun': 0,\n 'in_parity': 0,\n 'in_pkts': 0,\n 'in_runts': 0,\n 'in_throttles': 0,\n 'last_clear': 'never',\n 'out_applique': 0,\n 'out_broadcast_pkts': 0,\n 'out_buffer_failures': 0,\n 'out_buffer_swapped_out': 0,\n 'out_discards': 0,\n 'out_errors': 0,\n 'out_multicast_pkts': 0,\n 'out_octets': 0,\n 'out_pkts': 0,\n 'out_resets': 0,\n 'out_underruns': 0,\n 'rate': {'in_rate': 0,\n 'in_rate_pkts': 0,\n 'load_interval': 5,\n 
'out_rate': 0,\n 'out_rate_pkts': 0}},\n 'duplex_mode': 'duplex unknown',\n 'enabled': False,\n 'encapsulations': {'encapsulation': 'ARPA'},\n 'flow_control': {'flow_control_receive': False,\n 'flow_control_send': False},\n 'interface_state': 0,\n 'last_input': 'never',\n 'last_output': 'never',\n 'line_protocol': 'administratively down',\n 'location': 'unknown',\n 'loopback_status': 'not set',\n 'mac_address': '5254.00c3.6c43',\n 'mtu': 1514,\n 'phys_address': '5254.00c3.6c43',\n 'port_speed': '0Kb/s',\n 'reliability': '255/255',\n 'rxload': 'Unknown',\n 'txload': 'Unknown',\n 'types': 'Management Ethernet'},\n 'Null0': {\n 'bandwidth': 0,\n 'counters': {'drops': 0,\n 'in_broadcast_pkts': 0,\n 'in_discards': 0,\n 'in_multicast_pkts': 0,\n 'in_octets': 0,\n 'in_pkts': 0,\n 'last_clear': 'never',\n 'out_broadcast_pkts': 0,\n 'out_discards': 0,\n 'out_multicast_pkts': 0,\n 'out_octets': 0,\n 'out_pkts': 0,\n 'rate': {'in_rate': 0,\n 'in_rate_pkts': 0,\n 'load_interval': 5,\n 'out_rate': 0,\n 'out_rate_pkts': 0}},\n 'enabled': True,\n 'encapsulations': {'encapsulation': 'Null'},\n 'last_input': 'never',\n 'last_output': 'never',\n 'line_protocol': 'up',\n 'loopback_status': 'not set',\n 'mtu': 1500,\n 'reliability': '255/255',\n 'rxload': 'Unknown',\n 'txload': 'Unknown',\n 'types': 'Null'}\n\n }\n\n ShowEthernetTags = {\n \"GigabitEthernet0/0/0/0.10\": {\n \"rewrite_num_of_tags_push\": 0,\n \"status\": \"up\",\n \"rewrite_num_of_tags_pop\": 1,\n \"mtu\": 1518,\n \"outer_vlan\": \".1Q:10\",\n \"vlan_id\": \"10\"\n },\n \"GigabitEthernet0/0/0/0.20\": {\n \"rewrite_num_of_tags_push\": 0,\n \"status\": \"up\",\n \"rewrite_num_of_tags_pop\": 1,\n \"mtu\": 1518,\n \"outer_vlan\": \".1Q:20\",\n \"vlan_id\": \"20\"\n }\n }\n\n ShowIpv6VrfAllInterface = {\n 'GigabitEthernet0/0/0/0': {\n 'enabled': True,\n 'int_status': 'shutdown',\n 'ipv6': {'2001:db8:1:1::1/64': {'ipv6': '2001:db8:1:1::1',\n 'ipv6_prefix_length': '64',\n 'ipv6_status': 'tentative',\n 'ipv6_subnet': '2001:db8:1:1::'},\n '2001:db8:2:2::2/64': {'ipv6': '2001:db8:2:2::2',\n 'ipv6_prefix_length': '64',\n 'ipv6_status': 'tentative',\n 'ipv6_subnet': '2001:db8:2:2::'},\n '2001:db8:3:3:a8aa:bbff:febb:cccc/64': {'ipv6': '2001:db8:3:3:a8aa:bbff:febb:cccc',\n 'ipv6_eui64': True,\n 'ipv6_prefix_length': '64',\n 'ipv6_status': 'tentative',\n 'ipv6_subnet': '2001:db8:3:3::'},\n '2001:db8:4:4::4/64': {'ipv6': '2001:db8:4:4::4',\n 'ipv6_prefix_length': '64',\n 'ipv6_route_tag': '10',\n 'ipv6_status': 'tentative',\n 'ipv6_subnet': '2001:db8:4:4::'},\n 'auto_config_state': 'stateless',\n 'complete_glean_adj': '0',\n 'complete_protocol_adj': '0',\n 'dropped_glean_req': '0',\n 'dropped_protocol_req': '0',\n 'icmp_redirects': 'disabled',\n 'icmp_unreachables': 'enabled',\n 'in_access_list': 'not set',\n 'incomplete_glean_adj': '0',\n 'incomplete_protocol_adj': '0',\n 'ipv6_link_local': 'fe80::a8aa:bbff:febb:cccc',\n 'ipv6_link_local_state': 'tentative',\n 'ipv6_mtu': '1600',\n 'ipv6_mtu_available': '1586',\n 'nd_adv_retrans_int': '0',\n 'nd_cache_limit': '1000000000',\n 'nd_reachable_time': '0',\n 'out_access_list': 'not set',\n 'table_id': '0xe0800011'},\n 'ipv6_enabled': False,\n 'oper_status': 'down',\n 'vrf': 'VRF1',\n 'vrf_id': '0x60000002'},\n 'GigabitEthernet0/0/0/0.10': {'enabled': False,\n 'int_status': 'shutdown',\n 'ipv6_enabled': False,\n 'oper_status': 'down',\n 'vrf': 'default',\n 'vrf_id': '0x60000000'},\n 'GigabitEthernet0/0/0/0.20': {'enabled': False,\n 'int_status': 'shutdown',\n 'ipv6_enabled': False,\n 'oper_status': 'down',\n 'vrf': 
'default',\n 'vrf_id': '0x60000000'},\n 'GigabitEthernet0/0/0/1': {'enabled': True,\n 'int_status': 'up',\n 'ipv6': {'2001:db8:1:5::1/64': {'ipv6': '2001:db8:1:5::1',\n 'ipv6_prefix_length': '64',\n 'ipv6_subnet': '2001:db8:1:5::'},\n 'complete_glean_adj': '1',\n 'complete_protocol_adj': '1',\n 'dad_attempts': '1',\n 'dropped_glean_req': '0',\n 'dropped_protocol_req': '0',\n 'icmp_redirects': 'disabled',\n 'icmp_unreachables': 'enabled',\n 'incomplete_glean_adj': '0',\n 'incomplete_protocol_adj': '0',\n 'ipv6_groups': ['fc00:e968:6179::de52:7100:1',\n 'fc00:e968:6179::de52:7100:ff78:ebe0',\n 'fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b',\n 'fc00:e968:6179::de52:7100'],\n 'ipv6_link_local': 'fe80::5054:ff:fe78:ebe0',\n 'ipv6_mtu': '1514',\n 'ipv6_mtu_available': '1500',\n 'nd_adv_duration': '160-240',\n 'nd_adv_retrans_int': '0',\n 'nd_cache_limit': '1000000000',\n 'nd_dad': 'enabled',\n 'nd_reachable_time': '0',\n 'nd_router_adv': '1800',\n 'stateless_autoconfig': True,\n 'table_id': '0xe0800010'},\n 'ipv6_enabled': True,\n 'oper_status': 'up',\n 'vrf': 'VRF1',\n 'vrf_id': '0x60000001'},\n 'GigabitEthernet0/0/0/2': {'enabled': False,\n 'int_status': 'shutdown',\n 'ipv6_enabled': False,\n 'oper_status': 'down',\n 'vrf': 'default',\n 'vrf_id': '0x60000000'},\n 'GigabitEthernet0/0/0/3': {'enabled': False,\n 'int_status': 'shutdown',\n 'ipv6_enabled': False,\n 'oper_status': 'down',\n 'vrf': 'default',\n 'vrf_id': '0x60000000'},\n 'GigabitEthernet0/0/0/4': {'enabled': False,\n 'int_status': 'shutdown',\n 'ipv6_enabled': False,\n 'oper_status': 'down',\n 'vrf': 'default',\n 'vrf_id': '0x60000000'},\n 'GigabitEthernet0/0/0/5': {'enabled': False,\n 'int_status': 'shutdown',\n 'ipv6_enabled': False,\n 'oper_status': 'down',\n 'vrf': 'default',\n 'vrf_id': '0x60000000'},\n 'GigabitEthernet0/0/0/6': {'enabled': False,\n 'int_status': 'shutdown',\n 'ipv6_enabled': False,\n 'oper_status': 'down',\n 'vrf': 'default',\n 'vrf_id': '0x60000000'},\n 'MgmtEth0/0/CPU0/0': {'enabled': False,\n 'int_status': 'shutdown',\n 'ipv6_enabled': False,\n 'oper_status': 'down',\n 'vrf': 'default',\n 'vrf_id': '0x60000000'}\n }\n\n ShowIpv4VrfAllInterface = {\n 'GigabitEthernet0/0/0/0': {\n 'int_status': 'shutdown',\n 'ipv4': {'10.1.1.1/24': {'ip': '10.1.1.1',\n 'prefix_length': '24',\n 'route_tag': 50},\n '10.2.2.2/24': {'arp': 'disabled',\n 'broadcast_forwarding': 'disabled',\n 'helper_address': 'not '\n 'set',\n 'icmp_redirects': 'never '\n 'sent',\n 'icmp_replies': 'never '\n 'sent',\n 'icmp_unreachables': 'always '\n 'sent',\n 'in_access_list': 'not '\n 'set',\n 'ip': '10.2.2.2',\n 'mtu': 1600,\n 'mtu_available': 1586,\n 'out_access_list': 'not '\n 'set',\n 'prefix_length': '24',\n 'secondary': True,\n 'table_id': '0xe0000011'},\n 'unnumbered': {'unnumbered_int': '10.69.111.111/32',\n 'unnumbered_intf_ref': 'Loopback11'}},\n 'oper_status': 'down',\n 'vrf': 'VRF1',\n 'vrf_id': '0x60000002'},\n 'GigabitEthernet0/0/0/0.10': {'int_status': 'shutdown',\n 'oper_status': 'down',\n 'vrf': 'default',\n 'vrf_id': '0x60000000'},\n 'GigabitEthernet0/0/0/0.20': {'int_status': 'shutdown',\n 'oper_status': 'down',\n 'vrf': 'default',\n 'vrf_id': '0x60000000'},\n 'GigabitEthernet0/0/0/1': {'int_status': 'shutdown',\n 'oper_status': 'down',\n 'vrf': 'VRF2',\n 'vrf_id': '0x60000003'},\n 'GigabitEthernet0/0/0/2': {'int_status': 'shutdown',\n 'oper_status': 'down',\n 'vrf': 'default',\n 'vrf_id': '0x60000000'},\n 'GigabitEthernet0/0/0/3': {'int_status': 'shutdown',\n 'oper_status': 'down',\n 'vrf': 'default',\n 'vrf_id': '0x60000000'},\n 
'GigabitEthernet0/0/0/4': {'int_status': 'shutdown',\n 'oper_status': 'down',\n 'vrf': 'default',\n 'vrf_id': '0x60000000'},\n 'GigabitEthernet0/0/0/5': {'int_status': 'shutdown',\n 'oper_status': 'down',\n 'vrf': 'default',\n 'vrf_id': '0x60000000'},\n 'GigabitEthernet0/0/0/6': {'int_status': 'shutdown',\n 'oper_status': 'down',\n 'vrf': 'default',\n 'vrf_id': '0x60000000'},\n 'MgmtEth0/0/CPU0/0': {'int_status': 'shutdown',\n 'oper_status': 'down',\n 'vrf': 'default',\n 'vrf_id': '0x60000000'}\n }\n\n ShowVrfAllDetail = {\n \"VRF1\": {\n \"description\": \"not set\",\n \"vrf_mode\": \"Regular\",\n \"address_family\": {\n \"ipv6 unicast\": {\n \"route_target\": {\n \"400:1\": {\n \"rt_type\": \"import\",\n \"route_target\": \"400:1\"\n },\n \"300:1\": {\n \"rt_type\": \"import\",\n \"route_target\": \"300:1\"\n },\n \"200:1\": {\n \"rt_type\": \"both\",\n \"route_target\": \"200:1\"\n },\n \"200:2\": {\n \"rt_type\": \"import\",\n \"route_target\": \"200:2\"\n }\n }\n },\n \"ipv4 unicast\": {\n \"route_target\": {\n \"400:1\": {\n \"rt_type\": \"import\",\n \"route_target\": \"400:1\"\n },\n \"300:1\": {\n \"rt_type\": \"import\",\n \"route_target\": \"300:1\"\n },\n \"200:1\": {\n \"rt_type\": \"both\",\n \"route_target\": \"200:1\"\n },\n \"200:2\": {\n \"rt_type\": \"import\",\n \"route_target\": \"200:2\"\n }\n }\n }\n },\n \"route_distinguisher\": \"200:1\",\n \"interfaces\": [\n \"GigabitEthernet0/0/0/1\"\n ]\n },\n \"VRF2\": {\n \"description\": \"not set\",\n \"vrf_mode\": \"Regular\",\n \"address_family\": {\n \"ipv6 unicast\": {\n \"route_target\": {\n \"200:2\": {\n \"rt_type\": \"both\",\n \"route_target\": \"200:2\"\n }\n }\n },\n \"ipv4 unicast\": {\n \"route_target\": {\n \"200:2\": {\n \"rt_type\": \"both\",\n \"route_target\": \"200:2\"\n }\n }\n }\n },\n \"route_distinguisher\": \"200:2\",\n \"interfaces\": [\n \"GigabitEthernet0/0/0/2\"\n ]}\n }\n\n ShowInterfacesAccounting = \\\n {\n \"GigabitEthernet0/0/0/0\": {\n \"accounting\": {\n \"arp\": {\n \"chars_in\": 378,\n \"chars_out\": 378,\n \"pkts_in\": 9,\n \"pkts_out\": 9\n },\n \"ipv4_multicast\": {\n \"chars_in\": 0,\n \"chars_out\": 843700,\n \"pkts_in\": 0,\n \"pkts_out\": 10514\n },\n \"ipv4_unicast\": {\n \"chars_in\": 1226852,\n \"chars_out\": 887519,\n \"pkts_in\": 19254,\n \"pkts_out\": 13117\n }\n }\n },\n \"GigabitEthernet0/0/0/1\": {\n \"accounting\": {\n \"arp\": {\n \"chars_in\": 378,\n \"chars_out\": 378,\n \"pkts_in\": 9,\n \"pkts_out\": 9\n },\n \"ipv4_multicast\": {\n \"chars_in\": 0,\n \"chars_out\": 844816,\n \"pkts_in\": 0,\n \"pkts_out\": 10530\n },\n \"ipv4_unicast\": {\n \"chars_in\": 843784,\n \"chars_out\": 1764,\n \"pkts_in\": 10539,\n \"pkts_out\": 26\n }\n }\n }\n }\n ShowInterfacesDetail_all='''\n Null0 is up, line protocol is up \n Interface state transitions: \n Hardware is Null interface\n Internet address is Unknown\n MTU 1500 bytes, BW 0 Kbit\n reliability 255/255, txload Unknown, rxload Unknown\n Encapsulation Null, loopback not set,\n Last input never, output never\n Last clearing of \"show interface\" counters never\n 5 minute input rate 0 bits/sec, 0 packets/sec\n 5 minute output rate 0 bits/sec, 0 packets/sec\n 0 packets input, 0 bytes, 0 total input drops\n 0 drops for unrecognized upper-level protocol\n Received 0 broadcast packets, 0 multicast packets\n 0 packets output, 0 bytes, 0 total output drops\n Output 0 broadcast packets, 0 multicast packets\n\n MgmtEth0/0/CPU0/0 is administratively down, line protocol is administratively down \n Interface state transitions: 0\n 
Hardware is Management Ethernet, address is 5254.00c3.6c43 (bia 5254.00c3.6c43)\n Internet address is Unknown\n MTU 1514 bytes, BW 0 Kbit\n reliability 255/255, txload Unknown, rxload Unknown\n Encapsulation ARPA,\n Duplex unknown, 0Kb/s, unknown, link type is autonegotiation\n output flow control is off, input flow control is off\n Carrier delay (up) is 10 msec\n loopback not set,\n Last input never, output never\n Last clearing of \"show interface\" counters never\n 5 minute input rate 0 bits/sec, 0 packets/sec\n 5 minute output rate 0 bits/sec, 0 packets/sec\n 0 packets input, 0 bytes, 0 total input drops\n 0 drops for unrecognized upper-level protocol\n Received 0 broadcast packets, 0 multicast packets\n 0 runts, 0 giants, 0 throttles, 0 parity\n 0 input errors, 0 CRC, 0 frame, 0 overrun, 0 ignored, 0 abort\n 0 packets output, 0 bytes, 0 total output drops\n Output 0 broadcast packets, 0 multicast packets\n 0 output errors, 0 underruns, 0 applique, 0 resets\n 0 output buffer failures, 0 output buffers swapped out\n 0 carrier transitions\n\n GigabitEthernet0/0/0/0 is administratively down, line protocol is administratively down \n Interface state transitions: 0\n Hardware is GigabitEthernet, address is aaaa.bbbb.cccc (bia 5254.0077.9407)\n Description: desc\n Internet address is 10.1.1.1/24\n MTU 1600 bytes, BW 768 Kbit (Max: 1000000 Kbit)\n reliability 255/255, txload 0/255, rxload 0/255\n Encapsulation ARPA,\n Full-duplex, 1000Mb/s, unknown, link type is force-up\n output flow control is off, input flow control is off\n Carrier delay (up) is 10 msec\n loopback not set,\n Last input never, output never\n Last clearing of \"show interface\" counters never\n 30 second input rate 0 bits/sec, 0 packets/sec\n 30 second output rate 0 bits/sec, 0 packets/sec\n 0 packets input, 0 bytes, 0 total input drops\n 0 drops for unrecognized upper-level protocol\n\n Received 0 broadcast packets, 0 multicast packets\n 0 runts, 0 giants, 0 throttles, 0 parity\n 0 input errors, 0 CRC, 0 frame, 0 overrun, 0 ignored, 0 abort\n \n 0 packets output, 0 bytes, 0 total output drops \n Output 0 broadcast packets, 0 multicast packets\n 0 output errors, 0 underruns, 0 applique, 0 resets\n 0 output buffer failures, 0 output buffers swapped out\n 0 carrier transitions\n\n GigabitEthernet0/0/0/0.10 is administratively down, line protocol is administratively down \n Interface state transitions: 0\n Hardware is VLAN sub-interface(s), address is aaaa.bbbb.cccc\n Internet address is Unknown\n MTU 1608 bytes, BW 768 Kbit (Max: 1000000 Kbit)\n reliability 255/255, txload 0/255, rxload 0/255\n Encapsulation 802.1Q Virtual LAN, VLAN Id 10, 2nd VLAN Id 10,\n loopback not set,\n Last input never, output never\n Last clearing of \"show interface\" counters never\n 5 minute input rate 0 bits/sec, 0 packets/sec\n 5 minute output rate 0 bits/sec, 0 packets/sec\n 0 packets input, 0 bytes, 0 total input drops\n 0 drops for unrecognized upper-level protocol\n Received 0 broadcast packets, 0 multicast packets\n 0 packets output, 0 bytes, 0 total output drops\n Output 0 broadcast packets, 0 multicast packets\n\n GigabitEthernet0/0/0/0.20 is administratively down, line protocol is administratively down \n Interface state transitions: 0\n Hardware is VLAN sub-interface(s), address is aaaa.bbbb.cccc\n Internet address is Unknown\n MTU 1604 bytes, BW 768 Kbit (Max: 1000000 Kbit)\n reliability 255/255, txload 0/255, rxload 0/255\n Encapsulation 802.1Q Virtual LAN, VLAN Id 20, loopback not set,\n Last input never, output never\n Last clearing 
of \"show interface\" counters never\n 5 minute input rate 0 bits/sec, 0 packets/sec\n 5 minute output rate 0 bits/sec, 0 packets/sec\n 0 packets input, 0 bytes, 0 total input drops\n 0 drops for unrecognized upper-level protocol\n Received 0 broadcast packets, 0 multicast packets\n 0 packets output, 0 bytes, 0 total output drops\n Output 0 broadcast packets, 0 multicast packets\n\n GigabitEthernet0/0/0/1 is up, line protocol is up \n Interface state transitions: 1\n Hardware is GigabitEthernet, address is 5254.0078.ebe0 (bia 5254.0078.ebe0)\n Internet address is 10.1.5.1/24\n MTU 1514 bytes, BW 1000000 Kbit (Max: 1000000 Kbit)\n reliability 255/255, txload 0/255, rxload 0/255\n Encapsulation ARPA,\n Full-duplex, 1000Mb/s, unknown, link type is force-up\n output flow control is off, input flow control is off\n Carrier delay (up) is 10 msec\n loopback not set,\n Last link flapped 1w5d\n ARP type ARPA, ARP timeout 04:00:00\n Last input 00:01:09, output 00:01:09\n Last clearing of \"show interface\" counters never\n 5 minute input rate 0 bits/sec, 0 packets/sec\n 5 minute output rate 0 bits/sec, 0 packets/sec\n 146164 packets input, 18221418 bytes, 0 total input drops\n 0 drops for unrecognized upper-level protocol\n Received 0 broadcast packets, 29056 multicast packets\n 0 runts, 0 giants, 0 throttles, 0 parity\n 0 input errors, 0 CRC, 0 frame, 0 overrun, 0 ignored, 0 abort\n 123696 packets output, 10777610 bytes, 0 total output drops\n Output 2 broadcast packets, 6246 multicast packets\n 0 output errors, 0 underruns, 0 applique, 0 resets\n 0 output buffer failures, 0 output buffers swapped out\n 1 carrier transitions\n '''\n ShowInterfacesDetail_gi1 = '''\n GigabitEthernet0/0/0/1 is up, line protocol is up \n Interface state transitions: 1\n Hardware is GigabitEthernet, address is 5254.0078.ebe0 (bia 5254.0078.ebe0)\n Internet address is 10.1.5.1/24\n MTU 1514 bytes, BW 1000000 Kbit (Max: 1000000 Kbit)\n reliability 255/255, txload 0/255, rxload 0/255\n Encapsulation ARPA,\n Full-duplex, 1000Mb/s, unknown, link type is force-up\n output flow control is off, input flow control is off\n Carrier delay (up) is 10 msec\n loopback not set,\n Last link flapped 1w5d\n ARP type ARPA, ARP timeout 04:00:00\n Last input 00:01:09, output 00:01:09\n Last clearing of \"show interface\" counters never\n 5 minute input rate 0 bits/sec, 0 packets/sec\n 5 minute output rate 0 bits/sec, 0 packets/sec\n 146164 packets input, 18221418 bytes, 0 total input drops\n 0 drops for unrecognized upper-level protocol\n Received 0 broadcast packets, 29056 multicast packets\n 0 runts, 0 giants, 0 throttles, 0 parity\n 0 input errors, 0 CRC, 0 frame, 0 overrun, 0 ignored, 0 abort\n 123696 packets output, 10777610 bytes, 0 total output drops\n Output 2 broadcast packets, 6246 multicast packets\n 0 output errors, 0 underruns, 0 applique, 0 resets\n 0 output buffer failures, 0 output buffers swapped out\n 1 carrier transitions '''\n ShowInterfacesAccounting_all = '''\n Tue Jun 5 20:45:11.544 UTC\nNo accounting statistics available for Loopback0\nNo accounting statistics available for Loopback1\nNo accounting statistics available for Null0\nGigabitEthernet0/0/0/0\n Protocol Pkts In Chars In Pkts Out Chars Out\n IPV4_UNICAST 19254 1226852 13117 887519\n IPV4_MULTICAST 0 0 10514 843700\n ARP 9 378 9 378\n\nGigabitEthernet0/0/0/1\n Protocol Pkts In Chars In Pkts Out Chars Out\n IPV4_UNICAST 10539 843784 26 1764\n IPV4_MULTICAST 0 0 10530 844816\n ARP 9 378 9 378\n\n\n\nNo accounting statistics available for MgmtEth0/RP0/CPU0/0\n 
'''\n ShowVrfAllDetail_all='''\n VRF VRF1; RD 200:1; VPN ID not set\n VRF mode: Regular\n Description not set\n Interfaces:\n GigabitEthernet0/0/0/1\n Address family IPV4 Unicast\n Import VPN route-target communities:\n RT:200:1\n RT:200:2\n RT:300:1\n RT:400:1\n Export VPN route-target communities:\n RT:200:1\n No import route policy\n No export route policy\n Address family IPV6 Unicast\n Import VPN route-target communities:\n RT:200:1\n RT:200:2\n RT:300:1\n RT:400:1\n Export VPN route-target communities:\n RT:200:1\n No import route policy\n No export route policy\n\n VRF VRF2; RD 200:2; VPN ID not set\n VRF mode: Regular\n Description not set\n Interfaces:\n GigabitEthernet0/0/0/2\n Address family IPV4 Unicast\n Import VPN route-target communities:\n RT:200:2\n Export VPN route-target communities:\n RT:200:2\n No import route policy\n No export route policy\n Address family IPV6 Unicast\n Import VPN route-target communities:\n RT:200:2\n Export VPN route-target communities:\n RT:200:2\n No import route policy\n No export route policy\n '''\n ShowVrfAllDetail_vrf1='''\n VRF VRF1; RD 200:1; VPN ID not set\n VRF mode: Regular\n Description not set\n Interfaces:\n GigabitEthernet0/0/0/1\n Address family IPV4 Unicast\n Import VPN route-target communities:\n RT:200:1\n RT:200:2\n RT:300:1\n RT:400:1\n Export VPN route-target communities:\n RT:200:1\n No import route policy\n No export route policy\n Address family IPV6 Unicast\n Import VPN route-target communities:\n RT:200:1\n RT:200:2\n RT:300:1\n RT:400:1\n Export VPN route-target communities:\n RT:200:1\n No import route policy\n No export route policy\n '''\n ShowIpv4VrfAllInterface_all = '''\n MgmtEth0/0/CPU0/0 is Shutdown, ipv4 protocol is Down\n Vrf is default (vrfid 0x60000000)\n Internet protocol processing disabled\n GigabitEthernet0/0/0/0 is Up, ipv4 protocol is Up\n Vrf is default (vrfid 0x60000000)\n Internet address is 10.1.3.1/24\n MTU is 1514 (1500 is available to IP)\n Helper address is not set\n Multicast reserved groups joined: 172.16.58.3 172.16.17.32 172.16.58.3\n 192.168.127.12 172.16.31.10\n Directed broadcast forwarding is disabled\n Outgoing access list is not set\n Inbound common access list is not set, access list is not set\n Proxy ARP is disabled\n ICMP redirects are never sent\n ICMP unreachables are always sent\n ICMP mask replies are never sent\n Table Id is 0xe0000000\n GigabitEthernet0/0/0/1 is Up, ipv4 protocol is Up\n Vrf is VRF1 (vrfid 0x60000001)\n Internet address is 10.1.5.1/24 with route-tag 50\n Secondary address 10.2.2.2/24\n MTU is 1514 (1500 is available to IP)\n Helper address is not set\n Multicast reserved groups joined: 172.16.58.3 172.16.17.32\n Directed broadcast forwarding is disabled\n Outgoing access list is not set\n Inbound common access list is not set, access list is not set\n Proxy ARP is disabled\n ICMP redirects are never sent\n ICMP unreachables are always sent\n ICMP mask replies are never sent\n Table Id is 0xe0000010\n GigabitEthernet0/0/0/2 is is Shutdown, ipv4 protocol is Down\n Vrf is VRF2 (vrfid 0x60000000)\n Internet protocol processing disabled\n GigabitEthernet0/0/0/3 is is Shutdown, ipv4 protocol is Down\n Vrf is default (vrfid 0x60000000)\n Internet protocol processing disabled\n GigabitEthernet0/0/0/4 is is Shutdown, ipv4 protocol is Down\n Vrf is default (vrfid 0x60000000)\n Internet protocol processing disabled\n GigabitEthernet0/0/0/5 is Shutdown, ipv4 protocol is Down\n Vrf is default (vrfid 0x60000000)\n Internet protocol processing disabled\n 
GigabitEthernet0/0/0/6 is Shutdown, ipv4 protocol is Down\n Vrf is default (vrfid 0x60000000)\n Internet protocol processing disabled\n '''\n ShowIpv4VrfAllInterface_vrf1='''\n GigabitEthernet0/0/0/1 is Up, ipv4 protocol is Up\n Vrf is VRF1 (vrfid 0x60000001)\n Internet address is 10.1.5.1/24 with route-tag 50\n Secondary address 10.2.2.2/24\n MTU is 1514 (1500 is available to IP)\n Helper address is not set\n Multicast reserved groups joined: 172.16.58.3 172.16.17.32\n Directed broadcast forwarding is disabled\n Outgoing access list is not set\n Inbound common access list is not set, access list is not set\n Proxy ARP is disabled\n ICMP redirects are never sent\n ICMP unreachables are always sent\n ICMP mask replies are never sent\n Table Id is 0xe0000010\n '''\n ShowIpv6VrfAllInterface_all = '''\n MgmtEth0/0/CPU0/0 is Shutdown, ipv6 protocol is Down, Vrfid is default (0x60000000)\n IPv6 is disabled, link-local address unassigned\n No global unicast address is configured\n GigabitEthernet0/0/0/0 is Shutdown, ipv6 protocol is Down, Vrfid is VRF1 (0x60000002)\n IPv6 is enabled, link-local address is fe80::a8aa:bbff:febb:cccc [TENTATIVE]\n Global unicast address(es):\n 2001:db8:1:1::1, subnet is 2001:db8:1:1::/64 [TENTATIVE]\n 2001:db8:2:2::2, subnet is 2001:db8:2:2::/64 [TENTATIVE]\n 2001:db8:4:4::4, subnet is 2001:db8:4:4::/64 [TENTATIVE] with route-tag 10\n 2001:db8:3:3:a8aa:bbff:febb:cccc, subnet is 2001:db8:3:3::/64 [TENTATIVE]\n Joined group address(es): fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b fc00:e968:6179::de52:7100\n MTU is 1600 (1586 is available to IPv6)\n ICMP redirects are disabled\n ICMP unreachables are enabled\n ND DAD is enabled, number of DAD attempts 1\n ND reachable time is 0 milliseconds\n ND cache entry limit is 1000000000\n ND advertised retransmit interval is 0 milliseconds\n Hosts use stateless autoconfig for addresses.\n Outgoing access list is not set\n Inbound access list is not set\n Table Id is 0xe0800011\n Complete protocol adjacency: 0\n Complete glean adjacency: 0\n Incomplete protocol adjacency: 0\n Incomplete glean adjacency: 0\n Dropped protocol request: 0\n Dropped glean request: 0\n GigabitEthernet0/0/0/0.10 is Shutdown, ipv6 protocol is Down, Vrfid is default (0x60000000)\n IPv6 is enabled, link-local address is fe80::5054:ff:fea6:78c5 \n Global unicast address(es):\n 2001:db8:1:3::1, subnet is 2001:db8:1:3::/64 \n Joined group address(es): fc00:e968:6179::de52:7100:ff00:1 fc00:e968:6179::de52:7100:ffa6:78c5 fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b\n fc00:e968:6179::de52:7100\n MTU is 1514 (1500 is available to IPv6)\n ICMP redirects are disabled\n ICMP unreachables are enabled\n ND DAD is enabled, number of DAD attempts 1\n ND reachable time is 0 milliseconds\n ND cache entry limit is 1000000000\n ND advertised retransmit interval is 0 milliseconds\n ND router advertisements are sent every 160 to 240 seconds\n ND router advertisements live for 1800 seconds\n Hosts use stateless autoconfig for addresses.\n Outgoing access list is not set\n Inbound common access list is not set, access list is not set\n Table Id is 0xe0800000\n Complete protocol adjacency: 0\n Complete glean adjacency: 0\n Incomplete protocol adjacency: 0\n Incomplete glean adjacency: 0\n Dropped protocol request: 0\n Dropped glean request: 0\n GigabitEthernet0/0/0/0.20 is Shutdown, ipv6 protocol is Down, Vrfid is default (0x60000000)\n IPv6 is disabled, link-local address unassigned\n No global unicast address is configured\n GigabitEthernet0/0/0/1 is Up, ipv6 protocol is Up, Vrfid is 
VRF1 (0x60000001)\n IPv6 is enabled, link-local address is fe80::5054:ff:fe78:ebe0 \n Global unicast address(es):\n 2001:db8:1:5::1, subnet is 2001:db8:1:5::/64 \n Joined group address(es): fc00:e968:6179::de52:7100:ff00:1 fc00:e968:6179::de52:7100:ff78:ebe0 fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b\n fc00:e968:6179::de52:7100\n MTU is 1514 (1500 is available to IPv6)\n ICMP redirects are disabled\n ICMP unreachables are enabled\n ND DAD is enabled, number of DAD attempts 1\n ND reachable time is 0 milliseconds\n ND cache entry limit is 1000000000\n ND advertised retransmit interval is 0 milliseconds\n ND router advertisements are sent every 160 to 240 seconds\n ND router advertisements live for 1800 seconds\n Hosts use stateless autoconfig for addresses.\n Outgoing access list is not set\n Inbound common access list is not set, access list is not set\n Table Id is 0xe0800010\n Complete protocol adjacency: 1\n Complete glean adjacency: 1\n Incomplete protocol adjacency: 0\n Incomplete glean adjacency: 0\n Dropped protocol request: 0\n Dropped glean request: 0\n GigabitEthernet0/0/0/2 is Shutdown, ipv6 protocol is Down, Vrfid is VRF2 (0x60000000)\n IPv6 is disabled, link-local address unassigned\n No global unicast address is configured\n GigabitEthernet0/0/0/3 is Shutdown, ipv6 protocol is Down, Vrfid is default (0x60000000)\n IPv6 is disabled, link-local address unassigned\n No global unicast address is configured\n GigabitEthernet0/0/0/4 is Shutdown, ipv6 protocol is Down, Vrfid is default (0x60000000)\n IPv6 is disabled, link-local address unassigned\n No global unicast address is configured\n GigabitEthernet0/0/0/5 is Shutdown, ipv6 protocol is Down, Vrfid is default (0x60000000)\n IPv6 is disabled, link-local address unassigned\n No global unicast address is configured\n GigabitEthernet0/0/0/6 is Shutdown, ipv6 protocol is Down, Vrfid is default (0x60000000)\n IPv6 is disabled, link-local address unassigned\n No global unicast address is configured\n '''\n ShowIpv6VrfAllInterface_vrf1='''\n GigabitEthernet0/0/0/0 is Shutdown, ipv6 protocol is Down, Vrfid is VRF1 (0x60000002)\n IPv6 is enabled, link-local address is fe80::a8aa:bbff:febb:cccc [TENTATIVE]\n Global unicast address(es):\n 2001:db8:1:1::1, subnet is 2001:db8:1:1::/64 [TENTATIVE]\n 2001:db8:2:2::2, subnet is 2001:db8:2:2::/64 [TENTATIVE]\n 2001:db8:4:4::4, subnet is 2001:db8:4:4::/64 [TENTATIVE] with route-tag 10\n 2001:db8:3:3:a8aa:bbff:febb:cccc, subnet is 2001:db8:3:3::/64 [TENTATIVE]\n Joined group address(es): fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b fc00:e968:6179::de52:7100\n MTU is 1600 (1586 is available to IPv6)\n ICMP redirects are disabled\n ICMP unreachables are enabled\n ND DAD is enabled, number of DAD attempts 1\n ND reachable time is 0 milliseconds\n ND cache entry limit is 1000000000\n ND advertised retransmit interval is 0 milliseconds\n Hosts use stateless autoconfig for addresses.\n Outgoing access list is not set\n Inbound access list is not set\n Table Id is 0xe0800011\n Complete protocol adjacency: 0\n Complete glean adjacency: 0\n Incomplete protocol adjacency: 0\n Incomplete glean adjacency: 0\n Dropped protocol request: 0\n Dropped glean request: 0\n GigabitEthernet0/0/0/1 is Up, ipv6 protocol is Up, Vrfid is VRF1 (0x60000001)\n IPv6 is enabled, link-local address is fe80::5054:ff:fe78:ebe0 \n Global unicast address(es):\n 2001:db8:1:5::1, subnet is 2001:db8:1:5::/64 \n Joined group address(es): fc00:e968:6179::de52:7100:ff00:1 fc00:e968:6179::de52:7100:ff78:ebe0 
fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b\n fc00:e968:6179::de52:7100\n MTU is 1514 (1500 is available to IPv6)\n ICMP redirects are disabled\n ICMP unreachables are enabled\n ND DAD is enabled, number of DAD attempts 1\n ND reachable time is 0 milliseconds\n ND cache entry limit is 1000000000\n ND advertised retransmit interval is 0 milliseconds\n ND router advertisements are sent every 160 to 240 seconds\n ND router advertisements live for 1800 seconds\n Hosts use stateless autoconfig for addresses.\n Outgoing access list is not set\n Inbound common access list is not set, access list is not set\n Table Id is 0xe0800010\n Complete protocol adjacency: 1\n Complete glean adjacency: 1\n Incomplete protocol adjacency: 0\n Incomplete glean adjacency: 0\n Dropped protocol request: 0\n Dropped glean request: 0\n '''\n ShowIpv6VrfAllInterface_gi1='''\n GigabitEthernet0/0/0/1 is Up, ipv6 protocol is Up, Vrfid is VRF1 (0x60000001)\n IPv6 is enabled, link-local address is fe80::5054:ff:fe78:ebe0 \n Global unicast address(es):\n 2001:db8:1:5::1, subnet is 2001:db8:1:5::/64 \n Joined group address(es): fc00:e968:6179::de52:7100:ff00:1 fc00:e968:6179::de52:7100:ff78:ebe0 fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b\n fc00:e968:6179::de52:7100\n MTU is 1514 (1500 is available to IPv6)\n ICMP redirects are disabled\n ICMP unreachables are enabled\n ND DAD is enabled, number of DAD attempts 1\n ND reachable time is 0 milliseconds\n ND cache entry limit is 1000000000\n ND advertised retransmit interval is 0 milliseconds\n ND router advertisements are sent every 160 to 240 seconds\n ND router advertisements live for 1800 seconds\n Hosts use stateless autoconfig for addresses.\n Outgoing access list is not set\n Inbound common access list is not set, access list is not set\n Table Id is 0xe0800010\n Complete protocol adjacency: 1\n Complete glean adjacency: 1\n Incomplete protocol adjacency: 0\n Incomplete glean adjacency: 0\n Dropped protocol request: 0\n Dropped glean request: 0\n '''\n ShowInterfacesAccounting_gi1 = '''\nGigabitEthernet0/0/0/1\n Protocol Pkts In Chars In Pkts Out Chars Out\n IPV4_UNICAST 10539 843784 26 1764\n IPV4_MULTICAST 0 0 10530 844816\n ARP 9 378 9 378\n '''\n ShowEthernetTags_all='''\n St: AD - Administratively Down, Dn - Down, Up - Up\n Ly: L2 - Switched layer 2 service, L3 = Terminated layer 3 service,\n Xtra C - Match on Cos, E - Match on Ethertype, M - Match on source MAC\n -,+: Ingress rewrite operation; number of tags to pop and push respectively\n\n Interface St MTU Ly Outer Inner Xtra -,+\n Gi0/0/0/0.10 Up 1518 L3 .1Q:10 - - 1 0\n Gi0/0/0/0.20 Up 1518 L3 .1Q:20 - - 1 0\n \n '''\n ShowEthernetTag_gi1='''\n St: AD - Administratively Down, Dn - Down, Up - Up\n Ly: L2 - Switched layer 2 service, L3 = Terminated layer 3 service,\n Xtra C - Match on Cos, E - Match on Ethertype, M - Match on source MAC\n -,+: Ingress rewrite operation; number of tags to pop and push respectively\n\n Interface St MTU Ly Outer Inner Xtra -,+\n Gi0/0/0/1 Up 1514 L3 .1Q:501 - - 1 0\n '''\n interfaceOpsOutput_custom_info={\n \"GigabitEthernet0/0/0/1\": {\n \"mtu\": 1514,\n \"mac_address\": \"5254.0078.ebe0\",\n \"duplex_mode\": \"full\",\n \"type\": \"gigabitethernet\",\n \"enabled\": True,\n 'oper_status':'up',\n 'vlan_id':'501',\n \"encapsulation\": {\n \"encapsulation\": \"arpa\"\n },\n \"auto_negotiate\": False,\n \"vrf\": 'VRF1',\n \"bandwidth\": 1000000,\n \"counters\": {\n \"in_octets\": 18221418,\n \"out_broadcast_pkts\": 2,\n \"out_pkts\": 123696,\n 'in_crc_errors': 0,\n \"in_discards\": 0,\n 
'out_errors': 0,\n \"in_pkts\": 146164,\n \"in_multicast_pkts\": 29056,\n \"in_broadcast_pkts\": 0,\n \"rate\": {\n \"out_rate\": 0,\n \"out_rate_pkts\": 0,\n \"in_rate_pkts\": 0,\n \"load_interval\": 300,\n \"in_rate\": 0\n },\n \"last_clear\": \"never\",\n \"out_multicast_pkts\": 6246,\n \"out_octets\": 10777610\n },\n \"accounting\": {\n \"arp\": {\n \"chars_in\": 378,\n \"chars_out\": 378,\n \"pkts_in\": 9,\n \"pkts_out\": 9\n },\n \"ipv4_multicast\": {\n \"chars_in\": 0,\n \"chars_out\": 844816,\n \"pkts_in\": 0,\n \"pkts_out\": 10530\n },\n \"ipv4_unicast\": {\n \"chars_in\": 843784,\n \"chars_out\": 1764,\n \"pkts_in\": 10539,\n \"pkts_out\": 26\n }\n },\n\n \"flow_control\": {\n \"flow_control_receive\": False,\n \"flow_control_send\": False\n },\n \"port_speed\": \"1000Mb/s\",\n \"phys_address\": \"5254.0078.ebe0\",\n \"ipv6\": {\n '2001:db8:1:5::1/64': {\n 'ip': '2001:db8:1:5::1',\n 'prefix_length': '64'\n },\n 'enabled': True}\n\n\n },\n }\n InterfaceOpsOutput_info = {\n \"Null0\": {\n \"mtu\": 1500,\n \"type\": \"Null\",\n \"enabled\": True,\n \"bandwidth\": 0,\n \"counters\": {\n \"in_octets\": 0,\n \"out_broadcast_pkts\": 0,\n \"out_pkts\": 0,\n \"in_discards\": 0,\n \"in_pkts\": 0,\n \"in_multicast_pkts\": 0,\n \"in_broadcast_pkts\": 0,\n \"rate\": {\n \"out_rate\": 0,\n \"out_rate_pkts\": 0,\n \"in_rate_pkts\": 0,\n \"load_interval\": 5,\n \"in_rate\": 0\n },\n \"last_clear\": \"never\",\n \"out_multicast_pkts\": 0,\n \"out_octets\": 0\n },\n \"encapsulation\": {\n \"encapsulation\": \"Null\"\n },\n },\n \"MgmtEth0/0/CPU0/0\": {\n \"mtu\": 1514,\n \"mac_address\": \"5254.00c3.6c43\",\n \"flow_control\": {\n \"flow_control_receive\": False,\n \"flow_control_send\": False\n },\n \"type\": \"Management Ethernet\",\n \"enabled\": False,\n \"encapsulation\": {\n \"encapsulation\": \"ARPA\"\n },\n \"auto_negotiate\": True,\n \"bandwidth\": 0,\n \"counters\": {\n \"out_broadcast_pkts\": 0,\n \"in_multicast_pkts\": 0,\n \"in_crc_errors\": 0,\n \"in_pkts\": 0,\n \"in_errors\": 0,\n \"in_broadcast_pkts\": 0,\n \"out_multicast_pkts\": 0,\n \"out_errors\": 0,\n \"in_octets\": 0,\n \"rate\": {\n \"out_rate\": 0,\n \"out_rate_pkts\": 0,\n \"in_rate_pkts\": 0,\n \"load_interval\": 5,\n \"in_rate\": 0\n },\n \"out_pkts\": 0,\n \"in_discards\": 0,\n \"last_clear\": \"never\",\n \"out_octets\": 0\n },\n \"duplex_mode\": \"duplex unknown\",\n \"port_speed\": \"0Kb/s\",\n \"phys_address\": \"5254.00c3.6c43\",\n \"ipv6\": {\n \"enabled\": False\n }\n },\n \"GigabitEthernet0/0/0/5\": {\n \"ipv6\": {\n \"enabled\": False\n }\n },\n \"GigabitEthernet0/0/0/4\": {\n \"ipv6\": {\n \"enabled\": False\n }\n },\n \"GigabitEthernet0/0/0/0\": {\n \"mtu\": 1600,\n \"mac_address\": \"aaaa.bbbb.cccc\",\n \"description\": \"desc\",\n \"duplex_mode\": \"full\",\n \"type\": \"GigabitEthernet\",\n \"enabled\": False,\n \"encapsulation\": {\n \"encapsulation\": \"ARPA\"\n },\n \"auto_negotiate\": True,\n \"ipv4\": {\n \"10.1.3.1/24\": {\n \"ip\": \"10.1.3.1\",\n \"prefix_length\": \"24\",\n },\n\n },\n \"bandwidth\": 768,\n \"accounting\": {\n \"arp\": {\n \"chars_in\": 378,\n \"chars_out\": 378,\n \"pkts_in\": 9,\n \"pkts_out\": 9\n },\n \"ipv4_multicast\": {\n \"chars_in\": 0,\n \"chars_out\": 843700,\n \"pkts_in\": 0,\n \"pkts_out\": 10514\n },\n \"ipv4_unicast\": {\n \"chars_in\": 1226852,\n \"chars_out\": 887519,\n \"pkts_in\": 19254,\n \"pkts_out\": 13117\n }\n },\n \"counters\": {\n \"out_broadcast_pkts\": 0,\n \"in_multicast_pkts\": 0,\n \"in_crc_errors\": 0,\n \"in_pkts\": 0,\n \"in_errors\": 0,\n 
\"in_broadcast_pkts\": 0,\n \"out_multicast_pkts\": 0,\n \"out_errors\": 0,\n \"in_octets\": 0,\n \"rate\": {\n \"out_rate\": 0,\n \"out_rate_pkts\": 0,\n \"in_rate_pkts\": 0,\n \"load_interval\": 30,\n \"in_rate\": 0\n },\n \"out_pkts\": 0,\n \"in_discards\": 0,\n \"last_clear\": \"never\",\n \"out_octets\": 0\n },\n \"flow_control\": {\n \"flow_control_receive\": False,\n \"flow_control_send\": False\n },\n \"port_speed\": \"1000Mb/s\",\n \"phys_address\": \"5254.0077.9407\",\n \"ipv6\": {\n \"2001:db8:2:2::2/64\": {\n \"status\": \"tentative\",\n \"ip\": \"2001:db8:2:2::2\",\n \"prefix_length\": \"64\"\n },\n \"2001:db8:1:1::1/64\": {\n \"status\": \"tentative\",\n \"ip\": \"2001:db8:1:1::1\",\n \"prefix_length\": \"64\"\n },\n \"enabled\": False,\n \"2001:db8:4:4::4/64\": {\n \"status\": \"tentative\",\n \"route_tag\": \"10\",\n \"ip\": \"2001:db8:4:4::4\",\n \"prefix_length\": \"64\"\n },\n \"2001:db8:3:3:a8aa:bbff:febb:cccc/64\": {\n \"status\": \"tentative\",\n \"ip\": \"2001:db8:3:3:a8aa:bbff:febb:cccc\",\n \"prefix_length\": \"64\",\n \"eui64\": True\n }\n }\n },\n \"GigabitEthernet0/0/0/1\": {\n \"vrf\": \"VRF1\",\n \"ipv6\": {\n '2001:db8:1:5::1/64': {\n 'ip': '2001:db8:1:5::1',\n 'prefix_length': '64'\n },\n 'enabled': True},\n 'ipv4': {\n '10.1.5.1/24': {\n 'ip': '10.1.5.1',\n 'prefix_length': '24',\n 'route_tag': 50},\n '10.2.2.2/24': {\n 'ip': '10.2.2.2',\n 'prefix_length': '24',\n 'secondary': True}},\n\n \"accounting\": {\n \"arp\": {\n \"chars_in\": 378,\n \"chars_out\": 378,\n \"pkts_in\": 9,\n \"pkts_out\": 9\n },\n \"ipv4_multicast\": {\n \"chars_in\": 0,\n \"chars_out\": 844816,\n \"pkts_in\": 0,\n \"pkts_out\": 10530\n },\n \"ipv4_unicast\": {\n \"chars_in\": 843784,\n \"chars_out\": 1764,\n \"pkts_in\": 10539,\n \"pkts_out\": 26\n }\n }\n },\n \"GigabitEthernet0/0/0/6\": {\n \"ipv6\": {\n \"enabled\": False\n }\n },\n \"GigabitEthernet0/0/0/0.20\": {\n \"mtu\": 1604,\n \"counters\": {\n \"in_octets\": 0,\n \"out_broadcast_pkts\": 0,\n \"out_pkts\": 0,\n \"in_discards\": 0,\n \"in_pkts\": 0,\n \"in_multicast_pkts\": 0,\n \"in_broadcast_pkts\": 0,\n \"rate\": {\n \"out_rate\": 0,\n \"out_rate_pkts\": 0,\n \"in_rate_pkts\": 0,\n \"load_interval\": 5,\n \"in_rate\": 0\n },\n \"last_clear\": \"never\",\n \"out_multicast_pkts\": 0,\n \"out_octets\": 0\n },\n \"enabled\": False,\n \"bandwidth\": 768,\n \"vlan_id\": '20',\n \"encapsulation\": {\n \"encapsulation\": \"802.1Q Virtual LAN\",\n \"first_dot1q\": \"20\"\n },\n \"ipv6\": {\n \"enabled\": False\n }\n },\n \"GigabitEthernet0/0/0/2\": {\n \"vrf\": \"VRF2\",\n \"ipv6\": {\n \"enabled\": False\n }\n },\n \"GigabitEthernet0/0/0/3\": {\n \"ipv6\": {\n \"enabled\": False\n }\n },\n \"GigabitEthernet0/0/0/0.10\": {\n \"mtu\": 1608,\n \"counters\": {\n \"in_octets\": 0,\n \"out_broadcast_pkts\": 0,\n \"out_pkts\": 0,\n \"in_discards\": 0,\n \"in_pkts\": 0,\n \"in_multicast_pkts\": 0,\n \"in_broadcast_pkts\": 0,\n \"rate\": {\n \"out_rate\": 0,\n \"out_rate_pkts\": 0,\n \"in_rate_pkts\": 0,\n \"load_interval\": 5,\n \"in_rate\": 0\n },\n \"last_clear\": \"never\",\n \"out_multicast_pkts\": 0,\n \"out_octets\": 0\n },\n \"enabled\": False,\n \"bandwidth\": 768,\n \"vlan_id\": '10',\n \"encapsulation\": {\n \"encapsulation\": \"802.1Q Virtual LAN\",\n \"first_dot1q\": \"10\",\n \"second_dot1q\": \"10\"\n },\n \"ipv6\": {\n '2001:db8:1:3::1/64': {\n 'ip': '2001:db8:1:3::1',\n 'prefix_length': '64'},\n \"enabled\": False\n }\n }\n }\n\n\n", "id": "9311853", "language": "Python", "matching_score": 3.3545398712158203, 
"max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/interface/iosxr/tests/interface_output.py" }, { "content": "''' \nInterface Genie Ops Object for NXOS - CLI.\n'''\n\nimport re\n\n# super class\nfrom genie.libs.ops.interface.interface import Interface as SuperInterface\n\n# nxos show_interface\nfrom genie.libs.parser.nxos.show_interface import ShowInterface, ShowVrfAllInterface,\\\n ShowIpv6InterfaceVrfAll, ShowIpInterfaceVrfAll,\\\n ShowInterfaceSwitchport\nfrom genie.libs.parser.nxos.show_routing import ShowRoutingIpv6VrfAll, ShowRoutingVrfAll\n\n\nclass Interface(SuperInterface):\n '''Interface Genie Ops Object'''\n\n def convert_intf_name(self, item):\n return item.capitalize()\n\n def learn(self, interface=None, vrf=None, address_family=None):\n '''Learn Interface Ops'''\n ########################################################################\n # info\n ########################################################################\n self.callables = {'convert_intf_name': self.convert_intf_name}\n\n # Global source\n src = '[(?P<interface>{convert_intf_name})]'\n dest = 'info[(?P<interface>{convert_intf_name})]'\n req_keys_path = {'[description]': '[description]',\n '[types]': '[type]',\n '[oper_status]': '[oper_status]',\n '[last_link_flapped]': '[last_change]',\n '[phys_address]': '[phys_address]',\n '[port_speed]': '[port_speed]',\n '[mtu]': '[mtu]',\n '[enabled]': '[enabled]',\n '[encapsulations][first_dot1q]': '[vlan_id]',\n '[mac_address]': '[mac_address]',\n '[auto_negotiate]': '[auto_negotiate]',\n '[duplex_mode]': '[duplex_mode]',\n '[medium]': '[medium]',\n '[delay]': '[delay]',\n '[bandwidth]': '[bandwidth]',\n }\n # vrf\n self.add_leaf(cmd=ShowVrfAllInterface,\n src=src + '[vrf]',\n dest=dest + '[vrf]', interface=interface, vrf=vrf)\n self.make()\n if vrf:\n for intf in self.info:\n for src_key_path, dest_key_path in req_keys_path.items():\n self.add_leaf(cmd=ShowInterface,\n src=src + src_key_path,\n dest=dest + dest_key_path,\n interface=intf)\n else:\n for src_key_path, dest_key_path in req_keys_path.items():\n self.add_leaf(cmd=ShowInterface,\n src=src + src_key_path,\n dest=dest + dest_key_path,\n interface=interface)\n\n\n\n\n req_keys = ['access_vlan', 'trunk_vlans', 'switchport_mode',\n 'switchport_enable']\n if vrf:\n for intf in self.info:\n for key in req_keys:\n self.add_leaf(cmd=ShowInterfaceSwitchport,\n src=src + '[{}]'.format(key),\n dest=dest + '[{}]'.format(key), interface=intf)\n\n # ======================================================================\n # flow_control\n # ======================================================================\n\n # flow_control\n self.add_leaf(cmd=ShowInterface,\n src=src + '[flow_control]',\n dest=dest + '[flow_control]', interface=intf)\n\n # ======================================================================\n # accounting\n # ======================================================================\n\n # accounting N/A\n\n # ======================================================================\n # port_channel\n # ======================================================================\n\n # port_channel\n self.add_leaf(cmd=ShowInterface,\n src=src + '[port_channel]',\n dest=dest + '[port_channel]', interface=intf)\n\n # ======================================================================\n # counters\n # ======================================================================\n # Global source\n src = '[(?P<interface>{convert_intf_name})][counters]'\n dest = 
'info[(?P<interface>{convert_intf_name})][counters]'\n\n req_keys = ['in_pkts', 'in_octets', 'in_unicast_pkts',\n 'in_broadcast_pkts', 'in_multicast_pkts',\n 'in_discards', 'in_errors', 'in_unknown_protos',\n 'in_mac_pause_frames', 'in_oversize_frames',\n 'in_crc_errors', 'out_pkts', 'out_octets', 'out_unicast_pkts',\n 'out_broadcast_pkts', 'out_multicast_pkts', 'out_discard',\n 'out_errors', 'last_clear','out_mac_pause_frames']\n\n for key in req_keys:\n self.add_leaf(cmd=ShowInterface,\n src=src + '[{}]'.format(key),\n dest=dest + '[{}]'.format(key), interface=intf)\n\n\n # Global source - counters | rate\n src = '[(?P<interface>{convert_intf_name})][counters][rate]'\n dest = 'info[(?P<interface>{convert_intf_name})][counters][rate]'\n\n req_keys = ['load_interval', 'in_rate', 'in_rate_pkts',\n 'out_rate', 'out_rate_pkts']\n\n for key in req_keys:\n self.add_leaf(cmd=ShowInterface,\n src=src + '[{}]'.format(key),\n dest=dest + '[{}]'.format(key), interface=intf)\n\n # ======================================================================\n # encapsulation\n # ======================================================================\n\n\n # Global source\n src = '[(?P<interface>{convert_intf_name})][encapsulations]'\n dest = 'info[(?P<interface>{convert_intf_name})][encapsulation]'\n\n req_keys = ['encapsulation', 'first_dot1q', 'native_vlan']\n\n for key in req_keys:\n self.add_leaf(cmd=ShowInterface,\n src=src + '[{}]'.format(key),\n dest=dest + '[{}]'.format(key), interface=intf)\n else:\n for key in req_keys:\n self.add_leaf(cmd=ShowInterfaceSwitchport,\n src=src + '[{}]'.format(key),\n dest=dest + '[{}]'.format(key), interface=interface)\n\n # ======================================================================\n # flow_control\n # ======================================================================\n\n # flow_control\n self.add_leaf(cmd=ShowInterface,\n src=src + '[flow_control]',\n dest=dest + '[flow_control]', interface=interface)\n\n # ======================================================================\n # accounting\n # ======================================================================\n\n # accounting N/A\n\n # ======================================================================\n # port_channel\n # ======================================================================\n\n # port_channel\n self.add_leaf(cmd=ShowInterface,\n src=src + '[port_channel]',\n dest=dest + '[port_channel]', interface=interface)\n\n # ======================================================================\n # counters\n # ======================================================================\n # Global source\n src = '[(?P<interface>{convert_intf_name})][counters]'\n dest = 'info[(?P<interface>{convert_intf_name})][counters]'\n\n req_keys = ['in_pkts', 'in_octets', 'in_unicast_pkts',\n 'in_broadcast_pkts', 'in_multicast_pkts',\n 'in_discards', 'in_errors', 'in_unknown_protos',\n 'in_mac_pause_frames', 'in_oversize_frames',\n 'in_crc_errors', 'out_pkts', 'out_octets', 'out_unicast_pkts',\n 'out_broadcast_pkts', 'out_multicast_pkts', 'out_discard',\n 'out_errors', 'last_clear', 'out_mac_pause_frames']\n\n for key in req_keys:\n self.add_leaf(cmd=ShowInterface,\n src=src + '[{}]'.format(key),\n dest=dest + '[{}]'.format(key), interface=interface)\n\n # Global source - counters | rate\n src = '[(?P<interface>{convert_intf_name})][counters][rate]'\n dest = 'info[(?P<interface>{convert_intf_name})][counters][rate]'\n\n req_keys = ['load_interval', 'in_rate', 'in_rate_pkts',\n 'out_rate', 
'out_rate_pkts']\n\n for key in req_keys:\n self.add_leaf(cmd=ShowInterface,\n src=src + '[{}]'.format(key),\n dest=dest + '[{}]'.format(key), interface=interface)\n\n # ======================================================================\n # encapsulation\n # ======================================================================\n\n # Global source\n src = '[(?P<interface>{convert_intf_name})][encapsulations]'\n dest = 'info[(?P<interface>{convert_intf_name})][encapsulation]'\n\n req_keys = ['encapsulation', 'first_dot1q', 'native_vlan']\n\n for key in req_keys:\n self.add_leaf(cmd=ShowInterface,\n src=src + '[{}]'.format(key),\n dest=dest + '[{}]'.format(key), interface=interface)\n\n # ======================================================================\n # ipv4\n # ======================================================================\n \n if not address_family or address_family.lower() == 'ipv4':\n # Global source\n src = '[(?P<interface>{convert_intf_name})][ipv4][(?P<ipv4>.*)]'\n dest = 'info[(?P<interface>{convert_intf_name})][ipv4][(?P<ipv4>.*)]'\n\n req_keys = ['ip', 'prefix_length', 'secondary']\n\n for key in req_keys:\n self.add_leaf(cmd=ShowIpInterfaceVrfAll,\n src=src + '[{}]'.format(key),\n dest=dest + '[{}]'.format(key), interface=interface, vrf=vrf)\n if vrf:\n for intf in self.info:\n self.add_leaf(cmd=ShowInterface,\n src=src + '[route_tag]',\n dest=dest + '[route_tag]', interface=intf)\n\n else:\n # route_tag\n self.add_leaf(cmd=ShowInterface,\n src=src + '[route_tag]',\n dest=dest + '[route_tag]', interface=interface)\n\n # secondary_vrf --- This is not supported on NXOS\n # unnumbered\n self.add_leaf(cmd=ShowIpInterfaceVrfAll,\n src='[(?P<interface>{convert_intf_name})][ipv4][unnumbered]',\n dest='info[(?P<interface>{convert_intf_name})][ipv4][unnumbered]',\n interface=interface, vrf=vrf)\n self.add_leaf(cmd=ShowRoutingVrfAll,\n src='[vrf][(?P<vrf>.*)][address_family][(?P<af>.*)][ip]',\n dest='info[routing_v4][(?P<vrf>.*)]', vrf=vrf)\n\n # ======================================================================\n # ipv6\n # ======================================================================\n \n if not address_family or address_family.lower() == 'ipv6':\n # Global source\n src = '[(?P<interface>{convert_intf_name})][ipv6][(?P<ipv6>.*)]'\n dest = 'info[(?P<interface>{convert_intf_name})][ipv6][(?P<ipv6>.*)]'\n\n req_keys = ['ip', 'prefix_length', 'anycast', 'status']\n\n for key in req_keys:\n self.add_leaf(cmd=ShowIpv6InterfaceVrfAll,\n src=src + '[{}]'.format(key),\n dest=dest + '[{}]'.format(key), interface=interface, vrf=vrf)\n\n\n # get routing output\n self.add_leaf(cmd=ShowRoutingIpv6VrfAll,\n src='[vrf][(?P<vrf>.*)][address_family][(?P<af>.*)][ip]',\n dest='info[routing_v6][(?P<vrf>.*)]',vrf=vrf)\n\n\n # make to write in cache\n self.make(final_call=True)\n\n # eui_64\n # if has ip like 2001:db8::5054:ff:fed5:63f9, eui_64 is True\n p = re.compile(r'([a-z0-9]+):([\\w\\:]+)?ff:([a-z0-9]+):([a-z0-9]+)')\n if hasattr(self, 'info'): \n for intf in self.info:\n # check vrf\n if 'vrf' in self.info[intf]: \n if 'routing_v4' in self.info and \\\n self.info[intf]['vrf'] in self.info['routing_v4']:\n dict_v4 = self.info['routing_v4'][self.info[intf]['vrf']]\n else:\n dict_v4 = {}\n\n if 'routing_v6' in self.info and \\\n self.info[intf]['vrf'] in self.info['routing_v6']:\n dict_v6 = self.info['routing_v6'][self.info[intf]['vrf']]\n else:\n dict_v6 = {}\n else:\n continue\n\n for key in self.info[intf]:\n if key == 'ipv4' or key == 'ipv6':\n for ip in 
self.info[intf][key].keys():\n if p.match(ip):\n self.info[intf][key][ip]['eui_64'] = True\n\n # route_tag and origin of ipv4/ipv6\n self.ret_dict = {}\n routing_dict = dict_v4 if key == 'ipv4' else dict_v6\n\n if ip in routing_dict:\n self._match_keys(dic=routing_dict[ip],\n match={'interface': intf})\n\n for protocol in self.ret_dict:\n if 'tag' in self.ret_dict[protocol]:\n self.info[intf][key][ip]['route_tag'] = \\\n self.ret_dict[protocol]['tag']\n self.info[intf][key][ip]['origin'] = protocol\n\n # delete the routing attribute which is only used \n # for getting route_tag and origin\n for key in ['routing_v4', 'routing_v6']:\n if key in self.info:\n del(self.info[key])\n\n def _match_keys(self, dic, match):\n '''find entry in the dic when values are same to match.\n will return the upper level dictionary contains those values\n '''\n if isinstance(dic, dict):\n for key, value in match.items():\n for dic_key in dic:\n if key in dic[dic_key] and dic[dic_key][key] == value:\n self.ret_dict.update(dic)\n break\n elif not isinstance(dic[dic_key], dict):\n pass\n else:\n self._match_keys(dic=dic[dic_key], match=match)\n return(self.ret_dict)\n\n\n", "id": "8374096", "language": "Python", "matching_score": 6.366107940673828, "max_stars_count": 0, "path": "pkgs/ops-pkg/src/genie/libs/ops/interface/nxos/interface.py" }, { "content": "''' \nInterface Genie Ops Object for IOSXE - CLI.\n'''\n\nimport re\n\n# super class\nfrom genie.libs.ops.interface.interface import Interface as SuperInterface\n\n# iosxe show_interface\nfrom genie.libs.parser.iosxe.show_interface import ShowInterfaces, \\\n ShowIpInterface, \\\n ShowIpv6Interface, \\\n ShowInterfacesAccounting\n\nfrom genie.libs.parser.iosxe.show_vrf import ShowVrf\n\n\n\nclass Interface(SuperInterface):\n '''Interface Genie Ops Object'''\n\n def learn(self, custom=None, interface=None, vrf=None, address_family=None):\n '''Learn Interface Ops'''\n ########################################################################\n # info\n ########################################################################\n # Global source\n src = '[(?P<interface>.*)]'\n dest = 'info[(?P<interface>.*)]'\n req_keys = ['[description]', '[type]', '[oper_status]',\n '[phys_address]', '[port_speed]', '[mtu]',\n '[enabled]', '[bandwidth]', '[flow_control]',\n '[mac_address]', '[auto_negotiate]', '[port_channel]',\n '[duplex_mode]', '[medium]', '[delay]']\n\n\n self.add_leaf(cmd=ShowVrf,\n src='vrf[(?P<vrf>.*)][interfaces]',\n dest='info[vrf][(?P<vrf>.*)][interfaces]', vrf=vrf)\n self.make()\n if vrf:\n for intf in self.info['vrf'][vrf]['interfaces']:\n for key in req_keys:\n self.add_leaf(cmd=ShowInterfaces,\n src=src + '[{}]'.format(key),\n dest=dest + '[{}]'.format(key),\n interface=intf)\n else:\n for key in req_keys:\n self.add_leaf(cmd=ShowInterfaces,\n src=src + '[{}]'.format(key),\n dest=dest + '[{}]'.format(key),\n interface=interface)\n\n\n\n # make to write in cache\n self.make()\n\n # mapping vrf to interface from ShowVrf\n if interface:\n if hasattr(self, 'info') and 'vrf' in self.info:\n for vrf_entry in self.info['vrf']:\n if 'interfaces' not in self.info['vrf'][vrf_entry]:\n continue\n for intf in self.info['vrf'][vrf_entry]['interfaces']:\n if intf == interface:\n if intf not in self.info:\n self.info[intf] = {}\n self.info[intf]['vrf'] = vrf_entry\n del (self.info['vrf'])\n else:\n if hasattr(self, 'info') and 'vrf' in self.info:\n for vrf_entry in self.info['vrf']:\n if 'interfaces' not in self.info['vrf'][vrf_entry]:\n continue\n for intf in 
self.info['vrf'][vrf_entry]['interfaces']:\n if intf not in self.info:\n self.info[intf] = {}\n self.info[intf]['vrf'] = vrf_entry\n del (self.info['vrf'])\n\n # make to write in cache\n self.make()\n\n # vrf_downstream not supported on iosxe\n\n # vlan_id access_vlan trunk_vlans\n # switchport_mode switchport_enable native_vlan\n # are only supported on c3850 platform\n\n # ======================================================================\n # counters\n # ======================================================================\n # keys are not supported on iosxe\n # 'in_unicast_pkts'\n # 'in_discards'\n # 'in_unknown_protos'\n # 'in_mac_control_frames'\n # 'in_oversize_frames'\n # 'in_jabber_frames'\n # 'in_fragment_frames'\n # 'in_8021q_frames'\n # 'out_unicast_pkts'\n # 'out_discard'\n # 'out_mac_control_frames'\n # 'out_8021q_frames'\n\n # Global source\n if vrf:\n for intf in self.info:\n src = '[(?P<interface>.*)][counters]'\n dest = 'info[(?P<interface>.*)][counters]'\n\n req_keys = ['in_pkts', 'in_octets', 'in_broadcast_pkts',\n 'in_multicast_pkts', 'in_errors',\n 'in_mac_pause_frames', 'out_mac_pause_frames',\n 'in_crc_errors', 'out_pkts', 'out_octets',\n 'out_broadcast_pkts', 'out_multicast_pkts',\n 'out_errors', 'last_clear']\n\n for key in req_keys:\n self.add_leaf(cmd=ShowInterfaces,\n src=src + '[{}]'.format(key),\n dest=dest + '[{}]'.format(key),\n interface=intf)\n\n # Global source - counters | rate\n src = '[(?P<interface>.*)][counters][rate]'\n dest = 'info[(?P<interface>.*)][counters][rate]'\n\n req_keys = ['load_interval', 'in_rate', 'in_rate_pkts',\n 'out_rate', 'out_rate_pkts']\n for key in req_keys:\n self.add_leaf(cmd=ShowInterfaces,\n src=src + '[{}]'.format(key),\n dest=dest + '[{}]'.format(key),\n interface=intf)\n\n # ======================================================================\n # encapsulation\n # ======================================================================\n\n # Global source\n src = '[(?P<interface>.*)][encapsulations]'\n dest = 'info[(?P<interface>.*)][encapsulation]'\n\n req_keys = ['encapsulation', 'first_dot1q', 'second_dot1q']\n\n for key in req_keys:\n self.add_leaf(cmd=ShowInterfaces,\n src=src + '[{}]'.format(key),\n dest=dest + '[{}]'.format(key),\n interface=intf)\n\n # ======================================================================\n # accounting\n # ======================================================================\n\n # Global source\n src = '[(?P<interface>.*)]'\n dest = 'info[(?P<interface>.*)]'\n\n cmd = 'ShowInterfacesAccounting'\n if custom and cmd in custom.keys():\n if 'intf' in custom[cmd].keys():\n self.add_leaf(cmd=ShowInterfacesAccounting,\n src=src + '[accounting]',\n dest=dest + '[accounting]',\n interface=custom[cmd]['intf'])\n else:\n self.add_leaf(cmd=ShowInterfacesAccounting,\n src=src + '[accounting]',\n dest=dest + '[accounting]')\n\n self.add_leaf(cmd=ShowInterfacesAccounting,\n src=src + '[accounting]',\n dest=dest + '[accounting]',\n interface=intf)\n\n # ======================================================================\n # ipv4\n # ======================================================================\n\n if not address_family or address_family.lower() == 'ipv4':\n # Global source\n src = '[(?P<interface>.*)][ipv4][(?P<ipv4>.*)]'\n dest = 'info[(?P<interface>.*)][ipv4][(?P<ipv4>.*)]'\n\n req_keys = ['ip', 'prefix_length', 'secondary', 'origin']\n # custom\n for key in req_keys:\n self.add_leaf(cmd=ShowIpInterface,\n src=src + '[{}]'.format(key),\n dest=dest + '[{}]'.format(key), 
interface=intf)\n\n # route_tag --- This is not supported on IOSXE\n\n # secondary_vrf --- This is not supported on IOSXE\n\n # unnumbered\n self.add_leaf(cmd=ShowInterfaces,\n src='[(?P<interface>.*)][ipv4][unnumbered]',\n dest='info[(?P<interface>.*)][ipv4][unnumbered]',\n interface=intf)\n\n # ======================================================================\n # ipv6\n # ======================================================================\n\n # Global source\n if not address_family or address_family.lower() == 'ipv6':\n src = '[(?P<interface>.*)][ipv6][(?P<ipv6>.*)]'\n dest = 'info[(?P<interface>.*)][ipv6][(?P<ipv6>.*)]'\n req_keys = ['ip', 'prefix_length', 'anycast', 'status',\n 'origin', 'autoconf', 'eui_64', 'enabled',\n 'unnumbered']\n\n for key in req_keys:\n self.add_leaf(cmd=ShowIpv6Interface,\n src=src + '[{}]'.format(key),\n dest=dest + '[{}]'.format(key), interface=intf)\n else:\n src = '[(?P<interface>.*)][counters]'\n dest = 'info[(?P<interface>.*)][counters]'\n\n req_keys = ['in_pkts', 'in_octets', 'in_broadcast_pkts',\n 'in_multicast_pkts', 'in_errors',\n 'in_mac_pause_frames', 'out_mac_pause_frames',\n 'in_crc_errors', 'out_pkts', 'out_octets',\n 'out_broadcast_pkts', 'out_multicast_pkts',\n 'out_errors', 'last_clear']\n\n\n for key in req_keys:\n self.add_leaf(cmd=ShowInterfaces,\n src=src + '[{}]'.format(key),\n dest=dest + '[{}]'.format(key),\n interface=interface)\n\n\n # Global source - counters | rate\n src = '[(?P<interface>.*)][counters][rate]'\n dest = 'info[(?P<interface>.*)][counters][rate]'\n\n req_keys = ['load_interval', 'in_rate', 'in_rate_pkts',\n 'out_rate', 'out_rate_pkts']\n for key in req_keys:\n self.add_leaf(cmd=ShowInterfaces,\n src=src + '[{}]'.format(key),\n dest=dest + '[{}]'.format(key),\n interface=interface)\n\n # ======================================================================\n # encapsulation\n # ======================================================================\n\n\n # Global source\n src = '[(?P<interface>.*)][encapsulations]'\n dest = 'info[(?P<interface>.*)][encapsulation]'\n\n req_keys = ['encapsulation', 'first_dot1q', 'second_dot1q']\n\n for key in req_keys:\n self.add_leaf(cmd=ShowInterfaces,\n src=src + '[{}]'.format(key),\n dest=dest + '[{}]'.format(key),\n interface=interface)\n\n # ======================================================================\n # accounting\n # ======================================================================\n\n # Global source\n src = '[(?P<interface>.*)]'\n dest = 'info[(?P<interface>.*)]'\n\n cmd = 'ShowInterfacesAccounting'\n if custom and cmd in custom.keys():\n if 'intf' in custom[cmd].keys():\n self.add_leaf(cmd=ShowInterfacesAccounting,\n src=src + '[accounting]',\n dest=dest + '[accounting]',\n interface=custom[cmd]['intf'])\n else:\n self.add_leaf(cmd=ShowInterfacesAccounting,\n src=src + '[accounting]',\n dest=dest + '[accounting]')\n\n self.add_leaf(cmd=ShowInterfacesAccounting,\n src=src + '[accounting]',\n dest=dest + '[accounting]',\n interface=interface)\n\n\n # ======================================================================\n # ipv4\n # ======================================================================\n\n if not address_family or address_family.lower() == 'ipv4':\n # Global source\n src = '[(?P<interface>.*)][ipv4][(?P<ipv4>.*)]'\n dest = 'info[(?P<interface>.*)][ipv4][(?P<ipv4>.*)]'\n\n req_keys = ['ip', 'prefix_length', 'secondary', 'origin']\n # custom\n for key in req_keys:\n self.add_leaf(cmd=ShowIpInterface,\n src=src + '[{}]'.format(key),\n 
dest=dest + '[{}]'.format(key), interface=interface)\n\n\n # route_tag --- This is not supported on IOSXE\n\n # secondary_vrf --- This is not supported on IOSXE\n\n # unnumbered\n self.add_leaf(cmd=ShowInterfaces,\n src='[(?P<interface>.*)][ipv4][unnumbered]',\n dest='info[(?P<interface>.*)][ipv4][unnumbered]',\n interface=interface)\n\n # ======================================================================\n # ipv6\n # ======================================================================\n\n\n # Global source\n if not address_family or address_family.lower() == 'ipv6':\n src = '[(?P<interface>.*)][ipv6][(?P<ipv6>.*)]'\n dest = 'info[(?P<interface>.*)][ipv6][(?P<ipv6>.*)]'\n req_keys = ['ip', 'prefix_length', 'anycast', 'status',\n 'origin', 'autoconf', 'eui_64', 'enabled',\n 'unnumbered']\n\n for key in req_keys:\n self.add_leaf(cmd=ShowIpv6Interface,\n src=src + '[{}]'.format(key),\n dest=dest + '[{}]'.format(key), interface=interface)\n\n # route_tag --- This is not supported on IOSXE\n\n # make to write in cache\n self.make(final_call=True)\n\n # switchport_enable -- default values\n if hasattr(self, 'info'):\n for intf in self.info:\n if 'switchport_enable' not in self.info[intf]:\n self.info[intf]['switchport_enable'] = False\n", "id": "5992817", "language": "Python", "matching_score": 6.246792316436768, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/interface/iosxe/interface.py" }, { "content": "''' \nInterface Genie Ops Object for IOSXR - CLI.\n'''\n# python\nimport re\n\n# super class\nfrom genie.libs.ops.interface.interface import Interface as SuperInterface\n\n# iosxr show_interface\nfrom genie.libs.parser.iosxr.show_interface import ShowInterfacesDetail, \\\n ShowEthernetTags, \\\n ShowIpv4VrfAllInterface, \\\n ShowIpv6VrfAllInterface, \\\n ShowInterfacesAccounting\n\nfrom genie.libs.parser.iosxr.show_vrf import ShowVrfAllDetail\n\n\nclass Interface(SuperInterface):\n '''Interface Genie Ops Object'''\n\n def learn(self, custom=None, interface=None, vrf=None, address_family=None):\n '''Learn Interface Ops'''\n ########################################################################\n # info\n ########################################################################\n # Global source\n # vrf\n self.add_leaf(cmd=ShowVrfAllDetail,\n src='[(?P<vrf>.*)][interfaces]',\n dest='info[vrf][(?P<vrf>.*)][interfaces]',\n vrf=vrf)\n self.make()\n src = '[(?P<interface>.*)]'\n dest = 'info[(?P<interface>.*)]'\n req_keys_path = {'[description]': '[description]',\n '[types]': '[type]',\n '[oper_status]': '[oper_status]',\n '[phys_address]': '[phys_address]',\n '[port_speed]': '[port_speed]',\n '[mtu]': '[mtu]',\n '[enabled]': '[enabled]',\n '[mac_address]': '[mac_address]',\n '[auto_negotiate]': '[auto_negotiate]',\n '[duplex_mode]': '[duplex_mode]',\n '[bandwidth]': '[bandwidth]',\n }\n\n if vrf:\n for intf in self.info['vrf'][vrf]['interfaces']:\n for src_key_path, dest_key_path in req_keys_path.items():\n self.add_leaf(cmd=ShowInterfacesDetail,\n src=src + src_key_path,\n dest=dest + dest_key_path,\n interface=intf)\n\n # vlan_id\n self.add_leaf(cmd=ShowEthernetTags,\n src=src + '[vlan_id]',\n dest=dest + '[vlan_id]',\n interface=intf)\n # flow_control\n self.add_leaf(cmd=ShowInterfacesDetail,\n src=src + '[flow_control]',\n dest=dest + '[flow_control]',\n interface=intf)\n else:\n for src_key_path, dest_key_path in req_keys_path.items():\n self.add_leaf(cmd=ShowInterfacesDetail,\n src=src + src_key_path,\n dest=dest + dest_key_path,\n interface=interface)\n\n # vlan_id\n 
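# NOTE: add_leaf() only registers a mapping from a parser key-path (src)\n            # to an Ops key-path (dest); nothing is parsed until self.make() runs\n            # below. A minimal usage sketch, assuming a connected pyATS device\n            # handle named 'uut' (the name is hypothetical):\n            #     intf = Interface(device=uut)\n            #     intf.learn(interface='GigabitEthernet0/0/0/1', vrf='VRF1')\n            #     intf.info['GigabitEthernet0/0/0/1'].get('vlan_id')  # e.g. '501'\n            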
self.add_leaf(cmd=ShowEthernetTags,\n                          src=src + '[vlan_id]',\n                          dest=dest + '[vlan_id]',\n                          interface=interface)\n            # flow_control\n            self.add_leaf(cmd=ShowInterfacesDetail,\n                          src=src + '[flow_control]',\n                          dest=dest + '[flow_control]',\n                          interface=interface)\n        # make to write in cache\n        self.make()\n\n        # mapping vrf to interface from ShowVrfAllDetail\n        if hasattr(self, 'info') and 'vrf' in self.info:\n            for vrf_item in self.info['vrf']:\n                if 'interfaces' not in self.info['vrf'][vrf_item]:\n                    continue\n                for intf in self.info['vrf'][vrf_item]['interfaces']:\n                    if interface:\n                        if intf == interface:\n                            if intf not in self.info:\n                                self.info[intf] = {}\n                            self.info[intf]['vrf'] = vrf_item\n                    else:\n                        if intf not in self.info:\n                            self.info[intf] = {}\n                        self.info[intf]['vrf'] = vrf_item\n            del (self.info['vrf'])\n\n        if vrf:\n            for intf in self.info:\n                # ======================================================================\n                # accounting\n                # ======================================================================\n\n                # accounting\n                cmd = 'ShowInterfacesAccounting'\n                if custom and cmd in custom.keys():\n                    if 'intf' in custom[cmd].keys():\n                        self.add_leaf(cmd=ShowInterfacesAccounting,\n                                      src=src + '[accounting]',\n                                      dest=dest + '[accounting]',\n                                      interface=custom[cmd]['intf'])\n                else:\n                    self.add_leaf(cmd=ShowInterfacesAccounting,\n                                  src=src + '[accounting]',\n                                  dest=dest + '[accounting]')\n\n                self.add_leaf(cmd=ShowInterfacesAccounting,\n                              src=src + '[accounting]',\n                              dest=dest + '[accounting]',\n                              interface=intf)\n\n                # ======================================================================\n                # counters\n                # ======================================================================\n                # Global source\n                src = '[(?P<interface>.*)][counters]'\n                dest = 'info[(?P<interface>.*)][counters]'\n\n                req_keys = ['in_pkts', 'in_octets', 'out_discard',\n                            'in_broadcast_pkts', 'in_multicast_pkts',\n                            'in_discards', 'in_errors', 'out_octets',\n                            'in_crc_errors', 'out_pkts',\n                            'out_broadcast_pkts', 'out_multicast_pkts',\n                            'out_errors', 'last_clear']\n\n                for key in req_keys:\n                    self.add_leaf(cmd=ShowInterfacesDetail,\n                                  src=src + '[{}]'.format(key),\n                                  dest=dest + '[{}]'.format(key),\n                                  interface=intf)\n\n                # Global source - counters | rate\n                src = '[(?P<interface>.*)][counters][rate]'\n                dest = 'info[(?P<interface>.*)][counters][rate]'\n\n                req_keys = ['load_interval', 'in_rate', 'in_rate_pkts',\n                            'out_rate', 'out_rate_pkts']\n                for key in req_keys:\n                    self.add_leaf(cmd=ShowInterfacesDetail,\n                                  src=src + '[{}]'.format(key),\n                                  dest=dest + '[{}]'.format(key),\n                                  interface=intf)\n\n                # IOSXR NONE supported keys from general ops\n                # in_8021q_frames - N/A\n                # in_fragment_frames - N/A\n                # in_jabber_frames - N/A\n                # in_mac_control_frames - N/A\n                # in_mac_pause_frames - N/A\n                # in_unicast_pkts - N/A\n                # in_oversize_frames - N/A\n                # in_unknown_protos - N/A\n                # ipv6_origin - N/A\n                # ipv6_preferred_lifetime - N/A\n                # ipv6_valid_lifetime - N/A\n                # last_change - N/A\n                # origin - N/A\n                # medium - N/A\n                # out_8021q_frames - N/A\n                # out_mac_control_frames - N/A\n                # out_mac_pause_frames - N/A\n                # out_unicast_pkts - N/A\n                # vrf_downstream - N/A in xr\n                # access_vlan- N/A in XR\n                # delay - N/A in XR\n                # secondary_vrf - N/A in xr\n                # switchport_mode - N/A in xr\n                # trunk_vlans - N/A in xr\n                # link_status - N/A in xr\n                # ipv6_unnumbered_intf_ref - N/A in xr\n                # ipv6_anycast - N/A in xr\n\n                # ======================================================================\n                # encapsulation\n                # ======================================================================\n\n                # Global source\n                src = 
'[(?P<interface>.*)][encapsulations]'\n dest = 'info[(?P<interface>.*)][encapsulation]'\n\n req_keys = ['encapsulation', 'first_dot1q',\n 'second_dot1q', 'native_vlan']\n\n for key in req_keys:\n self.add_leaf(cmd=ShowInterfacesDetail,\n src=src + '[{}]'.format(key),\n dest=dest + '[{}]'.format(key),\n interface=intf)\n\n else:\n # ======================================================================\n # accounting\n # ======================================================================\n\n # accounting\n cmd = 'ShowInterfacesAccounting'\n if custom and cmd in custom.keys():\n if 'intf' in custom[cmd].keys():\n self.add_leaf(cmd=ShowInterfacesAccounting,\n src=src + '[accounting]',\n dest=dest + '[accounting]',\n interface=custom[cmd]['intf'])\n else:\n self.add_leaf(cmd=ShowInterfacesAccounting,\n src=src + '[accounting]',\n dest=dest + '[accounting]')\n\n self.add_leaf(cmd=ShowInterfacesAccounting,\n src=src + '[accounting]',\n dest=dest + '[accounting]',\n interface=interface)\n\n # ======================================================================\n # counters\n # ======================================================================\n # Global source\n src = '[(?P<interface>.*)][counters]'\n dest = 'info[(?P<interface>.*)][counters]'\n\n req_keys = ['in_pkts', 'in_octets', 'out_discard',\n 'in_broadcast_pkts', 'in_multicast_pkts',\n 'in_discards', 'in_errors', 'out_octets',\n 'in_crc_errors', 'out_pkts',\n 'out_broadcast_pkts', 'out_multicast_pkts',\n 'out_errors', 'last_clear']\n\n for key in req_keys:\n self.add_leaf(cmd=ShowInterfacesDetail,\n src=src + '[{}]'.format(key),\n dest=dest + '[{}]'.format(key),\n interface=interface)\n\n\n # Global source - counters | rate\n src = '[(?P<interface>.*)][counters][rate]'\n dest = 'info[(?P<interface>.*)][counters][rate]'\n\n req_keys = ['load_interval', 'in_rate', 'in_rate_pkts',\n 'out_rate', 'out_rate_pkts']\n for key in req_keys:\n self.add_leaf(cmd=ShowInterfacesDetail,\n src=src + '[{}]'.format(key),\n dest=dest + '[{}]'.format(key),\n interface=interface)\n\n # IOSXR NONE supported keys from general ops\n # in_8021q_frames - N/A\n # in_fragment_frames - N/A\n # in_jabber_frames - N/A\n # in_mac_control_frames - N/A\n # in_mac_pause_frames - N/A\n # in_unicast_pkts - N/A\n # in_oversize_frames - N/A\n # in_unknown_protos - N/A\n # ipv6_origin - N/A\n # ipv6_preferred_lifetime - N/A\n # ipv6_valid_lifetime - N/A\n # last_change - N/A\n # origin - N/A\n # medium - N/A\n # out_8021q_frames - N/A\n # out_mac_control_frames - N/A\n # out_mac_pause_frames - N/A\n # out_unicast_pkts - N/A\n # vrf_downstream - N/A in xr\n # access_vlan- N/A in XR\n # delay - N/A in XR\n # secondary_vrf - N/A in xr\n # switchport_mode - N/A in xr\n # trunk_vlans - N/A in xr\n # link_status - N/A in xr\n # ipv6_unnumbered_intf_ref - N/A in xr\n # ipv6_anycast - N/A in xr\n\n # ======================================================================\n # encapsulation\n # ======================================================================\n\n\n # Global source\n src = '[(?P<interface>.*)][encapsulations]'\n dest = 'info[(?P<interface>.*)][encapsulation]'\n\n req_keys = ['encapsulation', 'first_dot1q',\n 'second_dot1q', 'native_vlan']\n\n for key in req_keys:\n self.add_leaf(cmd=ShowInterfacesDetail,\n src=src + '[{}]'.format(key),\n dest=dest + '[{}]'.format(key),\n interface=interface)\n\n # ======================================================================\n # ipv4\n # ======================================================================\n if not 
address_family or address_family.lower() == 'ipv4':\n # Global source\n req_keys = ['ip', 'prefix_length', 'secondary', 'route_tag']\n if interface:\n src = '[{}][ipv4][(?P<ipv4>.*)]'.format(interface)\n dest = 'info[{}][ipv4][(?P<ipv4>.*)]'.format(interface)\n for key in req_keys:\n self.add_leaf(cmd=ShowIpv4VrfAllInterface,\n src=src + '[{}]'.format(key),\n dest=dest + '[{}]'.format(key),\n vrf=vrf,\n interface=interface)\n # unnumbered\n self.add_leaf(cmd=ShowIpv4VrfAllInterface,\n src='[{}][ipv4][unnumbered][unnumbered_intf_ref]'.format(interface),\n dest='info[{}][ipv4][unnumbered][unnumbered_intf_ref]'.format(interface),\n vrf=vrf,\n interface=interface)\n else:\n src = '[(?P<interface>.*)][ipv4][(?P<ipv4>.*)]'\n dest = 'info[(?P<interface>.*)][ipv4][(?P<ipv4>.*)]'\n for key in req_keys:\n self.add_leaf(cmd=ShowIpv4VrfAllInterface,\n src=src + '[{}]'.format(key),\n dest=dest + '[{}]'.format(key),\n vrf=vrf,\n interface=interface)\n # unnumbered\n self.add_leaf(cmd=ShowIpv4VrfAllInterface,\n src='[(?P<interface>.*)][ipv4][unnumbered][unnumbered_intf_ref]',\n dest='info[(?P<interface>.*)][ipv4][unnumbered][unnumbered_intf_ref]',\n vrf=vrf,\n interface=interface)\n\n\n\n # ======================================================================\n # ipv6\n # ======================================================================\n if not address_family or address_family.lower() == 'ipv6':\n req_keys_path = {'[ipv6]': '[ip]',\n '[ipv6_prefix_length]': '[prefix_length]',\n '[ipv6_status]': '[status]',\n '[ipv6_eui64]': '[eui64]',\n '[ipv6_route_tag]': '[route_tag]',\n }\n if interface:\n src = '[{}][ipv6][(?P<ipv6>.*)]'.format(interface)\n dest = 'info[{}][ipv6][(?P<ipv6>.*)]'.format(interface)\n for src_key_path, dest_key_path in req_keys_path.items():\n self.add_leaf(cmd=ShowIpv6VrfAllInterface,\n src=src + src_key_path,\n dest=dest + dest_key_path,\n vrf=vrf,\n interface=interface)\n # enabled\n self.add_leaf(cmd=ShowIpv6VrfAllInterface,\n src='[{}][ipv6_enabled]'.format(interface),\n dest='info[{}][ipv6][enabled]'.format(interface),\n vrf=vrf,\n interface=interface)\n else:\n src = '[(?P<interface>.*)][ipv6][(?P<ipv6>.*)]'\n dest = 'info[(?P<interface>.*)][ipv6][(?P<ipv6>.*)]'\n for src_key_path, dest_key_path in req_keys_path.items():\n self.add_leaf(cmd=ShowIpv6VrfAllInterface,\n src=src + src_key_path,\n dest=dest + dest_key_path,\n vrf=vrf,\n interface=interface)\n # enabled\n self.add_leaf(cmd=ShowIpv6VrfAllInterface,\n src='[(?P<interface>.*)][ipv6_enabled]',\n dest='info[(?P<interface>.*)][ipv6][enabled]',\n vrf=vrf,\n interface=interface)\n\n # make to write in cache\n self.make(final_call=True)\n\n", "id": "4807370", "language": "Python", "matching_score": 2.568449020385742, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/interface/iosxr/interface.py" }, { "content": "''' \nARP Genie Ops Object for NXOS - CLI.\n'''\n# super class\nfrom genie.libs.ops.arp.arp import Arp as SuperArp\n\n# Parser\nfrom genie.libs.parser.nxos.show_arp import ShowIpArpDetailVrfAll, \\\n\t\t\t\t\t\t\t\t\t\t\tShowIpArpSummaryVrfAll, \\\n\t\t\t\t\t\t\t\t\t\t\tShowIpArpstatisticsVrfAll\n\nfrom genie.libs.parser.nxos.show_interface import ShowIpInterfaceVrfAll\n\n\nclass Arp(SuperArp):\n\t'''ARP Genie Ops Object'''\n\n\tdef learn(self):\n\t\t'''Learn ARP Ops'''\n\n\t\t########################################################################\n\t\t# info\n\t\t########################################################################\n\n\t\tsrc_global = 
'[interfaces][(?P<interfaces>.*)][ipv4][neighbors]'\\\n\t\t\t'[(?P<neighbors>.*)]'\n\t\tdest_global = 'info[interfaces][(?P<interfaces>.*)][ipv4][neighbors]'\\\n\t\t\t'[(?P<neighbors>.*)]'\n\n\t\t# Missing keys: 'remaining_expire_time'\n\t\t# 'ip'\n\t\tself.add_leaf(cmd=ShowIpArpDetailVrfAll,\n\t\t\t\t\t\tsrc=src_global + '[ip]',\n\t\t\t\t\t\tdest=dest_global + '[ip]')\n\n\t\t# 'link_layer_address'\n\t\tself.add_leaf(cmd=ShowIpArpDetailVrfAll,\n\t\t\t\t\t\tsrc=src_global + '[link_layer_address]',\n\t\t\t\t\t\tdest=dest_global + '[link_layer_address]')\n\n\t\t# 'origin'\n\t\tself.add_leaf(cmd=ShowIpArpDetailVrfAll,\n\t\t\t\t\t\tsrc=src_global + '[origin]',\n\t\t\t\t\t\tdest=dest_global + '[origin]')\n\n\t\tsrc_interface = '[(?P<intf>.*)]'\n\t\tdest_interface = 'info[interfaces][(?P<intf>.*)][arp_dynamic_learning]'\n\n\t\t# 'proxy_enable'\n\t\tself.add_leaf(cmd=ShowIpInterfaceVrfAll,\n\t\t\t\t\t\tsrc=src_interface + '[proxy_arp]',\n\t\t\t\t\t\tdest=dest_interface + '[proxy_enable]')\n\n\t\t# 'local_proxy_enable'\n\t\tself.add_leaf(cmd=ShowIpInterfaceVrfAll,\n\t\t\t\t\t\tsrc=src_interface + '[local_proxy_arp]',\n\t\t\t\t\t\tdest=dest_interface + '[local_proxy_enable]')\n\n\t\tdest_summary = 'info[statistics]'\n\n\t\t# incomplete_total\n\t\tself.add_leaf(cmd=ShowIpArpSummaryVrfAll,\n\t\t\t\t\t\tsrc='[incomplete]',\n\t\t\t\t\t\tdest=dest_summary + '[incomplete_total]')\n\n\t\t# entries_total\n\t\tself.add_leaf(cmd=ShowIpArpSummaryVrfAll,\n\t\t\t\t\t\tsrc='[total]',\n\t\t\t\t\t\tdest=dest_summary + '[entries_total]')\n\n\t\tsrc_stat_in = '[statistics][received]'\n\t\tdest_stat_in = 'info[statistics]'\n\n\t\t# Missing keys: 'in_gratuitous_pkts', 'all_dynamic_pkts',\n\t\t# 'all_static_pkts'\n\t\tself.add_leaf(cmd=ShowIpArpstatisticsVrfAll,\n\t\t\t\t\t\tsrc=src_stat_in + '[requests]',\n\t\t\t\t\t\tdest=dest_stat_in + '[in_requests_pkts]')\n\n\t\tself.add_leaf(cmd=ShowIpArpstatisticsVrfAll,\n\t\t\t\t\t\tsrc=src_stat_in + '[replies]',\n\t\t\t\t\t\tdest=dest_stat_in + '[in_replies_pkts]')\n\n\t\tself.add_leaf(cmd=ShowIpArpstatisticsVrfAll,\n\t\t\t\t\t\tsrc=src_stat_in + '[total]',\n\t\t\t\t\t\tdest=dest_stat_in + '[in_total]')\n\n\t\tself.add_leaf(cmd=ShowIpArpstatisticsVrfAll,\n\t\t\t\t\t\tsrc=src_stat_in + '[dropped]',\n\t\t\t\t\t\tdest=dest_stat_in + '[in_drops]')\n\n\t\tsrc_stat_out = '[statistics][sent]'\n\t\tdest_stat_out = 'info[statistics]'\n\n\t\tself.add_leaf(cmd=ShowIpArpstatisticsVrfAll,\n\t\t\t\t\t\tsrc=src_stat_out + '[gratuitous]',\n\t\t\t\t\t\tdest=dest_stat_out + '[out_gratuitous_pkts]')\n\n\t\tself.add_leaf(cmd=ShowIpArpstatisticsVrfAll,\n\t\t\t\t\t\tsrc=src_stat_out + '[requests]',\n\t\t\t\t\t\tdest=dest_stat_out + '[out_requests_pkts]')\n\n\t\tself.add_leaf(cmd=ShowIpArpstatisticsVrfAll,\n\t\t\t\t\t\tsrc=src_stat_out + '[replies]',\n\t\t\t\t\t\tdest=dest_stat_out + '[out_replies_pkts]')\n\n\t\tself.add_leaf(cmd=ShowIpArpstatisticsVrfAll,\n\t\t\t\t\t\tsrc=src_stat_out + '[total]',\n\t\t\t\t\t\tdest=dest_stat_out + '[out_total]')\n\n\t\tself.add_leaf(cmd=ShowIpArpstatisticsVrfAll,\n\t\t\t\t\t\tsrc=src_stat_out + '[dropped]',\n\t\t\t\t\t\tdest=dest_stat_out + '[out_drops]')\n\n\t\tself.make(final_call=True)\n\n\t\t# Parser return a string and 'proxy_arp' & 'local_proxy_arp' attributes\n\t\t# are booleans\n\t\tif hasattr(self, 'info') and 'interfaces' in self.info:\n\t\t\tfor intf in self.info['interfaces']:\n\t\t\t\tif 'arp_dynamic_learning' in self.info['interfaces'][intf] and \\\n\t\t\t\t\t'proxy_enable' in 
self.info['interfaces'][intf]\\\n\t\t\t\t\t['arp_dynamic_learning']:\n\t\t\t\t\t# proxy_enable\n\t\t\t\t\tif self.info['interfaces'][intf]['arp_dynamic_learning']\\\n\t\t\t\t\t\t['proxy_enable'] == 'disabled':\n\t\t\t\t\t\tself.info['interfaces'][intf]['arp_dynamic_learning']\\\n\t\t\t\t\t\t\t['proxy_enable'] = False\n\t\t\t\t\telse:\n\t\t\t\t\t\tself.info['interfaces'][intf]['arp_dynamic_learning']\\\n\t\t\t\t\t\t\t['proxy_enable'] = True\n\t\t\t\t\t# local_proxy_enable\n\t\t\t\t\tif self.info['interfaces'][intf]['arp_dynamic_learning']\\\n\t\t\t\t\t\t['local_proxy_enable'] == 'disabled':\n\t\t\t\t\t\tself.info['interfaces'][intf]['arp_dynamic_learning']\\\n\t\t\t\t\t\t\t['local_proxy_enable'] = False\n\t\t\t\t\telse:\n\t\t\t\t\t\tself.info['interfaces'][intf]['arp_dynamic_learning']\\\n\t\t\t\t\t\t\t['local_proxy_enable'] = True", "id": "5779228", "language": "Python", "matching_score": 5.414426326751709, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/arp/nxos/arp.py" }, { "content": "''' \nARP Genie Ops Object for IOSXE - CLI.\n'''\nimport copy\n\n# super class\nfrom genie.libs.ops.arp.arp import Arp as SuperArp\n\n# Parser\nfrom genie.libs.parser.iosxe.show_arp import ShowArp, \\\n\t\t\t\t\t\t\t\t\t\t\t ShowIpArpSummary, \\\n\t\t\t\t\t\t\t\t\t\t ShowIpTraffic, ShowIpArp\n\nfrom genie.libs.parser.iosxe.show_interface import ShowIpInterface\nfrom genie.libs.parser.iosxe.show_vrf import ShowVrf\n\n\nclass Arp(SuperArp):\n\t'''ARP Genie Ops Object'''\n\n\tdef learn(self):\n\t\t'''Learn ARP Ops'''\n\n\t\t########################################################################\n\t\t# info\n\t\t########################################################################\n\n\t\tsrc_global = '[interfaces][(?P<interfaces>.*)][ipv4][neighbors]'\\\n\t\t\t'[(?P<neighbors>.*)]'\n\t\tdest_global = 'info[interfaces][(?P<interfaces>.*)][ipv4][neighbors]'\\\n\t\t\t'[(?P<neighbors>.*)]'\n\n\n\t\t# Missing keys: 'remaining_expire_time'\n\t\t# 'ip'\n\t\tself.add_leaf(cmd=ShowIpArp,\n\t\t\t\t\t\tsrc=src_global + '[ip]',\n\t\t\t\t\t\tdest=dest_global + '[ip]')\n\n\t\t# 'link_layer_address'\n\t\tself.add_leaf(cmd=ShowIpArp,\n\t\t\t\t\t\tsrc=src_global + '[link_layer_address]',\n\t\t\t\t\t\tdest=dest_global + '[link_layer_address]')\n\n\t\t# 'origin'\n\t\tself.add_leaf(cmd=ShowIpArp,\n\t\t\t\t\t\tsrc=src_global + '[origin]',\n\t\t\t\t\t\tdest=dest_global + '[origin]')\n\n\t\tself.add_leaf(cmd=ShowVrf,\n\t\t\t\t\t\tsrc='vrf[(?P<vrf>.*)][interfaces]',\n\t\t\t\t\t\tdest='info[vrf][(?P<vrf>.*)][interfaces]')\n\t\t# save to cache\n\t\tself.make()\n\t\tif hasattr(self, 'info') and 'vrf' in self.info:\n\t\t\tfor vrf in self.info['vrf']:\n\t\t\t\tself.add_leaf(cmd=ShowIpArp,\n\t\t\t\t\t\t\t\tsrc=src_global + '[ip]',\n\t\t\t\t\t\t\t\tdest=dest_global + '[ip]',\n\t\t\t\t\t\t\t\tvrf=vrf)\n\n\t\t\t\t# 'link_layer_address'\n\t\t\t\tself.add_leaf(cmd=ShowIpArp,\n\t\t\t\t\t\t\t\tsrc=src_global + '[link_layer_address]',\n\t\t\t\t\t\t\t\tdest=dest_global + '[link_layer_address]',\n\t\t\t\t\t\t\t\tvrf=vrf)\n\n\t\t\t\t# 'origin'\n\t\t\t\tself.add_leaf(cmd=ShowIpArp,\n\t\t\t\t\t\t\t\tsrc=src_global + '[origin]',\n\t\t\t\t\t\t\t\tdest=dest_global + '[origin]',\n\t\t\t\t\t\t\t\tvrf=vrf)\n\t\t\tdel (self.info['vrf'])\n\n\n\n\t\tsrc_interface = '[(?P<intf>.*)]'\n\t\tdest_interface = 'info[interfaces][(?P<intf>.*)][arp_dynamic_learning]'\n\n\t\t# 'local_proxy_enable'\n\t\tself.add_leaf(cmd=ShowIpInterface,\n\t\t\t\t\t\tsrc=src_interface + '[local_proxy_arp]',\n\t\t\t\t\t\tdest=dest_interface + 
'[local_proxy_enable]')\n\n\t\t# 'proxy_enable'\n\t\tself.add_leaf(cmd=ShowIpInterface,\n\t\t\t\t\t\tsrc=src_interface + '[proxy_arp]',\n\t\t\t\t\t\tdest=dest_interface + '[proxy_enable]')\n\n\t\tdest_summary = 'info[statistics]'\n\n\t\t# incomplete_total\n\t\tself.add_leaf(cmd=ShowIpArpSummary,\n\t\t\t\t\t\tsrc='[incomp_entries]',\n\t\t\t\t\t\tdest=dest_summary + '[incomplete_total]')\n\n\t\t# entries_total\n\t\tself.add_leaf(cmd=ShowIpArpSummary,\n\t\t\t\t\t\tsrc='[total_entries]',\n\t\t\t\t\t\tdest=dest_summary + '[entries_total]')\n\n\t\tsrc_stat = '[arp_statistics]'\n\t\tdest_stat = 'info[statistics]'\n\n\t\t# Missing keys: 'out_gratuitous_pkts', 'out_drops', \n\t\t# 'in_gratuitous_pkts', 'all_dynamic_pkts', 'all_static_pkts'\n\n\t\t# 'in_requests_pkts'\n\t\tself.add_leaf(cmd=ShowIpTraffic,\n\t\t\t\t\t\tsrc=src_stat + '[arp_in_requests]',\n\t\t\t\t\t\tdest=dest_stat + '[in_requests_pkts]')\n\n\t\t# 'in_replies_pkts'\n\t\tself.add_leaf(cmd=ShowIpTraffic,\n\t\t\t\t\t\tsrc=src_stat + '[arp_in_replies]',\n\t\t\t\t\t\tdest=dest_stat + '[in_replies_pkts]')\n\n\t\t# 'out_requests_pkts'\n\t\tself.add_leaf(cmd=ShowIpTraffic,\n\t\t\t\t\t\tsrc=src_stat + '[arp_out_requests]',\n\t\t\t\t\t\tdest=dest_stat + '[out_requests_pkts]')\n\n\t\t# 'out_replies_pkts'\n\t\tself.add_leaf(cmd=ShowIpTraffic,\n\t\t\t\t\t\tsrc=src_stat + '[arp_out_replies]',\n\t\t\t\t\t\tdest=dest_stat + '[out_replies_pkts]')\n\n\t\t# 'in_drops'\n\t\tself.add_leaf(cmd=ShowIpTraffic,\n\t\t\t\t\t\tsrc=src_stat + '[arp_drops_input_full]',\n\t\t\t\t\t\tdest=dest_stat + '[in_drops]')\n\n\t\tself.make(final_call=True)", "id": "8376598", "language": "Python", "matching_score": 5.730154991149902, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/arp/iosxe/arp.py" }, { "content": "''' \nARP Genie Ops Object for IOSXR - CLI.\n'''\nimport copy\n\n# super class\nfrom genie.libs.ops.arp.arp import Arp as SuperArp\n\n# Parser\nfrom genie.libs.parser.iosxr.show_arp import ShowArpDetail, \\\n\t\t\t\t\t\t\t\t\t\t\t ShowArpTrafficDetail\n\nfrom genie.libs.parser.iosxr.show_interface import ShowIpv4VrfAllInterface\n\n\nclass Arp(SuperArp):\n\t'''ARP Genie Ops Object'''\n\n\tdef learn(self):\n\t\t'''Learn ARP Ops'''\n\n\t\t########################################################################\n\t\t# info\n\t\t########################################################################\n\n\t\tsrc_global = '[interfaces][(?P<interfaces>.*)][ipv4][neighbors]'\\\n\t\t\t'[(?P<neighbors>.*)]'\n\t\tdest_global = 'info[interfaces][(?P<interfaces>.*)][ipv4][neighbors]'\\\n\t\t\t'[(?P<neighbors>.*)]'\n\n\t\t# Missing keys: 'remaining_expire_time'\n\t\t# 'ip'\n\t\tself.add_leaf(cmd=ShowArpDetail,\n\t\t\t\t\t src=src_global + '[ip]',\n\t\t\t\t\t dest=dest_global + '[ip]')\n\n\t\t# 'link_layer_address'\n\t\tself.add_leaf(cmd=ShowArpDetail,\n\t\t\t\t\t src=src_global + '[link_layer_address]',\n\t\t\t\t\t dest=dest_global + '[link_layer_address]')\n\n\t\t# 'origin'\n\t\tself.add_leaf(cmd=ShowArpDetail,\n\t\t\t\t\t src=src_global + '[origin]',\n\t\t\t\t\t dest=dest_global + '[origin]')\n\n\t\tsrc_interface = '[(?P<intf>.*)][ipv4]'\n\t\tdest_interface = 'info[interfaces][(?P<intf>.*)][arp_dynamic_learning]'\n\n\t\t# Missing keys: 'local_proxy_enable'\n\t\t# 'proxy_enable'\n\t\tself.add_leaf(cmd=ShowIpv4VrfAllInterface,\n\t\t\t\t\t src=src_interface + '[proxy_arp]',\n\t\t\t\t\t dest=dest_interface + '[proxy_enable]')\n\n\t\tsrc_stat = '[(?P<module>(0/0/CPU0))][statistics]'\n\t\tdest_stat = 'info[statistics]'\n\n\t\t# Missing keys: 'in_total', 
'in_gratuitous_pkts', 'all_dynamic_pkts',\n\t\t# 'all_static_pkts', 'in_drops', 'out_drops', 'out_total'\n\t\treq_key = ['in_replies_pkts', 'in_requests_pkts',\\\n\t\t\t\t 'out_requests_pkts', 'out_replies_pkts',\\\n\t\t\t\t 'out_gratuitous_pkts', ]\n\n\t\tfor key in req_key:\n\t\t\tself.add_leaf(cmd=ShowArpTrafficDetail,\n\t\t\t\t\t\t src=src_stat + '[{}]'.format(key),\n\t\t\t\t\t\t dest=dest_stat + '[{}]'.format(key))\n\n\t\tself.make(final_call=True)\n\n\t\t# Parser return a string and 'proxy_arp' attribute is a boolean\n\t\tif hasattr(self, 'info') and 'interfaces' in self.info:\n\t\t\tfor intf in self.info['interfaces']:\n\t\t\t\tif 'arp_dynamic_learning' in self.info['interfaces'][intf] and \\\n\t\t\t\t\t'proxy_enable' in self.info['interfaces'][intf]\\\n\t\t\t\t\t['arp_dynamic_learning']:\n\t\t\t\t\tif self.info['interfaces'][intf]['arp_dynamic_learning']\\\n\t\t\t\t\t\t['proxy_enable'] == 'disabled':\n\t\t\t\t\t\tself.info['interfaces'][intf]['arp_dynamic_learning']\\\n\t\t\t\t\t\t\t['proxy_enable'] = False\n\t\t\t\t\telse:\n\t\t\t\t\t\tself.info['interfaces'][intf]['arp_dynamic_learning']\\\n\t\t\t\t\t\t\t['proxy_enable'] = True", "id": "6034367", "language": "Python", "matching_score": 3.428626775741577, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/arp/iosxr/arp.py" }, { "content": "# Python\nimport unittest\n\n# ATS\nfrom ats.topology import Device\n\n# Genie\nfrom genie.libs.ops.arp.iosxr.arp import Arp\nfrom genie.libs.ops.arp.iosxr.tests.arp_output import ArpOutput\n\n# Parser\nfrom genie.libs.parser.iosxr.show_arp import ShowArpDetail, \\\n ShowArpTrafficDetail\n\nfrom genie.libs.parser.iosxr.show_interface import ShowIpv4VrfAllInterface\n\n\nclass test_arp(unittest.TestCase):\n\n def setUp(self):\n self.device = Device(name='aDevice')\n self.device.os = 'iosxr'\n self.device.mapping={}\n self.device.mapping['cli']='cli'\n # Give the device as a connection type\n # This is done in order to call the parser on the output provided\n self.device.connectionmgr.connections['cli'] = self.device\n\n def test_complete_output(self):\n self.maxDiff = None\n arp = Arp(device=self.device)\n\n # Get outputs\n arp.maker.outputs[ShowArpDetail] = \\\n {\"\": ArpOutput.ShowArpDetail}\n\n arp.maker.outputs[ShowArpTrafficDetail] = \\\n {\"\": ArpOutput.ShowArpTrafficDetail}\n\n arp.maker.outputs[ShowIpv4VrfAllInterface] = \\\n {\"\": ArpOutput.ShowIpv4VrfAllInterface}\n\n # Learn the feature\n arp.learn()\n\n # Verify Ops was created successfully\n self.assertEqual(arp.info, ArpOutput.Arp_info)\n\n # Check specific attribute values\n # info - interfaces\n self.assertEqual(arp.info['interfaces']['GigabitEthernet0/0/0/0']\\\n ['ipv4']['neighbors']['10.1.2.1']['ip'], '10.1.2.1')\n # info - statistics\n self.assertEqual(\n arp.info['statistics']['in_replies_pkts'], 8)\n\n def test_output_with_attribute(self):\n self.maxDiff = None\n arp = Arp(device=self.device,\n attributes=['info[statistics][(.*)]'])\n\n # Get outputs\n arp.maker.outputs[ShowArpDetail] = \\\n {\"\": ArpOutput.ShowArpDetail}\n\n arp.maker.outputs[ShowArpTrafficDetail] = \\\n {\"\": ArpOutput.ShowArpTrafficDetail}\n\n arp.maker.outputs[ShowIpv4VrfAllInterface] = \\\n {\"\": ArpOutput.ShowIpv4VrfAllInterface}\n\n # Learn the feature\n arp.learn()\n\n # Check no attribute not found\n with self.assertRaises(KeyError):\n arp.info['interfaces']\n\n # info - statistics\n self.assertEqual(arp.info['statistics'],\n ArpOutput.Arp_info['statistics'])\n\n def test_empty_output(self):\n self.maxDiff = None\n arp = 
Arp(device=self.device)\n\n # Get outputs\n arp.maker.outputs[ShowArpDetail] = \\\n {\"\": {}}\n\n arp.maker.outputs[ShowArpTrafficDetail] = \\\n {\"\": {}}\n\n arp.maker.outputs[ShowIpv4VrfAllInterface] = \\\n {\"\": {}}\n\n # Learn the feature\n arp.learn()\n\n # Check no attribute not found\n with self.assertRaises(AttributeError):\n arp.info['statistics']\n\n def test_incomplete_output(self):\n self.maxDiff = None\n \n arp = Arp(device=self.device)\n\n # Get outputs\n arp.maker.outputs[ShowArpDetail] = \\\n {\"\": ArpOutput.ShowArpDetail}\n\n arp.maker.outputs[ShowArpTrafficDetail] = \\\n {\"\": {}}\n\n arp.maker.outputs[ShowIpv4VrfAllInterface] = \\\n {\"\": ArpOutput.ShowIpv4VrfAllInterface}\n\n # Learn the feature\n arp.learn()\n \n # Check no attribute not found\n with self.assertRaises(KeyError):\n arp.info['statistics']\n\nif __name__ == '__main__':\n unittest.main()", "id": "7672443", "language": "Python", "matching_score": 4.165775775909424, "max_stars_count": 0, "path": "pkgs/ops-pkg/src/genie/libs/ops/arp/iosxr/tests/test_arp.py" }, { "content": "# Python\nimport unittest\n\n# ATS\nfrom ats.topology import Device\n\n# Genie\nfrom genie.libs.ops.arp.nxos.arp import Arp\nfrom genie.libs.ops.arp.nxos.tests.arp_output import ArpOutput\n\n# Parser\nfrom genie.libs.parser.nxos.show_arp import ShowIpArpDetailVrfAll, \\\n ShowIpArpSummaryVrfAll, \\\n ShowIpArpstatisticsVrfAll\n\nfrom genie.libs.parser.nxos.show_interface import ShowIpInterfaceVrfAll\n\n\nclass test_arp(unittest.TestCase):\n\n def setUp(self):\n self.device = Device(name='aDevice')\n self.device.os = 'nxos'\n self.device.mapping={}\n self.device.mapping['cli']='cli'\n # Give the device as a connection type\n # This is done in order to call the parser on the output provided\n self.device.connectionmgr.connections['cli'] = self.device\n\n def test_complete_output(self):\n self.maxDiff = None\n arp = Arp(device=self.device)\n\n # Get outputs\n arp.maker.outputs[ShowIpArpDetailVrfAll] = \\\n {\"\": ArpOutput.ShowIpArpDetailVrfAll}\n\n arp.maker.outputs[ShowIpArpSummaryVrfAll] = \\\n {\"\": ArpOutput.ShowIpArpSummaryVrfAll}\n\n arp.maker.outputs[ShowIpArpstatisticsVrfAll] = \\\n {\"\": ArpOutput.ShowIpArpstatisticsVrfAll}\n\n arp.maker.outputs[ShowIpInterfaceVrfAll] = \\\n {\"\": ArpOutput.ShowIpInterfaceVrfAll}\n\n # Learn the feature\n arp.learn()\n\n # Verify Ops was created successfully\n self.assertEqual(arp.info, ArpOutput.Arp_info)\n\n # Check specific attribute values\n # info - interfaces\n self.assertEqual(arp.info['interfaces']['Ethernet1/4.200']['ipv4']\\\n ['neighbors']['10.76.1.101']['ip'], '10.76.1.101')\n # info - statistics\n self.assertEqual(arp.info['statistics']['in_drops'], 28218)\n\n def test_output_with_attribute(self):\n self.maxDiff = None\n arp = Arp(device=self.device,\n attributes=['info[statistics][(.*)]'])\n\n # Get outputs\n arp.maker.outputs[ShowIpArpDetailVrfAll] = \\\n {\"\": ArpOutput.ShowIpArpDetailVrfAll}\n\n arp.maker.outputs[ShowIpArpSummaryVrfAll] = \\\n {\"\": ArpOutput.ShowIpArpSummaryVrfAll}\n\n arp.maker.outputs[ShowIpArpstatisticsVrfAll] = \\\n {\"\": ArpOutput.ShowIpArpstatisticsVrfAll}\n\n arp.maker.outputs[ShowIpInterfaceVrfAll] = \\\n {\"\": ArpOutput.ShowIpInterfaceVrfAll}\n\n # Learn the feature\n arp.learn()\n\n # Check no attribute not found\n with self.assertRaises(KeyError):\n arp.info['interfaces']\n\n # info - statistics\n self.assertEqual(arp.info['statistics'], ArpOutput.Arp_info['statistics'])\n\n def test_empty_output(self):\n self.maxDiff = None\n arp = 
Arp(device=self.device)\n\n # Get outputs\n arp.maker.outputs[ShowIpArpDetailVrfAll] = \\\n {\"\": {}}\n\n arp.maker.outputs[ShowIpArpSummaryVrfAll] = \\\n {\"\": {}}\n\n arp.maker.outputs[ShowIpArpstatisticsVrfAll] = \\\n {\"\": {}}\n\n arp.maker.outputs[ShowIpInterfaceVrfAll] = \\\n {\"\": {}}\n\n # Learn the feature\n arp.learn()\n\n # Check no attribute not found\n with self.assertRaises(AttributeError):\n arp.info['statistics']\n\n def test_incomplete_output(self):\n self.maxDiff = None\n \n arp = Arp(device=self.device)\n\n # Get outputs\n arp.maker.outputs[ShowIpArpDetailVrfAll] = \\\n {\"\": ArpOutput.ShowIpArpDetailVrfAll}\n\n arp.maker.outputs[ShowIpArpSummaryVrfAll] = \\\n {\"\": {}}\n\n arp.maker.outputs[ShowIpArpstatisticsVrfAll] = \\\n {\"\": {}}\n\n arp.maker.outputs[ShowIpInterfaceVrfAll] = \\\n {\"\": ArpOutput.ShowIpInterfaceVrfAll}\n\n # Learn the feature\n arp.learn()\n \n # Check no attribute not found\n with self.assertRaises(KeyError):\n arp.info['statistics']\n\nif __name__ == '__main__':\n unittest.main()\n", "id": "11350377", "language": "Python", "matching_score": 3.8100407123565674, "max_stars_count": 0, "path": "pkgs/ops-pkg/src/genie/libs/ops/arp/nxos/tests/test_arp.py" }, { "content": "# Python\nimport unittest\n\n# ATS\nfrom ats.topology import Device\nfrom unittest.mock import Mock\n# Genie\nfrom genie.libs.ops.arp.iosxe.arp import Arp\nfrom genie.libs.ops.arp.iosxe.tests.arp_output import ArpOutput\n\n# Parser\nfrom genie.libs.parser.iosxe.show_arp import ShowArp, \\\n ShowIpArpSummary, \\\n ShowIpTraffic, ShowIpArp\n\nfrom genie.libs.parser.iosxe.show_interface import ShowIpInterface\nfrom genie.libs.parser.iosxe.show_vrf import ShowVrf\n\noutputs = {}\noutputs['show ip arp'] = ArpOutput.ShowIpArp_all\noutputs['show ip arp vrf VRF1'] = ArpOutput.ShowIpArp_vrf1\ndef mapper(key):\n return outputs[key]\n\n\nclass test_arp(unittest.TestCase):\n\n def setUp(self):\n self.device = Device(name='aDevice')\n self.device.os = 'iosxe'\n self.device.mapping={}\n self.device.mapping['cli']='cli'\n # Give the device as a connection type\n # This is done in order to call the parser on the output provided\n self.device.connectionmgr.connections['cli'] = self.device\n\n def test_complete_output(self):\n self.maxDiff = None\n arp = Arp(device=self.device)\n\n # Get outputs\n arp.maker.outputs[ShowArp] = \\\n {\"\": ArpOutput.ShowArp}\n arp.maker.outputs[ShowVrf] = \\\n {\"\": ArpOutput.ShowVrf}\n arp.maker.outputs[ShowIpArp] = \\\n {\"{'vrf':'VRF1'}\": ArpOutput.ShowIpArp}\n arp.maker.outputs[ShowIpArpSummary] = \\\n {\"\": ArpOutput.ShowIpArpSummary}\n\n arp.maker.outputs[ShowIpTraffic] = \\\n {\"\": ArpOutput.ShowIpTraffic}\n\n arp.maker.outputs[ShowIpInterface] = \\\n {\"\": ArpOutput.ShowIpInterface}\n self.device.execute = Mock()\n self.device.execute.side_effect = mapper\n # Learn the feature\n arp.learn()\n\n # Verify Ops was created successfully\n self.assertDictEqual(arp.info, ArpOutput.Arp_info)\n\n # Check specific attribute values\n # info - interfaces\n self.assertEqual(arp.info['interfaces']['Vlan100']['ipv4']\\\n ['neighbors']['192.168.234.1']['ip'], '192.168.234.1')\n # info - statistics\n self.assertEqual(arp.info['statistics']['entries_total'], 8)\n\n def test_output_with_attribute(self):\n self.maxDiff = None\n arp = Arp(device=self.device,\n attributes=['info[statistics][(.*)]'])\n\n # Get outputs\n arp.maker.outputs[ShowArp] = \\\n {\"\": ArpOutput.ShowArp}\n arp.maker.outputs[ShowVrf] = \\\n {\"\": ArpOutput.ShowVrf}\n arp.maker.outputs[ShowIpArp] = 
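# --- Editor's sketch (illustrative, not part of the dataset) -------------
# The iosxe test above routes device.execute through a mapper so that each
# CLI command returns its canned output.  The mechanism is plain
# unittest.mock: side_effect is called with execute()'s arguments and its
# return value becomes the call's result.

from unittest.mock import Mock

outputs = {'show ip arp': 'canned output',
           'show ip arp vrf VRF1': 'canned vrf output'}
execute = Mock(side_effect=lambda cmd: outputs[cmd])

assert execute('show ip arp') == 'canned output'
assert execute('show ip arp vrf VRF1') == 'canned vrf output'
execute.assert_called_with('show ip arp vrf VRF1')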
\\\n {\"{'vrf':'VRF1'}\": ArpOutput.ShowIpArp}\n arp.maker.outputs[ShowIpArpSummary] = \\\n {\"\": ArpOutput.ShowIpArpSummary}\n\n arp.maker.outputs[ShowIpTraffic] = \\\n {\"\": ArpOutput.ShowIpTraffic}\n\n arp.maker.outputs[ShowIpInterface] = \\\n {\"\": ArpOutput.ShowIpInterface}\n\n # Learn the feature\n arp.learn()\n\n # Check no attribute not found\n with self.assertRaises(KeyError):\n arp.info['interfaces']\n\n # info - statistics\n self.assertEqual(arp.info['statistics'], ArpOutput.Arp_info['statistics'])\n\n def test_empty_output(self):\n self.maxDiff = None\n arp = Arp(device=self.device)\n\n # Get outputs\n arp.maker.outputs[ShowArp] = \\\n {\"\": {}}\n arp.maker.outputs[ShowVrf] = \\\n {\"\": {}}\n arp.maker.outputs[ShowIpArp] = \\\n {\"{'vrf':'VRF1'}\": {}}\n arp.maker.outputs[ShowIpArpSummary] = \\\n {\"\": {}}\n\n arp.maker.outputs[ShowIpTraffic] = \\\n {\"\": {}}\n\n arp.maker.outputs[ShowIpInterface] = \\\n {\"\": {}}\n outputs['show ip arp']=''\n self.device.execute = Mock()\n self.device.execute.side_effect = mapper\n # Learn the feature\n arp.learn()\n\n # Check no attribute not found\n with self.assertRaises(AttributeError):\n arp.info['statistics']\n outputs['show ip arp'] = ArpOutput.ShowIpArp_all\n\n def test_incomplete_output(self):\n self.maxDiff = None\n \n arp = Arp(device=self.device)\n\n # Get outputs\n arp.maker.outputs[ShowArp] = \\\n {\"\": ArpOutput.ShowArp}\n arp.maker.outputs[ShowVrf] = \\\n {\"\": ArpOutput.ShowVrf}\n arp.maker.outputs[ShowIpArp] = \\\n {\"{'vrf':'VRF1'}\": ArpOutput.ShowIpArp}\n arp.maker.outputs[ShowIpArpSummary] = \\\n {\"\": {}}\n\n arp.maker.outputs[ShowIpTraffic] = \\\n {\"\": {}}\n\n arp.maker.outputs[ShowIpInterface] = \\\n {\"\": ArpOutput.ShowIpInterface}\n self.device.execute = Mock()\n self.device.execute.side_effect = mapper\n # Learn the feature\n arp.learn()\n \n # Check no attribute not found\n with self.assertRaises(KeyError):\n arp.info['statistics']\n\nif __name__ == '__main__':\n unittest.main()", "id": "5300491", "language": "Python", "matching_score": 6.580142021179199, "max_stars_count": 0, "path": "pkgs/ops-pkg/src/genie/libs/ops/arp/iosxe/tests/test_arp.py" }, { "content": "# Python\nimport unittest\n\n# ATS\nfrom ats.topology import Device\n\n# Genie\nfrom genie.libs.ops.arp.ios.arp import Arp\nfrom genie.libs.ops.arp.ios.tests.arp_output import ArpOutput\n\n# Parser\nfrom genie.libs.parser.ios.show_arp import ShowIpArp, \\\n ShowIpArpSummary, \\\n ShowIpTraffic\n\nfrom genie.libs.parser.ios.show_interface import ShowIpInterface\nfrom genie.libs.parser.ios.show_vrf import ShowVrf\n\nclass test_arp(unittest.TestCase):\n\n def setUp(self):\n self.device = Device(name='aDevice')\n self.device.os = 'ios'\n self.device.mapping={}\n self.device.mapping['cli']='cli'\n # Give the device as a connection type\n # This is done in order to call the parser on the output provided\n self.device.connectionmgr.connections['cli'] = self.device\n\n def test_complete_output(self):\n self.maxDiff = None\n arp = Arp(device=self.device)\n\n # Get outputs\n arp.maker.outputs[ShowIpArp] = \\\n {\"\": ArpOutput.ShowIpArp,\n \"{'vrf':'VRF1'}\": ArpOutput.ShowIpArpVrf}\n arp.maker.outputs[ShowVrf] = \\\n {\"\": ArpOutput.ShowVrf}\n\n arp.maker.outputs[ShowIpArpSummary] = \\\n {\"\": ArpOutput.ShowIpArpSummary}\n\n arp.maker.outputs[ShowIpTraffic] = \\\n {\"\": ArpOutput.ShowIpTraffic}\n\n arp.maker.outputs[ShowIpInterface] = \\\n {\"\": ArpOutput.ShowIpInterface}\n\n # Learn the feature\n arp.learn()\n\n # Verify Ops was created 
successfully\n self.assertEqual(arp.info, ArpOutput.Arp_info)\n\n # Check specific attribute values\n # info - interfaces\n self.assertEqual(arp.info['interfaces']['Port-channel10']['ipv4']\\\n ['neighbors']['10.9.1.1']['ip'], '10.9.1.1')\n # info - statistics\n self.assertEqual(arp.info['statistics']['in_drops'], 0)\n\n def test_output_with_attribute(self):\n self.maxDiff = None\n arp = Arp(device=self.device,\n attributes=['info[statistics][(.*)]'])\n\n # Get outputs\n arp.maker.outputs[ShowIpArp] = \\\n {\"\": ArpOutput.ShowIpArp,\n \"{'vrf':'VRF1'}\": ArpOutput.ShowIpArpVrf}\n arp.maker.outputs[ShowVrf] = \\\n {\"\": ArpOutput.ShowVrf}\n arp.maker.outputs[ShowIpArpSummary] = \\\n {\"\": ArpOutput.ShowIpArpSummary}\n\n arp.maker.outputs[ShowIpTraffic] = \\\n {\"\": ArpOutput.ShowIpTraffic}\n\n arp.maker.outputs[ShowIpInterface] = \\\n {\"\": ArpOutput.ShowIpInterface}\n\n # Learn the feature\n arp.learn()\n\n # Check no attribute not found\n with self.assertRaises(KeyError):\n arp.info['interfaces']\n\n # info - statistics\n self.assertEqual(arp.info['statistics'], ArpOutput.Arp_info['statistics'])\n\n def test_empty_output(self):\n self.maxDiff = None\n arp = Arp(device=self.device)\n\n # Get outputs\n arp.maker.outputs[ShowIpArp] = \\\n {\"\": {},\n \"{'vrf':'VRF1'}\": {}}\n arp.maker.outputs[ShowVrf] = \\\n {\"\": {}}\n\n arp.maker.outputs[ShowIpArpSummary] = \\\n {\"\": {}}\n\n arp.maker.outputs[ShowIpTraffic] = \\\n {\"\": {}}\n\n arp.maker.outputs[ShowIpInterface] = \\\n {\"\": {}}\n\n # Learn the feature\n arp.learn()\n\n # Check no attribute not found\n with self.assertRaises(AttributeError):\n arp.info['statistics']\n\n def test_incomplete_output(self):\n self.maxDiff = None\n \n arp = Arp(device=self.device)\n\n # Get outputs\n arp.maker.outputs[ShowIpArp] = \\\n {\"\": ArpOutput.ShowIpArp,\n \"{'vrf':'VRF1'}\": ArpOutput.ShowIpArpVrf}\n arp.maker.outputs[ShowVrf] = \\\n {\"\": ArpOutput.ShowVrf}\n\n arp.maker.outputs[ShowIpArpSummary] = \\\n {\"\": {}}\n\n arp.maker.outputs[ShowIpTraffic] = \\\n {\"\": {}}\n\n arp.maker.outputs[ShowIpInterface] = \\\n {\"\": ArpOutput.ShowIpInterface}\n\n # Learn the feature\n arp.learn()\n \n # Check no attribute not found\n with self.assertRaises(KeyError):\n arp.info['statistics']\n\nif __name__ == '__main__':\n unittest.main()", "id": "11482315", "language": "Python", "matching_score": 0.7117509245872498, "max_stars_count": 0, "path": "pkgs/ops-pkg/src/genie/libs/ops/arp/ios/tests/test_arp.py" }, { "content": "\"\"\"Common get info functions for hardware\"\"\"\n\n# Python\nimport re\nimport logging\n\n# Genie\nfrom genie.metaparser.util.exceptions import SchemaEmptyParserError\nfrom genie.libs.sdk.libs.utils.normalize import GroupKeys\nfrom genie.utils.timeout import Timeout\n\n# Unicon\nfrom unicon.core.errors import SubCommandFailure\n\n# pyATS\nfrom ats.utils.objects import find, R\n\n\nlog = logging.getLogger(__name__)\n\n\ndef get_hardware_all_fans_speed(device):\n \"\"\" Get fan speed for all fans \n\n Args:\n device (`obj`): Device object\n Returns:\n fans (`list`): Fans info\n Raises:\n None\n \"\"\"\n fans = []\n p = re.compile(r\"Fan +Speed +(?P<speed>.*)%\")\n\n try:\n out = device.parse(\"show environment | include Fan\")\n except (SchemaEmptyParserError, SubCommandFailure) as e:\n return fans\n\n reqs = R(\n [\n \"slot\",\n \"(?P<slot>.*)\",\n \"sensor\",\n \"(?P<sensor>.*)\",\n \"state\",\n \"(?P<state>.*)\",\n ]\n )\n found = find([out], reqs, filter_=False, all_keys=True)\n if found:\n fans = GroupKeys.group_keys(\n 
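# --- Editor's sketch (illustrative, not part of the dataset) -------------
# get_hardware_all_fans_speed above compiles the named-group pattern
# r"Fan +Speed +(?P<speed>.*)%" and applies it to each fan's state string
# just below.  The extraction in isolation, on a representative state
# string (the sample text is illustrative, not captured from a device);
# a tighter (?P<speed>\d+) would also work since the value is cast to int:

import re

state = 'Fan Speed 65%'
match = re.compile(r'Fan +Speed +(?P<speed>.*)%').search(state)
assert match is not None
assert int(match.groupdict()['speed']) == 65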
reqs=reqs.args, ret_num={}, source=found, all_keys=True\n )\n\n for fan in fans:\n fan[\"speed\"] = int(p.search(fan[\"state\"]).groupdict()[\"speed\"])\n log.info(\n \"Found fan on {fan[slot]} with Speed {fan[speed]}%\".format(fan=fan)\n )\n return fans\n\n\ndef get_hardware_rp_slot(\n device, state=\"standby\", max_time=90, check_interval=30\n):\n \"\"\" Get RP slot from device\n\n Args:\n device (`obj`): Device object\n state (`str`): RP state\n max_time (`int`): max wait time \n check_interval (`int`): check interval \n Returns:\n result (`str`): RP slot in required state\n None\n Raises:\n None\n \"\"\"\n log.info(\n \"Finding {st} RP on device {dev}\".format(st=state, dev=device.name)\n )\n reqs = R(\n [\n \"slot\",\n \"(?P<slot>.*)\",\n \"(?P<type>.*)\",\n \"(?P<name>.*)\",\n \"state\",\n \"(?P<state>.*)\",\n ]\n )\n timeout = Timeout(max_time, check_interval)\n\n while timeout.iterate():\n try:\n out = device.parse(\"show platform\")\n except SchemaEmptyParserError:\n timeout.sleep()\n continue\n\n found = find([out], reqs, filter_=False, all_keys=True)\n keys = GroupKeys.group_keys(\n reqs=reqs.args, ret_num={}, source=found, all_keys=True\n )\n for key in keys:\n if \"R\" in key[\"slot\"] and state in key[\"state\"]:\n log.info(\n \"Found {st} RP {key[name]} on slot {key[slot]}\".format(\n st=state, key=key\n )\n )\n return key[\"slot\"]\n timeout.sleep()\n\n return None\n\n\ndef get_hardware_esp_slot(\n device, state=\"standby\", max_time=90, check_interval=30\n):\n \"\"\" Get ESP slot from device\n\n Args:\n device (`obj`): Device object\n state (`str`): ESP state\n max_time (`int`): max wait time \n check_interval (`int`): check interval \n Returns:\n result (`str`): ESP slot in required state\n None\n Raises:\n None\n \"\"\"\n log.info(\n \"Finding {st} ESP on device {dev}\".format(st=state, dev=device.name)\n )\n reqs = R(\n [\n \"slot\",\n \"(?P<slot>.*)\",\n \"(?P<type>.*)\",\n \"(?P<name>.*)\",\n \"state\",\n \"(?P<state>.*)\",\n ]\n )\n timeout = Timeout(max_time, check_interval)\n\n while timeout.iterate():\n try:\n out = device.parse(\"show platform\")\n except SchemaEmptyParserError:\n timeout.sleep()\n continue\n\n found = find([out], reqs, filter_=False, all_keys=True)\n keys = GroupKeys.group_keys(\n reqs=reqs.args, ret_num={}, source=found, all_keys=True\n )\n for key in keys:\n if \"F\" in key[\"slot\"] and state in key[\"state\"]:\n log.info(\n \"Found {st} ESP {key[name]} on slot {key[slot]}\".format(\n st=state, key=key\n )\n )\n return key[\"slot\"]\n timeout.sleep()\n\n return None\n\n\ndef get_hardware_slot_state(device, slot):\n \"\"\" Get slot state\n\n Args:\n device (`obj`): Device object\n slot (`str`): Slot\n Returns:\n state (`str`): Slot state\n None\n Raises:\n None\n \"\"\"\n log.info(\"Getting slot {} state on device {}\".format(slot, device.name))\n try:\n out = device.parse(\"show platform\")\n except SchemaEmptyParserError:\n return None\n\n reqs = R(\n [\n \"slot\",\n str(slot),\n \"(?P<type>.*)\",\n \"(?P<name>.*)\",\n \"state\",\n \"(?P<state>.*)\",\n ]\n )\n found = find([out], reqs, filter_=False, all_keys=True)\n if found:\n keys = GroupKeys.group_keys(\n reqs=reqs.args, ret_num={}, source=found, all_keys=True\n )\n return keys[0][\"state\"]\n else:\n return None\n\n\ndef get_hardware_inserted_sfp(device, prev_slots, sfp_descr, intf_type):\n \"\"\" Get newly inserted SFP\n\n Args:\n device (`obj`): Device object\n prev_slots (`dict`): Previous sfp slot dict\n sfp_descr (`str`): SFP descr\n intf_type (`str`): Interface type\n Returns:\n 
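# --- Editor's note (illustrative, not part of the dataset) ----------------
# get_hardware_rp_slot and get_hardware_esp_slot above share the same
# poll-until-found shape driven by genie's Timeout(max_time, check_interval).
# A dependency-free sketch of that loop, assuming a check() callable in
# place of the "parse and search" step:

import time

def poll(check, max_time=90, check_interval=30):
    """Return check()'s first truthy result within max_time, else None."""
    deadline = time.monotonic() + max_time
    while time.monotonic() < deadline:
        result = check()
        if result:
            return result
        time.sleep(check_interval)
    return None

# Usage: poll(lambda: find_slot_in_state('standby'), 90, 30)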
interface (`str`): Interface name, or None if no single new SFP is found\n    Raises:\n        None\n    \"\"\"\n    intf_sfp_dict = {}\n    curr_slots = get_hardware_sfp_slot_dict(device, sfp_descr)\n    for slot in curr_slots:\n        if slot not in prev_slots:\n            intf = intf_type + slot\n            intf_sfp_dict.update({intf: curr_slots[slot]})\n            log.info(\n                \"Found newly inserted SFP {} with interface {}\".format(\n                    curr_slots[slot], intf\n                )\n            )\n\n    if len(intf_sfp_dict) == 1:\n        return list(intf_sfp_dict.keys())[0]\n    elif len(intf_sfp_dict) == 0:\n        log.error(\n            \"Failed to detect newly inserted SFP on {}\".format(device.name)\n        )\n    else:\n        log.error(\"Found multiple inserted SFPs {}\".format(intf_sfp_dict))\n\n    return None\n\n\ndef get_hardware_sfp_slot_dict(device, sfp_descr=\".*\"):\n    \"\"\" Get SFP slot dict\n\n    Args:\n        device (`obj`): Device object\n        sfp_descr (`str`): SFP descr\n    Returns:\n        sfp_slot_dict (`dict`): SFP slot dict\n        example: {\n            '1/1/6':{'slot': '1',\n                     'subslot': '1 transceiver 6',\n                     'lc': 'ASR1000-SIP10',\n                     'pid': 'SFP-GE-S',\n                     'descr': 'GE SX'}}\n    Raises:\n        None\n    \"\"\"\n    log.info(\"Getting inventory on {}\".format(device.name))\n    keys = []\n    try:\n        out = device.parse(\"show inventory\")\n    except SchemaEmptyParserError:\n        # Keep the documented return type: an empty dict, not a list\n        return {}\n\n    reqs = R(\n        [\n            \"slot\",\n            \"(?P<slot>.*)\",\n            \"lc\",\n            \"(?P<lc>.*)\",\n            \"subslot\",\n            \"(?P<subslot>.*)\",\n            \"(?P<pid>.*)\",\n            \"descr\",\n            \"(?P<descr>\" + sfp_descr + \")\",\n        ]\n    )\n    found = find([out], reqs, filter_=False, all_keys=True)\n\n    if found:\n        keys = GroupKeys.group_keys(\n            reqs=reqs.args, ret_num={}, source=found, all_keys=True\n        )\n\n    sfp_slot_dict = {}\n    # Collapse '<slot> <word> <subslot>' (e.g. '1 transceiver 6') into '1/6'\n    p = re.compile(r\"(?<=\\d)( +\\w+ )(?=\\d)\")\n    for sfp in keys:\n        slot = sfp[\"slot\"] + \"/\" + re.sub(p, \"/\", sfp[\"subslot\"])\n        sfp_slot_dict.update({slot: sfp})\n\n    return sfp_slot_dict\n\n\ndef get_hardware_interface_sfp_descr(device, interface, sfp_slot_dict=None):\n    \"\"\" Get interface SFP descr\n\n    Args:\n        device (`obj`): Device object\n        interface (`str`): Interface name\n        sfp_slot_dict (`dict`): SFP slot dict, looked up when not provided\n        example: {\n            '1/1/6':{'slot': '1',\n                     'subslot': '1 transceiver 6',\n                     'lc': 'ASR1000-SIP10',\n                     'pid': 'SFP-GE-S',\n                     'descr': 'GE SX'}}\n    Returns:\n        descr (`str`): Interface SFP descr, or None if not found\n    Raises:\n        None\n    \"\"\"\n    if not sfp_slot_dict:\n        sfp_slot_dict = get_hardware_sfp_slot_dict(device)\n\n    if not sfp_slot_dict:\n        return None\n\n    # Strip the leading interface type, keeping the numeric slot portion\n    p = re.compile(r\"^.*?(?=\\d\\S*)\")\n    slot = re.sub(p, \"\", interface)\n    if slot in sfp_slot_dict:\n        return sfp_slot_dict[slot].get(\"descr\")\n    return None\n", "id": "2917930", "language": "Python", "matching_score": 3.893138885498047, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/hardware/get.py" }, { "content": "\"\"\"Common verify functions for hardware\"\"\"\n\n# Python\nimport re\nimport logging\n\n# Genie\nfrom genie.libs.sdk.libs.utils.normalize import GroupKeys\nfrom genie.metaparser.util.exceptions import SchemaEmptyParserError\nfrom genie.utils.timeout import Timeout\n\n# Ats\nfrom ats.utils.objects import find, R\n\n# HARDWARE\nfrom genie.libs.sdk.apis.iosxe.hardware.get import (\n    get_hardware_rp_slot,\n    get_hardware_esp_slot,\n)\n\nlog = logging.getLogger(__name__)\n\n\ndef verify_hardware_fan_speed_increase(curr_fans, prev_fans):\n    \"\"\" Verify fan speed increase\n\n    Args:\n        curr_fans (`list`): current fans\n        prev_fans (`list`): previous fans\n    Returns:\n        result(`bool`): verify result\n    Raises:\n        None\n    \"\"\"\n    result = True\n    for cf in curr_fans:\n        for pf in prev_fans:\n            if cf[\"slot\"] != pf[\"slot\"]:\n                continue\n            elif cf[\"speed\"] <= pf[\"speed\"]:\n                
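# --- Editor's sketch (illustrative, not part of the dataset) -------------
# get_hardware_sfp_slot_dict above turns an inventory subslot such as
# '1 transceiver 6' into a '1/1/6'-style key with a lookaround
# substitution.  The substitution in isolation:

import re

p = re.compile(r'(?<=\d)( +\w+ )(?=\d)')
assert re.sub(p, '/', '1 transceiver 6') == '1/6'
# prefixed with the chassis slot, as the function does: '1' + '/' + '1/6'
assert '1' + '/' + re.sub(p, '/', '1 transceiver 6') == '1/1/6'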
log.error(\n \"Fan on {pf[slot]} speed doesn't increase. \"\n \"Initial speed: {pf[speed]} Current speed: {cf[speed]}\".format(\n pf=pf, cf=cf\n )\n )\n result = False\n else:\n log.info(\n \"Fan on {pf[slot]} speed increases from \"\n \"{pf[speed]} to {cf[speed]}\".format(pf=pf, cf=cf)\n )\n\n return result\n\n\ndef verify_hardware_active_RP_changed(\n device, pre_act, max_time=300, check_interval=30\n):\n \"\"\" Verify active RP has changed\n\n Args:\n device (`obj`): Device object\n pre_act (`str`): previous active ESP\n max_time (`int`): max time\n check_interval (`int`): check interval\n Returns:\n result(`bool`): verify result\n curr_act (`str`): current active ESP\n Raises:\n None\n \"\"\"\n timeout = Timeout(max_time, check_interval)\n\n while timeout.iterate():\n curr_act = get_hardware_rp_slot(device, \"active\")\n if curr_act != pre_act:\n return True, curr_act\n timeout.sleep()\n\n curr_act = pre_act\n return False, curr_act\n\n\ndef verify_hardware_active_ESP_changed(\n device, pre_act, max_time=300, check_interval=30\n):\n \"\"\" Verify active ESP has changed\n\n Args:\n device (`obj`): Device object\n pre_act (`str`): previous active ESP\n max_time (`int`): max time\n check_interval (`int`): check interval\n Returns:\n result(`bool`): verify result\n curr_act (`str`): current active ESP\n \"\"\"\n timeout = Timeout(max_time, check_interval)\n\n while timeout.iterate():\n curr_act = get_hardware_esp_slot(device, \"active\")\n if curr_act != pre_act:\n return True, curr_act\n timeout.sleep()\n\n curr_act = pre_act\n return False, curr_act\n\n\ndef verify_hardware_slot_removed(\n device, slot, max_time=300, check_interval=30\n):\n \"\"\" Verify hardware slot has removed\n\n Args:\n device (`obj`): Device object\n slot (`str`): hardware slot\n max_time (`int`): max time\n check_interval (`int`): check interval\n Returns:\n result(`bool`): verify result\n Raises:\n None\n \"\"\"\n timeout = Timeout(max_time, check_interval)\n\n while timeout.iterate():\n try:\n out = device.parse(\"show platform\")\n except SchemaEmptyParserError:\n timeout.sleep()\n continue\n\n if slot not in out[\"slot\"]:\n return True\n timeout.sleep()\n\n return False\n\n\ndef verify_hardware_slot_exist(device, slot, max_time=300, check_interval=30):\n \"\"\" Verify hardware slot exists\n\n Args:\n device (`obj`): Device object\n slot (`str`): hardware slot\n max_time (`int`): max time\n check_interval (`int`): check interval\n Returns:\n result(`bool`): verify result\n \"\"\"\n timeout = Timeout(max_time, check_interval)\n\n while timeout.iterate():\n try:\n out = device.parse(\"show platform\")\n except SchemaEmptyParserError:\n timeout.sleep()\n continue\n\n if slot in out[\"slot\"]:\n return True\n timeout.sleep()\n\n return False\n\n\ndef verify_hardware_spa_removed(device, spa, max_time=300, check_interval=30):\n \"\"\" Verify spa has removed\n\n Args:\n device (`obj`): Device object\n spa (`str`): spa slot\n max_time (`int`): max time\n check_interval (`int`): check interval\n Returns:\n result(`bool`): verify result\n Raises:\n None\n \"\"\"\n slots = spa.split(\"/\")\n reqs = R(\n [\n \"slot\",\n slots[0],\n \"(?P<type>.*)\",\n \"(?P<name>.*)\",\n \"subslot\",\n slots[1],\n \"(?P<sub_dict>.*)\",\n ]\n )\n\n timeout = Timeout(max_time, check_interval)\n while timeout.iterate():\n try:\n out = device.parse(\"show platform\")\n except SchemaEmptyParserError:\n timeout.sleep()\n continue\n\n found = find([out], reqs, filter_=False, all_keys=True)\n if not found:\n return True\n timeout.sleep()\n\n 
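# --- Editor's sketch (illustrative, not part of the dataset) -------------
# verify_hardware_fan_speed_increase above pairs current and previous fans
# by slot and fails when any matched fan's speed did not go up.  The same
# comparison reduced to a pure function over the fan dicts returned by the
# get API:

def fan_speeds_increased(curr_fans, prev_fans):
    """True when every fan present in both snapshots strictly sped up."""
    prev_by_slot = {f['slot']: f['speed'] for f in prev_fans}
    return all(f['speed'] > prev_by_slot[f['slot']]
               for f in curr_fans if f['slot'] in prev_by_slot)

assert fan_speeds_increased([{'slot': 'P0', 'speed': 70}],
                            [{'slot': 'P0', 'speed': 65}])
assert not fan_speeds_increased([{'slot': 'P0', 'speed': 60}],
                                [{'slot': 'P0', 'speed': 65}])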
return False\n\n\ndef verify_hardware_spa_exist(device, spa, max_time=300, check_interval=30):\n    \"\"\" Verify spa exists\n\n    Args:\n        device (`obj`): Device object\n        spa (`str`): spa slot\n        max_time (`int`): max time\n        check_interval (`int`): check interval\n    Returns:\n        result(`bool`): verify result\n    Raises:\n        None\n    \"\"\"\n    slots = spa.split(\"/\")\n    reqs = R(\n        [\n            \"slot\",\n            slots[0],\n            \"(?P<type>.*)\",\n            \"(?P<name>.*)\",\n            \"subslot\",\n            slots[1],\n            \"(?P<sub_dict>.*)\",\n        ]\n    )\n\n    timeout = Timeout(max_time, check_interval)\n    while timeout.iterate():\n        try:\n            out = device.parse(\"show platform\")\n        except SchemaEmptyParserError:\n            timeout.sleep()\n            continue\n\n        found = find([out], reqs, filter_=False, all_keys=True)\n        if found:\n            return True\n        timeout.sleep()\n\n    return False\n\n\ndef verify_hardware_redundancy_states(\n    device,\n    oper_state=\"sso\",\n    peer_state=\"STANDBY HOT\",\n    manual_swact=\"enabled\",\n    max_time=600,\n    check_interval=30,\n):\n    \"\"\" Verify that the redundancy operational state is sso,\n        manual swact is enabled and the peer state is STANDBY HOT\n\n    Args:\n        device (`obj`): Device object\n        oper_state (`str`): Expected operational state\n        peer_state (`str`): Expected peer state\n        manual_swact (`str`): Expected manual swact state\n        max_time (`int`): Max time\n        check_interval (`int`): Check interval\n    Returns:\n        result (`bool`): verified result\n    \"\"\"\n    timeout = Timeout(max_time, check_interval)\n\n    while timeout.iterate():\n        try:\n            out = device.parse(\"show redundancy states\")\n        except SchemaEmptyParserError:\n            timeout.sleep()\n            continue\n\n        try:\n            oper_state_v = out[\"redundancy_mode_operational\"]\n            manual_swact_v = out[\"manual_swact\"]\n            peer_state_v = out[\"peer_state\"]\n        except KeyError:\n            log.info(\n                \"Failed to get redundancy states on {}\".format(device.name)\n            )\n            # Sleep before retrying so the loop does not spin between polls\n            timeout.sleep()\n            continue\n\n        log.info(\"Redundancy operational state is {}\".format(oper_state_v))\n        log.info(\"Manual swact is {}\".format(manual_swact_v))\n        log.info(\"Peer state is {}\".format(peer_state_v))\n\n        if (\n            oper_state_v.lower() == oper_state.lower()\n            and manual_swact_v.lower() == manual_swact.lower()\n            and peer_state.lower() in peer_state_v.lower()\n        ):\n            return True\n        timeout.sleep()\n\n    return False\n", "id": "8687249", "language": "Python", "matching_score": 3.476785659790039, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/hardware/verify.py" }, { "content": "'''Common verify functions for BGP'''\r\n# Python\r\nimport logging\r\n\r\nfrom genie.utils.timeout import Timeout\r\nfrom pyats.utils.objects import find, R\r\nfrom genie.libs.sdk.libs.utils.normalize import GroupKeys\r\n\r\nlog = logging.getLogger(__name__)\r\n\r\n\r\ndef verify_bgp_l2vpn_evpn_neighbor_in_state(device, neighbor, state='established',\r\n                                            max_time=60, check_interval=20):\r\n    ''' Verify BGP l2vpn evpn neighbor state\r\n\r\n        Args:\r\n            device (`obj`): Device object\r\n            neighbor (`str`): Neighbor IP\r\n            state (`str`): Expected state\r\n            max_time (`int`): Max time\r\n            check_interval (`int`): Check interval\r\n        Returns:\r\n            result (`bool`): Verified result\r\n    '''\r\n    cmd = 'show bgp l2vpn evpn neighbors {}'.format(neighbor)\r\n    timeout = Timeout(max_time, check_interval)\r\n\r\n    while timeout.iterate():\r\n        try:\r\n            out = device.parse(cmd)\r\n        except Exception as e:\r\n            log.error(\"Failed to parse '{}':\\n{}\".format(cmd, e))\r\n            timeout.sleep()\r\n            continue\r\n\r\n        reqs = R(['instance', '(.*)',\r\n                  'vrf', '(.*)',\r\n                  'neighbor', neighbor,\r\n                  'session_state', '(?P<state>.*)'])\r\n        found = find([out], reqs, filter_=False, all_keys=True)\r\n        if found:\r\n            session_state = found[0][0].lower()\r\n        else:\r\n            log.error(\"Failed to get 
neighbor {} BGP state\".format(neighbor))\r\n timeout.sleep()\r\n continue\r\n\r\n log.info(\"Neighbor {} BGP state is {}, expected value is {}\"\r\n .format(neighbor, session_state, state))\r\n\r\n if session_state == state.lower():\r\n return True\r\n \r\n timeout.sleep()\r\n\r\n return False\r\n", "id": "5819117", "language": "Python", "matching_score": 4.878802299499512, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxr/bgp/verify.py" }, { "content": "'''Common verify functions for isis'''\r\n# Python\r\nimport logging\r\n\r\nfrom genie.utils.timeout import Timeout\r\nfrom pyats.utils.objects import find, R\r\nfrom genie.libs.sdk.libs.utils.normalize import GroupKeys\r\n\r\nlog = logging.getLogger(__name__)\r\n\r\n\r\ndef verify_isis_neighbor_in_state(device, interfaces, state='up', \r\n max_time=60, check_interval=20):\r\n ''' Verify ISIS neighbor state\r\n\r\n Args:\r\n device (`obj`): Device object\r\n interfaces (`list`): ISIS neighbor interfaces\r\n state (`str`): Expected state\r\n max_time (`int`): Max time\r\n check_interval (`int`): Check interval\r\n Returns:\r\n result (`bool`): Verified result\r\n '''\r\n cmd = 'show isis neighbors'\r\n timeout = Timeout(max_time, check_interval)\r\n\r\n while timeout.iterate():\r\n try:\r\n out = device.parse(cmd)\r\n except Exception as e:\r\n log.error(\"Failed to parse '{}':\\n{}\".format(cmd, e))\r\n timeout.sleep()\r\n continue\r\n\r\n result = True\r\n intfs = '|'.join(interfaces)\r\n reqs = R(['isis', '(.*)', \r\n 'vrf', '(.*)', \r\n 'interfaces', '(?P<interface>' + intfs + ')', \r\n 'neighbors', '(?P<neighbor>.*)', \r\n 'state', '(?P<state>.*)'])\r\n\r\n found = find([out], reqs, filter_=False, all_keys=True)\r\n if found and len(found) == len(interfaces):\r\n keys = GroupKeys.group_keys(reqs=reqs.args, ret_num={}, source=found, all_keys=True)\r\n else:\r\n log.error(\"Failed to find required ISIS neighbor interface: {}\".format(interfaces))\r\n timeout.sleep()\r\n continue\r\n\r\n for intf_dict in keys:\r\n log.info(\"Interface {} status is {}, expected value is {}\"\r\n .format(intf_dict['interface'], intf_dict['state'].lower(), state))\r\n if intf_dict['state'].lower() != state.lower():\r\n result = False\r\n\r\n if result:\r\n return True\r\n\r\n timeout.sleep()\r\n\r\n return False\r\n\r\n\r\ndef verify_no_isis_neighbor(device):\r\n ''' Verify ISIS neighbors not found\r\n\r\n Args:\r\n device (`obj`): Device object\r\n Returns:\r\n result (`bool`): Verified result\r\n '''\r\n cmd = 'show isis neighbors'\r\n\r\n try:\r\n out = device.parse(cmd)\r\n except Exception as e:\r\n log.error(\"Failed to parse '{}':\\n{}\".format(cmd, e))\r\n return False\r\n\r\n reqs = R(['isis', '(.*)', \r\n 'vrf', '(.*)', '(?P<interface>.*)'])\r\n\r\n found = find([out], reqs, filter_=False, all_keys=True)\r\n if found and not found[0][0]:\r\n return True\r\n\r\n return False\r\n", "id": "9363187", "language": "Python", "matching_score": 1.1235301494598389, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxr/isis/verify.py" }, { "content": "# Genie package\nfrom genie.ops.base import Base\nfrom genie.utils.config import Config\n\n# genie.libs\nfrom genie.libs.parser.iosxr.show_interface import ShowIpv6VrfAllInterface\nfrom genie.libs.parser.iosxr.show_ipv6 import ShowIpv6NeighborsDetail, ShowIpv6Neighbors\n\n\nclass Nd(Base):\n '''Nd Ops Object'''\n\n def learn(self, vrf='all', interface=''):\n '''Learn Nd object'''\n\n # new Nd structure\n # Place holder to make it more readable\n # interfaces\n # 
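# --- Editor's note (illustrative, not part of the dataset) ----------------
# verify_isis_neighbor_in_state above builds its interface key as
# '(?P<interface>' + '|'.join(interfaces) + ')'.  Interface names contain
# '.' and '/', so where exact matching matters the alternation is safer
# with re.escape applied per name -- a hedged refinement, not what the
# library code does:

import re

interfaces = ['GigabitEthernet0/0/0/0.390', 'GigabitEthernet0/0/0/1.390']
pattern = re.compile('(?P<interface>' + '|'.join(map(re.escape, interfaces)) + ')$')
assert pattern.match('GigabitEthernet0/0/0/0.390')
# An unescaped '.' would also match this name; the escaped form rejects it
assert not pattern.match('GigabitEthernet0/0/0/0x390')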
interface\n # router_advertisement\n # interval\n # lifetime\n # suppress\n # neighbors\n # neighbor\n # ip\n # link_layer_address\n # neighbor_state\n # age\n # origin\n # is_router N/A\n\n if interface:\n src_nd = '[interfaces][{}]'.format(interface)\n else:\n src_nd = '[interfaces][(?P<interface>.*)]'\n\n dest_nd = 'info' + src_nd\n \n self.add_leaf(cmd=ShowIpv6Neighbors,\n src=src_nd + '[interface]',\n dest=dest_nd + '[interface]',\n interface=interface, vrf=vrf)\n self.make()\n\n if interface:\n src_rd = '[{}][ipv6]'.format(interface)\n else:\n src_rd = '[(?P<interface>.*)][ipv6]'\n \n dest_rd = dest_nd +'[router_advertisement]'\n req_dict = {'nd_adv_duration':'interval', \n 'nd_router_adv':'lifetime'}\n\n for src, dest in req_dict.items():\n self.add_leaf(cmd=ShowIpv6VrfAllInterface,\n src=src_rd + '[{}]'.format(src),\n dest=dest_rd + '[{}]'.format(dest),\n interface=interface, vrf=vrf)\n\n src_nd_neighbor = src_nd +'[neighbors][(?P<neighbor>.*)]'\n dest_nd_neighbor = 'info' + src_nd_neighbor\n req_key =['ip','link_layer_address','neighbor_state','age','origin']\n for key in req_key:\n self.add_leaf(cmd=ShowIpv6NeighborsDetail,\n src=src_nd_neighbor + '[{}]'.format(key),\n dest=dest_nd_neighbor + '[{}]'.format(key))\n self.make()\n\n # Get nd suppress by executing 'show running-config interface'\n if interface:\n show_run_cmd = 'show running-config interface {}'.format(interface)\n else:\n show_run_cmd = 'show running-config interface'\n\n show_run = self.device.execute(show_run_cmd)\n cfg = Config(show_run)\n cfg.tree()\n config_dict = cfg.config\n\n if config_dict and hasattr(self, 'info'):\n for intf, intf_dict in self.info['interfaces'].items():\n key = 'interface {}'.format(intf)\n if key in config_dict and 'ipv6 nd suppress-ra' in config_dict[key]:\n intf_dict.setdefault('router_advertisement', {}).update({'suppress': True})\n\n self.make(final_call=True)\n", "id": "1519649", "language": "Python", "matching_score": 6.282526969909668, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/nd/iosxr/nd.py" }, { "content": "# Genie package\nfrom genie.ops.base import Base\n\n# genie.libs\nfrom genie.libs.parser.iosxe.show_ipv6 import ShowIpv6Neighbors\nfrom genie.libs.parser.iosxe.show_interface import ShowIpv6Interface\n\n\nclass Nd(Base):\n '''Nd Ops Object'''\n\n def learn(self, vrf='', interface=''):\n '''Learn Nd object'''\n\n # new Nd structure\n # Place holder to make it more readable\n # interface\n # interface\n # router_advertisement\n # interval\n # lifetime\n # suppress\n # neighbors\n # neighbor\n # ip\n # link_layer_address\n # neighbor_state\n # age\n # origin N/A\n # is_router N/A\n\n src_nd = '[interface][(?P<interface>.*)]'\n dest_nd = 'info' + src_nd\n self.add_leaf(cmd=ShowIpv6Neighbors,\n src=src_nd + '[interface]',\n dest=dest_nd + '[interface]',\n vrf=vrf, interface=interface)\n self.make()\n\n src_rd = '[(?P<interface>.*)][ipv6][nd]'\n dest_rd = dest_nd +'[router_advertisement]'\n req_dict = {'router_advertisements_interval':'interval', \n 'router_advertisements_live':'lifetime',\n 'suppress':'suppress'}\n\n for src, dest in req_dict.items():\n self.add_leaf(cmd=ShowIpv6Interface,\n src=src_rd + '[{}]'.format(src),\n dest=dest_rd + '[{}]'.format(dest),\n interface=interface)\n\n src_nd_neighbor = src_nd +'[neighbors][(?P<neighbor>.*)]'\n dest_nd_neighbor = 'info' + src_nd_neighbor\n req_key =['ip','link_layer_address','neighbor_state','age']\n for key in req_key:\n self.add_leaf(cmd=ShowIpv6Neighbors,\n src=src_nd_neighbor + '[{}]'.format(key),\n 
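# --- Editor's sketch (illustrative, not part of the dataset) -------------
# The iosxr Nd ops class above derives 'suppress' by walking the running
# config tree for 'ipv6 nd suppress-ra' under each interface.  A minimal
# stand-in parser over raw config text (the sample config is illustrative):

def interfaces_with_suppress_ra(config_text):
    """Return interface names whose config block sets 'ipv6 nd suppress-ra'."""
    found, current = set(), None
    for line in config_text.splitlines():
        if line.startswith('interface '):
            current = line.split(None, 1)[1]
        elif current and line.strip() == 'ipv6 nd suppress-ra':
            found.add(current)
    return found

cfg = ('interface GigabitEthernet0/0/0/0\n ipv6 nd suppress-ra\n!\n'
       'interface GigabitEthernet0/0/0/1\n!')
assert interfaces_with_suppress_ra(cfg) == {'GigabitEthernet0/0/0/0'}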
dest=dest_nd_neighbor + '[{}]'.format(key),\n vrf=vrf, interface=interface)\n \n self.make(final_call=True)\n", "id": "3969579", "language": "Python", "matching_score": 5.149899482727051, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/nd/iosxe/nd.py" }, { "content": "# Genie package\nfrom genie.ops.base import Base\n\n# genie.libs\nfrom genie.libs.parser.nxos.show_nd import ShowIpv6NeighborDetail,\\\n ShowIpv6NdInterface,\\\n ShowIpv6IcmpNeighborDetail,\\\n ShowIpv6Routers\n\nclass Nd(Base):\n '''Nd Ops Object'''\n\n def learn(self, vrf='all', interface=''):\n '''Learn Nd object'''\n\n # new Nd structure\n # Place holder to make it more readable\n # interface\n # interface\n # router_advertisement\n # interval\n # lifetime\n # suppress\n # neighbors\n # neighbor\n # ip\n # link_layer_address\n # origin\n # is_router\n # neighbor_state\n # age\n\n if interface:\n src_nd = '[interfaces][{}]'.format(interface)\n else:\n src_nd = '[interfaces][(?P<interface>.*)]'\n dest_nd = 'info' + src_nd\n self.add_leaf(cmd=ShowIpv6NeighborDetail,\n src=src_nd + '[interface]',\n dest=dest_nd + '[interface]',\n vrf=vrf)\n\n src_nd_router_advertisment = '[vrf][(?P<vrf>.*)]'+ src_nd +'[router_advertisement]'\n dest_nd_router_advertisment = dest_nd +'[router_advertisement]'\n req_key = ['interval','lifetime','suppress']\n for key in req_key:\n self.add_leaf(cmd=ShowIpv6NdInterface,\n src=src_nd_router_advertisment + '[{}]'.format(key),\n dest=dest_nd_router_advertisment + '[{}]'.format(key),\n vrf=vrf, interface=interface)\n\n src_nd_neighbor = src_nd +'[neighbors][(?P<neighbor>.*)]'\n dest_nd_neighbor = 'info' + src_nd_neighbor\n req_key =['ip','link_layer_address','origin','age']\n for key in req_key:\n self.add_leaf(cmd=ShowIpv6NeighborDetail,\n src=src_nd_neighbor + '[{}]'.format(key),\n dest=dest_nd_neighbor + '[{}]'.format(key),\n vrf=vrf)\n\n self.add_leaf(cmd=ShowIpv6Routers,\n src=src_nd_neighbor + '[is_router]',\n dest=dest_nd_neighbor + '[is_router]',\n vrf=vrf)\n\n self.add_leaf(cmd=ShowIpv6IcmpNeighborDetail,\n src=src_nd_neighbor + '[neighbor_state]',\n dest=dest_nd_neighbor + '[neighbor_state]',\n vrf=vrf, interface=interface)\n self.make(final_call=True)", "id": "1809676", "language": "Python", "matching_score": 2.979865550994873, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/nd/nxos/nd.py" }, { "content": "# Python\nimport unittest\nfrom unittest.mock import Mock\n\n# pyats\nfrom ats.topology import Device\n\n# genie.libs\nfrom genie.libs.ops.nd.nxos.nd import Nd\nfrom genie.libs.ops.nd.nxos.tests.nd_output import NdOutput\n\nfrom genie.libs.parser.nxos.show_nd import (ShowIpv6NeighborDetail,\n ShowIpv6NdInterface,\n ShowIpv6IcmpNeighborDetail,\n ShowIpv6Routers)\n\nclass test_nd_all(unittest.TestCase):\n\n def setUp(self):\n self.device = Device(name='aDevice')\n self.device.os = 'nxos'\n self.device.mapping={}\n self.device.mapping['cli']='cli'\n self.device.connectionmgr.connections['cli'] = self.device\n \n def test_full_nd(self):\n nd = Nd(device=self.device)\n nd.maker.outputs[ShowIpv6NeighborDetail] = {\"{'vrf':'all'}\": NdOutput.showIpv6NeighborDetail}\n nd.maker.outputs[ShowIpv6NdInterface] = {\"{'interface':'','vrf':'all'}\": NdOutput.showIpv6NdInterface}\n nd.maker.outputs[ShowIpv6IcmpNeighborDetail] = {\"{'interface':'','vrf':'all'}\": NdOutput.showIpv6IcmpNeighborDetail}\n nd.maker.outputs[ShowIpv6Routers] = {\"{'vrf':'all'}\": NdOutput.showIpv6Routers}\n # Learn the feature\n nd.learn()\n\n self.maxDiff = None\n self.assertEqual(nd.info, 
NdOutput.ndOpsOutput)\n\n def test_custom_nd(self):\n nd = Nd(device=self.device)\n nd.maker.outputs[ShowIpv6NeighborDetail] = {\"{'vrf':'VRF1'}\": NdOutput.showIpv6NeighborDetail_custom}\n nd.maker.outputs[ShowIpv6NdInterface] = {\"{'interface':'Ethernet1/2.420','vrf':'VRF1'}\": NdOutput.showIpv6NdInterface_custom}\n nd.maker.outputs[ShowIpv6IcmpNeighborDetail] = {\"{'interface':'Ethernet1/2.420','vrf':'VRF1'}\": NdOutput.showIpv6IcmpNeighborDetail_custom}\n nd.maker.outputs[ShowIpv6Routers] = {\"{'vrf':'VRF1'}\": NdOutput.showIpv6Routers_custom}\n\n # Learn the feature\n nd.learn(vrf='VRF1', interface='Ethernet1/2.420')\n\n self.maxDiff = None\n self.assertEqual(nd.info, NdOutput.ndOpsOutput_custom)\n\n def test_selective_attribute_nd(self):\n nd = Nd(device=self.device)\n nd.maker.outputs[ShowIpv6NeighborDetail] = {\"{'vrf':'all'}\": NdOutput.showIpv6NeighborDetail}\n nd.maker.outputs[ShowIpv6NdInterface] = {\"{'interface':'','vrf':'all'}\": NdOutput.showIpv6NdInterface}\n nd.maker.outputs[ShowIpv6IcmpNeighborDetail] = {\"{'interface':'','vrf':'all'}\": NdOutput.showIpv6IcmpNeighborDetail}\n nd.maker.outputs[ShowIpv6Routers] = {\"{'vrf':'all'}\": NdOutput.showIpv6Routers}\n # Learn the feature\n nd.learn()\n # Check match\n\n self.assertEqual('other', nd.info['interfaces']['Ethernet1/1']['neighbors']['2001:db8:c56d:4::2']['origin'])\n # Check does not match\n self.assertNotEqual('static', nd.info['interfaces']['Ethernet1/1']['neighbors']['2001:db8:c56d:4::2']['origin'])\n\n\n def test_missing_attributes_nd(self):\n nd = Nd(device=self.device)\n nd.maker.outputs[ShowIpv6NeighborDetail] = {\"{'vrf':'all'}\": NdOutput.showIpv6NeighborDetail}\n nd.maker.outputs[ShowIpv6NdInterface] = {\"{'interface':'','vrf':'all'}\": NdOutput.showIpv6NdInterface}\n nd.maker.outputs[ShowIpv6IcmpNeighborDetail] = {\"{'interface':'','vrf':'all'}\": NdOutput.showIpv6IcmpNeighborDetail}\n nd.maker.outputs[ShowIpv6Routers] = {\"{'vrf':'all'}\": NdOutput.showIpv6Routers}\n\n # Learn the feature\n nd.learn()\n\n with self.assertRaises(KeyError):\n interfaces = nd.info['interfaces']['Etherenet1/1']['neighbors']['2001:db8:c56d:4::2']\n\n def test_empty_output_nd(self):\n self.maxDiff = None\n nd = Nd(device=self.device)\n\n # Get outputs\n nd.maker.outputs[ShowIpv6NeighborDetail] = {\"{'vrf':'all'}\": {}}\n nd.maker.outputs[ShowIpv6NdInterface] = {\"{'interface':'','vrf':'all'}\": {}}\n nd.maker.outputs[ShowIpv6IcmpNeighborDetail] = {\"{'interface':'','vrf':'all'}\": {}}\n nd.maker.outputs[ShowIpv6Routers] = {\"{'vrf':'all'}\": {}}\n\n # Learn the feature\n nd.learn()\n\n # Check no attribute not found\n with self.assertRaises(AttributeError):\n nd.info['interfaces']\n\n\nif __name__ == '__main__':\n unittest.main()\n", "id": "11539142", "language": "Python", "matching_score": 4.638514518737793, "max_stars_count": 0, "path": "pkgs/ops-pkg/src/genie/libs/ops/nd/nxos/tests/test_nd.py" }, { "content": "# Python\nimport unittest\nfrom unittest.mock import Mock\n\n# Pyats\nfrom ats.topology import Device\n\n# genie.libs\nfrom genie.libs.ops.nd.iosxr.nd import Nd\nfrom genie.libs.ops.nd.iosxr.tests.nd_output import NdOutput\nfrom genie.libs.parser.iosxr.show_interface import ShowIpv6VrfAllInterface\nfrom genie.libs.parser.iosxr.show_ipv6 import ShowIpv6NeighborsDetail, ShowIpv6Neighbors\n\noutputs = {}\n\n# Set values\noutputs['show running-config interface'] = NdOutput.ShowRunInterface\noutputs['show running-config interface GigabitEthernet0/0/0/0.390'] = NdOutput.ShowRunInterface_custom\n\n\ndef mapper(key):\n return 
outputs[key]\n\nclass test_nd(unittest.TestCase):\n\n def setUp(self):\n self.device = Device(name='aDevice')\n self.device.os = 'iosxr'\n self.device.mapping={}\n self.device.mapping['cli']='cli'\n self.device.connectionmgr.connections['cli'] = self.device\n self.device.execute = Mock()\n self.device.execute.side_effect = mapper\n\n def test_full_nd(self):\n self.maxDiff = None\n nd = Nd(device=self.device)\n nd.maker.outputs[ShowIpv6VrfAllInterface] = {\"{'interface':'','vrf':'all'}\": NdOutput.ShowIpv6VrfAllInterface}\n nd.maker.outputs[ShowIpv6Neighbors] = {\"{'interface':'','vrf':'all'}\": NdOutput.ShowIpv6Neighbors}\n nd.maker.outputs[ShowIpv6NeighborsDetail] = {\"\": NdOutput.ShowIpv6NeighborsDetail}\n\n # Learn the feature\n nd.learn()\n self.assertEqual(nd.info, NdOutput.ndOpsOutput)\n\n def test_custom_nd(self):\n self.maxDiff = None\n nd = Nd(device=self.device)\n nd.maker.outputs[ShowIpv6VrfAllInterface] = {\"{'interface':'GigabitEthernet0/0/0/0.390','vrf':'VRF1'}\": NdOutput.ShowIpv6VrfAllInterface}\n nd.maker.outputs[ShowIpv6Neighbors] = {\"{'interface':'GigabitEthernet0/0/0/0.390','vrf':'VRF1'}\": NdOutput.ShowIpv6Neighbors}\n nd.maker.outputs[ShowIpv6NeighborsDetail] = {\"\": NdOutput.ShowIpv6NeighborsDetail}\n\n # Learn the feature\n nd.learn(interface='GigabitEthernet0/0/0/0.390', vrf='VRF1')\n self.assertEqual(nd.info, NdOutput.ndOpsOutput_custom)\n\n def test_selective_attribute_nd(self):\n nd = Nd(device=self.device)\n nd.maker.outputs[ShowIpv6VrfAllInterface] = {\"{'interface':'','vrf':'all'}\": NdOutput.ShowIpv6VrfAllInterface}\n nd.maker.outputs[ShowIpv6Neighbors] = {\"{'interface':'','vrf':'all'}\": NdOutput.ShowIpv6Neighbors}\n nd.maker.outputs[ShowIpv6NeighborsDetail] = {\"\": NdOutput.ShowIpv6NeighborsDetail}\n\n # Learn the feature\n nd.learn()\n\n # Check match\n self.assertEqual('1800', nd.info['interfaces']['GigabitEthernet0/0/0/1.390']['router_advertisement']['lifetime'])\n # Check does not match\n self.assertNotEqual('other', nd.info['interfaces']['GigabitEthernet0/0/0/1.390']['neighbors']['fe80::5c00:40ff:fe02:7']['origin'])\n\n def test_missing_attributes_nd(self):\n nd = Nd(device=self.device)\n nd.maker.outputs[ShowIpv6VrfAllInterface] = {\"{'interface':'','vrf':'all'}\": {}}\n nd.maker.outputs[ShowIpv6Neighbors] = {\"{'interface':'','vrf':'all'}\": NdOutput.ShowIpv6Neighbors}\n nd.maker.outputs[ShowIpv6NeighborsDetail] = {\"\": NdOutput.ShowIpv6NeighborsDetail}\n\n # Learn the feature\n nd.learn()\n\n with self.assertRaises(KeyError):\n nd.info['interfaces']['GigabitEthernet0/0/0/1.420']['router_advertisement']\n\n def test_empty_output_nd(self):\n self.maxDiff = None\n nd = Nd(device=self.device)\n\n # Get outputs\n nd.maker.outputs[ShowIpv6VrfAllInterface] = {\"{'interface':'','vrf':'all'}\": {}}\n nd.maker.outputs[ShowIpv6Neighbors] = {\"{'interface':'','vrf':'all'}\": {}}\n nd.maker.outputs[ShowIpv6NeighborsDetail] = {\"\": {}}\n\n # Learn the feature\n nd.learn()\n\n # Check attribute not found\n with self.assertRaises(AttributeError):\n nd.info['interfaces']\n\n\nif __name__ == '__main__':\n unittest.main()\n", "id": "5119802", "language": "Python", "matching_score": 5.743770599365234, "max_stars_count": 0, "path": "pkgs/ops-pkg/src/genie/libs/ops/nd/iosxr/tests/test_nd.py" }, { "content": "# Python\nimport unittest\nfrom unittest.mock import Mock\n\n# Pyats\nfrom pyats.topology import Device\n\n# genie.libs\nfrom genie.libs.ops.nd.iosxe.nd import Nd\nfrom genie.libs.ops.nd.iosxe.tests.nd_output import NdOutput\nfrom 
genie.libs.parser.iosxe.show_ipv6 import ShowIpv6Neighbors\nfrom genie.libs.parser.iosxe.show_interface import ShowIpv6Interface\n\n\nclass test_nd(unittest.TestCase):\n\n def setUp(self):\n self.device = Device(name='aDevice')\n self.device.os = 'iosxe'\n self.device.mapping={}\n self.device.mapping['cli']='cli'\n self.device.connectionmgr.connections['cli'] = self.device\n\n def test_full_nd(self):\n self.maxDiff = None\n nd = Nd(device=self.device)\n nd.maker.outputs[ShowIpv6Neighbors] = {\"{'interface':'','vrf':''}\": NdOutput.ShowIpv6Neighbors}\n nd.maker.outputs[ShowIpv6Interface] = {\"{'interface':''}\": NdOutput.ShowIpv6Interface}\n\n # Learn the feature\n nd.learn()\n \n self.assertEqual(nd.info, NdOutput.ndOpsOutput)\n\n def test_custom_nd(self):\n self.maxDiff = None\n nd = Nd(device=self.device)\n nd.maker.outputs[ShowIpv6Neighbors] = {\"{'interface':'GigabitEthernet2.90','vrf':''}\": NdOutput.ShowIpv6Neighbors}\n nd.maker.outputs[ShowIpv6Interface] = {\"{'interface':'GigabitEthernet2.90'}\": NdOutput.ShowIpv6Interface}\n\n # Learn the feature\n nd.learn(interface='GigabitEthernet2.90', vrf='')\n\n self.assertEqual(nd.info, NdOutput.ndOpsOutput)\n\n def test_selective_attribute_nd(self):\n nd = Nd(device=self.device)\n nd.maker.outputs[ShowIpv6Neighbors] = {\"{'interface':'','vrf':''}\": NdOutput.ShowIpv6Neighbors}\n nd.maker.outputs[ShowIpv6Interface] = {\"{'interface':''}\": NdOutput.ShowIpv6Interface}\n\n # Learn the feature\n nd.learn()\n\n # Check match\n self.assertEqual(200, nd.info['interface']['GigabitEthernet2.90']['router_advertisement']['interval'])\n # Check does not match\n self.assertNotEqual(True, nd.info['interface']['GigabitEthernet2.90']['router_advertisement']['suppress'])\n\n def test_missing_attributes_nd(self):\n nd = Nd(device=self.device)\n nd.maker.outputs[ShowIpv6Neighbors] = {\"{'interface':'','vrf':''}\": NdOutput.ShowIpv6Neighbors}\n nd.maker.outputs[ShowIpv6Interface] = {\"{'interface':''}\": {}}\n\n # Learn the feature\n nd.learn()\n\n with self.assertRaises(KeyError):\n nd.info['interface']['GigabitEthernet2.90']['router_advertisement']\n\n def test_empty_output_nd(self):\n self.maxDiff = None\n nd = Nd(device=self.device)\n\n # Get outputs\n nd.maker.outputs[ShowIpv6Neighbors] = {\"{'interface':'','vrf':''}\": {}}\n nd.maker.outputs[ShowIpv6Interface] = {\"{'interface':''}\": {}}\n\n # Learn the feature\n nd.learn()\n\n # Check attribute not found\n with self.assertRaises(AttributeError):\n nd.info['interface']\n\n\nif __name__ == '__main__':\n unittest.main()\n", "id": "2549800", "language": "Python", "matching_score": 0.6345935463905334, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/nd/iosxe/tests/test_nd.py" }, { "content": "from .community_set import *\n", "id": "2664736", "language": "Python", "matching_score": 0.6207863688468933, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/community_set/__init__.py" }, { "content": "\"\"\"Common configure functions for community-list\"\"\"\n\n# Python\nimport logging\n\n# Common\nfrom genie.libs.sdk.apis.iosxe.bgp.get import get_bgp_summary\n\n# Unicon\nfrom unicon.core.errors import SubCommandFailure\n\nlog = logging.getLogger(__name__)\n\n\ndef configure_community_list_community_list_permit(device, community_list):\n\n \"\"\" Configure community list permit\n\n Args:\n device ('obj'): Device object\n community_list('list'): community list which contains dictionary\n dictionary contains following 3 keys:\n seq ('int'): sequence number\n permit ('str'): permit 
value\n community ('str'): community value - Optional\n ex.)\n [ \n {\n 'seq': 1,\n 'permit': 'deny',\n 'community': 62000:1\n },\n { \n 'seq': 1,\n 'permit': 'permit'\n },\n {\n 'seq': 2,\n 'permit': 'deny',\n 'community': '62000:2'\n },\n {\n 'seq': 2,\n 'permit': 'permit'\n }\n ]\n Returns:\n None\n Raises:\n SubCommandFailure\n \"\"\"\n\n # ip community-list 1 deny 62000:1\n # ip community-list 1 permit\n # ip community-list 2 deny 62000:2\n # ip community-list 2 permit\n\n config = []\n for comm in community_list:\n x = comm[\"seq\"]\n community_permit = comm[\"permit\"]\n community = \"\" if \"community\" not in comm else comm[\"community\"]\n config.append(\n \"ip community-list {x} {community_permit} {community}\\n\".format(\n x=x, community_permit=community_permit, community=community\n )\n )\n try:\n device.configure(\"\".join(config))\n except SubCommandFailure as e:\n raise SubCommandFailure(\n \"Failed in configuring community-list \"\n \"on device {device}, \"\n \"Error: {e}\".format(device=device.name, e=str(e))\n ) from e\n", "id": "8426917", "language": "Python", "matching_score": 2.5082013607025146, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/community_list/configure.py" }, { "content": "\"\"\"Common configure functions for prefix-list\"\"\"\n\n# Python\nimport logging\n\n# Common\nfrom genie.libs.sdk.apis.iosxe.bgp.get import get_bgp_summary\nfrom genie.libs.sdk.apis.iosxe.interface.get import get_interface_netmask\n\n# Unicon\nfrom unicon.core.errors import SubCommandFailure\n\nlog = logging.getLogger(__name__)\n\n\ndef configure_prefix_list_prefix_list(device, prefix_list):\n \"\"\" Configures prefix-list on device\n\n Args:\n device('obj'): device to configure on\n prefix_list('list'): prefix list which contains dictionary\n dictionary contains following 3 keys:\n prefix_list ('str'): prefix list value\n seq ('int'): sequence number\n route ('str'): IP address\n ex.)\n [ {\n 'prefix_list': 1,\n 'seq': 5,\n 'route': '172.16.0.0/24'\n },\n {\n 'prefix_list': 2,\n 'seq': 5,\n 'route': '172.16.1.0/24'\n },\n {\n 'direction': 'in',\n 'permit': 'deny',\n 'route': '10.94.12.1',\n 'comparison_operator': '<',\n 'comparison_value': 36\n } \n ]\n\n Returns:\n None\n Raises:\n SubCommandFailure\n \"\"\"\n\n if not isinstance(prefix_list, list):\n raise SubCommandFailure(\"prefix_list must be a list\")\n\n config = []\n\n for pf in prefix_list:\n if \"prefix_list\" in pf:\n config.append(\n \"\\nip prefix-list {prefix_list}\".format(\n prefix_list=pf[\"prefix_list\"]\n )\n )\n\n if \"seq\" in pf:\n config.append(\" seq {seq}\".format(seq=pf[\"seq\"]))\n\n if \"direction\" in pf:\n config.append(\n \"\\nip prefix-list {direction}\".format(\n direction=pf[\"direction\"]\n )\n )\n\n if not \"/\" in pf[\"route\"]:\n pf[\"route\"] += get_interface_netmask(pf[\"route\"])\n\n config.append(\n \" {permit} {route}\".format(permit=pf[\"permit\"], route=pf[\"route\"])\n )\n\n if \"comparison_operator\" in pf and \"comparison_value\" in pf:\n config.append(\n \" {comparison_operator} {comparison_value}\".format(\n comparison_operator=pf[\"comparison_operator\"],\n comparison_value=pf[\"comparison_value\"],\n )\n )\n\n try:\n device.configure(\"\".join(config))\n except SubCommandFailure as e:\n raise SubCommandFailure(\n \"Failed in configuring prefix-list \"\n \"on device {device}, \"\n \"Error: {e}\".format(device=device.name, e=str(e))\n ) from e\n", "id": "2790518", "language": "Python", "matching_score": 2.8777456283569336, "max_stars_count": 94, "path": 
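# --- Editor's sketch (illustrative, not part of the dataset) -------------
# configure_community_list_community_list_permit above renders each entry
# as 'ip community-list <seq> <permit> [<community>]'.  The string assembly
# in isolation; note that entries without 'community' keep a trailing
# space, which IOS tolerates:

community_list = [
    {'seq': 1, 'permit': 'deny', 'community': '62000:1'},
    {'seq': 1, 'permit': 'permit'},
]
config = [
    'ip community-list {x} {p} {c}\n'.format(
        x=c['seq'], p=c['permit'], c=c.get('community', ''))
    for c in community_list
]
assert config[0] == 'ip community-list 1 deny 62000:1\n'
assert config[1] == 'ip community-list 1 permit \n'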
"pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/prefix_list/configure.py" }, { "content": "\"\"\"Common configure functions for route-map\"\"\"\r\n\r\n# Python\r\nimport logging\r\n\r\n# Unicon\r\nfrom unicon.core.errors import SubCommandFailure\r\n\r\nlog = logging.getLogger(__name__)\r\n\r\n\r\ndef configure_route_map_route_map(device, route_map):\r\n \"\"\" Configures route-map on device\r\n\r\n Args:\r\n device('obj'): device to configure on\r\n route_map('list'): route-map list which contains dictionary\r\n dictionary contains following 6 keys:\r\n seq ('int'): sequence number\r\n route_map ('str'): route-map name\r\n prefix_list ('str'): prefix-list value\r\n set_community ('str'): set community value\r\n match_community ('str'): match community value\r\n set_as_path ('str') : set as-path values\r\n ex.)\r\n [\r\n {\r\n 'seq': 10,\r\n 'route_map': 'community_test_out',\r\n 'match_community': 2\r\n },\r\n {\r\n 'seq': 10,\r\n 'route_map': 'community_test',\r\n 'set_community': '62000:1'\r\n },\r\n ]\r\n Returns:\r\n None\r\n Raises:\r\n SubCommandFailure: Failed configuring route map\r\n \"\"\"\r\n config = []\r\n\r\n # route-map community-test permit 10\r\n # match ip address prefix-list 1\r\n # set community 62000:1\r\n # route-map community_test permit 20\r\n # match ip address prefix-list 2\r\n # set community 62000:2\r\n # route-map community_test permit 30\r\n # end\r\n\r\n for rm in route_map:\r\n\r\n config.append(\r\n \"route-map {route_map} permit {seq}\\n\".format(\r\n route_map=rm[\"route_map\"], seq=rm[\"seq\"]\r\n )\r\n )\r\n\r\n if \"prefix_list\" in rm:\r\n config.append(\r\n \"match ip address prefix-list {x}\\n\".format(\r\n x=rm[\"prefix_list\"]\r\n )\r\n )\r\n\r\n if \"match_community\" in rm:\r\n config.append(\r\n \" match community {match_community}\\n\".format(\r\n match_community=rm[\"match_community\"]\r\n )\r\n )\r\n\r\n if \"set_community\" in rm:\r\n config.append(\r\n \"set community {set_community}\\n\".format(\r\n set_community=rm[\"set_community\"]\r\n )\r\n )\r\n\r\n if \"set_as_path\" in rm:\r\n config.append(\r\n \"set as-path {map_name} {as_path}\".format(\r\n map_name=rm[\"route_map\"], as_path=rm[\"set_as_path\"]\r\n )\r\n )\r\n\r\n try:\r\n device.configure(\"\".join(config))\r\n except SubCommandFailure as e:\r\n raise SubCommandFailure(\"Failed to execute configuration command\")\r\n", "id": "9781001", "language": "Python", "matching_score": 1.5564956665039062, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/route_map/configure.py" }, { "content": "from .prefix_list import *\n", "id": "378379", "language": "Python", "matching_score": 0.7569714188575745, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/prefix_list/__init__.py" }, { "content": "from .access_list import *\n", "id": "10460258", "language": "Python", "matching_score": 0.010604176670312881, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/access_list/__init__.py" }, { "content": "''' \nPrefix-list Genie Ops Object Outputs for NXOS.\n'''\n\n\nclass PrefixListOutput(object):\n\n ShowIpPrefixList = { \n \"prefix_set_name\": {\n \"test\": {\n \"entries\": 6,\n \"protocol\": \"ipv4\",\n \"prefix_set_name\": \"test\",\n \"prefixes\": {\n \"10.169.0.0/8 16..24 permit\": {\n \"masklength_range\": \"16..24\",\n \"sequence\": 25,\n \"prefix\": \"10.169.0.0/8\",\n \"action\": \"permit\"\n },\n \"10.205.0.0/8 8..16 permit\": {\n \"masklength_range\": \"8..16\",\n \"sequence\": 10,\n \"prefix\": \"10.205.0.0/8\",\n \"action\": \"permit\"\n 
},\n \"10.21.0.0/8 8..16 permit\": {\n \"masklength_range\": \"8..16\",\n \"sequence\": 15,\n \"prefix\": \"10.21.0.0/8\",\n \"action\": \"permit\"\n },\n \"10.205.0.0/8 8..8 deny\": {\n \"masklength_range\": \"8..8\",\n \"sequence\": 5,\n \"prefix\": \"10.205.0.0/8\",\n \"action\": \"deny\"\n },\n \"10.94.0.0/8 24..32 permit\": {\n \"masklength_range\": \"24..32\",\n \"sequence\": 20,\n \"prefix\": \"10.94.0.0/8\",\n \"action\": \"permit\"\n },\n \"192.0.2.0/24 25..25 permit\": {\n \"masklength_range\": \"25..25\",\n \"sequence\": 30,\n \"prefix\": \"192.0.2.0/24\",\n \"action\": \"permit\"\n },\n }\n }\n }\n }\n\n ShowIpv6PrefixList = {\n \"prefix_set_name\": {\n \"test6\": {\n \"entries\": 4,\n \"protocol\": \"ipv6\",\n \"prefix_set_name\": \"test6\",\n \"prefixes\": {\n \"2001:db8:3::/64 64..128 permit\": {\n \"masklength_range\": \"64..128\",\n \"sequence\": 15,\n \"prefix\": \"2001:db8:3::/64\",\n \"action\": \"permit\"\n },\n \"2001:db8:2::/64 65..128 permit\": {\n \"masklength_range\": \"65..128\",\n \"sequence\": 10,\n \"prefix\": \"2001:db8:2::/64\",\n \"action\": \"permit\"\n },\n \"2001:db8:1::/64 64..64 permit\": {\n \"masklength_range\": \"64..64\",\n \"sequence\": 5,\n \"prefix\": \"2001:db8:1::/64\",\n \"action\": \"permit\"\n },\n \"2001:db8:4::/64 65..98 permit\": {\n \"masklength_range\": \"65..98\",\n \"sequence\": 20,\n \"prefix\": \"2001:db8:4::/64\",\n \"action\": \"permit\"\n }\n }\n }\n }\n }\n\n PrefixList_info = {\n \"prefix_set_name\": {\n \"test\": {\n \"protocol\": \"ipv4\",\n \"prefix_set_name\": \"test\",\n \"prefixes\": {\n \"10.169.0.0/8 16..24 permit\": {\n \"masklength_range\": \"16..24\",\n \"prefix\": \"10.169.0.0/8\",\n \"action\": \"permit\"\n },\n \"10.205.0.0/8 8..16 permit\": {\n \"masklength_range\": \"8..16\",\n \"prefix\": \"10.205.0.0/8\",\n \"action\": \"permit\"\n },\n \"10.21.0.0/8 8..16 permit\": {\n \"masklength_range\": \"8..16\",\n \"prefix\": \"10.21.0.0/8\",\n \"action\": \"permit\"\n },\n \"10.205.0.0/8 8..8 deny\": {\n \"masklength_range\": \"8..8\",\n \"prefix\": \"10.205.0.0/8\",\n \"action\": \"deny\"\n },\n \"10.94.0.0/8 24..32 permit\": {\n \"masklength_range\": \"24..32\",\n \"prefix\": \"10.94.0.0/8\",\n \"action\": \"permit\"\n },\n \"192.0.2.0/24 25..25 permit\": {\n \"masklength_range\": \"25..25\",\n \"prefix\": \"192.0.2.0/24\",\n \"action\": \"permit\"\n }\n }\n },\n \"test6\": {\n \"protocol\": \"ipv6\",\n \"prefix_set_name\": \"test6\",\n \"prefixes\": {\n \"2001:db8:3::/64 64..128 permit\": {\n \"masklength_range\": \"64..128\",\n \"prefix\": \"2001:db8:3::/64\",\n \"action\": \"permit\"\n },\n \"2001:db8:2::/64 65..128 permit\": {\n \"masklength_range\": \"65..128\",\n \"prefix\": \"2001:db8:2::/64\",\n \"action\": \"permit\"\n },\n \"2001:db8:1::/64 64..64 permit\": {\n \"masklength_range\": \"64..64\",\n \"prefix\": \"2001:db8:1::/64\",\n \"action\": \"permit\"\n },\n \"2001:db8:4::/64 65..98 permit\": {\n \"masklength_range\": \"65..98\",\n \"prefix\": \"2001:db8:4::/64\",\n \"action\": \"permit\"\n }\n }\n }\n }\n\n }\n", "id": "4181979", "language": "Python", "matching_score": 2.9874086380004883, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/prefix_list/nxos/tests/prefix_output.py" }, { "content": "''' \nPrefix-list Genie Ops Object for IOSXE - CLI.\n'''\n\nimport re\n\nfrom genie.libs.ops.prefix_list.prefix_list import PrefixList as SuperPrefixList\nfrom genie.ops.base import Context\n\n# iosxe show_prefix_list\nfrom genie.libs.parser.iosxe.show_prefix_list import ShowIpPrefixListDetail, \\\n 
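# --- Editor's sketch (illustrative, not part of the dataset) -------------
# The prefix-list structures above key each entry as
# '<prefix> <min>..<max> <action>' and carry the same range in the
# 'masklength_range' field.  Splitting that range into usable bounds:

def parse_masklength_range(text):
    """Parse a 'min..max' masklength range into an (int, int) tuple."""
    low, high = text.split('..')
    return int(low), int(high)

assert parse_masklength_range('16..24') == (16, 24)
assert parse_masklength_range('64..64') == (64, 64)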
ShowIpv6PrefixListDetail\n\n\nclass PrefixList(SuperPrefixList):\n '''Prefix-list Genie Ops Object'''\n\n def learn(self):\n '''Learn Prefix-list Ops'''\n \n ########################################################################\n # info\n ########################################################################\n\n for cmd in ['show ip prefix-list detail', 'show ipv6 prefix-list detail']:\n\n # Global source\n src = '[prefix_set_name][(?P<name>.*)]'\n dest = 'info[prefix_set_name][(?P<name>.*)]'\n req_keys = ['[prefix_set_name]', '[protocol]',\n '[prefixes][(?P<prefixes>.*)][prefix]',\n '[prefixes][(?P<prefixes>.*)][masklength_range]',\n '[prefixes][(?P<prefixes>.*)][action]']\n\n for key in req_keys:\n self.add_leaf(cmd=cmd,\n src=src + '[{}]'.format(key),\n dest=dest + '[{}]'.format(key))\n\n\n # make to write in cache\n self.make(final_call=True)\n", "id": "906651", "language": "Python", "matching_score": 1.368981122970581, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/prefix_list/iosxe/prefix_list.py" }, { "content": "''' \nRoutePolicy Genie Ops Object for NXOS - CLI.\n'''\n\n\nfrom genie.libs.ops.route_policy.route_policy import RoutePolicy as SuperRoutePolicy\n\n# nxos show_route_map\nfrom genie.libs.parser.nxos.show_route_map import ShowRouteMap\n\n\nclass RoutePolicy(SuperRoutePolicy):\n '''RoutePolicy Genie Ops Object'''\n\n def learn(self):\n '''Learn RoutePolicy Ops'''\n\n self.add_leaf(cmd=ShowRouteMap,\n src='[(?P<policy>.*)][description]',\n dest='info[(?P<policy>.*)][description]')\n\n #####################################################################\n # Statements section\n #####################################################################\n\n # Place holder to make it more readable\n src = '[(?P<policy>.*)][statements][(?P<statements>.*)]'\n dest = 'info[(?P<policy>.*)][statements][(?P<statements>.*)]'\n\n #####################################################################\n # Section \"conditions\"\n #####################################################################\n\n self.add_leaf(cmd=ShowRouteMap,\n src=src+'[conditions]',\n dest=dest+'[conditions]')\n\n #####################################################################\n # Section \"actions\"\n #####################################################################\n\n self.add_leaf(cmd=ShowRouteMap,\n src=src+'[actions]',\n dest=dest+'[actions]')\n\n self.make(final_call=True)\n\n if hasattr(self, 'info'):\n # Delete 'clause' under every statement 'actions' key\n for key in self.info:\n for key2 in self.info[key]['statements']:\n del self.info[key]['statements'][key2]['actions']['clause']\n\n # Delete 'set_route_origin' under every statement 'actions' key\n for key in self.info:\n for key2 in self.info[key]['statements']:\n if 'set_route_origin' in self.info[key]['statements'][key2]['actions']:\n del self.info[key]['statements'][key2]['actions']['set_route_origin']\n", "id": "5990071", "language": "Python", "matching_score": 4.3569560050964355, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/route_policy/nxos/route_policy.py" }, { "content": "''' \nRoutePolicy Genie Ops Object for IOSXR - CLI.\n'''\n\nfrom genie.libs.ops.route_policy.route_policy import RoutePolicy as SuperRoutePolicy\n\n# iosxr show_rpl\nfrom genie.libs.parser.iosxr.show_rpl import ShowRplRoutePolicy\n\n\nclass RoutePolicy(SuperRoutePolicy):\n '''RoutePolicy Genie Ops Object'''\n\n def learn(self):\n '''Learn RoutePolicy Ops'''\n\n self.add_leaf(cmd=ShowRplRoutePolicy,\n 
src='[(?P<policy>.*)][description]',\n                      dest='info[(?P<policy>.*)][description]')\n\n        #####################################################################\n        # Statements section\n        #####################################################################\n\n        # Place holder to make it more readable\n        src = '[(?P<policy>.*)][statements][(?P<statements>.*)]'\n        dest = 'info[(?P<policy>.*)][statements][(?P<statements>.*)]'\n\n        #####################################################################\n        # Section \"conditions\"\n        #####################################################################\n\n        self.add_leaf(cmd=ShowRplRoutePolicy,\n                      src=src+'[conditions]',\n                      dest=dest+'[conditions]')\n\n        #####################################################################\n        # Section \"actions\"\n        #####################################################################\n\n        self.add_leaf(cmd=ShowRplRoutePolicy,\n                      src=src+'[actions]',\n                      dest=dest+'[actions]')\n\n        self.make(final_call=True)\n", "id": "431895", "language": "Python", "matching_score": 1.0465410947799683, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/route_policy/iosxr/route_policy.py" }, { "content": "''' \nNTP Genie Ops Object for Junos - CLI.\n'''\n# super class\nfrom genie.libs.ops.ntp.ntp import Ntp as SuperNtp\n\n# Parser\nfrom genie.libs.parser.junos.show_ntp import ShowNtpAssociations, \\\n                                             ShowNtpStatus, \\\n                                             ShowConfigurationSystemNtpSet\n\n\nclass Ntp(SuperNtp):\n    '''NTP Genie Ops Object'''\n\n    def learn(self):\n        '''Learn NTP Ops'''\n\n        ########################################################################\n        #                               info\n        ########################################################################\n\n        # clock_state\n        # 'associations_address', 'associations_local_mode',\n        # 'clock_state', 'clock_stratum', 'root_delay'\n        self.add_leaf(cmd=ShowNtpAssociations,\n                      src='[clock_state]',\n                      dest='info[clock_state]')\n\n        # 'actual_freq', 'clock_precision', 'reference_time', 'root_dispersion'\n        for src_key, dest_key in {'frequency': 'actual_freq',\n                                  'precision': 'clock_precision',\n                                  'reftime': 'reference_time',\n                                  'rootdispersion': 'root_dispersion'}.items():\n            self.add_leaf(cmd=ShowNtpStatus,\n                          src='[clock_state][system_status][%s]' % src_key,\n                          dest='info[clock_state][system_status][%s]' % dest_key)\n\n        # unicast_configuration\n        self.add_leaf(cmd=ShowConfigurationSystemNtpSet,\n                      src='[vrf][(?P<vrf>.*)][address][(?P<address>.*)][type][(?P<type>.*)]',\n                      dest='info[vrf][(?P<vrf>.*)][unicast_configuration][address]'\n                           '[(?P<address>.*)][type][(?P<type>.*)]')\n\n        # associations\n        self.add_leaf(cmd=ShowConfigurationSystemNtpSet,\n                      src='[vrf][(?P<vrf>.*)][address][(?P<address>.*)][isconfigured][(?P<isconfigured>.*)]',\n                      dest='info[vrf][(?P<vrf>.*)][associations][address][(?P<address>.*)]'\n                           '[isconfigured][(?P<isconfigured>.*)]')\n\n        # associations\n        asso_keys = ['address', 'local_mode', 'stratum', 'refid', 'reach', 'poll',\n                     'offset', 'delay', 'receive_time']\n        for key in asso_keys:\n            self.add_leaf(cmd=ShowNtpAssociations,\n                          src='[peer][(?P<address>.*)][local_mode][(?P<local_mode>.*)][%s]' % key,\n                          dest='info[associations][address][(?P<address>.*)]'\n                               '[local_mode][(?P<local_mode>.*)][%s]' % key)\n\n\n        # make to write in cache\n        self.make(final_call=True)\n\n        # need to combine the structures from ShowConfigurationSystemNtpSet and ShowNtpAssociations\n        if hasattr(self, 'info') and 'associations' in self.info:\n\n            peers = list(self.info['associations']['address'].keys()).copy()\n\n            for peer in peers:\n                for vrf in 
self.info['vrf']:\n if not self.info['vrf'][vrf]['associations']['address'].get(peer):\n continue\n\n for local_mode in self.info['associations']['address'][peer]['local_mode'].keys():\n mode_dict = self.info['vrf'][vrf]['associations']['address'][peer].setdefault('local_mode', {}).setdefault(local_mode, {})\n mode_dict.setdefault('isconfigured', {}).update(self.info['vrf'][vrf]['associations']['address'][peer]['isconfigured'])\n for configured in mode_dict['isconfigured']:\n mode_dict['isconfigured'][configured].update(self.info['associations']['address'][peer]['local_mode'][local_mode])\n mode_dict['isconfigured'][configured]['vrf'] = vrf\n mode_dict['isconfigured'][configured]['local_mode'] = local_mode\n \n try:\n del(self.info['vrf'][vrf]['associations']['address'][peer]['isconfigured'])\n except Exception:\n pass\n\n # remove the non-combined key\n try:\n del(self.info['associations'])\n except Exception:\n pass\n", "id": "11139307", "language": "Python", "matching_score": 3.8547134399414062, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/ntp/junos/ntp.py" }, { "content": "# Python\nimport unittest\nfrom copy import deepcopy\nfrom unittest.mock import Mock\n\n# ATS\nfrom ats.topology import Device\n\n# Genie\nfrom genie.libs.ops.ntp.iosxr.ntp import Ntp\nfrom genie.libs.ops.ntp.iosxr.tests.ntp_output import NtpOutput\n\n# Parser\nfrom genie.libs.parser.iosxr.show_ntp import ShowNtpAssociations, \\\n ShowNtpStatus, \\\n ShowRunningConfigNtp\n\n\nclass test_ntp(unittest.TestCase):\n\n def setUp(self):\n self.device = Device(name='aDevice')\n self.device.os = 'iosxr'\n self.device.mapping={}\n self.device.mapping['cli']='cli'\n # Give the device as a connection type\n # This is done in order to call the parser on the output provided\n self.device.connectionmgr.connections['cli'] = self.device\n\n def test_complete_output(self):\n self.maxDiff = None\n ntp = Ntp(device=self.device)\n # Get outputs\n ntp.maker.outputs[ShowNtpAssociations] = \\\n {\"\": NtpOutput.ShowNtpAssociations}\n\n ntp.maker.outputs[ShowNtpStatus] = \\\n {\"\": NtpOutput.ShowNtpStatus}\n\n ntp.maker.outputs[ShowRunningConfigNtp] = \\\n {\"\": NtpOutput.ShowRunningConfigNtp}\n\n # Learn the feature\n ntp.learn()\n\n # Verify Ops was created successfully\n self.assertEqual(ntp.info, NtpOutput.Ntp_info)\n\n # Check specific attribute values\n # info - clock_state\n self.assertEqual(ntp.info['clock_state']['system_status']\\\n ['clock_state'], 'synchronized')\n # info - vrf default\n self.assertEqual(ntp.info['vrf']['default']['unicast_configuration']\\\n ['address']['127.127.1.1']['type']['server']['type'], 'server')\n\n def test_empty_output(self):\n self.maxDiff = None\n ntp = Ntp(device=self.device)\n # Get outputs\n ntp.maker.outputs[ShowNtpAssociations] = \\\n {\"\": {}}\n\n ntp.maker.outputs[ShowNtpStatus] = \\\n {\"\": {}}\n\n ntp.maker.outputs[ShowRunningConfigNtp] = \\\n {\"\": {}}\n\n # Learn the feature\n ntp.learn()\n\n # Check no attribute not found\n with self.assertRaises(AttributeError):\n ntp.info['vrf']\n\n def test_incomplete_output(self):\n self.maxDiff = None\n\n ntp = Ntp(device=self.device)\n\n # Get outputs\n ntp.maker.outputs[ShowNtpAssociations] = \\\n {\"\": NtpOutput.ShowNtpAssociations}\n\n ntp.maker.outputs[ShowNtpStatus] = \\\n {\"\": {}}\n\n ntp.maker.outputs[ShowRunningConfigNtp] = \\\n {\"\": {}}\n\n # Learn the feature\n ntp.learn()\n \n # Check no attribute not found\n with self.assertRaises(KeyError):\n ntp.info['clock_state']['system_status']['actual_freq']\n 
\n        with self.assertRaises(KeyError):\n            ntp.info['clock_state']['system_status']['clock_precision']\n        with self.assertRaises(KeyError):\n            ntp.info['clock_state']['system_status']['reference_time']\n        with self.assertRaises(KeyError):\n            ntp.info['clock_state']['system_status']['root_dispersion']\n\n\nif __name__ == '__main__':\n    unittest.main()", "id": "10369992", "language": "Python", "matching_score": 1.8587456941604614, "max_stars_count": 0, "path": "pkgs/ops-pkg/src/genie/libs/ops/ntp/iosxr/tests/test_ntp.py" }, { "content": "# Genie\r\nfrom genie.ops.base import Base\r\n\r\n\r\nclass Ntp(Base):\r\n    exclude = ['root_delay',\r\n               'delay',\r\n               'root_dispersion',\r\n               'receive_time']", "id": "9884831", "language": "Python", "matching_score": 1, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/ntp/ntp.py" }, { "content": "from .ntp import *\n", "id": "1785314", "language": "Python", "matching_score": 0, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/ntp/__init__.py" }, { "content": "from .segment_routing import *\n", "id": "2961108", "language": "Python", "matching_score": 0.6736921072006226, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/segment_routing/__init__.py" }, { "content": "from .static_routing import *\n", "id": "4739017", "language": "Python", "matching_score": 1, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/static_routing/__init__.py" }, { "content": "from .routing import *\n", "id": "941914", "language": "Python", "matching_score": 0, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/routing/__init__.py" }, { "content": "# XXXJST TODO Vni w/ mcast_group, peer_vtep\n", "id": "2684950", "language": "Python", "matching_score": 1.613547444343567, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/l2vpn/nxos/vni.py" }, { "content": "from .mcast import *\nfrom .mcast_group import *\n", "id": "2760523", "language": "Python", "matching_score": 0.774314284324646, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/mcast/__init__.py" }, { "content": "\n# Python\nimport weakref\nimport functools\n\n# Genie\nfrom genie.decorator import managedattribute\nfrom genie.conf.base import ConfigurableBase\n\n\n@functools.total_ordering\nclass MldGroup(ConfigurableBase):\n\n    @property\n    def testbed(self):\n        return self.device.testbed\n\n    @property\n    def device(self):\n        return self._device()\n\n    # join_group\n    join_group = managedattribute(\n        name='join_group',\n        default=None,\n        type=(None, managedattribute.test_istype(str)),\n        doc=\"Configure join_group under interface.\")\n\n    # join_group_source_addr\n    join_group_source_addr = managedattribute(\n        name='join_group_source_addr',\n        default=None,\n        type=(None, managedattribute.test_istype(str)),\n        doc=\"Configure join_group_source_addr under interface.\")\n\n    # static_group\n    static_group = managedattribute(\n        name='static_group',\n        default=None,\n        type=(None, managedattribute.test_istype(str)),\n        doc=\"Configure static_group under interface.\")\n\n    # static_group_source_addr\n    static_group_source_addr = managedattribute(\n        name='static_group_source_addr',\n        default=None,\n        type=(None, managedattribute.test_istype(str)),\n        doc=\"Configure static_group_source_addr under interface.\")\n\n    # ==========================================================================\n\n    # Overload __eq__\n    def __eq__(self, other):\n        if not isinstance(other, MldGroup):\n            return NotImplemented\n\n        
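# Identity covers all four group/source attributes plus the owning\n        # device, mirroring the tuple hashed in __hash__ below.\n        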
return (self.join_group,\n                self.join_group_source_addr,\n                self.static_group,\n                self.static_group_source_addr,\n                self.device) == \\\n               (other.join_group,\n                other.join_group_source_addr,\n                other.static_group,\n                other.static_group_source_addr,\n                other.device)\n\n    # Overload __lt__\n    def __lt__(self, other):\n        if not isinstance(other, MldGroup):\n            return NotImplemented\n\n        group_current = self.join_group if self.join_group else self.static_group\n        group_previous = other.join_group if other.join_group else other.static_group\n\n        source_current = self.join_group_source_addr if \\\n            self.join_group_source_addr else self.static_group_source_addr\n        source_previous = other.join_group_source_addr if \\\n            other.join_group_source_addr else other.static_group_source_addr\n\n        # Comparing same types\n        if type(group_current) == type(group_previous):\n            if group_current == group_previous:\n                return source_current < source_previous\n            return group_current < group_previous\n        else:\n            self_addr = str(group_current)\n            other_addr = str(group_previous)\n            return self_addr < other_addr\n\n    # Overload __hash__\n    def __hash__(self):\n        return hash((self.join_group,\n                     self.join_group_source_addr,\n                     self.static_group,\n                     self.static_group_source_addr,\n                     self.device))\n\n    # Overload __repr__\n    def __repr__(self):\n        if isinstance(self.join_group, str):\n            return '%s object at 0x%x with string name %s/%s/%s/%s' % (\n                self.__class__.__name__,\n                id(self),\n                self.join_group,\n                self.join_group_source_addr,\n                self.static_group,\n                self.static_group_source_addr)\n        else:\n            return '%s object at 0x%x with the name %s/%s/%s/%s which is not string' % (\n                self.__class__.__name__,\n                id(self),\n                self.join_group,\n                self.join_group_source_addr,\n                self.static_group,\n                self.static_group_source_addr)\n\n\n    def __init__(self, device, *args, **kwargs):\n        self._device = weakref.ref(device)\n        super().__init__(*args, **kwargs)", "id": "914551", "language": "Python", "matching_score": 2.405334949493408, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/mld/mld_group.py" }, { "content": "\n# Python\nimport weakref\nimport functools\n\n# Genie\nfrom genie.decorator import managedattribute\nfrom genie.conf.base import ConfigurableBase\n\n\n@functools.total_ordering\nclass Ssm(ConfigurableBase):\n\n    @property\n    def testbed(self):\n        return self.device.testbed\n\n    @property\n    def device(self):\n        return self._device()\n\n    # ssm_source_addr\n    ssm_source_addr = managedattribute(\n        name='ssm_source_addr',\n        default=None,\n        type=(None, managedattribute.test_istype(str)),\n        doc=\"Configure ssm_source_addr under vrf.\")\n\n    # ssm_group_policy\n    ssm_group_policy = managedattribute(\n        name='ssm_group_policy',\n        default=None,\n        type=(None, managedattribute.test_istype(str)),\n        doc=\"Configure ssm_group_policy under vrf.\")\n\n    # ssm_group_range\n    ssm_group_range = managedattribute(\n        name='ssm_group_range',\n        default=None,\n        type=(None, managedattribute.test_istype(str)),\n        doc=\"Configure ssm_group_range under vrf.\")\n\n    # ==========================================================================\n\n    # Overload __eq__\n    def __eq__(self, other):\n        if not isinstance(other, Ssm):\n            return NotImplemented\n\n        return (self.ssm_source_addr,\n                self.ssm_group_range,\n                self.ssm_group_policy,\n                self.device) == \\\n               (other.ssm_source_addr,\n                other.ssm_group_range,\n                other.ssm_group_policy,\n                other.device)\n\n    # Overload __lt__\n    def __lt__(self, other):\n        if not isinstance(other, Ssm):\n            return NotImplemented\n\n        
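# Order primarily by source address; when the two addresses are of the\n        # same type and equal, tie-break on group range, then group policy.\n        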
# Comparing same types\n        if type(self.ssm_source_addr) == type(other.ssm_source_addr):\n            if self.ssm_group_range is not None:\n                if self.ssm_source_addr == other.ssm_source_addr:\n                    return self.ssm_group_range < other.ssm_group_range\n            elif self.ssm_group_policy is not None:\n                if self.ssm_source_addr == other.ssm_source_addr:\n                    return self.ssm_group_policy < other.ssm_group_policy\n            return self.ssm_source_addr < other.ssm_source_addr\n        # Comparing mismatched types\n        else:\n            self_addr = str(self.ssm_source_addr)\n            other_addr = str(other.ssm_source_addr)\n            return self_addr < other_addr\n\n    # Overload __hash__\n    def __hash__(self):\n        return hash((self.ssm_source_addr,\n                     self.ssm_group_range,\n                     self.ssm_group_policy,\n                     self.device))\n\n    # Overload __repr__\n    def __repr__(self):\n        if isinstance(self.ssm_source_addr, str):\n            return '%s object at 0x%x with string %s/%s/%s' % (\n                self.__class__.__name__,\n                id(self),\n                self.ssm_source_addr,\n                self.ssm_group_range,\n                self.ssm_group_policy)\n        else:\n            return '%s object at 0x%x with the name %s/%s/%s which is not string' % (\n                self.__class__.__name__,\n                id(self),\n                self.ssm_source_addr,\n                self.ssm_group_range,\n                self.ssm_group_policy)\n\n\n    def __init__(self, device, *args, **kwargs):\n        self._device = weakref.ref(device)\n        super().__init__(*args, **kwargs)", "id": "11264592", "language": "Python", "matching_score": 2.029404640197754, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/mld/ssm.py" }, { "content": "\n__all__ = (\n    'Vfi',\n    )\n\nimport functools\nimport weakref\n\nfrom genie.utils.cisco_collections import typedset\n\nfrom genie.decorator import managedattribute\nfrom genie.conf.base import ConfigurableBase, Device\nimport genie.conf.base.attributes\nfrom genie.conf.base.attributes import SubAttributes, SubAttributesDict, AttributesInheriter\n\nfrom genie.libs.conf.base import MAC\nfrom genie.libs.conf.bgp import RouteDistinguisher, RouteTarget\n\nfrom .pseudowire import Pseudowire, PseudowireClass, PseudowireNeighbor, PseudowireNeighborSubAttributes\n\n\nclass ConfigurableVfiNamespace(ConfigurableBase):\n\n    def __init__(self, vfi=None):\n        assert vfi\n        self._vfi = vfi\n\n    _vfi = None\n\n    @property\n    def vfi(self):\n        return self._vfi\n\n    @property\n    def testbed(self):\n        return self.vfi.testbed\n\n    @property\n    def device(self):\n        return self.vfi.device\n\n\n@functools.total_ordering\nclass Vfi(ConfigurableBase):\n\n    container = None  # BridgeDomain\n\n    device = managedattribute(\n        name='device',\n        read_only=True,\n        gettype=managedattribute.auto_unref)\n\n    name = managedattribute(\n        name='name',\n        read_only=True)  # read-only hash key\n\n    virtual = managedattribute(\n        name='virtual',\n        default=False,\n        type=(None, managedattribute.test_istype(bool)))\n\n    shutdown = managedattribute(\n        name='shutdown',\n        default=None,\n        type=(None, managedattribute.test_istype(bool)))\n\n    vpn_id = managedattribute(\n        name='vpn_id',\n        default=None,\n        type=(None, managedattribute.test_istype(int)))\n\n    class AutodiscoveryBgpAttributes(ConfigurableVfiNamespace):\n\n        enabled = managedattribute(\n            name='enabled',\n            default=False,\n            type=managedattribute.test_istype(bool))\n\n        control_word = managedattribute(\n            name='control_word',\n            default=None,\n            type=(None, managedattribute.test_istype(bool)))\n\n        rd = managedattribute(\n            name='rd',\n            default=None,\n            type=(None, RouteDistinguisher,\n                  managedattribute.test_in((\n                      'auto',\n                  ))))\n\n        export_route_policy = managedattribute(\n            name='export_route_policy',\n            default=None,\n            type=(None, managedattribute.test_istype(str)))\n\n        export_route_targets = 
managedattribute(\n name='export_route_targets',\n finit=typedset(RouteTarget.ImportExport).copy,\n type=typedset(RouteTarget.ImportExport)._from_iterable)\n\n import_route_targets = managedattribute(\n name='import_route_targets',\n finit=typedset(RouteTarget.ImportExport).copy,\n type=typedset(RouteTarget.ImportExport)._from_iterable)\n\n table_policy = managedattribute(\n name='table_policy',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n class SignalingProtocolBgpAttributes(ConfigurableVfiNamespace):\n\n enabled = managedattribute(\n name='enabled',\n default=False,\n type=managedattribute.test_istype(bool))\n\n ve_id = managedattribute(\n name='ve_id',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n ve_range = managedattribute(\n name='ve_range',\n default=None,\n type=(None,\n managedattribute.test_istype(int),\n managedattribute.test_istype(str)))\n\n signaling_protocol_bgp = managedattribute(\n name='signaling_protocol_bgp',\n read_only=True,\n doc=SignalingProtocolBgpAttributes.__doc__)\n\n @signaling_protocol_bgp.initter\n def signaling_protocol_bgp(self):\n return self.SignalingProtocolBgpAttributes(vfi=self.vfi)\n\n class SignalingProtocolLdpAttributes(ConfigurableVfiNamespace):\n\n enabled = managedattribute(\n name='enabled',\n default=False,\n type=managedattribute.test_istype(bool))\n\n vpls_id = managedattribute(\n name='vpls_id',\n default=None,\n type=(None, RouteTarget))\n\n signaling_protocol_ldp = managedattribute(\n name='signaling_protocol_ldp',\n read_only=True,\n doc=SignalingProtocolLdpAttributes.__doc__)\n\n @signaling_protocol_ldp.initter\n def signaling_protocol_ldp(self):\n return self.SignalingProtocolLdpAttributes(vfi=self.vfi)\n\n def __init__(self, vfi):\n super().__init__(vfi=vfi)\n\n autodiscovery_bgp = managedattribute(\n name='autodiscovery_bgp',\n read_only=True,\n doc=AutodiscoveryBgpAttributes.__doc__)\n\n @autodiscovery_bgp.initter\n def autodiscovery_bgp(self):\n return self.AutodiscoveryBgpAttributes(vfi=self)\n\n class MulticastP2mpAttributes(ConfigurableVfiNamespace):\n\n class SignalingProtocolBgpAttributes(ConfigurableVfiNamespace):\n\n enabled = managedattribute(\n name='enabled',\n default=False,\n type=managedattribute.test_istype(bool))\n\n signaling_protocol_bgp = managedattribute(\n name='signaling_protocol_bgp',\n read_only=True,\n doc=SignalingProtocolBgpAttributes.__doc__)\n\n @signaling_protocol_bgp.initter\n def signaling_protocol_bgp(self):\n return self.SignalingProtocolBgpAttributes(vfi=self.vfi)\n\n class TransportRsvpTeAttributes(ConfigurableVfiNamespace):\n\n enabled = managedattribute(\n name='enabled',\n default=False,\n type=managedattribute.test_istype(bool))\n\n attribute_set_p2mp_te = managedattribute(\n name='attribute_set_p2mp_te',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n transport_rsvp_te = managedattribute(\n name='transport_rsvp_te',\n read_only=True,\n doc=TransportRsvpTeAttributes.__doc__)\n\n @transport_rsvp_te.initter\n def transport_rsvp_te(self):\n return self.TransportRsvpTeAttributes(vfi=self.vfi)\n\n def __init__(self, vfi):\n super().__init__(vfi=vfi)\n\n multicast_p2mp = managedattribute(\n name='multicast_p2mp',\n read_only=True,\n doc=MulticastP2mpAttributes.__doc__)\n\n @multicast_p2mp.initter\n def multicast_p2mp(self):\n return self.MulticastP2mpAttributes(vfi=self)\n\n # TODO Cannot use typedset because segments need to be updated\n pseudowires = managedattribute(\n name='pseudowires',\n finit=set,\n 
type=managedattribute.test_set_of(\n managedattribute.test_isinstance(Pseudowire)),\n gettype=frozenset,\n doc='A `set` of Pseudowire associated objects')\n\n def add_pseudowire(self, pseudowire):\n prev_segments = self.segments\n self.pseudowires |= {pseudowire}\n self._on_segments_updated(prev_segments)\n\n def remove_pseudowire(self, pseudowire):\n prev_segments = self.segments\n self.pseudowires -= {pseudowire}\n self._on_segments_updated(prev_segments)\n\n @property\n def pseudowire_neighbors(self):\n for pw in self.pseudowires:\n for nbr in pw.neighbors:\n if nbr.container is self:\n # implied: nbr.device is self.device\n yield nbr\n\n def create_pseudowire_neighbor(self, **kwargs):\n pwnbr = PseudowireNeighbor(container=self,\n device=self.device,\n **kwargs)\n return pwnbr\n\n @property\n def segments(self):\n segments = []\n segments += list(self.pseudowires)\n return frozenset(segments)\n\n def add_segment(self, segment):\n if isinstance(segment, Pseudowire):\n self.add_pseudowire(segment)\n else:\n raise ValueError(segment)\n\n def remove_segment(self, segment):\n if isinstance(segment, Pseudowire):\n self.remove_pseudowire(segment)\n else:\n raise ValueError(segment)\n\n def _on_segments_updated(self, prev_segments):\n pass # TODO\n\n class NeighborAttributes(PseudowireNeighborSubAttributes):\n\n # ip -> self.neighbor.ip\n # pw_id -> self.neighbor.pw_id\n\n dhcp_ipv4_snooping_profile = managedattribute(\n name='dhcp_ipv4_snooping_profile',\n default=None,\n type=(None,\n managedattribute.test_is(False), # False\n managedattribute.test_istype(str), # <profile>\n ))\n\n igmp_snooping_profile = managedattribute(\n name='igmp_snooping_profile',\n default=None,\n type=(None,\n managedattribute.test_is(False), # False\n managedattribute.test_istype(str), # <profile>\n ))\n\n mld_snooping_profile = managedattribute(\n name='mld_snooping_profile',\n default=None,\n type=(None,\n managedattribute.test_is(False), # False\n managedattribute.test_istype(str), # <profile>\n ))\n\n mpls_static_label = managedattribute(\n name='mpls_static_label',\n default=None,\n type=(None,\n managedattribute.test_istype(int)))\n\n pw_class = managedattribute(\n name='pw_class',\n default=None,\n type=(None,\n managedattribute.test_isinstance(PseudowireClass)))\n\n static_mac_address = managedattribute(\n name='static_mac_address',\n default=None,\n type=(None, MAC))\n\n neighbor_attr = managedattribute(\n name='neighbor_attr',\n read_only=True,\n doc=NeighborAttributes.__doc__)\n\n @neighbor_attr.initter\n def neighbor_attr(self):\n return SubAttributesDict(self.NeighborAttributes, parent=self)\n\n def __init__(self, name, device, bridge_domain=None, *args, **kwargs):\n assert isinstance(name, str)\n self._name = name\n assert isinstance(device, Device)\n self._device = weakref.ref(device)\n super().__init__(*args, **kwargs)\n if bridge_domain is not None:\n bridge_domain.add_vfi(self)\n\n def __eq__(self, other):\n if not isinstance(other, Vfi):\n return NotImplemented\n # return (self.device, self.name,\n # self.container.__class__.__name__, self.container) \\\n # == (other.device, other.name,\n # other.container.__class__.__name__, other.container)\n return (self.name, self.device,\n self.container.__class__.__name__, self.container) \\\n == (other.name, other.device,\n other.container.__class__.__name__, other.container)\n\n def __lt__(self, other):\n if not isinstance(other, Vfi):\n return NotImplemented\n return (self.device, self.name,\n self.container.__class__.__name__, self.container) \\\n < 
(other.device, other.name,\n               other.container.__class__.__name__, other.container)\n\n    def __hash__(self):\n        # return hash((self.device, self.container, self.name))\n        # return hash((self.name, self.device, self.container))\n        return hash(self.name)\n\n", "id": "5832972", "language": "Python", "matching_score": 5.5936970710754395, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/l2vpn/vfi.py" }, { "content": "\n__all__ = (\n    'BridgeDomain',\n    'BridgeDomainLink',\n)\n\nimport collections\nimport functools\nimport weakref\n\nfrom ats.datastructures import WeakList\n\nfrom genie.decorator import managedattribute\nimport genie.conf.base\nfrom genie.conf.base import DeviceFeature, Interface, Link\nfrom genie.conf.base.link import EmulatedLink, VirtualLink\nimport genie.conf.base.attributes\nfrom genie.conf.base.attributes import SubAttributes, KeyedSubAttributes,\\\n    SubAttributesDict, AttributesHelper\n\nfrom genie.libs.conf.base import MAC\nfrom genie.libs.conf.interface import BviInterface\nimport genie.libs.conf.l2vpn\nfrom ..evpn.evi import Evi, EviSubAttributes\nfrom ..evpn.vni import Vni, VniSubAttributes\nfrom .pseudowire import Pseudowire, PseudowireClass, PseudowireNeighbor, PseudowireNeighborSubAttributes\nfrom .vfi import Vfi\n\n\nclass BridgeDomainLink(VirtualLink):\n\n    bridge_domain = managedattribute(\n        name='bridge_domain',\n        read_only=True,\n        gettype=managedattribute.auto_unref)\n\n    @property\n    def testbed(self):\n        return self.bridge_domain.testbed\n\n    def connect_interface(self, interface):\n        '''Not supported; Use BridgeDomain.add_segment'''\n        raise TypeError('%s objects do not support connect_interface; '\n                        'Please use BridgeDomain.add_segment'\n                        % type(self).__name__)\n\n    def _connect_interface_from_bridge_domain(self, interface):\n        #had_interfaces = any(self.interfaces)\n        super().connect_interface(interface)\n        #if not had_interfaces:\n        #    self.testbed.add_link(self)\n\n    def disconnect_interface(self, interface):\n        '''Not supported; Use BridgeDomain.remove_segment'''\n        raise TypeError('%s objects do not support disconnect_interface; '\n                        'Please use BridgeDomain.remove_segment'\n                        % type(self).__name__)\n\n    def _disconnect_interface_from_bridge_domain(self, interface):\n        super().disconnect_interface(interface)\n        #if not any(self.interfaces):\n        #    self.testbed.remove_link(self)\n\n    def __init__(self, bridge_domain):\n        self._bridge_domain = weakref.ref(bridge_domain)\n        super().__init__(\n            name='{g}:{n}'.format(\n                g=bridge_domain.group_name,\n                n=bridge_domain.name))\n\n\n@functools.total_ordering\nclass BridgeDomain(DeviceFeature):\n\n    group_name = managedattribute(\n        name='group_name',\n        type=managedattribute.test_istype(str))\n\n    @group_name.defaulter\n    def group_name(self):\n        return self.name + 'g'\n\n    name = managedattribute(\n        name='name',\n        read_only=True,  # read-only hash key\n        doc='Bridge domain name (mandatory)')\n\n    link = managedattribute(\n        name='link',\n        read_only=True,\n        doc='The BridgeDomainLink instance that represents the connected interfaces')\n\n    # TODO Cannot use typedset because segments need to be updated\n    evis = managedattribute(\n        name='evis',\n        finit=set,\n        type=managedattribute.test_set_of(\n            managedattribute.test_isinstance(Evi)),\n        gettype=frozenset,\n        doc='A `set` of Evi associated objects')\n\n    def add_evi(self, evi):\n        prev_segments = self.segments\n        self.evis |= {evi}\n        self._on_segments_updated(prev_segments)\n\n    def remove_evi(self, evi):\n        prev_segments = self.segments\n        self.evis -= {evi}\n        self._on_segments_updated(prev_segments)\n\n    class 
DefaultDeviceAndInterfaceMacAttributes(object):\n pass\n\n mac = managedattribute(\n name='mac',\n read_only=True,\n finit=DefaultDeviceAndInterfaceMacAttributes,\n doc=DefaultDeviceAndInterfaceMacAttributes.__doc__)\n\n # TODO Cannot use typedset because segments need to be updated\n interfaces = managedattribute(\n name='interfaces',\n finit=WeakList,\n type=managedattribute.test_set_of(\n managedattribute.test_isinstance(Interface)),\n gettype=frozenset,\n doc='A `set` of Interface associated objects')\n\n def add_interface(self, interface):\n if isinstance(interface, Vni):\n self.add_vni(interface)\n return\n prev_segments = self.segments\n self.interfaces |= {interface}\n self._on_segments_updated(prev_segments)\n\n def remove_interface(self, interface):\n if isinstance(interface, Vni):\n self.remove_vni(interface)\n return\n prev_segments = self.segments\n self.interfaces -= {interface}\n self._on_segments_updated(prev_segments)\n\n aging_time = managedattribute(\n name='aging_time',\n default=None,\n type=(None,managedattribute.test_istype(int)))\n\n learning_disable = managedattribute(\n name='learning_disable',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n split_horizon_group = managedattribute(\n name='split_horizon_group',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n split_horizon_group_core = managedattribute(\n name='split_horizon_group_core',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n # TODO Cannot use typedset because segments need to be updated\n pseudowires = managedattribute(\n name='pseudowires',\n finit=set,\n type=managedattribute.test_set_of(\n managedattribute.test_isinstance(Pseudowire)),\n gettype=frozenset,\n doc='A `set` of Pseudowire associated objects')\n\n def add_pseudowire(self, pseudowire):\n prev_segments = self.segments\n self.pseudowires |= {pseudowire}\n self._on_segments_updated(prev_segments)\n\n def remove_pseudowire(self, pseudowire):\n prev_segments = self.segments\n self.pseudowires -= {pseudowire}\n self._on_segments_updated(prev_segments)\n\n @property\n def pseudowire_neighbors(self):\n for pw in self.pseudowires:\n for nbr in pw.neighbors:\n if nbr.container is self:\n yield nbr\n\n # TODO Cannot use typedset because segments need to be updated\n vnis = managedattribute(\n name='vnis',\n finit=set,\n type=managedattribute.test_set_of(\n managedattribute.test_isinstance(Vni)),\n gettype=frozenset,\n doc='A `set` of Vni associated objects')\n\n def add_vni(self, vni):\n prev_segments = self.segments\n self.vnis |= {vni}\n self._on_segments_updated(prev_segments)\n\n def remove_vni(self, vni):\n prev_segments = self.segments\n self.vnis -= {vni}\n self._on_segments_updated(prev_segments)\n\n # TODO Cannot use typedset because segments need to be updated\n vfis = managedattribute(\n name='vfis',\n finit=set,\n type=managedattribute.test_set_of(\n managedattribute.test_isinstance(Vfi)),\n gettype=frozenset,\n doc='A `set` of Vfi associated objects')\n\n def add_vfi(self, vfi):\n assert isinstance(vfi, Vfi)\n if vfi.container is not None:\n raise ValueError(\n '%r is already assigned to %r' % (vfi, vfi.container))\n prev_segments = self.segments\n self.vfis |= {vfi}\n vfi.container = self\n self._on_segments_updated(prev_segments)\n\n def remove_vfi(self, vfi):\n assert isinstance(vfi, Vfi)\n if vfi.container is not None and vfi.container is not self:\n raise ValueError(\n '%r is assigned to %r, not %r' % (vfi, vfi.container, self))\n prev_segments = self.segments\n 
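# Detach the VFI, then let _on_segments_updated() reconcile the\n        # BridgeDomainLink interface membership against the captured set.\n        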
self.vfis -= {vfi}\n vfi.container = None\n self._on_segments_updated(prev_segments)\n\n shutdown = managedattribute(\n name='shutdown',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n @property\n def segments(self):\n segments = set()\n segments |= self.interfaces\n segments |= self.pseudowires\n segments |= self.vnis\n segments |= self.vfis\n segments |= self.evis\n return frozenset(segments)\n\n def add_segment(self, segment):\n if isinstance(segment, Evi):\n self.add_evi(segment)\n elif isinstance(segment, Vni):\n self.add_vni(segment)\n elif isinstance(segment, Interface):\n self.add_interface(segment)\n elif isinstance(segment, Pseudowire):\n self.add_pseudowire(segment)\n elif isinstance(segment, Vfi):\n self.add_vfi(segment)\n else:\n raise ValueError(segment)\n\n def remove_segment(self, segment):\n if isinstance(segment, Evi):\n self.remove_evi(segment)\n elif isinstance(segment, Vni):\n self.remove_vni(segment)\n elif isinstance(segment, Interface):\n self.remove_interface(segment)\n elif isinstance(segment, Pseudowire):\n self.remove_pseudowire(segment)\n elif isinstance(segment, Vfi):\n self.remove_vfi(segment)\n else:\n raise ValueError(segment)\n\n def _on_segments_updated(self, prev_segments):\n # UNUSED prev_segments = frozenset(prev_segments)\n cur_segments = frozenset(self.segments)\n prev_link_interfaces = frozenset(self.link.interfaces)\n new_link_interfaces = frozenset(\n interface\n for segment in cur_segments\n for interface in self.link_interfaces_from_segment(segment))\n for link_interface in prev_link_interfaces - new_link_interfaces:\n self.link._disconnect_interface_from_bridge_domain(link_interface)\n for link_interface in new_link_interfaces - prev_link_interfaces:\n self.link._connect_interface_from_bridge_domain(link_interface)\n\n def link_interfaces_from_segment(self, segment):\n link_interfaces = set()\n if isinstance(segment, Evi):\n pass\n elif isinstance(segment, Vni):\n pass\n elif isinstance(segment, Interface):\n if isinstance(segment, BviInterface):\n link_interfaces.add(segment)\n else:\n # Links under Genie Interface object is deprecated\n # Placed the below workaround to bypass the Unittest\n from ats.datastructures import WeakList\n segment_links = set(WeakList()) - set([self.link])\n # Priority to L2 virtual links...\n if not link_interfaces:\n for link in segment_links:\n if isinstance(\n link,\n (BridgeDomainLink,\n genie.libs.conf.l2vpn.XconnectLink)):\n link_interfaces.update(link.interfaces)\n link_interfaces.discard(segment)\n # ... then emulated links\n if not link_interfaces:\n for link in segment_links:\n if isinstance(link, EmulatedLink):\n link_interfaces.update(link.interfaces)\n link_interfaces.discard(segment)\n # ... 
finally, all links\n if not link_interfaces:\n for link in segment_links:\n link_interfaces.update(link.interfaces)\n link_interfaces.discard(segment)\n # For VLAN TGEN connections, the CE interface is the peer of\n # the AC interface's parent\n if not link_interfaces:\n parent_interface = segment.parent_interface\n if parent_interface:\n # recurse\n link_interfaces = self.link_interfaces_from_segment(\n parent_interface)\n elif isinstance(segment, Pseudowire):\n pass\n elif isinstance(segment, Vfi):\n pass\n else:\n raise ValueError(segment)\n return link_interfaces\n\n def create_pseudowire_neighbor(self, device, **kwargs):\n return self.device_attr[device].create_pseudowire_neighbor(**kwargs)\n\n class DeviceAttributes(genie.conf.base.attributes.DeviceSubAttributes):\n\n class InterfaceAttributes(\n genie.conf.base.attributes.InterfaceSubAttributes):\n\n class MacAttributes(SubAttributes):\n\n def __init__(self, _interface_attr):\n self._interface_attr = _interface_attr\n super().__init__(\n # BridgeDomain.device_attr[].mac\n parent=_interface_attr.parent.mac)\n\n @property\n def interface_name(self):\n return self._interface_attr.interface_name\n\n @property\n def interface(self):\n return self._interface_attr.interface\n\n mac = managedattribute(\n name='mac',\n read_only=True,\n doc=MacAttributes.__doc__)\n\n @mac.initter\n def mac(self):\n return self.MacAttributes(_interface_attr=self)\n\n static_mac_address = managedattribute(\n name='static_mac_address',\n default=None,\n type=(None, MAC))\n\n def __init__(self, parent, key):\n super().__init__(parent, key)\n\n interface_attr = managedattribute(\n name='interface_attr',\n read_only=True,\n doc=InterfaceAttributes.__doc__)\n\n @interface_attr.initter\n def interface_attr(self):\n return SubAttributesDict(self.InterfaceAttributes, parent=self)\n\n class NeighborAttributes(PseudowireNeighborSubAttributes):\n\n # ip -> self.neighbor.ip\n # pw_id -> self.neighbor.pw_id\n # evi -> self.neighbor.evi\n # ac_id -> self.neighbor.ac_id\n # source_ac_id -> self.neighbor.source_ac_id\n\n dhcp_ipv4_snooping_profile = managedattribute(\n name='dhcp_ipv4_snooping_profile',\n default=None,\n type=(None,\n managedattribute.test_is(False), # False\n managedattribute.test_istype(str), # <profile>\n ))\n\n igmp_snooping_profile = managedattribute(\n name='igmp_snooping_profile',\n default=None,\n type=(None,\n managedattribute.test_is(False), # False\n managedattribute.test_istype(str), # <profile>\n ))\n\n mld_snooping_profile = managedattribute(\n name='mld_snooping_profile',\n default=None,\n type=(None,\n managedattribute.test_is(False), # False\n managedattribute.test_istype(str), # <profile>\n ))\n\n mpls_static_label = managedattribute(\n name='mpls_static_label',\n default=None,\n type=(None,\n managedattribute.test_istype(int)))\n\n pw_class = managedattribute(\n name='pw_class',\n default=None,\n type=(None,\n managedattribute.test_isinstance(PseudowireClass)))\n\n split_horizon = managedattribute(\n name='split_horizon',\n default=None,\n type=(None,\n managedattribute.test_istype(bool)))\n\n static_mac_address = managedattribute(\n name='static_mac_address',\n default=None,\n type=(None, MAC))\n\n neighbor_attr = managedattribute(\n name='neighbor_attr',\n read_only=True,\n doc=NeighborAttributes.__doc__)\n\n @neighbor_attr.initter\n def neighbor_attr(self):\n return SubAttributesDict(self.NeighborAttributes, parent=self)\n\n class EviAttributes(EviSubAttributes):\n\n vlan = managedattribute(\n name='vlan',\n default=None,\n type=(None, 
int))\n\n def __init__(self, parent, key):\n super().__init__(parent=parent, key=key)\n\n evi_attr = managedattribute(\n name='evi_attr',\n read_only=True,\n doc=EviAttributes.__doc__)\n\n @evi_attr.initter\n def evi_attr(self):\n return SubAttributesDict(self.EviAttributes, parent=self)\n\n class VniAttributes(VniSubAttributes):\n\n def __init__(self, parent, key):\n super().__init__(parent, key)\n\n vni_attr = managedattribute(\n name='vni_attr',\n read_only=True,\n doc=VniAttributes.__doc__)\n\n @vni_attr.initter\n def vni_attr(self):\n return SubAttributesDict(self.VniAttributes, parent=self)\n\n class MacAttributes(SubAttributes):\n\n def __init__(self, _device_attr):\n self._device_attr = _device_attr\n super().__init__(\n # BridgeDomain.mac\n parent=_device_attr.parent.mac)\n\n @property\n def device_name(self):\n return self._device_attr.device_name\n\n @property\n def device(self):\n return self._device_attr.device\n\n @property\n def testbed(self):\n return self._device_attr.testbed\n\n mac = managedattribute(\n name='mac',\n read_only=True,\n doc=MacAttributes.__doc__)\n\n @mac.initter\n def mac(self):\n return self.MacAttributes(_device_attr=self)\n\n @property\n def evis(self):\n device = self.device\n for evi in self.parent.evis:\n if evi.device is device:\n yield evi\n\n # interfaces -- See DeviceSubAttributes\n\n @property\n def vnis(self):\n device = self.device\n for vni in self.parent.vnis:\n if vni.device is device:\n yield vni\n\n @property\n def pseudowires(self):\n container = self.parent\n device = self.device\n for pw in container.pseudowires:\n for nbr in pw.neighbors:\n if nbr.container is container \\\n and nbr.device is device:\n yield pw\n break # next pw\n\n @property\n def pseudowire_neighbors(self):\n device = self.device\n for nbr in self.parent.pseudowire_neighbors:\n if nbr.device is device:\n yield nbr\n\n @property\n def vfis(self):\n device = self.device\n for vfi in self.parent.vfis:\n if vfi.device is device:\n yield vfi\n\n @property\n def segments(self):\n yield from self.interfaces\n yield from self.pseudowires\n yield from self.vnis\n yield from self.vfis\n yield from self.evis\n\n def create_pseudowire_neighbor(self, **kwargs):\n pwnbr = PseudowireNeighbor(container=self.parent,\n device=self.device,\n **kwargs)\n return pwnbr\n\n def __init__(self, parent, key):\n super().__init__(parent, key)\n\n device_attr = managedattribute(\n name='device_attr',\n read_only=True,\n doc=DeviceAttributes.__doc__)\n\n @device_attr.initter\n def device_attr(self):\n return SubAttributesDict(self.DeviceAttributes, parent=self)\n\n def __eq__(self, other):\n if not isinstance(other, BridgeDomain):\n return NotImplemented\n # return (self.group_name, self.name) == (other.group_name, other.name)\n return (self.name, self.group_name, self.testbed) \\\n == (other.name, other.group_name, other.testbed)\n\n def __lt__(self, other):\n if not isinstance(other, BridgeDomain):\n return NotImplemented\n return (self.group_name, self.name, self.testbed) \\\n < (other.group_name, other.name, other.testbed)\n\n def __hash__(self):\n # return hash((self.group_name, self.name))\n return hash(self.name)\n\n def __init__(self, name, *args, **kwargs):\n self._name = name\n super().__init__(*args, **kwargs)\n self._link = BridgeDomainLink(bridge_domain=self)\n\n def build_config(self, devices=None, apply=True,\n attributes=None,\n **kwargs):\n cfgs = {}\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n\n if devices is None:\n devices = self.devices\n 
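# Build per-device CLI via the DeviceAttributes sub-builders below.\n        # Sketch of intended use ('bd' and 'dev' are hypothetical names):\n        #     cfgs = bd.build_config(devices=[dev], apply=False)\n        #     # -> mapping of device key to its generated configuration\n        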
devices = set(devices)\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr',\n keys=devices, sort=True):\n cfgs[key] = sub.build_config(apply=False, attributes=attributes2)\n\n if apply:\n self.testbed.config_on_devices(cfgs, fail_invalid=True)\n else:\n return cfgs\n\n def build_unconfig(self, devices=None, apply=True,\n attributes=None,\n **kwargs):\n cfgs = {}\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n\n if devices is None:\n devices = self.devices\n devices = set(devices)\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr',\n keys=devices, sort=True):\n cfgs[key] = sub.build_unconfig(apply=False, attributes=attributes2)\n\n if apply:\n self.testbed.config_on_devices(cfgs, fail_invalid=True)\n else:\n return cfgs\n\n", "id": "4262874", "language": "Python", "matching_score": 2.637941598892212, "max_stars_count": 0, "path": "pkgs/conf-pkg/src/genie/libs/conf/l2vpn/bridge_domain.py" }, { "content": "\nfrom abc import ABC\nimport warnings\nimport contextlib\n\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning,\\\n AttributesHelper\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.config import CliConfig\n\nfrom genie.libs.conf.interface import BviInterface\nfrom genie.libs.conf.l2vpn.pseudowire import PseudowireNeighbor,\\\n PseudowireIPv4Neighbor, PseudowireEviNeighbor\n\n\nclass BridgeDomain(ABC):\n\n class DeviceAttributes(ABC):\n\n class InterfaceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # iosxr: l2vpn / bridge group someword (config-l2vpn-bg)\n # iosxr: l2vpn / bridge group someword (config-l2vpn-bg)\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 (config-l2vpn-bg-bd)\n\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / routed interface BVI1\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 (config-l2vpn-bg-bd-ac)\n with configurations.submode_context(\n attributes.format(\n 'routed interface {interface_name}' if isinstance(self.interface, BviInterface) else 'interface {interface_name}',\n force=True),\n exit_cmd='' if isinstance(self.interface, BviInterface) else 'exit', # routed interface may not be an actual submode\n ):\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n if isinstance(self.interface, BviInterface):\n\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / routed interface BVI1 / split-horizon group core\n v = attributes.value('split_horizon_group_core')\n if v is True:\n configurations.append_line('split-horizon group core')\n\n if configurations:\n # There are configurations... It must be a submode; exit.\n configurations.append_line('exit', raw=True)\n else:\n # There are no configurations... 
May not be a submode; Don't exit.\n                            pass\n\n                    else:\n                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / dhcp ipv4 none\n                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / dhcp ipv4 snoop profile someword3\n                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / dynamic-arp-inspection (config-l2vpn-bg-bd-ac-dai)\n                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / dynamic-arp-inspection / address-validation (config-l2vpn-bg-bd-ac-dai-av)\n                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / dynamic-arp-inspection / address-validation / dst-mac\n                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / dynamic-arp-inspection / address-validation / dst-mac disable\n                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / dynamic-arp-inspection / address-validation / ipv4\n                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / dynamic-arp-inspection / address-validation / ipv4 disable\n                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / dynamic-arp-inspection / address-validation / src-mac\n                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / dynamic-arp-inspection / address-validation / src-mac disable\n                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / dynamic-arp-inspection / disable\n                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / dynamic-arp-inspection / logging\n                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / dynamic-arp-inspection / logging disable\n                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / flooding\n                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / flooding disable\n                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / flooding unknown-unicast\n                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / flooding unknown-unicast disable\n                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / igmp snooping profile someword3\n                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / ip-source-guard (config-l2vpn-bg-bd-ac-ipsg)\n                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / ip-source-guard / disable\n                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / ip-source-guard / logging\n                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / ip-source-guard / logging disable\n\n                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac (config-l2vpn-bg-bd-ac-mac)\n                        sub, attributes2 = attributes.namespace('mac')\n                        if sub is not None:\n                            configurations.append_block(\n                                sub.build_config(apply=False, attributes=attributes2, unconfig=unconfig))\n\n                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mld 
snooping profile someword3\n\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / split-horizon group\n v = attributes.value('split_horizon_group')\n if v is True:\n configurations.append_line('split-horizon group')\n\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / static-mac-address aaaa.bbbb.cccc\n configurations.append_line(attributes.format('static-mac-address {static_mac_address}'))\n\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / storm-control broadcast kbps 64\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / storm-control broadcast pps 1\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / storm-control multicast kbps 64\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / storm-control multicast pps 1\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / storm-control unknown-unicast kbps 64\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / storm-control unknown-unicast pps 1\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n class MacAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac (config-l2vpn-bg-bd-ac-mac)\n with configurations.submode_context('mac', cancel_empty=True):\n\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac / aging (config-l2vpn-bg-bd-ac-mac-aging)\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac / aging / time 300\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac / aging / type absolute\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac / aging / type inactivity\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac / learning\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac / learning disable\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac / limit (config-l2vpn-bg-bd-ac-mac-limit)\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac / limit / action flood\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac / limit / action no-flood\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac / limit / action none\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac / limit / action shutdown\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac / limit / maximum 1\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac / limit / notification both\n # iosxr: l2vpn / bridge group someword 
/ bridge-domain someword2 / interface Bundle-Ether1 / mac / limit / notification none\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac / limit / notification syslog\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac / limit / notification trap\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac / port-down flush\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac / port-down flush disable\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac / secure (config-l2vpn-bg-bd-ac-mac-secure)\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac / secure / action none\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac / secure / action restrict\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac / secure / action shutdown\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac / secure / disable\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac / secure / logging\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac / secure / logging disable\n\n pass\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n class NeighborAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n nbr_ctx = None\n nbr_is_submode = True\n if isinstance(self.neighbor, PseudowireIPv4Neighbor):\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 (config-l2vpn-bg-bd-pw)\n assert self.ip is not None\n assert self.pw_id is not None\n nbr_ctx = attributes.format('neighbor {ip} pw-id {pw_id}', force=True)\n elif isinstance(self.neighbor, PseudowireEviNeighbor):\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor evpn 1 target 1\n assert self.evi is not None\n assert self.ac_id is not None\n nbr_ctx = attributes.format('neighbor evpn {evi.evi_id} target {ac_id}', force=True)\n nbr_is_submode = False\n else:\n raise ValueError(self.neighbor)\n if not nbr_is_submode:\n configurations.append_line(nbr_ctx)\n else:\n with configurations.submode_context(nbr_ctx):\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / backup neighbor 1.2.3.4 pw-id 1 (config-l2vpn-bg-bd-pw-backup)\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / backup neighbor 1.2.3.4 pw-id 1 / pw-class someword3\n\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / dhcp ipv4 none\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / dhcp ipv4 snoop profile someword3\n v = attributes.value('dhcp_ipv4_snooping_profile')\n if v is not None:\n if v is False:\n configurations.append_line('dhcp ipv4 none')\n else:\n 
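# Any other (string) value names the DHCPv4 snooping profile to attach.\n                            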
configurations.append_line('dhcp ipv4 snoop profile {}'.format(v))\n\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / flooding\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / flooding disable\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / flooding unknown-unicast\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / flooding unknown-unicast disable\n\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / igmp snooping profile someword3\n v = attributes.value('igmp_snooping_profile')\n if v is not None:\n if v is False:\n pass\n else:\n configurations.append_line('igmp snooping profile {}'.format(v))\n\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mac (config-l2vpn-bg-bd-pw-mac)\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mac / aging (config-l2vpn-bg-bd-pw-mac-aging)\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mac / aging / time 300\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mac / aging / type absolute\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mac / aging / type inactivity\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mac / learning\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mac / learning disable\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mac / limit (config-l2vpn-bg-bd-pw-mac-limit)\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mac / limit / action flood\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mac / limit / action no-flood\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mac / limit / action none\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mac / limit / action shutdown\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mac / limit / maximum 1\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mac / limit / notification both\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mac / limit / notification none\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mac / limit / notification syslog\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mac / limit / notification trap\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mac / port-down flush\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mac / port-down flush disable\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mac / secure (config-l2vpn-bg-bd-pw-mac-secure)\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mac / secure / action none\n # iosxr: l2vpn / bridge group someword / bridge-domain 
someword2 / neighbor 1.2.3.4 pw-id 1 / mac / secure / action restrict\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mac / secure / action shutdown\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mac / secure / disable\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mac / secure / logging\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mac / secure / logging disable\n\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mld snooping profile someword3\n v = attributes.value('mld_snooping_profile')\n if v is not None:\n if v is False:\n pass\n else:\n configurations.append_line('mld snooping profile {}'.format(v))\n\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mpls static label local 16 remote 16\n remote_label = attributes.value('mpls_static_label')\n if remote_label is not None:\n local_label = self.parent.neighbor_attr[self.remote_neighbor].mpls_static_label\n if local_label is None:\n warnings.warn(\n 'remote neighbor {!r} mpls_static_label missing'.format(self.remote_neighbor),\n UnsupportedAttributeWarning)\n else:\n configurations.append_line('mpls static label local {} remote {}'.\\\n format(local_label, remote_label))\n\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / pw-class someword3\n v = attributes.value('pw_class')\n if v is not None:\n configurations.append_line('pw-class {}'.\\\n format(v.device_attr[self.device].name))\n\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / split-horizon group\n if attributes.value('split_horizon'):\n configurations.append_line('split-horizon group')\n\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / static-mac-address aaaa.bbbb.cccc\n configurations.append_line(attributes.format('static-mac-address {static_mac_address}'))\n\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / storm-control broadcast kbps 64\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / storm-control broadcast pps 1\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / storm-control multicast kbps 64\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / storm-control multicast pps 1\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / storm-control unknown-unicast kbps 64\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / storm-control unknown-unicast pps 1\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n class EviAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / evi 1 (config-l2vpn-bg-bd-evi)\n with configurations.submode_context(\n attributes.format('evi {evi_id}', force=True),\n 
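# --- Illustrative aside (not part of the source record above) ---
# Sketch of the 'mpls static label' cross-lookup in the builder above: the CLI
# line pairs this neighbor's configured label with the label configured on the
# corresponding remote neighbor (the builder warns and emits nothing when the
# peer side is unset; this sketch just returns None). Neighbor and
# static_label_line are hypothetical stand-ins, not the genie API.
class Neighbor:
    def __init__(self, mpls_static_label=None):
        self.mpls_static_label = mpls_static_label

def static_label_line(this_end, peer_end):
    """Return the CLI line, or None if either side's label is unset."""
    if this_end.mpls_static_label is None or peer_end.mpls_static_label is None:
        return None
    # Mirrors the builder's convention: the peer's label is rendered as
    # 'local' and this end's attribute value as 'remote'.
    return 'mpls static label local {} remote {}'.format(
        peer_end.mpls_static_label, this_end.mpls_static_label)

assert static_label_line(Neighbor(16), Neighbor(17)) == \
    'mpls static label local 17 remote 16'
assert static_label_line(Neighbor(16), Neighbor()) is None
# --- End aside ---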
exit_cmd=''): # evi is not a sub-mode in all releases.\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n class VniAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # iosxr: l2vpn / bridge group someword (config-l2vpn-bg)\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 (config-l2vpn-bg-bd)\n\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / member vni 1 (config-l2vpn-bg-bd-vni)\n with configurations.submode_context(attributes.format('member vni {vni_id}', force=True)):\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n class MacAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac (config-l2vpn-bg-bd-mac)\n with configurations.submode_context('mac', cancel_empty=True):\n\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / aging (config-l2vpn-bg-bd-mac-aging)\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / aging / time 300\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / aging / type absolute\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / aging / type inactivity\n with configurations.submode_context('aging',cancel_empty=True):\n configurations.append_line(attributes.format('time {aging_time}'))\n\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / learning\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / learning disable\n v = attributes.value('learning_disable')\n if v is True:\n configurations.append_line('learning disable')\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / limit (config-l2vpn-bg-bd-mac-limit)\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / limit / action flood\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / limit / action no-flood\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / limit / action none\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / limit / action shutdown\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / limit / maximum 1\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / limit / notification both\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / limit / notification none\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / limit / notification syslog\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / limit / notification trap\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / port-down flush\n # 
iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / port-down flush disable\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / secure (config-l2vpn-bg-bd-mac-secure)\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / secure / action none\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / secure / action restrict\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / secure / action shutdown\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / secure / disable\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / secure / logging\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / secure / logging disable\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / static-address aaaa.bbbb.cccc drop\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / withdraw access-pw disable\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / withdraw disable\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / withdraw optimize\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / withdraw relay\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / withdraw state-down\n\n pass\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n contained=False, **kwargs):\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # iosxr: l2vpn (config-l2vpn)\n submode_stack = contextlib.ExitStack()\n if not contained:\n submode_stack.enter_context(\n configurations.submode_context('l2vpn'))\n\n # iosxr: l2vpn / bridge group someword (config-l2vpn-bg)\n with configurations.submode_context(attributes.format('bridge group {group_name}', force=True, cancel_empty=True)):\n\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 (config-l2vpn-bg-bd)\n with configurations.submode_context(attributes.format('bridge-domain {name}', force=True)):\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / coupled-mode\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / dhcp ipv4 snoop profile someword3\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / dynamic-arp-inspection (config-l2vpn-bg-bd-dai)\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / dynamic-arp-inspection / address-validation (config-l2vpn-bg-bd-dai-av)\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / dynamic-arp-inspection / address-validation / dst-mac\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / dynamic-arp-inspection / address-validation / ipv4\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / dynamic-arp-inspection / address-validation / src-mac\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / dynamic-arp-inspection / logging\n\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / evi 1 (config-l2vpn-bg-bd-evi)\n for sub, attributes2 in attributes.mapping_values('evi_attr', keys=self.evis, sort=True):\n configurations.append_block(\n 
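# --- Illustrative aside (not part of the source record above) ---
# The builders in this module lean on two idioms: nested submode contexts that
# indent child lines, and a contextlib.ExitStack that makes the outer 'l2vpn'
# submode optional (skipped when contained=True). MiniBuilder below is a
# self-contained stand-in for that pattern, not genie's CliConfigBuilder.
import contextlib

class MiniBuilder:
    def __init__(self):
        self.lines = []
        self._depth = 0

    def append_line(self, line):
        self.lines.append(' ' * self._depth + line)

    @contextlib.contextmanager
    def submode_context(self, header):
        # Emit the submode header, indent the body, then close the mode.
        self.append_line(header)
        self._depth += 1
        try:
            yield
        finally:
            self._depth -= 1
            self.append_line('exit')

b = MiniBuilder()
contained = False
with contextlib.ExitStack() as stack:
    if not contained:
        stack.enter_context(b.submode_context('l2vpn'))
    with b.submode_context('bridge group G1'):
        with b.submode_context('bridge-domain BD1'):
            b.append_line('shutdown')
# -> l2vpn / bridge group G1 / bridge-domain BD1 / shutdown, with matching exits
print('\n'.join(b.lines))
# --- End aside ---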
sub.build_config(apply=False, attributes=attributes2, unconfig=unconfig))\n\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / flooding disable\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / flooding unknown-unicast disable\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / igmp snooping disable\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / igmp snooping profile someword3\n\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / routed interface BVI1\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 (config-l2vpn-bg-bd-ac)\n for sub, attributes2 in attributes.mapping_values('interface_attr', keys=self.interfaces, sort=True):\n configurations.append_block(\n sub.build_config(apply=False, attributes=attributes2, unconfig=unconfig))\n\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / ip-source-guard (config-l2vpn-bg-bd-ipsg)\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / ip-source-guard / logging\n\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac (config-l2vpn-bg-bd-mac)\n ns, attributes2 = attributes.namespace('mac')\n if ns is not None:\n configurations.append_block(\n ns.build_config(apply=False, attributes=attributes2, unconfig=unconfig))\n\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / member vni 1 (config-l2vpn-bg-bd-vni)\n for sub, attributes2 in attributes.mapping_values('vni_attr', keys=self.vnis, sort=True):\n configurations.append_block(\n sub.build_config(apply=False, attributes=attributes2, unconfig=unconfig))\n\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mld snooping profile someword3\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mtu 100\n\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 (config-l2vpn-bg-bd-pw)\n for sub, attributes2 in attributes.mapping_values('neighbor_attr', keys=self.pseudowire_neighbors, sort=True):\n configurations.append_block(\n sub.build_config(apply=False, attributes=attributes2, unconfig=unconfig))\n\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor evpn evi 1 target 1\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor evpn evi 1 target 1 source 1\n\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / nv satellite (config-l2vpn-bg-bd-nv)\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / nv satellite / offload ipv4 multicast enable\n\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb core (config-l2vpn-bg-bd-pbb-core)\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb core / evi 1 (config-l2vpn-bg-bd-pbb-core-evi)\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb core / mac (config-l2vpn-bg-bd-pbb-core-mac)\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb core / mac / aging (config-l2vpn-bg-bd-pbb-core-mac-aging)\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb core / mac / aging / time 300\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb core / mac / aging / type absolute\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb core / mac / aging / type inactivity\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb core / mac / learning\n # iosxr: l2vpn / bridge group someword / 
bridge-domain someword2 / pbb core / mac / learning disable\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb core / mmrp-flood-optimization\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb core / rewrite ingress tag push dot1ad 1 symmetric\n\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 (config-l2vpn-bg-bd-pbb-edge)\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / dhcp ipv4 none\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / dhcp ipv4 snoop profile someword4\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / igmp snooping profile someword4\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / mac (config-l2vpn-bg-bd-pbb-edge-mac)\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / mac / aging (config-l2vpn-bg-bd-pbb-edge-mac-aging)\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / mac / aging / time 300\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / mac / aging / type absolute\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / mac / aging / type inactivity\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / mac / learning\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / mac / learning disable\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / mac / limit (config-l2vpn-bg-bd-pbb-edge-mac-limit)\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / mac / limit / action flood\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / mac / limit / action no-flood\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / mac / limit / action none\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / mac / limit / action shutdown\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / mac / limit / maximum 1\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / mac / limit / notification both\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / mac / limit / notification none\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / mac / limit / notification syslog\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / mac / limit / notification trap\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / mac / secure (config-l2vpn-bg-bd-pbb-edge-mac-sec)\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / 
mac / secure / accept-shutdown\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / mac / secure / action none\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / mac / secure / action restrict\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / mac / secure / action shutdown\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / mac / secure / disable\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / mac / secure / logging\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / mac / secure / logging disable\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / split-horizon group vfi disable\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / static-mac-address aaaa.bbbb.cccc bmac aaaa.bbbb.cccc\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / unknown-unicast-bmac aaaa.bbbb.cccc\n\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / shutdown\n if attributes.value('shutdown'):\n configurations.append_line('shutdown')\n\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / storm-control broadcast kbps 64\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / storm-control broadcast pps 1\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / storm-control multicast kbps 64\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / storm-control multicast pps 1\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / storm-control unknown-unicast kbps 64\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / storm-control unknown-unicast pps 1\n\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / transport-mode vlan passthrough\n\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / vfi someword3 (config-l2vpn-bg-bd-vfi)\n for vfi, attributes2 in attributes.sequence_values('vfis'):\n configurations.append_block(\n str(vfi.build_config(apply=False, attributes=attributes2, unconfig=unconfig)))\n\n submode_stack.close()\n if apply:\n if configurations:\n self.device.configure(str(configurations), fail_invalid=True)\n else:\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations, fail_invalid=True)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n", "id": "138841", "language": "Python", "matching_score": 6.73556661605835, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/l2vpn/iosxr/bridge_domain.py" }, { "content": "\nfrom abc import ABC\nimport warnings\nimport contextlib\n\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning,\\\n AttributesHelper\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.config import CliConfig\n\nfrom genie.libs.conf.interface import EthernetInterface\nfrom genie.libs.conf.interface.iosxe import EFPInterface\nfrom genie.libs.conf.l2vpn.pseudowire import PseudowireNeighbor,\\\n PseudowireIPv4Neighbor\n\n\nclass 
BridgeDomain(ABC):\n\n class DeviceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n contained=False, **kwargs):\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # create vfi configurations\n for vfi, attributes2 in attributes.sequence_values('vfis'):\n configurations.append_block(str(vfi.build_config(apply=False, attributes=attributes2, unconfig=unconfig, **kwargs)))\n\n with configurations.submode_context(attributes.format('bridge-domain {name}', force=True)):\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n for evi, attributes2 in attributes.mapping_values('evi_attr', keys=self.evis, sort=True):\n cfg = attributes2.format('member evpn-instance {evi_id}')\n if cfg:\n cfg += attributes2.format(' vlan {vlan}', force=True)\n configurations.append_line(cfg)\n\n for vfi, attributes2 in attributes.sequence_values('vfis'):\n configurations.append_line(attributes2.format('member vfi {name}'))\n\n for sub, attributes2 in attributes.mapping_values('interface_attr', keys=self.interfaces, sort=True):\n if isinstance(sub.interface, EFPInterface):\n configurations.append_line(attributes2.format('member {interface.parent_interface.name} service-instance {interface.service_instance}'))\n elif isinstance(sub.interface, EthernetInterface):\n configurations.append_line(attributes2.format('member {interface.name}'))\n else:\n raise NotImplementedError(sub.interface)\n\n if apply:\n if configurations:\n self.device.configure(str(configurations), fail_invalid=True)\n else:\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations, fail_invalid=True)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n", "id": "527173", "language": "Python", "matching_score": 4.774474620819092, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/l2vpn/iosxe/bridge_domain.py" }, { "content": "\nfrom abc import ABC\nimport warnings\n\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning,\\\n AttributesHelper\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.config import CliConfig\n\n\nclass L2vpn(ABC):\n\n class DeviceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n\n for bd, attributes2 in attributes.sequence_values('bridge_domains'):\n configurations.append_block(\n str(bd.build_config(apply=False, attributes=attributes2, unconfig=unconfig,\n contained=True)))\n\n for xc, attributes2 in attributes.sequence_values('xconnects'):\n configurations.append_block(\n str(xc.build_config(apply=False, attributes=attributes2, unconfig=unconfig,\n contained=True)))\n\n for pwc, attributes2 in attributes.sequence_values('pseudowire_classes'):\n configurations.append_block(\n str(pwc.build_config(apply=False, attributes=attributes2, unconfig=unconfig,\n contained=True)))\n\n if apply:\n if configurations:\n self.device.configure(configurations, fail_invalid=True)\n else:\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations, fail_invalid=True)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, 
unconfig=True, **kwargs)\n\n class PbbAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n", "id": "11574827", "language": "Python", "matching_score": 3.138556957244873, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/l2vpn/iosxe/l2vpn.py" }, { "content": "\nfrom abc import ABC\nimport warnings\n\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning,\\\n AttributesHelper\nfrom genie.conf.base.config import CliConfig\nfrom genie.conf.base.cli import CliConfigBuilder\n\n\nclass L2vpn(ABC):\n\n class DeviceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # iosxr: l2vpn (config-l2vpn)\n with configurations.submode_context('l2vpn'):\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n # iosxr: l2vpn / autodiscovery bgp (config-l2vpn-ad)\n # iosxr: l2vpn / autodiscovery bgp / signaling-protocol bgp (config-l2vpn-ad-sig)\n # iosxr: l2vpn / autodiscovery bgp / signaling-protocol bgp / mtu mismatch ignore\n\n # iosxr: l2vpn / bridge group someword (config-l2vpn-bg)\n # iosxr: l2vpn / bridge group someword / bridge-domain someword2 (config-l2vpn-bg-bd)\n for bd, attributes2 in attributes.sequence_values('bridge_domains'):\n configurations.append_block(\n str(bd.build_config(apply=False, attributes=attributes2, unconfig=unconfig,\n contained=True)))\n\n # iosxr: l2vpn / capability high-mode\n # iosxr: l2vpn / capability single-mode\n # iosxr: l2vpn / description someword\n\n # iosxr: l2vpn / ethernet ring g8032 someword (config-l2vpn)\n for ring, attributes2 in attributes.sequence_values('g8032_rings'):\n configurations.append_block(\n str(ring.build_config(apply=False, attributes=attributes2, unconfig=unconfig)))\n\n # iosxr: l2vpn / flexible-xconnect-service vlan-unaware someword (config-l2vpn)\n # iosxr: l2vpn / flexible-xconnect-service vlan-unaware someword / interface Bundle-Ether1\n # iosxr: l2vpn / flexible-xconnect-service vlan-unaware someword / neighbor evpn evi 1 target 1\n # iosxr: l2vpn / flexible-xconnect-service vlan-unaware someword / neighbor evpn evi 1 target 1 source 1\n\n # iosxr: l2vpn / ignore-mtu-mismatch\n # iosxr: l2vpn / load-balancing flow src-dst-ip\n # iosxr: l2vpn / load-balancing flow src-dst-mac\n # iosxr: l2vpn / logging (config-l2vpn)\n # iosxr: l2vpn / logging / bridge-domain\n # iosxr: l2vpn / logging / nsr\n # iosxr: l2vpn / logging / pseudowire\n # iosxr: l2vpn / logging / pwhe-replication disable\n # iosxr: l2vpn / logging / vfi\n # iosxr: l2vpn / neighbor all ldp flap\n\n # iosxr: l2vpn / pbb (config-l2vpn)\n ns, attributes2 = attributes.namespace('pbb')\n if ns is not None:\n configurations.append_block(\n str(ns.build_config(apply=False, attributes=attributes2, unconfig=unconfig)))\n\n # iosxr: l2vpn / pw-class someword (config-l2vpn)\n for pwc, attributes2 in attributes.sequence_values('pseudowire_classes'):\n configurations.append_block(\n str(pwc.build_config(apply=False, attributes=attributes2, unconfig=unconfig,\n 
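# --- Illustrative aside (not part of the source record above) ---
# Sketch of the aggregation pattern in DeviceAttributes.build_config above:
# the device-level builder renders each child object (bridge domains,
# xconnects, pseudowire classes) with apply=False, joins the non-empty blocks,
# and either pushes the result to the device or returns it. The names below
# are hypothetical stand-ins, not the genie API.
def build_device_config(children, apply_fn=None):
    """Render each child and join the non-empty blocks."""
    blocks = [child() for child in children]
    text = '\n'.join(block for block in blocks if block)
    if apply_fn is not None:
        apply_fn(text)  # e.g. device.configure(...)
        return None
    return text

rendered = build_device_config([
    lambda: 'bridge-domain BD1\n exit',
    lambda: '',  # a child with nothing to configure contributes no lines
    lambda: 'pw-class PWC1\n exit',
])
assert rendered == 'bridge-domain BD1\n exit\npw-class PWC1\n exit'
# --- End aside ---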
contained=True)))\n\n # iosxr: l2vpn / pw-grouping\n # iosxr: l2vpn / pw-oam refresh transmit 1\n # iosxr: l2vpn / pw-routing (config-l2vpn)\n # iosxr: l2vpn / pw-routing / bgp (config-l2vpn)\n # iosxr: l2vpn / pw-routing / bgp / rd 100:200000\n # iosxr: l2vpn / pw-routing / bgp / rd 65536:200\n # iosxr: l2vpn / pw-routing / bgp / rd 1.2.3.4:1\n # iosxr: l2vpn / pw-routing / global-id 1\n # iosxr: l2vpn / pw-status disable\n\n # iosxr: l2vpn / redundancy (config-l2vpn)\n # iosxr: l2vpn / redundancy / iccp group 1 (config-l2vpn)\n # See IccpGroup objects\n\n # iosxr: l2vpn / router-id 1.2.3.4\n configurations.append_line(attributes.format('router-id {router_id}'))\n\n # iosxr: l2vpn / snmp mib interface format external\n # iosxr: l2vpn / snmp mib pseudowire statistics\n # iosxr: l2vpn / tcn-propagation\n\n # iosxr: l2vpn / xconnect group someword (config-l2vpn)\n # iosxr: l2vpn / xconnect group someword / mp2mp someword2 (config-l2vpn)\n # iosxr: l2vpn / xconnect group someword / p2p someword2 (config-l2vpn)\n for xc, attributes2 in attributes.sequence_values('xconnects'):\n configurations.append_block(\n str(xc.build_config(apply=False, attributes=attributes2, unconfig=unconfig,\n contained=True)))\n\n if apply:\n if configurations:\n self.device.configure(configurations, fail_invalid=True)\n else:\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations, fail_invalid=True)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n class PbbAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # iosxr: l2vpn / pbb (config-l2vpn)\n if attributes.value('enabled', force=True):\n with configurations.submode_context('pbb'):\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n # iosxr: l2vpn / pbb / backbone-source-mac aaaa.bbbb.cccc\n configurations.append_line(attributes.format('backbone-source-mac {backbone_source_mac}'))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n", "id": "10093073", "language": "Python", "matching_score": 4.196599960327148, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/l2vpn/iosxr/l2vpn.py" }, { "content": "\nfrom abc import ABC\nimport warnings\nimport contextlib\n\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning,\\\n AttributesHelper\nfrom genie.conf.base.config import CliConfig\nfrom genie.conf.base.cli import CliConfigBuilder\n\n\nclass IccpGroup(ABC):\n\n class DeviceAttributes(ABC):\n\n class InterfaceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # iosxr: l2vpn / redundancy / iccp group 1 / interface Bundle-Ether1 (config-l2vpn)\n with configurations.submode_context(attributes.format('interface {interface_name}', force=True)):\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n # iosxr: l2vpn / redundancy / iccp group 1 / interface Bundle-Ether1 / mac-flush stp-tcn\n 
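# --- Illustrative aside (not part of the source record above) ---
# Sketch of the enabled-flag gating used by PbbAttributes above: the whole
# 'pbb' submode is emitted only when the namespace is enabled, and optional
# leaves render only when set. build_pbb is a hypothetical stand-in for the
# builder, not the genie API.
def build_pbb(enabled, backbone_source_mac=None):
    if not enabled:
        return ''
    lines = ['pbb']
    if backbone_source_mac is not None:
        lines.append(' backbone-source-mac {}'.format(backbone_source_mac))
    lines.append(' exit')
    return '\n'.join(lines)

assert build_pbb(False) == ''
assert build_pbb(True, 'aaaa.bbbb.cccc') == \
    'pbb\n backbone-source-mac aaaa.bbbb.cccc\n exit'
# --- End aside ---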
configurations.append_line(attributes.format('mac-flush {mac_flush}'))\n\n # iosxr: l2vpn / redundancy / iccp group 1 / interface Bundle-Ether1 / primary vlan someword\n configurations.append_line(attributes.format('primary vlan {primary_vlan}'))\n\n # iosxr: l2vpn / redundancy / iccp group 1 / interface Bundle-Ether1 / recovery delay 30\n configurations.append_line(attributes.format('recovery delay {recovery_delay}'))\n\n # iosxr: l2vpn / redundancy / iccp group 1 / interface Bundle-Ether1 / secondary vlan someword\n configurations.append_line(attributes.format('secondary vlan {secondary_vlan}'))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # iosxr: l2vpn (config-l2vpn)\n # iosxr: l2vpn / redundancy (config-l2vpn)\n # iosxr: redundancy (config-redundancy)\n # NOTE:\n # Starting in release 6.2.1, redundancy is not under l2vpn\n # submode anymore; Enter l2vpn+redundancy but exit back root in\n # case parser automatically went up one level.\n #with configurations.submode_context('l2vpn', cancel_empty=True, exit_cmd=None), \\\n # configurations.submode_context('redundancy', cancel_empty=True, exit_cmd='root'):\n with configurations.submode_context('redundancy', cancel_empty=True):\n\n # iosxr: l2vpn / redundancy / iccp group 1 (config-l2vpn)\n with configurations.submode_context(attributes.format('iccp group {group_id}', force=True)):\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n # iosxr: l2vpn / redundancy / iccp group 1 / backbone (config-redundancy-group-iccp-backbone)\n with configurations.submode_context('backbone', cancel_empty=True):\n\n # iosxr: l2vpn / redundancy / iccp group 1 / backbone / interface <intf>\n for sub, attributes2 in attributes.sequence_values('backbone_interfaces', sort=True):\n configurations.append_line('interface {sub.name}'.format(sub=sub))\n\n # iosxr: l2vpn / redundancy / iccp group 1 / interface Bundle-Ether1 (config-l2vpn)\n for sub, attributes2 in attributes.mapping_values('interface_attr', keys=self.interfaces, sort=True):\n configurations.append_block(\n sub.build_config(apply=False, attributes=attributes2, unconfig=unconfig))\n\n # iosxr: l2vpn / redundancy / iccp group 1 / isolation recovery-delay 30\n configurations.append_line(attributes.format('isolation recovery-delay {isolation_recovery_delay}'))\n\n # iosxr: l2vpn / redundancy / iccp group 1 / member (config-redundancy-group-iccp-member)\n # iosxr: l2vpn / redundancy / iccp group 1 / member / neighbor 1.2.3.4\n\n # iosxr: l2vpn / redundancy / iccp group 1 / mlacp connect timeout 0\n\n # iosxr: l2vpn / redundancy / iccp group 1 / mlacp node 0\n configurations.append_line(attributes.format('mlacp node {mlacp_node_id}'))\n\n # iosxr: l2vpn / redundancy / iccp group 1 / mlacp system mac aaaa.bbbb.cccc\n configurations.append_line(attributes.format('mlacp system mac {mlacp_system_mac}'))\n\n # iosxr: l2vpn / redundancy / iccp group 1 / mlacp system priority 1\n configurations.append_line(attributes.format('mlacp system priority {mlacp_system_priority}'))\n\n # iosxr: l2vpn / redundancy / iccp group 1 / mode singleton\n configurations.append_line(attributes.format('mode {mode}'))\n\n # iosxr: l2vpn / redundancy 
/ iccp group 1 / multi-homing node-id <0-254>\n configurations.append_line(attributes.format('multi-homing node-id {multi_homing_node_id}'))\n\n if apply:\n if configurations:\n self.device.configure(configurations, fail_invalid=True)\n else:\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations, fail_invalid=True)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n", "id": "9325815", "language": "Python", "matching_score": 2.8270387649536133, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/l2vpn/iosxr/iccp_group.py" }, { "content": "\nfrom abc import ABC\nimport warnings\nimport contextlib\n\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning,\\\n AttributesHelper\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.config import CliConfig\n\nfrom ..bridge_domain import BridgeDomain\nfrom ..xconnect import Xconnect\nfrom ..vfi import Vfi\nfrom ..pseudowire import Pseudowire as _Pseudowire, \\\n PseudowireIPv4Neighbor, PseudowireIPv6Neighbor\n\n\nclass PseudowireClass(ABC):\n\n class DeviceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n contained=False, **kwargs):\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # iosxr: l2vpn (config-l2vpn)\n submode_stack = contextlib.ExitStack()\n if not contained:\n submode_stack.enter_context(\n configurations.submode_context('l2vpn'))\n\n # iosxr: l2vpn / pw-class someword (config-l2vpn)\n with configurations.submode_context(attributes.format('pw-class {name}', force=True)):\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n # iosxr: l2vpn / pw-class someword / backup disable delay <0-180>\n # iosxr: l2vpn / pw-class someword / backup disable never\n\n # iosxr: l2vpn / pw-class someword / encapsulation l2tpv3 (config-l2vpn)\n # iosxr: l2vpn / pw-class someword / encapsulation mpls (config-l2vpn)\n ns, attributes2 = attributes.namespace('encapsulation')\n if ns is not None:\n configurations.append_block(\n str(ns.build_config(apply=False, attributes=attributes2, unconfig=unconfig)))\n\n # iosxr: l2vpn / pw-class someword / mac-withdraw\n if attributes.value('mac_withdraw'):\n configurations.append_line('mac-withdraw')\n\n submode_stack.close()\n if apply:\n if configurations:\n self.device.configure(configurations, fail_invalid=True)\n else:\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations, fail_invalid=True)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n class EncapsulationAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n if self.type is None:\n pass\n elif self.type is _Pseudowire.EncapsulationType.l2tpv3:\n # iosxr: l2vpn / pw-class someword / encapsulation l2tpv3 (config-l2vpn)\n with configurations.submode_context('encapsulation l2tpv3'):\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n # iosxr: l2vpn / pw-class someword / encapsulation l2tpv3 / cookie size 0\n # iosxr: l2vpn / pw-class someword / encapsulation 
l2tpv3 / cookie size 4\n # iosxr: l2vpn / pw-class someword / encapsulation l2tpv3 / cookie size 8\n configurations.append_line(attributes.format('cookie size {cookie_size}'))\n\n # iosxr: l2vpn / pw-class someword / encapsulation l2tpv3 / dfbit set\n if attributes.value('dfbit_set'):\n configurations.append_line('dfbit set')\n\n # iosxr: l2vpn / pw-class someword / encapsulation l2tpv3 / ipv4 source 1.2.3.4\n configurations.append_line(attributes.format('ipv4 source {ipv4_source}'))\n\n # iosxr: l2vpn / pw-class someword / encapsulation l2tpv3 / pmtu max 65535\n configurations.append_line(attributes.format('pmtu max {pmtu_max}'))\n\n # iosxr: l2vpn / pw-class someword / encapsulation l2tpv3 / protocol l2tpv3\n # iosxr: l2vpn / pw-class someword / encapsulation l2tpv3 / protocol l2tpv3 class someword2\n cfg = attributes.format('protocol {protocol.value}')\n if cfg:\n cfg += attributes.format(' class {protocol_class}', force=True)\n configurations.append_line(cfg)\n\n # iosxr: l2vpn / pw-class someword / encapsulation l2tpv3 / sequencing both\n # iosxr: l2vpn / pw-class someword / encapsulation l2tpv3 / sequencing both resync 5\n cfg = attributes.format('sequencing {sequencing_direction}')\n if cfg:\n cfg += attributes.format(' resync {sequencing_resync}', force=True)\n configurations.append_line(cfg)\n\n # iosxr: l2vpn / pw-class someword / encapsulation l2tpv3 / tos reflect\n # iosxr: l2vpn / pw-class someword / encapsulation l2tpv3 / tos reflect value <0-255>\n # iosxr: l2vpn / pw-class someword / encapsulation l2tpv3 / tos value <0-255>\n if (attributes.value('tos') is not None or\n attributes.value('tos_reflect')):\n cfg = ' tos'\n if attributes.value('tos_reflect', force=True):\n cfg += ' reflect'\n cfg += attributes.format(' value {tos}', force=True)\n configurations.append_line(cfg)\n\n # iosxr: l2vpn / pw-class someword / encapsulation l2tpv3 / transport-mode ethernet\n # iosxr: l2vpn / pw-class someword / encapsulation l2tpv3 / transport-mode vlan\n configurations.append_line(\n attributes.format('transport-mode {transport_mode}', transform={\n _Pseudowire.TransportMode.ethernet: 'ethernet',\n _Pseudowire.TransportMode.vlan: 'vlan',\n }))\n\n # iosxr: l2vpn / pw-class someword / encapsulation l2tpv3 / ttl 1\n configurations.append_line(attributes.format('ttl {ttl}'))\n\n elif self.type is _Pseudowire.EncapsulationType.mpls:\n # iosxr: l2vpn / pw-class someword / encapsulation mpls (config-l2vpn)\n with configurations.submode_context('encapsulation mpls'):\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n # iosxr: l2vpn / pw-class someword / encapsulation mpls / control-word\n if attributes.value('control_word'):\n configurations.append_line('control-word')\n\n # iosxr: l2vpn / pw-class someword / encapsulation mpls / ipv4 source 1.2.3.4\n configurations.append_line(attributes.format('ipv4 source {ipv4_source}'))\n\n # iosxr: l2vpn / pw-class someword / encapsulation mpls / load-balancing (config-l2vpn)\n # iosxr: l2vpn / pw-class someword / encapsulation mpls / load-balancing / flow-label both\n # iosxr: l2vpn / pw-class someword / encapsulation mpls / load-balancing / flow-label both static\n # iosxr: l2vpn / pw-class someword / encapsulation mpls / load-balancing / flow-label code 17\n # iosxr: l2vpn / pw-class someword / encapsulation mpls / load-balancing / flow-label code 17 disable\n # iosxr: l2vpn / pw-class someword / encapsulation mpls / load-balancing / flow-label receive\n # iosxr: l2vpn / pw-class someword / encapsulation mpls / 
load-balancing / flow-label receive static\n # iosxr: l2vpn / pw-class someword / encapsulation mpls / load-balancing / flow-label transmit\n # iosxr: l2vpn / pw-class someword / encapsulation mpls / load-balancing / flow-label transmit static\n # iosxr: l2vpn / pw-class someword / encapsulation mpls / load-balancing / pw-label\n\n # iosxr: l2vpn / pw-class someword / encapsulation mpls / protocol ldp\n configurations.append_line(attributes.format('protocol {protocol.value}'))\n\n # iosxr: l2vpn / pw-class someword / encapsulation mpls / redundancy (config-l2vpn)\n # iosxr: l2vpn / pw-class someword / encapsulation mpls / redundancy / initial-delay <0-120>\n # iosxr: l2vpn / pw-class someword / encapsulation mpls / redundancy / one-way\n\n # iosxr: l2vpn / pw-class someword / encapsulation mpls / sequencing both\n # iosxr: l2vpn / pw-class someword / encapsulation mpls / sequencing both resync 5\n # iosxr: l2vpn / pw-class someword / encapsulation mpls / sequencing receive\n # iosxr: l2vpn / pw-class someword / encapsulation mpls / sequencing receive resync 5\n # iosxr: l2vpn / pw-class someword / encapsulation mpls / sequencing transmit\n # iosxr: l2vpn / pw-class someword / encapsulation mpls / sequencing transmit resync 5\n cfg = attributes.format('sequencing {sequencing_direction}')\n if cfg:\n cfg += attributes.format(' resync {sequencing_resync}', force=True)\n configurations.append_line(cfg)\n\n # iosxr: l2vpn / pw-class someword / encapsulation mpls / switching-tlv hide\n\n # iosxr: l2vpn / pw-class someword / encapsulation mpls / tag-rewrite ingress vlan 1\n configurations.append_line(attributes.format('tag-rewrite ingress vlan {tag_rewrite_ingress_vlan}'))\n\n # iosxr: l2vpn / pw-class someword / encapsulation mpls / transport-mode ethernet\n # iosxr: l2vpn / pw-class someword / encapsulation mpls / transport-mode vlan\n # iosxr: l2vpn / pw-class someword / encapsulation mpls / transport-mode vlan passthrough\n configurations.append_line(\n attributes.format('transport-mode {transport_mode}', transform={\n _Pseudowire.TransportMode.ethernet: 'ethernet',\n _Pseudowire.TransportMode.vlan: 'vlan',\n _Pseudowire.TransportMode.vlan_passthrough: 'vlan passthrough',\n }))\n\n # iosxr: l2vpn / pw-class someword / encapsulation mpls / vccv verification-type none\n configurations.append_line(attributes.format('vccv verification-type {vccv_verification_type}'))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n", "id": "10923227", "language": "Python", "matching_score": 6.549115180969238, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/l2vpn/iosxr/pseudowire.py" }, { "content": "\nfrom abc import ABC\nimport warnings\nimport contextlib\n\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning,\\\n AttributesHelper\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.config import CliConfig\n\nfrom ..bridge_domain import BridgeDomain\nfrom ..xconnect import Xconnect\nfrom ..vfi import Vfi\nfrom ..pseudowire import Pseudowire as _Pseudowire, \\\n PseudowireIPv4Neighbor, PseudowireIPv6Neighbor\n\n\nclass PseudowireClass(ABC):\n\n class DeviceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n contained=False, **kwargs):\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # iosxr: l2vpn 
/ pw-class someword (config-l2vpn)\n with configurations.submode_context(attributes.format('pw-class {name}', force=True)):\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n ns, attributes2 = attributes.namespace('encapsulation')\n if ns is not None:\n configurations.append_block(\n ns.build_config(apply=False, attributes=attributes2, unconfig=unconfig))\n\n if apply:\n if configurations:\n self.device.configure(configurations, fail_invalid=True)\n else:\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations, fail_invalid=True)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n class EncapsulationAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n if self.type is None:\n pass\n elif self.type is _Pseudowire.EncapsulationType.mpls:\n configurations.append_line('encapsulation mpls')\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n", "id": "6473833", "language": "Python", "matching_score": 3.710369110107422, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/l2vpn/iosxe/pseudowire.py" }, { "content": "\nfrom abc import ABC\nimport warnings\n\nfrom genie.conf.base.attributes import AttributesHelper\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.config import CliConfig\n\nimport re\n\nclass Te(ABC):\n\n class DeviceAttributes(ABC):\n\n def build_config(self, links=None, apply=True, attributes=None, unconfig=False, **kwargs):\n '''Device build config'''\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n \n if attributes.iswildcard:\n # iosxe : mpls traffic-eng tunnels\n configurations.append_line('mpls traffic-eng tunnels', \\\n unconfig_cmd = 'default mpls traffic-eng tunnels')\n\n if attributes.value('advertise_expnull'):\n configurations.append_line('mpls traffic-eng signalling advertise explicit-null')\n \n # Add per-interface config\n for sub, attributes2 in attributes.mapping_values('interface_attr', keys=self.interfaces, sort=True):\n configurations.append_block(sub.build_config(apply=False, attributes=attributes2, unconfig=unconfig, **kwargs))\n\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations)\n\n def build_unconfig(self, links=None, apply=True, attributes=None, **kwargs):\n return self.build_config(links=links, apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n class InterfaceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n '''Interface build config'''\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n with configurations.submode_context(attributes.format('interface {interface_name}', force=True)):\n\n if attributes.iswildcard:\n # iosxe : mpls traffic-eng tunnels\n configurations.append_line('mpls traffic-eng tunnels', \\\n unconfig_cmd = 'default mpls traffic-eng tunnels')\n \n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return 
self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\nclass Srlg(ABC):\n\n class DeviceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # TODO\n pass\n\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations)\n\n def build_unconfig(self, apply=True, attributes=None):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True)\n\n class InterfaceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False):\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # TODO\n pass\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True)\n\n", "id": "1067698", "language": "Python", "matching_score": 4.808779716491699, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/te/iosxe/te.py" }, { "content": "from abc import ABC\nimport warnings\n\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning,\\\n AttributesHelper\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.config import CliConfig\n\nfrom ..rsvp import Rsvp as _Rsvp\n\n\nclass Rsvp(ABC):\n\n class DeviceAttributes(ABC):\n\n def build_config(self, links=None, apply=True, attributes=None, unconfig=False, **kwargs):\n '''Device build config'''\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n if attributes.value('sig_gr', force=True):\n configurations.append_line(attributes.format('ip rsvp signalling hello graceful-restart mode {sig_gr_mode}'))\n\n configurations.append_line(attributes.format('ip rsvp signalling hello graceful-restart refresh interval {sig_hello_gr_refresh_interval}'))\n\n configurations.append_line(attributes.format('ip rsvp signalling hello graceful-restart refresh misses {sig_hello_gr_refresh_misses}'))\n\n # Add per-interface config\n for sub, attributes2 in attributes.mapping_values('interface_attr', keys=self.interfaces, sort=True):\n configurations.append_block(sub.build_config(apply=False, attributes=attributes2, unconfig=unconfig, **kwargs))\n\n # # Add per-neighbor config\n # for sub, attributes2 in attributes.mapping_values('neighbor_attr', keys=self.neighbors, sort=True):\n # configurations.append_block(sub.build_config(apply=False, attributes=attributes2, unconfig=unconfig, **kwargs))\n\n # # Add per-controller config\n # for sub, attributes2 in attributes.mapping_values('controller_attr', keys=self.controllers, sort=True):\n # configurations.append_block(sub.build_config(apply=False, attributes=attributes2, unconfig=unconfig, **kwargs))\n\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations)\n\n def build_unconfig(self, links=None, apply=True, attributes=None, **kwargs):\n return self.build_config(links=links, apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n class InterfaceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n '''Interface build config'''\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # iosxe: interface <name> (config-if)\n with 
configurations.submode_context(attributes.format('interface {interface_name}', force=True)):\n\n if attributes.value('enable_default_bw'):\n configurations.append_line('ip rsvp bandwidth')\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n", "id": "62089", "language": "Python", "matching_score": 1.912885308265686, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/rsvp/iosxe/rsvp.py" }, { "content": "\nfrom abc import ABC\nimport warnings\n\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning,\\\n AttributesHelper\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.config import CliConfig\n\nfrom ..rsvp import Rsvp as _Rsvp\n\n\ndef format_bw_unit_percent(bw, unit, percent):\n if bw is None:\n return ''\n s = ' '\n if percent:\n s += '{}'.format(bw)\n elif unit is None:\n s += '{}'.format(bw)\n elif unit is _Rsvp.BwUnit.kbps:\n s += '{} kbps'.format(bw)\n elif unit is _Rsvp.BwUnit.mbps:\n s += '{} mbps'.format(bw // 1000)\n elif unit is _Rsvp.BwUnit.gbps:\n s += '{} gbps'.format(bw // 1000000)\n else:\n raise ValueError(unit)\n return s\n\n\nclass Rsvp(ABC):\n\n class DeviceAttributes(ABC):\n\n def build_config(self, links=None, apply=True, attributes=None, unconfig=False, **kwargs):\n '''Device build config'''\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # iosxr: rsvp (config-rsvp)\n with configurations.submode_context('rsvp'):\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n # iosxr: rsvp / authentication (config-rsvp-auth)\n with configurations.submode_context('authentication', cancel_empty=True):\n\n # iosxr: rsvp / authentication / key-source key-chain someword\n configurations.append_line(attributes.format('key-source key-chain {auth_keysrc_keychain}'))\n\n # iosxr: rsvp / authentication / life-time 30\n configurations.append_line(attributes.format('life-time {auth_lifetime}'))\n\n # iosxr: rsvp / authentication / window-size 1\n configurations.append_line(attributes.format('window-size {auth_window_size}'))\n\n # iosxr: rsvp / authentication / retransmit <0-10000>\n configurations.append_line(attributes.format('retransmit {auth_retransmit}'))\n\n # iosxr: rsvp / bandwidth rdm percentage max-reservable-bc0 <0-10000>\n # iosxr: rsvp / bandwidth rdm percentage max-reservable-bc0 <0-10000> bc1 <0-10000>\n if self.rdm_bw_percentage:\n cfg = attributes.format('bandwidth rdm percentage max-reservable-bc0 {rdm_bw_total}')\n if cfg:\n cfg += attributes.format(' bc1 {rdm_bw_subpool}', force=True)\n configurations.append_line(cfg)\n\n # iosxr: rsvp / bandwidth mam percentage max-reservable <0-10000>\n # iosxr: rsvp / bandwidth mam percentage max-reservable <0-10000> bc0 <0-10000>\n # iosxr: rsvp / bandwidth mam percentage max-reservable <0-10000> bc0 <0-10000> bc1 <0-10000>\n if self.mam_bw_percentage:\n cfg = attributes.format('bandwidth mam percentage max-reservable {mam_bw_total}')\n if cfg:\n if self.mam_bw_bc0 is not None:\n cfg += attributes.format(' bc0 {mam_bw_bc0}', force=True)\n cfg += attributes.format(' bc1 {mam_bw_bc1}', force=True)\n configurations.append_line(cfg)\n\n # iosxr: rsvp / logging events issu\n if attributes.value('log_events_issu'):\n configurations.append_line('logging events issu')\n\n # iosxr: rsvp / logging events nsr\n if attributes.value('log_events_nsr'):\n 
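# --- Illustrative aside (not part of the source record above) ---
# Self-contained check of the unit scaling in format_bw_unit_percent above
# (hedged: the // 1000 and // 1000000 divisions imply the bandwidth value is
# carried in kbps; note the second branch must test gbps, not mbps again, or
# it is unreachable). BwUnit below is a stand-in for _Rsvp.BwUnit.
import enum

class BwUnit(enum.Enum):
    kbps = 'kbps'
    mbps = 'mbps'
    gbps = 'gbps'

def fmt_bw(bw, unit, percent):
    if bw is None:
        return ''
    if percent or unit is None:
        return ' {}'.format(bw)
    if unit is BwUnit.kbps:
        return ' {} kbps'.format(bw)
    if unit is BwUnit.mbps:
        return ' {} mbps'.format(bw // 1000)
    if unit is BwUnit.gbps:
        return ' {} gbps'.format(bw // 1000000)
    raise ValueError(unit)

assert fmt_bw(500, BwUnit.kbps, False) == ' 500 kbps'
assert fmt_bw(2000, BwUnit.mbps, False) == ' 2 mbps'
assert fmt_bw(3000000, BwUnit.gbps, False) == ' 3 gbps'
assert fmt_bw(80, None, True) == ' 80'
# --- End aside ---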
configurations.append_line('logging events nsr')\n\n # iosxr: rsvp / latency threshold <0-180>\n # iosxr: rsvp / ltrace-buffer multiplier 2 all\n # iosxr: rsvp / ltrace-buffer multiplier 2 common\n # iosxr: rsvp / ltrace-buffer multiplier 2 common dbg-err\n # iosxr: rsvp / ltrace-buffer multiplier 2 common dbg-err intf\n # iosxr: rsvp / ltrace-buffer multiplier 2 common dbg-err intf rare\n # iosxr: rsvp / ltrace-buffer multiplier 2 common dbg-err intf rare sig\n # iosxr: rsvp / ltrace-buffer multiplier 2 common dbg-err intf rare sig sig-err\n # iosxr: rsvp / ltrace-buffer multiplier 2 common dbg-err intf rare sig sig-err sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 common dbg-err intf rare sig sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 common dbg-err intf rare sig-err\n # iosxr: rsvp / ltrace-buffer multiplier 2 common dbg-err intf rare sig-err sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 common dbg-err intf rare sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 common dbg-err intf sig\n # iosxr: rsvp / ltrace-buffer multiplier 2 common dbg-err intf sig sig-err\n # iosxr: rsvp / ltrace-buffer multiplier 2 common dbg-err intf sig sig-err sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 common dbg-err intf sig sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 common dbg-err intf sig-err\n # iosxr: rsvp / ltrace-buffer multiplier 2 common dbg-err intf sig-err sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 common dbg-err intf sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 common dbg-err rare\n # iosxr: rsvp / ltrace-buffer multiplier 2 common dbg-err rare sig\n # iosxr: rsvp / ltrace-buffer multiplier 2 common dbg-err rare sig sig-err\n # iosxr: rsvp / ltrace-buffer multiplier 2 common dbg-err rare sig sig-err sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 common dbg-err rare sig sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 common dbg-err rare sig-err\n # iosxr: rsvp / ltrace-buffer multiplier 2 common dbg-err rare sig-err sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 common dbg-err rare sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 common dbg-err sig\n # iosxr: rsvp / ltrace-buffer multiplier 2 common dbg-err sig sig-err\n # iosxr: rsvp / ltrace-buffer multiplier 2 common dbg-err sig sig-err sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 common dbg-err sig sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 common dbg-err sig-err\n # iosxr: rsvp / ltrace-buffer multiplier 2 common dbg-err sig-err sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 common dbg-err sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 common intf\n # iosxr: rsvp / ltrace-buffer multiplier 2 common intf rare\n # iosxr: rsvp / ltrace-buffer multiplier 2 common intf rare sig\n # iosxr: rsvp / ltrace-buffer multiplier 2 common intf rare sig sig-err\n # iosxr: rsvp / ltrace-buffer multiplier 2 common intf rare sig sig-err sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 common intf rare sig sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 common intf rare sig-err\n # iosxr: rsvp / ltrace-buffer multiplier 2 common intf rare sig-err sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 common intf rare sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 common intf sig\n # iosxr: rsvp / ltrace-buffer multiplier 2 common intf sig sig-err\n # iosxr: rsvp / ltrace-buffer multiplier 2 common intf sig sig-err sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 common intf sig sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 common intf sig-err\n # iosxr: rsvp / ltrace-buffer multiplier 2 
common intf sig-err sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 common intf sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 common rare\n # iosxr: rsvp / ltrace-buffer multiplier 2 common rare sig\n # iosxr: rsvp / ltrace-buffer multiplier 2 common rare sig sig-err\n # iosxr: rsvp / ltrace-buffer multiplier 2 common rare sig sig-err sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 common rare sig sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 common rare sig-err\n # iosxr: rsvp / ltrace-buffer multiplier 2 common rare sig-err sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 common rare sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 common sig\n # iosxr: rsvp / ltrace-buffer multiplier 2 common sig sig-err\n # iosxr: rsvp / ltrace-buffer multiplier 2 common sig sig-err sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 common sig sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 common sig-err\n # iosxr: rsvp / ltrace-buffer multiplier 2 common sig-err sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 common sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 dbg-err\n # iosxr: rsvp / ltrace-buffer multiplier 2 dbg-err intf\n # iosxr: rsvp / ltrace-buffer multiplier 2 dbg-err intf rare\n # iosxr: rsvp / ltrace-buffer multiplier 2 dbg-err intf rare sig\n # iosxr: rsvp / ltrace-buffer multiplier 2 dbg-err intf rare sig sig-err\n # iosxr: rsvp / ltrace-buffer multiplier 2 dbg-err intf rare sig sig-err sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 dbg-err intf rare sig sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 dbg-err intf rare sig-err\n # iosxr: rsvp / ltrace-buffer multiplier 2 dbg-err intf rare sig-err sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 dbg-err intf rare sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 dbg-err intf sig\n # iosxr: rsvp / ltrace-buffer multiplier 2 dbg-err intf sig sig-err\n # iosxr: rsvp / ltrace-buffer multiplier 2 dbg-err intf sig sig-err sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 dbg-err intf sig sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 dbg-err intf sig-err\n # iosxr: rsvp / ltrace-buffer multiplier 2 dbg-err intf sig-err sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 dbg-err intf sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 dbg-err rare\n # iosxr: rsvp / ltrace-buffer multiplier 2 dbg-err rare sig\n # iosxr: rsvp / ltrace-buffer multiplier 2 dbg-err rare sig sig-err\n # iosxr: rsvp / ltrace-buffer multiplier 2 dbg-err rare sig sig-err sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 dbg-err rare sig sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 dbg-err rare sig-err\n # iosxr: rsvp / ltrace-buffer multiplier 2 dbg-err rare sig-err sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 dbg-err rare sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 dbg-err sig\n # iosxr: rsvp / ltrace-buffer multiplier 2 dbg-err sig sig-err\n # iosxr: rsvp / ltrace-buffer multiplier 2 dbg-err sig sig-err sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 dbg-err sig sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 dbg-err sig-err\n # iosxr: rsvp / ltrace-buffer multiplier 2 dbg-err sig-err sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 dbg-err sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 intf\n # iosxr: rsvp / ltrace-buffer multiplier 2 intf rare\n # iosxr: rsvp / ltrace-buffer multiplier 2 intf rare sig\n # iosxr: rsvp / ltrace-buffer multiplier 2 intf rare sig sig-err\n # iosxr: rsvp / ltrace-buffer multiplier 2 intf rare sig sig-err sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 intf rare sig sync\n # iosxr: rsvp 
/ ltrace-buffer multiplier 2 intf rare sig-err\n # iosxr: rsvp / ltrace-buffer multiplier 2 intf rare sig-err sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 intf rare sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 intf sig\n # iosxr: rsvp / ltrace-buffer multiplier 2 intf sig sig-err\n # iosxr: rsvp / ltrace-buffer multiplier 2 intf sig sig-err sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 intf sig sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 intf sig-err\n # iosxr: rsvp / ltrace-buffer multiplier 2 intf sig-err sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 intf sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 rare\n # iosxr: rsvp / ltrace-buffer multiplier 2 rare sig\n # iosxr: rsvp / ltrace-buffer multiplier 2 rare sig sig-err\n # iosxr: rsvp / ltrace-buffer multiplier 2 rare sig sig-err sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 rare sig sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 rare sig-err\n # iosxr: rsvp / ltrace-buffer multiplier 2 rare sig-err sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 rare sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 sig\n # iosxr: rsvp / ltrace-buffer multiplier 2 sig sig-err\n # iosxr: rsvp / ltrace-buffer multiplier 2 sig sig-err sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 sig sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 sig-err\n # iosxr: rsvp / ltrace-buffer multiplier 2 sig-err sync\n # iosxr: rsvp / ltrace-buffer multiplier 2 sync\n\n # iosxr: rsvp / signalling checksum disable\n if attributes.value('sig_checksum') is False:\n configurations.append_line('signalling checksum disable')\n\n # iosxr: rsvp / signalling event-per-pulse <0-1600>\n configurations.append_line(attributes.format('signalling event-per-pulse {sig_event_per_pulse}'))\n\n # iosxr: rsvp / signalling graceful-restart\n # iosxr: rsvp / signalling graceful-restart disable\n v = attributes.value('sig_gr')\n if v is not None:\n if v:\n configurations.append_line('signalling graceful-restart')\n else:\n configurations.append_line('signalling graceful-restart disable')\n\n # iosxr: rsvp / signalling graceful-restart recovery-time <0-3600>\n configurations.append_line(attributes.format('signalling graceful-restart recovery-time {sig_gr_recov_time}'))\n\n # iosxr: rsvp / signalling graceful-restart restart-time 60\n configurations.append_line(attributes.format('signalling graceful-restart restart-time {sig_gr_restart_time}'))\n\n # iosxr: rsvp / signalling hello graceful-restart refresh interval 3000\n configurations.append_line(attributes.format('signalling hello graceful-restart refresh interval {sig_hello_gr_refresh_interval}'))\n\n # iosxr: rsvp / signalling hello graceful-restart refresh misses 1\n configurations.append_line(attributes.format('signalling hello graceful-restart refresh misses {sig_hello_gr_refresh_misses}'))\n\n # iosxr: rsvp / signalling message-bundle disable\n if attributes.value('sig_message_bundle') is False:\n configurations.append_line('signalling message-bundle disable')\n\n # iosxr: rsvp / signalling out-of-band vrf someword\n configurations.append_line(attributes.format('signalling out-of-band vrf {sig_outofband_vrf.name}'))\n\n # iosxr: rsvp / signalling patherr state-removal disable\n if attributes.value('sig_patherr_state_removal') is False:\n configurations.append_line('signalling patherr state-removal disable')\n\n # iosxr: rsvp / signalling prefix-filtering access-list someword\n configurations.append_line(attributes.format('signalling prefix-filtering access-list {sig_prefixfilt_acl.name}'))\n\n # iosxr: 
rsvp / signalling prefix-filtering default-deny-action drop\n configurations.append_line(attributes.format('signalling prefix-filtering default-deny-action {sig_prefixfilt_defdenyaction.value}'))\n\n # Add per-interface config\n for sub, attributes2 in attributes.mapping_values('interface_attr', keys=self.interfaces, sort=True):\n configurations.append_block(sub.build_config(apply=False, attributes=attributes2, unconfig=unconfig, **kwargs))\n\n # Add per-neighbor config\n for sub, attributes2 in attributes.mapping_values('neighbor_attr', keys=self.neighbors, sort=True):\n configurations.append_block(sub.build_config(apply=False, attributes=attributes2, unconfig=unconfig, **kwargs))\n\n # Add per-controller config\n for sub, attributes2 in attributes.mapping_values('controller_attr', keys=self.controllers, sort=True):\n configurations.append_block(sub.build_config(apply=False, attributes=attributes2, unconfig=unconfig, **kwargs))\n\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations)\n\n def build_unconfig(self, links=None, apply=True, attributes=None, **kwargs):\n return self.build_config(links=links, apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n class InterfaceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n '''Interface build config'''\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # iosxr: rsvp / interface <name> (config-rsvp-if)\n with configurations.submode_context(attributes.format('interface {interface_name}', force=True)):\n\n # iosxr: rsvp / interface <name> / authentication (config-rsvp-if-auth)\n with configurations.submode_context('authentication', cancel_empty=True):\n\n # iosxr: rsvp / interface <name> / authentication / key-source key-chain someword\n configurations.append_line(attributes.format('key-source key-chain {auth_keysrc_keychain}', inherited=False))\n\n # iosxr: rsvp / interface <name> / authentication / life-time 30\n configurations.append_line(attributes.format('life-time {auth_lifetime}', inherited=False))\n\n # iosxr: rsvp / interface <name> / authentication / window-size 1\n configurations.append_line(attributes.format('window-size {auth_window_size}', inherited=False))\n\n # iosxr: rsvp / interface <name> / signalling refresh out-of-band interval 180\n configurations.append_line(attributes.format('signalling refresh out-of-band interval {sig_refresh_outofband_interval}'))\n\n # iosxr: rsvp / interface <name> / signalling refresh out-of-band missed 1\n configurations.append_line(attributes.format('signalling refresh out-of-band missed {sig_refresh_outofband_missed}'))\n\n # iosxr: rsvp / interface <name> / signalling dscp <0-63>\n configurations.append_line(attributes.format('signalling dscp {sig_dscp}'))\n\n # iosxr: rsvp / interface <name> / signalling hello graceful-restart interface-based\n if attributes.value('sig_hello_gr_intfbased'):\n configurations.append_line('signalling hello graceful-restart interface-based')\n\n # iosxr: rsvp / interface <name> / signalling rate-limit\n # iosxr: rsvp / interface <name> / signalling rate-limit rate 1\n # iosxr: rsvp / interface <name> / signalling rate-limit rate 1 interval 250\n v = attributes.value('sig_rate_limit')\n if v is not None:\n if v is False:\n pass\n else:\n cfg = 'signalling rate-limit'\n if v is True:\n pass\n else:\n cfg += ' rate {}'.format(v)\n cfg += attributes.format(' interval {sig_rate_limit_interval}', 
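# e.g. sig_rate_limit=10, sig_rate_limit_interval=250 would render 'signalling rate-limit rate 10 interval 250' (illustrative values, not from the original source)\n 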
force=True)\n configurations.append_line(cfg)\n\n # iosxr: rsvp / interface <name> / signalling refresh interval 10\n configurations.append_line(attributes.format('signalling refresh interval {sig_refresh_interval}'))\n\n # iosxr: rsvp / interface <name> / signalling refresh missed 1\n configurations.append_line(attributes.format('signalling refresh missed {sig_refresh_missed}'))\n\n # iosxr: rsvp / interface <name> / signalling refresh reduction disable\n if attributes.value('sig_refresh_reduction') is False:\n configurations.append_line('signalling refresh reduction disable')\n\n # iosxr: rsvp / interface <name> / signalling refresh reduction bundle-max-size 512\n configurations.append_line(attributes.format('signalling refresh reduction bundle-max-size {sig_refresh_reduction_bundle_maxsize}'))\n\n # iosxr: rsvp / interface <name> / signalling refresh reduction reliable ack-hold-time 100\n configurations.append_line(attributes.format('signalling refresh reduction reliable ack-hold-time {sig_refresh_reduction_reliable_ack_holdtime}'))\n\n # iosxr: rsvp / interface <name> / signalling refresh reduction reliable ack-max-size 20\n configurations.append_line(attributes.format('signalling refresh reduction reliable ack-max-size {sig_refresh_reduction_reliable_ack_maxsize}'))\n\n # iosxr: rsvp / interface <name> / signalling refresh reduction reliable retransmit-time 100\n configurations.append_line(attributes.format('signalling refresh reduction reliable retransmit-time {sig_refresh_reduction_reliable_retransmit_time}'))\n\n # iosxr: rsvp / interface <name> / signalling refresh reduction reliable summary-refresh\n if attributes.value('sig_refresh_reduction_reliable_summary_refresh'):\n configurations.append_line('signalling refresh reduction reliable summary-refresh')\n\n # iosxr: rsvp / interface <name> / signalling refresh reduction summary max-size 20\n configurations.append_line(attributes.format('signalling refresh reduction summary max-size {sig_refresh_reduction_summary_maxsize}'))\n\n if not self.rdm_bw_percentage \\\n or not self.isinherited('rdm_bw_percentage') \\\n or not self.isinherited('rdm_bw_total'):\n if attributes.value('rdm_bw_total') is not None:\n keywords = ()\n keywords += ('rdm' if self.rdm_bw_cli_rdm_kw else '',)\n if self.rdm_bw_cli_style is _Rsvp.RdmBwCliStyle.unnamed_subpool:\n # iosxr: rsvp / interface <name> / bandwidth [rdm] <0-4294967295> [Kbps|Mbps|Gbps] [<0-4294967295> [Kbps|Mbps|Gbps]] [sub-pool <0-4294967295> [Kbps|Mbps|Gbps]]\n # iosxr: rsvp / interface <name> / bandwidth [rdm] percentage <0-10000> [<0-10000>] [sub-pool <0-10000>]\n keywords += ('', 'sub-pool')\n elif self.rdm_bw_cli_style is _Rsvp.RdmBwCliStyle.bc0_bc1:\n # iosxr: rsvp / interface <name> / bandwidth [rdm] bc0 <0-4294967295> [<0-4294967295> [Kbps|Mbps|Gbps]] [bc1 <0-4294967295> [Kbps|Mbps|Gbps]]\n # iosxr: rsvp / interface <name> / bandwidth [rdm] percentage bc0 <0-10000> [<0-10000>] [bc1 <0-10000>]\n keywords += ('bc0', 'bc1')\n elif self.rdm_bw_cli_style is _Rsvp.RdmBwCliStyle.global_subpool:\n # iosxr: rsvp / interface <name> / bandwidth [rdm] global-pool <0-4294967295> [Kbps|Mbps|Gbps] [<0-4294967295> [Kbps|Mbps|Gbps]] [sub-pool <0-4294967295> [Kbps|Mbps|Gbps]]\n # iosxr: rsvp / interface <name> / bandwidth [rdm] percentage global-pool <0-10000> [<0-10000>] [sub-pool <0-10000>]\n keywords += ('global-pool', 'sub-pool')\n else:\n raise ValueError(self.rdm_bw_cli_style)\n cfg = 'bandwidth'\n if self.rdm_bw_percentage:\n cfg += ' percentage'\n if keywords[0]:\n cfg += ' ' + 
keywords[0] # rdm\n if keywords[1]:\n cfg += ' ' + keywords[1] # |bc0|global-pool\n cfg += format_bw_unit_percent(self.rdm_bw_total, self.rdm_bw_total_unit, self.rdm_bw_percentage)\n cfg += format_bw_unit_percent(self.rdm_bw_largest, self.rdm_bw_largest_unit, self.rdm_bw_percentage)\n if self.rdm_bw_subpool is not None:\n if keywords[2]:\n cfg += ' ' + keywords[2] # sub-pool|bc1|sub-pool\n cfg += format_bw_unit_percent(self.rdm_bw_subpool, self.rdm_bw_subpool_unit, self.rdm_bw_percentage)\n configurations.append_line(cfg)\n\n elif attributes.value('enable_default_bw'):\n # Effectively overrides rdm config above\n # iosxr: rsvp / interface <name> / bandwidth\n configurations.append_line('bandwidth')\n\n if not self.mam_bw_percentage \\\n or not self.isinherited('mam_bw_percentage') \\\n or not self.isinherited('mam_bw_total'):\n if attributes.value('mam_bw_max_reservable') is not None \\\n or attributes.value('mam_bw_total') is not None:\n # iosxr: rsvp / interface <name> / bandwidth mam [max-reservable-bw] [<0-4294967295> [<0-4294967295> [Kbps|Mbps|Gbps]] [bc0 <0-4294967295> [Kbps|Mbps|Gbps] [bc1 <0-4294967295> [Kbps|Mbps|Gbps]]]]\n # iosxr: rsvp / interface <name> / bandwidth mam percentage [max-reservable-bw] [<0-10000> [<0-10000>] [bc0 <0-10000> [bc1 <0-10000>]]]\n cfg = 'bandwidth mam'\n if self.mam_bw_percentage:\n cfg += ' percentage'\n if self.mam_bw_max_reservable:\n cfg += ' max-reservable-bw'\n if self.mam_bw_total is not None:\n cfg += format_bw_unit_percent(self.mam_bw_total, self.mam_bw_total_unit, self.mam_bw_percentage)\n cfg += format_bw_unit_percent(self.mam_bw_largest, self.mam_bw_largest_unit, self.mam_bw_percentage)\n if self.mam_bw_bc0 is not None:\n cfg += ' bc0'\n cfg += format_bw_unit_percent(self.mam_bw_bc0, self.mam_bw_bc0_unit, self.mam_bw_percentage)\n if self.mam_bw_bc1 is not None:\n cfg += ' bc1'\n cfg += format_bw_unit_percent(self.mam_bw_bc1, self.mam_bw_bc1_unit, self.mam_bw_percentage)\n configurations.append_line(cfg)\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n class NeighborAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n '''Neighbor build config'''\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # iosxr: rsvp / neighbor 1.2.3.4 (config-rsvp-nbr)\n with configurations.submode_context(attributes.format('neighbor {neighbor.ip}', force=True)):\n\n # iosxr: rsvp / neighbor 1.2.3.4 / authentication (config-rsvp-nbor-auth)\n with configurations.submode_context('authentication', cancel_empty=True):\n\n # iosxr: rsvp / neighbor 1.2.3.4 / authentication / key-source key-chain someword\n configurations.append_line(attributes.format('key-source key-chain {auth_keysrc_keychain}', inherited=False))\n\n # iosxr: rsvp / neighbor 1.2.3.4 / authentication / life-time 30\n configurations.append_line(attributes.format('life-time {auth_lifetime}', inherited=False))\n\n # iosxr: rsvp / neighbor 1.2.3.4 / authentication / window-size 1\n configurations.append_line(attributes.format('window-size {auth_window_size}', inherited=False))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n class ControllerAttributes(ABC):\n\n def build_config(self, apply=True, 
attributes=None, unconfig=False, **kwargs):\n '''Controller build config'''\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # iosxr: rsvp / controller <name> (config-rsvp-cntl)\n with configurations.submode_context(attributes.format('controller {interface_name}', force=True)):\n\n # iosxr: rsvp / controller <name> / signalling refresh out-of-band interval 180\n configurations.append_line(attributes.format('signalling refresh out-of-band interval {sig_refresh_outofband_interval}'))\n\n # iosxr: rsvp / controller <name> / signalling refresh out-of-band missed 1\n configurations.append_line(attributes.format('signalling refresh out-of-band missed {sig_refresh_outofband_missed}'))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n", "id": "642278", "language": "Python", "matching_score": 7.023021221160889, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/rsvp/iosxr/rsvp.py" }, { "content": "\n__all__ = (\n 'Rsvp',\n)\n\nfrom enum import Enum\n\nfrom genie.utils.cisco_collections import typedset\n\nfrom genie.decorator import managedattribute\nfrom genie.conf.base import DeviceFeature, LinkFeature\nimport genie.conf.base.attributes\nfrom genie.conf.base.attributes import SubAttributes, SubAttributesDict, AttributesHelper\nfrom ats.datastructures import WeakList\nfrom genie.libs.conf.base import IPv4Neighbor, IPv4Address\nfrom genie.libs.conf.base.neighbor import IPv4NeighborSubAttributes\nfrom genie.libs.conf.address_family import AddressFamily\nfrom genie.libs.conf.vrf import Vrf, VrfSubAttributes\nfrom genie.libs.conf.access_list import AccessList\n\n\nclass Rsvp(DeviceFeature, LinkFeature):\n\n @property\n def interfaces(self):\n interfaces = set()\n interfaces.update(*[link.interfaces for link in self.links])\n return frozenset(interfaces)\n\n @property\n def controllers(self):\n controllers = set()\n # TODO\n return frozenset(controllers)\n\n # Top level attributes\n\n auth_keysrc_keychain = managedattribute(\n name='auth_keysrc_keychain',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n auth_lifetime = managedattribute(\n name='auth_lifetime',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n auth_window_size = managedattribute(\n name='auth_window_size',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n auth_retransmit = managedattribute(\n name='auth_retransmit',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n log_events_issu = managedattribute(\n name='log_events_issu',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n log_events_nsr = managedattribute(\n name='log_events_nsr',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n sig_checksum = managedattribute(\n name='sig_checksum',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n sig_event_per_pulse = managedattribute(\n name='sig_event_per_pulse',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n sig_gr = managedattribute(\n name='sig_gr',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n sig_gr_mode = managedattribute(\n name='sig_gr_mode',\n default='full',\n type=(None, managedattribute.test_istype(str)))\n\n sig_gr_recov_time = managedattribute(\n name='sig_gr_recov_time',\n default=None,\n type=(None, 
managedattribute.test_istype(int)))\n\n sig_gr_restart_time = managedattribute(\n name='sig_gr_restart_time',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n sig_hello_gr_refresh_interval = managedattribute(\n name='sig_hello_gr_refresh_interval',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n sig_hello_gr_refresh_misses = managedattribute(\n name='sig_hello_gr_refresh_misses',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n sig_message_bundle = managedattribute(\n name='sig_message_bundle',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n sig_outofband_vrf = managedattribute(\n name='sig_outofband_vrf',\n default=None,\n type=(None, managedattribute.test_isinstance(Vrf)))\n\n sig_patherr_state_removal = managedattribute(\n name='sig_patherr_state_removal',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n sig_prefixfilt_acl = managedattribute(\n name='sig_prefixfilt_acl',\n default=None,\n type=(None, managedattribute.test_isinstance(AccessList)))\n\n class PrefixFilteringAction(Enum):\n drop = 'drop'\n\n sig_prefixfilt_defdenyaction = managedattribute(\n name='sig_prefixfilt_defdenyaction',\n default=None,\n type=(None, PrefixFilteringAction))\n\n # Per-interface attributes\n\n sig_refresh_outofband_interval = managedattribute(\n name='sig_refresh_outofband_interval',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n sig_refresh_outofband_missed = managedattribute(\n name='sig_refresh_outofband_missed',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n sig_dscp = managedattribute(\n name='sig_dscp',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n sig_hello_gr_intfbased = managedattribute(\n name='sig_hello_gr_intfbased',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n sig_rate_limit = managedattribute(\n name='sig_rate_limit',\n default=None,\n type=(None, managedattribute.test_istype((bool, int))))\n\n sig_rate_limit_interval = managedattribute(\n name='sig_rate_limit_interval',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n sig_refresh_interval = managedattribute(\n name='sig_refresh_interval',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n sig_refresh_missed = managedattribute(\n name='sig_refresh_missed',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n sig_refresh_reduction_bundle_maxsize = managedattribute(\n name='sig_refresh_reduction_bundle_maxsize',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n sig_refresh_reduction = managedattribute(\n name='sig_refresh_reduction',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n sig_refresh_reduction_reliable_ack_holdtime = managedattribute(\n name='sig_refresh_reduction_reliable_ack_holdtime',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n sig_refresh_reduction_reliable_ack_maxsize = managedattribute(\n name='sig_refresh_reduction_reliable_ack_maxsize',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n sig_refresh_reduction_reliable_retransmit_time = managedattribute(\n name='sig_refresh_reduction_reliable_retransmit_time',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n sig_refresh_reduction_reliable_summary_refresh = managedattribute(\n name='sig_refresh_reduction_reliable_summary_refresh',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n 
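# Illustrative usage sketch (hypothetical 'dev'/'intf' objects, not part of the original source):\n # rsvp = Rsvp()\n # rsvp.device_attr[dev].interface_attr[intf].sig_refresh_reduction_bundle_maxsize = 512\n # the iosxr builder above would then emit 'signalling refresh reduction bundle-max-size 512'\n\n 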
sig_refresh_reduction_summary_maxsize = managedattribute(\n name='sig_refresh_reduction_summary_maxsize',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n class BwUnit(Enum):\n kbps = 'kbps'\n mbps = 'mbps'\n gbps = 'gbps'\n\n enable_default_bw = managedattribute(\n name='enable_default_bw',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n class RdmBwCliStyle(Enum):\n unnamed_subpool = 'unnamed_subpool'\n bc0_bc1 = 'bc0_bc1'\n global_subpool = 'global_subpool'\n\n rdm_bw_cli_rdm_kw = managedattribute(\n name='rdm_bw_cli_rdm_kw',\n default=True,\n type=managedattribute.test_istype(bool))\n\n rdm_bw_cli_style = managedattribute(\n name='rdm_bw_cli_style',\n default=RdmBwCliStyle.unnamed_subpool,\n type=RdmBwCliStyle)\n\n rdm_bw_percentage = managedattribute(\n name='rdm_bw_percentage',\n default=False,\n type=managedattribute.test_istype(bool))\n\n rdm_bw_total = managedattribute(\n name='rdm_bw_total',\n default=20000,\n type=(None, managedattribute.test_istype(int)))\n\n rdm_bw_total_unit = managedattribute(\n name='rdm_bw_total_unit',\n default=None,\n type=(None, BwUnit))\n\n rdm_bw_largest = managedattribute(\n name='rdm_bw_largest',\n default=20000,\n type=(None, managedattribute.test_istype(int)))\n\n rdm_bw_largest_unit = managedattribute(\n name='rdm_bw_largest_unit',\n default=None,\n type=(None, BwUnit))\n\n rdm_bw_subpool = managedattribute(\n name='rdm_bw_subpool',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n rdm_bw_subpool_unit = managedattribute(\n name='rdm_bw_subpool_unit',\n default=None,\n type=(None, BwUnit))\n\n mam_bw_percentage = managedattribute(\n name='mam_bw_percentage',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n mam_bw_max_reservable = managedattribute(\n name='mam_bw_max_reservable',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n mam_bw_total = managedattribute(\n name='mam_bw_total',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n mam_bw_total_unit = managedattribute(\n name='mam_bw_total_unit',\n default=None,\n type=(None, BwUnit))\n\n mam_bw_largest = managedattribute(\n name='mam_bw_largest',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n mam_bw_largest_unit = managedattribute(\n name='mam_bw_largest_unit',\n default=None,\n type=(None, BwUnit))\n\n mam_bw_bc0 = managedattribute(\n name='mam_bw_bc0',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n mam_bw_bc0_unit = managedattribute(\n name='mam_bw_bc0_unit',\n default=None,\n type=(None, BwUnit))\n\n mam_bw_bc1 = managedattribute(\n name='mam_bw_bc1',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n mam_bw_bc1_unit = managedattribute(\n name='mam_bw_bc1_unit',\n default=None,\n type=(None, BwUnit))\n\n class DeviceAttributes(genie.conf.base.attributes.DeviceSubAttributes):\n\n enabled_feature = managedattribute(\n name='enabled_feature',\n default=False,\n type=managedattribute.test_istype(bool),\n doc='''Argument to control 'mpls traffic-engineering' CLI''')\n\n @property\n def interfaces(self):\n device = self.device\n interfaces = set(self.parent.interfaces)\n #interfaces.update(*[link.interfaces for link in self.parent.links])\n interfaces = {intf for intf in interfaces if intf.device is device}\n return frozenset(interfaces)\n\n @property\n def controllers(self):\n # TODO\n device = self.device\n controllers = set(self.parent.controllers)\n #controllers.update(*[link.interfaces for link in 
self.parent.links])\n controllers = {intf for intf in controllers if intf.device is device}\n return frozenset(controllers)\n\n neighbors = managedattribute(\n name='neighbors',\n finit=typedset(IPv4NeighborSubAttributes).copy,\n type=typedset(IPv4NeighborSubAttributes)._from_iterable)\n\n def add_neighbor(self, neighbor): # TODO DEPRECATE\n self.neighbors.add(neighbor)\n\n def remove_neighbor(self, neighbor): # TODO DEPRECATE\n self.neighbors.remove(neighbor)\n\n class InterfaceAttributes(genie.conf.base.attributes.InterfaceSubAttributes):\n\n def __init__(self, **kwargs):\n super().__init__(**kwargs)\n\n interface_attr = managedattribute(\n name='interface_attr',\n read_only=True,\n doc=InterfaceAttributes.__doc__)\n\n @interface_attr.initter\n def interface_attr(self):\n return SubAttributesDict(self.InterfaceAttributes, parent=self)\n\n class NeighborAttributes(IPv4NeighborSubAttributes):\n\n def __init__(self, **kwargs):\n super().__init__(**kwargs)\n\n neighbor_attr = managedattribute(\n name='neighbor_attr',\n read_only=True,\n doc=NeighborAttributes.__doc__)\n\n @neighbor_attr.initter\n def neighbor_attr(self):\n return SubAttributesDict(self.NeighborAttributes, parent=self)\n\n class ControllerAttributes(genie.conf.base.attributes.InterfaceSubAttributes):\n\n def __init__(self, **kwargs):\n super().__init__(**kwargs)\n\n controller_attr = managedattribute(\n name='controller_attr',\n read_only=True,\n doc=ControllerAttributes.__doc__)\n\n @controller_attr.initter\n def controller_attr(self):\n return SubAttributesDict(self.ControllerAttributes, parent=self)\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n device_attr = managedattribute(\n name='device_attr',\n read_only=True,\n doc=DeviceAttributes.__doc__)\n\n @device_attr.initter\n def device_attr(self):\n return SubAttributesDict(self.DeviceAttributes, parent=self)\n\n def __init__(self, pid=None, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n def build_config(self, links=None, apply=True, attributes=None, **kwargs):\n '''Rsvp top build config'''\n attributes = AttributesHelper(self, attributes)\n\n cfgs = {}\n\n if links is None:\n devices = self.devices\n else:\n devices = set().union(*[link.devices for link in links])\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr',\n keys=devices, sort=True):\n cfgs[key] = sub.build_config(apply=False, attributes=attributes2)\n\n if apply:\n self.testbed.config_on_devices(cfgs, fail_invalid=True)\n else:\n return cfgs\n\n def build_unconfig(self, links=None, apply=True, attributes=None, **kwargs):\n '''Rsvp top build unconfig'''\n attributes = AttributesHelper(self, attributes)\n\n cfgs = {}\n\n if links is None:\n devices = self.devices\n else:\n devices = set().union(*[link.devices for link in links])\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr',\n keys=devices, sort=True):\n cfgs[key] = sub.build_unconfig(apply=False, attributes=attributes2)\n\n if apply:\n self.testbed.config_on_devices(cfgs, fail_invalid=True)\n else:\n return cfgs\n\n", "id": "3864553", "language": "Python", "matching_score": 4.764151096343994, "max_stars_count": 0, "path": "pkgs/conf-pkg/src/genie/libs/conf/rsvp/rsvp.py" }, { "content": "\n__all__ = (\n 'AccessList',\n)\n\nfrom genie.decorator import managedattribute\nfrom genie.conf.base import DeviceFeature\nimport genie.conf.base.attributes\nfrom genie.conf.base.attributes import SubAttributes, SubAttributesDict,\\\n AttributesInheriter, AttributesHelper\n\n\nclass 
AccessList(DeviceFeature):\n\n name = managedattribute(\n name='name',\n type=managedattribute.test_istype(str))\n\n def __init__(self, name, *args, **kwargs):\n self.name = name\n super().__init__(*args, **kwargs)\n\n def build_config(self, devices=None, apply=True, attributes=None,\n **kwargs):\n cfgs = {}\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n\n if devices is None:\n devices = self.devices\n devices = set(devices)\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr',\n keys=devices, sort=True):\n cfgs[key] = sub.build_config(apply=False, attributes=attributes2)\n\n if apply:\n self.testbed.config_on_devices(cfgs, fail_invalid=True)\n else:\n return cfgs\n\n def build_unconfig(self, devices=None, apply=True, attributes=None,\n **kwargs):\n cfgs = {}\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n\n if devices is None:\n devices = self.devices\n devices = set(devices)\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr',\n keys=devices, sort=True):\n cfgs[key] = sub.build_unconfig(apply=False, attributes=attributes2)\n\n if apply:\n self.testbed.config_on_devices(cfgs, fail_invalid=True)\n else:\n return cfgs\n\n", "id": "8819819", "language": "Python", "matching_score": 4.927954196929932, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/access_list/access_list.py" }, { "content": "\n__all__ = (\n 'CommunitySet',\n)\n\nimport operator\n\nfrom genie.decorator import managedattribute\nfrom genie.conf.base import DeviceFeature\nfrom genie.conf.base.attributes import SubAttributes, SubAttributesDict,\\\n AttributesInheriter, AttributesHelper, DeviceSubAttributes\n\nfrom genie.libs.conf.base import ip_address, ip_network\n\n\nclass CommunitySet(DeviceFeature):\n\n name = managedattribute(\n name='name',\n type=managedattribute.test_istype(str))\n\n communities = managedattribute(\n name='communities',\n finit=list)\n\n class DeviceAttributes(DeviceSubAttributes):\n\n pass\n\n device_attr = managedattribute(\n name='device_attr',\n read_only=True,\n doc=DeviceAttributes.__doc__)\n\n @device_attr.initter\n def device_attr(self):\n return SubAttributesDict(self.DeviceAttributes, parent=self)\n\n def __init__(self, name, *args, **kwargs):\n self.name = name\n super().__init__(*args, **kwargs)\n\n def build_config(self, devices=None, apply=True, attributes=None,\n **kwargs):\n cfgs = {}\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n\n if devices is None:\n devices = self.devices\n devices = set(devices)\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr',\n keys=devices, sort=True):\n cfgs[key] = sub.build_config(apply=False, attributes=attributes2)\n\n cfgs = {key: value for key, value in cfgs.items() if value}\n if apply:\n self.testbed.config_on_devices(cfgs, fail_invalid=True)\n else:\n return cfgs\n\n def build_unconfig(self, devices=None, apply=True, attributes=None,\n **kwargs):\n cfgs = {}\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n\n if devices is None:\n devices = self.devices\n devices = set(devices)\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr',\n keys=devices, sort=True):\n cfgs[key] = sub.build_unconfig(apply=False, attributes=attributes2)\n\n cfgs = {key: value for key, value in cfgs.items() if value}\n if apply:\n self.testbed.config_on_devices(cfgs, fail_invalid=True)\n else:\n return cfgs\n\n", "id": "1891384", "language": 
"Python", "matching_score": 2.6653504371643066, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/community_set/community_set.py" }, { "content": "from enum import Enum\n\n# Genie\nfrom genie.decorator import managedattribute\nfrom genie.conf.base.config import CliConfig\nfrom genie.conf.base.base import DeviceFeature\nfrom genie.conf.base.attributes import DeviceSubAttributes,\\\n InterfaceSubAttributes,\\\n SubAttributesDict,\\\n AttributesHelper\n\n__all__ = (\n 'Hsrp',\n)\n\n\nclass Hsrp(DeviceFeature):\n\n def __init__(self, group_number=None, address_family='ipv4', *args, **kwargs):\n if group_number != None:\n self.group_number = int(group_number)\n self.address_family = address_family\n super().__init__(*args, **kwargs)\n\n class DeviceAttributes(DeviceSubAttributes):\n\n class InterfaceAttributes(InterfaceSubAttributes):\n pass\n\n interface_attr = managedattribute(\n name='interface_attr',\n read_only=True,\n doc=InterfaceAttributes.__doc__)\n\n @interface_attr.initter\n def interface_attr(self):\n return SubAttributesDict(self.InterfaceAttributes, parent=self)\n\n device_attr = managedattribute(\n name='device_attr',\n read_only=True,\n doc=DeviceAttributes.__doc__)\n\n @device_attr.initter\n def device_attr(self):\n return SubAttributesDict(self.DeviceAttributes, parent=self)\n\n # ==================== HSRP attributes ====================\n\n # enabled\n enabled = managedattribute(\n name='enabled',\n default=None,\n type=(None, managedattribute.test_istype(bool)),\n doc='Enable feature HSRP')\n\n # state_change_disable (XR only)\n state_change_disable = managedattribute(\n name='state_change_disable',\n default=None,\n type=(None, managedattribute.test_istype(bool)),\n doc='Disable HSRP state change messages')\n\n # bfd (old)\n bfd = managedattribute(\n name='bfd',\n default=None,\n type=(None, managedattribute.test_istype(bool)),\n doc='Configure hsrp bfd')\n\n # bfd_enabled\n bfd_enabled = managedattribute(\n name='bfd_enabled',\n default=None,\n type=(None, managedattribute.test_istype(bool)),\n doc='Configure hsrp bfd')\n\n # minimum_delay\n minimum_delay = managedattribute(\n name='minimum_delay',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc='Minimum delay')\n\n # reload_delay\n reload_delay = managedattribute(\n name='reload_delay',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc='Delay after reload')\n\n # mac_refresh\n mac_refresh = managedattribute(\n name='mac_refresh',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc='Refresh MAC cache on switch by periodically '\n 'sending packet from virtual mac address')\n\n # use_bia\n use_bia = managedattribute(\n name='use_bia',\n default=None,\n type=(None, managedattribute.test_istype(bool)),\n doc=\"HSRP uses interface's burned in address\")\n\n # hsrp/standby version\n version = managedattribute(\n name='version',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc='Configure hsrp version')\n\n # hsrp/standby redirect (old)\n redirect = managedattribute(\n name='redirect',\n default=None,\n type=(None, managedattribute.test_istype(bool)),\n doc='Configure hsrp redirect')\n\n # redirect_disable\n redirects_disable = managedattribute(\n name='redirects_disable',\n default=None,\n type=(None, managedattribute.test_istype(bool)),\n doc='Disable hsrp redirect')\n\n # group_number\n group_number = managedattribute(\n name='group_number',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc='Configure 
hsrp group number')\n\n # ==================== Interface attributes ====================\n\n # group_name (old)\n group_name = managedattribute(\n name='group_name',\n default=None,\n doc='Configure hsrp group name')\n\n # session_name\n session_name = managedattribute(\n name='session_name',\n default=None,\n doc='Redundancy name string')\n\n # ipv4 address (old)\n ip_address = managedattribute(\n name='ip_address',\n default=None,\n doc='Enable HSRP IPv4 and set the virtual IP address')\n\n # primary_ipv4_address\n primary_ipv4_address = managedattribute(\n name='primary_ipv4_address',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc='Enable HSRP IPv4 and set the virtual IP address')\n\n # secondary_ipv4_address\n secondary_ipv4_address = managedattribute(\n name='secondary_ipv4_address',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc='Make this IP address a secondary virtual IP address')\n\n # ipv6 address (old)\n ipv6_address = managedattribute(\n name='ipv6_address',\n default=None,\n doc='Enable HSRP IPv6')\n\n # global_ipv6_address\n global_ipv6_address = managedattribute(\n name='global_ipv6_address',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc='Enable HSRP IPv6')\n\n # link_local_ipv6_address\n link_local_ipv6_address = managedattribute(\n name='link_local_ipv6_address',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc='Enable HSRP IPv6')\n\n # hsrp_linklocal\n class HSRP_LINKLOCAL(Enum):\n manual = 'manual'\n auto = 'auto'\n legacy = 'legacy'\n\n hsrp_linklocal = managedattribute(\n name='hsrp_linklocal',\n default=None,\n type=(None, HSRP_LINKLOCAL),\n doc='Obtain address using autoconfiguration')\n\n # priority\n priority = managedattribute(\n name='priority',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc='Priority value')\n\n # preempt\n preempt = managedattribute(\n name='preempt',\n default=False,\n type=(None, managedattribute.test_istype(bool)),\n doc='Overthrow lower priority Active routers')\n\n # preempt delay\n preempt_minimum_delay = managedattribute(\n name='preempt_minimum_delay',\n default=False,\n type=(None, managedattribute.test_istype(int)),\n doc='Configure wait before preempting')\n\n # preempt reload\n preempt_reload_delay = managedattribute(\n name='preempt_reload_delay',\n default=False,\n type=(None, managedattribute.test_istype(int)),\n doc='Configure preempt reload delay')\n\n # preempt sync\n preempt_sync_delay = managedattribute(\n name='preempt_sync_delay',\n default=False,\n type=(None, managedattribute.test_istype(int)),\n doc='Configure wait for IP redundancy clients')\n\n # authentication\n authentication = managedattribute(\n name='authentication',\n default=None,\n doc='authentication string')\n\n # authentication_word (old)\n authentication_word = managedattribute(\n name='authentication_word',\n default=None,\n doc='Configure hsrp authentication')\n\n # authentication_text (old)\n authentication_text = managedattribute(\n name='authentication_text',\n default=None,\n doc='Configure hsrp authentication text')\n\n # authentication_md5_keychain (old)\n authentication_md5_keychain = managedattribute(\n name='authentication_md5_keychain',\n default=None,\n doc='Configure hsrp MD5 authentication with keychain')\n\n # authentication_md5_keystring (old)\n authentication_md5_keystring = managedattribute(\n name='authentication_md5_keystring',\n default=None,\n doc='Configure hsrp MD5 authentication with keystring')\n\n # 
track object (old)\n track_object = managedattribute(\n name='track_object',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc='Configure hsrp track')\n\n # tracked_object\n tracked_object = managedattribute(\n name='tracked_object',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc='Tracked object number')\n\n # priority decrement (old)\n priority_decrement = managedattribute(\n name='priority_decrement',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc='Configure hsrp track priority decrement')\n\n # tracked_object_priority_decrement\n tracked_object_priority_decrement = managedattribute(\n name='tracked_object_priority_decrement',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc='Priority decrement')\n\n # tracked_interface\n tracked_interface = managedattribute(\n name='track_interface',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc='Configuring tracking interface')\n\n # tracked_intf_priority_decrement\n tracked_intf_priority_decrement = managedattribute(\n name='tracked_intf_priority_decrement',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc='Priority decrement')\n\n # hello_msec_flag\n hello_msec_flag = managedattribute(\n name='hello_msec_flag',\n default=False,\n type=(None, managedattribute.test_istype(bool)),\n doc='Specify hello interval in milliseconds')\n\n # hold_msec_flag\n hold_msec_flag = managedattribute(\n name='hold_msec_flag',\n default=False,\n type=(None, managedattribute.test_istype(bool)),\n doc='Specify hold time in milliseconds')\n\n # timer hello interval in seconds (old)\n hello_interval_seconds = managedattribute(\n name='hello_interval_seconds',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc='Configure hsrp timer hello interval in seconds')\n\n # hello_sec\n hello_sec = managedattribute(\n name='hello_sec',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc='Hello interval in seconds')\n\n # timer hold time in seconds (old)\n holdtime_seconds = managedattribute(\n name='holdtime_seconds',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc='Configure hsrp timer holdtime in seconds')\n\n # hold_sec\n hold_sec = managedattribute(\n name='hold_sec',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc='Hold time in seconds')\n\n # timer hello interval in mseconds (old)\n hello_interval_msec = managedattribute(\n name='hello_interval_msec',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc='Configure hsrp timer hello interval in milli-seconds')\n\n # hello_msec\n hello_msec = managedattribute(\n name='hello_msec',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc='Hello interval in milliseconds')\n\n # timer hold time in seconds (old)\n holdtime_msec = managedattribute(\n name='holdtime_msec',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc='Configure hsrp timer holdtime in milli-seconds')\n\n # hold_msec\n hold_msec = managedattribute(\n name='hold_msec',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc='Hold time in milliseconds')\n\n # virtual_ip_learn\n virtual_ip_learn = managedattribute(\n name='virtual_ip_learn',\n default=None,\n type=(None, managedattribute.test_istype(bool)),\n doc='Enable HSRP IPv4 and learn virtual IP address')\n\n # follow\n follow = managedattribute(\n name='follow',\n default=None,\n type=(None, 
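# note: the managedattribute name 'track_interface' above differs from the Python attribute name 'tracked_interface'\n 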
managedattribute.test_istype(str)),\n doc='Name of HSRP group to follow')\n\n # mac_address (old)\n mac_address = managedattribute(\n name='mac_address',\n default=None,\n doc='Configure hsrp mac refresh time')\n\n # virtual_mac_address\n virtual_mac_address = managedattribute(\n name='virtual_mac_address',\n default=None,\n doc='Virtual MAC address')\n\n # ==================== IOSXE specific ====================\n\n # track shutdown\n track_shutdown = managedattribute(\n name='track_shutdown',\n default=None,\n type=(None, managedattribute.test_istype(bool)),\n doc='Configure hsrp track priority decrement')\n\n # ==================== IOSXR specific ====================\n\n # bfd_min_interval (old)\n bfd_min_interval = managedattribute(\n name='bfd_min_interval',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc='Configure hsrp bfd minimum interval')\n\n # bfd_interval\n bfd_interval = managedattribute(\n name='bfd_interval',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc='Configure hsrp bfd minimum interval')\n\n # bfd_multiplier (old)\n bfd_multiplier = managedattribute(\n name='bfd_multiplier',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc='Configure hsrp bfd multiplier')\n\n # bfd_detection_multiplier\n bfd_detection_multiplier = managedattribute(\n name='bfd_detection_multiplier',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc='Configure hsrp bfd multiplier')\n\n # bfd_address\n bfd_address = managedattribute(\n name='bfd_address',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc='HSRP BFD remote interface IP address')\n\n # bfd_interface_name\n bfd_interface_name = managedattribute(\n name='bfd_interface_name',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc='HSRP BFD outgoing interface')\n\n # address_family\n class ADDRESS_FAMILY(Enum):\n ipv4='ipv4'\n ipv6='ipv6'\n\n address_family = managedattribute(\n name='address_family',\n default='ipv4',\n type=(None, ADDRESS_FAMILY),\n doc='Configure hsrp address family')\n\n # bfd_fast_detect\n bfd_fast_detect = managedattribute(\n name='bfd_fast_detect',\n default=None,\n type=(None, managedattribute.test_istype(bool)),\n doc='Configure hsrp bfd fast detect')\n\n # ==================== NXOS specific ====================\n\n # feature hsrp\n enabled = managedattribute(\n name='enabled',\n default=False,\n type=(None, managedattribute.test_istype(bool)),\n doc='Enable or disable feature hsrp')\n\n # ===========================================================\n\n def build_config(self, devices=None, apply=True, attributes=None,\n unconfig=False):\n cfgs = {}\n attributes = AttributesHelper(self, attributes)\n if devices is None:\n devices = self.devices\n devices = set(devices)\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr', sort=True):\n cfgs[key] = sub.build_config(apply=False, attributes=attributes2)\n if apply:\n for device_name, cfg in sorted(cfgs.items()):\n self.testbed.config_on_devices(cfg, fail_invalid=True)\n else:\n return cfgs\n\n def build_unconfig(self, devices=None, apply=True, attributes=None):\n cfgs = {}\n attributes = AttributesHelper(self, attributes)\n if devices is None:\n devices = self.devices\n devices = set(devices)\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr', sort=True):\n cfgs[key] = sub.build_unconfig(apply=False, attributes=attributes2)\n\n if apply:\n for device_name, cfg in sorted(cfgs.items()):\n 
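# applies each device's cfg one at a time; other conf classes in this repo pass the whole cfgs dict in a single config_on_devices() call\n 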
self.testbed.config_on_devices(cfg, fail_invalid=True)\n else:\n return cfgs\n\n", "id": "3260557", "language": "Python", "matching_score": 5.472977161407471, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/hsrp/hsrp.py" }, { "content": "'''\nIOSXE Genie Conf using CLI for feature Standby.\n'''\n\n# Python\nfrom abc import ABC\nimport warnings\n\n# Genie\nfrom genie.decorator import managedattribute\nfrom genie.conf.base.config import CliConfig\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning,\\\n AttributesHelper\n\n\nclass Hsrp(ABC):\n\n class DeviceAttributes(ABC):\n\n def build_config(self, devices=None, apply=True, attributes=None,\n unconfig=False, **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # loop over all interfaces\n for sub, attributes2 in attributes.mapping_values(\n 'interface_attr', keys=self.interface_attr.keys()):\n configurations.append_block(sub.build_config(apply=False,\n attributes=attributes2, unconfig=unconfig, **kwargs))\n\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n class InterfaceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None,\n unconfig=False, **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # enabled N/A\n # state_change_disable N/A\n\n # interface <interface_name>\n with configurations.submode_context(attributes.format(\n 'interface {interface_name}', force=True)):\n\n # interface <interface_name>\n # standby version <version>\n configurations.append_line(attributes.format(\n 'standby version {version}'))\n\n # interface <interface_name>\n # standby <bfd> (old)\n if attributes.value('bfd'):\n configurations.append_line('standby bfd')\n\n # interface <interface_name>\n # standby bfd\n if attributes.value('bfd_enabled'):\n configurations.append_line('standby bfd')\n\n # bfd_interval N/A\n # bfd_detection_multiplier N/A\n # bfd_address N/A\n # bfd_interface_name\n\n # interface <interface_name>\n # standby delay minimum <minimum_delay> \\\n # reload <reload_delay>\n if attributes.value('minimum_delay') and \\\n attributes.value('reload_delay'):\n configurations.append_line(attributes.format(\n 'standby delay minimum {minimum_delay} '\n 'reload {reload_delay}'))\n elif attributes.value('minimum_delay'):\n configurations.append_line(attributes.format(\n 'standby delay minimum {minimum_delay}'))\n\n # interface <interface_name>\n # standby mac-refresh <mac_refresh>\n if attributes.value('mac_refresh'):\n configurations.append_line(attributes.format(\n 'standby mac-refresh {mac_refresh}'))\n\n # interface <interface_name>\n # standby use-bia scope interface\n if attributes.value('use_bia'):\n configurations.append_line(attributes.format(\n 'standby use-bia'))\n\n # interface <interface_name>\n # standby redirect (old)\n if attributes.value('redirect'):\n configurations.append_line(\n 'standby redirect')\n\n # interface <interface_name>\n # standby redirect\n if attributes.value('redirects_disable') == False:\n configurations.append_line(\n 'standby redirect')\n\n if attributes.value('authentication_word'):\n # interface <interface_name>\n # standby <group_number> authentication 
\\\n # <authentication_word> (old)\n configurations.append_line(attributes.format(\n 'standby {group_number} '\n 'authentication {authentication_word}'))\n elif attributes.value('authentication_text'):\n # interface <interface_name>\n # standby <group_number> authentication text \\\n # <authentication_text> (old)\n configurations.append_line(attributes.format(\n 'standby {group_number} '\n 'authentication text {authentication_text}'))\n elif attributes.value('authentication_md5_keychain'):\n # interface <interface_name>\n # standby <group_number> authentication \\\n # md5 key-chain <authentication_md5_keychain> (old)\n configurations.append_line(attributes.format(\n 'standby {group_number} authentication '\n 'md5 key-chain {authentication_md5_keychain}'))\n elif attributes.value('authentication_md5_keystring'):\n # interface <interface_name>\n # standby <group_number> authentication \\\n # md5 key-string <authentication_md5_keystring> (old)\n configurations.append_line(attributes.format(\n 'standby {group_number} authentication '\n 'md5 key-string {authentication_md5_keystring}'))\n\n # interface <interface_name>\n # standby <group_number> authentication \\\n # <authentication>\n configurations.append_line(attributes.format(\n 'standby {group_number} '\n 'authentication {authentication}'))\n\n # interface <interface_name>\n # standby <group_number> follow <follow>\n configurations.append_line(attributes.format(\n 'standby {group_number} follow {follow}'))\n\n # interface <interface_name>\n # standby <group_number> ip <ip_address> (old)\n if attributes.value('address_family').name == 'ipv4':\n configurations.append_line(attributes.format(\n 'standby {group_number} ip {ip_address}'))\n\n # interface <interface_name>\n # standby <group_number> ip <primary_ipv4_address>\n # standby <group_number> ip <secondary_ipv4_address> secondary\n if attributes.value('address_family').name == 'ipv4':\n configurations.append_line(attributes.format(\n 'standby {group_number} ip {primary_ipv4_address}'))\n configurations.append_line(attributes.format(\n 'standby {group_number} ip {secondary_ipv4_address}'\n ' secondary'))\n if attributes.value('virtual_ip_learn'):\n configurations.append_line(attributes.format(\n 'standby {group_number} ip'))\n\n # interface <interface_name>\n # standby <group_number> ipv6 <global_ipv6_address>\n # standby <group_number> ipv6 <link_local_ipv6_address>\n if attributes.value('address_family').name == 'ipv6':\n configurations.append_line(attributes.format(\n 'standby {group_number} ipv6 '\n '{global_ipv6_address}'))\n configurations.append_line(attributes.format(\n 'standby {group_number} ipv6 '\n '{link_local_ipv6_address}'))\n if attributes.value('hsrp_linklocal'):\n if attributes.value('hsrp_linklocal').name \\\n == 'auto':\n configurations.append_line(attributes.format(\n 'standby {group_number} ipv6 autoconfig'))\n\n # interface <interface_name>\n # standby <group_number> mac-address <mac_address> (old)\n configurations.append_line(attributes.format(\n 'standby {group_number} mac-address {mac_address}'))\n\n # interface <interface_name>\n # standby <group_number> mac-address <virtual_mac_address>\n configurations.append_line(attributes.format(\n 'standby {group_number} mac-address '\n '{virtual_mac_address}'))\n\n # interface <interface_name>\n # standby <group_number> name <group_name> (old)\n configurations.append_line(attributes.format(\n 'standby {group_number} name {group_name}'))\n\n # interface <interface_name>\n # standby <group_number> name <session_name>\n 
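# e.g. group_number=10, session_name='HSRP-GRP-10' (illustrative values, not from the original source) would render 'standby 10 name HSRP-GRP-10'\n 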
configurations.append_line(attributes.format(\n 'standby {group_number} name {session_name}'))\n\n if attributes.value('preempt') and \\\n attributes.value('preempt_minimum_delay') and \\\n attributes.value('preempt_reload_delay') and \\\n attributes.value('preempt_sync_delay'):\n # interface <interface_name>\n # standby <group_number> preempt delay \\\n # minimum <preempt_minimum_delay> reload \\\n # <preempt_reload_delay> sync <preempt_sync_delay>\n configurations.append_line(attributes.format(\n 'standby {group_number} '\n 'preempt delay minimum {preempt_minimum_delay} '\n 'reload {preempt_reload_delay} '\n 'sync {preempt_sync_delay}'))\n elif attributes.value('preempt') and \\\n attributes.value('preempt_minimum_delay') and \\\n attributes.value('preempt_reload_delay'):\n # interface <interface_name>\n # standby <group_number> preempt delay \\\n # minimum <preempt_minimum_delay> reload \\\n # <preempt_reload_delay>\n configurations.append_line(attributes.format(\n 'standby {group_number} '\n 'preempt delay minimum {preempt_minimum_delay} '\n 'reload {preempt_reload_delay}'))\n elif attributes.value('preempt') and \\\n attributes.value('preempt_minimum_delay'):\n # interface <interface_name>\n # standby <group_number> preempt delay \\\n # minimum <preempt_minimum_delay>\n configurations.append_line(attributes.format(\n 'standby {group_number} '\n 'preempt delay minimum {preempt_minimum_delay}'))\n elif attributes.value('preempt') and \\\n attributes.value('preempt_reload_delay'):\n # interface <interface_name>\n # standby <group_number> preempt delay \\\n # reload <preempt_reload_delay>\n configurations.append_line(attributes.format(\n 'standby {group_number} '\n 'preempt delay reload {preempt_reload_delay}'))\n elif attributes.value('preempt') and \\\n attributes.value('preempt_sync_delay'):\n # interface <interface_name>\n # standby <group_number> preempt delay \\\n # sync <preempt_sync_delay>\n configurations.append_line(attributes.format(\n 'standby {group_number} '\n 'preempt delay sync {preempt_sync_delay}'))\n elif attributes.value('preempt'):\n # interface <interface_name>\n # standby <group_number> preempt\n configurations.append_line(attributes.format(\n 'standby {group_number} preempt'))\n\n # interface <interface_name>\n # standby <group_number> priority <priority>\n configurations.append_line(attributes.format(\n 'standby {group_number} priority {priority}'))\n\n # interface <interface_name>\n # standby <group_number> timers \\\n # <hello_interval_seconds> <holdtime_seconds> (old)\n configurations.append_line(attributes.format(\n 'standby {group_number} timers '\n '{hello_interval_seconds} {holdtime_seconds}'))\n\n # interface <interface_name>\n # standby <group_number> timers msec \\\n # <hello_interval_msec> msec <holdtime_msec> (old)\n configurations.append_line(attributes.format(\n 'standby {group_number} timers msec '\n '{hello_interval_msec} msec {holdtime_msec}'))\n\n timers_config =\\\n [attributes.format('standby {group_number} timers ')]\n if attributes.value('hello_msec_flag') is False and\\\n attributes.value('hold_msec_flag') is False:\n timers_config.append(\\\n attributes.format('{hello_sec} {hold_sec}'))\n elif attributes.value('hello_msec_flag') is True and\\\n attributes.value('hold_msec_flag') is False:\n timers_config.append(\\\n attributes.format('msec {hello_msec} {hold_sec}'))\n elif attributes.value('hello_msec_flag') is False and\\\n attributes.value('hold_msec_flag') is True:\n timers_config.append(\\\n attributes.format('{hello_sec} msec 
{hold_msec}'))\n elif attributes.value('hello_msec_flag') is True and\\\n attributes.value('hold_msec_flag') is True:\n timers_config.append(\\\n attributes.format('msec {hello_msec} msec '\n '{hold_msec}'))\n if timers_config[1] != '' and None not in timers_config:\n configurations.append_line(''.join(timers_config))\n\n # interface <interface_name>\n # standby <group_number> track object <track_object> \\\n # decrement <priority_decrement>\n configurations.append_line(attributes.format(\n 'standby {group_number} track {track_object} '\n 'decrement {priority_decrement}'))\n\n # interface <interface_name>\n # standby <group_number> track object <tracked_object> \\\n # [decrement <tracked_object_priority_decrement>]\n if attributes.value('tracked_object') and \\\n attributes.value('tracked_object_priority_decrement'):\n configurations.append_line(attributes.format(\n 'standby {group_number} track {tracked_object} '\n 'decrement {tracked_object_priority_decrement}'))\n elif attributes.value('tracked_object'):\n configurations.append_line(attributes.format(\n 'standby {group_number} track {tracked_object}')) \n\n # interface <interface_name>\n # standby <group_number> track object <track_object> \\\n # shutdown\n if attributes.value('track_shutdown'):\n configurations.append_line(attributes.format(\n 'standby {group_number} track {track_object} '\n 'shutdown'))\n\n # tracked_interface N/A\n # tracked_intf_priority_decrement N/A\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n", "id": "8862562", "language": "Python", "matching_score": 6.568673610687256, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/hsrp/iosxe/hsrp.py" }, { "content": "'''\nNXOS Specific Configurations for Hsrp Feature objects.\n'''\n\n# Python\nfrom abc import ABC\nimport warnings\n\n# Genie\nfrom genie.decorator import managedattribute\nfrom genie.conf.base.config import CliConfig\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning, \\\n AttributesHelper\n\n\nclass Hsrp(ABC):\n\n class DeviceAttributes(ABC):\n\n def build_config(self, devices=None, apply=True, attributes=None,\n unconfig=False, **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # feature hsrp\n if attributes.value('enabled'):\n configurations.append_line(attributes.format('feature hsrp',\n force=True))\n\n # loop over all interfaces\n for sub, attributes2 in attributes.mapping_values('interface_attr',\n keys=self.interface_attr.keys()):\n configurations.append_block(sub.build_config(apply=False,\n attributes=attributes2, unconfig=unconfig, **kwargs))\n\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n class InterfaceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None,\n unconfig=False, **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # state_change_disable N/A\n\n # interface <interface_name>\n with configurations.submode_context(attributes.format(\n 'interface {interface_name}', force=True)):\n\n # interface <interface_name>\n # hsrp bfd\n 
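# Note: 'bfd' is the legacy attribute name and 'bfd_enabled' its\n                # newer equivalent; either one renders the same 'hsrp bfd' line.\n                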
if attributes.value('bfd'):\n configurations.append_line(\n attributes.format('hsrp bfd'))\n\n # interface <interface_name>\n # hsrp bfd\n if attributes.value('bfd_enabled'):\n configurations.append_line(\n attributes.format('hsrp bfd'))\n\n # bfd_interval N/A\n # bfd_detection_multiplier N/A\n # bfd_address N/A\n # bfd_interface_name N/A\n\n # interface <interface_name>\n # hsrp version <version>\n if attributes.value('version') == 2:\n configurations.append_line(\n attributes.format('hsrp version {version}'))\n\n if attributes.value('minimum_delay') and \\\n attributes.value('reload_delay'):\n # interface <interface_name>\n # hsrp delay minimum <minimum_delay> \\\n # reload <reload_delay>\n configurations.append_line(attributes.format(\n 'hsrp delay minimum {minimum_delay} '\n 'reload {reload_delay}'))\n elif attributes.value('minimum_delay'):\n # interface <interface_name>\n # hsrp delay minimum <delay>\n configurations.append_line(attributes.format(\n 'hsrp delay minimum {minimum_delay}'))\n\n # interface <interface_name>\n # hsrp mac-refresh <mac_refresh>\n configurations.append_line(\n attributes.format('hsrp mac-refresh {mac_refresh}'))\n\n # interface <interface_name>\n # hsrp use-bia scope interface\n if attributes.value('use_bia'):\n configurations.append_line('hsrp use-bia')\n\n # redirects_disable N/A\n\n # interface <interface_name>\n # hsrp <group_number>\n line = 'hsrp {group_number}'\n if self.address_family.name == 'ipv6':\n line += ' ipv6'\n with configurations.submode_context(attributes.format(\n line, force=True)):\n\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n if attributes.value('authentication_word'):\n # interface <interface_name>\n # hsrp <group_number>\n # authentication <authentication_word>\n configurations.append_line(attributes.format(\n 'authentication {authentication_word}'))\n elif attributes.value('authentication_text'):\n # interface <interface_name>\n # hsrp <group_number>\n # authentication text <authentication_text>\n configurations.append_line(attributes.format(\n 'authentication text {authentication_text}'))\n elif attributes.value('authentication_md5_keychain'):\n # interface <interface_name>\n # hsrp <group_number>\n # authentication md5 key-chain \\\n # <authentication_md5_keychain>\n configurations.append_line(attributes.format(\n 'authentication md5 key-chain '\n '{authentication_md5_keychain}'))\n elif attributes.value('authentication_md5_keystring'):\n # interface <interface_name>\n # hsrp <group_number>\n # authentication md5 key-string \\\n # <authentication_md5_keystring>\n configurations.append_line(attributes.format(\n 'authentication md5 key-string '\n '{authentication_md5_keystring}'))\n\n if attributes.value('authentication'):\n # interface <interface_name>\n # hsrp <group_number>\n # authentication <authentication>\n configurations.append_line(attributes.format(\n 'authentication {authentication}'))\n \n # interface <interface_name>\n # hsrp <group_number>\n # ip <ip_address>\n if self.address_family.name == 'ipv4':\n configurations.append_line(\n attributes.format('ip {ip_address}'))\n elif self.address_family.name == 'ipv6':\n configurations.append_line(\n attributes.format('ipv6 {ip_address}'))\n\n # interface <interface_name>\n # hsrp <group_number>\n # ip <primary_ipv4_address>|<secondary_ipv4_address> [secondary]\n # ip <global_ipv6_address>|<link_local_ipv6_address>\n # ip autoconfig\n if self.address_family.name == 'ipv4':\n if attributes.value('primary_ipv4_address'):\n 
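# Note: the virtual IP renders as a bare 'ip A.B.C.D'; any\n                            # secondary_ipv4_address is emitted just below with the\n                            # 'secondary' keyword.\n                            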
configurations.append_line(\n attributes.format('ip '\n '{primary_ipv4_address}'))\n if attributes.value('secondary_ipv4_address'):\n configurations.append_line(\n attributes.format('ip '\n '{secondary_ipv4_address} secondary'))\n elif self.address_family.name == 'ipv6':\n if attributes.value('global_ipv6_address'):\n configurations.append_line(\n attributes.format('ip '\n '{global_ipv6_address}'))\n if attributes.value('link_local_ipv6_address'):\n configurations.append_line(\n attributes.format('ip '\n '{link_local_ipv6_address}'))\n if attributes.value('hsrp_linklocal'):\n if attributes.value('hsrp_linklocal').name ==\\\n 'auto':\n configurations.append_line('ip autoconfig')\n\n # interface <interface_name>\n # hsrp <group_number>\n if attributes.value('virtual_ip_learn'):\n configurations.append_line('ip')\n\n # interface <interface_name>\n # hsrp <group_number>\n # mac-address <mac_address>\n configurations.append_line(\n attributes.format('mac-address {mac_address}'))\n\n # interface <interface_name>\n # hsrp <group_number>\n # mac-address <virtual_mac_address>\n configurations.append_line(\n attributes.format('mac-address '\n '{virtual_mac_address}'))\n\n # interface <interface_name>\n # hsrp <group_number>\n # name <group_name>\n configurations.append_line(\n attributes.format('name {group_name}'))\n\n # interface <interface_name>\n # hsrp <group_number>\n # name <session_name>\n configurations.append_line(\n attributes.format('name {session_name}'))\n\n # interface <interface_name>\n # hsrp <group_number>\n # follow <follow>\n configurations.append_line(\n attributes.format('follow {follow}'))\n \n if attributes.value('preempt') and \\\n attributes.value('preempt_minimum_delay') and \\\n attributes.value('preempt_reload_delay') and \\\n attributes.value('preempt_sync_delay'):\n # interface <interface_name>\n # hsrp <group_number>\n # preempt delay \\\n # minimum <preempt_minimum_delay> \\\n # reload <preempt_reload_delay> \\\n # sync <preempt_sync_delay>\n configurations.append_line(attributes.format(\n 'preempt delay minimum {preempt_minimum_delay} '\n 'reload {preempt_reload_delay} '\n 'sync {preempt_sync_delay}'))\n elif attributes.value('preempt') and \\\n attributes.value('preempt_minimum_delay') and \\\n attributes.value('preempt_reload_delay'):\n # interface <interface_name>\n # hsrp <group_number>\n # preempt delay minimum <preempt_minimum_delay>\\\n # reload <preempt_reload_delay>\n configurations.append_line(attributes.format(\n 'preempt delay minimum {preempt_minimum_delay} '\n 'reload {preempt_reload_delay}'))\n elif attributes.value('preempt') and \\\n attributes.value('preempt_minimum_delay'):\n # interface <interface_name>\n # hsrp <group_number>\n # preempt delay minimum <preempt_minimum_delay>\n configurations.append_line(attributes.format(\n 'preempt delay minimum '\n '{preempt_minimum_delay}'))\n elif attributes.value('preempt') and \\\n attributes.value('preempt_reload_delay'):\n # interface <interface_name>\n # hsrp <group_number>\n # preempt delay reload <preempt_reload_delay>\n configurations.append_line(attributes.format(\n 'preempt delay reload '\n '{preempt_reload_delay}'))\n elif attributes.value('preempt') and \\\n attributes.value('preempt_sync_delay'):\n # interface <interface_name>\n # hsrp <group_number>\n # preempt delay sync <preempt_sync_delay>\n configurations.append_line(attributes.format(\n 'preempt delay sync '\n '{preempt_sync_delay}'))\n elif attributes.value('preempt'):\n # interface <interface_name>\n # hsrp <group_number>\n # preempt\n 
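# Note: the elif chain above enumerates the supported preempt delay\n                        # keyword combinations from most to least specific; a bare\n                        # 'preempt' is the fallback when no delay attribute is set.\n                        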
configurations.append_line('preempt')\n\n # interface <interface_name>\n # hsrp <group_number>\n # priority <priority>\n configurations.append_line(\n attributes.format('priority {priority}'))\n\n # interface <interface_name>\n # hsrp <group_number>\n # timers <hello_interval_seconds> <holdtime_seconds>\n configurations.append_line(attributes.format(\n 'timers {hello_interval_seconds} '\n '{holdtime_seconds}'))\n\n # interface <interface_name>\n # hsrp <group_number>\n # timers msec <hello_interval_msec> \\\n # msec <holdtime_msec>\n configurations.append_line(attributes.format(\n 'timers msec {hello_interval_msec} '\n 'msec {holdtime_msec}'))\n\n # interface <interface_name>\n # hsrp <group_number>\n # timers <hello_sec> <hold_sec>\n # timers msec <hello_msec> <hold_sec>\n # timers <hello_sec> msec <hold_msec>\n # timers msec <hello_msec> msec <hold_msec>\n timers_config = ['timers ']\n if (attributes.value('hello_msec_flag') is False) and\\\n (attributes.value('hold_msec_flag') is False):\n timers_config.append(\\\n attributes.format('{hello_sec} {hold_sec}'))\n elif (attributes.value('hello_msec_flag') is True) and\\\n (attributes.value('hold_msec_flag') is False):\n timers_config.append(\\\n attributes.format('msec {hello_msec} {hold_sec}'))\n elif (attributes.value('hello_msec_flag') is False) and\\\n (attributes.value('hold_msec_flag') is True):\n timers_config.append(\\\n attributes.format('{hello_sec} msec {hold_msec}'))\n elif (attributes.value('hello_msec_flag') is True) and\\\n (attributes.value('hold_msec_flag') is True):\n timers_config.append(\\\n attributes.format('msec {hello_msec} '\n 'msec {hold_msec}'))\n if timers_config[1] != '':\n configurations.append_line(''.join(timers_config)) \n\n # interface <interface_name>\n # hsrp <group_number>\n # track <track> decrement <priority_decrement>\n if attributes.value('track_object') and\\\n attributes.value('priority_decrement'):\n configurations.append_line(attributes.format(\n 'track {track_object} '\n 'decrement {priority_decrement}'))\n elif attributes.value('track_object'):\n configurations.append_line(attributes.format(\n 'track {track_object}'))\n\n # interface <interface_name>\n # hsrp <group_number>\n # track <tracked_object> decrement <priority_decrement>\n if attributes.value('tracked_object') and\\\n attributes.value(\\\n 'tracked_object_priority_decrement'):\n configurations.append_line(attributes.format(\n 'track {tracked_object} '\n 'decrement '\n '{tracked_object_priority_decrement}'))\n elif attributes.value('tracked_object'):\n configurations.append_line(attributes.format(\n 'track {tracked_object}'))\n\n # tracked_interface N/A\n # tracked_intf_priority_decrement N/A\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n", "id": "4835539", "language": "Python", "matching_score": 6.324428081512451, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/hsrp/nxos/hsrp.py" }, { "content": "'''\nIOSXR Specific Configurations for Hsrp Feature objects.\n'''\n\n# Python\nfrom abc import ABC\nimport warnings\n\n# Genie\nfrom genie.decorator import managedattribute\nfrom genie.conf.base.config import CliConfig\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning, \\\n AttributesHelper\n\n\nclass Hsrp(ABC):\n\n class DeviceAttributes(ABC):\n\n def build_config(self, devices=None, apply=True, 
attributes=None,\n                         unconfig=False, **kwargs):\n            assert not apply\n            attributes = AttributesHelper(self, attributes)\n            configurations = CliConfigBuilder(unconfig=unconfig)\n\n            # iosxr: router hsrp\n            configurations.append_line(attributes.format('router hsrp',\n                                                          force=True))\n\n            # loop over all interfaces\n            for sub, attributes2 in attributes.mapping_values('interface_attr',\n                keys=self.interface_attr.keys()):\n                configurations.append_block(sub.build_config(apply=False,\n                    attributes=attributes2, unconfig=unconfig, **kwargs))\n\n            return CliConfig(device=self.device, unconfig=unconfig,\n                             cli_config=configurations)\n\n        def build_unconfig(self, apply=True, attributes=None, **kwargs):\n            return self.build_config(apply=apply, attributes=attributes,\n                                     unconfig=True, **kwargs)\n\n    class InterfaceAttributes(ABC):\n\n        def build_config(self, apply=True, attributes=None,\n                         unconfig=False, **kwargs):\n            assert not apply\n            attributes = AttributesHelper(self, attributes)\n            configurations = CliConfigBuilder(unconfig=unconfig)\n\n            # router hsrp\n            #  message state disable\n            if attributes.value('state_change_disable'):\n                configurations.append_line(\n                    'message state disable')\n\n            # interface <interface_name>\n            with configurations.submode_context(attributes.format(\n                'interface {interface_name}', force=True)):\n\n                # interface <interface_name>\n                #  hsrp bfd minimum-interval <bfd_min_interval>\n                configurations.append_line(attributes.format(\n                    'hsrp bfd minimum-interval {bfd_min_interval}'))\n\n                # interface <interface_name>\n                #  hsrp bfd minimum-interval <bfd_interval>\n                configurations.append_line(attributes.format(\n                    'hsrp bfd minimum-interval {bfd_interval}'))\n\n                # interface <interface_name>\n                #  hsrp bfd multiplier <bfd_multiplier>\n                configurations.append_line(attributes.format(\n                    'hsrp bfd multiplier {bfd_multiplier}'))\n\n                # interface <interface_name>\n                #  hsrp bfd multiplier <bfd_detection_multiplier>\n                configurations.append_line(attributes.format(\n                    'hsrp bfd multiplier {bfd_detection_multiplier}'))\n\n                # interface <interface_name>\n                #  hsrp delay minimum <minimum_delay> \\\n                #   reload <reload_delay>\n                configurations.append_line(attributes.format(\n                    'hsrp delay minimum {minimum_delay} '\n                    'reload {reload_delay}'))\n\n                # interface <interface_name>\n                #  hsrp use-bia\n                if attributes.value('use_bia'):\n                    configurations.append_line(attributes.format(\n                        'hsrp use-bia'))\n\n                # interface <interface_name>\n                #  hsrp redirect disable\n                if attributes.value('redirect'):\n                    configurations.append_line(attributes.format(\n                        'hsrp redirect disable'))\n\n                # interface <interface_name>\n                #  hsrp redirect disable\n                if attributes.value('redirects_disable'):\n                    configurations.append_line(attributes.format(\n                        'hsrp redirect disable'))\n\n                # interface <interface_name>\n                #  hsrp mac-refresh <mac_refresh>\n                configurations.append_line(attributes.format(\n                    'hsrp mac-refresh {mac_refresh}'))\n\n                # interface <interface_name>\n                #  address-family <address_family>\n                with configurations.submode_context(attributes.format(\n                    'address-family {address_family.name}', force=True)):\n\n                    if unconfig and attributes.iswildcard:\n                        configurations.submode_unconfig()\n\n                    # interface <interface_name>\n                    #  address-family <address_family>\n                    #   hsrp version <version>\n                    if attributes.value('version'):\n                        configurations.append_line(attributes.format(\n                            'hsrp version {version}'))\n\n                    # interface <interface_name>\n                    #  address-family <address_family>\n                    #   hsrp <bfd_address> [<bfd_interface_name>]\n                    if attributes.value('bfd_address') and \\\n                        attributes.value('bfd_interface_name'):\n                        
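# Note: when both bfd_address and bfd_interface_name are set they\n                        # render on one 'hsrp bfd fast-detect peer' line; with only\n                        # the address, the interface argument is omitted below.\n                        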
configurations.append_line(attributes.format(\n                            'hsrp bfd fast-detect peer {bfd_address} '\n                            '{bfd_interface_name}'))\n                    elif attributes.value('bfd_address'):\n                        configurations.append_line(attributes.format(\n                            'hsrp bfd fast-detect peer {bfd_address}'))\n\n                    # interface <interface_name>\n                    #  address-family <address_family>\n                    #   hsrp <group_number>\n                    with configurations.submode_context(attributes.format(\n                        'hsrp {group_number}', force=True)):\n\n                        if unconfig and attributes.iswildcard:\n                            configurations.submode_unconfig()\n\n                        # interface <interface_name>\n                        #  address-family <address_family>\n                        #   hsrp <group_number>\n                        #    address <ip_address>\n                        configurations.append_line(attributes.format(\n                            'address {ip_address}'))\n\n                        # interface <interface_name>\n                        #  address-family <address_family>\n                        #   hsrp <group_number>\n                        #    address learn\n                        if attributes.value('virtual_ip_learn'):\n                            configurations.append_line('address learn')\n\n                        if attributes.value('address_family').name \\\n                            == 'ipv4':\n                            # interface <interface_name>\n                            #  address-family <address_family>\n                            #   hsrp <group_number>\n                            #    address <primary_ipv4_address>\n                            configurations.append_line(attributes.format(\n                                'address {primary_ipv4_address}'))\n\n                            # interface <interface_name>\n                            #  address-family <address_family>\n                            #   hsrp <group_number>\n                            #    address <secondary_ipv4_address> secondary\n                            configurations.append_line(attributes.format(\n                                'address {secondary_ipv4_address} '\n                                'secondary'))\n\n                        if attributes.value('address_family').name \\\n                            == 'ipv6':\n                            # interface <interface_name>\n                            #  address-family <address_family>\n                            #   hsrp <group_number>\n                            #    address global <global_ipv6_address>\n                            configurations.append_line(attributes.format(\n                                'address global {global_ipv6_address}'))\n\n                            # interface <interface_name>\n                            #  address-family <address_family>\n                            #   hsrp <group_number>\n                            #    address linklocal <link_local_ipv6_address>\n                            configurations.append_line(attributes.format(\n                                'address linklocal '\n                                '{link_local_ipv6_address}'))\n\n                            # interface <interface_name>\n                            #  address-family <address_family>\n                            #   hsrp <group_number>\n                            #    address linklocal autoconfig\n                            if attributes.value('hsrp_linklocal'):\n                                if attributes.value('hsrp_linklocal').name \\\n                                    == 'auto':\n                                    configurations.append_line(\n                                        'address linklocal autoconfig')\n\n                        # interface <interface_name>\n                        #  address-family <address_family>\n                        #   hsrp <group_number>\n                        #    authentication <authentication_word>\n                        configurations.append_line(attributes.format(\n                            'authentication {authentication_word}'))\n\n                        # interface <interface_name>\n                        #  address-family <address_family>\n                        #   hsrp <group_number>\n                        #    authentication <authentication>\n                        configurations.append_line(attributes.format(\n                            'authentication {authentication}'))\n\n                        # interface <interface_name>\n                        #  address-family <address_family>\n                        #   hsrp <group_number>\n                        #    bfd fast-detect\n                        if attributes.value('bfd_fast_detect'):\n                            configurations.append_line('bfd fast-detect')\n\n                        # interface <interface_name>\n                        #  address-family <address_family>\n                        #   hsrp <group_number>\n                        #    bfd fast-detect\n                        if attributes.value('bfd_enabled'):\n                            configurations.append_line('bfd fast-detect')\n\n                        # interface <interface_name>\n                        #  address-family <address_family>\n                        #   hsrp <group_number>\n                        #    mac-address <mac_address>\n                        configurations.append_line(attributes.format(\n                            'mac-address {mac_address}'))\n\n                        # interface <interface_name>\n                        #  address-family <address_family>\n                        #   hsrp <group_number>\n                        #    mac-address <virtual_mac_address>\n                        configurations.append_line(attributes.format(\n                            'mac-address {virtual_mac_address}'))\n\n                        # interface <interface_name>\n                        
# address-family <address_family>\n # hsrp <group_number>\n # name <group_name>\n configurations.append_line(attributes.format(\n 'name {group_name}'))\n\n # interface <interface_name>\n # address-family <address_family>\n # hsrp <group_number>\n # name <session_name>\n configurations.append_line(attributes.format(\n 'name {session_name}'))\n\n # interface <interface_name>\n # address-family <address_family>\n # hsrp <group_number>\n # slave follow <follow>\n configurations.append_line(attributes.format(\n 'slave follow {follow}'))\n\n if attributes.value('preempt') and \\\n attributes.value('preempt_minimum_delay'):\n # interface <interface_name>\n # address-family <address_family>\n # hsrp <group_number>\n # preempt delay <preempt_minimum_delay>\n configurations.append_line(attributes.format(\n 'preempt delay {preempt_minimum_delay}'))\n elif attributes.value('preempt'):\n # interface <interface_name>\n # address-family <address_family>\n # hsrp <group_number>\n # preempt\n configurations.append_line(\n attributes.format('preempt'))\n\n # interface <interface_name>\n # address-family <address_family>\n # hsrp <group_number>\n # priority <priority>\n configurations.append_line(attributes.format(\n 'priority {priority}'))\n\n # interface <interface_name>\n # address-family <address_family>\n # hsrp <group_number>\n # timers <hello_interval_seconds>\\\n # <holdtime_seconds>\n configurations.append_line(attributes.format(\n 'timers {hello_interval_seconds}'\n ' {holdtime_seconds}'))\n\n # interface <interface_name>\n # address-family <address_family>\n # hsrp <group_number>\n # timers msec <hello_interval_msec> \\\n # msec <holdtime_msec>\n configurations.append_line(attributes.format(\n 'timers msec {hello_interval_msec} '\n 'msec {holdtime_msec}'))\n\n timers_config = ['timers ']\n if (attributes.value('hello_msec_flag') is False)\\\n and (attributes.value('hold_msec_flag') is False):\n # interface <interface_name>\n # address-family <address_family>\n # hsrp <group_number>\n # timers <hello_sec>\\\n # <hold_sec>\n timers_config.append(\\\n attributes.format('{hello_sec} {hold_sec}'))\n elif (attributes.value('hello_msec_flag') is True)\\\n and (attributes.value('hold_msec_flag') is False):\n # interface <interface_name>\n # address-family <address_family>\n # hsrp <group_number>\n # timers msec <hello_msec>\\\n # <hold_sec>\n timers_config.append(attributes.format(\\\n 'msec {hello_msec} {hold_sec}'))\n elif (attributes.value('hello_msec_flag') is False)\\\n and (attributes.value('hold_msec_flag') is True):\n # interface <interface_name>\n # address-family <address_family>\n # hsrp <group_number>\n # timers <hello_sec>\\\n # msec <hold_msec>\n timers_config.append(attributes.format(\\\n '{hello_sec} msec {hold_msec}'))\n elif (attributes.value('hello_msec_flag') is True)\\\n and (attributes.value('hold_msec_flag') is True):\n # interface <interface_name>\n # address-family <address_family>\n # hsrp <group_number>\n # timers msec <hello_msec>\\\n # msec <hold_msec>\n timers_config.append(attributes.format(\\\n 'msec {hello_msec} msec {hold_msec}'))\n if timers_config[1] != '':\n configurations.append_line(\\\n ''.join(timers_config))\n\n # interface <interface_name>\n # address-family <address_family>\n # hsrp <group_number>\n # track object <track_object> \\\n # [<priority_decrement>]\n if attributes.value('track_object') and\\\n attributes.value('priority_decrement'):\n configurations.append_line(attributes.format(\n 'track object {track_object}'\n ' {priority_decrement}'))\n elif 
attributes.value('track_object'):\n configurations.append_line(attributes.format(\n 'track object {track_object}'))\n\n # interface <interface_name>\n # address-family <address_family>\n # hsrp <group_number>\n # track object <tracked_object> \\\n # [<tracked_object_priority_decrement>]\n if attributes.value('tracked_object') and\\\n attributes.value(\\\n 'tracked_object_priority_decrement'):\n configurations.append_line(attributes.format(\n 'track object {tracked_object}'\n ' {tracked_object_priority_decrement}'))\n elif attributes.value('tracked_object'):\n configurations.append_line(attributes.format(\n 'track object {tracked_object}'))\n\n # interface <interface_name>\n # address-family <address_family>\n # hsrp <group_number>\n # track <tracked_interface> \\\n # [<tracked_intf_priority_decrement>]\n if attributes.value('tracked_interface') and\\\n attributes.value(\\\n 'tracked_intf_priority_decrement'):\n configurations.append_line(attributes.format(\n 'track {tracked_interface}'\n ' {tracked_intf_priority_decrement}'))\n elif attributes.value('tracked_interface'):\n configurations.append_line(attributes.format(\n 'track {tracked_interface}'))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n", "id": "9935660", "language": "Python", "matching_score": 2.3849475383758545, "max_stars_count": 0, "path": "pkgs/conf-pkg/src/genie/libs/conf/hsrp/iosxr/hsrp.py" }, { "content": "#!/usr/bin/env python\n\n'''\nIOSXE unit tests for Genie Standby conf using CLI.\n'''\n\n# Python\nimport re\nimport unittest\nfrom unittest.mock import Mock\n\n# Genie\nfrom genie.conf import Genie\nfrom genie.conf.tests import TestCase\nfrom genie.conf.base import Testbed, Device\nfrom genie.libs.conf.hsrp.hsrp import Hsrp\nfrom genie.libs.conf.interface import Interface\n\n\nclass test_hsrp_old(TestCase):\n\n def setUp(self):\n # Set Genie Tb\n testbed = Testbed()\n Genie.testbed = testbed\n # Device\n self.dev1 = Device(name='PE1', testbed=testbed, os='iosxe')\n # Interface\n self.intf1 = Interface(name='GigabitEthernet1/0/1', device=self.dev1)\n self.intf1.shutdown = False\n self.intf1.switchport = False\n # Hsrp object\n self.hsrp1 = Hsrp()\n # Build config\n cfgs = self.intf1.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(\n str(cfgs),\n '\\n'.join([\n 'interface GigabitEthernet1/0/1',\n ' no shutdown',\n ' no switchport',\n ' exit',\n ]))\n\n def test_cli_config1(self):\n # Apply configuration\n key = self.hsrp1.device_attr[self.dev1].interface_attr[self.intf1]\n key.version = 2\n key.minimum_delay = 5\n key.reload_delay = 10\n key.group_number = 1\n key.ip_address = '192.168.1.254'\n key.priority = 110\n key.preempt = True\n key.preempt_minimum_delay = 5\n key.preempt_reload_delay = 10\n key.preempt_sync_delay = 20\n key.hello_interval_seconds = 1\n key.holdtime_seconds = 3\n key.track_object = 1\n key.priority_decrement = 20\n key.authentication_word = '<PASSWORD>'\n key.bfd = True\n key.mac_refresh = 11\n key.follow = 'test'\n\n # Build config\n cfgs = self.hsrp1.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'interface GigabitEthernet1/0/1',\n ' standby version 2',\n ' standby bfd',\n ' standby delay minimum 5 reload 10',\n ' standby mac-refresh 11',\n ' standby 1 authentication cisco123',\n ' standby 1 follow test',\n ' standby 1 ip 
192.168.1.254',\n ' standby 1 preempt delay minimum 5 reload 10 sync 20',\n ' standby 1 priority 110',\n ' standby 1 timers 1 3',\n ' standby 1 track 1 decrement 20',\n ' exit',\n ]))\n\n # Build unconfig\n cfgs = self.hsrp1.build_unconfig(apply=False)\n\n # Check config correctly unconfigured\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'interface GigabitEthernet1/0/1',\n ' no standby version 2',\n ' no standby bfd',\n ' no standby delay minimum 5 reload 10',\n ' no standby mac-refresh 11',\n ' no standby 1 authentication cisco123',\n ' no standby 1 follow test',\n ' no standby 1 ip 192.168.1.254',\n ' no standby 1 preempt delay minimum 5 reload 10 sync 20',\n ' no standby 1 priority 110',\n ' no standby 1 timers 1 3',\n ' no standby 1 track 1 decrement 20',\n ' exit',\n ]))\n\n def test_cli_config2(self):\n # Apply configuration\n key = self.hsrp1.device_attr[self.dev1].interface_attr[self.intf1]\n key.version = 2\n key.minimum_delay = 5\n key.reload_delay = 10\n key.group_number = 1\n key.ip_address = '192.168.1.254'\n key.priority = 110\n key.preempt = True\n key.preempt_minimum_delay = 5\n key.preempt_reload_delay = 10\n key.hello_interval_seconds = 1\n key.holdtime_seconds = 3\n key.track_object = 1\n key.priority_decrement = 20\n key.authentication_text = '<PASSWORD>'\n\n # Build config\n cfgs = self.hsrp1.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'interface GigabitEthernet1/0/1',\n ' standby version 2',\n ' standby delay minimum 5 reload 10',\n ' standby 1 authentication text cisco123',\n ' standby 1 ip 192.168.1.254',\n ' standby 1 preempt delay minimum 5 reload 10',\n ' standby 1 priority 110',\n ' standby 1 timers 1 3',\n ' standby 1 track 1 decrement 20',\n ' exit',\n ]))\n\n # Build unconfig\n cfgs = self.hsrp1.build_unconfig(apply=False)\n\n # Check config correctly unconfigured\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'interface GigabitEthernet1/0/1',\n ' no standby version 2',\n ' no standby delay minimum 5 reload 10',\n ' no standby 1 authentication text cisco123',\n ' no standby 1 ip 192.168.1.254',\n ' no standby 1 preempt delay minimum 5 reload 10',\n ' no standby 1 priority 110',\n ' no standby 1 timers 1 3',\n ' no standby 1 track 1 decrement 20',\n ' exit',\n ]))\n\n def test_cli_config3(self):\n # Apply configuration\n key = self.hsrp1.device_attr[self.dev1].interface_attr[self.intf1]\n key.version = 2\n key.minimum_delay = 5\n key.reload_delay = 10\n key.group_number = 1\n key.ip_address = '192.168.1.254'\n key.priority = 110\n key.preempt = True\n key.preempt_minimum_delay = 5\n key.hello_interval_seconds = 1\n key.holdtime_seconds = 3\n key.track_object = 1\n key.priority_decrement = 20\n key.authentication_md5_keychain = 'abc'\n\n # Build config\n cfgs = self.hsrp1.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'interface GigabitEthernet1/0/1',\n ' standby version 2',\n ' standby delay minimum 5 reload 10',\n ' standby 1 authentication md5 key-chain abc',\n ' standby 1 ip 192.168.1.254',\n ' standby 1 preempt delay minimum 5',\n ' standby 1 priority 110',\n ' standby 1 timers 1 3',\n ' standby 1 track 1 decrement 20',\n ' exit',\n ]))\n\n # Build unconfig\n cfgs = self.hsrp1.build_unconfig(apply=False)\n\n # Check config correctly unconfigured\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 
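# Note: build_unconfig() simply re-runs build_config() with\n                # unconfig=True, so every configured line reappears below\n                # prefixed with 'no'.\n                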
'interface GigabitEthernet1/0/1',\n ' no standby version 2',\n ' no standby delay minimum 5 reload 10',\n ' no standby 1 authentication md5 key-chain abc',\n ' no standby 1 ip 192.168.1.254',\n ' no standby 1 preempt delay minimum 5',\n ' no standby 1 priority 110',\n ' no standby 1 timers 1 3',\n ' no standby 1 track 1 decrement 20',\n ' exit',\n ]))\n\n def test_cli_config4(self):\n # Apply configuration\n key = self.hsrp1.device_attr[self.dev1].interface_attr[self.intf1]\n key.version = 2\n key.minimum_delay = 5\n key.reload_delay = 10\n key.group_number = 1\n key.ip_address = '192.168.1.254'\n key.priority = 110\n key.preempt = True\n key.preempt_minimum_delay = 5\n key.preempt_reload_delay = 10\n key.hello_interval_seconds = 1\n key.holdtime_seconds = 3\n key.track_object = 1\n key.priority_decrement = 20\n key.authentication_md5_keystring = 'xyz'\n\n # Build config\n cfgs = self.hsrp1.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'interface GigabitEthernet1/0/1',\n ' standby version 2',\n ' standby delay minimum 5 reload 10',\n ' standby 1 authentication md5 key-string xyz',\n ' standby 1 ip 192.168.1.254',\n ' standby 1 preempt delay minimum 5 reload 10',\n ' standby 1 priority 110',\n ' standby 1 timers 1 3',\n ' standby 1 track 1 decrement 20',\n ' exit',\n ]))\n\n # Build unconfig\n cfgs = self.hsrp1.build_unconfig(apply=False)\n\n # Check config correctly unconfigured\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'interface GigabitEthernet1/0/1',\n ' no standby version 2',\n ' no standby delay minimum 5 reload 10',\n ' no standby 1 authentication md5 key-string xyz',\n ' no standby 1 ip 192.168.1.254',\n ' no standby 1 preempt delay minimum 5 reload 10',\n ' no standby 1 priority 110',\n ' no standby 1 timers 1 3',\n ' no standby 1 track 1 decrement 20',\n ' exit',\n ]))\n\n def test_cli_config5(self):\n # Apply configuration\n key = self.hsrp1.device_attr[self.dev1].interface_attr[self.intf1]\n key.version = 2\n key.minimum_delay = 5\n key.group_number = 15\n key.priority = 110\n key.preempt = True\n key.bfd = True\n key.use_bia = True\n key.hello_interval_msec = 55\n key.holdtime_msec = 100\n key.track_object = 1\n key.track_shutdown = True\n key.group_name = 'gandalf'\n key.mac_address = 'dead.beef.dead'\n key.redirect = True\n\n # Build config\n cfgs = self.hsrp1.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'interface GigabitEthernet1/0/1',\n ' standby version 2',\n ' standby bfd',\n ' standby delay minimum 5',\n ' standby use-bia',\n ' standby redirect',\n ' standby 15 mac-address dead.beef.dead',\n ' standby 15 name gandalf',\n ' standby 15 preempt',\n ' standby 15 priority 110',\n ' standby 15 timers msec 55 msec 100',\n ' standby 15 track 1 shutdown',\n ' exit',\n ]))\n\n # Build unconfig\n cfgs = self.hsrp1.build_unconfig(apply=False)\n\n # Check config correctly unconfigured\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'interface GigabitEthernet1/0/1',\n ' no standby version 2',\n ' no standby bfd',\n ' no standby delay minimum 5',\n ' no standby use-bia',\n ' no standby redirect',\n ' no standby 15 mac-address dead.beef.dead',\n ' no standby 15 name gandalf',\n ' no standby 15 preempt',\n ' no standby 15 priority 110',\n ' no standby 15 timers msec 55 msec 100',\n ' no standby 15 track 1 shutdown',\n ' exit',\n ]))\n\n def 
test_cli_config6(self):\n # Hsrp object\n self.hsrp1 = Hsrp()\n\n # Apply configuration\n key1 = self.hsrp1.device_attr[self.dev1].interface_attr[self.intf1]\n key1.version = 2\n key1.group_number = 10\n key1.priority = 110\n key1.preempt = True\n key1.preempt_reload_delay = 30\n \n # Hsrp object\n self.hsrp2 = Hsrp()\n\n # Apply configuration\n key2 = self.hsrp2.device_attr[self.dev1].interface_attr[self.intf1]\n key2.group_number = 20\n key2.priority = 120\n key2.preempt = True\n key2.preempt_sync_delay = 60\n\n # Build config\n cfgs1 = self.hsrp1.build_config(apply=False)\n cfgs2 = self.hsrp2.build_config(apply=False)\n\n cfgs = str(cfgs1[self.dev1.name]) + '\\n' + str(cfgs2[self.dev1.name])\n\n # Check config correctly unconfigured\n self.assertMultiLineEqual(cfgs,\n '\\n'.join([\n 'interface GigabitEthernet1/0/1\\n'\n ' standby version 2\\n'\n ' standby 10 preempt delay reload 30\\n'\n ' standby 10 priority 110\\n'\n ' exit\\n'\n 'interface GigabitEthernet1/0/1\\n'\n ' standby 20 preempt delay sync 60\\n'\n ' standby 20 priority 120\\n'\n ' exit'\n ]))\n\nclass test_hsrp(TestCase):\n\n def setUp(self):\n # Set Genie Tb\n testbed = Testbed()\n Genie.testbed = testbed\n # Device\n self.dev1 = Device(name='PE1', testbed=testbed, os='iosxe')\n # Interface\n self.intf1 = Interface(name='GigabitEthernet1/0/1', device=self.dev1)\n self.intf1.enabled = False\n self.intf1.switchport = False\n # Hsrp object\n self.hsrp1 = Hsrp()\n # Build config\n cfgs = self.intf1.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(\n str(cfgs),\n '\\n'.join([\n 'interface GigabitEthernet1/0/1',\n ' shutdown',\n ' no switchport',\n ' exit',\n ]))\n\n def test_cli_config1(self):\n # Apply configuration\n key = self.hsrp1.device_attr[self.dev1].interface_attr[self.intf1]\n key.version = 2\n key.minimum_delay = 5\n key.reload_delay = 10\n key.group_number = 1\n key.primary_ipv4_address = '192.168.1.254'\n key.priority = 110\n key.preempt = True\n key.hello_sec = 1\n key.hold_sec = 3\n key.tracked_object = 1\n key.tracked_object_priority_decrement = 20\n key.authentication = 'cisco123'\n key.bfd_enabled = True\n key.mac_refresh = 11\n key.follow = 'test'\n\n # Build config\n cfgs = self.hsrp1.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'interface GigabitEthernet1/0/1',\n ' standby version 2',\n ' standby bfd',\n ' standby delay minimum 5 reload 10',\n ' standby mac-refresh 11',\n ' standby 1 authentication cisco123',\n ' standby 1 follow test',\n ' standby 1 ip 192.168.1.254',\n ' standby 1 preempt',\n ' standby 1 priority 110',\n ' standby 1 timers 1 3',\n ' standby 1 track 1 decrement 20',\n ' exit',\n ]))\n\n # Build unconfig\n cfgs = self.hsrp1.build_unconfig(apply=False)\n\n # Check config correctly unconfigured\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'interface GigabitEthernet1/0/1',\n ' no standby version 2',\n ' no standby bfd',\n ' no standby delay minimum 5 reload 10',\n ' no standby mac-refresh 11',\n ' no standby 1 authentication cisco123',\n ' no standby 1 follow test',\n ' no standby 1 ip 192.168.1.254',\n ' no standby 1 preempt',\n ' no standby 1 priority 110',\n ' no standby 1 timers 1 3',\n ' no standby 1 track 1 decrement 20',\n ' exit',\n ]))\n\n def test_cli_config2(self):\n # Apply configuration\n key = self.hsrp1.device_attr[self.dev1].interface_attr[self.intf1]\n key.version = 2\n key.minimum_delay = 5\n key.reload_delay = 10\n 
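# Note: the new-style schema uses primary/secondary_ipv4_address in\n        # place of the legacy ip_address attribute exercised by\n        # test_hsrp_old above.\n        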
key.group_number = 1\n key.primary_ipv4_address = '192.168.1.254'\n key.secondary_ipv4_address = '192.168.1.253'\n key.priority = 110\n key.preempt = True\n key.hello_sec = 1\n key.hold_sec = 3\n key.tracked_object = 1\n key.tracked_object_priority_decrement = 20\n key.authentication = 'cisco123'\n\n # Build config\n cfgs = self.hsrp1.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'interface GigabitEthernet1/0/1',\n ' standby version 2',\n ' standby delay minimum 5 reload 10',\n ' standby 1 authentication cisco123',\n ' standby 1 ip 192.168.1.254',\n ' standby 1 ip 192.168.1.253 secondary',\n ' standby 1 preempt',\n ' standby 1 priority 110',\n ' standby 1 timers 1 3',\n ' standby 1 track 1 decrement 20',\n ' exit',\n ]))\n\n # Build unconfig\n cfgs = self.hsrp1.build_unconfig(apply=False)\n\n # Check config correctly unconfigured\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'interface GigabitEthernet1/0/1',\n ' no standby version 2',\n ' no standby delay minimum 5 reload 10',\n ' no standby 1 authentication cisco123',\n ' no standby 1 ip 192.168.1.254',\n ' no standby 1 ip 192.168.1.253 secondary',\n ' no standby 1 preempt',\n ' no standby 1 priority 110',\n ' no standby 1 timers 1 3',\n ' no standby 1 track 1 decrement 20',\n ' exit',\n ]))\n\n def test_cli_config3(self):\n # Apply configuration\n key = self.hsrp1.device_attr[self.dev1].interface_attr[self.intf1]\n key.version = 2\n key.minimum_delay = 5\n key.reload_delay = 10\n key.group_number = 1\n key.primary_ipv4_address = '192.168.1.254'\n key.priority = 110\n key.preempt = True\n key.hello_sec = 1\n key.hold_sec = 3\n key.tracked_object = 1\n key.tracked_object_priority_decrement = 20\n key.authentication = 'abc'\n\n # Build config\n cfgs = self.hsrp1.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'interface GigabitEthernet1/0/1',\n ' standby version 2',\n ' standby delay minimum 5 reload 10',\n ' standby 1 authentication abc',\n ' standby 1 ip 192.168.1.254',\n ' standby 1 preempt',\n ' standby 1 priority 110',\n ' standby 1 timers 1 3',\n ' standby 1 track 1 decrement 20',\n ' exit',\n ]))\n\n # Build unconfig\n cfgs = self.hsrp1.build_unconfig(apply=False)\n\n # Check config correctly unconfigured\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'interface GigabitEthernet1/0/1',\n ' no standby version 2',\n ' no standby delay minimum 5 reload 10',\n ' no standby 1 authentication abc',\n ' no standby 1 ip 192.168.1.254',\n ' no standby 1 preempt',\n ' no standby 1 priority 110',\n ' no standby 1 timers 1 3',\n ' no standby 1 track 1 decrement 20',\n ' exit',\n ]))\n\n def test_cli_config4(self):\n # Apply configuration\n key = self.hsrp1.device_attr[self.dev1].interface_attr[self.intf1]\n key.version = 2\n key.minimum_delay = 5\n key.reload_delay = 10\n key.group_number = 1\n key.primary_ipv4_address = '192.168.1.254'\n key.virtual_ip_learn = True\n key.priority = 110\n key.preempt = True\n key.hello_sec = 1\n key.hold_sec = 3\n key.tracked_object = 1\n key.tracked_object_priority_decrement = 20\n key.authentication = 'xyz'\n\n # Build config\n cfgs = self.hsrp1.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'interface GigabitEthernet1/0/1',\n ' standby version 2',\n ' standby delay minimum 5 reload 10',\n ' 
standby 1 authentication xyz',\n ' standby 1 ip 192.168.1.254',\n ' standby 1 ip',\n ' standby 1 preempt',\n ' standby 1 priority 110',\n ' standby 1 timers 1 3',\n ' standby 1 track 1 decrement 20',\n ' exit',\n ]))\n\n # Build unconfig\n cfgs = self.hsrp1.build_unconfig(apply=False)\n\n # Check config correctly unconfigured\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'interface GigabitEthernet1/0/1',\n ' no standby version 2',\n ' no standby delay minimum 5 reload 10',\n ' no standby 1 authentication xyz',\n ' no standby 1 ip 192.168.1.254',\n ' no standby 1 ip',\n ' no standby 1 preempt',\n ' no standby 1 priority 110',\n ' no standby 1 timers 1 3',\n ' no standby 1 track 1 decrement 20',\n ' exit',\n ]))\n\n def test_cli_config5(self):\n # Apply configuration\n key = self.hsrp1.device_attr[self.dev1].interface_attr[self.intf1]\n key.version = 2\n key.address_family = 'ipv6'\n key.global_ipv6_address = '2001:db8::1/24'\n key.link_local_ipv6_address = 'fe80::1'\n key.hsrp_linklocal = 'auto'\n key.minimum_delay = 5\n key.group_number = 15\n key.priority = 110\n key.preempt = True\n key.bfd_enabled = True\n key.use_bia = True\n key.hello_msec_flag = True\n key.hello_msec = 55\n key.hold_msec_flag = True\n key.hold_msec = 100\n key.tracked_object = 1\n key.session_name = 'gandalf'\n key.virtual_mac_address = 'dead.beef.dead'\n key.redirects_disable = False\n\n # Build config\n cfgs = self.hsrp1.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'interface GigabitEthernet1/0/1',\n ' standby version 2',\n ' standby bfd',\n ' standby delay minimum 5',\n ' standby use-bia',\n ' standby redirect',\n ' standby 15 ipv6 2001:db8::1/24',\n ' standby 15 ipv6 fe80::1',\n ' standby 15 ipv6 autoconfig',\n ' standby 15 mac-address dead.beef.dead',\n ' standby 15 name gandalf',\n ' standby 15 preempt',\n ' standby 15 priority 110',\n ' standby 15 timers msec 55 msec 100',\n ' standby 15 track 1',\n ' exit',\n ]))\n\n # Build unconfig\n cfgs = self.hsrp1.build_unconfig(apply=False)\n\n # Check config correctly unconfigured\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'interface GigabitEthernet1/0/1',\n ' no standby version 2',\n ' no standby bfd',\n ' no standby delay minimum 5',\n ' no standby use-bia',\n ' no standby redirect',\n ' no standby 15 ipv6 2001:db8::1/24',\n ' no standby 15 ipv6 fe80::1',\n ' no standby 15 ipv6 autoconfig',\n ' no standby 15 mac-address dead.beef.dead',\n ' no standby 15 name gandalf',\n ' no standby 15 preempt',\n ' no standby 15 priority 110',\n ' no standby 15 timers msec 55 msec 100',\n ' no standby 15 track 1',\n ' exit',\n ]))\n\n def test_cli_config6(self):\n # Hsrp object\n self.hsrp1 = Hsrp()\n\n # Apply configuration\n key1 = self.hsrp1.device_attr[self.dev1].interface_attr[self.intf1]\n key1.version = 2\n key1.group_number = 10\n key1.priority = 110\n key1.preempt = True\n \n # Hsrp object\n self.hsrp2 = Hsrp()\n\n # Apply configuration\n key2 = self.hsrp2.device_attr[self.dev1].interface_attr[self.intf1]\n key2.group_number = 20\n key2.priority = 120\n key2.preempt = True\n\n # Build config\n cfgs1 = self.hsrp1.build_config(apply=False)\n cfgs2 = self.hsrp2.build_config(apply=False)\n\n cfgs = str(cfgs1[self.dev1.name]) + '\\n' + str(cfgs2[self.dev1.name])\n\n # Check config correctly unconfigured\n self.assertMultiLineEqual(cfgs,\n '\\n'.join([\n 'interface GigabitEthernet1/0/1\\n'\n ' standby version 2\\n'\n ' standby 
10 preempt\\n'\n ' standby 10 priority 110\\n'\n ' exit\\n'\n 'interface GigabitEthernet1/0/1\\n'\n ' standby 20 preempt\\n'\n ' standby 20 priority 120\\n'\n ' exit'\n ]))\n\nif __name__ == '__main__':\n unittest.main()\n\n", "id": "10010397", "language": "Python", "matching_score": 6.202490329742432, "max_stars_count": 0, "path": "pkgs/conf-pkg/src/genie/libs/conf/hsrp/iosxe/tests/test_hsrp.py" }, { "content": "#!/usr/bin/env python\n\n'''\nIOSXR unit tests for Genie Hsrp conf.\n'''\n\n# Python\nimport re\nimport unittest\nfrom unittest.mock import Mock\n\n# Genie\nfrom genie.conf import Genie\nfrom genie.conf.tests import TestCase\nfrom genie.conf.base import Testbed, Device\nfrom genie.libs.conf.hsrp.hsrp import Hsrp\nfrom genie.libs.conf.interface import Interface\n\n\nclass test_hsrp_old(TestCase):\n\n def setUp(self):\n # Set Genie Tb\n testbed = Testbed()\n Genie.testbed = testbed\n # Device\n self.dev1 = Device(name='PE1', testbed=testbed, os='iosxr')\n # Interface\n self.intf1 = Interface(name='GigabitEthernet0/0/0/1', device=self.dev1)\n self.intf1.shutdown = False\n # Hsrp object\n self.hsrp1 = Hsrp()\n # Build config\n cfgs = self.intf1.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(\n str(cfgs),\n '\\n'.join([\n 'interface GigabitEthernet0/0/0/1',\n ' no shutdown',\n ' exit',\n ]))\n\n def test_cli_config1(self):\n # Apply configuration\n key = self.hsrp1.device_attr[self.dev1].interface_attr[self.intf1]\n key.bfd_min_interval = 30\n key.bfd_multiplier = 50\n key.minimum_delay = 5\n key.reload_delay = 10\n key.mac_refresh = 20\n key.use_bia = True\n key.redirect = True\n key.address_family = 'ipv4'\n key.version = 2\n key.group_number = 30\n key.ip_address = '192.168.1.254'\n key.authentication_word = '<PASSWORD>'\n key.bfd_fast_detect = True\n key.mac_address = 'dead.beef.dead'\n key.group_name = 'gandalf'\n key.preempt = True\n key.preempt_minimum_delay = 5\n key.priority = 110\n key.hello_interval_seconds = 1\n key.holdtime_seconds = 3\n key.track_object = 1\n key.priority_decrement = 20\n\n # Build config\n cfgs = self.hsrp1.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'router hsrp',\n 'interface GigabitEthernet0/0/0/1',\n ' hsrp bfd minimum-interval 30',\n ' hsrp bfd multiplier 50',\n ' hsrp delay minimum 5 reload 10',\n ' hsrp use-bia',\n ' hsrp redirect disable',\n ' hsrp mac-refresh 20',\n ' address-family ipv4',\n ' hsrp version 2',\n ' hsrp 30',\n ' address 192.168.1.254',\n ' authentication cisco123',\n ' bfd fast-detect',\n ' mac-address dead.beef.dead',\n ' name gandalf',\n ' preempt delay 5',\n ' priority 110',\n ' timers 1 3',\n ' track object 1 20',\n ' exit',\n ' exit',\n ' exit',\n ]))\n\n # Build unconfig\n cfgs = self.hsrp1.build_unconfig(apply=False)\n\n # Check config correctly unconfigured\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'no router hsrp',\n 'interface GigabitEthernet0/0/0/1',\n ' no hsrp bfd minimum-interval 30',\n ' no hsrp bfd multiplier 50',\n ' no hsrp delay minimum 5 reload 10',\n ' no hsrp use-bia',\n ' no hsrp redirect disable',\n ' no hsrp mac-refresh 20',\n ' no address-family ipv4',\n ' exit',\n ]))\n\n def test_cli_config2(self):\n # Apply configuration\n key = self.hsrp1.device_attr[self.dev1].interface_attr[self.intf1]\n key.address_family = 'ipv6'\n key.group_number = 5\n key.priority = 110\n key.preempt = True\n key.hello_interval_msec = 300\n key.holdtime_msec = 
500\n\n # Build config\n cfgs = self.hsrp1.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'router hsrp',\n 'interface GigabitEthernet0/0/0/1',\n ' address-family ipv6',\n ' hsrp 5',\n ' preempt',\n ' priority 110',\n ' timers msec 300 msec 500',\n ' exit',\n ' exit',\n ' exit',\n ]))\n\n # Build unconfig\n cfgs = self.hsrp1.build_unconfig(apply=False)\n\n # Check config correctly unconfigured\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'no router hsrp',\n 'interface GigabitEthernet0/0/0/1',\n ' no address-family ipv6',\n ' exit',\n ]))\n\n def test_cli_config_args(self):\n # create Hsrp conf by taking args\n hsrp1 = Hsrp(group_number=5, address_family = 'ipv6')\n # Apply configuration\n key = hsrp1.device_attr[self.dev1].interface_attr[self.intf1]\n key.priority = 110\n key.preempt = True\n key.hello_interval_msec = 300\n key.holdtime_msec = 500\n\n # Build config\n cfgs = hsrp1.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'router hsrp',\n 'interface GigabitEthernet0/0/0/1',\n ' address-family ipv6',\n ' hsrp 5',\n ' preempt',\n ' priority 110',\n ' timers msec 300 msec 500',\n ' exit',\n ' exit',\n ' exit',\n ]))\n\n # Build unconfig\n cfgs = hsrp1.build_unconfig(apply=False)\n\n # Check config correctly unconfigured\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'no router hsrp',\n 'interface GigabitEthernet0/0/0/1',\n ' no address-family ipv6',\n ' exit',\n ]))\n\n\n # create Ipv4 Hsrp conf by taking args\n hsrp2 = Hsrp(group_number=5)\n # Apply configuration\n key = hsrp2.device_attr[self.dev1].interface_attr[self.intf1]\n key.priority = 110\n key.preempt = True\n key.hello_interval_msec = 300\n key.holdtime_msec = 500\n\n # Build config\n cfgs = hsrp2.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'router hsrp',\n 'interface GigabitEthernet0/0/0/1',\n ' address-family ipv4',\n ' hsrp 5',\n ' preempt',\n ' priority 110',\n ' timers msec 300 msec 500',\n ' exit',\n ' exit',\n ' exit',\n ]))\n\n # Build unconfig\n cfgs = hsrp2.build_unconfig(apply=False)\n\n # Check config correctly unconfigured\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'no router hsrp',\n 'interface GigabitEthernet0/0/0/1',\n ' no address-family ipv4',\n ' exit',\n ]))\n\nclass test_hsrp(TestCase):\n\n def setUp(self):\n # Set Genie Tb\n testbed = Testbed()\n Genie.testbed = testbed\n # Device\n self.dev1 = Device(name='PE1', testbed=testbed, os='iosxr')\n # Interface\n self.intf1 = Interface(name='GigabitEthernet0/0/0/1', device=self.dev1)\n self.intf1.shutdown = False\n # Hsrp object\n self.hsrp1 = Hsrp()\n # Build config\n cfgs = self.intf1.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(\n str(cfgs),\n '\\n'.join([\n 'interface GigabitEthernet0/0/0/1',\n ' no shutdown',\n ' exit',\n ]))\n\n def test_cli_config1(self):\n # Apply configuration\n key = self.hsrp1.device_attr[self.dev1].interface_attr[self.intf1]\n key.bfd_interval = 30\n key.bfd_detection_multiplier = 50\n key.bfd_address = '192.168.1.2'\n key.bfd_interface_name = 'GigabitEthernet0/0/0/1'\n key.minimum_delay = 5\n key.reload_delay = 10\n key.mac_refresh = 20\n key.use_bia = True\n key.redirects_disable = True\n key.address_family = 'ipv4'\n key.version = 2\n 
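# Note: the new-style names above (bfd_interval,\n        # bfd_detection_multiplier, redirects_disable) replace the legacy\n        # bfd_min_interval, bfd_multiplier and redirect attributes used in\n        # test_hsrp_old, while rendering the same CLI.\n        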
key.group_number = 30\n key.primary_ipv4_address = '192.168.1.254'\n key.secondary_ipv4_address = '192.168.1.253'\n key.authentication = 'cisco123'\n key.bfd_enabled = True\n key.virtual_mac_address = 'dead.beef.dead'\n key.session_name = 'gandalf'\n key.preempt = True\n key.priority = 110\n key.hello_sec = 1\n key.hold_sec = 3\n key.tracked_object = 1\n key.tracked_object_priority_decrement = 20\n key.state_change_disable = True\n\n # Build config\n cfgs = self.hsrp1.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'router hsrp',\n 'message state disable',\n 'interface GigabitEthernet0/0/0/1',\n ' hsrp bfd minimum-interval 30',\n ' hsrp bfd multiplier 50',\n ' hsrp delay minimum 5 reload 10',\n ' hsrp use-bia',\n ' hsrp redirect disable',\n ' hsrp mac-refresh 20',\n ' address-family ipv4',\n ' hsrp version 2',\n ' hsrp bfd fast-detect peer 192.168.1.2 GigabitEthernet0/0/0/1',\n ' hsrp 30',\n ' address 192.168.1.254',\n ' address 192.168.1.253 secondary',\n ' authentication cisco123',\n ' bfd fast-detect',\n ' mac-address dead.beef.dead',\n ' name gandalf',\n ' preempt',\n ' priority 110',\n ' timers 1 3',\n ' track object 1 20',\n ' exit',\n ' exit',\n ' exit',\n ]))\n\n # Build unconfig\n cfgs = self.hsrp1.build_unconfig(apply=False)\n\n # Check config correctly unconfigured\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'no router hsrp',\n 'no message state disable',\n 'interface GigabitEthernet0/0/0/1',\n ' no hsrp bfd minimum-interval 30',\n ' no hsrp bfd multiplier 50',\n ' no hsrp delay minimum 5 reload 10',\n ' no hsrp use-bia',\n ' no hsrp redirect disable',\n ' no hsrp mac-refresh 20',\n ' no address-family ipv4',\n ' exit',\n ]))\n\n def test_cli_config2(self):\n # Apply configuration\n key = self.hsrp1.device_attr[self.dev1].interface_attr[self.intf1]\n key.address_family = 'ipv6'\n key.global_ipv6_address = '2001:db8:1:1::254/64'\n key.link_local_ipv6_address = 'fe80::205:73ff:fea0:19'\n key.group_number = 5\n key.priority = 110\n key.preempt = True\n key.hello_msec_flag = True\n key.hello_msec = 300\n key.hold_msec_flag = True\n key.hold_msec = 500\n key.tracked_interface = 'GigabitEthernet0/0/0/0'\n key.tracked_intf_priority_decrement = 20\n\n # Build config\n cfgs = self.hsrp1.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'router hsrp',\n 'interface GigabitEthernet0/0/0/1',\n ' address-family ipv6',\n ' hsrp 5',\n ' address global 2001:db8:1:1::254/64',\n ' address linklocal fe80::205:73ff:fea0:19',\n ' preempt',\n ' priority 110',\n ' timers msec 300 msec 500',\n ' track GigabitEthernet0/0/0/0 20',\n ' exit',\n ' exit',\n ' exit',\n ]))\n\n # Build unconfig\n cfgs = self.hsrp1.build_unconfig(apply=False)\n\n # Check config correctly unconfigured\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'no router hsrp',\n 'interface GigabitEthernet0/0/0/1',\n ' no address-family ipv6',\n ' exit',\n ]))\n\n def test_cli_config_args(self):\n # create Hsrp conf by taking args\n hsrp1 = Hsrp(group_number=5, address_family = 'ipv6')\n # Apply configuration\n key = hsrp1.device_attr[self.dev1].interface_attr[self.intf1]\n key.hsrp_linklocal = 'auto'\n key.priority = 110\n key.preempt = True\n key.virtual_ip_learn = True\n key.hello_msec_flag = True\n key.hello_msec = 300\n key.hold_msec_flag = True\n key.hold_msec = 500\n\n # Build config\n cfgs = 
hsrp1.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'router hsrp',\n 'interface GigabitEthernet0/0/0/1',\n ' address-family ipv6',\n ' hsrp 5',\n ' address learn',\n ' address linklocal autoconfig',\n ' preempt',\n ' priority 110',\n ' timers msec 300 msec 500',\n ' exit',\n ' exit',\n ' exit',\n ]))\n\n # Build unconfig\n cfgs = hsrp1.build_unconfig(apply=False)\n\n # Check config correctly unconfigured\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'no router hsrp',\n 'interface GigabitEthernet0/0/0/1',\n ' no address-family ipv6',\n ' exit',\n ]))\n\n\n # create Ipv4 Hsrp conf by taking args\n hsrp2 = Hsrp(group_number=5)\n # Apply configuration\n key = hsrp2.device_attr[self.dev1].interface_attr[self.intf1]\n key.priority = 110\n key.preempt = True\n key.hello_msec_flag = True\n key.hello_msec = 400\n key.hold_msec_flag = True\n key.hold_msec = 500\n key.follow = 'group10'\n\n # Build config\n cfgs = hsrp2.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'router hsrp',\n 'interface GigabitEthernet0/0/0/1',\n ' address-family ipv4',\n ' hsrp 5',\n ' slave follow group10',\n ' preempt',\n ' priority 110',\n ' timers msec 400 msec 500',\n ' exit',\n ' exit',\n ' exit',\n ]))\n\n # Build unconfig\n cfgs = hsrp2.build_unconfig(apply=False)\n\n # Check config correctly unconfigured\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'no router hsrp',\n 'interface GigabitEthernet0/0/0/1',\n ' no address-family ipv4',\n ' exit',\n ]))\n\nif __name__ == '__main__':\n unittest.main()\n\n", "id": "929641", "language": "Python", "matching_score": 4.929973125457764, "max_stars_count": 0, "path": "pkgs/conf-pkg/src/genie/libs/conf/hsrp/iosxr/tests/test_hsrp.py" }, { "content": "#!/usr/bin/env python\n\n'''\nIOSXE unit tests for Genie Standby conf using YANG.\n'''\n\n# Python\nimport re\nimport unittest\nfrom unittest.mock import Mock\n\n# Genie\nfrom genie.conf import Genie\nfrom genie.conf.tests import TestCase\nfrom genie.conf.base import Testbed, Device\nfrom genie.libs.conf.hsrp.hsrp import Hsrp\nfrom genie.libs.conf.interface import Interface\n\n# YDK\nfrom ydk.models.cisco_iosxe_native import Cisco_IOS_XE_native as ned\nfrom ydk.types import DELETE, Empty\nfrom ydk.services import CRUDService\nfrom ydk.services import CodecService\nfrom ydk.providers import CodecServiceProvider\n\n# Patch a netconf provider\nfrom ydk.providers import NetconfServiceProvider as _NetconfServiceProvider\nfrom ydk.providers._provider_plugin import _ClientSPPlugin\n\nclass NetconfConnectionInfo(object):\n def __init__(self):\n self.ip = '1.1.1.1'\n self.port = 830\n self.username = 'admin'\n self.password = '<PASSWORD>'\n\nclass test_hsrp(TestCase):\n\n def setUp(self):\n\n # Set Genie Tb\n self.testbed = Testbed()\n Genie.testbed = self.testbed\n \n # Device\n self.dev1 = Device(name='PE1', testbed=self.testbed,\n os='iosxe', context='yang')\n \n # Interface\n self.intf1 = Interface(name='GigabitEthernet1/0/1', device=self.dev1)\n self.intf1.shutdown = False\n self.intf1.switchport = False\n \n # Hsrp object\n self.hsrp1 = Hsrp()\n \n # Build config\n cfgs = self.intf1.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(\n str(cfgs),\n '\\n'.join([\n 'interface GigabitEthernet1/0/1',\n ' no shutdown',\n ' no switchport',\n ' exit',\n ]))\n\n def 
test_yang_config1(self):\n # Apply configuration\n key = self.hsrp1.device_attr[self.dev1].interface_attr[self.intf1]\n key.version = 2\n key.minimum_delay = 5\n key.reload_delay = 10\n key.group_number = 25\n key.ip_address = '192.168.1.254'\n key.priority = 110\n key.preempt = True\n key.preempt_minimum_delay = 5\n key.preempt_reload_delay = 10\n key.preempt_sync_delay = 20\n key.hello_interval_seconds = 1\n key.holdtime_seconds = 3\n key.track_object = 1\n key.priority_decrement = 20\n key.authentication_word = '<PASSWORD>'\n key.bfd = True\n key.mac_refresh = 11\n key.follow = 'test'\n\n for dev in self.testbed.devices:\n dev.connections=Mock()\n dev.connections={'netconf':NetconfConnectionInfo()}\n\n # Build config\n build_cfgs = self.hsrp1.build_config(apply=False)\n\n compare1 = \"\"\n for i in build_cfgs['PE1']:\n compare1+=str(i)\n\n self.assertMultiLineEqual(compare1, '\\n'.join([\n '<edit-config xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\">\\n'\n ' <target>\\n'\n ' <running></running>\\n'\n ' </target>\\n'\n ' <config>\\n'\n ' <GigabitEthernet xmlns=\"http://cisco.com/ns/yang/Cisco-IOS-XE-native\">\\n'\n ' <name></name>\\n'\n ' <standby>\\n'\n ' <bfd></bfd>\\n'\n ' <delay>\\n'\n ' <minimum>5</minimum>\\n'\n ' <reload>10</reload>\\n'\n ' </delay>\\n'\n ' <mac-refresh>11</mac-refresh>\\n'\n ' <standby-list>\\n'\n ' <group-number>25</group-number>\\n'\n ' <authentication>\\n'\n ' <word>cisco123</word>\\n'\n ' </authentication>\\n'\n ' <follow>test</follow>\\n'\n ' <ip>\\n'\n ' <address>192.168.1.254</address>\\n'\n ' </ip>\\n'\n ' <preempt>\\n'\n ' <delay>\\n'\n ' <minimum>5</minimum>\\n'\n ' <reload>10</reload>\\n'\n ' <sync>20</sync>\\n'\n ' </delay>\\n'\n ' </preempt>\\n'\n ' <priority>110</priority>\\n'\n ' <timers>\\n'\n ' <hello-interval>\\n'\n ' <seconds>1</seconds>\\n'\n ' </hello-interval>\\n'\n ' <hold-time>\\n'\n ' <seconds>3</seconds>\\n'\n ' </hold-time>\\n'\n ' </timers>\\n'\n ' <track>\\n'\n ' <number>1</number>\\n'\n ' <decrement>20</decrement>\\n'\n ' </track>\\n'\n ' </standby-list>\\n'\n ' <version>2</version>\\n'\n ' </standby>\\n'\n ' </GigabitEthernet>\\n'\n ' </config>\\n'\n '</edit-config>\\n'\n ]))\n\n # Build config\n build_uncfgs = self.hsrp1.build_unconfig(apply=False)\n\n compare2 = \"\"\n for i in build_uncfgs['PE1']:\n compare2+=str(i)\n\n self.assertMultiLineEqual(compare2, '\\n'.join([\n '<edit-config xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\">\\n'\n ' <target>\\n'\n ' <running></running>\\n'\n ' </target>\\n'\n ' <config>\\n'\n ' <GigabitEthernet xmlns=\"http://cisco.com/ns/yang/Cisco-IOS-XE-native\">\\n'\n ' <name></name>\\n'\n ' <standby>\\n'\n ' <bfd></bfd>\\n'\n ' <delay>\\n'\n ' <minimum>5</minimum>\\n'\n ' <reload>10</reload>\\n'\n ' </delay>\\n'\n ' <mac-refresh>11</mac-refresh>\\n'\n ' <standby-list>\\n'\n ' <group-number>25</group-number>\\n'\n ' <authentication>\\n'\n ' <word>cisco123</word>\\n'\n ' </authentication>\\n'\n ' <follow>test</follow>\\n'\n ' <ip>\\n'\n ' <address>192.168.1.254</address>\\n'\n ' </ip>\\n'\n ' <preempt>\\n'\n ' <delay>\\n'\n ' <minimum>5</minimum>\\n'\n ' <reload>10</reload>\\n'\n ' <sync>20</sync>\\n'\n ' </delay>\\n'\n ' </preempt>\\n'\n ' <priority>110</priority>\\n'\n ' <timers>\\n'\n ' <hello-interval>\\n'\n ' <seconds>1</seconds>\\n'\n ' </hello-interval>\\n'\n ' <hold-time>\\n'\n ' <seconds>3</seconds>\\n'\n ' </hold-time>\\n'\n ' </timers>\\n'\n ' <track>\\n'\n ' <number>1</number>\\n'\n ' <decrement>20</decrement>\\n'\n ' </track>\\n'\n ' </standby-list>\\n'\n ' <version>2</version>\\n'\n ' 
</standby>\\n'\n ' </GigabitEthernet>\\n'\n ' </config>\\n'\n '</edit-config>\\n'\n ]))\n\n def test_yang_config2(self):\n # Apply configuration\n key = self.hsrp1.device_attr[self.dev1].interface_attr[self.intf1]\n key.version = 1\n key.minimum_delay = 5\n key.reload_delay = 10\n key.group_number = 25\n key.ip_address = '192.168.1.254'\n key.priority = 110\n key.preempt = True\n key.preempt_minimum_delay = 5\n key.preempt_reload_delay = 10\n key.hello_interval_seconds = 1\n key.holdtime_seconds = 3\n key.track_object = 1\n key.priority_decrement = 20\n key.authentication_text = 'cisco123'\n\n # Build config\n build_cfgs = self.hsrp1.build_config(apply=False)\n\n compare1 = \"\"\n for i in build_cfgs['PE1']:\n compare1+=str(i)\n\n # Check config built correctly\n self.assertMultiLineEqual(compare1, '\\n'.join([\n '<edit-config xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\">\\n'\n ' <target>\\n'\n ' <running></running>\\n'\n ' </target>\\n'\n ' <config>\\n'\n ' <GigabitEthernet xmlns=\"http://cisco.com/ns/yang/Cisco-IOS-XE-native\">\\n'\n ' <name></name>\\n'\n ' <standby>\\n'\n ' <delay>\\n'\n ' <minimum>5</minimum>\\n'\n ' <reload>10</reload>\\n'\n ' </delay>\\n'\n ' <standby-list>\\n'\n ' <group-number>25</group-number>\\n'\n ' <authentication>\\n'\n ' <word>cisco123</word>\\n'\n ' </authentication>\\n'\n ' <ip>\\n'\n ' <address>192.168.1.254</address>\\n'\n ' </ip>\\n'\n ' <preempt>\\n'\n ' <delay>\\n'\n ' <minimum>5</minimum>\\n'\n ' <reload>10</reload>\\n'\n ' </delay>\\n'\n ' </preempt>\\n'\n ' <priority>110</priority>\\n'\n ' <timers>\\n'\n ' <hello-interval>\\n'\n ' <seconds>1</seconds>\\n'\n ' </hello-interval>\\n'\n ' <hold-time>\\n'\n ' <seconds>3</seconds>\\n'\n ' </hold-time>\\n'\n ' </timers>\\n'\n ' <track>\\n'\n ' <number>1</number>\\n'\n ' <decrement>20</decrement>\\n'\n ' </track>\\n'\n ' </standby-list>\\n'\n ' <version>1</version>\\n'\n ' </standby>\\n'\n ' </GigabitEthernet>\\n'\n ' </config>\\n'\n '</edit-config>\\n'\n ]))\n\n # Build unconfig\n build_uncfgs = self.hsrp1.build_unconfig(apply=False)\n\n compare2 = \"\"\n for i in build_uncfgs['PE1']:\n compare2+=str(i)\n\n # Check config built correctly\n self.assertMultiLineEqual(compare2, '\\n'.join([\n '<edit-config xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\">\\n'\n ' <target>\\n'\n ' <running></running>\\n'\n ' </target>\\n'\n ' <config>\\n'\n ' <GigabitEthernet xmlns=\"http://cisco.com/ns/yang/Cisco-IOS-XE-native\">\\n'\n ' <name></name>\\n'\n ' <standby>\\n'\n ' <delay>\\n'\n ' <minimum>5</minimum>\\n'\n ' <reload>10</reload>\\n'\n ' </delay>\\n'\n ' <standby-list>\\n'\n ' <group-number>25</group-number>\\n'\n ' <authentication>\\n'\n ' <word>cisco123</word>\\n'\n ' </authentication>\\n'\n ' <ip>\\n'\n ' <address>192.168.1.254</address>\\n'\n ' </ip>\\n'\n ' <preempt>\\n'\n ' <delay>\\n'\n ' <minimum>5</minimum>\\n'\n ' <reload>10</reload>\\n'\n ' </delay>\\n'\n ' </preempt>\\n'\n ' <priority>110</priority>\\n'\n ' <timers>\\n'\n ' <hello-interval>\\n'\n ' <seconds>1</seconds>\\n'\n ' </hello-interval>\\n'\n ' <hold-time>\\n'\n ' <seconds>3</seconds>\\n'\n ' </hold-time>\\n'\n ' </timers>\\n'\n ' <track>\\n'\n ' <number>1</number>\\n'\n ' <decrement>20</decrement>\\n'\n ' </track>\\n'\n ' </standby-list>\\n'\n ' <version>1</version>\\n'\n ' </standby>\\n'\n ' </GigabitEthernet>\\n'\n ' </config>\\n'\n '</edit-config>\\n'\n ]))\n\n def test_yang_config3(self):\n # Apply configuration\n key = self.hsrp1.device_attr[self.dev1].interface_attr[self.intf1]\n key.version = 2\n key.minimum_delay = 5\n 
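# minimum_delay/reload_delay map to <delay><minimum>/<reload> in the XML below.\n        key.reload_delay = 10\n        # 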
key.reload_delay = 10\n key.group_number = 25\n key.ip_address = '192.168.1.254'\n key.priority = 110\n key.preempt = True\n key.preempt_minimum_delay = 5\n key.hello_interval_seconds = 1\n key.holdtime_seconds = 3\n key.track_object = 1\n key.priority_decrement = 20\n key.authentication_md5_keychain = 'abc'\n\n # Build config\n build_cfgs = self.hsrp1.build_config(apply=False)\n\n compare1 = \"\"\n for i in build_cfgs['PE1']:\n compare1+=str(i)\n\n # Check config built correctly\n self.assertMultiLineEqual(compare1, '\\n'.join([\n '<edit-config xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\">\\n'\n ' <target>\\n'\n ' <running></running>\\n'\n ' </target>\\n'\n ' <config>\\n'\n ' <GigabitEthernet xmlns=\"http://cisco.com/ns/yang/Cisco-IOS-XE-native\">\\n'\n ' <name></name>\\n'\n ' <standby>\\n'\n ' <delay>\\n'\n ' <minimum>5</minimum>\\n'\n ' <reload>10</reload>\\n'\n ' </delay>\\n'\n ' <standby-list>\\n'\n ' <group-number>25</group-number>\\n'\n ' <authentication>\\n'\n ' <md5>\\n'\n ' <key-chain>abc</key-chain>\\n'\n ' </md5>\\n'\n ' </authentication>\\n'\n ' <ip>\\n'\n ' <address>192.168.1.254</address>\\n'\n ' </ip>\\n'\n ' <preempt>\\n'\n ' <delay>\\n'\n ' <minimum>5</minimum>\\n'\n ' </delay>\\n'\n ' </preempt>\\n'\n ' <priority>110</priority>\\n'\n ' <timers>\\n'\n ' <hello-interval>\\n'\n ' <seconds>1</seconds>\\n'\n ' </hello-interval>\\n'\n ' <hold-time>\\n'\n ' <seconds>3</seconds>\\n'\n ' </hold-time>\\n'\n ' </timers>\\n'\n ' <track>\\n'\n ' <number>1</number>\\n'\n ' <decrement>20</decrement>\\n'\n ' </track>\\n'\n ' </standby-list>\\n'\n ' <version>2</version>\\n'\n ' </standby>\\n'\n ' </GigabitEthernet>\\n'\n ' </config>\\n'\n '</edit-config>\\n'\n ]))\n\n # Build unconfig\n build_uncfgs = self.hsrp1.build_unconfig(apply=False)\n\n compare2 = \"\"\n for i in build_uncfgs['PE1']:\n compare2+=str(i)\n\n # Check config built correctly\n self.assertMultiLineEqual(compare2, '\\n'.join([\n '<edit-config xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\">\\n'\n ' <target>\\n'\n ' <running></running>\\n'\n ' </target>\\n'\n ' <config>\\n'\n ' <GigabitEthernet xmlns=\"http://cisco.com/ns/yang/Cisco-IOS-XE-native\">\\n'\n ' <name></name>\\n'\n ' <standby>\\n'\n ' <delay>\\n'\n ' <minimum>5</minimum>\\n'\n ' <reload>10</reload>\\n'\n ' </delay>\\n'\n ' <standby-list>\\n'\n ' <group-number>25</group-number>\\n'\n ' <authentication>\\n'\n ' <md5>\\n'\n ' <key-chain>abc</key-chain>\\n'\n ' </md5>\\n'\n ' </authentication>\\n'\n ' <ip>\\n'\n ' <address>192.168.1.254</address>\\n'\n ' </ip>\\n'\n ' <preempt>\\n'\n ' <delay>\\n'\n ' <minimum>5</minimum>\\n'\n ' </delay>\\n'\n ' </preempt>\\n'\n ' <priority>110</priority>\\n'\n ' <timers>\\n'\n ' <hello-interval>\\n'\n ' <seconds>1</seconds>\\n'\n ' </hello-interval>\\n'\n ' <hold-time>\\n'\n ' <seconds>3</seconds>\\n'\n ' </hold-time>\\n'\n ' </timers>\\n'\n ' <track>\\n'\n ' <number>1</number>\\n'\n ' <decrement>20</decrement>\\n'\n ' </track>\\n'\n ' </standby-list>\\n'\n ' <version>2</version>\\n'\n ' </standby>\\n'\n ' </GigabitEthernet>\\n'\n ' </config>\\n'\n '</edit-config>\\n'\n ]))\n\n def test_yang_config4(self):\n # Apply configuration\n key = self.hsrp1.device_attr[self.dev1].interface_attr[self.intf1]\n key.version = 1\n key.minimum_delay = 5\n key.reload_delay = 10\n key.group_number = 25\n key.ip_address = '192.168.1.254'\n key.priority = 110\n key.preempt = True\n key.preempt_minimum_delay = 5\n key.preempt_reload_delay = 10\n key.hello_interval_seconds = 1\n key.holdtime_seconds = 3\n key.track_object = 1\n 
key.priority_decrement = 20\n key.authentication_md5_keystring = 'xyz'\n\n # Build config\n build_cfgs = self.hsrp1.build_config(apply=False)\n\n compare1 = \"\"\n for i in build_cfgs['PE1']:\n compare1+=str(i)\n\n # Check config built correctly\n self.assertMultiLineEqual(compare1, '\\n'.join([\n '<edit-config xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\">\\n'\n ' <target>\\n'\n ' <running></running>\\n'\n ' </target>\\n'\n ' <config>\\n'\n ' <GigabitEthernet xmlns=\"http://cisco.com/ns/yang/Cisco-IOS-XE-native\">\\n'\n ' <name></name>\\n'\n ' <standby>\\n'\n ' <delay>\\n'\n ' <minimum>5</minimum>\\n'\n ' <reload>10</reload>\\n'\n ' </delay>\\n'\n ' <standby-list>\\n'\n ' <group-number>25</group-number>\\n'\n ' <authentication>\\n'\n ' <md5>\\n'\n ' <key-string>\\n'\n ' <string>xyz</string>\\n'\n ' </key-string>\\n'\n ' </md5>\\n'\n ' </authentication>\\n'\n ' <ip>\\n'\n ' <address>192.168.1.254</address>\\n'\n ' </ip>\\n'\n ' <preempt>\\n'\n ' <delay>\\n'\n ' <minimum>5</minimum>\\n'\n ' <reload>10</reload>\\n'\n ' </delay>\\n'\n ' </preempt>\\n'\n ' <priority>110</priority>\\n'\n ' <timers>\\n'\n ' <hello-interval>\\n'\n ' <seconds>1</seconds>\\n'\n ' </hello-interval>\\n'\n ' <hold-time>\\n'\n ' <seconds>3</seconds>\\n'\n ' </hold-time>\\n'\n ' </timers>\\n'\n ' <track>\\n'\n ' <number>1</number>\\n'\n ' <decrement>20</decrement>\\n'\n ' </track>\\n'\n ' </standby-list>\\n'\n ' <version>1</version>\\n'\n ' </standby>\\n'\n ' </GigabitEthernet>\\n'\n ' </config>\\n'\n '</edit-config>\\n'\n ]))\n\n # Build unconfig\n build_uncfgs = self.hsrp1.build_unconfig(apply=False)\n\n compare2 = \"\"\n for i in build_uncfgs['PE1']:\n compare2+=str(i)\n\n # Check config built correctly\n self.assertMultiLineEqual(compare2, '\\n'.join([\n '<edit-config xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\">\\n'\n ' <target>\\n'\n ' <running></running>\\n'\n ' </target>\\n'\n ' <config>\\n'\n ' <GigabitEthernet xmlns=\"http://cisco.com/ns/yang/Cisco-IOS-XE-native\">\\n'\n ' <name></name>\\n'\n ' <standby>\\n'\n ' <delay>\\n'\n ' <minimum>5</minimum>\\n'\n ' <reload>10</reload>\\n'\n ' </delay>\\n'\n ' <standby-list>\\n'\n ' <group-number>25</group-number>\\n'\n ' <authentication>\\n'\n ' <md5>\\n'\n ' <key-string>\\n'\n ' <string>xyz</string>\\n'\n ' </key-string>\\n'\n ' </md5>\\n'\n ' </authentication>\\n'\n ' <ip>\\n'\n ' <address>192.168.1.254</address>\\n'\n ' </ip>\\n'\n ' <preempt>\\n'\n ' <delay>\\n'\n ' <minimum>5</minimum>\\n'\n ' <reload>10</reload>\\n'\n ' </delay>\\n'\n ' </preempt>\\n'\n ' <priority>110</priority>\\n'\n ' <timers>\\n'\n ' <hello-interval>\\n'\n ' <seconds>1</seconds>\\n'\n ' </hello-interval>\\n'\n ' <hold-time>\\n'\n ' <seconds>3</seconds>\\n'\n ' </hold-time>\\n'\n ' </timers>\\n'\n ' <track>\\n'\n ' <number>1</number>\\n'\n ' <decrement>20</decrement>\\n'\n ' </track>\\n'\n ' </standby-list>\\n'\n ' <version>1</version>\\n'\n ' </standby>\\n'\n ' </GigabitEthernet>\\n'\n ' </config>\\n'\n '</edit-config>\\n'\n ]))\n\n def test_yang_config5(self):\n # Apply configuration\n key = self.hsrp1.device_attr[self.dev1].interface_attr[self.intf1]\n key.version = 2\n key.minimum_delay = 5\n key.group_number = 25\n key.priority = 110\n key.preempt = True\n key.bfd = True\n key.use_bia = True\n key.hello_interval_msec = 55\n key.holdtime_msec = 100\n key.track_object = 1\n key.track_shutdown = True\n key.group_name = 'gandalf'\n key.mac_address = 'dead.beef.dead'\n\n # Build config\n build_cfgs = self.hsrp1.build_config(apply=False)\n\n compare1 = \"\"\n for i in 
build_cfgs['PE1']:\n compare1+=str(i)\n\n # Check config built correctly\n self.assertMultiLineEqual(compare1, '\\n'.join([\n '<edit-config xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\">\\n'\n ' <target>\\n'\n ' <running></running>\\n'\n ' </target>\\n'\n ' <config>\\n'\n ' <GigabitEthernet xmlns=\"http://cisco.com/ns/yang/Cisco-IOS-XE-native\">\\n'\n ' <name></name>\\n'\n ' <standby>\\n'\n ' <bfd></bfd>\\n'\n ' <delay>\\n'\n ' <minimum>5</minimum>\\n'\n ' </delay>\\n'\n ' <standby-list>\\n'\n ' <group-number>25</group-number>\\n'\n ' <mac-address>dead.beef.dead</mac-address>\\n'\n ' <name>gandalf</name>\\n'\n ' <preempt/>\\n'\n ' <priority>110</priority>\\n'\n ' <timers>\\n'\n ' <hello-interval>\\n'\n ' <msec>55</msec>\\n'\n ' </hello-interval>\\n'\n ' <hold-time>\\n'\n ' <msec>100</msec>\\n'\n ' </hold-time>\\n'\n ' </timers>\\n'\n ' <track>\\n'\n ' <number>1</number>\\n'\n ' <shutdown></shutdown>\\n'\n ' </track>\\n'\n ' </standby-list>\\n'\n ' <use-bia>\\n'\n ' <scope>\\n'\n ' <interface></interface>\\n'\n ' </scope>\\n'\n ' </use-bia>\\n'\n ' <version>2</version>\\n'\n ' </standby>\\n'\n ' </GigabitEthernet>\\n'\n ' </config>\\n'\n '</edit-config>\\n'\n ]))\n\n # Build unconfig\n build_uncfgs = self.hsrp1.build_unconfig(apply=False)\n\n compare2 = \"\"\n for i in build_uncfgs['PE1']:\n compare2+=str(i)\n\n # Check config built correctly\n self.assertMultiLineEqual(compare2, '\\n'.join([\n '<edit-config xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\">\\n'\n ' <target>\\n'\n ' <running></running>\\n'\n ' </target>\\n'\n ' <config>\\n'\n ' <GigabitEthernet xmlns=\"http://cisco.com/ns/yang/Cisco-IOS-XE-native\">\\n'\n ' <name></name>\\n'\n ' <standby>\\n'\n ' <bfd></bfd>\\n'\n ' <delay>\\n'\n ' <minimum>5</minimum>\\n'\n ' </delay>\\n'\n ' <standby-list>\\n'\n ' <group-number>25</group-number>\\n'\n ' <mac-address>dead.beef.dead</mac-address>\\n'\n ' <name>gandalf</name>\\n'\n ' <preempt/>\\n'\n ' <priority>110</priority>\\n'\n ' <timers>\\n'\n ' <hello-interval>\\n'\n ' <msec>55</msec>\\n'\n ' </hello-interval>\\n'\n ' <hold-time>\\n'\n ' <msec>100</msec>\\n'\n ' </hold-time>\\n'\n ' </timers>\\n'\n ' <track>\\n'\n ' <number>1</number>\\n'\n ' <shutdown></shutdown>\\n'\n ' </track>\\n'\n ' </standby-list>\\n'\n ' <use-bia>\\n'\n ' <scope>\\n'\n ' <interface></interface>\\n'\n ' </scope>\\n'\n ' </use-bia>\\n'\n ' <version>2</version>\\n'\n ' </standby>\\n'\n ' </GigabitEthernet>\\n'\n ' </config>\\n'\n '</edit-config>\\n'\n ]))\n \n\n def test_yang_config6(self):\n # Apply configuration\n key = self.hsrp1.device_attr[self.dev1].interface_attr[self.intf1]\n key.version = 2\n key.minimum_delay = 5\n key.group_number = 25\n key.priority = 110\n key.preempt = True\n key.ipv6_address = 'autoconfig'\n\n # Build config\n build_cfgs = self.hsrp1.build_config(apply=False)\n\n compare1 = \"\"\n for i in build_cfgs['PE1']:\n compare1+=str(i)\n\n # Check config built correctly\n self.assertMultiLineEqual(compare1, '\\n'.join([\n '<edit-config xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\">\\n'\n ' <target>\\n'\n ' <running></running>\\n'\n ' </target>\\n'\n ' <config>\\n'\n ' <GigabitEthernet xmlns=\"http://cisco.com/ns/yang/Cisco-IOS-XE-native\">\\n'\n ' <name></name>\\n'\n ' <standby>\\n'\n ' <delay>\\n'\n ' <minimum>5</minimum>\\n'\n ' </delay>\\n'\n ' <standby-list>\\n'\n ' <group-number>25</group-number>\\n'\n ' <ipv6>autoconfig</ipv6>\\n'\n ' <preempt/>\\n'\n ' <priority>110</priority>\\n'\n ' </standby-list>\\n'\n ' <version>2</version>\\n'\n ' </standby>\\n'\n ' 
</GigabitEthernet>\\n'\n ' </config>\\n'\n '</edit-config>\\n'\n ]))\n\n # Build unconfig\n build_uncfgs = self.hsrp1.build_unconfig(apply=False)\n\n compare2 = \"\"\n for i in build_uncfgs['PE1']:\n compare2+=str(i)\n\n # Check config built correctly\n self.assertMultiLineEqual(compare2, '\\n'.join([\n '<edit-config xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\">\\n'\n ' <target>\\n'\n ' <running></running>\\n'\n ' </target>\\n'\n ' <config>\\n'\n ' <GigabitEthernet xmlns=\"http://cisco.com/ns/yang/Cisco-IOS-XE-native\">\\n'\n ' <name></name>\\n'\n ' <standby>\\n'\n ' <delay>\\n'\n ' <minimum>5</minimum>\\n'\n ' </delay>\\n'\n ' <standby-list>\\n'\n ' <group-number>25</group-number>\\n'\n ' <ipv6>autoconfig</ipv6>\\n'\n ' <preempt/>\\n'\n ' <priority>110</priority>\\n'\n ' </standby-list>\\n'\n ' <version>2</version>\\n'\n ' </standby>\\n'\n ' </GigabitEthernet>\\n'\n ' </config>\\n'\n '</edit-config>\\n'\n ]))\n\n\n def test_yang_config7(self):\n \n # Hsrp object\n self.hsrp1 = Hsrp()\n\n # Apply configuration\n key1 = self.hsrp1.device_attr[self.dev1].interface_attr[self.intf1]\n key1.version = 2\n key1.group_number = 10\n key1.priority = 110\n key1.preempt = True\n key1.preempt_reload_delay = 30\n \n # Hsrp object\n self.hsrp2 = Hsrp()\n\n # Apply configuration\n key2 = self.hsrp2.device_attr[self.dev1].interface_attr[self.intf1]\n key2.group_number = 20\n key2.priority = 120\n key2.preempt = True\n key2.preempt_sync_delay = 60\n\n # Build config\n cfgs1 = self.hsrp1.build_config(apply=False)\n cfgs2 = self.hsrp2.build_config(apply=False)\n\n compare = \"\"\n for i in cfgs1['PE1']:\n compare+=str(i)\n\n for i in cfgs2['PE1']:\n compare+=str(i)\n\n # Check config built correctly\n self.assertMultiLineEqual(compare, '\\n'.join([\n '<edit-config xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\">\\n'\n ' <target>\\n'\n ' <running></running>\\n'\n ' </target>\\n'\n ' <config>\\n'\n ' <GigabitEthernet xmlns=\"http://cisco.com/ns/yang/Cisco-IOS-XE-native\">\\n'\n ' <name></name>\\n'\n ' <standby>\\n'\n ' <standby-list>\\n'\n ' <group-number>10</group-number>\\n'\n ' <preempt/>\\n'\n ' <priority>110</priority>\\n'\n ' </standby-list>\\n'\n ' <version>2</version>\\n'\n ' </standby>\\n'\n ' </GigabitEthernet>\\n'\n ' </config>\\n'\n '</edit-config>\\n'\n '<edit-config xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\">\\n'\n ' <target>\\n'\n ' <running></running>\\n'\n ' </target>\\n'\n ' <config>\\n'\n ' <GigabitEthernet xmlns=\"http://cisco.com/ns/yang/Cisco-IOS-XE-native\">\\n'\n ' <name></name>\\n'\n ' <standby>\\n'\n ' <standby-list>\\n'\n ' <group-number>20</group-number>\\n'\n ' <preempt/>\\n'\n ' <priority>120</priority>\\n'\n ' </standby-list>\\n'\n ' </standby>\\n'\n ' </GigabitEthernet>\\n'\n ' </config>\\n'\n '</edit-config>\\n'\n ]))\n\nif __name__ == '__main__':\n unittest.main()\n\n\n", "id": "1126655", "language": "Python", "matching_score": 4.1444878578186035, "max_stars_count": 0, "path": "pkgs/conf-pkg/src/genie/libs/conf/hsrp/iosxe/yang/tests/test_hsrp.py" }, { "content": "#!/usr/bin/env python\n\nimport unittest\nfrom unittest.mock import Mock\n\nfrom genie.conf import Genie\nfrom genie.conf.base import Testbed, Device, Link, Interface\nfrom genie.conf.base.attributes import SubAttributes, SubAttributesDict, AttributesHelper, KeyedSubAttributes\n\nfrom genie.libs.conf.base import Routing, IPv4Neighbor, IPv4Address, IPv6Address\nfrom genie.libs.conf.address_family import AddressFamily, AddressFamilySubAttributes\nfrom genie.libs.conf.base.neighbor import 
IPv4NeighborSubAttributes\nfrom genie.libs.conf.ospf import Ospf\nfrom genie.libs.conf.vrf import Vrf\n\ntry:\n from ydk.models.ned import ned\n from ydk.services import CodecService\n from ydk.providers import CodecServiceProvider\n from ydk.types import DELETE, Empty\n from ydk.services import CRUDService\nexcept:\n pass\n\nclass NetconfConnectionInfo(object):\n def __init__(self):\n self.ip = str('1.1.1.1')\n self.port = 830\n self.username = 'admin'\n self.password = '<PASSWORD>'\n\nclass test_interface(unittest.TestCase):\n\n def test_init(self):\n testbed = Genie.testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxe', context='yang')\n dev2 = Device(testbed=testbed, name='PE2', os='iosxe', context='yang')\n intf1 = Interface(name='GigabitEthernet0/0/1',device=dev1)\n intf2 = Interface(name='GigabitEthernet0/0/2',device=dev2)\n link = Link(name='1_2_1',testbed=testbed)\n link.connect_interface(interface=intf1)\n link.connect_interface(interface=intf2)\n self.assertSetEqual(\n set(link.find_interfaces()),\n set([intf1, intf2]))\n self.assertSetEqual(\n set(dev1.find_interfaces()),\n set([intf1]))\n self.assertSetEqual(\n set(dev2.find_interfaces()),\n set([intf2]))\n\n self.assertCountEqual(link.interfaces, [intf1, intf2])\n self.assertEqual(intf1.device, dev1)\n self.assertEqual(intf2.device, dev2)\n\n def test_basic_gig_cfg(self):\n testbed = Genie.testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxe', context='yang')\n dev2 = Device(testbed=testbed, name='PE2', os='iosxe', context='yang')\n intf1 = Interface(name='GigabitEthernet0/0/1',device=dev1)\n intf2 = Interface(name='GigabitEthernet0/0/2',device=dev2)\n link = Link(name='1_2_1',testbed=testbed)\n link.connect_interface(interface=intf1)\n link.connect_interface(interface=intf2)\n vrf = Vrf(name='test')\n intf1.vrf = vrf\n intf2.vrf = vrf\n intf1.ipv4 = '1.2.3.4/30'\n intf2.ipv4 = '1.2.3.5/30'\n intf1.shutdown = False\n intf2.shutdown = True\n\n self.maxDiff = None\n cfg1 = intf1.build_config(apply=False)\n cfg2 = intf2.build_config(apply=False)\n self.assertMultiLineEqual(str(cfg1), '''<edit-config xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\">\n <target>\n <running></running>\n </target>\n <config>\n <GigabitEthernet xmlns=\"http://cisco.com/ns/yang/ned/ios\">\n <name>0/0/1</name>\n <ip>\n <address>\n <primary>\n <address>1.2.3.4</address>\n <mask>255.255.255.252</mask>\n </primary>\n </address>\n </ip>\n <shutdown xmlns:nc=\"urn:ietf:params:xml:ns:netconf:base:1.0\" nc:operation=\"delete\"/>\n <vrf>\n <forwarding>test</forwarding>\n </vrf>\n </GigabitEthernet>\n </config>\n</edit-config>\n''')\n\n\n self.assertMultiLineEqual(str(cfg2),'''<edit-config xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\">\n <target>\n <running></running>\n </target>\n <config>\n <GigabitEthernet xmlns=\"http://cisco.com/ns/yang/ned/ios\">\n <name>0/0/2</name>\n <ip>\n <address>\n <primary>\n <address>1.2.3.5</address>\n <mask>255.255.255.252</mask>\n </primary>\n </address>\n </ip>\n <shutdown></shutdown>\n <vrf>\n <forwarding>test</forwarding>\n </vrf>\n </GigabitEthernet>\n </config>\n</edit-config>\n''')\n\n\n uncfg1 = intf1.build_unconfig(apply=False)\n uncfg2 = intf2.build_unconfig(apply=False)\n\n self.assertMultiLineEqual(str(uncfg1), '''<edit-config xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\">\n <target>\n <running></running>\n </target>\n <config>\n <GigabitEthernet xmlns=\"http://cisco.com/ns/yang/ned/ios\">\n <name>0/0/1</name>\n <ip>\n <address>\n <primary>\n <address 
xmlns:nc=\"urn:ietf:params:xml:ns:netconf:base:1.0\" nc:operation=\"delete\"/>\n <mask xmlns:nc=\"urn:ietf:params:xml:ns:netconf:base:1.0\" nc:operation=\"delete\"/>\n </primary>\n </address>\n </ip>\n <shutdown></shutdown>\n <vrf>\n <forwarding xmlns:nc=\"urn:ietf:params:xml:ns:netconf:base:1.0\" nc:operation=\"delete\"/>\n </vrf>\n </GigabitEthernet>\n </config>\n</edit-config>\n''')\n\n self.assertMultiLineEqual(str(uncfg2), '''<edit-config xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\">\n <target>\n <running></running>\n </target>\n <config>\n <GigabitEthernet xmlns=\"http://cisco.com/ns/yang/ned/ios\">\n <name>0/0/2</name>\n <ip>\n <address>\n <primary>\n <address xmlns:nc=\"urn:ietf:params:xml:ns:netconf:base:1.0\" nc:operation=\"delete\"/>\n <mask xmlns:nc=\"urn:ietf:params:xml:ns:netconf:base:1.0\" nc:operation=\"delete\"/>\n </primary>\n </address>\n </ip>\n <shutdown></shutdown>\n <vrf>\n <forwarding xmlns:nc=\"urn:ietf:params:xml:ns:netconf:base:1.0\" nc:operation=\"delete\"/>\n </vrf>\n </GigabitEthernet>\n </config>\n</edit-config>\n''')\n\n def test_basic_loopback_cfg(self):\n testbed = Genie.testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxe', context='yang')\n dev2 = Device(testbed=testbed, name='PE2', os='iosxe', context='yang')\n intf1 = Interface(name='Loopback100',device=dev1)\n intf2 = Interface(name='Loopback200',device=dev2)\n link = Link(name='1_2_1',testbed=testbed)\n link.connect_interface(interface=intf1)\n link.connect_interface(interface=intf2)\n vrf = Vrf(name='test')\n intf1.vrf = vrf\n intf2.vrf = vrf\n intf1.ipv4 = '1.2.3.4/32'\n intf2.ipv4 = '1.2.3.5/32'\n\n cfg1 = intf1.build_config(apply=False)\n cfg2 = intf2.build_config(apply=False)\n print(cfg1)\n print(cfg2)\n self.assertMultiLineEqual(str(cfg1), '''<edit-config xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\">\n <target>\n <running></running>\n </target>\n <config>\n <Loopback xmlns=\"http://cisco.com/ns/yang/ned/ios\">\n <name>100</name>\n <ip>\n <address>\n <primary>\n <address>1.2.3.4</address>\n <mask>255.255.255.255</mask>\n </primary>\n </address>\n </ip>\n <vrf>\n <forwarding>test</forwarding>\n </vrf>\n </Loopback>\n </config>\n</edit-config>\n''')\n\n self.assertMultiLineEqual(str(cfg2), '''<edit-config xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\">\n <target>\n <running></running>\n </target>\n <config>\n <Loopback xmlns=\"http://cisco.com/ns/yang/ned/ios\">\n <name>200</name>\n <ip>\n <address>\n <primary>\n <address>1.2.3.5</address>\n <mask>255.255.255.255</mask>\n </primary>\n </address>\n </ip>\n <vrf>\n <forwarding>test</forwarding>\n </vrf>\n </Loopback>\n </config>\n</edit-config>\n''')\n\nif __name__ == '__main__':\n unittest.main()\n", "id": "2980754", "language": "Python", "matching_score": 5.335297107696533, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/interface/iosxe/yang/tests/test_interface.py" }, { "content": "#!/usr/bin/env python\n\nimport unittest\nfrom unittest.mock import Mock\n\nfrom genie.conf import Genie\nfrom genie.conf.base import Testbed, Device, Link, Interface\nfrom genie.conf.base.attributes import SubAttributes, SubAttributesDict, AttributesHelper, KeyedSubAttributes\n\nfrom genie.libs.conf.base import Routing, IPv4Neighbor, IPv4Address, IPv6Address\nfrom genie.libs.conf.address_family import AddressFamily, AddressFamilySubAttributes\nfrom genie.libs.conf.base.neighbor import IPv4NeighborSubAttributes\nfrom genie.libs.conf.ospf import Ospf\nfrom genie.libs.conf.vrf import Vrf\n\ntry:\n from ydk.models.ned 
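import ned  # NED model bindings; the surrounding try/except keeps ydk optional\n    # 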
import ned\n from ydk.services import CodecService\n from ydk.providers import CodecServiceProvider\n from ydk.types import DELETE, Empty\n from ydk.services import CRUDService\nexcept:\n pass\n\n\nclass NetconfConnectionInfo(object):\n def __init__(self):\n self.ip = str('1.1.1.1')\n self.port = 830\n self.username = 'admin'\n self.password = '<PASSWORD>'\n\nclass test_vrf(unittest.TestCase):\n\n def test_init(self):\n testbed = Genie.testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxe', context='yang')\n dev2 = Device(testbed=testbed, name='PE2', os='iosxe', context='yang')\n intf1 = Interface(name='GigabitEthernet0/0/1',device=dev1)\n intf2 = Interface(name='GigabitEthernet0/0/2',device=dev2)\n link = Link(name='1_2_1',testbed=testbed)\n link.connect_interface(interface=intf1)\n link.connect_interface(interface=intf2)\n self.assertSetEqual(\n set(link.find_interfaces()),\n set([intf1, intf2]))\n self.assertSetEqual(\n set(dev1.find_interfaces()),\n set([intf1]))\n self.assertSetEqual(\n set(dev2.find_interfaces()),\n set([intf2]))\n vrf = Vrf(name='test')\n\n self.assertSetEqual(set(vrf.devices), set([]))\n self.assertSetEqual(set(vrf.interfaces), set([]))\n intf1.vrf = vrf\n intf2.vrf = vrf\n dev1.add_feature(vrf)\n dev2.add_feature(vrf)\n\n self.assertCountEqual(vrf.devices, [dev1, dev2])\n self.assertSetEqual(set(vrf.interfaces), set([intf1, intf2]))\n\n with self.assertRaises(AttributeError):\n vrf.address_families = set([AddressFamily.ipv8_unicast])\n\n def test_basic_cfg(self):\n testbed = Genie.testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxe', context='yang')\n dev2 = Device(testbed=testbed, name='PE2', os='iosxe', context='yang')\n vrf = Vrf(name='test')\n vrf1 = Vrf(name='test1')\n vrf2 = Vrf(name='test2')\n\n for dev in testbed.devices:\n dev.connections=Mock()\n dev.connections={'netconf':NetconfConnectionInfo()}\n dev.add_feature(vrf2)\n\n dev1.add_feature(vrf)\n dev2.add_feature(vrf1)\n vrf.address_families = set([AddressFamily.ipv4_unicast])\n vrf1.address_families = set([AddressFamily.ipv6_unicast])\n vrf2.address_families = set([AddressFamily.ipv4_unicast,\n AddressFamily.ipv6_unicast])\n\n cfg1 = vrf.build_config(apply=False)\n cfg2 = vrf1.build_config(apply=False)\n cfg3 = vrf2.build_config(apply=False, devices=['PE1'])\n cfg4 = vrf2.build_config(apply=False)\n self.assertCountEqual(cfg1.keys(), ['PE1'])\n self.assertCountEqual(cfg2.keys(), ['PE2'])\n self.assertCountEqual(cfg3.keys(), ['PE1'])\n self.assertCountEqual(cfg4.keys(), ['PE1','PE2'])\n self.assertMultiLineEqual(cfg1['PE1'], '\\n'.join(['<vrf xmlns=\"http://cisco.com/ns/yang/ned/ios\">\\n'\n' <definition>\\n'\n' <name>test</name>\\n'\n' <address-family>\\n'\n' <ipv4/>\\n'\n' </address-family>\\n'\n' </definition>\\n'\n'</vrf>\\n']))\n self.assertMultiLineEqual(cfg2['PE2'], '\\n'.join(['<vrf xmlns=\"http://cisco.com/ns/yang/ned/ios\">\\n'\n' <definition>\\n'\n' <name>test1</name>\\n'\n' <address-family>\\n'\n' <ipv6/>\\n'\n' </address-family>\\n'\n' </definition>\\n'\n'</vrf>\\n']))\n\n self.assertMultiLineEqual(cfg3['PE1'], '\\n'.join(['<vrf xmlns=\"http://cisco.com/ns/yang/ned/ios\">\\n'\n' <definition>\\n'\n' <name>test2</name>\\n'\n' <address-family>\\n'\n' <ipv4/>\\n'\n' <ipv6/>\\n'\n' </address-family>\\n'\n' </definition>\\n'\n'</vrf>\\n']))\n\n self.assertMultiLineEqual(cfg4['PE1'], '\\n'.join(['<vrf xmlns=\"http://cisco.com/ns/yang/ned/ios\">\\n'\n' <definition>\\n'\n' <name>test2</name>\\n'\n' <address-family>\\n'\n' <ipv4/>\\n'\n' <ipv6/>\\n'\n' 
</address-family>\\n'\n' </definition>\\n'\n'</vrf>\\n']))\n self.assertMultiLineEqual(cfg4['PE2'], '\\n'.join(['<vrf xmlns=\"http://cisco.com/ns/yang/ned/ios\">\\n'\n' <definition>\\n'\n' <name>test2</name>\\n'\n' <address-family>\\n'\n' <ipv4/>\\n'\n' <ipv6/>\\n'\n' </address-family>\\n'\n' </definition>\\n'\n'</vrf>\\n']))\n # uncfg1 = ospf.build_unconfig(apply=False)\n # self.assertCountEqual(uncfg1.keys(), ['PE1', 'PE2'])\n # self.assertMultiLineEqual(uncfg1['PE1'], '\\n'.join([\n # 'no router ospf 100\\n'\n # 'interface GigabitEthernet0/0/1\\n'\n # ' no ip ospf 100 area 0\\n'\n # ' exit' ]))\n #\n # self.assertMultiLineEqual(uncfg1['PE2'], '\\n'.join([\n # 'no router ospf 100\\n'\n # 'interface GigabitEthernet0/0/2\\n'\n # ' no ip ospf 100 area 0\\n'\n # ' exit']))\n#\n# def test_cfg_vrfs_areas_interfaces(self):\n# testbed = Genie.testbed = Testbed()\n# dev1 = Device(testbed=testbed, name='PE1', os='iosxe', context='yang')\n# dev2 = Device(testbed=testbed, name='PE2', os='iosxe', context='yang')\n# intf1 = Interface(name='GigabitEthernet0/0/1',device=dev1)\n# intf2 = Interface(name='GigabitEthernet0/0/2',device=dev2)\n# intf1_1 = Interface(name='GigabitEthernet0/1/1',device=dev1)\n# intf2_1 = Interface(name='GigabitEthernet0/1/2',device=dev2)\n# link = Link(name='1_2_1',testbed=testbed)\n# link.connect_interface(interface=intf1)\n# link.connect_interface(interface=intf2)\n# link1 = Link(name='2_2_2',testbed=testbed)\n# link1.connect_interface(interface=intf1_1)\n# link1.connect_interface(interface=intf2_1)\n# ospf = Ospf()\n# vrf = Vrf(name='test')\n# ospf.add_force_vrf(vrf)\n# link.add_feature(ospf)\n# link1.add_feature(ospf)\n# intf1.vrf = vrf\n# intf2.vrf = vrf\n# intf1_1.vrf = vrf\n# intf2_1.vrf = vrf\n# ospf.ospf_name = '100'\n# area_id = 0\n# area_id_1 = 1\n# ospf.device_attr[dev1].vrf_attr[vrf].area_attr[area_id]\n# ospf.device_attr[dev2].vrf_attr[vrf].area_attr[area_id]\n# ospf.device_attr[dev1].vrf_attr[vrf].instance_router_id = IPv4Address('2.1.1.1')\n# ospf.device_attr[dev2].vrf_attr[vrf].instance_router_id = IPv4Address('2.1.2.1')\n#\n# ospf.device_attr[dev1].vrf_attr[vrf].area_attr[area_id].interface_attr[intf1].area_interface_cost = 10\n# ospf.device_attr[dev2].vrf_attr[vrf].area_attr[area_id].interface_attr[intf2].area_interface_cost = 10\n# ospf.device_attr[dev1].vrf_attr[vrf].area_attr[area_id_1].interface_attr[intf1_1].area_interface_cost = 20\n# ospf.device_attr[dev2].vrf_attr[vrf].area_attr[area_id_1].interface_attr[intf2_1].area_interface_cost = 20\n#\n# ospf.nsr = True\n# ospf.auto_cost_ref_bw = 12345\n# cfg1 = ospf.build_config(apply=False)\n# self.assertCountEqual(cfg1.keys(), ['PE1', 'PE2'])\n# self.assertMultiLineEqual(cfg1['PE1'], '\\n'.join(['<ospf xmlns=\"http://cisco.com/ns/yang/ned/ios\">\\n'\n# ' <id>100</id>\\n'\n# ' <auto-cost>\\n'\n# ' <reference-bandwidth>12345</reference-bandwidth>\\n'\n# ' </auto-cost>\\n'\n# ' <nsr></nsr>\\n'\n# ' <router-id>2.1.1.1</router-id>\\n'\n# ' <vrf>test</vrf>\\n'\n# '</ospf>\\n'\n# '<GigabitEthernet xmlns=\"http://cisco.com/ns/yang/ned/ios\">\\n'\n# ' <name>0/0/1</name>\\n'\n# ' <ip>\\n'\n# ' <ospf>\\n'\n# ' <cost>10</cost>\\n'\n# ' <process-id>\\n'\n# ' <id>100</id>\\n'\n# ' <area>0</area>\\n'\n# ' </process-id>\\n'\n# ' </ospf>\\n'\n# ' </ip>\\n'\n# '</GigabitEthernet>\\n'\n# '<GigabitEthernet xmlns=\"http://cisco.com/ns/yang/ned/ios\">\\n'\n# ' <name>0/1/1</name>\\n'\n# ' <ip>\\n'\n# ' <ospf>\\n'\n# ' <cost>20</cost>\\n'\n# ' <process-id>\\n'\n# ' <id>100</id>\\n'\n# ' <area>1</area>\\n'\n# ' </process-id>\\n'\n# ' 
</ospf>\\n'\n# ' </ip>\\n'\n# '</GigabitEthernet>\\n']))\n# self.assertMultiLineEqual(cfg1['PE2'], '\\n'.join(['<ospf xmlns=\"http://cisco.com/ns/yang/ned/ios\">\\n'\n# ' <id>100</id>\\n'\n# ' <auto-cost>\\n'\n# ' <reference-bandwidth>12345</reference-bandwidth>\\n'\n# ' </auto-cost>\\n'\n# ' <nsr></nsr>\\n'\n# ' <router-id>2.1.2.1</router-id>\\n'\n# ' <vrf>test</vrf>\\n'\n# '</ospf>\\n'\n# '<GigabitEthernet xmlns=\"http://cisco.com/ns/yang/ned/ios\">\\n'\n# ' <name>0/0/2</name>\\n'\n# ' <ip>\\n'\n# ' <ospf>\\n'\n# ' <cost>10</cost>\\n'\n# ' <process-id>\\n'\n# ' <id>100</id>\\n'\n# ' <area>0</area>\\n'\n# ' </process-id>\\n'\n# ' </ospf>\\n'\n# ' </ip>\\n'\n# '</GigabitEthernet>\\n'\n# '<GigabitEthernet xmlns=\"http://cisco.com/ns/yang/ned/ios\">\\n'\n# ' <name>0/1/2</name>\\n'\n# ' <ip>\\n'\n# ' <ospf>\\n'\n# ' <cost>20</cost>\\n'\n# ' <process-id>\\n'\n# ' <id>100</id>\\n'\n# ' <area>1</area>\\n'\n# ' </process-id>\\n'\n# ' </ospf>\\n'\n# ' </ip>\\n'\n# '</GigabitEthernet>\\n']))\n#\n# self.maxDiff = None\n#\n# # Set a mock\n# dev1.configure = Mock()\n# dev2.configure = Mock()\n# dev1.add_feature(ospf)\n# dev2.add_feature(ospf)\n# # Mock config\n#\n# output = ospf.build_config(apply=True)\n\n # uncfg1 = ospf.build_unconfig(apply=False, attributes={'device_attr':{'*':{'vrf_attr':{'*':{'area_attr':area_id}}}}})\n #\n # self.assertCountEqual(uncfg1.keys(), ['PE1', 'PE2'])\n # self.assertMultiLineEqual(uncfg1['PE1'], '\\n'.join(['router ospf 100 vrf test\\n'\n # ' no passive-interface GigabitEthernet0/0/1\\n'\n # ' exit\\n'\n # 'interface GigabitEthernet0/0/1\\n'\n # ' no ip ospf 100 area 0\\n'\n # ' no ip ospf cost 10\\n'\n # ' exit']))\n #\n # self.assertMultiLineEqual(uncfg1['PE2'], '\\n'.join(['interface GigabitEthernet0/0/2\\n'\n # ' no ip ospf 100 area 0\\n'\n # ' no ip ospf cost 10\\n'\n # ' exit']))\n #\n # uncfg5 = ospf.build_unconfig(apply=False, attributes={'device_attr':{'*':{'vrf_attr':{'*':{'area_attr':{'*':{'interface_attr':'*'}}}}}}})\n #\n # self.assertCountEqual(uncfg5.keys(), ['PE1', 'PE2'])\n # self.assertMultiLineEqual(uncfg5['PE1'], '\\n'.join(['router ospf 100 vrf test\\n'\n # ' no passive-interface GigabitEthernet0/0/1\\n'\n # ' exit\\n'\n # 'interface GigabitEthernet0/0/1\\n'\n # ' no ip ospf 100 area 0\\n'\n # ' no ip ospf cost 10\\n'\n # ' exit\\n'\n # 'interface GigabitEthernet0/1/1\\n'\n # ' no ip ospf 100 area 1\\n'\n # ' no ip ospf cost 20\\n'\n # ' exit']))\n #\n # self.assertMultiLineEqual(uncfg5['PE2'], '\\n'.join(['interface GigabitEthernet0/0/2\\n'\n # ' no ip ospf 100 area 0\\n'\n # ' no ip ospf cost 10\\n'\n # ' exit\\n'\n # 'interface GigabitEthernet0/1/2\\n'\n # ' no ip ospf 100 area 1\\n'\n # ' no ip ospf cost 20\\n'\n # ' exit']))\n #\n # uncfg2 = ospf.build_unconfig(apply=False, attributes={'device_attr':{'*':{'vrf_attr':{'*':{'area_attr':'*'}}}}})\n #\n # self.assertCountEqual(uncfg2.keys(), ['PE1', 'PE2'])\n # self.assertMultiLineEqual(uncfg2['PE1'], '\\n'.join(['router ospf 100 vrf test\\n'\n # ' no passive-interface GigabitEthernet0/0/1\\n'\n # ' exit\\n'\n # 'interface GigabitEthernet0/0/1\\n'\n # ' no ip ospf 100 area 0\\n'\n # ' no ip ospf cost 10\\n'\n # ' exit\\n'\n # 'interface GigabitEthernet0/1/1\\n'\n # ' no ip ospf 100 area 1\\n'\n # ' no ip ospf cost 20\\n'\n # ' exit']))\n #\n # self.assertMultiLineEqual(uncfg2['PE2'], '\\n'.join(['interface GigabitEthernet0/0/2\\n'\n # ' no ip ospf 100 area 0\\n'\n # ' no ip ospf cost 10\\n'\n # ' exit\\n'\n # 'interface GigabitEthernet0/1/2\\n'\n # ' no ip ospf 100 area 1\\n'\n # ' no ip ospf cost 
20\\n'\n # ' exit']))\n #\n #\n # uncfg3 = ospf.build_unconfig(apply=False, attributes={'device_attr':{'*':{'vrf_attr':'*'}}})\n #\n # self.assertCountEqual(uncfg3.keys(), ['PE1', 'PE2'])\n # self.assertMultiLineEqual(uncfg3['PE1'], '\\n'.join(['no router ospf 100 vrf test\\n'\n # 'interface GigabitEthernet0/0/1\\n'\n # ' no ip ospf 100 area 0\\n'\n # ' no ip ospf cost 10\\n'\n # ' exit\\n'\n # 'interface GigabitEthernet0/1/1\\n'\n # ' no ip ospf 100 area 1\\n'\n # ' no ip ospf cost 20\\n'\n # ' exit']))\n #\n # self.assertMultiLineEqual(uncfg3['PE2'], '\\n'.join(['no router ospf 100 vrf test\\n'\n # 'interface GigabitEthernet0/0/2\\n'\n # ' no ip ospf 100 area 0\\n'\n # ' no ip ospf cost 10\\n'\n # ' exit\\n'\n # 'interface GigabitEthernet0/1/2\\n'\n # ' no ip ospf 100 area 1\\n'\n # ' no ip ospf cost 20\\n'\n # ' exit']))\n #\n # uncfg4 = ospf.build_unconfig(apply=False)\n #\n # self.assertCountEqual(uncfg4.keys(), ['PE1', 'PE2'])\n # self.assertMultiLineEqual(uncfg4['PE1'], '\\n'.join(['no router ospf 100 vrf test\\n'\n # 'interface GigabitEthernet0/0/1\\n'\n # ' no ip ospf 100 area 0\\n'\n # ' no ip ospf cost 10\\n'\n # ' exit\\n'\n # 'interface GigabitEthernet0/1/1\\n'\n # ' no ip ospf 100 area 1\\n'\n # ' no ip ospf cost 20\\n'\n # ' exit']))\n #\n # self.assertMultiLineEqual(uncfg4['PE2'], '\\n'.join(['no router ospf 100 vrf test\\n'\n # 'interface GigabitEthernet0/0/2\\n'\n # ' no ip ospf 100 area 0\\n'\n # ' no ip ospf cost 10\\n'\n # ' exit\\n'\n # 'interface GigabitEthernet0/1/2\\n'\n # ' no ip ospf 100 area 1\\n'\n # ' no ip ospf cost 20\\n'\n # ' exit']))\n\nif __name__ == '__main__':\n unittest.main()\n", "id": "9809192", "language": "Python", "matching_score": 5.739898204803467, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/vrf/iosxe/yang/tests/test_vrf.py" }, { "content": "#!/usr/bin/env python\n\n#python\nimport unittest\nfrom unittest.mock import Mock\n\nfrom genie.conf import Genie\nfrom genie.conf.base import Testbed, Device, Link, Interface\nfrom genie.conf.base.attributes import SubAttributes, \\\n SubAttributesDict, \\\n AttributesHelper, \\\n KeyedSubAttributes\n\nfrom genie.libs.conf.vlan import Vlan\n\ntry:\n # from ydk.models.ned import ned\n from ydk.models.xe_recent_edison import Cisco_IOS_XE_native as ned\n from ydk.services import CodecService\n from ydk.providers import CodecServiceProvider\n from ydk.types import DELETE, Empty\n from ydk.services import CRUDService\n\n # patch a netconf provider\n from ydk.providers import NetconfServiceProvider as _NetconfServiceProvider\n from ydk.providers._provider_plugin import _ClientSPPlugin\nexcept:\n pass\n\nclass NetconfConnectionInfo(object):\n def __init__(self):\n self.ip = '1.1.1.1'\n self.port = 830\n self.username = 'admin'\n self.password = '<PASSWORD>'\n\nclass test_vlan(unittest.TestCase):\n\n def test_init(self):\n testbed = Genie.testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxe', context='yang')\n dev2 = Device(testbed=testbed, name='PE2', os='iosxe', context='yang')\n intf1 = Interface(name='GigabitEthernet0/0/1',device=dev1)\n intf2 = Interface(name='GigabitEthernet0/0/2',device=dev2)\n link = Link(name='1_2_1',testbed=testbed)\n link.connect_interface(interface=intf1)\n link.connect_interface(interface=intf2)\n self.assertSetEqual(\n set(link.find_interfaces()),\n set([intf1, intf2]))\n self.assertSetEqual(\n set(dev1.find_interfaces()),\n set([intf1]))\n self.assertSetEqual(\n set(dev2.find_interfaces()),\n set([intf2]))\n vlan = Vlan()\n\n 
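# A new Vlan feature starts empty; link.add_feature(vlan) attaches it below.\n        self.assertSetEqual(set(vlan.devices), set([]))\n        # 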
self.assertSetEqual(set(vlan.devices), set([]))\n self.assertSetEqual(set(vlan.links), set([]))\n link.add_feature(vlan)\n self.assertCountEqual(vlan.devices, [dev1, dev2])\n self.assertSetEqual(set(vlan.links), set([link]))\n self.assertSetEqual(set(vlan.interfaces), set([intf1, intf2]))\n\n with self.assertRaises(AttributeError):\n vlan.access_map_id\n\n with self.assertRaises(AttributeError):\n vlan.vlan_configuration_id\n\n def test_basic_cfg(self):\n testbed = Genie.testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxe', context='yang')\n dev2 = Device(testbed=testbed, name='PE2', os='iosxe', context='yang')\n intf1 = Interface(name='GigabitEthernet0/0/1',device=dev1)\n intf2 = Interface(name='GigabitEthernet0/0/2',device=dev2)\n intf3 = Interface(name='Vlan100',device=dev1)\n link = Link(name='1_2_1',testbed=testbed)\n link.connect_interface(interface=intf1)\n link.connect_interface(interface=intf2)\n vlan = Vlan()\n\n for dev in testbed.devices:\n dev.connections=Mock()\n dev.connections={'netconf':NetconfConnectionInfo()}\n\n link.add_feature(vlan)\n vlan.vlan_id = 100\n\n cfg1 = vlan.build_config(apply=False)\n self.assertCountEqual(cfg1.keys(), ['PE1', 'PE2'])\n compare = \"\"\n for i in cfg1['PE1']: \n compare+=str(i)\n\n self.assertMultiLineEqual(compare, '\\n'.join(\n ['<edit-config xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\">\\n'\n' <target>\\n'\n' <running></running>\\n'\n' </target>\\n'\n' <config>\\n'\n' <vlan xmlns=\"http://cisco.com/ns/yang/Cisco-IOS-XE-native\">\\n'\n' <vlan-list xmlns=\"http://cisco.com/ns/yang/Cisco-IOS-XE-vlan\">\\n'\n' <id>100</id>\\n'\n' </vlan-list>\\n'\n' </vlan>\\n'\n' </config>\\n'\n'</edit-config>\\n']))\n\n compare = \"\"\n for i in cfg1['PE2']: \n compare+=str(i)\n\n self.assertMultiLineEqual(compare, '\\n'.join(\n ['<edit-config xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\">\\n'\n' <target>\\n'\n' <running></running>\\n'\n' </target>\\n'\n' <config>\\n'\n' <vlan xmlns=\"http://cisco.com/ns/yang/Cisco-IOS-XE-native\">\\n'\n' <vlan-list xmlns=\"http://cisco.com/ns/yang/Cisco-IOS-XE-vlan\">\\n'\n' <id>100</id>\\n'\n' </vlan-list>\\n'\n' </vlan>\\n'\n' </config>\\n'\n'</edit-config>\\n']))\n\n def test_basic_uncfg(self):\n testbed = Genie.testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxe', context='yang')\n dev2 = Device(testbed=testbed, name='PE2', os='iosxe', context='yang')\n intf1 = Interface(name='GigabitEthernet0/0/1',device=dev1)\n intf2 = Interface(name='GigabitEthernet0/0/2',device=dev2)\n intf3 = Interface(name='Vlan100',device=dev1)\n vlan = Vlan()\n\n for dev in testbed.devices:\n dev.connections=Mock()\n dev.connections={'netconf':NetconfConnectionInfo()}\n\n vlan.device_attr[dev1].vlan_id = 100\n vlan.device_attr[dev2].vlan_id = 300\n\n # Unconfig testing\n # Set a mock\n dev1.configure = Mock()\n dev2.configure = Mock()\n dev1.add_feature(vlan)\n dev2.add_feature(vlan)\n # Mock config\n\n uncfg1 = vlan.build_unconfig(apply=False)\n self.assertCountEqual(uncfg1.keys(), ['PE1', 'PE2'])\n\n compare = \"\"\n for i in uncfg1['PE1']: \n compare+=str(i)\n\n # A case has been already raised by JB so the unconfig for yang works as expected.\n # Currently, retruned xml for yang unconfig is exactly the same for the unconfig one.\n self.assertMultiLineEqual(compare, '\\n'.join(\n ['<edit-config xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\">\\n'\n' <target>\\n'\n' <running></running>\\n'\n' </target>\\n'\n' <config>\\n'\n' <vlan 
xmlns=\"http://cisco.com/ns/yang/Cisco-IOS-XE-native\">\\n'\n' <vlan-list xmlns=\"http://cisco.com/ns/yang/Cisco-IOS-XE-vlan\">\\n'\n' <id>100</id>\\n'\n' </vlan-list>\\n'\n' </vlan>\\n'\n' </config>\\n'\n'</edit-config>\\n']))\n\n compare = \"\"\n for i in uncfg1['PE2']: \n compare+=str(i)\n\n self.assertMultiLineEqual(compare, '\\n'.join(\n ['<edit-config xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\">\\n'\n' <target>\\n'\n' <running></running>\\n'\n' </target>\\n'\n' <config>\\n'\n' <vlan xmlns=\"http://cisco.com/ns/yang/Cisco-IOS-XE-native\">\\n'\n' <vlan-list xmlns=\"http://cisco.com/ns/yang/Cisco-IOS-XE-vlan\">\\n'\n' <id>300</id>\\n'\n' </vlan-list>\\n'\n' </vlan>\\n'\n' </config>\\n'\n'</edit-config>\\n']))\n\nif __name__ == '__main__':\n unittest.main()", "id": "12373780", "language": "Python", "matching_score": 6.943029403686523, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/vlan/iosxe/yang/tests/test_vlan.py" }, { "content": "#!/usr/bin/env python\n\n#python\nimport unittest\nfrom unittest.mock import Mock\n\nfrom genie.conf import Genie\nfrom genie.conf.base import Testbed, Device, Link, Interface\nfrom genie.conf.base.attributes import SubAttributes, \\\n SubAttributesDict, \\\n AttributesHelper, \\\n KeyedSubAttributes\n\nfrom genie.libs.conf.vlan import Vlan\n\ntry:\n from ydk.models.ydkmodels import Cisco_IOS_XR_ifmgr_cfg as xr_ifmgr_cfg\n from ydk.services import CodecService\n from ydk.providers import CodecServiceProvider\n from ydk.types import DELETE, Empty\n from ydk.services import CRUDService\n\n # patch a netconf provider\n from ydk.providers import NetconfServiceProvider as _NetconfServiceProvider\n from ydk.providers._provider_plugin import _ClientSPPlugin\nexcept:\n pass\n\n\nclass NetconfConnectionInfo(object):\n def __init__(self):\n self.ip = '1.1.1.1'\n self.port = 830\n self.username = 'admin'\n self.password = '<PASSWORD>'\n\n\nclass test_vlan(unittest.TestCase):\n\n def test_vlan_interface_configuration(self):\n testbed = Genie.testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxr', context='yang')\n dev2 = Device(testbed=testbed, name='PE2', os='iosxr', context='yang')\n intf1 = Interface(name='GigabitEthernet0/0/1',device=dev1, context='yang')\n intf2 = Interface(name='GigabitEthernet0/0/2',device=dev2, context='yang')\n intf3 = Interface(name='GigabitEthernet0/0/3',device=dev1, context='yang')\n link = Link(name='1_2_1',testbed=testbed)\n link.connect_interface(interface=intf1)\n link.connect_interface(interface=intf2)\n vlan = Vlan()\n\n for dev in testbed.devices:\n dev.connections=Mock()\n dev.connections={'netconf':NetconfConnectionInfo()}\n\n link.add_feature(vlan)\n\n vlan.device_attr[dev1]\n vlan.device_attr[dev2]\n vlan.device_attr[dev1].interface_attr[intf1]\n vlan.device_attr[dev1].interface_attr[intf1].eth_encap_type1 = 'dot1q'\n vlan.device_attr[dev1].interface_attr[intf1].eth_encap_val1 = 2\n vlan.device_attr[dev1].interface_attr[intf1].eth_encap_type2 = 'second-dot1q'\n vlan.device_attr[dev1].interface_attr[intf1].eth_encap_val2 = 5\n\n # Testing can't be done at the moment due to XR models lack on the device.\n # cfg1 = vlan.build_config(apply=False)\n # self.assertCountEqual(cfg1.keys(), ['PE1', 'PE2'])\n\n # compare = \"\"\n # for i in cfg1['PE1']: \n # compare+=str(i)\n\n # self.assertMultiLineEqual(compare, '\\n'.join([]))\n\nif __name__ == '__main__':\n unittest.main()\n", "id": "6819912", "language": "Python", "matching_score": 3.1689462661743164, "max_stars_count": 94, "path": 
"pkgs/conf-pkg/src/genie/libs/conf/vlan/iosxr/yang/tests/test_vlan.py" }, { "content": "\nfrom abc import ABC\nimport warnings\nimport string\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning,\\\n AttributesHelper\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base import Interface\n\nfrom genie.libs.conf.vrf import VrfSubAttributes\nfrom genie.conf.base.config import YangConfig\ntry:\n from ydk.models.ned import ned\n from ydk.types import DELETE, Empty\n from ydk.services import CRUDService\n\n # patch a netconf provider\n from ydk.providers import NetconfServiceProvider as _NetconfServiceProvider\n from ydk.providers._provider_plugin import _ClientSPPlugin\n from ydk.services import CodecService\n from ydk.providers import CodecServiceProvider\n\n class NetconfServiceProvider(_NetconfServiceProvider):\n def __init__(self, device):\n if 'yang' not in device.mapping:\n # Want it, but dont have a connection? \n raise Exception(\"Missing connection of \"\n \"type 'yang' in the device \"\n \"mapping '{map}'\".format(map=device.mapping))\n alias = device.mapping['yang']\n dev = device.connectionmgr.connections[alias]\n\n super().__init__(address=str(dev.connection_info.ip),\n port=dev.connection_info.port,\n username=dev.connection_info.username,\n password=dev.connection_info.password,\n protocol = 'ssh')\n\n self.sp_instance = _ClientSPPlugin(self.timeout,\n use_native_client=False)\n\n self.sp_instance._nc_manager = dev\n def _connect(self, *args, **kwargs): pass\nexcept:\n pass\n\nclass Ospf(ABC):\n\n class DeviceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n ydk_obj = ned.Native.Router.Ospf()\n ospf_intfs = []\n for sub, attributes2 in attributes.mapping_values('vrf_attr', sort=VrfSubAttributes._sort_key):\n ospf_intfs.extend(sub.build_config(apply=False, attributes=attributes2,\n unconfig=unconfig, ydk_obj=ydk_obj, **kwargs))\n\n # instantiate crud service\n crud_service = CRUDService()\n if apply:\n\n # create netconf connection\n ncp = NetconfServiceProvider(self.device)\n\n if unconfig:\n crud_service.delete(ncp,ydk_obj)\n else:\n crud_service.create(ncp, ydk_obj)\n for inter in ospf_intfs:\n inter.apply()\n else:\n ydks = []\n\n if unconfig:\n ydks.append(YangConfig(device=self.device,\n ydk_obj=ydk_obj,\n ncp=NetconfServiceProvider,\n crud_service=crud_service.delete))\n else:\n ydks.append(YangConfig(device=self.device,\n ydk_obj=ydk_obj,\n ncp=NetconfServiceProvider,\n crud_service=crud_service.create))\n\n for inter in ospf_intfs:\n ydks.append(inter)\n return ydks\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n class VrfAttributes(ABC):\n\n def build_config(self, ydk_obj, apply=True, attributes=None, unconfig=False, **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n id = attributes.value('ospf_name', force = True)\n vrf = attributes.value('vrf', force = True)\n\n if id:\n ydk_obj.id = int(id)\n\n if vrf:\n ydk_obj.vrf = vrf.name\n\n # ! router ospf 1\n # ! 
router-id 1.1.1.1\n            if attributes.value('instance_router_id'):\n                ydk_obj.router_id = str(attributes.value('instance_router_id'))\n\n            if attributes.value('auto_cost_ref_bw'):\n                ospf_auto_cost = ydk_obj.AutoCost()\n                ospf_auto_cost.reference_bandwidth = int(attributes.value('auto_cost_ref_bw'))\n                ydk_obj.auto_cost = ospf_auto_cost\n\n            # ! router ospf 1\n            # !  nsr\n            v = attributes.value('nsr')\n            if v == True:\n                ydk_obj.nsr = Empty()\n\n            ospf_intfs = []\n            for sub, attributes2 in attributes.mapping_values('area_attr', keys=self.area_attr.keys()):\n                ospf_intfs.extend(sub.build_config(apply=False, attributes=attributes2,\n                                                   unconfig=unconfig, ydk_obj=ydk_obj, **kwargs))\n            return ospf_intfs\n\n        def build_unconfig(self, apply=True, attributes=None, **kwargs):\n            return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n    class AreaAttributes(ABC):\n\n        def build_config(self, ydk_obj, apply=True, attributes=None, unconfig=False, **kwargs):\n            assert not apply\n            attributes = AttributesHelper(self, attributes)\n            configurations = CliConfigBuilder(unconfig=unconfig)\n            ospf_intfs = []\n            for sub, attributes2 in attributes.mapping_values('interface_attr'):\n                ospf_intfs.append(sub.build_config(apply=False, attributes=attributes2,\n                                                   unconfig=unconfig, area_id=self.area_id, ydk_obj=ydk_obj, **kwargs))\n\n            return ospf_intfs\n\n        def build_unconfig(self, apply=True, attributes=None, **kwargs):\n            return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n    class InterfaceAttributes(ABC):\n\n        def build_config(self, ydk_obj, apply=True, attributes=None, unconfig=False, **kwargs):\n            assert not apply\n            if kwargs.get('area_id', None) is not None:\n                setattr(self, 'area_id', kwargs['area_id'])\n            # instantiate crud service\n            crud_service = CRUDService()\n            attributes = AttributesHelper(self, attributes)\n            configurations = CliConfigBuilder(unconfig=unconfig)\n            intf_name = attributes.value('interface_name')\n            if intf_name.startswith('Gig'):\n                ydk_obj_intf = ned.Native.Interface.Gigabitethernet()\n                keep = string.digits + '/'\n                ydk_obj_intf.name = ''.join(i for i in attributes.value('interface_name') if i in keep)\n\n                if attributes.value('area_id') is not None:\n                    ospf_process = ydk_obj_intf.ip.ospf.ProcessId()\n                    ospf_process.id = int(attributes.value('ospf_name'))\n                    ospf_process.area = attributes.value('area_id')\n                    ydk_obj_intf.ip.ospf.process_id.append(ospf_process)\n\n                if attributes.value('area_interface_cost') is not None:\n                    ydk_obj_intf.ip.ospf.cost = attributes.value('area_interface_cost')\n\n                if unconfig:\n                    return YangConfig(device=self.device,\n                                      ydk_obj=ydk_obj_intf,\n                                      ncp=NetconfServiceProvider,\n                                      crud_service=crud_service.delete)\n                else:\n                    return YangConfig(device=self.device,\n                                      ydk_obj=ydk_obj_intf,\n                                      ncp=NetconfServiceProvider,\n                                      crud_service=crud_service.create)\n\n            elif intf_name.startswith('Loop'):\n                ydk_obj_intf = ned.Native.Interface.Loopback()\n                keep = string.digits\n                ydk_obj_intf.name = int(''.join(i for i in attributes.value('interface_name') if i in keep))\n                # name is a mandatory argument\n\n                if attributes.value('area_id') is not None:\n                    ospf_process = ydk_obj_intf.ip.ospf.ProcessId()\n                    ospf_process.id = int(attributes.value('ospf_name'))\n                    ospf_process.area = attributes.value('area_id')\n                    ydk_obj_intf.ip.ospf.process_id.append(ospf_process)\n\n                if attributes.value('area_interface_cost') is not None:\n                    ydk_obj_intf.ip.ospf.cost = attributes.value('area_interface_cost')\n\n                if unconfig:\n                    return YangConfig(device=self.device,\n                                      ydk_obj=ydk_obj_intf,\n                                      ncp=NetconfServiceProvider,\n                                      crud_service=crud_service.delete)\n                else:\n                    return YangConfig(device=self.device,\n                                      ydk_obj=ydk_obj_intf,\n
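ncp=NetconfServiceProvider,  # the provider class, not an instance (instantiated when the config is applied, presumably)\n                                      # 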
ncp=NetconfServiceProvider,\n crud_service=crud_service.create)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n", "id": "7715204", "language": "Python", "matching_score": 6.4314727783203125, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/ospf/iosxe/yang/ospf.py" }, { "content": "\nfrom abc import ABC\nimport warnings\nimport string\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning,\\\n AttributesHelper\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base import Interface\n\nfrom genie.libs.conf.vrf import VrfSubAttributes\nfrom genie.conf.base.config import YangConfig\ntry:\n from ydk.models.ydkmodels import Cisco_IOS_XR_ipv4_ospf_cfg as xr_ipv4_ospf_cfg\n from ydk.types import DELETE, Empty\n from ydk.services import CRUDService\n\n # patch a netconf provider\n from ydk.providers import NetconfServiceProvider as _NetconfServiceProvider\n from ydk.providers._provider_plugin import _ClientSPPlugin\n from ydk.services import CodecService\n from ydk.providers import CodecServiceProvider\n\n class NetconfServiceProvider(_NetconfServiceProvider):\n def __init__(self, device):\n if 'yang' not in device.mapping:\n # Want it, but dont have a connection? \n raise Exception(\"Missing connection of \"\n \"type 'yang' in the device \"\n \"mapping '{map}'\".format(map=device.mapping))\n alias = device.mapping['yang']\n dev = device.connectionmgr.connections[alias]\n\n super().__init__(address=str(dev.connection_info.ip),\n port=dev.connection_info.port,\n username=dev.connection_info.username,\n password=dev.connection_info.password,\n protocol = 'ssh')\n\n self.sp_instance = _ClientSPPlugin(self.timeout,\n use_native_client=False)\n\n self.sp_instance._nc_manager = dev\n def _connect(self, *args, **kwargs): pass\nexcept:\n pass\n\nclass Ospf(ABC):\n\n class DeviceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n ospf = xr_ipv4_ospf_cfg.Ospf()\n process = ospf.processes.Process()\n id = attributes.value('ospf_name', force = True)\n process.process_name = str(id)\n ospf.processes.process.append(process)\n if not unconfig:\n process.start = Empty()\n # crud_service = CRUDService()\n # ncp = NetconfServiceProvider(self.device)\n # x = crud_service.read(ncp, process)\n # abc = YangConfig(device=self.device, ydk_obj=x, ncp=ncp, crud_service=crud_service)\n # print(abc)\n v = attributes.value('nsr')\n if v == True:\n process.nsr = \"true\"\n for sub, attributes2 in attributes.mapping_values('vrf_attr', sort=VrfSubAttributes._sort_key):\n sub.build_config(apply=False, attributes=attributes2,\n unconfig=unconfig, process=process, **kwargs)\n # instantiate crud service\n crud_service = CRUDService()\n if apply:\n\n # create netconf connection3\n ncp = NetconfServiceProvider(self.device)\n if unconfig:\n crud_service.delete(ncp,process)\n else:\n crud_service.create(ncp,process)\n else:\n if unconfig:\n return YangConfig(device=self.device,\n ydk_obj=process,\n ncp=NetconfServiceProvider,\n crud_service=crud_service.delete)\n else:\n return YangConfig(device=self.device,\n ydk_obj=process,\n ncp=NetconfServiceProvider,\n crud_service=crud_service.create)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, 
\n class VrfAttributes(ABC):\n\n def build_config(self, process, apply=True, attributes=None, unconfig=False, **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n vrf = attributes.value('vrf', force=True)\n if vrf:\n vrf_obj = process.vrfs.Vrf()\n vrf_obj.vrf_name = vrf.name\n vrf_obj.vrf_start = Empty()\n process.vrfs.vrf.append(vrf_obj)\n else:\n vrf_obj = process.default_vrf\n\n # ! router ospf 1\n # ! router-id 1.1.1.1\n if attributes.value('instance_router_id'):\n vrf_obj.router_id = str(attributes.value('instance_router_id'))\n\n if attributes.value('auto_cost_ref_bw'):\n ospf_auto_cost = vrf_obj.AutoCost()\n ospf_auto_cost.bandwidth = int(attributes.value('auto_cost_ref_bw'))\n vrf_obj.auto_cost = ospf_auto_cost\n\n # ! router ospf 1\n # ! nsr\n for sub, attributes2 in attributes.mapping_values('area_attr', keys=self.area_attr.keys()):\n sub.build_config(apply=False, attributes=attributes2, \\\n unconfig=unconfig, vrf_obj=vrf_obj, **kwargs)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n class AreaAttributes(ABC):\n\n def build_config(self, vrf_obj, apply=True, attributes=None, unconfig=False, **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n area_area_id = vrf_obj.area_addresses.AreaAreaId()\n area_area_id.area_id = int(self.area_id)\n area_area_id.running = Empty()\n vrf_obj.area_addresses.area_area_id.append(area_area_id)\n for sub, attributes2 in attributes.mapping_values('interface_attr'):\n sub.build_config(apply=False, attributes=attributes2, \\\n unconfig=unconfig, area_area_id=area_area_id, **kwargs)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n class InterfaceAttributes(ABC):\n\n def build_config(self, area_area_id, apply=True, attributes=None, unconfig=False, **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n intf_name = attributes.value('interface_name')\n name_scope = area_area_id.name_scopes.NameScope()\n name_scope.interface_name = intf_name\n name_scope.running = Empty()\n area_area_id.name_scopes.name_scope.append(name_scope)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n", "id": "5927795", "language": "Python", "matching_score": 2.766097068786621, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/ospf/iosxr/yang/ospf.py" }, { "content": "'''\nIOSXE specific configurations for MLD feature object.\n'''\n\n# Python\nfrom abc import ABC\n\n# Genie\nfrom genie.decorator import managedattribute\nfrom genie.conf.base.config import CliConfig\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning, \\\n AttributesHelper\n\n# Structure Hierarchy:\n# Mld\n# +-- DeviceAttributes\n# +-- VrfAttributes\n# +-- InterfaceAttributes\n\n\nclass Mld(ABC):\n\n class DeviceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = 
CliConfigBuilder(unconfig=unconfig)\n \n # VrfAttributes\n for sub, attributes2 in attributes.mapping_values('vrf_attr',\n sort=True, keys=self.vrf_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig)) \n \n if apply:\n if configurations:\n self.device.configure(configurations)\n else:\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n \n class VrfAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # global_max_groups\n if attributes.value('global_max_groups'):\n cfg_str = 'ipv6 mld state-limit {global_max_groups}' \\\n if self.vrf_id == 'default' else \\\n 'ipv6 mld vrf {vrf_id} state-limit {global_max_groups}'\n\n configurations.append_line(attributes.format(cfg_str, force=True))\n\n # ipv6 mld [vrf <vrf>] ssm-map enable\n if hasattr(attributes.value('ssm'), 'data'):\n if attributes.value('ssm').data:\n cfg_str = 'ipv6 mld ssm-map enable' \\\n if self.vrf_id == 'default' else \\\n 'ipv6 mld vrf {} ssm-map enable'.format(self.vrf_id)\n\n configurations.append_line(attributes.format(cfg_str))\n\n # Ssm Attributes under vrf level config\n for ssm, attributes2 in attributes.sequence_values('ssm', sort=True):\n kwargs = {'vrf':self.vrf_id}\n if unconfig:\n configurations.append_block(ssm.build_unconfig(\n apply=False, attributes=attributes2, **kwargs))\n else:\n configurations.append_block(ssm.build_config(\n apply=False, attributes=attributes2, **kwargs))\n\n # InterfaceAttributes\n for sub, attributes2 in attributes.mapping_values('interface_attr',\n sort=True, keys=self.interface_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n \n class InterfaceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n with configurations.submode_context(\n attributes.format('interface {intf}', force=True)):\n\n # enable\n if attributes.value('enable'):\n configurations.append_line('ipv6 mld router')\n\n # group_policy\n if attributes.value('group_policy'):\n configurations.append_line(\n attributes.format('ipv6 mld access-group {group_policy}'))\n\n # immediate_leave -- not supported on iosxe\n\n # max_groups\n if attributes.value('max_groups'):\n configurations.append_line(\n attributes.format('ipv6 mld limit {max_groups}'))\n\n # query_interval\n if attributes.value('query_interval'):\n configurations.append_line(\n attributes.format('ipv6 mld query-interval {query_interval}'))\n\n # query_max_response_time\n if attributes.value('query_max_response_time'):\n configurations.append_line(\n attributes.format('ipv6 mld query-max-response-time '\n '{query_max_response_time}'))\n\n # version -- not supported on iosxe\n\n # robustness_variable -- not supported on iosxe\n\n # Groups Attributes under top level config\n for groups, 
attributes2 in attributes.sequence_values(\n 'groups', sort=True):\n if unconfig:\n configurations.append_block(groups.build_unconfig(\n apply=False, attributes=attributes2, **kwargs))\n else:\n configurations.append_block(groups.build_config(\n apply=False, attributes=attributes2, **kwargs))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n", "id": "12669133", "language": "Python", "matching_score": 5.494870662689209, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/mld/iosxe/mld.py" }, { "content": "'''\nNXOS specific configurations for Igmp feature object.\n'''\n\n# Python\nfrom abc import ABC\n\n# Genie\nfrom genie.decorator import managedattribute\nfrom genie.conf.base.config import CliConfig\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning, \\\n AttributesHelper\n\n# Structure Hierarchy:\n# Igmp\n# +-- DeviceAtributes\n# +-- VrfAttributes\n# +-- InterfaceAttributes\n\n\nclass Igmp(ABC):\n\n class DeviceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # require_router_alert\n if attributes.value('require_router_alert'):\n configurations.append_line('ip igmp enforce-router-alert')\n \n # VrfAttributes\n for sub, attributes2 in attributes.mapping_values('vrf_attr',\n sort=True, keys=self.vrf_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig)) \n \n if apply:\n if configurations:\n self.device.configure(configurations)\n else:\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n \n class VrfAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # global_max_groups -- not supported on nxos\n\n with configurations.submode_context(\n attributes.format('vrf context {vrf_id}' if \n self.vrf_id != 'default' else '', force=True)):\n if unconfig and attributes.iswildcard and self.vrf_id != 'default':\n configurations.submode_unconfig()\n\n # Ssm Attributes under vrf level config\n for ssm, attributes2 in attributes.sequence_values('ssm', sort=True):\n if unconfig:\n configurations.append_block(ssm.build_unconfig(\n apply=False, attributes=attributes2, **kwargs))\n else:\n configurations.append_block(ssm.build_config(\n apply=False, attributes=attributes2, **kwargs))\n\n # InterfaceAttributes\n for sub, attributes2 in attributes.mapping_values('interface_attr',\n sort=True, keys=self.interface_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n \n class InterfaceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not kwargs, kwargs\n 
attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n with configurations.submode_context(\n attributes.format('interface {intf}', force=True)):\n\n # enable\n if attributes.value('enable'):\n configurations.append_line('ip pim sparse-mode')\n\n # last_member_query_interval -- not supported on nxos\n\n # group_policy\n if attributes.value('group_policy'):\n configurations.append_line(\n attributes.format('ip igmp access-group {group_policy}'))\n\n # immediate_leave\n if attributes.value('immediate_leave'):\n configurations.append_line('ip igmp immediate-leave')\n\n # max_groups\n if attributes.value('max_groups'):\n configurations.append_line(\n attributes.format('ip igmp state-limit {max_groups}'))\n\n # query_interval\n if attributes.value('query_interval'):\n configurations.append_line(\n attributes.format('ip igmp query-interval {query_interval}'))\n\n # query_max_response_time\n if attributes.value('query_max_response_time'):\n configurations.append_line(\n attributes.format('ip igmp query-max-response-time '\n '{query_max_response_time}'))\n\n # robustness_variable\n if attributes.value('robustness_variable'):\n configurations.append_line(\n attributes.format('ip igmp robustness-variable '\n '{robustness_variable}'))\n\n # version\n if attributes.value('version'):\n configurations.append_line(\n attributes.format('ip igmp version {version}'))\n\n # Groups Attributes under top level config\n for groups, attributes2 in attributes.sequence_values(\n 'groups', sort=True):\n if unconfig:\n configurations.append_block(groups.build_unconfig(\n apply=False, attributes=attributes2, **kwargs))\n else:\n configurations.append_block(groups.build_config(\n apply=False, attributes=attributes2, **kwargs))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n", "id": "3749697", "language": "Python", "matching_score": 1.5787612199783325, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/igmp/nxos/igmp.py" }, { "content": "\n# -- ISIS\n# nxos: interface <intf> / ip router isis someword\n# nxos: interface <intf> / ipv6 router isis someword\n# nxos: interface <intf> / isis authentication key-chain someword\n# nxos: interface <intf> / isis authentication key-chain someword level-1\n# nxos: interface <intf> / isis authentication key-chain someword level-2\n# nxos: interface <intf> / isis authentication-check\n# nxos: interface <intf> / isis authentication-check level-1\n# nxos: interface <intf> / isis authentication-check level-2\n# nxos: interface <intf> / isis authentication-type cleartext\n# nxos: interface <intf> / isis authentication-type cleartext level-1\n# nxos: interface <intf> / isis authentication-type cleartext level-2\n# nxos: interface <intf> / isis authentication-type md5\n# nxos: interface <intf> / isis authentication-type md5 level-1\n# nxos: interface <intf> / isis authentication-type md5 level-2\n# nxos: interface <intf> / isis bfd\n# nxos: interface <intf> / isis bfd disable\n# nxos: interface <intf> / isis circuit-type level-1\n# nxos: interface <intf> / isis circuit-type level-1-2\n# nxos: interface <intf> / isis circuit-type level-2\n# nxos: interface <intf> / isis csnp-interval 1 level-1\n# nxos: interface <intf> / isis csnp-interval 1 level-2\n# nxos: interface <intf> / isis hello-interval 1\n# nxos: interface <intf> / isis hello-interval 1 level-1\n# nxos: interface <intf> / 
isis hello-interval 1 level-2\n# nxos: interface <intf> / isis hello-multiplier 3\n# nxos: interface <intf> / isis hello-multiplier 3 level-1\n# nxos: interface <intf> / isis hello-multiplier 3 level-2\n# nxos: interface <intf> / isis hello-padding\n# nxos: interface <intf> / isis ipv6 bfd\n# nxos: interface <intf> / isis ipv6 bfd disable\n# nxos: interface <intf> / isis ipv6 metric <0-16777215> level-1\n# nxos: interface <intf> / isis ipv6 metric <0-16777215> level-2\n# nxos: interface <intf> / isis lsp-interval 10\n# nxos: interface <intf> / isis mesh-group 1\n# nxos: interface <intf> / isis mesh-group blocked\n# nxos: interface <intf> / isis metric <0-16777215> level-1\n# nxos: interface <intf> / isis metric <0-16777215> level-2\n# nxos: interface <intf> / isis network point-to-point\n# nxos: interface <intf> / isis passive-interface level-1\n# nxos: interface <intf> / isis passive-interface level-1-2\n# nxos: interface <intf> / isis passive-interface level-2\n# nxos: interface <intf> / isis priority <0-127> level-1\n# nxos: interface <intf> / isis priority <0-127> level-2\n# nxos: interface <intf> / isis retransmit-interval 1\n# nxos: interface <intf> / isis retransmit-throttle-interval 20\n# nxos: interface <intf> / isis shutdown\n\n", "id": "54699", "language": "Python", "matching_score": 2.2356956005096436, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/isis/nxos/isis.py" }, { "content": "\n# -- OSPFv3\n# nxos: interface <intf> / ipv6 router ospfv3 someword area <0-4294967295>\n# nxos: interface <intf> / ipv6 router ospfv3 someword area <0-4294967295> secondaries none\n# nxos: interface <intf> / ipv6 router ospfv3 someword area 1.2.3.4\n# nxos: interface <intf> / ipv6 router ospfv3 someword area 1.2.3.4 secondaries none\n# nxos: interface <intf> / ipv6 router ospfv3 someword multi-area A.B.C.D or <0-4294967295>\n# nxos: interface <intf> / ospfv3 bfd\n# nxos: interface <intf> / ospfv3 bfd disable\n# nxos: interface <intf> / ospfv3 cost 1\n# nxos: interface <intf> / ospfv3 dead-interval 1\n# nxos: interface <intf> / ospfv3 hello-interval 1\n# nxos: interface <intf> / ospfv3 instance <0-255>\n# nxos: interface <intf> / ospfv3 mtu-ignore\n# nxos: interface <intf> / ospfv3 network broadcast\n# nxos: interface <intf> / ospfv3 network point-to-point\n# nxos: interface <intf> / ospfv3 passive-interface\n# nxos: interface <intf> / ospfv3 priority <0-255>\n# nxos: interface <intf> / ospfv3 retransmit-interval 1\n# nxos: interface <intf> / ospfv3 shutdown\n# nxos: interface <intf> / ospfv3 transmit-delay 1\n\n", "id": "7300770", "language": "Python", "matching_score": 1.5021268129348755, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/ospfv3/nxos/ospfv3.py" }, { "content": "# -- EIGRP\n# nxos: interface <intf> / ip authentication key-chain eigrp someword someword2\n# nxos: interface <intf> / ip authentication mode eigrp someword md5\n# nxos: interface <intf> / ip bandwidth eigrp someword 1\n# nxos: interface <intf> / ip bandwidth-percent eigrp someword 1\n# nxos: interface <intf> / ip delay eigrp someword 1\n# nxos: interface <intf> / ip delay eigrp someword 1 picoseconds\n# nxos: interface <intf> / ip distribute-list eigrp someword prefix-list someword2 in\n# nxos: interface <intf> / ip distribute-list eigrp someword prefix-list someword2 out\n# nxos: interface <intf> / ip distribute-list eigrp someword route-map rpl1 in\n# nxos: interface <intf> / ip distribute-list eigrp someword route-map rpl1 out\n# nxos: interface <intf> / ip eigrp someword bfd\n# 
nxos: interface <intf> / ip eigrp someword bfd disable\n# nxos: interface <intf> / ip eigrp someword shutdown\n# nxos: interface <intf> / ip hello-interval eigrp someword 1\n# nxos: interface <intf> / ip hold-time eigrp someword 1\n# nxos: interface <intf> / ip mtu eigrp someword 210\n# nxos: interface <intf> / ip next-hop-self eigrp someword\n# nxos: interface <intf> / ip offset-list eigrp someword prefix-list someword2 in <0-2147483647>\n# nxos: interface <intf> / ip offset-list eigrp someword prefix-list someword2 out <0-2147483647>\n# nxos: interface <intf> / ip offset-list eigrp someword route-map rpl1 in <0-2147483647>\n# nxos: interface <intf> / ip offset-list eigrp someword route-map rpl1 out <0-2147483647>\n# nxos: interface <intf> / ip passive-interface eigrp someword\n# nxos: interface <intf> / ip router eigrp someword\n# nxos: interface <intf> / ip split-horizon eigrp someword\n# nxos: interface <intf> / ip summary-address eigrp someword 1.2.3.4 255.255.255.0\n# nxos: interface <intf> / ip summary-address eigrp someword 1.2.3.4 255.255.255.0 1\n# nxos: interface <intf> / ip summary-address eigrp someword 1.2.3.4 255.255.255.0 1 leak-map someword2\n# nxos: interface <intf> / ip summary-address eigrp someword 1.2.3.4 255.255.255.0 leak-map someword2\n# nxos: interface <intf> / ip summary-address eigrp someword 1.2.3.0/24\n# nxos: interface <intf> / ip summary-address eigrp someword 1.2.3.0/24 1\n# nxos: interface <intf> / ip summary-address eigrp someword 1.2.3.0/24 1 leak-map someword2\n# nxos: interface <intf> / ip summary-address eigrp someword 1.2.3.0/24 leak-map someword2\n# nxos: interface <intf> / ipv6 authentication key-chain eigrp someword someword2\n# nxos: interface <intf> / ipv6 authentication mode eigrp someword md5\n# nxos: interface <intf> / ipv6 bandwidth eigrp someword 1\n# nxos: interface <intf> / ipv6 bandwidth-percent eigrp someword 1\n# nxos: interface <intf> / ipv6 delay eigrp someword 1\n# nxos: interface <intf> / ipv6 delay eigrp someword 1 picoseconds\n# nxos: interface <intf> / ipv6 distribute-list eigrp someword prefix-list someword2 in\n# nxos: interface <intf> / ipv6 distribute-list eigrp someword prefix-list someword2 out\n# nxos: interface <intf> / ipv6 distribute-list eigrp someword route-map rpl1 in\n# nxos: interface <intf> / ipv6 distribute-list eigrp someword route-map rpl1 out\n# nxos: interface <intf> / ipv6 eigrp someword shutdown\n# nxos: interface <intf> / ipv6 hello-interval eigrp someword 1\n# nxos: interface <intf> / ipv6 hold-time eigrp someword 1\n# nxos: interface <intf> / ipv6 mtu eigrp someword 210\n# nxos: interface <intf> / ipv6 next-hop-self eigrp someword\n# nxos: interface <intf> / ipv6 offset-list eigrp someword prefix-list someword2 in <0-2147483647>\n# nxos: interface <intf> / ipv6 offset-list eigrp someword prefix-list someword2 out <0-2147483647>\n# nxos: interface <intf> / ipv6 offset-list eigrp someword route-map rpl1 in <0-2147483647>\n# nxos: interface <intf> / ipv6 offset-list eigrp someword route-map rpl1 out <0-2147483647>\n# nxos: interface <intf> / ipv6 passive-interface eigrp someword\n# nxos: interface <intf> / ipv6 router eigrp someword\n# nxos: interface <intf> / ipv6 split-horizon eigrp someword\n# nxos: interface <intf> / ipv6 summary-address eigrp someword fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b/128\n# nxos: interface <intf> / ipv6 summary-address eigrp someword fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b/128 1\n# nxos: interface <intf> / ipv6 summary-address eigrp someword 
fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b/128 1 leak-map someword2\n# nxos: interface <intf> / ipv6 summary-address eigrp someword fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b/128 leak-map someword2\n\n", "id": "3559205", "language": "Python", "matching_score": 0.5051361322402954, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/eigrp/nxos/eigrp.py" }, { "content": "# Genie\nfrom genie.ops.base import Base\n\n# IOSXE Ops\nfrom genie.libs.ops.eigrp.iosxe.eigrp import Eigrp as EigrpXE\n\nclass Eigrp(EigrpXE):\n '''\n Eigrp Ops Object\n '''\n pass\n", "id": "6665955", "language": "Python", "matching_score": 1.7005423307418823, "max_stars_count": 0, "path": "pkgs/ops-pkg/src/genie/libs/ops/eigrp/ios/eigrp.py" }, { "content": "# Genie\r\nfrom genie.ops.base import Base\r\n\r\n\r\nclass Eigrp(Base):\r\n exclude = ['hold', 'uptime']", "id": "9543504", "language": "Python", "matching_score": 0.5910260081291199, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/eigrp/eigrp.py" }, { "content": "# Genie\r\nfrom genie.ops.base import Base\r\n\r\n# iosxe show_eigrp\r\nfrom genie.libs.parser.iosxe.show_eigrp import ShowIpEigrpNeighborsDetail,\\\r\n ShowIpv6EigrpNeighborsDetail\r\n\r\nclass Eigrp(Base):\r\n '''\r\n Eigrp Ops Object\r\n '''\r\n\r\n def learn(self):\r\n '''\r\n Learn Eigrp object\r\n '''\r\n\r\n # N/A -> Keys are not supported by this OS\r\n # eigrp_instance\r\n # vrf\r\n # address_family \r\n # router_id N/A\r\n # named_mode\r\n # name\r\n # eigrp_interface\r\n # passive N/A\r\n # hello_interval N/A\r\n # hold_timer N/A\r\n # auth_val N/A\r\n # eigrp_nbr\r\n # nbr_sw_ver\r\n # os_majorver\r\n # os_minorver\r\n # tlv_majorrev\r\n # tlv_minorrev\r\n # nbr_stubinfo N/A\r\n # retransmit_count\r\n # retry_count\r\n # last_seq_number\r\n # srtt\r\n # rto\r\n # q_cnt\r\n # hold\r\n # uptime\r\n # peer_handle\r\n # prefixes\r\n # topology_ids_from_peer\r\n\r\n\r\n # ShowIpEigrpNeighborsDetail\r\n # ShowIpv6EigrpNeighborsDetail\r\n # Adding these keys\r\n # eigrp_instance\r\n # vrf\r\n # address_family\r\n # name\r\n # named_mode\r\n # eigrp_interface\r\n # eigrp_nbr\r\n # retransmit_count\r\n # retry_count\r\n # last_seq_number\r\n # srtt\r\n # rto\r\n # q_cnt\r\n # peer_handle\r\n # nbr_sw_ver\r\n # os_majorver\r\n # os_minorver\r\n # tlv_majorrev\r\n # tlv_minorrev\r\n # hold\r\n # uptime\r\n # prefixes\r\n # topology_ids_from_peer\r\n for cmd in [ShowIpEigrpNeighborsDetail, ShowIpv6EigrpNeighborsDetail]:\r\n\r\n # eigrp_instance\r\n # vrf\r\n # address_family\r\n info_src = '[eigrp_instance][(?P<as_num>.*)][vrf][(?P<vrf>.*)][address_family][(?P<address_family>.*)]'\r\n info_dest = 'info' + info_src\r\n\r\n # address_family\r\n # name\r\n # named_mode\r\n # eigrp_interface\r\n for key in ['name', 'named_mode']:\r\n # address_family\r\n # name\r\n self.add_leaf(cmd=cmd,\r\n src=info_src+'[{key}]'.format(key=key),\r\n dest=info_dest+'[{key}]'.format(key=key))\r\n\r\n info_src = '[eigrp_instance][(?P<as_num>.*)][vrf][(?P<vrf>.*)][address_family][(?P<address_family>.*)][eigrp_interface][(?P<eigrp_interface>.*)][eigrp_nbr][(?P<eigrp_nbr>.*)]'\r\n info_dest = 'info' + info_src\r\n\r\n # eigrp_interface\r\n # eigrp_nbr\r\n # nbr_sw_ver\r\n # retransmit_count\r\n # retry_count\r\n # last_seq_number\r\n # srtt\r\n # rto\r\n # q_cnt\r\n # hold\r\n # uptime\r\n # peer_handle\r\n # prefixes\r\n # topology_ids_from_peer\r\n for key in ['nbr_sw_ver', 'retransmit_count', 'retry_count',\r\n 'last_seq_number', 'srtt', 'rto', 'q_cnt', 'hold',\r\n 'uptime', 'peer_handle', 'prefixes',\r\n 
'topology_ids_from_peer']:\r\n\r\n self.add_leaf(cmd=cmd,\r\n src=info_src+'[{key}]'.format(key=key),\r\n dest=info_dest+'[{key}]'.format(key=key))\r\n\r\n # eigrp_interface\r\n # eigrp_nbr\r\n # nbr_sw_ver\r\n # os_majorver\r\n # os_minorver\r\n # tlv_majorrev\r\n # tlv_minorrev\r\n for key in ['os_majorver', 'os_minorver', 'tlv_majorrev', 'tlv_minorrev']:\r\n self.add_leaf(cmd=cmd,\r\n src=info_src+'[nbr_sw_ver][(?P<nbr_sw_ver>.*)][{key}]'.format(key=key),\r\n dest=info_dest+'[nbr_sw_ver][(?P<nbr_sw_ver>.*)][{key}]'.format(key=key))\r\n\r\n # Make final Ops structure\r\n self.make(final_call=True)\r\n", "id": "7850613", "language": "Python", "matching_score": 3.7983710765838623, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/eigrp/iosxe/eigrp.py" }, { "content": "\r\n# Python\r\nimport unittest\r\nfrom unittest.mock import Mock\r\n\r\n# ATS\r\nfrom ats.topology import Device\r\n\r\n# Genie\r\nfrom genie.libs.ops.eigrp.iosxe.eigrp import Eigrp\r\nfrom genie.libs.ops.eigrp.iosxe.tests.eigrp_output import EigrpOutput\r\n\r\n# iosxe show_eigrp\r\nfrom genie.libs.parser.iosxe.show_eigrp import ShowIpEigrpNeighborsDetail,\\\r\n ShowIpv6EigrpNeighborsDetail\r\n\r\noutputs = {}\r\noutputs['show ip eigrp neighbors detail'] = EigrpOutput.ShowIpEigrpNeighborsDetail\r\noutputs['show ipv6 eigrp neighbors detail'] = EigrpOutput.ShowIpv6EigrpNeighborsDetail\r\n\r\ndef mapper(key):\r\n return outputs[key]\r\n\r\nclass test_eigrp(unittest.TestCase):\r\n\r\n def setUp(self):\r\n self.device = Device(name='aDevice')\r\n self.device.os = 'iosxe'\r\n self.device.mapping = {}\r\n self.device.mapping['cli'] = 'cli'\r\n # Give the device as a connection type\r\n # This is done in order to call the parser on the output provided\r\n self.device.connectionmgr.connections['cli'] = self.device\r\n\r\n def test_complete_output(self):\r\n self.maxDiff = None\r\n eigrp = Eigrp(device=self.device)\r\n\r\n # Set outputs\r\n eigrp.maker.outputs[ShowIpEigrpNeighborsDetail] = {'': EigrpOutput.ShowIpEigrpNeighborsDetail}\r\n eigrp.maker.outputs[ShowIpv6EigrpNeighborsDetail] = {'': EigrpOutput.ShowIpv6EigrpNeighborsDetail}\r\n\r\n # Return outputs above as inputs to parser when called\r\n self.device.execute = Mock()\r\n self.device.execute.side_effect = mapper\r\n\r\n # Learn the feature\r\n eigrp.learn()\r\n\r\n # Verify Ops was created successfully\r\n self.assertEqual(eigrp.info, EigrpOutput.EigrpInfo)\r\n\r\n def test_selective_attribute(self):\r\n self.maxDiff = None\r\n eigrp = Eigrp(device=self.device)\r\n\r\n # Set outputs\r\n eigrp.maker.outputs[ShowIpEigrpNeighborsDetail] = {'': EigrpOutput.ShowIpEigrpNeighborsDetail}\r\n eigrp.maker.outputs[ShowIpv6EigrpNeighborsDetail] = {'': EigrpOutput.ShowIpv6EigrpNeighborsDetail}\r\n\r\n # Return outputs above as inputs to parser when called\r\n self.device.execute = Mock()\r\n 
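# mapper looks each CLI command string up in the module-level outputs\r\n # dict, e.g. 'show ip eigrp neighbors detail' returns\r\n # EigrpOutput.ShowIpEigrpNeighborsDetail, so the parsers below run\r\n # against canned output instead of a live device.\r\n 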
self.device.execute.side_effect = mapper\r\n\r\n # Learn the feature\r\n eigrp.learn()\r\n\r\n self.assertEqual(5, eigrp.info['eigrp_instance']['100']['vrf']\\\r\n ['default']['address_family']['ipv4']\\\r\n ['eigrp_interface']['Ethernet1/0']\\\r\n ['eigrp_nbr']['10.1.2.1']\\\r\n ['nbr_sw_ver']['os_majorver'])\r\n\r\n def test_empty_output(self):\r\n self.maxDiff = None\r\n eigrp = Eigrp(device=self.device)\r\n\r\n # Set outputs\r\n eigrp.maker.outputs[ShowIpEigrpNeighborsDetail] = {'': {}}\r\n eigrp.maker.outputs[ShowIpv6EigrpNeighborsDetail] = {'': {}}\r\n\r\n # Return outputs above as inputs to parser when called\r\n self.device.execute = Mock()\r\n self.device.execute.side_effect = mapper\r\n\r\n # Learn the feature\r\n eigrp.learn()\r\n\r\n # Verify attribute is missing\r\n with self.assertRaises(AttributeError):\r\n eigrp.info['eigrp_instance']\r\n\r\n def test_missing_attributes(self):\r\n self.maxDiff = None\r\n eigrp = Eigrp(device=self.device)\r\n\r\n # Set outputs\r\n eigrp.maker.outputs[ShowIpEigrpNeighborsDetail] = {'': EigrpOutput.ShowIpEigrpNeighborsDetail}\r\n eigrp.maker.outputs[ShowIpv6EigrpNeighborsDetail] = {'': {}}\r\n\r\n # Return outputs above as inputs to parser when called\r\n self.device.execute = Mock()\r\n self.device.execute.side_effect = mapper\r\n\r\n # Learn the feature\r\n eigrp.learn()\r\n\r\n # Verify key not created due to missing output\r\n\r\n with self.assertRaises(KeyError):\r\n single_value_preference = eigrp.info['eigrp_instance']['vrf']\\\r\n ['address_family']['eigrp_interface']\\\r\n ['eigrp_nbr']['nbr_sw_ver']\r\n\r\n\r\nif __name__ == '__main__':\r\n unittest.main()\r\n", "id": "6252014", "language": "Python", "matching_score": 2.554727554321289, "max_stars_count": 0, "path": "pkgs/ops-pkg/src/genie/libs/ops/eigrp/iosxe/tests/test_eigrp.py" }, { "content": "# Python\nimport unittest\nfrom copy import deepcopy\nfrom unittest.mock import Mock\n\n# ATS\nfrom ats.topology import Device\n\n# Genie\nfrom genie.libs.ops.pim.ios.pim import Pim\nfrom genie.libs.ops.pim.ios.tests.pim_output import PimOutput\n\n# Parser\nfrom genie.libs.parser.ios.show_pim import ShowIpv6PimInterface,\\\n ShowIpPimInterfaceDetail,\\\n ShowIpPimInterface, \\\n ShowIpv6PimBsrCandidateRp, \\\n ShowIpPimRpMapping, \\\n ShowIpv6PimBsrElection, \\\n ShowIpPimBsrRouter, \\\n ShowIpPimNeighbor, \\\n ShowIpv6PimNeighborDetail, \\\n ShowIpPimInterfaceDf\n\nfrom genie.libs.parser.ios.show_mcast import ShowIpMroute,\\\n ShowIpv6Mroute\n\n# iosxe show_vrf\nfrom genie.libs.parser.ios.show_vrf import ShowVrfDetail\n\n\nclass test_pim(unittest.TestCase):\n\n def setUp(self):\n self.device = Device(name='aDevice')\n self.device.os = 'ios'\n self.device.custom['abstraction'] = {'order':['os']}\n self.device.mapping={}\n self.device.mapping['cli']='cli'\n # Give the device as a connection type\n # This is done in order to call the parser on the output provided\n self.device.connectionmgr.connections['cli'] = self.device\n\n def test_complete_output(self):\n self.maxDiff = None\n pim = Pim(device=self.device)\n \n # Get outputs\n pim.maker.outputs[ShowVrfDetail] = \\\n {'': PimOutput.ShowVrfDetail}\n\n pim.maker.outputs[ShowIpv6PimInterface] = \\\n {\"{'vrf':''}\": PimOutput.ShowIpv6PimInterface_default}\n\n pim.maker.outputs[ShowIpPimInterface] = \\\n {\"{'vrf':''}\": PimOutput.ShowIpPimInterface_default}\n\n pim.maker.outputs[ShowIpPimInterface].update(\n {\"{'vrf':'VRF1'}\": PimOutput.ShowIpPimInterface_VRF1})\n\n pim.maker.outputs[ShowIpv6PimBsrElection] = \\\n {\"{'vrf':''}\": PimOutput.ShowIpv6PimBsrElection_default}\n\n pim.maker.outputs[ShowIpv6PimBsrCandidateRp] = \\\n {\"{'vrf':''}\": PimOutput.ShowIpv6PimBsrCandidateRp_default}\n\n pim.maker.outputs[ShowIpPimBsrRouter] = \\\n {\"{'vrf':''}\": PimOutput.ShowIpPimBsrRouter_default}\n\n pim.maker.outputs[ShowIpPimBsrRouter].update(\n {\"{'vrf':'VRF1'}\": PimOutput.ShowIpPimBsrRouter_VRF1})\n\n pim.maker.outputs[ShowIpPimRpMapping] = \\\n {\"{'vrf':''}\": PimOutput.ShowIpPimRpMapping_default}\n\n pim.maker.outputs[ShowIpPimRpMapping].update(\n {\"{'vrf':'VRF1'}\": PimOutput.ShowIpPimRpMapping_VRF1})\n\n pim.maker.outputs[ShowIpPimInterfaceDetail] = \\\n {\"{'vrf':''}\": 
PimOutput.ShowIpPimInterfaceDetail_default}\n\n pim.maker.outputs[ShowIpPimInterfaceDetail].update(\n {\"{'vrf':'VRF1'}\": PimOutput.ShowIpPimInterfaceDetail_VRF1})\n\n pim.maker.outputs[ShowIpMroute] = \\\n {\"{'vrf':''}\": PimOutput.ShowIpMroute_default}\n\n pim.maker.outputs[ShowIpMroute].update(\n {\"{'vrf':'VRF1'}\": PimOutput.ShowIpMroute_VRF1})\n\n pim.maker.outputs[ShowIpv6Mroute] = \\\n {\"{'vrf':''}\": PimOutput.ShowIpv6Mroute_default}\n\n pim.maker.outputs[ShowIpPimNeighbor] = \\\n {\"{'vrf':''}\": PimOutput.ShowIpPimNeighbor_default}\n\n pim.maker.outputs[ShowIpPimNeighbor].update(\n {\"{'vrf':'VRF1'}\": PimOutput.ShowIpPimNeighbor_VRF1})\n\n pim.maker.outputs[ShowIpv6PimNeighborDetail] = \\\n {\"{'vrf':''}\": PimOutput.ShowIpv6PimNeighborDetail_default}\n\n pim.maker.outputs[ShowIpPimInterfaceDf] = \\\n {\"{'vrf':''}\": PimOutput.ShowIpPimInterfaceDf_default}\n\n pim.maker.outputs[ShowIpPimInterfaceDf].update(\n {\"{'vrf':'VRF1'}\": PimOutput.ShowIpPimInterfaceDf_VRF1})\n\n # Learn the feature\n pim.learn()\n\n # Verify Ops was created successfully\n self.assertEqual(pim.info, PimOutput.Pim_info)\n\n # Verify Select Attributes \n # Check specific attribute values\n # info - default vrf\n self.assertEqual(pim.info['vrf']['default']['address_family']\\\n ['ipv4']['rp']['bsr']\\\n ['Loopback0']['address'], '10.16.2.2')\n # info - vrf VRF1\n self.assertEqual(pim.info['vrf']['VRF1']['interfaces']\\\n ['GigabitEthernet3']['address_family']['ipv4']\\\n ['bsr_border'], False)\n\n\n def test_empty_output(self):\n self.maxDiff = None\n pim = Pim(device=self.device)\n # Get outputs\n pim.maker.outputs[ShowVrfDetail] = \\\n {'': PimOutput.ShowVrfDetail}\n\n pim.maker.outputs[ShowIpv6PimInterface] = \\\n {\"{'vrf':''}\": {}}\n\n pim.maker.outputs[ShowIpv6PimInterface].update(\n {\"{'vrf':'VRF1'}\": {}})\n\n pim.maker.outputs[ShowIpPimInterface] = \\\n {\"{'vrf':''}\": {}}\n\n pim.maker.outputs[ShowIpPimInterface].update(\n {\"{'vrf':'VRF1'}\": {}})\n\n pim.maker.outputs[ShowIpv6PimBsrElection] = \\\n {\"{'vrf':''}\": {}}\n\n pim.maker.outputs[ShowIpv6PimBsrElection].update(\n {\"{'vrf':'VRF1'}\": {}})\n\n pim.maker.outputs[ShowIpv6PimBsrCandidateRp] = \\\n {\"{'vrf':''}\": {}}\n\n pim.maker.outputs[ShowIpv6PimBsrCandidateRp].update(\n {\"{'vrf':'VRF1'}\": {}}) \n\n pim.maker.outputs[ShowIpPimBsrRouter] = \\\n {\"{'vrf':''}\": {}}\n\n pim.maker.outputs[ShowIpPimBsrRouter].update(\n {\"{'vrf':'VRF1'}\": {}})\n\n pim.maker.outputs[ShowIpPimRpMapping] = \\\n {\"{'vrf':''}\": {}}\n\n pim.maker.outputs[ShowIpPimRpMapping].update(\n {\"{'vrf':'VRF1'}\": {}})\n\n pim.maker.outputs[ShowIpPimInterfaceDetail] = \\\n {\"{'vrf':''}\": {}}\n\n pim.maker.outputs[ShowIpPimInterfaceDetail].update(\n {\"{'vrf':'VRF1'}\": {}})\n\n pim.maker.outputs[ShowIpMroute] = \\\n {\"{'vrf':''}\": {}}\n\n pim.maker.outputs[ShowIpMroute].update(\n {\"{'vrf':'VRF1'}\": {}})\n\n pim.maker.outputs[ShowIpv6Mroute] = \\\n {\"{'vrf':''}\": {}}\n\n pim.maker.outputs[ShowIpv6Mroute].update(\n {\"{'vrf':'VRF1'}\": {}})\n\n pim.maker.outputs[ShowIpPimNeighbor] = \\\n {\"{'vrf':''}\": {}}\n\n pim.maker.outputs[ShowIpPimNeighbor].update(\n {\"{'vrf':'VRF1'}\": {}})\n\n pim.maker.outputs[ShowIpv6PimNeighborDetail] = \\\n {\"{'vrf':''}\": {}}\n\n pim.maker.outputs[ShowIpv6PimNeighborDetail].update(\n {\"{'vrf':'VRF1'}\": {}})\n\n pim.maker.outputs[ShowIpPimInterfaceDf] = \\\n {\"{'vrf':''}\": {}}\n\n pim.maker.outputs[ShowIpPimInterfaceDf].update(\n {\"{'vrf':'VRF1'}\": {}})\n\n # Learn the feature\n pim.learn()\n\n # Check no 
attribute not found\n with self.assertRaises(AttributeError):\n pim.info['vrf']\n\n def test_incomplete_output(self):\n self.maxDiff = None\n \n pim = Pim(device=self.device)\n\n # Get outputs\n pim.maker.outputs[ShowVrfDetail] = \\\n {'': PimOutput.ShowVrfDetail}\n\n pim.maker.outputs[ShowIpv6PimInterface] = \\\n {\"{'vrf':''}\": PimOutput.ShowIpv6PimInterface_default}\n\n pim.maker.outputs[ShowIpPimInterface] = \\\n {\"{'vrf':''}\": PimOutput.ShowIpPimInterface_default}\n\n pim.maker.outputs[ShowIpPimInterface].update(\n {\"{'vrf':'VRF1'}\": {}})\n\n pim.maker.outputs[ShowIpv6PimBsrElection] = \\\n {\"{'vrf':''}\": PimOutput.ShowIpv6PimBsrElection_default}\n\n pim.maker.outputs[ShowIpv6PimBsrCandidateRp] = \\\n {\"{'vrf':''}\": PimOutput.ShowIpv6PimBsrCandidateRp_default}\n\n pim.maker.outputs[ShowIpPimBsrRouter] = \\\n {\"{'vrf':''}\": PimOutput.ShowIpPimBsrRouter_default}\n\n pim.maker.outputs[ShowIpPimBsrRouter].update(\n {\"{'vrf':'VRF1'}\": {}})\n\n pim.maker.outputs[ShowIpPimRpMapping] = \\\n {\"{'vrf':''}\": PimOutput.ShowIpPimRpMapping_default}\n\n pim.maker.outputs[ShowIpPimRpMapping].update(\n {\"{'vrf':'VRF1'}\": {}})\n\n pim.maker.outputs[ShowIpPimInterfaceDetail] = \\\n {\"{'vrf':''}\": PimOutput.ShowIpPimInterfaceDetail_default}\n\n pim.maker.outputs[ShowIpPimInterfaceDetail].update(\n {\"{'vrf':'VRF1'}\": {}})\n\n pim.maker.outputs[ShowIpMroute] = \\\n {\"{'vrf':''}\": PimOutput.ShowIpMroute_default}\n\n pim.maker.outputs[ShowIpMroute].update(\n {\"{'vrf':'VRF1'}\": {}})\n\n pim.maker.outputs[ShowIpv6Mroute] = \\\n {\"{'vrf':''}\": PimOutput.ShowIpv6Mroute_default}\n\n pim.maker.outputs[ShowIpPimNeighbor] = \\\n {\"{'vrf':''}\": PimOutput.ShowIpPimNeighbor_default}\n\n pim.maker.outputs[ShowIpPimNeighbor].update(\n {\"{'vrf':'VRF1'}\": {}})\n\n pim.maker.outputs[ShowIpv6PimNeighborDetail] = \\\n {\"{'vrf':''}\": PimOutput.ShowIpv6PimNeighborDetail_default}\n\n pim.maker.outputs[ShowIpPimInterfaceDf] = \\\n {\"{'vrf':''}\": PimOutput.ShowIpPimInterfaceDf_default}\n\n pim.maker.outputs[ShowIpPimInterfaceDf].update(\n {\"{'vrf':'VRF1'}\": {}})\n\n # Learn the feature\n pim.learn()\n\n # Delete missing specific attribute values\n expect_dict = deepcopy(PimOutput.Pim_info)\n \n del(expect_dict['vrf']['VRF1'])\n\n # Verify Ops was created successfully\n self.assertEqual(pim.info, expect_dict)\n\n\nif __name__ == '__main__':\n unittest.main()", "id": "8174821", "language": "Python", "matching_score": 3.2241029739379883, "max_stars_count": 0, "path": "pkgs/ops-pkg/src/genie/libs/ops/pim/ios/tests/test_pim.py" }, { "content": "# Python\nimport unittest\nfrom unittest.mock import Mock\n\n# ATS\nfrom ats.topology import Device\n\n# Genie\nfrom genie.libs.ops.mcast.iosxr.mcast import Mcast\nfrom genie.libs.ops.mcast.iosxr.tests.mcast_output import McastOutput\n\n# iosxr show_pim\nfrom genie.libs.parser.iosxr.show_pim import ShowPimVrfMstatic, ShowPimVrfRpfSummary,\\\n ShowPimVrfInterfaceDetail\n\n# iosxr show_mrib\nfrom genie.libs.parser.iosxr.show_mrib import ShowMribVrfRoute\n\n# iosxr show_vrf\nfrom genie.libs.parser.iosxr.show_vrf import ShowVrfAllDetail\n\noutputs = {}\noutputs['show pim vrf VRF1 ipv4 interface detail'] = McastOutput.PimVrfVRF1Ipv4InterfaceDetail\noutputs['show pim vrf VRF1 ipv4 rpf summary'] = McastOutput.PimVrfVRF1Ipv4RpfSummary\noutputs['show pim vrf VRF1 ipv4 mstatic'] = McastOutput.PimVrfVRF1Ipv4Mstatic\noutputs['show mrib vrf VRF1 ipv4 route'] = McastOutput.MribVrfVRF1Ipv4Route\noutputs['show pim vrf VRF1 ipv6 interface detail'] = 
McastOutput.PimVrfVRF1Ipv6InterfaceDetail\noutputs['show pim vrf VRF1 ipv6 rpf summary'] = McastOutput.PimVrfVRF1Ipv6RpfSummary\noutputs['show pim vrf VRF1 ipv6 mstatic'] = McastOutput.PimVrfVRF1Ipv6Mstatic\noutputs['show mrib vrf VRF1 ipv6 route'] = McastOutput.MribVrfVRF1Ipv6Route\noutputs['show pim vrf default ipv4 interface detail'] = McastOutput.PimVrfDefaultIpv4InterfaceDetail\noutputs['show pim vrf default ipv4 rpf summary'] = McastOutput.PimVrfDefaultIpv4RpfSummary\noutputs['show pim vrf default ipv4 mstatic'] = McastOutput.PimVrfDefaultIpv4Mstatic\noutputs['show mrib vrf default ipv4 route'] = McastOutput.MribVrfDefaultIpv4Route\noutputs['show pim vrf default ipv6 interface detail'] = McastOutput.PimVrfDefaultIpv6InterfaceDetail\noutputs['show pim vrf default ipv6 rpf summary'] = McastOutput.PimVrfDefaultIpv6RpfSummary\noutputs['show pim vrf default ipv6 mstatic'] = McastOutput.PimVrfDefaultIpv6Mstatic\noutputs['show mrib vrf default ipv6 route'] = McastOutput.MribVrfDefaultIpv6Route\n\ndef mapper(key):\n return outputs[key]\n\n\nclass test_mcast(unittest.TestCase):\n\n def setUp(self):\n self.device = Device(name='aDevice')\n self.device.os = 'iosxr'\n self.device.mapping={}\n self.device.mapping['cli']='cli'\n # Give the device as a connection type\n # This is done in order to call the parser on the output provided\n self.device.connectionmgr.connections['cli'] = self.device\n self.maxDiff = None\n\n def test_complete_output(self):\n mcast = Mcast(device=self.device)\n\n # Set outputs\n mcast.maker.outputs[ShowVrfAllDetail] = {'':McastOutput.ShowVrfAllDetail}\n \n # Return outputs above as inputs to parser when called\n self.device.execute = Mock()\n self.device.execute.side_effect = mapper\n\n # Learn the feature\n mcast.learn()\n\n # Verify Ops was created successfully\n self.assertEqual(mcast.info, McastOutput.McastInfo)\n self.assertEqual(mcast.table, McastOutput.McastTable)\n\n\n def test_selective_attribute(self):\n mcast = Mcast(device=self.device)\n\n # Set outputs\n mcast.maker.outputs[ShowVrfAllDetail] = {'':McastOutput.ShowVrfAllDetail}\n \n # Return outputs above as inputs to parser when called\n self.device.execute = Mock()\n self.device.execute.side_effect = mapper\n\n # Learn the feature\n mcast.learn()\n\n # Test specific attributes in info\n self.assertEqual(mcast.info['vrf']['VRF1']['address_family']['ipv4']\\\n ['mroute']['10.135.10.10/32']['path']\\\n ['192.168.1.0 GigabitEthernet1/0/0/0 10']['admin_distance'], 10)\n\n # Test specific attribute in table\n self.assertEqual(mcast.table['vrf']['default']['address_family']\\\n ['ipv4']['multicast_group']['192.168.3.11/24']\\\n ['source_address']['*']['flags'], 'D P')\n\n\n def test_empty_output(self):\n mcast = Mcast(device=self.device)\n\n # Set outputs\n mcast.maker.outputs[ShowVrfAllDetail] = {'':McastOutput.ShowVrfAllDetail}\n\n # Return outputs above as inputs to parser when called\n self.device.execute = Mock()\n self.device.execute.side_effect = ['', '', '', '', '', '', '', '', '',\\\n '', '', '', '', '', '', '', '', '',\\\n '', '', '', '', '', '', '', '', '',\\\n '', '', '', '', '']\n\n # Learn the feature\n mcast.learn()\n\n # Check no outputs in mcast.info\n with self.assertRaises(AttributeError):\n admin_distance = mcast.info['vrf']['VRF1']['address_family']\\\n ['ipv4']['mroute']['10.135.10.10/32']['path']\\\n ['192.168.1.0 GigabitEthernet1/0/0/0 10']['admin_distance']\n\n # Check no outputs in mcast.table\n with self.assertRaises(AttributeError):\n flags = 
mcast.table['vrf']['default']['address_family']['ipv4']\\\n ['multicast_group']['192.168.3.11/24']['source_address']['*']\\\n ['flags']\n\n\nif __name__ == '__main__':\n unittest.main()", "id": "1813386", "language": "Python", "matching_score": 4.137268543243408, "max_stars_count": 0, "path": "pkgs/ops-pkg/src/genie/libs/ops/mcast/iosxr/tests/test_mcast.py" }, { "content": "# Python\nimport unittest\nfrom copy import deepcopy\nfrom unittest.mock import Mock\n\n# ATS\nfrom ats.topology import Device\n\n# Genie\nfrom genie.libs.ops.interface.iosxr.interface import Interface\nfrom genie.libs.ops.interface.iosxr.tests.interface_output import InterfaceOutput\n\n# nxos show_interface\nfrom genie.libs.parser.iosxr.show_interface import ShowInterfacesDetail, \\\n ShowEthernetTags, \\\n ShowIpv4VrfAllInterface, \\\n ShowIpv6VrfAllInterface, \\\n ShowInterfacesAccounting\n\nfrom genie.libs.parser.iosxr.show_vrf import ShowVrfAllDetail\n\noutputs = {}\noutputs[\n 'show interface GigabitEthernet0/0/0/1 detail'] = \\\n InterfaceOutput.ShowInterfacesDetail_gi1\noutputs[\n 'show interfaces GigabitEthernet0/0/0/1 accounting'] = \\\n InterfaceOutput.ShowInterfacesAccounting_gi1\noutputs['show ethernet tags GigabitEthernet0/0/0/1'] = InterfaceOutput.ShowEthernetTag_gi1\noutputs['show ethernet tags'] = InterfaceOutput.ShowEthernetTags_all\noutputs['show vrf VRF1 detail'] = InterfaceOutput.ShowVrfAllDetail_vrf1\noutputs['show ipv4 vrf VRF1 interface'] = InterfaceOutput.ShowIpv4VrfAllInterface_vrf1\noutputs['show ipv6 vrf VRF1 interface'] = InterfaceOutput.ShowIpv6VrfAllInterface_vrf1\noutputs['show ipv4 vrf all interface'] = InterfaceOutput.ShowIpv4VrfAllInterface_all\noutputs['show ipv6 vrf all interface'] = InterfaceOutput.ShowIpv6VrfAllInterface_all\noutputs['show ipv6 vrf VRF1 interface GigabitEthernet0/0/0/1'] = InterfaceOutput.ShowIpv6VrfAllInterface_gi1\noutputs['show vrf all detail'] = InterfaceOutput.ShowVrfAllDetail_all\noutputs['show interfaces accounting'] = InterfaceOutput.ShowInterfacesAccounting_all\n\ndef mapper(key):\n return outputs[key]\n\n\nclass test_interface(unittest.TestCase):\n\n def setUp(self):\n self.device = Device(name='aDevice')\n self.device.os = 'iosxr'\n self.device.mapping = {}\n self.device.mapping['cli'] = 'cli'\n # Give the device as a connection type\n # This is done in order to call the parser on the output provided\n self.device.connectionmgr.connections['cli'] = self.device\n\n def test_complete_output(self):\n self.maxDiff = None\n intf = Interface(device=self.device)\n # Get outputs\n intf.maker.outputs[ShowInterfacesDetail] = \\\n {\"{'interface':''}\": InterfaceOutput.ShowInterfacesDetail}\n\n intf.maker.outputs[ShowEthernetTags] = \\\n {\"{'interface':''}\": InterfaceOutput.ShowEthernetTags}\n\n intf.maker.outputs[ShowVrfAllDetail] = \\\n {\"{'vrf':''}\": InterfaceOutput.ShowVrfAllDetail}\n\n intf.maker.outputs[ShowInterfacesAccounting] = \\\n {\"{'interface':''}\": InterfaceOutput.ShowInterfacesAccounting}\n self.device.execute = Mock()\n self.device.execute.side_effect = mapper\n # Learn the feature\n intf.learn()\n\n # Verify Ops was created successfully\n self.assertDictEqual(intf.info, InterfaceOutput.InterfaceOpsOutput_info)\n\n def test_empty_output(self):\n self.maxDiff = None\n intf = Interface(device=self.device)\n # Get outputs\n intf.maker.outputs[ShowInterfacesDetail] = {\"{'interface':''}\": ''}\n intf.maker.outputs[ShowIpv4VrfAllInterface] = {\"{'vrf':None,'interface':''}\": ''}\n intf.maker.outputs[ShowIpv6VrfAllInterface] = 
{\"{'vrf':None,'interface':''}\": ''}\n intf.maker.outputs[ShowVrfAllDetail] = {\"{'vrf':''}\": ''}\n intf.maker.outputs[ShowEthernetTags] = {\"{'interface':''}\": ''}\n intf.maker.outputs[ShowInterfacesAccounting] = {\"{'interface':''}\": ''}\n outputs['show ipv4 vrf all interface'] = ''\n outputs['show ipv6 vrf all interface'] = ''\n self.device.execute = Mock()\n self.device.execute.side_effect = mapper\n # Learn the feature\n intf.learn()\n\n # Check no attribute not found\n # info - vrf\n with self.assertRaises(AttributeError):\n vrf = (intf.info['MgmtEth0/0/CPU0/0']['type'])\n\n outputs['show ipv4 vrf all interface'] = InterfaceOutput.ShowIpv4VrfAllInterface_all\n outputs['show ipv6 vrf all interface'] = InterfaceOutput.ShowIpv6VrfAllInterface_all\n\n def test_custom_output(self):\n intf = Interface(device=self.device)\n # Get outputs\n intf.maker.outputs[ShowIpv4VrfAllInterface] = \\\n {\"{'vrf':''}\": InterfaceOutput.ShowIpv4VrfAllInterface}\n\n intf.maker.outputs[ShowIpv6VrfAllInterface] = \\\n {\"{'vrf':''}\": InterfaceOutput.ShowIpv6VrfAllInterface}\n\n intf.maker.outputs[ShowVrfAllDetail] = \\\n {\"{'vrf':''}\": InterfaceOutput.ShowVrfAllDetail}\n\n self.device.execute = Mock()\n self.device.execute.side_effect = mapper\n # Learn the feature\n intf.learn(interface='GigabitEthernet0/0/0/1', address_family='ipv6', vrf='VRF1')\n\n self.maxDiff = None\n # Verify Ops was created successfully\n self.assertDictEqual(intf.info, InterfaceOutput.interfaceOpsOutput_custom_info)\n\n def test_selective_attribute(self):\n self.maxDiff = None\n intf = Interface(device=self.device)\n # Get outputs\n intf.maker.outputs[ShowInterfacesDetail] = \\\n {\"{'interface':''}\": InterfaceOutput.ShowInterfacesDetail}\n\n intf.maker.outputs[ShowEthernetTags] = \\\n {\"{'interface':''}\": InterfaceOutput.ShowEthernetTags}\n\n intf.maker.outputs[ShowIpv4VrfAllInterface] = \\\n {\"{'vrf':''}\": InterfaceOutput.ShowIpv4VrfAllInterface}\n\n intf.maker.outputs[ShowIpv6VrfAllInterface] = \\\n {\"{'vrf':''}\": InterfaceOutput.ShowIpv6VrfAllInterface}\n\n intf.maker.outputs[ShowVrfAllDetail] = \\\n {\"{'vrf':''}\": InterfaceOutput.ShowVrfAllDetail}\n\n intf.maker.outputs[ShowInterfacesAccounting] = \\\n {\"{'interface':''}\": InterfaceOutput.ShowInterfacesAccounting}\n self.device.execute = Mock()\n self.device.execute.side_effect = mapper\n # Learn the feature\n intf.learn()\n\n # Check specific attribute values\n # info - type\n self.assertEqual(intf.info['MgmtEth0/0/CPU0/0']['type'], 'Management Ethernet')\n\n def test_incomplete_output(self):\n self.maxDiff = None\n intf = Interface(device=self.device)\n # Get outputs\n intf.maker.outputs[ShowInterfacesDetail] = \\\n {\"{'interface':''}\": InterfaceOutput.ShowInterfacesDetail}\n\n intf.maker.outputs[ShowEthernetTags] = \\\n {\"{'interface':''}\": InterfaceOutput.ShowEthernetTags}\n\n intf.maker.outputs[ShowVrfAllDetail] = \\\n {\"{'vrf':''}\": InterfaceOutput.ShowVrfAllDetail}\n\n intf.maker.outputs[ShowInterfacesAccounting] = \\\n {\"{'interface':''}\": InterfaceOutput.ShowInterfacesAccounting}\n outputs['show ipv4 vrf all interface'] = ''\n self.device.execute = Mock()\n self.device.execute.side_effect = mapper\n # Learn the feature\n intf.learn()\n\n # Delete missing specific attribute values\n expect_dict = deepcopy(InterfaceOutput.InterfaceOpsOutput_info)\n del (expect_dict['GigabitEthernet0/0/0/0']['ipv4'])\n del (expect_dict['GigabitEthernet0/0/0/1']['ipv4'])\n # Verify Ops was created successfully\n self.assertEqual(intf.info, expect_dict)\n 
outputs['show ipv4 vrf all interface'] = InterfaceOutput.ShowIpv4VrfAllInterface_all\n\nif __name__ == '__main__':\n unittest.main()\n", "id": "8761196", "language": "Python", "matching_score": 3.010913133621216, "max_stars_count": 0, "path": "pkgs/ops-pkg/src/genie/libs/ops/interface/iosxr/tests/test_interface.py" }, { "content": "# Python\nimport unittest\nfrom copy import deepcopy\nfrom unittest.mock import Mock\n\n# ATS\nfrom ats.topology import Device\n\n# Genie\nfrom genie.libs.ops.mld.nxos.mld import Mld\nfrom genie.libs.ops.mld.nxos.tests.mld_output import MldOutput\n\n# Parser\nfrom genie.libs.parser.nxos.show_mld import ShowIpv6MldInterface, \\\n ShowIpv6MldGroups, \\\n ShowIpv6MldLocalGroups\n\n\nclass test_mld(unittest.TestCase):\n\n def setUp(self):\n self.device = Device(name='aDevice')\n self.device.os = 'nxos'\n self.device.mapping={}\n self.device.mapping['cli']='cli'\n # Give the device as a connection type\n # This is done in order to call the parser on the output provided\n self.device.connectionmgr.connections['cli'] = self.device\n\n def test_complete_output(self):\n self.maxDiff = None\n mld = Mld(device=self.device)\n # Get outputs\n mld.maker.outputs[ShowIpv6MldInterface] = \\\n {\"{'vrf':'all'}\": MldOutput.ShowIpv6MldInterface}\n\n mld.maker.outputs[ShowIpv6MldGroups] = \\\n {\"{'vrf':'all'}\": MldOutput.ShowIpv6MldGroups}\n\n mld.maker.outputs[ShowIpv6MldLocalGroups] = \\\n {\"{'vrf':'all'}\": MldOutput.ShowIpv6MldLocalGroups}\n\n # Learn the feature\n mld.learn()\n\n # Verify Ops was created successfully\n self.assertEqual(mld.info, MldOutput.Mld_info)\n\n def test_empty_output(self):\n self.maxDiff = None\n mld = Mld(device=self.device)\n # Get outputs\n mld.maker.outputs[ShowIpv6MldInterface] = \\\n {\"{'vrf':'all'}\": {}}\n\n mld.maker.outputs[ShowIpv6MldGroups] = \\\n {\"{'vrf':'all'}\": {}}\n\n mld.maker.outputs[ShowIpv6MldLocalGroups] = \\\n {\"{'vrf':'all'}\": {}}\n\n # Learn the feature\n mld.learn()\n\n # Check no attribute not found\n with self.assertRaises(AttributeError):\n mld.info['vrfs']\n\n def test_selective_attribute(self):\n self.maxDiff = None\n mld = Mld(device=self.device)\n\n # Get outputs\n mld.maker.outputs[ShowIpv6MldInterface] = \\\n {\"{'vrf':'all'}\": MldOutput.ShowIpv6MldInterface}\n\n mld.maker.outputs[ShowIpv6MldGroups] = \\\n {\"{'vrf':'all'}\": MldOutput.ShowIpv6MldGroups}\n\n mld.maker.outputs[ShowIpv6MldLocalGroups] = \\\n {\"{'vrf':'all'}\": MldOutput.ShowIpv6MldLocalGroups}\n\n # Learn the feature\n mld.learn() \n\n # Check specific attribute values\n # info - default vrf\n self.assertEqual(mld.info['vrfs']['default']['interfaces']\\\n \t\t['Ethernet2/1']['group_policy'], 'test')\n # info - vrf VRF1\n self.assertEqual(mld.info['vrfs']['VRF1']['interfaces']\\\n ['Ethernet2/2']['group']\\\n ['fdf8:f53e:61e4::18']['last_reporter'], '2001:db8:8404:751c::1')\n\n def test_incomplete_output(self):\n self.maxDiff = None\n \n mld = Mld(device=self.device)\n\n # Get outputs\n mld.maker.outputs[ShowIpv6MldInterface] = \\\n {\"{'vrf':'all'}\": MldOutput.ShowIpv6MldInterface}\n\n mld.maker.outputs[ShowIpv6MldGroups] = \\\n {\"{'vrf':'all'}\": MldOutput.ShowIpv6MldGroups}\n\n mld.maker.outputs[ShowIpv6MldLocalGroups] = \\\n {\"{'vrf':'all'}\": {}}\n\n # Learn the feature\n mld.learn()\n\n # Delete missing specific attribute values\n expect_dict = deepcopy(MldOutput.Mld_info)\n del(expect_dict['vrfs']['default']['interfaces']['Ethernet2/1']['join_group'])\n del(expect_dict['vrfs']['default']['interfaces']['Ethernet2/1']['static_group'])\n 
del(expect_dict['vrfs']['VRF1']['interfaces']['Ethernet2/2']['join_group'])\n del(expect_dict['vrfs']['VRF1']['interfaces']['Ethernet2/2']['static_group'])\n\n \n # Verify Ops was created successfully\n self.assertEqual(mld.info, expect_dict)\n\n\nif __name__ == '__main__':\n unittest.main()\n", "id": "8298347", "language": "Python", "matching_score": 2.046863317489624, "max_stars_count": 0, "path": "pkgs/ops-pkg/src/genie/libs/ops/mld/nxos/tests/test_mld.py" }, { "content": "''' \nMLD Genie Ops Object for IOSXE - CLI.\n'''\n# super class\nfrom genie.libs.ops.mld.mld import Mld as SuperMld\n\n# iosxe show_vrf\nfrom genie.libs.parser.iosxe.show_vrf import ShowVrfDetail\n\n\nclass Mld(SuperMld):\n '''MLD Genie Ops Object'''\n\n def keys(self, item):\n '''return only the key as list from the item''' \n if isinstance(item, dict):\n return list(item.keys())\n\n def learn(self):\n '''Learn MLD Ops'''\n\n # get vrf list \n self.add_leaf(cmd=ShowVrfDetail,\n src='',\n dest='list_of_vrfs',\n action=self.keys)\n\n self.make()\n\n vrf_list = ['default']\n try:\n vrf_list.extend(self.list_of_vrfs)\n except AttributeError:\n pass\n else: \n # delete the list_of_vrfs in the info table\n del self.list_of_vrfs\n\n # loop for vrfs\n for vrf in sorted(vrf_list):\n\n # skip the vrf when it is mgmt-vrf\n if vrf == 'Mgmt-vrf':\n continue\n\n # create kwargs\n vrf_name = '' if vrf == 'default' else vrf\n \n ########################################################################\n # info\n ########################################################################\n\n # max_groups\n self.add_leaf(cmd='show ipv6 mld interface',\n src='[vrf][(?P<vrf>.*)][max_groups]',\n dest='info[vrfs][(?P<vrf>.*)][max_groups]',\n vrf=vrf_name)\n\n # groups_count is not supported on IOSXE\n\n # Interface path\n src = '[vrf][(?P<vrf>.*)][interface][(?P<interface>.*)]'\n dest = 'info[vrfs][(?P<vrf>.*)][interfaces][(?P<interface>.*)]'\n\n # interfaces\n # -- enable, group_policy\n # -- max_groups, query_interval, query_max_response_time\n # -- oper_status, querier, version\n # \n # immediate_leave, robustness_variable, joined_group are not supported on iosxe\n req_keys = ['[enable]', \n '[group_policy]', '[max_groups]', '[query_interval]',\n '[query_max_response_time]', '[oper_status]',\n '[querier]', '[version]']\n for key in req_keys:\n self.add_leaf(cmd='show ipv6 mld interface',\n src=src + '[{}]'.format(key),\n dest=dest + '[{}]'.format(key),\n vrf=vrf_name)\n\n # interfaces\n # -- join_group\n # -- group, source\n # -- static_group\n # -- group, source\n # -- group\n # -- up_time, expire, last_reporter, filter_mode, source\n # -- host_count, host, source<last-reporter> are not supported on iosxe\n req_keys = ['[join_group][(?P<join_group>.*)]',\n '[static_group][(?P<static_group>.*)]',\n '[group][(?P<group>.*)][up_time]',\n '[group][(?P<group>.*)][expire]',\n '[group][(?P<group>.*)][filter_mode]',\n '[group][(?P<group>.*)][last_reporter]',\n '[group][(?P<group>.*)][source][(?P<source>.*)][up_time]',\n '[group][(?P<group>.*)][source][(?P<source>.*)][expire]']\n for key in req_keys:\n self.add_leaf(cmd='show ipv6 mld groups detail',\n src=src + '[{}]'.format(key),\n dest=dest + '[{}]'.format(key),\n vrf=vrf_name)\n\n # make to write in cache\n self.make()\n\n if not hasattr(self, 'info') or \\\n 'vrfs' not in self.info or \\\n 'interfaces' not in self.info['vrfs'][vrf]:\n continue\n\n src = '[vrf][(?P<vrf>.*)]'\n dest = 'info[vrfs][(?P<vrf>.*)]'\n\n for intf in self.info['vrfs'][vrf]['interfaces']:\n if 'group' not in 
self.info['vrfs'][vrf]['interfaces'][intf]:\n continue\n\n for group in self.info['vrfs'][vrf]['interfaces'][intf]['group']:\n # ssm_map\n # group_range is not supported on iosxe\n req_keys = ['[ssm_map][(?P<ssm_map>.*)][source_addr]',\n '[ssm_map][(?P<ssm_map>.*)][group_address]']\n for key in req_keys:\n self.add_leaf(cmd='show ipv6 mld vrf {vrf} ssm-map {group}'.format(vrf=vrf, group=group),\n src=src + '[{}]'.format(key),\n dest=dest + '[{}]'.format(key),\n group=group, vrf=vrf_name)\n\n\n # make to write in cache\n self.make(final_call=True)", "id": "6248266", "language": "Python", "matching_score": 5.110480785369873, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/mld/iosxe/mld.py" }, { "content": "''' \nMLD Genie Ops Object for IOSXR - CLI.\n'''\n\n# super class\nfrom genie.libs.ops.mld.mld import Mld as SuperMld\n\n# Parser\nfrom genie.libs.parser.iosxr.show_mld import ShowMldSummaryInternal, \\\n ShowMldInterface, \\\n ShowMldGroupsDetail\n\n# iosxr show_vrf\nfrom genie.libs.parser.iosxr.show_vrf import ShowVrfAllDetail\n\nclass Mld(SuperMld):\n '''MLD Genie Ops Object'''\n\n def keys(self, item):\n '''return only the key as list from the item'''\n if isinstance(item, dict):\n return list(item.keys())\n\n def learn(self):\n '''Learn MLD Ops'''\n\n # get vrf list\n self.add_leaf(cmd = ShowVrfAllDetail,\n src = '',\n dest = 'list_of_vrfs',\n action = self.keys)\n\n self.make()\n\n vrf_list = ['default']\n try:\n vrf_list.extend(self.list_of_vrfs)\n except AttributeError:\n pass\n else: \n # delete the list_of_vrfs in the info table\n del self.list_of_vrfs\n\n # loop for vrfs\n for vrf in sorted(vrf_list):\n\n # skip the vrf when it is mgmt-vrf\n if vrf == 'Mgmt-vrf':\n continue\n\n # create kwargs\n vrf_name = '' if vrf == 'default' else vrf\n \n ####################################################################\n # info #\n ####################################################################\n\n # 'vrfs'\n # vrf\n # 'groups_count'\n self.add_leaf(cmd = ShowMldInterface,\n src = '[vrf][(?P<vrf>.*)][active_groups]',\n dest = 'info[vrfs][(?P<vrf>.*)][groups_count]',\n vrf = vrf_name)\n\n # Interface path\n intf_src = '[vrf][(?P<vrf>.*)][interface][(?P<interface>.*)]'\n intf_dest = 'info[vrfs][(?P<vrf>.*)][interfaces][(?P<interface>.*)]'\n\n # 'interfaces'\n # interface\n # 'enable'\n # 'group_policy' - N/A\n # 'immediate_leave' - N/A\n # 'max_groups'\n # 'query_interval'\n # 'query_max_response_time'\n # 'robustness_variable' - N/A\n # 'version'\n # 'oper_status'\n # 'querier'\n # 'joined_group' - N/A\n for key in ['enable', 'max_groups', 'query_interval', \n 'query_max_response_time', 'version', \n 'oper_status', 'querier']:\n self.add_leaf(cmd = ShowMldInterface,\n src = intf_src + '[{}]'.format(key),\n dest = intf_dest + '[{}]'.format(key),\n vrf = vrf_name)\n\n # 'interfaces'\n # interface\n # 'join_group'\n # 'group'\n # 'source'\n # 'static_group'\n # 'group'\n # 'source'\n for key in ['group', 'source']:\n self.add_leaf(cmd = ShowMldGroupsDetail,\n src = intf_src + '[join_group][(?P<join_group>.*)][{}]'.format(key),\n dest = intf_dest + '[join_group][(?P<join_group>.*)][{}]'.format(key),\n vrf = vrf_name)\n\n self.add_leaf(cmd = ShowMldGroupsDetail,\n src = intf_src + '[static_group][(?P<static_group>.*)][{}]'.format(key),\n dest = intf_dest + '[static_group][(?P<static_group>.*)][{}]'.format(key),\n vrf = vrf_name)\n\n # 'group'\n # mcast_group\n # 'expire'\n # 'filter_mode'\n # 'host_count' - N/A\n # 'up_time'\n # 'host' - N/A\n # 'last_reporter'\n for key 
in ['expire', 'filter_mode', 'up_time','last_reporter']:\n self.add_leaf(cmd = ShowMldGroupsDetail,\n src = intf_src + '[group][(?P<group>.*)][{}]'.format(key),\n dest = intf_dest + '[group][(?P<group>.*)][{}]'.format(key),\n vrf = vrf_name)\n\n # 'group'\n # mcast_group\n # 'source'\n # source\n # 'expire'\n # 'up_time'\n # 'last_reporter' - N/A\n for key in ['expire', 'up_time']:\n self.add_leaf(cmd = ShowMldGroupsDetail,\n src = intf_src + '[group][(?P<group>.*)][source][(?P<source>.*)][{}]'.format(key),\n dest = intf_dest + '[group][(?P<group>.*)][source][(?P<source>.*)][{}]'.format(key),\n vrf = vrf_name)\n\n\n # make to write in cache\n self.make(final_call = True)\n", "id": "4010042", "language": "Python", "matching_score": 5.636631011962891, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/mld/iosxr/mld.py" }, { "content": "''' \nMLD Genie Ops Object for NXOS - CLI.\n'''\n# super class\nfrom genie.libs.ops.mld.mld import Mld as SuperMld\n\n# Parser\nfrom genie.libs.parser.nxos.show_mld import ShowIpv6MldInterface, \\\n ShowIpv6MldGroups, \\\n ShowIpv6MldLocalGroups\n\nclass Mld(SuperMld):\n '''MLD Genie Ops Object'''\n\n def learn(self):\n '''Learn MLD Ops'''\n \n ########################################################################\n # info\n ########################################################################\n\n # max_groups, ssm_map is not supported on NXOS ops\n\n # groups_count\n self.add_leaf(cmd=ShowIpv6MldGroups,\n src='[vrfs][(?P<vrf>.*)][groups_count]',\n dest='info[vrfs][(?P<vrf>.*)][groups_count]',\n vrf='all')\n\n # Interface path\n src = '[vrfs][(?P<vrf>.*)][interface][(?P<interface>.*)]'\n dest = 'info[vrfs][(?P<vrf>.*)][interfaces][(?P<interface>.*)]'\n\n # interfaces\n # -- enable, , group_policy, immediate_leave, version\n # -- max_groups, query_interval, query_max_response_time\n # -- oper_status, querier, robustness_variable\n # \n # joined_group are not supported on iosxe\n intf_req_keys = ['[enable]', '[immediate_leave]', '[robustness_variable]',\n '[group_policy]', '[max_groups]', '[query_interval]',\n '[query_max_response_time]', '[oper_status]',\n '[querier]', '[version]']\n # interfaces\n # -- join_group\n # -- group, source\n # -- static_group\n # -- group, source\n local_groups_req_keys = ['[join_group][(?P<join_group>.*)]',\n '[static_group][(?P<static_group>.*)]']\n # interfaces\n # -- group\n # -- up_time, expire, source, last_reporter, source\n # -- host_count, host are not supported on nxos\n # -- filter_mode is not supproted on nxos\n groups_req_keys = ['[group][(?P<group>.*)][up_time]',\n '[group][(?P<group>.*)][expire]',\n '[group][(?P<group>.*)][last_reporter]',\n '[group][(?P<group>.*)][source][(?P<source>.*)][up_time]',\n '[group][(?P<group>.*)][source][(?P<source>.*)][expire]',\n '[group][(?P<group>.*)][source][(?P<source>.*)][last_reporter]']\n\n # create cmd list dictionary\n req_dict = {ShowIpv6MldInterface: intf_req_keys,\n ShowIpv6MldLocalGroups: local_groups_req_keys,\n ShowIpv6MldGroups: groups_req_keys}\n\n for cmd, req_keys in req_dict.items():\n for key in req_keys:\n self.add_leaf(cmd=cmd,\n src=src + '[{}]'.format(key),\n dest=dest + '[{}]'.format(key),\n vrf='all')\n\n # make to write in cache\n self.make(final_call=True)", "id": "9074209", "language": "Python", "matching_score": 3.280839443206787, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/mld/nxos/mld.py" }, { "content": "''' \nMld Genie Ops Object Outputs for IOSXR.\n'''\n\n\nclass MldOutput(object):\n\n # from: 
genieparser/src/genie/libs/parser/iosxr/tests/test_show_vrf.py -> golden_parsed_output\n ShowVrfAllDetail = {\n \"VRF1\": {\n \"description\": \"not set\",\n \"vrf_mode\": \"regular\",\n \"address_family\": {\n \"ipv6 unicast\": {\n \"route_target\": {\n \"400:1\": {\n \"rt_type\": \"import\",\n \"route_target\": \"400:1\"\n },\n \"300:1\": {\n \"rt_type\": \"import\",\n \"route_target\": \"300:1\"\n },\n \"200:1\": {\n \"rt_type\": \"both\",\n \"route_target\": \"200:1\"\n },\n \"200:2\": {\n \"rt_type\": \"import\",\n \"route_target\": \"200:2\"\n }\n }\n },\n \"ipv4 unicast\": {\n \"route_target\": {\n \"400:1\": {\n \"rt_type\": \"import\",\n \"route_target\": \"400:1\"\n },\n \"300:1\": {\n \"rt_type\": \"import\",\n \"route_target\": \"300:1\"\n },\n \"200:1\": {\n \"rt_type\": \"both\",\n \"route_target\": \"200:1\"\n },\n \"200:2\": {\n \"rt_type\": \"import\",\n \"route_target\": \"200:2\"\n }\n }\n }\n },\n \"route_distinguisher\": \"200:1\",\n \"interfaces\": [\n \"GigabitEthernet0/0/0/1\"\n ]\n }\n }\n\n # show mld summary internal\n # from: test_show_mld.py -> golden_parsed_output1\n ShowMldSummaryInternal_default = {\n 'vrf': {\n 'default': {\n 'disabled_intf': 0,\n 'enabled_intf': 1,\n 'interface': {\n 'GigabitEthernet0/0/0/0': {\n 'igmp_r_uptime': '1d06h',\n 'last_query': '00:29:26',\n 'last_report': '00:04:16',\n 'max_groups': 6400,\n 'num_groups': 13,\n 'on': True,\n 'parent': '0x0'\n }\n },\n 'max_num_groups_x_intfs': 75000,\n 'mte_tuple_count': 0,\n 'num_groups_x_intf': 13,\n 'robustness_value': 10,\n 'supported_intf': 1,\n 'unsupported_intf': 0\n }\n }\n }\n\n # from: test_show_mld.py -> golden_output2\n ShowMldSummaryInternal_VRF = '''\\\n RP/0/0/CPU0:ios#show mld vrf VRF1 summary internal \n\n Robustness Value 10\n No. of Group x Interfaces 10\n Maximum number of Group x Interfaces 75000\n\n Supported Interfaces : 1\n Unsupported Interfaces : 0\n Enabled Interfaces : 1\n Disabled Interfaces : 0\n\n MTE tuple count : 0\n\n Interface Number Max # On Parent Last Last IGMP R\n Groups Groups query Report Uptime\n GigabitEthernet0/0/0/1 10 6400 Y 0x0 00:00:03 00:00:01 1d06h\n '''\n\n # show mld interface\n # from: test_show_mld.py -> golden_parsed_output1\n ShowMldInterface_default = {\n 'vrf': {\n 'default': {\n 'interface': {\n 'GigabitEthernet0/0/0/0': {\n 'counters': {\n 'joins': 18,\n 'leaves': 5\n },\n 'enable': True,\n 'internet_address': 'fe80::5054:ff:fefa:9ad7',\n 'interface_status': 'up',\n 'last_member_query_interval': 1,\n 'oper_status': 'up',\n 'querier': 'fe80::5054:ff:fed7:c01f',\n 'querier_timeout': 3666,\n 'query_interval': 366,\n 'query_max_response_time': 12,\n 'time_elapsed_since_igmp_router_enabled': '1d06h',\n 'time_elapsed_since_last_query_sent': '00:30:16',\n 'time_elapsed_since_last_report_received': '00:05:05',\n 'version': 2\n }\n }\n }\n }\n }\n\n ShowMldInterface_VRF = '''\\\n RP/0/0/CPU0:ios#show mld vrf VRF1 interface\n\n GigabitEthernet0/0/0/1 is up, line protocol is up\n Internet address is fe80::5054:ff:fe35:f846\n MLD is enabled on interface\n Current MLD version is 2\n MLD query interval is 366 seconds\n MLD querier timeout is 3666 seconds\n MLD max query response time is 12 seconds\n Last member query response interval is 1 seconds\n MLD activity: 12 joins, 2 leaves\n MLD querying router is fe80::5054:ff:fe35:f846 (this system)\n Time elapsed since last query sent 00:00:53\n Time elapsed since IGMP router enabled 1d06h\n Time elapsed since last report received 00:00:51\n '''\n\n \"\"\"\n # show mld ssm map detail\n # from: 
test_show_mld.py -> golden_parsed_output1\n ShowMldSsmMapDetail_default = {\n \n }\n\n # from: test_show_mld.py -> golden_output2\n ShowMldSsmMapDetail_VRF = '''\\\n '''\n\n \"\"\"\n\n # show mld groups detail\n # from: test_show_mld.py -> golden_parsed_output1\n ShowMldGroupsDetail_default = {\n 'vrf': {\n 'default': {\n 'interface': {\n 'GigabitEthernet0/0/0/0': {\n 'group': {\n 'fc00:e968:6179::de52:7100': {\n 'expire': 'never',\n 'filter_mode': 'exclude',\n 'host_mode': 'exclude',\n 'last_reporter': 'fe80::5054:ff:fefa:9ad7',\n 'up_time': '1d06h'\n },\n 'fc00:db20:35b:7399::5:ff28:cd4b': {\n 'expire': '01:00:01',\n 'filter_mode': 'exclude',\n 'host_mode': 'include',\n 'last_reporter': 'fe80::eca7:a4ff:fe28:cd4b',\n 'up_time': '1d06h'\n },\n 'fc00:db20:35b:7399::5:ff60:50aa': {\n 'expire': '01:00:01',\n 'filter_mode': 'exclude',\n 'host_mode': 'include',\n 'last_reporter': 'fe80::eca7:a4ff:fe28:cd4b',\n 'up_time': '1d06h'\n },\n 'fc00:db20:35b:7399::5:ffae:4aba': {\n 'expire': '01:00:01',\n 'filter_mode': 'exclude',\n 'host_mode': 'include',\n 'last_reporter': 'fe80::eca7:a4ff:fe28:cd4b',\n 'up_time': '1d06h'\n },\n 'fc00:db20:35b:7399::5:ffd7:c01f': {\n 'expire': '00:29:15',\n 'filter_mode': 'exclude',\n 'host_mode': 'include',\n 'last_reporter': 'fe80::5054:ff:fed7:c01f',\n 'up_time': '00:33:19'\n },\n 'fc00:db20:35b:7399::5:ffda:f428': {\n 'expire': '01:00:01',\n 'filter_mode': 'exclude',\n 'host_mode': 'include',\n 'last_reporter': 'fe80::eca7:a4ff:fe28:cd4b',\n 'up_time': '06:27:46'\n },\n 'fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b': {\n 'expire': 'never',\n 'filter_mode': 'exclude',\n 'host_mode': 'exclude',\n 'last_reporter': 'fe80::5054:ff:fefa:9ad7',\n 'up_time': '1d06h'\n },\n 'fc00:e968:6179::de52:7100': {\n 'expire': 'never',\n 'filter_mode': 'exclude',\n 'host_mode': 'exclude',\n 'last_reporter': 'fe80::5054:ff:fefa:9ad7',\n 'up_time': '1d06h'\n },\n 'fdf8:f53e:61e4::18': {\n 'filter_mode': 'include',\n 'host_mode': 'include',\n 'last_reporter': 'fe80::5054:ff:fefa:9ad7',\n 'source': {\n '2001:db8:2:2::2': {\n 'expire': '01:00:00',\n 'flags': 'Remote Local 2d',\n 'forward': True,\n 'up_time': '08:06:00'\n }\n },\n 'up_time': '08:06:00'\n },\n 'fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b': {\n 'expire': 'never',\n 'filter_mode': 'exclude',\n 'host_mode': 'exclude',\n 'last_reporter': 'fe80::5054:ff:fefa:9ad7',\n 'up_time': '08:06:00'\n },\n 'fc00:e968:6179::de52:7100': {\n 'filter_mode': 'include',\n 'host_mode': 'include',\n 'last_reporter': 'fe80::5054:ff:fefa:9ad7',\n 'source': {\n '2001:db8:3:3::3': {\n 'expire': '01:00:00',\n 'flags': 'Remote Local e',\n 'forward': True,\n 'up_time': '00:33:28'\n }\n },\n 'up_time': '00:33:28'\n },\n 'fdf8:f53e:61e4::18': {\n 'expire': 'never',\n 'filter_mode': 'exclude',\n 'host_mode': 'exclude',\n 'last_reporter': 'fe80::5054:ff:fefa:9ad7',\n 'up_time': '00:33:28'\n },\n 'fc00:db20:35b:7399::5': {\n 'expire': '00:59:49',\n 'filter_mode': 'exclude',\n 'host_mode': 'include',\n 'last_reporter': 'fe80::5054:ff:fed7:c01f',\n 'up_time': '07:59:31'\n }\n },\n 'join_group': {\n 'fdf8:f53e:61e4::18 2001:db8:2:2::2': {\n 'group': 'fdf8:f53e:61e4::18',\n 'source': '2001:db8:2:2::2'\n }\n },\n 'static_group': {\n 'fc00:e968:6179::de52:7100 2001:db8:3:3::3': {\n 'group': 'fc00:e968:6179::de52:7100',\n 'source': '2001:db8:3:3::3'\n }\n }\n }\n }\n }\n }\n }\n\n # from: test_show_mld.py -> golden_output2\n ShowMldGroupsDetail_VRF = '''\\\n RP/0/0/CPU0:ios#show mld vrf VRF1 groups detail\n\n Interface: GigabitEthernet0/0/0/1\n Group: 
fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b\n Uptime: 1d06h\n Router mode: EXCLUDE (Expires: never)\n Host mode: EXCLUDE\n Last reporter: fe80::5054:ff:fe35:f846\n Source list is empty\n Interface: GigabitEthernet0/0/0/1\n Group: fc00:e968:6179::de52:7100\n Uptime: 1d06h\n Router mode: EXCLUDE (Expires: never)\n Host mode: EXCLUDE\n Last reporter: fe80::5054:ff:fe35:f846\n Source list is empty\n Interface: GigabitEthernet0/0/0/1\n Group: fc00:e968:6179::de52:7100\n Uptime: 1d06h\n Router mode: EXCLUDE (Expires: never)\n Host mode: EXCLUDE\n Last reporter: fe80::5054:ff:fe35:f846\n Source list is empty\n Interface: GigabitEthernet0/0/0/1\n Group: fc00:db20:35b:7399::5:ff00:1\n Uptime: 09:00:17\n Router mode: EXCLUDE (Expires: 00:58:14)\n Host mode: INCLUDE\n Last reporter: fe80::5054:ff:fe7c:dc70\n Source list is empty\n Interface: GigabitEthernet0/0/0/1\n Group: fc00:db20:35b:7399::5:ff24:c88d\n Uptime: 1d06h\n Router mode: EXCLUDE (Expires: 00:58:30)\n Host mode: INCLUDE\n Last reporter: fe80::7c2f:c2ff:fe24:c88d\n Source list is empty\n Interface: GigabitEthernet0/0/0/1\n Group: fc00:db20:35b:7399::5:ff7c:dc70\n Uptime: 09:00:17\n Router mode: EXCLUDE (Expires: 00:58:14)\n Host mode: INCLUDE\n Last reporter: fe80::5054:ff:fe7c:dc70\n Source list is empty\n Interface: GigabitEthernet0/0/0/1\n Group: fdf8:f53e:61e4::18\n Uptime: 08:11:27\n Router mode: INCLUDE\n Host mode: INCLUDE\n Last reporter: fe80::5054:ff:fe35:f846\n Group source list:\n Source Address Uptime Expires Fwd Flags\n 2001:db8:2:2::2 08:10:33 00:58:30 Yes Remote Local 2d\n Interface: GigabitEthernet0/0/0/1\n Group: fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b\n Uptime: 08:11:12\n Router mode: EXCLUDE (Expires: never)\n Host mode: EXCLUDE\n Last reporter: fe80::5054:ff:fe35:f846\n Source list is empty\n Interface: GigabitEthernet0/0/0/1\n Group: fc00:e968:6179::de52:7100\n Uptime: 00:39:52\n Router mode: INCLUDE\n Host mode: INCLUDE\n Last reporter: fe80::5054:ff:fe35:f846\n Group source list:\n Source Address Uptime Expires Fwd Flags\n 2001:db8:3:3::3 00:39:52 00:58:30 Yes Remote Local e\n Interface: GigabitEthernet0/0/0/1\n Group: fdf8:f53e:61e4::18\n Uptime: 00:39:44\n Router mode: EXCLUDE (Expires: never)\n Host mode: EXCLUDE\n Last reporter: fe80::5054:ff:fe35:f846\n Source list is empty \n '''\n\n Mld_info = {\n 'vrfs': {\n 'VRF1': {\n 'interfaces': {\n 'GigabitEthernet0/0/0/1': {\n 'enable': True,\n 'group': {\n 'fc00:e968:6179::de52:7100': {\n 'expire': 'never',\n 'last_reporter': 'fe80::5054:ff:fe35:f846',\n 'up_time': '1d06h'\n },\n 'fc00:db20:35b:7399::5:ff00:1': {\n 'expire': '00:58:14',\n 'last_reporter': 'fe80::5054:ff:fe7c:dc70',\n 'up_time': '09:00:17'\n },\n 'fc00:db20:35b:7399::5:ff24:c88d': {\n 'expire': '00:58:30',\n 'last_reporter': 'fe80::7c2f:c2ff:fe24:c88d',\n 'up_time': '1d06h'\n },\n 'fc00:db20:35b:7399::5:ff7c:dc70': {\n 'expire': '00:58:14',\n 'last_reporter': 'fe80::5054:ff:fe7c:dc70',\n 'up_time': '09:00:17'\n },\n 'fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b': {\n 'expire': 'never',\n 'last_reporter': 'fe80::5054:ff:fe35:f846',\n 'up_time': '1d06h'\n },\n 'fc00:e968:6179::de52:7100': {\n 'expire': 'never',\n 'last_reporter': 'fe80::5054:ff:fe35:f846',\n 'up_time': '1d06h'\n },\n 'fdf8:f53e:61e4::18': {\n 'last_reporter': 'fe80::5054:ff:fe35:f846',\n 'source': {\n '2001:db8:2:2::2': {\n 'expire': '00:58:30',\n 'up_time': '08:10:33'\n }\n },\n 'up_time': '08:11:27'\n },\n 'fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b': {\n 'expire': 'never',\n 'last_reporter': 'fe80::5054:ff:fe35:f846',\n 'up_time': '08:11:12'\n 
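# NOTE: several keys in this 'group' dict are literal duplicates (e.g.\n # 'fc00:e968:6179::de52:7100' and 'fdf8:f53e:61e4::18' each appear more\n # than once); in a dict literal the last occurrence silently wins, so only\n # one entry per duplicated address survives at runtime -- the addresses\n # look scrubbed, which is why they collide.\n 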
},\n 'fc00:e968:6179::de52:7100': {\n 'last_reporter': 'fe80::5054:ff:fe35:f846',\n 'source': {\n '2001:db8:3:3::3': {\n 'expire': '00:58:30',\n 'up_time': '00:39:52'\n }\n },\n 'up_time': '00:39:52'\n },\n 'fdf8:f53e:61e4::18': {\n 'expire': 'never',\n 'last_reporter': 'fe80::5054:ff:fe35:f846',\n 'up_time': '00:39:44'\n }\n },\n 'join_group': {\n 'fdf8:f53e:61e4::18 2001:db8:2:2::2': {\n 'group': 'fdf8:f53e:61e4::18',\n 'source': '2001:db8:2:2::2'\n }\n },\n 'oper_status': 'up',\n 'query_interval': 366,\n 'query_max_response_time': 12,\n 'static_group': {\n 'fc00:e968:6179::de52:7100 2001:db8:3:3::3': {\n 'group': 'fc00:e968:6179::de52:7100',\n 'source': '2001:db8:3:3::3'\n }\n },\n 'version': 2\n }\n }\n },\n 'default': {\n 'interfaces': {\n 'GigabitEthernet0/0/0/0': {\n 'enable': True,\n 'group': {\n 'fc00:e968:6179::de52:7100': {\n 'expire': 'never',\n 'filter_mode': 'exclude',\n 'last_reporter': 'fe80::5054:ff:fefa:9ad7',\n 'up_time': '1d06h'\n },\n 'fc00:db20:35b:7399::5:ff28:cd4b': {\n 'expire': '01:00:01',\n 'filter_mode': 'exclude',\n 'last_reporter': 'fe80::eca7:a4ff:fe28:cd4b',\n 'up_time': '1d06h'\n },\n 'fc00:db20:35b:7399::5:ff60:50aa': {\n 'expire': '01:00:01',\n 'filter_mode': 'exclude',\n 'last_reporter': 'fe80::eca7:a4ff:fe28:cd4b',\n 'up_time': '1d06h'\n },\n 'fc00:db20:35b:7399::5:ffae:4aba': {\n 'expire': '01:00:01',\n 'filter_mode': 'exclude',\n 'last_reporter': 'fe80::eca7:a4ff:fe28:cd4b',\n 'up_time': '1d06h'\n },\n 'fc00:db20:35b:7399::5:ffd7:c01f': {\n 'expire': '00:29:15',\n 'filter_mode': 'exclude',\n 'last_reporter': 'fe80::5054:ff:fed7:c01f',\n 'up_time': '00:33:19'\n },\n 'fc00:db20:35b:7399::5:ffda:f428': {\n 'expire': '01:00:01',\n 'filter_mode': 'exclude',\n 'last_reporter': 'fe80::eca7:a4ff:fe28:cd4b',\n 'up_time': '06:27:46'\n },\n 'fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b': {\n 'expire': 'never',\n 'filter_mode': 'exclude',\n 'last_reporter': 'fe80::5054:ff:fefa:9ad7',\n 'up_time': '1d06h'\n },\n 'fffd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b': {\n 'expire': 'never',\n 'filter_mode': 'exclude',\n 'last_reporter': 'fe80::5054:ff:fefa:9ad7',\n 'up_time': '1d06h'\n },\n 'fdf8:f53e:61e4::18': {\n 'filter_mode': 'include',\n 'last_reporter': 'fe80::5054:ff:fefa:9ad7',\n 'source': {\n '2001:db8:2:2::2': {\n 'expire': '01:00:00',\n 'up_time': '08:06:00'\n }\n },\n 'up_time': '08:06:00'\n },\n 'fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b': {\n 'expire': 'never',\n 'filter_mode': 'exclude',\n 'last_reporter': 'fe80::5054:ff:fefa:9ad7',\n 'up_time': '08:06:00'\n },\n 'fc00:e968:6179::de52:7100': {\n 'filter_mode': 'include',\n 'last_reporter': 'fe80::5054:ff:fefa:9ad7',\n 'source': {\n '2001:db8:3:3::3': {\n 'expire': '01:00:00',\n 'up_time': '00:33:28'\n }\n },\n 'up_time': '00:33:28'\n },\n 'fdf8:f53e:61e4::18': {\n 'expire': 'never',\n 'filter_mode': 'exclude',\n 'last_reporter': 'fe80::5054:ff:fefa:9ad7',\n 'up_time': '00:33:28'\n },\n 'fc00:db20:35b:7399::5': {\n 'expire': '00:59:49',\n 'filter_mode': 'exclude',\n 'last_reporter': 'fe80::5054:ff:fed7:c01f',\n 'up_time': '07:59:31'\n }\n },\n 'join_group': {\n 'fdf8:f53e:61e4::18 2001:db8:2:2::2': {\n 'group': 'fdf8:f53e:61e4::18',\n 'source': '2001:db8:2:2::2'\n }\n },\n 'oper_status': 'up',\n 'querier': 'fe80::5054:ff:fed7:c01f',\n 'query_interval': 366,\n 'query_max_response_time': 12,\n 'static_group': {\n 'fc00:e968:6179::de52:7100 2001:db8:3:3::3': {\n 'group': 'fc00:e968:6179::de52:7100',\n 'source': '2001:db8:3:3::3'\n }\n },\n 'version': 2\n }\n }\n }\n }\n }\n", "id": "10491080", "language": "Python", 
"matching_score": 5.9339919090271, "max_stars_count": 0, "path": "pkgs/ops-pkg/src/genie/libs/ops/mld/iosxr/tests/mld_output.py" }, { "content": "''' \nMld Genie Ops Object Outputs for IOS\n'''\n\n\nclass MldOutput(object):\n\n ShowVrfDetail = {\n\n \"Mgmt-vrf\": {\n \"vrf_id\": 1,\n \"interfaces\": [\n \"GigabitEthernet0/0\"\n ],\n \"address_family\": {\n \"ipv4 unicast\": {\n \"table_id\": \"0x1\",\n \"flags\": \"0x0\",\n \"vrf_label\": {\n 'allocation_mode': 'per-prefix'\n }\n },\n \"ipv6 unicast\": {\n \"table_id\": \"0x1E000001\",\n \"flags\": \"0x0\",\n \"vrf_label\": {\n 'allocation_mode': 'per-prefix'\n }\n }\n },\n \"flags\": \"0x1808\"\n },\n \"VRF1\": {\n \"interfaces\": [\n \"GigabitEthernet0/0\"\n ],\n \"address_family\": {\n \"ipv4 unicast\": {\n \"export_to_global\": {\n \"export_to_global_map\": \"export_to_global_map\",\n \"prefix_limit\": 1000\n },\n \"import_from_global\": {\n \"prefix_limit\": 1000,\n \"import_from_global_map\": \"import_from_global_map\"\n },\n \"table_id\": \"0x1\",\n \"routing_table_limit\": {\n \"routing_table_limit_action\": {\n \"enable_alert_limit_number\": {\n \"alert_limit_number\": 10000\n }\n }\n },\n \"route_targets\": {\n \"200:1\": {\n \"rt_type\": \"both\",\n \"route_target\": \"200:1\"\n },\n \"100:1\": {\n \"rt_type\": \"both\",\n \"route_target\": \"100:1\"\n }\n },\n \"flags\": \"0x2100\",\n \"vrf_label\": {\n 'allocation_mode': 'per-prefix'\n }\n },\n \"ipv6 unicast\": {\n \"export_to_global\": {\n \"export_to_global_map\": \"export_to_global_map\",\n \"prefix_limit\": 1000\n },\n \"table_id\": \"0x1E000001\",\n \"routing_table_limit\": {\n \"routing_table_limit_action\": {\n \"enable_alert_percent\": {\n \"alert_percent_value\": 70\n },\n \"enable_alert_limit_number\": {\n \"alert_limit_number\": 7000\n }\n },\n \"routing_table_limit_number\": 10000\n },\n \"route_targets\": {\n \"200:1\": {\n \"rt_type\": \"import\",\n \"route_target\": \"200:1\"\n },\n \"400:1\": {\n \"rt_type\": \"import\",\n \"route_target\": \"400:1\"\n },\n \"300:1\": {\n \"rt_type\": \"export\",\n \"route_target\": \"300:1\"\n },\n \"100:1\": {\n \"rt_type\": \"export\",\n \"route_target\": \"100:1\"\n }\n },\n \"flags\": \"0x100\",\n \"vrf_label\": {\n 'allocation_mode': 'per-prefix'\n }\n }\n },\n \"flags\": \"0x180C\",\n \"route_distinguisher\": \"100:1\",\n \"vrf_id\": 1\n }\n }\n\n ShowIpv6MldInterface_default = {\n \"vrf\": {\n \"default\": {\n \"interface\": {\n \"Tunnel0\": {\n \"oper_status\": \"up\",\n \"interface_adress\": \"FE80::21E:BDFF:FEBA:D000/10\",\n \"enable\": False,\n \"interface_status\": \"up\"\n },\n \"VoIP-Null0\": {\n \"oper_status\": \"up\",\n \"interface_adress\": \"::/0\",\n \"enable\": False,\n \"interface_status\": \"up\"\n },\n \"LIIN0\": {\n \"oper_status\": \"up\",\n \"interface_adress\": \"::/0\",\n \"enable\": False,\n \"interface_status\": \"up\"\n },\n \"GigabitEthernet1\": {\n \"oper_status\": \"up\",\n \"querier_timeout\": 740,\n \"active_groups\": 0,\n \"group_policy\": \"test\",\n \"query_interval\": 366,\n \"version\": 2,\n \"query_this_system\": True,\n \"querier\": \"FE80::5054:FF:FE7C:DC70\",\n \"interface_status\": \"up\",\n \"last_member_query_interval\": 1,\n \"counters\": {\n \"leaves\": 2,\n \"joins\": 11\n },\n \"max_groups\": 6400,\n \"query_max_response_time\": 16,\n \"enable\": True,\n \"interface_adress\": \"FE80::5054:FF:FE7C:DC70/10\"\n },\n \"GigabitEthernet3\": {\n \"oper_status\": \"down\",\n \"interface_adress\": \"::/0\",\n \"enable\": False,\n \"interface_status\": \"administratively down\"\n 
},\n \"Null0\": {\n \"oper_status\": \"up\",\n \"interface_adress\": \"FE80::1/10\",\n \"enable\": False,\n \"interface_status\": \"up\"\n }\n },\n \"max_groups\": 64000,\n \"active_groups\": 0\n }\n }\n }\n\n ShowIpv6MldInterface_VRF1 = '''\\\n R4# show ipv6 mld vrf VRF1 interface\n\n Global State Limit : 0 active out of 64000 max\n GigabitEthernet2 is up, line protocol is up\n Internet address is FE80::5054:FF:FEDD:BB49/10\n MLD is enabled on interface\n Current MLD version is 2\n MLD query interval is 366 seconds\n MLD querier timeout is 740 seconds\n MLD max query response time is 16 seconds\n Last member query response interval is 1 seconds\n Interface State Limit : 0 active out of 6400 max\n MLD activity: 9 joins, 0 leaves\n MLD querying router is FE80::5054:FF:FEDD:BB49 (this system)\n Tunnel1 is up, line protocol is up\n Internet address is FE80::21E:BDFF:FEBA:D000/10\n MLD is disabled on interface\n '''\n\n ShowIpv6MldGroupsDetail_default = {\n \"vrf\": {\n \"default\": {\n \"interface\": {\n \"GigabitEthernet1\": {\n \"group\": {\n \"fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b\": {\n \"up_time\": \"08:14:15\",\n \"source\": {\n \"2001:DB8:2:2::2\": {\n \"forward\": True,\n \"up_time\": \"08:13:22\",\n \"flags\": \"Remote Local 2D\",\n \"expire\": \"00:06:42\"\n }\n },\n \"filter_mode\": \"include\",\n \"host_mode\": \"include\",\n \"last_reporter\": \"FE80::5054:FF:FE7C:DC70\"\n },\n \"fc00:db20:35b:7399::5\": {\n \"up_time\": \"08:14:01\",\n \"filter_mode\": \"exclude\",\n \"last_reporter\": \"FE80::5054:FF:FE7C:DC70\",\n \"host_mode\": \"exclude\",\n \"expire\": \"never\"\n },\n \"fc00:db20:35b:7399::5\": {\n \"up_time\": \"00:42:41\",\n \"source\": {\n \"2001:DB8:3:3::3\": {\n \"forward\": True,\n \"up_time\": \"00:42:41\",\n \"flags\": \"Remote Local E\",\n \"expire\": \"00:06:42\"\n }\n },\n \"filter_mode\": \"include\",\n \"host_mode\": \"include\",\n \"last_reporter\": \"FE80::5054:FF:FE7C:DC70\"\n },\n \"fc00:e968:6179::de52:7100\": {\n \"up_time\": \"00:42:32\",\n \"filter_mode\": \"exclude\",\n \"last_reporter\": \"FE80::5054:FF:FE7C:DC70\",\n \"host_mode\": \"exclude\",\n \"expire\": \"never\"\n }\n },\n \"join_group\": {\n \"fc00:db20:35b:7399::5 2001:DB8:3:3::3\": {\n \"group\": \"fc00:db20:35b:7399::5\",\n \"source\": \"2001:DB8:3:3::3\"\n },\n \"fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b 2001:DB8:2:2::2\": {\n \"group\": \"fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b\",\n \"source\": \"2001:DB8:2:2::2\"\n }\n },\n \"static_group\": {\n \"fc00:db20:35b:7399::5 2001:DB8:3:3::3\": {\n \"group\": \"fc00:db20:35b:7399::5\",\n \"source\": \"2001:DB8:3:3::3\"\n }\n }\n }\n }\n }\n }\n }\n\n ShowIpv6MldGroupsDetail_VRF1 = '''\\\n R4# show ipv6 mld vrf VRF1 groups detail\n\n Interface: GigabitEthernet2\n Group: fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b\n Uptime: 08:14:20\n Router mode: INCLUDE\n Host mode: INCLUDE\n Last reporter: FE80::5054:FF:FEDD:BB49\n Group source list:\n Source Address Uptime Expires Fwd Flags\n 2001:DB8:2:2::2 08:13:56 00:12:23 Yes Remote Local 2D\n Interface: GigabitEthernet2\n Group: fc00:db20:35b:7399::5\n Uptime: 08:14:18\n Router mode: EXCLUDE (Expires: never)\n Host mode: EXCLUDE\n Last reporter: FE80::5054:FF:FEDD:BB49\n Source list is empty\n Interface: GigabitEthernet2\n Group: fc00:db20:35b:7399::5\n Uptime: 00:42:30\n Router mode: INCLUDE\n Host mode: INCLUDE\n Last reporter: FE80::5054:FF:FEDD:BB49\n Group source list:\n Source Address Uptime Expires Fwd Flags\n 2001:DB8:3:3::3 00:42:30 00:12:23 Yes Remote Local E\n Interface: GigabitEthernet2\n Group: 
fc00:e968:6179::de52:7100\n Uptime: 00:42:30\n Router mode: EXCLUDE (Expires: never)\n Host mode: EXCLUDE\n Last reporter: FE80::5054:FF:FEDD:BB49\n Source list is empty\n '''\n\n ShowIpv6MldSsmMap_default_1 = '''\\\n R4# show ipv6 mld ssm-map FFfdf8:f53e:61e4::18\n\n Group address : fc00:db20:35b:7399::5\n Group mode ssm : FALSE\n Database : NONE\n\n '''\n\n\n ShowIpv6MldSsmMap_default_2 = '''\\\n R4# show ipv6 mld ssm-map FFfc00:db20:35b:7399::5\n\n Group address : fc00:db20:35b:7399::5\n Group mode ssm : FALSE\n Database : NONE\n '''\n ShowIpv6MldSsmMap_default_3 = '''\\\n R4# show ipv6 mld ssm-map FFfd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b\n\n Group address : fc00:db20:35b:7399::5\n Group mode ssm : FALSE\n Database : STATIC\n Source list : 2001:DB8:1:1::1\n '''\n ShowIpv6MldSsmMap_default_4 = '''\\\n R4# show ipv6 mld ssm-map FFfc00:e968:6179::de52:7100\n\n Group address : fc00:e968:6179::de52:7100\n Group mode ssm : FALSE\n Database : NONE\n '''\n\n ShowIpv6MldSsmMap_VRF1_1 = '''\\\n R4# show ipv6 mld vrf VRF1 ssm-map FFfdf8:f53e:61e4::18\n\n Group address : fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b\n Group mode ssm : FALSE\n Database : NONE\n '''\n ShowIpv6MldSsmMap_VRF1_2 = '''\\\n R4# show ipv6 mld vrf VRF1 ssm-map fc00:db20:35b:7399::5\n\n Group address : fc00:db20:35b:7399::5\n Group mode ssm : FALSE\n Database : NONE\n '''\n ShowIpv6MldSsmMap_VRF1_3 = '''\\\n R4# show ipv6 mld vrf VRF1 ssm-map fc00:db20:35b:7399::5\n\n Group address : fc00:db20:35b:7399::5\n Group mode ssm : FALSE\n Database : STATIC\n Source list : 2001:DB8:1:1::1\n '''\n ShowIpv6MldSsmMap_VRF1_4 = '''\\\n R4# show ipv6 mld vrf VRF1 ssm-map fc00:e968:6179::de52:7100\n\n Group address : fc00:e968:6179::de52:7100\n Group mode ssm : FALSE\n Database : NONE\n '''\n\n Mld_info = {\n \"vrfs\": {\n \"VRF1\": {\n \"ssm_map\": {\n \"2001:DB8:1:1::1 FF35:1::1\": {\n \"group_address\": \"fc00:db20:35b:7399::5\",\n \"source_addr\": \"2001:DB8:1:1::1\"\n }\n },\n \"max_groups\": 64000,\n \"interfaces\": {\n \"Tunnel1\": {\n \"enable\": False,\n \"oper_status\": \"up\"\n },\n \"GigabitEthernet2\": {\n \"oper_status\": \"up\",\n \"querier\": \"FE80::5054:FF:FEDD:BB49\",\n \"query_interval\": 366,\n \"join_group\": {\n \"fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b 2001:DB8:2:2::2\": {\n \"source\": \"2001:DB8:2:2::2\",\n \"group\": \"fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b\"\n }\n },\n \"enable\": True,\n \"static_group\": {\n \"fc00:db20:35b:7399::5 2001:DB8:3:3::3\": {\n \"source\": \"2001:DB8:3:3::3\",\n \"group\": \"fc00:db20:35b:7399::5\"\n }\n },\n \"group\": {\n \"fc00:e968:6179::de52:7100\": {\n \"up_time\": \"00:42:30\",\n \"expire\": \"never\",\n \"filter_mode\": \"exclude\",\n \"last_reporter\": \"FE80::5054:FF:FEDD:BB49\"\n },\n \"fc00:db20:35b:7399::5\": {\n \"up_time\": \"08:14:18\",\n \"expire\": \"never\",\n \"filter_mode\": \"exclude\",\n \"last_reporter\": \"FE80::5054:FF:FEDD:BB49\"\n },\n \"fc00:db20:35b:7399::5\": {\n \"up_time\": \"00:42:30\",\n \"source\": {\n \"2001:DB8:3:3::3\": {\n \"up_time\": \"00:42:30\",\n \"expire\": \"00:12:23\"\n }\n },\n \"filter_mode\": \"include\",\n \"last_reporter\": \"FE80::5054:FF:FEDD:BB49\"\n },\n \"fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b\": {\n \"up_time\": \"08:14:20\",\n \"source\": {\n \"2001:DB8:2:2::2\": {\n \"up_time\": \"08:13:56\",\n \"expire\": \"00:12:23\"\n }\n },\n \"filter_mode\": \"include\",\n \"last_reporter\": \"FE80::5054:FF:FEDD:BB49\"\n }\n },\n \"query_max_response_time\": 16,\n \"max_groups\": 6400,\n \"version\": 2\n }\n }\n },\n \"default\": {\n 
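# Expected Ops state for the default VRF. In the companion unit test the\n # whole Mld_info dict is asserted against mld.info once learn() has run\n # over the mocked show-command outputs above (the same assertEqual\n # pattern visible in the nxos test_mld.py).\n 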
\"ssm_map\": {\n \"2001:DB8:1:1::1 fc00:db20:35b:7399::5\": {\n \"group_address\": \"fc00:db20:35b:7399::5\",\n \"source_addr\": \"2001:DB8:1:1::1\"\n }\n },\n \"max_groups\": 64000,\n \"interfaces\": {\n \"GigabitEthernet3\": {\n \"enable\": False,\n \"oper_status\": \"down\"\n },\n \"GigabitEthernet1\": {\n \"group_policy\": \"test\",\n \"group\": {\n \"fc00:e968:6179::de52:7100\": {\n \"up_time\": \"00:42:32\",\n \"expire\": \"never\",\n \"filter_mode\": \"exclude\",\n \"last_reporter\": \"FE80::5054:FF:FE7C:DC70\"\n },\n \"fc00:db20:35b:7399::5\": {\n \"up_time\": \"08:14:01\",\n \"expire\": \"never\",\n \"filter_mode\": \"exclude\",\n \"last_reporter\": \"FE80::5054:FF:FE7C:DC70\"\n },\n \"fc00:db20:35b:7399::5\": {\n \"up_time\": \"00:42:41\",\n \"source\": {\n \"2001:DB8:3:3::3\": {\n \"up_time\": \"00:42:41\",\n \"expire\": \"00:06:42\"\n }\n },\n \"filter_mode\": \"include\",\n \"last_reporter\": \"FE80::5054:FF:FE7C:DC70\"\n },\n \"fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b\": {\n \"up_time\": \"08:14:15\",\n \"source\": {\n \"2001:DB8:2:2::2\": {\n \"up_time\": \"08:13:22\",\n \"expire\": \"00:06:42\"\n }\n },\n \"filter_mode\": \"include\",\n \"last_reporter\": \"FE80::5054:FF:FE7C:DC70\"\n }\n },\n \"enable\": True,\n \"query_interval\": 366,\n \"join_group\": {\n \"fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b 2001:DB8:2:2::2\": {\n \"source\": \"2001:DB8:2:2::2\",\n \"group\": \"fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b\"\n },\n \"fc00:db20:35b:7399::5 2001:DB8:3:3::3\": {\n \"source\": \"2001:DB8:3:3::3\",\n \"group\": \"fc00:db20:35b:7399::5\"\n }\n },\n \"oper_status\": \"up\",\n \"querier\": \"FE80::5054:FF:FE7C:DC70\",\n \"query_max_response_time\": 16,\n \"static_group\": {\n \"fc00:db20:35b:7399::5 2001:DB8:3:3::3\": {\n \"source\": \"2001:DB8:3:3::3\",\n \"group\": \"fc00:db20:35b:7399::5\"\n }\n },\n \"max_groups\": 6400,\n \"version\": 2\n },\n \"VoIP-Null0\": {\n \"enable\": False,\n \"oper_status\": \"up\"\n },\n \"Tunnel0\": {\n \"enable\": False,\n \"oper_status\": \"up\"\n },\n \"LIIN0\": {\n \"enable\": False,\n \"oper_status\": \"up\"\n },\n \"Null0\": {\n \"enable\": False,\n \"oper_status\": \"up\"\n }\n }\n }\n }\n }\n ", "id": "8824341", "language": "Python", "matching_score": 4.119140148162842, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/mld/ios/tests/mld_output.py" }, { "content": "''' \nMcast Genie Ops Object Outputs for IOSXE.\n'''\n\n\nclass McastOutput(object):\n\n ShowVrfDetail = {\n \"Mgmt-vrf\": {\n \"vrf_id\": 1,\n \"interfaces\": [\n \"GigabitEthernet0/0\"\n ],\n \"address_family\": {\n \"ipv4 unicast\": {\n \"table_id\": \"0x1\",\n \"flags\": \"0x0\",\n \"vrf_label\": {\n 'allocation_mode': 'per-prefix'\n }\n },\n \"ipv6 unicast\": {\n \"table_id\": \"0x1E000001\",\n \"flags\": \"0x0\",\n \"vrf_label\": {\n 'allocation_mode': 'per-prefix'\n }\n }\n },\n \"flags\": \"0x1808\"\n },\n \"VRF1\": {\n \"interfaces\": [\n \"GigabitEthernet0/0\"\n ],\n \"address_family\": {\n \"ipv4 unicast\": {\n \"export_to_global\": {\n \"export_to_global_map\": \"export_to_global_map\",\n \"prefix_limit\": 1000\n },\n \"import_from_global\": {\n \"prefix_limit\": 1000,\n \"import_from_global_map\": \"import_from_global_map\"\n },\n \"table_id\": \"0x1\",\n \"routing_table_limit\": {\n \"routing_table_limit_action\": {\n \"enable_alert_percent\": {\n \"alert_percent_value\": 10000\n }\n }\n },\n \"route_targets\": {\n \"200:1\": {\n \"rt_type\": \"both\",\n \"route_target\": \"200:1\"\n },\n \"100:1\": {\n \"rt_type\": \"both\",\n \"route_target\": 
\"100:1\"\n }\n },\n \"flags\": \"0x2100\",\n \"vrf_label\": {\n 'allocation_mode': 'per-prefix'\n }\n },\n \"ipv6 unicast\": {\n \"export_to_global\": {\n \"export_to_global_map\": \"export_to_global_map\",\n \"prefix_limit\": 1000\n },\n \"table_id\": \"0x1E000001\",\n \"routing_table_limit\": {\n \"routing_table_limit_action\": {\n \"enable_alert_percent\": {\n \"alert_percent_value\": 7000\n }\n },\n \"routing_table_limit_number\": 10000\n },\n \"route_targets\": {\n \"200:1\": {\n \"rt_type\": \"import\",\n \"route_target\": \"200:1\"\n },\n \"400:1\": {\n \"rt_type\": \"import\",\n \"route_target\": \"400:1\"\n },\n \"300:1\": {\n \"rt_type\": \"export\",\n \"route_target\": \"300:1\"\n },\n \"100:1\": {\n \"rt_type\": \"export\",\n \"route_target\": \"100:1\"\n }\n },\n \"flags\": \"0x100\",\n \"vrf_label\": {\n 'allocation_mode': 'per-prefix'\n }\n }\n },\n \"flags\": \"0x180C\",\n \"route_distinguisher\": \"100:1\",\n \"vrf_id\": 1\n }\n }\n\n # Set output for 'vrf default' as input to parser\n ShowIpMroute_default_output = '''\\\n IP Multicast Routing Table\n Flags: D - Dense, S - Sparse, B - Bidir Group, s - SSM Group, C - Connected,\n L - Local, P - Pruned, R - RP-bit set, F - Register flag,\n T - SPT-bit set, J - Join SPT, M - MSDP created entry, E - Extranet,\n X - Proxy Join Timer Running, A - Candidate for MSDP Advertisement,\n U - URD, I - Received Source Specific Host Report, \n Z - Multicast Tunnel, z - MDT-data group sender, \n Y - Joined MDT-data group, y - Sending to MDT-data group, \n G - Received BGP C-Mroute, g - Sent BGP C-Mroute, \n N - Received BGP Shared-Tree Prune, n - BGP C-Mroute suppressed, \n Q - Received BGP S-A Route, q - Sent BGP S-A Route, \n V - RD & Vector, v - Vector, p - PIM Joins on route, \n x - VxLAN group\n Outgoing interface flags: H - Hardware switched, A - Assert winner, p - PIM Join\n Timers: Uptime/Expires\n Interface state: Interface, Next-Hop or VCD, State/Mode\n\n (*, 172.16.31.10), 00:00:03/stopped, RP 10.4.1.1, flags: SPF\n Incoming interface: Null, RPF nbr 0.0.0.0\n Outgoing interface list: Null\n\n (10.4.1.1, 172.16.31.10), 00:00:03/00:02:57, flags: PFT\n Incoming interface: Loopback0, RPF nbr 0.0.0.0, Registering\n Outgoing interface list: Null\n\n (10.1.3.1, 172.16.31.10), 00:00:03/00:02:57, flags: PFT\n Incoming interface: GigabitEthernet2, RPF nbr 0.0.0.0, Registering\n Outgoing interface list: Null\n\n (*, 192.168.127.12), 2d09h/00:02:56, RP 10.16.2.2, flags: SCL\n Incoming interface: Null, RPF nbr 0.0.0.0\n Outgoing interface list:\n Loopback0, Forward/Sparse, 2d09h/00:02:56\n\n (*, 192.168.3.11), 00:03:57/00:02:54, RP 172.16.0.0, flags: SJ\n Incoming interface: Null, RPF nbr 224.0.0.0224.0.0.0\n Outgoing interface list:\n ATM0/0, VCD 14, Forward/Sparse, 00:03:57/00:02:53\n '''\n\n ShowIpv6Mroute_default_output = '''\\\n Multicast Routing Table\n Flags:D - Dense, S - Sparse, B - Bidir Group, s - SSM Group, \n C - Connected, L - Local, I - Received Source Specific Host Report,\n P - Pruned, R - RP-bit set, F - Register flag, T - SPT-bit set,\n J - Join SPT \n Timers:Uptime/Expires\n Interface state:Interface, State\n (*, fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b), 00:04:45/00:02:47, RP 2001:DB8:6::6, flags:S\n Incoming interface:Tunnel5\n RPF nbr:2001:db8:90:24::6\n Outgoing interface list:\n POS4/0, Forward, 00:04:45/00:02:47\n (2001:DB8:999::99, FF07::1), 00:02:06/00:01:23, flags:SFT\n Incoming interface:POS1/0\n RPF nbr:2001:DB8:999::99\n Outgoing interface list:\n POS4/0, Forward, 00:02:06/00:03:27\n '''\n\n 
ShowIpMrouteStatic_default_output = '''\\\n Mroute: 172.16.0.0/16, RPF neighbor: 172.30.10.13, distance: 1\n Mroute: 172.16.1.0/24, RPF neighbor: 172.30.10.13, distance: 1\n '''\n\n ShowIpMulticast_default_output = '''\\\n Multicast Routing: enabled\n Multicast Multipath: enabled\n Multicast Route limit: No limit\n Multicast Fallback group mode: Sparse\n Number of multicast boundaries configured with filter-autorp option: 0\n MoFRR: Disabled\n '''\n\n ShowIpv6Rpf_default_output = '''\\\n RPF information for 2001:99:99::99\n RPF interface: GigabitEthernet1\n RPF neighbor: 2001:99:99::99\n RPF route/mask: 2001:99:99::99/128\n RPF type: Mroute\n RPF recursion count: 0\n Metric preference: 128\n Metric: 0\n '''\n\n ShowIpv6PimInterface_default_output = '''\\\n Interface PIM Nbr Hello DR\n Count Intvl Prior\n\n GigabitEthernet1 on 1 30 1 \n Address: FE80::5054:FF:FE2C:6CDF\n DR : FE80::5054:FF:FEAC:64B3\n GigabitEthernet2 on 0 30 1 \n Address: FE80::5054:FF:FEBE:8787\n DR : this system\n Tunnel2 off 0 30 1 \n Address: ::\n DR : not elected\n Tunnel1 off 0 30 1 \n Address: FE80::21E:F6FF:FEAC:A600\n DR : not elected\n Null0 off 0 30 1 \n Address: FE80::1\n DR : not elected\n Tunnel3 off 0 30 1 \n Address: ::\n DR : not elected\n Tunnel4 off 0 30 1 \n Address: ::\n DR : not elected\n Loopback0 on 0 30 1 \n Address: FE80::21E:F6FF:FEAC:A600\n DR : this system\n Tunnel0 off 0 30 1 \n Address: FE80::21E:F6FF:FEAC:A600\n DR : not elected\n '''\n\n # Set output for 'show bgp vrf VRF1 all neighbors' as input to parser\n ShowIpMroute_vrf1_output = '''\\\n IP Multicast Routing Table\n Flags: D - Dense, S - Sparse, B - Bidir Group, s - SSM Group, C - Connected,\n L - Local, P - Pruned, R - RP-bit set, F - Register flag,\n T - SPT-bit set, J - Join SPT, M - MSDP created entry, E - Extranet,\n X - Proxy Join Timer Running, A - Candidate for MSDP Advertisement,\n U - URD, I - Received Source Specific Host Report, \n Z - Multicast Tunnel, z - MDT-data group sender, \n Y - Joined MDT-data group, y - Sending to MDT-data group, \n G - Received BGP C-Mroute, g - Sent BGP C-Mroute, \n N - Received BGP Shared-Tree Prune, n - BGP C-Mroute suppressed, \n Q - Received BGP S-A Route, q - Sent BGP S-A Route, \n V - RD & Vector, v - Vector, p - PIM Joins on route, \n x - VxLAN group\n Outgoing interface flags: H - Hardware switched, A - Assert winner, p - PIM Join\n Timers: Uptime/Expires\n Interface state: Interface, Next-Hop or VCD, State/Mode\n\n (*, 172.16.31.10), 00:00:04/stopped, RP 10.229.11.11, flags: SPF\n Incoming interface: Null, RPF nbr 0.0.0.0\n Outgoing interface list: Null\n\n (10.229.11.11, 172.16.31.10), 00:00:04/00:02:55, flags: PFT\n Incoming interface: Loopback1, RPF nbr 0.0.0.0, Registering\n Outgoing interface list: Null\n\n (*, 192.168.127.12), 00:08:58/00:02:52, RP 10.229.11.11, flags: SJCL\n Incoming interface: Null, RPF nbr 0.0.0.0\n Outgoing interface list:\n Loopback1, Forward/Sparse, 00:08:58/00:02:52\n '''\n\n ShowIpv6Mroute_vrf1_output = '''\\\n Multicast Routing Table\n Flags:D - Dense, S - Sparse, B - Bidir Group, s - SSM Group, \n C - Connected, L - Local, I - Received Source Specific Host Report,\n P - Pruned, R - RP-bit set, F - Register flag, T - SPT-bit set,\n J - Join SPT \n Timers:Uptime/Expires\n Interface state:Interface, State\n (*, fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b), 00:04:45/00:02:47, RP 2001:DB8:6::6, flags:S\n Incoming interface:Tunnel5\n RPF nbr:2001:db8:90:24::6\n Outgoing interface list:\n POS4/0, Forward, 00:04:45/00:02:47\n (2001:DB8:999::99, FF07::1), 
00:02:06/00:01:23, flags:SFT\n Incoming interface:POS1/0\n RPF nbr:2001:DB8:999::99\n Outgoing interface list:\n POS4/0, Forward, 00:02:06/00:03:27\n '''\n\n ShowIpMrouteStatic_vrf1_output = '''\\\n Mroute: 10.1.77.77/32, RPF neighbor: 10.12.12.13, distance: 1\n '''\n\n ShowIpMulticast_vrf1_output = '''\\\n Multicast Routing: enabled\n Multicast Multipath: disabled\n Multicast Route limit: No limit\n Multicast Fallback group mode: Sparse\n Number of multicast boundaries configured with filter-autorp option: 0\n MoFRR: Disabled\n '''\n\n ShowIpv6Rpf_vrf1_output = '''\\\n RPF information for 2001:99:99::99\n RPF interface: GigabitEthernet3\n RPF neighbor: 2001:99:99::99\n RPF route/mask: 2001:99:99::99/128\n RPF type: Mroute\n RPF recursion count: 0\n Metric preference: 128\n Metric: 0\n '''\n\n ShowIpv6PimInterface_vrf1_output = '''\\\n Interface PIM Nbr Hello DR\n Count Intvl Prior\n\n GigabitEthernet3 on 0 30 1 \n Address: FE80::5054:FF:FE84:F097\n DR : this system\n Tunnel5 off 0 30 1 \n Address: FE80::21E:F6FF:FEAC:A600\n DR : not elected\n Tunnel6 off 0 30 1 \n Address: ::\n DR : not elected\n Tunnel7 off 0 30 1 \n Address: ::\n DR : not elected\n Loopback1 on 0 30 1 \n Address: FE80::21E:F6FF:FEAC:A600\n DR : this system\n '''\n\n McastInfo = {\n \"vrf\": {\n \"VRF1\": {\n \"address_family\": {\n \"ipv4\": {\n \"enable\": True,\n \"multipath\": False,\n \"mroute\": {\n \"10.1.77.77/32\": {\n \"path\": {\n \"10.12.12.13 1\": {\n \"admin_distance\": \"1\",\n \"neighbor_address\": \"10.12.12.13\"\n }\n }\n }\n }\n },\n \"ipv6\": {\n \"enable\": True,\n \"mroute\": {\n \"fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b\": {\n \"path\": {\n \"2001:99:99::99 GigabitEthernet3 128\": {\n \"admin_distance\": \"128\",\n \"neighbor_address\": \"2001:99:99::99\",\n \"interface_name\": \"GigabitEthernet3\"\n }\n }\n }\n }\n }\n }\n },\n \"default\": {\n \"address_family\": {\n \"ipv6\": {\n \"enable\": True,\n \"mroute\": {\n \"fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b\": {\n \"path\": {\n \"2001:99:99::99 GigabitEthernet1 128\": {\n \"admin_distance\": \"128\",\n \"neighbor_address\": \"2001:99:99::99\",\n \"interface_name\": \"GigabitEthernet1\"\n }\n }\n }\n }\n },\n \"ipv4\": {\n \"enable\": True,\n \"multipath\": True,\n \"mroute\": {\n \"172.16.0.0/16\": {\n \"path\": {\n \"172.30.10.13 1\": {\n \"admin_distance\": \"1\",\n \"neighbor_address\": \"172.30.10.13\"\n }\n }\n },\n \"172.16.1.0/24\": {\n \"path\": {\n \"172.30.10.13 1\": {\n \"admin_distance\": \"1\",\n \"neighbor_address\": \"172.30.10.13\"\n }\n }\n }\n }\n }\n }\n }\n }\n }\n\n McastTable = {\n \"vrf\": {\n \"VRF1\": {\n \"address_family\": {\n \"ipv6\": {\n \"multicast_group\": {\n \"fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b\": {\n \"source_address\": {\n \"2001:DB8:999::99\": {\n \"expire\": \"00:01:23\",\n \"uptime\": \"00:02:06\",\n \"flags\": \"SFT\",\n \"rpf_nbr\": \"2001:DB8:999::99\",\n \"outgoing_interface_list\": {\n \"POS4/0\": {\n \"expire\": \"00:03:27\",\n \"uptime\": \"00:02:06\",\n \"state_mode\": \"forward\"\n }\n },\n \"incoming_interface_list\": {\n \"POS1/0\": {\n \"rpf_nbr\": \"2001:DB8:999::99\"\n }\n }\n },\n \"*\": {\n \"rp\": \"2001:DB8:6::6\",\n \"uptime\": \"00:04:45\",\n \"outgoing_interface_list\": {\n \"POS4/0\": {\n \"expire\": \"00:02:47\",\n \"uptime\": \"00:04:45\",\n \"state_mode\": \"forward\"\n }\n },\n \"expire\": \"00:02:47\",\n \"flags\": \"S\",\n \"rpf_nbr\": \"2001:db8:90:24::6\",\n \"incoming_interface_list\": {\n \"Tunnel5\": {\n \"rpf_nbr\": \"2001:db8:90:24::6\"\n }\n }\n }\n }\n }\n }\n },\n 
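# ipv4 below follows the same shape as ipv6: shared-tree (*, G) entries\n # are keyed by the literal source_address \"*\" and carry the rp, while\n # (S, G) entries are keyed by the actual source address, mirroring the\n # mroute output strings above.\n 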
\"ipv4\": {\n \"multicast_group\": {\n \"192.168.127.12\": {\n \"source_address\": {\n \"*\": {\n \"rp\": \"10.229.11.11\",\n \"uptime\": \"00:08:58\",\n \"outgoing_interface_list\": {\n \"Loopback1\": {\n \"expire\": \"00:02:52\",\n \"uptime\": \"00:08:58\",\n \"state_mode\": \"forward/sparse\"\n }\n },\n \"expire\": \"00:02:52\",\n \"flags\": \"SJCL\",\n \"rpf_nbr\": \"0.0.0.0\",\n }\n }\n },\n \"172.16.31.10\": {\n \"source_address\": {\n \"*\": {\n \"rp\": \"10.229.11.11\",\n \"expire\": \"stopped\",\n \"uptime\": \"00:00:04\",\n \"flags\": \"SPF\",\n \"rpf_nbr\": \"0.0.0.0\",\n },\n \"10.229.11.11\": {\n \"expire\": \"00:02:55\",\n \"uptime\": \"00:00:04\",\n \"flags\": \"PFT\",\n \"rpf_nbr\": \"0.0.0.0\",\n \"incoming_interface_list\": {\n \"Loopback1\": {\n \"rpf_nbr\": \"0.0.0.0\",\n \"rpf_info\": \"registering\"\n }\n }\n }\n }\n }\n }\n }\n }\n },\n \"default\": {\n \"address_family\": {\n \"ipv6\": {\n \"multicast_group\": {\n \"fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b\": {\n \"source_address\": {\n \"2001:DB8:999::99\": {\n \"expire\": \"00:01:23\",\n \"uptime\": \"00:02:06\",\n \"flags\": \"SFT\",\n \"rpf_nbr\": \"2001:DB8:999::99\",\n \"outgoing_interface_list\": {\n \"POS4/0\": {\n \"expire\": \"00:03:27\",\n \"uptime\": \"00:02:06\",\n \"state_mode\": \"forward\"\n }\n },\n \"incoming_interface_list\": {\n \"POS1/0\": {\n \"rpf_nbr\": \"2001:DB8:999::99\"\n }\n }\n },\n \"*\": {\n \"rp\": \"2001:DB8:6::6\",\n \"uptime\": \"00:04:45\",\n \"outgoing_interface_list\": {\n \"POS4/0\": {\n \"expire\": \"00:02:47\",\n \"uptime\": \"00:04:45\",\n \"state_mode\": \"forward\"\n }\n },\n \"expire\": \"00:02:47\",\n \"flags\": \"S\",\n \"rpf_nbr\": \"2001:db8:90:24::6\",\n \"incoming_interface_list\": {\n \"Tunnel5\": {\n \"rpf_nbr\": \"2001:db8:90:24::6\"\n }\n }\n }\n }\n }\n }\n },\n \"ipv4\": {\n \"multicast_group\": {\n \"192.168.127.12\": {\n \"source_address\": {\n \"*\": {\n \"rp\": \"10.16.2.2\",\n \"uptime\": \"2d09h\",\n \"outgoing_interface_list\": {\n \"Loopback0\": {\n \"expire\": \"00:02:56\",\n \"uptime\": \"2d09h\",\n \"state_mode\": \"forward/sparse\"\n }\n },\n \"expire\": \"00:02:56\",\n \"flags\": \"SCL\",\n \"rpf_nbr\": \"0.0.0.0\",\n }\n }\n },\n \"172.16.31.10\": {\n \"source_address\": {\n \"*\": {\n \"rp\": \"10.4.1.1\",\n \"expire\": \"stopped\",\n \"uptime\": \"00:00:03\",\n \"flags\": \"SPF\",\n \"rpf_nbr\": \"0.0.0.0\",\n },\n \"10.1.3.1\": {\n \"expire\": \"00:02:57\",\n \"uptime\": \"00:00:03\",\n \"flags\": \"PFT\",\n \"rpf_nbr\": \"0.0.0.0\",\n \"incoming_interface_list\": {\n \"GigabitEthernet2\": {\n \"rpf_nbr\": \"0.0.0.0\",\n \"rpf_info\": \"registering\"\n }\n }\n },\n \"10.4.1.1\": {\n \"expire\": \"00:02:57\",\n \"uptime\": \"00:00:03\",\n \"flags\": \"PFT\",\n \"rpf_nbr\": \"0.0.0.0\",\n \"incoming_interface_list\": {\n \"Loopback0\": {\n \"rpf_nbr\": \"0.0.0.0\",\n \"rpf_info\": \"registering\"\n }\n }\n }\n }\n },\n \"192.168.3.11\": {\n \"source_address\": {\n \"*\": {\n \"rp\": \"172.16.0.0\",\n \"uptime\": \"00:03:57\",\n \"outgoing_interface_list\": {\n \"ATM0/0\": {\n \"expire\": \"00:02:53\",\n \"uptime\": \"00:03:57\",\n \"vcd\": \"14\",\n \"state_mode\": \"forward/sparse\"\n }\n },\n \"expire\": \"00:02:54\",\n \"flags\": \"SJ\",\n \"rpf_nbr\": \"192.168.127.12224.0.0.0\",\n }\n }\n }\n }\n }\n }\n }\n }\n }\n", "id": "4393461", "language": "Python", "matching_score": 5.672088623046875, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/mcast/iosxe/tests/mcast_output.py" }, { "content": "''' \nRip Genie Ops Object Outputs 
for IOSXE.\n'''\n\n\nclass RipOutput(object):\n\n ShowVrfDetail = {\n \"Mgmt-vrf\": {\n \"vrf_id\": 1,\n \"interfaces\": [\n \"GigabitEthernet0/0\"\n ],\n \"address_family\": {\n \"ipv4 unicast\": {\n \"table_id\": \"0x1\",\n \"flags\": \"0x0\",\n \"vrf_label\": {\n 'allocation_mode': 'per-prefix'\n }\n },\n \"ipv6 unicast\": {\n \"table_id\": \"0x1E000001\",\n \"flags\": \"0x0\",\n \"vrf_label\": {\n 'allocation_mode': 'per-prefix'\n }\n }\n },\n \"flags\": \"0x1808\"\n },\n \"VRF1\": {\n \"interfaces\": [\n \"GigabitEthernet0/0\"\n ],\n \"address_family\": {\n \"ipv4 unicast\": {\n \"export_to_global\": {\n \"export_to_global_map\": \"export_to_global_map\",\n \"prefix_limit\": 1000\n },\n \"import_from_global\": {\n \"prefix_limit\": 1000,\n \"import_from_global_map\": \"import_from_global_map\"\n },\n \"table_id\": \"0x1\",\n \"routing_table_limit\": {\n \"routing_table_limit_action\": {\n \"enable_alert_limit_number\": {\n \"alert_limit_number\": 10000\n }\n }\n },\n \"route_targets\": {\n \"200:1\": {\n \"rt_type\": \"both\",\n \"route_target\": \"200:1\"\n },\n \"100:1\": {\n \"rt_type\": \"both\",\n \"route_target\": \"100:1\"\n }\n },\n \"flags\": \"0x2100\",\n \"vrf_label\": {\n 'allocation_mode': 'per-prefix'\n }\n },\n \"ipv6 unicast\": {\n \"export_to_global\": {\n \"export_to_global_map\": \"export_to_global_map\",\n \"prefix_limit\": 1000\n },\n \"table_id\": \"0x1E000001\",\n \"routing_table_limit\": {\n \"routing_table_limit_action\": {\n \"enable_alert_percent\": {\n \"alert_percent_value\": 70\n },\n \"enable_alert_limit_number\": {\n \"alert_limit_number\": 7000\n }\n },\n \"routing_table_limit_number\": 10000\n },\n \"route_targets\": {\n \"200:1\": {\n \"rt_type\": \"import\",\n \"route_target\": \"200:1\"\n },\n \"400:1\": {\n \"rt_type\": \"import\",\n \"route_target\": \"400:1\"\n },\n \"300:1\": {\n \"rt_type\": \"export\",\n \"route_target\": \"300:1\"\n },\n \"100:1\": {\n \"rt_type\": \"export\",\n \"route_target\": \"100:1\"\n }\n },\n \"flags\": \"0x100\",\n \"vrf_label\": {\n 'allocation_mode': 'per-prefix'\n }\n }\n },\n \"flags\": \"0x180C\",\n \"route_distinguisher\": \"100:1\",\n \"vrf_id\": 1\n }\n }\n\n showIpProtocols_default = '''\\\n R1#show ip protocols | sec rip\nRouting Protocol is \"rip\"\n Output delay 50 milliseconds between packets\n Outgoing update filter list for all interfaces is not set\n Incoming update filter list for all interfaces is not set\n Incoming routes will have 10 added to metric if on list 21\n Sending updates every 10 seconds, next due in 8 seconds\n Invalid after 21 seconds, hold down 22, flushed after 23\n Default redistribution metric is 3\n Redistributing: connected, static, rip\n Neighbor(s):\n 10.1.2.2\n Default version control: send version 2, receive version 2\n Interface Send Recv Triggered RIP Key-chain\n GigabitEthernet3.100 2 2 No 1\n Automatic network summarization is not in effect\n Address Summarization:\n 172.16.0.0/17 for GigabitEthernet3.100\n Maximum path: 4\n Routing for Networks:\n 10.0.0.0\n Passive Interface(s):\n GigabitEthernet2.100\n Routing Information Sources:\n Gateway Distance Last Update\n 10.1.3.3 120 00:00:00\n 10.1.2.2 120 00:00:04\n Distance: (default is 120)\n '''\n showIpProtocols_vrf1 = '''\\\nR1#show ip protocols vrf VRF1 | sec rip\nRouting Protocol is \"rip\"\n Output delay 50 milliseconds between packets\n Outgoing update filter list for all interfaces is not set\n Incoming update filter list for all interfaces is not set\n Sending updates every 30 seconds, next due in 2 seconds\n 
Invalid after 180 seconds, hold down 180, flushed after 240\n Redistributing: connected, static, rip\n Default version control: send version 2, receive version 2\n Interface Send Recv Triggered RIP Key-chain\n GigabitEthernet2.200 2 2 No none\n GigabitEthernet3.200 2 2 No none\n Maximum path: 4\n Routing for Networks:\n 10.0.0.0\n 10.0.0.0\n Routing Information Sources:\n Gateway Distance Last Update\n 10.1.3.3 120 20:33:00\n 10.1.2.2 120 00:00:21\n Distance: (default is 120)\n '''\n\n showIpRipDatabase_default = '''\\\n R1#show ip rip database\n0.0.0.0/0 auto-summary\n0.0.0.0/0 redistributed\n [3] via 172.16.1.254, from 0.0.0.0,\n [3] via 172.16.1.254, from 0.0.0.0,\n10.0.0.0/8 auto-summary\n10.1.2.0/24 directly connected, GigabitEthernet2.100\n10.1.3.0/24 directly connected, GigabitEthernet3.100\n10.2.3.0/24\n [1] via 10.1.3.3, 00:00:05, GigabitEthernet3.100\n [1] via 10.1.2.2, 00:00:21, GigabitEthernet2.100\n172.16.0.0/16 auto-summary\n172.16.0.0/17 int-summary\n172.16.0.0/17\n [4] via 10.1.2.2, 00:00:00, GigabitEthernet2.100\n\n '''\n showIpRipDatabase_vrf1 = '''\\\n R1#show ip rip database vrf VRF1\n10.0.0.0/8 auto-summary\n10.1.2.0/24 directly connected, GigabitEthernet2.200\n10.1.3.0/24 directly connected, GigabitEthernet3.200\n10.2.3.0/24\n [1] via 10.1.2.2, 00:00:08, GigabitEthernet2.200\n172.16.0.0/16 auto-summary\n172.16.11.0/24 redistributed\n [15] via 0.0.0.0,\n172.16.22.0/24\n [15] via 10.1.2.2, 00:00:08, GigabitEthernet2.200\n192.168.1.0/24 auto-summary\n192.168.1.1/32 redistributed\n [1] via 0.0.0.0,\n '''\n\n showIpv6Protocols_default = '''\\\n R1#show ipv6 protocols | sec rip\n IPv6 Routing Protocol is \"rip ripng\"\n Interfaces:\n GigabitEthernet3.100\n GigabitEthernet2.100\n Redistribution:\n Redistributing protocol static with metric 3\n '''\n showIpv6Protocols_vrf1 = '''\\\n R1#show ipv6 protocols vrf VRF1 | sec rip\n IPv6 Routing Protocol is \"rip ripng\"\n Interfaces:\n GigabitEthernet3.200\n GigabitEthernet2.200\n Redistribution:\n Redistributing protocol connected with transparent metric\n Redistributing protocol static with transparent metric route-map static-to-rip\n '''\n\n showIpv6RipDatabase_default = '''\\\n R1#show ipv6 rip database\n RIP VRF \"Default VRF\", local RIB\n 2001:DB8:1:3::/64, metric 2\n GigabitEthernet3.100/FE80::F816:3EFF:FEFF:1E3D, expires in 179 secs\n 2001:DB8:2:3::/64, metric 2, installed\n GigabitEthernet3.100/FE80::F816:3EFF:FEFF:1E3D, expires in 179 secs\n 2001:DB8:2222:2222::/64, metric 7, installed\n GigabitEthernet3.100/FE80::F816:3EFF:FEFF:1E3D, expires in 179 secs\n 2001:DB8:2223:2223::/64, metric 6, installed\n GigabitEthernet2.100/FE80::F816:3EFF:FE7B:437, expires in 173 secs\n '''\n\n showIpv6RipDatabase_vrf1 = '''\\\n R1#show ipv6 rip vrf VRF1 database\nRIP VRF \"VRF1\", local RIB\n 2001:DB8:1:2::/64, metric 2\n GigabitEthernet2.200/FE80::F816:3EFF:FE7B:437, expires in 166 secs\n 2001:DB8:1:3::/64, metric 2\n GigabitEthernet3.200/FE80::F816:3EFF:FEFF:1E3D, expires in 169 secs\n 2001:DB8:2:3::/64, metric 2, installed\n GigabitEthernet3.200/FE80::F816:3EFF:FEFF:1E3D, expires in 169 secs\n GigabitEthernet2.200/FE80::F816:3EFF:FE7B:437, expires in 166 secs\n '''\n\n showIpv6Rip_default = '''\\\n R1#show ipv6 rip\n RIP VRF \"Default VRF\", port 521, multicast-group fc00:e968:6179::de52:7100, pid 635\n Administrative distance is 120. 
Maximum paths is 16\n Updates every 30 seconds, expire after 180\n Holddown lasts 0 seconds, garbage collect after 120\n Split horizon is on; poison reverse is off\n Default routes are not generated\n Periodic updates 399, trigger updates 8\n Full Advertisement 0, Delayed Events 0\n Interfaces:\n GigabitEthernet3.100\n GigabitEthernet2.100\n Redistribution:\n Redistributing protocol static with metric 3\n '''\n\n showIpv6Rip_vrf1= '''\\\n R1#show ipv6 rip vrf VRF1\n RIP VRF \"VRF1\", port 521, multicast-group fc00:e968:6179::de52:7100, pid 635\n Administrative distance is 120. Maximum paths is 16\n Updates every 30 seconds, expire after 180\n Holddown lasts 0 seconds, garbage collect after 120\n Split horizon is on; poison reverse is off\n Default routes are generated\n Periodic updates 390, trigger updates 3\n Full Advertisement 0, Delayed Events 0\n Interfaces:\n GigabitEthernet3.200\n GigabitEthernet2.200\n Redistribution:\n Redistributing protocol connected with transparent metric\n Redistributing protocol static with transparent metric route-map static-to-rip\n'''\n ripOpsOutput={\n \"vrf\": {\n \"default\": {\n \"address_family\": {\n \"ipv6\": {\n \"instance\": {\n \"rip ripng\": {\n \"redistribute\": {\n \"static\": {\n \"metric\": 3\n }\n },\n \"timers\": {\n \"holddown_interval\": 0,\n \"flush_interval\": 120,\n \"update_interval\": 30\n },\n \"maximum_paths\": 16,\n \"split_horizon\": True,\n \"routes\": {\n \"2001:DB8:2223:2223::/64\": {\n \"index\": {\n 1: {\n \"metric\": 6,\n \"expire_time\": \"173\",\n \"interface\": \"GigabitEthernet2.100\",\n \"next_hop\": \"FE80::F816:3EFF:FE7B:437\"\n }\n }\n },\n \"2001:DB8:2:3::/64\": {\n \"index\": {\n 1: {\n \"metric\": 2,\n \"expire_time\": \"179\",\n \"interface\": \"GigabitEthernet3.100\",\n \"next_hop\": \"FE80::F816:3EFF:FEFF:1E3D\"\n }\n }\n },\n \"2001:DB8:1:3::/64\": {\n \"index\": {\n 1: {\n \"metric\": 2,\n \"expire_time\": \"179\",\n \"interface\": \"GigabitEthernet3.100\",\n \"next_hop\": \"FE80::F816:3EFF:FEFF:1E3D\"\n }\n }\n },\n \"2001:DB8:2222:2222::/64\": {\n \"index\": {\n 1: {\n \"metric\": 7,\n \"expire_time\": \"179\",\n \"interface\": \"GigabitEthernet3.100\",\n \"next_hop\": \"FE80::F816:3EFF:FEFF:1E3D\"\n }\n }\n }\n },\n \"originate_default_route\": {\n \"enabled\": False\n },\n \"distance\": 120,\n \"poison_reverse\": False,\n \"interfaces\": {\n \"GigabitEthernet3.100\": {},\n \"GigabitEthernet2.100\": {}\n }\n }\n }\n },\n \"ipv4\": {\n \"instance\": {\n \"rip\": {\n \"distance\": 120,\n \"output_delay\": 50,\n \"maximum_paths\": 4,\n \"default_metric\": 3,\n \"routes\": {\n \"10.2.3.0/24\": {\n \"index\": {\n 1: {\n \"metric\": 1,\n \"next_hop\": \"10.1.3.3\",\n \"interface\": \"GigabitEthernet3.100\"\n },\n 2: {\n \"metric\": 1,\n \"next_hop\": \"10.1.2.2\",\n \"interface\": \"GigabitEthernet2.100\"\n }\n }\n },\n \"10.0.0.0/8\": {\n \"index\": {\n 1: {\n \"summary_type\": \"auto-summary\"\n }\n }\n },\n \"172.16.0.0/17\": {\n \"index\": {\n 1: {\n \"summary_type\": \"int-summary\"\n },\n 2: {\n \"metric\": 4,\n \"next_hop\": \"10.1.2.2\",\n \"interface\": \"GigabitEthernet2.100\"\n }\n }\n },\n \"0.0.0.0/0\": {\n \"index\": {\n 1: {\n \"summary_type\": \"auto-summary\"\n },\n 2: {\n \"metric\": 3,\n \"next_hop\": \"172.16.1.254\",\n \"redistributed\": True\n },\n 3: {\n \"metric\": 3,\n \"next_hop\": \"172.16.1.254\",\n \"redistributed\": True\n }\n }\n },\n \"172.16.0.0/16\": {\n \"index\": {\n 1: {\n \"summary_type\": \"auto-summary\"\n }\n }\n },\n \"10.1.2.0/24\": {\n \"index\": {\n 1: {\n 
\"route_type\": \"connected\",\n \"interface\": \"GigabitEthernet2.100\"\n }\n }\n },\n \"10.1.3.0/24\": {\n \"index\": {\n 1: {\n \"route_type\": \"connected\",\n \"interface\": \"GigabitEthernet3.100\"\n }\n }\n }\n },\n \"interfaces\": {\n \"GigabitEthernet3.100\": {\n \"passive\": True,\n \"summary_address\": {\n \"172.16.0.0/17\": {}\n }\n }\n },\n \"redistribute\": {\n \"static\": {},\n \"rip\": {},\n \"connected\": {}\n }\n }\n }\n }\n }\n },\n \"VRF1\": {\n \"address_family\": {\n \"ipv6\": {\n \"instance\": {\n \"rip ripng\": {\n \"redistribute\": {\n \"static\": {\n \"route_policy\": \"static-to-rip\"\n },\n \"connected\": {}\n },\n \"timers\": {\n \"update_interval\": 30,\n \"flush_interval\": 120,\n \"holddown_interval\": 0\n },\n \"maximum_paths\": 16,\n \"split_horizon\": True,\n \"routes\": {\n \"2001:DB8:2:3::/64\": {\n \"index\": {\n 1: {\n \"metric\": 2,\n \"expire_time\": \"169\",\n \"interface\": \"GigabitEthernet3.200\",\n \"next_hop\": \"FE80::F816:3EFF:FEFF:1E3D\"\n },\n 2: {\n \"metric\": 2,\n \"expire_time\": \"166\",\n \"interface\": \"GigabitEthernet2.200\",\n \"next_hop\": \"FE80::F816:3EFF:FE7B:437\"\n }\n }\n },\n \"2001:DB8:1:3::/64\": {\n \"index\": {\n 1: {\n \"metric\": 2,\n \"expire_time\": \"169\",\n \"interface\": \"GigabitEthernet3.200\",\n \"next_hop\": \"FE80::F816:3EFF:FEFF:1E3D\"\n }\n }\n },\n \"2001:DB8:1:2::/64\": {\n \"index\": {\n 1: {\n \"metric\": 2,\n \"expire_time\": \"166\",\n \"interface\": \"GigabitEthernet2.200\",\n \"next_hop\": \"FE80::F816:3EFF:FE7B:437\"\n }\n }\n }\n },\n \"originate_default_route\": {\n \"enabled\": True\n },\n \"distance\": 120,\n \"poison_reverse\": False,\n \"interfaces\": {\n \"GigabitEthernet2.200\": {},\n \"GigabitEthernet3.200\": {}\n }\n }\n }\n },\n \"ipv4\": {\n \"instance\": {\n \"rip\": {\n \"distance\": 120,\n \"output_delay\": 50,\n \"routes\": {\n \"192.168.1.1/32\": {\n \"index\": {\n 1: {\n \"metric\": 1,\n \"next_hop\": \"0.0.0.0\",\n \"redistributed\": True\n }\n }\n },\n \"192.168.1.0/24\": {\n \"index\": {\n 1: {\n \"summary_type\": \"auto-summary\"\n }\n }\n },\n \"172.16.22.0/24\": {\n \"index\": {\n 1: {\n \"metric\": 15,\n \"next_hop\": \"10.1.2.2\",\n \"interface\": \"GigabitEthernet2.200\"\n }\n }\n },\n \"10.0.0.0/8\": {\n \"index\": {\n 1: {\n \"summary_type\": \"auto-summary\"\n }\n }\n },\n \"10.1.2.0/24\": {\n \"index\": {\n 1: {\n \"route_type\": \"connected\",\n \"interface\": \"GigabitEthernet2.200\"\n }\n }\n },\n \"172.16.0.0/16\": {\n \"index\": {\n 1: {\n \"summary_type\": \"auto-summary\"\n }\n }\n },\n \"172.16.11.0/24\": {\n \"index\": {\n 1: {\n \"metric\": 15,\n \"next_hop\": \"0.0.0.0\",\n \"redistributed\": True\n }\n }\n },\n \"10.2.3.0/24\": {\n \"index\": {\n 1: {\n \"metric\": 1,\n \"next_hop\": \"10.1.2.2\",\n \"interface\": \"GigabitEthernet2.200\"\n }\n }\n },\n \"10.1.3.0/24\": {\n \"index\": {\n 1: {\n \"route_type\": \"connected\",\n \"interface\": \"GigabitEthernet3.200\"\n }\n }\n }\n },\n \"redistribute\": {\n \"static\": {},\n \"rip\": {},\n \"connected\": {}\n },\n \"maximum_paths\": 4\n }\n }\n }\n }\n }\n }\n}\n", "id": "12404225", "language": "Python", "matching_score": 5.0547776222229, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/rip/iosxe/tests/rip_output.py" }, { "content": "''' \nRip Genie Ops Object for IOSXE - CLI.\n'''\n\n# Genie\nfrom genie.ops.base import Base\n\n# iosxe parsers\nfrom genie.libs.parser.iosxe.show_rip import ShowIpRipDatabase,\\\n ShowIpv6RipDatabase,\\\n ShowIpv6Rip\nfrom 
genie.libs.parser.iosxe.show_protocols import ShowIpProtocolsSectionRip as ShowIpProtocols,\\\n ShowIpv6ProtocolsSectionRip as ShowIpv6Protocols\n# iosxe show_vrf\nfrom genie.libs.parser.iosxe.show_vrf import ShowVrfDetail\n\n\nclass Rip(Base):\n '''Rip Genie Ops Object'''\n\n def keys(self, item):\n if isinstance(item, dict):\n return list(item.keys())\n return []\n\n def learn(self):\n '''Learn Rip object'''\n\n # get vrf list\n self.add_leaf(cmd=ShowVrfDetail,\n src='',\n dest='list_of_vrfs',\n action=self.keys)\n # when show vrf details return nothing\n # initial vrf list\n try:\n self.make()\n except Exception:\n self.list_of_vrfs = []\n\n # incase attribtues are specified that show vrf won't be executed\n if not hasattr(self, 'list_of_vrfs'):\n self.list_of_vrfs = []\n\n # loop for vrfs\n for vrf in self.list_of_vrfs + ['default']:\n\n # skip the vrf when it is mgmt-vrf\n if vrf == 'Mgmt-vrf':\n continue\n\n # create kwargs\n vrf_name = '' if vrf == 'default' else vrf\n\n # Place holder to make it more readable\n ########################################################################\n ##################### Ipv4 #################\n # IPV4\n # vrf\n # af\n # instance\n # originate_default_route N/A\n # default_metric\n # distance\n # triggered_update_threshold N/A\n # maximum_paths\n # output_delay\n # distribute_list N/A\n # redistribute\n # redistribute\n # metric\n # route_policy\n # timers\n # update_interval\n # invalid_interval\n # holddown_interval\n # flush_intervalit\n # interfaces\n # interface\n # authentication N/A\n # bfd N/A\n # cost N/A\n # neighbors N/A\n # no_listen N/A\n # originate_default_route N/A\n # passive\n # split_horizon N/A\n # summary_address\n # metric N/A\n # timers N/A \n # oper_status N/A\n # next_full_update N/A\n # valid_address N/A\n # statistics N/A\n # next_triggered_update N/A\n # num_of_routes N/A\n # neighbors\n # address\n # last_update\n # bad_packets_rcvd N/A\n # bad_routes_rcvd N/A\n # routes\n # prefix\n # index\n # next_hop\n # interface\n # redistributed\n # route_type\n # summary_type\n # metric\n # expire_time N/A\n # deleted N/A\n # holddown N/A\n # need_triggered_update N/A\n # inactive N/A\n # flush_expire_before_holddown N/A\n # statistics N/A\n #\n src_instance = '[vrf][(?P<vrf>.*)][address_family][(?P<address_family>.*)][instance][(?P<instance>.*)]'\n dest_instance = 'info[vrf][(?P<vrf>.*)][address_family][(?P<address_family>.*)][instance][(?P<instance>.*)]'\n\n src_protocol = '[protocols][rip][vrf][(?P<vrf>.*)][address_family][(?P<address_family>.*)][instance][(?P<instance>.*)]'\n\n\n req_key = ['distance','maximum_paths','output_delay']\n for key in req_key:\n self.add_leaf(cmd=ShowIpProtocols,\n src=src_protocol + '[{}]'.format(key),\n dest=dest_instance + '[{}]'.format(key),\n vrf=vrf_name)\n\n self.add_leaf(cmd=ShowIpProtocols,\n src=src_protocol + '[default_redistribution_metric]',\n dest=dest_instance + '[default_metric]',\n vrf=vrf_name)\n\n src_redistribute = src_protocol + '[redistribute][(?P<redistribute>.*)]'\n dest_redistribute = dest_instance + '[redistribute][(?P<redistribute>.*)]'\n\n self.add_leaf(cmd=ShowIpProtocols,\n src=src_redistribute,\n dest=dest_redistribute,\n vrf=vrf_name)\n\n req_key = ['metric', 'route_policy']\n for key in req_key:\n self.add_leaf(cmd=ShowIpProtocols,\n src=src_redistribute + '[{}]'.format(key),\n dest=dest_redistribute + '[{}]'.format(key),\n vrf=vrf_name)\n\n src_interface = src_protocol + '[interfaces][(?P<interface>.*)]'\n dest_interface = dest_instance + 
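# Reader aid (hypothetical helper, not the genie Base API): the src/dest
# strings passed to add_leaf above, e.g.
# '[vrf][(?P<vrf>.*)][address_family][(?P<address_family>.*)]...', are
# bracketed key paths into the parser's nested dict, with (?P<...>) groups
# acting as wildcards. For a concrete, wildcard-free path the lookup is
# plain repeated indexing:
import re

def follow_path(tree, path):
    """Resolve a '[a][b][c]' style path against a nested dict."""
    for key in re.findall(r'\[([^\]]+)\]', path):
        if not isinstance(tree, dict) or key not in tree:
            return None
        tree = tree[key]
    return tree

# e.g. follow_path(parsed, '[vrf][VRF1][address_family][ipv4]') walks the
# same nesting that the regex form matches one VRF/AF at a time.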
'[interfaces][(?P<interface>.*)]'\n\n self.add_leaf(cmd=ShowIpProtocols,\n src=src_interface + '[passive]',\n dest=dest_interface + '[passive]',\n vrf=vrf_name)\n\n self.add_leaf(cmd=ShowIpProtocols,\n src=src_interface + '[summary_address][(?P<summary_address>).*]',\n dest=dest_interface + '[summary_address][(?P<summary_address>).*]',\n vrf=vrf_name)\n\n\n src_route = src_instance + '[routes][(?P<prefix>.*)][index][(?P<index>.*)]'\n dest_route = dest_instance + '[routes][(?P<prefix>.*)][index][(?P<index>.*)]'\n\n req_key = ['summary_type','redistributed','next_hop','metric','interface','route_type']\n for key in req_key:\n self.add_leaf(cmd='show ip rip database vrf {vrf}'.format(vrf=vrf),\n src=src_route + '[{}]'.format(key),\n dest=dest_route + '[{}]'.format(key),\n vrf=vrf_name)\n ###################################################################################\n ##################### Ipv6 #################\n # IPV6\n # vrf\n # af\n # instance\n # originate_default_route\n # enabled\n # route_policy N/A\n # default_metric N/A\n # distance\n # split_horizon\n # poison_reverse\n # triggered_update_threshold N/A\n # maximum_paths\n # output_delay N/A\n # distribute_list N/A\n # redistribute\n # redistribute\n # metric\n # route_policy\n # timers\n # update_interval\n # invalid_interval\n # holddown_interval\n # flush_interval\n # interfaces\n # interface\n # authentication N/A\n # bfd N/A\n # cost N/A\n # neighbors N/A\n # no_listen N/A\n # originate_default_route N/A\n # passive N/A\n # split_horizon\n # summary_address N/A\n # timers\n # update_interval\n # invalid_interval\n # holddown_interval\n # flush_interval\n # oper_status N/A\n # next_full_update N/A\n # valid_address N/A\n # statistics N/A\n # next_triggered_update N/A\n # num_of_routes N/A\n # neighbors\n # address\n # last_update\n # bad_packets_rcvd N/A\n # bad_routes_rcvd N/A\n # routes\n # prefix\n # index\n # next_hop\n # interface\n # redistributed N/A\n # route_type\n # summary_type N/A\n # metric\n # expire_time N/A\n # deleted N/A\n # holddown N/A\n # need_triggered_update N/A\n # inactive N/A\n # flush_expire_before_holddown N/A\n # statistics N/A\n\n\n self.add_leaf(cmd=ShowIpv6Protocols,\n src=src_instance ,\n dest=dest_instance,\n vrf=vrf_name)\n self.make()\n\n src_ipv6_redistribute = src_instance +'[redistribute][(?P<redistribute>.*)]'\n dest_ipv6_redistribute = dest_instance +'[redistribute][(?P<redistribute>.*)]'\n\n req_key = ['metric','route_policy']\n for key in req_key:\n self.add_leaf(cmd=ShowIpv6Protocols,\n src=src_ipv6_redistribute + '[{}]'.format(key),\n dest=dest_ipv6_redistribute + '[{}]'.format(key),\n vrf=vrf_name)\n\n if hasattr(self, 'info'):\n for instance, value in self.info['vrf'][vrf]['address_family']['ipv6']['instance'].items():\n\n src_ipv6 = '[vrf][(?P<vrf>.*)][address_family][(?P<address_family>.*)]'\n\n dest_ipv6 = 'info[vrf][(?P<vrf>.*)][address_family][(?P<address_family>.*)][instance]'\\\n '[{}]'.format(instance)\n\n src_ipv6_timers = src_ipv6 + '[timers]'\n dest_ipv6_timers = dest_ipv6 + '[timers]'\n\n req_key = ['update_interval','invalid_interval','holddown_interval','flush_interval']\n for key in req_key:\n self.add_leaf(cmd='show ipv6 rip vrf {vrf}'.format(vrf=vrf),\n src=src_ipv6_timers + '[{}]'.format(key),\n dest=dest_ipv6_timers + '[{}]'.format(key),\n vrf=vrf_name)\n\n req_key = ['split_horizon','poison_reverse','maximum_paths','distance']\n for key in req_key:\n self.add_leaf(cmd='show ipv6 rip vrf {vrf}'.format(vrf=vrf),\n src=src_ipv6 + '[{}]'.format(key),\n 
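# Illustrative only (function name is an assumption): the per-instance loop
# above issues one 'show ipv6 rip vrf <name>' per VRF, while the vrf kwarg
# is passed as '' for the default VRF. Building that command list in
# isolation would look like:
def build_rip_commands(vrfs):
    cmds = []
    for vrf in vrfs + ['default']:
        if vrf == 'Mgmt-vrf':  # the management VRF is skipped above as well
            continue
        cmds.append('show ipv6 rip vrf {vrf}'.format(vrf=vrf))
    return cmds

# build_rip_commands(['VRF1']) ->
# ['show ipv6 rip vrf VRF1', 'show ipv6 rip vrf default']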
dest=dest_ipv6 + '[{}]'.format(key),\n vrf=vrf_name)\n\n src_interface = src_ipv6 + '[interfaces][(?P<interface>.*)]'\n dest_interface = dest_ipv6 + '[interfaces][(?P<interface>.*)]'\n\n self.add_leaf(cmd='show ipv6 rip vrf {vrf}'.format(vrf=vrf),\n src=src_interface,\n dest=dest_interface,\n vrf=vrf_name)\n\n self.add_leaf(cmd='show ipv6 rip vrf {vrf}'.format(vrf=vrf),\n src=src_ipv6 + '[originate_default_route]',\n dest=dest_ipv6 + '[originate_default_route]',\n vrf=vrf_name)\n\n src_ipv6_route = src_ipv6 + '[routes][(?P<prefix>.*)][index][(?P<index>.*)]'\n dest_ipv6_route = dest_ipv6 + '[routes][(?P<prefix>.*)][index][(?P<index>.*)]'\n\n req_key = ['expire_time','next_hop','metric','interface','route_type']\n for key in req_key:\n self.add_leaf(cmd='show ipv6 rip vrf {vrf} database'.format(vrf=vrf),\n src=src_ipv6_route + '[{}]'.format(key),\n dest=dest_ipv6_route + '[{}]'.format(key),\n vrf=vrf_name)\n\n del self.list_of_vrfs\n self.make(final_call=True)\n", "id": "1893891", "language": "Python", "matching_score": 6.888490676879883, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/rip/iosxe/rip.py" }, { "content": "\"\"\"\nRip Genie Ops Object for IOSXR - CLI\n\"\"\"\n# Genie\nfrom genie.ops.base import Base\n\n# IOSXR Parsers\nfrom genie.libs.parser.iosxr.show_rip import ShowRip, \\\n ShowRipStatistics, \\\n ShowRipDatabase, \\\n ShowRipInterface\nfrom genie.libs.parser.iosxr.show_vrf import ShowVrfAllDetail\n\n\nclass Rip(Base):\n \"\"\"Rip Genie Ops Object\"\"\"\n\n def keys(self, item):\n if isinstance(item, dict):\n return list(item.keys())\n\n def check_metric(self, item):\n try:\n return int(item)\n except ValueError:\n pass\n\n def lower_case(self, item):\n return item.lower()\n\n def learn(self):\n \"\"\"Learn Rip Object\"\"\"\n\n # get vrf list\n self.add_leaf(cmd=ShowVrfAllDetail,\n src='',\n dest='list_of_vrfs',\n action=self.keys)\n\n # If show vrf returns nothing make initial vrf list\n try:\n self.make()\n except Exception:\n self.list_of_vrfs = []\n\n # incase attributes are specified that show vrf won't be executed\n if not hasattr(self, 'list_of_vrfs'):\n self.list_of_vrfs = []\n\n for vrf in self.list_of_vrfs + ['default']:\n vrf_name = '' if vrf == 'default' else vrf\n\n ###################################################################\n ###################################################################\n # vrf\n # af\n # instance\n # originate_default_route N/A\n # default_metric\n # split_horizon N/A\n # poison_reverse N/A\n # distance N/A\n # triggered_update_threshold N/A\n # maximum_paths \n # output_delay N/A\n # distribute_list N/A\n # redistribute N/A\n # timers\n # update_interval\n # invalid_interval\n # holddown_interval\n # flush_interval\n # interfaces\n # interface\n # authentication\n # auth_key_chain\n # key_chain\n # auth_key\n # key N/A\n # crypto_algorithm\n # bfd N/A\n # cost\n # neighbors\n # address\n # address\n # no_listen N/A\n # originate_default_route N/A\n # passive\n # split_horizon\n # poison_reverse\n # summary_address N/A\n # timers N/A\n # oper_status\n # next_full_update N/A\n # valid_address N/A\n # statistics N/A\n # next_triggered_update N/A\n # num_of_routes \n # neighbors N/A\n # routes\n # route\n # index\n # next_hop\n # interface\n # redistributed\n # route_type\n # summary_type\n # metric\n # expire_time N/A\n # deleted N/A\n # holddown N/A\n # need_triggered_update N/A\n # inactive\n # flush_expire_before_holddown N/A\n # statistics N/A\n\n src_instance = '[vrf][(?P<vrf>.*)][address_family] \\\n 
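# Sketch of the action-callback idea used by the IOSXR class above (assumed
# semantics: the value picked from src is passed through `action` before
# being stored at dest; apply_action itself is a standalone mimic, not
# Base.add_leaf):
def check_metric(item):
    """Same shape as the method above: int() the metric, else return None."""
    try:
        return int(item)
    except (TypeError, ValueError):
        return None

def apply_action(value, action=None):
    return action(value) if action else value

assert apply_action('3', check_metric) == 3
assert apply_action('Not set', check_metric) is None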
[(?P<address_family>.*)][instance][(?P<instance>.*)]'\n dest_instance = 'info[vrf][(?P<vrf>.*)][address_family] \\\n [(?P<address_family>.*)][instance][(?P<instance>.*)]'\n self.add_leaf(cmd=ShowRip,\n src=src_instance + '[default_metric]',\n dest=dest_instance + '[default_metric]',\n vrf=vrf_name,\n action=self.check_metric)\n self.add_leaf(cmd=ShowRip,\n src=src_instance + '[maximum_paths]',\n dest=dest_instance + '[maximum_paths]',\n vrf=vrf_name)\n \n self.make()\n if hasattr(self, 'info'):\n try:\n if self.info['vrf'][vrf]['address_family']['ipv4']['instance']['rip']['default_metric'] is None:\n self.info['vrf'][vrf]['address_family']['ipv4']['instance']['rip'].pop('default_metric', None)\n except KeyError:\n pass\n\n src_instance_timers = src_instance + '[timers]'\n dest_instance_timers = dest_instance + '[timers]'\n req_key = ['[flush_interval]', '[holddown_interval]', '[invalid_interval]',\n '[update_interval]']\n for key in req_key:\n self.add_leaf(cmd=ShowRip,\n src=src_instance_timers + key,\n dest=dest_instance_timers + key,\n vrf=vrf_name)\n\n src_statistics = src_instance + '[statistics][routes_allocated]'\n dest_statistics = dest_instance + '[num_of_routes]'\n self.add_leaf(cmd=ShowRipStatistics,\n src=src_statistics,\n dest=dest_statistics,\n vrf=vrf_name)\n\n src_database = src_instance + '[routes][(?P<route>.*)][index][(?P<index>.*)]'\n dest_database = dest_instance + '[routes][(?P<route>.*)][index][(?P<index>.*)]'\n req_key = ['[route_type]', '[metric]', '[interface]', '[next_hop]',\n '[redistributed]', '[summary_type]', '[inactive]']\n for key in req_key:\n self.add_leaf(cmd=ShowRipDatabase,\n src=src_database + key,\n dest=dest_database + key,\n vrf=vrf_name)\n\n src_interface = src_instance + '[interfaces][(?P<interface>.*)]'\n dest_interface = dest_instance + '[interfaces][(?P<interface>.*)]'\n req_key = ['[cost]', '[passive]', '[split_horizon]', '[poison_reverse]']\n for key in req_key:\n self.add_leaf(cmd=ShowRipInterface,\n src=src_interface + key,\n dest=dest_interface + key,\n vrf=vrf_name)\n self.add_leaf(cmd=ShowRipInterface,\n src=src_interface + '[oper_status]',\n dest=dest_interface + '[oper_status]',\n vrf=vrf_name,\n action=self.lower_case)\n\n src_authentication = src_interface + '[authentication]'\n dest_authentication = dest_interface + '[authentication]'\n req_key = ['[auth_key_chain][key_chain]',\n '[auth_key][crypto_algorithm]']\n for key in req_key:\n self.add_leaf(cmd=ShowRipInterface,\n src=src_authentication + key,\n dest=dest_authentication + key,\n vrf=vrf_name)\n\n src_neighbors = src_interface + '[neighbors][(?P<neighbor>.*)][address]'\n dest_neighbors = dest_interface + '[neighbors][(?P<neighbor>.*)][address]'\n self.add_leaf(cmd=ShowRipInterface,\n src=src_neighbors,\n dest=dest_neighbors,\n vrf=vrf_name)\n\n del self.list_of_vrfs\n self.make(final_call=True)\n", "id": "1050382", "language": "Python", "matching_score": 3.468998670578003, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/rip/iosxr/rip.py" }, { "content": "# Genie package\nfrom genie.libs.ops.static_routing.static_routing import StaticRouting as SuperStaticRouting\nfrom genie.ops.base import Base\n# genie.libs\nfrom genie.libs.parser.iosxe.show_static_routing import ShowIpStaticRoute,\\\n ShowIpv6StaticDetail\n# iosxe show_vrf\nfrom genie.libs.parser.iosxe.show_vrf import ShowVrfDetail\n\nclass StaticRouting(SuperStaticRouting):\n '''StaticRouting Ops Object'''\n\n def keys(self, item):\n if isinstance(item, dict):\n return list(item.keys())\n return []\n\n def 
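# Illustrative generalization (hypothetical helper, not library code): the
# try/except above prunes a 'default_metric' leaf whose action produced
# None. The same "pop if None" cleanup over an arbitrary key path:
def pop_if_none(tree, *keys):
    """Descend `keys` in a nested dict; drop the last key if its value is None."""
    node = tree
    for key in keys[:-1]:
        node = node.get(key, {})
        if not isinstance(node, dict):
            return
    if node.get(keys[-1], object()) is None:
        node.pop(keys[-1], None)

info = {'instance': {'rip': {'default_metric': None}}}
pop_if_none(info, 'instance', 'rip', 'default_metric')
assert 'default_metric' not in info['instance']['rip']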
learn(self):\n '''Learn StaticRouting object'''\n\n # get vrf list\n self.add_leaf(cmd=ShowVrfDetail,\n src='',\n dest='list_of_vrfs',\n action=self.keys)\n # when show vrf details return nothing\n # initial vrf list\n try:\n self.make()\n except Exception:\n self.list_of_vrfs = []\n\n # incase attribtues are specified that show vrf won't be executed\n if not hasattr(self, 'list_of_vrfs'):\n self.list_of_vrfs = []\n\n # loop for vrfs\n for vrf in self.list_of_vrfs + ['default']:\n\n # skip the vrf when it is mgmt-vrf\n if vrf == 'Mgmt-vrf':\n continue\n\n # create kwargs\n vrf_name = '' if vrf == 'default' else vrf\n\n # vrf\n # af\n # route\n # next_hop\n # outgoing_interface\n # next_hop_vrf N/A\n # tag N/A\n # track N/A\n # next_hop_list\n # next_hop_vrf N/A\n # tag N/A\n # track N/A\n\n # new StaticRouting structure\n # Place holder to make it more readable\n\n ##############################################\n #### Ipv4 ##########\n\n src_static_routing_route = '[vrf][(?P<vrf>.*)][address_family][(?P<af>.*)]' \\\n '[routes][(?P<route>.*)]'\n dest_static_routing_route = 'info' + src_static_routing_route\n\n self.add_leaf(cmd='show ip static route vrf {vrf}'.format(vrf=vrf),\n src=src_static_routing_route + '[route]',\n dest=dest_static_routing_route + '[route]',\n vrf=vrf_name)\n\n src_static_routing_intf = src_static_routing_route +'[next_hop][outgoing_interface][(?P<intf>.*)]'\n dest_static_routing_intf = 'info' + src_static_routing_intf\n\n\n req_key =['outgoing_interface','active','preference']\n for key in req_key:\n self.add_leaf(cmd='show ip static route vrf {vrf}'.format(vrf=vrf),\n src=src_static_routing_intf + '[{}]'.format(key),\n dest=dest_static_routing_intf + '[{}]'.format(key),\n vrf=vrf_name)\n\n\n src_static_routing_hop = src_static_routing_route +'[next_hop][next_hop_list][(?P<index>.*)]'\n dest_static_routing_hop = 'info' + src_static_routing_hop\n\n req_key = ['index', 'active', 'next_hop', 'outgoing_interface', 'preference']\n for key in req_key:\n self.add_leaf(cmd='show ip static route vrf {vrf}'.format(vrf=vrf),\n src=src_static_routing_hop + '[{}]'.format(key),\n dest=dest_static_routing_hop + '[{}]'.format(key),\n vrf=vrf_name)\n\n\n ##############################################\n #### Ipv6 ##########\n\n self.add_leaf(cmd='show ipv6 static vrf {vrf} detail'.format(vrf=vrf),\n src=src_static_routing_route + '[route]',\n dest=dest_static_routing_route + '[route]',\n vrf = vrf_name)\n\n req_key = ['outgoing_interface', 'active', 'preference']\n for key in req_key:\n self.add_leaf(cmd='show ipv6 static vrf {vrf} detail'.format(vrf=vrf),\n src=src_static_routing_intf + '[{}]'.format(key),\n dest=dest_static_routing_intf + '[{}]'.format(key),\n vrf=vrf_name)\n\n\n req_key = ['index', 'active', 'next_hop', 'outgoing_interface', 'preference']\n for key in req_key:\n self.add_leaf(cmd='show ipv6 static vrf {vrf} detail'.format(vrf=vrf),\n src=src_static_routing_hop + '[{}]'.format(key),\n dest=dest_static_routing_hop + '[{}]'.format(key),\n vrf=vrf_name)\n\n del self.list_of_vrfs\n self.make(final_call=True)\n\nclass StaticRoute(Base):\n # Keeping it for backward compatibility\n pass", "id": "12706913", "language": "Python", "matching_score": 5.189651966094971, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/static_routing/iosxe/static_routing.py" }, { "content": "# Genie package\nfrom genie.libs.ops.static_routing.static_routing import StaticRouting as SuperStaticRouting\nfrom genie.ops.base import Base\n# genie.libs\nfrom 
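# Reader aid (standalone mimic, not the ops framework): each req_key loop
# above is effectively a bulk copy of selected leaves from a parsed entry
# into the info structure. As a plain-dict operation:
def copy_leaves(src_entry, keys):
    """Keep only the requested keys that exist in a parsed entry."""
    return {k: src_entry[k] for k in keys if k in src_entry}

hop = {'index': 1, 'active': True, 'next_hop': '10.1.2.2',
       'outgoing_interface': 'GigabitEthernet0/0', 'preference': 1,
       'extra': 'ignored'}
assert 'extra' not in copy_leaves(
    hop, ['index', 'active', 'next_hop', 'outgoing_interface', 'preference'])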
genie.libs.parser.iosxr.show_static_routing import ShowStaticTopologyDetail\n\nclass StaticRouting(SuperStaticRouting):\n '''StaticRouting Ops Object'''\n\n def learn(self):\n '''Learn StaticRouting object'''\n\n # vrf\n # af\n # route\n # next_hop\n # outgoing_interface\n # next_hop_vrf N/A\n # tag N/A\n # track N/A\n # next_hop_list\n # next_hop_vrf N/A\n # outgoing_interface N/A\n # tag N/A\n # track N/A\n\n # new StaticRouting structure\n # Place holder to make it more readable\n\n src_static_routing_route = '[vrf][(?P<vrf>.*)][address_family][(?P<af>.*)]' \\\n '[routes][(?P<route>.*)]'\n dest_static_routing_route = 'info' + src_static_routing_route\n\n self.add_leaf(cmd=ShowStaticTopologyDetail,\n src=src_static_routing_route + '[route]',\n dest=dest_static_routing_route + '[route]'\n )\n\n src_static_routing_intf = src_static_routing_route +'[next_hop][outgoing_interface][(?P<intf>.*)]'\n dest_static_routing_intf = 'info' + src_static_routing_intf\n\n\n req_key =['outgoing_interface','active','preference']\n for key in req_key:\n self.add_leaf(cmd=ShowStaticTopologyDetail,\n src=src_static_routing_intf + '[{}]'.format(key),\n dest=dest_static_routing_intf + '[{}]'.format(key))\n\n\n src_static_routing_hop = src_static_routing_route +'[next_hop][next_hop_list][(?P<index>.*)]'\n dest_static_routing_hop = 'info' + src_static_routing_hop\n\n req_key = ['index', 'active', 'next_hop','preference']\n for key in req_key:\n self.add_leaf(cmd=ShowStaticTopologyDetail,\n src=src_static_routing_hop + '[{}]'.format(key),\n dest=dest_static_routing_hop + '[{}]'.format(key))\n\n self.make(final_call=True)\n\nclass StaticRoute(Base):\n # Keeping it for backward compatibility\n pass\n", "id": "11207097", "language": "Python", "matching_score": 3.627229690551758, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/static_routing/iosxr/static_routing.py" }, { "content": "# super class\nfrom genie.libs.ops.routing.routing import Routing as SuperRouting\n\n# genie.libs\nfrom genie.libs.parser.iosxe.show_vrf import ShowVrfDetail\n\nclass Routing(SuperRouting):\n '''Routing Ops Object'''\n\n def keys(self, item):\n if isinstance(item, dict):\n return list(item.keys())\n return []\n\n def learn(self):\n '''Learn Routing object'''\n \n # get vrf list\n self.add_leaf(cmd=ShowVrfDetail,\n src='',\n dest='list_of_vrfs',\n action=self.keys)\n # when show vrf details return nothing\n # initial vrf list\n try:\n self.make()\n except Exception:\n self.list_of_vrfs = []\n\n # incase attribtues are specified that show vrf won't be executed\n if not hasattr(self, 'list_of_vrfs'):\n self.list_of_vrfs = []\n\n # loop for vrfs\n for vrf in self.list_of_vrfs + ['default']:\n\n # skip the vrf when it is mgmt-vrf\n if vrf == 'Mgmt-vrf':\n continue\n\n # create kwargs\n vrf_name = '' if vrf == 'default' else vrf\n\n # vrf\n # af\n # route\n # route\n # active\n # source_protocol\n # metric\n # route_preference\n # source_protocol_codes\n # last_updated N/A\n # next_hop\n # outgoing_interface\n # outgoing_interface\n # next_hop_list\n # next_hop\n # updated\n # index\n # outgoing_interface\n # special_next_hop N/A\n # special_next_hop N/A\n #\n # routing structure\n # Place holder to make it more readable\n\n ##############################################\n #### Ipv4 ##########\n \n src_routing_route = '[vrf][(?P<vrf>.*)][address_family][(?P<af>.*)]' \\\n '[routes][(?P<route>.*)]'\n dest_routing_route = 'info' + src_routing_route\n\n req_key = ['route', 'active', 'route_preference', 
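# The empty StaticRoute(Base) classes above exist purely as
# backward-compatible aliases. A hypothetical variant (not present in the
# source) could emit a deprecation warning when the old name is used:
import warnings

class StaticRouting:  # stand-in for the real ops class
    pass

class StaticRoute(StaticRouting):
    def __init__(self, *args, **kwargs):
        warnings.warn('StaticRoute is deprecated; use StaticRouting',
                      DeprecationWarning, stacklevel=2)
        super().__init__(*args, **kwargs)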
'metric','source_protocol','source_protocol_codes']\n for key in req_key:\n self.add_leaf(cmd='show ip route vrf {vrf}'.format(vrf=vrf),\n src=src_routing_route + '[{}]'.format(key),\n dest=dest_routing_route + '[{}]'.format(key),\n vrf=vrf_name)\n\n src_routing_intf = src_routing_route +'[next_hop][outgoing_interface][(?P<intf>.*)]'\n dest_routing_intf = 'info' + src_routing_intf\n\n self.add_leaf(cmd='show ip route vrf {vrf}'.format(vrf=vrf),\n src=src_routing_intf + '[outgoing_interface]',\n dest=dest_routing_intf + '[outgoing_interface]',\n vrf=vrf_name)\n\n\n src_routing_hop = src_routing_route +'[next_hop][next_hop_list][(?P<index>.*)]'\n dest_routing_hop = 'info' + src_routing_hop\n\n req_key = ['index', 'next_hop','outgoing_interface', 'updated']\n for key in req_key:\n self.add_leaf(cmd='show ip route vrf {vrf}'.format(vrf=vrf),\n src=src_routing_hop + '[{}]'.format(key),\n dest=dest_routing_hop + '[{}]'.format(key),\n vrf=vrf_name)\n\n\n ##############################################\n #### Ipv6 ##########\n\n self.add_leaf(cmd='show ipv6 route vrf {vrf} updated'.format(vrf=vrf),\n src='[ipv6_unicast_routing_enabled]',\n dest='info[ipv6_unicast_routing_enabled]',\n vrf=vrf_name)\n\n req_key = ['route', 'active', 'route_preference', 'metric', 'source_protocol', 'source_protocol_codes']\n for key in req_key:\n self.add_leaf(cmd='show ipv6 route vrf {vrf} updated'.format(vrf=vrf),\n src=src_routing_route + '[{}]'.format(key),\n dest=dest_routing_route + '[{}]'.format(key),\n vrf=vrf_name)\n\n\n self.add_leaf(cmd='show ipv6 route vrf {vrf} updated'.format(vrf=vrf),\n src=src_routing_intf + '[outgoing_interface]',\n dest=dest_routing_intf + '[outgoing_interface]',\n vrf=vrf_name)\n\n\n req_key = ['index', 'next_hop', 'updated', 'outgoing_interface']\n for key in req_key:\n self.add_leaf(cmd='show ipv6 route vrf {vrf} updated'.format(vrf=vrf),\n src=src_routing_hop + '[{}]'.format(key),\n dest=dest_routing_hop + '[{}]'.format(key),\n vrf=vrf_name)\n \n # delete the list_of_vrfs in the info table\n del self.list_of_vrfs\n self.make(final_call=True)\n", "id": "11654484", "language": "Python", "matching_score": 5.0215744972229, "max_stars_count": 0, "path": "pkgs/ops-pkg/src/genie/libs/ops/routing/iosxe/routing.py" }, { "content": "# super class\nfrom genie.libs.ops.routing.routing import Routing as SuperRouting\n\n# genie.libs\nfrom genie.libs.parser.nxos.show_routing import ShowIpRoute,\\\n ShowIpv6Route\n\nclass Routing(SuperRouting):\n '''Routing Ops Object'''\n\n def learn(self, address_family=None, route=None, protocol=None, interface=None, vrf=None):\n '''Learn Routing object'''\n # vrf\n # af\n # route\n # route\n # active\n # source_protocol\n # metric\n # route_preference\n # source_protocol_codes N/A\n # last_updated N/A\n # next_hop\n # outgoing_interface\n # outgoing_interface\n # next_hop_list\n # next_hop\n # updated\n # index\n # outgoing_interface\n # special_next_hop N/A\n # special_next_hop N/A\n #\n # routing structure\n # Place holder to make it more readable\n\n kwargs = {k: v for k, v in locals().items() if v}\n [kwargs.pop(x, None) for x in ['address_family', 'self']]\n \n if not address_family or address_family == 'ipv4':\n ##############################################\n #### Ipv4 ##########\n \n src_routing_route = '[vrf][{vrf}][address_family][(?P<af>.*)]' \\\n '[routes][(?P<route>.*)]'.format(\n vrf=vrf if vrf else '(?P<vrf>.*)',\n )\n\n dest_routing_route = 'info' + src_routing_route\n \n req_key = ['route', 'active', 
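# Sketch of the kwargs-filtering idiom at the top of the NXOS learn()
# above: keep only caller-supplied (truthy) arguments and drop the ones
# that must not be forwarded to add_leaf. Standalone mimic:
def collect_kwargs(**params):
    kwargs = {k: v for k, v in params.items() if v}
    for name in ('address_family', 'self'):
        kwargs.pop(name, None)
    return kwargs

assert collect_kwargs(address_family='ipv4', vrf='VRF1', route=None) == \
    {'vrf': 'VRF1'}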
'source_protocol','metric','route_preference']\n \n kwargs.update({'cmd': ShowIpRoute})\n kwargs.update({'vrf': 'all' if not vrf else vrf})\n \n for key in req_key:\n kwargs.update({'src': src_routing_route + '[{}]'.format(key)})\n kwargs.update({'dest': dest_routing_route + '[{}]'.format(key)})\n self.add_leaf(**kwargs)\n\n src_routing_intf = src_routing_route +'[next_hop][outgoing_interface][(?P<intf>.*)]'\n dest_routing_intf = 'info' + src_routing_intf\n\n kwargs.update({'src': src_routing_intf + '[outgoing_interface]'})\n kwargs.update({'dest': dest_routing_intf + '[outgoing_interface]'})\n\n self.add_leaf(**kwargs)\n\n src_routing_hop = src_routing_route +'[next_hop][next_hop_list][(?P<index>.*)]'\n dest_routing_hop = 'info' + src_routing_hop\n\n req_key = ['index', 'next_hop','updated','outgoing_interface']\n for key in req_key:\n kwargs.update({'src': src_routing_hop + '[{}]'.format(key)})\n kwargs.update({'dest': dest_routing_hop + '[{}]'.format(key)})\n self.add_leaf(**kwargs)\n \n if not address_family or address_family == 'ipv6':\n ##############################################\n #### Ipv6 ##########\n kwargs.update({'cmd': ShowIpv6Route})\n src_routing_route_v6 = '[vrf][{vrf}][address_family][(?P<af>.*)]' \\\n '[routes][(?P<route>.*)]'.format(\n vrf=vrf if vrf else '(?P<vrf>.*)',\n )\n dest_routing_route_v6 = 'info' + src_routing_route_v6\n\n req_key = ['route', 'active', 'route_preference', 'metric', 'source_protocol']\n for key in req_key:\n kwargs.update({'src': src_routing_route_v6 + '[{}]'.format(key)})\n kwargs.update({'dest': dest_routing_route_v6 + '[{}]'.format(key)})\n self.add_leaf(**kwargs)\n\n src_routing_intf_v6 = src_routing_route_v6 +'[next_hop][outgoing_interface][(?P<intf>.*)]'\n dest_routing_intf_v6 = 'info' + src_routing_intf_v6\n\n kwargs.update({'src': src_routing_intf_v6 + '[outgoing_interface]'})\n kwargs.update({'dest': dest_routing_intf_v6 + '[outgoing_interface]'})\n self.add_leaf(**kwargs)\n\n src_routing_hop_v6 = src_routing_route_v6 +'[next_hop][next_hop_list][(?P<index>.*)]'\n dest_routing_hop_v6 = 'info' + src_routing_hop_v6\n\n req_key = ['index', 'next_hop', 'updated','outgoing_interface']\n for key in req_key:\n kwargs.update({'src': src_routing_hop_v6 + '[{}]'.format(key)})\n kwargs.update({'dest': dest_routing_hop_v6 + '[{}]'.format(key)})\n self.add_leaf(**kwargs)\n\n self.make(final_call=True)", "id": "3443254", "language": "Python", "matching_score": 3.4914958477020264, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/routing/nxos/routing.py" }, { "content": "'''\n Route Genie Ops Object Outputs for IOSXE.\n'''\n\nclass RouteOutput(object):\n \"\"\"show ip route output \"\"\"\n\n ShowVrfDetail = {\n \"Mgmt-vrf\": {\n \"vrf_id\": 1,\n \"interfaces\": [\n \"GigabitEthernet0/0\"\n ],\n \"address_family\": {\n \"ipv4 unicast\": {\n \"table_id\": \"0x1\",\n \"flags\": \"0x0\",\n \"vrf_label\": {\n 'allocation_mode': 'per-prefix'\n }\n },\n \"ipv6 unicast\": {\n \"table_id\": \"0x1E000001\",\n \"flags\": \"0x0\",\n \"vrf_label\": {\n 'allocation_mode': 'per-prefix'\n }\n }\n },\n \"flags\": \"0x1808\"\n },\n \"VRF1\": {\n \"interfaces\": [\n \"GigabitEthernet0/0\"\n ],\n \"address_family\": {\n \"ipv4 unicast\": {\n \"export_to_global\": {\n \"export_to_global_map\": \"export_to_global_map\",\n \"prefix_limit\": 1000\n },\n \"import_from_global\": {\n \"prefix_limit\": 1000,\n \"import_from_global_map\": \"import_from_global_map\"\n },\n \"table_id\": \"0x1\",\n \"routing_table_limit\": {\n \"routing_table_limit_action\": {\n 
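# Illustration (hypothetical function) of the dynamic source-path trick in
# the NXOS learn() above: when the caller pins a VRF the path is literal;
# otherwise a named regex group matches every VRF in the parsed output.
def routing_src(vrf=None):
    return ('[vrf][{vrf}][address_family][(?P<af>.*)]'
            '[routes][(?P<route>.*)]').format(vrf=vrf if vrf else '(?P<vrf>.*)')

assert '(?P<vrf>.*)' in routing_src()
assert routing_src('VRF1').startswith('[vrf][VRF1]')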
\"enable_alert_limit_number\": {\n \"alert_limit_number\": 10000\n }\n }\n },\n \"route_targets\": {\n \"200:1\": {\n \"rt_type\": \"both\",\n \"route_target\": \"200:1\"\n },\n \"100:1\": {\n \"rt_type\": \"both\",\n \"route_target\": \"100:1\"\n }\n },\n \"flags\": \"0x2100\",\n \"vrf_label\": {\n 'allocation_mode': 'per-prefix'\n }\n },\n \"ipv6 unicast\": {\n \"export_to_global\": {\n \"export_to_global_map\": \"export_to_global_map\",\n \"prefix_limit\": 1000\n },\n \"table_id\": \"0x1E000001\",\n \"routing_table_limit\": {\n \"routing_table_limit_action\": {\n \"enable_alert_percent\": {\n \"alert_percent_value\": 70\n },\n \"enable_alert_limit_number\": {\n \"alert_limit_number\": 7000\n }\n },\n \"routing_table_limit_number\": 10000\n },\n \"route_targets\": {\n \"200:1\": {\n \"rt_type\": \"import\",\n \"route_target\": \"200:1\"\n },\n \"400:1\": {\n \"rt_type\": \"import\",\n \"route_target\": \"400:1\"\n },\n \"300:1\": {\n \"rt_type\": \"export\",\n \"route_target\": \"300:1\"\n },\n \"100:1\": {\n \"rt_type\": \"export\",\n \"route_target\": \"100:1\"\n }\n },\n \"flags\": \"0x100\",\n \"vrf_label\": {\n 'allocation_mode': 'per-prefix'\n }\n }\n },\n \"flags\": \"0x180C\",\n \"route_distinguisher\": \"100:1\",\n \"vrf_id\": 1\n }\n }\n\n showIpRoute_default = '''\\\n R1_iosv#show ip route\n Codes: L - local, C - connected, S - static, R - RIP, M - mobile, B - BGP\n D - EIGRP, EX - EIGRP external, O - OSPF, IA - OSPF inter area\n N1 - OSPF NSSA external type 1, N2 - OSPF NSSA external type 2\n E1 - OSPF external type 1, E2 - OSPF external type 2\n i - IS-IS, su - IS-IS summary, L1 - IS-IS level-1, L2 - IS-IS level-2\n ia - IS-IS inter area, * - candidate default, U - per-user static route\n o - ODR, P - periodic downloaded static route, H - NHRP, l - LISP\n a - application route\n + - replicated route, % - next hop override\n\n Gateway of last resort is not set\n\n 10.1.0.0/32 is subnetted, 1 subnets\n C 10.4.1.1 is directly connected, Loopback0\n 10.4.0.0/32 is subnetted, 1 subnets\n S 10.16.2.2 [1/0] via 10.186.2.2, GigabitEthernet0/1\n [1/0] via 10.1.2.2, GigabitEthernet0/0\n 10.9.0.0/32 is subnetted, 1 subnets\n S 10.36.3.3 is directly connected, GigabitEthernet0/3\n is directly connected, GigabitEthernet0/2\n 10.0.0.0/8 is variably subnetted, 5 subnets, 2 masks\n C 10.1.2.0/24 is directly connected, GigabitEthernet0/0\n L 10.1.2.1/32 is directly connected, GigabitEthernet0/0\n C 10.1.3.0/24 is directly connected, GigabitEthernet0/2\n L 10.1.3.1/32 is directly connected, GigabitEthernet0/2\n O 10.2.3.0/24 [110/2] via 10.186.2.2, 06:46:59, GigabitEthernet0/1\n [110/2] via 10.1.2.2, 06:46:59, GigabitEthernet0/0\n 10.229.0.0/32 is subnetted, 1 subnets\n i L1 10.151.22.22 [115/20] via 10.186.2.2, 06:47:04, GigabitEthernet0/1\n [115/20] via 10.1.2.2, 06:47:04, GigabitEthernet0/0\n 10.4.0.0/32 is subnetted, 1 subnets\n B 10.16.32.32 [200/0] via 10.66.12.12, 1d00h\n '''\n showIpRoute_VRF1 = '''\\\n PE1#sh ip route vrf VRF1\n Routing Table: VRF1\n Codes: L - local, C - connected, S - static, R - RIP, M - mobile, B - BGP\n D - EIGRP, EX - EIGRP external, O - OSPF, IA - OSPF inter area\n N1 - OSPF NSSA external type 1, N2 - OSPF NSSA external type 2\n E1 - OSPF external type 1, E2 - OSPF external type 2\n i - IS-IS, su - IS-IS summary, L1 - IS-IS level-1, L2 - IS-IS level-2\n ia - IS-IS inter area, * - candidate default, U - per-user static route\n o - ODR, P - periodic downloaded static route, H - NHRP, l - LISP\n a - application route\n + - replicated route, % - next hop override, 
p - overrides from PfR\n\n Gateway of last resort is not set\n\n 10.0.0.0/24 is subnetted, 50 subnets\n O 10.0.0.0 [110/1] via 10.81.1.2, 01:02:20, GigabitEthernet0/0/2.100\n O 10.0.1.0 [110/1] via 10.81.1.2, 01:02:20, GigabitEthernet0/0/2.100\n O 10.0.2.0 [110/1] via 10.81.1.2, 01:02:20, GigabitEthernet0/0/2.100\n 10.145.0.0/24 is subnetted, 50 subnets\n B 10.145.0.0 [200/1] via 192.168.51.1, 01:01:10\n B 10.145.1.0 [200/1] via 192.168.51.1, 01:01:10\n B 10.145.2.0 [200/1] via 192.168.51.1, 01:01:10\n 10.81.0.0/8 is variably subnetted, 2 subnets, 2 masks\n C 10.81.1.0/24 is directly connected, GigabitEthernet0/0/2.100\n L 10.81.1.1/32 is directly connected, GigabitEthernet0/0/2.100\n B 192.168.4.0/24 [200/0] via 192.168.51.1, 01:01:10\n\n '''\n showIpv6RouteUpdated_default = '''\\\n R1_iosv#show ipv6 route updated\n IPv6 Routing Table - default - 23 entries\n Codes: C - Connected, L - Local, S - Static, U - Per-user Static route\n B - BGP, HA - Home Agent, MR - Mobile Router, R - RIP\n H - NHRP, I1 - ISIS L1, I2 - ISIS L2, IA - ISIS interarea\n IS - ISIS summary, D - EIGRP, EX - EIGRP external, NM - NEMO\n ND - ND Default, NDp - ND Prefix, DCE - Destination, NDr - Redirect\n O - OSPF Intra, OI - OSPF Inter, OE1 - OSPF ext 1, OE2 - OSPF ext 2\n ON1 - OSPF NSSA ext 1, ON2 - OSPF NSSA ext 2, la - LISP alt\n lr - LISP site-registrations, ld - LISP dyn-eid, a - Application\n LC 2001:1:1:1::1/128 [0/0]\n via Loopback0, receive\n Last updated 22:55:51 04 December 2017\n S 2001:2:2:2::2/128 [1/0]\n via 2001:10:1:2::2, GigabitEthernet0/0\n Last updated 22:57:07 04 December 2017\n via 2001:20:1:2::2, GigabitEthernet0/1\n Last updated 22:57:23 04 December 2017\n S 2001:3:3:3::3/128 [1/0]\n via GigabitEthernet0/2, directly connected\n Last updated 22:57:34 04 December 2017\n via GigabitEthernet0/3, directly connected\n Last updated 22:57:43 04 December 2017\n B 2001:db8:400:1::/64 [200/1]\n via 192.168.51.1%default, indirectly connected\n Last updated 09:43:27 06 December 2017\n '''\n\n showIpv6RouteUpdated_VRF1 = '''\\\n IPv6 Routing Table - VRF1 - 104 entries\n Codes: C - Connected, L - Local, S - Static, U - Per-user Static route\n B - BGP, R - RIP, H - NHRP, I1 - ISIS L1\n I2 - ISIS L2, IA - ISIS interarea, IS - ISIS summary, D - EIGRP\n EX - EIGRP external, ND - ND Default, NDp - ND Prefix, DCE - Destination\n NDr - Redirect, O - OSPF Intra, OI - OSPF Inter, OE1 - OSPF ext 1\n OE2 - OSPF ext 2, ON1 - OSPF NSSA ext 1, ON2 - OSPF NSSA ext 2\n la - LISP alt, lr - LISP site-registrations, ld - LISP dyn-eid\n a - Application\n O 2001:db8:100::/64 [110/1]\n via FE80::211:1FF:FE00:1, GigabitEthernet0/0/2.100\n Last updated 09:42:39 06 December 2017\n O 2001:db8:100:1::/64 [110/1]\n via FE80::211:1FF:FE00:1, GigabitEthernet0/0/2.100\n Last updated 09:42:39 06 December 2017\n O 2001:db8:100:4::/64 [110/1]\n via FE80::211:1FF:FE00:1, GigabitEthernet0/0/2.100\n Last updated 09:42:39 06 December 2017\n '''\n\n\n routeOpsOutput = {\n \"ipv6_unicast_routing_enabled\": True,\n \"vrf\": {\n \"default\": {\n \"address_family\": {\n \"ipv6\": {\n \"routes\": {\n \"2001:db8:400:1::/64\": {\n \"active\": True,\n \"metric\": 1,\n \"source_protocol\": \"bgp\",\n \"source_protocol_codes\": \"B\",\n \"next_hop\": {\n \"next_hop_list\": {\n 1: {\n \"index\": 1,\n \"next_hop\": \"192.168.51.1\",\n \"updated\": \"09:43:27 06 December 2017\"\n }\n }\n },\n \"route_preference\": 200,\n \"route\": \"2001:db8:400:1::/64\"\n },\n \"2001:3:3:3::3/128\": {\n \"route_preference\": 1,\n \"active\": True,\n \"metric\": 0,\n 
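# Sketch, not the genie parser: the '[preference/metric]' bracket in route
# lines such as 'O 10.2.3.0/24 [110/2] via 10.186.2.2, ...' splits into the
# route_preference / metric pair found in routeOpsOutput below.
import re

def pref_and_metric(line):
    m = re.search(r'\[(\d+)/(\d+)\]', line)
    return (int(m.group(1)), int(m.group(2))) if m else (None, None)

assert pref_and_metric('O   10.2.3.0/24 [110/2] via 10.186.2.2') == (110, 2)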
\"source_protocol\": \"static\",\n \"source_protocol_codes\": \"S\",\n \"next_hop\": {\n \"outgoing_interface\": {\n \"GigabitEthernet0/3\": {\n \"outgoing_interface\": \"GigabitEthernet0/3\"\n },\n \"GigabitEthernet0/2\": {\n \"outgoing_interface\": \"GigabitEthernet0/2\"\n }\n }\n },\n \"route\": \"2001:3:3:3::3/128\"\n },\n \"2001:2:2:2::2/128\": {\n \"active\": True,\n \"metric\": 0,\n \"source_protocol\": \"static\",\n \"source_protocol_codes\": \"S\",\n \"next_hop\": {\n \"next_hop_list\": {\n 1: {\n \"index\": 1,\n \"next_hop\": \"2001:10:1:2::2\",\n \"updated\": \"22:57:07 04 December 2017\",\n \"outgoing_interface\": \"GigabitEthernet0/0\"\n },\n 2: {\n \"index\": 2,\n \"next_hop\": \"2001:20:1:2::2\",\n \"updated\": \"22:57:23 04 December 2017\",\n \"outgoing_interface\": \"GigabitEthernet0/1\"\n }\n }\n },\n \"route_preference\": 1,\n \"route\": \"2001:2:2:2::2/128\"\n },\n \"2001:1:1:1::1/128\": {\n \"route_preference\": 0,\n \"active\": True,\n \"metric\": 0,\n \"source_protocol\": \"local\",\n \"source_protocol_codes\": \"LC\",\n \"next_hop\": {\n \"outgoing_interface\": {\n \"Loopback0\": {\n \"outgoing_interface\": \"Loopback0\"\n }\n }\n },\n \"route\": \"2001:1:1:1::1/128\"\n }\n }\n },\n \"ipv4\": {\n \"routes\": {\n \"10.4.1.1/32\": {\n \"source_protocol_codes\": \"C\",\n \"active\": True,\n \"next_hop\": {\n \"outgoing_interface\": {\n \"Loopback0\": {\n \"outgoing_interface\": \"Loopback0\"\n }\n }\n },\n \"source_protocol\": \"connected\",\n \"route\": \"10.4.1.1/32\"\n },\n \"10.1.3.1/32\": {\n \"source_protocol_codes\": \"L\",\n \"active\": True,\n \"next_hop\": {\n \"outgoing_interface\": {\n \"GigabitEthernet0/2\": {\n \"outgoing_interface\": \"GigabitEthernet0/2\"\n }\n }\n },\n \"source_protocol\": \"local\",\n \"route\": \"10.1.3.1/32\"\n },\n \"10.36.3.3/32\": {\n \"source_protocol_codes\": \"S\",\n \"active\": True,\n \"next_hop\": {\n \"outgoing_interface\": {\n \"GigabitEthernet0/3\": {\n \"outgoing_interface\": \"GigabitEthernet0/3\"\n },\n \"GigabitEthernet0/2\": {\n \"outgoing_interface\": \"GigabitEthernet0/2\"\n }\n }\n },\n \"source_protocol\": \"static\",\n \"route\": \"10.36.3.3/32\"\n },\n \"10.16.32.32/32\": {\n \"route_preference\": 200,\n \"active\": True,\n \"metric\": 0,\n \"source_protocol\": \"bgp\",\n \"source_protocol_codes\": \"B\",\n \"next_hop\": {\n \"next_hop_list\": {\n 1: {\n \"index\": 1,\n \"next_hop\": \"10.66.12.12\",\n \"updated\": \"1d00h\"\n }\n }\n },\n \"route\": \"10.16.32.32/32\"\n },\n \"10.1.2.0/24\": {\n \"source_protocol_codes\": \"C\",\n \"active\": True,\n \"next_hop\": {\n \"outgoing_interface\": {\n \"GigabitEthernet0/0\": {\n \"outgoing_interface\": \"GigabitEthernet0/0\"\n }\n }\n },\n \"source_protocol\": \"connected\",\n \"route\": \"10.1.2.0/24\"\n },\n \"10.1.2.1/32\": {\n \"source_protocol_codes\": \"L\",\n \"active\": True,\n \"next_hop\": {\n \"outgoing_interface\": {\n \"GigabitEthernet0/0\": {\n \"outgoing_interface\": \"GigabitEthernet0/0\"\n }\n }\n },\n \"source_protocol\": \"local\",\n \"route\": \"10.1.2.1/32\"\n },\n \"10.16.2.2/32\": {\n \"route_preference\": 1,\n \"active\": True,\n \"metric\": 0,\n \"source_protocol\": \"static\",\n \"source_protocol_codes\": \"S\",\n \"next_hop\": {\n \"next_hop_list\": {\n 1: {\n \"index\": 1,\n \"next_hop\": \"10.186.2.2\",\n \"outgoing_interface\": \"GigabitEthernet0/1\"\n },\n 2: {\n \"index\": 2,\n \"next_hop\": \"10.1.2.2\",\n \"outgoing_interface\": \"GigabitEthernet0/0\"\n }\n }\n },\n \"route\": \"10.16.2.2/32\"\n },\n \"10.151.22.22/32\": {\n 
\"route_preference\": 115,\n \"active\": True,\n \"metric\": 20,\n \"source_protocol\": \"isis\",\n \"source_protocol_codes\": \"i L1\",\n \"next_hop\": {\n \"next_hop_list\": {\n 1: {\n \"index\": 1,\n \"next_hop\": \"10.186.2.2\",\n \"updated\": \"06:47:04\",\n \"outgoing_interface\": \"GigabitEthernet0/1\"\n },\n 2: {\n \"index\": 2,\n \"next_hop\": \"10.1.2.2\",\n \"updated\": \"06:47:04\",\n \"outgoing_interface\": \"GigabitEthernet0/0\"\n }\n }\n },\n \"route\": \"10.151.22.22/32\"\n },\n \"10.2.3.0/24\": {\n \"route_preference\": 110,\n \"active\": True,\n \"metric\": 2,\n \"source_protocol\": \"ospf\",\n \"source_protocol_codes\": \"O\",\n \"next_hop\": {\n \"next_hop_list\": {\n 1: {\n \"index\": 1,\n \"next_hop\": \"10.186.2.2\",\n \"updated\": \"06:46:59\",\n \"outgoing_interface\": \"GigabitEthernet0/1\"\n },\n 2: {\n \"index\": 2,\n \"next_hop\": \"10.1.2.2\",\n \"updated\": \"06:46:59\",\n \"outgoing_interface\": \"GigabitEthernet0/0\"\n }\n }\n },\n \"route\": \"10.2.3.0/24\"\n },\n \"10.1.3.0/24\": {\n \"source_protocol_codes\": \"C\",\n \"active\": True,\n \"next_hop\": {\n \"outgoing_interface\": {\n \"GigabitEthernet0/2\": {\n \"outgoing_interface\": \"GigabitEthernet0/2\"\n }\n }\n },\n \"source_protocol\": \"connected\",\n \"route\": \"10.1.3.0/24\"\n }\n }\n }\n }\n },\n \"VRF1\": {\n \"address_family\": {\n \"ipv6\": {\n \"routes\": {\n \"2001:db8:100:1::/64\": {\n \"active\": True,\n \"next_hop\": {\n \"next_hop_list\": {\n 1: {\n \"index\": 1,\n \"next_hop\": \"FE80::211:1FF:FE00:1\",\n \"updated\": \"09:42:39 06 December 2017\",\n \"outgoing_interface\": \"GigabitEthernet0/0/2.100\"\n }\n }\n },\n \"metric\": 1,\n \"route_preference\": 110,\n \"source_protocol_codes\": \"O\",\n \"route\": \"2001:db8:100:1::/64\",\n \"source_protocol\": \"ospf\"\n },\n \"2001:db8:100:4::/64\": {\n \"active\": True,\n \"next_hop\": {\n \"next_hop_list\": {\n 1: {\n \"index\": 1,\n \"next_hop\": \"FE80::211:1FF:FE00:1\",\n \"updated\": \"09:42:39 06 December 2017\",\n \"outgoing_interface\": \"GigabitEthernet0/0/2.100\"\n }\n }\n },\n \"metric\": 1,\n \"route_preference\": 110,\n \"source_protocol_codes\": \"O\",\n \"route\": \"2001:db8:100:4::/64\",\n \"source_protocol\": \"ospf\"\n },\n \"2001:db8:100::/64\": {\n \"active\": True,\n \"next_hop\": {\n \"next_hop_list\": {\n 1: {\n \"index\": 1,\n \"next_hop\": \"FE80::211:1FF:FE00:1\",\n \"updated\": \"09:42:39 06 December 2017\",\n \"outgoing_interface\": \"GigabitEthernet0/0/2.100\"\n }\n }\n },\n \"metric\": 1,\n \"route_preference\": 110,\n \"source_protocol_codes\": \"O\",\n \"route\": \"2001:db8:100::/64\",\n \"source_protocol\": \"ospf\"\n }\n }\n },\n \"ipv4\": {\n \"routes\": {\n \"10.145.1.0/24\": {\n \"active\": True,\n \"metric\": 1,\n \"source_protocol\": \"bgp\",\n \"source_protocol_codes\": \"B\",\n \"next_hop\": {\n \"next_hop_list\": {\n 1: {\n \"index\": 1,\n \"next_hop\": \"192.168.51.1\",\n \"updated\": \"01:01:10\"\n }\n }\n },\n \"route_preference\": 200,\n \"route\": \"10.145.1.0/24\"\n },\n \"10.145.2.0/24\": {\n \"active\": True,\n \"metric\": 1,\n \"source_protocol\": \"bgp\",\n \"source_protocol_codes\": \"B\",\n \"next_hop\": {\n \"next_hop_list\": {\n 1: {\n \"index\": 1,\n \"next_hop\": \"192.168.51.1\",\n \"updated\": \"01:01:10\"\n }\n }\n },\n \"route_preference\": 200,\n \"route\": \"10.145.2.0/24\"\n },\n \"192.168.4.0/24\": {\n \"active\": True,\n \"metric\": 0,\n \"source_protocol\": \"bgp\",\n \"source_protocol_codes\": \"B\",\n \"next_hop\": {\n \"next_hop_list\": {\n 1: {\n \"index\": 1,\n 
\"next_hop\": \"192.168.51.1\",\n \"updated\": \"01:01:10\"\n }\n }\n },\n \"route_preference\": 200,\n \"route\": \"192.168.4.0/24\"\n },\n \"10.81.1.0/24\": {\n \"source_protocol_codes\": \"C\",\n \"active\": True,\n \"next_hop\": {\n \"outgoing_interface\": {\n \"GigabitEthernet0/0/2.100\": {\n \"outgoing_interface\": \"GigabitEthernet0/0/2.100\"\n }\n }\n },\n \"source_protocol\": \"connected\",\n \"route\": \"10.81.1.0/24\"\n },\n \"10.145.0.0/24\": {\n \"active\": True,\n \"metric\": 1,\n \"source_protocol\": \"bgp\",\n \"source_protocol_codes\": \"B\",\n \"next_hop\": {\n \"next_hop_list\": {\n 1: {\n \"index\": 1,\n \"next_hop\": \"192.168.51.1\",\n \"updated\": \"01:01:10\"\n }\n }\n },\n \"route_preference\": 200,\n \"route\": \"10.145.0.0/24\"\n },\n \"10.81.1.1/32\": {\n \"source_protocol_codes\": \"L\",\n \"active\": True,\n \"next_hop\": {\n \"outgoing_interface\": {\n \"GigabitEthernet0/0/2.100\": {\n \"outgoing_interface\": \"GigabitEthernet0/0/2.100\"\n }\n }\n },\n \"source_protocol\": \"local\",\n \"route\": \"10.81.1.1/32\"\n },\n \"10.0.0.0/24\": {\n \"active\": True,\n \"metric\": 1,\n \"source_protocol\": \"ospf\",\n \"source_protocol_codes\": \"O\",\n \"next_hop\": {\n \"next_hop_list\": {\n 1: {\n \"index\": 1,\n \"next_hop\": \"10.81.1.2\",\n \"updated\": \"01:02:20\",\n \"outgoing_interface\": \"GigabitEthernet0/0/2.100\"\n }\n }\n },\n \"route_preference\": 110,\n \"route\": \"10.0.0.0/24\"\n },\n \"10.0.2.0/24\": {\n \"active\": True,\n \"metric\": 1,\n \"source_protocol\": \"ospf\",\n \"source_protocol_codes\": \"O\",\n \"next_hop\": {\n \"next_hop_list\": {\n 1: {\n \"index\": 1,\n \"next_hop\": \"10.81.1.2\",\n \"updated\": \"01:02:20\",\n \"outgoing_interface\": \"GigabitEthernet0/0/2.100\"\n }\n }\n },\n \"route_preference\": 110,\n \"route\": \"10.0.2.0/24\"\n },\n \"10.0.1.0/24\": {\n \"active\": True,\n \"metric\": 1,\n \"source_protocol\": \"ospf\",\n \"source_protocol_codes\": \"O\",\n \"next_hop\": {\n \"next_hop_list\": {\n 1: {\n \"index\": 1,\n \"next_hop\": \"10.81.1.2\",\n \"updated\": \"01:02:20\",\n \"outgoing_interface\": \"GigabitEthernet0/0/2.100\"\n }\n }\n },\n \"route_preference\": 110,\n \"route\": \"10.0.1.0/24\"\n }\n }\n }\n }\n }\n }\n }\n", "id": "10055304", "language": "Python", "matching_score": 6.267749309539795, "max_stars_count": 0, "path": "pkgs/ops-pkg/src/genie/libs/ops/routing/iosxe/tests/routing_output.py" }, { "content": "'''\n StaticRoute Genie Ops Object Outputs for IOS\n'''\n\nclass StaticRouteOutput(object):\n # 'show ipv4 static route' output\n ShowVrfDetail = {\n \"Mgmt-vrf\": {\n \"vrf_id\": 1,\n \"interfaces\": [\n \"GigabitEthernet0/0\"\n ],\n \"address_family\": {\n \"ipv4 unicast\": {\n \"table_id\": \"0x1\",\n \"flags\": \"0x0\",\n \"vrf_label\": {\n 'allocation_mode': 'per-prefix'\n }\n },\n \"ipv6 unicast\": {\n \"table_id\": \"0x1E000001\",\n \"flags\": \"0x0\",\n \"vrf_label\": {\n 'allocation_mode': 'per-prefix'\n }\n }\n },\n \"flags\": \"0x1808\"\n },\n \"VRF1\": {\n \"interfaces\": [\n \"GigabitEthernet0/0\"\n ],\n \"address_family\": {\n \"ipv4 unicast\": {\n \"export_to_global\": {\n \"export_to_global_map\": \"export_to_global_map\",\n \"prefix_limit\": 1000\n },\n \"import_from_global\": {\n \"prefix_limit\": 1000,\n \"import_from_global_map\": \"import_from_global_map\"\n },\n \"table_id\": \"0x1\",\n \"routing_table_limit\": {\n \"routing_table_limit_action\": {\n \"enable_alert_limit_number\": {\n \"alert_limit_number\": 10000\n }\n }\n },\n \"route_targets\": {\n \"200:1\": {\n \"rt_type\": 
\"both\",\n \"route_target\": \"200:1\"\n },\n \"100:1\": {\n \"rt_type\": \"both\",\n \"route_target\": \"100:1\"\n }\n },\n \"flags\": \"0x2100\",\n \"vrf_label\": {\n 'allocation_mode': 'per-prefix'\n }\n },\n \"ipv6 unicast\": {\n \"export_to_global\": {\n \"export_to_global_map\": \"export_to_global_map\",\n \"prefix_limit\": 1000\n },\n \"table_id\": \"0x1E000001\",\n \"routing_table_limit\": {\n \"routing_table_limit_action\": {\n \"enable_alert_percent\": {\n \"alert_percent_value\": 70\n },\n \"enable_alert_limit_number\": {\n \"alert_limit_number\": 7000\n }\n },\n \"routing_table_limit_number\": 10000\n },\n \"route_targets\": {\n \"200:1\": {\n \"rt_type\": \"import\",\n \"route_target\": \"200:1\"\n },\n \"400:1\": {\n \"rt_type\": \"import\",\n \"route_target\": \"400:1\"\n },\n \"300:1\": {\n \"rt_type\": \"export\",\n \"route_target\": \"300:1\"\n },\n \"100:1\": {\n \"rt_type\": \"export\",\n \"route_target\": \"100:1\"\n }\n },\n \"flags\": \"0x100\",\n \"vrf_label\": {\n 'allocation_mode': 'per-prefix'\n }\n }\n },\n \"flags\": \"0x180C\",\n \"route_distinguisher\": \"100:1\",\n \"vrf_id\": 1\n }\n }\n showIpv4StaticRoute = {\n 'vrf': {\n 'VRF1': {\n 'address_family': {\n 'ipv4': {\n 'routes': {\n '10.16.2.2/32': {\n 'route': '10.16.2.2/32',\n 'next_hop': {\n 'next_hop_list': {\n 1: {\n 'index': 1,\n 'active': True,\n 'next_hop': '10.1.2.2',\n 'outgoing_interface': 'GigabitEthernet0/0',\n 'preference': 1,\n },\n 2: {\n 'index': 2,\n 'active': False,\n 'next_hop': '10.186.2.2',\n 'outgoing_interface': 'GigabitEthernet0/1',\n 'preference': 2,\n },\n 3: {\n 'index': 3,\n 'active': False,\n 'next_hop': '10.186.2.2',\n 'preference': 3,\n },\n },\n },\n },\n '10.36.3.3/32': {\n 'route': '10.36.3.3/32',\n 'next_hop': {\n 'outgoing_interface': {\n 'GigabitEthernet0/2': {\n 'active': True,\n 'outgoing_interface': 'GigabitEthernet0/2',\n 'preference': 1,\n },\n 'GigabitEthernet0/3': {\n 'active': True,\n 'outgoing_interface': 'GigabitEthernet0/3',\n 'preference': 1,\n },\n },\n },\n },\n },\n },\n },\n },\n },\n }\n showIpv6StaticRoute = {\n 'vrf': {\n 'default': {\n 'address_family': {\n 'ipv6': {\n 'routes': {\n '2001:2:2:2::2/128': {\n 'route': '2001:2:2:2::2/128',\n 'next_hop': {\n 'next_hop_list': {\n 1: {\n 'index': 1,\n 'active': False,\n 'next_hop': '2001:10:1:2::2',\n 'resolved_outgoing_interface': 'GigabitEthernet0/0',\n 'resolved_paths_number': 1,\n 'max_depth': 1,\n 'preference': 3,\n },\n 2: {\n 'index': 2,\n 'next_hop': '2001:20:1:2::2',\n 'active': True,\n 'outgoing_interface': 'GigabitEthernet0/1',\n 'preference': 1,\n },\n 3: {\n 'index': 3,\n 'active': False,\n 'next_hop': '2001:10:1:2::2',\n 'outgoing_interface': 'GigabitEthernet0/0',\n 'rejected_by': 'routing table',\n 'preference': 11,\n 'tag': 100,\n 'track': 1,\n 'track_state': 'up',\n },\n },\n },\n },\n '2fdf8:f53e:61e4::18/128': {\n 'route': '2001:3:3:3::3/128',\n 'next_hop': {\n 'outgoing_interface': {\n 'GigabitEthernet0/3': {\n 'outgoing_interface': 'GigabitEthernet0/3',\n 'active': True,\n 'preference': 1,\n },\n 'GigabitEthernet0/2': {\n 'outgoing_interface': 'GigabitEthernet0/2',\n 'active': True,\n 'preference': 1,\n },\n },\n },\n },\n },\n },\n },\n },\n },\n }\n\n showIpv4StaticRoute_default ='''\\\n R1#show ip static route\n\n '''\n showIpv4StaticRoute_vrf1 = '''\\\n R1#show ip static route vrf VRF1\n Codes: M - Manual static, A - AAA download, N - IP NAT, D - DHCP,\n G - GPRS, V - Crypto VPN, C - CASA, P - Channel interface processor,\n B - BootP, S - Service selection gateway\n DN - Default 
Network, T - Tracking object\n L - TL1, E - OER, I - iEdge\n D1 - Dot1x Vlan Network, K - MWAM Route\n PP - PPP default route, MR - MRIPv6, SS - SSLVPN\n H - IPe Host, ID - IPe Domain Broadcast\n U - User GPRS, TE - MPLS Traffic-eng, LI - LIIN\n IR - ICMP Redirect\n Codes in []: A - active, N - non-active, B - BFD-tracked, D - Not Tracked, P - permanent\n Static local RIB for VRF1\n M 10.16.2.2/32 [1/0] via GigabitEthernet0/0 10.1.2.2 [A]\n M [2/0] via GigabitEthernet0/1 10.186.2.2 [N]\n M [3/0] via 10.186.2.2 [N]\n M 10.36.3.3/32 [1/0] via GigabitEthernet0/2 [A]\n M [1/0] via GigabitEthernet0/3 [A]\n '''\n showIpv6StaticRoute_default = '''\\\nR1#show ipv6 static detail\nIPv6 Static routes Table - default\nCodes: * - installed in RIB, u/m - Unicast/Multicast only\n U - Per-user Static route\n N - ND Static route\n M - MIP Static route\n P - DHCP-PD Static route\n R - RHI Static route\n 2fdf8:f53e:61e4::18/128 via 2001:10:1:2::2, distance 3\n Resolves to 1 paths (max depth 1)\n via GigabitEthernet0/0\n* 2001:2:2:2::2/128 via 2001:20:1:2::2, GigabitEthernet0/1, distance 1\n 2001:2:2:2::2/128 via 2001:10:1:2::2, GigabitEthernet0/0, distance 11, tag 100\n Rejected by routing table\n Tracked object 1 is Up\n* 2001:3:3:3::3/128 via GigabitEthernet0/3, distance 1\n* 2001:3:3:3::3/128 via GigabitEthernet0/2, distance 1\n '''\n showIpv6StaticRoute_vrf1 = '''\\\nR1#show ipv6 static vrf VRF1 detail\n\n '''\n staticRouteOpsOutput = {\n 'vrf': {\n 'VRF1': {\n 'address_family': {\n 'ipv4': {\n 'routes': {\n '10.16.2.2/32': {\n 'route': '10.16.2.2/32',\n 'next_hop': {\n 'next_hop_list': {\n 1: {\n 'index': 1,\n 'active': True,\n 'next_hop': '10.1.2.2',\n 'outgoing_interface': 'GigabitEthernet0/0',\n 'preference': 1,\n },\n 2: {\n 'index': 2,\n 'active': False,\n 'next_hop': '10.186.2.2',\n 'outgoing_interface': 'GigabitEthernet0/1',\n 'preference': 2,\n },\n 3: {\n 'index': 3,\n 'active': False,\n 'next_hop': '10.186.2.2',\n 'preference': 3,\n },\n },\n },\n },\n '10.36.3.3/32': {\n 'route': '10.36.3.3/32',\n 'next_hop': {\n 'outgoing_interface': {\n 'GigabitEthernet0/2': {\n 'active': True,\n 'outgoing_interface': 'GigabitEthernet0/2',\n 'preference': 1,\n },\n 'GigabitEthernet0/3': {\n 'active': True,\n 'outgoing_interface': 'GigabitEthernet0/3',\n 'preference': 1,\n },\n },\n },\n },\n },\n },\n\n },\n },\n 'default': {\n 'address_family': {\n 'ipv6': {\n 'routes': {\n '2001:2:2:2::2/128': {\n 'route': '2001:2:2:2::2/128',\n 'next_hop': {\n 'next_hop_list': {\n 1: {\n 'index': 1,\n 'active': False,\n 'next_hop': '2001:10:1:2::2',\n 'preference': 3,\n },\n 2: {\n 'index': 2,\n 'next_hop': '2001:20:1:2::2',\n 'active': True,\n 'outgoing_interface': 'GigabitEthernet0/1',\n 'preference': 1,\n },\n 3: {\n 'index': 3,\n 'active': False,\n 'next_hop': '2001:10:1:2::2',\n 'outgoing_interface': 'GigabitEthernet0/0',\n 'preference': 11,\n },\n },\n },\n },\n '2001:3:3:3::3/128': {\n 'route': '2001:3:3:3::3/128',\n 'next_hop': {\n 'outgoing_interface': {\n 'GigabitEthernet0/3': {\n 'outgoing_interface': 'GigabitEthernet0/3',\n 'active': True,\n 'preference': 1,\n },\n 'GigabitEthernet0/2': {\n 'outgoing_interface': 'GigabitEthernet0/2',\n 'active': True,\n 'preference': 1,\n },\n },\n },\n },\n },\n },\n },\n },\n },\n }\n", "id": "6769661", "language": "Python", "matching_score": 3.9555015563964844, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/static_routing/ios/tests/static_routing_output.py" }, { "content": "''' \nVrf Genie Ops Object for IOSXE - CLI.\n'''\n\n# Genie\nfrom 
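# Sketch (hypothetical reduction): in the raw 'show ip static route vrf
# VRF1' output above, the trailing [A]/[N] marker maps onto the boolean
# 'active' leaf seen in staticRouteOpsOutput.
def is_active(line):
    return line.rstrip().endswith('[A]')

assert is_active('M  10.16.2.2/32 [1/0] via GigabitEthernet0/0 10.1.2.2 [A]')
assert not is_active('M  [2/0] via GigabitEthernet0/1 10.186.2.2 [N]')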
genie.libs.ops.vrf.vrf import Vrf as SuperVrf\nfrom genie.ops.base import Context\n\n\nclass Vrf(SuperVrf):\n '''Vrf Genie Ops Object'''\n\n def learn(self, vrf=''):\n\n '''Learn Vrf Ops'''\n\n ########################################################################\n # info\n ########################################################################\n src = '[(?P<vrf>.*)][address_family][(?P<af>.*)]'\n dest = 'info[vrfs][(?P<vrf>.*)][address_family][(?P<af>.*)]'\n keys = ['[route_targets]', '[import_from_global][import_from_global_map]',\n '[export_to_global][export_to_global_map]',\n '[routing_table_limit][routing_table_limit_action]',\n '[routing_table_limit][routing_table_limit_number]',\n '[routing_table_limit][enable_simple_alert]']\n # route_distinguisher\n self.add_leaf(cmd='show vrf detail',\n src='[(?P<vrf>.*)][route_distinguisher]',\n dest='info[vrfs][(?P<vrf>.*)][route_distinguisher]',\n vrf=vrf)\n for key in keys:\n self.add_leaf(cmd='show vrf detail',\n src=src + '{key}'.format(key=key),\n dest=dest + '{key}'.format(key=key), vrf=vrf)\n\n self.make(final_call=True)\n", "id": "6126895", "language": "Python", "matching_score": 4.024323463439941, "max_stars_count": 0, "path": "pkgs/ops-pkg/src/genie/libs/ops/vrf/iosxe/vrf.py" }, { "content": "''' \nVrf Genie Ops Object for IOSXR - CLI.\n'''\n\n# Genie\nfrom genie.libs.ops.vrf.vrf import Vrf as SuperVrf\nfrom genie.ops.base import Context\n\n# iosxe show_vrf\nfrom genie.libs.parser.iosxr.show_vrf import ShowVrfAllDetail\n\n\nclass Vrf(SuperVrf):\n '''Vrf Genie Ops Object'''\n\n def learn(self, vrf=''):\n\n '''Learn Vrf Ops'''\n\n src = '[(?P<vrf>.*)][address_family][(?P<af>.*)][route_target]'\n dest = 'info[vrfs][(?P<vrf>.*)][address_family][(?P<af>.*)][route_targets]'\n\n self.add_leaf(cmd=ShowVrfAllDetail,\n src='[(?P<vrf>.*)][route_distinguisher]',\n dest='info[vrfs][(?P<vrf>.*)][route_distinguisher]',\n vrf=vrf)\n self.add_leaf(cmd=ShowVrfAllDetail,\n src=src,\n dest=dest,\n vrf=vrf)\n\n self.make(final_call=True)", "id": "4049430", "language": "Python", "matching_score": 2.489576816558838, "max_stars_count": 0, "path": "pkgs/ops-pkg/src/genie/libs/ops/vrf/iosxr/vrf.py" }, { "content": "''' \nMCAST Genie Ops Object for IOSXR - CLI.\n'''\n# super class\nfrom genie.libs.ops.mcast.mcast import Mcast as SuperMacst\n\n# nxos show_mcast\nfrom genie.libs.parser.iosxr.show_pim import ShowPimVrfMstatic, ShowPimVrfRpfSummary,\\\n ShowPimVrfInterfaceDetail\n\n# iosxr show_mrib\nfrom genie.libs.parser.iosxr.show_mrib import ShowMribVrfRoute\n\n# iosxr show_vrf\nfrom genie.libs.parser.iosxr.show_vrf import ShowVrfAllDetail\n\nclass Mcast(SuperMacst):\n '''Mcast Genie Ops Object'''\n\n def set_enable(self, item):\n try:\n item.keys()\n return True\n except:\n return False\n\n def learn(self):\n '''Learn Mcast Ops'''\n\n # vrf\n # vrf_name\n # address_family\n # af_name\n info_src = '[vrf][(?P<vrf>.*)][address_family][(?P<address_family>.*)]'\n info_dest = 'info' + info_src\n\n # vrf\n # vrf_name\n # address_family\n # af_name\n # multicast_group\n # group_name\n # source_address\n # address_name\n tbl_src = '[vrf][(?P<vrf>.*)][address_family][(?P<address_family>.*)][multicast_group][(?P<multicast_group>.*)][source_address][(?P<source_address>.*)]'\n tbl_dest = 'table' + tbl_src\n\n # Get list of vrfs present on system\n self.add_leaf(cmd=ShowVrfAllDetail,\n src='',\n dest='list_of_vrfs',\n action=lambda x: list(x.keys()))\n self.make()\n\n if hasattr(self, 'list_of_vrfs'):\n\n for vrf in sorted(self.list_of_vrfs):\n \n for af in ['ipv4', 
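# The set_enable action above marks an address family enabled simply
# because the interface table parsed into a dict. An equivalent standalone
# form (illustrative, not the exact library code):
def set_enable(item):
    return isinstance(item, dict)  # a truthy parse result means PIM is up

assert set_enable({'GigabitEthernet0/0/0/0': {}}) is True
assert set_enable(None) is False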
'ipv6']:\n\n ############################################################\n # INFO\n ############################################################\n\n # enable\n self.add_leaf(cmd=ShowPimVrfInterfaceDetail,\n src='[vrf][(?P<vrf>.*)][interfaces]',\n dest='info[vrf][(?P<vrf>.*)][address_family][{af}][enable]'.format(af=af),\n vrf=vrf, af=af,\n action=self.set_enable)\n\n # multipath\n self.add_leaf(cmd=ShowPimVrfRpfSummary,\n src=info_src+'[multipath]',\n dest=info_dest+'[multipath]',\n vrf=vrf, af=af)\n\n # mroute\n self.add_leaf(cmd=ShowPimVrfMstatic,\n src=info_src+'[mroute]',\n dest=info_dest+'[mroute]',\n vrf=vrf, af=af)\n\n ############################################################\n # TABLE\n ############################################################\n\n # flags\n self.add_leaf(cmd=ShowMribVrfRoute,\n src=tbl_src+'[flags]',\n dest=tbl_dest+'[flags]',\n vrf=vrf, af=af)\n\n # uptime\n self.add_leaf(cmd=ShowMribVrfRoute,\n src=tbl_src+'[uptime]',\n dest=tbl_dest+'[uptime]',\n vrf=vrf, af=af)\n\n # incoming_interface_list\n # rpf_nbr\n self.add_leaf(cmd=ShowMribVrfRoute,\n src=tbl_src+'[incoming_interface_list][(?P<intf>.*)][rpf_nbr]',\n dest=tbl_dest+'[incoming_interface_list][(?P<intf>.*)][rpf_nbr]',\n vrf=vrf, af=af)\n\n # outgoing_interface_list\n # uptime\n self.add_leaf(cmd=ShowMribVrfRoute,\n src=tbl_src+'[outgoing_interface_list][(?P<intf>.*)][uptime]',\n dest=tbl_dest+'[outgoing_interface_list][(?P<intf>.*)][uptime]',\n vrf=vrf, af=af)\n\n # outgoing_interface_list\n # flags\n self.add_leaf(cmd=ShowMribVrfRoute,\n src=tbl_src+'[outgoing_interface_list][(?P<intf>.*)][flags]',\n dest=tbl_dest+'[outgoing_interface_list][(?P<intf>.*)][flags]',\n vrf=vrf, af=af)\n\n # Make final Ops structure\n self.make(final_call=True)", "id": "1518612", "language": "Python", "matching_score": 5.030246734619141, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/mcast/iosxr/mcast.py" }, { "content": "''' \nMCAST Genie Ops Object for IOSXE - CLI.\n'''\n# super class\nfrom genie.libs.ops.mcast.mcast import Mcast as SuperMacst\n\n# iosxe show_rpf\nfrom genie.libs.parser.iosxe.show_rpf import ShowIpv6Rpf\n\n# iosxe show_pim\nfrom genie.libs.parser.iosxe.show_pim import ShowIpv6PimInterface\n\n# iosxe show_vrf\nfrom genie.libs.parser.iosxe.show_vrf import ShowVrfDetail\n\n\nclass Mcast(SuperMacst):\n '''Mcast Genie Ops Object'''\n\n def get_vrfs(self, item):\n if isinstance(item, dict):\n return list(item.keys())\n\n def learn(self):\n '''Learn Mcast Ops'''\n\n # get vrf list \n self.add_leaf(cmd=ShowVrfDetail,\n src='',\n dest='list_of_vrfs',\n action=self.get_vrfs)\n\n self.make()\n\n vrf_list = ['default']\n try:\n vrf_list.extend(self.list_of_vrfs)\n except:\n pass\n else: \n # delete the list_of_vrfs in the info table\n del self.list_of_vrfs\n\n # loop for vrfs\n for vrf in sorted(vrf_list):\n\n # skip the vrf when it is mgmt-vrf\n if vrf == 'Mgmt-vrf':\n continue\n\n # create kwargs\n vrf_name = '' if vrf == 'default' else vrf\n \n\n ########################################################################\n # info\n ########################################################################\n\n # enable - ipv4\n self.add_leaf(cmd='show ip multicast vrf {vrf}'.format(vrf=vrf),\n src='[vrf][(?P<vrf>.*)][enable]',\n dest='info[vrf][(?P<vrf>.*)][address_family][ipv4][enable]',\n vrf=vrf_name)\n\n # multipath - ipv4\n self.add_leaf(cmd='show ip multicast vrf {vrf}'.format(vrf=vrf),\n src='[vrf][(?P<vrf>.*)][multipath]',\n dest='info[vrf][(?P<vrf>.*)][address_family][ipv4][multipath]',\n 
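# Reader note (illustrative rewrite, not a proposed change): the nested
# loops above visit every (vrf, address_family) pair; itertools.product
# expresses the same walk.
from itertools import product

def vrf_af_pairs(vrfs, afs=('ipv4', 'ipv6')):
    return [(vrf, af) for vrf, af in product(sorted(vrfs), afs)]

assert vrf_af_pairs(['default', 'VRF1']) == [
    ('VRF1', 'ipv4'), ('VRF1', 'ipv6'),
    ('default', 'ipv4'), ('default', 'ipv6')]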
vrf=vrf_name)\n\n # enable - ipv6\n self.add_leaf(cmd=ShowIpv6PimInterface,\n src='[vrf][(?P<vrf>.*)][interface][(?P<interface>.*)][pim_enabled]',\n dest='info[vrf][(?P<vrf>.*)][interface][(?P<interface>.*)][pim_enabled]',\n vrf=vrf_name)\n \n\n # multipath - ipv6 from show run\n\n # ipv4 - neighbor_address, admin_distance\n info_src = '[vrf][(?P<vrf>.*)][mroute][(?P<mroute>.*)][path][(?P<path>.*)]'\n info_dest = 'info[vrf][(?P<vrf>.*)][address_family][ipv4]'\\\n '[mroute][(?P<mroute>.*)][path][(?P<path>.*)]'\n\n for key in ['neighbor_address', 'admin_distance']:\n self.add_leaf(cmd='show ip mroute vrf {vrf} static'.format(vrf=vrf),\n src=info_src+'[{key}]'.format(key=key),\n dest=info_dest+'[{key}]'.format(key=key),\n vrf=vrf_name)\n\n ########################################################################\n # table\n ########################################################################\n\n tbl_src = '[vrf][(?P<vrf>.*)][address_family][(?P<address_family>.*)]'\\\n '[multicast_group][(?P<mcast_group>.*)][source_address][(?P<source_address>.*)]'\n tbl_dest = 'table' + tbl_src\n\n # flags, uptime, expire, rp\n # incoming_interface_list\n # outgoing_interface_list\n for key in ['flags', 'uptime', 'expire', 'rp', 'rpf_nbr',\n 'incoming_interface_list', 'outgoing_interface_list']:\n\n # ipv4 & ipv6\n for cmd in ['show ip mroute', 'show ipv6 mroute']:\n self.add_leaf(cmd=cmd,\n src=tbl_src+'[{key}]'.format(key=key),\n dest=tbl_dest+'[{key}]'.format(key=key),\n vrf=vrf_name)\n\n self.make()\n\n try:\n ipv6_mroute_list = self.table['vrf'][vrf]['address_family']['ipv6']['multicast_group'].keys()\n except:\n ipv6_mroute_list = []\n\n # ipv6 - neighbor_address, interface_name, admin_distance\n info_src = '[vrf][(?P<vrf>.*)][path][(?P<path>.*)]'\n for mroute in ipv6_mroute_list:\n info_dest = 'info[vrf][(?P<vrf>.*)][address_family][ipv6]'\\\n '[mroute][{mroute}][path][(?P<path>.*)]'.format(mroute=mroute)\n\n for key in ['neighbor_address', 'interface_name', 'admin_distance']:\n self.add_leaf(cmd=ShowIpv6Rpf,\n src=info_src+'[{key}]'.format(key=key),\n dest=info_dest+'[{key}]'.format(key=key),\n vrf=vrf_name, mroute=mroute)\n self.make(final_call=True)\n\n # define attribute enable - ipv6\n # the command is 'show ipv6 rpf', so this is only for ipv6 enabled\n try:\n for intf in self.info['vrf'][vrf]['interface']:\n if self.info['vrf'][vrf]['interface'][intf]['pim_enabled']:\n self.info['vrf'][vrf]['address_family']['ipv6']['enable'] = True\n break\n except:\n pass\n\n # delete unused ops attribute\n try:\n del(self.info['vrf'][vrf]['interface'])\n except:\n pass", "id": "8974039", "language": "Python", "matching_score": 3.132953643798828, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/mcast/iosxe/mcast.py" }, { "content": "''' \nBGP Genie Ops Object for NXOS - CLI.\n'''\n\nimport re\n\n# Super class\nfrom genie.libs.ops.bgp.bgp import Bgp as SuperBgp\n# nxos show_bgp\nfrom genie.libs.parser.nxos.show_bgp import ShowBgpProcessVrfAll, ShowBgpPeerSession,\\\n ShowBgpPeerPolicy, ShowBgpPeerTemplate,\\\n ShowBgpVrfAllAll,\\\n ShowBgpVrfAllNeighbors,\\\n ShowBgpVrfAllAllNextHopDatabase,\\\n ShowBgpVrfAllAllSummary,\\\n ShowBgpVrfAllAllDampeningParameters,\\\n ShowBgpVrfAllNeighborsAdvertisedRoutes,\\\n ShowBgpVrfAllNeighborsRoutes,\\\n ShowBgpVrfAllNeighborsReceivedRoutes\n\n# nxos show_vrf\nfrom genie.libs.parser.nxos.show_vrf import ShowVrf\n\n# nxos show_routing\nfrom genie.libs.parser.nxos.show_routing import ShowRoutingVrfAll\n\n\nclass Bgp(SuperBgp):\n '''BGP Genie Ops Object'''\n\n # 
Callables\n def get_af_key(self, item):\n return {self.change_label_af_name(key): {} for key in item.keys()}\n \n def get_af_name(self, item):\n p = re.compile(r'(?P<afname>.*) RD')\n m = p.match(item)\n if m:\n item = m.groupdict()['afname']\n return self.change_label_af_name(item)\n\n def convert_to_int(self, item):\n return int(item)\n\n def intf_capitalize(self, item):\n return item.capitalize()\n\n def change_label_af_name(self, af):\n if 'label ' in af:\n af = af.replace('label ', 'labeled-')\n return af\n\n def learn(self, address_family='all', vrf='all', neighbor=''):\n\n '''Learn BGP Ops'''\n \n ########################################################################\n # info\n ########################################################################\n \n # Global callable\n self.callables = {'get_af_name': self.get_af_name,\n 'change_label_af_name': self.change_label_af_name}\n\n # bgp_id\n if vrf != 'all':\n self.add_leaf(cmd=ShowBgpProcessVrfAll,\n src='[bgp_tag]',\n dest='info[instance][default][bgp_id]',\n action=self.convert_to_int)\n\n # protocol_state\n self.add_leaf(cmd=ShowBgpProcessVrfAll,\n src='[bgp_protocol_state]',\n dest='info[instance][default][protocol_state]')\n else:\n self.add_leaf(cmd=ShowBgpProcessVrfAll,\n src='[bgp_tag]',\n dest='info[instance][default][bgp_id]',\n action=self.convert_to_int, vrf=vrf)\n\n # protocol_state\n self.add_leaf(cmd=ShowBgpProcessVrfAll,\n src='[bgp_protocol_state]',\n dest='info[instance][default][protocol_state]', vrf=vrf)\n\n # ======================================================================\n # peer_session\n # ======================================================================\n \n # ps_name\n ps_source = '[peer_session][(?P<peer_session>.*)]'\n ps_dest = 'info[instance][default][peer_session][(?P<peer_session>.*)]'\n \n # fall_over_bfd\n self.add_leaf(cmd=ShowBgpPeerSession,\n src=ps_source+'[bfd]',\n dest=ps_dest+'[fall_over_bfd]')\n\n # suppress_four_byte_as_capability\n self.add_leaf(cmd=ShowBgpPeerSession,\n src=ps_source+'[suppress_capabilities]',\n dest=ps_dest+'[suppress_four_byte_as_capability]')\n\n # description\n self.add_leaf(cmd=ShowBgpPeerSession,\n src=ps_source+'[description]',\n dest=ps_dest+'[description]')\n\n # disable_connected_check\n self.add_leaf(cmd=ShowBgpPeerSession,\n src=ps_source+'[disable_connectivity_check]',\n dest=ps_dest+'[disable_connected_check]')\n\n # ebgp_multihop_enable\n self.add_leaf(cmd=ShowBgpPeerSession,\n src=ps_source+'[ebgp_multihop_enable]',\n dest=ps_dest+'[ebgp_multihop_enable]')\n\n # ebgp_multihop_max_hop\n self.add_leaf(cmd=ShowBgpPeerSession,\n src=ps_source+'[ebgp_multihop_limit]',\n dest=ps_dest+'[ebgp_multihop_max_hop]')\n\n # local_as_as_no\n self.add_leaf(cmd=ShowBgpPeerSession,\n src=ps_source+'[local_as]',\n dest=ps_dest+'[local_as_as_no]')\n\n # local_no_prepend - N/A\n # local_dual_as - N/A\n # local_replace_as - N/A\n\n # password_text\n self.add_leaf(cmd=ShowBgpPeerSession,\n src=ps_source+'[password]',\n dest=ps_dest+'[password_text]')\n\n # remote_as\n self.add_leaf(cmd=ShowBgpPeerSession,\n src=ps_source+'[remote_as]',\n dest=ps_dest+'[remote_as]')\n\n # shutdown\n self.add_leaf(cmd=ShowBgpPeerSession,\n src=ps_source+'[shutdown]',\n dest=ps_dest+'[shutdown]')\n\n # keepalive_interval\n self.add_leaf(cmd=ShowBgpPeerSession,\n src=ps_source+'[keepalive]',\n dest=ps_dest+'[keepalive_interval]')\n\n # holdtime\n self.add_leaf(cmd=ShowBgpPeerSession,\n src=ps_source+'[holdtime]',\n dest=ps_dest+'[holdtime]')\n\n # transport_connection_mode\n 
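# like the other peer_session leaves, copied as-is from the ShowBgpPeerSession parser output; (?P<peer_session>.*) fans out over every template name\n 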
self.add_leaf(cmd=ShowBgpPeerSession,\n src=ps_source+'[transport_connection_mode]',\n dest=ps_dest+'[transport_connection_mode]')\n\n # update_source\n self.add_leaf(cmd=ShowBgpPeerSession,\n src=ps_source+'[update_source]',\n dest=ps_dest+'[update_source]')\n\n # ======================================================================\n # peer_policy\n # ======================================================================\n \n # pp_name\n pp_src = '[peer_policy][(?P<peer_policy>.*)]'\n pp_dest = 'info[instance][default][peer_policy][(?P<peer_policy>.*)]'\n\n # allowas_in\n self.add_leaf(cmd=ShowBgpPeerPolicy,\n src=pp_src+'[allowas_in]',\n dest=pp_dest+'[allowas_in]')\n\n # allowas_in_as_number - N/A\n\n # as_override\n self.add_leaf(cmd=ShowBgpPeerPolicy,\n src=pp_src+'[as_override]',\n dest=pp_dest+'[as_override]')\n\n # default_originate\n self.add_leaf(cmd=ShowBgpPeerPolicy,\n src=pp_src+'[default_originate]',\n dest=pp_dest+'[default_originate]')\n\n # default_originate_route_map\n self.add_leaf(cmd=ShowBgpPeerPolicy,\n src=pp_src+'[default_originate_route_map]',\n dest=pp_dest+'[default_originate_route_map]')\n\n # route_map_name_in\n self.add_leaf(cmd=ShowBgpPeerPolicy,\n src=pp_src+'[route_map_name_in]',\n dest=pp_dest+'[route_map_name_in]')\n\n # route_map_name_out\n self.add_leaf(cmd=ShowBgpPeerPolicy,\n src=pp_src+'[route_map_name_out]',\n dest=pp_dest+'[route_map_name_out]')\n\n # maximum_prefix_max_prefix_no\n self.add_leaf(cmd=ShowBgpPeerPolicy,\n src=pp_src+'[maximum_prefix_max_prefix_no]',\n dest=pp_dest+'[maximum_prefix_max_prefix_no]')\n\n # maximum_prefix_threshold - N/A\n # maximum_prefix_restart - N/A\n # maximum_prefix_warning_only - N/A\n \n # next_hop_self\n self.add_leaf(cmd=ShowBgpPeerPolicy,\n src=pp_src+'[next_hop_self]',\n dest=pp_dest+'[next_hop_self]')\n\n # route_reflector_client\n self.add_leaf(cmd=ShowBgpPeerPolicy,\n src=pp_src+'[route_reflector_client]',\n dest=pp_dest+'[route_reflector_client]')\n\n # send_community\n self.add_leaf(cmd=ShowBgpPeerPolicy,\n src=pp_src+'[send_community]',\n dest=pp_dest+'[send_community]')\n\n # soft_reconfiguration\n self.add_leaf(cmd=ShowBgpPeerPolicy,\n src=pp_src+'[soft_reconfiguration]',\n dest=pp_dest+'[soft_reconfiguration]')\n\n # soo\n self.add_leaf(cmd=ShowBgpPeerPolicy,\n src=pp_src+'[site_of_origin]',\n dest=pp_dest+'[soo]')\n\n # ======================================================================\n # vrf\n # ======================================================================\n \n # vrf_id\n vrf_src = '[vrf][(?P<vrf>.*)]'\n vrf_dest = 'info[instance][default][vrf][(?P<vrf>.*)]'\n\n # always_compare_med - N/A\n # bestpath_compare_routerid - N/A\n # bestpath_cost_community_ignore - N/A\n # bestpath_med_missin_at_worst - N/A\n\n # cluster_id\n self.add_leaf(cmd=ShowBgpProcessVrfAll,\n src=vrf_src+'[cluster_id]',\n dest=vrf_dest+'[cluster_id]',\n vrf=vrf)\n\n # confederation_identifier\n self.add_leaf(cmd=ShowBgpProcessVrfAll,\n src=vrf_src+'[confed_id]',\n dest=vrf_dest+'[confederation_identifier]',\n vrf=vrf)\n\n # confederation_peer_as - N/A\n\n # graceful_restart\n self.add_leaf(cmd=ShowBgpProcessVrfAll,\n src=vrf_src+'[graceful_restart]',\n dest=vrf_dest+'[graceful_restart]',\n vrf=vrf)\n\n # graceful_restart_restart_time\n self.add_leaf(cmd=ShowBgpProcessVrfAll,\n src=vrf_src+'[graceful_restart_restart_time]',\n dest=vrf_dest+'[graceful_restart_restart_time]',\n vrf=vrf)\n\n # graceful_restart_stalepath_time\n self.add_leaf(cmd=ShowBgpProcessVrfAll,\n 
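# stale-path timer used by graceful restart\n 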
src=vrf_src+'[graceful_restart_stalepath_time]',\n dest=vrf_dest+'[graceful_restart_stalepath_time]',\n vrf=vrf)\n\n # log_neighbor_changes - N/A\n\n # router_id\n self.add_leaf(cmd=ShowBgpProcessVrfAll,\n src=vrf_src+'[router_id]',\n dest=vrf_dest+'[router_id]',\n vrf=vrf)\n\n # keepalive_interval - N/A\n # holdtime - N/A\n # enforce_first_as - N/A\n # fast_external_fallover - N/A\n # default_choice_ipv4_unicast - N/A\n\n # ======================================================================\n # vrf: address_family\n # ======================================================================\n\n # address_family\n # addr_family\n af_src = '[address_family][(?P<afname>{get_af_name})]'\n af_dest = '[address_family][(?P<afname>{get_af_name})]'\n\n # Add empty address family\n self.add_leaf(cmd=ShowBgpProcessVrfAll,\n src=vrf_src+'[address_family]',\n dest=vrf_dest+'[address_family]',\n action=self.get_af_key,\n vrf=vrf)\n\n # dampening\n self.add_leaf(cmd=ShowBgpVrfAllAllDampeningParameters,\n src=vrf_src+af_src+'[dampening]',\n dest=vrf_dest+af_dest+'[dampening]',\n vrf=vrf,\n address_family=address_family)\n\n # dampening_route_map\n self.add_leaf(cmd=ShowBgpVrfAllAllDampeningParameters,\n src=vrf_src+af_src+'[dampening_route_map]',\n dest=vrf_dest+af_dest+'[dampening_route_map]',\n vrf=vrf,\n address_family=address_family)\n\n # dampening_half_life_time\n self.add_leaf(cmd=ShowBgpVrfAllAllDampeningParameters,\n src=vrf_src+af_src+'[dampening_half_life_time]',\n dest=vrf_dest+af_dest+'[dampening_half_life_time]',\n vrf=vrf,\n address_family=address_family)\n\n # dampening_reuse_time\n self.add_leaf(cmd=ShowBgpVrfAllAllDampeningParameters,\n src=vrf_src+af_src+'[dampening_reuse_time]',\n dest=vrf_dest+af_dest+'[dampening_reuse_time]',\n vrf=vrf,\n address_family=address_family)\n\n # dampening_suppress_time\n self.add_leaf(cmd=ShowBgpVrfAllAllDampeningParameters,\n src=vrf_src+af_src+'[dampening_suppress_time]',\n dest=vrf_dest+af_dest+'[dampening_suppress_time]',\n vrf=vrf,\n address_family=address_family)\n\n # dampening_max_suppress_time\n self.add_leaf(cmd=ShowBgpVrfAllAllDampeningParameters,\n src=vrf_src+af_src+'[dampening_max_suppress_time]',\n dest=vrf_dest+af_dest+'[dampening_max_suppress_time]',\n vrf=vrf,\n address_family=address_family)\n\n # nexthop_route_map - N/A\n\n # nexthop_trigger_enable\n self.add_leaf(cmd=ShowBgpVrfAllAllNextHopDatabase,\n src=vrf_src+af_src+'[af_nexthop_trigger_enable]',\n dest=vrf_dest+af_dest+'[nexthop_trigger_enable]',\n vrf=vrf,\n address_family=address_family)\n\n # nexthop_trigger_delay_critical\n self.add_leaf(cmd=ShowBgpVrfAllAllNextHopDatabase,\n src=vrf_src+af_src+'[nexthop_trigger_delay_critical]',\n dest=vrf_dest+af_dest+'[nexthop_trigger_delay_critical]',\n vrf=vrf,\n address_family=address_family)\n\n # nexthop_trigger_delay_non_critical\n self.add_leaf(cmd=ShowBgpVrfAllAllNextHopDatabase,\n src=vrf_src+af_src+'[nexthop_trigger_delay_non_critical]',\n dest=vrf_dest+af_dest+'[nexthop_trigger_delay_non_critical]',\n vrf=vrf,\n address_family=address_family)\n\n # client_to_client_reflection - N/A\n\n # distance_extern_as\n self.add_leaf(cmd=ShowRoutingVrfAll,\n src=vrf_src+af_src+'[bgp_distance_extern_as]',\n dest=vrf_dest+af_dest+'[distance_extern_as]',\n vrf=vrf)\n\n # distance_internal_as\n self.add_leaf(cmd=ShowRoutingVrfAll,\n src=vrf_src+af_src+'[bgp_distance_internal_as]',\n dest=vrf_dest+af_dest+'[distance_internal_as]',\n vrf=vrf)\n\n # distance_local\n self.add_leaf(cmd=ShowRoutingVrfAll,\n 
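# local admin distance comes from the routing-table parser (ShowRoutingVrfAll), not a BGP show command\n 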
src=vrf_src+af_src+'[bgp_distance_local]',\n dest=vrf_dest+af_dest+'[distance_local]',\n vrf=vrf)\n\n # maximum_paths_ebgp\n self.add_leaf(cmd=ShowBgpProcessVrfAll,\n src=vrf_src+af_src+'[ebgp_max_paths]',\n dest=vrf_dest+af_dest+'[maximum_paths_ebgp]',\n vrf=vrf)\n\n # maximum_paths_ibgp\n self.add_leaf(cmd=ShowBgpProcessVrfAll,\n src=vrf_src+af_src+'[ibgp_max_paths]',\n dest=vrf_dest+af_dest+'[maximum_paths_ibgp]',\n vrf=vrf)\n\n # maximum_paths_eibgp - N/A\n\n # aggregate_address_ipv4_address\n self.add_leaf(cmd=ShowBgpVrfAllAll,\n src=vrf_src+af_src+'[aggregate_address_ipv4_address]',\n dest=vrf_dest+af_dest+'[aggregate_address_ipv4_address]',\n vrf=vrf,\n address_family=address_family)\n\n # aggregate_address_ipv4_mask\n self.add_leaf(cmd=ShowBgpVrfAllAll,\n src=vrf_src+af_src+'[aggregate_address_ipv4_mask]',\n dest=vrf_dest+af_dest+'[aggregate_address_ipv4_mask]',\n vrf=vrf,\n address_family=address_family)\n\n # aggregate_address_as_set\n self.add_leaf(cmd=ShowBgpVrfAllAll,\n src=vrf_src+af_src+'[aggregate_address_as_set]',\n dest=vrf_dest+af_dest+'[aggregate_address_as_set]',\n vrf=vrf,\n address_family=address_family)\n\n # aggregate_address_summary_only\n self.add_leaf(cmd=ShowBgpVrfAllAll,\n src=vrf_src+af_src+'[aggregate_address_summary_only]',\n dest=vrf_dest+af_dest+'[aggregate_address_summary_only]',\n vrf=vrf,\n address_family=address_family)\n\n # network_number - N/A\n # network_mask - N/A\n # network_route_map - N/A\n # redist_isis - N/A\n # redist_isis_metric - N/A\n # redist_isis_route_policy - N/A\n # redist_ospf - N/A\n # redist_ospf_metric - N/A\n # redist_ospf_route_policy - N/A\n # redist_rip - N/A\n # redist_rip_metric - N/A\n # redist_rip_route_policy - N/A\n # redist_static - N/A\n # redist_static_metric - N/A\n # redist_static_route_policy - N/A\n # redist_connected - N/A\n # redist_connected_metric - N/A\n # redist_connected_route_policy - N/A\n\n # v6_aggregate_address_ipv6_address\n self.add_leaf(cmd=ShowBgpVrfAllAll,\n src=vrf_src+af_src+'[v6_aggregate_address_ipv6_address]',\n dest=vrf_dest+af_dest+'[v6_aggregate_address_ipv6_address]',\n vrf=vrf,\n address_family=address_family)\n\n # v6_aggregate_address_as_set\n self.add_leaf(cmd=ShowBgpVrfAllAll,\n src=vrf_src+af_src+'[v6_aggregate_address_as_set]',\n dest=vrf_dest+af_dest+'[v6_aggregate_address_as_set]',\n vrf=vrf,\n address_family=address_family)\n\n # v6_aggregate_address_summary_only\n self.add_leaf(cmd=ShowBgpVrfAllAll,\n src=vrf_src+af_src+'[v6_aggregate_address_summary_only]',\n dest=vrf_dest+af_dest+'[v6_aggregate_address_summary_only]',\n vrf=vrf,\n address_family=address_family)\n\n # v6_network_number - N/A\n # v6_network_route_map - N/A\n # v6_allocate_label_all - N/A\n # retain_rt_all - N/A\n \n # label_allocation_mode\n self.add_leaf(cmd=ShowBgpProcessVrfAll,\n src=vrf_src+af_src+'[label_mode]',\n dest=vrf_dest+af_dest+'[label_allocation_mode]',\n vrf=vrf)\n\n # ======================================================================\n # vrf: neighbor\n # ======================================================================\n\n # Get vrfs for input to 'show bgp vrf <vrf_name> all neighbors'\n\n self.add_leaf(cmd=ShowVrf,\n src='[vrfs]',\n dest='info[list_of_vrfs]',\n vrf=vrf)\n self.make()\n\n # Creating a list of all vrfs configured on the device. 
Looping through\n # each vrf to execute \"show bgp vrf <vrf_name> all neighbors\"\n if hasattr(self, 'info') and 'list_of_vrfs' in self.info:\n\n for vrf_name in sorted(self.info['list_of_vrfs']):\n\n if vrf_name == 'management':\n continue\n\n # neighbor_id\n nbr_src = '[neighbor][(?P<neighbor_id>.*)]'\n nbr_dest = 'info[instance][default][vrf][{vrf_name}][neighbor][(?P<neighbor_id>.*)]'.format(vrf_name=vrf_name)\n\n # fall_over_bfd\n self.add_leaf(cmd=ShowBgpVrfAllNeighbors,\n src=nbr_src+'[bfd_live_detection]',\n dest=nbr_dest+'[fall_over_bfd]',\n vrf=vrf_name,\n address_family=address_family,\n neighbor=neighbor)\n\n # suppress_four_byte_as_capability\n self.add_leaf(cmd=ShowBgpVrfAllNeighbors,\n src=nbr_src+'[suppress_four_byte_as_capability]',\n dest=nbr_dest+'[suppress_four_byte_as_capability]',\n vrf=vrf_name,\n address_family=address_family,\n neighbor=neighbor)\n\n # description\n self.add_leaf(cmd=ShowBgpVrfAllNeighbors,\n src=nbr_src+'[description]',\n dest=nbr_dest+'[description]',\n vrf=vrf_name,\n address_family=address_family,\n neighbor=neighbor)\n\n # disable_connected_check\n self.add_leaf(cmd=ShowBgpVrfAllNeighbors,\n src=nbr_src+'[disable_connected_check]',\n dest=nbr_dest+'[disable_connected_check]',\n vrf=vrf_name,\n address_family=address_family,\n neighbor=neighbor)\n\n # ebgp_multihop\n self.add_leaf(cmd=ShowBgpVrfAllNeighbors,\n src=nbr_src+'[ebgp_multihop]',\n dest=nbr_dest+'[ebgp_multihop]',\n vrf=vrf_name,\n address_family=address_family,\n neighbor=neighbor)\n\n # ebgp_multihop_max_hop\n self.add_leaf(cmd=ShowBgpVrfAllNeighbors,\n src=nbr_src+'[ebgp_multihop_max_hop]',\n dest=nbr_dest+'[ebgp_multihop_max_hop]',\n vrf=vrf_name,\n address_family=address_family,\n neighbor=neighbor)\n\n # inherit_peer_session\n self.add_leaf(cmd=ShowBgpVrfAllNeighbors,\n src=nbr_src+'[inherit_peer_session]',\n dest=nbr_dest+'[inherit_peer_session]',\n vrf=vrf_name,\n address_family=address_family,\n neighbor=neighbor)\n\n # local_as_as_no\n self.add_leaf(cmd=ShowBgpVrfAllNeighbors,\n src=nbr_src+'[local_as]',\n dest=nbr_dest+'[local_as_as_no]',\n vrf=vrf_name,\n address_family=address_family,\n neighbor=neighbor)\n\n # local_as_no_prepend - N/A\n # local_as_replace_as - N/A\n # local_as_dual_as - N/A\n \n # remote_as\n self.add_leaf(cmd=ShowBgpVrfAllNeighbors,\n src=nbr_src+'[remote_as]',\n dest=nbr_dest+'[remote_as]',\n vrf=vrf_name,\n address_family=address_family,\n neighbor=neighbor)\n\n # remove_private_as\n self.add_leaf(cmd=ShowBgpVrfAllNeighbors,\n src=nbr_src+'[remove_private_as]',\n dest=nbr_dest+'[remove_private_as]',\n vrf=vrf_name,\n address_family=address_family,\n neighbor=neighbor)\n\n # shutdown\n self.add_leaf(cmd=ShowBgpVrfAllNeighbors,\n src=nbr_src+'[shutdown]',\n dest=nbr_dest+'[shutdown]',\n vrf=vrf_name,\n address_family=address_family,\n neighbor=neighbor)\n\n # keepalive_interval\n self.add_leaf(cmd=ShowBgpVrfAllNeighbors,\n src=nbr_src+'[bgp_negotiated_keepalive_timers][keepalive_interval]',\n dest=nbr_dest+'[keepalive_interval]',\n vrf=vrf_name,\n address_family=address_family,\n neighbor=neighbor)\n\n # holdtime\n self.add_leaf(cmd=ShowBgpVrfAllNeighbors,\n src=nbr_src+'[bgp_negotiated_keepalive_timers][hold_time]',\n dest=nbr_dest+'[holdtime]',\n vrf=vrf_name,\n address_family=address_family,\n neighbor=neighbor)\n\n # bgp_version\n self.add_leaf(cmd=ShowBgpVrfAllNeighbors,\n src=nbr_src+'[bgp_version]',\n dest=nbr_dest+'[bgp_version]',\n vrf=vrf_name,\n address_family=address_family,\n neighbor=neighbor)\n \n # installed_prefixes - N/A\n\n # 
session_state\n self.add_leaf(cmd=ShowBgpVrfAllNeighbors,\n src=nbr_src+'[session_state]',\n dest=nbr_dest+'[session_state]',\n vrf=vrf_name,\n address_family=address_family,\n neighbor=neighbor)\n \n # bgp_negotiated_keepalive_timers\n # keepalive_interval\n self.add_leaf(cmd=ShowBgpVrfAllNeighbors,\n src=nbr_src+'[bgp_negotiated_keepalive_timers][keepalive_interval]',\n dest=nbr_dest+'[bgp_negotiated_keepalive_timers][keepalive_interval]',\n vrf=vrf_name,\n address_family=address_family,\n neighbor=neighbor)\n \n # bgp_negotiated_keepalive_timers\n # hold_time\n self.add_leaf(cmd=ShowBgpVrfAllNeighbors,\n src=nbr_src+'[bgp_negotiated_keepalive_timers][hold_time]',\n dest=nbr_dest+'[bgp_negotiated_keepalive_timers][hold_time]',\n vrf=vrf_name,\n address_family=address_family,\n neighbor=neighbor)\n\n # bgp_session_transport\n # connection\n # state\n self.add_leaf(cmd=ShowBgpVrfAllNeighbors,\n src=nbr_src+'[session_state]',\n dest=nbr_dest+'[bgp_session_transport][connection][state]',\n vrf=vrf_name,\n address_family=address_family,\n neighbor=neighbor)\n\n # bgp_session_transport\n # connection\n # mode\n self.add_leaf(cmd=ShowBgpVrfAllNeighbors,\n src=nbr_src+'[bgp_session_transport][connection][mode]',\n dest=nbr_dest+'[bgp_session_transport][connection][mode]',\n vrf=vrf_name,\n address_family=address_family,\n neighbor=neighbor)\n\n # bgp_session_transport\n # connection\n # last_reset\n self.add_leaf(cmd=ShowBgpVrfAllNeighbors,\n src=nbr_src+'[bgp_session_transport][connection][last_reset]',\n dest=nbr_dest+'[bgp_session_transport][connection][last_reset]',\n vrf=vrf_name,\n address_family=address_family,\n neighbor=neighbor)\n\n # bgp_session_transport\n # connection\n # reset_reason\n self.add_leaf(cmd=ShowBgpVrfAllNeighbors,\n src=nbr_src+'[bgp_session_transport][connection][reset_reason]',\n dest=nbr_dest+'[bgp_session_transport][connection][reset_reason]',\n vrf=vrf_name,\n address_family=address_family,\n neighbor=neighbor)\n \n # bgp_session_transport\n # transport\n # local_port\n self.add_leaf(cmd=ShowBgpVrfAllNeighbors,\n src=nbr_src+'[bgp_session_transport][transport][local_port]',\n dest=nbr_dest+'[bgp_session_transport][transport][local_port]',\n vrf=vrf_name,\n address_family=address_family,\n neighbor=neighbor)\n\n # bgp_session_transport\n # transport\n # local_host\n self.add_leaf(cmd=ShowBgpVrfAllNeighbors,\n src=nbr_src+'[bgp_session_transport][transport][local_host]',\n dest=nbr_dest+'[bgp_session_transport][transport][local_host]',\n vrf=vrf_name,\n address_family=address_family,\n neighbor=neighbor)\n\n # bgp_session_transport\n # transport\n # foreign_port\n self.add_leaf(cmd=ShowBgpVrfAllNeighbors,\n src=nbr_src+'[bgp_session_transport][transport][foreign_port]',\n dest=nbr_dest+'[bgp_session_transport][transport][foreign_port]',\n vrf=vrf_name,\n address_family=address_family,\n neighbor=neighbor)\n # bgp_session_transport\n # transport\n # foreign_host\n self.add_leaf(cmd=ShowBgpVrfAllNeighbors,\n src=nbr_src+'[bgp_session_transport][transport][foreign_host]',\n dest=nbr_dest+'[bgp_session_transport][transport][foreign_host]',\n vrf=vrf_name,\n address_family=address_family,\n neighbor=neighbor)\n \n # bgp_session_transport\n # transport\n # mss - N/A\n\n # minimum_neighbor_hold - N/A\n \n # up_time\n self.add_leaf(cmd=ShowBgpVrfAllNeighbors,\n src=nbr_src+'[up_time]',\n dest=nbr_dest+'[up_time]',\n vrf=vrf_name,\n address_family=address_family,\n neighbor=neighbor)\n\n # update_source\n self.add_leaf(cmd=ShowBgpVrfAllNeighbors,\n 
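# the interface name is normalized by the intf_capitalize action below\n 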
src=nbr_src+'[update_source]',\n dest=nbr_dest+'[update_source]',\n vrf=vrf_name,\n address_family=address_family,\n neighbor=neighbor,\n action=self.intf_capitalize)\n\n # password_text\n self.add_leaf(cmd=ShowBgpVrfAllNeighbors,\n src=nbr_src+'[tcp_md5_auth]',\n dest=nbr_dest+'[password_text]',\n vrf=vrf_name,\n address_family=address_family,\n neighbor=neighbor)\n\n # bgp_negotiated_capabilities\n # route_refresh\n # four_octets_asn\n # vpnv4_unicast\n # vpnv6_unicast\n # ipv4_mvpn\n # graceful_restart\n # enhanced_refresh\n # multisession\n # stateful_switchover\n self.add_leaf(cmd=ShowBgpVrfAllNeighbors,\n src=nbr_src+'[bgp_negotiated_capabilities]',\n dest=nbr_dest+'[bgp_negotiated_capabilities]',\n vrf=vrf_name,\n address_family=address_family,\n neighbor=neighbor)\n\n # bgp_neighbor_counters\n # messages\n # sent/received\n # opens\n # updates\n # notifications\n # keepalives\n # route_refreshes\n self.add_leaf(cmd=ShowBgpVrfAllNeighbors,\n src=nbr_src+'[bgp_neighbor_counters]',\n dest=nbr_dest+'[bgp_neighbor_counters]',\n vrf=vrf_name,\n address_family=address_family,\n neighbor=neighbor)\n\n # ==============================================================\n # vrf: neighbor - address_family\n # ==============================================================\n\n # address_family\n # nbr_af_name\n # bgp_table_version\n # session_state\n req_keys = ['bgp_table_version','session_state']\n for key in req_keys:\n self.add_leaf(cmd=ShowBgpVrfAllNeighbors,\n src=nbr_src+'[address_family][(?P<address_family>{{change_label_af_name}})][{}]'.format(key),\n dest=nbr_dest+'[address_family][(?P<address_family>{{change_label_af_name}})][{}]'.format(key),\n vrf=vrf_name,\n address_family=address_family,\n neighbor=neighbor)\n # address_family\n # nbr_af_name\n # routing_table_version - N/A\n\n # address_family\n # nbr_af_name\n # prefixes\n # total_entries\n # memory_usage\n self.add_leaf(cmd=ShowBgpVrfAllAllSummary,\n src='[vrf][(?P<vrf>.*)][neighbor][(?P<neighbor>.*)][address_family][(?P<address_family>{change_label_af_name})][prefixes]',\n dest='info[instance][default][vrf][(?P<vrf>.*)][neighbor][(?P<neighbor>.*)][address_family][(?P<address_family>{change_label_af_name})][prefixes]',\n vrf=vrf,\n address_family=address_family)\n\n # address_family\n # nbr_af_name\n # path\n # total_entries\n # memory_usage\n self.add_leaf(cmd=ShowBgpVrfAllAllSummary,\n src='[vrf][(?P<vrf>.*)][neighbor][(?P<neighbor>.*)][address_family][(?P<address_family>{change_label_af_name})][path]',\n dest='info[instance][default][vrf][(?P<vrf>.*)][neighbor][(?P<neighbor>.*)][address_family][(?P<address_family>{change_label_af_name})][path]',\n vrf=vrf,\n address_family=address_family)\n\n # total_memory - N/A\n # allowas_in - N/A\n # allowas_in_as_number - N/A\n\n # inherit_peer_policy\n # inherit_peer_seq\n self.add_leaf(cmd=ShowBgpVrfAllNeighbors,\n src=nbr_src+'[address_family][(?P<address_family>{change_label_af_name})][inherit_peer_policy]',\n dest=nbr_dest+'[address_family][(?P<address_family>{change_label_af_name})][inherit_peer_policy]',\n vrf=vrf_name,\n address_family=address_family,\n neighbor=neighbor)\n # maximum_prefix_max_prefix_no\n self.add_leaf(cmd=ShowBgpVrfAllNeighbors,\n src=nbr_src+'[address_family][(?P<address_family>{change_label_af_name})][maximum_prefix_max_prefix_no]',\n dest=nbr_dest+'[address_family][(?P<address_family>{change_label_af_name})][maximum_prefix_max_prefix_no]',\n vrf=vrf_name,\n address_family=address_family,\n neighbor=neighbor)\n # maximum_prefix_threshold - N/A\n # 
maximum_prefix_restart - N/A\n # maximum_prefix_warning_only - N/A\n\n # address_family\n # nbr_af_name\n # route_map_name_in\n self.add_leaf(cmd=ShowBgpVrfAllNeighbors,\n src=nbr_src+'[address_family][(?P<address_family>{change_label_af_name})][route_map_name_in]',\n dest=nbr_dest+'[address_family][(?P<address_family>{change_label_af_name})][route_map_name_in]',\n vrf=vrf_name,\n address_family=address_family,\n neighbor=neighbor)\n # address_family\n # nbr_af_name\n # route_map_name_out\n self.add_leaf(cmd=ShowBgpVrfAllNeighbors,\n src=nbr_src+'[address_family][(?P<address_family>{change_label_af_name})][route_map_name_out]',\n dest=nbr_dest+'[address_family][(?P<address_family>{change_label_af_name})][route_map_name_out]',\n vrf=vrf_name,\n address_family=address_family,\n neighbor=neighbor)\n # route_reflector_client\n self.add_leaf(cmd=ShowBgpVrfAllNeighbors,\n src=nbr_src+'[address_family][(?P<address_family>{change_label_af_name})][route_reflector_client]',\n dest=nbr_dest+'[address_family][(?P<address_family>{change_label_af_name})][route_reflector_client]',\n vrf=vrf_name,\n address_family=address_family,\n neighbor=neighbor)\n # address_family\n # nbr_af_name\n # send_community\n self.add_leaf(cmd=ShowBgpVrfAllNeighbors,\n src=nbr_src+'[address_family][(?P<address_family>{change_label_af_name})][send_community]',\n dest=nbr_dest+'[address_family][(?P<address_family>{change_label_af_name})][send_community]',\n vrf=vrf_name,\n address_family=address_family,\n neighbor=neighbor)\n # address_family\n # nbr_af_name\n # soft_configuration\n self.add_leaf(cmd=ShowBgpVrfAllNeighbors,\n src=nbr_src+'[address_family][(?P<address_family>{change_label_af_name})][soft_configuration]',\n dest=nbr_dest+'[address_family][(?P<address_family>{change_label_af_name})][soft_configuration]',\n vrf=vrf_name,\n address_family=address_family,\n neighbor=neighbor)\n # next_hop_self\n self.add_leaf(cmd=ShowBgpVrfAllNeighbors,\n src=nbr_src+'[address_family][(?P<address_family>{change_label_af_name})][next_hop_self]',\n dest=nbr_dest+'[address_family][(?P<address_family>{change_label_af_name})][next_hop_self]',\n vrf=vrf_name,\n address_family=address_family,\n neighbor=neighbor)\n # as_override\n self.add_leaf(cmd=ShowBgpVrfAllNeighbors,\n src=nbr_src+'[address_family][(?P<address_family>{change_label_af_name})][as_override]',\n dest=nbr_dest+'[address_family][(?P<address_family>{change_label_af_name})][as_override]',\n vrf=vrf_name,\n address_family=address_family,\n neighbor=neighbor)\n # default_originate\n self.add_leaf(cmd=ShowBgpVrfAllNeighbors,\n src=nbr_src+'[address_family][(?P<address_family>{change_label_af_name})][default_originate]',\n dest=nbr_dest+'[address_family][(?P<address_family>{change_label_af_name})][default_originate]',\n vrf=vrf_name,\n address_family=address_family,\n neighbor=neighbor)\n # default_originate_route_map\n self.add_leaf(cmd=ShowBgpVrfAllNeighbors,\n src=nbr_src+'[address_family][(?P<address_family>{change_label_af_name})][default_originate_route_map]',\n dest=nbr_dest+'[address_family][(?P<address_family>{change_label_af_name})][default_originate_route_map]',\n vrf=vrf_name,\n address_family=address_family,\n neighbor=neighbor)\n # soo\n self.add_leaf(cmd=ShowBgpVrfAllNeighbors,\n src=nbr_src+'[address_family][(?P<address_family>{change_label_af_name})][soo]',\n dest=nbr_dest+'[address_family][(?P<address_family>{change_label_af_name})][soo]',\n vrf=vrf_name,\n address_family=address_family,\n neighbor=neighbor)\n 
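# 'info' above carries configuration/session state; the 'table' structure built next mirrors the BGP table per vrf and address family\n 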
########################################################################\n # table\n ########################################################################\n\n # vrf\n # vrf_id\n # address_family\n # af_name\n table_src = '[vrf][(?P<vrf_name>.*)][address_family][(?P<address_family>{change_label_af_name})]'\n table_dest = 'table[instance][default]'+ table_src\n\n # route_identifier\n self.add_leaf(cmd=ShowBgpVrfAllAllSummary,\n src='[vrf][(?P<vrf_name>.*)][neighbor][(?P<neighbor>.*)][address_family][(?P<address_family>{change_label_af_name})][route_identifier]',\n dest=table_dest+'[route_identifier]',\n vrf=vrf,\n address_family=address_family)\n\n # local_as\n self.add_leaf(cmd=ShowBgpVrfAllAllSummary,\n src='[vrf][(?P<vrf_name>.*)][neighbor][(?P<neighbor>.*)][address_family][(?P<address_family>{change_label_af_name})][local_as][(?P<local_as>.*)]',\n dest=table_dest+'[local_as][(?P<local_as>.*)]',\n vrf=vrf,\n address_family=address_family)\n\n # bgp_table_version\n self.add_leaf(cmd=ShowBgpVrfAllAll,\n src=table_src+'[bgp_table_version]',\n dest=table_dest+'[bgp_table_version]',\n vrf=vrf,\n address_family=address_family)\n\n # routing_table_version - N/A\n\n # route_distinguisher\n self.add_leaf(cmd=ShowBgpVrfAllAll,\n src=table_src+'[route_distinguisher]',\n dest=table_dest+'[route_distinguisher]',\n vrf=vrf,\n address_family=address_family)\n\n # default_vrf\n self.add_leaf(cmd=ShowBgpVrfAllAll,\n src=table_src+'[default_vrf]',\n dest=table_dest+'[default_vrf]',\n vrf=vrf,\n address_family=address_family)\n\n # paths - N/A\n\n # prefixes\n # prefix\n prefix_src = '[prefixes][(?P<prefix>.*)]'\n prefix_dest = '[prefixes][(?P<prefix>.*)]'\n\n # table_version - N/A\n\n # index\n # index\n idx_src = prefix_src+'[index][(?P<index>.*)]'\n idx_dest = prefix_dest+'[index][(?P<index>.*)]'\n\n # next_hop_igp_metric - N/A\n # gateway - N/A\n # cluster_id - N/A\n # update_group - N/A\n\n # next_hop\n self.add_leaf(cmd=ShowBgpVrfAllAll,\n src=table_src+idx_src+'[next_hop]',\n dest=table_dest+idx_dest+'[next_hop]',\n vrf=vrf,\n address_family=address_family)\n\n # status_codes\n self.add_leaf(cmd=ShowBgpVrfAllAll,\n src=table_src+idx_src+'[status_codes]',\n dest=table_dest+idx_dest+'[status_codes]',\n vrf=vrf,\n address_family=address_family)\n\n # origin_codes\n self.add_leaf(cmd=ShowBgpVrfAllAll,\n src=table_src+idx_src+'[origin_codes]',\n dest=table_dest+idx_dest+'[origin_codes]',\n vrf=vrf,\n address_family=address_family)\n\n # metric\n self.add_leaf(cmd=ShowBgpVrfAllAll,\n src=table_src+idx_src+'[metric]',\n dest=table_dest+idx_dest+'[metric]',\n vrf=vrf,\n address_family=address_family)\n\n # localpref\n self.add_leaf(cmd=ShowBgpVrfAllAll,\n src=table_src+idx_src+'[localprf]',\n dest=table_dest+idx_dest+'[localpref]',\n vrf=vrf,\n address_family=address_family)\n\n # weight\n self.add_leaf(cmd=ShowBgpVrfAllAll,\n src=table_src+idx_src+'[weight]',\n dest=table_dest+idx_dest+'[weight]',\n vrf=vrf,\n address_family=address_family)\n\n # ext_community - N/A\n # mpls_labels_inout - N/A\n # originator - N/A\n # cluster_list - N/A\n\n ########################################################################\n # routes_per_peer\n ########################################################################\n\n # Routes per peer top level key\n if neighbor:\n rpp_src = '[vrf][(?P<vrf>.*)][neighbor][{neighbor}][address_family][(' \\\n '?P<address_family>.*)]'.format(neighbor=neighbor)\n rpp_dest = 'routes_per_peer[instance][default][vrf][(?P<vrf>.*)][neighbor][' \\\n 
'{neighbor}][address_family][(?P<address_family>{{change_label_af_name}})]'.format(neighbor=neighbor)\n else:\n rpp_src = '[vrf][(?P<vrf>.*)][neighbor][(?P<neighbor>.*)][address_family][(?P<address_family>{change_label_af_name})]'\n rpp_dest = 'routes_per_peer[instance][default][vrf][(?P<vrf>.*)][neighbor][(?P<neighbor>.*)][address_family][(?P<address_family>{change_label_af_name})]'\n rpp_keys = ['[msg_rcvd]', '[msg_sent]', '[tbl_ver]', '[inq]', '[outq]', '[up_down]','[state_pfxrcd]']\n rpp_dest_keys = ['[msg_rcvd]', '[msg_sent]', '[tbl_ver]', '[input_queue]', '[output_queue]', '[up_down]', '[state_pfxrcd]']\n\n for i, key in enumerate(rpp_keys):\n self.add_leaf(cmd=ShowBgpVrfAllAllSummary,\n src=rpp_src+key,\n dest=rpp_dest+rpp_dest_keys[i],\n vrf=vrf,\n address_family=address_family)\n\n if hasattr(self, 'info') and 'list_of_vrfs' in self.info:\n\n # Got vrfs from \"vrf: neighbor\" section for below commands\n for vrf_name in sorted(self.info['list_of_vrfs']):\n\n # remote_as\n self.add_leaf(cmd=ShowBgpVrfAllNeighbors,\n src='[neighbor][(?P<neighbor_id>.*)][remote_as][(?P<remote_as>.*)]',\n dest='routes_per_peer[instance][default][vrf][{vrf_name}][neighbor][(?P<neighbor_id>.*)][remote_as][(?P<remote_as>.*)]'.format(vrf_name=vrf_name),\n vrf=vrf_name,\n address_family=address_family,\n neighbor=neighbor)\n\n # Get neighbors for input to:\n # * 'show bgp vrf <vrf_name> all neighbors <neighbor> advertised-routes'\n # * 'show bgp vrf <vrf_name> all neighbors <neighbor> routes'\n # * 'show bgp vrf <vrf_name> all neighbors <neighbor> received-routes'\n self.add_leaf(cmd=ShowBgpVrfAllNeighbors,\n src='[neighbor]',\n dest='routes_per_peer[list_of_neighbors]',\n vrf=vrf_name,\n address_family=address_family,\n neighbor=neighbor)\n self.make()\n\n if hasattr(self, 'routes_per_peer') and\\\n 'list_of_neighbors' in self.routes_per_peer:\n\n for nbr in sorted(self.routes_per_peer['list_of_neighbors']):\n\n # advertised\n self.add_leaf(cmd=ShowBgpVrfAllNeighborsAdvertisedRoutes,\n src='[vrf][{vrf_name}][neighbor][{neighbor}][address_family][(?P<address_family>{{change_label_af_name}})][advertised]'.format(vrf_name=vrf_name, neighbor=nbr),\n dest='routes_per_peer[instance][default][vrf][{vrf_name}][neighbor][{neighbor}][address_family][(?P<address_family>{{change_label_af_name}})][advertised]'.format(vrf_name=vrf_name, neighbor=nbr),\n vrf=vrf_name,\n address_family=address_family,\n neighbor=nbr)\n\n # advertised - route_distinguisher\n self.add_leaf(cmd=ShowBgpVrfAllNeighborsAdvertisedRoutes,\n src='[vrf][{vrf_name}][neighbor][{neighbor}][address_family][(?P<address_family>{{change_label_af_name}})][route_distinguisher]'.format(vrf_name=vrf_name, neighbor=nbr),\n dest='routes_per_peer[instance][default][vrf][{vrf_name}][neighbor][{neighbor}][address_family][(?P<address_family>{{change_label_af_name}})][route_distinguisher]'.format(vrf_name=vrf_name, neighbor=nbr),\n vrf=vrf_name,\n address_family=address_family,\n neighbor=nbr)\n\n # advertised - default_vrf\n self.add_leaf(cmd=ShowBgpVrfAllNeighborsAdvertisedRoutes,\n src='[vrf][{vrf_name}][neighbor][{neighbor}][address_family][(?P<address_family>{{change_label_af_name}})][default_vrf]'.format(vrf_name=vrf_name, neighbor=nbr),\n dest='routes_per_peer[instance][default][vrf][{vrf_name}][neighbor][{neighbor}][address_family][(?P<address_family>{{change_label_af_name}})][default_vrf]'.format(vrf_name=vrf_name, neighbor=nbr),\n vrf=vrf_name,\n address_family=address_family,\n neighbor=nbr)\n # routes\n self.add_leaf(cmd=ShowBgpVrfAllNeighborsRoutes,\n 
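# routes learned from this neighbor, keyed by vrf and neighbor\n 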
src='[vrf][{vrf_name}][neighbor][{neighbor}][address_family][(?P<address_family>{{change_label_af_name}})][routes]'.format(vrf_name=vrf_name, neighbor=nbr),\n dest='routes_per_peer[instance][default][vrf][{vrf_name}][neighbor][{neighbor}][address_family][(?P<address_family>{{change_label_af_name}})][routes]'.format(vrf_name=vrf_name, neighbor=nbr),\n vrf=vrf_name,\n address_family=address_family,\n neighbor=nbr)\n # routes - route_distinguisher\n self.add_leaf(cmd=ShowBgpVrfAllNeighborsRoutes,\n src='[vrf][{vrf_name}][neighbor][{neighbor}][address_family][(?P<address_family>{{change_label_af_name}})][route_distinguisher]'.format(vrf_name=vrf_name, neighbor=nbr),\n dest='routes_per_peer[instance][default][vrf][{vrf_name}][neighbor][{neighbor}][address_family][(?P<address_family>{{change_label_af_name}})][route_distinguisher]'.format(vrf_name=vrf_name, neighbor=nbr),\n vrf=vrf_name,\n address_family=address_family,\n neighbor=nbr)\n # routes - default_vrf\n self.add_leaf(cmd=ShowBgpVrfAllNeighborsRoutes,\n src='[vrf][{vrf_name}][neighbor][{neighbor}][address_family][(?P<address_family>{{change_label_af_name}})][default_vrf]'.format(vrf_name=vrf_name, neighbor=nbr),\n dest='routes_per_peer[instance][default][vrf][{vrf_name}][neighbor][{neighbor}][address_family][(?P<address_family>{{change_label_af_name}})][default_vrf]'.format(vrf_name=vrf_name, neighbor=nbr),\n vrf=vrf_name,\n address_family=address_family,\n neighbor=nbr)\n # received_routes\n self.add_leaf(cmd=ShowBgpVrfAllNeighborsReceivedRoutes,\n src='[vrf][{vrf_name}][neighbor][{neighbor}][address_family][(?P<address_family>{{change_label_af_name}})][received_routes]'.format(vrf_name=vrf_name, neighbor=nbr),\n dest='routes_per_peer[instance][default][vrf][{vrf_name}][neighbor][{neighbor}][address_family][(?P<address_family>{{change_label_af_name}})][received_routes]'.format(vrf_name=vrf_name, neighbor=nbr),\n vrf=vrf_name,\n address_family=address_family,\n neighbor=nbr)\n # received_routes - route_distinguisher\n self.add_leaf(cmd=ShowBgpVrfAllNeighborsReceivedRoutes,\n src='[vrf][{vrf_name}][neighbor][{neighbor}][address_family][(?P<address_family>{{change_label_af_name}})][route_distinguisher]'.format(vrf_name=vrf_name, neighbor=nbr),\n dest='routes_per_peer[instance][default][vrf][{vrf_name}][neighbor][{neighbor}][address_family][(?P<address_family>{{change_label_af_name}})][route_distinguisher]'.format(vrf_name=vrf_name, neighbor=nbr),\n vrf=vrf_name,\n address_family=address_family,\n neighbor=nbr)\n # received_routes - default_vrf\n self.add_leaf(cmd=ShowBgpVrfAllNeighborsReceivedRoutes,\n src='[vrf][{vrf_name}][neighbor][{neighbor}][address_family][(?P<address_family>{{change_label_af_name}})][default_vrf]'.format(vrf_name=vrf_name, neighbor=nbr),\n dest='routes_per_peer[instance][default][vrf][{vrf_name}][neighbor][{neighbor}][address_family][(?P<address_family>{{change_label_af_name}})][default_vrf]'.format(vrf_name=vrf_name, neighbor=nbr),\n vrf=vrf_name,\n address_family=address_family,\n neighbor=nbr)\n # clear list of neighbors for next vrf\n del self.routes_per_peer['list_of_neighbors']\n\n ########################################################################\n # Final Structure\n ########################################################################\n\n # Make final Ops structure\n self.make(final_call=True)\n\n # Delete unnecessary keys\n try:\n del self.info['list_of_vrfs']\n except:\n pass\n\n", "id": "1124153", "language": "Python", "matching_score": 7.431079387664795, "max_stars_count": 94, "path": 
"pkgs/ops-pkg/src/genie/libs/ops/bgp/nxos/bgp.py" }, { "content": "''' \nBGP Genie Ops Object for IOSXE - CLI.\n'''\n\nimport re\n\n\n# Super class\nfrom genie.libs.ops.bgp.bgp import Bgp as SuperBgp\n\n\nclass Bgp(SuperBgp):\n '''BGP Genie Ops Object'''\n\n # Callables\n def get_af_key(self, item):\n return {key: {} for key in item.keys()}\n\n def get_af_name(self, item):\n p = re.compile(r'(?P<afname>.*) RD')\n m = p.match(item)\n if m:\n item = m.groupdict()['afname']\n return item\n\n def learn(self, vrf='', address_family='', neighbor=''):\n '''Learn BGP Ops'''\n\n # if user input address family with RD, trim the rd,\n # otherwise some command will fail to execute\n af_regex=re.compile(r'(?P<af>[\\w\\s]+) +RD +[:\\d]+')\n m = af_regex.match(address_family)\n if m:\n af = m.groupdict()['af']\n else:\n af = address_family\n\n restricted = {'ipv4 unicast', 'ipv6 unicast'}\n ########################################################################\n # info\n ########################################################################\n \n # Global callable\n self.callables = {'get_af_name': self.get_af_name}\n\n # bgp_id\n if af in restricted:\n bgp_nbr_class = 'show bgp {address_family} vrf {vrf} neighbors {neighbor}'.\\\n format(address_family=address_family, vrf=vrf, neighbor=neighbor)\n bgp_adv_route_class = 'show bgp {address_family} neighbors {neighbor} advertised-routes'.\\\n format(address_family=address_family, neighbor=neighbor)\n bgp_rec_rout_class = 'show bgp {address_family} neighbors {neighbor} received-routes'.\\\n format(address_family=address_family, neighbor=neighbor)\n bgp_route_class = 'show bgp {address_family} vrf {vrf} neighbors {neighbor} routes'.\\\n format(address_family=address_family, vrf=vrf, neighbor=neighbor)\n else:\n bgp_nbr_class = 'show bgp {address_family} all neighbors {neighbor}'.\\\n format(address_family=address_family, neighbor=neighbor)\n bgp_adv_route_class = 'show bgp {address_family} all neighbors {neighbor} advertised-routes'.\\\n format(address_family=address_family, neighbor=neighbor)\n bgp_rec_rout_class = 'show bgp {address_family} all neighbors {neighbor} received-routes'.\\\n format(address_family=address_family, neighbor=neighbor)\n bgp_route_class = 'show bgp {address_family} all neighbors {neighbor} routes'.\\\n format(address_family=address_family, neighbor=neighbor)\n\n for cmd in ['show bgp {address_family} all summary'.format(address_family=af), \n 'show bgp vrf {vrf} all summary'.format(vrf=vrf),\n 'show bgp all summary']:\n self.add_leaf(cmd=cmd,\n src='[bgp_id]',\n dest='info[instance][default][bgp_id]',\n vrf=vrf,\n address_family=af)\n\n # protocol_state - N/A\n\n # ======================================================================\n # peer_session\n # ======================================================================\n\n # peer_session\n # ps_name\n ps_source = '[peer_session][(?P<peer_session>.*)]'\n ps_dest = 'info[instance][default][peer_session][(?P<peer_session>.*)]'\n\n peer_session_keys = ['fall_over_bfd',\\\n 'suppress_four_byte_as_capability', 'description',\\\n 'disable_connected_check', 'ebgp_multihop_enable',\\\n 'ebgp_multihop_max_hop', 'local_as_as_no', 'password_text',\\\n 'remote_as', 'shutdown', 'keepalive_interval',\\\n 'holdtime', 'transport_connection_mode', 'update_source']\n\n for key in peer_session_keys:\n\n self.add_leaf(cmd='show ip bgp template peer-session',\n src='{ps_source}[{key}]'.format(ps_source=ps_source,\n key=key),\n dest='{ps_dest}[{key}]'.format(ps_dest=ps_dest,\n key=key))\n\n # 
local_no_prepend - N/A\n # local_dual_as - N/A\n # local_replace_as - N/A\n\n # ======================================================================\n # peer_policy\n # ======================================================================\n\n # peer_policy\n # pp_name\n pp_src = '[peer_policy][(?P<peer_policy>.*)]'\n pp_dest = 'info[instance][default][peer_policy][(?P<peer_policy>.*)]'\n\n peer_policy_keys = ['allowas_in', 'as_override', 'default_originate',\\\n 'default_originate_route_map', 'route_map_name_in',\\\n 'route_map_name_out', 'maximum_prefix_max_prefix_no',\\\n 'next_hop_self', 'route_reflector_client', 'send_community',\\\n 'soft_reconfiguration', 'soo']\n\n for key in peer_policy_keys:\n\n self.add_leaf(cmd='show ip bgp template peer-policy',\n src='{pp_src}[{key}]'.format(pp_src=pp_src,key=key),\n dest='{pp_dest}[{key}]'.format(pp_dest=pp_dest,key=key))\n\n # allowas_in_as_number - N/A\n # maximum_prefix_threshold - N/A\n # maximum_prefix_restart - N/A\n # maximum_prefix_warning_only - N/A\n\n # ======================================================================\n # vrf\n # ======================================================================\n \n # vrf_id\n\n if vrf:\n vrf_src = '[vrf][{vrf}]'.format(vrf=vrf)\n vrf_dest = 'info[instance][default][vrf][{vrf}]'.format(vrf=vrf)\n else:\n vrf_src = '[vrf][(?P<vrf>.*)]'\n vrf_dest = 'info[instance][default][vrf][(?P<vrf>.*)]'\n\n # always_compare_med - N/A\n # bestpath_compare_routerid - N/A\n # bestpath_cost_community_ignore - N/A\n # bestpath_med_missin_at_worst - N/A\n\n # cluster_id\n self.add_leaf(cmd='show bgp all cluster-ids',\n src=vrf_src+'[cluster_id]',\n dest=vrf_dest+'[cluster_id]')\n\n # confederation_identifier- N/A\n # confederation_peer_as - N/A\n # graceful_restart - N/A\n # graceful_restart_restart_time - N/A\n # graceful_restart_stalepath_time - N/A\n # log_neighbor_changes - N/A\n # router_id - N/A\n # keepalive_interval - N/A\n # holdtime - N/A\n # enforce_first_as - N/A\n # fast_external_fallover - N/A\n # default_choice_ipv4_unicast - N/A\n\n # ======================================================================\n # vrf: address_family\n # ======================================================================\n\n # address_family\n # addr_family\n af_src = '[address_family][(?P<afname>{get_af_name})]'\n af_dest = '[address_family][(?P<afname>{get_af_name})]'\n\n # Add empty address family\n self.add_leaf(cmd='show ip bgp all dampening parameters',\n src=vrf_src+'[address_family]',\n dest=vrf_dest+'[address_family]',\n action=self.get_af_key)\n\n # vrf: address_family\n vrf_add_src = vrf_src+af_src\n vrf_add_dest = vrf_dest+af_dest\n\n vrf_add_keys = ['dampening', 'dampening_half_life_time',\\\n 'dampening_reuse_time', 'dampening_suppress_time',\\\n 'dampening_max_suppress_time']\n\n for key in vrf_add_keys:\n self.add_leaf(cmd='show ip bgp all dampening parameters',\n src='{vrf_add_src}[{key}]'.format(\n vrf_add_src=vrf_add_src, key=key),\n dest='{vrf_add_dest}[{key}]'.format(\n vrf_add_dest=vrf_add_dest, key=key))\n\n # nexthop_route_map - N/A\n # nexthop_trigger_enable - N/A\n # nexthop_trigger_delay_critical - N/A\n # nexthop_trigger_delay_non_critical - N/A\n # client_to_client_reflection - N/A\n # distance_extern_as - N/A\n # distance_internal_as - N/A\n # distance_local - N/A\n # maximum_paths_ebgp - N/A\n # maximum_paths_ibgp - N/A\n # maximum_paths_eibgp - N/A\n # aggregate_address_ipv4_address - N/A\n # aggregate_address_ipv4_mask - N/A\n # aggregate_address_as_set - N/A\n # 
aggregate_address_summary_only - N/A\n # network_number - N/A\n # network_mask - N/A\n # network_route_map - N/A\n # redist_isis - N/A\n # redist_isis_metric - N/A\n # redist_isis_route_policy - N/A\n # redist_ospf - N/A\n # redist_ospf_metric - N/A\n # redist_ospf_route_policy - N/A\n # redist_rip - N/A\n # redist_rip_metric - N/A\n # redist_rip_route_policy - N/A\n # redist_static - N/A\n # redist_static_metric - N/A\n # redist_static_route_policy - N/A\n # redist_connected - N/A\n # redist_connected_metric - N/A\n # redist_connected_route_policy - N/A\n # v6_aggregate_address_ipv6_address - N/A\n # v6_aggregate_address_as_set - N/A\n # v6_aggregate_address_summary_only - N/A\n # v6_network_number - N/A\n # v6_network_route_map - N/A\n # v6_allocate_label_all - N/A\n # retain_rt_all - N/A\n # label_allocation_mode - N/A\n\n # ======================================================================\n # vrf: neighbor\n # ======================================================================\n\n # neighbor_id\n if vrf:\n if neighbor:\n nbr_src = '[vrf][{vrf}][neighbor][{neighbor}]'.format(vrf=vrf, neighbor=neighbor)\n nbr_dest = 'info[instance][default][vrf][{vrf}][neighbor]' \\\n '[{neighbor}]'.format(vrf=vrf, neighbor=neighbor)\n else:\n nbr_src = '[vrf][{vrf}][neighbor][(?P<neighbor_id>.*)]'.format(vrf=vrf)\n nbr_dest = 'info[instance][default][vrf][{vrf}][neighbor]'\\\n '[(?P<neighbor_id>.*)]'.format(vrf=vrf)\n else:\n if neighbor:\n nbr_src = '[vrf][(?P<vrf>.*)][neighbor][{neighbor}]'.format(neighbor=neighbor)\n nbr_dest = 'info[instance][default][vrf][(?P<vrf>.*)][neighbor]' \\\n '[{neighbor}]'.format(neighbor=neighbor)\n else:\n nbr_src = '[vrf][(?P<vrf>.*)][neighbor][(?P<neighbor_id>.*)]'\n nbr_dest = 'info[instance][default][vrf][(?P<vrf>.*)][neighbor]'\\\n '[(?P<neighbor_id>.*)]'\n\n neighbor_keys = ['description', 'remote_as',\\\n 'shutdown', 'bgp_version',\\\n 'session_state']\n for key in neighbor_keys:\n self.add_leaf(cmd=bgp_nbr_class,\n src='{nbr_src}[{key}]'.format(nbr_src=nbr_src,\n key=key),\n dest='{nbr_dest}[{key}]'.format(nbr_dest=nbr_dest,\n key=key),\n address_family=af,\n neighbor=neighbor)\n # fall_over_bfd - N/A\n # suppress_four_byte_as_capability - N/A\n # disable_connected_check - N/A\n # ebgp_multihop - N/A\n # ebgp_multihop_max_hop - N/A\n # inherit_peer_session - N/A\n # local_as_as_no - N/A\n # local_as_no_prepend - N/A\n # local_as_replace_as - N/A\n # local_as_dual_as - N/A\n # remove_private_as - N/A\n # installed_prefixes - N/A\n\n # bgp_negotiated_keepalive_timers\n # keepalive_interval\n self.add_leaf(cmd=bgp_nbr_class,\n src=nbr_src+'[bgp_negotiated_keepalive_timers]'\n '[keepalive_interval]',\n dest=nbr_dest+'[bgp_negotiated_keepalive_timers]'\n '[keepalive_interval]',\n address_family=af,\n neighbor=neighbor)\n\n # bgp_negotiated_keepalive_timers\n # hold_time\n self.add_leaf(cmd=bgp_nbr_class,\n src=nbr_src+'[bgp_negotiated_keepalive_timers]'\n '[hold_time]',\n dest=nbr_dest+'[bgp_negotiated_keepalive_timers]'\n '[hold_time]',\n address_family=af,\n neighbor=neighbor)\n\n # bgp_session_transport\n # connection\n # state\n self.add_leaf(cmd=bgp_nbr_class,\n src=nbr_src + '[session_state]',\n dest=nbr_dest + '[bgp_session_transport][connection][state]',\n address_family=af,\n neighbor=neighbor)\n\n # bgp_session_transport\n # connection\n # mode - N/A\n\n # bgp_session_transport\n # connection\n # last_reset\n self.add_leaf(cmd=bgp_nbr_class,\n src=nbr_src+'[bgp_session_transport][connection]'\n '[last_reset]',\n 
dest=nbr_dest+'[bgp_session_transport][connection]'\n '[last_reset]',\n address_family=af,\n neighbor=neighbor)\n\n # bgp_session_transport\n # connection\n # reset_reason\n self.add_leaf(cmd=bgp_nbr_class,\n src=nbr_src+'[bgp_session_transport][connection]'\n '[reset_reason]',\n dest=nbr_dest+'[bgp_session_transport][connection]'\n '[reset_reason]',\n address_family=af,\n neighbor=neighbor)\n \n # bgp_session_transport\n # transport\n # local_port\n self.add_leaf(cmd=bgp_nbr_class,\n src=nbr_src+'[bgp_session_transport][transport]'\n '[local_port]',\n dest=nbr_dest+'[bgp_session_transport][transport]'\n '[local_port]',\n address_family=af,\n neighbor=neighbor)\n\n # bgp_session_transport\n # transport\n # local_host\n self.add_leaf(cmd=bgp_nbr_class,\n src=nbr_src+'[bgp_session_transport][transport]'\n '[local_host]',\n dest=nbr_dest+'[bgp_session_transport][transport]'\n '[local_host]',\n address_family=af,\n neighbor=neighbor)\n\n # bgp_session_transport\n # transport\n # foreign_port\n self.add_leaf(cmd=bgp_nbr_class,\n src=nbr_src+'[bgp_session_transport][transport]'\n '[foreign_port]',\n dest=nbr_dest+'[bgp_session_transport][transport]'\n '[foreign_port]',\n address_family=af,\n neighbor=neighbor)\n\n # bgp_session_transport\n # transport\n # foreign_host\n self.add_leaf(cmd=bgp_nbr_class,\n src=nbr_src+'[bgp_session_transport][transport]'\n '[foreign_host]',\n dest=nbr_dest+'[bgp_session_transport][transport]'\n '[foreign_host]',\n address_family=af,\n neighbor=neighbor)\n \n # bgp_session_transport\n # transport\n # mss\n self.add_leaf(cmd=bgp_nbr_class,\n src=nbr_src+'[bgp_session_transport][transport]'\n '[mss]',\n dest=nbr_dest+'[bgp_session_transport][transport]'\n '[mss]',\n address_family=af,\n neighbor=neighbor)\n\n # minimum_neighbor_hold - N/A\n # up_time - N/A\n # update_source - N/A\n # password_text - N/A\n\n # bgp_negotiated_capabilities\n capabilities_src = nbr_src+'[bgp_negotiated_capabilities]'\n capabilities_dest = nbr_dest+'[bgp_negotiated_capabilities]'\n\n capabilities_keys = ['route_refresh', 'four_octets_asn',\\\n 'vpnv4_unicast', 'vpnv6_unicast',\\\n 'graceful_restart', 'enhanced_refresh', 'multisession',\\\n 'stateful_switchover']\n\n for key in capabilities_keys:\n self.add_leaf(cmd=bgp_nbr_class,\n src='{capabilities_src}[{key}]'.format(\n capabilities_src=capabilities_src, key=key),\n dest='{capabilities_dest}[{key}]'.format(\n capabilities_dest=capabilities_dest, key=key),\n address_family=af,\n neighbor=neighbor)\n\n # bgp_neighbor_counters_messages\n counters_src = nbr_src+'[bgp_neighbor_counters][messages]'\n counters_dest = nbr_dest+'[bgp_neighbor_counters][messages]'\n\n counters_keys = ['opens', 'updates', 'notifications', 'keepalives',\\\n 'route_refreshes']\n\n for key in counters_keys:\n\n self.add_leaf(cmd=bgp_nbr_class,\n src='{counters_src}[sent][{key}]'.format(\n counters_src=counters_src, key=key),\n dest='{counters_dest}[sent][{key}]'.format(\n counters_dest=counters_dest, key=key),\n address_family=af,\n neighbor=neighbor)\n\n self.add_leaf(cmd=bgp_nbr_class,\n src='{counters_src}[received][{key}]'.format(\n counters_src=counters_src, key=key),\n dest='{counters_dest}[received][{key}]'.format(\n counters_dest=counters_dest, key=key),\n address_family=af,\n neighbor=neighbor)\n\n # ==============================================================\n # vrf: neighbor - address_family\n # ==============================================================\n\n # address_family\n # nbr_af_name\n nbr_af_src = 
'[address_family][(?P<afname>{get_af_name})]'\n nbr_af_dest = '[address_family][(?P<afname>{get_af_name})]'\n\n # aggregated source and destination\n final_src = nbr_src+nbr_af_src\n final_dest = nbr_dest+nbr_af_dest\n\n nbr_af_keys = ['bgp_table_version', 'routing_table_version',\\\n 'prefixes', 'path', 'total_memory', 'session_state']\n\n for key in nbr_af_keys:\n # address_family\n # nbr_af_name\n self.add_leaf(cmd='show bgp all summary',\n src='{final_src}[{key}]'.format(\n final_src=final_src, key=key),\n dest='{final_dest}[{key}]'.format(\n final_dest=final_dest, key=key),\n address_family=af,\n vrf=vrf)\n\n # allowas_in - N/A\n # allowas_in_as_number - N/A\n\n # inherit_peer_policy\n # inherit_peer_seq - N/A\n\n # maximum_prefix_max_prefix_no - N/A\n # maximum_prefix_threshold - N/A\n # maximum_prefix_restart - N/A\n # maximum_prefix_warning_only - N/A\n\n # Get neighbors for input to:\n # * 'show bgp all neighbors <neighbor> policy'\n # * 'show bgp all neighbors <WORD> received-routes'\n\n self.add_leaf(cmd=bgp_nbr_class,\n src='[list_of_neighbors]',\n dest='info[list_of_neighbors]',\n address_family=af,\n neighbor=neighbor)\n\n self.make()\n\n if hasattr (self, 'info') and\\\n 'list_of_neighbors' in self.info:\n\n for nbr in sorted(self.info['list_of_neighbors']):\n\n if vrf:\n # address_family\n # nbr_af_name\n # route_map_name_in\n self.add_leaf(cmd='show bgp all neighbors {neighbor} policy'.format(neighbor=nbr),\n src='[vrf][{vrf}][neighbor]'\n '[{neighbor}]'.format(neighbor=nbr, vrf=vrf) +\n nbr_af_src + '[nbr_af_route_map_name_in]',\n dest='info[instance][default][vrf][{vrf}]'\n '[neighbor][{neighbor}]'.format(\n neighbor=nbr, vrf=vrf) + nbr_af_dest +\n '[route_map_name_in]',\n neighbor=nbr)\n\n # address_family\n # nbr_af_name\n # route_map_name_out\n self.add_leaf(cmd='show bgp all neighbors {neighbor} policy'.format(neighbor=nbr),\n src='[vrf][{vrf}][neighbor][{neighbor}]'. 
\\\n format(neighbor=nbr, vrf=vrf) + nbr_af_src +\n '[nbr_af_route_map_name_out]',\n dest='info[instance][default][vrf][{vrf}]'\n '[neighbor][{neighbor}]'.format(neighbor=nbr,\n vrf=vrf) +\n nbr_af_dest + '[route_map_name_out]',\n neighbor=nbr)\n else:\n # address_family\n # nbr_af_name\n # route_map_name_in\n self.add_leaf(cmd='show bgp all neighbors {neighbor} policy'.format(neighbor=nbr),\n src='[vrf][(?P<vrf>.*)][neighbor]'\n '[{neighbor}]'.format(neighbor=nbr)+\n nbr_af_src+'[nbr_af_route_map_name_in]',\n dest='info[instance][default][vrf][(?P<vrf>.*)]'\n '[neighbor][{neighbor}]'.format(\n neighbor=nbr)+nbr_af_dest+\n '[route_map_name_in]',\n neighbor=nbr)\n\n # address_family\n # nbr_af_name\n # route_map_name_out\n self.add_leaf(cmd='show bgp all neighbors {neighbor} policy'.format(neighbor=nbr),\n src='[vrf][(?P<vrf>.*)][neighbor][{neighbor}]'.\\\n format(neighbor=nbr)+nbr_af_src+\n '[nbr_af_route_map_name_out]',\n dest='info[instance][default][vrf][(?P<vrf>.*)]'\n '[neighbor][{neighbor}]'.format(neighbor=\\\n nbr)+nbr_af_dest+'[route_map_name_out]',\n neighbor=nbr)\n\n # clear list of neighbors\n del self.info['list_of_neighbors']\n\n # route_reflector_client - N/A\n # address_family\n # nbr_af_name\n # send_community - N/A\n # address_family\n # nbr_af_name\n # soft_configuration - N/A\n # next_hop_self - N/A\n # as_override - N/A\n # default_originate - N/A\n # default_originate_route_map - N/A\n # soo - N/A\n\n\n ########################################################################\n # table\n ########################################################################\n\n # vrf\n # vrf_id\n # address_family\n # af_name\n if vrf:\n table_src = '[vrf][{vrf}][address_family]' \\\n '[(?P<address_family>.*)]'.format(vrf=vrf)\n else:\n table_src = '[vrf][(?P<vrf_name>.*)][address_family][(?P<address_family>.*)]'\n\n table_dest = 'table[instance][default]'+ table_src\n\n table_keys = ['route_identifier', 'bgp_table_version',\\\n 'route_distinguisher', 'default_vrf']\n\n for key in table_keys:\n self.add_leaf(cmd='show bgp {address_family} all'.format(address_family=af),\n src='{table_src}[{key}]'.format(\n table_src=table_src, key=key),\n dest='{table_dest}[{key}]'.format(\n table_dest=table_dest, key=key),\n address_family=af)\n\n\n\n\n\n\n # Building prefix section\n if vrf:\n if address_family:\n prefix_src = '[instance][default][vrf][{vrf}][address_family][{address_family}][prefixes][(?P<prefix>.*)]'.format(\n address_family=address_family, vrf=vrf)\n\n prefix_dest = 'table[instance][default][vrf][{vrf}][address_family][{address_family}]'.format(\n address_family=address_family, vrf=vrf)\n\n else:\n prefix_src = '[instance][default][vrf][{vrf}]' \\\n '[address_family][(?P<address_family>.*)][prefixes]' \\\n '[(?P<prefix>.*)]'.format(vrf=vrf)\n prefix_dest = 'table[instance][default][vrf][{vrf}][address_family]' \\\n '[(?P<address_family>.*)]'.format(vrf=vrf)\n\n else:\n if address_family:\n prefix_src = '[instance][default][vrf][(?P<vrf_name>.*)]' \\\n '[address_family][{address_family}][prefixes]' \\\n '[(?P<prefix>.*)]'.format(address_family=address_family)\n prefix_dest = 'table[instance][default][vrf][(?P<vrf_name>.*)][' \\\n 'address_family][{address_family}]'.format(\n address_family=address_family)\n\n else:\n prefix_src = '[instance][default][vrf][(?P<vrf_name>.*)]'\\\n '[address_family][(?P<address_family>.*)][prefixes]'\\\n '[(?P<prefix>.*)]'\n prefix_dest = 'table[instance][default][vrf][(?P<vrf_name>.*)][address_family]' \\\n '[(?P<address_family>.*)]'\n # path\n 
self.add_leaf(cmd='show bgp all detail',\n                          src=prefix_src + '[paths]',\n                          dest=prefix_dest + '[prefixes][(?P<prefix>.*)][paths]', vrf=vrf,\n                          address_family=af)\n            # table_version\n            self.add_leaf(cmd='show bgp all detail',\n                          src=prefix_src + '[table_version]',\n                          dest=prefix_dest + '[prefixes][(?P<prefix>.*)][table_version]',\n                          vrf=vrf, address_family=af)\n\n            # index_dest = table_dest+'[prefixes][(?P<prefix>.*)][index][(?P<index>.*)]'\n\n            index_keys = ['next_hop', 'next_hop_igp_metric', 'gateway',\\\n                          'update_group', 'status_codes', 'origin_codes', 'metric',\\\n                          'localpref', 'weight', 'originator']\n\n            for key in index_keys:\n\n                self.add_leaf(cmd='show bgp all detail',\n                              src=prefix_src + '[index][(?P<index>.*)][{key}]'.format(key=key),\n                              dest=prefix_dest + '[prefixes][(?P<prefix>.*)][index][(?P<index>.*)][{key}]'.format(key=key),\n                              vrf=vrf, address_family=af)\n\n            # cluster_id - N/A\n            # mpls_labels_inout - N/A\n            # cluster_list - N/A\n\n            # local_as\n            self.add_leaf(cmd='show bgp all summary',\n                          src='[vrf][(?P<vrf_name>.*)][address_family][(?P<address_family>.*)][local_as]',\n                          dest='table[instance][default][vrf][(?P<vrf_name>.*)][address_family][(?P<address_family>.*)][local_as]',\n                          vrf=vrf, address_family=af)\n\n            # routing_table_version\n            self.add_leaf(cmd='show bgp all summary',\n                          src='[vrf][(?P<vrf_name>.*)][address_family][(?P<address_family>.*)][routing_table_version]',\n                          dest='table[instance][default][vrf][(?P<vrf_name>.*)][address_family][(?P<address_family>.*)][routing_table_version]',\n                          vrf=vrf, address_family=af)\n\n            # ext_community\n            self.add_leaf(cmd='show bgp all detail',\n                          src=prefix_src + '[index][(?P<index>.*)]'\n                                           '[evpn][ext_community]',\n                          dest=prefix_dest+'[prefixes][(?P<prefix>.*)][index][(?P<index>.*)][ext_community]',\n                          vrf=vrf, address_family=af)\n\n        ########################################################################\n        #                           routes_per_peer\n        ########################################################################\n\n        # Routes per peer top level key\n        if neighbor:\n            if address_family:\n                rpp_src = '[vrf][(?P<vrf>.*)][neighbor][{neighbor}]' \\\n                          '[address_family][{address_family}]'.format(neighbor=neighbor, address_family=address_family)\n                rpp_dest = 'routes_per_peer[instance][default][vrf][(?P<vrf>.*)]' \\\n                           '[neighbor][{neighbor}][address_family]' \\\n                           '[{address_family}]'.format(neighbor=neighbor, address_family=address_family)\n            else:\n                rpp_src = '[vrf][(?P<vrf>.*)][neighbor][{neighbor}]' \\\n                          '[address_family][(?P<address_family>.*)]'.format(neighbor=neighbor)\n                rpp_dest = 'routes_per_peer[instance][default][vrf][(?P<vrf>.*)]' \\\n                           '[neighbor][{neighbor}][address_family]' \\\n                           '[(?P<address_family>.*)]'.format(neighbor=neighbor)\n        else:\n            if address_family:\n                rpp_src = '[vrf][(?P<vrf>.*)][neighbor][(?P<neighbor>.*)]' \\\n                          '[address_family][{address_family}]'.format(address_family=address_family)\n                rpp_dest = 'routes_per_peer[instance][default][vrf][(?P<vrf>.*)]' \\\n                           '[neighbor][(?P<neighbor>.*)][address_family]' \\\n                           '[{address_family}]'.format(address_family=address_family)\n            else:\n                rpp_src = '[vrf][(?P<vrf>.*)][neighbor][(?P<neighbor>.*)]'\\\n                          '[address_family][(?P<address_family>.*)]'\n                rpp_dest = 'routes_per_peer[instance][default][vrf][(?P<vrf>.*)]'\\\n                           '[neighbor][(?P<neighbor>.*)][address_family]'\\\n                           '[(?P<address_family>.*)]'\n\n        rpp_keys = ['msg_rcvd', 'msg_sent', 'tbl_ver',\\\n                    'input_queue', 'output_queue', 'up_down', 'state_pfxrcd']\n\n        for key in rpp_keys:\n            self.add_leaf(cmd='show bgp all summary',\n                          src='{rpp_src}[{key}]'.format(\n                            rpp_src=rpp_src, 
key=key),\n dest='{rpp_dest}[{key}]'.format(\n rpp_dest=rpp_dest, key=key),\n vrf=vrf, address_family=af)\n\n\n # remote_as\n if vrf:\n self.add_leaf(cmd=bgp_nbr_class,\n src='[vrf][{vrf}][neighbor][(?P<neighbor>.*)]'\n '[remote_as]'.format(vrf=vrf),\n dest='routes_per_peer[instance][default][vrf]'\n '[{vrf}][neighbor][(?P<neighbor>.*)]'\n '[remote_as]'.format(vrf=vrf), neighbor=neighbor,\n address_family=af)\n else:\n self.add_leaf(cmd=bgp_nbr_class,\n src='[vrf][(?P<vrf>.*)][neighbor][(?P<neighbor>.*)]'\n '[remote_as]',\n dest='routes_per_peer[instance][default][vrf]'\n '[(?P<vrf>.*)][neighbor][(?P<neighbor>.*)]'\n '[remote_as]', neighbor=neighbor, address_family=af)\n\n # Get neighbors for input to:\n # * 'show bgp all neighbors <WORD> advertised-routes'\n # * 'show bgp all neighbors <WORD> received-routes'\n # * 'show bgp all neighbors <WORD> routes'\n\n self.add_leaf(cmd=bgp_nbr_class,\n src='[list_of_neighbors]',\n dest='routes_per_peer[list_of_neighbors]',\n address_family=af,\n neighbor=neighbor)\n self.make()\n\n if hasattr (self, 'routes_per_peer') and\\\n 'list_of_neighbors' in self.routes_per_peer:\n\n for nbr in sorted(self.routes_per_peer['list_of_neighbors']):\n if vrf:\n\n if address_family:\n rpp_nbr_src = '[vrf][{vrf}][neighbor][{neighbor}][' \\\n 'address_family][{address_family}]'.format(\n vrf=vrf, neighbor=nbr, address_family=address_family)\n rpp_nbr_dest = 'routes_per_peer[instance][default][vrf][{vrf}][' \\\n 'neighbor][{neighbor}][address_family][{address_family}]'.format(\n vrf=vrf, neighbor=nbr, address_family=address_family)\n else:\n rpp_nbr_src = '[vrf][{vrf}][neighbor][{neighbor}][address_family][(' \\\n '?P<address_family>.*)]'.format(vrf=vrf, neighbor=nbr)\n rpp_nbr_dest = 'routes_per_peer[instance][default][vrf][{vrf}][' \\\n 'neighbor][{neighbor}]' \\\n '[address_family][(?P<address_family>.*)]'.format(\n vrf=vrf, neighbor=nbr)\n else:\n\n if address_family:\n rpp_nbr_src = '[vrf][(?P<vrf>.*)][neighbor][{neighbor}][' \\\n 'address_family][{address_family}]'.format(\n neighbor=nbr, address_family=address_family)\n\n rpp_nbr_dest = 'routes_per_peer[instance][default][vrf][(' \\\n '?P<vrf>.*)][neighbor][{neighbor}]' \\\n '[address_family][{address_family}]'.format(\n neighbor=nbr, address_family=address_family)\n else:\n rpp_nbr_src = '[vrf][(?P<vrf>.*)][neighbor][{neighbor}][' \\\n 'address_family][(?P<address_family>.*)]'.format(neighbor=nbr)\n\n rpp_nbr_dest = 'routes_per_peer[instance][default][vrf][(' \\\n '?P<vrf>.*)][' \\\n 'neighbor][{neighbor}]' \\\n '[address_family][(?P<address_family>.*)]'.format(neighbor=nbr)\n # route_distinguisher\n self.add_leaf(cmd=bgp_adv_route_class,\n src=rpp_nbr_src + '[route_distinguisher]',\n dest=rpp_nbr_dest + '[route_distinguisher]',\n neighbor=nbr, address_family=af)\n\n # default_vrf\n self.add_leaf(cmd=bgp_adv_route_class,\n src=rpp_nbr_src + '[default_vrf]',\n dest=rpp_nbr_dest + '[default_vrf]',\n neighbor=nbr, address_family=af)\n\n # advertised\n self.add_leaf(cmd=bgp_adv_route_class,\n src=rpp_nbr_src + '[advertised]',\n dest=rpp_nbr_dest + '[advertised]',\n neighbor=nbr, address_family=af)\n\n # routes\n self.add_leaf(cmd=bgp_route_class,\n src=rpp_nbr_src + '[routes]',\n dest=rpp_nbr_dest + '[routes]',\n neighbor=nbr, address_family=af)\n\n # received_routes\n self.add_leaf(cmd=bgp_rec_rout_class,\n src=rpp_nbr_src + '[received_routes]',\n dest=rpp_nbr_dest + '[received_routes]',\n neighbor=nbr, address_family=af)\n\n # clear list of neighbors\n del self.routes_per_peer['list_of_neighbors']\n\n 
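# Note: each add_leaf() call above only registers a mapping from a\n        # parser output path (src) to this ops structure path (dest); named\n        # groups such as (?P<vrf>.*) are captured from the parser output and\n        # substituted into dest when make() runs.  A minimal usage sketch,\n        # assuming a connected pyATS device object 'uut':\n        #\n        #     from genie.libs.ops.bgp.iosxe.bgp import Bgp\n        #     bgp = Bgp(device=uut)\n        #     bgp.learn()\n        #     bgp.info['instance']['default']['vrf']\n\n        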
########################################################################\n        #                          Final Structure\n        ########################################################################\n\n        # Make final Ops structure\n        self.make(final_call=True)\n\n        if hasattr(self, 'routes_per_peer'):\n            # Removing 'path_type' from the ops structure\n            for vrf in self.routes_per_peer['instance']['default']['vrf']:\n                for neighbor in self.routes_per_peer['instance']['default']\\\n                        ['vrf'][vrf]['neighbor']:\n                    nbr_dict = self.routes_per_peer['instance']['default']\\\n                        ['vrf'][vrf]['neighbor'][neighbor]\n                    if 'address_family' not in nbr_dict:\n                        continue\n                    for add_family in nbr_dict['address_family']:\n                        af_dict = nbr_dict['address_family'][add_family]\n                        # 'advertised', 'received_routes' and 'routes' all\n                        # share the same route/index layout\n                        for rte_type in ['advertised', 'received_routes',\\\n                                         'routes']:\n                            if rte_type not in af_dict:\n                                continue\n                            for route in af_dict[rte_type]:\n                                for idx in af_dict[rte_type][route]['index']:\n                                    if 'path_type' in af_dict[rte_type]\\\n                                            [route]['index'][idx]:\n                                        del af_dict[rte_type][route]\\\n                                            ['index'][idx]['path_type']", "id": "9149525", "language": "Python", "matching_score": 8.118353843688965, "max_stars_count": 0, "path": "pkgs/ops-pkg/src/genie/libs/ops/bgp/iosxe/bgp.py" }, { "content": "''' \nBGP Genie Ops Object for IOSXR - CLI.\n'''\n\n# Super class\nfrom genie.libs.ops.bgp.bgp import Bgp as SuperBgp\n\n# Parser\nfrom 
genie.libs.parser.iosxr.show_bgp import ShowPlacementProgramAll,\\\n ShowBgpInstanceAfGroupConfiguration,\\\n ShowBgpInstanceSessionGroupConfiguration,\\\n ShowBgpInstanceProcessDetail,\\\n ShowBgpInstanceNeighborsDetail,\\\n ShowBgpInstanceNeighborsAdvertisedRoutes,\\\n ShowBgpInstanceNeighborsReceivedRoutes,\\\n ShowBgpInstanceNeighborsRoutes,\\\n ShowBgpInstanceSummary,\\\n ShowBgpInstanceAllAll, ShowBgpInstances\n\n\nclass Bgp(SuperBgp):\n '''BGP Genie Ops Object'''\n\n # Callables\n def get_key(self, item):\n # For the sake of simplicity and in the interest of time, \n # this has been done to reduce the number of for loops for easier\n # maintenance and readability\n # self.neighbors = item.keys()\n return list(item.keys())\n\n def learn(self, instance='all', vrf='all', address_family='', neighbor=''):\n '''Learn BGP Ops'''\n\n ########################################################################\n # info\n ########################################################################\n\n # instance\n # instance_name\n\n # bgp_id\n self.add_leaf(cmd=ShowBgpInstances,\n src='[instance][(?P<instance>.*)][bgp_id][(?P<bgp_id>.*)]',\n dest='info[instance][(?P<instance>.*)][bgp_id][(?P<bgp_id>.*)]')\n\n # protocol_state\n self.add_leaf(cmd=ShowPlacementProgramAll,\n src='[program][bgp][instance][(?P<instance>.*)][active_state][(?P<active_state>.*)]',\n dest='info[instance][(?P<instance>.*)][protocol_state][(?P<active_state>.*)]')\n\n # ======================================================================\n # peer_session\n # ======================================================================\n\n # peer_session\n # ps_name\n ps_source = '[(?P<instance>.*)][peer_session][(?P<session_session>.*)]'\n ps_dest = 'info[instance][(?P<instance>.*)][peer_session][(?P<session_session>.*)]'\n\n peer_session_keys = ['fall_over_bfd', 'suppress_four_byte_as_capability',\\\n 'description', 'disable_connected_check', 'ebgp_multihop_enable',\\\n 'ebgp_multihop_max_hop', 'local_as_as_no', 'local_no_prepend',\\\n 'local_dual_as', 'local_replace_as', 'password_text', 'remote_as',\\\n 'shutdown', 'keepalive_interval', 'holdtime', 'update_source',\\\n 'transport_connection_mode']\n\n for key in peer_session_keys:\n\n self.add_leaf(cmd=ShowBgpInstanceSessionGroupConfiguration,\n src='{ps_source}[{key}]'.format(ps_source=ps_source,key=key),\n dest='{ps_dest}[{key}]'.format(ps_dest=ps_dest,key=key))\n\n\n # ======================================================================\n # peer_policy\n # ======================================================================\n\n # peer_policy\n # pp_name\n pp_src = '[instance][(?P<instance>.*)][pp_name][(?P<peer_policy>.*)]'\n pp_dest = 'info[instance][(?P<instance>.*)][peer_policy][(?P<peer_policy>.*)]'\n\n peer_policy_keys = ['allowas_in', 'allowas_in_as_number', 'as_override',\\\n 'default_originate', 'default_originate_route_map',\\\n 'route_map_name_in', 'route_map_name_out',\\\n 'next_hop_self', 'route_reflector_client',\\\n 'send_community', 'soft_reconfiguration', 'soo',\\\n 'maximum_prefix_max_prefix_no',\\\n 'maximum_prefix_threshold', 'maximum_prefix_restart',\\\n 'maximum_prefix_warning_only']\n\n for key in peer_policy_keys:\n\n self.add_leaf(cmd=ShowBgpInstanceAfGroupConfiguration,\n src='{pp_src}[{key}]'.format(pp_src=pp_src,key=key),\n dest='{pp_dest}[{key}]'.format(pp_dest=pp_dest,key=key))\n\n\n # ======================================================================\n # vrf\n # ======================================================================\n\n # 
Init vrf first loop\n vrf_loop1 = False\n\n for vrf_type in ['all', 'vrf']:\n if vrf != 'all':\n vrf_type = 'vrf'\n\n for af in ['ipv4 unicast', 'ipv6 unicast']:\n if address_family:\n af=address_family\n # Set or skip 'all all all' iteration\n if vrf_type == 'all':\n if vrf_loop1:\n continue\n else:\n af = ''\n vrf_loop1 = True\n # vrf\n # vrf_id\n vrf_src = '[instance][(?P<instance>.*)][vrf][(?P<vrf>.*)]'\n vrf_dest = 'info[instance][(?P<instance>.*)][vrf][(?P<vrf>.*)]'\n\n vrf_keys = ['always_compare_med', 'bestpath_compare_routerid',\n 'bestpath_cost_community_ignore',\n 'bestpath_med_missin_at_worst', 'cluster_id',\n 'log_neighbor_changes', 'router_id',\n 'enforce_first_as', 'fast_external_fallover']\n for key in vrf_keys:\n\n # set key values\n src_key = dest_key = key\n\n if key == 'cluster_id':\n src_key = 'active_cluster_id'\n\n self.add_leaf(cmd=ShowBgpInstanceProcessDetail,\n src='{vrf_src}[{src_key}]'.format(vrf_src=vrf_src,src_key=src_key),\n dest='{vrf_dest}[{dest_key}]'.format(vrf_dest=vrf_dest,dest_key=dest_key),\n vrf_type=vrf_type, vrf=vrf, instance=instance,\n address_family=af)\n\n # confederation_identifier - N/A\n # confederation_peer_as - N/A\n # graceful_restart - N/A\n # graceful_restart_restart_time - N/A\n # graceful_restart_stalepath_time - N/A\n # keepalive_interval - ??\n # holdtime - ??\n # default_choice_ipv4_unicast - N/A\n\n # ==============================================================\n # vrf: address_family\n # ==============================================================\n\n # address_family\n # af_name\n af_src = vrf_src + '[address_family][(?P<address_family>.*)]'\n af_dest = vrf_dest + '[address_family][(?P<address_family>.*)]'\n\n vrf_af_keys = ['dampening', 'dampening_route_map',\\\n 'dampening_half_life_time', 'dampening_reuse_time',\\\n 'dampening_suppress_time', 'dampening_max_suppress_time',\\\n 'client_to_client_reflection']\n\n for key in vrf_af_keys:\n\n # set key values\n src_key = dest_key = key\n\n self.add_leaf(cmd=ShowBgpInstanceProcessDetail,\n src='{af_src}[{src_key}]'.format(af_src=af_src,src_key=src_key),\n dest='{af_dest}[{dest_key}]'.format(af_dest=af_dest,dest_key=dest_key),\n vrf_type=vrf_type, vrf=vrf,\n instance=instance, address_family=af)\n\n # nexthop_route_map - N/A\n # nexthop_trigger_enable - N/A\n # nexthop_trigger_delay_critical - N/A\n # nexthop_trigger_delay_non_critical - N/A\n # distance_extern_as - N/A\n # distance_internal_as - N/A\n # distance_local - N/A\n # maximum_paths_ebgp - N/A\n # maximum_paths_ibgp - N/A\n # maximum_paths_eibgp - N/A\n # aggregate_address_ipv4_address - N/A\n # aggregate_address_ipv4_mask - N/A\n # aggregate_address_as_set - N/A\n # aggregate_address_summary_only - N/A\n # network_number - N/A\n # network_mask - N/A\n # network_route_map - N/A\n # redist_isis - N/A\n # redist_isis_metric - N/A\n # redist_isis_route_policy - N/A\n # redist_ospf - N/A\n # redist_ospf_metric - N/A\n # redist_ospf_route_policy - N/A\n # redist_rip - N/A\n # redist_rip_metric - N/A\n # redist_rip_route_policy - N/A\n # redist_static - N/A\n # redist_static_metric - N/A\n # redist_static_route_policy - N/A\n # redist_connected - N/A\n # redist_connected_metric - N/A\n # redist_connected_route_policy - N/A\n # v6_aggregate_address_ipv6_address - N/A\n # v6_aggregate_address_as_set - N/A\n # v6_aggregate_address_summary_only - N/A\n # v6_network_number - N/A\n # v6_network_route_map - N/A\n # v6_allocate_label_all - N/A\n # retain_rt_all - N/A\n # label_allocation_mode - N/A\n\n # 
==============================================================\n # vrf: neighbor\n # ==============================================================\n\n # neighbor\n # neighbor_id\n nbr_src = '[instance][(?P<instance>.*)][vrf][(?P<vrf>.*)][neighbor][(?P<neighbor>.*)]'\n nbr_dest = 'info[instance][(?P<instance>.*)][vrf][(?P<vrf>.*)][neighbor][(?P<neighbor>.*)]'\n\n nbr_keys = ['suppress_four_byte_as_capability', 'description',\\\n 'ebgp_multihop', 'ebgp_multihop_max_hop',\\\n 'local_as_as_no', 'local_as_no_prepend',\\\n 'local_as_replace_as', 'local_as_dual_as',\\\n 'remote_as', 'remove_private_as', 'shutdown',\\\n 'keepalive_interval', 'holdtime', \\\n 'transport_connection_mode', 'session_state',\\\n 'bgp_negotiated_keepalive_timers',\\\n 'minimum_neighbor_hold', 'up_time', ]\n\n for key in nbr_keys:\n\n # set key values\n src_key = dest_key = key\n\n if key == 'minimum_neighbor_hold':\n src_key = 'min_acceptable_hold_time'\n\n self.add_leaf(cmd=ShowBgpInstanceNeighborsDetail,\n src='{nbr_src}[{src_key}]'.format(nbr_src=nbr_src,src_key=src_key),\n dest='{nbr_dest}[{dest_key}]'.format(nbr_dest=nbr_dest,dest_key=dest_key),\n vrf_type=vrf_type, vrf=vrf,\n instance=instance, address_family=af,\n neighbor=neighbor)\n\n # fall_over_bfd - N/A\n # disable_connected_check - N/A\n # inherit_peer_session - N/A\n # bgp_version - ??\n # installed_prefixes - ??\n # update_source - N/A\n # password_text - N/A\n\n # bgp_session_transport\n # connection\n # state\n # mode\n # last_reset\n # reset_reason\n self.add_leaf(cmd=ShowBgpInstanceNeighborsDetail,\n src=nbr_src+'[bgp_session_transport][connection]',\n dest=nbr_dest+'[bgp_session_transport][connection]',\n vrf_type=vrf_type, vrf=vrf,\n instance=instance, address_family=af,\n neighbor=neighbor)\n\n # bgp_session_transport\n # transport\n # local_port\n # local_host\n # foreign_port\n # foreign_host\n # mss\n self.add_leaf(cmd=ShowBgpInstanceNeighborsDetail,\n src=nbr_src+'[bgp_session_transport][transport]',\n dest=nbr_dest+'[bgp_session_transport][transport]',\n vrf_type=vrf_type, vrf=vrf,\n instance=instance, address_family=af,\n neighbor=neighbor)\n\n # bgp_negotiated_capabilities\n # route_refresh\n self.add_leaf(cmd=ShowBgpInstanceNeighborsDetail,\n src=nbr_src+'[bgp_negotiated_capabilities][route_refresh]',\n dest=nbr_dest+'[bgp_negotiated_capabilities][route_refresh]',\n vrf_type=vrf_type, vrf=vrf,\n instance=instance, address_family=af,\n neighbor=neighbor)\n\n # bgp_negotiated_capabilities\n # four_octets_asn\n self.add_leaf(cmd=ShowBgpInstanceNeighborsDetail,\n src=nbr_src+'[bgp_negotiated_capabilities][four_octets_asn]',\n dest=nbr_dest+'[bgp_negotiated_capabilities][four_octets_asn]',\n vrf_type=vrf_type, vrf=vrf,\n instance=instance, address_family=af,\n neighbor=neighbor)\n\n # bgp_negotiated_capabilities\n # vpnv4_unicast\n self.add_leaf(cmd=ShowBgpInstanceNeighborsDetail,\n src=nbr_src+'[bgp_negotiated_capabilities][vpnv4_unicast]',\n dest=nbr_dest+'[bgp_negotiated_capabilities][vpnv4_unicast]',\n vrf_type=vrf_type, vrf=vrf,\n instance=instance, address_family=af,\n neighbor=neighbor)\n\n # bgp_negotiated_capabilities\n # vpnv6_unicast\n self.add_leaf(cmd=ShowBgpInstanceNeighborsDetail,\n src=nbr_src+'[bgp_negotiated_capabilities][vpnv6_unicast]',\n dest=nbr_dest+'[bgp_negotiated_capabilities][vpnv6_unicast]',\n vrf_type=vrf_type, vrf=vrf,\n instance=instance, address_family=af,\n neighbor=neighbor)\n\n # bgp_negotiated_capabilities\n # graceful_restart - N/A\n\n # bgp_negotiated_capabilities\n # enhanced_refresh - N/A\n\n # 
bgp_negotiated_capabilities\n # multisession - N/A\n\n # bgp_negotiated_capabilities\n # stateful_switchover - N/A\n\n # bgp_neighbor_counters\n # messages\n # sent\n # opens\n # updates\n # notifications\n # keepalives\n # route_refreshes\n self.add_leaf(cmd=ShowBgpInstanceNeighborsDetail,\n src=nbr_src+'[bgp_neighbor_counters][messages][sent]',\n dest=nbr_dest+'[bgp_neighbor_counters][messages][sent]',\n vrf_type=vrf_type, vrf=vrf,\n instance=instance, address_family=af,\n neighbor=neighbor)\n\n # bgp_neighbor_counters\n # messages\n # received\n # opens\n # updates\n # notifications\n # keepalives\n # route_refreshes\n self.add_leaf(cmd=ShowBgpInstanceNeighborsDetail,\n src=nbr_src+'[bgp_neighbor_counters][messages][received]',\n dest=nbr_dest+'[bgp_neighbor_counters][messages][received]',\n vrf_type=vrf_type, vrf=vrf,\n instance=instance, address_family=af,\n neighbor=neighbor)\n\n # ==============================================================\n # vrf: neighbor - address_family\n # ==============================================================\n\n # nbr_address_family\n # nbr_af_name\n nbr_af_src = nbr_src + '[address_family][(?P<address_family>.*)]'\n nbr_af_dest = nbr_dest + '[address_family][(?P<address_family>.*)]'\n\n nbr_af_keys = ['bgp_table_version', 'allowas_in',\\\n 'allowas_in_as_number', 'route_map_name_in',\\\n 'route_map_name_out', 'route_reflector_client',\\\n 'send_community', 'soft_configuration', 'as_override',\\\n 'default_originate', 'default_originate_route_map',\\\n 'soo', 'maximum_prefix_max_prefix_no',\\\n 'maximum_prefix_threshold', 'maximum_prefix_restart',\\\n 'maximum_prefix_warning_only']\n\n for key in nbr_af_keys:\n\n # set key values\n src_key = dest_key = key\n\n if key == 'bgp_table_version':\n src_key = 'neighbor_version'\n\n self.add_leaf(cmd=ShowBgpInstanceNeighborsDetail,\n src='{nbr_af_src}[{src_key}]'.format(nbr_af_src=nbr_af_src,src_key=src_key),\n dest='{nbr_af_dest}[{dest_key}]'.format(nbr_af_dest=nbr_af_dest,dest_key=dest_key),\n vrf_type=vrf_type, vrf=vrf,\n instance=instance, address_family=af,\n neighbor=neighbor)\n if address_family:\n break\n # routing_table_version - N/A\n # prefixes - N/A\n # total_entries - N/A\n # memory_usage - N/A\n # path - N/A\n # total_entries - N/A\n # memory_usage - N/A\n # total_memory - N/A\n # inherit_peer_policy - N/A\n # inherit_peer_seq - N/A\n # next_hop_self - N/A\n\n ########################################################################\n # table\n ########################################################################\n\n # Init table first loop\n table_loop1 = False\n\n for vrf_type in ['all', 'vrf']:\n if vrf != 'all':\n vrf_type = 'vrf'\n\n # Set or skip 'all all all' iteration\n if vrf_type == 'all':\n if table_loop1:\n continue\n else:\n\n table_loop1 = True\n\n # instance\n # instance_name\n # vrf\n # vrf_id\n # address_family\n # af_name\n tbl_src = '[instance][(?P<instance>.*)][vrf][(?P<vrf>.*)][address_family][(?P<address_family>.*)]'\n tbl_dest = 'table[instance][(?P<instance>.*)][vrf][(?P<vrf>.*)][address_family][(?P<address_family>.*)]'\n\n table_keys = ['route_distinguisher', 'default_vrf',\\\n 'route_identifier', 'local_as',\\\n 'bgp_table_version', 'prefixes']\n\n for key in table_keys:\n\n # set key values\n src_key = dest_key = key\n\n if key == 'prefixes':\n src_key = 'prefix'\n\n self.add_leaf(cmd=ShowBgpInstanceAllAll,\n src='{tbl_src}[{src_key}]'.format(tbl_src=tbl_src,src_key=src_key),\n dest='{tbl_dest}[{dest_key}]'.format(tbl_dest=tbl_dest,dest_key=dest_key),\n 
vrf_type=vrf_type, vrf=vrf, instance=instance, address_family=address_family)\n\n # paths - N/A\n\n # ########################################################################\n # # routes_per_peer\n # ########################################################################\n \n # Init routes_per_peer first loop\n rpp_loop1 = False\n\n for vrf_type in ['all', 'vrf']:\n if vrf != 'all':\n vrf_type = 'vrf'\n for af in ['ipv4 unicast', 'ipv6 unicast']:\n if address_family:\n af = address_family\n\n # Set or skip 'all all all' iteration\n if vrf_type == 'all':\n if rpp_loop1:\n continue\n else:\n af = ''\n rpp_loop1 = True\n\n # instance\n # instance_name\n # vrf\n # vrf_name\n # neighbor\n # neighbor_id\n rpp_src = '[instance][(?P<instance>.*)][vrf][(?P<vrf>.*)][neighbor][(' \\\n '?P<neighbor_id>.*)]'\n rpp_dest = 'routes_per_peer[instance][(?P<instance>.*)][vrf][(' \\\n '?P<vrf>.*)][neighbor][(?P<neighbor_id>.*)]'\n\n # remote_as\n self.add_leaf(cmd=ShowBgpInstanceSummary,\n src=rpp_src + '[remote_as]',\n dest=rpp_dest + '[remote_as]',\n vrf_type=vrf_type, address_family=af,\n vrf=vrf, instance=instance)\n\n # address_family\n # af_name\n rpp_af_src = rpp_src + '[address_family][(?P<address_family>.*)]'\n rpp_af_dest = rpp_dest + '[address_family][(?P<address_family>.*)]'\n\n rpp_af_keys = ['route_distinguisher', 'default_vrf', 'msg_rcvd',\n 'msg_sent', 'tbl_ver', 'input_queue',\n 'output_queue', 'up_down', 'state_pfxrcd']\n\n for key in rpp_af_keys:\n # set key values\n src_key = dest_key = key\n\n self.add_leaf(cmd=ShowBgpInstanceSummary,\n src='{rpp_af_src}[{src_key}]'.format(\n rpp_af_src=rpp_af_src, src_key=src_key),\n dest='{rpp_af_dest}[{dest_key}]'.format(\n rpp_af_dest=rpp_af_dest, dest_key=dest_key),\n vrf_type=vrf_type, address_family=af,\n vrf=vrf, instance=instance)\n if address_family:\n break\n\n # Get list of neighbors\n \n # Init list\n rpp_nbrs_loop1 = False\n for vrf_type in ['all', 'vrf']:\n if vrf != 'all':\n vrf_type = 'vrf'\n for af in ['ipv4 unicast', 'ipv6 unicast']:\n if address_family:\n af = address_family\n # Set or skip 'all all all' iteration\n if vrf_type == 'all':\n if rpp_nbrs_loop1:\n continue\n else:\n af = ''\n rpp_nbrs_loop1 = True\n\n self.add_leaf(cmd=ShowBgpInstanceNeighborsDetail,\n src='[instance][(?P<instance>.*)][vrf][(?P<vrf>.*)][neighbor]',\n dest='neighbors',\n vrf_type=vrf_type, address_family=af, vrf=vrf,\n instance=instance, neighbor=neighbor, action=self.get_key)\n self.make()\n\n if hasattr(self, 'neighbors'):\n\n for nbr in sorted(self.neighbors):\n # print('\\nneighbor is {neighbor}'.format(neighbor=neighbor))\n\n # advertised\n self.add_leaf(cmd=ShowBgpInstanceNeighborsAdvertisedRoutes,\n src='[instance][(?P<instance>.*)][vrf][(?P<vrf>.*)][address_family][(?P<af>.*)][advertised]',\n dest='routes_per_peer[instance][(?P<instance>.*)][vrf][(?P<vrf>.*)][neighbor][{neighbor}][address_family][(?P<af>.*)][advertised]'.format(\n neighbor=nbr),\n vrf_type=vrf_type, neighbor=nbr, vrf=vrf, instance=instance,\n address_family=af)\n\n # received\n self.add_leaf(cmd=ShowBgpInstanceNeighborsReceivedRoutes,\n src='[instance][(?P<instance>.*)][vrf][(?P<vrf>.*)][address_family][(?P<af>.*)][received]',\n dest='routes_per_peer[instance][(?P<instance>.*)][vrf][(?P<vrf>.*)][neighbor][{neighbor}][address_family][(?P<af>.*)][received_routes]'.format(\n neighbor=nbr),\n vrf_type=vrf_type, neighbor=nbr,\n address_family=af, vrf=vrf,\n instance=instance,)\n\n # routes\n self.add_leaf(cmd=ShowBgpInstanceNeighborsRoutes,\n 
src='[instance][(?P<instance>.*)][vrf][(?P<vrf>.*)][address_family][(?P<af>.*)][routes]',\n dest='routes_per_peer[instance][(?P<instance>.*)][vrf][(?P<vrf>.*)][neighbor][{neighbor}][address_family][(?P<af>.*)][routes]'.format(\n neighbor=nbr),\n vrf_type=vrf_type, neighbor=nbr,\n address_family=af, vrf=vrf,\n instance=instance,)\n\n # Delete neighbors for this iteration\n del self.neighbors\n if address_family:\n break\n\n ########################################################################\n # Final Structure\n ########################################################################\n\n # Make final Ops structure\n self.make(final_call=True)\n \n # Delete unnecessary keys\n try:\n del self.list_of_neighbors\n except:\n pass", "id": "1784557", "language": "Python", "matching_score": 5.287278175354004, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/bgp/iosxr/bgp.py" }, { "content": "# import python\nimport warnings\nfrom abc import ABC\n\n# import genie\nfrom genie.conf.base.config import CliConfig\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.attributes import \\\n UnsupportedAttributeWarning, AttributesHelper\n\n# import genie.libs\nfrom genie.libs.conf.rip import Rip\nfrom genie.libs.conf.ospf import Ospf\nfrom genie.libs.conf.isis import Isis\nfrom genie.libs.conf.vrf import VrfSubAttributes\nfrom genie.libs.conf.route_policy import RoutePolicy\nfrom genie.libs.conf.address_family import AddressFamily\n\n# Structure Hierarchy:\n# Bgp\n# +--DeviceAttributes\n# +-- PeerSessionAttributes\n# +-- PeerPolicyAttributes\n# +-- VrfAttributes\n# +-- AddressFamilyAttributes\n# +-- NeighborAttributes\n# +-- AddressFamilyAttributes\n\n\nclass Bgp(ABC):\n\n class DeviceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None,\n unconfig=False, **kwargs):\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n bgp_id = self.bgp_id or self.asn\n # iosxr: router bgp 100 [instance someword]\n line = 'router bgp {}'.format(bgp_id)\n if (self.instance_name and \\\n self.instance_name != 'default') or \\\n (attributes.value('instance_name') and \\\n attributes.value('instance_name') != 'default'):\n line += ' instance {instance_name}'\n\n # iosxr: router bgp 100 [instance someword](config-bgp)\n with configurations.submode_context(attributes.format(line,\n force=True)):\n if unconfig:\n if (attributes.attributes and \\\n 'instance_name' in attributes.attributes and \\\n isinstance(attributes.attributes['instance_name'], dict) and \\\n None in attributes.attributes['instance_name'].values()) or \\\n attributes.iswildcard:\n configurations.submode_unconfig()\n\n # iosxr: router bgp 100 [instance someword] / nsr\n # iosxr: router bgp 100 [instance someword] / nsr disable\n v = attributes.value('nsr')\n if v is not None:\n if v:\n configurations.append_line('nsr')\n else:\n configurations.append_line('nsr disable')\n\n # iosxr: router bgp 100 [instance someword] /\n # vrf someword (config-bgp-vrf)\n for sub, attributes2 in attributes.mapping_values(\n 'vrf_attr', sort=True):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n\n # iosxr: router bgp 100 [instance someword]\n # / session-group <ps_name>\n for sub, attributes2 in attributes.mapping_values(\n 'peer_session_attr'):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig,\n **kwargs))\n\n # iosxr: 
router bgp 100 [instance someword]\n # / af-group <pp_name> address-family <af_name>\n for sub, attributes2 in attributes.mapping_values(\n 'peer_policy_attr'):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig,\n **kwargs))\n\n # iosxr: router bgp 100 / [instance someword] /\n # ibgp policy out enforce-modifications\n if attributes.value('ibgp_policy_out_enforce_modifications'):\n configurations.append_line(\n 'ibgp policy out enforce-modifications')\n\n if apply:\n if configurations:\n self.device.configure(configurations, fail_invalid=True)\n else:\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations, fail_invalid=True)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply,\n attributes=attributes,\n unconfig=True, **kwargs)\n\n\n class PeerSessionAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None,\n unconfig=False, **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n with configurations.submode_context(\n attributes.format(\n 'session-group {ps_name}',force=True)):\n if unconfig and attributes.iswildcard:\n # Never reached!\n configurations.submode_unconfig()\n\n # iosxr: session-group <peer_session> \\ bfd fast-detect\n if attributes.value('ps_fall_over_bfd'):\n configurations.append_line(\n attributes.format('bfd fast-detect'))\n\n # iosxr: session-group <peer_session> \\\n # capability suppress 4-byte-as\n if attributes.value(\n 'ps_suppress_four_byte_as_capability'):\n configurations.append_line(attributes.format(\n 'capability suppress 4-byte-as'))\n\n # iosxr: session-group <peer_session> \\\n # description PEER-SESSION\n if attributes.value('ps_description'):\n configurations.append_line(\n attributes.format('description {ps_description}'))\n\n # iosxr: session-group <peer_session> \\\n # ignore-connected-check\n if attributes.value('ps_disable_connected_check'):\n configurations.append_line(\n attributes.format('ignore-connected-check'))\n\n # iosxr: session-group <peer_session> \\\n # ebgp-multihop 255\n if attributes.value('ps_ebgp_multihop_max_hop'):\n configurations.append_line(\n attributes.format('ebgp-multihop '\n '{ps_ebgp_multihop_max_hop}'))\n elif attributes.value('ps_ebgp_multihop'):\n configurations.append_line(\n attributes.format('ebgp-multihop'))\n\n # iosxr: session-group <peer_session> \\\n # local-as 111 [no-prepend replace-as dual-as]\n if attributes.value('ps_local_as_as_no'):\n base_s = 'local-as {ps_local_as_as_no}'\n if attributes.value('ps_local_as_no_prepend'):\n base_s += ' no-prepend'\n if attributes.value('ps_local_as_replace_as'):\n base_s += ' replace-as'\n if attributes.value('ps_local_as_dual_as'):\n base_s += ' dual-as'\n configurations.append_line(\n attributes.format(base_s))\n\n # iosxr: session-group <peer_session> \\\n # password <PASSWORD>\n if attributes.value('ps_password_text'):\n configurations.append_line(\n attributes.format('password {ps_password_text}'))\n\n # iosxr: session-group <peer_session> \\ remote-as 500\n if attributes.value('ps_remote_as'):\n configurations.append_line(\n attributes.format('remote-as {ps_remote_as}'))\n\n # iosxr: session-group <peer_session> \\ shutdown\n if attributes.value('ps_shutdown'):\n configurations.append_line(\n attributes.format('shutdown'))\n\n # iosxr: session-group <peer_session> \\\n # timers 111 222\n if 
attributes.value('ps_keepalive_interval') and \\\n attributes.value('ps_hodltime'):\n configurations.append_line(\n attributes.format('timers bgp '\n '{ps_keepalive_interval} {ps_hodltime}'))\n\n # iosxr: session-group <peer_session> \\\n # transport connection-mode passive\n if attributes.value('ps_transport_connection_mode'):\n configurations.append_line(attributes.format(\n 'session-open-mode '\n '{ps_transport_connection_mode.value}'))\n\n # iosxr: session-group <peer_session> \\\n # update-source loopback0\n if attributes.value('ps_update_source'):\n configurations.append_line(\n attributes.format('update-source '\n '{ps_update_source}'))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n\n class PeerPolicyAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n if not attributes.value('pp_af_name'):\n return ''\n with configurations.submode_context(\n attributes.format(\n 'af-group {pp_name} address-family {pp_af_name.value}',\n force=True)):\n if unconfig and attributes.iswildcard:\n # Never reached!\n configurations.submode_unconfig()\n\n # iosxr: af-group <pp_name> address-family <pp_af_name> \\\n # allowas-in [9]\n if attributes.value('pp_allowas_in'):\n if attributes.value('pp_allowas_in_as_number'):\n configurations.append_line(\n attributes.format('allowas-in '\n '{pp_allowas_in_as_number}'))\n else:\n configurations.append_line(\n attributes.format('allowas-in'))\n\n # iosxr: af-group <pp_name> address-family <pp_af_name> \\\n # as-override\n if attributes.value('pp_as_override'):\n configurations.append_line(\n attributes.format('as-override'))\n\n # iosxr: af-group <pp_name> address-family <pp_af_name> \\\n # default-originate\n # iosxr: af-group <pp_name> address-family <pp_af_name> \\\n # default-originate route-policy test\n if attributes.value('pp_default_originate'):\n if attributes.value(\n 'pp_default_originate_route_map'):\n configurations.append_line(\n attributes.format('default-originate '\n 'route-policy '\n '{pp_default_originate_route_map}'))\n else:\n configurations.append_line(\n attributes.format('default-originate'))\n\n # iosxr: af-group <pp_name> address-family <pp_af_name> \\\n # route-policy test-map in\n if attributes.value('pp_route_map_name_in'):\n configurations.append_line(\n attributes.format('route-policy '\n '{pp_route_map_name_in} in'))\n\n # iosxr: af-group <pp_name> address-family <pp_af_name> \\\n # route-policy test-map out\n if attributes.value('pp_route_map_name_out'):\n configurations.append_line(\n attributes.format('route-policy '\n '{pp_route_map_name_out} out'))\n\n # iosxr: af-group <pp_name> address-family <pp_af_name> \\\n # mmaximum-prefix <pp_maximum_prefix_max_prefix_no> \n # [<pp_maximum_prefix_threshold> ]\n # [restart <pp_maximum_prefix_restart> | warning-only ]\n if attributes.value('pp_maximum_prefix_max_prefix_no'):\n line = 'maximum-prefix '\\\n '{pp_maximum_prefix_max_prefix_no}'\n if attributes.value('pp_maximum_prefix_threshold'):\n line += ' {pp_maximum_prefix_threshold}'\n if attributes.value('pp_maximum_prefix_restart'):\n line += ' restart {pp_maximum_prefix_restart}'\n elif attributes.value(\n 'pp_maximum_prefix_warning_only'):\n line += ' warning-only'\n 
configurations.append_line(attributes.format(line))\n\n # iosxr: af-group <pp_name> address-family <pp_af_name> \\\n # next-hop-self\n if attributes.value('pp_next_hop_self'):\n configurations.append_line(\n attributes.format(\n 'next-hop-self'))\n\n # iosxr: af-group <pp_name> address-family <pp_af_name> \\\n # route-reflector-client\n if attributes.value('pp_route_reflector_client'):\n configurations.append_line(\n attributes.format(\n 'route-reflector-client'))\n\n # iosxr: af-group <pp_name> address-family <pp_af_name> \\\n # send-community-ebgp\n if attributes.value('pp_send_community'):\n if attributes.value('pp_send_community').value ==\\\n 'standard':\n configurations.append_line(attributes.format(\n 'send-community-ebgp'))\n\n # iosxr: template peer-session <peer_session> \\\n # send-extended-community-ebgp\n if attributes.value('pp_send_community').value ==\\\n 'extended':\n configurations.append_line(attributes.format(\n 'send-extended-community-ebgp'))\n\n # iosxr: af-group <pp_name> address-family <pp_af_name> \\\n # send-community-ebgp\n # send-extended-community-ebgp\n if attributes.value('pp_send_community').value == 'both':\n configurations.append_line(attributes.format(\n 'send-community-ebgp'))\n configurations.append_line(attributes.format(\n 'send-extended-community-ebgp'))\n\n # iosxr: af-group <pp_name> address-family <pp_af_name> \\\n # soft-reconfiguration inbound\n if attributes.value('pp_soft_reconfiguration'):\n configurations.append_line(\n attributes.format('soft-reconfiguration inbound'))\n\n # iosxr: af-group <pp_name> address-family <pp_af_name> \\\n # site-of-origin 100:100\n if attributes.value('pp_soo') and \\\n self.vrf_id != 'default':\n configurations.append_line(\n attributes.format('site-of-origin {pp_soo}'))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n\n class VrfAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None,\n unconfig=False, **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # iosxr: router bgp 100 / instance someword (config-bgp)\n # iosxr: router bgp 100 / [instance someword] /\n # vrf someword (config-bgp-vrf)\n with configurations.submode_context(\n None if self.vrf_name == 'default' else \\\n attributes.format('vrf {vrf_name}', force=True)):\n if self.vrf_name != 'default' and unconfig \\\n and attributes.iswildcard:\n configurations.submode_unconfig()\n\n # iosxr: router bgp 100 / [instance someword] /\n # [vrf someword] / bfd minimum-interval 3\n if attributes.value('bfd_minimum_interval'):\n configurations.append_line(attributes.format(\n 'bfd minimum-interval {bfd_minimum_interval}'))\n\n # iosxr: router bgp 100 / [instance someword] /\n # [vrf someword] / bfd multiplier 2\n if attributes.value('bfd_multiplier'):\n configurations.append_line(attributes.format(\n 'bfd multiplier {bfd_multiplier}'))\n\n # iosxr: router bgp 100 [instance someword] /\n # [vrf someword] /bgp bestpath cost-community ignore |\n # bgp bestpath compare-routerid |\n # bgp bestpath med always |\n # bgp bestpath med missing-as-worst\n if attributes.value('always_compare_med'):\n configurations.append_line(attributes.format(\n 'bgp bestpath med always'))\n if attributes.value('bestpath_compare_routerid'):\n configurations.append_line(attributes.format(\n 'bgp bestpath 
compare-routerid'))\n if attributes.value('bestpath_cost_community_ignore'):\n configurations.append_line(attributes.format(\n 'bgp bestpath cost-community ignore'))\n if attributes.value('bestpath_med_missing_at_worst'):\n configurations.append_line(attributes.format(\n 'bgp bestpath med missing-as-worst'))\n\n # iosxr: router bgp 100 [instance someword] /\n # [vrf someword] / bgp cluster-id <cluster_id>\n if attributes.value('cluster_id') and \\\n self.vrf_name == 'default':\n configurations.append_line(attributes.format(\n 'bgp cluster-id {cluster_id}'))\n\n # iosxr: router bgp 100 [instance someword] /\n # [vrf someword] / bgp confederation identifier \n # <confederation_identifier>\n if attributes.value('confederation_identifier') and \\\n self.vrf_name == 'default':\n configurations.append_line(\n attributes.format('bgp confederation identifier '\n '{confederation_identifier}'))\n\n # iosxr: router bgp 100 [instance someword] /\n # [vrf someword] / bgp confederation peers \n # <confederation_peers_as>\n if attributes.value('confederation_peers_as') and \\\n self.vrf_name == 'default':\n configurations.append_line(\n attributes.format('bgp confederation peers '\n '{confederation_peers_as}'))\n\n # iosxr: router bgp 100 [instance someword] /\n # vrf someword / rd 100.200:300 | rd auto\n if attributes.value('rd') and self.vrf_name != 'default':\n configurations.append_line(attributes.format('rd {rd}'))\n\n # iosxr: router bgp 100 [instance someword] /\n # bgp graceful-restart\n if self.vrf_name == 'default':\n if attributes.value('graceful_restart'):\n configurations.append_line('bgp graceful-restart')\n\n # iosxr: router bgp 100 [instance someword] /\n # bgp graceful-restart restart-time 1\n if attributes.value('graceful_restart_restart_time'):\n configurations.append_line(\n attributes.format('bgp graceful-restart '\n 'restart-time '\n '{graceful_restart_restart_time}'))\n\n # iosxr: router bgp 100 [instance someword] /\n # bgp graceful-restart stalepath-time 1\n if attributes.value('graceful_restart_stalepath_time'):\n configurations.append_line(\n attributes.format('bgp graceful-restart '\n 'stalepath-time '\n '{graceful_restart_stalepath_time}'))\n\n # iosxr: router bgp 100 [vrf someword] /\n # bgp log neighbor changes disable\n if attributes.value('log_neighbor_changes') is False:\n configurations.append_line(\n attributes.format(\n 'bgp log neighbor changes disable'))\n\n # iosxr: router bgp 100 [instance someword] /\n # [vrf someword] / bgp router-id 1.2.3.4\n #TODO: what about router id as loopback interface object ?\n configurations.append_line(attributes.format(\n 'bgp router-id {router_id}'))\n\n # iosxr: router bgp 100 [instance someword] /\n # [vrf someword] /\n # timers bgp <keepalive-interval> <holdtime>\n if attributes.value('keepalive_interval') and \\\n attributes.value('holdtime'):\n configurations.append_line(\n attributes.format('timers bgp '\n '{keepalive_interval} {holdtime}'))\n\n # iosxr: router bgp 100 [instance someword] /\n # [vrf someword] /\n # bgp enforce-first-as disable\n if attributes.value('enforce_first_as') is False:\n configurations.append_line(\n attributes.format('bgp enforce-first-as disable'))\n\n # iosxr: router bgp 100 [instance someword] /\n # [vrf someword] / \n # bgp fast-external-fallover disable\n if attributes.value('fast_external_fallover') is False:\n configurations.append_line(\n attributes.format(\n 'bgp fast-external-fallover disable'))\n\n # iosxr: router bgp 100 [instance someword] /\n # [vrf someword] / address-family ... 
(config-bgp-vrf-af)\n                for sub, attributes2 in attributes.mapping_values(\n                    'address_family_attr', sort=True):\n                    configurations.append_block(\n                        sub.build_config(apply=False,\n                                         attributes=attributes2,\n                                         unconfig=unconfig))\n\n                # iosxr: router bgp 100 [instance someword] /\n                # [vrf someword] / neighbor <ipv4|ipv6> (config-bgp-vrf-nbr)\n                for sub, attributes2 in attributes.mapping_values(\n                    'neighbor_attr'):\n                    configurations.append_block(\n                        sub.build_config(apply=False,\n                                         attributes=attributes2,\n                                         unconfig=unconfig))\n\n                # iosxr: router bgp 100 [instance someword] /\n                # [vrf someword] / nexthop mpls forwarding ibgp\n                if attributes.value('nexthop_mpls_forwarding_ibgp'):\n                    configurations.append_line(\n                        'nexthop mpls forwarding ibgp')\n\n            return str(configurations)\n\n        def build_unconfig(self, apply=True, attributes=None, **kwargs):\n            return self.build_config(apply=apply,\n                                     attributes=attributes,\n                                     unconfig=True, **kwargs)\n\n    class AddressFamilyAttributes(ABC):\n\n        def build_config(self, apply=True, attributes=None,\n                         unconfig=False, **kwargs):\n            assert not apply\n            assert not kwargs, kwargs\n            attributes = AttributesHelper(self, attributes)\n            configurations = CliConfigBuilder(unconfig=unconfig)\n\n            # iosxr: router bgp 100 [instance someword] /\n            # [vrf someword] / address-family ... (config-bgp-vrf-af)\n            with configurations.submode_context(attributes.format(\n                'address-family {address_family.value}', force=True)):\n                if unconfig and attributes.iswildcard:\n                    configurations.submode_unconfig()\n\n                # iosxr: address-family ipv4 unicast/\n                # bgp dampening\n                # bgp dampening 25 |\n                # bgp dampening 25 1000 1500 255 |\n                # bgp dampening route_map_name\n                if attributes.value('af_dampening'):\n                    if attributes.value(\n                        'af_dampening_half_life_time') and \\\n                        attributes.value(\n                        'af_dampening_reuse_time') and \\\n                        attributes.value(\n                        'af_dampening_suppress_time') and \\\n                        attributes.value(\n                        'af_dampening_max_suppress_time'):\n                        configurations.append_line(\n                            attributes.format('bgp dampening '\n                                '{af_dampening_half_life_time} '\n                                '{af_dampening_reuse_time} '\n                                '{af_dampening_suppress_time} '\n                                '{af_dampening_max_suppress_time}'))\n                    elif attributes.value(\n                        'af_dampening_half_life_time'):\n                        configurations.append_line(attributes.format(\n                            'bgp dampening '\n                            '{af_dampening_half_life_time}'))\n                    elif attributes.value(\n                        'af_dampening_route_map'):\n                        configurations.append_line(attributes.format(\n                            'bgp dampening '\n                            'route-policy {af_dampening_route_map}'))\n                    else:\n                        configurations.append_line('bgp dampening')\n\n                # iosxr: address-family ipv4 unicast/\n                # nexthop route-map <af_nexthop_route_map>\n                if attributes.value('af_nexthop_route_map') and \\\n                    self.vrf_name == 'default':\n                    configurations.append_line(\n                        attributes.format(\n                            'nexthop route-policy '\n                            '{af_nexthop_route_map}'))\n\n                # iosxr: address-family ipv4 unicast/\n                # nexthop trigger-delay critical\n                # <af_nexthop_trigger_delay_critical> \n                # nexthop trigger-delay non-critical\n                # <af_nexthop_trigger_delay_non_critical>\n                if attributes.value('af_nexthop_trigger_enable') and \\\n                    self.vrf_name == 'default':\n                    if attributes.value(\n                        'af_nexthop_trigger_delay_critical'):\n                        configurations.append_line(attributes.format(\n                            'nexthop trigger-delay critical '\n                            '{af_nexthop_trigger_delay_critical}'))\n                    elif attributes.value(\n                        'af_nexthop_trigger_delay_non_critical'):\n                        configurations.append_line(attributes.format(\n                            'nexthop trigger-delay non-critical '\n                            '{af_nexthop_trigger_delay_non_critical}'))\n\n                # iosxr: address-family ipv4 unicast/\n                # bgp client-to-client 
reflection disable\n if attributes.value('af_client_to_client_reflection') \\\n is False:\n configurations.append_line(\n attributes.format(\n 'bgp client-to-client reflection disable'))\n\n # iosxr: address-family ipv4 unicast/\n # distance <af_distance_extern_as> \n # <af_distance_internal_as> <af_distance_local>\n if attributes.value('af_distance_extern_as') and \\\n attributes.value('af_distance_internal_as') and \\\n attributes.value('af_distance_local') :\n configurations.append_line(\n attributes.format(\n 'distance bgp {af_distance_extern_as} '\n '{af_distance_internal_as} '\n '{af_distance_local}'))\n\n # iosxr: address-family ipv4 unicast/ \n # maximum-paths ebgp <af_maximum_paths_ebgp>\n if attributes.value('af_maximum_paths_ebgp') or \\\n attributes.value('maximum_paths_ebgp'):\n configurations.append_line(\n 'maximum-paths ebgp {}'.format(\n self.af_maximum_paths_ebgp or\n self.maximum_paths_ebgp))\n\n # iosxr: address-family ipv4 unicast/\n # maximum-paths ibgp <af_maximum_paths_ibgp>\n if attributes.value('af_maximum_paths_ibgp') or \\\n attributes.value('maximum_paths_ibgp'):\n configurations.append_line(\n 'maximum-paths ibgp {}'.format(\n self.af_maximum_paths_ibgp or\n self.maximum_paths_ibgp))\n\n # iosxr: address-family ipv4 unicast/\n # maximum-paths eibgp <af_maximum_paths_eibgp>\n if attributes.value('af_maximum_paths_eibgp'):\n configurations.append_line(\n attributes.format('maximum-paths eibgp '\n '{af_maximum_paths_eibgp}'))\n\n # iosxr: address-family ipv4 unicast/\n # aggregate-address <af_aggregate_address_ipv4_address>\n # /<af_aggregate_address_ipv4_mask>\n # [as-set] | summary-only\n v = attributes.value(\n 'af_aggregate_address_ipv4_address')\n k = attributes.value('af_aggregate_address_ipv4_mask')\n if v and k:\n line = 'aggregate-address '\\\n '{af_aggregate_address_ipv4_address}/'\\\n '{af_aggregate_address_ipv4_mask}'\n if attributes.value('af_aggregate_address_as_set'):\n line += ' as-set'\n elif attributes.value(\n 'af_aggregate_address_summary_only'):\n line += ' summary-only'\n configurations.append_line(attributes.format(line))\n\n # iosxr: address-family ipv4 unicast/\n # network <af_network_number>/<af_network_mask>\n # [route-policy <af_network_route_map>]\n if attributes.value('af_network_number') and \\\n attributes.value('af_network_mask'):\n line = 'network {af_network_number}/'\\\n '{af_network_mask}'\n if attributes.value('af_network_route_map'):\n line += ' route-policy {af_network_route_map}'\n configurations.append_line(attributes.format(line))\n\n if attributes.value('redistributes'):\n # iosxr: router bgp 100 [instance someword] / \n # address-family ipv4|ipv6 unicast / \n # redistribute isis|ospf someword | rip\n # [metric <0-4294967295>] [route-policy <rtepol>]\n for redistribute, redistribute_attributes in \\\n attributes.sequence_values('redistributes'):\n assert redistribute_attributes.iswildcard\n cfg = 'redistribute'\n if isinstance(redistribute.protocol, str):\n # connected, subscriber\n cfg += redistribute_attributes.format(\n ' {protocol}')\n elif isinstance(redistribute.protocol, Ospf):\n cfg += redistribute_attributes.format(\n ' ospf {protocol.pid}')\n elif isinstance(redistribute.protocol, Isis):\n cfg += redistribute_attributes.format(\n ' isis {protocol.pid}')\n elif isinstance(redistribute.protocol, Rip):\n cfg += redistribute_attributes.format(' rip')\n else:\n raise ValueError(redistribute.protocol)\n cfg += redistribute_attributes.format(\n ' metric {metric}')\n cfg += redistribute_attributes.format(\n ' 
route-policy {route_policy.name}')\n configurations.append_line(cfg)\n else:\n # iosxr: address-family ipv4 unicast/\n # redistribute isis <af_redist_isis>\n # metric <af_redist_isis_metric> |\n # route-policy <af_redist_isis_route_policy>\n if attributes.value('af_redist_isis') and \\\n self.vrf_name == 'default':\n line = 'redistribute isis '\\\n '{af_redist_isis}'\n if attributes.value('af_redist_isis_metric'):\n line += ' metric {af_redist_isis_metric}'\n elif attributes.value(\n 'af_redist_isis_route_policy'):\n line += ' route-policy '\\\n '{af_redist_isis_route_policy}'\n configurations.append_line(attributes.format(line))\n\n\n # iosxr: address-family ipv4 unicast/\n # redistribute ospf <af_redist_ospf>\n # metric <af_redist_ospf_metric> |\n # route-policy <af_redist_ospf_route_policy>\n if attributes.value('af_redist_ospf'):\n line = 'redistribute ospf '\\\n '{af_redist_ospf}'\n if attributes.value('af_redist_ospf_metric'):\n line += ' metric {af_redist_ospf_metric}'\n elif attributes.value(\n 'af_redist_ospf_route_policy'):\n line += ' route-policy '\\\n '{af_redist_ospf_route_policy}'\n configurations.append_line(attributes.format(line))\n\n # iosxr: address-family ipv4 unicast/\n # redistribute rip\n # metric <af_redist_rip_metric> |\n # route-policy <af_redist_rip_route_policy>\n if attributes.value('af_redist_rip'):\n line = 'redistribute rip'\n if attributes.value('af_redist_rip_metric'):\n line += ' metric {af_redist_rip_metric}'\n elif attributes.value(\n 'af_redist_rip_route_policy'):\n line += ' route-policy '\\\n '{af_redist_rip_route_policy}'\n configurations.append_line(attributes.format(line))\n\n # iosxr: address-family ipv4 unicast/\n # redistribute static\n # metric <af_redist_static_metric> |\n # route-policy <af_redist_static_route_policy>\n if attributes.value('af_redist_static'):\n line = 'redistribute static'\n if attributes.value('af_redist_static_metric'):\n line += ' metric {af_redist_static_metric}'\n elif attributes.value(\n 'af_redist_static_route_policy'):\n line += ' route-policy '\\\n '{af_redist_static_route_policy}'\n configurations.append_line(attributes.format(line))\n\n # iosxr: address-family ipv4 unicast/\n # redistribute connected\n # metric <af_redist_connected_metric> |\n # route-policy <af_redist_static_route_policy>\n if attributes.value('af_redist_connected'):\n line = 'redistribute connected'\n if attributes.value('af_redist_connected_metric'):\n line += ' metric {af_redist_connected_metric}'\n elif attributes.value(\n 'af_redist_connected_route_policy'):\n line += ' route-policy '\\\n '{af_redist_connected_route_policy}'\n configurations.append_line(attributes.format(line))\n\n # iosxr: address-family ipv6 unicast/\n # aggregate-address <af_v6_aggregate_address_ipv6_address>\n # [as-set] | summary-only\n if attributes.value(\n 'af_v6_aggregate_address_ipv6_address'):\n line = 'aggregate-address '\\\n '{af_v6_aggregate_address_ipv6_address}'\n if attributes.value(\n 'af_v6_aggregate_address_as_set'):\n line += ' as-set'\n elif attributes.value(\n 'af_v6_aggregate_address_summary_only'):\n line += ' summary-only'\n configurations.append_line(attributes.format(line))\n\n # iosxr: address-family ipv6 unicast/\n # network <af_v6_network_number> [route-policy \n # <af_v6_network_route_map> ] +\n if attributes.value('af_v6_network_number'):\n if attributes.value('af_v6_network_route_map'):\n configurations.append_line(attributes.format(\n 'network {af_v6_network_number} '\n 'route-policy {af_v6_network_route_map}'))\n else:\n 
configurations.append_line(attributes.format(\n 'network {af_v6_network_number}'))\n\n # iosxr: address-family ipv4 unicast/\n # allocate-label all\n if attributes.value('af_v6_allocate_label_all'):\n configurations.append_line(attributes.format(\n 'allocate-label all'))\n else:\n if isinstance(self.allocate_label, RoutePolicy):\n # iosxr: router bgp 100 [instance someword] /\n # [vrf someword] / address-family ipv4|ipv6 unicast /\n # allocate-label all |\n # allocate-label route-policy <rtepol>\n configurations.append_line(attributes.format(\n 'allocate-label route-policy '\n '{allocate_label.name}'))\n else:\n configurations.append_line(attributes.format(\n 'allocate-label {allocate_label}'))\n\n # iosxr: address-family vpnv4 unicast/\n # retain route-target all | \n # retain route-target route-policy <rtepol>\n if attributes.value('af_retain_rt_all') or \\\n attributes.value('retain_route_target'):\n configurations.append_line(attributes.format(\n 'retain route-target all'))\n else:\n configurations.append_line(attributes.format(\n 'retain route-target route-policy '\n '{retain_route_target}'))\n\n # iosxr: address-family ipv4 unicast/\n # label mode per-vrf\n if attributes.value('af_label_allocation_mode') or \\\n attributes.value('label_mode'):\n configurations.append_line(attributes.format(\n 'label mode per-vrf'))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply,\n attributes=attributes,\n unconfig=True, **kwargs)\n\n class NeighborAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None,\n unconfig=False, **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # iosxr: router bgp 100 [instance someword] /\n # [vrf someword] / neighbor <ipv4|ipv6> (config-bgp-vrf-nbr)\n with configurations.submode_context(attributes.format(\n 'neighbor {neighbor}', force=True)):\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n # iosxr: router bgp 100 [instance someword] /\n # [vrf someword] / neighbor <ipv4|ipv6> /\n # graceful-restart |\n # iosxr: router bgp 100 [instance someword] /\n # [vrf someword] / neighbor <ipv4|ipv6> /\n # graceful-restart disable\n v = attributes.value('graceful_restart',\n inherited=False)\n if v is not None:\n if v:\n configurations.append_line(\n 'graceful-restart')\n else:\n configurations.append_line(\n 'graceful-restart disable')\n\n # handle key nbr_fall_over_bfd\n # iosxr: router bgp 100 [instance someword] /\n # [vrf someword] / neighbor <ipv4|ipv6> \n # bfd fast-detect\n if attributes.value('nbr_fall_over_bfd'):\n configurations.append_line('bfd fast-detect')\n elif attributes.value('bfd_fast_detect'):\n v = attributes.value('bfd_fast_detect')\n if v:\n if v is True:\n configurations.append_line(\n 'bfd fast-detect')\n else:\n configurations.append_line(\n 'bfd fast-detect {}'.format(v))\n\n elif attributes.value('nbr_fall_over_bfd') is False or \\\n attributes.value('bfd_fast_detect') is False:\n configurations.append_line(\n 'bfd fast-detect disable')\n\n # iosxr: router bgp 100 [instance someword] /\n # [vrf someword] / neighbor <ipv4|ipv6> /\n # bfd minimum-interval 3\n configurations.append_line(attributes.format(\n 'bfd minimum-interval {bfd_minimum_interval}',\n inherited=False))\n\n # iosxr: router bgp 100 [instance someword] /\n # [vrf someword] /\n # neighbor <ipv4|ipv6> / bfd multiplier 2\n 
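# note: the line below relies on attributes.format() resolving to\n                    # '' when bfd_multiplier is unset, so no stray CLI is emitted\n                    # for absent attributes\n                    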
configurations.append_line(attributes.format(\n 'bfd multiplier {bfd_multiplier}', inherited=False))\n\n # iosxr: router bgp 100 [instance someword] / [vrf someword] / neighbor <ipv4|ipv6> / remote-as 1\n # iosxr: router bgp 100 [instance someword] / [vrf someword] / neighbor <ipv4|ipv6> / remote-as 100.200\n # iosxr: router bgp 100 [instance someword] / [vrf someword] / neighbor <ipv4|ipv6> / remote-as 65536\n if not self.bgp_id:\n configurations.append_line(attributes.format(\n 'remote-as {asn}'))\n\n # handle key nbr_remote_as\n # iosxr: neighbor <neighbor_id> / remote-as\n # <nbr_remote_as>\n if attributes.value('nbr_remote_as'):\n configurations.append_line(\n attributes.format('remote-as {nbr_remote_as}'))\n\n # iosxr: router bgp 100 [instance someword] /\n # [vrf someword] / neighbor <ipv4|ipv6> /\n # update-source Bundle-Ether1 \n if attributes.value('update_source') or \\\n attributes.value('nbr_update_source'):\n\n if hasattr(attributes.value('update_source'), 'name'):\n val = attributes.value('update_source').name\n else:\n val = self.nbr_update_source\n configurations.append_line(\n 'update-source {}'.format(val))\n\n # iosxr: neighbor <neighbor_id> \\\n # capability suppress 4-byte-as\n if attributes.value(\n 'nbr_suppress_four_byte_as_capability'):\n configurations.append_line(\n attributes.format(\n 'capability suppress 4-byte-as'))\n\n # iosxr: neighbor <neighbor_id> \\\n # description <nbr_description>\n if attributes.value(\n 'nbr_description'):\n configurations.append_line(\n attributes.format(\n 'description {nbr_description}'))\n\n # iosxr: neighbor <neighbor_id> \\\n # ignore-connected-check\n if attributes.value(\n 'nbr_disable_connected_check'):\n configurations.append_line(\n attributes.format(\n 'ignore-connected-check'))\n\n # iosxr: neighbor <neighbor_id> \\\n # ebgp-multihop <nbr_ebgp_multihop_max_hop>\n # [ <nbr_ebgp_multihop_max_hop> ]\n if attributes.value('nbr_ebgp_multihop'):\n if attributes.value('nbr_ebgp_multihop_max_hop'):\n configurations.append_line(\n attributes.format(\n 'ebgp-multihop '\n '{nbr_ebgp_multihop_max_hop}'))\n else:\n configurations.append_line(\n 'ebgp-multihop')\n elif attributes.value('ebgp_multihop_max_hop_count') \\\n is not None \\\n or attributes.value('ebgp_multihop_mpls'):\n cfg = 'ebgp-multihop'\n cfg += attributes.format(\n ' {ebgp_multihop_max_hop_count}')\n if attributes.value('ebgp_multihop_mpls') is True:\n cfg += ' mpls'\n configurations.append_line(cfg)\n\n # iosxr: neighbor <neighbor_id> \\ inherit peer-session\n # <nbr_inherit_peer_session>\n if attributes.value(\n 'nbr_inherit_peer_session'):\n configurations.append_line(\n attributes.format(\n 'use session-group '\n '{nbr_inherit_peer_session}'))\n\n # iosxr: neighbor <neighbor_id> \\\n # local-as 111 [no-prepend replace-as dual-as]\n if attributes.value('nbr_local_as_as_no'):\n base_s = 'local-as {nbr_local_as_as_no}'\n if attributes.value('nbr_local_as_no_prepend'):\n base_s += ' no-prepend'\n if attributes.value('nbr_local_as_replace_as'):\n base_s += ' replace-as'\n if attributes.value('nbr_local_as_dual_as'):\n base_s += ' dual-as'\n configurations.append_line(\n attributes.format(base_s))\n\n # iosxr: neighbor <neighbor_id> / \n # address-family <nbr_remove_private_as_af_name>\n # remove-private-as\n if attributes.value('nbr_remove_private_as') and \\\n attributes.value('nbr_remove_private_as_af_name'):\n configurations.append_line(\n attributes.format(\n 'address-family '\n '{nbr_remove_private_as_af_name.value} '\n 'remove-private-AS'))\n\n # iosxr: 
neighbor <neighbor_id> / shutdown\n if attributes.value('nbr_shutdown'):\n configurations.append_line(\n attributes.format('shutdown'))\n\n # iosxr: neighbor <neighbor_id> / timers\n # <nbr_keepalive_interval> <nbr_holdtime>\n if attributes.value('nbr_keepalive_interval') and \\\n attributes.value('nbr_holdtime'):\n configurations.append_line(\n attributes.format('timers '\n '{nbr_keepalive_interval} {nbr_holdtime}'))\n\n # iosxr: neighbor <neighbor_id> / password\n # <nbr_password_text>\n if attributes.value('nbr_password_text'):\n configurations.append_line(attributes.format(\n 'password {nbr_password_text}'))\n\n # iosxr: neighbor <neighbor_id> \\\n # transport connection-mode\n # {nbr_transport_connection_mode}\n if attributes.value('nbr_transport_connection_mode'):\n configurations.append_line(attributes.format(\n 'session-open-mode '\n '{nbr_transport_connection_mode.value}'))\n\n # iosxr: router bgp 100 [instance someword] /\n # [vrf someword] / neighbor <ipv4|ipv6> /\n # address-family ... (config-bgp-vrf-nbr-af)\n for sub, attributes2 in attributes.mapping_values(\n 'address_family_attr', sort=True):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply,\n attributes=attributes,\n unconfig=True, **kwargs)\n\n class AddressFamilyAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None,\n unconfig=False, **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # iosxr: router bgp 100 [instance someword] /\n # [vrf someword] / neighbor <ipv4|ipv6> /\n # address-family ... 
(config-bgp-vrf-nbr-af)\n with configurations.submode_context(\n attributes.format(\n 'address-family {address_family.value}',\n force=True)):\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n # iosxr: router bgp 100 / [instance someword] / vrf someword / neighbor <ipv4|ipv6> / address-family l2vpn evpn / encapsulation-type mpls\n # iosxr: router bgp 100 / [instance someword] / vrf someword / neighbor <ipv4|ipv6> / address-family l2vpn evpn / encapsulation-type vxlan\n # iosxr: router bgp 100 / [instance someword] / vrf someword / neighbor <ipv4|ipv6> / address-family vpnv4|vpnv6 unicast / encapsulation-type mpls\n # iosxr: router bgp 100 / [instance someword] / vrf someword / neighbor <ipv4|ipv6> / address-family vpnv4|vpnv6 unicast / encapsulation-type vxlan\n v = attributes.value('encapsulation_type')\n if v is not None:\n cfg = 'encapsulation-type {}'.format(v)\n if self.address_family in [\n AddressFamily.l2vpn_evpn,\n AddressFamily.vpnv4_unicast,\n AddressFamily.vpnv6_unicast,\n ]:\n configurations.append_line(cfg)\n elif not self.isinherited('encapsulation_type'):\n warnings.warn('{} {}'.\\\n format(self.address_family, cfg),\\\n UnsupportedAttributeWarning)\n\n # iosxr: router bgp 100 [instance someword] /\n # vrf someword / neighbor <ipv4|ipv6> /\n # address-family vpnv4|vpnv6 unicast /\n # import [stitching-rt]\n # [re-originate [stitching-rt]]\n if attributes.value('import_stitching_rt') \\\n or attributes.value('import_stitching_rt_re_originate') \\\n or attributes.value('import_re_originate') \\\n or attributes.value('import_re_originate_stitching_rt'):\n cfg = 'import'\n if attributes.value('import_stitching_rt', force=True) \\\n or attributes.value('import_stitching_rt_re_originate', force=True):\n cfg += ' stitching-rt'\n if attributes.value('import_stitching_rt_re_originate', force=True):\n cfg += ' re-originate'\n if attributes.value('import_re_originate', force=True) \\\n or attributes.value('import_re_originate_stitching_rt', force=True):\n cfg += ' re-originate'\n if attributes.value('import_re_originate_stitching_rt', force=True):\n cfg += ' stitching-rt'\n if self.address_family in [\n AddressFamily.l2vpn_evpn,\n AddressFamily.vpnv4_unicast,\n AddressFamily.vpnv6_unicast,\n ]:\n configurations.append_line(cfg)\n elif not (\n self.isinherited('import_stitching_rt')\n and self.isinherited('import_re_originate')\n and self.isinherited(\n 'import_re_originate_stitching_rt')):\n warnings.warn('{} {}'.\\\n format(self.address_family, cfg),\\\n UnsupportedAttributeWarning)\n\n # iosxr: router bgp 100 [instance someword] /\n # vrf someword / neighbor <ipv4|ipv6> /\n # address-family l2vpn evpn /\n # advertise l2vpn evpn re-originated |\n # re-originated regular-rt |\n # re-originated stitching-rt\n v = attributes.value(\n 'advertise_l2vpn_evpn_re_originated')\n if v:\n if v is True:\n cfg = 'advertise l2vpn evpn re-originated'\n else:\n cfg = 'advertise l2vpn evpn re-originated {}'.format(v)\n if self.address_family in [\n AddressFamily.l2vpn_evpn,\n ]:\n configurations.append_line(cfg)\n elif not self.isinherited(\n 'advertise_l2vpn_evpn_re_originated'):\n warnings.warn('{} {}'.\\\n format(self.address_family, cfg),\\\n UnsupportedAttributeWarning)\n\n # iosxr: router bgp 100 [instance someword] / vrf someword /\n # neighbor <ipv4|ipv6> / address-family l2vpn evpn | vpnv4 unicast /\n # advertise vpnv4 unicast\n if attributes.value('advertise_vpnv4_unicast'):\n cfg = 'advertise vpnv4 unicast'\n # TODO\n configurations.append_line(cfg)\n\n # 
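Note: the encapsulation-type / import / advertise blocks above\n                        # only emit CLI for address families that support them and\n                        # otherwise issue an UnsupportedAttributeWarning (unless the\n                        # value was merely inherited).\n                        # 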
iosxr: address-family <nbr_af_name> \\ allowas-in\n                        # [ <allowas-in-cnt> ]\n                        if attributes.value('nbr_af_allowas_in'):\n                            if attributes.value(\n                                'nbr_af_allowas_in_as_number'):\n                                configurations.append_line(\n                                    attributes.format('allowas-in '\n                                        '{nbr_af_allowas_in_as_number}'))\n                            else:\n                                configurations.append_line(\n                                    attributes.format('allowas-in'))\n\n                        # iosxr: address-family <nbr_af_name> \\\n                        # inherit peer-policy <nbr_af_inherit_peer_policy>\n                        # <nbr_af_inherit_peer_seq>\n                        if attributes.value('nbr_af_inherit_peer_policy'):\n                            configurations.append_line(\n                                attributes.format('use af-group '\n                                    '{nbr_af_inherit_peer_policy}'))\n\n                        # iosxr: address-family <nbr_af_name> \\\n                        # maximum-prefix\n                        # <nbr_af_maximum_prefix_max_prefix_no>\n                        # [<nbr_af_maximum_prefix_threshold>]\n                        # [restart <nbr_af_maximum_prefix_restart> |\n                        # warning-only ]\n                        if attributes.value(\n                            'nbr_af_maximum_prefix_max_prefix_no'):\n                            line = 'maximum-prefix '\\\n                                '{nbr_af_maximum_prefix_max_prefix_no}'\n                            if attributes.value(\n                                'nbr_af_maximum_prefix_threshold'):\n                                line += ' {nbr_af_maximum_prefix_threshold}'\n                            if attributes.value(\n                                'nbr_af_maximum_prefix_restart'):\n                                line += ' restart '\\\n                                    '{nbr_af_maximum_prefix_restart}'\n                            elif attributes.value(\n                                'nbr_af_maximum_prefix_warning_only'):\n                                line += ' warning-only'\n                            configurations.append_line(\n                                attributes.format(line))\n\n                        # iosxr: address-family <nbr_af_name> \\\n                        # route-policy <nbr_af_route_map_name_in> in\n                        if attributes.value('nbr_af_route_map_name_in'):\n                            configurations.append_line(\n                                attributes.format('route-policy '\n                                    '{nbr_af_route_map_name_in} in'))\n                        elif hasattr(self.route_policy_in, 'name'):\n                            configurations.append_line(\n                                attributes.format('route-policy '\n                                    '{route_policy_in.name} in'))\n\n                        # iosxr: address-family <nbr_af_name> \\\n                        # route-policy <nbr_af_route_map_name_out> out\n                        if attributes.value('nbr_af_route_map_name_out'):\n                            configurations.append_line(\n                                attributes.format('route-policy '\n                                    '{nbr_af_route_map_name_out} out'))\n                        elif hasattr(self.route_policy_out, 'name'):\n                            configurations.append_line(\n                                attributes.format('route-policy '\n                                    '{route_policy_out.name} out'))\n\n                        # iosxr: address-family <nbr_af_name> \\\n                        # route-reflector-client\n                        if attributes.value(\n                            'nbr_af_route_reflector_client') or \\\n                            attributes.value('route_reflector_client'):\n                            configurations.append_line(\n                                attributes.format(\n                                    'route-reflector-client'))\n                        elif attributes.value(\n                            'route_reflector_client_inheritance_disable',\n                            force=True):\n                            configurations.append_line(\n                                attributes.format(\n                                    'route-reflector-client '\n                                    'inheritance-disable'))\n\n                        # iosxr: address-family <nbr_af_name> \\\n                        # send-community-ebgp\n                        nbr_v1 = attributes.value('nbr_af_send_community')\n                        if nbr_v1:\n                            if nbr_v1.value == 'standard':\n                                configurations.append_line(\n                                    attributes.format(\n                                        'send-community-ebgp'))\n\n                            # iosxr: address-family <nbr_af_name> \\\n                            # send-extended-community-ebgp\n                            if nbr_v1.value == 'extended':\n                                configurations.append_line(\n                                    attributes.format(\n                                        'send-extended-community-ebgp'))\n\n                            # iosxr: address-family <nbr_af_name> \\\n                            # send-community-ebgp\n                            # send-extended-community-ebgp\n                            if nbr_v1.value == 'both':\n                                configurations.append_line(\n                                    attributes.format(\n                                        'send-community-ebgp'))\n                                configurations.append_line(\n                                    attributes.format(\n                                        'send-extended-community-ebgp'))\n                        elif attributes.value('send_community_ebgp'):\n                            configurations.append_line(\n                                attributes.format(\n                                    'send-community-ebgp'))\n                        elif attributes.value('send_extended_community_ebgp'):\n                            configurations.append_line(\n                                
attributes.format(\n 'send-extended-community-ebgp'))\n elif attributes.value(\n 'send_extended_community_ebgp_inheritance_disable',\n force=True):\n configurations.append_line(\n attributes.format(\n 'send-extended-community-ebgp '\n 'inheritance-disable'))\n\n\n # iosxr: address-family <nbr_af_name> \\\n # soft-reconfiguration inbound\n if attributes.value('nbr_af_soft_reconfiguration'):\n configurations.append_line(\n attributes.format('soft-reconfiguration '\n 'inbound'))\n\n # iosxr: address-family <nbr_af_name> \\\n # next-hop-self\n if attributes.value('nbr_af_next_hop_self') or \\\n attributes.value('nexthop_self'):\n configurations.append_line(\n attributes.format('next-hop-self'))\n elif attributes.value(\n 'nexthop_self_inheritance_disable'):\n configurations.append_line(\n attributes.format('next-hop-self '\n 'inheritance-disable'))\n\n # iosxr: address-family <nbr_af_name> \\\n # as-override\n if attributes.value('nbr_af_as_override') or \\\n attributes.value('as_override'):\n\n configurations.append_line(\n attributes.format('as-override'))\n\n if attributes.value(\n 'as_override_inheritance', force=True) is False: \n configurations.append_line(\n attributes.format(\n 'as-override inheritance-disable'))\n\n # iosxr: address-family <nbr_af_name> \\\n # default-originate\n # iosxr: address-family <nbr_af_name> \\\n # default-originate route-policy test\n if attributes.value('nbr_af_default_originate'):\n if attributes.value(\n 'nbr_af_default_originate_route_map'):\n configurations.append_line(\n attributes.format('default-originate '\n 'route-policy '\n '{nbr_af_default_originate_route_map}'))\n else:\n configurations.append_line(\n attributes.format('default-originate'))\n\n # iosxr: address-family <nbr_af_name> \\\n # site-of-origin 100:100\n if attributes.value('nbr_af_soo') and \\\n self.vrf_id != 'default':\n configurations.append_line(\n attributes.format(\n 'site-of-origin {nbr_af_soo}'))\n\n # signalling ldp disable\n if attributes.value('nbr_af_suppress_signaling_protocol_ldp'):\n configurations.append_line(\n attributes.format('signalling ldp disable'))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True,\n attributes=None, **kwargs):\n return self.build_config(apply=apply,\n attributes=attributes,\n unconfig=True, **kwargs)\n\n", "id": "11118847", "language": "Python", "matching_score": 9.680337905883789, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/bgp/iosxr/bgp.py" }, { "content": "\n# import python\nfrom abc import ABC\nfrom netaddr import IPNetwork\n\n# import genie\nfrom genie.conf.base.attributes import AttributesHelper\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.config import CliConfig\nfrom genie.libs.conf.ospf import Ospf\nfrom genie.libs.conf.isis import Isis\nfrom genie.libs.conf.rip import Rip\n\n\n# Structure Hierarchy:\n# Bgp\n# +--DeviceAttributes\n# +-- PeerSessionAttributes\n# +-- PeerPolicyAttributes\n# +-- VrfAttributes\n# +-- AddressFamilyAttributes\n# +-- NeighborAttributes\n# +-- AddressFamilyAttributes\n\n\nclass Bgp(ABC):\n\n class DeviceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # nxos: feature bgp\n if attributes.value('enabled'):\n if unconfig is False:\n configurations.append_line(\n attributes.format('feature bgp'))\n\n # Make sure that only enabled was provided in attributes\n 
# If wildcard, then delete everything\n                elif unconfig is True and\\\n                   attributes.attributes == {'enabled': {True: None}} or \\\n                   attributes.iswildcard:\n                    configurations.append_line('no feature bgp', raw=True)\n                if apply:\n                    if configurations:\n                        self.device.configure(configurations)\n                else:\n                    return CliConfig(device=self.device, unconfig=unconfig,\n                                     cli_config=configurations)\n\n            # nxos: router bgp 100\n            with configurations.submode_context(attributes.format(\n                'router bgp {bgp_id}', force=True)):\n                if unconfig and attributes.iswildcard:\n                    configurations.submode_unconfig()\n\n                # nxos: router bgp 100 / shutdown\n                if attributes.value('protocol_shutdown'):\n                    configurations.append_line(\n                        attributes.format('shutdown'))\n\n                for sub, attributes2 in attributes.mapping_values(\n                        'vrf_attr', sort=True, keys=self.vrf_attr):\n                    configurations.append_block(\n                        sub.build_config(apply=False,\n                                         attributes=attributes2,\n                                         unconfig=unconfig))\n\n                for sub, attributes2 in attributes.mapping_values(\n                        'peer_session_attr', keys=self.peer_session_attr):\n                    configurations.append_block(\n                        sub.build_config(apply=False,\n                                         attributes=attributes2,\n                                         unconfig=unconfig,\n                                         **kwargs))\n\n                for sub, attributes2 in attributes.mapping_values(\n                        'peer_policy_attr', keys=self.peer_policy_attr):\n                    configurations.append_block(\n                        sub.build_config(apply=False,\n                                         attributes=attributes2,\n                                         unconfig=unconfig,\n                                         **kwargs))\n\n            if apply:\n                if configurations:\n                    self.device.configure(configurations)\n            else:\n                return CliConfig(device=self.device, unconfig=unconfig,\n                                 cli_config=configurations)\n\n        def build_unconfig(self, apply=True, attributes=None, **kwargs):\n            return self.build_config(apply=apply, attributes=attributes,\n                                     unconfig=True, **kwargs)\n\n        class PeerPolicyAttributes(ABC):\n\n            def build_config(self, apply=True, attributes=None, unconfig=False,\n                             **kwargs):\n                assert not apply\n                attributes = AttributesHelper(self, attributes)\n                configurations = CliConfigBuilder(unconfig=unconfig)\n                with configurations.submode_context(\n                    attributes.format('template peer-policy {pp_name}',\n                                      force=True)):\n                    if unconfig and attributes.iswildcard:\n                        # Never reached!\n                        configurations.submode_unconfig()\n\n                    # nxos: template peer-policy <pp_name> \\\n                    # allowas-in [9]\n                    if attributes.value('pp_allowas_in'):\n                        if attributes.value('pp_allowas_in_as_number'):\n                            configurations.append_line(\n                                attributes.format('allowas-in '\n                                    '{pp_allowas_in_as_number}'))\n                        else:\n                            configurations.append_line(\n                                attributes.format('allowas-in'))\n\n                    # nxos: template peer-policy <pp_name> \\\n                    # as-override\n                    if attributes.value('pp_as_override'):\n                        configurations.append_line(\n                            attributes.format('as-override'))\n\n                    # nxos: template peer-policy <pp_name> \\\n                    # send-community\n                    if attributes.value('pp_send_community'):\n                        if attributes.value('pp_send_community').value ==\\\n                            'standard':\n                            configurations.append_line(attributes.format(\n                                'send-community'))\n\n                        # nxos: template peer-policy <pp_name> \\\n                        # send-community extended\n                        if attributes.value('pp_send_community').value == \\\n                            'extended':\n                            configurations.append_line(attributes.format(\n                                'send-community extended'))\n\n                        # nxos: template peer-policy <pp_name> \\\n                        # send-community\n                        # nxos: template peer-policy <pp_name> \\\n                        # send-community extended\n                        if attributes.value('pp_send_community').value == \\\n                            'both':\n                            configurations.append_line(attributes.format(\n                                'send-community'))\n                            configurations.append_line(attributes.format(\n                                'send-community extended'))\n\n                    
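# Illustrative rendering (assumed values): pp_name='PP1',\n                    # pp_as_override=True and pp_send_community='both' (enum)\n                    # would yield:\n                    #   template peer-policy PP1\n                    #    as-override\n                    #    send-community\n                    #    send-community extended\n                    #    exit\n                    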
# nxos: template peer-policy <pp_name> \\\n                    # route-reflector-client\n                    if attributes.value('pp_route_reflector_client'):\n                        configurations.append_line(\n                            attributes.format(\n                                'route-reflector-client'))\n\n                    # nxos: template peer-policy <pp_name> \\\n                    # next-hop-self\n                    if attributes.value('pp_next_hop_self'):\n                        configurations.append_line(\n                            attributes.format(\n                                'next-hop-self'))\n\n                    # nxos: template peer-policy <pp_name> \\\n                    # route-map test-map in\n                    if attributes.value('pp_route_map_name_in'):\n                        configurations.append_line(\n                            attributes.format('route-map '\n                                '{pp_route_map_name_in} in'))\n\n                    # nxos: template peer-policy <pp_name> \\\n                    # route-map test-map out\n                    if attributes.value('pp_route_map_name_out'):\n                        configurations.append_line(\n                            attributes.format('route-map '\n                                '{pp_route_map_name_out} out'))\n\n                    # nxos: template peer-policy <pp_name> \\\n                    # maximum-prefix <pp_maximum_prefix_max_prefix_no>\n                    # [<pp_maximum_prefix_threshold>]\n                    # [restart <pp_maximum_prefix_restart> | warning-only ]\n                    if attributes.value('pp_maximum_prefix_max_prefix_no'):\n                        line = 'maximum-prefix '\\\n                            '{pp_maximum_prefix_max_prefix_no}'\n                        if attributes.value('pp_maximum_prefix_threshold'):\n                            line += ' {pp_maximum_prefix_threshold}'\n                        if attributes.value('pp_maximum_prefix_restart'):\n                            line += ' restart {pp_maximum_prefix_restart}'\n                        elif attributes.value(\n                            'pp_maximum_prefix_warning_only'):\n                            line += ' warning-only'\n                        configurations.append_line(\n                            attributes.format(line))\n\n                    # nxos: template peer-policy <pp_name> \\\n                    # default-originate\n                    # nxos: template peer-policy <pp_name> \\\n                    # default-originate route-map test\n                    if attributes.value('pp_default_originate'):\n                        if attributes.value(\n                            'pp_default_originate_route_map'):\n                            configurations.append_line(\n                                attributes.format('default-originate '\n                                    'route-map '\n                                    '{pp_default_originate_route_map}'))\n                        else:\n                            configurations.append_line(\n                                attributes.format('default-originate'))\n\n                    # nxos: template peer-policy <pp_name> \\\n                    # soft-reconfiguration inbound\n                    if attributes.value('pp_soft_reconfiguration'):\n                        configurations.append_line(\n                            attributes.format('soft-reconfiguration inbound'))\n\n                    # nxos: template peer-policy <pp_name> \\\n                    # soo 100:100\n                    if attributes.value('pp_soo'):\n                        configurations.append_line(\n                            attributes.format('soo {pp_soo}'))\n\n                return str(configurations)\n\n            def build_unconfig(self, apply=True, attributes=None, **kwargs):\n                return self.build_config(apply=apply, attributes=attributes,\n                                         unconfig=True, **kwargs)\n\n        class PeerSessionAttributes(ABC):\n\n            def build_config(self, apply=True, attributes=None,\n                             unconfig=False, **kwargs):\n                assert not apply\n                attributes = AttributesHelper(self, attributes)\n                configurations = CliConfigBuilder(unconfig=unconfig)\n                with configurations.submode_context(\n                    attributes.format(\n                        'template peer-session {ps_name}', force=True)):\n                    if unconfig and attributes.iswildcard:\n                        # Never reached!\n                        configurations.submode_unconfig()\n\n                    
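# The knobs below mirror the per-neighbor settings; a neighbor\n                    # opts in via 'inherit peer-session <name>' (see\n                    # NeighborAttributes further down).\n                    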
# nxos: template peer-session <peer_session> \\ bfd\n                    if attributes.value('ps_fall_over_bfd'):\n                        configurations.append_line(\n                            attributes.format('bfd'))\n\n                    # nxos: template peer-session <peer_session> \\\n                    # remote-as 500\n                    if attributes.value('ps_remote_as'):\n                        configurations.append_line(\n                            attributes.format('remote-as {ps_remote_as}'))\n\n                    # nxos: template peer-session <peer_session> \\\n                    # local-as 111 [no-prepend [replace-as [dual-as]]]\n                    if attributes.value('ps_local_as_as_no'):\n                        base_s = 'local-as {ps_local_as_as_no}'\n                        if attributes.value('ps_local_as_no_prepend'):\n                            base_s += ' no-prepend'\n                        if attributes.value('ps_local_as_replace_as'):\n                            base_s += ' replace-as'\n                        if attributes.value('ps_local_as_dual_as'):\n                            base_s += ' dual-as'\n                        configurations.append_line(\n                            attributes.format(base_s))\n\n                    # nxos: template peer-session <peer_session> \\\n                    # description PEER-SESSION\n                    if attributes.value('ps_description'):\n                        configurations.append_line(\n                            attributes.format('description {ps_description}'))\n\n                    # nxos: template peer-session <peer_session> \\\n                    # password 3 <PASSWORD>\n                    if attributes.value('ps_password_text'):\n                        configurations.append_line(\n                            attributes.format('password {ps_password_text}'))\n\n                    # nxos: template peer-session <peer_session> \\ shutdown\n                    if attributes.value('ps_shutdown'):\n                        configurations.append_line(\n                            attributes.format('shutdown'))\n\n                    # nxos: template peer-session <peer_session> \\\n                    # update-source loopback0\n                    if attributes.value('ps_update_source'):\n                        configurations.append_line(\n                            attributes.format('update-source '\n                                '{ps_update_source}'))\n\n                    # nxos: template peer-session <peer_session> \\\n                    # disable-connected-check\n                    if attributes.value('ps_disable_connected_check'):\n                        configurations.append_line(\n                            attributes.format('disable-connected-check'))\n\n                    # nxos: template peer-session <peer_session> \\\n                    # capability suppress 4-byte-as\n                    if attributes.value(\n                        'ps_suppress_four_byte_as_capability'):\n                        configurations.append_line(attributes.format(\n                            'capability suppress 4-byte-as'))\n\n                    # nxos: template peer-session <peer_session> \\\n                    # ebgp-multihop 255\n                    if attributes.value('ps_ebgp_multihop_max_hop'):\n                        configurations.append_line(\n                            attributes.format('ebgp-multihop '\n                                '{ps_ebgp_multihop_max_hop}'))\n                    elif attributes.value('ps_ebgp_multihop'):\n                        configurations.append_line(\n                            attributes.format('ebgp-multihop 255'))\n\n                    # nxos: template peer-session <peer_session> \\\n                    # transport connection-mode passive\n                    if attributes.value('ps_transport_connection_mode'):\n                        if attributes.value(\n                            'ps_transport_connection_mode').value ==\\\n                            'active':\n                            configurations.append_line(attributes.format(\n                                'no transport connection-mode passive'))\n\n                        if attributes.value(\n                            'ps_transport_connection_mode').value ==\\\n                            'passive':\n                            configurations.append_line(attributes.format(\n                                'transport connection-mode passive'))\n\n                    # nxos: template peer-session <peer_session> \\\n                    # timers 111 222\n                    if attributes.value('ps_keepalive_interval') and \\\n                            attributes.value('ps_hodltime'):\n                        configurations.append_line(\n                            attributes.format('timers '\n                                '{ps_keepalive_interval} {ps_hodltime}'))\n\n                return str(configurations)\n\n            def build_unconfig(self, apply=True, attributes=None, **kwargs):\n                return self.build_config(apply=apply, attributes=attributes,\n                                         unconfig=True, **kwargs)\n\n    class VrfAttributes(ABC):\n\n        def build_config(self, apply=True, attributes=None, 
unconfig=False,\n **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n with configurations.submode_context(\n None if self.vrf_name == 'default' else\n attributes.format('vrf {vrf_name}', force=True)):\n if self.vrf_name != 'default' and unconfig and \\\n attributes.iswildcard:\n configurations.submode_unconfig()\n\n # nxos: router bgp 100 / [instance someword] /\n # [vrf someword] /bestpath { always-compare-med |\n # med missing-as-worst | compare-routerid |\n # cost-community ignore }\n if attributes.value('always_compare_med'):\n configurations.append_line(attributes.format(\n 'bestpath always-compare-med'))\n if attributes.value('bestpath_compare_routerid'):\n configurations.append_line(attributes.format(\n 'bestpath compare-routerid'))\n if attributes.value('bestpath_cost_community_ignore'):\n configurations.append_line(attributes.format(\n 'bestpath cost-community ignore'))\n if attributes.value('bestpath_med_missing_at_worst'):\n configurations.append_line(attributes.format(\n 'bestpath med missing-as-worst'))\n\n # nxos: router bgp 100 / [instance someword] /\n # [vrf someword] / cluster-id <cluster_id>\n if attributes.value('cluster_id'):\n configurations.append_line(attributes.format(\n 'cluster-id {cluster_id}'))\n\n # nxos: router bgp 100 / [instance someword] /\n # [vrf someword] / confederation identifier \n # <confederation_identifier>\n if attributes.value('confederation_identifier'):\n configurations.append_line(\n attributes.format('confederation identifier '\n '{confederation_identifier}'))\n\n # nxos: router bgp 100 / [instance someword] /\n # [vrf someword] / confederation peers \n # <confederation_peers_as>\n if attributes.value('confederation_peers_as'):\n configurations.append_line(\n attributes.format('confederation peers '\n '{confederation_peers_as}'))\n\n # nxos: router bgp 100 / [vrf someword] / graceful-restart\n if attributes.value('graceful_restart'):\n configurations.append_line(\n attributes.format('graceful-restart'))\n\n # nxos: router bgp 100 / [vrf someword] /\n # graceful-restart restart-time 240\n if attributes.value('graceful_restart_restart_time'):\n configurations.append_line(\n attributes.format('graceful-restart restart-time '\n '{graceful_restart_restart_time}'))\n\n # nxos: router bgp 100 / [vrf someword] /\n # graceful-restart stalepath-time 600\n if attributes.value('graceful_restart_stalepath_time'):\n configurations.append_line(\n attributes.format('graceful-restart '\n 'stalepath-time '\n '{graceful_restart_stalepath_time}'))\n\n # nxos: router bgp 100 / [vrf someword] /\n # log-neighbor-changes\n if attributes.value('log_neighbor_changes'):\n configurations.append_line(\n attributes.format('log-neighbor-changes'))\n\n # nxos: router bgp 100 / [vrf someword] / router-id 1.2.3.4\n if attributes.value('router_id'):\n configurations.append_line(attributes.format(\n 'router-id {router_id}'))\n\n # nxos: router bgp 100 / [vrf someword] /\n # timers bgp <keepalive-interval> <holdtime>\n if attributes.value('keepalive_interval') and \\\n attributes.value('holdtime'):\n configurations.append_line(\n attributes.format('timers bgp '\n '{keepalive_interval} {holdtime}'))\n\n # nxos: router bgp 100 / [vrf someword] /\n # enforce-first-as\n if attributes.value('enforce_first_as'):\n configurations.append_line(\n attributes.format('enforce-first-as'))\n\n # nxos: router bgp 100 / [vrf someword] / \n # fast-external-fallover\n if 
attributes.value('fast_external_fallover'):\n                    configurations.append_line(\n                        attributes.format('fast-external-fallover'))\n\n                # nxos: router bgp 100 / [vrf someword] /\n                # dynamic-med-interval 78\n                if attributes.value('dynamic_med_interval'):\n                    configurations.append_line(\n                        attributes.format('dynamic-med-interval '\n                            '{dynamic_med_interval}'))\n\n                # nxos: router bgp 100 / [vrf someword] /\n                # shutdown\n                if attributes.value('shutdown'):\n                    configurations.append_line(\n                        attributes.format('shutdown'))\n\n                # nxos: router bgp 100 / [vrf someword] /\n                # flush-routes\n                if attributes.value('flush_routes'):\n                    configurations.append_line(\n                        attributes.format('flush-routes'))\n\n                # nxos: router bgp 100 / [vrf someword] /\n                # isolate\n                if attributes.value('isolate'):\n                    configurations.append_line(\n                        attributes.format('isolate'))\n\n                # nxos: router bgp 100 / [vrf someword] /\n                # disable-policy-batching ipv4 prefix-list <WORD>\n                if attributes.value('disable_policy_batching_ipv4'):\n                    configurations.append_line(\n                        attributes.format('disable-policy-batching '\n                            'ipv4 prefix-list '\n                            '{disable_policy_batching_ipv4}'))\n\n                # nxos: router bgp 100 / [vrf someword] /\n                # disable-policy-batching ipv6 prefix-list <WORD>\n                if attributes.value('disable_policy_batching_ipv6'):\n                    configurations.append_line(\n                        attributes.format('disable-policy-batching '\n                            'ipv6 prefix-list '\n                            '{disable_policy_batching_ipv6}'))\n\n                for neighbor_sub, neighbor_attributes in \\\n                        attributes.mapping_values('neighbor_attr'):\n                    configurations.append_block(\n                        neighbor_sub.build_config(apply=False,\n                            attributes=neighbor_attributes,\n                            unconfig=unconfig))\n\n                for address_family_sub, address_family_attributes in \\\n                        attributes.mapping_values(\n                            'address_family_attr', sort=True,\n                            keys=self.address_family_attr):\n                    configurations.append_block(\n                        address_family_sub.build_config(apply=False,\n                            attributes=address_family_attributes,\n                            unconfig=unconfig))\n\n            return str(configurations)\n\n        def build_unconfig(self, apply=True, attributes=None, **kwargs):\n            return self.build_config(apply=apply, attributes=attributes,\n                                     unconfig=True, **kwargs)\n\n        class AddressFamilyAttributes(ABC):\n\n            def build_config(self, apply=True, attributes=None,\n                             unconfig=False, **kwargs):\n                assert not apply\n                attributes = AttributesHelper(self, attributes)\n                configurations = CliConfigBuilder(unconfig=unconfig)\n\n                with configurations.submode_context(\n                    attributes.format('address-family '\n                                      '{address_family.value}',\n                                      force=True)):\n                    if unconfig and attributes.iswildcard:\n                        # Never reached!\n                        configurations.submode_unconfig()\n\n                    # nxos: address-family ipv4 unicast/\n                    # redistribute <protocol> route-map <route_policy>\n                    isis_route_policy_name = ''\n                    ospf_route_policy_name = ''\n                    rip_route_policy_name = ''\n                    if attributes.value('af_redist_isis_route_policy'):\n                        isis_route_policy_name = attributes.value(\n                            'af_redist_isis_route_policy')\n                    if attributes.value('af_redist_ospf_route_policy'):\n                        ospf_route_policy_name = attributes.value(\n                            'af_redist_ospf_route_policy')\n                    if attributes.value('af_redist_rip_route_policy'):\n                        rip_route_policy_name = attributes.value(\n                            'af_redist_rip_route_policy')\n\n                    # redistribute ospf <af_redist_ospf> route-map <route_policy>\n                    if attributes.value('af_redist_ospf') and \\\n                            attributes.value('af_redist_ospf_route_policy'):\n                        configurations.append_line(\n                            attributes.format(\n                                'redistribute ospf {af_redist_ospf} route-map '\n                                '{af_redist_ospf_route_policy}'))\n\n                    # redistribute rip <af_redist_rip> route-map <route_policy>\n                    if 
attributes.value('af_redist_rip') and \\\n                            attributes.value('af_redist_rip_route_policy'):\n                        configurations.append_line(\n                            attributes.format(\n                                'redistribute rip {af_redist_rip} route-map '\n                                '{af_redist_rip_route_policy}'))\n\n                    # redistribute isis <af_redist_isis> route-map <route_policy>\n                    if attributes.value('af_redist_isis') and \\\n                            attributes.value('af_redist_isis_route_policy'):\n                        configurations.append_line(\n                            attributes.format(\n                                'redistribute isis {af_redist_isis} route-map '\n                                '{af_redist_isis_route_policy}'))\n\n                    for redistribute, redistribute_attributes in \\\n                            attributes.sequence_values('redistributes'):\n                        assert redistribute_attributes.iswildcard\n                        cfg = 'redistribute'\n                        if isinstance(redistribute.protocol, Ospf) and\\\n                                ospf_route_policy_name:\n                            cfg += redistribute_attributes.format(\n                                ' ospf {protocol.pid}')\n                            cfg += ' route-map {}'.format(\n                                ospf_route_policy_name)\n                        elif isinstance(redistribute.protocol, Isis) and\\\n                                isis_route_policy_name:\n                            cfg += redistribute_attributes.format(\n                                ' isis {protocol.pid}')\n                            cfg += ' route-map {}'.format(\n                                isis_route_policy_name)\n                        elif isinstance(redistribute.protocol, Rip) and\\\n                                rip_route_policy_name:\n                            cfg += redistribute_attributes.format(\n                                ' rip {protocol.pid}')\n                            cfg += ' route-map {}'.format(\n                                rip_route_policy_name)\n                        else:\n                            raise ValueError(redistribute.protocol)\n                        configurations.append_line(cfg)\n\n                    # nxos: address-family ipv4 unicast/\n                    # redistribute static route-map <route_policy>\n                    if attributes.value('af_redist_static'):\n                        configurations.append_line(\n                            attributes.format(\n                                'redistribute static route-map '\n                                '{af_redist_static_route_policy}'))\n\n                    # nxos: address-family ipv4 unicast/\n                    # redistribute direct route-map <route_policy>\n                    if attributes.value('af_redist_connected'):\n                        configurations.append_line(\n                            attributes.format(\n                                'redistribute direct route-map '\n                                '{af_redist_connected_route_policy}'))\n\n                    # nxos: address-family ipv4 unicast/\n                    # dampening 25 1000 1500 255\n                    if attributes.value('af_dampening'):\n                        if attributes.value('af_dampening_half_life_time'):\n                            if attributes.value('af_dampening_reuse_time'):\n                                if attributes.value(\n                                    'af_dampening_suppress_time'):\n                                    if attributes.value(\n                                        'af_dampening_max_suppress_time'):\n                                        configurations.append_line(\n                                            attributes.format('dampening '\n                                                '{af_dampening_half_life_time} '\n                                                '{af_dampening_reuse_time} '\n                                                '{af_dampening_suppress_time} '\n                                                '{af_dampening_max_suppress_time}'))\n                        # dampening\n                        else:\n                            configurations.append_line(\n                                'dampening')\n\n                    # nxos: address-family ipv4 unicast/\n                    # nexthop route-map <af_nexthop_route_map>\n                    if attributes.value('af_nexthop_route_map'):\n                        configurations.append_line(\n                            attributes.format(\n                                'nexthop route-map '\n                                '{af_nexthop_route_map}'))\n\n                    # nxos: address-family ipv4 unicast/\n                    # nexthop trigger-delay critical\n                    # <af_nexthop_trigger_delay_critical> non-critical\n                    # <af_nexthop_trigger_delay_non_critical>\n                    if attributes.value('af_nexthop_trigger_enable'):\n                        if attributes.value(\n                            'af_nexthop_trigger_delay_critical') and \\\n                            attributes.value(\n                            'af_nexthop_trigger_delay_non_critical'):\n                            configurations.append_line(attributes.format(\n                                'nexthop trigger-delay critical '\n                                '{af_nexthop_trigger_delay_critical} '\n                                'non-critical '\n                                '{af_nexthop_trigger_delay_non_critical}'))\n\n                    # nxos: address-family ipv4 unicast/\n                    # client-to-client reflection\n                    if attributes.value('af_client_to_client_reflection'):\n                        configurations.append_line(\n                            attributes.format(\n                                'client-to-client reflection'))\n\n                    
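# Illustrative (assumed values): af_redist_rip='1' with\n                    # af_redist_rip_route_policy='RIP-MAP' renders\n                    # 'redistribute rip 1 route-map RIP-MAP' in the block above.\n\n                    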
# nxos: address-family ipv4 unicast/\n                    # distance <af_distance_extern_as>\n                    # <af_distance_internal_as> <af_distance_local>\n                    if attributes.value('af_distance_extern_as'):\n                        if attributes.value('af_distance_internal_as') and\\\n                                attributes.value('af_distance_local'):\n                            configurations.append_line(\n                                attributes.format(\n                                    'distance {af_distance_extern_as} '\n                                    '{af_distance_internal_as} '\n                                    '{af_distance_local}'))\n\n                    # nxos: address-family ipv4 unicast/\n                    # maximum-paths <af_maximum_paths_ebgp>\n                    if attributes.value('af_maximum_paths_ebgp'):\n                        configurations.append_line(\n                            attributes.format('maximum-paths '\n                                '{af_maximum_paths_ebgp}'))\n\n                    # nxos: address-family ipv4 unicast/\n                    # maximum-paths ibgp <af_maximum_paths_ibgp>\n                    if attributes.value('af_maximum_paths_ibgp'):\n                        configurations.append_line(\n                            attributes.format('maximum-paths ibgp '\n                                '{af_maximum_paths_ibgp}'))\n\n                    # nxos: address-family ipv4 unicast/\n                    # maximum-paths eibgp <af_maximum_paths_eibgp>\n                    if attributes.value('af_maximum_paths_eibgp'):\n                        configurations.append_line(\n                            attributes.format('maximum-paths eibgp '\n                                '{af_maximum_paths_eibgp}'))\n\n                    # nxos: address-family ipv4 unicast/\n                    # aggregate-address <af_aggregate_address_ipv4_address>\n                    # /<af_aggregate_address_ipv4_mask>\n                    # [ as-set | summary-only ]\n                    v = attributes.value(\n                        'af_aggregate_address_ipv4_address')\n                    k = attributes.value('af_aggregate_address_ipv4_mask')\n                    if v and k:\n                        base_s = 'aggregate-address '\\\n                            '{af_aggregate_address_ipv4_address}/'\\\n                            '{af_aggregate_address_ipv4_mask}'\n                        if attributes.value(\n                            'af_aggregate_address_as_set'):\n                            base_s += ' as-set'\n                        if attributes.value(\n                            'af_aggregate_address_summary_only'):\n                            base_s += ' summary-only'\n\n                        configurations.append_line(\n                            attributes.format(base_s))\n\n                    # nxos: address-family ipv4 unicast/\n                    # network <af_network_number> mask <af_network_mask>\n                    if attributes.value('af_network_number') and \\\n                            attributes.value('af_network_mask'):\n\n                        # Convert mask from /24 to 255.255.255.0 (example)\n                        dummy = '{}/{}'.format('0.0.0.0',\n                            attributes.value('af_network_mask'))\n                        mask = str(IPNetwork(dummy).netmask)\n\n                        # Build cfg string\n                        cfg_str = 'network {af_network_number}'\n                        cfg_str += ' mask {}'.format(mask)\n\n                        # Add configuration\n                        configurations.append_line(attributes.format(cfg_str))\n\n                    # nxos: address-family ipv6 unicast/ aggregate-address\n                    # <af_v6_aggregate_address_ipv6_address>\n                    if attributes.value(\n                        'af_v6_aggregate_address_ipv6_address'):\n                        configurations.append_line(\n                            attributes.format('aggregate-address '\n                                '{af_v6_aggregate_address_ipv6_address}'))\n\n                    # nxos: address-family ipv6 unicast/\n                    # network <af_v6_network_number> [route-map\n                    # <af_v6_network_route_map> ] +\n                    if attributes.value('af_v6_network_number'):\n                        if attributes.value('af_v6_network_route_map'):\n                            configurations.append_line(attributes.format(\n                                'network {af_v6_network_number} '\n                                'route-map {af_v6_network_route_map}'))\n                        else:\n                            configurations.append_line(attributes.format(\n                                'network {af_v6_network_number}'))\n\n                    # nxos: address-family ipv4 unicast/\n                    # allocate-label all\n                    if attributes.value('af_v6_allocate_label_all'):\n                        configurations.append_line(attributes.format(\n                            'allocate-label all'))\n\n                    # nxos: address-family ipv4 unicast/\n                    # retain route-target all\n                    if attributes.value('af_retain_rt_all'):\n                        configurations.append_line(attributes.format(\n                            'retain route-target all'))\n\n                    # nxos: address-family ipv4 unicast/\n                    # label-allocation-mode per-vrf |\n                    # no label-allocation-mode [ per-vrf ]\n                    if attributes.value('af_label_allocation_mode'):\n                        
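# note: only the per-vrf form is rendered here, whatever the\n                        # attribute's actual value is\n                        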
configurations.append_line(attributes.format(\n                            'label-allocation-mode per-vrf'))\n\n                    # nxos: address-family <af>/\n                    # advertise-pip | no advertise-pip\n                    if attributes.value('af_advertise_pip'):\n                        configurations.append_line(attributes.format(\n                            'advertise-pip'))\n\n                return str(configurations)\n\n            def build_unconfig(self, apply=True, attributes=None,\n                               **kwargs):\n                return self.build_config(apply=apply,\n                                         attributes=attributes,\n                                         unconfig=True, **kwargs)\n\n        class NeighborAttributes(ABC):\n\n            def build_config(self, apply=True, attributes=None,\n                             unconfig=False, **kwargs):\n                assert not apply\n                assert not kwargs, kwargs\n                attributes = AttributesHelper(self, attributes)\n                configurations = CliConfigBuilder(unconfig=unconfig)\n\n                with configurations.submode_context(\n                    attributes.format('neighbor {neighbor}',\n                                      force=True)):\n                    if unconfig and attributes.iswildcard:\n                        # Never reached!\n                        configurations.submode_unconfig()\n\n                    # nxos: neighbor <neighbor_id> \\ bfd\n                    if attributes.value('nbr_fall_over_bfd') or \\\n                            attributes.value('bfd_fast_detect'):\n                        configurations.append_line(\n                            attributes.format('bfd'))\n\n                    # nxos: neighbor <neighbor_id> \\\n                    # capability suppress 4-byte-as\n                    if attributes.value(\n                        'nbr_suppress_four_byte_as_capability'):\n                        configurations.append_line(\n                            attributes.format(\n                                'capability suppress 4-byte-as'))\n\n                    # nxos: neighbor <neighbor_id> \\\n                    # description <nbr_description>\n                    if attributes.value(\n                        'nbr_description'):\n                        configurations.append_line(\n                            attributes.format(\n                                'description {nbr_description}'))\n\n                    # nxos: neighbor <neighbor_id> \\\n                    # disable-connected-check\n                    if attributes.value(\n                        'nbr_disable_connected_check'):\n                        configurations.append_line(\n                            attributes.format(\n                                'disable-connected-check'))\n\n                    # nxos: neighbor <neighbor_id> \\\n                    # ebgp-multihop <nbr_ebgp_multihop_max_hop>\n                    # no ebgp-multihop [<nbr_ebgp_multihop_max_hop>]\n                    if attributes.value('nbr_ebgp_multihop'):\n                        if attributes.value('nbr_ebgp_multihop_max_hop'):\n                            configurations.append_line(\n                                attributes.format('ebgp-multihop '\n                                    '{nbr_ebgp_multihop_max_hop}'),\n                                unconfig_cmd='default ebgp-multihop')\n\n                    # nxos: neighbor <neighbor_id> \\\n                    # ebgp-multihop <ebgp_multihop_max_hop_count>\n                    if attributes.value(\n                        'ebgp_multihop_max_hop_count'):\n                        configurations.append_line(attributes.format(\n                            'ebgp-multihop '\n                            '{ebgp_multihop_max_hop_count}'))\n\n                    # nxos: neighbor <neighbor_id> \\ inherit peer-session\n                    # <nbr_inherit_peer_session>\n                    if attributes.value(\n                        'nbr_inherit_peer_session'):\n                        configurations.append_line(\n                            attributes.format(\n                                'inherit peer-session '\n                                '{nbr_inherit_peer_session}'))\n\n                    # nxos: neighbor <neighbor_id> \\ local-as\n                    # <nbr_local_as_as_no> [no-prepend [replace-as [dual-as]]]\n                    if attributes.value('nbr_local_as_as_no'):\n                        cfg = 'local-as {nbr_local_as_as_no}'\n                        if attributes.value('nbr_local_as_no_prepend'):\n                            cfg += ' no-prepend'\n                        if attributes.value('nbr_local_as_replace_as'):\n                            cfg += ' replace-as'\n                        if attributes.value('nbr_local_as_dual_as'):\n                            cfg += ' dual-as'\n\n                        configurations.append_line(\n                            attributes.format(cfg))\n\n                    # nxos: neighbor <neighbor_id> / remote-as\n                    # 
<nbr_remote_as>\n if attributes.value('nbr_remote_as'):\n configurations.append_line(\n attributes.format('remote-as {nbr_remote_as}'))\n\n # nxos: neighbor <neighbor_id> / remove-private-as\n if attributes.value('nbr_remove_private_as'):\n configurations.append_line(\n attributes.format('remove-private-as'))\n\n # nxos: neighbor <neighbor_id> / shutdown\n if attributes.value('nbr_shutdown'):\n configurations.append_line(\n attributes.format('shutdown'))\n\n # nxos: neighbor <neighbor_id> / timers\n # <nbr_keepalive_interval> <nbr_holdtime>\n if attributes.value('nbr_keepalive_interval') and \\\n attributes.value('nbr_holdtime'):\n configurations.append_line(\n attributes.format('timers '\n '{nbr_keepalive_interval} {nbr_holdtime}'))\n\n # nxos: neighbor <neighbor_id> / update-source\n # <nbr_update_source>\n if attributes.value('update_source') or \\\n attributes.value('nbr_update_source'):\n\n if hasattr(attributes.value('update_source'),\n 'name'):\n val = attributes.value('update_source').name\n else:\n val = self.nbr_update_source\n configurations.append_line(\n 'update-source {}'.format(val))\n\n # nxos: neighbor <neighbor_id> / password\n # <nbr_password_text>\n if attributes.value('nbr_password_text'):\n configurations.append_line(attributes.format(\n 'password {nbr_password_text}'))\n\n # nxos: neighbor <neighbor_id> \\\n # transport connection-mode\n # {nbr_transport_connection_mode}\n if attributes.value('nbr_transport_connection_mode'):\n configurations.append_line(attributes.format(\n 'transport connection-mode '\n '{nbr_transport_connection_mode.value}'))\n\n # nxos: neighbor <neighbor_id> \\\n # peer-type {nbr_peer_type>}\n if attributes.value('nbr_peer_type'):\n configurations.append_line(attributes.format(\n 'peer-type {nbr_peer_type.value}'))\n\n for address_family_sub, address_family_attributes in \\\n attributes.mapping_values(\n 'address_family_attr', sort=True):\n configurations.append_block(\n address_family_sub.build_config(apply=False,\n attributes=address_family_attributes,\n unconfig=unconfig))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None,\n **kwargs):\n return self.build_config(apply=apply,\n attributes=attributes,\n unconfig=True, **kwargs)\n\n class AddressFamilyAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None,\n unconfig=False, **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n with configurations.submode_context(\n attributes.format('address-family '\n '{address_family.value}',\n force=True)):\n if unconfig and attributes.iswildcard:\n # Never reached!\n configurations.submode_unconfig()\n\n # nxos: address-family <nbr_af_name> \\ allowas-in\n # [ <allowas-in-cnt> ]\n if attributes.value('nbr_af_allowas_in'):\n if attributes.value(\n 'nbr_af_allowas_in_as_number'):\n configurations.append_line(\n attributes.format('allowas-in '\n '{nbr_af_allowas_in_as_number}'))\n else:\n configurations.append_line(\n attributes.format('allowas-in'))\n\n # nxos: address-family <nbr_af_name> \\\n # inherit peer-policy <nbr_af_inherit_peer_policy>\n # <nbr_af_inherit_peer_seq>\n if attributes.value(\n 'nbr_af_inherit_peer_policy') and \\\n attributes.value('nbr_af_inherit_peer_seq'):\n configurations.append_line(\n attributes.format('inherit peer-policy '\n '{nbr_af_inherit_peer_policy} '\n '{nbr_af_inherit_peer_seq}'))\n\n # nxos: address-family <nbr_af_name> \\\n # maximum-prefix\n # 
<nbr_af_maximum_prefix_max_prefix_no>\n                        # [<nbr_af_maximum_prefix_threshold>]\n                        # [restart <nbr_af_maximum_prefix_restart> |\n                        # warning-only ]\n                        if attributes.value(\n                            'nbr_af_maximum_prefix_max_prefix_no'):\n                            line = 'maximum-prefix '\\\n                                '{nbr_af_maximum_prefix_max_prefix_no}'\n                            if attributes.value(\n                                'nbr_af_maximum_prefix_threshold'):\n                                line += ' {nbr_af_maximum_prefix_threshold}'\n                            if attributes.value(\n                                'nbr_af_maximum_prefix_restart'):\n                                line += ' restart '\\\n                                    '{nbr_af_maximum_prefix_restart}'\n                            elif attributes.value(\n                                'nbr_af_maximum_prefix_warning_only'):\n                                line += ' warning-only'\n                            configurations.append_line(\n                                attributes.format(line))\n\n                        # nxos: address-family <nbr_af_name> \\\n                        # route-map <nbr_af_route_map_name_in> in\n                        if attributes.value('nbr_af_route_map_name_in'):\n                            configurations.append_line(\n                                attributes.format('route-map '\n                                    '{nbr_af_route_map_name_in} in'))\n\n                        # nxos: address-family <nbr_af_name> \\\n                        # route-map <nbr_af_route_map_name_out> out\n                        if attributes.value('nbr_af_route_map_name_out'):\n                            configurations.append_line(\n                                attributes.format('route-map '\n                                    '{nbr_af_route_map_name_out} out'))\n\n                        # nxos: address-family <nbr_af_name> \\\n                        # route-reflector-client\n                        if attributes.value(\n                            'nbr_af_route_reflector_client'):\n                            configurations.append_line(\n                                attributes.format(\n                                    'route-reflector-client'))\n\n                        # nxos: address-family <nbr_af_name> \\\n                        # send-community\n                        if attributes.value('nbr_af_send_community'):\n                            if attributes.value(\n                                'nbr_af_send_community').value == \\\n                                'standard':\n                                configurations.append_line(\n                                    attributes.format('send-community'))\n\n                            # nxos: address-family <nbr_af_name> \\\n                            # send-community extended\n                            if attributes.value(\n                                'nbr_af_send_community').value == \\\n                                'extended':\n                                configurations.append_line(\n                                    attributes.format(\n                                        'send-community extended'))\n\n                            # nxos: address-family <nbr_af_name> \\\n                            # send-community\n                            # nxos: address-family <nbr_af_name> \\\n                            # send-community extended\n                            if attributes.value(\n                                'nbr_af_send_community').value == 'both':\n                                configurations.append_line(\n                                    attributes.format('send-community'))\n                                configurations.append_line(\n                                    attributes.format(\n                                        'send-community extended'))\n\n                        # nxos: address-family <nbr_af_name> \\\n                        # soft-reconfiguration inbound\n                        if attributes.value('nbr_af_soft_reconfiguration'):\n                            configurations.append_line(\n                                attributes.format('soft-reconfiguration '\n                                    'inbound'))\n\n                        # nxos: address-family <nbr_af_name> \\\n                        # next-hop-self\n                        if attributes.value('nbr_af_next_hop_self'):\n                            configurations.append_line(\n                                attributes.format('next-hop-self'))\n\n                        # nxos: address-family <nbr_af_name> \\\n                        # as-override\n                        if attributes.value('nbr_af_as_override'):\n                            configurations.append_line(\n                                attributes.format('as-override'))\n\n                        # nxos: address-family <nbr_af_name> \\\n                        # default-originate\n                        # nxos: address-family <nbr_af_name> \\\n                        # default-originate route-map test\n                        if attributes.value('nbr_af_default_originate'):\n                            if attributes.value(\n                                'nbr_af_default_originate_route_map'):\n                                configurations.append_line(\n                                    attributes.format('default-originate '\n                                        'route-map '\n                                        '{nbr_af_default_originate_route_map}'))\n                            else:\n                                configurations.append_line(\n                                    attributes.format('default-originate'))\n\n                        # nxos except n9k,n9kv : address-family <nbr_af_name> \\\n                        # suppress-signaling-protocol ldp\n                        if attributes.value('nbr_af_suppress_signaling_protocol_ldp'):\n                            configurations.append_line(\n                                attributes.format('suppress-signaling-protocol ldp'))\n\n                        # 
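Note: the IOS-XR builder models this same knob as 'signalling\n                        # ldp disable' (see the iosxr file above); here the rendered\n                        # NX-OS CLI is 'suppress-signaling-protocol ldp'.\n                        # 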
nxos: address-family <nbr_af_name> \\\n # soo 100:100\n if attributes.value('nbr_af_soo'):\n configurations.append_line(\n attributes.format('soo {nbr_af_soo}'))\n\n # nxos: address-family <nbr_af_name> \\\n # rewrite-evpn-rt-asn\n if attributes.value('nbr_af_rewrite_evpn_rt_asn'):\n configurations.append_line(\n attributes.format('rewrite-evpn-rt-asn'))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None,\n **kwargs):\n return self.build_config(apply=apply,\n attributes=attributes,\n unconfig=True, **kwargs)\n\n", "id": "5475357", "language": "Python", "matching_score": 3.9272100925445557, "max_stars_count": 0, "path": "pkgs/conf-pkg/src/genie/libs/conf/bgp/nxos/bgp.py" }, { "content": "#!/usr/bin/env python\n\nimport unittest\nfrom unittest.mock import Mock\n\nfrom genie.conf.tests import TestCase\nfrom genie.conf import Genie\nfrom genie.conf.base import Testbed, Device, Link, Interface\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning\n\nfrom genie.libs.conf.base import Redistribution\nfrom genie.libs.conf.bgp import Bgp\nfrom genie.libs.conf.isis import Isis\nfrom genie.libs.conf.ospf import Ospf\nfrom genie.libs.conf.route_policy import RoutePolicy\nfrom genie.libs.conf.vrf import Vrf\n\n\nclass test_bgp(TestCase):\n\n def test_init(self):\n\n Genie.testbed = testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxr')\n intf1 = Interface(device=dev1, name='GigabitEthernet0/0/0/1',\n ipv4='10.1.0.1/24')\n intf2 = Interface(device=dev1, name='GigabitEthernet0/0/0/2',\n ipv4='10.2.0.1/24')\n dev2 = Device(testbed=testbed, name='PE2', os='iosxr')\n intf3 = Interface(device=dev2, name='GigabitEthernet0/0/0/3',\n ipv4='10.1.0.2/24')\n intf4 = Interface(device=dev2, name='GigabitEthernet0/0/0/4',\n ipv4='10.2.0.2/24')\n vrf1 = Vrf(name='vrf1')\n vrf2 = Vrf(name='a') # must be < 'default'\n\n with self.assertNoWarnings():\n\n Genie.testbed = None\n with self.assertRaises(TypeError):\n bgp = Bgp()\n with self.assertRaises(TypeError):\n bgp = Bgp(testbed=testbed)\n Genie.testbed = testbed\n with self.assertRaises(TypeError):\n bgp = Bgp()\n bgp = Bgp(asn=100)\n self.assertIs(bgp.testbed, testbed)\n Genie.testbed = testbed\n bgp = Bgp(asn=100)\n self.assertIs(bgp.testbed, Genie.testbed)\n self.assertIs(bgp.testbed, testbed)\n\n dev1.add_feature(bgp)\n\n cfgs = bgp.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join([\n 'router bgp 100',\n ' exit',\n ]))\n\n dev2.add_feature(bgp)\n\n cfgs = bgp.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name, dev2.name])\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join([\n 'router bgp 100',\n ' exit',\n ]))\n self.assertMultiLineEqual(str(cfgs[dev2.name]), '\\n'.join([\n 'router bgp 100',\n ' exit',\n ]))\n\n ospf1 = Ospf(pid=1)\n isis1 = Isis(pid=1)\n rtepol1 = RoutePolicy(name='rtepol1')\n bgp.redistributes = (\n 'connected',\n Redistribution(ospf1, metric=20),\n Redistribution(isis1, route_policy=rtepol1),\n )\n bgp.device_attr[dev1].address_family_attr['ipv4 unicast']\n bgp.device_attr[dev2].address_family_attr['ipv4 unicast']\n\n cfgs = bgp.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name, dev2.name])\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join([\n 'router bgp 100',\n ' address-family ipv4 unicast',\n ' redistribute connected',\n ' redistribute ospf 1 metric 20',\n ' redistribute isis 1 route-policy rtepol1',\n ' exit',\n ' 
exit',\n ]))\n self.assertMultiLineEqual(str(cfgs[dev2.name]), '\\n'.join([\n 'router bgp 100',\n ' address-family ipv4 unicast',\n ' redistribute connected',\n ' redistribute ospf 1 metric 20',\n ' redistribute isis 1 route-policy rtepol1',\n ' exit',\n ' exit',\n ]))\n\n del bgp.redistributes\n\n bgp.device_attr[dev1].add_neighbor(intf3.ipv4)\n bgp.device_attr[dev1].neighbor_attr[intf3.ipv4].\\\n address_family_attr['ipv4 unicast']\n bgp.device_attr[dev1].add_vrf(vrf1)\n bgp.device_attr[dev1].vrf_attr[vrf1].address_family_attr['ipv4 unicast']\n bgp.device_attr[dev1].vrf_attr[vrf1].add_neighbor(intf4.ipv4)\n bgp.device_attr[dev1].vrf_attr[vrf1].neighbor_attr[intf4.ipv4].\\\n address_family_attr['ipv4 unicast']\n bgp.device_attr[dev1].add_vrf(vrf2)\n bgp.device_attr[dev1].vrf_attr[vrf2].address_family_attr['ipv4 unicast']\n\n cfgs = bgp.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name, dev2.name])\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join([\n 'router bgp 100',\n ' address-family ipv4 unicast',\n ' exit',\n ' neighbor 10.1.0.2',\n ' remote-as 100',\n ' address-family ipv4 unicast',\n ' exit',\n ' exit',\n ' vrf a',\n ' address-family ipv4 unicast',\n ' exit',\n ' exit',\n ' vrf vrf1',\n ' address-family ipv4 unicast',\n ' exit',\n ' neighbor 10.2.0.2',\n ' remote-as 100',\n ' address-family ipv4 unicast',\n ' exit',\n ' exit',\n ' exit',\n ' exit',\n ]))\n self.assertMultiLineEqual(str(cfgs[dev2.name]), '\\n'.join([\n 'router bgp 100',\n ' address-family ipv4 unicast',\n ' exit',\n ' exit',\n ]))\n\n cfgs = bgp.build_config(apply=False,\n attributes='device_attr__PE1__vrf_attr__default')\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join([\n 'router bgp 100',\n ' address-family ipv4 unicast',\n ' exit',\n ' neighbor 10.1.0.2',\n ' remote-as 100',\n ' address-family ipv4 unicast',\n ' exit',\n ' exit',\n ' exit',\n ]))\n\n cfgs = bgp.build_config(apply=False, attributes={\n 'device_attr': {\n '*': (),\n },\n })\n self.assertCountEqual(cfgs.keys(), [dev1.name, dev2.name])\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join([\n 'router bgp 100',\n ' exit',\n ]))\n self.assertMultiLineEqual(str(cfgs[dev2.name]), '\\n'.join([\n 'router bgp 100',\n ' exit',\n ]))\n\n cfgs = bgp.build_config(apply=False, attributes={\n 'device_attr': {\n 'PE1': 'vrf_attr__default',\n },\n })\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join([\n 'router bgp 100',\n ' address-family ipv4 unicast',\n ' exit',\n ' neighbor 10.1.0.2',\n ' remote-as 100',\n ' address-family ipv4 unicast',\n ' exit',\n ' exit',\n ' exit',\n ]))\n\n cfgs = bgp.build_config(apply=False, attributes={\n 'device_attr': {\n 'PE1': 'vrf_attr__default__neighbor_attr__10.1.0.2',\n },\n })\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join([\n 'router bgp 100',\n ' neighbor 10.1.0.2',\n ' remote-as 100',\n ' address-family ipv4 unicast',\n ' exit',\n ' exit',\n ' exit',\n ]))\n\n def setUp(self):\n\n Genie.testbed = testbed = Testbed()\n self.dev1 = Device(testbed=testbed, name='PE1', os='iosxr')\n self.intf1 = Interface(device=self.dev1,\n name='GigabitEthernet0/0/0/1', ipv4='10.1.0.1/24')\n self.intf2 = Interface(device=self.dev1,\n name='GigabitEthernet0/0/0/2', ipv4='10.2.0.1/24')\n self.dev2 = Device(testbed=testbed, name='PE2', os='iosxr')\n self.intf3 = Interface(device=self.dev2,\n 
name='GigabitEthernet0/0/0/3', ipv4='10.1.0.2/24')\n self.intf4 = Interface(device=self.dev2,\n name='GigabitEthernet0/0/0/4', ipv4='10.2.0.2/24')\n self.vrf1 = Vrf(name='vrf1')\n self.vrf2 = Vrf(name='default')\n\n def test_bgp_device_attr(self):\n # For failures\n self.maxDiff = None\n\n # shorten the line\n dev1 = self.dev1\n\n # ===== no instance ======= \n # Bgp object\n bgp = Bgp(bgp_id=100)\n dev1.add_feature(bgp)\n # Build config\n cfgs = bgp.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.\\\n join([\n 'router bgp 100',\n ' exit',\n ]))\n # ===== with instance ======= \n # Bgp object\n bgp = Bgp(bgp_id=100)\n dev1.add_feature(bgp)\n bgp.device_attr[dev1].instance_name = 'test'\n # Build config\n cfgs = bgp.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.\\\n join([\n 'router bgp 100 instance test',\n ' exit',\n ]))\n\n # ===== with different instances ======= \n # Bgp object\n bgp1 = Bgp(bgp_id=100, instance_name='test1')\n dev1.add_feature(bgp1)\n # Bgp object\n bgp2 = Bgp(bgp_id=100, instance_name='test2')\n dev1.add_feature(bgp2)\n # Bgp object\n bgp3 = Bgp(bgp_id=100, instance_name='test3')\n dev1.add_feature(bgp3)\n # Build config\n cfg1 = bgp1.build_config(apply=False)\n cfg2 = bgp2.build_config(apply=False)\n cfg3 = bgp3.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfg1[dev1.name]), '\\n'.\\\n join([\n 'router bgp 100 instance test1',\n ' exit',\n ]))\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfg2[dev1.name]), '\\n'.\\\n join([\n 'router bgp 100 instance test2',\n ' exit',\n ]))\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfg3[dev1.name]), '\\n'.\\\n join([\n 'router bgp 100 instance test3',\n ' exit',\n ]))\n\n # Build unconfig\n cfg1 = bgp1.build_unconfig(apply=False)\n cfg2 = bgp2.build_unconfig(apply=False)\n cfg3 = bgp3.build_unconfig(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfg1[dev1.name]), '\\n'.\\\n join([\n 'no router bgp 100 instance test1',\n ]))\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfg2[dev1.name]), '\\n'.\\\n join([\n 'no router bgp 100 instance test2',\n ]))\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfg3[dev1.name]), '\\n'.\\\n join([\n 'no router bgp 100 instance test3',\n ]))\n\n def test_cfg_l2vpn_vpls(self):\n\n Genie.testbed = testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxr')\n bgp = Bgp(asn=100,bgp_id=100)\n\n # Defining attributes\n nbr_af_name = 'l2vpn vpls'\n neighbor_id = '10.2.0.2'\n\n bgp.device_attr[dev1].vrf_attr[None].neighbor_attr[neighbor_id].\\\n address_family_attr[nbr_af_name].nbr_af_suppress_signaling_protocol_ldp = True\n\n bgp.device_attr[dev1]\n\n self.assertIs(bgp.testbed, testbed)\n dev1.add_feature(bgp)\n\n cfgs = bgp.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join(\n ['router bgp 100',\n ' neighbor 10.2.0.2',\n ' address-family l2vpn vpls',\n ' signalling ldp disable',\n ' exit',\n ' exit',\n ' exit',\n ]))\n\n uncfgs = bgp.build_unconfig(\n apply=False,\n attributes={'device_attr': {'*': {'vrf_attr':\n {'*': {'neighbor_attr': \\\n {'*': {'address_family_attr': \\\n {'*': \"nbr_af_suppress_signaling_protocol_ldp\"}}}}}}}})\n\n self.assertCountEqual(uncfgs.keys(), [dev1.name])\n self.maxDiff = 
None\n self.assertMultiLineEqual(str(uncfgs[dev1.name]), '\\n'.join(\n ['router bgp 100',\n ' neighbor 10.2.0.2',\n ' address-family l2vpn vpls',\n ' no signalling ldp disable',\n ' exit',\n ' exit',\n ' exit',\n ]))\n\n def test_bgp_peer_session_attr(self):\n # For failures\n self.maxDiff = None\n\n # shorten the line\n dev1 = self.dev1\n \n # Bgp object\n bgp = Bgp(bgp_id=100)\n dev1.add_feature(bgp)\n ps = 'PEERSESSION'\n bgp.device_attr[dev1].peer_session_attr[ps].ps_transport_connection_mode = 'passive'\n bgp.device_attr[dev1].peer_session_attr[ps].ps_suppress_four_byte_as_capability =True\n bgp.device_attr[dev1].peer_session_attr[ps].ps_description = 'some description'\n bgp.device_attr[dev1].peer_session_attr[ps].ps_disable_connected_check = True\n bgp.device_attr[dev1].peer_session_attr[ps].ps_shutdown = True\n bgp.device_attr[dev1].peer_session_attr[ps].ps_update_source = 'GigabitEthernet0/0/0/2'\n # Build config\n cfgs = bgp.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.\\\n join([\n 'router bgp 100',\n ' session-group PEERSESSION',\n ' capability suppress 4-byte-as',\n ' description some description',\n ' ignore-connected-check',\n ' shutdown',\n ' session-open-mode passive',\n ' update-source GigabitEthernet0/0/0/2',\n ' exit',\n ' exit',\n ]))\n\n def test_bgp_peer_policy_attr(self):\n # For failures\n self.maxDiff = None\n\n # shorten the line\n dev1 = self.dev1\n \n # Bgp object\n bgp = Bgp(bgp_id=100)\n dev1.add_feature(bgp)\n pp = 'PEERPOLICY'\n bgp.device_attr[dev1].peer_policy_attr[pp].pp_af_name = 'ipv4 unicast'\n bgp.device_attr[dev1].peer_policy_attr[pp].pp_allowas_in = True\n bgp.device_attr[dev1].peer_policy_attr[pp].pp_allowas_in_as_number = 5\n bgp.device_attr[dev1].peer_policy_attr[pp].pp_as_override = True\n bgp.device_attr[dev1].peer_policy_attr[pp].pp_default_originate = True\n bgp.device_attr[dev1].peer_policy_attr[pp].pp_default_originate_route_map = 'pp-map-test'\n bgp.device_attr[dev1].peer_policy_attr[pp].pp_route_map_name_in = 'allin'\n bgp.device_attr[dev1].peer_policy_attr[pp].pp_route_map_name_out = 'allout'\n bgp.device_attr[dev1].peer_policy_attr[pp].pp_maximum_prefix_max_prefix_no = 100\n bgp.device_attr[dev1].peer_policy_attr[pp].pp_maximum_prefix_threshold = 50\n bgp.device_attr[dev1].peer_policy_attr[pp].pp_maximum_prefix_restart = 99\n bgp.device_attr[dev1].peer_policy_attr[pp].pp_next_hop_self = True\n bgp.device_attr[dev1].peer_policy_attr[pp].pp_route_reflector_client = True\n bgp.device_attr[dev1].peer_policy_attr[pp].pp_send_community = 'standard'\n bgp.device_attr[dev1].peer_policy_attr[pp].pp_soft_reconfiguration = True\n # Build config\n cfgs = bgp.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.\\\n join([\n 'router bgp 100',\n ' af-group PEERPOLICY address-family ipv4 unicast',\n ' allowas-in 5',\n ' as-override',\n ' default-originate route-policy pp-map-test',\n ' route-policy allin in',\n ' route-policy allout out',\n ' maximum-prefix 100 50 restart 99',\n ' next-hop-self',\n ' route-reflector-client',\n ' send-community-ebgp',\n ' soft-reconfiguration inbound',\n ' exit',\n ' exit',\n ]))\n\n def test_bgp_vrf_attr(self):\n # For failures\n self.maxDiff = None\n\n # shorten the line\n dev1 = self.dev1\n\n # ===== none-default vrf ======= \n # Bgp object\n bgp = Bgp(bgp_id=100)\n dev1.add_feature(bgp)\n \n bgp.device_attr[dev1].add_vrf(self.vrf1)\n bgp.device_attr[dev1].vrf_attr[self.vrf1].rd = 
'100:1'\n bgp.device_attr[dev1].vrf_attr[self.vrf1].always_compare_med = True\n bgp.device_attr[dev1].vrf_attr[self.vrf1].graceful_restart = True\n bgp.device_attr[dev1].vrf_attr[self.vrf1].\\\n graceful_restart_restart_time = 100\n bgp.device_attr[dev1].vrf_attr[self.vrf1].\\\n graceful_restart_stalepath_time = 150\n bgp.device_attr[dev1].vrf_attr[self.vrf1].log_neighbor_changes = False\n bgp.device_attr[dev1].vrf_attr[self.vrf1].router_id = '1.1.1.1'\n bgp.device_attr[dev1].vrf_attr[self.vrf1].keepalive_interval = 10\n bgp.device_attr[dev1].vrf_attr[self.vrf1].holdtime = 100\n bgp.device_attr[dev1].vrf_attr[self.vrf1].enforce_first_as = False\n\n # Build config\n cfgs = bgp.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.\\\n join([\n 'router bgp 100',\n ' vrf vrf1',\n ' bgp bestpath med always',\n ' rd 100:1',\n ' bgp log neighbor changes disable',\n ' bgp router-id 1.1.1.1',\n ' timers bgp 10 100',\n ' bgp enforce-first-as disable',\n ' exit',\n ' exit',\n ]))\n\n # ===== default vrf ======= \n # Bgp object\n bgp = Bgp(bgp_id=100)\n dev1.add_feature(bgp)\n bgp.device_attr[dev1].add_vrf(self.vrf2)\n bgp.device_attr[dev1].vrf_attr[self.vrf2].always_compare_med = True\n bgp.device_attr[dev1].vrf_attr[self.vrf2].cluster_id = 10\n bgp.device_attr[dev1].vrf_attr[self.vrf2].confederation_identifier = 20\n bgp.device_attr[dev1].vrf_attr[self.vrf2].confederation_peers_as = '66'\n bgp.device_attr[dev1].vrf_attr[self.vrf2].graceful_restart = True\n bgp.device_attr[dev1].vrf_attr[self.vrf2].\\\n graceful_restart_restart_time = 100\n bgp.device_attr[dev1].vrf_attr[self.vrf2].\\\n graceful_restart_stalepath_time = 150\n bgp.device_attr[dev1].vrf_attr[self.vrf2].log_neighbor_changes = True\n bgp.device_attr[dev1].vrf_attr[self.vrf2].router_id = '1.1.1.1'\n bgp.device_attr[dev1].vrf_attr[self.vrf2].keepalive_interval = 10\n bgp.device_attr[dev1].vrf_attr[self.vrf2].holdtime = 100\n bgp.device_attr[dev1].vrf_attr[self.vrf2].fast_external_fallover = False\n\n # Build config\n cfgs = bgp.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.\\\n join([\n 'router bgp 100',\n ' bgp bestpath med always',\n ' bgp cluster-id 10',\n ' bgp confederation identifier 20',\n ' bgp confederation peers 66',\n ' bgp graceful-restart',\n ' bgp graceful-restart restart-time 100',\n ' bgp graceful-restart stalepath-time 150',\n ' bgp router-id 1.1.1.1',\n ' timers bgp 10 100',\n ' bgp fast-external-fallover disable',\n ' exit',\n ]))\n\n\n def test_bgp_vrf_family_attr(self):\n # For failures\n self.maxDiff = None\n\n # shorten the line\n dev1 = self.dev1\n\n # ===== none-default vrf ======= \n # Bgp object\n bgp = Bgp(bgp_id=100)\n dev1.add_feature(bgp)\n af = 'ipv4 unicast'\n af6 = 'ipv6 unicast'\n \n bgp.device_attr[dev1].add_vrf(self.vrf1)\n bgp.device_attr[dev1].vrf_attr[self.vrf1].address_family_attr[af]\n bgp.device_attr[dev1].vrf_attr[self.vrf1].address_family_attr[af].\\\n af_dampening = True\n bgp.device_attr[dev1].vrf_attr[self.vrf1].address_family_attr[af].\\\n af_dampening_route_map = 'route-test'\n bgp.device_attr[dev1].vrf_attr[self.vrf1].address_family_attr[af].\\\n af_dampening_half_life_time = 30\n bgp.device_attr[dev1].vrf_attr[self.vrf1].address_family_attr[af].\\\n af_client_to_client_reflection = True\n bgp.device_attr[dev1].vrf_attr[self.vrf1].address_family_attr[af].\\\n af_distance_extern_as = 100\n 
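# A minimal illustrative sketch (not the genie implementation): the three
# af_distance_* values being set here and just below are rendered by the
# builder as one IOS-XR line, 'distance bgp <extern> <internal> <local>'.
# Stand-alone equivalent, assuming plain values instead of attribute objects:
def render_distance(extern_as, internal_as, local):
    # All three values are required on the CLI, so emit nothing otherwise.
    if None in (extern_as, internal_as, local):
        return None
    return 'distance bgp {} {} {}'.format(extern_as, internal_as, local)
# render_distance(100, 110, 120) -> 'distance bgp 100 110 120',
# matching the expected output asserted further below.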
bgp.device_attr[dev1].vrf_attr[self.vrf1].address_family_attr[af].\\\n af_distance_internal_as = 110\n bgp.device_attr[dev1].vrf_attr[self.vrf1].address_family_attr[af].\\\n af_distance_local = 120\n bgp.device_attr[dev1].vrf_attr[self.vrf1].address_family_attr[af].\\\n af_maximum_paths_ebgp = 15\n bgp.device_attr[dev1].vrf_attr[self.vrf1].address_family_attr[af].\\\n af_maximum_paths_ibgp = 20\n bgp.device_attr[dev1].vrf_attr[self.vrf1].address_family_attr[af].\\\n af_maximum_paths_eibgp = 30\n bgp.device_attr[dev1].vrf_attr[self.vrf1].address_family_attr[af].\\\n af_aggregate_address_ipv4_address = '2.2.2.2'\n bgp.device_attr[dev1].vrf_attr[self.vrf1].address_family_attr[af].\\\n af_aggregate_address_ipv4_mask = 24\n bgp.device_attr[dev1].vrf_attr[self.vrf1].address_family_attr[af].\\\n af_aggregate_address_summary_only = True\n bgp.device_attr[dev1].vrf_attr[self.vrf1].address_family_attr[af].\\\n af_network_number = '3.3.3.3'\n bgp.device_attr[dev1].vrf_attr[self.vrf1].address_family_attr[af].\\\n af_network_mask = 24\n bgp.device_attr[dev1].vrf_attr[self.vrf1].address_family_attr[af].\\\n af_network_route_map = 'network-map-test'\n bgp.device_attr[dev1].vrf_attr[self.vrf1].address_family_attr[af].\\\n af_redist_isis = '1'\n bgp.device_attr[dev1].vrf_attr[self.vrf1].address_family_attr[af].\\\n af_redist_ospf = '1'\n bgp.device_attr[dev1].vrf_attr[self.vrf1].address_family_attr[af].\\\n af_redist_ospf_metric = 10\n bgp.device_attr[dev1].vrf_attr[self.vrf1].address_family_attr[af].\\\n af_redist_rip = True\n bgp.device_attr[dev1].vrf_attr[self.vrf1].address_family_attr[af].\\\n af_redist_rip_route_policy = 'rip-route-test'\n bgp.device_attr[dev1].vrf_attr[self.vrf1].address_family_attr[af6].\\\n af_v6_network_number = '3:3::/48'\n bgp.device_attr[dev1].vrf_attr[self.vrf1].address_family_attr[af6].\\\n af_v6_aggregate_address_ipv6_address = '2:2::/64'\n\n # Build config\n cfgs = bgp.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.\\\n join([\n 'router bgp 100',\n ' vrf vrf1',\n ' address-family ipv4 unicast',\n ' bgp dampening 30',\n ' distance bgp 100 110 120',\n ' maximum-paths ebgp 15',\n ' maximum-paths ibgp 20',\n ' maximum-paths eibgp 30',\n ' aggregate-address 2.2.2.2/24 summary-only',\n ' network 3.3.3.3/24 route-policy network-map-test',\n ' redistribute ospf 1 metric 10',\n ' redistribute rip route-policy rip-route-test',\n ' exit',\n ' address-family ipv6 unicast',\n ' aggregate-address 2:2::/64',\n ' network 3:3::/48',\n ' exit',\n ' exit',\n ' exit',\n ]))\n\n # ===== default vrf ======= \n # Bgp object\n bgp = Bgp(bgp_id=100)\n dev1.add_feature(bgp)\n af = 'vpnv4 unicast'\n bgp.device_attr[dev1].add_vrf(self.vrf2)\n bgp.device_attr[dev1].vrf_attr[self.vrf2].always_compare_med = True\n bgp.device_attr[dev1].vrf_attr[self.vrf2].address_family_attr[af].\\\n \taf_retain_rt_all = True\n\n # Build config\n cfgs = bgp.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.\\\n join([\n 'router bgp 100',\n ' bgp bestpath med always',\n ' address-family vpnv4 unicast',\n ' retain route-target all',\n ' exit',\n ' exit',\n ]))\n\n def test_bgp_vrf_neighbor_attr(self):\n # For failures\n self.maxDiff = None\n\n # shorten the line\n dev1 = self.dev1\n\n # ===== none-default vrf ======= \n # Bgp object\n bgp = Bgp(bgp_id=100)\n dev1.add_feature(bgp)\n nei = '10.1.1.1'\n nei6 = 'fc00:db20:35b:7399::5:1'\n \n bgp.device_attr[dev1].add_vrf(self.vrf1)\n 
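# Sketch of the per-neighbor rendering idea (an assumption, not the real
# builder): paired values such as keepalive/holdtime collapse into one CLI
# line under 'neighbor <ip>', e.g. 'timers 15 300' as asserted below.
def render_neighbor_timers(keepalive, holdtime):
    # Both values are needed to form the command; skip the line otherwise.
    if keepalive is None or holdtime is None:
        return None
    return 'timers {} {}'.format(keepalive, holdtime)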
bgp.device_attr[dev1].vrf_attr[self.vrf1].add_neighbor(nei)\n bgp.device_attr[dev1].vrf_attr[self.vrf1].neighbor_attr[nei].\\\n nbr_fall_over_bfd = True\n bgp.device_attr[dev1].vrf_attr[self.vrf1].neighbor_attr[nei].\\\n nbr_ebgp_multihop = True\n bgp.device_attr[dev1].vrf_attr[self.vrf1].neighbor_attr[nei].\\\n nbr_ebgp_multihop_max_hop = 30\n bgp.device_attr[dev1].vrf_attr[self.vrf1].neighbor_attr[nei].\\\n nbr_inherit_peer_session = 'PEERSESSION'\n bgp.device_attr[dev1].vrf_attr[self.vrf1].neighbor_attr[nei].\\\n nbr_local_as_as_no = 200\n bgp.device_attr[dev1].vrf_attr[self.vrf1].neighbor_attr[nei].\\\n nbr_remote_as = 500\n bgp.device_attr[dev1].vrf_attr[self.vrf1].neighbor_attr[nei].\\\n nbr_remove_private_as = True\n bgp.device_attr[dev1].vrf_attr[self.vrf1].neighbor_attr[nei].\\\n nbr_remove_private_as_af_name = 'ipv4 unicast'\n bgp.device_attr[dev1].vrf_attr[self.vrf1].neighbor_attr[nei].\\\n nbr_shutdown = True\n bgp.device_attr[dev1].vrf_attr[self.vrf1].neighbor_attr[nei].\\\n nbr_keepalive_interval = 15\n bgp.device_attr[dev1].vrf_attr[self.vrf1].neighbor_attr[nei].\\\n nbr_holdtime = 300\n bgp.device_attr[dev1].vrf_attr[self.vrf1].neighbor_attr[nei].\\\n nbr_update_source = 'GigabitEthernet0/0/0/1'\n bgp.device_attr[dev1].vrf_attr[self.vrf1].neighbor_attr[nei].\\\n nbr_password_text = '<PASSWORD>'\n bgp.device_attr[dev1].vrf_attr[self.vrf1].neighbor_attr[nei].\\\n nbr_transport_connection_mode = 'active'\n\n # Build config\n cfgs = bgp.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.\\\n join([\n 'router bgp 100',\n ' vrf vrf1',\n ' neighbor 10.1.1.1',\n ' bfd fast-detect',\n ' remote-as 500',\n ' update-source GigabitEthernet0/0/0/1',\n ' ebgp-multihop 30',\n ' use session-group PEERSESSION',\n ' local-as 200',\n ' address-family ipv4 unicast remove-private-AS',\n ' shutdown',\n ' timers 15 300',\n ' password <PASSWORD>',\n ' session-open-mode active',\n ' exit',\n ' exit',\n ' exit',\n ]))\n\n def test_bgp_vrf_neighbor_family_attr(self):\n # For failures\n self.maxDiff = None\n\n # shorten the line\n dev1 = self.dev1\n\n # ===== none-default vrf ======= \n # Bgp object\n bgp = Bgp(bgp_id=100)\n dev1.add_feature(bgp)\n nei = 'fc00:db20:35b:7399::5:1'\n af = 'ipv6 unicast'\n \n bgp.device_attr[dev1].add_vrf(self.vrf2)\n bgp.device_attr[dev1].vrf_attr[self.vrf2].address_family_attr[af]\n bgp.device_attr[dev1].vrf_attr[self.vrf2].add_neighbor(nei)\n bgp.device_attr[dev1].vrf_attr[self.vrf2].neighbor_attr[nei].\\\n nbr_remote_as = 100\n bgp.device_attr[dev1].vrf_attr[self.vrf2].neighbor_attr[nei].\\\n address_family_attr[af]\n bgp.device_attr[dev1].vrf_attr[self.vrf2].neighbor_attr[nei].\\\n address_family_attr[af].nbr_af_allowas_in = True\n bgp.device_attr[dev1].vrf_attr[self.vrf2].neighbor_attr[nei].\\\n address_family_attr[af].nbr_af_inherit_peer_policy = 'policy-test'\n bgp.device_attr[dev1].vrf_attr[self.vrf2].neighbor_attr[nei].\\\n address_family_attr[af].nbr_af_maximum_prefix_max_prefix_no = 666\n bgp.device_attr[dev1].vrf_attr[self.vrf2].neighbor_attr[nei].\\\n address_family_attr[af].nbr_af_maximum_prefix_warning_only = True\n bgp.device_attr[dev1].vrf_attr[self.vrf2].neighbor_attr[nei].\\\n address_family_attr[af].nbr_af_route_map_name_in = 'nei-af-in'\n bgp.device_attr[dev1].vrf_attr[self.vrf2].neighbor_attr[nei].\\\n address_family_attr[af].nbr_af_route_reflector_client = True\n bgp.device_attr[dev1].vrf_attr[self.vrf2].neighbor_attr[nei].\\\n address_family_attr[af].nbr_af_next_hop_self = True\n 
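# Illustrative sketch (an assumed simplification of the builder logic seen
# in the nxos bgp.py entry above): 'maximum-prefix' takes an optional
# threshold plus either 'restart <minutes>' or 'warning-only'.
def render_maximum_prefix(max_no, threshold=None, restart=None,
                          warning_only=False):
    if max_no is None:
        return None
    parts = ['maximum-prefix', str(max_no)]
    if threshold is not None:
        parts.append(str(threshold))
    if restart is not None:
        parts.extend(['restart', str(restart)])
    elif warning_only:
        parts.append('warning-only')
    return ' '.join(parts)
# render_maximum_prefix(666, warning_only=True)
#   -> 'maximum-prefix 666 warning-only', as asserted in this test.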
bgp.device_attr[dev1].vrf_attr[self.vrf2].neighbor_attr[nei].\\\n address_family_attr[af].nbr_af_default_originate = True\n\n # Build config\n cfgs = bgp.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.\\\n join([\n 'router bgp 100',\n ' address-family ipv6 unicast',\n ' exit',\n ' neighbor fc00:db20:35b:7399::5:1',\n ' remote-as 100',\n ' address-family ipv6 unicast',\n ' allowas-in',\n ' use af-group policy-test',\n ' maximum-prefix 666 warning-only',\n ' route-policy nei-af-in in',\n ' route-reflector-client',\n ' next-hop-self',\n ' default-originate',\n ' exit',\n ' exit',\n ' exit',\n ]))\n\n def test_bgp_with_attributes(self):\n # For failures\n self.maxDiff = None\n\n # shorten the line\n dev1 = self.dev1\n\n # ===== no instance ======= \n # Bgp object\n bgp = Bgp(bgp_id=100)\n dev1.add_feature(bgp)\n bgp.device_attr[dev1].vrf_attr[self.vrf1]\n attributes = {'device_attr': {dev1: {'vrf_attr': {self.vrf1: None}}}}\n # Build config\n cfgs = bgp.build_unconfig(apply=False, attributes=attributes)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.\\\n join([\n 'router bgp 100',\n ' no vrf vrf1',\n ' exit',\n ]))\n\n # ===== instance ======= \n # Bgp object\n bgp = Bgp(bgp_id=100, instance_name='test')\n dev1.add_feature(bgp)\n bgp.device_attr[dev1].vrf_attr[self.vrf1]\n bgp.device_attr[dev1].instance_name = 'test'\n attributes = {'device_attr': {dev1: {'vrf_attr': {self.vrf1: None}}}}\n # Build config\n cfgs = bgp.build_unconfig(apply=False, attributes=attributes)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.\\\n join([\n 'router bgp 100 instance test',\n ' no vrf vrf1',\n ' exit',\n ]))\n\n # ===== instance as attribute ======= \n # Bgp object\n bgp = Bgp(bgp_id=100)\n dev1.add_feature(bgp)\n bgp.device_attr[dev1].instance_name = 'test'\n attributes = {'device_attr': {dev1: {'instance_name': {'test': None}}}}\n # Build config\n cfgs = bgp.build_unconfig(apply=False, attributes=attributes)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.\\\n join([\n 'no router bgp 100 instance test',\n ]))\n\n # ===== instance as attribute ======= \n # Bgp object\n bgp = Bgp(bgp_id=100, instance='test')\n dev1.add_feature(bgp)\n bgp.device_attr[dev1].instance_name = 'test'\n attributes = {'device_attr': {dev1: {'instance_name': {'test': None}}}}\n # Build config\n cfgs = bgp.build_unconfig(apply=False, attributes=attributes)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.\\\n join([\n 'no router bgp 100 instance test',\n ]))\n\n\nif __name__ == '__main__':\n unittest.main()\n\n", "id": "3693367", "language": "Python", "matching_score": 5.653019428253174, "max_stars_count": 0, "path": "pkgs/conf-pkg/src/genie/libs/conf/bgp/iosxr/tests/test_bgp.py" }, { "content": "#!/usr/bin/env python\n\n# import python\nimport unittest\n\n# import genie\nfrom genie.conf.tests import TestCase\nfrom genie.conf import Genie\nfrom genie.conf.base import Testbed, Device, Interface\n\n# import genie.libs\nfrom genie.libs.conf.bgp import Bgp\nfrom genie.libs.conf.vrf import Vrf\n\n\nclass test_bgp(TestCase):\n\n # Old code test\n def test_init(self):\n\n Genie.testbed = testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxe')\n intf1 = Interface(device=dev1, name='GigabitEthernet0/0/1',\n ipv4='10.1.0.1/24')\n intf2 = Interface(device=dev1, 
name='GigabitEthernet0/0/2',\n ipv4='10.2.0.1/24')\n dev2 = Device(testbed=testbed, name='PE2', os='iosxe')\n intf3 = Interface(device=dev2, name='GigabitEthernet0/0/3',\n ipv4='10.1.0.2/24')\n intf4 = Interface(device=dev2, name='GigabitEthernet0/0/4',\n ipv4='10.2.0.2/24')\n vrf1 = Vrf(name='vrf1')\n vrf2 = Vrf(name='a')\n\n with self.assertNoWarnings():\n\n Genie.testbed = None\n with self.assertRaises(TypeError):\n bgp = Bgp()\n with self.assertRaises(TypeError):\n bgp = Bgp(testbed=testbed)\n Genie.testbed = testbed\n with self.assertRaises(TypeError):\n bgp = Bgp()\n bgp = Bgp(asn=100)\n self.assertIs(bgp.testbed, testbed)\n Genie.testbed = testbed\n bgp = Bgp(asn=100)\n self.assertIs(bgp.testbed, Genie.testbed)\n self.assertIs(bgp.testbed, testbed)\n\n dev1.add_feature(bgp)\n\n cfgs = bgp.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join([\n 'router bgp 100',\n ' address-family ipv4 unicast',\n ' exit',\n ' exit',\n ]))\n\n dev2.add_feature(bgp)\n\n cfgs = bgp.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name, dev2.name])\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join([\n 'router bgp 100',\n ' address-family ipv4 unicast',\n ' exit',\n ' exit',\n ]))\n self.assertMultiLineEqual(str(cfgs[dev2.name]), '\\n'.join([\n 'router bgp 100',\n ' address-family ipv4 unicast',\n ' exit',\n ' exit',\n ]))\n\n bgp.device_attr[dev1].add_neighbor(intf3.ipv4)\n bgp.device_attr[dev1].add_vrf(vrf1)\n bgp.device_attr[dev1].vrf_attr[vrf1].add_neighbor(intf4.ipv4)\n bgp.device_attr[dev1].vrf_attr[vrf1].neighbor_attr[intf4.ipv4].\\\n address_family_attr['ipv4 unicast'].activate = True\n bgp.device_attr[dev1].add_vrf(vrf2)\n\n cfgs = bgp.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name, dev2.name])\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join([\n 'router bgp 100',\n ' neighbor 10.1.0.2 remote-as 100',\n ' address-family ipv4 unicast',\n ' exit',\n ' address-family ipv4 unicast vrf a',\n ' exit',\n ' neighbor 10.2.0.2 remote-as 100',\n ' address-family ipv4 unicast vrf vrf1',\n ' neighbor 10.2.0.2 activate',\n ' exit',\n ' address-family ipv4 unicast vrf vrf1',\n ' exit',\n ' exit',\n ]))\n self.assertMultiLineEqual(str(cfgs[dev2.name]), '\\n'.join([\n 'router bgp 100',\n ' address-family ipv4 unicast',\n ' exit',\n ' exit',\n ]))\n\n cfgs = bgp.build_config(apply=False,\n attributes='device_attr__PE1__vrf_attr__default')\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join([\n 'router bgp 100',\n ' neighbor 10.1.0.2 remote-as 100',\n ' address-family ipv4 unicast',\n ' exit',\n ' exit',\n ]))\n\n cfgs = bgp.build_config(apply=False, attributes={\n 'device_attr': {\n '*': (),\n },\n })\n self.assertCountEqual(cfgs.keys(), [dev1.name, dev2.name])\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join([\n 'router bgp 100',\n ' exit',\n ]))\n self.assertMultiLineEqual(str(cfgs[dev2.name]), '\\n'.join([\n 'router bgp 100',\n ' exit',\n ]))\n\n cfgs = bgp.build_config(apply=False, attributes={\n 'device_attr': {\n 'PE1': 'vrf_attr__default',\n },\n })\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join([\n 'router bgp 100',\n ' neighbor 10.1.0.2 remote-as 100',\n ' address-family ipv4 unicast',\n ' exit',\n ' exit',\n ]))\n\n cfgs = bgp.build_config(apply=False, attributes={\n 'device_attr': {\n 'PE1': 
'vrf_attr__default__neighbor_attr__10.1.0.2',\n },\n })\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join([\n 'router bgp 100',\n ' neighbor 10.1.0.2 remote-as 100',\n ' exit',\n ]))\n\n def test_cfg(self):\n\n Genie.testbed = testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxe')\n intf1 = Interface(device=dev1, name='Ethernet0/0/1',\n ipv4='10.1.0.1/24')\n intf2 = Interface(device=dev1, name='Ethernet0/0/2',\n ipv4='10.2.0.1/24')\n dev2 = Device(testbed=testbed, name='PE2', os='iosxe')\n intf3 = Interface(device=dev2, name='Ethernet0/0/3',\n ipv4='10.1.0.2/24', ipv6='2001:111:222::/64')\n intf4 = Interface(device=dev2, name='Ethernet0/0/4',\n ipv4='10.2.0.2/24')\n\n with self.assertNoWarnings():\n\n Genie.testbed = None\n with self.assertRaises(TypeError):\n bgp = Bgp()\n with self.assertRaises(TypeError):\n bgp = Bgp(testbed=testbed)\n Genie.testbed = testbed\n with self.assertRaises(TypeError):\n bgp = Bgp()\n bgp = Bgp(bgp_id=100)\n self.assertIs(bgp.testbed, testbed)\n Genie.testbed = testbed\n bgp = Bgp(bgp_id=100)\n self.assertIs(bgp.testbed, Genie.testbed)\n self.assertIs(bgp.testbed, testbed)\n\n # Defining attributes\n af_name = 'ipv4 unicast'\n af_name2 = 'link-state'\n bgp.device_attr[dev1]\n bgp.device_attr[dev1].vrf_attr[None].always_compare_med = True\n bgp.device_attr[dev1].vrf_attr[None].address_family_attr[af_name]\n bgp.device_attr[dev1].vrf_attr[None].address_family_attr[af_name2]\n bgp.device_attr[dev1].vrf_attr[None].address_family_attr[af_name].\\\n af_label_allocation_mode = 'per-vrf'\n bgp.device_attr[dev1].vrf_attr[None].address_family_attr[af_name].\\\n af_dampening = True\n neighbor_id = intf4.ipv4\n bgp.device_attr[dev1].vrf_attr[None].add_neighbor(neighbor_id)\n bgp.device_attr[dev1].vrf_attr[None].neighbor_attr[neighbor_id]\n bgp.device_attr[dev1].vrf_attr[None].neighbor_attr[neighbor_id].\\\n nbr_remote_as = 200\n bgp.device_attr[dev1].vrf_attr[None].neighbor_attr[neighbor_id].\\\n nbr_fall_over_bfd = True\n bgp.device_attr[dev1].vrf_attr[None].neighbor_attr[neighbor_id].\\\n nbr_shutdown = True\n bgp.device_attr[dev1].vrf_attr[None].neighbor_attr[neighbor_id].\\\n nbr_update_source = 'loopback0'\n bgp.device_attr[dev1].vrf_attr[None].neighbor_attr[neighbor_id].\\\n nbr_remove_private_as = True\n bgp.device_attr[dev1].vrf_attr[None].neighbor_attr[neighbor_id].\\\n nbr_remove_private_as_af_name = 'ipv4 unicast'\n nbr_af_name = 'ipv4 multicast'\n bgp.device_attr[dev1].vrf_attr[None].neighbor_attr[neighbor_id].\\\n address_family_attr[nbr_af_name]\n bgp.device_attr[dev1].vrf_attr[None].neighbor_attr[neighbor_id].\\\n address_family_attr[nbr_af_name].nbr_af_allowas_in = True\n\n dev1.add_feature(bgp)\n\n cfgs = bgp.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join(\n ['router bgp 100',\n ' bgp always-compare-med',\n ' neighbor 10.2.0.2 fall-over bfd',\n ' neighbor 10.2.0.2 remote-as 200',\n ' address-family ipv4 unicast',\n ' neighbor 10.2.0.2 remove-private-as',\n ' exit',\n ' neighbor 10.2.0.2 shutdown',\n ' neighbor 10.2.0.2 update-source loopback0',\n ' address-family ipv4 multicast',\n ' neighbor 10.2.0.2 activate',\n ' neighbor 10.2.0.2 allowas-in',\n ' exit',\n ' address-family ipv4 unicast',\n ' bgp dampening',\n ' exit',\n ' address-family link-state',\n ' exit',\n ' exit',\n 'mpls label mode vrf default protocol bgp-vpnv4 per-vrf',\n ]))\n\n def test_partial_cfg(self):\n Genie.testbed 
= testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxe')\n\n with self.assertNoWarnings():\n\n Genie.testbed = None\n with self.assertRaises(TypeError):\n bgp = Bgp()\n with self.assertRaises(TypeError):\n bgp = Bgp(testbed=testbed)\n Genie.testbed = testbed\n with self.assertRaises(TypeError):\n bgp = Bgp()\n bgp = Bgp(bgp_id=100)\n self.assertIs(bgp.testbed, testbed)\n Genie.testbed = testbed\n bgp = Bgp(bgp_id=100)\n self.assertIs(bgp.testbed, Genie.testbed)\n self.assertIs(bgp.testbed, testbed)\n\n # Defining attributes\n bgp.device_attr[dev1]\n dev1.add_feature(bgp)\n ps_name = 'PEER-SESSION'\n bgp.device_attr[dev1].peer_session_attr[ps_name].ps_fall_over_bfd=\\\n True\n partial_cfg1 = bgp.build_config(\n apply=False,\n attributes={'device_attr':\\\n {'*':{'peer_session_attr':\\\n {'*':\"ps_fall_over_bfd\"}}}})\n\n self.assertCountEqual(partial_cfg1.keys(), [dev1.name])\n self.assertMultiLineEqual(str(partial_cfg1[dev1.name]), '\\n'.\\\n join([\n 'router bgp 100',\n ' template peer-session PEER-SESSION',\n ' fall-over bfd',\n ' exit',\n ' exit',\n ]))\n\n def test_cfg2(self):\n\n Genie.testbed = testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxe')\n intf1 = Interface(device=dev1, name='Ethernet0/0/1',\n ipv4='10.1.0.1/24')\n intf2 = Interface(device=dev1, name='Ethernet0/0/2',\n ipv4='10.2.0.1/24',\n ipv6='2001::1')\n dev2 = Device(testbed=testbed, name='PE2', os='iosxe')\n intf3 = Interface(device=dev2, name='Ethernet0/0/3',\n ipv4='10.1.0.2/24', ipv6='2001:111:222::/64')\n intf4 = Interface(device=dev2, name='Ethernet0/0/4',\n ipv4='10.2.0.2/24')\n\n with self.assertNoWarnings():\n\n Genie.testbed = None\n with self.assertRaises(TypeError):\n bgp = Bgp()\n with self.assertRaises(TypeError):\n bgp = Bgp(testbed=testbed)\n Genie.testbed = testbed\n with self.assertRaises(TypeError):\n bgp = Bgp()\n bgp = Bgp(bgp_id=100)\n self.assertIs(bgp.testbed, testbed)\n Genie.testbed = testbed\n bgp = Bgp(bgp_id=100)\n self.assertIs(bgp.testbed, Genie.testbed)\n self.assertIs(bgp.testbed, testbed)\n\n # Defining attributes\n vrf_id = 'vrf1'\n vrf1 = Vrf(vrf_id)\n pp_name = 'PEER-POLICY'\n ps_name = 'PEER-SESSION'\n bgp.device_attr[dev2].peer_policy_attr[pp_name].pp_allowas_in =\\\n True\n bgp.device_attr[dev2].peer_session_attr[ps_name].ps_fall_over_bfd=\\\n True\n bgp.device_attr[dev2].vrf_attr[None].cluster_id = '150'\n af_name = 'ipv6 unicast'\n bgp.device_attr[dev2].vrf_attr[None].address_family_attr[af_name]\n bgp.device_attr[dev2].vrf_attr[None].address_family_attr[af_name].\\\n af_nexthop_route_map = 'test'\n bgp.device_attr[dev2].vrf_attr[None].address_family_attr[af_name].\\\n af_dampening = True\n bgp.device_attr[dev2].vrf_attr[None].address_family_attr[af_name].\\\n af_dampening_half_life_time = 1\n bgp.device_attr[dev2].vrf_attr[None].address_family_attr[af_name].\\\n af_dampening_reuse_time = 10\n bgp.device_attr[dev2].vrf_attr[None].address_family_attr[af_name].\\\n af_dampening_suppress_time = 30\n bgp.device_attr[dev2].vrf_attr[None].address_family_attr[af_name].\\\n af_dampening_max_suppress_time = 2\n neighbor_id = intf2.ipv4\n bgp.device_attr[dev2].vrf_attr[vrf1].add_neighbor(neighbor_id)\n bgp.device_attr[dev2].vrf_attr[vrf1].neighbor_attr[neighbor_id].\\\n nbr_suppress_four_byte_as_capability = True\n\n dev2.add_feature(bgp)\n cfgs = bgp.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev2.name])\n self.maxDiff = None\n self.assertEqual(str(cfgs[dev2.name]), '\\n'.join(\n ['router bgp 100',\n ' bgp cluster-id 
150',\n ' address-family ipv6 unicast',\n ' bgp dampening 1 10 30 2',\n ' bgp nexthop route-map test',\n ' exit',\n ' address-family ipv4 unicast vrf vrf1',\n ' neighbor 10.2.0.1 dont-capability-negotiate four-octets-as',\n ' exit',\n ' template peer-session PEER-SESSION',\n ' fall-over bfd',\n ' exit',\n ' template peer-policy PEER-POLICY',\n ' allowas-in',\n ' exit',\n ' exit',\n ]))\n\n def test_cfg3(self):\n\n Genie.testbed = testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxe')\n intf1 = Interface(device=dev1, name='Ethernet0/0/1',\n ipv4='10.1.0.1/24')\n intf2 = Interface(device=dev1, name='Ethernet0/0/2',\n ipv4='10.2.0.1/24',\n ipv6='2001::1')\n dev2 = Device(testbed=testbed, name='PE2', os='iosxe')\n intf3 = Interface(device=dev2, name='Ethernet0/0/3',\n ipv4='10.1.0.2/24', ipv6='2001:111:222::/64')\n intf4 = Interface(device=dev2, name='Ethernet0/0/4',\n ipv4='10.2.0.2/24')\n\n with self.assertNoWarnings():\n\n Genie.testbed = None\n with self.assertRaises(TypeError):\n bgp = Bgp()\n with self.assertRaises(TypeError):\n bgp = Bgp(testbed=testbed)\n Genie.testbed = testbed\n with self.assertRaises(TypeError):\n bgp = Bgp()\n bgp = Bgp(bgp_id=100)\n self.assertIs(bgp.testbed, testbed)\n Genie.testbed = testbed\n bgp = Bgp(bgp_id=100)\n self.assertIs(bgp.testbed, Genie.testbed)\n self.assertIs(bgp.testbed, testbed)\n\n # Defining attributes\n vrf_id = 'vrf1'\n vrf1 = Vrf(vrf_id)\n pp_name = 'PEER-POLICY'\n ps_name = 'PEER-SESSION'\n bgp.device_attr[dev2].peer_policy_attr[pp_name].pp_as_override =\\\n True\n bgp.device_attr[dev2].peer_session_attr[ps_name].ps_remote_as=\\\n 12\n bgp.device_attr[dev2].vrf_attr[None].cluster_id = '150'\n af_name = 'ipv6 unicast'\n bgp.device_attr[dev2].vrf_attr[None].address_family_attr[af_name]\n bgp.device_attr[dev2].vrf_attr[None].address_family_attr[af_name].\\\n af_client_to_client_reflection = True\n nbr_af_name = 'ipv6 unicast'\n neighbor_id2 = intf2.ipv6\n bgp.device_attr[dev2].vrf_attr[vrf1].neighbor_attr[neighbor_id2].\\\n address_family_attr[nbr_af_name]\n bgp.device_attr[dev2].vrf_attr[vrf1].neighbor_attr[neighbor_id2].\\\n address_family_attr[nbr_af_name].\\\n nbr_af_maximum_prefix_max_prefix_no = 300000\n\n dev2.add_feature(bgp)\n cfgs = bgp.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev2.name])\n self.maxDiff = None\n self.assertEqual(str(cfgs[dev2.name]), '\\n'.join(\n ['router bgp 100',\n ' bgp cluster-id 150',\n ' address-family ipv6 unicast',\n ' bgp client-to-client reflection',\n ' exit',\n ' address-family ipv6 unicast vrf vrf1',\n ' neighbor 2001::1 activate',\n ' neighbor 2001::1 maximum-prefix 300000',\n ' exit',\n ' template peer-session PEER-SESSION',\n ' remote-as 12',\n ' exit',\n ' template peer-policy PEER-POLICY',\n ' as-override',\n ' exit',\n ' exit',\n ]))\n\n def test_uncfg(self):\n\n Genie.testbed = testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxe')\n intf1 = Interface(device=dev1, name='Ethernet0/0/1',\n ipv4='10.1.0.1/24')\n intf2 = Interface(device=dev1, name='Ethernet0/0/2',\n ipv4='10.2.0.1/24')\n dev2 = Device(testbed=testbed, name='PE2', os='iosxe')\n intf3 = Interface(device=dev2, name='Ethernet0/0/3',\n ipv4='10.1.0.2/24', ipv6='2001:111:222::/64')\n intf4 = Interface(device=dev2, name='Ethernet0/0/4',\n ipv4='10.2.0.2/24')\n\n with self.assertNoWarnings():\n\n Genie.testbed = None\n with self.assertRaises(TypeError):\n bgp = Bgp()\n with self.assertRaises(TypeError):\n bgp = Bgp(testbed=testbed)\n Genie.testbed = testbed\n with 
self.assertRaises(TypeError):\n bgp = Bgp()\n bgp = Bgp(bgp_id=100)\n self.assertIs(bgp.testbed, testbed)\n Genie.testbed = testbed\n bgp = Bgp(bgp_id=100)\n self.assertIs(bgp.testbed, Genie.testbed)\n self.assertIs(bgp.testbed, testbed)\n\n dev1.add_feature(bgp)\n\n uncfgs = bgp.build_unconfig(apply=False)\n self.assertCountEqual(uncfgs.keys(), [dev1.name])\n self.assertMultiLineEqual(str(uncfgs[dev1.name]), '\\n'.join([\n 'no router bgp 100',\n ]))\n\n # Defining attributes\n af_name = 'vpnv4 unicast'\n af_name2 = 'link-state'\n vrf_id = 'vrf1'\n vrf1 = Vrf(vrf_id)\n bgp.device_attr[dev1]\n bgp.device_attr[dev1].vrf_attr[None].always_compare_med = True\n bgp.device_attr[dev1].vrf_attr[None].address_family_attr[af_name]\n bgp.device_attr[dev1].vrf_attr[None].address_family_attr[af_name2]\n bgp.device_attr[dev1].vrf_attr[None].address_family_attr[af_name].\\\n af_dampening = True\n bgp.device_attr[dev1].vrf_attr[None].address_family_attr[af_name].\\\n af_dampening_half_life_time = 1\n bgp.device_attr[dev1].vrf_attr[None].address_family_attr[af_name].\\\n af_dampening_reuse_time = 10\n bgp.device_attr[dev1].vrf_attr[None].address_family_attr[af_name].\\\n af_dampening_suppress_time = 30\n bgp.device_attr[dev1].vrf_attr[None].address_family_attr[af_name].\\\n af_dampening_max_suppress_time = 2\n neighbor_id = intf4.ipv4\n bgp.device_attr[dev1].vrf_attr[vrf1].add_neighbor(neighbor_id)\n bgp.device_attr[dev1].vrf_attr[vrf1].neighbor_attr[neighbor_id]\n bgp.device_attr[dev1].vrf_attr[vrf1].neighbor_attr[neighbor_id].\\\n nbr_fall_over_bfd = True\n\n\n partial_uncfg1 = bgp.build_unconfig(\n apply=False,\n attributes={'device_attr':\\\n {'*':{'vrf_attr':\\\n {'*':\"always_compare_med\"}}}})\n self.assertCountEqual(partial_uncfg1.keys(), [dev1.name])\n self.assertMultiLineEqual(str(partial_uncfg1[dev1.name]), '\\n'.\\\n join([\n 'router bgp 100',\n ' no bgp always-compare-med',\n ' exit',\n ]))\n\n partial_uncfg2 = bgp.build_unconfig(\\\n apply=False,\n attributes={'device_attr':\\\n {'*':{'vrf_attr':'*'}}})\n self.assertCountEqual(partial_uncfg2.keys(), [dev1.name])\n self.assertMultiLineEqual(str(partial_uncfg2[dev1.name]), '\\n'.\\\n join([\n 'router bgp 100',\n ' no bgp always-compare-med',\n ' no address-family link-state',\n ' no address-family vpnv4 unicast',\n ' no neighbor 10.2.0.2',\n ' no address-family ipv4 unicast vrf vrf1',\n ' exit',\n ]))\n\n partial_uncfg3 = bgp.build_unconfig(\n apply=False,\n attributes={'device_attr':\\\n {'*': {'vrf_attr':\\\n {'*': {'neighbor_attr':\\\n {'*':\"nbr_fall_over_bfd\"}}}}}})\n self.assertCountEqual(partial_uncfg3.keys(), [dev1.name])\n self.assertMultiLineEqual(str(partial_uncfg3[dev1.name]), '\\n'.\\\n join([\n 'router bgp 100',\n ' address-family ipv4 unicast vrf vrf1',\n ' no neighbor 10.2.0.2 fall-over bfd',\n ' exit',\n ' exit',\n ]))\n\n # Defining neighbor address family\n nbr_af_name = 'ipv4 unicast'\n bgp.device_attr[dev1].vrf_attr[vrf1].neighbor_attr[neighbor_id].\\\n address_family_attr[nbr_af_name]\n bgp.device_attr[dev1].vrf_attr[vrf1].neighbor_attr[neighbor_id].\\\n address_family_attr[nbr_af_name].nbr_af_allowas_in = True\n\n partial_uncfg4 = bgp.build_unconfig(\n apply=False,\n attributes={'device_attr': {'*': {'vrf_attr':\n {'*': {'neighbor_attr':\\\n {'*': {'address_family_attr':\\\n {'*':\"nbr_af_allowas_in\"}}}}}}}})\n self.assertCountEqual(partial_uncfg4.keys(), [dev1.name])\n self.assertMultiLineEqual(str(partial_uncfg4[dev1.name]), '\\n'.\\\n join([\n 'router bgp 100',\n ' address-family ipv4 unicast vrf vrf1',\n ' no 
neighbor 10.2.0.2 activate',\n ' no neighbor 10.2.0.2 allowas-in',\n ' exit',\n ' exit',\n ]))\n\n partial_uncfg5 = bgp.build_unconfig(\n apply=False,\n attributes={'device_attr':\\\n {'*':{'vrf_attr':\\\n {'*':{'address_family_attr':\\\n {'*':\"af_dampening\"}}}}}})\n self.assertCountEqual(partial_uncfg5.keys(), [dev1.name])\n self.assertMultiLineEqual(str(partial_uncfg5[dev1.name]), '\\n'.\\\n join([\n 'router bgp 100',\n ' address-family vpnv4 unicast',\n ' no bgp dampening',\n ' exit',\n ' exit',\n ]))\n\n def test_uncfg2(self):\n\n Genie.testbed = testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxe')\n\n with self.assertNoWarnings():\n\n Genie.testbed = None\n with self.assertRaises(TypeError):\n bgp = Bgp()\n with self.assertRaises(TypeError):\n bgp = Bgp(testbed=testbed)\n Genie.testbed = testbed\n with self.assertRaises(TypeError):\n bgp = Bgp()\n bgp = Bgp(bgp_id=100)\n self.assertIs(bgp.testbed, testbed)\n Genie.testbed = testbed\n bgp = Bgp(bgp_id=100)\n self.assertIs(bgp.testbed, Genie.testbed)\n self.assertIs(bgp.testbed, testbed)\n\n dev1.add_feature(bgp)\n\n # Defining attributes\n af_name = 'ipv4 unicast'\n bgp.device_attr[dev1]\n bgp.device_attr[dev1].vrf_attr[None].always_compare_med = True\n bgp.device_attr[dev1].vrf_attr[None].address_family_attr[af_name]\n bgp.device_attr[dev1].vrf_attr[None].address_family_attr[af_name].\\\n af_label_allocation_mode = 'per-vrf'\n\n uncfg = bgp.build_unconfig(apply=False)\n self.assertCountEqual(uncfg.keys(), [dev1.name])\n self.assertMultiLineEqual(str(uncfg[dev1.name]), '\\n'.\\\n join([\n 'no router bgp 100',\n 'no mpls label mode vrf default protocol bgp-vpnv4 per-vrf',\n ]))\n\n def test_uncfg3(self):\n\n Genie.testbed = testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxe')\n dev2 = Device(testbed=testbed, name='PE2', os='iosxe')\n intf4 = Interface(device=dev2, name='Ethernet0/0/4',\n ipv4='10.2.0.2/24')\n\n\n with self.assertNoWarnings():\n\n Genie.testbed = None\n with self.assertRaises(TypeError):\n bgp = Bgp()\n with self.assertRaises(TypeError):\n bgp = Bgp(testbed=testbed)\n Genie.testbed = testbed\n with self.assertRaises(TypeError):\n bgp = Bgp()\n bgp = Bgp(bgp_id=100)\n self.assertIs(bgp.testbed, testbed)\n Genie.testbed = testbed\n bgp = Bgp(bgp_id=100)\n self.assertIs(bgp.testbed, Genie.testbed)\n self.assertIs(bgp.testbed, testbed)\n\n dev1.add_feature(bgp)\n\n # Defining attributes\n bgp.device_attr[dev1]\n neighbor_id = intf4.ipv4\n bgp.device_attr[dev1].vrf_attr[None].add_neighbor(neighbor_id)\n bgp.device_attr[dev1].vrf_attr[None].neighbor_attr[neighbor_id]\n\n partial_uncfg = bgp.build_unconfig(\n apply=False,\n attributes={'device_attr':\\\n {'*':{'vrf_attr':\\\n {'*':{'neighbor_attr':'*'}}}}})\n\n self.assertCountEqual(partial_uncfg.keys(), [dev1.name])\n self.assertMultiLineEqual(str(partial_uncfg[dev1.name]), '\\n'.\\\n join([\n 'router bgp 100',\n ' no neighbor 10.2.0.2',\n ' exit',\n ]))\n\n def test_cfg_l2vpn_vpls(self):\n\n Genie.testbed = testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxe')\n bgp = Bgp(bgp_id=100)\n\n # Defining attributes\n nbr_af_name = 'l2vpn vpls'\n neighbor_id = '10.2.0.2'\n\n bgp.device_attr[dev1].vrf_attr[None].neighbor_attr[neighbor_id].\\\n address_family_attr[nbr_af_name].nbr_af_suppress_signaling_protocol_ldp = True\n\n bgp.device_attr[dev1]\n\n self.assertIs(bgp.testbed, testbed)\n dev1.add_feature(bgp)\n\n cfgs = bgp.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n 
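# Sketch (an assumption about the filter syntax used throughout these
# tests, not the genie implementation): the string form of an attributes
# filter appears to be the '__'-joined path of the nested-dict form used
# in the build_unconfig() call just below.
def path_to_filter(path):
    """'a__b__c' -> {'a': {'b': 'c'}}"""
    keys = path.split('__')
    node = keys[-1]
    for key in reversed(keys[:-1]):
        node = {key: node}
    return node
# path_to_filter('device_attr__PE1__vrf_attr__default')
#   == {'device_attr': {'PE1': {'vrf_attr': 'default'}}}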
self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join(\n ['router bgp 100',\n ' address-family l2vpn vpls',\n ' neighbor 10.2.0.2 activate',\n ' neighbor 10.2.0.2 suppress-signaling-protocol ldp',\n ' exit',\n ' exit',\n ]))\n\n uncfgs = bgp.build_unconfig(\n apply=False,\n attributes={'device_attr': {'*': {'vrf_attr':\n {'*': {'neighbor_attr': \\\n {'*': {'address_family_attr': \\\n {'*': \"nbr_af_suppress_signaling_protocol_ldp\"}}}}}}}})\n\n self.assertCountEqual(uncfgs.keys(), [dev1.name])\n self.maxDiff = None\n self.assertMultiLineEqual(str(uncfgs[dev1.name]), '\\n'.join(\n ['router bgp 100',\n ' address-family l2vpn vpls',\n ' no neighbor 10.2.0.2 activate',\n ' exit',\n ' exit',\n ]))\nif __name__ == '__main__':\n unittest.main()\n\n", "id": "12064609", "language": "Python", "matching_score": 3.3616020679473877, "max_stars_count": 0, "path": "pkgs/conf-pkg/src/genie/libs/conf/bgp/iosxe/tests/test_bgp.py" }, { "content": "#!/usr/bin/env python\n\nimport unittest\nfrom unittest.mock import Mock\nimport re\n\nfrom genie.conf import Genie\nfrom genie.conf.base import Testbed, Device, Link, Interface\n\nfrom genie.libs.conf.ldp import Ldp\nfrom genie.libs.conf.address_family import AddressFamily, AddressFamilySubAttributes\nfrom genie.libs.conf.vrf import Vrf\nfrom genie.libs.conf.access_list import AccessList\nfrom genie.libs.conf.base.neighbor import IPv4LsrNeighbor, IPv6Neighbor, IPLsrNeighborSubAttributes\n\n\n\nclass test_ldp(unittest.TestCase):\n\n def setUp(self):\n self.maxDiff = None\n tb = Genie.testbed = Testbed()\n self.dev1 = Device(testbed=tb, name='PE1', os='iosxe')\n self.dev2 = Device(testbed=tb, name='PE2', os='iosxe')\n self.i1 = Interface(name='GigabitEthernet0/0/1', device=self.dev1)\n self.i2 = Interface(name='GigabitEthernet0/0/2', device=self.dev2)\n self.i3 = Interface(name='GigabitEthernet0/0/3', device=self.dev1)\n self.i4 = Interface(name='GigabitEthernet0/0/4', device=self.dev2)\n self.i5 = Interface(name='GigabitEthernet0/0/5', device=self.dev1)\n self.i6 = Interface(name='GigabitEthernet0/0/6', device=self.dev2)\n self.i7 = Interface(name='GigabitEthernet0/0/7', device=self.dev1)\n self.i8 = Interface(name='GigabitEthernet0/0/8', device=self.dev2)\n self.link = Link(name='1_2_1')\n self.link.connect_interface(interface=self.i1)\n self.link.connect_interface(interface=self.i2)\n self.link2 = Link(name='1_2_2')\n self.link2.connect_interface(interface=self.i3)\n self.link2.connect_interface(interface=self.i4)\n self.link3 = Link(name='1_2_3')\n self.link3.connect_interface(interface=self.i5)\n self.link3.connect_interface(interface=self.i6)\n self.link4 = Link(name='1_2_4')\n self.link4.connect_interface(interface=self.i7)\n self.link4.connect_interface(interface=self.i8)\n self.assertSetEqual(\n set(self.link.find_interfaces()),\n set([self.i1, self.i2]))\n self.assertSetEqual(\n set(self.dev1.find_interfaces()),\n set([self.i1, self.i3, self.i5, self.i7]))\n self.assertSetEqual(\n set(self.dev2.find_interfaces()),\n set([self.i2, self.i4, self.i6, self.i8]))\n\n def test_01_interface_only(self):\n\n ldp = Ldp()\n self.assertSetEqual(set(ldp.devices), set([]))\n self.assertSetEqual(set(ldp.links), set([]))\n\n\n self.link.add_feature(ldp)\n self.link2.add_feature(ldp)\n self.link3.add_feature(ldp)\n self.link4.add_feature(ldp)\n self.assertCountEqual(ldp.devices, [self.dev1, self.dev2])\n self.assertSetEqual(set(ldp.links), set([self.link, self.link2, self.link3, self.link4]))\n self.assertSetEqual(set(ldp.interfaces), set([self.i1, self.i2, self.i3, 
self.i4, self.i5, self.i6, self.i7, self.i8]))\n\n # ==Test interface-only config==\n out = ldp.build_config(apply=False)\n if 1:\n self.assertRegex(str(out['PE1']), 'interface GigabitEthernet0/0/1')\n self.assertRegex(str(out['PE1']), 'interface GigabitEthernet0/0/3')\n self.assertRegex(str(out['PE1']), 'interface GigabitEthernet0/0/5')\n self.assertRegex(str(out['PE1']), 'interface GigabitEthernet0/0/7')\n self.assertRegex(str(out['PE2']), 'interface GigabitEthernet0/0/2')\n self.assertRegex(str(out['PE2']), 'interface GigabitEthernet0/0/4')\n self.assertRegex(str(out['PE2']), 'interface GigabitEthernet0/0/6')\n self.assertRegex(str(out['PE2']), 'interface GigabitEthernet0/0/8')\n\n def test_1_top_level(self):\n\n acl1 = AccessList(name='1')\n acl2 = AccessList(name='2')\n acl3 = AccessList(name='3')\n acl4 = AccessList(name='4')\n acl5 = AccessList(name='5')\n acl6 = AccessList(name='6')\n acl7 = AccessList(name='7')\n acl8 = AccessList(name='8')\n acl9 = AccessList(name='9')\n acl11 = AccessList(name='11')\n acl22 = AccessList(name='22')\n acl33 = AccessList(name='33')\n acl44 = AccessList(name='44')\n\n ldp = Ldp()\n self.link.add_feature(ldp)\n\n # ==Test top-level config==\n ldp.hello_holdtime = 100\n ldp.hello_interval = 200\n ldp.targeted_hello_accept = True\n ldp.gr = True\n ldp.gr_fwdstate_holdtime = 60\n ldp.nsr = True\n ldp.device_attr['PE1'].session_holdtime = 444\n ldp.session_protection = True\n ldp.session_protection_dur = 222\n ldp.device_attr['PE2'].session_protection_dur = 333\n ldp.session_protection_for_acl = acl1\n\n out = ldp.build_config(apply=False)\n if 1:\n self.assertCountEqual(out.keys(), ['PE1', 'PE2'])\n self.assertMultiLineEqual(str(out['PE1']), '\\n'.join([\n 'mpls label protocol ldp',\n 'mpls ip',\n 'mpls ldp nsr',\n 'mpls ldp graceful-restart',\n 'mpls ldp graceful-restart timers forwarding-holding 60',\n 'mpls ldp discovery hello interval 200',\n 'mpls ldp discovery hello holdtime 100',\n 'mpls ldp discovery targeted-hello accept',\n 'mpls ldp session protection for 1 222',\n 'interface GigabitEthernet0/0/1',\n ' mpls ip',\n ' exit',\n ]))\n\n self.assertMultiLineEqual(str(out['PE2']), '\\n'.join([\n 'mpls label protocol ldp',\n 'mpls ip',\n 'mpls ldp nsr',\n 'mpls ldp graceful-restart',\n 'mpls ldp graceful-restart timers forwarding-holding 60',\n 'mpls ldp discovery hello interval 200',\n 'mpls ldp discovery hello holdtime 100',\n 'mpls ldp discovery targeted-hello accept',\n 'mpls ldp session protection for 1 333',\n 'interface GigabitEthernet0/0/2',\n ' mpls ip',\n ' exit',\n ]))\n\n if 1:\n # set the per-attr variables that are inherited by VRF\n ldp.device_attr['PE1'].vrf_attr['default'].router_id = self.i1\n ldp.device_attr['PE2'].vrf_attr['default'].router_id = self.i2\n\n out = ldp.build_config(apply=False)\n self.assertRegex(str(out['PE1']), 'router-id GigabitEthernet0/0/1')\n\n self.assertRegex(str(out['PE2']), 'router-id GigabitEthernet0/0/2')\n\n # Check unconfig - <nsr> config filter\n out = ldp.build_unconfig(apply=False, attributes='device_attr__*__nsr')\n if 1:\n self.assertRegex(str(out['PE1']), 'no mpls ldp nsr')\n self.assertRegex(str(out['PE2']), 'no mpls ldp nsr')\n\n # Check unconfig - <gr> config filter=\n out = ldp.build_unconfig(apply=False, attributes='device_attr__*__gr')\n if 1:\n self.assertRegex(str(out['PE1']), 'no mpls ldp graceful-restart')\n self.assertRegex(str(out['PE2']), 'no mpls ldp graceful-restart')\n\n def test_2_per_vrf(self):\n\n acl1 = AccessList(name='1')\n acl2 = AccessList(name='2')\n acl3 = 
AccessList(name='3')\n acl4 = AccessList(name='4')\n acl5 = AccessList(name='5')\n acl6 = AccessList(name='6')\n acl7 = AccessList(name='7')\n acl8 = AccessList(name='8')\n acl9 = AccessList(name='9')\n acl11 = AccessList(name='11')\n acl22 = AccessList(name='22')\n acl33 = AccessList(name='33')\n acl44 = AccessList(name='44')\n\n ldp = Ldp()\n self.link.add_feature(ldp)\n vrf = Vrf(name='vrf1')\n ldp.add_force_vrf(vrf)\n vrf2 = Vrf(name='vrf2')\n ldp.add_force_vrf(vrf2)\n ldp.device_attr['PE1'].router_id = self.i1\n ldp.device_attr['PE2'].router_id = self.i2\n ldp.device_attr['PE1'].vrf_attr['vrf1'].router_id = self.i3\n ldp.device_attr['PE2'].vrf_attr['vrf1'].router_id = self.i4\n\n # Test per vrf config\n \n # Check vrf config - full config\n out = ldp.build_config(apply=False)\n\n self.assertMultiLineEqual(str(out['PE1']), '\\n'.join([\n 'mpls label protocol ldp',\n 'mpls ip',\n 'mpls ldp router-id GigabitEthernet0/0/1',\n 'mpls ldp router-id vrf vrf1 GigabitEthernet0/0/3',\n 'interface GigabitEthernet0/0/1',\n ' mpls ip',\n ' exit',\n ]))\n\n\n self.assertMultiLineEqual(str(out['PE2']), '\\n'.join([\n 'mpls label protocol ldp',\n 'mpls ip',\n 'mpls ldp router-id GigabitEthernet0/0/2',\n 'mpls ldp router-id vrf vrf1 GigabitEthernet0/0/4',\n 'interface GigabitEthernet0/0/2',\n ' mpls ip',\n ' exit',\n ]))\n\n\n # Test VRF unconfig\n out = ldp.build_unconfig(apply=False, attributes='device_attr__*__vrf_attr')\n self.assertTrue('no mpls ldp router-id GigabitEthernet0/0/1' in str(out['PE1']))\n self.assertTrue('no mpls ldp router-id GigabitEthernet0/0/2' in str(out['PE2']))\n self.assertTrue('no mpls ldp router-id vrf vrf1 GigabitEthernet0/0/3' in str(out['PE1']))\n self.assertTrue('no mpls ldp router-id vrf vrf1 GigabitEthernet0/0/4' in str(out['PE2']))\n\n out2 = ldp.build_config(apply=False, attributes='device_attr__*__vrf_attr')\n self.assertTrue('mpls ldp router-id GigabitEthernet0/0/1' in str(out2['PE1']))\n self.assertTrue('mpls ldp router-id GigabitEthernet0/0/2' in str(out2['PE2']))\n self.assertTrue('mpls ldp router-id vrf vrf1 GigabitEthernet0/0/3' in str(out2['PE1']))\n self.assertTrue('mpls ldp router-id vrf vrf1 GigabitEthernet0/0/4' in str(out2['PE2']))\n\n # Check vrf config - vrf <vrf1> config filter\n out3 = ldp.build_config(apply=False, attributes='device_attr__*__vrf_attr__vrf1')\n self.assertTrue('mpls ldp router-id vrf vrf1 GigabitEthernet0/0/3' in str(out3['PE1']))\n self.assertTrue('mpls ldp router-id vrf vrf1 GigabitEthernet0/0/4' in str(out3['PE2']))\n # interface output from default vrf should not be built\n self.assertNotRegex(str(out['PE1']), '(?s)interface Gig')\n\n # Check vrf unconfig - vrf <vrf1> config filter\n out = ldp.build_unconfig(apply=False, attributes='device_attr__*__vrf_attr__vrf1')\n self.assertEqual(str(out['PE1']), 'no mpls ldp router-id vrf vrf1 GigabitEthernet0/0/3')\n self.assertEqual(str(out['PE2']), 'no mpls ldp router-id vrf vrf1 GigabitEthernet0/0/4')\n\n # Check vrf unconfig - vrf <vrf1> config filter\n out = ldp.build_unconfig(apply=False, attributes='device_attr__*__vrf_attr__*__router_id')\n self.assertTrue('no mpls ldp router-id GigabitEthernet0/0/1' in str(out['PE1']))\n self.assertTrue('no mpls ldp router-id GigabitEthernet0/0/2' in str(out['PE2']))\n self.assertTrue('no mpls ldp router-id vrf vrf1 GigabitEthernet0/0/3' in str(out['PE1']))\n self.assertTrue('no mpls ldp router-id vrf vrf1 GigabitEthernet0/0/4' in str(out['PE2']))\n\n # Check vrf unconfig - vrf <vrf1> config filter after adding router-id to vrf2\n 
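# Illustrative sketch (assumed form, not the genie builder): the router-id
# lines checked below follow 'mpls ldp router-id [vrf <name>] <interface>'.
def render_ldp_router_id(interface, vrf=None):
    if vrf is None or vrf == 'default':
        return 'mpls ldp router-id {}'.format(interface)
    return 'mpls ldp router-id vrf {} {}'.format(vrf, interface)
# render_ldp_router_id('GigabitEthernet0/0/6', vrf='vrf2')
#   -> 'mpls ldp router-id vrf vrf2 GigabitEthernet0/0/6'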
ldp.device_attr['PE2'].vrf_attr['vrf2'].router_id = self.i6\n out = ldp.build_unconfig(apply=False, attributes='device_attr__*__vrf_attr__*__router_id')\n self.assertTrue('mpls ldp router-id GigabitEthernet0/0/1' in str(out['PE1']))\n self.assertTrue('mpls ldp router-id GigabitEthernet0/0/2' in str(out['PE2']))\n self.assertTrue('mpls ldp router-id vrf vrf1 GigabitEthernet0/0/3' in str(out['PE1']))\n self.assertTrue('mpls ldp router-id vrf vrf1 GigabitEthernet0/0/4' in str(out['PE2']))\n self.assertTrue('mpls ldp router-id vrf vrf2 GigabitEthernet0/0/6' in str(out['PE2']))\n\n\n # Check vrf config - vrf <vrf2> config filter\n out3 = ldp.build_config(apply=False, attributes='device_attr__*__vrf_attr__vrf2')\n self.assertEqual(str(out3['PE2']) ,'mpls ldp router-id vrf vrf2 GigabitEthernet0/0/6')\n\n def test_3_per_intf(self):\n\n acl1 = AccessList(name='1')\n acl2 = AccessList(name='2')\n acl3 = AccessList(name='3')\n acl4 = AccessList(name='4')\n acl5 = AccessList(name='5')\n acl6 = AccessList(name='6')\n acl7 = AccessList(name='7')\n acl8 = AccessList(name='8')\n acl9 = AccessList(name='9')\n acl11 = AccessList(name='11')\n acl22 = AccessList(name='22')\n acl33 = AccessList(name='33')\n acl44 = AccessList(name='44')\n\n ldp = Ldp()\n self.link.add_feature(ldp)\n self.link4.add_feature(ldp)\n\n ldp.hello_interval = 88\n ldp.device_attr['PE2'].hello_holdtime = 99\n\n\n # ==Test per interface config===\n out = ldp.build_config(apply=False)\n if 1:\n\n self.assertMultiLineEqual(str(out['PE1']), '\\n'.join([\n 'mpls label protocol ldp',\n 'mpls ip',\n 'mpls ldp discovery hello interval 88',\n 'interface GigabitEthernet0/0/1',\n ' mpls ip',\n ' exit',\n 'interface GigabitEthernet0/0/7',\n ' mpls ip',\n ' exit',\n ]))\n\n\n self.assertMultiLineEqual(str(out['PE2']), '\\n'.join([\n 'mpls label protocol ldp',\n 'mpls ip',\n 'mpls ldp discovery hello interval 88',\n 'mpls ldp discovery hello holdtime 99',\n 'interface GigabitEthernet0/0/2',\n ' mpls ip',\n ' exit',\n 'interface GigabitEthernet0/0/8',\n ' mpls ip',\n ' exit',\n ]))\n\n # Check intf unconfig - intf <GigabitEthernet0/0/0/8> config filter\n out = ldp.build_unconfig(apply=False, attributes='device_attr__*__interface_attr__GigabitEthernet0/0/8')\n self.assertMultiLineEqual(str(out['PE2']), '\\n'.join([\n 'interface GigabitEthernet0/0/8',\n ' no mpls ip',\n ' exit',\n ]))\n\n\n def test_4_per_neighbor(self):\n\n ldp = Ldp()\n self.link.add_feature(ldp)\n nbr1 = IPv4LsrNeighbor('1.2.3.4:0')\n nbr2 = IPv4LsrNeighbor('1.2.3.5:0')\n nbr3 = IPv4LsrNeighbor('1.2.3.6:0')\n \n\n ldp.device_attr['PE1'].password = '<PASSWORD>'\n ldp.device_attr['PE2'].password = '<PASSWORD>'\n ldp.device_attr['PE1'].vrf_attr['default'].neighbors = [nbr1, nbr2, nbr3]\n ldp.device_attr['PE1'].vrf_attr['default'].neighbor_attr[nbr2].password = '<PASSWORD>'\n ldp.device_attr['PE1'].vrf_attr['default'].neighbor_attr[nbr3].password = '<PASSWORD>'\n\n # ==Test per neighbor config==\n out = ldp.build_config(apply=False)\n\n self.assertMultiLineEqual(str(out['PE1']), '\\n'.join([\n 'mpls label protocol ldp',\n 'mpls ip',\n 'mpls ldp neighbor 1.2.3.4 password <PASSWORD>',\n 'mpls ldp neighbor 1.2.3.5 password <PASSWORD>',\n 'mpls ldp neighbor 1.2.3.6 password <PASSWORD>',\n 'interface GigabitEthernet0/0/1',\n ' mpls ip',\n ' exit',\n ]))\n\n\n self.assertMultiLineEqual(str(out['PE2']), '\\n'.join([\n 'mpls label protocol ldp',\n 'mpls ip',\n 'interface GigabitEthernet0/0/2',\n ' mpls ip',\n ' exit',\n ]))\n\n # Check nbr config - nbr <1.2.3.5:0'> config filter\n out = 
ldp.build_config(apply=False, attributes='device_attr__*__vrf_attr__*__neighbor_attr__1.2.3.5:0')\n self.assertMultiLineEqual(str(out['PE1']), '\\n'.join([\n 'mpls ldp neighbor 1.2.3.5 password <PASSWORD>',\n ]))\n\n out = ldp.build_unconfig(apply=False, attributes='device_attr__*__vrf_attr__*__neighbor_attr__1.2.3.5:0__password')\n self.assertMultiLineEqual(str(out['PE1']), '\\n'.join([\n 'no mpls ldp neighbor 1.2.3.5 password <PASSWORD>',\n ]))\n \n out = ldp.build_unconfig(apply=False, attributes='device_attr__*__vrf_attr__*__neighbor_attr__1.2.3.6:0__password')\n self.assertMultiLineEqual(str(out['PE1']), '\\n'.join([\n 'no mpls ldp neighbor 1.2.3.6 password <PASSWORD>',\n ]))\n\n def test_8_per_vrf_per_neighbor(self):\n\n ldp = Ldp()\n self.link.add_feature(ldp)\n vrf = Vrf(name='vrf1')\n ldp.add_force_vrf(vrf)\n nbr1 = IPv4LsrNeighbor('1.2.3.4:0')\n nbr2 = IPv4LsrNeighbor('1.2.3.5:0')\n nbr3 = IPv4LsrNeighbor('1.2.3.6:0')\n\n ldp.device_attr['PE1'].vrf_attr['vrf1'].neighbors = [nbr1, nbr2, nbr3]\n ldp.device_attr['PE1'].vrf_attr['vrf1'].neighbor_attr['1.2.3.5:0'].password = '<PASSWORD>'\n ldp.device_attr['PE1'].vrf_attr['vrf1'].neighbor_attr['1.2.3.6:0'].password = '<PASSWORD>'\n\n # ==Test per vrf per neighbor config==\n out = ldp.build_config(apply=False)\n\n self.assertMultiLineEqual(str(out['PE1']), '\\n'.join([\n 'mpls label protocol ldp',\n 'mpls ip',\n 'mpls ldp neighbor vrf vrf1 1.2.3.5 password <PASSWORD>',\n 'mpls ldp neighbor vrf vrf1 1.2.3.6 password <PASSWORD>',\n 'interface GigabitEthernet0/0/1',\n ' mpls ip',\n ' exit',\n ]))\n\nif __name__ == '__main__':\n unittest.main()\n\n", "id": "3046986", "language": "Python", "matching_score": 7.4402570724487305, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/ldp/iosxe/tests/test_ldp.py" }, { "content": "#!/usr/bin/env python\n\nimport unittest\nfrom unittest.mock import Mock\nimport re\n\nfrom genie.conf import Genie\nfrom genie.conf.base import Testbed, Device, Link, Interface\n\nfrom genie.libs.conf.ldp import Ldp\nfrom genie.libs.conf.address_family import AddressFamily, AddressFamilySubAttributes\nfrom genie.libs.conf.vrf import Vrf\nfrom genie.libs.conf.access_list import AccessList\nfrom genie.libs.conf.base.neighbor import IPv4LsrNeighbor, IPv6Neighbor, IPLsrNeighborSubAttributes\n\n\n\nclass test_ldp(unittest.TestCase):\n\n def setUp(self):\n self.maxDiff = None\n tb = Genie.testbed = Testbed()\n self.dev1 = Device(testbed=tb, name='PE1', os='iosxr')\n self.dev2 = Device(testbed=tb, name='PE2', os='iosxr')\n self.i1 = Interface(name='GigabitEthernet0/0/0/1', device=self.dev1)\n self.i2 = Interface(name='GigabitEthernet0/0/0/2', device=self.dev2)\n self.i3 = Interface(name='GigabitEthernet0/0/0/3', device=self.dev1)\n self.i4 = Interface(name='GigabitEthernet0/0/0/4', device=self.dev2)\n self.i5 = Interface(name='GigabitEthernet0/0/0/5', device=self.dev1)\n self.i6 = Interface(name='GigabitEthernet0/0/0/6', device=self.dev2)\n self.i7 = Interface(name='GigabitEthernet0/0/0/7', device=self.dev1)\n self.i8 = Interface(name='GigabitEthernet0/0/0/8', device=self.dev2)\n self.link = Link(name='1_2_1')\n self.link.connect_interface(interface=self.i1)\n self.link.connect_interface(interface=self.i2)\n self.link2 = Link(name='1_2_2')\n self.link2.connect_interface(interface=self.i3)\n self.link2.connect_interface(interface=self.i4)\n self.link3 = Link(name='1_2_3')\n self.link3.connect_interface(interface=self.i5)\n self.link3.connect_interface(interface=self.i6)\n self.link4 = Link(name='1_2_4')\n 
self.link4.connect_interface(interface=self.i7)\n self.link4.connect_interface(interface=self.i8)\n self.assertSetEqual(\n set(self.link.find_interfaces()),\n set([self.i1, self.i2]))\n self.assertSetEqual(\n set(self.dev1.find_interfaces()),\n set([self.i1, self.i3, self.i5, self.i7]))\n self.assertSetEqual(\n set(self.dev2.find_interfaces()),\n set([self.i2, self.i4, self.i6, self.i8]))\n\n def test_01_interface_only(self):\n\n ldp = Ldp()\n self.assertSetEqual(set(ldp.devices), set([]))\n self.assertSetEqual(set(ldp.links), set([]))\n\n if 0:\n # TODO print(\"before\")\n # TODO print(ldp.devices)\n # TODO print(ldp.links)\n # TODO print(ldp.interfaces)\n pass\n\n self.link.add_feature(ldp)\n self.link2.add_feature(ldp)\n self.link3.add_feature(ldp)\n self.link4.add_feature(ldp)\n self.assertCountEqual(ldp.devices, [self.dev1, self.dev2])\n self.assertSetEqual(set(ldp.links), set([self.link, self.link2, self.link3, self.link4]))\n self.assertSetEqual(set(ldp.interfaces), set([self.i1, self.i2, self.i3, self.i4, self.i5, self.i6, self.i7, self.i8]))\n\n if 1:\n # TODO print(\"after\")\n # TODO print(ldp.links)\n # TODO print(ldp.devices)\n # TODO print(ldp.interfaces)\n pass\n\n # ==Test interface-only config==\n out = ldp.build_config(apply=False)\n if 1:\n self.assertRegex(str(out['PE1']), 'interface GigabitEthernet0/0/0/1')\n self.assertRegex(str(out['PE1']), 'interface GigabitEthernet0/0/0/3')\n self.assertRegex(str(out['PE1']), 'interface GigabitEthernet0/0/0/5')\n self.assertRegex(str(out['PE1']), 'interface GigabitEthernet0/0/0/7')\n self.assertRegex(str(out['PE2']), 'interface GigabitEthernet0/0/0/2')\n self.assertRegex(str(out['PE2']), 'interface GigabitEthernet0/0/0/4')\n self.assertRegex(str(out['PE2']), 'interface GigabitEthernet0/0/0/6')\n self.assertRegex(str(out['PE2']), 'interface GigabitEthernet0/0/0/8')\n\n def test_1_top_level(self):\n\n acl1 = AccessList(name='acl1')\n acl2 = AccessList(name='acl2')\n acl3 = AccessList(name='acl3')\n acl4 = AccessList(name='acl4')\n acl5 = AccessList(name='acl5')\n acl6 = AccessList(name='acl6')\n acl7 = AccessList(name='acl7')\n acl8 = AccessList(name='acl8')\n acl9 = AccessList(name='acl9')\n acl11 = AccessList(name='acl11')\n acl22 = AccessList(name='acl22')\n acl33 = AccessList(name='acl33')\n acl44 = AccessList(name='acl44')\n acl1111 = AccessList(name='acl1111')\n acl2222 = AccessList(name='acl2222')\n acl3333 = AccessList(name='acl3333')\n acl4444 = AccessList(name='acl4444')\n\n ldp = Ldp()\n self.link.add_feature(ldp)\n\n # ==Test top-level config==\n ldp.capabilities_cisco_iosxr = False\n ldp.default_vrf_impl_ipv4 = False\n ldp.ds_tlv = False\n ldp.hello_holdtime = 100\n ldp.hello_interval = 200\n ldp.instance_tlv = False\n ldp.quickstart = False\n ldp.targeted_hello_holdtime = 10\n ldp.targeted_hello_interval = 15\n ldp.entropy_label = True\n ldp.gr = True\n ldp.gr_fwdstate_holdtime = 60\n ldp.gr_maintain_acl = acl3\n ldp.gr_reconnect_timeout = 60\n ldp.igp_sync_delay_on_proc_restart = 300\n ldp.igp_sync_delay_on_session_up = 200\n ldp.log_gr = True\n ldp.log_hello_adj = True\n ldp.log_neighbor = True\n ldp.log_nsr = True\n ldp.log_sess_prot = True\n ldp.ltrace_buffer_multiplier = 3\n ldp.dualstack_tlv_compliance = True\n ldp.dualstack_transport_max_wait = 30\n ldp.dualstack_transport_prefer_ipv4 = True\n ldp.nsr = True\n ldp.password_type = 'clear'\n ldp.password = '<PASSWORD>'\n ldp.device_attr['PE2'].password_type = 'encrypted'\n ldp.device_attr['PE2'].password = '<PASSWORD>'\n ldp.session_backoff_init = 10\n 
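# per-device overrides: device_attr values take precedence over the ldp-level defaults on that device only (PE1 gets backoff-init 20, PE2 inherits 10, as asserted below)\n 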
ldp.device_attr['PE1'].session_backoff_init = 20\n ldp.session_backoff_max = 150\n ldp.device_attr['PE2'].session_backoff_max = 250\n ldp.session_holdtime = 333\n ldp.device_attr['PE1'].session_holdtime = 444\n ldp.session_protection = True\n ldp.session_protection_dur = 222\n ldp.device_attr['PE2'].session_protection_dur = 333\n ldp.session_protection_for_acl = acl1\n ldp.signalling_dscp = 16\n\n out = ldp.build_config(apply=False)\n if 1:\n self.assertCountEqual(out.keys(), ['PE1', 'PE2'])\n self.assertMultiLineEqual(str(out['PE1']), '\\n'.join([\n 'mpls ldp',\n ' capabilities cisco ios-xr disable',\n ' default-vrf implicit-ipv4 disable',\n ' discovery',\n ' ds-tlv disable',\n ' hello holdtime 100',\n ' hello interval 200',\n ' instance-tlv disable',\n ' quick-start disable',\n ' targeted-hello holdtime 10',\n ' targeted-hello interval 15',\n ' exit',\n ' entropy-label',\n ' graceful-restart',\n ' graceful-restart forwarding-state-holdtime 60',\n ' graceful-restart reconnect-timeout 60',\n ' igp sync delay on-proc-restart 300',\n ' igp sync delay on-session-up 200',\n ' nsr',\n ' session backoff 20 150',\n ' session holdtime 444',\n ' session protection for acl1 duration 222',\n ' signalling dscp 16',\n ' log',\n ' graceful-restart',\n ' hello-adjacency',\n ' neighbor',\n ' nsr',\n ' session-protection',\n ' exit',\n ' ltrace-buffer multiplier 3',\n ' graceful-restart helper-peer maintain-on-local-reset for acl3',\n ' address-family ipv4',\n ' exit',\n ' interface GigabitEthernet0/0/0/1',\n ' address-family ipv4',\n ' exit',\n ' exit',\n ' neighbor',\n ' dual-stack tlv-compliance',\n ' dual-stack transport-connection max-wait 30',\n ' dual-stack transport-connection prefer ipv4',\n ' password clear <PASSWORD>',\n ' exit',\n ' exit', \n ]))\n\n self.assertMultiLineEqual(str(out['PE2']), '\\n'.join([\n 'mpls ldp',\n ' capabilities cisco ios-xr disable',\n ' default-vrf implicit-ipv4 disable',\n ' discovery',\n ' ds-tlv disable',\n ' hello holdtime 100',\n ' hello interval 200',\n ' instance-tlv disable',\n ' quick-start disable',\n ' targeted-hello holdtime 10',\n ' targeted-hello interval 15',\n ' exit',\n ' entropy-label',\n ' graceful-restart',\n ' graceful-restart forwarding-state-holdtime 60',\n ' graceful-restart reconnect-timeout 60',\n ' igp sync delay on-proc-restart 300',\n ' igp sync delay on-session-up 200',\n ' nsr',\n ' session backoff 10 250',\n ' session holdtime 333',\n ' session protection for acl1 duration 333',\n ' signalling dscp 16',\n ' log',\n ' graceful-restart',\n ' hello-adjacency',\n ' neighbor',\n ' nsr',\n ' session-protection',\n ' exit',\n ' ltrace-buffer multiplier 3',\n ' graceful-restart helper-peer maintain-on-local-reset for acl3',\n ' address-family ipv4',\n ' exit',\n ' interface GigabitEthernet0/0/0/2',\n ' address-family ipv4',\n ' exit',\n ' exit',\n ' neighbor',\n ' dual-stack tlv-compliance',\n ' dual-stack transport-connection max-wait 30',\n ' dual-stack transport-connection prefer ipv4',\n ' password encrypted <PASSWORD>',\n ' exit',\n ' exit', \n ]))\n\n if 1:\n # set the per-attr variables that are inherited by VRF\n ldp.device_attr['PE1'].vrf_attr['default'].router_id = '1.1.1.1'\n ldp.device_attr['PE2'].vrf_attr['default'].router_id = '2.2.2.2'\n ldp.device_attr['PE1'].vrf_attr['default'].session_dod_acl = acl11\n ldp.device_attr['PE2'].vrf_attr['default'].session_dod_acl = acl22\n\n out = ldp.build_config(apply=False)\n self.assertRegex(str(out['PE1']), 'router-id 1.1.1.1')\n self.assertRegex(str(out['PE1']), 'session downstream-on-demand 
with acl11')\n\n self.assertRegex(str(out['PE2']), 'router-id 2.2.2.2')\n self.assertRegex(str(out['PE2']), 'session downstream-on-demand with acl22')\n\n # Check unconfig - <nsr> config filter\n out = ldp.build_unconfig(apply=False, attributes='device_attr__*__nsr')\n if 1:\n self.assertRegex(str(out['PE1']), 'no nsr')\n self.assertRegex(str(out['PE2']), 'no nsr')\n\n # Check unconfig - <gr> config filter=\n out = ldp.build_unconfig(apply=False, attributes='device_attr__*__gr')\n if 1:\n self.assertRegex(str(out['PE1']), 'no graceful-restart')\n self.assertRegex(str(out['PE2']), 'no graceful-restart')\n\n def test_2_per_vrf(self):\n\n acl1 = AccessList(name='acl1')\n acl2 = AccessList(name='acl2')\n acl3 = AccessList(name='acl3')\n acl4 = AccessList(name='acl4')\n acl5 = AccessList(name='acl5')\n acl6 = AccessList(name='acl6')\n acl7 = AccessList(name='acl7')\n acl8 = AccessList(name='acl8')\n acl9 = AccessList(name='acl9')\n acl11 = AccessList(name='acl11')\n acl22 = AccessList(name='acl22')\n acl33 = AccessList(name='acl33')\n acl44 = AccessList(name='acl44')\n acl1111 = AccessList(name='acl1111')\n acl2222 = AccessList(name='acl2222')\n acl3333 = AccessList(name='acl3333')\n acl4444 = AccessList(name='acl4444')\n\n ldp = Ldp()\n self.link.add_feature(ldp)\n vrf = Vrf(name='vrf1')\n ldp.add_force_vrf(vrf)\n vrf2 = Vrf(name='vrf2')\n ldp.add_force_vrf(vrf2)\n ldp.device_attr['PE1'].router_id = '1.1.1.1'\n ldp.device_attr['PE2'].router_id = '2.2.2.2'\n ldp.device_attr['PE1'].vrf_attr['vrf1'].router_id = '11.11.11.11'\n ldp.device_attr['PE2'].vrf_attr['vrf1'].router_id = '22.22.22.22'\n ldp.device_attr['PE1'].session_dod_acl = acl1\n ldp.device_attr['PE2'].session_dod_acl = acl2\n ldp.device_attr['PE1'].vrf_attr['vrf1'].session_dod_acl = acl1111\n ldp.device_attr['PE2'].vrf_attr['vrf1'].session_dod_acl = acl2222\n ldp.device_attr['PE1'].vrf_attr['vrf1'].password_type = 'clear'\n ldp.device_attr['PE1'].vrf_attr['vrf1'].password = '<PASSWORD>'\n ldp.device_attr['PE2'].vrf_attr['vrf1'].password_type = 'encrypted'\n ldp.device_attr['PE2'].vrf_attr['vrf1'].password = '<PASSWORD>'\n ldp.device_attr['PE1'].vrf_attr['vrf1'].gr_maintain_acl = acl33\n ldp.device_attr['PE1'].vrf_attr['vrf2'].session_dod_acl = acl3333\n ldp.device_attr['PE2'].vrf_attr['vrf2'].session_dod_acl = acl4444\n ldp.device_attr['PE1'].vrf_attr['vrf2'].password_type = 'clear'\n ldp.device_attr['PE1'].vrf_attr['vrf2'].password = '<PASSWORD>'\n ldp.device_attr['PE2'].vrf_attr['vrf2'].password_type = 'encrypted'\n ldp.device_attr['PE2'].vrf_attr['vrf2'].password = '<PASSWORD>'\n ldp.device_attr['PE1'].vrf_attr['vrf2'].gr_maintain_acl = acl44\n\n # Test per vrf config\n \n # Check vrf config - full config\n out = ldp.build_config(apply=False)\n self.assertRegex(str(out['PE1']), '(?s)vrf1.*router-id 11.11.11.11')\n self.assertRegex(str(out['PE1']), 'router-id 1.1.1.1')\n self.assertRegex(str(out['PE2']), '(?s)vrf1.*router-id 22.22.22.22')\n self.assertRegex(str(out['PE2']), 'router-id 2.2.2.2')\n self.assertRegex(str(out['PE1']), '(?s)vrf1.*session downstream-on-demand with acl1111')\n self.assertRegex(str(out['PE2']), '(?s)vrf1.*session downstream-on-demand with acl2222')\n self.assertRegex(str(out['PE1']), '(?s)vrf1.*neighbor.*password clear password1')\n self.assertRegex(str(out['PE2']), '(?s)vrf1.*neighbor.*password encrypted <PASSWORD>')\n self.assertRegex(str(out['PE1']), '(?s)vrf1.*graceful-restart helper-peer maintain-on-local-reset for acl33')\n self.assertRegex(str(out['PE1']), '(?s)vrf2.*session downstream-on-demand with 
acl3333')\n self.assertRegex(str(out['PE2']), '(?s)vrf2.*session downstream-on-demand with acl4444')\n self.assertRegex(str(out['PE1']), '(?s)vrf2.*neighbor.*password clear password1')\n self.assertRegex(str(out['PE2']), '(?s)vrf2.*neighbor.*password encrypted <PASSWORD>')\n self.assertRegex(str(out['PE1']), '(?s)vrf2.*graceful-restart helper-peer maintain-on-local-reset for acl44')\n\n\n # Test VRF unconfig\n out = ldp.build_unconfig(apply=False, attributes='device_attr__*__vrf_attr')\n self.assertRegex(str(out['PE1']), 'no vrf vrf1')\n self.assertRegex(str(out['PE1']), 'no vrf vrf2')\n self.assertRegex(str(out['PE2']), 'no vrf vrf1')\n self.assertRegex(str(out['PE2']), 'no vrf vrf2')\n\n # Check vrf config - vrf config filter all-inclusive \n out2 = ldp.build_config(apply=False, attributes='device_attr__*__vrf_attr')\n self.assertRegex(str(out2['PE1']), '(?s)vrf1.*router-id 11.11.11.11')\n self.assertRegex(str(out2['PE2']), '(?s)vrf1.*router-id 22.22.22.22')\n self.assertRegex(str(out2['PE1']), '(?s)vrf1.*session downstream-on-demand with acl1111')\n self.assertRegex(str(out2['PE2']), '(?s)vrf1.*session downstream-on-demand with acl2222')\n self.assertRegex(str(out2['PE1']), '(?s)vrf1.*neighbor.*password clear <PASSWORD>')\n self.assertRegex(str(out2['PE2']), '(?s)vrf1.*neighbor.*password encrypted <PASSWORD>')\n self.assertRegex(str(out2['PE1']), '(?s)vrf1.*graceful-restart helper-peer maintain-on-local-reset for acl33')\n self.assertRegex(str(out2['PE1']), '(?s)vrf2.*session downstream-on-demand with acl3333')\n self.assertRegex(str(out2['PE2']), '(?s)vrf2.*session downstream-on-demand with acl4444')\n self.assertRegex(str(out2['PE1']), '(?s)vrf2.*neighbor.*password clear <PASSWORD>')\n self.assertRegex(str(out2['PE2']), '(?s)vrf2.*neighbor.*password encrypted <PASSWORD>')\n self.assertRegex(str(out2['PE1']), '(?s)vrf2.*graceful-restart helper-peer maintain-on-local-reset for acl44')\n # interface output will be built as it comes from vrf default\n self.assertRegex(str(out2['PE1']), '(?s)interface Gig')\n \n\n # Check vrf config - device <PE1> config filter\n out3 = ldp.build_config(apply=False, attributes='device_attr__PE1')\n self.assertTrue('PE1' in out3)\n self.assertFalse('PE2' in out3)\n self.assertRegex(str(out3['PE1']), '(?s)vrf1.*router-id 11.11.11.11')\n self.assertRegex(str(out3['PE1']), '(?s)vrf1.*session downstream-on-demand with acl1111')\n self.assertRegex(str(out3['PE1']), '(?s)vrf1.*neighbor.*password clear password1')\n self.assertRegex(str(out3['PE1']), '(?s)vrf1.*graceful-restart helper-peer maintain-on-local-reset for acl33')\n self.assertRegex(str(out3['PE1']), '(?s)vrf2.*session downstream-on-demand with acl3333')\n self.assertRegex(str(out3['PE1']), '(?s)vrf2.*neighbor.*password clear <PASSWORD>')\n self.assertRegex(str(out3['PE1']), '(?s)vrf2.*graceful-restart helper-peer maintain-on-local-reset for acl44')\n self.assertRegex(str(out3['PE1']), '(?s)interface Gig')\n out3.keys()\n\n # Check vrf config - vrf <vrf1> config filter\n out3 = ldp.build_config(apply=False, attributes='device_attr__*__vrf_attr__vrf1')\n self.assertRegex(str(out3['PE1']), '(?s)vrf1.*router-id 11.11.11.11')\n self.assertRegex(str(out3['PE2']), '(?s)vrf1.*router-id 22.22.22.22')\n self.assertRegex(str(out3['PE1']), '(?s)vrf1.*session downstream-on-demand with acl1111')\n self.assertRegex(str(out3['PE2']), '(?s)vrf1.*session downstream-on-demand with acl2222')\n self.assertRegex(str(out3['PE1']), '(?s)vrf1.*neighbor.*password clear <PASSWORD>')\n self.assertRegex(str(out3['PE2']), 
'(?s)vrf1.*neighbor.*password encrypted <PASSWORD>')\n self.assertRegex(str(out3['PE1']), '(?s)vrf1.*graceful-restart helper-peer maintain-on-local-reset for acl33')\n # vrf2 output should not be built\n self.assertNotRegex(str(out3['PE1']), '(?s)vrf2.*session downstream-on-demand with acl3333')\n self.assertNotRegex(str(out3['PE2']), '(?s)vrf2.*session downstream-on-demand with acl4444')\n self.assertNotRegex(str(out3['PE1']), '(?s)vrf2.*neighbor.*password clear <PASSWORD>')\n self.assertNotRegex(str(out3['PE2']), '(?s)vrf2.*neighbor.*password encrypted <PASSWORD>')\n self.assertNotRegex(str(out3['PE1']), '(?s)vrf2.*graceful-restart helper-peer maintain-on-local-reset for acl44')\n # interface output from default vrf should not be built\n self.assertNotRegex(str(out3['PE1']), '(?s)interface Gig')\n\n # Check vrf unconfig - vrf <vrf1> config filter\n out = ldp.build_unconfig(apply=False, attributes='device_attr__*__vrf_attr__vrf1')\n self.assertRegex(str(out['PE1']), 'no vrf vrf1')\n self.assertNotRegex(str(out['PE1']), 'no vrf vrf2')\n self.assertRegex(str(out['PE2']), 'no vrf vrf1')\n self.assertNotRegex(str(out['PE2']), 'no vrf vrf2')\n\n # Check vrf unconfig - vrf <vrf1> config filter\n out = ldp.build_unconfig(apply=False, attributes='device_attr__*__vrf_attr__*__router_id')\n if 1:\n self.assertRegex(str(out['PE1']), '(?s)vrf1.*no router-id')\n self.assertRegex(str(out['PE2']), '(?s)vrf1.*no router-id')\n\n # Check vrf unconfig - vrf <vrf1> config filter after adding router-id to vrf2\n ldp.device_attr['PE1'].vrf_attr['vrf2'].router_id = '11.11.11.11'\n out = ldp.build_unconfig(apply=False, attributes='device_attr__*__vrf_attr__*__router_id')\n if 1:\n self.assertRegex(str(out['PE1']), '(?s)vrf1.*no router-id')\n self.assertRegex(str(out['PE1']), '(?s)vrf2.*no router-id')\n self.assertRegex(str(out['PE2']), '(?s)vrf1.*no router-id')\n\n # Check vrf config - vrf <vrf2> config filter\n out3 = ldp.build_config(apply=False, attributes='device_attr__*__vrf_attr__vrf2')\n # TODO print(\"\\nPE1 config\\n\" + str(out3['PE1']))\n # TODO print(\"\\nPE2 config\\n\" + str(out3['PE2']))\n self.assertNotRegex(str(out3['PE1']), '(?s)vrf1.*router-id 11.11.11.11')\n self.assertNotRegex(str(out3['PE2']), '(?s)vrf1.*router-id 22.22.22.22')\n self.assertNotRegex(str(out3['PE1']), '(?s)vrf1.*session downstream-on-demand with acl1111')\n self.assertNotRegex(str(out3['PE2']), '(?s)vrf1.*session downstream-on-demand with acl2222')\n self.assertNotRegex(str(out3['PE1']), '(?s)vrf1.*neighbor.*password clear <PASSWORD>')\n self.assertNotRegex(str(out3['PE2']), '(?s)vrf1.*neighbor.*password encrypted <PASSWORD>')\n self.assertNotRegex(str(out3['PE1']), '(?s)vrf1.*graceful-restart helper-peer maintain-on-local-reset for acl33')\n # vrf2 output should not be built\n self.assertRegex(str(out3['PE1']), '(?s)vrf2.*session downstream-on-demand with acl3333')\n self.assertRegex(str(out3['PE2']), '(?s)vrf2.*session downstream-on-demand with acl4444')\n self.assertRegex(str(out3['PE1']), '(?s)vrf2.*neighbor.*password clear <PASSWORD>')\n self.assertRegex(str(out3['PE2']), '(?s)vrf2.*neighbor.*password encrypted <PASSWORD>')\n self.assertRegex(str(out3['PE1']), '(?s)vrf2.*graceful-restart helper-peer maintain-on-local-reset for acl44')\n # interface output should not be built\n self.assertNotRegex(str(out3['PE1']), '(?s)interface Gig.*address-family')\n\n def test_3_per_intf(self):\n\n acl1 = AccessList(name='acl1')\n acl2 = AccessList(name='acl2')\n acl3 = AccessList(name='acl3')\n acl4 = AccessList(name='acl4')\n acl5 
= AccessList(name='acl5')\n acl6 = AccessList(name='acl6')\n acl7 = AccessList(name='acl7')\n acl8 = AccessList(name='acl8')\n acl9 = AccessList(name='acl9')\n acl11 = AccessList(name='acl11')\n acl22 = AccessList(name='acl22')\n acl33 = AccessList(name='acl33')\n acl44 = AccessList(name='acl44')\n acl1111 = AccessList(name='acl1111')\n acl2222 = AccessList(name='acl2222')\n acl3333 = AccessList(name='acl3333')\n acl4444 = AccessList(name='acl4444')\n\n ldp = Ldp()\n self.link.add_feature(ldp)\n self.link4.add_feature(ldp)\n\n ldp.device_attr['PE1'].interface_attr['GigabitEthernet0/0/0/1'].igp_sync_delay_on_session_up = 250\n ldp.device_attr['PE1'].interface_attr['GigabitEthernet0/0/0/1'].disc_hello_dualstack_tlv = AddressFamily.ipv4\n ldp.device_attr['PE1'].interface_attr['GigabitEthernet0/0/0/1'].hello_holdtime = 50\n ldp.device_attr['PE1'].interface_attr['GigabitEthernet0/0/0/1'].hello_interval = 60\n ldp.device_attr['PE1'].interface_attr['GigabitEthernet0/0/0/1'].quickstart = False\n\n ldp.hello_interval = 88\n ldp.device_attr['PE2'].hello_holdtime = 99\n ldp.device_attr['PE2'].interface_attr['GigabitEthernet0/0/0/8'].disc_hello_dualstack_tlv = AddressFamily.ipv6\n ldp.device_attr['PE2'].interface_attr['GigabitEthernet0/0/0/8'].igp_sync_delay_on_session_up = False\n\n\n # ==Test per interface config===\n out = ldp.build_config(apply=False)\n if 1:\n self.assertRegex(str(out['PE1']), '(?s)interface GigabitEthernet0/0/0/1.*igp sync delay on-session-up 250')\n self.assertRegex(str(out['PE1']), '(?s)interface GigabitEthernet0/0/0/1.*discovery hello dual-stack-tlv ipv4')\n self.assertRegex(str(out['PE1']), '(?s)interface GigabitEthernet0/0/0/1.*discovery hello holdtime 50')\n self.assertRegex(str(out['PE1']), '(?s)interface GigabitEthernet0/0/0/1.*discovery hello interval 60')\n self.assertRegex(str(out['PE1']), '(?s)interface GigabitEthernet0/0/0/1.*discovery quick-start disable')\n self.assertRegex(str(out['PE2']), '(?s)interface GigabitEthernet0/0/0/8.*igp sync delay on-session-up disable')\n self.assertRegex(str(out['PE2']), '(?s)interface GigabitEthernet0/0/0/8.*discovery hello dual-stack-tlv ipv6')\n # hello interval and hello holdtime are NOT inherited\n self.assertNotRegex(str(out['PE2']), '(?s)interface GigabitEthernet0/0/0/2.*discovery hello holdtime 99')\n self.assertNotRegex(str(out['PE2']), '(?s)interface GigabitEthernet0/0/0/2.*discovery hello interval 88')\n\n # Check intf config - intf <GigabitEthernet0/0/0/8> config filter\n # (needs vrf as it is under default VRF)\n out = ldp.build_config(apply=False, attributes='device_attr__*__vrf_attr__*__interface_attr__GigabitEthernet0/0/0/8')\n if 1:\n self.assertNotRegex(str(out['PE1']), '(?s)interface GigabitEthernet0/0/0/1.*igp sync delay on-session-up 250')\n self.assertNotRegex(str(out['PE1']), '(?s)interface GigabitEthernet0/0/0/1.*discovery hello dual-stack-tlv ipv4')\n self.assertNotRegex(str(out['PE1']), '(?s)interface GigabitEthernet0/0/0/1.*discovery hello holdtime 50')\n self.assertNotRegex(str(out['PE1']), '(?s)interface GigabitEthernet0/0/0/1.*discovery hello interval 60')\n self.assertNotRegex(str(out['PE1']), '(?s)interface GigabitEthernet0/0/0/1.*discovery quick-start disable')\n self.assertRegex(str(out['PE2']), '(?s)interface GigabitEthernet0/0/0/8.*igp sync delay on-session-up disable')\n self.assertRegex(str(out['PE2']), '(?s)interface GigabitEthernet0/0/0/8.*discovery hello dual-stack-tlv ipv6')\n self.assertNotRegex(str(out['PE2']), '(?s)interface GigabitEthernet0/0/0/2.*discovery hello holdtime 99')\n 
self.assertNotRegex(str(out['PE2']), '(?s)interface GigabitEthernet0/0/0/2.*discovery hello interval 88')\n\n # Check intf unconfig - intf <GigabitEthernet0/0/0/8> config filter\n # (needs vrf as it is under default VRF)\n out = ldp.build_unconfig(apply=False, attributes='device_attr__*__vrf_attr__*__interface_attr__GigabitEthernet0/0/0/8')\n if 1:\n self.assertRegex(str(out['PE2']), 'no interface GigabitEthernet0/0/0/8')\n self.assertNotRegex(str(out['PE1']), 'interface GigabitEthernet0/0/0/1')\n self.assertNotRegex(str(out['PE2']), 'interface GigabitEthernet0/0/0/2')\n\n # Check intf unconfig - intf <GigabitEthernet0/0/0/8> attr <disc_hello_dualstack_tlv> config filter\n # (needs vrf as it is under default VRF)\n out = ldp.build_unconfig(apply=False, attributes='device_attr__*__vrf_attr__*__interface_attr__GigabitEthernet0/0/0/8__disc_hello_dualstack_tlv')\n if 1:\n self.assertRegex(str(out['PE2']), '(?s)interface GigabitEthernet0/0/0/8.*no discovery hello dual-stack-tlv')\n self.assertNotRegex(str(out['PE1']), 'interface GigabitEthernet0/0/0/1')\n self.assertNotRegex(str(out['PE2']), 'interface GigabitEthernet0/0/0/2')\n\n def test_4_per_af(self):\n\n acl1 = AccessList(name='acl1')\n acl2 = AccessList(name='acl2')\n acl3 = AccessList(name='acl3')\n acl4 = AccessList(name='acl4')\n acl5 = AccessList(name='acl5')\n acl6 = AccessList(name='acl6')\n acl7 = AccessList(name='acl7')\n acl8 = AccessList(name='acl8')\n acl9 = AccessList(name='acl9')\n acl11 = AccessList(name='acl11')\n acl22 = AccessList(name='acl22')\n acl33 = AccessList(name='acl33')\n acl44 = AccessList(name='acl44')\n acl1111 = AccessList(name='acl1111')\n acl2222 = AccessList(name='acl2222')\n acl3333 = AccessList(name='acl3333')\n acl4444 = AccessList(name='acl4444')\n\n ldp = Ldp()\n self.link.add_feature(ldp)\n ldp.address_families = set([AddressFamily.ipv4])\n ldp.address_families = set([AddressFamily.ipv4, AddressFamily.ipv6])\n\n ldp.device_attr['PE1'].vrf_attr['default'].address_family_attr['ipv4'].targeted_hello_accept_from_acl = acl6\n\n ldp.device_attr['PE1'].vrf_attr['default'].address_family_attr['ipv4'].transport_address = '1.2.3.4'\n ldp.device_attr['PE1'].vrf_attr['default'].address_family_attr['ipv6'].transport_address = 'fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b'\n ldp.device_attr['PE1'].vrf_attr['default'].address_family_attr['ipv4'].advertise = False\n ldp.device_attr['PE1'].vrf_attr['default'].address_family_attr['ipv6'].advertise = False\n ldp.device_attr['PE1'].vrf_attr['default'].address_family_attr['ipv4'].advertise_expnull_to_acl = acl4\n ldp.device_attr['PE1'].vrf_attr['default'].address_family_attr['ipv6'].advertise_expnull = True\n #TODO - how does this work?\n #ldp.device_attr['PE1'].vrf_attr['default'].address_family_attr['ipv4'].advertise_interfaces = 'GigabitEthernet0/0/0/3'\n #ldp.device_attr['PE1'].vrf_attr['default'].address_family_attr['ipv6'].advertise_interfaces = 'GigabitEthernet0/0/0/3'\n ldp.device_attr['PE1'].vrf_attr['default'].address_family_attr['ipv4'].default_route = True\n ldp.device_attr['PE1'].vrf_attr['default'].address_family_attr['ipv4'].allocate_for_host_routes = True\n ldp.device_attr['PE1'].vrf_attr['default'].address_family_attr['ipv6'].allocate_for_acl = acl5\n ldp.device_attr['PE1'].vrf_attr['default'].address_family_attr['ipv4'].impnull_override_for_acl = acl1\n ldp.device_attr['PE1'].vrf_attr['default'].address_family_attr['ipv6'].impnull_override_for_acl = acl2\n ldp.device_attr['PE1'].vrf_attr['default'].address_family_attr['ipv4'].target_hello_accept = True\n 
ldp.device_attr['PE1'].vrf_attr['default'].address_family_attr['ipv6'].target_hello_accept_from_acl = acl7\n ldp.device_attr['PE1'].vrf_attr['default'].address_family_attr['ipv4'].redist_bgp = True\n #ldp.device_attr['PE1'].address_family_attr['ipv6'].redist_bgp_advto_acl = acl9\n #ldp.device_attr['PE1'].address_family_attr['ipv6'].redist_bgp_as = '100.200'\n ldp.device_attr['PE1'].vrf_attr['default'].address_family_attr['ipv4'].te_autotunnel_mesh_group_id = 'all'\n #ldp.device_attr['PE1'].address_family_attr['ipv6'].te_autotunnel_mesh_group_id = '100'\n\n\n # ==Test per address-family config==\n out = ldp.build_config(apply=False)\n if 1:\n self.assertRegex(str(out['PE1']), '(?s)address-family ipv4.*discovery transport-address 1.2.3.4')\n self.assertRegex(str(out['PE1']), '(?s)address-family ipv6.*discovery transport-address fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b')\n self.assertRegex(str(out['PE1']), '(?s)address-family ipv4.*label.*local.*advertise.*disable')\n self.assertRegex(str(out['PE1']), '(?s)address-family ipv6.*label.*local.*advertise.*disable')\n #self.assertRegex(str(out['PE1']), '(?s)address-family ipv4.*label.*local.*advertise.*interface GigabitEthernet0/0/0/1')\n #self.assertRegex(str(out['PE1']), '(?s)address-family ipv6.*label.*local.*advertise.*interface GigabitEthernet0/0/0/3')\n self.assertRegex(str(out['PE1']), '(?s)address-family ipv4.*label.*local.*advertise.*explicit-null to acl4')\n self.assertRegex(str(out['PE1']), '(?s)address-family ipv6.*label.*local.*advertise.*explicit-null')\n self.assertRegex(str(out['PE1']), '(?s)address-family ipv4.*label.*local.*default-route')\n self.assertRegex(str(out['PE1']), '(?s)address-family ipv4.*label.*local.*allocate for host-routes')\n self.assertRegex(str(out['PE1']), '(?s)address-family ipv6.*label.*local.*allocate for acl5')\n self.assertRegex(str(out['PE1']), '(?s)address-family ipv4.*label.*local.*implicit-null-override for acl1')\n self.assertRegex(str(out['PE1']), '(?s)address-family ipv6.*label.*local.*implicit-null-override for acl2')\n\n # Check af config - af <ipv4> config filter\n out = ldp.build_config(apply=False, attributes='device_attr__*__vrf_attr__*__address_family_attr__ipv4')\n if 1:\n self.assertRegex(str(out['PE1']), '(?s)address-family ipv4.*discovery transport-address 1.2.3.4')\n self.assertNotRegex(str(out['PE1']), '(?s)address-family ipv6.*discovery transport-address fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b')\n self.assertRegex(str(out['PE1']), '(?s)address-family ipv4.*label.*local.*advertise.*disable')\n self.assertNotRegex(str(out['PE1']), '(?s)address-family ipv6.*label.*local.*advertise.*disable')\n #self.assertRegex(str(out['PE1']), '(?s)address-family ipv4.*label.*local.*advertise.*interface GigabitEthernet0/0/0/1')\n #self.assertNotRegex(str(out['PE1']), '(?s)address-family ipv6.*label.*local.*advertise.*interface GigabitEthernet0/0/0/2')\n self.assertRegex(str(out['PE1']), '(?s)address-family ipv4.*label.*local.*advertise.*explicit-null to acl4')\n self.assertNotRegex(str(out['PE1']), '(?s)address-family ipv6.*label.*local.*advertise.*explicit-null')\n self.assertRegex(str(out['PE1']), '(?s)address-family ipv4.*label.*local.*default-route')\n self.assertRegex(str(out['PE1']), '(?s)address-family ipv4.*label.*local.*allocate for host-routes')\n self.assertNotRegex(str(out['PE1']), '(?s)address-family ipv6.*label.*local.*allocate for acl5')\n self.assertRegex(str(out['PE1']), '(?s)address-family ipv4.*label.*local.*implicit-null-override for acl1')\n self.assertNotRegex(str(out['PE1']), 
'(?s)address-family ipv6.*label.*local.*implicit-null-override for acl2')\n\n # Check af unconfig - af <ipv4> unconfig filter\n out = ldp.build_unconfig(apply=False, attributes='device_attr__*__vrf_attr__*__address_family_attr__ipv4')\n if 1:\n self.assertRegex(str(out['PE1']), 'no address-family ipv4')\n self.assertNotRegex(str(out['PE1']), 'no address-family ipv6')\n\n # Check af unconfig - af <ipv4> <transport_address> unconfig filter\n out = ldp.build_unconfig(apply=False, attributes='device_attr__*__vrf_attr__*__address_family_attr__ipv4__transport_address')\n if 1:\n self.assertRegex(str(out['PE1']), '(?s)address-family ipv4.*no discovery transport-address')\n\n def test_5_per_interface_per_af(self):\n\n acl1 = AccessList(name='acl1')\n acl2 = AccessList(name='acl2')\n acl3 = AccessList(name='acl3')\n acl4 = AccessList(name='acl4')\n acl5 = AccessList(name='acl5')\n acl6 = AccessList(name='acl6')\n acl7 = AccessList(name='acl7')\n acl8 = AccessList(name='acl8')\n acl9 = AccessList(name='acl9')\n acl11 = AccessList(name='acl11')\n acl22 = AccessList(name='acl22')\n acl33 = AccessList(name='acl33')\n acl44 = AccessList(name='acl44')\n acl1111 = AccessList(name='acl1111')\n acl2222 = AccessList(name='acl2222')\n acl3333 = AccessList(name='acl3333')\n acl4444 = AccessList(name='acl4444')\n\n ldp = Ldp()\n self.link.add_feature(ldp)\n ldp.address_families = set([AddressFamily.ipv4, AddressFamily.ipv6])\n\n ldp.device_attr['PE1'].interface_attr['GigabitEthernet0/0/0/1'].address_family_attr['ipv4'].igp_autoconfig = False\n\n # ipv6 config is rejected, comment this out\n #ldp.device_attr['PE1'].interface_attr['GigabitEthernet0/0/0/1'].address_family_attr['ipv6'].mldp.enabled = False\n #ldp.device_attr['PE1'].interface_attr['GigabitEthernet0/0/0/1'].address_family_attr['ipv6'].igp = True\n\n ldp.device_attr['PE1'].interface_attr['GigabitEthernet0/0/0/1'].address_family_attr['ipv4'].transport_address = '2.3.4.5'\n ldp.device_attr['PE1'].interface_attr['GigabitEthernet0/0/0/1'].address_family_attr['ipv6'].transport_address = 'fc00:db20:35b:7399::5'\n ldp.device_attr['PE2'].interface_attr['GigabitEthernet0/0/0/2'].address_family_attr['ipv4'].transport_address = 'interface'\n ldp.device_attr['PE2'].interface_attr['GigabitEthernet0/0/0/2'].address_family_attr['ipv6'].transport_address = 'interface'\n\n # ==Test per interface per address-family config===\n out = ldp.build_config(apply=False)\n if 1:\n self.assertRegex(str(out['PE1']), '(?s)interface GigabitEthernet0/0/0/1.*address-family ipv4.*igp auto-config disable')\n self.assertRegex(str(out['PE1']), '(?s)interface GigabitEthernet0/0/0/1.*address-family ipv4.*discovery transport-address 2.3.4.5')\n self.assertRegex(str(out['PE1']), '(?s)interface GigabitEthernet0/0/0/1.*address-family ipv6.*discovery transport-address fc00:db20:35b:7399::5')\n self.assertRegex(str(out['PE2']), '(?s)interface GigabitEthernet0/0/0/2.*address-family ipv4.*discovery transport-address interface')\n self.assertRegex(str(out['PE2']), '(?s)interface GigabitEthernet0/0/0/2.*address-family ipv6.*discovery transport-address interface')\n\n def test_6_per_vrf_per_interface_per_af(self):\n\n ldp = Ldp()\n self.link.add_feature(ldp)\n\n vrf = Vrf(name='vrf1')\n self.i1.vrf = vrf\n self.i2.vrf = vrf\n ldp.device_attr['PE1'].vrf_attr['vrf1'].interface_attr['GigabitEthernet0/0/0/1'].address_family_attr['ipv4'].transport_address = '2.3.4.5'\n ldp.device_attr['PE2'].vrf_attr['vrf1'].interface_attr['GigabitEthernet0/0/0/2'].address_family_attr['ipv4'].transport_address = 
'interface'\n\n # ==Test vrf per interface per address-family config==\n out = ldp.build_config(apply=False)\n if 1:\n self.assertRegex(str(out['PE1']), '(?s)vrf1.*interface GigabitEthernet0/0/0/1.*address-family ipv4.*discovery transport-address 2.3.4.5')\n self.assertRegex(str(out['PE2']), '(?s)vrf1.*interface GigabitEthernet0/0/0/2.*address-family ipv4.*discovery transport-address interface')\n\n def test_7_per_neighbor(self):\n\n ldp = Ldp()\n self.link.add_feature(ldp)\n nbr1 = IPv4LsrNeighbor('1.2.3.4:0')\n nbr2 = IPv4LsrNeighbor('1.2.3.5:0')\n nbr3 = IPv4LsrNeighbor('1.2.3.6:0')\n \n\n ldp.device_attr['PE1'].password_type = 'clear'\n ldp.device_attr['PE1'].password = '<PASSWORD>'\n ldp.device_attr['PE2'].password_type = 'encrypted'\n ldp.device_attr['PE2'].password = '<PASSWORD>'\n ldp.device_attr['PE1'].vrf_attr['default'].neighbors = [nbr1, nbr2, nbr3]\n ldp.device_attr['PE1'].vrf_attr['default'].neighbor_attr[nbr1].disable_password = True\n ldp.device_attr['PE1'].vrf_attr['default'].neighbor_attr[nbr2].password_type = 'clear'\n ldp.device_attr['PE1'].vrf_attr['default'].neighbor_attr[nbr2].password = '<PASSWORD>'\n ldp.device_attr['PE1'].vrf_attr['default'].neighbor_attr[nbr3].password_type = 'encrypted'\n ldp.device_attr['PE1'].vrf_attr['default'].neighbor_attr[nbr3].password = '<PASSWORD>'\n\n # ==Test per neighbor config==\n out = ldp.build_config(apply=False)\n if 1:\n self.assertRegex(str(out['PE1']), 'neighbor\\n.*password clear password1')\n self.assertRegex(str(out['PE2']), 'neighbor\\n.*password encrypted <PASSWORD>')\n self.assertRegex(str(out['PE1']), '1.2.3.4:0 password disable')\n self.assertRegex(str(out['PE1']), '1.2.3.5:0 password clear blah')\n self.assertRegex(str(out['PE1']), '1.2.3.6:0 password encrypted <PASSWORD>')\n\n # Check nbr config - nbr <1.2.3.5:0'> config filter\n out = ldp.build_config(apply=False, attributes='device_attr__*__vrf_attr__*__neighbor_attr__1.2.3.5:0')\n if 1:\n self.assertNotRegex(str(out['PE1']), 'neighbor\\n.*password clear password1')\n self.assertNotRegex(str(out['PE2']), 'neighbor\\n.*password encrypted <PASSWORD>')\n self.assertNotRegex(str(out['PE1']), '1.2.3.4:0 password disable')\n self.assertRegex(str(out['PE1']), '1.2.3.5:0 password clear blah')\n self.assertNotRegex(str(out['PE1']), '1.2.3.6:0 password encrypted <PASSWORD>')\n\n # Check nbr unconfig - nbr <1.2.3.5:0'> config filter\n out = ldp.build_unconfig(apply=False, attributes='device_attr__*__vrf_attr__*__neighbor_attr__1.2.3.4:0__disable_password')\n if 1:\n self.assertNotRegex(str(out['PE1']), 'neighbor\\n.*password clear password1')\n self.assertNotRegex(str(out['PE2']), 'neighbor\\n.*password encrypted <PASSWORD>')\n self.assertRegex(str(out['PE1']), 'no 1.2.3.4:0 password disable')\n self.assertNotRegex(str(out['PE1']), '1.2.3.5:0 password clear blah')\n self.assertNotRegex(str(out['PE1']), '1.2.3.6:0 password encrypted <PASSWORD>')\n\n out = ldp.build_unconfig(apply=False, attributes='device_attr__*__vrf_attr__*__neighbor_attr__1.2.3.5:0__password')\n self.assertRegex(str(out['PE1']), 'no 1.2.3.5:0 password clear blah')\n \n out = ldp.build_unconfig(apply=False, attributes='device_attr__*__vrf_attr__*__neighbor_attr__1.2.3.6:0__password')\n self.assertRegex(str(out['PE1']), 'no 1.2.3.6:0 password encrypted <PASSWORD>')\n\n def test_8_per_vrf_per_neighbor(self):\n\n acl1 = AccessList(name='acl1')\n acl2 = AccessList(name='acl2')\n acl3 = AccessList(name='acl3')\n acl4 = AccessList(name='acl4')\n acl5 = AccessList(name='acl5')\n acl6 = AccessList(name='acl6')\n 
acl7 = AccessList(name='acl7')\n acl8 = AccessList(name='acl8')\n acl9 = AccessList(name='acl9')\n acl11 = AccessList(name='acl11')\n acl22 = AccessList(name='acl22')\n acl33 = AccessList(name='acl33')\n acl44 = AccessList(name='acl44')\n acl1111 = AccessList(name='acl1111')\n acl2222 = AccessList(name='acl2222')\n acl3333 = AccessList(name='acl3333')\n acl4444 = AccessList(name='acl4444')\n\n ldp = Ldp()\n self.link.add_feature(ldp)\n vrf = Vrf(name='vrf1')\n ldp.add_force_vrf(vrf)\n nbr1 = IPv4LsrNeighbor('1.2.3.4:0')\n nbr2 = IPv4LsrNeighbor('1.2.3.5:0')\n nbr3 = IPv4LsrNeighbor('1.2.3.6:0')\n\n ldp.device_attr['PE1'].vrf_attr['vrf1'].neighbors = [nbr1, nbr2, nbr3]\n ldp.device_attr['PE1'].vrf_attr['vrf1'].neighbor_attr['1.2.3.4:0'].disable_password = True\n ldp.device_attr['PE1'].vrf_attr['vrf1'].neighbor_attr['1.2.3.5:0'].password_type = 'clear'\n ldp.device_attr['PE1'].vrf_attr['vrf1'].neighbor_attr['1.2.3.5:0'].password = '<PASSWORD>'\n ldp.device_attr['PE1'].vrf_attr['vrf1'].neighbor_attr['1.2.3.6:0'].password_type = 'encrypted'\n ldp.device_attr['PE1'].vrf_attr['vrf1'].neighbor_attr['1.2.3.6:0'].password = '<PASSWORD>'\n\n # ==Test per vrf per neighbor config==\n out = ldp.build_config(apply=False)\n if 1:\n self.assertRegex(str(out['PE1']), '(?s)vrf1\\n.*neighbor\\n.*1.2.3.4:0 password disable')\n self.assertRegex(str(out['PE1']), '(?s)vrf1\\n.*neighbor\\n.*1.2.3.5:0 password clear <PASSWORD>')\n self.assertRegex(str(out['PE1']), '(?s)vrf1\\n.*neighbor\\n.*1.2.3.6:0 password encrypted <PASSWORD>')\n\n def test_9_per_af_per_neighbor(self):\n\n acl1 = AccessList(name='acl1')\n acl2 = AccessList(name='acl2')\n acl3 = AccessList(name='acl3')\n acl4 = AccessList(name='acl4')\n acl5 = AccessList(name='acl5')\n acl6 = AccessList(name='acl6')\n acl7 = AccessList(name='acl7')\n acl8 = AccessList(name='acl8')\n acl9 = AccessList(name='acl9')\n acl11 = AccessList(name='acl11')\n acl22 = AccessList(name='acl22')\n acl33 = AccessList(name='acl33')\n acl44 = AccessList(name='acl44')\n acl1111 = AccessList(name='acl1111')\n acl2222 = AccessList(name='acl2222')\n acl3333 = AccessList(name='acl3333')\n acl4444 = AccessList(name='acl4444')\n nbr1 = IPv4LsrNeighbor('1.2.3.4:0')\n nbr2 = IPv4LsrNeighbor('1.2.3.5:0')\n nbr3 = IPv4LsrNeighbor('1.2.3.6:0')\n nbr4 = IPv4LsrNeighbor('1.2.3.7:0')\n nbr5 = IPv4LsrNeighbor('1.2.3.8:0')\n nbr6 = IPv6Neighbor('fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b')\n nbr7 = IPv6Neighbor('fc00:e968:6179::de52:7100')\n nbr8 = IPv6Neighbor('fc00:db20:35b:7399::5')\n\n ldp = Ldp()\n ldp.address_families = set([AddressFamily.ipv4, AddressFamily.ipv6])\n self.link.add_feature(ldp)\n\n ldp.device_attr['PE1'].vrf_attr['default'].address_family_attr['ipv4'].neighbors = [nbr1, nbr2, nbr3, nbr4, nbr5]\n ldp.device_attr['PE1'].vrf_attr['default'].address_family_attr['ipv4'].neighbor_attr['1.2.3.4:0'].targeted = True\n ldp.device_attr['PE1'].vrf_attr['default'].address_family_attr['ipv4'].neighbor_attr['1.2.3.5:0'].targeted = True\n ldp.device_attr['PE1'].vrf_attr['default'].address_family_attr['ipv4'].neighbor_attr['1.2.3.6:0'].targeted = False\n ldp.device_attr['PE1'].vrf_attr['default'].address_family_attr['ipv4'].neighbor_attr['1.2.3.5:0'].advertise_for_acl = acl1\n ldp.device_attr['PE1'].vrf_attr['default'].address_family_attr['ipv4'].neighbor_attr['1.2.3.6:0'].advertise_for_acl = acl2\n ldp.device_attr['PE1'].vrf_attr['default'].address_family_attr['ipv4'].neighbor_attr['1.2.3.7:0'].accept_for_acl = acl1\n 
ldp.device_attr['PE1'].vrf_attr['default'].address_family_attr['ipv4'].neighbor_attr['1.2.3.8:0'].accept_for_acl = acl2\n ldp.device_attr['PE1'].vrf_attr['default'].address_family_attr['ipv6'].neighbors = [nbr6, nbr7, nbr8]\n ldp.device_attr['PE1'].vrf_attr['default'].address_family_attr['ipv6'].neighbor_attr['fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b'].targeted = True\n ldp.device_attr['PE1'].vrf_attr['default'].address_family_attr['ipv6'].neighbor_attr['fc00:e968:6179::de52:7100'].targeted = True\n ldp.device_attr['PE1'].vrf_attr['default'].address_family_attr['ipv6'].neighbor_attr['fc00:db20:35b:7399::5'].targeted = False\n\n # ==Test per af per neighbor config==\n out = ldp.build_config(apply=False)\n #print(\"\\n PE1 CONFIG\\n\" + str(out['PE1']))\n #print(\"\\n PE2 CONFIG\\n\" + str(out['PE2']))\n if 1:\n self.assertRegex(str(out['PE1']), '(?s)address-family ipv4\\n.*neighbor 1.2.3.4 targeted')\n self.assertRegex(str(out['PE1']), '(?s)address-family ipv4\\n.*neighbor 1.2.3.5 targeted')\n self.assertNotRegex(str(out['PE1']), '(?s)address-family ipv4\\n.*neighbor 1.2.3.6 targeted')\n self.assertRegex(str(out['PE1']), '(?s)address-family ipv4\\n.*label.*local.*advertise.*to 1.2.3.5:0 for acl1')\n self.assertRegex(str(out['PE1']), '(?s)address-family ipv4\\n.*label.*local.*advertise.*to 1.2.3.6:0 for acl2')\n self.assertRegex(str(out['PE1']), '(?s)address-family ipv4\\n.*label.*remote.*accept.*from 1.2.3.7:0 for acl1')\n self.assertRegex(str(out['PE1']), '(?s)address-family ipv4\\n.*label.*remote.*accept.*from 1.2.3.8:0 for acl2')\n self.assertRegex(str(out['PE1']), '(?s)address-family ipv6\\n.*neighbor fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b targeted')\n self.assertRegex(str(out['PE1']), '(?s)address-family ipv6\\n.*neighbor fc00:e968:6179::de52:7100 targeted')\n \n def test_x1_per_vrf_per_af(self):\n\n acl1 = AccessList(name='acl1')\n acl2 = AccessList(name='acl2')\n acl3 = AccessList(name='acl3')\n acl4 = AccessList(name='acl4')\n acl5 = AccessList(name='acl5')\n acl6 = AccessList(name='acl6')\n acl7 = AccessList(name='acl7')\n acl8 = AccessList(name='acl8')\n acl9 = AccessList(name='acl9')\n acl11 = AccessList(name='acl11')\n acl22 = AccessList(name='acl22')\n acl33 = AccessList(name='acl33')\n acl44 = AccessList(name='acl44')\n acl1111 = AccessList(name='acl1111')\n acl2222 = AccessList(name='acl2222')\n acl3333 = AccessList(name='acl3333')\n acl4444 = AccessList(name='acl4444')\n\n ldp = Ldp()\n self.link.add_feature(ldp)\n\n vrf = Vrf(name='vrf1')\n ldp.add_force_vrf(vrf)\n ldp.device_attr['PE1'].vrf_attr['vrf1'].address_family_attr['ipv4'].transport_address = '1.2.3.4'\n ldp.device_attr['PE1'].vrf_attr['vrf1'].address_family_attr['ipv4'].advertise = False\n ldp.device_attr['PE1'].vrf_attr['vrf1'].address_family_attr['ipv4'].advertise_expnull_to_acl = acl4\n #TODO Skip for now\n #ldp.device_attr['PE1'].vrf_attr['vrf1'].address_family_attr['ipv4'].advertise_interfaces = 'GigabitEthernet0/0/0/1'\n ldp.device_attr['PE1'].vrf_attr['vrf1'].address_family_attr['ipv4'].allocate_for_host_routes = True\n ldp.device_attr['PE1'].vrf_attr['vrf1'].address_family_attr['ipv4'].default_route = True\n ldp.device_attr['PE1'].vrf_attr['vrf1'].address_family_attr['ipv4'].impnull_override_for_acl = acl1\n\n # ==Test per vrf per address family config==\n out = ldp.build_config(apply=False)\n #print(\"\\n PE1 CONFIG\\n\" + str(out['PE1']))\n #print(\"\\n PE2 CONFIG\\n\" + str(out['PE2']))\n if 1:\n self.assertRegex(str(out['PE1']), '(?s)vrf1.*address-family ipv4.*discovery transport-address 1.2.3.4')\n 
self.assertRegex(str(out['PE1']), '(?s)vrf1.*address-family ipv4.*label.*local.*advertise.*disable')\n #self.assertRegex(str(out['PE1']), '(?s)vrf1.*address-family ipv4.*label.*local.*advertise.*interface GigabitEthernet0/0/0/1')\n self.assertRegex(str(out['PE1']), '(?s)vrf1.*address-family ipv4.*label.*local.*advertise.*explicit-null to acl4')\n self.assertRegex(str(out['PE1']), '(?s)vrf1.*address-family ipv4.*label.*local.*default-route')\n self.assertRegex(str(out['PE1']), '(?s)vrf1.*address-family ipv4.*label.*local.*allocate for host-routes')\n self.assertRegex(str(out['PE1']), '(?s)vrf1.*address-family ipv4.*label.*local.*implicit-null-override for acl1')\n\n def test_x2_per_vrf_per_af_per_nbr(self):\n\n acl1 = AccessList(name='acl1')\n acl2 = AccessList(name='acl2')\n acl3 = AccessList(name='acl3')\n acl4 = AccessList(name='acl4')\n acl5 = AccessList(name='acl5')\n acl6 = AccessList(name='acl6')\n acl7 = AccessList(name='acl7')\n acl8 = AccessList(name='acl8')\n acl9 = AccessList(name='acl9')\n acl11 = AccessList(name='acl11')\n acl22 = AccessList(name='acl22')\n acl33 = AccessList(name='acl33')\n acl44 = AccessList(name='acl44')\n acl1111 = AccessList(name='acl1111')\n acl2222 = AccessList(name='acl2222')\n acl3333 = AccessList(name='acl3333')\n acl4444 = AccessList(name='acl4444')\n nbr1 = IPv4LsrNeighbor('1.2.3.5:0')\n nbr2 = IPv4LsrNeighbor('1.2.3.6:0')\n nbr3 = IPv4LsrNeighbor('1.2.3.7:0')\n nbr4 = IPv4LsrNeighbor('1.2.3.8:0')\n\n ldp = Ldp()\n self.link.add_feature(ldp)\n\n vrf = Vrf(name='vrf1')\n ldp.add_force_vrf(vrf)\n ldp.device_attr['PE1'].vrf_attr['vrf1'].address_family_attr['ipv4'].neighbors = [nbr1, nbr2, nbr3, nbr4]\n ldp.device_attr['PE1'].vrf_attr['vrf1'].address_family_attr['ipv4'].neighbor_attr['1.2.3.5:0'].advertise_for_acl = acl1\n ldp.device_attr['PE1'].vrf_attr['vrf1'].address_family_attr['ipv4'].neighbor_attr['1.2.3.6:0'].advertise_for_acl = acl2\n ldp.device_attr['PE1'].vrf_attr['vrf1'].address_family_attr['ipv4'].neighbor_attr['1.2.3.7:0'].accept_for_acl = acl1\n ldp.device_attr['PE1'].vrf_attr['vrf1'].address_family_attr['ipv4'].neighbor_attr['1.2.3.8:0'].accept_for_acl = acl2\n\n # ==Test per vrf per address family per neighbor config==\n out = ldp.build_config(apply=False)\n if 1:\n self.assertRegex(str(out['PE1']), '(?s)vrf1.*address-family ipv4\\n.*label.*local.*advertise.*to 1.2.3.5:0 for acl1')\n self.assertRegex(str(out['PE1']), '(?s)vrf1.*address-family ipv4\\n.*label.*local.*advertise.*to 1.2.3.6:0 for acl2')\n self.assertRegex(str(out['PE1']), '(?s)vrf1.*address-family ipv4.*label.*remote.*accept.*from 1.2.3.7:0 for acl1')\n self.assertRegex(str(out['PE1']), '(?s)vrf1.*address-family ipv4.*label.*remote.*accept.*from 1.2.3.8:0 for acl2')\n\n def test_x3_config_from_ldp_job(self):\n\n acl1 = AccessList(name='acl1')\n acl2 = AccessList(name='acl2')\n acl3 = AccessList(name='acl3')\n acl4 = AccessList(name='acl4')\n acl5 = AccessList(name='acl5')\n acl6 = AccessList(name='acl6')\n acl7 = AccessList(name='acl7')\n acl8 = AccessList(name='acl8')\n acl9 = AccessList(name='acl9')\n acl11 = AccessList(name='acl11')\n acl22 = AccessList(name='acl22')\n acl33 = AccessList(name='acl33')\n acl44 = AccessList(name='acl44')\n acl1111 = AccessList(name='acl1111')\n acl2222 = AccessList(name='acl2222')\n acl3333 = AccessList(name='acl3333')\n acl4444 = AccessList(name='acl4444')\n\n ldp = Ldp()\n self.link.add_feature(ldp)\n self.link2.add_feature(ldp)\n self.link3.add_feature(ldp)\n self.link4.add_feature(ldp)\n\n ldp.device_attr['PE1'].router_id = 
'1.1.1.1'\n ldp.device_attr['PE2'].router_id = '2.2.2.2'\n ldp.nsr = True\n ldp.gr = True\n ldp.session_protection = True\n\n # ==Test LDP GR config from LDP Tier1 job==\n out = ldp.build_config(apply=False)\n if 1:\n self.assertRegex(str(out['PE1']), 'router-id 1.1.1.1')\n self.assertRegex(str(out['PE1']), 'graceful-restart')\n self.assertRegex(str(out['PE1']), 'session protection')\n self.assertRegex(str(out['PE1']), 'nsr')\n self.assertRegex(str(out['PE1']), 'address-family ipv4')\n self.assertRegex(str(out['PE2']), 'address-family ipv4')\n\n for intf_obj in ldp.device_attr['PE1'].interfaces:\n self.assertRegex(str(out['PE1']), '(?s)interface {name}.*address-family ipv4'.\n format(name=intf_obj.name))\n\n for intf_obj in ldp.device_attr['PE2'].interfaces:\n self.assertRegex(str(out['PE2']), '(?s)interface {name}.*address-family ipv4'.\n format(name=intf_obj.name))\n\n # ==Test LDP GR unconfig from LDP Tier1 job==\n out = ldp.build_unconfig(apply=False)\n\nif __name__ == '__main__':\n unittest.main()\n\n", "id": "3596534", "language": "Python", "matching_score": 4.702584743499756, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/ldp/tests/test_ldp.py" }, { "content": "\n# Table of contents:\n# class Ldp:\n# class DeviceAttributes:\n# def build_config/build_unconfig:\n# class VrfAttributes:\n# def build_config/build_unconfig:\n# (class NeighborAttributes)\n# class AddressFamilyAttributes:\n# def build_config/build_unconfig:\n# (class NeighborAttributes)\n# class InterfaceAttributes:\n# def build_config/build_unconfig:\n# class AddressFamilyAttributes:\n# def build_config/build_unconfig:\n\nfrom abc import ABC\nimport warnings\nimport re\n\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning, AttributesHelper\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.config import CliConfig\n\nfrom genie.libs.conf.address_family import AddressFamily\nfrom genie.libs.conf.base import PasswordType\nfrom genie.libs.conf.vrf import VrfSubAttributes\n\nclass Ldp(ABC):\n\n class DeviceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # iosxr: mpls ldp (config-ldp)\n with configurations.submode_context('mpls ldp'):\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n # iosxr: mpls ldp / capabilities cisco ios-xr disable\n if attributes.value('capabilities_cisco_iosxr') is False:\n configurations.append_line('capabilities cisco ios-xr disable')\n\n # iosxr: mpls ldp / default-vrf implicit-ipv4 disable\n if attributes.value('default_vrf_impl_ipv4') is False:\n configurations.append_line('default-vrf implicit-ipv4 disable')\n\n # iosxr: mpls ldp / discovery (config-ldp-disc)\n with configurations.submode_context('discovery', cancel_empty=True):\n\n # iosxr: mpls ldp / discovery / ds-tlv disable\n if attributes.value('ds_tlv') is False:\n configurations.append_line('ds-tlv disable')\n\n # iosxr: mpls ldp / discovery / hello holdtime 1\n configurations.append_line(attributes.format('hello holdtime {hello_holdtime}'))\n\n # iosxr: mpls ldp / discovery / hello interval 1\n configurations.append_line(attributes.format('hello interval {hello_interval}'))\n\n # iosxr: mpls ldp / discovery / instance-tlv disable\n if attributes.value('instance_tlv') is False:\n configurations.append_line('instance-tlv disable')\n\n # iosxr: mpls ldp / discovery / quick-start disable\n if 
attributes.value('quickstart') is False:\n configurations.append_line('quick-start disable')\n\n # iosxr: mpls ldp / discovery / targeted-hello holdtime 1\n configurations.append_line(attributes.format('targeted-hello holdtime {targeted_hello_holdtime}'))\n\n # iosxr: mpls ldp / discovery / targeted-hello interval 1\n configurations.append_line(attributes.format('targeted-hello interval {targeted_hello_interval}'))\n\n # iosxr: mpls ldp / entropy-label\n if attributes.value('entropy_label'):\n configurations.append_line('entropy-label')\n\n # iosxr: mpls ldp / graceful-restart\n if attributes.value('gr'):\n configurations.append_line('graceful-restart')\n\n # iosxr: mpls ldp / graceful-restart forwarding-state-holdtime 60\n configurations.append_line(attributes.format('graceful-restart forwarding-state-holdtime {gr_fwdstate_holdtime}'))\n\n # iosxr: mpls ldp / graceful-restart reconnect-timeout 60\n configurations.append_line(attributes.format('graceful-restart reconnect-timeout {gr_reconnect_timeout}'))\n\n # iosxr: mpls ldp / igp sync delay on-proc-restart 60\n configurations.append_line(attributes.format('igp sync delay on-proc-restart {igp_sync_delay_on_proc_restart}'))\n\n # iosxr: mpls ldp / igp sync delay on-session-up 5\n if attributes.value('igp_sync_delay_on_session_up') is False:\n pass\n else:\n configurations.append_line(attributes.format('igp sync delay on-session-up {igp_sync_delay_on_session_up}'))\n\n # iosxr: mpls ldp / nsr\n if attributes.value('nsr'):\n configurations.append_line('nsr')\n\n # iosxr: mpls ldp / session backoff 100 200\n configurations.append_line(attributes.format('session backoff {session_backoff_init} {session_backoff_max}'))\n\n # iosxr: mpls ldp / session holdtime 15\n configurations.append_line(attributes.format('session holdtime {session_holdtime}'))\n\n # iosxr: mpls ldp / session protection\n # iosxr: mpls ldp / session protection duration 30\n # iosxr: mpls ldp / session protection duration infinite\n # iosxr: mpls ldp / session protection for someword\n # iosxr: mpls ldp / session protection for someword duration 30\n # iosxr: mpls ldp / session protection for someword duration infinite\n if attributes.value('session_protection') or attributes.value('session_protection_for_acl'):\n cfg = 'session protection'\n cfg += attributes.format(' for {session_protection_for_acl.name}', force=True)\n if self.session_protection_dur == float('inf'):\n cfg += ' duration infinite'\n else:\n cfg += attributes.format(' duration {session_protection_dur}', force=True)\n configurations.append_line(cfg)\n\n # iosxr: mpls ldp / signalling dscp <0-63>\n configurations.append_line(attributes.format('signalling dscp {signalling_dscp}'))\n\n # iosxr: mpls ldp / log (config-ldp-log)\n with configurations.submode_context('log', cancel_empty=True):\n\n # iosxr: mpls ldp / log / graceful-restart\n if attributes.value('log_gr'):\n configurations.append_line('graceful-restart')\n\n # iosxr: mpls ldp / log / hello-adjacency\n if attributes.value('log_hello_adj'):\n configurations.append_line('hello-adjacency')\n\n # iosxr: mpls ldp / log / neighbor\n if attributes.value('log_neighbor'):\n configurations.append_line('neighbor')\n\n # iosxr: mpls ldp / log / nsr\n if attributes.value('log_nsr'):\n configurations.append_line('nsr')\n\n # iosxr: mpls ldp / log / session-protection\n if attributes.value('log_sess_prot'):\n configurations.append_line('session-protection')\n\n # iosxr: mpls ldp / ltrace-buffer multiplier 1\n configurations.append_line(attributes.format('ltrace-buffer 
multiplier {ltrace_buffer_multiplier}'))\n\n for sub, attributes2 in attributes.mapping_values('vrf_attr', keys=self.vrfs, sort=True):\n configurations.append_block(sub.build_config(apply=False, attributes=attributes2, unconfig=unconfig))\n\n if apply:\n if configurations:\n self.device.configure(str(configurations), fail_invalid=True)\n else:\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations, fail_invalid=True)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n class VrfAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # iosxr: mpls ldp / vrf someword (config-ldp-vrf)\n with configurations.submode_context(\n None if self.vrf_name == 'default' else attributes.format('vrf {vrf_name}', force=True)):\n if self.vrf_name != 'default' and unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n # iosxr: mpls ldp / [vrf someword] / router-id 1.2.3.4\n configurations.append_line(attributes.format('router-id {router_id}'))\n\n # iosxr: mpls ldp / [vrf someword] / session downstream-on-demand with someword\n configurations.append_line(attributes.format('session downstream-on-demand with {session_dod_acl.name}'))\n\n # iosxr: mpls ldp / [vrf someword] / graceful-restart helper-peer maintain-on-local-reset for someword\n configurations.append_line(attributes.format('graceful-restart helper-peer maintain-on-local-reset for {gr_maintain_acl.name}'))\n\n for sub, attributes2 in attributes.mapping_values('address_family_attr', keys=self.address_families, sort=True):\n configurations.append_block(sub.build_config(apply=False, attributes=attributes2, unconfig=unconfig))\n\n for sub, attributes2 in attributes.mapping_values('interface_attr', keys=self.interfaces, sort=True):\n configurations.append_block(sub.build_config(apply=False, attributes=attributes2, unconfig=unconfig))\n\n # iosxr: mpls ldp / neighbor (config-ldp-nbr)\n # iosxr: mpls ldp / vrf someword / neighbor (config-ldp-vrf-nbr)\n with configurations.submode_context('neighbor', cancel_empty=True):\n\n if self.vrf_name == 'default':\n # iosxr: mpls ldp / neighbor / dual-stack tlv-compliance\n if attributes.value('dualstack_tlv_compliance'):\n configurations.append_line('dual-stack tlv-compliance')\n\n if self.vrf_name == 'default':\n # iosxr: mpls ldp / neighbor / dual-stack transport-connection max-wait <0-60>\n configurations.append_line(attributes.format('dual-stack transport-connection max-wait {dualstack_transport_max_wait}'))\n\n if self.vrf_name == 'default':\n # iosxr: mpls ldp / neighbor / dual-stack transport-connection prefer ipv4\n if attributes.value('dualstack_transport_prefer_ipv4'):\n configurations.append_line('dual-stack transport-connection prefer ipv4')\n\n # iosxr: mpls ldp / [vrf someword] / neighbor / password clear some clear password\n # iosxr: mpls ldp / [vrf someword] / neighbor / password encrypted <PASSWORD>\n if self.password_type is PasswordType.clear:\n configurations.append_line(attributes.format('password clear {password}'))\n elif self.password_type is PasswordType.encrypted:\n configurations.append_line(attributes.format('password encrypted {password}'))\n\n for neighbor, neighbor_attributes in attributes.mapping_values('neighbor_attr', keys=self.neighbors, sort=True):\n\n # 
iosxr: mpls ldp / [vrf someword] / neighbor / 1.2.3.4:0 password disable\n if neighbor_attributes.value('disable_password'):\n configurations.append_line(neighbor_attributes.format('{neighbor} password disable', force_neighbor=True))\n else:\n # iosxr: mpls ldp / [vrf someword] / neighbor / 1.2.3.4:0 password clear some clear password\n # iosxr: mpls ldp / [vrf someword] / neighbor / 1.2.3.4:0 password encrypted <PASSWORD>\n if neighbor.password_type is PasswordType.clear:\n configurations.append_line(neighbor_attributes.format('{neighbor} password clear {password}', force_neighbor=True, inherited=False))\n elif neighbor.password_type is PasswordType.encrypted:\n configurations.append_line(neighbor_attributes.format('{neighbor} password encrypted {password}', force_neighbor=True, inherited=False))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n class AddressFamilyAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # iosxr: mpls ldp / address-family ipv4 (config-ldp-af)\n # iosxr: mpls ldp / address-family ipv6 (config-ldp-af)\n # iosxr: mpls ldp / vrf someword / address-family ipv4 (config-ldp-vrf-af)\n with configurations.submode_context(attributes.format('address-family {address_family.value}', force=True)):\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n if self.vrf_name == 'default':\n if self.targeted_hello_accept_from_acl:\n # iosxr: mpls ldp / address-family ipv4|ipv6 / discovery targeted-hello accept from someword\n configurations.append_line(attributes.format('discovery targeted-hello accept from {targeted_hello_accept_from_acl.name}'))\n else:\n # iosxr: mpls ldp / address-family ipv4|ipv6 / discovery targeted-hello accept\n if attributes.value('targeted_hello_accept'):\n configurations.append_line('discovery targeted-hello accept')\n\n if self.address_family is AddressFamily.ipv4 or self.vrf_name == 'default':\n # iosxr: mpls ldp / [vrf someword] / address-family ipv4 / discovery transport-address 1.2.3.4\n # iosxr: mpls ldp / address-family ipv6 / discovery transport-address fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b\n v = attributes.value('transport_address')\n if v is not None:\n if v == 'interface':\n pass\n else:\n configurations.append_line(attributes.format('discovery transport-address {transport_address}'))\n\n if self.address_family is AddressFamily.ipv4 or self.vrf_name == 'default':\n # iosxr: mpls ldp / address-family ipv4 / label (config-ldp-af-lbl)\n # iosxr: mpls ldp / vrf someword / address-family ipv4 / label (config-ldp-vrf-af-lbl)\n # iosxr: mpls ldp / address-family ipv6 / label (config-ldp-af-lbl)\n with configurations.submode_context('label', cancel_empty=True):\n\n # iosxr: mpls ldp / address-family ipv4 / label / local (config-ldp-af-lbl-lcl)\n # iosxr: mpls ldp / vrf someword / address-family ipv4 / label / local (config-ldp-vrf-af-lbl-lcl)\n # iosxr: mpls ldp / address-family ipv6 / label / local (config-ldp-af-lbl-lcl)\n with configurations.submode_context('local', cancel_empty=True):\n\n # iosxr: mpls ldp / address-family ipv4 / label / local / advertise (config-ldp-af-lbl-lcl-advt)\n # iosxr: mpls ldp / vrf someword / address-family ipv4 / label / local / advertise (config-ldp-vrf-af-lbl-lcl-advt)\n # 
iosxr: mpls ldp / address-family ipv6 / label / local / advertise (config-ldp-af-lbl-lcl-advt)\n with configurations.submode_context('advertise', cancel_empty=True):\n\n # iosxr: mpls ldp / [vrf someword] / address-family ipv4 / label / local / advertise / disable\n # iosxr: mpls ldp / address-family ipv6 / label / local / advertise / disable\n if attributes.value('advertise') is False:\n configurations.append_line('disable')\n\n # iosxr: mpls ldp / [vrf someword] / address-family ipv4 / label / local / advertise / explicit-null\n # iosxr: mpls ldp / [vrf someword] / address-family ipv4 / label / local / advertise / explicit-null for someword\n # iosxr: mpls ldp / [vrf someword] / address-family ipv4 / label / local / advertise / explicit-null for someword to someword2\n # iosxr: mpls ldp / [vrf someword] / address-family ipv4 / label / local / advertise / explicit-null to someword\n # iosxr: mpls ldp / address-family ipv6 / label / local / advertise / explicit-null\n # iosxr: mpls ldp / address-family ipv6 / label / local / advertise / explicit-null for someword\n # iosxr: mpls ldp / address-family ipv6 / label / local / advertise / explicit-null for someword to someword2\n # iosxr: mpls ldp / address-family ipv6 / label / local / advertise / explicit-null to someword\n if self.advertise_expnull_for_acl and self.advertise_expnull_to_acl:\n configurations.append_line(attributes.format('explicit-null for {advertise_expnull_for_acl.name} to {advertise_expnull_to_acl.name}'))\n elif self.advertise_expnull_to_acl:\n configurations.append_line(attributes.format('explicit-null to {advertise_expnull_to_acl.name}'))\n elif self.advertise_expnull_for_acl:\n configurations.append_line(attributes.format('explicit-null for {advertise_expnull_for_acl.name}'))\n elif attributes.value('advertise_expnull'):\n configurations.append_line('explicit-null')\n\n # iosxr: mpls ldp / address-family ipv4|ipv6 / label / local / advertise / for someword\n # iosxr: mpls ldp / address-family ipv4|ipv6 / label / local / advertise / for someword to someword2\n if self.advertise_for_acl and self.advertise_to_acl:\n configurations.append_line(attributes.format('for {advertise_for_acl.name} to {advertise_to_acl.name}'))\n else:\n configurations.append_line(attributes.format('for {advertise_for_acl.name}'))\n\n # iosxr: mpls ldp / [vrf someword] / address-family ipv4 / label / local / advertise / interface GigabitEthernet0/0/0/0\n # iosxr: mpls ldp / address-family ipv6 / label / local / advertise / interface GigabitEthernet0/0/0/0\n for v, attributes2 in attributes.sequence_values('advertise_interfaces'):\n configurations.append_line('interface {}'.format(v.name))\n\n for neighbor, neighbor_attributes in attributes.mapping_values('neighbor_attr', keys=self.neighbors, sort=True):\n\n # iosxr: mpls ldp / [vrf someword] / address-family ipv4 / label / local / advertise / to 1.2.3.4:0 for someword\n # iosxr: mpls ldp / address-family ipv6 / label / local / advertise / to 1.2.3.4:0 for someword\n configurations.append_line(neighbor_attributes.format('to {neighbor} for {advertise_for_acl.name}', force_neighbor=True, inherited=False))\n\n # iosxr: mpls ldp / [vrf someword] / address-family ipv4 / label / local / allocate for someword\n # iosxr: mpls ldp / address-family ipv6 / label / local / allocate for someword\n configurations.append_line(attributes.format('allocate for {allocate_for_acl.name}'))\n\n # iosxr: mpls ldp / [vrf someword] / address-family ipv4 / label / local / allocate for host-routes\n # iosxr: mpls ldp / 
address-family ipv6 / label / local / allocate for host-routes\n if attributes.value('allocate_for_host_routes'):\n configurations.append_line('allocate for host-routes')\n\n # iosxr: mpls ldp / [vrf someword] / address-family ipv4 / label / local / default-route\n # iosxr: mpls ldp / address-family ipv6 / label / local / default-route\n if attributes.value('default_route'):\n configurations.append_line('default-route')\n\n # iosxr: mpls ldp / [vrf someword] / address-family ipv4 / label / local / implicit-null-override for someword\n # iosxr: mpls ldp / address-family ipv6 / label / local / implicit-null-override for someword\n configurations.append_line(attributes.format('implicit-null-override for {impnull_override_for_acl.name}'))\n\n # iosxr: mpls ldp / address-family ipv4 / label / remote (config-ldp-af-lbl-rmt)\n # iosxr: mpls ldp / vrf someword / address-family ipv4 / label / remote (config-ldp-vrf-af-lbl-rmt)\n # iosxr: mpls ldp / address-family ipv6 / label / remote (config-ldp-af-lbl-rmt)\n with configurations.submode_context('remote', cancel_empty=True):\n\n # iosxr: mpls ldp / address-family ipv4|ipv6 / label / remote / accept (config-ldp-af-lbl-rmt-acpt)\n # iosxr: mpls ldp / vrf someword / address-family ipv4 / label / remote / accept (config-ldp-vrf-af-lbl-rmt-acpt)\n with configurations.submode_context('accept', cancel_empty=True):\n\n for neighbor, neighbor_attributes in attributes.mapping_values('neighbor_attr', keys=self.neighbors, sort=True):\n\n # iosxr: mpls ldp / [vrf someword] / address-family ipv4 / label / remote / accept / from 1.2.3.4:0 for someword\n # iosxr: mpls ldp / address-family ipv6 / label / remote / accept / from 1.2.3.4:0 for someword\n configurations.append_line(neighbor_attributes.format('from {neighbor} for {accept_for_acl.name}', force_neighbor=True))\n\n for neighbor, neighbor_attributes in attributes.mapping_values('neighbor_attr', keys=self.neighbors, sort=True):\n\n if self.vrf_name == 'default':\n # iosxr: mpls ldp / address-family ipv4 / neighbor 1.2.3.4 targeted\n # iosxr: mpls ldp / address-family ipv6 / neighbor fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b targeted\n if neighbor_attributes.value('targeted'):\n configurations.append_line(neighbor_attributes.format('neighbor {neighbor.ip} targeted', force_neighbor=True))\n\n if self.vrf_name == 'default':\n # iosxr: mpls ldp / address-family ipv4|ipv6 / redistribute (config-ldp-af-redist)\n with configurations.submode_context('redistribute', cancel_empty=True):\n\n # iosxr: mpls ldp / address-family ipv4|ipv6 / redistribute / bgp (config-ldp-af-redist-bgp)\n with configurations.submode_context(\n 'bgp', cancel_empty=attributes.value('redist_bgp') is not True):\n\n # iosxr: mpls ldp / address-family ipv4|ipv6 / redistribute / bgp / advertise-to someword\n configurations.append_line(attributes.format('advertise-to {redist_bgp_advto_acl.name}'))\n\n # iosxr: mpls ldp / address-family ipv4|ipv6 / redistribute / bgp / as 1\n # iosxr: mpls ldp / address-family ipv4|ipv6 / redistribute / bgp / as 100.200\n # iosxr: mpls ldp / address-family ipv4|ipv6 / redistribute / bgp / as 65536\n configurations.append_line(attributes.format('as {redist_bgp_as}'))\n\n if self.vrf_name == 'default':\n # iosxr: mpls ldp / address-family ipv4|ipv6 / traffic-eng (config-ldp-af-te)\n with configurations.submode_context('traffic-eng', cancel_empty=True):\n\n # iosxr: mpls ldp / address-family ipv4|ipv6 / traffic-eng / auto-tunnel mesh (config-ldp-af-te-mesh)\n with configurations.submode_context('auto-tunnel mesh', 
cancel_empty=True):\n\n # iosxr: mpls ldp / address-family ipv4|ipv6 / traffic-eng / auto-tunnel mesh / group <0-4294967295>\n # iosxr: mpls ldp / address-family ipv4|ipv6 / traffic-eng / auto-tunnel mesh / group all\n v = attributes.value('te_autotunnel_mesh_group_id')\n if v is not None:\n if v == 'all':\n configurations.append_line('group all')\n else:\n configurations.append_line(attributes.format('group {te_autotunnel_mesh_group_id}'))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n class InterfaceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # iosxr: mpls ldp / interface GigabitEthernet0/0/0/0 (config-ldp-if)\n # iosxr: mpls ldp / vrf someword / interface GigabitEthernet0/0/0/0 (config-ldp-vrf-if)\n with configurations.submode_context(attributes.format('interface {interface_name}', force=True)):\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n # iosxr: mpls ldp / interface GigabitEthernet0/0/0/0 / discovery hello dual-stack-tlv ipv4\n # iosxr: mpls ldp / interface GigabitEthernet0/0/0/0 / discovery hello dual-stack-tlv ipv6\n configurations.append_line(attributes.format('discovery hello dual-stack-tlv {disc_hello_dualstack_tlv.value}'))\n\n # iosxr: mpls ldp / interface GigabitEthernet0/0/0/0 / discovery hello holdtime 1\n configurations.append_line(attributes.format('discovery hello holdtime {hello_holdtime}', inherited=False))\n\n # iosxr: mpls ldp / interface GigabitEthernet0/0/0/0 / discovery hello interval 1\n configurations.append_line(attributes.format('discovery hello interval {hello_interval}', inherited=False))\n\n # iosxr: mpls ldp / interface GigabitEthernet0/0/0/0 / discovery quick-start disable\n if attributes.value('quickstart', inherited=False) is False:\n configurations.append_line('discovery quick-start disable')\n\n # iosxr: mpls ldp / interface GigabitEthernet0/0/0/0 / igp sync delay on-session-up 5\n # iosxr: mpls ldp / interface GigabitEthernet0/0/0/0 / igp sync delay on-session-up disable\n if attributes.value('igp_sync_delay_on_session_up') is False:\n configurations.append_line('igp sync delay on-session-up disable')\n else:\n configurations.append_line(attributes.format('igp sync delay on-session-up {igp_sync_delay_on_session_up}', inherited=False))\n\n for sub, attributes2 in attributes.mapping_values('address_family_attr', keys=self.address_families, sort=True):\n configurations.append_block(sub.build_config(apply=False, attributes=attributes2, unconfig=unconfig))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n class AddressFamilyAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # iosxr: mpls ldp / [vrf someword] / interface GigabitEthernet0/0/0/0 / address-family ipv4 (config-ldp-vrf-if-af)\n # iosxr: mpls ldp / interface GigabitEthernet0/0/0/0 / address-family ipv6 (config-ldp-if-af)\n with configurations.submode_context(attributes.format('address-family {address_family.value}', 
force=True)):\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n # iosxr: mpls ldp / [vrf someword] / interface GigabitEthernet0/0/0/0 / address-family ipv4 / discovery transport-address 1.2.3.4\n # iosxr: mpls ldp / interface GigabitEthernet0/0/0/0 / address-family ipv6 / discovery transport-address fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b\n # iosxr: mpls ldp / [vrf someword] / interface GigabitEthernet0/0/0/0 / address-family ipv4 / discovery transport-address interface\n # iosxr: mpls ldp / interface GigabitEthernet0/0/0/0 / address-family ipv6 / discovery transport-address interface\n v = attributes.value('transport_address')\n if v is not None:\n if v == 'interface':\n configurations.append_line('discovery transport-address interface')\n else:\n configurations.append_line(attributes.format('discovery transport-address {transport_address}', inherited=False))\n\n # iosxr: mpls ldp / interface GigabitEthernet0/0/0/0 / address-family ipv4|ipv6 / igp auto-config disable\n if attributes.value('igp_autoconfig') is False:\n configurations.append_line('igp auto-config disable')\n\n # iosxr: mpls ldp / interface GigabitEthernet0/0/0/0 / address-family ipv4|ipv6 / mldp disable\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n# iosxr: mpls ldp / mldp (config-ldp-mldp)\n# iosxr: mpls ldp / mldp / logging internal\n# iosxr: mpls ldp / mldp / logging notifications\n# iosxr: mpls ldp / mldp / vrf someword (config-ldp-mldp-vrf)\n# iosxr: mpls ldp / mldp / address-family ipv4 (config-ldp-mldp-af)\n# iosxr: mpls ldp / mldp / vrf someword / address-family ipv4 (config-ldp-mldp-vrf-af)\n# iosxr: mpls ldp / mldp / [vrf someword] / address-family ipv4 / carrier-supporting-carrier\n# iosxr: mpls ldp / mldp / [vrf someword] / address-family ipv4 / forwarding recursive\n# iosxr: mpls ldp / mldp / [vrf someword] / address-family ipv4 / forwarding recursive route-policy someword\n# iosxr: mpls ldp / mldp / [vrf someword] / address-family ipv4 / make-before-break\n# iosxr: mpls ldp / mldp / [vrf someword] / address-family ipv4 / make-before-break delay <0-600>\n# iosxr: mpls ldp / mldp / [vrf someword] / address-family ipv4 / make-before-break delay <0-600> <0-60>\n# iosxr: mpls ldp / mldp / [vrf someword] / address-family ipv4 / make-before-break route-policy someword\n# iosxr: mpls ldp / mldp / [vrf someword] / address-family ipv4 / mofrr\n# iosxr: mpls ldp / mldp / [vrf someword] / address-family ipv4 / mofrr route-policy someword\n# iosxr: mpls ldp / mldp / [vrf someword] / address-family ipv4 / neighbor 1.2.3.4 route-policy someword in\n# iosxr: mpls ldp / mldp / [vrf someword] / address-family ipv4 / neighbor route-policy someword in\n# iosxr: mpls ldp / mldp / [vrf someword] / address-family ipv4 / recursive-fec\n# iosxr: mpls ldp / mldp / [vrf someword] / address-family ipv4 / recursive-fec route-policy someword\n# iosxr: mpls ldp / mldp / [vrf someword] / address-family ipv4 / rib unicast-always\n# iosxr: mpls ldp / mldp / [vrf someword] / address-family ipv4 / static mp2mp 1.2.3.4 1\n# iosxr: mpls ldp / mldp / [vrf someword] / address-family ipv4 / static p2mp 1.2.3.4 1\n# iosxr: mpls ldp / interface GigabitEthernet0/0/0/0 / address-family ipv4|ipv6 / mldp disable\n\n", "id": "5002920", "language": "Python", "matching_score": 5.146592617034912, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/ldp/iosxr/ldp.py" }, { 
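A minimal usage sketch for the Ldp conf class whose IOS-XR CLI builder ends above (the generic class definition continues in the next record): attach the feature to a device, set a few attributes, and render the CLI without applying it. The testbed file name, the device name 'PE1', and the values assigned are illustrative assumptions, not part of this source.

# Hypothetical driver script (not part of the package); assumes a loaded
# Genie testbed containing an IOS-XR device named 'PE1'.
from genie.conf import Genie
from genie.libs.conf.ldp import Ldp

testbed = Genie.init('testbed.yaml')        # assumed testbed file
dev = testbed.devices['PE1']                # assumed device name

ldp = Ldp()
dev.add_feature(ldp)                        # register the feature on the device
ldp.device_attr[dev].router_id = '10.0.0.1' # delegates to vrf_attr[None].router_id
ldp.device_attr[dev].session_holdtime = 30

# apply=False returns the per-device CliConfig mapping instead of pushing it
cfgs = ldp.build_config(apply=False)
print(cfgs)                                 # one rendered config per device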
"content": "\n__all__ = (\n 'Ldp',\n)\n\nimport re\n\nfrom genie.utils.cisco_collections import typedset\n\nfrom genie.decorator import managedattribute\nfrom genie.conf.base import DeviceFeature, LinkFeature, Interface\nimport genie.conf.base.attributes\nfrom genie.conf.base.attributes import SubAttributes, SubAttributesDict, AttributesHelper\n\nfrom genie.libs.conf.base import IPv4Address, IPv6Address\nfrom genie.libs.conf.base.neighbor import IPv4LsrNeighbor, IPv6Neighbor, IPLsrNeighborSubAttributes\nfrom genie.libs.conf.address_family import AddressFamily, AddressFamilySubAttributes\nfrom genie.libs.conf.vrf import Vrf, VrfSubAttributes\nfrom genie.libs.conf.base import PasswordType\nfrom genie.libs.conf.access_list import AccessList\nfrom genie.libs.conf.route_policy import RoutePolicy\n\ndef _ldp_neighbor(value):\n try:\n return IPv4LsrNeighbor(value)\n except (TypeError, ValueError):\n pass\n try:\n return IPv6Neighbor(value)\n except (TypeError, ValueError):\n pass\n raise ValueError(value)\n\n\nclass Ldp(DeviceFeature, LinkFeature):\n\n @property\n def interfaces(self):\n interfaces = set()\n interfaces.update(*[link.interfaces for link in self.links])\n return frozenset(interfaces)\n\n @property\n def vrfs(self):\n return \\\n self.force_vrfs | \\\n {intf.vrf for intf in self.interfaces}\n\n force_vrfs = managedattribute(\n name='force_vrfs',\n read_only=True,\n finit=set,\n gettype=frozenset)\n # XXXJST TODO force_vrfs needs to also be accessible per-device. Being read_only, that can't happen\n\n def add_force_vrf(self, vrf):\n assert vrf is None or isinstance(vrf, Vrf)\n self.force_vrfs # init!\n self._force_vrfs.add(vrf)\n\n def remove_force_vrf(self, vrf):\n assert vrf is None or isinstance(vrf, Vrf)\n self.force_vrfs # init!\n self._force_vrfs.remove(vrf)\n\n address_families = managedattribute(\n name='address_families',\n finit=typedset(AddressFamily, {AddressFamily.ipv4}).copy,\n type=typedset(AddressFamily)._from_iterable)\n\n # Top level configs\n\n shutdown = managedattribute(\n name='shutdown',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n default_route = managedattribute(\n name='default_route',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n capabilities_cisco_iosxr = managedattribute(\n name='capabilities_cisco_iosxr',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n default_vrf_impl_ipv4 = managedattribute(\n name='default_vrf_impl_ipv4',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n ds_tlv = managedattribute(\n name='ds_tlv',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n hello_holdtime = managedattribute(\n name='hello_holdtime',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n targetted_hello_holdtime = managedattribute(\n name='targetted_hello_holdtime',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n hello_interval = managedattribute(\n name='hello_interval',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n targetted_hello_interval = managedattribute(\n name='targetted_hello_interval',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n instance_tlv = managedattribute(\n name='instance_tlv',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n quickstart = managedattribute(\n name='quickstart',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n targeted_hello_holdtime = managedattribute(\n name='targeted_hello_holdtime',\n 
default=None,\n type=(None, managedattribute.test_istype(int)))\n\n targeted_hello_interval = managedattribute(\n name='targeted_hello_interval',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n entropy_label = managedattribute(\n name='entropy_label',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n gr = managedattribute(\n name='gr',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n gr_fwdstate_holdtime = managedattribute(\n name='gr_fwdstate_holdtime',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n gr_max_recovery = managedattribute(\n name='gr_max_recovery',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n gr_neighbor_liveness = managedattribute(\n name='gr_neighbor_liveness',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n gr_reconnect_timeout = managedattribute(\n name='gr_reconnect_timeout',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n igp_sync = managedattribute(\n name='igp_sync',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n igp_sync_delay_time = managedattribute(\n name='igp_sync_delay_time',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n igp_sync_delay_on_proc_restart = managedattribute(\n name='igp_sync_delay_on_proc_restart',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n igp_sync_delay_on_session_up = managedattribute(\n name='igp_sync_delay_on_session_up',\n default=None,\n type=(None,\n managedattribute.test_istype(int),\n managedattribute.test_in((\n False,\n ))))\n\n log_gr = managedattribute(\n name='log_gr',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n log_hello_adj = managedattribute(\n name='log_hello_adj',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n log_neighbor = managedattribute(\n name='log_neighbor',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n log_nsr = managedattribute(\n name='log_nsr',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n log_sess_prot = managedattribute(\n name='log_sess_prot',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n ltrace_buffer_multiplier = managedattribute(\n name='ltrace_buffer_multiplier',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n dualstack_tlv_compliance = managedattribute(\n name='dualstack_tlv_compliance',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n dualstack_transport_max_wait = managedattribute(\n name='dualstack_transport_max_wait',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n dualstack_transport_prefer_ipv4 = managedattribute(\n name='dualstack_transport_prefer_ipv4',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n password_type = managedattribute(\n name='password_type',\n default=None,\n type=(None, PasswordType))\n\n password = managedattribute(\n name='password',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n password_for_acl = managedattribute(\n name='password_for_acl',\n default=None,\n type=(None, managedattribute.test_isinstance(AccessList)))\n\n disable_password = managedattribute(\n name='disable_password',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n nsr = managedattribute(\n name='nsr',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n session_backoff_init = managedattribute(\n 
name='session_backoff_init',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n session_backoff_max = managedattribute(\n name='session_backoff_max',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n session_holdtime = managedattribute(\n name='session_holdtime',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n session_protection = managedattribute(\n name='session_protection',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n session_protection_for_acl = managedattribute(\n name='session_protection_for_acl',\n default=None,\n type=(None, managedattribute.test_isinstance(AccessList)))\n\n session_protection_dur = managedattribute(\n name='session_protection_dur',\n default=None,\n type=(None,\n managedattribute.test_istype(int),\n managedattribute.test_in((\n float('inf'),\n ))))\n\n signalling_dscp = managedattribute(\n name='signalling_dscp',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n session_dod_with_acl = managedattribute(\n name='session_dod_with_acl',\n default=None,\n type=(None, managedattribute.test_isinstance(AccessList)))\n\n gr_maintain_acl = managedattribute(\n name='gr_maintain_acl',\n default=None,\n type=(None, managedattribute.test_isinstance(AccessList)))\n\n disc_hello_dualstack_tlv = managedattribute(\n name='disc_hello_dualstack_tlv',\n default=None,\n type=(None, managedattribute.test_in((\n AddressFamily.ipv4,\n AddressFamily.ipv6,\n ))))\n\n igp_autoconfig = managedattribute(\n name='igp_autoconfig',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n transport_address = managedattribute(\n name='transport_address',\n default=None,\n type=(None,\n managedattribute.test_in((\n 'interface',\n )),\n IPv4Address,\n IPv6Address))\n\n targeted = managedattribute(\n name='targeted',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n advertise = managedattribute(\n name='advertise',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n advertise_expnull = managedattribute(\n name='advertise_expnull',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n advertise_expnull_for_acl = managedattribute(\n name='advertise_expnull_for_acl',\n default=None,\n type=(None, managedattribute.test_isinstance(AccessList)))\n\n advertise_expnull_to_acl = managedattribute(\n name='advertise_expnull_to_acl',\n default=None,\n type=(None, managedattribute.test_isinstance(AccessList)))\n\n advertise_interfaces = managedattribute(\n name='advertise_interfaces',\n finit=typedset(managedattribute.test_isinstance(Interface)).copy,\n type=typedset(managedattribute.test_isinstance(Interface))._from_iterable)\n\n allocate_for_acl = managedattribute(\n name='allocate_for_acl',\n default=None,\n type=(None, managedattribute.test_isinstance(AccessList)))\n\n allocate_for_host_routes = managedattribute(\n name='allocate_for_host_routes',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n default_route = managedattribute(\n name='default_route',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n impnull_override_for_acl = managedattribute(\n name='impnull_override_for_acl',\n default=None,\n type=(None, managedattribute.test_isinstance(AccessList)))\n\n targeted_hello_accept = managedattribute(\n name='targeted_hello_accept',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n targeted_hello_accept_from_acl = managedattribute(\n name='targeted_hello_accept_from_acl',\n 
default=None,\n        type=(None, managedattribute.test_isinstance(AccessList)))\n\n    redist_bgp = managedattribute(\n        name='redist_bgp',\n        default=None,\n        type=(None, managedattribute.test_istype(bool)))\n\n    redist_bgp_advto_acl = managedattribute(\n        name='redist_bgp_advto_acl',\n        default=None,\n        type=(None, managedattribute.test_isinstance(AccessList)))\n\n    redist_bgp_as = managedattribute(\n        name='redist_bgp_as',\n        default=None,\n        type=(None, managedattribute.test_istype(int)))\n\n    te_autotunnel_mesh_group_id = managedattribute(\n        name='te_autotunnel_mesh_group_id',\n        default=None,\n        type=(None,\n              managedattribute.test_in((\n                  'all',\n              )),\n              managedattribute.test_istype(int)))\n\n    advertise_for_acl = managedattribute(\n        name='advertise_for_acl',\n        default=None,\n        type=(None, managedattribute.test_isinstance(AccessList)))\n\n    advertise_to_acl = managedattribute(\n        name='advertise_to_acl',\n        default=None,\n        type=(None, managedattribute.test_isinstance(AccessList)))\n\n    accept_for_acl = managedattribute(\n        name='accept_for_acl',\n        default=None,\n        type=(None, managedattribute.test_isinstance(AccessList)))\n\n    session_dod_acl = managedattribute(\n        name='session_dod_acl',\n        default=None,\n        type=(None, managedattribute.test_isinstance(AccessList)))\n\n    class MldpAttributes(object):\n\n        enabled = managedattribute(\n            name='enabled',\n            default=False,\n            type=managedattribute.test_istype(bool))\n\n        csc = managedattribute(\n            name='csc',\n            default=None,\n            type=(None, managedattribute.test_istype(bool)))\n\n        forwarding_recursive = managedattribute(\n            name='forwarding_recursive',\n            default=None,\n            type=(None, managedattribute.test_istype(bool)))\n\n        forwarding_recursive_route_policy = managedattribute(\n            name='forwarding_recursive_route_policy',\n            default=None,\n            type=(None, managedattribute.test_isinstance(RoutePolicy)))\n\n        make_before_break = managedattribute(\n            name='make_before_break',\n            default=None,\n            type=(None, managedattribute.test_istype(bool)))\n\n        make_before_break_delay = managedattribute(\n            name='make_before_break_delay',\n            default=None,\n            type=(None, managedattribute.test_istype(int)))\n\n        make_before_break_delete_delay = managedattribute(\n            name='make_before_break_delete_delay',\n            default=None,\n            type=(None, managedattribute.test_istype(int)))\n\n        make_before_break_route_policy = managedattribute(\n            name='make_before_break_route_policy',\n            default=None,\n            type=(None, managedattribute.test_isinstance(RoutePolicy)))\n\n        mofrr = managedattribute(\n            name='mofrr',\n            default=None,\n            type=(None, managedattribute.test_istype(bool)))\n\n        mofrr_route_policy = managedattribute(\n            name='mofrr_route_policy',\n            default=None,\n            type=(None, managedattribute.test_isinstance(RoutePolicy)))\n\n        route_policy_in = managedattribute(\n            name='route_policy_in',\n            default=None,\n            type=(None, managedattribute.test_isinstance(RoutePolicy)))\n\n        recursive_fec = managedattribute(\n            name='recursive_fec',\n            default=None,\n            type=(None, managedattribute.test_istype(bool)))\n\n        recursive_fec_route_policy = managedattribute(\n            name='recursive_fec_route_policy',\n            default=None,\n            type=(None, managedattribute.test_isinstance(RoutePolicy)))\n\n        rib_unicast_always = managedattribute(\n            name='rib_unicast_always',\n            default=None,\n            type=(None, managedattribute.test_istype(bool)))\n\n        # TODO need support for multiple root_ip/num_lsps\n        mp2mp_static_root_ip = managedattribute(\n            name='mp2mp_static_root_ip',\n            default=None,\n            type=(None, IPv4Address))\n\n        mp2mp_static_num_lsps = managedattribute(\n            name='mp2mp_static_num_lsps',\n            default=None,\n            
type=(None, managedattribute.test_istype(int)))\n\n p2mp_static_root_ip = managedattribute(\n name='p2mp_static_root_ip',\n default=None,\n type=(None, IPv4Address))\n\n p2mp_static_num_lsps = managedattribute(\n name='p2mp_static_num_lsps',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n log_internal = managedattribute(\n name='log_internal',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n log_notifications = managedattribute(\n name='log_notifications',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n mldp = managedattribute(\n name='mldp',\n read_only=True,\n finit=MldpAttributes,\n doc=MldpAttributes.__doc__)\n\n class DeviceAttributes(genie.conf.base.attributes.DeviceSubAttributes):\n\n enabled_feature = managedattribute(\n name='enabled_feature',\n default=False,\n type=managedattribute.test_istype(bool),\n doc='''Argument to control 'feature ldp' CLI''')\n\n address_families = managedattribute(\n name='address_families',\n type=typedset(AddressFamily)._from_iterable)\n\n @address_families.defaulter\n def address_families(self):\n return frozenset(self.parent.address_families)\n\n advertise_interfaces = managedattribute(\n name='advertise_interfaces',\n type=typedset(managedattribute.test_isinstance(Interface))._from_iterable)\n\n @advertise_interfaces.defaulter\n def advertise_interfaces(self):\n device = self.device\n return frozenset(interface\n for interface in self.parent.advertise_interfaces\n if interface.device is device)\n\n @property\n def vrfs(self):\n return \\\n self.force_vrfs | \\\n {intf.vrf for intf in self.interfaces}\n\n @property\n def interfaces(self):\n device = self.device\n interfaces = set(self.parent.interfaces)\n #interfaces.update(*[link.interfaces for link in self.parent.links])\n interfaces = {intf for intf in interfaces if intf.device is device}\n return frozenset(interfaces)\n\n class MldpAttributes(SubAttributes):\n\n def __init__(self, _device_attr):\n self._device_attr = _device_attr\n super().__init__(\n # Ldp.mldp\n parent=_device_attr.parent.mldp)\n\n @property\n def testbed(self):\n return self._device_attr.testbed\n\n @property\n def device_name(self):\n return self._device_attr.device_name\n\n @property\n def device(self):\n return self._device_attr.device\n\n mldp = managedattribute(\n name='mldp',\n read_only=True,\n doc=MldpAttributes.__doc__)\n\n @mldp.initter\n def mldp(self):\n return self.MldpAttributes(_device_attr=self)\n\n class VrfAttributes(VrfSubAttributes):\n\n address_families = managedattribute(\n name='address_families',\n type=typedset(AddressFamily)._from_iterable)\n\n @address_families.defaulter\n def address_families(self):\n return frozenset(self.parent.address_families)\n\n advertise_interfaces = managedattribute(\n name='advertise_interfaces',\n type=typedset(managedattribute.test_isinstance(Interface))._from_iterable)\n\n @advertise_interfaces.defaulter\n def advertise_interfaces(self):\n return frozenset(self.parent.advertise_interfaces)\n\n # implicit: interface_attr = parent.interface_attr\n # implicit: interfaces = parent.interfaces\n\n router_id = managedattribute(\n name='router_id',\n default=None,\n type=(None, IPv4Address,\\\n managedattribute.test_isinstance(Interface)))\n\n class NeighborAttributes(IPLsrNeighborSubAttributes):\n\n def __init__(self, **kwargs):\n super().__init__(**kwargs)\n\n neighbor_attr = managedattribute(\n name='neighbor_attr',\n read_only=True,\n doc=NeighborAttributes.__doc__)\n\n @neighbor_attr.initter\n def 
neighbor_attr(self):\n return SubAttributesDict(self.NeighborAttributes, parent=self)\n\n neighbors = managedattribute(\n name='neighbors',\n finit=typedset(_ldp_neighbor).copy,\n type=typedset(_ldp_neighbor)._from_iterable)\n\n class AddressFamilyAttributes(AddressFamilySubAttributes):\n\n allowed_keys = (AddressFamily.ipv4, AddressFamily.ipv6)\n\n advertise_interfaces = managedattribute(\n name='advertise_interfaces',\n type=typedset(managedattribute.test_isinstance(Interface))._from_iterable)\n\n @advertise_interfaces.defaulter\n def advertise_interfaces(self):\n return frozenset(self.parent.advertise_interfaces)\n\n class NeighborAttributes(IPLsrNeighborSubAttributes):\n\n def __init__(self, **kwargs):\n super().__init__(**kwargs)\n\n neighbor_attr = managedattribute(\n name='neighbor_attr',\n read_only=True,\n doc=NeighborAttributes.__doc__)\n\n @neighbor_attr.initter\n def neighbor_attr(self):\n return SubAttributesDict(self.NeighborAttributes, parent=self)\n\n def __init__(self, **kwargs):\n super().__init__(**kwargs)\n\n address_family_attr = managedattribute(\n name='address_family_attr',\n read_only=True,\n doc=AddressFamilyAttributes.__doc__)\n\n @address_family_attr.initter\n def address_family_attr(self):\n return SubAttributesDict(self.AddressFamilyAttributes, parent=self)\n\n def __init__(self, **kwargs):\n super().__init__(**kwargs)\n\n vrf_attr = managedattribute(\n name='vrf_attr',\n read_only=True,\n doc=VrfAttributes.__doc__)\n\n @vrf_attr.initter\n def vrf_attr(self):\n return SubAttributesDict(self.VrfAttributes, parent=self)\n\n @property\n def router_id(self):\n return self.vrf_attr[None].router_id\n\n @router_id.setter\n def router_id(self, value):\n self.vrf_attr[None].router_id = value\n\n @property\n def neighbor_attr(self):\n return self.vrf_attr[None].neighbor_attr\n\n @property\n def address_family_attr(self):\n return self.vrf_attr[None].address_family_attr\n\n class InterfaceAttributes(genie.conf.base.attributes.InterfaceSubAttributes):\n\n address_families = managedattribute(\n name='address_families',\n type=typedset(AddressFamily)._from_iterable)\n\n @address_families.defaulter\n def address_families(self):\n return frozenset(self.parent.address_families)\n\n class AddressFamilyAttributes(AddressFamilySubAttributes):\n\n allowed_keys = (AddressFamily.ipv4, AddressFamily.ipv6)\n\n def __init__(self, **kwargs):\n super().__init__(**kwargs)\n\n address_family_attr = managedattribute(\n name='address_family_attr',\n read_only=True,\n doc=AddressFamilyAttributes.__doc__)\n\n @address_family_attr.initter\n def address_family_attr(self):\n return SubAttributesDict(self.AddressFamilyAttributes, parent=self)\n\n def __init__(self, **kwargs):\n super().__init__(**kwargs)\n\n interface_attr = managedattribute(\n name='interface_attr',\n read_only=True,\n doc=InterfaceAttributes.__doc__)\n\n @interface_attr.initter\n def interface_attr(self):\n return SubAttributesDict(self.InterfaceAttributes, parent=self)\n\n def __init__(self, **kwargs):\n super().__init__(**kwargs)\n\n device_attr = managedattribute(\n name='device_attr',\n read_only=True,\n doc=DeviceAttributes.__doc__)\n\n @device_attr.initter\n def device_attr(self):\n return SubAttributesDict(self.DeviceAttributes, parent=self)\n\n def __init__(self, **kwargs):\n super().__init__(**kwargs)\n\n def build_config(self, links=None, apply=True, attributes=None, **kwargs):\n attributes = AttributesHelper(self, attributes)\n\n cfgs = {}\n\n if links is None:\n devices = self.devices\n else:\n devices = 
set().union(*[link.devices for link in links])\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr',\n keys=devices, sort=True):\n cfgs[key] = sub.build_config(apply=False, attributes=attributes2)\n\n if apply:\n self.testbed.config_on_devices(cfgs, fail_invalid=True)\n else:\n return cfgs\n\n def build_unconfig(self, links=None, apply=True, attributes=None, **kwargs):\n attributes = AttributesHelper(self, attributes)\n\n cfgs = {}\n\n if links is None:\n devices = self.devices\n else:\n devices = set().union(*[link.devices for link in links])\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr',\n keys=devices, sort=True):\n cfgs[key] = sub.build_unconfig(apply=False, attributes=attributes2)\n\n if apply:\n self.testbed.config_on_devices(cfgs, fail_invalid=True)\n else:\n return cfgs\n\n", "id": "8230071", "language": "Python", "matching_score": 3.949341297149658, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/ldp/ldp.py" }, { "content": "\n__all__ = (\n 'Pim',\n )\n# import python\nimport ipaddress\nfrom enum import Enum\nfrom ipaddress import IPv4Address, IPv4Interface, IPv6Address, IPv6Interface\n\n# import genie\nimport genie.conf.base.attributes\nfrom genie.utils.cisco_collections import typedset\nfrom genie.conf.base import Base, Interface\nfrom genie.decorator import managedattribute\nfrom genie.conf.base import DeviceFeature, InterfaceFeature, LinkFeature\nfrom genie.conf.base.attributes import SubAttributes, \\\n SubAttributesDict, \\\n AttributesHelper\n\n# import genie.libs\nfrom .rp_address import RPAddressGroup\nfrom genie.libs.conf.base import Routing\nfrom genie.libs.conf.vrf import Vrf, VrfSubAttributes\nfrom genie.conf.base.attributes import InterfaceSubAttributes\nfrom genie.libs.conf.address_family import AddressFamily, \\\n AddressFamilySubAttributes\n\nfrom genie.abstract import Lookup\n\nfrom genie.ops.base import Context\nfrom genie.ops.base import Base as ops_Base\n\nfrom genie.libs import parser\n\n\nclass Pim(Routing, DeviceFeature, InterfaceFeature):\n\n address_families = managedattribute(\n name='address_families',\n finit=typedset(AddressFamily, {AddressFamily.ipv4}).copy,\n type=typedset(AddressFamily)._from_iterable)\n\n sparse = managedattribute(\n name='sparse',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n rp_address = managedattribute(\n name='rp_address',\n default=None,\n type=(None, IPv4Address, IPv6Address))\n\n # ==================== NXOS specific ====================\n\n # feature pim\n # feature pim6\n enabled = managedattribute(\n name='enabled',\n default=False,\n type=(None, managedattribute.test_istype(bool)),\n doc='Enable or disable both feature pim and feature pim6')\n\n # feature pim\n enabled_pim = managedattribute(\n name='enabled_pim',\n default=False,\n type=(None, managedattribute.test_istype(bool)),\n doc='Enable or disable feature pim')\n\n # feature_pim6\n enabled_pim6 = managedattribute(\n name='enabled_pim6',\n default=False,\n type=(None, managedattribute.test_istype(bool)),\n doc='Enable or disable feature pim6')\n\n # ===========================================================\n\n # enable_bidir\n enabled_bidir = managedattribute(\n name='enabled_bidir',\n default=False,\n type=(None, managedattribute.test_istype(bool)),\n doc='Enable or disable feature bidir only for iosxe')\n\n # ==== PIM Auto-RP =======\n auto_rp = managedattribute(\n name='auto_rp',\n default=False,\n type=(None, managedattribute.test_istype(bool)),\n doc=\"Auto-RP 
protocol RP-distribution configuration\")\n\n send_rp = managedattribute(\n name='send_rp',\n default=False,\n type=(None, managedattribute.test_istype(bool)),\n doc=\"Configures router to send Auto-RP Announce messages\")\n\n send_rp_announce_rp_group = managedattribute(\n name='send_rp_announce_rp_group',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc=\"IP address of RP for group\")\n\n send_rp_announce_intf = managedattribute(\n name='send_rp_announce_intf',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc=\"auto-rp interface\")\n\n send_rp_announce_group_list = managedattribute(\n name='send_rp_announce_group_list',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc=\"Group range list\")\n\n send_rp_announce_route_map = managedattribute(\n name='send_rp_announce_route_map',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc=\" Group range policy for Auto-RP Candidate RP\")\n\n send_rp_announce_prefix_list = managedattribute(\n name='send_rp_announce_prefix_list',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc=\"Prefix List policy for Auto-RP Candidate RP\")\n\n send_rp_announce_interval = managedattribute(\n name='send_rp_announce_interval',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc=\"Auto-RP Announce message transmission interval\")\n\n send_rp_announce_scope = managedattribute(\n name='send_rp_announce_scope',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc=\"Configure the scope of Auto-RP Announce messages\")\n\n send_rp_announce_bidir = managedattribute(\n name='send_rp_announce_bidir',\n default=False,\n type=(None, managedattribute.test_istype(bool)),\n doc=\"Group range advertised in PIM bidirectional mode\")\n\n auto_rp_discovery = managedattribute(\n name='auto_rp_discovery',\n default=False,\n type=(None, managedattribute.test_istype(bool)),\n doc=\"Configures router as an Auto-RP RP-mapping agent\")\n\n send_rp_discovery = managedattribute(\n name='send_rp_discovery',\n default=False,\n type=(None, managedattribute.test_istype(bool)),\n doc=\"Configures router to send Auto-RP Discovery messages\")\n\n send_rp_discovery_intf = managedattribute(\n name='send_rp_discovery_intf',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc=\"Auto-RP Discovery messages interface\")\n\n send_rp_discovery_scope = managedattribute(\n name='send_rp_discovery_scope',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc=\"Configure the scope of Auto-RP Discovery messages\")\n\n send_rp_discovery_interval = managedattribute(\n name='send_rp_discovery_interval',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc=\"Auto-RP Discovery message transmission interval\")\n\n autorp_listener = managedattribute(\n name='autorp_listener',\n default=False,\n type=(None, managedattribute.test_istype(bool)),\n doc=\"Listen to Auto-RP messages\")\n\n # ==== PIM BSR =======\n # === bsr-candidate ===\n bsr_candidate_interface = managedattribute(\n name='bsr_candidate_interface',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc=\"Configure router as a Bootstrap Router candidate interface\")\n\n bsr_candidate_hash_mask_length = managedattribute(\n name='bsr_candidate_hash_mask_length',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc=\"Hash mask length used in Bootstrap messages\")\n\n bsr_candidate_priority = managedattribute(\n 
name='bsr_candidate_priority',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc=\"BSR priority used in Bootstrap messages\")\n\n bsr_candidate_interval = managedattribute(\n name='bsr_candidate_interval',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc=\"Bootstrap message transmission interval\")\n\n bsr_candidate_accept_rp_acl = managedattribute(\n name='bsr_candidate_accept_rp_acl',\n default=None,\n type=(None, managedattribute.test_istype(str),\n managedattribute.test_istype(int)),\n doc=\"bsr_candidate_accept_rp_acl\")\n\n bsr_candidate_address = managedattribute(\n name='bsr_candidate_address',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc=\"bsr_candidate_address\")\n\n # === bsr rp-candidate ====\n bsr_rp_candidate_interface = managedattribute(\n name='bsr_rp_candidate_interface',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc=\"Configure router as a Rendezvous Point (RP) candidate interface\")\n\n bsr_rp_candidate_group_list = managedattribute(\n name='bsr_rp_candidate_group_list',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc=\"Group range list\")\n\n bsr_rp_candidate_route_map = managedattribute(\n name='bsr_rp_candidate_route_map',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc=\"Group range policy for Candidate RP\")\n\n bsr_rp_candidate_prefix_list = managedattribute(\n name='bsr_rp_candidate_prefix_list',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc=\"Prefix List policy for Candidate RP\")\n\n bsr_rp_candidate_priority = managedattribute(\n name='bsr_rp_candidate_priority',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc=\"Group range policy for Candidate RP\")\n\n bsr_rp_candidate_interval = managedattribute(\n name='bsr_rp_candidate_interval',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc=\"Bootstrap message transmission interval\")\n\n bsr_rp_candidate_bidir = managedattribute(\n name='bsr_rp_candidate_bidir',\n default=False,\n type=(None, managedattribute.test_istype(bool)),\n doc=\"Group range advertised in PIM bidirectional mode\")\n\n bsr_rp_candidate_address = managedattribute(\n name='bsr_rp_candidate_address',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc=\"bsr_rp_candidate_address\")\n\n # # ==== PIM Other =======\n accept_register = managedattribute(\n name='accept_register',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc=\"A route-map name\")\n\n # only used for nxos ipv4\n accept_register_prefix_list = managedattribute(\n name='accept_register_prefix_list',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc=\"Prefix List policy for Registers\")\n\n log_neighbor_changes = managedattribute(\n name='log_neighbor_changes',\n default=False,\n type=(None, managedattribute.test_istype(bool)),\n doc=\"Log up/down PIM neighbor transitions\")\n\n register_source = managedattribute(\n name='accept_register_route_map',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc=\"Configure source address for Register messages\")\n\n sg_expiry_timer = managedattribute(\n name='sg_expiry_timer',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc=\"Adjust expiry time for PIM ASM (S,G) routes\")\n\n # NXOS only\n sg_expiry_timer_infinity = managedattribute(\n name='sg_expiry_timer_infinity',\n default=False,\n type=(None, 
managedattribute.test_istype(bool)),\n doc=\"Never expire (S,G) route due to data inactivity\")\n\n sg_expiry_timer_sg_list = managedattribute(\n name='sg_expiry_timer_sg_list',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc=\"Specifies route-map for (S,G)s to apply the expiry timer\")\n\n sg_expiry_timer_prefix_list = managedattribute(\n name='sg_expiry_timer_prefix_list',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc=\"Specifies prefix-list for (S,G)s to apply the expiry timer\")\n\n class SPT_SWITCH_INFINITY(Enum):\n active = 0\n passive = 'infinity'\n\n spt_switch_infinity = managedattribute(\n name='spt_switch_infinity',\n default=False,\n type=(None, SPT_SWITCH_INFINITY),\n doc=\"Source-tree switching threshold\")\n\n spt_switch_policy = managedattribute(\n name='spt_switch_policy',\n default=None,\n type=(None, managedattribute.test_istype(str),\n managedattribute.test_istype(int)),\n doc=\"Specify group ranges through policy\")\n\n # ==== PIM AddressFamily Interface =======\n class MODE(Enum):\n mode1 = 'dense-mode'\n mode2 = 'sparse-mode'\n mode3 = 'sparse-dense-mode'\n\n mode = managedattribute(\n name='mode',\n default=None,\n type=(None, MODE),\n doc=\"pim mode - only 'sparse-mode' valid for NXOS\")\n\n boundary = managedattribute(\n name='boundary',\n default=None,\n type=(None, managedattribute.test_istype(str),\n managedattribute.test_istype(int)),\n doc=\"ip multicast boundary/jp_policy\")\n\n boundary_filter_autorp = managedattribute(\n name='boundary_filter_autorp',\n default=None,\n type=(None, managedattribute.test_istype(bool)),\n doc=\"boundary group\")\n\n boundary_in = managedattribute(\n name='boundary_in',\n default=False,\n type=(None, managedattribute.test_istype(bool)),\n doc=\"boundary direction in/jp_policy_in\")\n\n boundary_out = managedattribute(\n name='boundary_out',\n default=False,\n type=(None, managedattribute.test_istype(bool)),\n doc=\"boundary direction out/jp_policy_out\")\n\n bsr_border = managedattribute(\n name='bsr_border',\n default=False,\n type=(None, managedattribute.test_istype(bool)),\n doc=\"bsr border - prevents both BSR and Auto-RP\")\n\n hello_interval = managedattribute(\n name='hello_interval',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc=\"hello interval\")\n\n hello_interval_msec = managedattribute(\n name='hello_interval_msec',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc=\"hello interval msec\")\n\n dr_priority = managedattribute(\n name='dr_priority',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc=\"pim dr-priority\")\n\n neighbor_filter = managedattribute(\n name='neighbor_filter',\n default=None,\n type=(None, managedattribute.test_istype(str),\n managedattribute.test_istype(int)),\n doc=\"pim neighbor filter\")\n\n # NXOS only\n neighbor_filter_prefix_list = managedattribute(\n name='neighbor_filter_prefix_list',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc=\"pim neighbor filter prefix list\")\n\n @property\n def vrfs(self):\n return \\\n self.force_vrfs | \\\n {intf.vrf for intf in self.interfaces}\n\n force_vrfs = managedattribute(\n name='force_vrfs',\n read_only=True,\n finit=set,\n gettype=frozenset)\n # XXXJST TODO force_vrfs needs to also be accessible per-device. 
Being read_only, that can't happen\n\n def add_force_vrf(self, vrf):\n assert vrf is None or isinstance(vrf, Vrf)\n self.force_vrfs # init!\n self._force_vrfs.add(vrf)\n\n def remove_force_vrf(self, vrf):\n assert vrf is None or isinstance(vrf, Vrf)\n self.force_vrfs # init!\n self._force_vrfs.remove(vrf)\n\n class DeviceAttributes(genie.conf.base.attributes.DeviceSubAttributes):\n\n address_families = managedattribute(\n name='address_families',\n type=typedset(AddressFamily)._from_iterable)\n\n @property\n def vrfs(self):\n return \\\n self.force_vrfs | \\\n {intf.vrf for intf in self.interfaces}\n\n @address_families.defaulter\n def address_families(self):\n return frozenset(self.parent.address_families)\n\n class VrfAttributes(VrfSubAttributes):\n\n address_families = managedattribute(\n name='address_families',\n type=typedset(AddressFamily)._from_iterable)\n\n @address_families.defaulter\n def address_families(self):\n return frozenset(self.parent.address_families)\n\n class AddressFamilyAttributes(AddressFamilySubAttributes):\n \n rp_addresses = managedattribute(\n name='rp_addresses',\n finit=typedset(managedattribute.test_isinstance(RPAddressGroup)).copy,\n type=typedset(managedattribute.test_isinstance(RPAddressGroup))._from_iterable,\n doc='A `set` of RPAddressGroup associated objects')\n\n def add_static_rp(self, rp_addresses):\n self.rp_addresses.add(rp_addresses)\n\n def remove_static_rp(self, rp_addresses):\n rp_addresses._device = None\n try:\n self.rp_addresses.remove(rp_addresses)\n except:\n pass\n\n class InterfaceAttributes(InterfaceSubAttributes):\n pass\n\n interface_attr = managedattribute(\n name='interface_attr',\n read_only=True,\n doc=InterfaceAttributes.__doc__)\n\n @interface_attr.initter\n def interface_attr(self):\n return SubAttributesDict(self.InterfaceAttributes, parent=self)\n\n address_family_attr = managedattribute(\n name='address_family_attr',\n read_only=True,\n doc=AddressFamilyAttributes.__doc__)\n\n @address_family_attr.initter\n def address_family_attr(self):\n return SubAttributesDict(self.AddressFamilyAttributes, parent=self)\n\n vrf_attr = managedattribute(\n name='vrf_attr',\n read_only=True,\n doc=VrfAttributes.__doc__)\n\n @vrf_attr.initter\n def vrf_attr(self):\n return SubAttributesDict(self.VrfAttributes, parent=self)\n\n def __init__(self, parent, key):\n super().__init__(parent, key)\n\n device_attr = managedattribute(\n name='device_attr',\n read_only=True,\n doc=DeviceAttributes.__doc__)\n\n @device_attr.initter\n def device_attr(self):\n return SubAttributesDict(self.DeviceAttributes, parent=self)\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n def build_config(self, devices=None, apply=True, attributes=None,\n **kwargs):\n cfgs = {}\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n\n if devices is None:\n devices = self.devices\n devices = set(devices)\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr',\n keys=devices, sort=True):\n cfgs[key] = sub.build_config(apply=False, attributes=attributes2)\n\n if apply:\n self.testbed.config_on_devices(cfgs, fail_invalid=True)\n else:\n return cfgs\n\n def build_unconfig(self, devices=None, apply=True, attributes=None,\n **kwargs):\n cfgs = {}\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n\n if devices is None:\n devices = self.devices\n devices = set(devices)\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr',\n keys=devices, sort=True):\n 
cfgs[key] = sub.build_unconfig(apply=False, attributes=attributes2)\n\n if apply:\n self.testbed.config_on_devices(cfgs, fail_invalid=True)\n else:\n return cfgs\n\n\n @classmethod\n def learn_config(self, device, **kwargs):\n '''\n A method that learn the device configurational state and create\n a conf object with the same configuration.\n\n Args:\n self (`obj`): Conf object.\n device (`obj`): The device that will be used to parse the\n command.\n '''\n if kwargs.get('attributes', None):\n kwargs['attributes'].extend(['v4_vrfs_list', 'v6_vrfs_list'])\n\n # Abstracting the show running bgp as per device os\n ret = Lookup.from_device(device)\n cmd = ret.parser.show_pim.ShowRunningConfigPim\n\n maker = ops_Base(device=device, **kwargs)\n\n maker.add_leaf(cmd=cmd,\n src='[feature_pim]',\n dest='pim[enabled_pim]',\n address_family='ipv4',\n pip_str='feature')\n\n maker.add_leaf(cmd=cmd,\n src='[feature_pim6]',\n dest='pim[enabled_pim6]',\n address_family='ipv6',\n pip_str='feature')\n\n # get vrfs for usage on attribtues of specific vrf\n maker.add_leaf(cmd=cmd,\n src='[vrf]',\n dest='v4_vrfs_list',\n pip_str='vrf',\n address_family='ipv4',\n action=lambda x: list(x.keys()))\n\n maker.add_leaf(cmd=cmd,\n src='[vrf]',\n dest='v6_vrfs_list',\n pip_str='vrf',\n address_family='ipv6',\n action=lambda x: list(x.keys()))\n\n # A workaround to pass the context as in maker it expects Context.cli\n # not just a string 'cli.\n maker.context_manager[cmd] = Context.cli\n maker.make()\n\n maker.v4_vrfs_list = getattr(maker, 'v4_vrfs_list', [])\n maker.v4_vrfs_list.append('default')\n maker.v4_vrfs_list = set(maker.v4_vrfs_list)\n\n maker.v6_vrfs_list = getattr(maker, 'v6_vrfs_list', [])\n maker.v6_vrfs_list.append('default')\n maker.v6_vrfs_list = set(maker.v6_vrfs_list)\n\n v4_map = map(lambda x: (x, 'ipv4'), maker.v4_vrfs_list)\n v6_map = map(lambda x: (x, 'ipv6'), maker.v6_vrfs_list)\n\n for vrf, af in list(v4_map) + list(v6_map):\n\n # only support on ipv4\n # auto-rp \n if af == 'ipv4':\n atuo_an_src = '[vrf][{vrf}][address_family][ipv4][rp][autorp][send_rp_announce]'.format(vrf=vrf)\n atuo_an_dest = 'pim[vrf_attr][{vrf}][address_family_attr][ipv4]'.format(vrf=vrf)\n\n for src_key, dest_key in {'interface':'send_rp_announce_intf',\n 'group':'send_rp_announce_rp_group',\n 'group_list':'send_rp_announce_group_list',\n 'route_map':'send_rp_announce_route_map',\n 'prefix_list':'send_rp_announce_prefix_list',\n 'interval':'send_rp_announce_interval',\n 'scope':'send_rp_announce_scope',\n 'bidir':'send_rp_announce_bidir',\n }.items():\n\n maker.add_leaf(cmd=cmd,\n src=atuo_an_src + '[%s]' % src_key,\n dest=atuo_an_dest + '[%s]' % dest_key,\n pip_str='send-rp-announce',\n vrf=vrf,\n address_family='ipv4')\n\n\n maker.make()\n\n if kwargs.get('attributes', None):\n kwargs['attributes'].remove('v4_vrfs_list')\n kwargs['attributes'].remove('v6_vrfs_list')\n\n # Take a copy of the object dictionary\n if not hasattr(maker, 'pim'):\n maker.pim = {}\n new_pim = maker.pim\n\n # List of mapped conf objects\n conf_obj_list = []\n\n # Main structure attributes in the conf object\n structure_keys = ['vrf_attr',\n 'address_family_attr']\n\n # Instiantiate a PIM conf object\n conf_obj = self()\n\n # Pass the class method not the instnace.\n maker.dict_to_obj(conf=conf_obj,\\\n struct=structure_keys,\\\n struct_to_map=new_pim)\n\n conf_obj_list.append(conf_obj)\n\n # List of mapped conf objects\n return conf_obj_list\n\n\n", "id": "6515668", "language": "Python", "matching_score": 5.924811840057373, "max_stars_count": 94, 
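The Pim conf object defined in the record above follows the same device_attr / vrf_attr / address_family_attr layering as Ldp; below is a short sketch of wiring a static RP through it. The device handle, the 'default' vrf key, and the RPAddressGroup constructor argument are illustrative assumptions, not confirmed by this source.

# Hypothetical usage (not part of the package); 'dev' is a Genie device
# object as in the Ldp sketch earlier in this document.
from genie.libs.conf.pim import Pim
from genie.libs.conf.pim.rp_address import RPAddressGroup
from genie.libs.conf.address_family import AddressFamily

pim = Pim()
dev.add_feature(pim)
pim.device_attr[dev].enabled = True      # per its doc: enables feature pim / pim6

af = pim.device_attr[dev].vrf_attr['default'].address_family_attr[AddressFamily.ipv4]
af.add_static_rp(RPAddressGroup(ip_address='10.1.1.1'))   # assumed keyword argument

cfgs = pim.build_config(apply=False)     # dict of rendered configs, nothing pushed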
"path": "pkgs/conf-pkg/src/genie/libs/conf/pim/pim.py" }, { "content": "# Genie package\nfrom genie.decorator import managedattribute\nfrom genie.conf.base import Base, \\\n DeviceFeature, \\\n LinkFeature, \\\n Interface\nimport genie.conf.base.attributes\nfrom genie.conf.base.attributes import SubAttributes, \\\n SubAttributesDict, \\\n AttributesHelper, \\\n KeyedSubAttributes\nfrom genie.abstract import Lookup\nfrom genie.ops.base import Context\nfrom genie.ops.base import Base as ops_Base\nfrom genie.libs import parser\n\n__all__ = (\n 'Msdp',\n )\n# Table of contents:\n# class Msdp:\n# class DeviceAttributes:\n# class VrfAttributes:\n# class PeerAttributes:\n\nclass Msdp(DeviceFeature, LinkFeature):\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n # =============================================\n # Device attributes\n # =============================================\n class DeviceAttributes(genie.conf.base.attributes.DeviceSubAttributes):\n\n # VrfAttributes\n class VrfAttributes(KeyedSubAttributes):\n def __init__(self, key, *args, **kwargs):\n self.vrf_name = key\n super().__init__(*args, **kwargs)\n\n # PeerAttribute\n class PeerAttributes(KeyedSubAttributes):\n def __init__(self, key, *args, **kwargs):\n self.address = key\n super().__init__(*args, **kwargs)\n\n peer_attr = managedattribute(\n name='peer_attr',\n read_only=True,\n doc=PeerAttributes.__doc__)\n\n @peer_attr.initter\n def peer_attr(self):\n return SubAttributesDict(self.PeerAttributes, parent=self)\n\n vrf_attr = managedattribute(\n name='vrf_attr',\n read_only=True,\n doc=VrfAttributes.__doc__)\n\n @vrf_attr.initter\n def vrf_attr(self):\n return SubAttributesDict(self.VrfAttributes, parent=self)\n\n\n device_attr = managedattribute(\n name='device_attr',\n read_only=True,\n doc=DeviceAttributes.__doc__)\n\n @device_attr.initter\n def device_attr(self):\n return SubAttributesDict(self.DeviceAttributes, parent=self)\n\n # ============ managedattributes ============#\n vrf_name = managedattribute(\n name='vrf_name',\n default=None,\n type=managedattribute.test_istype(str),\n doc='vrf name')\n\n enabled = managedattribute(\n name='enabled',\n default=None,\n type=managedattribute.test_istype(bool),\n doc='enabling msdp feature')\n\n originating_rp = managedattribute(\n name='originating_rp',\n default=None,\n type=managedattribute.test_istype(str),\n doc='set originating rp')\n\n global_connect_retry_interval = managedattribute(\n name='global_connect_retry_interval',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc='set global connect retry interval')\n\n address = managedattribute(\n name='address',\n default=None,\n type=managedattribute.test_istype(str),\n doc='set address')\n\n peer_as = managedattribute(\n name='peer_as',\n default=None,\n type=managedattribute.test_istype(str),\n doc='set peer as')\n\n connected_source = managedattribute(\n name='connected_source',\n default=None,\n type=managedattribute.test_istype(str),\n doc='set connected source')\n\n enable = managedattribute(\n name='enable',\n default=None,\n type=managedattribute.test_istype(bool),\n doc='set connection status')\n\n description = managedattribute(\n name='description',\n default=None,\n type=managedattribute.test_istype(str),\n doc='set description')\n\n mesh_group = managedattribute(\n name='mesh_group',\n default=None,\n type=managedattribute.test_istype(str),\n doc='set mesh group')\n\n sa_filter_in = managedattribute(\n name='sa_filter_in',\n default=None,\n 
type=managedattribute.test_istype(str),\n doc='set filter in')\n\n sa_filter_out = managedattribute(\n name='sa_filter_out',\n default=None,\n type=managedattribute.test_istype(str),\n doc='set filter out')\n\n sa_limit = managedattribute(\n name='sa_limit',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc='set sa limit')\n\n keepalive_interval = managedattribute(\n name='keepalive_interval',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc='set keepalive interval')\n\n holdtime_interval = managedattribute(\n name='holdtime_interval',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc='set holdtime interval')\n\n # =========================================================\n # build_config\n # =========================================================\n def build_config(self, devices=None, apply=True, attributes=None,\n **kwargs):\n cfgs = {}\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n\n if devices is None:\n devices = self.devices\n devices = set(devices)\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr',\n keys=devices, sort=True):\n cfgs[key] = sub.build_config(apply=False, attributes=attributes2)\n\n if apply:\n self.testbed.config_on_devices(cfgs, fail_invalid=True)\n else:\n return cfgs\n\n def build_unconfig(self, devices=None, apply=True, attributes=None,\n **kwargs):\n cfgs = {}\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n\n if devices is None:\n devices = self.devices\n devices = set(devices)\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr',\n keys=devices, sort=True):\n cfgs[key] = sub.build_unconfig(apply=False, attributes=attributes2)\n\n if apply:\n self.testbed.config_on_devices(cfgs, fail_invalid=True)\n else:\n return cfgs\n\n @classmethod\n def learn_config(self, device, **kwargs):\n '''\n A method that learns the device's configuration state and creates\n a conf object with the same configuration.\n\n Args:\n self (`obj`): Conf object.\n device (`obj`): The device that will be used to parse the\n command.\n '''\n if kwargs.get('attributes', None):\n kwargs['attributes'].append('vrfs_list')\n\n # Abstracting the show running msdp as per device os\n ret = Lookup.from_device(device)\n cmd = ret.parser.show_msdp.ShowRunningConfigMsdp\n\n maker = ops_Base(device=device, **kwargs)\n\n # get vrfs for usage on attributes of a specific vrf\n maker.add_leaf(cmd=cmd,\n src='[vrf]',\n dest='vrfs_list',\n pip_str='vrf',\n action=lambda x: list(x.keys()))\n\n # A workaround to pass the context, as the maker expects Context.cli,\n # not just the string 'cli'.\n maker.context_manager[cmd] = Context.cli\n maker.make()\n\n maker.vrfs_list = getattr(maker, 'vrfs_list', [])\n maker.vrfs_list.append('default')\n maker.vrfs_list = set(maker.vrfs_list)\n\n for vrf in maker.vrfs_list:\n\n # ----- global attributes -----\n global_src = '[vrf][{vrf}][global]'.format(vrf=vrf)\n global_dest = 'msdp[vrf_attr][{vrf}]'.format(vrf=vrf)\n\n # connect_retry_interval\n maker.add_leaf(cmd=cmd,\n src=global_src + '[timer][connect_retry_interval]',\n dest=global_dest + '[global_connect_retry_interval]',\n pip_str='reconnect-interval',\n vrf=vrf)\n\n # originating_rp\n maker.add_leaf(cmd=cmd,\n src=global_src + '[originating_rp]',\n dest=global_dest + '[originating_rp]',\n pip_str='originator-id',\n vrf=vrf)\n\n # ----- peer attributes -----\n peer_src = '[vrf][{vrf}][peer][(?P<peer>.*)]'.format(vrf=vrf)\n peer_dest =
'msdp[vrf_attr][{vrf}][peer_attr][(?P<peer>.*)]'.format(vrf=vrf)\n\n # description\n maker.add_leaf(cmd=cmd,\n src=peer_src + '[description]',\n dest=peer_dest + '[description]',\n pip_str='description',\n vrf=vrf)\n\n # connected_source, peer_as\n for src, dest in {'[connect_source]': '[connected_source]',\n '[peer_as]': '[peer_as]'\n }.items():\n\n maker.add_leaf(cmd=cmd,\n src=peer_src + src,\n dest=peer_dest + dest,\n pip_str='connect-source',\n vrf=vrf)\n\n # keepalive_interval, holdtime_interval\n for src, dest in {'[timer][keepalive_interval]': '[keepalive_interval]',\n '[timer][holdtime_interval]': '[holdtime_interval]'\n }.items():\n\n maker.add_leaf(cmd=cmd,\n src=peer_src + src,\n dest=peer_dest + dest,\n pip_str='keepalive',\n vrf=vrf)\n\n\n maker.make()\n\n if kwargs.get('attributes', None):\n kwargs['attributes'].remove('vrfs_list')\n\n # Take a copy of the object dictionary\n if not hasattr(maker, 'msdp'):\n maker.msdp = {}\n new_msdp = maker.msdp\n\n # List of mapped conf objects\n conf_obj_list = []\n\n # Main structure attributes in the conf object\n structure_keys = ['vrf_attr',\n 'peer_attr']\n\n # Instantiate an Msdp conf object\n conf_obj = self()\n\n # Pass the class method, not the instance.\n maker.dict_to_obj(conf=conf_obj,\\\n struct=structure_keys,\\\n struct_to_map=new_msdp)\n\n conf_obj_list.append(conf_obj)\n\n # List of mapped conf objects\n return conf_obj_list\n", "id": "11878349", "language": "Python", "matching_score": 4.094326019287109, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/msdp/msdp.py" }, { "content": "__all__ = (\n 'Ntp',\n )\n# python\nfrom enum import Enum\n\n# genie\nfrom genie.utils.cisco_collections import typedset\nfrom genie.decorator import managedattribute\nfrom genie.conf.base.config import CliConfig\nfrom genie.conf.base.base import DeviceFeature, InterfaceFeature\n\n# genie.libs\nfrom genie.libs.conf.vrf import VrfSubAttributes\nfrom genie.conf.base.attributes import DeviceSubAttributes, \\\n SubAttributesDict,\\\n InterfaceSubAttributes, \\\n AttributesHelper, \\\n KeyedSubAttributes\n\n# Structure Hierarchy:\n# Ntp\n# +-- DeviceAttribute\n# +-- VrfAttributes\n# | +-- ServerAttributes\n# | +-- PeerAttributes\n# +-- AuthKeyAttribute\n# +-- InterfaceAttribute\n\n\nclass Ntp(DeviceFeature):\n \n # enabled\n enabled = managedattribute(\n name='enabled',\n default=None,\n type=(None, managedattribute.test_istype(bool)),\n doc=\"Enable NTP feature.\")\n\n # master_stratum\n master_stratum = managedattribute(\n name='master_stratum',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc=\"Configure NTP master clock stratum number.\")\n\n # auth_enabled\n auth_enabled = managedattribute(\n name='auth_enabled',\n default=None,\n type=(None, managedattribute.test_istype(bool)),\n doc=\"Enable/Disable authentication.\")\n\n # source_interface\n source_interface = managedattribute(\n name='source_interface',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc=\"Configure Source interface sending NTP packets.\")\n\n # vrf\n vrf = managedattribute(\n name='vrf',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc=\"Configure per-VRF information.\")\n\n # server_address\n server_address = managedattribute(\n name='server_address',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc=\"Configure Hostname/IP address of the NTP Server.\")\n\n # server_key_id\n server_key_id = managedattribute(\n name='server_key_id',\n default=None,\n type=(None,
managedattribute.test_istype(int)),\n doc=\"Configure Keyid to be used while communicating to this server.\")\n\n # server_minpoll\n server_minpoll = managedattribute(\n name='server_minpoll',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc=\"Configure Minimum interval to poll a server.\")\n\n # server_maxpoll\n server_maxpoll = managedattribute(\n name='server_maxpoll',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc=\"Configure Maximum interval to poll a server.\")\n\n # server_prefer\n server_prefer = managedattribute(\n name='server_prefer',\n default=None,\n type=(None, managedattribute.test_istype(bool)),\n doc=\"Configure Preferred Server.\")\n\n # server_version\n server_version = managedattribute(\n name='server_version',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc=\"Configure server_version.\")\n\n # peer_address\n peer_address = managedattribute(\n name='peer_address',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc=\"Configure Hostname/IP address of the NTP Peer.\")\n\n # peer_key_id\n peer_key_id = managedattribute(\n name='peer_key_id',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc=\"Configure Keyid to be used while communicating to this peer.\")\n\n # peer_minpoll\n peer_minpoll = managedattribute(\n name='peer_minpoll',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc=\"Configure Minimum interval to poll a peer.\")\n\n # peer_maxpoll\n peer_maxpoll = managedattribute(\n name='peer_maxpoll',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc=\"Configure Maximum interval to poll a peer.\")\n\n # peer_prefer\n peer_prefer = managedattribute(\n name='peer_prefer',\n default=None,\n type=(None, managedattribute.test_istype(bool)),\n doc=\"Configure Preferred peer.\")\n\n # peer_version\n peer_version = managedattribute(\n name='peer_version',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc=\"Configure peer version.\")\n\n # auth_key_id\n auth_key_id = managedattribute(\n name='auth_key_id',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc=\"Configure NTP authentication key number.\")\n\n # auth_algorithm\n class AUTH_ALGORITHM(Enum):\n md5 = 'md5'\n\n auth_algorithm = managedattribute(\n name='auth_algorithm',\n default=None,\n type=(None, AUTH_ALGORITHM),\n doc=\"Use md5 authentication scheme.\")\n \n # auth_key\n auth_key = managedattribute(\n name='auth_key',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc=\"Configure MD5 string.\")\n\n # auth_trusted_key\n auth_trusted_key = managedattribute(\n name='auth_trusted_key',\n default=None,\n type=(None, managedattribute.test_istype(bool)),\n doc=\"Configure NTP trusted-key.\")\n\n # if_disabled\n if_disabled = managedattribute(\n name='if_disabled',\n default=None,\n type=(None, managedattribute.test_istype(bool)),\n doc=\"Enable/Disable NTP.\")\n\n\n class DeviceAttributes(DeviceSubAttributes):\n\n class VrfAttributes(VrfSubAttributes):\n \n class ServerAttributes(KeyedSubAttributes):\n\n def __init__(self, parent, key):\n self.server_address = key\n super().__init__(parent)\n\n server_attr = managedattribute(\n name='server_attr',\n read_only=True,\n doc=ServerAttributes.__doc__)\n\n @server_attr.initter\n def server_attr(self):\n return SubAttributesDict(\n self.ServerAttributes, parent=self)\n\n \n class PeerAttributes(KeyedSubAttributes):\n\n def __init__(self, parent, key):\n 
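# Note (added): the key used to index peer_attr entries is the peer's IP address;\n # it is stored on the sub-attribute so the CLI builders can reference it\n # (inferred from how the NXOS tests index peer_attr below).\n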
self.peer_address = key\n super().__init__(parent)\n\n peer_attr = managedattribute(\n name='peer_attr',\n read_only=True,\n doc=PeerAttributes.__doc__)\n\n @peer_attr.initter\n def peer_attr(self):\n return SubAttributesDict(\n self.PeerAttributes, parent=self)\n\n vrf_attr = managedattribute(\n name='vrf_attr',\n read_only=True,\n doc=VrfAttributes.__doc__)\n\n @vrf_attr.initter\n def vrf_attr(self):\n return SubAttributesDict(self.VrfAttributes, parent=self)\n\n class AuthKeyAttribute(KeyedSubAttributes):\n\n def __init__(self, parent, key):\n self.auth_key_id = key\n super().__init__(parent)\n\n auth_key_attr = managedattribute(\n name='auth_key_attr',\n read_only=True,\n doc=AuthKeyAttribute.__doc__)\n\n @auth_key_attr.initter\n def auth_key_attr(self):\n return SubAttributesDict(self.AuthKeyAttribute, parent=self)\n\n class InterfaceAttribute(InterfaceSubAttributes):\n pass\n\n interface_attr = managedattribute(\n name='interface_attr',\n read_only=True,\n doc=InterfaceAttribute.__doc__)\n\n @interface_attr.initter\n def interface_attr(self):\n return SubAttributesDict(self.InterfaceAttribute, parent=self)\n\n device_attr = managedattribute(\n name='device_attr',\n read_only=True,\n doc=DeviceAttributes.__doc__)\n\n @device_attr.initter\n def device_attr(self):\n return SubAttributesDict(self.DeviceAttributes, parent=self)\n\n # ===========================================================\n\n def build_config(self, devices=None, apply=True, attributes=None,\n unconfig=False):\n cfgs = {}\n attributes = AttributesHelper(self, attributes)\n if devices is None:\n devices = self.devices\n devices = set(devices)\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr', sort=True):\n cfgs[key] = sub.build_config(apply=False, attributes=attributes2)\n if apply:\n for device_name, cfg in sorted(cfgs.items()):\n self.testbed.config_on_devices(cfg, fail_invalid=True)\n else:\n return cfgs\n\n def build_unconfig(self, devices=None, apply=True, attributes=None):\n cfgs = {}\n attributes = AttributesHelper(self, attributes)\n if devices is None:\n devices = self.devices\n devices = set(devices)\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr', sort=True):\n cfgs[key] = sub.build_unconfig(apply=False, attributes=attributes2)\n\n if apply:\n for device_name, cfg in sorted(cfgs.items()):\n self.testbed.config_on_devices(cfg, fail_invalid=True)\n else:\n return cfgs\n", "id": "3114239", "language": "Python", "matching_score": 5.209300994873047, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/ntp/ntp.py" }, { "content": "'''\nNXOS specific configurations for Ntp feature object.\n'''\n\n# Python\nfrom abc import ABC\n\n# Genie\nfrom genie.decorator import managedattribute\nfrom genie.conf.base.config import CliConfig\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning, \\\n AttributesHelper\n# Structure Hierarchy:\n# Ntp\n# +-- DeviceAttribute\n# +-- VrfAttributes\n# | +-- ServerAttributes\n# | +-- PeerAttributes\n# +-- AuthKeyAttribute\n\nclass Ntp(ABC):\n\n class DeviceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # enabled\n if attributes.value('enabled'):\n if unconfig is False:\n configurations.append_line(\n attributes.format('feature ntp'))\n\n # Make sure that only enabled was
provided in attributes\n # If wildcard, then delete everything\n elif unconfig is True and\\\n attributes.attributes == {'enabled': {True: None}} or \\\n attributes.iswildcard:\n configurations.append_line('no feature ntp', raw=True)\n if apply:\n if configurations:\n self.device.configure(configurations)\n else:\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations)\n\n # master_stratum\n if attributes.value('master_stratum'):\n configurations.append_line(\n attributes.format('ntp master {master_stratum}'))\n\n # auth_enabled\n if attributes.value('auth_enabled'):\n configurations.append_line('ntp authenticate')\n \n # VrfAttributes\n for sub, attributes1 in attributes.mapping_values('vrf_attr',\n sort=True, keys=self.vrf_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes1,\n unconfig=unconfig))\n \n # AuthKeyAttribute\n for sub, attributes2 in attributes.mapping_values('auth_key_attr',\n sort=True, keys=self.auth_key_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n \n if apply:\n if configurations:\n self.device.configure(configurations)\n else:\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n\n class VrfAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # source_interface\n if attributes.value('source_interface') and \\\n self.vrf_name == 'default':\n configurations.append_line(\n attributes.format('ntp source-interface {source_interface}'))\n\n # ServerAttributes\n for sub, attributes1 in attributes.mapping_values('server_attr',\n sort=True, keys=self.server_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes1,\n unconfig=unconfig))\n\n # PeerAttributes\n for sub, attributes2 in attributes.mapping_values('peer_attr',\n sort=True, keys=self.peer_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n \n class ServerAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n # assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # server_address\n if not attributes.value('server_address') and not unconfig:\n return str()\n\n conf_line = 'ntp server {server_address}'\n\n # server_key_id\n if attributes.value('server_key_id'):\n conf_line += ' key {server_key_id}'\n\n # server_maxpoll\n if attributes.value('server_maxpoll'):\n conf_line += ' maxpoll {server_maxpoll}'\n\n # server_minpoll\n if attributes.value('server_minpoll'):\n conf_line += ' minpoll {server_minpoll}'\n\n # server_prefer\n if attributes.value('server_prefer'):\n conf_line += ' prefer'\n\n if self.vrf_name and self.vrf_name != 'default':\n conf_line += ' use-vrf {vrf_name}'\n\n # append the line\n configurations.append_line(attributes.format(conf_line, force=True))\n\n return 
str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n \n class PeerAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # peer_address\n if not attributes.value('peer_address') and not unconfig:\n return str()\n\n conf_line = 'ntp peer {peer_address}'\n\n # peer_key_id\n if attributes.value('peer_key_id'):\n conf_line += ' key {peer_key_id}'\n\n # peer_maxpoll\n if attributes.value('peer_maxpoll'):\n conf_line += ' maxpoll {peer_maxpoll}'\n\n # peer_minpoll\n if attributes.value('peer_minpoll'):\n conf_line += ' minpoll {peer_minpoll}'\n\n # peer_prefer\n if attributes.value('peer_prefer'):\n conf_line += ' prefer'\n\n if self.vrf_name and self.vrf_name != 'default':\n conf_line += ' use-vrf {vrf_name}'\n\n # append the line\n configurations.append_line(attributes.format(conf_line, force=True))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n\n class AuthKeyAttribute(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # auth_key_id, auth_algorithm, auth_key\n if attributes.value('auth_algorithm') and \\\n attributes.value('auth_key'):\n configurations.append_line(\n attributes.format('ntp authentication-key '\n '{auth_key_id} {auth_algorithm.value} {auth_key}', force=True))\n\n # auth_trusted_key\n if attributes.value('auth_trusted_key'):\n configurations.append_line(\n attributes.format('ntp trusted-key {auth_key_id}', force=True))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)", "id": "6825686", "language": "Python", "matching_score": 3.907402992248535, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/ntp/nxos/ntp.py" }, { "content": "#!/usr/bin/env python\n\n# Python\nimport unittest\nfrom unittest.mock import Mock\n\n# Genie\nfrom genie.conf.tests import TestCase\nfrom genie.conf import Genie\nfrom genie.conf.base import Testbed, Device\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning\n\n# Ntp\nfrom genie.libs.conf.ntp import Ntp\n\n# Vrf\nfrom genie.libs.conf.vrf import Vrf\n\n\nclass test_ntp(TestCase):\n\n def setUp(self):\n \n # Set Genie Tb\n testbed = Testbed()\n Genie.testbed = testbed\n \n # Device\n self.dev1 = Device(name='PE1', testbed=testbed, os='nxos')\n \n # Ntp object\n self.ntp = Ntp()\n\n def test_ntp_config(self):\n\n # For failures\n self.maxDiff = None\n\n # VRF configuration\n vrf1 = Vrf('VRF1')\n self.ntp.device_attr[self.dev1].enabled = True\n self.ntp.device_attr[self.dev1].master_stratum = 10\n self.ntp.device_attr[self.dev1].auth_enabled = True\n\n self.ntp.device_attr[self.dev1].vrf_attr[vrf1].server_attr['1.1.1.1']\\\n .server_key_id = 1\n self.ntp.device_attr[self.dev1].vrf_attr[vrf1].server_attr['1.1.1.1']\\\n .server_minpoll = 5\n self.ntp.device_attr[self.dev1].vrf_attr[vrf1].server_attr['1.1.1.1']\\\n .server_maxpoll = 15\n 
self.ntp.device_attr[self.dev1].vrf_attr[vrf1].server_attr['1.1.1.1']\\\n .server_prefer = True\n self.ntp.device_attr[self.dev1].vrf_attr[vrf1].peer_attr['192.168.127.12']\\\n .peer_key_id = 3\n self.ntp.device_attr[self.dev1].vrf_attr[vrf1].peer_attr['192.168.127.12']\\\n .peer_prefer = True\n # a non-default vrf won't have source_interface configured even if\n # the attribute is registered\n self.ntp.device_attr[self.dev1].vrf_attr[vrf1].source_interface = 'Ethernet2/1'\n\n \n vrf2 = Vrf('default')\n self.ntp.device_attr[self.dev1].vrf_attr[vrf2].server_attr['192.168.3.11']\\\n .server_key_id = 2\n self.ntp.device_attr[self.dev1].vrf_attr[vrf2].server_attr['192.168.3.11']\\\n .server_prefer = True\n self.ntp.device_attr[self.dev1].vrf_attr[vrf2].peer_attr['172.16.58.3']\\\n .peer_key_id = 4\n self.ntp.device_attr[self.dev1].vrf_attr[vrf2].peer_attr['172.16.58.3']\\\n .peer_minpoll = 6\n self.ntp.device_attr[self.dev1].vrf_attr[vrf2].peer_attr['172.16.58.3']\\\n .peer_maxpoll = 16\n self.ntp.device_attr[self.dev1].vrf_attr[vrf2].peer_attr['172.16.58.3']\\\n .peer_prefer = True\n self.ntp.device_attr[self.dev1].vrf_attr[vrf2].source_interface = 'Ethernet2/1'\n\n\n self.ntp.device_attr[self.dev1].auth_key_attr[1].auth_algorithm = 'md5'\n self.ntp.device_attr[self.dev1].auth_key_attr[1].auth_key = 'wawy 7'\n self.ntp.device_attr[self.dev1].auth_key_attr[1].auth_trusted_key = True\n\n\n # Build ntp configuration\n cfgs = self.ntp.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'feature ntp',\n 'ntp master 10',\n 'ntp authenticate',\n 'ntp source-interface Ethernet2/1',\n 'ntp server 192.168.3.11 key 2 prefer',\n 'ntp peer 172.16.58.3 key 4 maxpoll 16 minpoll 6 prefer',\n 'ntp server 1.1.1.1 key 1 maxpoll 15 minpoll 5 prefer use-vrf VRF1',\n 'ntp peer 192.168.127.12 key 3 prefer use-vrf VRF1',\n 'ntp authentication-key 1 md5 wawy 7',\n 'ntp trusted-key 1',\n ]))\n\n # Build unconfig\n cfgs = self.ntp.build_unconfig(apply=False)\n\n # Check config correctly unconfigured\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'no feature ntp',\n ]))\n\n # Build unconfig with attribute\n cfgs = self.ntp.build_unconfig(apply=False,\n attributes={'device_attr': {\n self.dev1: {\n 'auth_enabled': None,\n 'vrf_attr': {\n vrf1: {\n 'peer_attr': {\n '3.3.3.3': {\n 'peer_key_id': None,\n 'peer_prefer': None\n }\n }\n },\n vrf2: {\n 'peer_attr': None,\n }\n },\n 'auth_key_attr': {\n 1: {\n 'auth_trusted_key': None,\n }\n }}}})\n\n # Check config correctly unconfigured\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'no ntp authenticate',\n 'no ntp peer 172.16.58.3 key 4 maxpoll 16 minpoll 6 prefer',\n 'no ntp peer 192.168.127.12 key 3 prefer use-vrf VRF1',\n 'no ntp trusted-key 1'\n ]))\n\n\nif __name__ == '__main__':\n unittest.main()\n", "id": "2832977", "language": "Python", "matching_score": 1.7727835178375244, "max_stars_count": 0, "path": "pkgs/conf-pkg/src/genie/libs/conf/ntp/nxos/tests/test_ntp.py" }, { "content": "#!/usr/bin/env python\n\n# python\nimport unittest\nfrom unittest.mock import Mock\n\n# Genie package\nfrom genie.conf.tests import TestCase\nfrom genie.conf import Genie\nfrom genie.conf.base import Testbed, Device\n\n# Genie XBu_shared\nfrom genie.libs.conf.msdp.msdp import Msdp\n\noutputs = {}\n\ndef mapper(key):\n return outputs[key]\n\n\nclass test_msdp(TestCase):\n def setUp(self):\n # Set Genie Tb\n testbed = Testbed()\n Genie.testbed = testbed\n #
Device\n self.dev1 = Device(name='PE1', testbed=testbed, os='nxos')\n\n def test_msdp_feature_cfg(self):\n\n msdp = Msdp()\n self.maxDiff = None\n\n self.dev1.add_feature(msdp)\n msdp.device_attr[self.dev1].enabled = True\n\n cfgs = msdp.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [self.dev1.name])\n self.maxDiff = None\n self.assertMultiLineEqual(str(cfgs[self.dev1.name]), '\\n'.join(\n ['feature msdp',\n ]))\n\n uncfg = msdp.build_unconfig(apply=False)\n self.assertCountEqual(uncfg.keys(), [self.dev1.name])\n self.maxDiff = None\n self.assertMultiLineEqual(str(uncfg[self.dev1.name]), '\\n'.join(\n ['no feature msdp',\n ]))\n\n def test_msdp_vrf_default_cfg(self):\n\n msdp = Msdp()\n self.dev1.add_feature(msdp)\n vrf_name = 'default'\n msdp.device_attr[self.dev1].vrf_attr[vrf_name].originating_rp = 'loopback2'\n msdp.device_attr[self.dev1].vrf_attr[vrf_name].global_connect_retry_interval = 33\n\n cfgs = msdp.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [self.dev1.name])\n\n cfgs = msdp.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [self.dev1.name])\n self.maxDiff = None\n self.assertEqual(str(cfgs[self.dev1.name]), '\\n'.join(\n ['ip msdp originator-id loopback2',\n 'ip msdp reconnect-interval 33'\n ]))\n uncfg = msdp.build_unconfig(apply=False, attributes={\n 'device_attr':\n {self.dev1.name:\n {'vrf_attr': {\n vrf_name: {\n 'originating_rp':None,\n 'global_connect_retry_interval':None}}}}})\n self.assertCountEqual(uncfg.keys(), [self.dev1.name])\n self.maxDiff = None\n self.assertMultiLineEqual(str(uncfg[self.dev1.name]), '\\n'.join(\n ['no ip msdp originator-id loopback2',\n 'no ip msdp reconnect-interval 33',\n ]))\n\n def test_msdp_vrf_cfg(self):\n msdp = Msdp()\n self.dev1.add_feature(msdp)\n vrf_name = 'VRF1'\n msdp.device_attr[self.dev1].vrf_attr[vrf_name].originating_rp = 'loopback2'\n msdp.device_attr[self.dev1].vrf_attr[vrf_name].global_connect_retry_interval = 33\n\n cfgs = msdp.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [self.dev1.name])\n\n cfgs = msdp.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [self.dev1.name])\n self.maxDiff = None\n self.assertEqual(str(cfgs[self.dev1.name]), '\\n'.join(\n ['vrf context VRF1',\n ' ip msdp originator-id loopback2',\n ' ip msdp reconnect-interval 33',\n ' exit'\n ]))\n uncfg = msdp.build_unconfig(apply=False, attributes={\n 'device_attr':\n {self.dev1.name:\n {'vrf_attr': {\n vrf_name: {\n 'originating_rp':None,\n 'global_connect_retry_interval':None}}}}})\n self.assertCountEqual(uncfg.keys(), [self.dev1.name])\n self.maxDiff = None\n self.assertMultiLineEqual(str(uncfg[self.dev1.name]), '\\n'.join(\n ['vrf context VRF1',\n ' no ip msdp originator-id loopback2',\n ' no ip msdp reconnect-interval 33',\n ' exit',\n ]))\n\n def test_msdp_peer_cfg(self):\n msdp = Msdp()\n self.dev1.add_feature(msdp)\n vrf_name = 'default'\n address = '1.1.1.1'\n msdp.device_attr[self.dev1].enabled = True\n msdp.device_attr[self.dev1].vrf_attr[vrf_name].peer_attr[address].peer_as = '100'\n msdp.device_attr[self.dev1].vrf_attr[vrf_name].peer_attr[address].connected_source = 'loopback2'\n msdp.device_attr[self.dev1].vrf_attr[vrf_name].peer_attr[address].enable = False\n msdp.device_attr[self.dev1].vrf_attr[vrf_name].peer_attr[address].description = 'R1'\n msdp.device_attr[self.dev1].vrf_attr[vrf_name].peer_attr[address].mesh_group = '1'\n msdp.device_attr[self.dev1].vrf_attr[vrf_name].peer_attr[address].sa_filter_in = 'filtera'\n 
msdp.device_attr[self.dev1].vrf_attr[vrf_name].peer_attr[address].sa_filter_out = 'filtera'\n msdp.device_attr[self.dev1].vrf_attr[vrf_name].peer_attr[address].sa_limit = 111\n msdp.device_attr[self.dev1].vrf_attr[vrf_name].peer_attr[address].keepalive_interval = 13\n msdp.device_attr[self.dev1].vrf_attr[vrf_name].peer_attr[address].holdtime_interval = 50\n\n cfgs = msdp.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [self.dev1.name])\n self.maxDiff = None\n self.assertEqual(str(cfgs[self.dev1.name]), '\\n'.join(\n ['feature msdp',\n 'ip msdp peer 1.1.1.1 connect-source loopback2 remote-as 100',\n 'ip msdp shutdown 1.1.1.1',\n 'ip msdp description 1.1.1.1 R1',\n 'ip msdp mesh-group 1.1.1.1 1',\n 'ip msdp sa-policy 1.1.1.1 filtera in',\n 'ip msdp sa-policy 1.1.1.1 filtera out',\n 'ip msdp sa-limit 1.1.1.1 111',\n 'ip msdp keepalive 1.1.1.1 13 50',\n ]))\n uncfg = msdp.build_unconfig(apply=False, attributes={\n 'device_attr':\n {self.dev1.name:\n {'vrf_attr': {\n 'default': {\n 'peer_attr': {\n '1.1.1.1': {\n 'address': None,\n 'enable':None,\n 'description':None,\n 'mesh_group':None,\n 'sa_filter_in':None,\n 'sa_filter_out':None,\n 'sa_limit':None,\n 'keepalive_interval':None,\n 'holdtime_interval':None}}}}}}})\n self.assertCountEqual(uncfg.keys(), [self.dev1.name])\n self.maxDiff = None\n self.assertMultiLineEqual(str(uncfg[self.dev1.name]), '\\n'.join(\n ['no ip msdp peer 1.1.1.1',\n 'no ip msdp shutdown 1.1.1.1',\n 'no ip msdp description 1.1.1.1 R1',\n 'no ip msdp mesh-group 1.1.1.1 1',\n 'no ip msdp sa-policy 1.1.1.1 filtera in',\n 'no ip msdp sa-policy 1.1.1.1 filtera out',\n 'no ip msdp sa-limit 1.1.1.1 111',\n 'no ip msdp keepalive 1.1.1.1 13 50',\n ]))\n # unconfig msdp when all keys are configured\n uncfg_feature = msdp.build_unconfig(apply=False)\n self.assertCountEqual(uncfg_feature.keys(), [self.dev1.name])\n\n self.maxDiff = None\n self.assertMultiLineEqual(str(uncfg_feature[self.dev1.name]), '\\n'.join(\n ['no feature msdp',\n ]))\n\n def test_msdp_enable_uncfg(self):\n msdp = Msdp()\n self.dev1.add_feature(msdp)\n vrf_name = 'default'\n address = '1.1.1.1'\n msdp.device_attr[self.dev1].vrf_attr[vrf_name].peer_attr[address].enable = True\n\n cfgs = msdp.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [self.dev1.name])\n self.maxDiff = None\n self.assertEqual(str(cfgs[self.dev1.name]), '\\n'.join(\n ['no ip msdp shutdown 1.1.1.1',\n ]))\n uncfg = msdp.build_unconfig(apply=False, attributes={\n 'device_attr':\n {self.dev1.name:\n {'vrf_attr': {\n vrf_name: {\n 'peer_attr': {\n address: {\n 'enable':None}}}}}}})\n self.assertCountEqual(uncfg.keys(), [self.dev1.name])\n self.maxDiff = None\n self.assertMultiLineEqual(str(uncfg[self.dev1.name]), '\\n'.join(\n ['ip msdp shutdown 1.1.1.1',\n ]))\n\n def test_learn_config(self):\n\n testbed = Testbed()\n dev = Device(testbed=testbed, name='PE2', os='nxos')\n dev.custom = {'abstraction':{'order':['os'], 'context':'cli'}}\n dev.mapping={}\n dev.mapping['cli']='cli'\n # Give the device as a connection type\n # This is done in order to call the parser on the output provided\n dev.connectionmgr.connections['cli'] = dev\n\n golden_output = {'return_value': '''\n N95_2_R2# show run msdp\n!Command: show running-config msdp\n!Time: Mon Aug 27 20:17:11 2018\n\nversion 7.0(3)I7(3)\nfeature msdp\n\nip msdp description 6.6.6.6 test description\nip msdp keepalive 6.6.6.6 50 60\nip msdp reconnect-interval 15\n\nvrf context VRF1\n ip msdp description 6.6.6.6 test description on VRF1\n '''}\n\n golden_output_vrf = '''\n 
N95_2_R2# show run pim | inc vrf\nvrf context VRF1\n '''\n golden_output_reconnect = '''\n N95_2_R2# show run msdp | sec '^i' | inc reconnect-interval\nip msdp reconnect-interval 15\n '''\n golden_output_reconnect_vrf = '''\n N95_2_R2# show run msdp | sec VRF1 | inc reconnect-interval\n\n '''\n golden_output_description = '''\n N95_2_R2# show run msdp | sec '^i | inc description\nip msdp description 6.6.6.6 test description\n '''\n golden_output_description_vrf = '''\n N95_2_R2# show run msdp | sec VRF1 | inc description\n ip msdp description 6.6.6.6 test description on VRF1\n '''\n golden_output_keepalive = '''\n N95_2_R2# show run msdp | sec '^i' | inc keepalive\nip msdp keepalive 6.6.6.6 50 60\n '''\n golden_output_keepalive_vrf = '''\n N95_2_R2# show run msdp | sec VRF1 | inc keepalive\n '''\n golden_output_originator_id = '''\n N95_2_R2# show run msdp | sec '^i' | inc originator-id\nip msdp originator-id loopback0\n '''\n golden_output_originator_id_vrf = '''\n N95_2_R2# show run msdp | sec VRF1 | inc originator-id\n ip msdp originator-id loopback11\n '''\n golden_output_connect_source = '''\n N95_2_R2# show run msdp | sec '^i' | inc originator-id\nip msdp peer 6.6.6.6 connect-source loopback0\n '''\n golden_output_connect_source_vrf = '''\n N95_2_R2# show run msdp | sec VRF1 | inc originator-id\n ip msdp peer 6.6.6.6 connect-source loopback11 remote-as 234\n '''\n\n msdp = Msdp()\n outputs['show running-config msdp | inc vrf'] = golden_output_vrf\n outputs[\"show running-config msdp | sec '^i' | inc reconnect-interval\"] = golden_output_reconnect\n outputs[\"show running-config msdp | sec VRF1 | inc reconnect-interval\"] = golden_output_reconnect_vrf\n outputs[\"show running-config msdp | sec '^i' | inc description\"] = golden_output_description\n outputs[\"show running-config msdp | sec VRF1 | inc description\"] = golden_output_description_vrf\n outputs[\"show running-config msdp | sec '^i' | inc keepalive\"] = golden_output_keepalive\n outputs[\"show running-config msdp | sec VRF1 | inc keepalive\"] = golden_output_keepalive_vrf\n outputs[\"show running-config msdp | sec '^i' | inc originator-id\"] = golden_output_originator_id\n outputs[\"show running-config msdp | sec VRF1 | inc originator-id\"] = golden_output_originator_id_vrf\n outputs[\"show running-config msdp | sec '^i' | inc connect-source\"] = golden_output_connect_source\n outputs[\"show running-config msdp | sec VRF1 | inc connect-source\"] = golden_output_connect_source_vrf\n # Return outputs above as inputs to parser when called\n dev.execute = Mock()\n dev.execute.side_effect = mapper\n\n learn = Msdp.learn_config(device=dev,\n attributes=['msdp[vrf_attr][default][peer_attr][(.*)][description]'])\n\n self.assertEqual(learn[0].device_attr[dev].vrf_attr['default']\\\n .peer_attr['6.6.6.6'].description, 'test description')\n self.assertEqual(learn[0].device_attr[dev].vrf_attr['default']\\\n .peer_attr['6.6.6.6'].keepalive_interval, None)\n\n learn = Msdp.learn_config(device=dev)\n\n self.assertEqual(learn[0].device_attr[dev].vrf_attr['default']\\\n .peer_attr['6.6.6.6'].keepalive_interval, 50)\n self.assertEqual(learn[0].device_attr[dev].vrf_attr['default']\\\n .peer_attr['6.6.6.6'].holdtime_interval, 60)\n\n self.assertEqual(learn[0].device_attr[dev].vrf_attr['VRF1']\\\n .peer_attr['6.6.6.6'].description, 'test description on VRF1')\n self.assertEqual(learn[0].device_attr[dev].vrf_attr['default']\\\n .peer_attr['6.6.6.6'].description, 'test description')\n\n 
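# Note (added): the originator-id, connect-source and reconnect-interval\n # assertions below are driven by the mocked 'show running-config msdp'\n # outputs defined above, not by a live device.\n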
self.assertEqual(learn[0].device_attr[dev].vrf_attr['default']\\\n .originating_rp, 'loopback0')\n self.assertEqual(learn[0].device_attr[dev].vrf_attr['VRF1']\\\n .originating_rp, 'loopback11')\n\n self.assertEqual(learn[0].device_attr[dev].vrf_attr['default']\\\n .peer_attr['6.6.6.6'].connected_source, 'loopback0')\n self.assertEqual(learn[0].device_attr[dev].vrf_attr['VRF1']\\\n .peer_attr['6.6.6.6'].connected_source, 'loopback11')\n self.assertEqual(learn[0].device_attr[dev].vrf_attr['VRF1']\\\n .peer_attr['6.6.6.6'].peer_as, '234')\n\n self.assertEqual(learn[0].device_attr[dev].vrf_attr['default']\\\n .global_connect_retry_interval, 15)\n\n\nif __name__ == '__main__':\n unittest.main()\n", "id": "11532915", "language": "Python", "matching_score": 3.8522605895996094, "max_stars_count": 0, "path": "pkgs/conf-pkg/src/genie/libs/conf/msdp/nxos/tests/test_msdp.py" }, { "content": "\"\"\"\nImplement NXOS Specific Configurations for Msdp objects.\n\"\"\"\n\n# Table of contents:\n# class Msdp:\n# class DeviceAttributes:\n# class VrfAttributes:\n# class PeerAttributes:\n\n# Python\nfrom abc import ABC\n# Genie package\nfrom genie.conf.base.attributes import AttributesHelper\nfrom genie.conf.base.config import CliConfig\nfrom genie.conf.base.cli import CliConfigBuilder\n\nclass Msdp(ABC):\n\n class DeviceAttributes(ABC):\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # enabled\n if attributes.value('enabled'):\n if unconfig is False:\n configurations.append_line(attributes.format(\n 'feature msdp'))\n elif unconfig is True and\\\n attributes.attributes == {'enabled': {True: None}} or \\\n attributes.iswildcard:\n configurations.append_line('no feature msdp', raw=True)\n if apply:\n if configurations:\n self.device.configure(configurations)\n else:\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations)\n\n # VrfAttributes\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n for sub, attributes2 in attributes.mapping_values('vrf_attr',\n sort=True,\n keys=self.vrf_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n\n if apply:\n if configurations:\n self.device.configure(configurations)\n else:\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n class VrfAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not kwargs, kwargs\n\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n with configurations.submode_context(\n None if self.vrf_name == 'default' else\n attributes.format('vrf context {vrf_name}', force=True)):\n if self.vrf_name != 'default' and unconfig and \\\n attributes.iswildcard:\n configurations.submode_unconfig()\n\n if attributes.value('originating_rp'):\n configurations.append_line(\n attributes.format(\n 'ip msdp originator-id {originating_rp}'))\n\n if attributes.value('global_connect_retry_interval'):\n configurations.append_line(\n attributes.format(\n 'ip msdp reconnect-interval {global_connect_retry_interval}'))\n # PeerAttributes\n for sub, attributes2 in 
attributes.mapping_values('peer_attr',\n sort=True,\n keys=self.peer_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n\n if apply:\n if configurations:\n self.device.configure(configurations)\n else:\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n class PeerAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n if unconfig and attributes.value('address'):\n configurations.append_line(\n attributes.format('ip msdp peer {}'.format(self.address)))\n \n # only add the address in attributes when\n # unconfig for specific attributes is enabled\n if unconfig and attributes.attributes:\n attributes.attributes['address'] = None\n\n if attributes.value('connected_source'):\n if attributes.value('peer_as'):\n configurations.append_line(\n attributes.format('ip msdp peer {address} '\n 'connect-source {connected_source} remote-as {peer_as}'))\n else:\n configurations.append_line(\n attributes.format('ip msdp peer {address} '\n 'connect-source {connected_source}'))\n\n enable = attributes.value('enable')\n if enable is not None:\n if not enable:\n config_cmd = 'ip msdp shutdown {address}'\n unconfig_cmd = 'no ip msdp shutdown {}'.format(self.address)\n else:\n config_cmd = 'no ip msdp shutdown {address}'\n unconfig_cmd = 'ip msdp shutdown {}'.format(self.address)\n configurations.append_line(\n attributes.format(config_cmd),\n unconfig_cmd=unconfig_cmd)\n\n\n if attributes.value('description'):\n configurations.append_line(\n attributes.format('ip msdp description {address} {description}'))\n\n if attributes.value('mesh_group'):\n configurations.append_line(\n attributes.format('ip msdp mesh-group {address} {mesh_group}'))\n\n if attributes.value('sa_filter_in'):\n configurations.append_line(\n attributes.format('ip msdp sa-policy {address} {sa_filter_in} in'))\n\n if attributes.value('sa_filter_out'):\n configurations.append_line(\n attributes.format('ip msdp sa-policy {address} {sa_filter_out} out'))\n\n if attributes.value('sa_limit'):\n configurations.append_line(\n attributes.format('ip msdp sa-limit {address} {sa_limit}'))\n\n if attributes.value('keepalive_interval')\\\n and attributes.value('holdtime_interval'):\n configurations.append_line(\n attributes.format('ip msdp keepalive {address} {keepalive_interval} {holdtime_interval}'))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n", "id": "7123848", "language": "Python", "matching_score": 2.914996385574341, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/msdp/nxos/msdp.py" }, { "content": "\n# import python\nfrom abc import ABC\n\n# import genie\nfrom genie.conf.base.config import CliConfig\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning, \\\n AttributesHelper\n\n\nclass Stp(ABC):\n\n class DeviceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n
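# Note (added): CliConfigBuilder accumulates config lines; when constructed with\n # unconfig=True it renders each line with a 'no ' prefix, which is how\n # build_unconfig reuses build_config throughout this module (behavior inferred\n # from the expected outputs in the unit tests).\n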
configurations = CliConfigBuilder(unconfig=unconfig)\n \n # spanning-tree bridge assurance\n if attributes.value('bridge_assurance'):\n configurations.append_line(\n attributes.format('spanning-tree bridge assurance'))\n \n # spanning-tree etherchannel guard misconfig\n if attributes.value('etherchannel_misconfig_guard'):\n configurations.append_line(\n attributes.format('spanning-tree etherchannel guard misconfig'))\n \n # errdisable recovery interval <bpduguard_timeout_recovery>\n if attributes.value('bpduguard_timeout_recovery'):\n configurations.append_line(\n attributes.format('errdisable recovery '\n 'interval {bpduguard_timeout_recovery}'))\n \n # spanning-tree loopguard default\n if attributes.value('loop_guard'):\n configurations.append_line(\n attributes.format('spanning-tree loopguard default'))\n \n # spanning-tree portfast bpduguard default\n if attributes.value('bpdu_guard'):\n configurations.append_line(\n attributes.format('spanning-tree portfast bpduguard default'))\n \n # spanning-tree portfast bpdufilter default\n if attributes.value('bpdu_filter'):\n configurations.append_line(\n attributes.format('spanning-tree portfast bpdufilter default'))\n\n # mode attributes\n for sub, attributes2 in attributes.mapping_values('mode_attr',\n sort=True, keys=self.mode_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n\n if apply:\n if configurations:\n self.device.configure(configurations)\n else:\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n class ModeAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # spanning-tree mode {mode}\n if attributes.value('mode'):\n mode = 'mst' if 'mstp' in self.mode else self.mode\n configurations.append_line(\n attributes.format('spanning-tree mode {}'.format(mode), force=True))\n\n # spanning-tree transmit hold-count {hold_count}\n configurations.append_line(\n attributes.format('spanning-tree transmit hold-count {hold_count}'))\n\n # mst_attr\n for sub, attributes2 in attributes.mapping_values('mst_attr',\n sort=True, keys=self.mst_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n\n # pvst_attr\n for sub, attributes2 in attributes.mapping_values('pvst_attr',\n sort=True, keys=self.pvst_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n # ---------------\n # mode MST\n # ---------------\n class MstAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None,\n unconfig=False, **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # spanning-tree mst max-hops <m_max_hop>\n configurations.append_line(\n attributes.format('spanning-tree mst max-hops {m_max_hop}'))\n\n # spanning-tree mst hello-time <m_hello_time>\n
configurations.append_line(\n attributes.format('spanning-tree mst hello-time {m_hello_time}'))\n\n # spanning-tree mst max-age <m_max_age>\n configurations.append_line(\n attributes.format('spanning-tree mst max-age {m_max_age}'))\n\n # spanning-tree mst forward-time <m_forwarding_delay>\n configurations.append_line(\n attributes.format('spanning-tree mst forward-time {m_forwarding_delay}'))\n\n # instance_attr\n for sub, attributes2 in attributes.mapping_values('instance_attr',\n sort=True, keys=self.instance_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n # interface_attr\n for sub, attributes2 in attributes.mapping_values('interface_attr',\n sort=True, keys=self.interface_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None,\n **kwargs):\n return self.build_config(apply=apply,\n attributes=attributes,\n unconfig=True, **kwargs)\n\n class InterfaceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None,\n unconfig=False, **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # all per-interface lines go under the interface submode\n with configurations.submode_context(\n attributes.format('interface {interface_name}',\n force=True)):\n # spanning-tree portfast\n if attributes.value('m_if_edge_port') and \\\n 'edge_enable' in attributes.value('m_if_edge_port'):\n configurations.append_line(\n attributes.format('spanning-tree portfast'))\n\n # spanning-tree link-type point-to-point\n if attributes.value('m_if_link_type') and \\\n 'p2p' in attributes.value('m_if_link_type'):\n configurations.append_line(\n attributes.format('spanning-tree link-type point-to-point'))\n\n # spanning-tree link-type shared\n if attributes.value('m_if_link_type') and \\\n 'shared' in attributes.value('m_if_link_type'):\n configurations.append_line(\n attributes.format('spanning-tree link-type shared'))\n\n # spanning-tree guard <m_if_guard>\n configurations.append_line(attributes.format(\n 'spanning-tree guard {m_if_guard}'))\n\n # spanning-tree bpduguard enable\n if attributes.value('m_if_bpdu_guard'):\n configurations.append_line(\n attributes.format('spanning-tree bpduguard enable'))\n\n # spanning-tree bpdufilter enable\n if attributes.value('m_if_bpdu_filter'):\n configurations.append_line(\n attributes.format('spanning-tree bpdufilter enable'))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None,\n **kwargs):\n return self.build_config(apply=apply,\n attributes=attributes,\n unconfig=True, **kwargs)\n\n class InstanceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None,\n unconfig=False, **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # only add mst_id in attributes when\n # unconfig for specific attributes is enabled\n if unconfig and attributes.attributes:\n attributes.attributes['mst_id'] = None\n\n # enter the MST configuration submode when any of its settings are present\n if attributes.value('m_vlans') or \\\n attributes.value('m_name') or \\\n attributes.value('m_revision'):\n with configurations.submode_context(\n attributes.format('spanning-tree mst
configuration',\n force=True)):\n\n # instance <mst_id> vlan <m_vlans>\n configurations.append_line(\n attributes.format(\n 'instance {mst_id} vlan {m_vlans}'))\n\n # name <m_name>\n configurations.append_line(\n attributes.format(\n 'name {m_name}'))\n\n # revision <m_revision>\n configurations.append_line(\n attributes.format(\n 'revision {m_revision}'))\n\n # spanning-tree mst <mst_id> priority <m_bridge_priority>\n configurations.append_line(\n attributes.format(\n 'spanning-tree mst {mst_id} priority {m_bridge_priority}'))\n\n # interface_attr\n for sub, attributes2 in attributes.mapping_values('interface_attr',\n sort=True, keys=self.interface_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig,\n mst_id = self.mst_id))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None,\n **kwargs):\n return self.build_config(apply=apply,\n attributes=attributes,\n unconfig=True, **kwargs)\n\n class InterfaceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None,\n unconfig=False, **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n \n self.mst_id = kwargs['mst_id']\n \n # only add mst_id in attributes when\n # unconfig for specific attributes is enabled\n if unconfig and attributes.attributes:\n attributes.attributes['mst_id'] = None\n\n # all per-instance interface lines go under the interface submode\n with configurations.submode_context(\n attributes.format('interface {interface_name}',\n force=True)):\n # spanning-tree mst <mst_id> cost <m_inst_if_cost>\n configurations.append_line(\n attributes.format(\n 'spanning-tree mst {mst_id} cost {m_inst_if_cost}'))\n\n # spanning-tree mst <mst_id> port-priority <m_inst_if_port_priority>\n configurations.append_line(\n attributes.format(\n 'spanning-tree mst {mst_id} port-priority {m_inst_if_port_priority}'))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None,\n **kwargs):\n return self.build_config(apply=apply,\n attributes=attributes,\n unconfig=True, **kwargs)\n\n # ---------------\n # mode Pvst\n # ---------------\n class PvstAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None,\n unconfig=False, **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # vlan_attr\n for sub, attributes2 in attributes.mapping_values('vlan_attr',\n sort=True, keys=self.vlan_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n # interface_attr\n for sub, attributes2 in attributes.mapping_values('interface_attr',\n sort=True, keys=self.interface_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None,\n **kwargs):\n return self.build_config(apply=apply,\n attributes=attributes,\n unconfig=True, **kwargs)\n\n class InterfaceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None,\n unconfig=False, **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # all per-interface lines go under the interface submode\n with configurations.submode_context(\n attributes.format('interface
{interface_name}',\n force=True)):\n # spanning-tree portfast\n if attributes.value('p_if_edge_port') and \\\n 'edge_enable' in attributes.value('p_if_edge_port'):\n configurations.append_line(\n attributes.format('spanning-tree portfast'))\n\n # spanning-tree link-type point-to-point\n if attributes.value('p_if_link_type') and \\\n 'p2p' in attributes.value('p_if_link_type'):\n configurations.append_line(\n attributes.format('spanning-tree link-type point-to-point'))\n\n # spanning-tree link-type shared\n if attributes.value('p_if_link_type') and \\\n 'shared' in attributes.value('p_if_link_type'):\n configurations.append_line(\n attributes.format('spanning-tree link-type shared'))\n\n # spanning-tree guard <p_if_guard>\n configurations.append_line(attributes.format(\n 'spanning-tree guard {p_if_guard}'))\n\n # spanning-tree bpduguard enable\n if attributes.value('p_if_bpdu_guard'):\n configurations.append_line(\n attributes.format('spanning-tree bpduguard enable'))\n\n # spanning-tree bpdufilter enable\n if attributes.value('p_if_bpdu_filter'):\n configurations.append_line(\n attributes.format('spanning-tree bpdufilter enable'))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None,\n **kwargs):\n return self.build_config(apply=apply,\n attributes=attributes,\n unconfig=True, **kwargs)\n\n class VlanAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None,\n unconfig=False, **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # only add the vlan in attributes when\n # unconfig for specific attributes is enabled\n if unconfig and attributes.attributes:\n attributes.attributes['vlan'] = None\n\n if attributes.value('vlan_id'):\n configurations.append_line(attributes.format(\n 'spanning-tree vlan {vlan_id}'))\n\n # spanning-tree vlan <vlan_id> hello-time <v_hello_time>\n configurations.append_line(\n attributes.format(\n 'spanning-tree vlan {vlan} hello-time {v_hello_time}'))\n\n # spanning-tree vlan <vlan_id> max-age <v_max_age>\n configurations.append_line(\n attributes.format(\n 'spanning-tree vlan {vlan} max-age {v_max_age}'))\n\n # spanning-tree vlan <vlan_id> forward-time <v_forwarding_delay>\n configurations.append_line(\n attributes.format(\n 'spanning-tree vlan {vlan} forward-time {v_forwarding_delay}'))\n\n # spanning-tree vlan <vlan_id> priority <v_bridge_priority>\n configurations.append_line(\n attributes.format(\n 'spanning-tree vlan {vlan} priority {v_bridge_priority}'))\n\n # interface_attr\n for sub, attributes2 in attributes.mapping_values('interface_attr',\n sort=True, keys=self.interface_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig,\n vlan_id = self.vlan))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None,\n **kwargs):\n return self.build_config(apply=apply,\n attributes=attributes,\n unconfig=True, **kwargs)\n\n class InterfaceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None,\n unconfig=False, **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n self.vlan = kwargs['vlan_id']\n\n # only add the vlan in attributes when\n # unconfig for specific attributes is enabled\n if unconfig and attributes.attributes:\n attributes.attributes['vlan'] = None\n\n # all per-VLAN
with configurations.submode_context(\n attributes.format('interface {interface_name}',\n force=True)):\n # spanning-tree vlan <vlan_id> cost <v_if_cost>\n configurations.append_line(\n attributes.format(\n 'spanning-tree vlan {vlan} cost {v_if_cost}'))\n\n # spanning-tree vlan <vlan_id> port-priority <v_if_port_priority>\n configurations.append_line(\n attributes.format(\n 'spanning-tree vlan {vlan} port-priority {v_if_port_priority}'))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None,\n **kwargs):\n return self.build_config(apply=apply,\n attributes=attributes,\n unconfig=True, **kwargs)\n\n\n\n", "id": "4902761", "language": "Python", "matching_score": 5.169695854187012, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/stp/iosxe/stp.py" }, { "content": "#!/usr/bin/env python\n\n# Python\nimport unittest\nfrom unittest.mock import Mock\n\n# Genie\nfrom genie.conf.tests import TestCase\nfrom genie.conf import Genie\nfrom genie.conf.base import Testbed, Device, Link, Interface\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning\n\n# Stp\nfrom genie.libs.conf.stp import Stp\n\n\nclass test_stp(TestCase):\n\n def setUp(self):\n \n # Set Genie Tb\n testbed = Testbed()\n Genie.testbed = testbed\n \n # Device\n self.dev1 = Device(name='PE1', testbed=testbed, os='iosxe')\n\n def test_stp_mst_full_config(self):\n\n # For failures\n self.maxDiff = None\n \n # Stp object\n stp = Stp()\n self.dev1.add_feature(stp)\n\n # bridge_assurance command rejected by router\n stp.device_attr[self.dev1].etherchannel_misconfig_guard = True\n stp.device_attr[self.dev1].bpduguard_timeout_recovery = 333\n stp.device_attr[self.dev1].loop_guard = True\n stp.device_attr[self.dev1].bpdu_guard = True\n stp.device_attr[self.dev1].bpdu_filter = True\n\n # mode_attr\n stp.device_attr[self.dev1].mode_attr['mstp'].hold_count = 10\n\n # mst_attr\n stp.device_attr[self.dev1].mode_attr['mstp'].mst_attr['default'].m_max_hop = 30\n stp.device_attr[self.dev1].mode_attr['mstp'].mst_attr['default'].m_hello_time = 5\n stp.device_attr[self.dev1].mode_attr['mstp'].mst_attr['default'].m_max_age = 10\n stp.device_attr[self.dev1].mode_attr['mstp'].mst_attr['default'].m_forwarding_delay = 4\n\n # instance_attr\n stp.device_attr[self.dev1].mode_attr['mstp'].mst_attr['default'].\\\n instance_attr[100].m_vlans = '200-210'\n stp.device_attr[self.dev1].mode_attr['mstp'].mst_attr['default'].\\\n instance_attr[100].m_name = 'MST'\n stp.device_attr[self.dev1].mode_attr['mstp'].mst_attr['default'].\\\n instance_attr[100].m_revision = 300\n stp.device_attr[self.dev1].mode_attr['mstp'].mst_attr['default'].\\\n instance_attr[10].m_bridge_priority = 4096\n\n stp.device_attr[self.dev1].mode_attr['mstp'].mst_attr['default'].\\\n instance_attr[10].interface_attr['GigabitEthernet1/0/15'].m_inst_if_cost = '123'\n stp.device_attr[self.dev1].mode_attr['mstp'].mst_attr['default'].\\\n instance_attr[10].interface_attr['GigabitEthernet1/0/15'].m_inst_if_port_priority = 32\n\n # interface_attr\n stp.device_attr[self.dev1].mode_attr['mstp'].mst_attr['default'].\\\n interface_attr['GigabitEthernet1/0/15'].m_if_edge_port = 'edge_enable'\n stp.device_attr[self.dev1].mode_attr['mstp'].mst_attr['default'].\\\n interface_attr['GigabitEthernet1/0/15'].m_if_link_type = 'p2p'\n stp.device_attr[self.dev1].mode_attr['mstp'].mst_attr['default'].\\\n interface_attr['GigabitEthernet1/0/15'].m_if_guard = 'none'\n 
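# Each m_if_* leaf in this block maps to one 'spanning-tree ...'\n # line under the interface in the expected configuration below.\n 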
stp.device_attr[self.dev1].mode_attr['mstp'].mst_attr['default'].\\\n interface_attr['GigabitEthernet1/0/15'].m_if_bpdu_guard = True\n stp.device_attr[self.dev1].mode_attr['mstp'].mst_attr['default'].\\\n interface_attr['GigabitEthernet1/0/15'].m_if_bpdu_filter = True\n\n cfgs = stp.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[self.dev1.name]), '\\n'.\\\n join([\n 'spanning-tree etherchannel guard misconfig',\n 'errdisable recovery interval 333',\n 'spanning-tree loopguard default',\n 'spanning-tree portfast bpduguard default',\n 'spanning-tree portfast bpdufilter default',\n 'spanning-tree mode mst',\n 'spanning-tree transmit hold-count 10',\n 'spanning-tree mst max-hops 30',\n 'spanning-tree mst hello-time 5',\n 'spanning-tree mst max-age 10',\n 'spanning-tree mst forward-time 4',\n 'spanning-tree mst 10 priority 4096',\n 'interface GigabitEthernet1/0/15',\n ' spanning-tree mst 10 cost 123',\n ' spanning-tree mst 10 port-priority 32',\n ' exit',\n 'spanning-tree mst configuration',\n ' instance 100 vlan 200-210',\n ' name MST',\n ' revision 300',\n ' exit',\n 'interface GigabitEthernet1/0/15',\n ' spanning-tree portfast',\n ' spanning-tree link-type point-to-point',\n ' spanning-tree guard none',\n ' spanning-tree bpduguard enable',\n ' spanning-tree bpdufilter enable',\n ' exit',\n ]))\n\n cfgs = stp.build_unconfig(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[self.dev1.name]), '\\n'.\\\n join([\n 'no spanning-tree etherchannel guard misconfig',\n 'no errdisable recovery interval 333',\n 'no spanning-tree loopguard default',\n 'no spanning-tree portfast bpduguard default',\n 'no spanning-tree portfast bpdufilter default',\n 'no spanning-tree mode mst',\n 'no spanning-tree transmit hold-count 10',\n 'no spanning-tree mst max-hops 30',\n 'no spanning-tree mst hello-time 5',\n 'no spanning-tree mst max-age 10',\n 'no spanning-tree mst forward-time 4',\n 'no spanning-tree mst 10 priority 4096',\n 'interface GigabitEthernet1/0/15',\n ' no spanning-tree mst 10 cost 123',\n ' no spanning-tree mst 10 port-priority 32',\n ' exit',\n 'spanning-tree mst configuration',\n ' no instance 100 vlan 200-210',\n ' no name MST',\n ' no revision 300',\n ' exit',\n 'interface GigabitEthernet1/0/15',\n ' no spanning-tree portfast',\n ' no spanning-tree link-type point-to-point',\n ' no spanning-tree guard none',\n ' no spanning-tree bpduguard enable',\n ' no spanning-tree bpdufilter enable',\n ' exit',\n ]))\n\n # uncfg with attributes\n cfgs = stp.build_unconfig(apply=False,\n attributes={'device_attr': {\n self.dev1: {\n 'mode_attr': {\n 'mstp': {\n 'mst_attr': {\n 'default': {\n 'm_max_age': None,\n 'instance_attr': {\n 100: {\n 'm_name': None,\n },\n 10: {\n 'm_bridge_priority': None,\n 'interface_attr': {\n 'GigabitEthernet1/0/15': {\n 'm_inst_if_port_priority': None,\n }\n }\n }\n },\n 'interface_attr': {\n 'GigabitEthernet1/0/15': {\n 'm_if_bpdu_filter': None\n }\n }\n }\n }\n }\n }\n }}})\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[self.dev1.name]), '\\n'.\\\n join([\n 'no spanning-tree mst max-age 10',\n 'no spanning-tree mst 10 priority 4096',\n 'interface GigabitEthernet1/0/15',\n ' no spanning-tree mst 10 port-priority 32',\n ' exit',\n 'spanning-tree mst configuration',\n ' no name MST',\n ' exit',\n 'interface GigabitEthernet1/0/15',\n ' no spanning-tree bpdufilter enable',\n ' exit',\n ]))\n\n def test_stp_pvst_full_config(self):\n\n # For failures\n self.maxDiff = None\n \n 
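# This PVST test follows the same flow as the MST test above: build\n # the attribute tree, then assert the generated config, full unconfig\n # and attribute-selective unconfig CLI output.\n 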
# Pim object\n stp = Stp()\n self.dev1.add_feature(stp)\n\n # bridge_assurance command rejected by router\n stp.device_attr[self.dev1].etherchannel_misconfig_guard = True\n stp.device_attr[self.dev1].bpduguard_timeout_recovery = 333\n stp.device_attr[self.dev1].loop_guard = True\n stp.device_attr[self.dev1].bpdu_guard = True\n stp.device_attr[self.dev1].bpdu_filter = True\n\n # vlan_attr\n stp.device_attr[self.dev1].mode_attr['pvst'].pvst_attr['default'].\\\n vlan_attr['500'].v_hello_time = 5\n stp.device_attr[self.dev1].mode_attr['pvst'].pvst_attr['default'].\\\n vlan_attr['500'].v_max_age = 10\n stp.device_attr[self.dev1].mode_attr['pvst'].pvst_attr['default'].\\\n vlan_attr['500'].v_forwarding_delay = 15\n stp.device_attr[self.dev1].mode_attr['pvst'].pvst_attr['default'].\\\n vlan_attr['500'].v_bridge_priority = 4096\n stp.device_attr[self.dev1].mode_attr['pvst'].pvst_attr['default'].\\\n vlan_attr['666'].vlan_id = '666'\n\n stp.device_attr[self.dev1].mode_attr['pvst'].pvst_attr['default'].\\\n vlan_attr['500'].interface_attr['GigabitEthernet1/0/15'].v_if_cost = '123'\n stp.device_attr[self.dev1].mode_attr['pvst'].pvst_attr['default'].\\\n vlan_attr['500'].interface_attr['GigabitEthernet1/0/15'].v_if_port_priority = 16\n\n # interface_attr\n stp.device_attr[self.dev1].mode_attr['pvst'].pvst_attr['default'].\\\n interface_attr['GigabitEthernet1/0/15'].p_if_edge_port = 'edge_enable'\n stp.device_attr[self.dev1].mode_attr['pvst'].pvst_attr['default'].\\\n interface_attr['GigabitEthernet1/0/15'].p_if_link_type = 'shared'\n stp.device_attr[self.dev1].mode_attr['pvst'].pvst_attr['default'].\\\n interface_attr['GigabitEthernet1/0/15'].p_if_guard = 'root'\n stp.device_attr[self.dev1].mode_attr['pvst'].pvst_attr['default'].\\\n interface_attr['GigabitEthernet1/0/15'].p_if_bpdu_guard = True\n stp.device_attr[self.dev1].mode_attr['pvst'].pvst_attr['default'].\\\n interface_attr['GigabitEthernet1/0/15'].p_if_bpdu_filter = True\n\n cfgs = stp.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[self.dev1.name]), '\\n'.\\\n join([\n 'spanning-tree etherchannel guard misconfig',\n 'errdisable recovery interval 333',\n 'spanning-tree loopguard default',\n 'spanning-tree portfast bpduguard default',\n 'spanning-tree portfast bpdufilter default',\n 'spanning-tree mode pvst',\n 'spanning-tree vlan 500 hello-time 5',\n 'spanning-tree vlan 500 max-age 10',\n 'spanning-tree vlan 500 forward-time 15',\n 'spanning-tree vlan 500 priority 4096',\n 'interface GigabitEthernet1/0/15',\n ' spanning-tree vlan 500 cost 123',\n ' spanning-tree vlan 500 port-priority 16',\n ' exit',\n 'spanning-tree vlan 666',\n 'interface GigabitEthernet1/0/15',\n ' spanning-tree portfast',\n ' spanning-tree link-type shared',\n ' spanning-tree guard root',\n ' spanning-tree bpduguard enable',\n ' spanning-tree bpdufilter enable',\n ' exit',\n ]))\n\n cfgs = stp.build_unconfig(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[self.dev1.name]), '\\n'.\\\n join([\n 'no spanning-tree etherchannel guard misconfig',\n 'no errdisable recovery interval 333',\n 'no spanning-tree loopguard default',\n 'no spanning-tree portfast bpduguard default',\n 'no spanning-tree portfast bpdufilter default',\n 'no spanning-tree mode pvst',\n 'no spanning-tree vlan 500 hello-time 5',\n 'no spanning-tree vlan 500 max-age 10',\n 'no spanning-tree vlan 500 forward-time 15',\n 'no spanning-tree vlan 500 priority 4096',\n 'interface GigabitEthernet1/0/15',\n ' no spanning-tree vlan 
500 cost 123',\n ' no spanning-tree vlan 500 port-priority 16',\n ' exit',\n 'no spanning-tree vlan 666',\n 'interface GigabitEthernet1/0/15',\n ' no spanning-tree portfast',\n ' no spanning-tree link-type shared',\n ' no spanning-tree guard root',\n ' no spanning-tree bpduguard enable',\n ' no spanning-tree bpdufilter enable',\n ' exit',\n ]))\n\n # uncfg with attributes\n cfgs = stp.build_unconfig(apply=False,\n attributes={'device_attr': {\n self.dev1: {\n 'mode_attr': {\n 'pvst': {\n 'pvst_attr': {\n 'default': {\n 'vlan_attr': {\n '500': {\n 'v_bridge_priority': None,\n 'interface_attr': {\n 'GigabitEthernet1/0/15': {\n 'v_if_cost': None\n }\n }\n }\n },\n 'interface_attr': {\n 'GigabitEthernet1/0/15': {\n 'p_if_bpdu_filter': None\n }\n }\n }\n }\n }\n }\n }}})\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[self.dev1.name]), '\\n'.\\\n join([\n 'no spanning-tree vlan 500 priority 4096',\n 'interface GigabitEthernet1/0/15',\n ' no spanning-tree vlan 500 cost 123',\n ' exit',\n 'interface GigabitEthernet1/0/15',\n ' no spanning-tree bpdufilter enable',\n ' exit',\n ]))\n \n\n def test_stp_rapid_pvst_full_config(self):\n\n # For failures\n self.maxDiff = None\n \n # Pim object\n stp = Stp()\n self.dev1.add_feature(stp)\n\n # bridge_assurance command rejected by router\n stp.device_attr[self.dev1].etherchannel_misconfig_guard = True\n stp.device_attr[self.dev1].bpduguard_timeout_recovery = 333\n stp.device_attr[self.dev1].loop_guard = True\n stp.device_attr[self.dev1].bpdu_guard = True\n stp.device_attr[self.dev1].bpdu_filter = True\n\n # vlan_attr\n stp.device_attr[self.dev1].mode_attr['rapid-pvst'].pvst_attr['default'].\\\n vlan_attr['500'].v_hello_time = 5\n stp.device_attr[self.dev1].mode_attr['rapid-pvst'].pvst_attr['default'].\\\n vlan_attr['500'].v_max_age = 10\n stp.device_attr[self.dev1].mode_attr['rapid-pvst'].pvst_attr['default'].\\\n vlan_attr['500'].v_forwarding_delay = 15\n stp.device_attr[self.dev1].mode_attr['rapid-pvst'].pvst_attr['default'].\\\n vlan_attr['500'].v_bridge_priority = 4096\n\n stp.device_attr[self.dev1].mode_attr['rapid-pvst'].pvst_attr['default'].\\\n vlan_attr['500'].interface_attr['GigabitEthernet1/0/15'].v_if_cost = '123'\n stp.device_attr[self.dev1].mode_attr['rapid-pvst'].pvst_attr['default'].\\\n vlan_attr['500'].interface_attr['GigabitEthernet1/0/15'].v_if_port_priority = 16\n\n # interface_attr\n stp.device_attr[self.dev1].mode_attr['rapid-pvst'].pvst_attr['default'].\\\n interface_attr['GigabitEthernet1/0/15'].p_if_edge_port = 'edge_enable'\n stp.device_attr[self.dev1].mode_attr['rapid-pvst'].pvst_attr['default'].\\\n interface_attr['GigabitEthernet1/0/15'].p_if_link_type = 'shared'\n stp.device_attr[self.dev1].mode_attr['rapid-pvst'].pvst_attr['default'].\\\n interface_attr['GigabitEthernet1/0/15'].p_if_guard = 'root'\n stp.device_attr[self.dev1].mode_attr['rapid-pvst'].pvst_attr['default'].\\\n interface_attr['GigabitEthernet1/0/15'].p_if_bpdu_guard = True\n stp.device_attr[self.dev1].mode_attr['rapid-pvst'].pvst_attr['default'].\\\n interface_attr['GigabitEthernet1/0/15'].p_if_bpdu_filter = True\n\n cfgs = stp.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[self.dev1.name]), '\\n'.\\\n join([\n 'spanning-tree etherchannel guard misconfig',\n 'errdisable recovery interval 333',\n 'spanning-tree loopguard default',\n 'spanning-tree portfast bpduguard default',\n 'spanning-tree portfast bpdufilter default',\n 'spanning-tree mode rapid-pvst',\n 'spanning-tree vlan 500 
hello-time 5',\n 'spanning-tree vlan 500 max-age 10',\n 'spanning-tree vlan 500 forward-time 15',\n 'spanning-tree vlan 500 priority 4096',\n 'interface GigabitEthernet1/0/15',\n ' spanning-tree vlan 500 cost 123',\n ' spanning-tree vlan 500 port-priority 16',\n ' exit',\n 'interface GigabitEthernet1/0/15',\n ' spanning-tree portfast',\n ' spanning-tree link-type shared',\n ' spanning-tree guard root',\n ' spanning-tree bpduguard enable',\n ' spanning-tree bpdufilter enable',\n ' exit',\n ]))\n\n cfgs = stp.build_unconfig(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[self.dev1.name]), '\\n'.\\\n join([\n 'no spanning-tree etherchannel guard misconfig',\n 'no errdisable recovery interval 333',\n 'no spanning-tree loopguard default',\n 'no spanning-tree portfast bpduguard default',\n 'no spanning-tree portfast bpdufilter default',\n 'no spanning-tree mode rapid-pvst',\n 'no spanning-tree vlan 500 hello-time 5',\n 'no spanning-tree vlan 500 max-age 10',\n 'no spanning-tree vlan 500 forward-time 15',\n 'no spanning-tree vlan 500 priority 4096',\n 'interface GigabitEthernet1/0/15',\n ' no spanning-tree vlan 500 cost 123',\n ' no spanning-tree vlan 500 port-priority 16',\n ' exit',\n 'interface GigabitEthernet1/0/15',\n ' no spanning-tree portfast',\n ' no spanning-tree link-type shared',\n ' no spanning-tree guard root',\n ' no spanning-tree bpduguard enable',\n ' no spanning-tree bpdufilter enable',\n ' exit',\n ]))\n\n # uncfg with attributes\n cfgs = stp.build_unconfig(apply=False,\n attributes={'device_attr': {\n self.dev1: {\n 'mode_attr': {\n 'rapid-pvst': {\n 'pvst_attr': {\n 'default': {\n 'm_max_age': None,\n 'vlan_attr': {\n '500': {\n 'v_bridge_priority': None\n }\n },\n 'interface_attr': {\n 'GigabitEthernet1/0/15': {\n 'p_if_bpdu_filter': None\n }\n }\n }\n }\n }\n }\n }}})\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[self.dev1.name]), '\\n'.\\\n join([\n 'no spanning-tree vlan 500 priority 4096',\n 'interface GigabitEthernet1/0/15',\n ' no spanning-tree bpdufilter enable',\n ' exit',\n ]))\n \n\nif __name__ == '__main__':\n unittest.main()\n", "id": "2641164", "language": "Python", "matching_score": 1.878162145614624, "max_stars_count": 0, "path": "pkgs/conf-pkg/src/genie/libs/conf/stp/iosxe/tests/test_stp.py" }, { "content": "''' \nStp Genie Ops Object for IOSXR - CLI.\n'''\n# Genie\nfrom genie.libs.ops.stp.stp import Stp as SuperStp\nfrom genie.ops.base import Context\n\n# Parser\nfrom genie.libs.parser.iosxr.show_spanning_tree import ShowSpanningTreeMst, \\\n ShowSpanningTreeMstag, \\\n ShowSpanningTreePvrst, \\\n ShowSpanningTreePvrsTag, \\\n ShowSpanningTreePvsTag\n\nclass Stp(SuperStp):\n '''STP Genie Ops Object'''\n\n def learn(self, mst_domain=None, mstag_domain=None, pvst_id=None, pvrstag_domain=None, pvstag_domain=None):\n '''Learn stp Ops'''\n\n ########################################################################\n # info\n ########################################################################\n # global - N/A\n # bridge_assurance - N/A\n # etherchannel_misconfig_guard - N/A\n # bpduguard_timeout_recovery - N/A\n # loop_guard - N/A\n # bpdu_guard - N/A\n # bpdu_filter - N/A\n # mstp\n # mst_domain\n # domain - N/A\n # name - N/A\n # revision - N/A\n # max_hop - N/A\n # hello_time - N/A\n # max_age - N/A\n # forwarding_delay - N/A\n # hold_count - N/A\n # mst_instances\n # mst_id\n # mst_id\n # vlan\n # bridge_priority\n # bridge_address\n # designated_root_priority\n # designated_root_address\n # 
root_port - N/A\n # root_cost\n # hold_time - N/A\n # topology_changes - N/A\n # time_since_topology_change - N/A\n # interfaces\n # m_interface\n # name\n # cost\n # port_priority\n # port_num\n # role\n # port_state\n # designated_root_priority - N/A\n # designated_root_address - N/A\n # designated_cost - N/A\n # designated_bridge_priority\n # designated_bridge_address\n # designated_port_priority\n # designated_port_num\n # forward_transitions - N/A\n # counters - N/A\n # bpdu_sent - N/A\n # bpdu_received - N/A\n # interfaces - N/A\n # m_interface - N/A\n # name - N/A\n # edge_port - N/A\n # link_type - N/A\n # guard - N/A\n # bpdu_guard - N/A\n # bpdu_filter - N/A\n # mstag\n # mag_domain\n # domain\n # interfaces\n # mag_interface\n # interface\n # name\n # revision\n # bridge_id\n # preempt_delay\n # preempt_delay_state - N/A\n # max_age\n # provider_bridge\n # port_id\n # external_cost\n # hello_time\n # active\n # counters - N/A\n # bpdu_sent - N/A\n # instances\n # mag_id\n # instance\n # root_id\n # vlans\n # priority\n # root_priority\n # port_priority\n # cost\n # counters\n # topology_changes\n # pvst\n # pvst_id\n # pvst_id\n # max_age - N/A\n # hold_count - N/A\n # forwarding_delay - N/A\n # hello_time - N/A\n # vlans\n # vlan_id\n # vlan_id\n # hello_time - N/A\n # max_age - N/A\n # forwarding_delay - N/A\n # bridge_priority\n # configured_bridge_priority - N/A\n # sys_id_ext\n # bridge_address\n # designated_root_priority\n # designated_root_address\n # root_port - N/A\n # root_cost - N/A\n # hold_time - N/A\n # topology_changes - N/A\n # time_since_topology_change - N/A\n # interface\n # v_interface\n # name\n # cost\n # port_priority\n # port_num\n # role\n # port_state\n # designated_root_priority - N/A\n # designated_root_address - N/A\n # designated_cost - N/A\n # designated_bridge_priority\n # designated_bridge_address\n # designated_port_priority\n # designated_port_num\n # forward_transitions - N/A\n # counters - N/A\n # bpdu_sent - N/A\n # bpdu_received - N/A\n # interfaces - N/A\n # p_interface - N/A\n # name - N/A\n # edge_port - N/A\n # link_type - N/A\n # guard - N/A\n # bpdu_guard - N/A\n # bpdu_filter - N/A\n # hello_time - N/A\n # rapid_pvst - N/A\n # pvst_id - N/A\n # pvst_id - N/A\n # max_age - N/A\n # hold_count - N/A\n # forwarding_delay - N/A\n # hello_time - N/A\n # vlans - N/A\n # vlan_id - N/A\n # vlan_id - N/A\n # hello_time - N/A\n # max_age - N/A\n # forwarding_delay - N/A\n # bridge_priority - N/A\n # configured_bridge_priority - N/A\n # sys_id_ext - N/A\n # bridge_address - N/A\n # designated_root_priority - N/A\n # designated_root_address - N/A\n # root_port - N/A\n # root_cost - N/A\n # hold_time - N/A\n # topology_changes - N/A\n # time_since_topology_change - N/A\n # interface - N/A\n # v_interface - N/A\n # name - N/A\n # cost - N/A\n # port_priority - N/A\n # port_num - N/A\n # role - N/A\n # port_state - N/A\n # designated_root_priority - N/A\n # designated_root_address - N/A\n # designated_cost - N/A\n # designated_bridge_priority - N/A\n # designated_bridge_address - N/A\n # designated_port_priority - N/A\n # designated_port_num - N/A\n # forward_transitions - N/A\n # counters - N/A\n # bpdu_sent - N/A\n # bpdu_received - N/A\n # interfaces - N/A\n # p_interface - N/A\n # name - N/A\n # edge_port - N/A\n # link_type - N/A\n # guard - N/A\n # bpdu_guard - N/A\n # bpdu_filter - N/A\n # hello_time - N/A\n # pvrstag\n # prag_domain\n # domain\n # interfaces\n # prag_interface\n # interface\n # vlans\n # prag_vlan\n # root_priority\n # 
root_id - N/A\n # root_cost\n # priority - N/A\n # bridge_id\n # port_priority\n # max_age\n # hello_time\n # preempt_delay\n # preempt_delay_state\n # sub_interface\n # sub_interface_state\n # port_id\n # active\n # counters\n # bpdu_sent - N/A\n # topology_changes\n # pvstag\n # pag_domain\n # domain\n # interfaces\n # pag_interface\n # interface\n # vlans\n # pag_vlan\n # root_priority\n # root_id - N/A\n # root_cost\n # priority - N/A\n # bridge_id\n # port_priority\n # max_age\n # hello_time\n # preempt_delay\n # preempt_delay_state\n # sub_interface\n # sub_interface_state\n # port_id\n # active\n # counters\n # bpdu_sent - N/A\n # topology_changes\n\n mstp_domain_instances_src = '[mstp][(?P<mstp_domain>.*)][mst_instances][(?P<mst_id>.*)]'\n mstp_domain_instances_des = 'info[mstp][(?P<mstp_domain>.*)][mst_instances][(?P<mst_id>.*)]'\n\n if mst_domain:\n for key in ['mst_id', 'vlan', 'bridge_priority',\n 'bridge_address' ,\n 'designated_root_priority', 'designated_root_address', 'root_cost', \n ]:\n\n self.add_leaf(cmd=ShowSpanningTreeMst,\n src=mstp_domain_instances_src + '[%s]' % key,\n dest=mstp_domain_instances_des + '[%s]' % key,\n mst=mst_domain)\n\n mstp_domain_interfaces_src = mstp_domain_instances_src + '[interfaces][(?P<m_interface>.*)]'\n mstp_domain_interfaces_des = mstp_domain_instances_des + '[interfaces][(?P<m_interface>.*)]'\n\n if mst_domain:\n for key in ['name', 'cost', 'port_priority', 'port_num', 'role', \n 'port_state', 'designated_cost', 'designated_bridge_priority',\n 'designated_bridge_address', 'designated_port_priority', \n 'designated_port_num']:\n\n self.add_leaf(cmd=ShowSpanningTreeMst,\n src=mstp_domain_interfaces_src + '[%s]' % key,\n dest=mstp_domain_interfaces_des + '[%s]' % key,\n mst=mst_domain)\n\n mstag_src = '[mstag][(?P<mstag>.*)]'\n mstag_des = 'info[mstag][(?P<mstag>.*)]'\n\n if mstag_domain:\n self.add_leaf(cmd=ShowSpanningTreeMstag,\n src=mstag_src + '[domain]',\n dest=mstag_des + '[domain]',\n mag_domain=mstag_domain)\n\n mstag_interfaces_src = mstag_src + '[interfaces][(?P<m_interface>.*)]'\n mstag_interfaces_des = mstag_des + '[interfaces][(?P<m_interface>.*)]'\n \n if mstag_domain:\n for key in ['interface', 'preempt_delay', 'name', 'revision' , 'max_age', \n 'provider_bridge', 'bridge_id', 'port_id', 'external_cost', 'hello_time',\n 'active']:\n self.add_leaf(cmd=ShowSpanningTreeMstag,\n src=mstag_interfaces_src + '[%s]' % key,\n dest=mstag_interfaces_des + '[%s]' % key,\n mag_domain=mstag_domain)\n\n mstag_instances_src = mstag_src + '[interfaces][instances][(?P<m_instance>.*)]'\n mstag_instances_des = mstag_des + '[interfaces][instances][(?P<m_instance>.*)]'\n \n if mstag_domain:\n for key in ['instance', 'vlans', 'priority', 'port_priority', 'cost',\n 'root_priority']:\n self.add_leaf(cmd=ShowSpanningTreeMstag,\n src=mstag_instances_src + '[%s]' % key,\n dest=mstag_instances_des + '[%s]' % key,\n mag_domain=mstag_domain)\n\n self.add_leaf(cmd=ShowSpanningTreeMstag,\n src= mstag_instances_src + '[counters][topology_changes]',\n dest=mstag_instances_des + '[counters][topology_changes]',\n mag_domain=mstag_domain)\n \n pvst_src = '[pvst][(?P<pvst>.*)]'\n pvst_des = 'info[pvst][(?P<pvst>.*)]'\n\n if pvst_id:\n self.add_leaf(cmd=ShowSpanningTreePvrst,\n src= pvst_src + '[pvst_id]',\n dest=pvst_des + '[pvst_id]',\n pvst_id=pvst_id)\n\n pvst_vlans_src = pvst_src + '[vlans][(?P<vlans>.*)]'\n pvst_vlans_des = pvst_des + '[vlans][(?P<vlans>.*)]'\n if pvst_id:\n for key in ['vlan_id', 'designated_root_priority', 'designated_root_address',\n 
'bridge_priority', 'sys_id_ext', 'bridge_address']:\n self.add_leaf(cmd=ShowSpanningTreePvrst,\n src=pvst_vlans_src + '[%s]' % key,\n dest=pvst_vlans_des + '[%s]' % key,\n pvst_id=pvst_id)\n\n pvst_vlans_interface_src = pvst_vlans_src + '[interface][(?P<m_interface>.*)]'\n pvst_vlans_interface_des = pvst_vlans_des + '[interface][(?P<m_interface>.*)]'\n if pvst_id:\n for key in ['name', 'cost', 'role', 'port_priority', 'port_num', 'port_state',\n 'designated_bridge_priority', 'designated_bridge_address', \n 'designated_port_priority', 'designated_port_num']:\n self.add_leaf(cmd=ShowSpanningTreePvrst,\n src=pvst_vlans_interface_src + '[%s]' % key,\n dest=pvst_vlans_interface_des + '[%s]' % key,\n pvst_id=pvst_id)\n\n pvrstag_src = '[pvrstag][(?P<pvrstag>.*)]'\n pvrstag_des = 'info[pvrstag][(?P<pvrstag>.*)]'\n if pvrstag_domain:\n self.add_leaf(cmd=ShowSpanningTreePvrsTag,\n src=pvrstag_src + '[domain]',\n dest=pvrstag_des + '[domain]',\n pvrstag_domain=pvrstag_domain)\n\n pvrstag_interfaces_src = pvrstag_src + '[interfaces][(?P<m_interface>.*)]'\n pvrstag_interfaces_des = pvrstag_des + '[interfaces][(?P<m_interface>.*)]'\n if pvrstag_domain:\n self.add_leaf(cmd=ShowSpanningTreePvrsTag,\n src=pvrstag_interfaces_src + '[interface]',\n dest=pvrstag_interfaces_des + '[interface]',\n pvrstag_domain=pvrstag_domain)\n\n pvrstag_vlans_src = pvrstag_interfaces_src + '[vlans][(?P<vlans>.*)]'\n pvrstag_vlans_des = pvrstag_interfaces_des + '[vlans][(?P<vlans>.*)]'\n if pvrstag_domain:\n for key in ['preempt_delay', 'preempt_delay_state', 'sub_interface', \n 'sub_interface_state', 'max_age', 'root_priority', \n 'root_cost', 'bridge_id', 'port_priority', \n 'port_id', 'hello_time', 'active']:\n self.add_leaf(cmd=ShowSpanningTreePvrsTag,\n src=pvrstag_vlans_src + '[%s]' % key,\n dest=pvrstag_vlans_des + '[%s]' % key,\n pvrstag_domain=pvrstag_domain)\n\n self.add_leaf(cmd=ShowSpanningTreePvrsTag,\n src= pvrstag_vlans_src + '[counters][topology_changes]',\n dest=pvrstag_vlans_des + '[counters][topology_changes]',\n pvrstag_domain=pvrstag_domain)\n\n pvstag_src = '[pvstag][(?P<pvrstag>.*)]'\n pvstag_des = 'info[pvstag][(?P<pvrstag>.*)]'\n if pvstag_domain:\n self.add_leaf(cmd=ShowSpanningTreePvsTag,\n src=pvstag_src + '[domain]',\n dest=pvstag_des + '[domain]',\n pvstag_domain=pvstag_domain)\n\n pvstag_interfaces_src = pvstag_src + '[interfaces][(?P<m_interface>.*)]'\n pvstag_interfaces_des = pvstag_des + '[interfaces][(?P<m_interface>.*)]'\n\n if pvstag_domain:\n self.add_leaf(cmd=ShowSpanningTreePvsTag,\n src=pvstag_interfaces_src + '[interface]',\n dest=pvstag_interfaces_des + '[interface]',\n pvstag_domain=pvstag_domain)\n\n pvstag_vlans_src = pvstag_interfaces_src + '[vlans][(?P<vlans>.*)]'\n pvstag_vlans_des = pvstag_interfaces_des + '[vlans][(?P<vlans>.*)]'\n if pvstag_domain:\n for key in ['preempt_delay', 'preempt_delay_state', 'sub_interface', \n 'sub_interface_state', 'max_age', 'root_priority',\n 'root_cost', 'bridge_id', 'port_priority', \n 'port_id', 'hello_time', 'active']:\n self.add_leaf(cmd=ShowSpanningTreePvsTag,\n src=pvstag_vlans_src + '[%s]' % key,\n dest=pvstag_vlans_des + '[%s]' % key,\n pvstag_domain=pvstag_domain)\n\n self.add_leaf(cmd=ShowSpanningTreePvsTag,\n src= pvstag_vlans_src + '[counters][topology_changes]',\n dest=pvstag_vlans_des + '[counters][topology_changes]',\n pvstag_domain=pvstag_domain)\n\n # make to write in cache\n self.make(final_call=True)", "id": "11313696", "language": "Python", "matching_score": 5.7640700340271, "max_stars_count": 94, "path": 
"pkgs/ops-pkg/src/genie/libs/ops/stp/iosxr/stp.py" }, { "content": "'''STP Genie Ops Object Outputs for IOSXE.'''\n\n\nclass StpMstOutput(object):\n\n ShowSpanningTreeDetail = {\n \"mstp\": {\n \"mst_instances\": {\n 0: {\n \"hello_timer\": 0,\n \"notification_timer\": 0,\n \"bridge_sysid\": 0,\n \"forwarding_delay\": 30,\n \"hello_time\": 10,\n \"topology_change_timer\": 0,\n \"time_since_topology_change\": \"03:09:48\",\n \"notification_times\": 10,\n \"mst_id\": 0,\n \"topology_change_flag\": False,\n \"root_of_spanning_tree\": True,\n \"hold_time\": 1,\n \"topology_changes\": 3,\n \"bridge_address\": \"d8b1.9009.bf80\",\n \"interfaces\": {\n \"Port-channel24\": {\n \"number_of_forward_transitions\": 1,\n \"designated_port_id\": \"128.2400\",\n \"status\": \"designated forwarding\",\n \"designated_bridge_priority\": 32768,\n \"forward_delay\": 0,\n \"designated_bridge_address\": \"d8b1.9009.bf80\",\n \"designated_path_cost\": 0,\n \"designated_root_priority\": 32768,\n \"port_identifier\": \"128.2400.\",\n \"cost\": 6660,\n \"counters\": {\n \"bpdu_sent\": 1099019,\n \"bpdu_received\": 2191582\n },\n \"designated_root_address\": \"d8b1.9009.bf80\",\n \"boundary\": \"PVST\",\n \"port_num\": 2400,\n \"port_priority\": 128,\n \"name\": \"Port-channel24\",\n \"hold\": 0,\n \"message_age\": 0,\n \"link_type\": \"point-to-point\",\n \"loop_guard\": True\n },\n \"Port-channel14\": {\n \"number_of_forward_transitions\": 0,\n \"designated_port_id\": \"128.2390\",\n \"status\": \"broken (PVST Sim. Inconsistent)\",\n \"designated_bridge_priority\": 32768,\n \"forward_delay\": 0,\n \"designated_bridge_address\": \"d8b1.9009.bf80\",\n \"designated_path_cost\": 0,\n \"designated_root_priority\": 32768,\n \"port_identifier\": \"128.2390.\",\n \"cost\": 6660,\n \"counters\": {\n \"bpdu_sent\": 138231,\n \"bpdu_received\": 167393\n },\n \"designated_root_address\": \"d8b1.9009.bf80\",\n \"boundary\": \"PVST\",\n \"port_num\": 2390,\n \"port_priority\": 128,\n \"name\": \"Port-channel14\",\n \"hold\": 0,\n \"message_age\": 0,\n \"link_type\": \"point-to-point\",\n \"loop_guard\": True\n }\n },\n \"topology_change_times\": 70,\n \"topology_from_port\": \"Port-channel24\",\n \"bridge_priority\": 32768,\n \"topology_detected_flag\": False,\n \"max_age\": 40\n },\n 10: {\n \"hello_timer\": 0,\n \"notification_timer\": 0,\n \"bridge_sysid\": 0,\n \"forwarding_delay\": 30,\n \"hello_time\": 10,\n \"topology_change_timer\": 0,\n \"time_since_topology_change\": \"03:09:48\",\n \"notification_times\": 10,\n \"mst_id\": 0,\n \"topology_change_flag\": False,\n \"root_of_spanning_tree\": False,\n \"hold_time\": 1,\n \"topology_changes\": 3,\n \"bridge_address\": \"d8b1.9009.bf80\",\n \"interfaces\": {\n \"GigabitEthernet1/0/5\": {\n \"number_of_forward_transitions\": 1,\n \"designated_port_id\": \"128.2400\",\n \"status\": \"designated forwarding\",\n \"designated_bridge_priority\": 32768,\n \"forward_delay\": 0,\n \"designated_bridge_address\": \"d8b1.9009.bf80\",\n \"designated_path_cost\": 0,\n \"designated_root_priority\": 32768,\n \"port_identifier\": \"128.2400.\",\n \"cost\": 6660,\n \"counters\": {\n \"bpdu_sent\": 1099019,\n \"bpdu_received\": 2191582\n },\n \"designated_root_address\": \"d8b1.9009.bf80\",\n \"boundary\": \"PVST\",\n \"port_num\": 2400,\n \"port_priority\": 128,\n \"name\": \"Port-channel24\",\n \"hold\": 0,\n \"message_age\": 0,\n \"link_type\": \"point-to-point\",\n \"loop_guard\": True\n },\n \"Port-channel14\": {\n \"number_of_forward_transitions\": 0,\n \"designated_port_id\": 
\"128.2390\",\n \"status\": \"broken (PVST Sim. Inconsistent)\",\n \"designated_bridge_priority\": 32768,\n \"forward_delay\": 0,\n \"designated_bridge_address\": \"d8b1.9009.bf80\",\n \"designated_path_cost\": 0,\n \"designated_root_priority\": 32768,\n \"port_identifier\": \"128.2390.\",\n \"cost\": 6660,\n \"counters\": {\n \"bpdu_sent\": 138231,\n \"bpdu_received\": 167393\n },\n \"designated_root_address\": \"d8b1.9009.bf80\",\n \"boundary\": \"PVST\",\n \"port_num\": 2390,\n \"port_priority\": 128,\n \"name\": \"Port-channel14\",\n \"hold\": 0,\n \"message_age\": 0,\n \"link_type\": \"point-to-point\",\n \"loop_guard\": True\n }\n },\n \"topology_change_times\": 70,\n \"topology_from_port\": \"Port-channel24\",\n \"bridge_priority\": 32768,\n \"topology_detected_flag\": False,\n \"max_age\": 40\n }\n },\n \"forwarding_delay\": 30,\n \"hello_time\": 10,\n \"max_age\": 40,\n \"hold_count\": 20\n }\n }\n\n ShowSpanningTreeMstDetail = {\n \"mst_instances\": {\n 0: {\n \"bridge_priority\": 32768,\n \"interfaces\": {\n \"GigabitEthernet1/0/23\": {\n \"designated_regional_root_cost\": 0,\n \"port_priority\": 128,\n \"designated_root_priority\": 32768,\n \"designated_bridge_port_id\": \"128.23\",\n \"designated_bridge_priority\": 32768,\n \"forward_delay\": 0,\n \"port_id\": \"128.23\",\n \"name\": \"GigabitEthernet1/0/23\",\n \"designated_regional_root_priority\": 32768,\n \"forward_transitions\": 1,\n \"counters\": {\n \"bpdu_sent\": 493,\n \"bpdu_received\": 0\n },\n \"designated_regional_root_address\": \"3820.565b.8600\",\n \"status\": \"designated forwarding\",\n \"designated_root_cost\": 0,\n \"designated_bridge_address\": \"3820.565b.8600\",\n \"designated_root_address\": \"3820.565b.8600\",\n \"cost\": 20000,\n \"message_expires\": 0\n }\n },\n \"operational\": {\n \"max_age\": 35,\n \"tx_hold_count\": 20,\n \"hello_time\": 10,\n \"forward_delay\": 30\n },\n \"sysid\": 0,\n \"root\": \"CIST\",\n \"bridge_address\": \"3820.565b.8600\",\n \"configured\": {\n \"max_age\": 35,\n \"forward_delay\": 30,\n \"hello_time\": 10,\n \"max_hops\": 10\n },\n \"mst_id\": 0,\n \"vlan\": \"1-99,201-4094\"\n },\n 10: {\n \"bridge_priority\": 61450,\n \"interfaces\": {\n \"GigabitEthernet1/0/23\": {\n \"port_priority\": 128,\n \"designated_root_priority\": 61450,\n \"designated_bridge_port_id\": \"128.23\",\n \"designated_bridge_priority\": 61450,\n \"forward_delay\": 0,\n \"port_id\": \"128.23\",\n \"name\": \"GigabitEthernet1/0/23\",\n \"forward_transitions\": 1,\n \"counters\": {\n \"bpdu_sent\": 493,\n \"bpdu_received\": 0\n },\n \"message_expires\": 0,\n \"status\": \"designated forwarding\",\n \"designated_root_cost\": 0,\n \"designated_bridge_address\": \"3820.565b.8600\",\n \"designated_root_address\": \"3820.565b.8600\",\n \"cost\": 20000\n }\n },\n \"sysid\": 10,\n \"root\": \"MST10\",\n \"bridge_address\": \"3820.565b.8600\",\n \"mst_id\": 10,\n \"vlan\": \"100-200\"\n }\n }\n }\n\n ShowSpanningTreeSummary = {\n \"bpdu_filter\": False,\n \"extended_system_id\": True,\n \"etherchannel_misconfig_guard\": False,\n \"total_statistics\": {\n \"forwardings\": 10,\n \"listenings\": 0,\n \"root_bridges\": 2,\n \"stp_actives\": 16,\n \"learnings\": 0,\n \"blockings\": 6\n },\n \"root_bridge_for\": \"MST0, MST100\",\n \"bpdu_guard\": False,\n \"mode\": {\n \"mst\": {\n \"MST100\": {\n \"blocking\": 3,\n \"forwarding\": 1,\n \"listening\": 0,\n \"stp_active\": 4,\n \"learning\": 0\n },\n \"MST0\": {\n \"blocking\": 3,\n \"forwarding\": 9,\n \"listening\": 0,\n \"stp_active\": 12,\n \"learning\": 0\n 
}\n }\n },\n \"uplink_fast\": False,\n \"backbone_fast\": False,\n \"portfast_default\": False,\n \"loop_guard\": False\n\n }\n\n ShowErrdisableRecovery = {\n \"bpduguard_timeout_recovery\": 333,\n \"timer_status\": {\n \"gbic-invalid\": False,\n \"oam-remote-failure\": False,\n \"arp-inspection\": False,\n \"dtp-flap\": False,\n \"port-mode-failure\": False,\n \"loopback\": False,\n \"mac-limit\": False,\n \"psp\": False,\n \"channel-misconfig (STP)\": False,\n \"l2ptguard\": False,\n \"Recovery command: \\\"clear\": False,\n \"link-monitor-failure\": False,\n \"vmps\": False,\n \"bpduguard\": False,\n \"sfp-config-mismatch\": False,\n \"dual-active-recovery\": False,\n \"pagp-flap\": False,\n \"security-violation\": False,\n \"storm-control\": False,\n \"psecure-violation\": False,\n \"udld\": False,\n \"inline-power\": False,\n \"link-flap\": False,\n \"evc-lite input mapping fa\": False,\n \"pppoe-ia-rate-limit\": False,\n \"dhcp-rate-limit\": False\n }\n }\n\n ShowSpanningTree = {\n \"mstp\": {\n \"mst_instances\": {\n 0: {\n \"bridge\": {\n \"hello_time\": 7,\n \"priority\": 32768,\n \"forward_delay\": 15,\n \"address\": \"ecbd.1d09.5680\",\n \"max_age\": 12,\n \"configured_bridge_priority\": 32768,\n \"sys_id_ext\": 0,\n },\n \"interfaces\": {\n \"GigabitEthernet1/0/5\": {\n \"port_state\": \"forwarding\",\n \"bound\": \"RSTP\",\n \"port_num\": 5,\n \"port_priority\": 128,\n \"type\": \"P2p\",\n \"cost\": 20000,\n \"role\": \"root\"\n },\n \"Port-channel14\": {\n \"port_state\": \"broken\",\n \"bound\": \"PVST\",\n \"port_num\": 2390,\n \"port_priority\": 128,\n \"type\": \"P2p\",\n \"cost\": 6660,\n \"role\": \"designated\"\n },\n \"Port-channel24\": {\n \"port_state\": \"forwarding\",\n \"bound\": \"PVST\",\n \"port_num\": 2400,\n \"port_priority\": 128,\n \"type\": \"P2p\",\n \"cost\": 6660,\n \"role\": \"designated\"\n }\n },\n \"root\": {\n \"hello_time\": 10,\n \"priority\": 32768,\n \"forward_delay\": 30,\n \"max_age\": 35,\n \"cost\": 20000,\n \"address\": \"3820.565b.8600\",\n \"interface\": \"GigabitEthernet1/0/5\",\n \"port\": 5\n }\n },\n 10: {\n \"bridge\": {\n \"hello_time\": 7,\n \"priority\": 61450,\n \"forward_delay\": 15,\n \"address\": \"ecbd.1d09.5680\",\n \"max_age\": 12,\n \"configured_bridge_priority\": 61440,\n \"sys_id_ext\": 10,\n },\n \"interfaces\": {\n \"GigabitEthernet1/0/5\": {\n \"port_state\": \"forwarding\",\n \"bound\": \"RSTP\",\n \"port_num\": 5,\n \"port_priority\": 128,\n \"type\": \"P2p\",\n \"cost\": 20000,\n \"role\": \"master \"\n },\n \"Port-channel14\": {\n \"port_state\": \"broken\",\n \"bound\": \"PVST\",\n \"port_num\": 2390,\n \"port_priority\": 128,\n \"type\": \"P2p\",\n \"cost\": 6660,\n \"role\": \"designated\"\n }\n },\n \"root\": {\n \"hello_time\": 10,\n \"priority\": 61450,\n \"forward_delay\": 30,\n \"address\": \"ecbd.1d09.5680\",\n \"max_age\": 35\n }\n }\n }\n }\n }\n\n ShowSpanningTreeMstConfiguration = {\n \"mstp\": {\n \"revision\": 111,\n \"name\": \"mst\",\n \"instances_configured\": 2,\n \"mst_instances\": {\n 10: {\n \"vlan_mapped\": \"100-200\"\n },\n 0: {\n \"vlan_mapped\": \"1-99,201-4094\"\n }\n }\n }\n }\n\n Stp_info = {\n \"global\": {\n \"etherchannel_misconfig_guard\": False,\n \"bpdu_filter\": False,\n \"bpdu_guard\": False,\n \"bpduguard_timeout_recovery\": 333,\n \"loop_guard\": False\n },\n \"mstp\": {\n \"default\": {\n \"max_age\": 40,\n \"domain\": \"default\",\n \"revision\": 111,\n \"forwarding_delay\": 30,\n \"name\": \"mst\",\n \"hello_time\": 10,\n \"max_hop\": 10,\n \"mst_instances\": {\n 0: {\n 
\"bridge_address\": \"d8b1.9009.bf80\",\n \"time_since_topology_change\": \"03:09:48\",\n \"designated_root_address\": \"3820.565b.8600\",\n \"designated_root_priority\": 32768,\n \"configured_bridge_priority\": 32768,\n \"sys_id_ext\": 0,\n \"bridge_priority\": 32768,\n \"root_port\": 5,\n \"topology_changes\": 3,\n \"hold_time\": 1,\n \"vlan\": \"1-99,201-4094\",\n \"interfaces\": {\n \"GigabitEthernet1/0/5\": {\n \"designated_port_num\": 5,\n \"role\": \"root\",\n \"port_state\": \"forwarding\",\n \"designated_port_priority\": 128\n },\n \"Port-channel14\": {\n \"designated_root_priority\": 32768,\n \"port_num\": 2390,\n \"counters\": {\n \"bpdu_received\": 167393,\n \"bpdu_sent\": 138231\n },\n \"designated_port_num\": 2390,\n \"name\": \"Port-channel14\",\n \"cost\": 6660,\n \"designated_bridge_address\": \"d8b1.9009.bf80\",\n \"role\": \"designated\",\n \"port_priority\": 128,\n \"port_state\": \"broken\",\n \"forward_transitions\": 0,\n \"designated_bridge_priority\": 32768,\n \"designated_root_address\": \"d8b1.9009.bf80\",\n \"designated_cost\": 0,\n \"designated_port_priority\": 128\n },\n \"Port-channel24\": {\n \"designated_root_priority\": 32768,\n \"port_num\": 2400,\n \"counters\": {\n \"bpdu_received\": 2191582,\n \"bpdu_sent\": 1099019\n },\n \"designated_port_num\": 2400,\n \"name\": \"Port-channel24\",\n \"cost\": 6660,\n \"designated_bridge_address\": \"d8b1.9009.bf80\",\n \"role\": \"designated\",\n \"port_priority\": 128,\n \"port_state\": \"forwarding\",\n \"forward_transitions\": 1,\n \"designated_bridge_priority\": 32768,\n \"designated_root_address\": \"d8b1.9009.bf80\",\n \"designated_cost\": 0,\n \"designated_port_priority\": 128\n }\n },\n \"mst_id\": 0,\n \"root_cost\": 20000\n },\n 10: {\n \"time_since_topology_change\": \"03:09:48\",\n \"designated_root_priority\": 61450,\n \"bridge_priority\": 32768,\n \"configured_bridge_priority\": 61440,\n \"sys_id_ext\": 10,\n \"bridge_address\": \"d8b1.9009.bf80\",\n \"topology_changes\": 3,\n \"hold_time\": 1,\n \"vlan\": \"100-200\",\n \"interfaces\": {\n \"GigabitEthernet1/0/5\": {\n \"designated_root_priority\": 32768,\n \"port_num\": 2400,\n \"counters\": {\n \"bpdu_received\": 2191582,\n \"bpdu_sent\": 1099019\n },\n \"designated_port_num\": 5,\n \"name\": \"Port-channel24\",\n \"cost\": 6660,\n \"designated_bridge_address\": \"d8b1.9009.bf80\",\n \"role\": \"master \",\n \"port_priority\": 128,\n \"port_state\": \"forwarding\",\n \"forward_transitions\": 1,\n \"designated_bridge_priority\": 32768,\n \"designated_root_address\": \"d8b1.9009.bf80\",\n \"designated_cost\": 0,\n \"designated_port_priority\": 128\n },\n \"Port-channel14\": {\n \"designated_root_priority\": 32768,\n \"port_num\": 2390,\n \"counters\": {\n \"bpdu_received\": 167393,\n \"bpdu_sent\": 138231\n },\n \"designated_port_num\": 2390,\n \"name\": \"Port-channel14\",\n \"cost\": 6660,\n \"designated_bridge_address\": \"d8b1.9009.bf80\",\n \"role\": \"designated\",\n \"port_priority\": 128,\n \"port_state\": \"broken\",\n \"forward_transitions\": 0,\n \"designated_bridge_priority\": 32768,\n \"designated_root_address\": \"d8b1.9009.bf80\",\n \"designated_cost\": 0,\n \"designated_port_priority\": 128\n }\n },\n \"mst_id\": 0,\n \"designated_root_address\": \"ecbd.1d09.5680\"\n }\n },\n \"hold_count\": 20\n }\n }\n }\n\n\nclass StpRpstOutput(object):\n\n ShowSpanningTreeDetail = {\n \"rapid_pvst\": {\n \"forwarding_delay\": 15,\n \"vlans\": {\n 201: {\n \"forwarding_delay\": 15,\n \"hello_timer\": 0,\n \"bridge_sysid\": 201,\n \"hold_time\": 1,\n 
\"time_since_topology_change\": \"00:00:14\",\n \"notification_timer\": 0,\n \"topology_change_flag\": True,\n \"topology_changes\": 1,\n \"topology_change_times\": 35,\n \"aging_timer\": 300,\n \"topology_from_port\": \"Port-channel14\",\n \"topology_change_timer\": 21,\n \"bridge_address\": \"ecbd.1d09.5680\",\n \"notification_times\": 2,\n \"bridge_priority\": 28672,\n \"topology_detected_flag\": False,\n \"hello_time\": 2,\n \"interfaces\": {\n \"GigabitEthernet1/0/5\": {\n \"designated_bridge_address\": \"ecbd.1d09.5680\",\n \"number_of_forward_transitions\": 1,\n \"port_identifier\": \"128.5.\",\n \"counters\": {\n \"bpdu_received\": 4,\n \"bpdu_sent\": 20\n },\n \"cost\": 4,\n \"designated_port_id\": \"128.5\",\n \"designated_root_priority\": 24777,\n \"designated_root_address\": \"58bf.eab6.2f00\",\n \"port_num\": 5,\n \"status\": \"designated forwarding\",\n \"port_priority\": 128,\n \"forward_delay\": 0,\n \"hold\": 0,\n \"message_age\": 0,\n \"peer\": \"STP\",\n \"link_type\": \"point-to-point\",\n \"designated_bridge_priority\": 28873,\n \"designated_path_cost\": 3,\n \"name\": \"GigabitEthernet1/0/5\"\n }\n },\n \"max_age\": 20,\n \"hold_count\": 6,\n \"vlan_id\": 201\n },\n 100: {\n \"forwarding_delay\": 15,\n \"hello_timer\": 0,\n \"bridge_sysid\": 100,\n \"hold_time\": 1,\n \"time_since_topology_change\": \"00:00:34\",\n \"notification_timer\": 0,\n \"hello_time\": 2,\n \"hold_count\": 6,\n \"topology_change_flag\": True,\n \"topology_changes\": 1,\n \"notification_times\": 2,\n \"aging_timer\": 300,\n \"topology_from_port\": \"Port-channel12\",\n \"topology_change_timer\": 0,\n \"bridge_address\": \"3820.565b.1b80\",\n \"topology_change_times\": 35,\n \"bridge_priority\": 24576,\n \"topology_detected_flag\": False,\n \"root_of_spanning_tree\": True,\n \"interfaces\": {\n \"Port-channel12\": {\n \"designated_bridge_address\": \"3820.565b.1b80\",\n \"number_of_forward_transitions\": 1,\n \"port_identifier\": \"128.2388.\",\n \"counters\": {\n \"bpdu_received\": 0,\n \"bpdu_sent\": 34\n },\n \"cost\": 3,\n \"designated_port_id\": \"128.2388\",\n \"designated_root_priority\": 24676,\n \"designated_root_address\": \"3820.565b.1b80\",\n \"port_num\": 2388,\n \"status\": \"designated forwarding\",\n \"port_priority\": 128,\n \"forward_delay\": 0,\n \"hold\": 0,\n \"message_age\": 0,\n \"link_type\": \"point-to-point\",\n \"designated_bridge_priority\": 24676,\n \"designated_path_cost\": 0,\n \"name\": \"Port-channel12\"\n }\n },\n \"max_age\": 20,\n \"vlan_id\": 100\n }\n },\n \"max_age\": 20,\n \"hold_count\": 6,\n \"hello_time\": 2\n }\n }\n\n ShowSpanningTreeSummary = {\n \"etherchannel_misconfig_guard\": True,\n \"loop_guard\": False,\n \"bpdu_filter\": False,\n \"backbone_fast\": False,\n \"uplink_fast\": False,\n \"root_bridge_for\": \"none\",\n \"mode\": {\n \"rapid_pvst\": {\n \"VLAN0200\": {\n \"forwarding\": 2,\n \"learning\": 0,\n \"listening\": 0,\n \"stp_active\": 2,\n \"blocking\": 0\n },\n \"VLAN0201\": {\n \"forwarding\": 2,\n \"learning\": 0,\n \"listening\": 0,\n \"stp_active\": 2,\n \"blocking\": 0\n },\n }\n },\n \"portfast_default\": False,\n \"extended_system_id\": True,\n \"total_statistics\": {\n \"stp_actives\": 4,\n \"blockings\": 0,\n \"root_bridges\": 2,\n \"forwardings\": 4,\n \"learnings\": 0,\n \"listenings\": 0\n },\n \"bpdu_guard\": False\n }\n\n ShowErrdisableRecovery = {\n \"bpduguard_timeout_recovery\": 333,\n \"timer_status\": {\n \"gbic-invalid\": False,\n \"oam-remote-failure\": False,\n \"arp-inspection\": False,\n \"dtp-flap\": False,\n 
\"port-mode-failure\": False,\n \"loopback\": False,\n \"mac-limit\": False,\n \"psp\": False,\n \"channel-misconfig (STP)\": False,\n \"l2ptguard\": False,\n \"Recovery command: \\\"clear\": False,\n \"link-monitor-failure\": False,\n \"vmps\": False,\n \"bpduguard\": False,\n \"sfp-config-mismatch\": False,\n \"dual-active-recovery\": False,\n \"pagp-flap\": False,\n \"security-violation\": False,\n \"storm-control\": False,\n \"psecure-violation\": False,\n \"udld\": False,\n \"inline-power\": False,\n \"link-flap\": False,\n \"evc-lite input mapping fa\": False,\n \"pppoe-ia-rate-limit\": False,\n \"dhcp-rate-limit\": False\n }\n }\n\n ShowSpanningTree = {\n \"rapid_pvst\": {\n \"vlans\": {\n 200: {\n \"bridge\": {\n \"hello_time\": 2,\n \"priority\": 28872,\n \"forward_delay\": 15,\n \"max_age\": 20,\n \"aging_time\": 300,\n \"address\": \"ecbd.1d09.5680\",\n \"configured_bridge_priority\": 28672,\n \"sys_id_ext\": 200,\n },\n \"interfaces\": {\n \"GigabitEthernet1/0/5\": {\n \"peer\": \"STP\",\n \"port_state\": \"forwarding\",\n \"port_num\": 5,\n \"port_priority\": 128,\n \"type\": \"P2p\",\n \"cost\": 4,\n \"role\": \"designated\"\n },\n \"Port-channel14\": {\n \"port_state\": \"forwarding\",\n \"port_num\": 2390,\n \"port_priority\": 128,\n \"type\": \"P2p\",\n \"cost\": 3,\n \"role\": \"root\"\n }\n },\n \"root\": {\n \"hello_time\": 2,\n \"priority\": 24776,\n \"forward_delay\": 15,\n \"max_age\": 20,\n \"cost\": 3,\n \"address\": \"58bf.eab6.2f00\",\n \"interface\": \"Port-channel14\",\n \"port\": 2390\n }\n },\n 201: {\n \"bridge\": {\n \"hello_time\": 2,\n \"priority\": 28873,\n \"forward_delay\": 15,\n \"max_age\": 20,\n \"aging_time\": 300,\n \"address\": \"ecbd.1d09.5680\",\n \"configured_bridge_priority\": 28672,\n \"sys_id_ext\": 201,\n },\n \"interfaces\": {\n \"GigabitEthernet1/0/5\": {\n \"peer\": \"STP\",\n \"port_state\": \"forwarding\",\n \"port_num\": 5,\n \"port_priority\": 128,\n \"type\": \"P2p\",\n \"cost\": 4,\n \"role\": \"designated\"\n },\n \"Port-channel14\": {\n \"port_state\": \"forwarding\",\n \"port_num\": 2390,\n \"port_priority\": 128,\n \"type\": \"P2p\",\n \"cost\": 3,\n \"role\": \"root\"\n }\n },\n \"root\": {\n \"hello_time\": 2,\n \"priority\": 24777,\n \"forward_delay\": 15,\n \"max_age\": 20,\n \"cost\": 3,\n \"address\": \"58bf.eab6.2f00\",\n \"interface\": \"Port-channel14\",\n \"port\": 2390\n }\n }\n }\n }\n }\n\n Stp_info = {\n \"global\": {\n \"bpdu_guard\": False,\n \"bpduguard_timeout_recovery\": 333,\n \"etherchannel_misconfig_guard\": True,\n \"bpdu_filter\": False,\n \"loop_guard\": False\n },\n \"rapid_pvst\": {\n \"default\": {\n \"hold_count\": 6,\n \"hello_time\": 2,\n \"max_age\": 20,\n \"forwarding_delay\": 15,\n \"pvst_id\": \"default\",\n \"vlans\": {\n 200: {\n \"designated_root_priority\": 24776,\n \"root_port\": 2390,\n \"root_cost\": 3,\n \"configured_bridge_priority\": 28672,\n \"sys_id_ext\": 200,\n \"interfaces\": {\n \"Port-channel14\": {\n \"designated_port_priority\": 128,\n \"role\": \"root\",\n \"designated_port_num\": 2390,\n \"port_state\": \"forwarding\"\n },\n \"GigabitEthernet1/0/5\": {\n \"designated_port_priority\": 128,\n \"role\": \"designated\",\n \"designated_port_num\": 5,\n \"port_state\": \"forwarding\"\n }\n },\n \"designated_root_address\": \"58bf.eab6.2f00\"\n },\n 201: {\n \"time_since_topology_change\": \"00:00:14\",\n \"bridge_priority\": 28672,\n \"configured_bridge_priority\": 28672,\n \"sys_id_ext\": 201,\n \"hold_count\": 6,\n \"vlan_id\": 201,\n \"interfaces\": {\n \"Port-channel14\": 
{\n \"designated_port_priority\": 128,\n \"role\": \"root\",\n \"designated_port_num\": 2390,\n \"port_state\": \"forwarding\"\n },\n \"GigabitEthernet1/0/5\": {\n \"port_num\": 5,\n \"counters\": {\n \"bpdu_sent\": 20,\n \"bpdu_received\": 4\n },\n \"cost\": 4,\n \"designated_bridge_address\": \"ecbd.1d09.5680\",\n \"forward_transitions\": 1,\n \"name\": \"GigabitEthernet1/0/5\",\n \"designated_port_num\": 5,\n \"role\": \"designated\",\n \"designated_bridge_priority\": 28873,\n \"designated_root_address\": \"58bf.eab6.2f00\",\n \"port_priority\": 128,\n \"designated_cost\": 3,\n \"designated_root_priority\": 24777,\n \"designated_port_priority\": 128,\n \"port_state\": \"forwarding\"\n }\n },\n \"hello_time\": 2,\n \"max_age\": 20,\n \"forwarding_delay\": 15,\n \"hold_time\": 1,\n \"topology_changes\": 1,\n \"designated_root_priority\": 24777,\n \"root_port\": 2390,\n \"root_cost\": 3,\n \"bridge_address\": \"ecbd.1d09.5680\",\n \"designated_root_address\": \"58bf.eab6.2f00\"\n },\n 100: {\n \"time_since_topology_change\": \"00:00:34\",\n \"bridge_priority\": 24576,\n \"hello_time\": 2,\n \"max_age\": 20,\n \"vlan_id\": 100,\n \"hold_count\": 6,\n \"forwarding_delay\": 15,\n \"interfaces\": {\n \"Port-channel12\": {\n \"designated_bridge_priority\": 24676,\n \"designated_root_address\": \"3820.565b.1b80\",\n \"port_num\": 2388,\n \"port_priority\": 128,\n \"counters\": {\n \"bpdu_sent\": 34,\n \"bpdu_received\": 0\n },\n \"designated_cost\": 0,\n \"cost\": 3,\n \"designated_root_priority\": 24676,\n \"forward_transitions\": 1,\n \"name\": \"Port-channel12\",\n \"designated_bridge_address\": \"3820.565b.1b80\"\n }\n },\n \"hold_time\": 1,\n \"topology_changes\": 1,\n \"bridge_address\": \"3820.565b.1b80\"\n }\n }\n }\n }\n }", "id": "1061933", "language": "Python", "matching_score": 1.903944730758667, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/stp/iosxe/tests/spanning_tree_output.py" }, { "content": "\"\"\"\nMSDP Genie Ops Object Outputs for IOS\n\n\"\"\"\n\n\nclass MsdpOutput(object):\n\n # 'show ip msdp peer'\n ShowIpMsdpPeer = {\n \"vrf\": {\n \"default\": {\n \"peer\": {\n \"10.16.2.2\": {\n \"peer_as\": 65000,\n \"session_state\": \"Up\",\n \"resets\": \"0\",\n \"connect_source\": \"Loopback0\",\n \"connect_source_address\": \"10.4.1.1\",\n \"elapsed_time\": \"00:13:18\",\n \"statistics\": {\n \"sent\": {\n \"data_message\": 15,\n \"sa_message\": 14,\n \"sa_response\": 0,\n \"data_packets\": 0,\n },\n \"received\": {\n \"data_message\": 28,\n \"sa_message\": 0,\n \"sa_request\": 0,\n \"data_packets\": 0,\n },\n \"output_msg_discarded\": 0,\n \"established_transitions\": 1,\n \"queue\": {\"size_in\": 0, \"size_out\": 0},\n \"error\": {\"rpf_failure\": 0},\n },\n \"conn_count_cleared\": \"00:22:05\",\n \"sa_filter\": {\n \"in\": {\n \"(S,G)\": {\"filter\": \"none\", \"route_map\": \"none\"},\n \"RP\": {\"filter\": \"none\", \"route_map\": \"none\"},\n },\n \"out\": {\n \"(S,G)\": {\"filter\": \"none\", \"route_map\": \"none\"},\n \"RP\": {\"filter\": \"none\", \"route_map\": \"none\"},\n },\n },\n \"sa_request\": {\"input_filter\": \"none\"},\n \"ttl_threshold\": 0,\n \"sa_learned_from\": 0,\n \"signature_protection\": False,\n },\n \"10.36.3.3\": {\n \"peer_as\": 65000,\n \"session_state\": \"Up\",\n \"resets\": \"0\",\n \"connect_source\": \"Loopback0\",\n \"connect_source_address\": \"10.4.1.1\",\n \"elapsed_time\": \"00:16:57\",\n \"statistics\": {\n \"sent\": {\n \"data_message\": 20,\n \"sa_message\": 19,\n \"sa_response\": 0,\n \"data_packets\": 0,\n },\n 
\"received\": {\n \"data_message\": 19,\n \"sa_message\": 0,\n \"sa_request\": 0,\n \"data_packets\": 0,\n },\n \"output_msg_discarded\": 0,\n \"established_transitions\": 1,\n \"queue\": {\"size_in\": 0, \"size_out\": 0},\n \"error\": {\"rpf_failure\": 0},\n },\n \"conn_count_cleared\": \"00:22:14\",\n \"sa_filter\": {\n \"in\": {\n \"(S,G)\": {\"filter\": \"none\", \"route_map\": \"none\"},\n \"RP\": {\"filter\": \"none\", \"route_map\": \"none\"},\n },\n \"out\": {\n \"(S,G)\": {\"filter\": \"none\", \"route_map\": \"none\"},\n \"RP\": {\"filter\": \"none\", \"route_map\": \"none\"},\n },\n },\n \"sa_request\": {\"input_filter\": \"none\"},\n \"ttl_threshold\": 0,\n \"sa_learned_from\": 0,\n \"signature_protection\": False,\n },\n }\n }\n }\n }\n\n ShowIpMsdpPeer_golden = \"\"\"\n MSDP Peer 10.16.2.2 (?), AS 65000\n Connection status:\n State: Up, Resets: 0, Connection source: Loopback0 (10.4.1.1)\n Uptime(Downtime): 00:13:18, Messages sent/received: 15/28\n Output messages discarded: 0\n Connection and counters cleared 00:22:05 ago\n SA Filtering:\n Input (S,G) filter: none, route-map: none\n Input RP filter: none, route-map: none\n Output (S,G) filter: none, route-map: none\n Output RP filter: none, route-map: none\n SA-Requests:\n Input filter: none\n Peer ttl threshold: 0\n SAs learned from this peer: 0\n Number of connection transitions to Established state: 1\n Input queue size: 0, Output queue size: 0\n MD5 signature protection on MSDP TCP connection: not enabled\n Message counters:\n RPF Failure count: 0\n SA Messages in/out: 0/14\n SA Requests in: 0\n SA Responses out: 0\n Data Packets in/out: 0/0\n MSDP Peer 10.36.3.3 (?), AS 65000\n Connection status:\n State: Up, Resets: 0, Connection source: Loopback0 (10.4.1.1)\n Uptime(Downtime): 00:16:57, Messages sent/received: 20/19\n Output messages discarded: 0\n Connection and counters cleared 00:22:14 ago\n SA Filtering:\n Input (S,G) filter: none, route-map: none\n Input RP filter: none, route-map: none\n Output (S,G) filter: none, route-map: none\n Output RP filter: none, route-map: none\n SA-Requests:\n Input filter: none\n Peer ttl threshold: 0\n SAs learned from this peer: 0\n Number of connection transitions to Established state: 1\n Input queue size: 0, Output queue size: 0\n MD5 signature protection on MSDP TCP connection: not enabled\n Message counters:\n RPF Failure count: 0\n SA Messages in/out: 0/19\n SA Requests in: 0\n SA Responses out: 0\n Data Packets in/out: 0/0\n \"\"\"\n\n # 'show ip msdp sa-cache'\n ShowIpMsdpSaCache = {\n \"vrf\": {\n \"default\": {\n \"num_of_sa_cache\": 1,\n \"sa_cache\": {\n \"192.168.3.11 10.3.3.18\": {\n \"group\": \"192.168.3.11\",\n \"source_addr\": \"10.3.3.18\",\n \"up_time\": \"00:00:10\",\n \"expire\": \"00:05:49\",\n \"peer_as\": 3,\n \"peer\": \"10.1.100.4\",\n \"origin_rp\": {\"10.3.100.8\": {\"rp_address\": \"10.3.100.8\"}},\n \"peer_learned_from\": \"10.1.100.4\",\n \"rpf_peer\": \"10.1.100.4\",\n \"statistics\": {\n \"received\": {\"sa\": 1, \"encapsulated_data_received\": 1}\n },\n }\n },\n }\n }\n }\n\n ShowIpMsdpSaCache_golden = \"\"\"\n MSDP Source-Active Cache - 1 entries\n (10.3.3.18, 192.168.3.11), RP 10.3.100.8, BGP/AS 3, 00:00:10/00:05:49, Peer 10.1.100.4\n Learned from peer 10.1.100.4, RPF peer 10.1.100.4, \n SAs received: 1, Encapsulated data received: 1\n \"\"\"\n\n MsdpInfo = {\n \"vrf\": {\n \"default\": {\n \"peer\": {\n \"10.36.3.3\": {\n \"elapsed_time\": \"00:16:57\",\n \"peer_as\": 65000,\n \"connect_source\": \"Loopback0\",\n \"ttl_threshold\": 0,\n 
\"session_state\": \"established\",\n \"statistics\": {\n \"received\": {\"sa_message\": 0, \"sa_request\": 0},\n \"queue\": {\"size_in\": 0, \"size_out\": 0},\n \"sent\": {\"sa_message\": 19, \"sa_response\": 0},\n \"error\": {\"rpf_failure\": 0},\n },\n },\n \"10.16.2.2\": {\n \"elapsed_time\": \"00:13:18\",\n \"peer_as\": 65000,\n \"connect_source\": \"Loopback0\",\n \"ttl_threshold\": 0,\n \"session_state\": \"established\",\n \"statistics\": {\n \"received\": {\"sa_message\": 0, \"sa_request\": 0},\n \"queue\": {\"size_in\": 0, \"size_out\": 0},\n \"sent\": {\"sa_message\": 14, \"sa_response\": 0},\n \"error\": {\"rpf_failure\": 0},\n },\n },\n },\n \"sa_cache\": {\n \"192.168.3.11 10.3.3.18\": {\n \"group\": \"192.168.3.11\",\n \"source_addr\": \"10.3.3.18\",\n \"peer_learned_from\": \"10.1.100.4\",\n \"rpf_peer\": \"10.1.100.4\",\n \"up_time\": \"00:00:10\",\n \"expire\": \"00:05:49\",\n \"origin_rp\": {\"10.3.100.8\": {\"rp_address\": \"10.3.100.8\"}},\n }\n },\n }\n }\n }\n", "id": "4382589", "language": "Python", "matching_score": 5.152045249938965, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/msdp/ios/tests/msdp_output.py" }, { "content": "# super class\r\nfrom genie.libs.ops.msdp.msdp import Msdp as SuperMsdp\r\n\r\n# iosxe show msdp\r\nfrom genie.libs.parser.iosxe.show_msdp import ShowIpMsdpPeer,\\\r\n ShowIpMsdpSaCache\r\n\r\n\r\nclass Msdp(SuperMsdp):\r\n '''\r\n Msdp Ops Object\r\n '''\r\n\r\n def get_session_state(self, item):\r\n\r\n session_state_mapping_dict = {\r\n '(N/A)': 'disabled',\r\n 'Down': 'inactive',\r\n 'Listen': 'listen',\r\n 'Connect': 'connecting',\r\n 'Up': 'established',\r\n 'shutdown': 'admin-shutdown'\r\n }\r\n\r\n return session_state_mapping_dict[item]\r\n\r\n def learn(self):\r\n '''\r\n Learn Msdp object\r\n ''' \r\n # N/A -> Keys not supported by this OS\r\n # vrf\r\n # global N/A\r\n # peer\r\n # connect_source\r\n # peer_as\r\n # authentication N/A\r\n # enable N/A\r\n # description N/A\r\n # mesh_group N/A\r\n # sa_filter N/A\r\n # sa_limit N/A\r\n # timer N/A\r\n # ttl_threshold\r\n # session_state\r\n # elapsed_time\r\n # is_default_peer N/A\r\n # statistics\r\n # last_message_received N/A\r\n # num_of_sg_received N/A\r\n # discontinuity_time N/A\r\n # error\r\n # rpf_failure\r\n # queue\r\n # size_in\r\n # size_out\r\n # received\r\n # keepalive N/A\r\n # notification N/A\r\n # sa_message\r\n # sa_response N/A\r\n # sa_request\r\n # total N/A\r\n # sent\r\n # keepalive N/A\r\n # notification N/A\r\n # sa_message\r\n # sa_response\r\n # sa_request N/A\r\n # total N/A\r\n # sa_policy N/A\r\n # sa_cache\r\n # [sa_group sa_source_addr]\r\n # group\r\n # source_addr\r\n # origin_rp\r\n # rp_address\r\n # up_time\r\n # expire\r\n # holddown_interval N/A\r\n # peer_learned_from \r\n # rpf_peer\r\n\r\n # vrf\r\n # peer \r\n info_src = '[vrf][(?P<vrf>.*)][peer][(?P<peer>.*)]'\r\n info_dest = 'info' + info_src\r\n\r\n # ShowIpMsdpPeer\r\n\r\n for key in ['elapsed_time', 'peer_as', \r\n 'connect_source' , 'ttl_threshold']:\r\n\r\n self.add_leaf(cmd=ShowIpMsdpPeer,\r\n src=info_src+'[{key}]'.format(key=key),\r\n dest=info_dest+'[{key}]'.format(key=key))\r\n\r\n self.add_leaf(cmd=ShowIpMsdpPeer,\r\n src=info_src+'[session_state]',\r\n dest=info_dest+'[session_state]',\r\n action=self.get_session_state)\r\n\r\n # statistics\r\n # received\r\n # sa_message\r\n # sa_request\r\n for key in ['sa_message', 'sa_request',]:\r\n\r\n self.add_leaf(cmd=ShowIpMsdpPeer,\r\n src=info_src+'[statistics][received][{key}]'\\\r\n .format(key=key),\r\n 
dest=info_dest+'[statistics][received][{key}]'\\\r\n .format(key=key))\r\n\r\n # statistics\r\n # queue\r\n # size_in\r\n # size_out\r\n for key in ['size_in', 'size_out']:\r\n self.add_leaf(cmd=ShowIpMsdpPeer,\r\n src=info_src+'[statistics][queue][{key}]'.format(key=key),\r\n dest=info_dest+'[statistics][queue][{key}]'.format(key=key)) \r\n\r\n # statistics\r\n # sent\r\n # sa_message\r\n # sa_response\r\n for key in ['sa_message', 'sa_response']:\r\n self.add_leaf(cmd=ShowIpMsdpPeer,\r\n src=info_src+'[statistics][sent][{key}]'.format(key=key),\r\n dest=info_dest+'[statistics][sent][{key}]'.format(key=key))\r\n\r\n # statistics\r\n # error\r\n # rpf_failure\r\n self.add_leaf(cmd=ShowIpMsdpPeer,\r\n src=info_src+'[statistics][error][rpf_failure]',\r\n dest=info_dest+'[statistics][error][rpf_failure]')\r\n\r\n # ShowIpMsdpSaCache\r\n\r\n # vrf\r\n # sa_cache\r\n info_src = '[vrf][(?P<vrf>.*)][sa_cache][(?P<sa_cache>.*)]'\r\n info_dest = 'info' + info_src\r\n\r\n for key in ['group', 'source_addr', 'peer_learned_from', 'rpf_peer',\r\n 'up_time', 'expire',]:\r\n\r\n self.add_leaf(cmd=ShowIpMsdpSaCache,\r\n src=info_src+'[{key}]'.format(key=key),\r\n dest=info_dest+'[{key}]'.format(key=key))\r\n\r\n # origin_rp\r\n # rp_address\r\n self.add_leaf(cmd=ShowIpMsdpSaCache,\r\n src=info_src+'[origin_rp][(?P<rp_address>.*)][rp_address]',\r\n dest=info_dest+'[origin_rp][(?P<rp_address>.*)][rp_address]')\r\n\r\n # Make final Ops structure\r\n self.make(final_call=True)\r\n", "id": "1715722", "language": "Python", "matching_score": 5.01686429977417, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/msdp/iosxe/msdp.py" }, { "content": "'''\n Msdp Genie Ops Object Outputs for NXOS.\n'''\n\nclass MsdpOutput(object):\n # 'show ip msdp peer vrf all' output\n showIpMsdpPeerVrf = '''\\\n R3_titatnium# show ip msdp peer vrf all\n MSDP peer 10.4.1.1 for VRF \"default\"\n AS 100, local address: 10.36.3.3 (loopback0)\n Description: R1\n Connection status: Established\n Uptime(Downtime): 01:27:25\n Last reset reason: Keepalive timer expired\n Password: <PASSWORD>\n Keepalive Interval: 60 sec\n Keepalive Timeout: 90 sec\n Reconnection Interval: 33 sec\n Policies:\n SA in: none, SA out: none\n SA limit: 111\n Member of mesh-group: 1\n Statistics (in/out):\n Last messaged received: 00:00:22\n SAs: 0/0, SA-Requests: 0/0, SA-Responses: 0/0\n In/Out Ctrl Msgs: 0/0, In/Out Data Msgs: 0/0\n Remote/Local Port 26743/639\n Keepalives: 92/119, Notifications: 0/6\n RPF check failures: 0\n Cache Lifetime: 00:03:30\n Established Transitions: 6\n Connection Attempts: 0\n Discontinuity Time: 01:27:25\n\n MSDP peer 10.94.44.44 for VRF \"VRF1\"\n AS 200, local address: 10.21.33.34 (loopback3)\n Description: R4\n Connection status: Inactive, Connecting in: 00:00:23\n Uptime(Downtime): 01:03:22\n Password: <PASSWORD>\n Keepalive Interval: 60 sec\n Keepalive Timeout: 90 sec\n Reconnection Interval: 44 sec\n Policies:\n SA in: none, SA out: none\n SA limit: 44\n Member of mesh-group: 2\n Statistics (in/out):\n Last messaged received: never\n SAs: 0/0, SA-Requests: 0/0, SA-Responses: 0/0\n In/Out Ctrl Msgs: 0/0, In/Out Data Msgs: 0/0\n Remote/Local Port 0/0\n Keepalives: 0/0, Notifications: 0/0\n RPF check failures: 0\n Cache Lifetime: 00:03:30\n Established Transitions: 0\n Connection Attempts: 88\n Discontinuity Time: 00:00:20\n '''\n # 'show ip msdp sa-cache detail vrf' output\n showIpMsdpSaCacheDetailVrf = '''\\\n\n nexus# show ip msdp sa-cache detail vrf all\n MSDP SA Route Cache for VRF \"default\" - 1 entries\n 
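# A sketch of turning the fixed-width SA cache table that follows into a
# (source, group, rp, asn, uptime) tuple; parse_sa_row() is a hypothetical
# helper for illustration, not the shipped parser.
import re

def parse_sa_row(line):
    m = re.match(r'\s*(\S+)\s+(\S+)\s+(\S+)\s+(\d+)\s+(\S+)', line)
    return m.groups() if m else None

row = '172.16.25.2     172.16.58.3     10.106.106.106  100     00:02:43'
assert parse_sa_row(row) == (
    '172.16.25.2', '172.16.58.3', '10.106.106.106', '100', '00:02:43')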
Source Group RP ASN Uptime\n 172.16.25.2 172.16.58.3 10.106.106.106 100 00:02:43\n Peer: 10.106.106.106, Expires: 00:02:32\n '''\n\n # show ip msdp policy statistics sa-policy 10.4.1.1 in\n ShowIpMsdpPolicyStatisticsSaPolicyIn = '''\\\n N95_2_R2# show ip msdp policy statistics sa-policy 10.4.1.1 in \n C: No. of comparisions, M: No. of matches\n\n route-map filtera permit 10\n match ip address mcast-all-groups C: 0 M: 0 \n route-map filtera permit 20\n match ip address mcast-all-groups2 C: 0 M: 0 \n\n Total accept count for policy: 0 \n Total reject count for policy: 0 \n '''\n\n # show ip msdp policy statistics sa-policy 10.4.1.1 in Vrf VRF1\n ShowIpMsdpPolicyStatisticsSaPolicyInVRF1 = '''\\\n R4# show ip msdp policy statistics sa-policy 10.94.44.44 in Vrf VRF1\n No SA input policy set for this peer\n '''\n\n # show ip msdp policy statistics sa-policy 10.4.1.1 out\n ShowIpMsdpPolicyStatisticsSaPolicyOut = '''\\\n N95_2_R2# show ip msdp policy statistics sa-policy 10.4.1.1 out\n C: No. of comparisions, M: No. of matches\n\n route-map filtera permit 10\n match ip address mcast-all-groups C: 0 M: 0 \n route-map filtera permit 20\n match ip address mcast-all-groups2 C: 0 M: 0 \n\n Total accept count for policy: 0 \n Total reject count for policy: 0\n '''\n\n # show ip msdp policy statistics sa-policy 10.4.1.1 out Vrf VRF1\n ShowIpMsdpPolicyStatisticsSaPolicyOutVRF1 = '''\\\n R4# show ip msdp policy statistics sa-policy 10.94.44.44 out Vrf VRF1\n No SA input policy set for this peer\n '''\n\n # show ip msdp summary\n ShowIpMsdpSummary = '''\\\n N95_2_R2# show ip msdp summary vrf all\n MSDP Peer Status Summary for VRF \"default\"\n Local ASN: 0, originator-id: 10.16.2.2\n\n Number of configured peers: 1\n Number of established peers: 1\n Number of shutdown peers: 0\n\n Peer Peer Connection Uptime/ Last msg (S,G)s\n Address ASN State Downtime Received Received\n 10.4.1.1 0 Established 05:46:19 00:00:51 1\n '''\n\n # show ip msdp summary Vrf VRF1\n ShowIpMsdpSummaryVRF1 = '''\\\n N95_2_R2# show ip msdp summary \n\n MSDP Peer Status Summary for VRF \"VRF1\"\n Local ASN: 0, originator-id: 10.16.2.2\n\n Number of configured peers: 1\n Number of established peers: 1\n Number of shutdown peers: 0\n\n Peer Peer Connection Uptime/ Last msg (S,G)s\n Address ASN State Downtime Received Received\n 10.94.44.44 0 Established 05:46:18 00:00:55 0\n '''\n\n showOpsOutput={\n \"vrf\": {\n \"VRF1\": {\n \"global\": {\n \"originator_id\": \"10.16.2.2\",\n \"statistics\": {\n \"num_of_configured_peers\": 1,\n \"num_of_established_peers\": 1,\n \"num_of_shutdown_peers\": 0\n },\n \"local_as\": 0\n },\n \"peer\": {\n \"10.94.44.44\": {\n \"timer\": {\n \"holdtime_interval\": 90,\n \"connect_retry_interval\": 44,\n \"keepalive_interval\": 60\n },\n \"statistics\": {\n \"last_message_received\": \"00:00:55\",\n \"error\": {\n \"rpf_failure\": \"0\"\n },\n \"discontinuity_time\": \"00:00:20\",\n \"num_of_sg_received\": 0,\n \"sent\": {\n \"keepalive\": 0,\n \"total\": 0,\n \"sa_request\": 0,\n \"sa_response\": 0,\n \"notification\": 0\n },\n \"received\": {\n \"keepalive\": 0,\n \"total\": 0,\n \"sa_request\": 0,\n \"sa_response\": 0,\n \"notification\": 0\n }\n },\n \"sa_limit\": \"44\",\n \"enable\": False,\n \"description\": \"R4\",\n \"session_state\": \"inactive\",\n \"peer_as\": \"200\",\n \"elapsed_time\": \"01:03:22\",\n \"mesh_group\": \"2\",\n \"connect_source\": \"loopback3\"\n }\n }\n },\n \"default\": {\n \"sa_cache\": {\n \"172.16.58.3 172.16.25.2\": {\n \"up_time\": \"00:02:43\",\n \"group\": 
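# Expected ops structures like showOpsOutput nest several levels deep
# (vrf -> peer -> statistics -> ...); a small reduce-based walker keeps
# assertions on them readable. Sketch only, not part of this package:
from functools import reduce

def deep_get(tree, *keys, default=None):
    try:
        return reduce(lambda acc, k: acc[k], keys, tree)
    except (KeyError, TypeError):
        return default

# deep_get(showOpsOutput, 'vrf', 'VRF1', 'peer', '10.94.44.44',
#          'session_state') would return 'inactive' for the dict built here.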
\"172.16.58.3\",\n \"peer_learned_from\": \"10.106.106.106\",\n \"source_addr\": \"172.16.25.2\",\n \"expire\": \"00:02:32\",\n \"origin_rp\": {\n \"10.106.106.106\": {\n \"rp_address\": \"10.106.106.106\"\n }\n }\n }\n },\n \"global\": {\n \"originator_id\": \"10.16.2.2\",\n \"statistics\": {\n \"num_of_configured_peers\": 1,\n \"num_of_established_peers\": 1,\n \"num_of_shutdown_peers\": 0\n },\n \"local_as\": 0\n },\n \"peer\": {\n \"10.4.1.1\": {\n \"timer\": {\n \"holdtime_interval\": 90,\n \"connect_retry_interval\": 33,\n \"keepalive_interval\": 60\n },\n \"statistics\": {\n \"last_message_received\": \"00:00:51\",\n \"error\": {\n \"rpf_failure\": \"0\"\n },\n \"discontinuity_time\": \"01:27:25\",\n \"num_of_sg_received\": 1,\n \"sa_policy\": {\n \"in\": {\n \"total_reject_count\": 0,\n \"total_accept_count\": 0,\n \"filtera\": {\n \"route-map filtera permit 10 match ip address mcast-all-groups\": {\n \"num_of_comparison\": 0,\n \"num_of_matches\": 0\n },\n \"route-map filtera permit 20 match ip address mcast-all-groups2\": {\n \"num_of_comparison\": 0,\n \"num_of_matches\": 0\n }\n }\n },\n \"out\": {\n \"total_reject_count\": 0,\n \"total_accept_count\": 0,\n \"filtera\": {\n \"route-map filtera permit 10 match ip address mcast-all-groups\": {\n \"num_of_comparison\": 0,\n \"num_of_matches\": 0\n },\n \"route-map filtera permit 20 match ip address mcast-all-groups2\": {\n \"num_of_comparison\": 0,\n \"num_of_matches\": 0\n }\n }\n }\n },\n \"sent\": {\n \"keepalive\": 119,\n \"total\": 0,\n \"sa_request\": 0,\n \"sa_response\": 0,\n \"notification\": 6\n },\n \"received\": {\n \"keepalive\": 92,\n \"total\": 0,\n \"sa_request\": 0,\n \"sa_response\": 0,\n \"notification\": 0\n }\n },\n \"sa_limit\": \"111\",\n \"enable\": True,\n \"description\": \"R1\",\n \"session_state\": \"established\",\n \"peer_as\": \"100\",\n \"elapsed_time\": \"01:27:25\",\n \"mesh_group\": \"1\",\n \"connect_source\": \"loopback0\"\n }\n }\n }\n }\n }\n\n\n", "id": "11786460", "language": "Python", "matching_score": 1.7394599914550781, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/msdp/nxos/tests/msdp_output.py" }, { "content": "# Python\nimport unittest\nfrom unittest.mock import Mock\n\n# ATS\nfrom ats.topology import Device\n\n# Genie\nfrom genie.libs.ops.msdp.ios.msdp import Msdp\nfrom genie.libs.ops.msdp.ios.tests.msdp_output import MsdpOutput\n\n# ios show msdp\nfrom genie.libs.parser.iosxe.show_msdp import (ShowIpMsdpPeer,\n ShowIpMsdpSaCache)\n\noutputs = {}\noutputs['show ip msdp peer'] = MsdpOutput.ShowIpMsdpPeer_golden\noutputs['show ip msdp sa-cache'] = MsdpOutput.ShowIpMsdpSaCache_golden\n\ndef mapper(key):\n return outputs[key]\n\nclass test_msdp(unittest.TestCase):\n def setUp(self):\n self.device = Device(name='aDevice')\n self.device.os = 'ios'\n self.device.custom['abstraction'] = {'order':['os']}\n self.device.mapping={}\n self.device.mapping['cli']='cli'\n # Give the device as a connection type\n # This is done in order to call the parser on the output provided\n self.device.connectionmgr.connections['cli'] = self.device\n\n def test_complete_output(self):\n self.maxDiff = None\n msdp = Msdp(device=self.device)\n\n # Set outputs\n msdp.maker.outputs[ShowIpMsdpPeer] = {'': MsdpOutput.ShowIpMsdpPeer}\n msdp.maker.outputs[ShowIpMsdpSaCache] = {'': MsdpOutput.ShowIpMsdpSaCache}\n\n # Return outputs above as inputs to parser when called\n self.device.execute = Mock()\n self.device.execute.side_effect = mapper\n\n # Learn the feature\n msdp.learn()\n\n # Verify Ops was 
created successfully\n self.assertEqual(msdp.info, MsdpOutput.MsdpInfo)\n\n def test_selective_attribute(self):\n self.maxDiff = None\n msdp = Msdp(device=self.device)\n\n # Set outputs\n msdp.maker.outputs[ShowIpMsdpPeer] = {'': MsdpOutput.ShowIpMsdpPeer}\n msdp.maker.outputs[ShowIpMsdpSaCache] = {'': MsdpOutput.ShowIpMsdpSaCache}\n\n # Return outputs above as inputs to parser when called\n self.device.execute = Mock()\n self.device.execute.side_effect = mapper\n\n # Learn the feature\n msdp.learn()\n\n self.assertEqual('Loopback0', msdp.info['vrf']['default']\\\n ['peer']['10.16.2.2']['connect_source'])\n\n def test_empty_output(self):\n self.maxDiff = None\n msdp = Msdp(device=self.device)\n\n # Set outputs\n msdp.maker.outputs[ShowIpMsdpPeer] = {'': ''}\n msdp.maker.outputs[ShowIpMsdpSaCache] = {'': ''}\n\n # Return outputs above as inputs to parser when called\n self.device.execute = Mock()\n self.device.execute.side_effect = mapper\n\n # Learn the feature\n msdp.learn()\n\n with self.assertRaises(AttributeError):\n msdp.info['vrf']\n\n def test_missing_attributes(self):\n self.maxDiff = None\n msdp = Msdp(device=self.device)\n\n # Set outputs\n msdp.maker.outputs[ShowIpMsdpPeer] = {'': MsdpOutput.ShowIpMsdpPeer}\n msdp.maker.outputs[ShowIpMsdpSaCache] = {'': {}}\n\n # Return outputs above as inputs to parser when called\n self.device.execute = Mock()\n self.device.execute.side_effect = mapper\n\n # Learn the feature\n msdp.learn()\n\n with self.assertRaises(KeyError):\n msdp.info['vrf']['default']['sa_cache']\n\n\nif __name__ == '__main__':\n unittest.main()\n", "id": "5702768", "language": "Python", "matching_score": 1.3280072212219238, "max_stars_count": 0, "path": "pkgs/ops-pkg/src/genie/libs/ops/msdp/ios/tests/test_msdp.py" }, { "content": "# super class\nfrom genie.libs.ops.msdp.iosxe.msdp import Msdp as MsdpXE\n\nclass Msdp(MsdpXE):\n '''\n Msdp Ops Object\n '''\n pass", "id": "3837628", "language": "Python", "matching_score": 1, "max_stars_count": 0, "path": "pkgs/ops-pkg/src/genie/libs/ops/msdp/ios/msdp.py" }, { "content": "from .msdp import *\n", "id": "3747531", "language": "Python", "matching_score": 0, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/msdp/__init__.py" }, { "content": "'''Library of useful utility functions'''\n\nimport itertools as _itertools # don't export\n\n\ndef round_nearest(value):\n '''Round value to the nearest integer.\n By default, Python rounds to the nearest *even* digit.'''\n return int(value\n + (0.5 if value >= 0 else -0.5))\n\nfrom fractions import gcd\n\n\ndef lcm(a, b):\n '''Return lowest common multiple.'''\n return a * b // gcd(a, b)\n\n\ndef nth(iterable, n, default=None):\n '''Returns the nth item or a default value.\n\n From: Itertools Recipes (https://docs.python.org/3.4/library/itertools.html)\n '''\n return next(_itertools.islice(iterable, n, None), default)\n\n", "id": "1555023", "language": "Python", "matching_score": 0.2237500548362732, "max_stars_count": 0, "path": "pkgs/conf-pkg/src/genie/libs/conf/utils/__init__.py" }, { "content": "\"\"\"Copyright 2019 Cisco Systems\nAll rights reserved.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are\nmet:\n\n * Redistributions of source code must retain the above copyright\n notice, this list of conditions and the following disclaimer.\n\nThe contents of this file are licensed under the Apache License, Version 2.0\n(the \"License\"); you may not use this file except in 
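# Note on the conf utils module above: fractions.gcd was deprecated in
# Python 3.5 and removed in 3.9, so math.gcd is the portable import, and
# Python 3.9+ also ships math.lcm natively. The same lcm() on modern
# interpreters (sketch):
from math import gcd

def lcm(a, b):
    '''Return lowest common multiple (math.lcm does this natively on 3.9+).'''
    return a * b // gcd(a, b)

assert lcm(4, 6) == 12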
compliance with the\nLicense. You may obtain a copy of the License at\n\nhttp://www.apache.org/licenses/LICENSE-2.0\n\nUnless required by applicable law or agreed to in writing, software\ndistributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\nWARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\nLicense for the specific language governing permissions and limitations under\nthe License.\n\"\"\"\n\n\"\"\"This library wraps gNMI functionality to ease usage in Python programs.\"\"\"\n\n\nfrom .client import Client\nfrom .xr import XRClient\nfrom .nx import NXClient\nfrom .xe import XEClient\nfrom .builder import ClientBuilder\n\n__version__ = \"1.0.10\"\n", "id": "1355033", "language": "Python", "matching_score": 0.6434510946273804, "max_stars_count": 1, "path": "src/cisco_gnmi/__init__.py" }, { "content": "#! /usr/bin/env python\n\n\"\"\"Setup file for yang.ncdiff package\n\nSee:\n https://packaging.python.org/en/latest/distributing.html\n\"\"\"\n\nimport os\nimport re\nimport sys\nimport shlex\nimport unittest\nimport subprocess\nfrom setuptools import setup, find_packages, Command\nfrom setuptools.command.test import test\n\npkg_name = 'yang.ncdiff'\npkg_path = '/'.join(pkg_name.split('.'))\n\nclass CleanCommand(Command):\n '''Custom clean command\n\n cleanup current directory:\n - removes build/\n - removes src/*.egg-info\n - removes *.pyc and __pycache__ recursively\n\n Example\n -------\n python setup.py clean\n\n '''\n\n user_options = []\n description = 'CISCO SHARED : Clean all build artifacts'\n\n def initialize_options(self):\n pass\n\n def finalize_options(self):\n pass\n\n def run(self):\n os.system('rm -vrf ./build ./dist ./src/*.egg-info')\n os.system('find . -type f -name \"*.pyc\" | xargs rm -vrf')\n os.system('find . 
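# The shell one-liners in this clean command assume find/xargs are on PATH;
# the same cleanup with pathlib and shutil is portable. Sketch only:
import shutil
from pathlib import Path

def clean(root='.'):
    for pyc in list(Path(root).rglob('*.pyc')):
        pyc.unlink()
    for cache in list(Path(root).rglob('__pycache__')):
        shutil.rmtree(cache, ignore_errors=True)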
-type d -name \"__pycache__\" | xargs rm -vrf')\n\nclass TestCommand(Command):\n user_options = []\n description = 'CISCO SHARED : Run unit tests against this package'\n\n def initialize_options(self):\n pass\n\n def finalize_options(self):\n pass\n\n def run(self):\n # where the tests are (relative to here)\n tests = os.path.join('src', pkg_path, 'tests')\n\n # call unittests\n sys.exit(unittest.main(\n module = None,\n argv = ['python -m unittest', 'discover', tests],\n failfast = True))\n\nclass BuildDocs(Command):\n user_options = []\n description = ('CISCO SHARED : Build and privately distribute '\n 'Sphinx documentation for this package')\n\n def initialize_options(self):\n pass\n\n def finalize_options(self):\n pass\n\n def run(self):\n user = os.environ['USER']\n sphinx_build_cmd = \"sphinx-build -b html -c docs/ \" \\\n \"-d ./__build__/documentation/doctrees docs/ ./__build__/documentation/html\"\n try:\n\n ret_code = subprocess.call(shlex.split(sphinx_build_cmd))\n sys.exit(0)\n except Exception as e:\n print(\"Failed to build documentation : {}\".format(str(e)))\n sys.exit(1)\n\n\ndef read(*paths):\n '''read and return txt content of file'''\n with open(os.path.join(os.path.dirname(__file__), *paths)) as fp:\n return fp.read()\n\n\ndef find_version(*paths):\n '''reads a file and returns the defined __version__ value'''\n version_match = re.search(r\"^__version__ ?= ?['\\\"]([^'\\\"]*)['\\\"]\",\n read(*paths), re.M)\n if version_match:\n return version_match.group(1)\n raise RuntimeError(\"Unable to find version string.\")\n\n\n# launch setup\nsetup(\n name = pkg_name,\n version = find_version('src', pkg_path, '__init__.py'),\n\n # descriptions\n description = 'A config state diff calculator for NETCONF',\n long_description = 'A package to generate NETCONF edit-config when two config states are given.',\n\n # the package's documentation page.\n url = 'https://github.com/CiscoTestAutomation/yang.git',\n\n # author details\n author = '<NAME>',\n author_email = '<EMAIL>',\n maintainer_email = '<EMAIL>',\n\n # project licensing\n license = 'Apache 2.0',\n\n platforms = ['CEL',],\n\n # see https://pypi.python.org/pypi?%3Aaction=list_classifiers\n classifiers = [\n 'Development Status :: 5 - Production/Stable',\n 'Environment :: Console',\n 'Intended Audience :: Developers',\n 'Intended Audience :: Telecommunications Industry',\n 'License :: OSI Approved :: Apache Software License',\n 'Operating System :: POSIX :: Linux',\n 'Operating System :: OS Independent',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3 :: Only',\n 'Topic :: Software Development :: Testing',\n ],\n\n # uses namespace package\n namespace_packages = ['yang'],\n\n # project keywords\n keywords = 'pyats cisco-shared',\n\n # project packages\n packages = find_packages(where = 'src'),\n\n # project directory\n package_dir = {\n '': 'src',\n },\n\n # additional package data files that goes into the package itself\n package_data = {'':['README.rst',\n 'tests/yang/*.*']},\n\n # Standalone scripts\n scripts = [\n ],\n\n # console entry point\n entry_points = {\n },\n\n # package dependencies\n install_requires = [\n 'pyang >= 1.7.3',\n 'ncclient >= 0.6.3, < 0.6.10',\n 'requests >= 2.18.4',\n 'cisco-gnmi',\n 'xmljson >= 0.1.9',\n 'yang.connector >= 3.0.0',\n ],\n\n # any additional groups of dependencies.\n # install using: $ pip install -e .[dev]\n extras_require = {\n 'dev': ['coverage',\n 'restview',\n 'Sphinx',\n 'sphinxcontrib-napoleon',\n 'sphinx-rtd-theme'],\n },\n\n # any data 
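# find_version() above relies on a single-line __version__ assignment; a
# quick self-check of that regex against a typical module header:
import re

sample = '__version__ = "1.0.10"\n'
match = re.search(r"^__version__ ?= ?['\"]([^'\"]*)['\"]", sample, re.M)
assert match and match.group(1) == '1.0.10'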
files placed outside this package.\n # See: http://docs.python.org/3.4/distutils/setupscript.html\n # format:\n # [('target', ['list', 'of', 'files'])]\n # where target is sys.prefix/<target>\n data_files = [],\n\n # custom commands for setup.py\n cmdclass = {\n 'clean': CleanCommand,\n 'test': TestCommand,\n 'docs': BuildDocs,\n },\n\n # non zip-safe (never tested it)\n zip_safe = False,\n)\n", "id": "4503474", "language": "Python", "matching_score": 1.760154366493225, "max_stars_count": 15, "path": "ncdiff/setup.py" }, { "content": "import os\nfrom genie.harness.main import gRun\n\ntest_list = [\n 'BGPglobalTest',\n 'BGPneighborTest'\n]\n\ndef main():\n test_path = os.path.dirname(os.path.abspath(__file__))\n\n gRun(trigger_uids=test_list,\n trigger_datafile='testspec_example.yaml',\n subsection_datafile='subsection_datafile.yaml')\n\n", "id": "7098327", "language": "Python", "matching_score": 0.8904045224189758, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/pipeline/tests/testjob.py" }, { "content": "import re\nimport os\nimport yaml\nimport inspect\nimport logging\nimport importlib\nfrom collections import ChainMap\nfrom copy import deepcopy\nfrom collections import namedtuple\n\nfrom robot.api.deco import keyword\nfrom robot.libraries.BuiltIn import BuiltIn, RobotNotRunningError\n\nfrom genie.abstract import Lookup\n\nfrom ats.utils.objects import find, R\nfrom ats.aetest import executer, reporter\nfrom ats.datastructures.logic import Or\nfrom ats.results import (Passed, Failed, Aborted, Errored,\n Skipped, Blocked, Passx)\n\nfrom genie.conf import Genie\nfrom genie.utils.diff import Diff\nfrom genie.conf.base import loader\nfrom genie.conf.base import Testbed\nfrom genie.utils.config import Config\nfrom genie.harness.script import TestScript\nfrom genie.utils.loadattr import load_attribute\nfrom genie.utils.profile import unpickle, pickle\nfrom genie.harness.discovery import GenieScriptDiscover\nfrom genie.harness.datafile.loader import TriggerdatafileLoader,\\\n VerificationdatafileLoader,\\\n PtsdatafileLoader\n\nlog = logging.getLogger(__name__)\n\n\nclass GenieRobot(object):\n '''Genie RobotFramework library'''\n\n # Need to maintain the testscript object\n ROBOT_LIBRARY_SCOPE = \"GLOBAL\"\n\n def __init__(self):\n # save builtin so we dont have to re-create then everytime\n self.builtin = BuiltIn()\n\n # Need to create a testscript\n try:\n # If pyATS, then call their use_testbed api, then convert\n self._pyats_testscript = self.builtin.get_library_instance(\n 'ats.robot.pyATSRobot').testscript\n except RuntimeError:\n self._pyats_testscript = None\n # No pyATS\n pass\n except RobotNotRunningError:\n # For building doc\n return\n finally:\n self._genie_testscript = TestScript(Testscript)\n\n @property\n def testscript(self):\n try:\n return self._genie_testscript\n except Exception:\n return self._pyats_testscript\n\n @keyword('use genie testbed \"${testbed}\"')\n def genie_testbed(self, testbed):\n '''Create the genie testbed'''\n try:\n # If pyATS, then call their use_testbed api, then convert\n self.builtin.get_library_instance('ats.robot.pyATSRobot').\\\n use_testbed(testbed)\n testbed = self.builtin.get_library_instance('ats.robot.'\\\n 'pyATSRobot').testbed\n except RuntimeError:\n # No pyATS\n self.testbed = loader.load(testbed)\n else:\n # Has pyATS, so converted and then save locally and also for pyATS\n self.testbed = Genie.init(testbed)\n self.builtin.get_library_instance('ats.robot.'\\\n 'pyATSRobot').testbed =\\\n self.testbed\n 
self.testscript.parameters['testbed'] = self.testbed\n\n # Load Genie Datafiles (Trigger, Verification and PTS)\n\n # This make UUT mandatory. When learning, aka no trigger\n # the UUT are not mandatory\n self.loaded_yamls = True\n self._load_genie_datafile()\n if not self.trigger_datafile:\n self.loaded_yamls = False\n log.warning(\"Could not load the Datafile correctly\")\n\n # Metaparser\n @keyword('parse \"${parser:[^\"]+}\" on device \"${device:[^\"]+}\"')\n def metaparser_on_device(self, parser, device):\n '''Call any `metaparser` parser and parse the device output.'''\n return self.metaparser_on_device_alias_context(device=device,\n alias=None,\n parser=parser)\n\n @keyword('parse \"${parser:[^\"]+}\" on device \"${device:[^\"]+}\" with '\n 'context \"${context}\"')\n def metaparser_on_device_context(self, parser, device, context):\n '''Call any `metaparser` parser and parse the device output with\n a context (cli, xml, yang, ...)\n '''\n # Using `cli`, as its the default for RASTA\n return self.metaparser_on_device_alias_context(alias=None,\n device=device,\n parser=parser,\n context=context)\n\n @keyword('parse \"${parser:[^\"]+}\" on device \"${device:[^\"]+}\" '\n 'using alias \"${alias:[^\"]+}\"')\n def metaparser_on_device_alias(self, parser, device, alias):\n '''Call any `metaparser` parser and parse the device using a specific\n alias\n '''\n return self.metaparser_on_device_alias_context(alias=alias,\n device=device,\n parser=parser)\n\n @keyword('parse \"${parser:[^\"]+}\" on device \"${device:[^\"]+}\" '\n 'using alias \"${alias:[^\"]+}\" with context \"${context}\"')\n def metaparser_on_device_alias_context(self, parser, device, alias,\n context='cli'):\n '''Call any `metaparser` parser and parse the device using a specific\n alias with a context (cli, xml, yang, ...)\n '''\n device_handle = self._search_device(device)\n\n # Look for the alias. If it doesnt exist, let it crash to the user as\n # only valid alias should be provided\n con = device_handle\n if alias:\n con = getattr(device_handle, alias)\n\n return con.parse(parser)\n\n # Genie Ops\n @keyword('learn \"${feature:[^\"]+}\" on device \"${device:[^\"]+}\"')\n def genie_ops_on_device(self, feature, device):\n '''Learn Ops feature on device'''\n return self.genie_ops_on_device_alias_context(feature=feature,\n alias=None,\n device=device)\n\n @keyword('learn \"${feature:[^\"]+}\" on device \"${device:[^\"]+}\" with '\n 'context \"${context:[^\"]+}\"')\n def genie_ops_on_device_context(self, feature, device, context):\n '''Learn Ops feature on device with a context (cli, xml, yang, ...)'''\n return self.genie_ops_on_device_alias_context(feature=feature,\n alias=None,\n context=context,\n device=device)\n\n @keyword('learn \"${feature:[^\"]+}\" on device \"${device:[^\"]+}\" '\n 'using alias \"${alias:[^\"]+}\"')\n def genie_ops_on_device_alias(self, feature, device, alias):\n '''Learn Ops feature on device using a specific alias'''\n return self.genie_ops_on_device_alias_context(feature=feature,\n alias=alias,\n device=device)\n\n @keyword('learn \"${feature:[^\"]+}\" on device \"${device:[^\"]+}\" '\n 'using alias \"${alias:[^\"]+}\" with context \"${context:[^\"]+}\"')\n def genie_ops_on_device_alias_context(self, feature, device, alias,\n context='cli'):\n '''Learn Ops feature on device using a specific alias with a context\n (cli, xml, yang, ...)\n '''\n device_handle = self._search_device(device)\n\n # Look for the alias. 
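# The learn flow below temporarily appends 'context' to the device's
# abstraction order and restores a backup afterwards; the same idea as a
# reusable context manager (sketch, not part of this library):
from contextlib import contextmanager

@contextmanager
def temporarily_appended(seq, item):
    seq.append(item)
    try:
        yield seq
    finally:
        seq.remove(item)

order = ['os']
with temporarily_appended(order, 'context'):
    assert order == ['os', 'context']
assert order == ['os']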
If it doesnt exist, let it crash to the user as\n # only valid alias should be provided\n con = device_handle\n if alias:\n con = getattr(device_handle, alias)\n device_handle.mapping[alias] = con\n\n # Find the feature for this device\n # 1) Directory must exists in genie.libs.ops.<feature>\n # 2) Then abstraction will kick in to find the right one.\n # 3) The directory syntax is <feature>.<feature.<Feature> \n # Where the class is capitalized but the directory/files arent.\n\n # First import genie.libs for abstraction\n package = 'genie.libs.ops'\n\n try:\n mod = importlib.import_module(package)\n except ImportError as e:\n raise ImportError(\"package 'genie' and library 'genie.libs' \"\n \"are mandatory to have to learn '{f}' \"\n .format(f=feature)) from e\n\n # Now find the right library\n attr_name = '.'.join([feature.lower(), feature.lower(),\n feature.title()])\n\n # Find the right library with abstraction if needed\n # Get context in there\n added_context = False\n if hasattr(device_handle, 'custom') and\\\n 'abstraction' in device_handle.custom and\\\n 'order' in device_handle.custom['abstraction']:\n # Add context to it\n backup_abstraction = deepcopy(device_handle.custom['abstraction'])\n device_handle.custom['abstraction']['order'].append('context')\n device_handle.custom['abstraction']['context'] = context\n added_context = True\n\n try:\n cls = load_attribute(package, attr_name, device=device_handle)\n except Exception as e:\n msg = \"Could not find {p}.{a} for device {d}\"\\\n .format(p=package, a=attr_name, d=device_handle.name)\n raise Exception(msg) from e\n\n if added_context:\n device_handle.custom['abstraction'] = backup_abstraction\n\n # Call the Ops now\n ops = cls(device_handle)\n ops.learn()\n return ops\n\n @keyword('Run verification \"${name:[^\"]+}\" on device \"${device:[^\"]+}\" '\n 'using alias \"${alias:[^\"]+}\"')\n def genie_run_verification_alias(self, name, device, alias):\n '''Call any verification defined in the verification datafile\n on device using a specific alias\n '''\n return self.genie_run_verification_alias_context(name=name,\n alias=alias,\n device=device,\n context='cli')\n\n @keyword('Run verification \"${name:[^\"]+}\" on device \"${device:[^\"]+}\" '\n 'with context \"${context:[^\"]+}\"')\n def genie_run_verification_context(self, name, device, context):\n '''Call any verification defined in the verification datafile\n on device with a context (cli, xml, yang, ...)\n '''\n return self.genie_run_verification_alias_context(name=name,\n alias=None,\n device=device,\n context=context)\n\n @keyword('Run verification \"${name:[^\"]+}\" on device \"${device:[^\"]+}\"')\n def genie_run_verification(self, name, device):\n '''Call any verification defined in the verification datafile\n on device\n '''\n return self.genie_run_verification_alias_context(name=name,\n alias=None,\n device=device,\n context='cli')\n\n @keyword('Run verification \"${name:[^\"]+}\" on device \"${device:[^\"]+}\" '\n 'using alias \"${alias:[^\"]+}\" with context \"${context:[^\"]+}\"')\n def genie_run_verification_alias_context(self, name, device, alias,\n context):\n '''Call any verification defined in the verification datafile\n on device using a specific alias with a context (cli, xml, yang, ...)\n '''\n if not self.loaded_yamls:\n self.builtin.fail(\"Could not load the yaml files - Make sure you \"\n \"have an uut device\")\n\n # Set the variables to find the verification\n self.testscript.verification_uids = Or(name+'$')\n self.testscript.verification_groups = 
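# The devices_attributes bookkeeping just below can be expressed as one
# chained setdefault; a sketch of the equivalent dict surgery:
verf = {}
dev = 'uut'
verf.setdefault('devices_attributes', {}).setdefault(dev, {})['context'] = 'cli'
assert verf == {'devices_attributes': {'uut': {'context': 'cli'}}}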
None\n self.testscript.verifications = deepcopy(self.verification_datafile)\n self.testscript.triggers = None\n\n # Modify the parameters to include context\n if name in self.testscript.verifications:\n # Add new parameters named context\n # No need to revert, as a deepcopy was taken, and after discovery\n # nothing is done with the datafiles after\n if 'devices' in self.testscript.verifications[name]:\n # For each device add context\n for dev in self.testscript.verifications[name]['devices']:\n # To shorten the variable\n verf = self.testscript.verifications[name]\n if 'devices_attributes' not in verf or\\\n verf['devices_attributes'][dev] == 'None':\n verf.setdefault('devices_attributes', {})\n verf['devices_attributes'].setdefault(dev, {})\n verf['devices_attributes'][dev] = {}\n\n self.testscript.verifications[name]\\\n ['devices_attributes'][dev]['context'] = context\n\n self._run_genie_trigger_verification(name=name, alias=alias,\n device=device, context=context)\n\n @keyword('Run trigger \"${name:[^\"]+}\" on device \"${device:[^\"]+}\" '\n 'using alias \"${alias:[^\"]+}\"')\n def genie_run_trigger_alias(self, name, device, alias):\n '''Call any trigger defined in the trigger datafile on device\n using a specific alias\n '''\n return self.genie_run_trigger_alias_context(name=name,\n alias=alias,\n device=device,\n context='cli')\n\n @keyword('Run trigger \"${name:[^\"]+}\" on device \"${device:[^\"]+}\" '\n 'with context \"${context:[^\"]+}\"')\n def genie_run_trigger_context(self, name, device, context):\n '''Call any trigger defined in the trigger datafile on device\n with a context (cli, xml, yang, ...)\n '''\n return self.genie_run_trigger_alias_context(name=name,\n alias=None,\n device=device,\n context=context)\n\n @keyword('Run trigger \"${name:[^\"]+}\" on device \"${device:[^\"]+}\"')\n def genie_run_trigger(self, name, device):\n '''Call any trigger defined in the trigger datafile on device\n '''\n return self.genie_run_trigger_alias_context(name=name,\n alias=None,\n device=device,\n context='cli')\n\n @keyword('Run trigger \"${name}\" on device \"${device:[^\"]+}\" '\n 'using alias \"${alias:[^\"]+}\" with context \"${context:[^\"]+}\"')\n def genie_run_trigger_alias_context(self, name, device, alias, context):\n '''Call any trigger defined in the trigger datafile on device\n using a specific alias with a context (cli, xml, yang, ...)\n '''\n\n if not self.loaded_yamls:\n self.builtin.fail(\"Could not load the yaml files - Make sure you \"\n \"have an uut device\")\n\n # Set the variables to find the trigger\n device_handle = self._search_device(device)\n\n self.testscript.trigger_uids = Or(name+'$')\n self.testscript.trigger_groups = None\n self.testscript.triggers = deepcopy(self.trigger_datafile)\n self.testscript.verifications = None\n\n # Modify the parameters to include context\n self._add_abstraction_datafiles(datafile=self.testscript.triggers,\n name=name,\n context=context,\n device=device_handle)\n\n\n self._run_genie_trigger_verification(name=name, alias=alias,\n device=device, context=context)\n\n @keyword('verify count \"${number:[^\"]+}\" \"${structure:[^\"]+}\" on device \"${device:[^\"]+}\"')\n def verify_count(self, number, structure, device):\n '''Verify that a specific number of <...> is <...> on a device.\n\n Supports the same functionality as the alias keyword.\n '''\n return self.verify_count_alias(number, structure, device)\n\n @keyword('verify count \"${number:[^\"]+}\" \"${structure:[^\"]+}\" '\n 'on device \"${device:[^\"]+}\" using alias 
\"${alias:[^\"]+}\"')\n def verify_count_alias(self, number, structure, device, alias=None):\n '''Verify that a specific number of <...> is <...> on a device using a\n specific alias\n\n verify count \"<number>\" \"bgp neighbors\" on device \"<device>\"\n\n verify count \"<number>\" \"bgp routes\" on device \"<device>\"\n\n verify count \"<number>\" \"ospf neighbors\" on device \"<device>\"\n\n verify count \"<number>\" \"interfaces neighbors\" on device \"<device>\"\n '''\n # First word of action is the protocol\n # Last word is the expected value\n # the rest is the structure.\n protocol, structure = structure.split(' ', 1)\n\n # Make sure we support this protocol\n count = 0\n if protocol == 'bgp':\n # Load bgp\n if structure == 'neighbors':\n # then count the number of neighbor\n ops = self.genie_ops_on_device_alias('bgp', device, alias)\n rs = [R(['info', 'instance', '(?P<instance>.*)', 'vrf', '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)', 'session_state', '([e|E]stablished)'])]\n\n elif structure == 'routes':\n # then count the number of routes\n ops = self.genie_ops_on_device_alias('bgp', device, alias)\n rs = [R(['table', 'instance', '(?P<instance>.*)', 'vrf', '(?P<vrf>.*)', 'address_family', '(?P<af>.*)', 'prefixes', '(?P<routes>.*)', '(?P<rest>.*)'])]\n\n elif protocol == 'ospf':\n # Load ospf\n if structure == 'neighbors':\n # then count the number of neighbor\n ops = self.genie_ops_on_device_alias('ospf', device, alias)\n rs = [R(['info', 'vrf', '(?P<vrf>.*)', 'address_family', '(?P<af>.*)', 'instance', '(?P<instance>.*)', 'areas', '(?P<areas>.*)', '(?P<mode>.*)', '(?P<interface>.*)', 'neighbors', '(?P<neighbors>.*)', 'state', '([f|F]ull)'])]\n\n elif protocol == 'interface':\n if structure == 'up':\n # then count the number of interface\n ops = self.genie_ops_on_device_alias('interface', device, alias)\n rs = [R(['info', '(?P<interface>.*)', 'oper_status', '([u|U]p)'])]\n\n count = len(find([ops], *rs, filter_=False, all_keys=True))\n if count != int(number):\n self.builtin.fail(\"Expected '{e}', but found '{f}'\".format(e=number,\n f=count))\n\n @keyword('Verify NTP is synchronized on device \"${device:[^\"]+}\"')\n def verify_ntp_synchronized(self, device):\n '''Verify that NTP is synchronized on this device\n\n Supports the same functionality as the alias keyword.\n '''\n return self.verify_ntp_synchronized_alias(device)\n\n @keyword('Verify NTP is synchronized on device \"${device:[^\"]+}\" '\n 'using alias \"${alias:[^\"]+}\"')\n def verify_ntp_synchronized_alias(self, device, alias=None):\n '''Verify that NTP is synchronized on this device\n\n verify NTP is synchronized on device \"<device>\"\n '''\n\n ops = self.genie_ops_on_device_alias('ntp', device, alias)\n rs = [R(['info', 'clock_state', 'system_status', 'associations_address',\n '(?P<neighbors>.*)'])]\n output = find([ops], *rs, filter_=False, all_keys=True)\n\n if not output:\n self.builtin.fail(\"{} does not have NTP synchronized\".format(device))\n\n @keyword('Verify NTP is synchronized with \"${server:[^\"]+}\" on '\n 'device \"${device:[^\"]+}\"')\n def verify_ntp_synchronized_server(self, server, device):\n '''Verify that a specific server is the synchronized ntp server\n\n Supports the same functionality as the alias keyword.\n '''\n return self.verify_ntp_synchronized_server_alias(server, device)\n\n @keyword('Verify NTP is synchronized with \"${server:[^\"]+}\" on device '\n '\"${device:[^\"]+}\" using alias \"${alias:[^\"]+}\"')\n def verify_ntp_synchronized_server_alias(self, server, device, 
alias=None):\n '''Verify that a specific server is the synchronized ntp server\n\n verify \"1.1.1.1\" is synchronized ntp server on device \"<device>\"\n '''\n\n ops = self.genie_ops_on_device_alias('ntp', device, alias)\n rs = [R(['info', 'clock_state', 'system_status', 'associations_address',\n '(?P<neighbors>.*)'])]\n output = find([ops], *rs, filter_=False, all_keys=True)\n\n if not output:\n self.builtin.fail(\"No synchronized server could be found! Was \"\n \"expected '{}' to be synchronized\".format(server))\n\n if not output[0][0] == server:\n self.builtin.fail(\"Expected synchronized server to be '{}', but \"\n \"found '{}'\".format(server, output[0][0]))\n\n @keyword('Profile the system for \"${feature:[^\"]+}\" on devices '\n '\"${device:[^\"]+}\" as \"${name:[^\"]+}\"')\n def profile_system(self, feature, device, name):\n '''Profile system as per the provided features on the devices\n '''\n return self._profile_the_system(feature=feature,\n device=device,\n context='cli',\n name=name,\n alias=None)\n\n @keyword('Profile the system for \"${feature:[^\"]+}\" on devices '\n '\"${device:[^\"]+}\" as \"${name:[^\"]+}\" '\n 'using alias \"${alias:[^\"]+}\"')\n def profile_system_alias(self, feature, device, name, alias=None):\n '''Profile system as per the provided features on the devices\n filtered using alias\n '''\n\n try:\n device = self._search_device(alias).name\n except KeyError:\n msg = [\"'{alias}' is not found in the testbed yaml file.\".format(\n alias=alias)]\n\n self.builtin.fail('\\n'.join(msg))\n\n return self._profile_the_system(feature=feature,\n device=device,\n context='cli',\n name=name,\n alias=alias)\n\n def _profile_the_system(self, feature, device, context, name, alias):\n '''Profile system as per the provided features on the devices\n '''\n profiled = {}\n\n for dev in device.split(';'):\n\n for fet in feature.split(';'):\n\n if fet not in profiled:\n profiled[fet] = {}\n if dev not in profiled[fet]:\n profiled[fet][dev] = {}\n\n if fet == 'config':\n log.info(\"Start learning device configuration\")\n profiled[fet][dev] = self._profile_config(dev)\n else:\n log.info(\"Start learning feature {f}\".format(f=fet))\n learnt_feature = self.genie_ops_on_device_alias_context(\n feature=fet.strip(), alias=None, device=dev)\n\n profiled[fet][dev] = learnt_feature\n\n\n if os.path.isdir(os.path.dirname(name)):\n # the user provided a file to save as pickle\n pickle_file = pickle(profiled, pts_name = name)\n log.info('Saved system profile as file: %s' % pickle_file)\n else:\n self.testscript.parameters[name] = profiled\n log.info('Saved system profile as variable %s' % name)\n\n def _profile_config(self, device):\n device_handle = self._search_device(device)\n config = Config(device_handle.execute('show running-config'))\n config.tree()\n return config\n\n @keyword('Compare profile \"${pts:[^\"]+}\" with \"${pts_compare:[^\"]+}\" on '\n 'devices \"${devices:[^\"]+}\"')\n def compare_profile(self, pts, pts_compare, devices):\n '''Compare system profiles taken as snapshots during the run'''\n\n if os.path.isfile(pts):\n compare1 = unpickle(pts)\n else:\n compare1 = self.testscript.parameters[pts]\n\n if os.path.isfile(pts_compare):\n compare2 = unpickle(pts_compare)\n else:\n compare2 = self.testscript.parameters[pts_compare]\n\n\n\n exclude_list = ['device', 'maker', 'diff_ignore', 'callables',\n '(Current configuration.*)', 'ops_schema']\n\n try:\n if 'exclude' in self.pts_datafile:\n exclude_list.extend(self.pts_datafile['exclude'])\n except AttributeError:\n pass\n\n 
msg = []\n for fet in compare1:\n failed = []\n feature_exclude_list = exclude_list.copy()\n\n # Get the information too from the pts_data\n try:\n feature_exclude_list.extend(self.pts_datafile[fet]['exclude'])\n except (KeyError, AttributeError):\n pass\n\n for dev in compare1[fet]:\n # Only compare for the specified devices\n if dev not in devices:\n continue\n dev_exclude = feature_exclude_list.copy()\n try:\n dev_exclude.extend(compare1[fet][dev].exclude)\n # TODO - better fix,\n dev_exclude.remove(None)\n except (AttributeError, ValueError):\n pass\n\n diff = Diff(compare1[fet][dev], compare2[fet][dev],\n exclude=dev_exclude)\n\n diff.findDiff()\n\n if len(diff.diffs):\n failed.append((dev, diff))\n\n if failed:\n msg.append('\\n' + '*'*10)\n msg.append(\"Comparison between {pts} and \"\n \"{OPS} is different for feature '{f}' \"\n \"for device:\\n\".format(pts=pts, OPS=pts_compare,\n f=fet))\n for device, diff in failed:\n msg.append(\"'{d}'\\n{diff}\".format(d=device,\n diff=diff))\n\n else:\n message = \"Comparison between {pts} and \"\\\n \"{OPS} is identical\\n\".format(pts=pts,\n OPS=pts_compare)\n # print out message\n log.info(message)\n\n if msg:\n self.builtin.fail('\\n'.join(msg))\n\n message = 'All Feature were identical on all devices'\n self.builtin.pass_execution(message)\n\n def _run_genie_trigger_verification(self, alias, device, context,\n name):\n try:\n device_handle = self._search_device(device)\n except Exception as e:\n raise Exception(\"Could not find '{d}'\".format(d=device))\n\n genie_discovery = GenieScriptDiscover(self.testscript)\n\n # To call the __iter__ of the discovery which will force\n # The generator to return all the elements\n sections = list(genie_discovery)\n\n # Remove both common sections\n testcases = sections[1:-1]\n\n # Its possible multiple devices were found, only\n # keep the one with the correct device\n tc_to_run = []\n for tc in testcases:\n # Make sure the device match the right device and\n # Make sure it match the name, as\n # Or logic could match more than expected\n if tc.parameters['uut'] != device_handle or\\\n not re.match(name+'\\.', tc.uid):\n continue\n tc_to_run.append(tc)\n\n # Make sure only len of 1\n if len(tc_to_run) == 0:\n raise Exception(\"Could not find '{r}'\".format(r=name))\n\n if len(tc_to_run) != 1:\n raise Exception(\"Requested to run '{r}' but more than one was \"\n \"found '{v}'\".format(r=name,\n v=', '.join(tc_to_run)))\n\n # Get the testcase class\n cls = tc_to_run[0]\n # Add to Cls the context if any\n\n # Set the tags\n tags = cls.groups if hasattr(cls, 'groups') else []\n\n # Found our testcase - Now Execute it\n try:\n # Make sure its reset, as we dont need some of these functionalities\n executer.reset()\n reporter.reset()\n result = cls()\n except Exception as e:\n # No need, as pyats has already logged the error\n pass\n\n # Maps the result RobotFramework\n self._convert_result(result, name, ' '.join(tags))\n\n def _add_abstraction_datafiles(self, datafile, name, device, context):\n '''Add context abstraction'''\n\n if name not in datafile or 'devices' not in datafile[name]:\n return datafile\n\n if device.name in datafile[name]['devices']:\n dev = device.name\n elif device.alias in datafile[name]['devices']:\n dev = device.alias\n else:\n return datafile\n\n # Nothing under device\n # Or device does not have abstraction\n if 'devices_attributes' not in datafile[name] or\\\n datafile[name]['devices_attributes'] == 'None' or\\\n 'abstraction' not in datafile[name]['devices_attributes']:\n # Then 
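# compare_profile() above leans on genie.utils.diff.Diff plus an exclude
# list; the core idea, stripped down to flat dicts (sketch only):
def dict_diff(a, b, exclude=()):
    keys = (set(a) | set(b)) - set(exclude)
    return {k: (a.get(k), b.get(k)) for k in keys if a.get(k) != b.get(k)}

assert dict_diff({'x': 1, 'y': 2}, {'x': 1, 'y': 3}) == {'y': (2, 3)}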
add it at the trigger/verification level\n self._add_abstraction_at_level(datafile=datafile[name],\n context=context)\n else:\n # This there is information at device level and abstraction is there\n # Then add at device level\n self._add_abstraction_at_level(\\\n datafile=datafile[name]['devices_attributes'][dev],\n context=context)\n return datafile\n\n def _add_abstraction_at_level(self, datafile, context):\n # If abstraction does not exists, just add it\n # If it already exists, then overwrite the information\n if 'abstraction' not in datafile:\n datafile['abstraction'] = {}\n\n datafile['abstraction']['context'] = context\n if 'order' not in datafile['abstraction']:\n datafile['abstraction']['order'] = []\n if 'context' not in datafile['abstraction']['order']:\n datafile['abstraction']['order'].append('context')\n\n def _convert_result(self, result, name, tags):\n ''''\n pyATS RobotFramework Reason\n Passed Pass Passed is a pass\n Failed Fail Failed is a fail\n Aborted Fail An abort is because of a failure\n Errored Fail An error is because of a failure\n Skipped Pass A skip is not a failure\n Blocked Pass A block is not a failure\n Passx Pass Passx is a pass with exception\n '''\n fail_group = [Failed, Aborted, Errored]\n pass_group = [Passed, Skipped, Blocked, Passx]\n\n if result in fail_group:\n self.builtin.fail('{n} has {r}'.format(n=name, r=result.name),\n tags)\n\n if result in pass_group:\n self.builtin.pass_execution('{n} has {r}'.format(n=name,\n r=result.name),\n tags)\n\n raise Exception('{r} is not a supported result'.format(r=result.name))\n\n def _search_device(self, name):\n try:\n # Find hostname and alias\n return self.testbed.devices[name]\n except KeyError:\n raise KeyError(\"Unknown device {}\".format(name))\n except AttributeError as e:\n raise AttributeError(\"It is mandatory to 'use genie testbed ' \"\n \"command first.\") from e\n\n def _load_genie_datafile(self):\n # Load the datafiles\n variables = self.builtin.get_variables()\n datafiles = []\n\n trigger_datafile = None\n if '${trigger_datafile}' in variables:\n trigger_datafile = variables['${trigger_datafile}']\n\n verification_datafile = None\n if '${verification_datafile}' in variables:\n verification_datafile = variables['${verification_datafile}']\n\n pts_datafile = None\n if '${pts_datafile}' in variables:\n pts_datafile = variables['${pts_datafile}']\n\n self.trigger_datafile, self.verification_datafile, pts_datafile , *_ =\\\n self.testscript._validate_datafiles(self.testbed,\n trigger_datafile,\n\t\t\t\t\t\t verification_datafile,\n\t\t\t\t\t\t pts_datafile,\n\t\t\t\t\t\t None, None)\n\n if self.trigger_datafile:\n self.trigger_datafile = self.testscript._load(self.trigger_datafile,\n TriggerdatafileLoader)\n if self.verification_datafile:\n self.verification_datafile = self.testscript._load(self.verification_datafile,\n VerificationdatafileLoader)\n self.pts_datafile = self.testscript._load(pts_datafile,\n PtsdatafileLoader)\n\n\nclass Testscript(object):\n pass\n", "id": "2643414", "language": "Python", "matching_score": 3.7329752445220947, "max_stars_count": 0, "path": "pkgs/robot-pkg/src/genie/libs/robot/GenieRobot.py" }, { "content": "from genie.conf.base import API\r\nfrom robot.libraries.BuiltIn import BuiltIn\r\n\r\nimport logging\r\nlog = logging.getLogger(__name__)\r\n\r\nDOC_LINK = 'https://pubhub.devnetcloud.com/media/genie-feature-browser/docs/#/apis/'\r\n\r\nclass GenieRobotApis:\r\n ROBOT_LIBRARY_SCOPE = \"TEST CASE\"\r\n def __init__(self):\r\n self.builtin = BuiltIn()\r\n self.api 
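# _convert_result() above folds seven pyATS results into Robot's two
# verdicts; the docstring table, restated as a plain mapping:
PYATS_TO_ROBOT = {
    'passed': 'PASS', 'skipped': 'PASS', 'blocked': 'PASS', 'passx': 'PASS',
    'failed': 'FAIL', 'aborted': 'FAIL', 'errored': 'FAIL',
}
assert PYATS_TO_ROBOT['passx'] == 'PASS' and PYATS_TO_ROBOT['aborted'] == 'FAIL'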
= API()\r\n\r\n def get_keyword_names(self):\r\n return self.api.function_data.keys()\r\n\r\n def get_keyword_documentation(self, kw):\r\n if kw == '__intro__':\r\n return \"Available networking APIs provided by Genie\"\r\n\r\n return ''.join(['''Checkout this url for detailed doc on this keyword:\r\n \r\n ''', DOC_LINK, kw])\r\n\r\n def run_keyword(self, name, args, kwargs):\r\n try:\r\n self.testbed = self.builtin.get_library_instance('genie.libs.robot.GenieRobot').testbed\r\n device_name = kwargs.get('device')\r\n\r\n # if function takes device, pass device, if no then dont pass\r\n if device_name:\r\n device_handler = self._search_device(device_name)\r\n kwargs.pop('device', None)\r\n return self.api.get_api(name.strip().replace(' ', '_'), device_handler)(device_handler, *args, **kwargs)\r\n else:\r\n return self.api.get_api(name.strip().replace(' ', '_'))(*args, **kwargs)\r\n except RuntimeError:\r\n # No GenieRobot\r\n log.error('No testbed is found, did you import \"genie.libs.robot.GenieRobot\"?')\r\n\r\n\r\n def _search_device(self, name):\r\n try:\r\n # Find hostname and alias\r\n return self.testbed.devices[name]\r\n except KeyError:\r\n raise KeyError(\"Unknown device {}\".format(name))\r\n except AttributeError as e:\r\n raise AttributeError(\r\n 'No testbed found, did you use keyword \"use genie testbed \\\"${testbed}\\\"\"?') from e\r\n", "id": "7783736", "language": "Python", "matching_score": 1.5504517555236816, "max_stars_count": 0, "path": "pkgs/robot-pkg/src/genie/libs/robot/GenieRobotApis.py" }, { "content": "import time\nimport logging\nfrom ats import aetest\nfrom ats.utils.objects import find, R\nfrom genie.utils.loadattr import str_to_list\nfrom genie.harness.base import Trigger\n\nlog = logging.getLogger()\n\n\nclass Blitz(Trigger):\n '''Apply some configuration, validate some keys and remove configuration'''\n\n def check_parsed_key(self, key, output, step):\n keys = str_to_list(key)\n with step.start(\"Verify that '{k}' is in the \"\n \"output\".format(k=key)) as step:\n reqs = R(list(keys))\n found = find([output], reqs, filter_=False,\n all_keys=True)\n if not found:\n step.failed(\"Could not find '{k}'\"\n .format(k=key))\n else:\n log.info(\"Found {f}\".format(f=found))\n\n def check_output(self, key, output, step, style):\n msg = \"Verify that '{k}' is {style}d the \"\\\n \"output\".format(k=key, style=style)\n with step.start(msg) as step:\n key = str(key)\n if style == 'include':\n if key not in output:\n step.failed(\"Could not find '{k}'\"\n .format(k=key))\n else:\n log.info(\"Found {k}\".format(k=key))\n elif style == 'exclude':\n if key in output:\n step.failed(\"Could find '{k}'\"\n .format(k=key))\n else:\n log.info(\"Not Found {k}\".format(k=key))\n else:\n raise Exception(\"{s} not supported\")\n\n def _configure(self, data, testbed):\n if not data:\n log.info('Nothing to configure')\n return\n\n if 'devices' not in data:\n log.info('No devices to apply configuration on')\n return\n\n for dev, config in data['devices'].items():\n device = testbed.devices[dev]\n\n # if config is a dict, then try apply config with api\n if isinstance(config, dict):\n for c in config:\n function = config[c].get('api')\n if not function:\n self.error('No API function is found, the config must be a string or a dict contatining the key \"api\"')\n\n args = config[c].get('arguments')\n if 'device' in args:\n arg_device = testbed.devices[args['device']]\n args['device'] = arg_device\n getattr(device.api, function)(**args)\n\n # if not a dict then apply config directly\n 
else:\n device.configure(config)\n\n if 'sleep' in data:\n log.info('Sleeping for {s} seconds to stabilize '\n 'new configuration'.format(s=data['sleep']))\n time.sleep(data['sleep'])\n\n def _validate(self, data, testbed, steps):\n if not data:\n log.info('Nothing to validate')\n return\n\n if 'devices' not in data:\n log.info('No devices to data configuration on')\n return\n\n for dev, command in data['devices'].items():\n device = testbed.devices[dev]\n for i, data in sorted(command.items()):\n command = data.get('command')\n function = data.get('api')\n # if command is given, validate with parser\n if command:\n with steps.start(\"Verify the output of '{c}'\".format(c=command),\n continue_=True) as step:\n if 'parsed' in data:\n output = device.parse(command)\n for key in data['parsed']:\n self.check_parsed_key(key, output, step)\n if 'include' in data:\n output = device.execute(command)\n for key in data['include']:\n self.check_output(key, output, step, 'include')\n if 'exclude' in data:\n output = device.execute(command)\n for key in data['exclude']:\n self.check_output(key, output, step, 'exclude')\n\n # if no command given, validate with api function\n elif function:\n with steps.start(function) as step:\n try:\n args = data.get('arguments')\n if 'device' in args:\n arg_device = testbed.devices[args['device']]\n args['device'] = arg_device\n result = getattr(device.api, function)(**args)\n except Exception as e:\n step.failed('Verification \"{}\" failed : {}'.format(function, str(e)))\n else:\n if result:\n step.passed()\n else:\n step.failed('Failed to {}'.format(function))\n else:\n self.error('No command or API found for verification # {}.'.format(i))\n\n @aetest.setup\n def apply_configuration(self, testbed, configure=None):\n '''Apply configuration on the devices'''\n return self._configure(configure, testbed)\n\n @aetest.test\n def validate_configuration(self, steps, testbed, validate_configure=None):\n '''Validate configuration on the devices'''\n return self._validate(validate_configure, testbed, steps)\n\n @aetest.test\n def remove_configuration(self, testbed, unconfigure=None):\n '''remove configuration on the devices'''\n return self._configure(unconfigure, testbed)\n\n @aetest.test\n def validate_unconfiguration(self, steps, testbed, validate_unconfigure=None):\n '''Validate unconfiguration on the devices'''\n return self._validate(validate_unconfigure, testbed, steps)\n", "id": "6314230", "language": "Python", "matching_score": 1.5293622016906738, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/blitz/blitz.py" }, { "content": "\"\"\"\n\"\"\"\n\n# Python\nimport time\nimport logging\n\n# ATS\nfrom ats.log.utils import banner\nfrom ats.results import Passed, Failed, Skipped, Passx\n\n# Genie\nfrom genie.harness.libs.prepostprocessor.processors import report\n\n# OSPF\nfrom genie.libs.sdk.apis.iosxe.ospf.get import get_ospf_session_count\n\n# MPLS\nfrom genie.libs.sdk.apis.iosxe.mpls.get import get_mpls_ldp_session_count\n\n# Arp\nfrom genie.libs.sdk.apis.iosxe.arp.get import get_arp_table_count\n\n# Bridge Domain\nfrom genie.libs.sdk.apis.iosxe.bridge_domain.get import (\n get_bridge_domain_bridge_domain_mac_count,\n)\n\n# Routing\nfrom genie.libs.sdk.apis.iosxe.routing.get import (\n get_routing_route_count_all_vrf,\n)\n\n# BGP\nfrom genie.libs.sdk.apis.iosxe.bgp.get import (\n get_bgp_session_count,\n get_bgp_external_internal_neighbor_count,\n)\n\n# Logger\nlog = logging.getLogger(__name__)\n\n\n# 
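# Blitz.check_output() above reduces to plain membership tests; a standalone
# sketch of the include/exclude contract it enforces:
def passes(output, include=(), exclude=()):
    missing = [k for k in include if k not in output]
    unexpected = [k for k in exclude if k in output]
    return not missing and not unexpected

assert passes('State: Up', include=['Up'])
assert not passes('State: Up', exclude=['Up'])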
==============================================================================\n# processor: verify_state\n# ==============================================================================\n\n\n@report\ndef verify_state(\n section,\n iteration=5,\n interval=60,\n arp_entry_count=10,\n bgp_route_count=1000,\n ldp_neighbor_count=2,\n mac_entry_count=10,\n ospf_neighbor_count=2,\n):\n\n \"\"\"Trigger Pre-Processor:\n * verify state:\n \"\"\"\n\n log.info(banner(\"processor: 'verify_state'\"))\n\n # Find uut and TGN devices\n dev_name = section.parameters[\"uut\"].name\n uut = section.parameters[\"testbed\"].devices[dev_name]\n tgn_devices = section.parameters[\"testbed\"].find_devices(type=\"tgn\")\n\n if not tgn_devices:\n log.error(\"Traffic generator devices not found in testbed YAML\")\n section.result = verify_state.result = Failed\n # section.failed(goto=['common_cleanup'])\n return\n\n for i in range(iteration):\n log.info(\n \"Verifying state: attempt {} out of {}\".format(i + 1, iteration)\n )\n\n # Start all protocols\n for dev in tgn_devices:\n try:\n if not dev.is_connected():\n dev.connect(via=\"tgn\")\n dev.start_all_protocols(wait_time=10)\n except Exception as e:\n section.result = Failed\n log.error(e)\n log.error(\n \"Error while starting all protocols on traffic generator \"\n \"device '{}'\".format(dev.name)\n )\n log.info(\n \"Verify state failed: sleeping {} seconds before \"\n \"retrying.\".format(interval)\n )\n time.sleep(interval)\n continue\n\n # Get count for protocals\n try:\n # Getting BGP session count\n count_bgp = get_bgp_session_count(device=uut)\n\n # Getting number of internal and external BGP neighbors\n ibgp_neighbor_count, ebgp_neighbor_count = get_bgp_external_internal_neighbor_count(\n device=uut\n )\n\n # Getting OSPF session count\n count_ospf = get_ospf_session_count(device=uut)\n\n # Getting LDP neighbor count\n count_mpls = get_mpls_ldp_session_count(device=uut)\n\n # Getting route count for all vrf\n count_route = get_routing_route_count_all_vrf(uut)\n\n # Getting MAC count\n count_mac = get_bridge_domain_bridge_domain_mac_count(device=uut)\n\n # Getting ARP table count\n count_arp = get_arp_table_count(device=uut)\n except Exception as e:\n section.result = Failed\n log.error(e)\n log.info(\n \"Verify state failed: sleeping {} seconds before \"\n \"retrying.\".format(interval)\n )\n time.sleep(interval)\n continue\n\n log.info(\n \"Verify BGP session is greater than number of \"\n \"External BGP neighbors or Internal BGP neighbors\"\n )\n\n if count_bgp < ebgp_neighbor_count or count_bgp < ibgp_neighbor_count:\n section.result = Failed\n log.error(\n \"Failed: BGP session count is {} and it is smaller \"\n \"than the number of external BGP neighbors {} and \"\n \"internal BGP neighbors {}\".format(\n count_bgp, ebgp_neighbor_count, ibgp_neighbor_count\n )\n )\n\n log.info(\n \"Verify state failed: sleeping {} seconds before \"\n \"retrying.\".format(interval)\n )\n time.sleep(interval)\n continue\n else:\n log.info(\n \"Passed: BGP session count is {} and it is greater than \"\n \"the number of external BGP neighbors {} or \"\n \"internal BGP neighbors {}\".format(\n count_bgp, ebgp_neighbor_count, ibgp_neighbor_count\n )\n )\n\n log.info(\n \"Verify OSPF session is greater than {}\".format(\n ospf_neighbor_count\n )\n )\n\n if count_ospf < ospf_neighbor_count:\n section.result = Failed\n log.error(\n \"Failed: OSPF session count is {} and it is smaller \"\n \"than minimal {}\".format(count_ospf, ospf_neighbor_count)\n )\n log.info(\n \"Verify 
state failed: sleeping {} seconds before \"\n \"retrying.\".format(interval)\n )\n time.sleep(interval)\n continue\n else:\n log.info(\n \"Passed: OSPF session count is {} and it is greater than \"\n \"minimal {}\".format(count_ospf, ospf_neighbor_count)\n )\n\n log.info(\n \"Verify LDP session is greater than {}\".format(ldp_neighbor_count)\n )\n\n if count_mpls < ldp_neighbor_count:\n section.result = Failed\n log.error(\n \"Failed: LDP session count is {} and it is smaller \"\n \"than minimal {}\".format(count_mpls, ldp_neighbor_count)\n )\n log.info(\n \"Verify state failed: sleeping {} seconds before \"\n \"retrying.\".format(interval)\n )\n time.sleep(interval)\n continue\n else:\n log.info(\n \"Passed: LDP session count is {} and it is greater than \"\n \"minimal {}\".format(count_mpls, ldp_neighbor_count)\n )\n\n log.info(\n \"Verify route count is greater than {}\".format(bgp_route_count)\n )\n\n if count_route < bgp_route_count:\n section.result = Failed\n log.error(\n \"Failed: Route count is {} and it is smaller than \"\n \"minimal {}\".format(count_route, bgp_route_count)\n )\n log.info(\n \"Verify state failed: sleeping {} seconds before \"\n \"retrying.\".format(interval)\n )\n time.sleep(interval)\n continue\n else:\n log.info(\n \"Passed: Route count is {} and it is greater than \"\n \"minimal {}\".format(count_route, bgp_route_count)\n )\n\n log.info(\"Verify MAC count is greater than {}\".format(mac_entry_count))\n\n if count_mac < mac_entry_count:\n section.result = Failed\n log.error(\n \"Failed: MAC count is {} and it is smaller than \"\n \"minimal {}\".format(count_mac, mac_entry_count)\n )\n log.info(\n \"Verify state failed: sleeping {} seconds before \"\n \"retrying.\".format(interval)\n )\n time.sleep(interval)\n continue\n else:\n log.info(\n \"Passed: MAC count is {} and it is greater than minimal {}\".format(\n count_mac, mac_entry_count\n )\n )\n\n log.info(\n \"Verify ARP table count is greater than {}\".format(arp_entry_count)\n )\n\n if count_arp < arp_entry_count:\n section.result = Failed\n log.error(\n \"Failed: ARP count is {} and it is smaller than \"\n \"minimal {}\".format(count_arp, arp_entry_count)\n )\n log.info(\n \"Verify state failed: sleeping {} seconds before \"\n \"retrying.\".format(interval)\n )\n time.sleep(interval)\n continue\n else:\n log.info(\n \"Passed: ARP count is {} and it is greater than minimal {}\".format(\n count_arp, arp_entry_count\n )\n )\n\n section.result = Passed\n break\n\n if not section.result:\n verify_state.result = section.result\n # section.failed(goto=['common_cleanup'])\n", "id": "1120321", "language": "Python", "matching_score": 2.979276180267334, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/prepostprocessor/processors.py" }, { "content": "'''\nProcessors for Genie Triggers\n'''\n\n# Python\nimport time\nimport logging\n\n# ATS\nfrom ats.aetest import Testcase\nfrom ats.aetest import reporter\nfrom ats.log.utils import banner\nfrom ats.log import managed_handlers\nfrom ats.results import TestResult, Passed, Failed, Skipped, Passx, Aborted, Errored\nfrom ats.aetest.base import TestableId\nfrom ats.datastructures import AttrDict\nfrom unicon.eal.dialogs import Statement, Dialog\nfrom unicon.core.errors import SubCommandFailure\n\n# Genie\nfrom genie.libs import sdk\nfrom genie.abstract import Lookup\nfrom genie.harness.utils import connect_device\nfrom genie.harness.exceptions import GenieTgnError\nfrom genie.utils.profile import pickle_traffic, unpickle_traffic, 
unpickle_stream_data\n\n# Logger\nlog = logging.getLogger(__name__)\n\nname_mapping = {}\n\ndef report(func):\n def wrapper(section, *args, **kwargs):\n start_reporter(section, func)\n try:\n result = func(section, *args, **kwargs)\n finally:\n func.result = wrapper.result\n stop_reporter(section, func)\n return result\n return wrapper\n\ndef start_reporter(section, func, name=None):\n if not issubclass(type(section), Testcase):\n # Only report at Testcase level; don't start for plain sections\n return\n name = name if name else func.__name__\n if name in name_mapping:\n name = name_mapping[name]\n func.uid = TestableId(name, parent=section)\n func.logfile = managed_handlers.tasklog.logfile\n func.source = AttrDict()\n func.source.name = __file__\n func.source.location = func.__code__.co_firstlineno\n func.description = func.__doc__\n func.result = Passed\n reporter.start_section(func)\n\ndef stop_reporter(section, func):\n section.result += func.result\n if not issubclass(type(section), Testcase):\n # Only report at Testcase level; don't stop for plain sections\n return\n reporter.stop_section(func)\n
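\n# Illustrative usage (hypothetical processor name) of the @report decorator:\n# it wraps a processor so it is reported as its own section and its result is\n# rolled up into the parent testcase result.\n#\n# @report\n# def my_check(section, wait_time=10):\n# '''Trigger Processor: one pre/post check'''\n# my_check.result = Passed\n\ndef _get_connection_class(section):\n\n conn_class_name = None\n for dev in section.parameters['testbed'].find_devices(type='tgn'):\n for con in dev.connections:\n try:\n conn_class_name = dev.connections[con]['class'].__name__\n except:\n continue\n return conn_class_name\n\n# ==============================================================================\n# processor: send_arp\n# ==============================================================================\n\n@report\ndef send_arp(section, arp_wait_time=30):\n\n '''Trigger Processor:\n * Send ARP on traffic generator device\n '''\n\n # Init\n\n log.info(banner(\"processor: 'send_arp'\"))\n\n # Find TGN devices\n tgn_devices = section.parameters['testbed'].find_devices(type='tgn')\n if not tgn_devices:\n log.info(\"SKIP: Traffic generator devices not found in testbed YAML\")\n return\n\n for dev in tgn_devices:\n if dev.name not in section.parent.mapping_data['devices']:\n log.info(\"Traffic generator devices not specified in --devices\")\n return\n\n # Connect to TGN\n try:\n dev.connect(via='tgn')\n except GenieTgnError as e:\n log.error(e)\n log.error(\"Unable to connect to traffic generator device '{}'\".\\\n format(dev.name))\n send_arp.result = Failed\n section.result += send_arp.result\n\n else:\n log.info(\"Connected to traffic generator device '{}'\".\\\n format(dev.name))\n send_arp.result = Passed\n section.result += send_arp.result\n\n # Send ARP on TGN\n try:\n dev.send_arp(wait_time=arp_wait_time)\n except GenieTgnError as e:\n log.error(e)\n log.error(\"Unable to send ARP on '{}'\".format(dev.name))\n send_arp.result = Failed\n else:\n log.info(\"Sent ARP on '{}'\".format(dev.name))\n send_arp.result = Passed\n\n\n# ==============================================================================\n# processor: send_ns\n# ==============================================================================\n\n@report\ndef send_ns(section, ns_wait_time=30):\n\n '''Trigger Processor:\n * Send NS on traffic generator device\n '''\n\n # Init\n\n log.info(banner(\"processor: 'send_ns'\"))\n\n # Find TGN devices\n tgn_devices = section.parameters['testbed'].find_devices(type='tgn')\n if not tgn_devices:\n log.info(\"SKIP: Traffic generator devices not found in testbed YAML\")\n return\n\n for dev in tgn_devices:\n if dev.name not in section.parent.mapping_data['devices']:\n log.info(\"Traffic generator devices not specified in 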
--devices\")\n return\n\n # Connect to TGN\n try:\n dev.connect(via='tgn')\n except GenieTgnError as e:\n log.error(e)\n log.error(\"Unable to connect to traffic generator device '{}'\".\\\n format(dev.name))\n send_ns.result = Failed\n section.result += send_ns.result\n\n else:\n log.info(\"Connected to traffic generator device '{}'\".\\\n format(dev.name))\n send_ns.result = Passed\n section.result += send_ns.result\n\n # Send NS on TGN\n try:\n dev.send_ns(wait_time=ns_wait_time)\n except GenieTgnError as e:\n log.error(e)\n log.error(\"Unable to send NS on '{}'\".format(dev.name))\n send_ns.result = Failed\n else:\n log.info(\"Sent NS on '{}'\".format(dev.name))\n send_ns.result = Passed\n\n\n# ==============================================================================\n# processor: apply_traffic\n# ==============================================================================\n\n@report\ndef apply_traffic(section, apply_wait_time=30):\n\n '''Trigger Processor:\n * Apply traffic on traffic generator device\n '''\n\n # Init\n\n log.info(banner(\"processor: 'apply_traffic'\"))\n\n # Find TGN devices\n tgn_devices = section.parameters['testbed'].find_devices(type='tgn')\n if not tgn_devices:\n log.info(\"SKIP: Traffic generator devices not found in testbed YAML\")\n return\n\n for dev in tgn_devices:\n if dev.name not in section.parent.mapping_data['devices']:\n log.info(\"Traffic generator devices not specified in --devices\")\n return\n\n # Connect to TGN\n try:\n dev.connect(via='tgn')\n except GenieTgnError as e:\n log.error(e)\n log.error(\"Unable to connect to traffic generator device '{}'\".\\\n format(dev.name))\n apply_traffic.result = Failed\n section.result += apply_traffic.result\n\n else:\n log.info(\"Connected to traffic generator device '{}'\".\\\n format(dev.name))\n apply_traffic.result = Passed\n section.result += apply_traffic.result\n\n # Apply traffic on TGN\n try:\n dev.apply_traffic(wait_time=apply_wait_time)\n except GenieTgnError as e:\n log.error(e)\n log.error(\"Unable to apply traffic on '{}'\".format(dev.name))\n apply_traffic.result = Failed\n else:\n log.info(\"Applied traffic on '{}'\".format(dev.name))\n apply_traffic.result = Passed\n\n\n# ==============================================================================\n# processor: create_genie_statistics_view\n# ==============================================================================\n\n@report\ndef create_genie_statistics_view(section, view_create_interval=30, view_create_iteration=5, disable_tracking=False, disable_port_pair=False):\n\n '''Trigger Processor:\n * Creates GENIE traffic statistics view on traffic generator device\n * This processor is useful to compare traffic profiles\n after stop_traffic and apply_traffic in a trigger, since\n apply_traffic deletes the existing GENIE statistics view.
\n '''\n\n # Init\n\n log.info(banner(\"processor: 'create_genie_statistics_view'\"))\n\n # Find TGN devices\n tgn_devices = section.parameters['testbed'].find_devices(type='tgn')\n if not tgn_devices:\n log.info(\"SKIP: Traffic generator devices not found in testbed YAML\")\n return\n\n for dev in tgn_devices:\n if dev.name not in section.parent.mapping_data['devices']:\n log.info(\"Traffic generator devices not specified in --devices\")\n return\n\n # Connect to TGN\n try:\n dev.connect(via='tgn')\n except GenieTgnError as e:\n log.error(e)\n log.error(\"Unable to connect to traffic generator device '{}'\".\\\n format(dev.name))\n create_genie_statistics_view.result = Failed\n section.result += create_genie_statistics_view.result\n\n else:\n log.info(\"Connected to traffic generator device '{}'\".\\\n format(dev.name))\n create_genie_statistics_view.result = Passed\n section.result += create_genie_statistics_view.result\n\n # Creating GENIE traffic view on TGN\n try:\n dev.create_genie_statistics_view(view_create_interval=view_create_interval, \\\n view_create_iteration=view_create_iteration, \\\n disable_tracking=disable_tracking, \\\n disable_port_pair=disable_port_pair)\n except GenieTgnError as e:\n log.error(e)\n log.error(\"Unable to create GENIE traffic statistics view on '{}'\".format(dev.name))\n create_genie_statistics_view.result = Failed\n else:\n log.info(\"Creating GENIE traffic statistic view on '{}'\".format(dev.name))\n create_genie_statistics_view.result = Passed\n\n\n######################################################################################################################################################\n", "id": "8420109", "language": "Python", "matching_score": 3.4170000553131104, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/libs/processors/processors.py" }, { "content": "from .processors import send_arp, \\\n send_ns, \\\n apply_traffic, \\\n create_genie_statistics_view\n", "id": "5590411", "language": "Python", "matching_score": 1, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/libs/processors/__init__.py" }, { "content": "from .arp import *\n", "id": "10579551", "language": "Python", "matching_score": 0.10119901597499847, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/arp/__init__.py" }, { "content": "'''\nDevice class for devices with common TGN OS (ixia/spirent/agilent)\n'''\n\n# Genie Devices\nimport genie.libs.conf.device\nimport genie.libs.conf.device.cisco\n\n\nclass Device(genie.libs.conf.device.cisco.Device):\n '''Device class for devices with common TGN OS (ixia/spirent/agilent)'''\n\n '''__init__ instantiates a single connection instance.'''\n def __init__(self,\n tgn_skip_configuration = False, \n tgn_enable = False,\n tgn_traffic_convergence_threshold = 60.0, \n tgn_reference_rate_threshold = 100.0, \n tgn_first_sample_threshold = 15.0,\n tgn_disable_traffic_post_execution = False,\n tgn_traffic_loss_recovery_threshold = 5.0,\n tgn_traffic_loss_tolerance_percentage = 15.0,\n tgn_enable_traffic_loss_check = True,\n tgn_config_post_device_config = True,\n tgn_profile_snapshot_threshold = 1200.0,\n tgn_routing_threshold = 120.0,\n tgn_port_list = '',\n tgn_arp_wait_time=60.0,\n tgntcl_enable_arp = False,\n tgntcl_learn_after_n_samples = 1,\n tgntcl_stream_sample_rate_percentage = 10.0,\n tgntcl_wait_multiplier = 1, *args, **kwargs):\n\n super().__init__(*args, **kwargs)\n\n self.tgn_skip_configuration = tgn_skip_configuration\n self.tgn_enable = tgn_enable\n 
self.tgn_traffic_convergence_threshold = tgn_traffic_convergence_threshold\n self.tgn_reference_rate_threshold = tgn_reference_rate_threshold\n self.tgn_first_sample_threshold = tgn_first_sample_threshold\n self.tgn_disable_traffic_post_execution = tgn_disable_traffic_post_execution\n self.tgn_traffic_loss_recovery_threshold = tgn_traffic_loss_recovery_threshold\n self.tgn_traffic_loss_tolerance_percentage = tgn_traffic_loss_tolerance_percentage\n self.tgn_enable_traffic_loss_check = tgn_enable_traffic_loss_check\n self.tgn_config_post_device_config = tgn_config_post_device_config\n self.tgn_profile_snapshot_threshold = tgn_profile_snapshot_threshold\n self.tgn_routing_threshold = tgn_routing_threshold\n self.tgn_port_list = tgn_port_list\n self.tgn_arp_wait_time = tgn_arp_wait_time\n self.tgntcl_enable_arp = tgntcl_enable_arp\n self.tgntcl_learn_after_n_samples = tgntcl_learn_after_n_samples\n self.tgntcl_stream_sample_rate_percentage = tgntcl_stream_sample_rate_percentage\n self.tgntcl_wait_multiplier = tgntcl_wait_multiplier", "id": "5590180", "language": "Python", "matching_score": 1.9129629135131836, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/device/statictgn/device.py" }, { "content": "\"\"\"Common functions for traffic checks\"\"\"\n\n# Python\nimport re\nimport logging\n\n# Genie\nfrom genie.utils.timeout import Timeout\nfrom genie.harness.exceptions import GenieTgnError\n\n# Commons\nfrom genie.libs.sdk.apis.utils import analyze_rate\n\nlog = logging.getLogger(__name__)\n\n\ndef set_traffic_transmit_rate(\n testbed, traffic_stream, set_rate, tolerance, max_time, check_interval\n):\n \"\"\"Set stream transmit rate\n\n Args:\n testbed (`obj`): Testbed object\n traffic_stream (`str`): Traffic stream name\n set_rate (`int`): Traffic set rate\n tolerance (`int`): Traffic tolerance\n max_time (`int`): Retry maximum time\n check_interval (`int`): Interval in seconds to do recheck\n\n Returns:\n None\n Raises:\n KeyError: Could not find device on testbed\n Exception: Failed to set transmit rate\n \"\"\"\n try:\n ixia = testbed.devices[\"IXIA\"]\n except KeyError:\n raise KeyError(\"Could not find IXIA device on testbed\")\n\n rate, rate_unit, original_rate = analyze_rate(set_rate)\n\n # IXIA statistics doesn't have 'Gbps' option\n if rate_unit == \"Gbps\":\n original_rate = original_rate * 1000\n rate_unit = \"Mbps\"\n\n # Set the transmit rate\n try:\n ixia.set_layer2_bit_rate(\n traffic_stream=traffic_stream,\n rate=original_rate,\n rate_unit=rate_unit,\n start_traffic=False,\n )\n except Exception as e:\n raise Exception(\"Failed to set the transmit rate due to: {}\".format(e))\n\n\ndef check_traffic_transmitted_rate(\n testbed,\n traffic_stream,\n set_rate,\n tolerance,\n max_time,\n check_interval,\n check_stream=True,\n):\n \"\"\"Check transmitted rate was set correctly or not\n\n Args:\n testbed (`obj`): Testbed object\n traffic_stream (`str`): Traffic stream name\n set_rate (`int`): Traffic set rate\n tolerance (`int`): Traffic tolerance\n max_time (`int`): Retry maximum time\n check_interval (`int`): Interval in seconds to do recheck\n\n Returns:\n None\n Raises:\n KeyError: Could not find device on testbed\n Exception: Traffic drops found\n \"\"\"\n try:\n ixia = testbed.devices[\"IXIA\"]\n except KeyError:\n raise KeyError(\"Could not find IXIA device on testbed\")\n\n rate, rate_unit, original_rate = analyze_rate(set_rate)\n\n # IXIA statistics doesn't have 'Gbps' option\n if rate_unit == \"Gbps\":\n original_rate = original_rate * 1000\n rate_unit = 
\"Mbps\"\n\n # Start the traffic stream\n ixia.start_traffic_stream(traffic_stream, check_stream)\n\n log.info(\n \"Verify the transmitted rate was set correctly, \"\n \"will retry for {} every {} seconds\".format(max_time, check_interval)\n )\n\n timeout = Timeout(max_time, check_interval)\n while timeout.iterate():\n\n # Print IXIA traffic streams when doing a traffic check/retrieve call\n ixia.create_traffic_streams_table()\n\n # Check the transmitted rate was set correctly\n transmitted_rate = ixia.get_traffic_items_statistics_data(\n traffic_stream=traffic_stream,\n traffic_data_field=\"Tx Rate ({})\".format(rate_unit),\n )\n\n expected_rate, expected_rate_unit, original_rate, tolerance_margin = get_traffic_rates(\n str(original_rate), tolerance\n )\n\n if (\n float(transmitted_rate) < original_rate + tolerance_margin\n and float(transmitted_rate) > original_rate - tolerance_margin\n ):\n log.info(\n \"Transmitted rate '{transmitted_rate}{rate_unit}' is within \"\n \"the set rate '{rate}{rate_unit}', tolerance is {tolerance}%\".format(\n transmitted_rate=transmitted_rate,\n rate=original_rate,\n rate_unit=rate_unit,\n tolerance=tolerance,\n )\n )\n return\n else:\n timeout.sleep()\n\n raise Exception(\n \"Retrieved transmit rate for traffic stream '{}' is '{}' while \"\n \"we did set '{}{}', tolerance is {}%\".format(\n traffic_stream,\n transmitted_rate,\n original_rate,\n rate_unit,\n tolerance,\n )\n )\n\n\ndef check_traffic_expected_rate(\n testbed, traffic_stream, expected_rate, tolerance\n):\n \"\"\"Check the expected rate\n\n Args:\n testbed (`obj`): Testbed object\n traffic_stream (`str`): Traffic stream name\n expected_rate (`str`): Traffic expected received rate\n tolerance (`str`): Traffic loss tolerance percentage\n\n Returns:\n None\n Raises:\n KeyError: Could not find device on testbed\n Exception: Traffic drops found\n \"\"\"\n try:\n ixia = testbed.devices[\"IXIA\"]\n except KeyError:\n raise KeyError(\"Could not find IXIA device on testbed\")\n\n # Print IXIA traffic streams when doing a traffic check/retrieve call\n ixia.get_traffic_item_statistics_table([\"Rx Rate (Mbps)\"])\n\n # Retrieve the expected rate\n retrieved_rate = float(\n ixia.get_traffic_items_statistics_data(\n traffic_stream=traffic_stream, traffic_data_field=\"Rx Rate (Mbps)\"\n )\n )\n\n if \">\" in expected_rate:\n expected_rate, expected_rate_unit, original_rate, tolerance_margin = get_traffic_rates(\n expected_rate.split(\">\")[1], tolerance\n )\n\n if retrieved_rate > original_rate - tolerance_margin:\n log.info(\n \"Expected rate for traffic stream '{}' is '{}' (tolerance {}%) and got '{}' {}\".format(\n traffic_stream,\n original_rate,\n tolerance,\n retrieved_rate,\n expected_rate_unit,\n )\n )\n else:\n raise Exception(\n \"Expected rate for traffic stream '{}' is '{}' (tolerance {}%,\"\n \" greater than {}) and got '{}' {}\".format(\n traffic_stream,\n original_rate,\n tolerance,\n original_rate - tolerance_margin,\n retrieved_rate,\n expected_rate_unit,\n )\n )\n else:\n expected_rate, expected_rate_unit, original_rate, tolerance_margin = get_traffic_rates(\n expected_rate, tolerance\n )\n\n if (\n retrieved_rate < original_rate + tolerance_margin\n and retrieved_rate > original_rate - tolerance_margin\n ):\n log.info(\n \"Expected rate for traffic stream '{}' is '{}' (tolerance {}%)\"\n \" and got '{}' {}\".format(\n traffic_stream,\n original_rate,\n tolerance,\n retrieved_rate,\n expected_rate_unit,\n )\n )\n else:\n raise Exception(\n \"Expected rate for traffic stream '{}' is '{}' 
(tolerance {}%,\"\n \" {}<>{}) and got '{}' {}\".format(\n traffic_stream,\n original_rate,\n tolerance,\n original_rate - tolerance_margin,\n original_rate + tolerance_margin,\n retrieved_rate,\n expected_rate_unit,\n )\n )\n\n\ndef check_traffic_drop_count(testbed, traffic_stream, drop_count):\n \"\"\"Check for the drop count\n\n Args:\n testbed (`obj`): Testbed object\n traffic_stream (`str`): Traffic stream name\n drop_count (`str`): Expected drop count\n\n Returns:\n None\n Raises:\n KeyError: Could not find device on testbed\n Exception: Traffic drops found\n \"\"\"\n try:\n ixia = testbed.devices[\"IXIA\"]\n except KeyError:\n raise KeyError(\"Could not find IXIA device on testbed\")\n\n # Stop all traffic streams\n ixia.stop_traffic()\n\n try:\n # Print IXIA traffic streams when doing a traffic check/retrieve call\n ixia.get_traffic_item_statistics_table([\"Frames Delta\"])\n\n # Check for no drops\n dropped_frames = ixia.get_traffic_items_statistics_data(\n traffic_stream=traffic_stream, traffic_data_field=\"Frames Delta\"\n )\n except GenieTgnError as e:\n raise Exception(\n \"Couldn't extract the dropped IXIA frames for traffic flow {}\".format(\n traffic_stream\n )\n )\n\n if int(dropped_frames) <= int(drop_count):\n log.info(\n \"Dropped IXIA frames for traffic flow '{}' is '{}' which is less than\"\n \" or equal to the expected drop count '{}'\".format(\n traffic_stream, dropped_frames, drop_count\n )\n )\n return\n else:\n raise Exception(\n \"Dropped IXIA frames for traffic flow '{}' is '{}' which is greater than\"\n \" the expected drop count '{}'\".format(\n traffic_stream, dropped_frames, drop_count\n )\n )\n\n\ndef get_traffic_rates(expected_rate, tolerance):\n \"\"\"Retrieve the formatted traffic rates and tolerance margin\n\n Args:\n expected_rate (`str`): Expected traffic rate\n tolerance (`str`): Tolerance margin\n\n Returns:\n expected_rate, expected_rate_unit, original_rate, tolerance_margin\n Raise:\n Exception: Failed analyzing rate\n \"\"\"\n try:\n expected_rate, expected_rate_unit, original_rate = analyze_rate(\n expected_rate\n )\n except Exception as e:\n raise Exception(\"{}\".format(e))\n\n # Calculate tolerance\n tolerance_margin = float(original_rate) * (tolerance / 100)\n\n return expected_rate, expected_rate_unit, original_rate, tolerance_margin\n", "id": "3722454", "language": "Python", "matching_score": 2.8439834117889404, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/traffic/checks.py" }, { "content": "\"\"\"Utility type functions that do not fit into another category\"\"\"\n\n# Python\nimport logging\nimport re\nimport jinja2\nimport shlex\nimport subprocess\nimport time\nfrom time import strptime\nfrom datetime import datetime\nfrom netaddr import IPAddress\n\n# pyATS\nfrom pyats.easypy import runtime\n\n# Genie\nfrom genie.utils.config import Config\nfrom genie.utils.diff import Diff\nfrom genie.libs.parser.utils.common import Common\nfrom genie.utils.timeout import Timeout\n\n# unicon\nfrom unicon.eal.dialogs import Dialog, Statement\nfrom unicon.core.errors import ConnectionError\nfrom unicon.plugins.generic.statements import default_statement_list\nfrom unicon.core.errors import SubCommandFailure\n\nlog = logging.getLogger(__name__)\n\n\ndef _cli(device, cmd, timeout, prompt):\n \"\"\" Send command to device and get the output\n\n Args:\n device (`obj`): Device object\n cmd (`str`): Command\n timeout (`int`): Timeout in second\n prompt (`obj`): Unicon statement\n Returns:\n output (`obj`): Output\n \"\"\"\n # Create a 
dialog\n state = device.state_machine.current_state\n pattern = device.state_machine.get_state(state).pattern\n\n device.send(cmd)\n statements = []\n statements.append(prompt)\n statements.append(Statement(pattern=pattern))\n statements.extend(device.state_machine.default_dialog)\n statements.extend(default_statement_list)\n dialog = Dialog(statements)\n output = dialog.process(device.spawn, timeout=timeout)\n\n return output\n\n\ndef tabber(device, cmd, expected, timeout=20):\n \"\"\" Verify if tab works as expected on device\n\n Args:\n device (`obj`): Device object\n cmd (`str`): Command\n expected (`str`): Expected output\n timeout (`int`): Timeout in second\n Returns:\n None\n \"\"\"\n # Create a new state for prompt# cmd\n state = device.state_machine.current_state\n pattern = device.state_machine.get_state(state).pattern\n pattern_mark = \"{b}{c}.*{e}\".format(b=pattern[:-1], c=cmd, e=pattern[-1])\n\n prompt = Statement(\n pattern=pattern_mark,\n action=\"send(\\x03)\",\n args=None,\n loop_continue=True,\n continue_timer=False,\n )\n\n output = _cli(device, cmd + \"\\t\", timeout, prompt)\n\n # Remove sent command\n output = output.match_output.replace(cmd, \"\", 1).replace(\"^C\", \"\")\n output = escape_ansi(output)\n # Find location where command begins, and remove white space at the end\n trim_output = output.splitlines()[1]\n trim_output = trim_output[trim_output.find(cmd) :].strip()\n\n if not expected == trim_output:\n raise Exception(\"'{e}' is not in output\".format(e=expected))\n\n\ndef question_mark(device, cmd, expected, timeout=20, state=\"enable\"):\n \"\"\" Verify if ? works as expected on device\n\n Args:\n device (`obj`): Device object\n cmd (`str`): Command\n expected (`str`): Expected output\n timeout (`int`): Timeout in second\n state (`str`): Cli state\n Returns:\n None\n \"\"\"\n output = question_mark_retrieve(device, cmd, timeout, state)\n\n # Find if expected exists in the output\n if expected not in output:\n raise Exception(\"'{e}' is not in output\".format(e=expected))\n\n\ndef question_mark_retrieve(device, cmd, timeout=20, state=\"enable\"):\n \"\"\" Retrieve output after pressing ? 
on device\n\n Args:\n device (`obj`): Device object\n cmd (`str`): Command\n timeout (`int`): Timeout in second\n state (`str`): Cli state\n Returns:\n output (`str`): Output\n \"\"\"\n # Create a new state for prompt# cmd\n pattern = device.state_machine.get_state(state).pattern\n if state == \"config\":\n # then remove all except last line\n tmp_cmd = cmd.splitlines()[-1]\n pattern_mark = pattern[:-1] + tmp_cmd + pattern[-1]\n else:\n pattern_mark = pattern[:-1] + cmd + pattern[-1]\n\n prompt = Statement(\n pattern=pattern_mark,\n action=\"send(\\x03)\",\n args=None,\n loop_continue=True,\n continue_timer=False,\n )\n output = _cli(device, cmd + \"?\", timeout, prompt)\n\n # Remove sent command\n output = output.match_output.replace(cmd, \"\", 1).replace(\"^C\", \"\")\n output = escape_ansi(output)\n return output\n\n\ndef escape_ansi(line):\n ansi_escape = re.compile(r\"(?:\\x1B[@-_]|[\\x80-\\x9F])[0-?]*[ -/]*[@-~]\")\n return ansi_escape.sub(\"\", line)\n\n\ndef time_to_int(time):\n \"\"\" Cast time string to int in second\n\n Args:\n time(`str`): time string\n Returns:\n out(`int`): time in second\n \"\"\"\n out = 0\n # support patterns like ['00:00:00', '2d10h', '1w2d']\n p = re.compile(\n r\"^(?P<time>(\\d+):(\\d+):(\\d+))?(?P<dh>(\\d+)d(\\d+)h)?\"\n \"(?P<wd>(\\d+)w(\\d)+d)?$\"\n )\n m = p.match(time)\n if m:\n group = m.groupdict()\n if group[\"time\"]:\n out = (\n int(m.group(2)) * 3600 + int(m.group(3)) * 60 + int(m.group(4))\n )\n elif group[\"dh\"]:\n out = int(m.group(6)) * 3600 * 24 + int(m.group(7)) * 3600\n elif group[\"wd\"]:\n out = (\n int(m.group(9)) * 3600 * 24 * 7 + int(m.group(10)) * 3600 * 24\n )\n return out\n\n\ndef get_unconfig_line(config_dict, line):\n \"\"\" unconfigure specific line\n\n Args:\n config_dict (`str`): Config dict\n line (`str`): line to unconfig\n Returns:\n unconfig (`list`): list of unconfig strings\n \"\"\"\n unconfig = []\n\n try:\n line_dict = config_dict[line]\n except Exception:\n raise Exception(\n \"line '{}' is not in running config output\".format(line)\n )\n\n unconfig.append(line)\n for key in line_dict.keys():\n unconfig.append(\"no \" + key)\n\n return unconfig\n\n\ndef get_config_dict(config):\n \"\"\" Cast config to Configuration dict\n\n Args:\n config ('str'): config string\n Returns:\n Configuration dict\n \"\"\"\n cfg = Config(config)\n cfg.tree()\n return cfg.config\n\n\ndef compare_config_dicts(a, b, exclude=None):\n \"\"\" Compare two configuration dicts and return the differences\n\n Args:\n a (`dict`): Configuration dict\n b (`dict`): Configuration dict\n exclude (`list`): List of item to ignore. Supports Regex. 
\n Each regex must begin with ( )\n Returns:\n out (`str`): differences\n \"\"\"\n excludes = [r\"(^Load|Time|Build|Current|Using|exit|end)\"]\n if exclude:\n excludes.extend(exclude)\n\n diff = Diff(a, b, exclude=excludes)\n diff.findDiff()\n\n return str(diff)\n\n\ndef copy_pcap_file(testbed, filename):\n \"\"\"Copy pcap filename to runtime directory for analysis\n\n Args:\n testbed (`obj`): Testbed object\n filename (`str`): Pcap filename\n\n Returns:\n None\n\n Raises:\n pyATS Results\n \"\"\"\n\n if \"port\" in testbed.servers[\"scp\"][\"custom\"]:\n command = (\n \"sshpass -p {password} scp -P {port} {user}@{add}:\"\n \"/{serv_loc}/{file} {loc}/{file}\".format(\n password=testbed.servers[\"scp\"][\"password\"],\n port=testbed.servers[\"scp\"][\"custom\"][\"port\"],\n user=testbed.servers[\"scp\"][\"username\"],\n add=testbed.servers[\"scp\"][\"address\"],\n serv_loc=testbed.servers[\"scp\"][\"custom\"][\"loc\"],\n file=filename,\n loc=runtime.directory,\n )\n )\n else:\n # In the case of a VIRL testbed there is no specific port\n # to connect to the server from\n command = (\n \"sshpass -p {password} scp {user}@{add}:\"\n \"/{serv_loc}/{file} {loc}/{file}\".format(\n password=testbed.servers[\"scp\"][\"password\"],\n user=testbed.servers[\"scp\"][\"username\"],\n add=testbed.servers[\"scp\"][\"address\"],\n serv_loc=testbed.servers[\"scp\"][\"custom\"][\"loc\"],\n file=filename,\n loc=runtime.directory,\n )\n )\n\n log.info(\n \"Copy pcap file '{file}' to '{loc}' for packet analysis\".format(\n file=filename, loc=runtime.directory\n )\n )\n\n args = shlex.split(command)\n try:\n p = subprocess.check_output(args)\n except Exception as e:\n log.error(e)\n raise Exception(\n \"Issue while copying pcap file to runtime directory\"\n \" '{loc}'\".format(loc=runtime.directory)\n )\n\n pcap = \"{loc}/{file}\".format(file=filename, loc=runtime.directory)\n\n return pcap\n\n\ndef get_neighbor_address(ip):\n \"\"\"Get the neighbor address in a subnet /30\n\n Args:\n ip (`str`): Ip address to get the neighbor for\n\n Returns:\n str: Neighbor IP address\n \"\"\"\n\n # Get the neighbor IP address\n ip_list = ip.split(\".\")\n last = int(ip_list[-1])\n\n if last % 2 == 0:\n ip_list[-1] = str(last - 1)\n else:\n ip_list[-1] = str(last + 1)\n\n return \".\".join(ip_list)\n\n\ndef has_configuration(configuration_dict, configuration):\n \"\"\" Verifies if configuration is present\n Args:\n configuration_dict ('dict'): Dictionary containing configuration\n configuration ('str'): Configuration to be verified\n Returns:\n True if configuration is found\n \"\"\"\n\n for k, v in configuration_dict.items():\n if configuration in k:\n return True\n if isinstance(v, dict):\n if has_configuration(v, configuration):\n return True\n return False\n\n\ndef int_to_mask(mask_int):\n \"\"\" Convert a prefix length to a netmask\n Args:\n mask_int ('int'): prefix length to convert to a mask\n Returns:\n mask value\n \"\"\"\n bin_arr = [\"0\" for i in range(32)]\n for i in range(int(mask_int)):\n bin_arr[i] = \"1\"\n tmpmask = [\"\".join(bin_arr[i * 8 : i * 8 + 8]) for i in range(4)]\n tmpmask = [str(int(tmpstr, 2)) for tmpstr in tmpmask]\n return \".\".join(tmpmask)\n\n\ndef mask_to_int(mask):\n \"\"\" Convert a netmask to a prefix length\n Args:\n mask ('str'): netmask to convert\n Returns:\n int value\n \"\"\"\n return sum(bin(int(x)).count(\"1\") for x in mask.split(\".\"))\n\n\ndef copy_file_from_tftp_ftp(testbed, filename, pro):\n \"\"\"Copy file to runtime directory for analysis\n\n Args:\n testbed (`obj`): Testbed object\n filename (`str`): File name\n pro (`str`): Transfer 
protocol\n Returns:\n None\n Raises:\n pyATS Results\n \"\"\"\n if \"port\" in testbed.servers[pro][\"custom\"]:\n command = (\n \"sshpass -p {svr[password]} scp -P {svr[custom][port]} \"\n \"{svr[username]}@{svr[address]}:\"\n \"/{svr[custom][loc]}/{file} {loc}/{file}\".format(\n svr=testbed.servers[pro], file=filename, loc=runtime.directory\n )\n )\n else:\n # In the case of a VIRL testbed there is no specific port\n # to connect to the server from\n command = (\n \"sshpass -p {svr[password]} scp {svr[username]}@{svr[address]}:\"\n \"/{svr[custom][loc]}/{file} {loc}/{file}\".format(\n svr=testbed.servers[pro], file=filename, loc=runtime.directory\n )\n )\n\n log.info(\n \"Copy {pro} file '{file}' to '{loc}' for later analysis\".format(\n pro=pro, file=filename, loc=runtime.directory\n )\n )\n\n args = shlex.split(command)\n try:\n p = subprocess.check_output(args)\n except Exception as e:\n log.error(e)\n raise Exception(\n \"Issue while copying file to runtime directory\"\n \" '{loc}'\".format(loc=runtime.directory)\n )\n\n path = \"{loc}/{file}\".format(file=filename, loc=runtime.directory)\n\n return path\n\n\ndef load_jinja(\n path,\n file,\n vrf_name,\n bandwidth,\n packet_size,\n ref_packet_size,\n time_interval,\n ipp4_bps,\n ipp2_bw_percent,\n ipp0_bw_percent,\n interface,\n):\n \"\"\"Use Jinja templates to build the device configuration\n\n Args:\n path (`str`): Path to the directory containing the template\n file (`str`): Jinja template file name\n vrf_name (`str`): Vrf name to be used in configuration\n bandwidth (`int`): In bps, bandwidth for traffic flow\n packet_size (`int`): Config packet size\n ref_packet_size (`int`): Referenced packet size\n time_interval (`float`): In seconds, used for calculating bc\n ipp4_bps (`int`): In bps, bandwidth for IPP4 traffic\n ipp2_bw_percent (`int`): In percents, bandwidth for IPP2 traffic\n ipp0_bw_percent (`int`): In percents, bandwidth for IPP0 traffic\n interface (`str`): Where to apply the configured policies\n\n Returns:\n out\n \"\"\"\n\n env = jinja2.Environment(loader=jinja2.FileSystemLoader(searchpath=path))\n\n template = env.get_template(file)\n out = template.render(\n vrf_name=vrf_name,\n bandwidth=bandwidth,\n packet_size=packet_size,\n ref_packet_size=ref_packet_size,\n time_interval=time_interval,\n ipp4_bps=ipp4_bps,\n ipp2_bw_percent=ipp2_bw_percent,\n ipp0_bw_percent=ipp0_bw_percent,\n interface=interface,\n )\n\n return out\n\n\ndef get_time_source_from_output(output):\n \"\"\" Parse out 'Time Source' value from output\n Time source output example : 'Time source is NTP, 23:59:38.461 EST Thu Jun 27 2019'\n\n Args:\n output ('str'): Text output from command\n Returns:\n Datetime object\n Format : datetime(year, month, day, hour, minute, second, microseconds)\n \"\"\"\n\n r1 = re.compile(\n r\"Time\\ssource\\sis\\sNTP\\,\\s\\.*(?P<hour>\\d+)\\:(?P<minute>\\d+)\\:\"\n \"(?P<seconds>\\d+)\\.(?P<milliseconds>\\d+)\\s(?P<time_zone>\"\n \"\\S+)\\s(?P<day_of_week>\\S+)\\s(?P<month>\\S+)\\s(?P<day>\\d+)\"\n \"\\s(?P<year>\\d+)\"\n )\n\n for line in output.splitlines():\n line = line.strip()\n\n result = r1.match(line)\n if result:\n group = result.groupdict()\n hour = int(group[\"hour\"])\n minute = int(group[\"minute\"])\n second = int(group[\"seconds\"])\n milliseconds = int(group[\"milliseconds\"])\n month = strptime(group[\"month\"], \"%b\").tm_mon\n day = int(group[\"day\"])\n year = int(group[\"year\"])\n\n return datetime(\n year, month, day, hour, minute, second, milliseconds * 1000\n )\n\n\ndef get_delta_time_from_outputs(output_before, output_after):\n \"\"\" Get delta time from Time source of 
two outputs\n Time source example: 'Time source is NTP, 23:59:38.461 EST Thu Jun 27 2019'\n\n Args:\n output_before ('str'): Text output from show command\n output_after ('str'): Text output from show command\n Returns:\n Time delta in seconds\n \"\"\"\n\n time_source_before = get_time_source_from_output(output_before)\n time_source_after = get_time_source_from_output(output_after)\n\n return (time_source_after - time_source_before).total_seconds()\n\n\ndef analyze_rate(rate):\n \"\"\" Get the traffic rate and the corresponding unit\n\n Args:\n rate (`str`): Passed rate as a string\n\n Returns:\n rate (`int`): Traffic rate\n rate_unit (`str`): Traffic rate unit\n original_rate (`str`): Original Traffic rate\n \"\"\"\n\n no_unit = False\n\n if isinstance(rate, int):\n rate = str(rate)\n no_unit = True\n\n parse = re.compile(\n r\"^(?P<original_rate>[0-9\\.]+)(?P<rate_unit>[A-Za-z\\%]+)?$\"\n )\n m = parse.match(rate)\n if m:\n parsed_rate = m.groupdict()[\"original_rate\"]\n try:\n original_rate = int(parsed_rate)\n except:\n original_rate = float(parsed_rate)\n\n if m.groupdict()[\"rate_unit\"]:\n rate_unit = m.groupdict()[\"rate_unit\"]\n if \"M\" in rate_unit:\n rate_unit = \"Mbps\"\n rate = original_rate * 1000000\n elif \"K\" in rate_unit:\n rate_unit = \"Kbps\"\n rate = original_rate * 1000\n elif \"G\" in rate_unit:\n rate_unit = \"Gbps\"\n rate = original_rate * 1000000000\n elif \"%\" in rate_unit:\n rate = original_rate\n elif no_unit:\n # Case when recreating policy map on other interfaces\n # Bandwidth was already converted before\n rate_unit = None\n rate = int(rate)\n else:\n rate_unit = None\n\n return rate, rate_unit, original_rate\n else:\n raise Exception(\n \"The provided rate is not in the correct \"\n \"format in the trigger data file\"\n )\n
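\n\n# Illustrative examples (assumed inputs) of what analyze_rate returns:\n#\n# analyze_rate('10Mbps') -> (10000000, 'Mbps', 10)\n# analyze_rate('500Kbps') -> (500000, 'Kbps', 500)\n# analyze_rate(25) -> (25, None, 25) # plain int: unit already resolved\n\n\ndef reconnect_device_with_new_credentials(\n device,\n testbed,\n username,\n password_tacacs,\n password_enable=None,\n password_line=None,\n connection_alias=None,\n):\n \"\"\" Reconnect device with new credentials\n Args:\n device ('obj'): Device object\n testbed ('obj'): Testbed object\n username ('str'): Username for the connection\n password_tacacs ('str'): TACACS password\n password_enable ('str'): Enable password\n password_line ('str'): Line password\n connection_alias ('str'): Connection alias to use\n Raise:\n ConnectionError\n Returns:\n N/A\n \"\"\"\n device_name = device.name\n device.destroy()\n device = testbed.devices[device_name]\n device.tacacs.username = username\n device.passwords.tacacs = password_tacacs\n\n if password_enable:\n device.passwords.enable = password_enable\n\n if password_line:\n device.passwords.line = password_line\n\n if connection_alias:\n device.connect(via=connection_alias)\n else:\n device.connect()\n\n return device\n\n\ndef destroy_connection(device):\n \"\"\" Destroy connection device\n Args:\n device ('obj'): Device object\n\n \"\"\"\n log.info(\"Destroying current connection\")\n device.destroy_all()\n log.info(\"Connection destroyed\")\n\n\ndef configure_device(device, config):\n \"\"\"Apply configuration on the device\n\n Args:\n device (`obj`): Device object\n config (`str`): Configuration to apply\n \"\"\"\n try:\n device.configure(config)\n except Exception as e:\n raise Exception(\"{}\".format(e))\n return\n\n\ndef reconnect_device(device, max_time=300, interval=30, sleep_disconnect=30):\n \"\"\" Reconnect device\n Args:\n device ('obj'): Device object\n max_time ('int'): Max time in seconds trying to connect to device\n interval ('int'): Interval in seconds of checking connection\n sleep_disconnect ('int'): Waiting time after device disconnection\n Raise:\n 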
ConnectionError\n Returns:\n N/A\n \"\"\"\n destroy_connection(device=device)\n\n time.sleep(sleep_disconnect)\n timeout = Timeout(max_time=max_time, interval=interval)\n\n while timeout.iterate():\n try:\n device.connect()\n except Exception as e:\n log.info(\"Device {dev} is not connected\".format(dev=device.name))\n destroy_connection(device=device)\n timeout.sleep()\n continue\n\n if device.is_connected():\n break\n\n timeout.sleep()\n\n if not device.is_connected():\n raise ConnectionError(\n \"Could not reconnect to device {dev}\".format(dev=device.name)\n )\n\n log.info(\"Reconnected to device {dev}\".format(dev=device.name))\n\ndef netmask_to_bits(net_mask):\n \"\"\" Convert netmask to bits\n Args:\n net_mask ('str'): Net mask IP address\n ex.) net_mask = '255.255.255.255'\n Raise:\n None\n Returns:\n Net mask bits\n \"\"\"\n return IPAddress(net_mask).netmask_bits()\n\ndef bits_to_netmask(bits):\n \"\"\" Convert bits to netmask\n Args:\n bits ('int'): bits to converts\n ex.) bits = 32\n Raise:\n None\n Returns:\n Net mask\n \"\"\"\n mask = (0xffffffff >> (32 - bits)) << (32 - bits)\n return (str( (0xff000000 & mask) >> 24) + '.' +\n str( (0x00ff0000 & mask) >> 16) + '.' +\n str( (0x0000ff00 & mask) >> 8) + '.' +\n str( (0x000000ff & mask)))", "id": "10951213", "language": "Python", "matching_score": 2.8439834117889404, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/utils.py" }, { "content": "'''CLI utilities'''\n\ntry:\n import ats.tcl\n from ats.tcl import tclstr\nexcept Exception:\n pass\n\nimport re\nimport functools\nimport logging\nlogger = logging.getLogger(__name__)\n\n__all__ = (\n 'clean_cli_output',\n 'config_cli_to_tree',\n 'cli_tree_to_config',\n)\n\n\ndef clean_cli_output(output, cmd=None, os=None,\n return_dict=False,\n remove_prompt=True, remove_timestamp=True):\n d = {\n 'cmd': None,\n 'timestamp': None,\n 'prompt': None,\n }\n try:\n output = tclstr(output)\n except NameError:\n pass\n\n from genie.libs.conf.utils import ansi\n\n output = re.sub(r'\\t', r' ', output)\n output = re.sub(r'\\r+\\n', r'\\n', output)\n output = re.sub(r'.*\\r', r'', output, re.MULTILINE)\n output = re.sub(ansi.re_generic, r'', output)\n output = re.sub(r'\\s+$', r'', output, re.MULTILINE)\n\n if cmd:\n m = re.match(r'^(?P<cmd>(?:do )?' + re.escape(cmd) + r')(?:\\n|$)', output)\n if m:\n d.update(m.groupdict())\n output = output[:m.start(0)] + output[m.end(0):]\n\n if remove_timestamp:\n # Extract timestamp in the form \"Thu May 29 01:56:01.913 UTC\" or \"Wed Jul 27 07:13:33 UTC 2011\"\n # Only find it on first or second line and remove the whole line from the output\n m = re.search(r'''\n \\A # From start of string...\n (?:.*\\n)? # Skip the first line, or not\n (?P<line> # The timestamp line\n #\\r* # Skip carriage returns (not needed based on previous cleaning)\n (?P<timestamp>\n (?:Mon|Tue|Wed|Thu|Fri|Sat|Sun) # Day of week name\n \\ [A-Za-z]{3} # Month name\n \\ +\\d+ # Day of month\n \\ \\d+:\\d+:\\d+ # Time\n (?:\\.\\d+)? # Optional milliseconds\n \\ [A-Za-z]{3,} # Timezone\n (?:\\ \\d{4})? 
# Optional year\n )\n #[\\ \\t\\r]* # Skip spaces (not needed based on previous cleaning)\n (?:\\n|\\Z) # End of line (or string)\n )\n ''', output, re.VERBOSE | re.MULTILINE)\n if m:\n d['timestamp'] = m.group('timestamp')\n output = output[:m.start('line')] + output[m.end('line'):]\n\n if remove_prompt:\n for once in [1]:\n m = re.search(r'''\n (?:^|\\n)\n (?P<prompt>\n (?P<prompt_location>D?RP/\\d+/(?:RP|RSP)?\\d+/(?:CPU)?\\d+):\n (?:\n (?P<prompt_hostname>[\\w-]+)\n (?:\n \\((?P<prompt_mode>[^)]+)\\)\n )?\n (?P<prompt_mark>[\\#>])\n )?\n )\n $\n ''', output, re.VERBOSE)\n if m:\n # IOS-XR:\n # RP/0/0/CPU0:\n # RP/0/0/CPU0:JSTVXR-R1#\n # RP/0/0/CPU0:JSTVXR-R1(config)#\n d.update(m.groupdict())\n output = output[:m.start(0)] + output[m.end(0):]\n break\n m = re.search(r'(?:^|\\n)(?P<prompt>[\\w-]+(\\([^\\)]+\\)){0,2}#)$', output)\n if m:\n # IOS / NX-OS:\n # N7K-Get-well-R1#\n # JSTVNX-R1(config)(xmlin)#\n d.update(m.groupdict())\n output = output[:m.start(0)] + output[m.end(0):]\n break\n m = re.search(r'(^|\\n)(?P<prompt>\\w+@[\\w-]+[#>])$', output)\n if m:\n # Juniper:\n # admin@MX4#\n d.update(m.groupdict())\n output = output[:m.start(0)] + output[m.end(0):]\n break\n\n if return_dict:\n d['output'] = output\n return d\n else:\n return output\n\n\ndef config_cli_to_tree(cli, *, os=None, strip=False, sort=False, keylist=False,\n consistency_checks=False, keep_all=False,\n keep_empty=False, keep_comments=False,\n keep_closures=False):\n try:\n cli = tclstr(cli)\n except NameError:\n pass\n\n if keep_all:\n keep_empty = True\n keep_comments = True\n keep_closures = True\n\n tcl_imported = True\n\n try:\n from ats.tcl.internal import DictionaryCompare\n except ImportError:\n tcl_imported = False\n pass\n\n def _DictionaryCompare_index0(first, second):\n if tcl_imported:\n return DictionaryCompare(first[0], second[0])\n else:\n return 0\n\n cli = clean_cli_output(cli, os=os)\n if keylist:\n cli = cli.replace('.', '_')\n\n if os == 'junos':\n\n if strip:\n cli = re.sub(r'^ +| +$', r'', cli, re.MULTILINE)\n\n if keep_comments:\n # Put inline comments on their own line\n cli = re.sub(r'; #', r';\\n#', cli)\n else:\n # Remove inline comments\n cli = re.sub(r'; #.*', r'', cli)\n\n # Protect sub-modes with braces (Adds an extra open brace)\n cli = re.sub(r'^.*\\S(?= +\\{$)', r'{{&}', cli, re.MULTILINE)\n\n if keep_comments:\n # Protect comment lines\n if keylist:\n cli = re.sub(r'^ *#.*', r'{{&} {}}', cli, re.MULTILINE)\n else:\n cli = re.sub(r'^ *#.*', r'{{&}}', cli, re.MULTILINE)\n else:\n # Remove comment lines\n cli = re.sub(r'^ *#.*', r'', cli, re.MULTILINE)\n\n # Protect value config lines (drop the ;)\n if keylist:\n cli = re.sub(r'^(.*\\S);$', r'{{\\1} {}}', cli, re.MULTILINE)\n else:\n cli = re.sub(r'^(.*\\S);$', r'{{\\1}}', cli, re.MULTILINE)\n\n # Add an extra close brace to end of sub-modes\n cli = re.sub(r'^ *\\}$', r'}}', cli, re.MULTILINE)\n\n # Need to get rid of the non-canonical string representations\n # and recurse into children\n if sort:\n\n def _clean_cli(clie):\n clie = ats.tcl.cast_list(clie)\n if len(clie) == 2:\n return (clie[0], tuple(sorted(\n (_clean_cli(e) for e in ats.tcl.cast_list(clie[1])),\n key=functools.cmp_to_key(_DictionaryCompare_index0))))\n else:\n return (clie[0], None)\n\n tree = tuple(sorted(\n (_clean_cli(e) for e in ats.tcl.cast_list(cli)),\n key=functools.cmp_to_key(_DictionaryCompare_index0)))\n else:\n\n def _clean_cli(clie):\n clie = ats.tcl.cast_list(clie)\n if len(clie) == 2:\n return (clie[0], tuple(\n (_clean_cli(e) for e in 
ats.tcl.cast_list(clie[1]))))\n else:\n return (clie[0], None)\n\n tree = tuple(\n (_clean_cli(e) for e in ats.tcl.cast_list(cli)))\n\n return tree\n\n else:\n\n is_nxos = os == 'nxos'\n\n lvl = 0\n lvl_indent = {\n lvl: 0,\n }\n lvl_tree = {\n lvl: [],\n }\n\n def _wrap_up_one_lvl():\n # Generic code to wrap up lvl's tree to children and move up to lvl-=1\n nonlocal lvl\n nonlocal lvl_tree\n children = lvl_tree[lvl]\n if sort:\n children = sorted(\n children,\n key=functools.cmp_to_key(_DictionaryCompare_index0))\n lvl -= 1\n lvl_tree[lvl][-1] = (lvl_tree[lvl][-1][0], tuple(children))\n\n lines = cli.splitlines()\n for iline, my_line in enumerate(lines):\n keep_line = True\n\n if re.match(r'^Building configuration', my_line) \\\n or re.match(r'^Current configuration', my_line) \\\n or re.match(r'^!!? Last configuration change at', my_line) \\\n or re.match(r'^!!? NVRAM config last updated at', my_line) \\\n or re.match(r'^!(Command|Time):', my_line):\n continue\n\n my_indent = len(re.match(r'^ *', my_line).group(0))\n if strip:\n my_line = my_line.strip()\n\n if consistency_checks:\n if is_nxos:\n if my_indent % 2:\n pass # TODO\n elif os is not None:\n if my_indent > lvl_indent[lvl] + 1:\n pass # TODO\n\n if my_indent > lvl_indent[lvl]:\n\n # Scenario:\n #\n # a (l=0, i=0)\n # b (l=1, i=1)\n # c (l=?, i=2)\n\n # Action: New level.\n #\n # a (l=0, i=0)\n # b (l=1, i=1)\n # c (l=2, i=2)\n\n lvl += 1\n lvl_indent[lvl] = my_indent\n lvl_tree[lvl] = []\n\n else:\n while my_indent < lvl_indent[lvl]:\n\n # a (l=0, i=0)\n # b (l=1, i=1)\n # c (l=2, i=2)\n # d (l=?, i<2)\n\n if my_indent <= lvl_indent[lvl - 1]:\n # Scenarios:\n #\n # a (l=0, i=0)\n # b (l=1, i=1)\n # c (l=2, i=2)\n # d (l=?, i=1)\n #\n # a (l=0, i=0)\n # b (l=1, i=1)\n # c (l=2, i=2)\n # d (l=?, i=0)\n\n # Action: Wrap up c(l=lvl) as child of b(l=lvl-1)\n #\n # a (l=0, i=0)\n # b (l=1, i=1) { c }\n # d (l=?, i=1)\n #\n # a (l=0, i=0)\n # b (l=1, i=1) { c }\n # d (l=?, i=0)\n\n _wrap_up_one_lvl()\n\n else:\n # { lvl_indent[lvl - 1] < my_indent < lvl_indent[lvl] }\n\n # Scenario:\n #\n # a (l=0, i=0)\n # b (l=1, i=1)\n # c (l=2, i=3)\n # d (l=?, i=2)\n\n # Action: Fix bad indentation of previous block to match current\n #\n # a (l=0, i=0)\n # b (l=1, i=1)\n # c (l=2, i=2)\n # d (l=?, i=2)\n\n if consistency_checks:\n pass # TODO\n\n lvl_indent[lvl] = my_indent\n break\n\n max_lvl = lvl\n\n for once in [1]:\n m = re.match(r'^ *exit$', my_line)\n if m:\n\n # Scenario:\n #\n # a (l=0, i=0)\n # b (l=1, i=1)\n # \"exit\" (l=2, i=2)\n\n # Action: Force current level to exit\n #\n # a (l=0, i=0)\n # b (l=1, i=1)\n\n if lvl:\n max_lvl = lvl - 1\n elif consistency_checks:\n pass # TODO\n keep_line = keep_closures\n break\n\n m = re.match(r'^ *quit$', my_line)\n if m:\n\n # Scenario:\n #\n # a (l=0, i=0)\n # b (l=1, i=1)\n # \"quit\" (l=2, i=2)\n\n # Action: Force all levels to exit\n #\n # a (l=0, i=0)\n # b (l=1, i=1)\n\n max_lvl = 0\n keep_line = keep_closures\n break\n\n m = re.match(r'^end$', my_line)\n if m:\n\n # Scenario:\n #\n # a (l=0, i=0)\n # b (l=1, i=1)\n # \"end\" (l=2, i=0)\n\n # Action: None\n #\n # a (l=0, i=0)\n # b (l=1, i=1)\n\n max_lvl = 0\n keep_line = keep_closures\n\n if consistency_checks and iline != len(lines) - 1:\n pass # TODO\n break\n\n m = re.match(r'^ *!', my_line)\n if m:\n keep_line = keep_comments\n break\n\n m = re.match(r'^$', my_line)\n if m:\n keep_line = keep_empty\n break\n\n if keep_line:\n if keylist:\n lvl_tree[lvl].append((my_line, ()))\n else:\n lvl_tree[lvl].append((my_line, None))\n\n while 
lvl > max_lvl:\n _wrap_up_one_lvl()\n\n while lvl:\n _wrap_up_one_lvl()\n\n # all levels have been wrapped up at this point (lvl == 0)\n\n tree = lvl_tree[0]\n if sort:\n tree = sorted(\n tree,\n key=functools.cmp_to_key(_DictionaryCompare_index0))\n\n return tuple(tree)\n\n\ndef cli_tree_to_config(cli_tree, *, os=None):\n '''Flatten a CLI tree, as produced by config_cli_to_tree, back into\n configuration text (os is accepted for API symmetry but is currently\n unused).'''\n\n def _merge_cli_tree(line1, subcli1, ctx_lines=(), indent=''):\n line1 = line1.strip()\n lines = []\n if line1 != 'end':\n lines.append(indent + line1)\n sub_tree = ctx_lines + (line1,)\n sub_indent = indent + ' '\n sub_lines = []\n for line2, subcli2 in subcli1 or ():\n sub_lines += _merge_cli_tree(line2, subcli2, sub_tree, sub_indent)\n if sub_lines:\n lines += sub_lines\n m = re.match(r'^(?P<kw1>\\S+)', line1)\n assert m, line1\n kw1 = m.group('kw1')\n if kw1 == 'if' \\\n and len(ctx_lines) == 1 \\\n and re.match(r'^route-policy ', ctx_lines[0]):\n pass # else or endif follows\n elif kw1 == 'else' \\\n and len(ctx_lines) == 1 \\\n and re.match(r'^route-policy ', ctx_lines[0]):\n pass # endif follows\n elif kw1 in (\n 'route-policy',\n 'community-set',\n ) \\\n and len(ctx_lines) == 0:\n pass # end-policy/end-set follows\n else:\n # Need exit\n sub_exit = sub_indent + 'exit'\n if sub_lines[-1] != sub_exit:\n lines.append(sub_exit)\n return lines\n\n lines = []\n for line1, subcli1 in cli_tree or ():\n lines += _merge_cli_tree(line1, subcli1)\n\n return '\\n'.join(lines)\n
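\n# Illustrative round-trip sketch (assumed IOS-style input):\n#\n# tree = config_cli_to_tree('interface GigabitEthernet0/0\\n ip address 10.0.0.1 255.255.255.0', os='ios', strip=True)\n# # -> (('interface GigabitEthernet0/0', (('ip address 10.0.0.1 255.255.255.0', None),)),)\n# cli_tree_to_config(tree) # -> the configuration text, with 'exit' closures added\n", "id": "4704911", "language": "Python", "matching_score": 1.4078541994094849, "max_stars_count": 0, "path": "pkgs/conf-pkg/src/genie/libs/conf/topology_mapper/cli.py" }, { "content": "import logging\nimport re\nfrom deepdiff import DeepDiff\nfrom ats.log.utils import banner\nfrom .utility import DataRetriever\n\n\nclass CliVerify:\n \"\"\"Using a config from generated replays, produce CLI verification.\"\"\"\n\n def __init__(self, action, testbed, data, logger):\n self.connection = action.get('action')\n self.uut = testbed.devices[action.get('device', 'uut')]\n self.log = logger\n # check if connected\n if not hasattr(self.uut, 'cli'):\n self.uut.connect(alias='cli', via=self.connection)\n elif hasattr(self.uut, 'cli') and not self.uut.cli.connected:\n self.uut.cli.connect()\n self.cmd, self.returns = DataRetriever.get_data(action, data)\n self.operation = action.get('operation')\n self.cc = CiscoConfig()\n\n def run_cli(self):\n \"\"\"Execute CLI commands.\"\"\"\n if not self.cmd:\n return\n\n resp = 'NO RESPONSE'\n result = True\n\n try:\n self.log.debug('CLI SEND:\\n{0}'.format(self.cmd))\n resp = getattr(self.uut.cli, self.operation)(self.cmd)\n # TODO diff it? do a before after? 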
just look for return?\n except Exception as e:\n self.log.error(\"CLI command failed:\\n{0}\\nERROR:\\n{1}\".format(\n self.cmd, e\n ))\n raise e\n\n if self.returns:\n added, removed = self.cc.diffs(\n self.cc.normalize(self.returns),\n self.cc.normalize(resp)\n )\n if added:\n self.log.error('Extra CLI:\\n{0}'.format(added))\n self.log.error(banner('CLI VERIFICATION FAILED'))\n result = False\n if removed:\n self.log.error('Missing CLI:\\n{0}'.format(removed))\n self.log.error(banner('CLI VERIFICATION FAILED'))\n result = False\n if result:\n self.log.debug(banner('CLI VERIFICATION SUCCEEDED'))\n return result\n\n def before_rpc(self, cmd, counter, xpath, kind=''):\n \"\"\"Collect CLI config before RPC is run\n\n - Create test is run first so needs a fresh pre-config.\n - Merge test is run second and has same pre-config as create.\n - Replace test is run third and has same pre-config as create.\n - Delete test is run fourth and needs a fresh pre-config because\n base config was changed by replace.\n - Remove test is run last and will use same pre-config as delete.\n\n Args:\n cmd (str): Show command.\n counter (str): Common for all tests of a single suite\n xpath (str): Common for all tests of a single suite.\n kind (str): Contains \"basic \" plus create, merge, replace, delete,\n or remove.\n \"\"\"\n index = counter + xpath\n replay_type = kind[kind.find('basic ') + 6:]\n\n if not cmd:\n cmd = 'show running'\n\n if replay_type in ['create', 'delete']:\n # These replays need a fresh pre-config to base diffs on\n resp = self.run_ssh(cmd, op='exec')\n self.common_cli_base[index] = self.cc.normalize(resp)\n elif replay_type in ['merge', 'remove', 'replace']:\n # Pre-config bases are same as create and delete pre-configs\n if index not in self.common_cli_base:\n resp = self.run_ssh(cmd, op='exec')\n self.common_cli_base[index] = self.cc.normalize(resp)\n\n def after_rpc(self, cmd, counter, xpath, kind):\n \"\"\"Collect CLI config after RPC is run\n\n - Create test is run first and needs a fresh post-config.\n - Merge test is run second and uses same diff as create.\n - Replace test is run third, uses same pre-config as create,\n and needs a fresh post-config.\n - Delete test is run fourth, uses same pre-config as create,\n and needs a fresh post-config.\n - Remove is run last, uses the same diff as delete.\n\n Args:\n cmd (str): Show command.\n counter (str): Common for all tests of a single suite\n xpath (str): Common for all tests of a single suite.\n kind (str): Contains \"basic \" plus create, merge, replace, delete,\n or remove.\n Returns:\n (list): Expected CLI after RPC is run.\n \"\"\"\n expect = []\n index = counter + xpath\n replay_type = kind[kind.find('basic ') + 6:]\n\n if not cmd:\n cmd = 'show running'\n\n if replay_type in ['create', 'replace', 'delete']:\n cfg_pre = self.common_cli_base.get(index, [])\n resp = self.run_ssh(cmd, op='exec')\n cfg_post = self.cc.normalize(resp)\n added, removed = self.cc.diffs(cfg_pre, cfg_post)\n\n self.log.debug('{0}\\nadded: {1}\\nremoved: {2}\\n'.format(\n ' - '.join([counter, xpath, replay_type]),\n added,\n removed)\n )\n expect = added + removed\n self.common_cli_diff[index] = expect\n elif replay_type in ['merge', 'remove']:\n expect = self.common_cli_diff.get(index, [])\n\n return ''.join(expect)\n\n def close(self):\n \"\"\"Shut down open session.\"\"\"\n if self.uut.cli.connected:\n self.uut.cli.disconnect()\n\n\nclass CiscoConfig:\n \"\"\"CiscoConfig processes CLI commands to detect differences.\n\n Normalize CLI of 2 examples and 
determine differences.\n \"\"\"\n\n skip = [\"enable\", \"config\", \"t\", \"configure\", \"end\", \"show\",\n \"terminal\", \"commit\", \"#\", \"!\", \"<rpc\", \"Building\"]\n\n timestamps = [\"mon\", \"tue\", \"wed\", \"thu\", \"fri\", \"sat\", \"sun\", \"jan\",\n \"feb\", \"mar\", \"apr\", \"may\", \"jun\", \"jul\", \"aug\", \"sep\",\n \"oct\", \"nov\", \"dec\"]\n\n timeregex = re.compile(\n \"^(([0-1]?[0-9])|([2][0-3])):([0-5]?[0-9])(:([0-5]?[0-9]))?\"\n )\n\n def _handle_intf(self, intf):\n # might be \"interface GigabitEthernet 1/0/1\"\n # or \"interface GigabitEthernet1/0/1\"\n intf_list = intf.split()\n intf_list.remove('interface')\n return \"interface \" + ''.join(intf_list)\n\n def _handle_username(self, username):\n # might be \"username lab password 0 mypassword\"\n # or \"username lab password mypassword\"\n return username.replace(\"password 0\", \"password\", 1)\n\n def _handle_exit(self, line):\n # clear out \"exit\" if found\n if len(line.split()) == 1:\n return None\n return line\n\n special_handles = {\"interface\": _handle_intf,\n \"username\": _handle_username,\n \"exit\": _handle_exit}\n\n def _check_special_handles(self, line):\n\n if line.split()[0] in self.special_handles.keys():\n line = self.special_handles[line.split()[0]](self, line)\n return line\n\n def _check_timestamps(self, line):\n if line[:3].lower() in self.timestamps:\n for item in line.split():\n if self.timeregex.match(item):\n return True\n return False\n\n def normalize(self, cfg):\n \"\"\"Removes uninteresting CLI and returns structured data.\n\n Remove comments, organize blocks of config data,\n skip maintenance operations, and other tasks.\n\n NOTE: Copying and pasting a show running-config may have\n compare errors only specific to the show command so it is\n best to copy the config from device to file and use that text.\n\n Args:\n cfg (str): CLI text from a configuration copied off a Cisco device.\n Returns:\n list: Configuration lines of interest.\n \"\"\"\n clean_cfg = []\n\n for line in cfg.splitlines():\n\n if not line.strip():\n # empty line\n continue\n if \"--More--\" in line:\n # pick up anything that may be included after the \"More\"\n line = line[line.find('--More--') + 8:]\n if not line.split():\n # emptied line\n continue\n if line.startswith('#'):\n continue\n if line.startswith('!'):\n continue\n if line.startswith('Current configuration'):\n continue\n if line.rstrip().endswith(\"#\"):\n continue\n if line.split()[0] in self.skip:\n continue\n if self._check_timestamps(line):\n continue\n line = self._check_special_handles(line)\n if line is None:\n continue\n\n clean_cfg.append(line.strip())\n\n return clean_cfg\n
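\n # Illustrative sketch (assumed CLI snippet) of what normalize() keeps:\n #\n # normalize('interface GigabitEthernet 1/0/1\\n no shutdown\\n!\\nend\\n')\n # # -> ['interface GigabitEthernet1/0/1', 'no shutdown']\n\n def diffs(self, cli_base, cli_after):\n \"\"\"Identify the difference between 2 lists of normalized CLI text.\n\n Args:\n cli_base (list): CLI lines before added configuration.\n cli_after (list): CLI lines after added configuration.\n Returns:\n tuple: (added, removed) lists of changed configuration lines\n \"\"\"\n added_cli = []\n removed_cli = []\n\n ddiff = DeepDiff(cli_base, cli_after,\n ignore_order=True,\n report_repetition=True,\n view='tree')\n added = ddiff.get('iterable_item_added', [])\n removed = ddiff.get('iterable_item_removed', [])\n repetition = ddiff.get('repetition_change', [])\n\n for add in added:\n added_cli.append(add.t2 + '\\n')\n\n for remove in removed:\n removed_cli.append('-' + remove.t1 + '\\n')\n\n for rep in repetition:\n # Include line before repeated CLI\n new_indexes = rep.repetition.get('new_indexes', [])\n old_indexes = 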
\n def diffs(self, cli_base, cli_after):\n \"\"\"Identify the difference between 2 lists of normalized CLI text.\n\n Args:\n cli_base (list): CLI lines before added configuration.\n cli_after (list): CLI lines after added configuration.\n Returns:\n tuple: Lists of added and removed configuration lines.\n \"\"\"\n added_cli = []\n removed_cli = []\n\n ddiff = DeepDiff(cli_base, cli_after,\n ignore_order=True,\n report_repetition=True,\n view='tree')\n added = ddiff.get('iterable_item_added', [])\n removed = ddiff.get('iterable_item_removed', [])\n repetition = ddiff.get('repetition_change', [])\n\n for add in added:\n added_cli.append(add.t2 + '\\n')\n\n for remove in removed:\n removed_cli.append('-' + remove.t1 + '\\n')\n\n for rep in repetition:\n # Include line before repeated CLI\n new_indexes = rep.repetition.get('new_indexes', [])\n old_indexes = rep.repetition.get('old_indexes', [])\n if rep.repetition.get(\n 'old_repeat', 0) < rep.repetition.get(\n 'new_repeat', 0):\n for new_index in new_indexes:\n line = rep.up.t2_child_rel.parent[new_index - 1]\n for old_index in old_indexes:\n if line == rep.up.t1_child_rel.parent[old_index - 1]:\n break\n else:\n added_cli.append(line + '\\n' + rep.t2 + '\\n')\n else:\n for old_index in old_indexes:\n line = rep.up.t1_child_rel.parent[old_index - 1]\n for new_index in new_indexes:\n if line == rep.up.t2_child_rel.parent[new_index - 1]:\n break\n else:\n removed_cli.append(line + '\\n-' + rep.t1 + '\\n')\n\n return (added_cli, removed_cli)\n", "id": "6867650", "language": "Python", "matching_score": 2.326526403427124, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/pipeline/cliverify.py" }, { "content": "import os\nimport time\nfrom datetime import datetime\nfrom .yangexec import run_netconf\nfrom .cliverify import CliVerify\n\n\ndef run_cli(cls, action, data, testbed):\n cliv = CliVerify(action, testbed, data, cls.log)\n return cliv.run_cli()\n\n\ndef run_yang(cls, action, data, testbed):\n protocol = action.get('protocol')\n if protocol == 'netconf':\n return run_netconf(action, data, testbed, cls.log)\n return True\n\n\ndef run_sleep(cls, action, data, testbed):\n time.sleep(action.get('time', 0))\n return True\n\n\ndef run_repeat(cls, action, data, testbed):\n cls.log.info('{0}'.format(action))\n return True\n\n\ndef run_empty(cls, action={}, data={}, testbed={}):\n cls.log.error('NOT IMPLEMENTED: {0}\\n{1}'.format(\n action.get('action', 'missing'),\n action\n )\n )\n return True\n\n\ndef run_timestamp(cls, action, data, testbed):\n graph = action.get('storage', '')\n precision = action.get('precision', 0)\n category = action.get('category', '')\n n = datetime.now()\n cls.log.debug('TIMESTAMP: DATE: {0} TIME: {1}'.format(\n n.strftime(\"%Y-%m-%d\"),\n n.strftime(\"%H:%M:%S.%f\")\n ))\n return True\n\n\nactiondict = {\n 'cli': run_cli,\n 'yang': run_yang,\n 'sleep': run_sleep,\n 'repeat': run_repeat,\n 'timestamp': run_timestamp,\n 'empty': run_empty\n}\n\n\nclass ActionMeta(type):\n\n @classmethod\n def __prepare__(metacls, name, bases):\n return actiondict\n\n def __new__(cls, name, bases, actiondict):\n return type.__new__(cls, name, bases, actiondict)\n", "id": "72917", "language": "Python", "matching_score": 1.4543960094451904, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/pipeline/actions.py" }, { "content": "import sys\nimport argparse\nimport time\nimport logging\nimport yaml\nimport yamlordereddictloader\nfrom jinja2 import Template\nfrom ats import aetest\nfrom ats.log.utils import banner\nfrom ats.utils.objects import find, R\nfrom genie.utils.loadattr import str_to_list\nfrom genie.harness.base import Trigger\nfrom genie.libs.sdk.triggers.blitz.blitz import Blitz\n\nfrom .actions import ActionMeta\n\n\nlog = logging.getLogger(__name__)\n# sys.argv - pass in --option to pyats run; the option shows up here.\n
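\n# Illustrative sketch (comments only; not part of the original module):\n# ActionMeta (see actions.py) returns actiondict as the class namespace,\n# so each key ('cli', 'yang', 'sleep', ...) becomes a method of\n# ActionRunner and dispatch reduces to a getattr() lookup:\n#\n# runner = ActionRunner()\n# runner.run({'action': 'sleep', 'time': 1}, {}, testbed)\n# # -> getattr(runner, 'sleep')(action, params, testbed)\n#\n# Unknown action names fall back to the 'empty' handler.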
\n\nclass TestSpec(Trigger):\n \"\"\"Model Pipeline Test Specification.\"\"\"\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n tb_dict = self.parameters['testbed']._to_dict()\n self.variables = tb_dict.get('custom', {}).get('variables', {})\n self.data = {}\n self.actions = []\n self.action_runner = ActionRunner()\n\n @property\n def data(self):\n return self._data\n\n @data.setter\n def data(self, param_data):\n self.variables.update(self.parameters.get('variables', {}))\n if not param_data:\n self._data = {}\n else:\n data_str = yaml.dump(\n param_data,\n Dumper=yamlordereddictloader.Dumper\n )\n data_str = self._substitute_variables(data_str)\n self._data = yaml.load(\n data_str,\n Loader=yamlordereddictloader.Loader\n )\n\n @property\n def actions(self):\n return self._actions\n\n @actions.setter\n def actions(self, param_actions):\n if not param_actions:\n self._actions = []\n else:\n actions_str = yaml.dump(\n param_actions,\n Dumper=yamlordereddictloader.Dumper\n )\n actions_str = self._substitute_variables(actions_str)\n self._actions = yaml.load(\n actions_str,\n Loader=yamlordereddictloader.Loader\n )\n\n def _substitute_variables(self, data_str):\n \"\"\"Replace variables with valid values.\"\"\"\n try:\n # default is {{ myvariable }}\n tpl = Template(data_str)\n data_str = tpl.render(self.variables)\n # for xpath, variable is _- myvariable -_\n tpl2 = Template(data_str,\n variable_start_string=\"_-\",\n variable_end_string=\"-_\")\n data_str = tpl2.render(self.variables)\n except TypeError:\n pass\n return data_str\n\n def _step_test(self, step, testbed):\n if not self.data:\n self.data = self.parameters.get('data', {})\n if not self.actions:\n self.actions = self.parameters.get('test_actions', {})\n\n for action in self.actions:\n name = 'RUN ' + action.get('action', 'unknown')\n with step.start(name.upper()) as test_step:\n self.action_runner.run_banner(action)\n self.action_runner.run_log(action)\n if not self.action_runner.run(action, self.data, testbed):\n test_step.failed()\n\n @aetest.test\n def run_pipeline_test(self, testbed, steps, suites={}):\n \"\"\"Run test actions defined in Model Pipeline tests.\"\"\"\n # argparse here to catch sys.argv\n self._step_test(steps, testbed)\n\n\nclass ActionRunner(metaclass=ActionMeta):\n\n def __init__(self):\n self.log = logging.getLogger(__name__)\n self.log.setLevel(logging.DEBUG)\n\n def run(self, action={'action': 'empty'}, params={}, testbed={}):\n name = action.get('action', 'empty')\n if not hasattr(self, name):\n name = 'empty'\n return getattr(self, name)(action, params, testbed)\n\n def run_banner(self, action):\n if 'banner' in action:\n self.log.debug(banner(action['banner']))\n\n def run_log(self, action):\n if 'log' in action:\n self.log.debug(action['log'])\n", "id": "8456312", "language": "Python", "matching_score": 2.1036674976348877, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/pipeline/testspec.py" }, { "content": "from .testspec import TestSpec\n\n__all__ = (\n 'TestSpec',\n)", "id": "4106296", "language": "Python", "matching_score": 0.1644422709941864, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/pipeline/__init__.py" }, { "content": "\"\"\"PasswordType type implementation\n\"\"\"\n\n__all__ = (\n 'PasswordType',\n )\n\nfrom enum import Enum\n\n\nclass PasswordType(Enum):\n clear = 'clear'\n encrypted = proprietary = 'encrypted'\n md5 = 'md5'\n\n", "id": "6314792", "language": "Python", "matching_score": 
0.8509811162948608, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/base/password_type.py" }, { "content": "''' triggerClear template'''\n\n# import ats\nfrom ats import aetest\n\n# import genie infra\nfrom genie.harness.base import Trigger\n\n\nclass TriggerClear(Trigger):\n ''' Template for all Clear triggers\n\n TriggerClear will follow the following steps:\n 1. Take pre snapshot for ops before clear action.\n 2. Execute clear command.\n 3. Verify the clear command to see if it works.\n '''\n\n @aetest.test\n def verify_prerequisite(self):\n raise NotImplementedError\n\n @aetest.test\n def clear(self):\n raise NotImplementedError\n\n @aetest.test\n def verify_clear(self):\n raise NotImplementedError\n", "id": "2274314", "language": "Python", "matching_score": 2.801140785217285, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/template/clear.py" }, { "content": "''' triggerDisableEnable template'''\n\n# import ats\nfrom ats import aetest\n\n# import genie infra\nfrom genie.harness.base import Trigger\n\n\nclass TriggerDisableEnable(Trigger):\n ''' Template for all DisableEnable triggers\n\n triggerDisableEnable will follow the following steps:\n\n 1. Extract the information from ops object.\n\n 2. Store configurations via checkpoint or tftp file.\n\n 3. Disable feature via cli \"no feature <feature>\".\n\n 4. Verify the feature is disabled via \"show feature\".\n\n 5. Enable feature via checkpoint or tftp file.\n\n 6. Verify the feature is enabled via \"show feature\" and\n compare ops to original ops before the action.\n\n '''\n\n @aetest.test\n def verify_prerequisite(self):\n raise NotImplementedError\n\n @aetest.test\n def save_configuration(self):\n raise NotImplementedError\n\n @aetest.test\n def disable(self):\n raise NotImplementedError\n\n @aetest.test\n def verify_disable(self):\n raise NotImplementedError\n\n @aetest.test\n def restore_configuration(self):\n raise NotImplementedError\n\n @aetest.test\n def verify_initial_state(self):\n raise NotImplementedError\n\n\nclass TriggerDisableEnableReqHandler(Trigger):\n ''' Template for all DisableEnable triggers\n\n triggerDisableEnable will follow the following steps:\n\n 1. Extract the information from ops object.\n\n 2. Store configurations via checkpoint or tftp file.\n\n 3. Handle the configuration dependencies before disabling\n the feature if needed. Ex. the portchannel interfaces\n should be removed when disabling lacp feature.\n\n 4. Disable feature via cli \"no feature <feature>\".\n\n 5. Verify the feature is disabled via \"show feature\".\n\n 6. Handle the configuration dependencies before enabling\n the feature if needed. Ex. Adding dynamic vlans back.\n\n 7. Enable feature via checkpoint or tftp file.\n\n 8. 
Verify the feature is enabled via \"show feature\" and\n compare ops to original ops before the action.\n\n '''\n\n @aetest.test\n def verify_prerequisite(self):\n raise NotImplementedError\n\n @aetest.test\n def save_configuration(self):\n raise NotImplementedError\n\n @aetest.test\n def pre_handle_dependency(self):\n raise NotImplementedError\n\n @aetest.test\n def disable(self):\n raise NotImplementedError\n\n @aetest.test\n def verify_disable(self):\n raise NotImplementedError\n\n @aetest.test\n def post_handle_dependency(self):\n raise NotImplementedError\n\n @aetest.test\n def restore_configuration(self):\n raise NotImplementedError\n\n @aetest.test\n def verify_initial_state(self):\n raise NotImplementedError\n", "id": "3154342", "language": "Python", "matching_score": 2.407987356185913, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/template/disableenable.py" }, { "content": "'''HA triggers template'''\n\nfrom ats import aetest\nfrom genie.harness.base import Trigger\n\nclass TriggerSwitchover(Trigger):\n ''' Template for all Switchover triggers\n\n A Switchover trigger is defined in 4 main steps:\n\n 1. Verify if we can do switchover on the device,\n get RP information.\n 2. Do switchover\n 3. Verify the active and standby RP are switched\n 4. Update the global/local verifications, and PTS if\n the features are enabled.\n '''\n @aetest.setup\n def verify_prerequisite(self, uut, abstract, steps, timeout):\n raise NotImplementedError\n\n @aetest.test\n def switchover(self, uut, abstract, steps):\n raise NotImplementedError\n\n @aetest.test\n def verify_switchover(self, uut, abstract, steps):\n raise NotImplementedError\n\n @aetest.test\n def update_platform(self, uut, abstract, steps):\n raise NotImplementedError\n\n\nclass TriggerReload(Trigger):\n ''' Template for all Reload triggers\n\n A Reload trigger is defined in 4 main steps:\n\n 1. Verify if we can do reload on the device,\n get RP information.\n 2. Do reload\n 3. Verify the device status is back ready.\n 4. Update the global/local verifications, and PTS if\n the features are enabled.\n '''\n @aetest.setup\n def verify_prerequisite(self, uut, abstract, steps, timeout):\n raise NotImplementedError\n\n @aetest.test\n def reload(self, uut, abstract, steps):\n raise NotImplementedError\n\n @aetest.test\n def verify_reload(self, uut, abstract, steps):\n raise NotImplementedError\n\n @aetest.test\n def update_platform(self, uut, abstract, steps):\n raise NotImplementedError\n\n\n
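# Illustrative sketch (comments only; not part of the original module):\n# concrete platform triggers subclass these templates and implement each\n# aetest step; hypothetical subclass and command for illustration only:\n#\n# class TriggerReloadIosxe(TriggerReload):\n# @aetest.test\n# def reload(self, uut, abstract, steps):\n# uut.execute('reload') # hypothetical reload command\n\n\n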
class TriggerIssu(Trigger):\n ''' Template for all ISSU triggers\n\n An ISSU trigger is defined in 4 main steps:\n\n 1. Verify if we can perform ISSU on the device, get RP information.\n 2. Perform ISSU on standby RP\n 3. Verify the image has been changed on both active/standby RPs\n 4. Update the global/local verifications, and PTS if\n the features are enabled.\n '''\n\n @aetest.setup\n def verify_prerequisite(self, uut, abstract, steps, timeout):\n raise NotImplementedError\n\n @aetest.setup\n def prepare_issu(self, uut, abstract):\n raise NotImplementedError\n\n @aetest.test\n def perform_issu(self, uut, abstract, steps):\n raise NotImplementedError\n\n @aetest.test\n def verify_issu(self, uut, abstract, steps):\n raise NotImplementedError\n\n @aetest.test\n def update_platform(self, uut, abstract, steps):\n raise NotImplementedError\n", "id": "10174221", "language": "Python", "matching_score": 2.778808355331421, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/template/ha.py" }, { "content": "'''TriggerCheckCommands template'''\n\nfrom ats import aetest\nfrom genie.harness.base import Trigger\n\n\nclass TriggerCheckCommands(Trigger):\n ''' Template for all TriggerCheckCommands triggers\n '''\n\n @aetest.setup\n def stack_show_switch(self):\n raise NotImplementedError\n\n @aetest.test\n def verify_show_inventory(self):\n raise NotImplementedError\n\n @aetest.test\n def verify_show_version(self):\n raise NotImplementedError\n\n @aetest.test\n def stack_ha_redundancy_state(self):\n raise NotImplementedError\n\n @aetest.test\n def verify_show_module(self):\n raise NotImplementedError\n\n @aetest.test\n def verify_show_environment(self):\n raise NotImplementedError\n\n @aetest.test\n def verify_show_platform(self):\n raise NotImplementedError\n\n @aetest.test\n def verify_show_power_inline(self):\n raise NotImplementedError\n\n", "id": "6434038", "language": "Python", "matching_score": 2.854020595550537, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/template/checkcommands.py" }, { "content": "'''Common implementation for checkcommands triggers'''\n\n# import genie.libs\nfrom genie.libs.sdk.triggers.template.checkcommands import \\\n TriggerCheckCommands as CheckCommandsTemplate\n\n\nclass TriggerCheckCommands(CheckCommandsTemplate):\n '''Trigger class for check show commands action'''\n pass", "id": "1661180", "language": "Python", "matching_score": 0.7116069793701172, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/xe_sanity/checkcommands/checkcommands.py" }, { "content": "''' implementation for Vlan shut/noshut triggers'''\n\n# import python\nimport time\n\n# Genie Libs\nfrom genie.libs.sdk.libs.utils.mapping import Mapping\nfrom genie.libs.sdk.triggers.shutnoshut.shutnoshut import \\\n TriggerShutNoShut\n\n", "id": "47943", "language": "Python", "matching_score": 0.5940656065940857, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/shutnoshut/vlan/shutnoshut.py" }, { "content": "'''Implementation for TRM unconfigconfig triggers'''\n", "id": "8677964", "language": "Python", "matching_score": 0.8991696238517761, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/unconfigconfig/trm/unconfigconfig.py" }, { "content": "'''Implementation for TRM addremove triggers'''\n\n", "id": "10027297", "language": "Python", "matching_score": 1, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/addremove/trm/addremove.py" }, { "content": "from .vxlan import *\n", "id": "2032157", "language": "Python", "matching_score": 0, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/vxlan/__init__.py" }, { "content": "\"\"\"Utility functions for Model Pipeline Testing.\"\"\"\n\n\nclass DataRetriever:\n\n def _get_data(self, data_content, 
content_type, source, data):\n if content_type in ['string', 'opfields']:\n return data_content.get(source)\n elif content_type == 'xpath':\n namespace = data_content.get('namespace')\n if isinstance(namespace, dict):\n # Already retrieved this from data reference\n return data_content\n else:\n # Get the reference namespace from data\n namespace = data.get(namespace)\n data_content['namespace'] = namespace.get('content')\n return data_content\n elif content_type == 'reference':\n # Resolve the reference key against the full data dict\n return self.get_reference_data(\n data_content.get(source),\n data,\n source\n )\n elif content_type == 'file':\n return self.get_file_data(\n data_content.get('filename'),\n source\n )\n\n @classmethod\n def get_content_data(cls, action, data):\n content_idx = action.get('content', {})\n if not content_idx:\n return []\n content = data.get(content_idx)\n if not content:\n # no expected content data\n return content\n if isinstance(content, dict):\n content_type = content.get('type', 'string')\n else:\n return content\n return cls._get_data(cls, content, content_type, 'content', data)\n\n @classmethod\n def get_returns_data(cls, action, data):\n content_idx = action.get('returns', {})\n if not content_idx:\n return []\n content = data.get(content_idx)\n if not content:\n # no expected return data\n return content\n if isinstance(content, dict):\n content_type = content.get('type', 'string')\n else:\n return content\n return cls._get_data(cls, content, content_type, 'returns', data)\n\n @classmethod\n def get_reference_data(cls, ref, data, source):\n ref_data = data.get(ref)\n ref_type = ref_data.get('type')\n if ref_type == 'reference':\n return cls.get_reference_data(ref_data.get(source), data, source)\n else:\n return cls._get_data(cls, ref_data, ref_type, source, data)\n\n @classmethod\n def get_file_data(cls, filename, source):\n # TODO: add this handling\n return ''\n\n @classmethod\n def get_data(cls, action, data):\n return (\n cls.get_content_data(action, data),\n cls.get_returns_data(action, data)\n )\n", "id": "5074495", "language": "Python", "matching_score": 0.8514403700828552, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/pipeline/utility.py" }, { "content": "import os\nimport re\nimport json\nimport logging\nfrom six import string_types\nfrom cisco_gnmi.client import proto\n\nlog = logging.getLogger(__name__)\n\n\ndef get_prefix(origin):\n # TODO: calculate a prefix instead of combining config?\n if origin == 'openconfig':\n # No prefix support for openconfig\n return None\n prefix_path = proto.gnmi_pb2.Path()\n return prefix_path\n\n\ndef combine_configs(payload, last_xpath, cfg):\n \"\"\"Walking from end to finish, 2 xpaths merge, so combine them.\n |--config\n |---last xpath config--|\n ----| |--config\n |\n | pick these up --> |--config\n |---this xpath config--|\n |--config\n Parameters\n ----------\n payload: dict of partial payload\n last_xpath: last xpath that was processed\n cfg: tuple of (xpath, config dict, is_key) for the colliding xpath\n \"\"\"\n xpath, config, is_key = cfg\n lp = last_xpath.split(\"/\")\n xp = xpath.split(\"/\")\n base = []\n top = \"\"\n for i, seg in enumerate(zip(lp, xp)):\n if seg[0] != seg[1]:\n top = seg[1]\n break\n base = \"/\" + \"/\".join(xp[i:])\n cfg = (base, config, False)\n extended_payload = {top: xpath_to_json([cfg])}\n payload.update(extended_payload)\n return payload\n\n\n
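# Illustrative note (comments only; not part of the original module): the\n# helpers below pass configs around as (xpath, config, is_key) tuples;\n# get_payload() builds them from {xpath: {leaf: value}} dicts and\n# xpath_to_json() folds them into the nested dict that becomes a gNMI\n# JSON payload.\n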
def xpath_to_json(configs, last_xpath=\"\", payload={}):\n \"\"\"Try to combine Xpaths/values into a common payload (recursive).\n\n Parameters\n ----------\n configs: tuple of xpath/value dict\n last_xpath: str of last xpath that was recursively processed.\n payload: dict being recursively built for JSON transformation.\n\n Returns\n -------\n dict of combined xpath/value dict.\n \"\"\"\n for i, cfg in enumerate(configs, 1):\n xpath, config, is_key = cfg\n if last_xpath and xpath not in last_xpath:\n # Branched config here |---config\n # |---last xpath config--|\n # --| |---config\n # |---this xpath config\n payload = combine_configs(payload, last_xpath, cfg)\n return xpath_to_json(configs[i:], xpath, payload)\n xpath_segs = xpath.split(\"/\")\n xpath_segs.reverse()\n for seg in xpath_segs:\n if not seg:\n continue\n if payload:\n if is_key:\n if seg in payload:\n if isinstance(payload[seg], list):\n payload[seg].append(config)\n elif isinstance(payload[seg], dict):\n payload[seg].update(config)\n else:\n payload.update(config)\n payload = {seg: [payload]}\n else:\n config.update(payload)\n payload = {seg: config}\n return xpath_to_json(configs[i:], xpath, payload)\n else:\n if is_key:\n payload = {seg: [config]}\n else:\n payload = {seg: config}\n return xpath_to_json(configs[i:], xpath, payload)\n return payload\n\n\n# Pattern to detect keys in an xpath\nRE_FIND_KEYS = re.compile(r\"\\[.*?\\]\")\n\n\ndef get_payload(configs):\n \"\"\"Common Xpaths were detected so try to consolidate them.\n\n Parameter\n ---------\n configs: list of {xpath: {name: value}} dicts\n \"\"\"\n # Number of updates are limited so try to consolidate into lists.\n xpaths_cfg = []\n first_key = set()\n # Find first common keys for all xpaths_cfg of collection.\n for config in configs:\n xpath = next(iter(config.keys()))\n\n # Change configs to tuples (xpath, config) for easier management\n xpaths_cfg.append((xpath, config[xpath]))\n\n xpath_split = xpath.split(\"/\")\n for seg in xpath_split:\n if \"[\" in seg:\n first_key.add(seg)\n break\n\n # Common first key/configs represents one GNMI update\n updates = []\n for key in first_key:\n update = []\n remove_cfg = []\n for config in xpaths_cfg:\n xpath, cfg = config\n if key in xpath:\n update.append(config)\n else:\n for k, v in cfg.items():\n if '[{0}=\"{1}\"]'.format(k, v) not in key:\n break\n else:\n # This cfg sets the first key so we don't need it\n remove_cfg.append((xpath, cfg))\n if update:\n for upd in update:\n # Remove this config out of main list\n xpaths_cfg.remove(upd)\n for rem_cfg in remove_cfg:\n # Sets a key in update path so remove it\n xpaths_cfg.remove(rem_cfg)\n updates.append(update)\n break\n\n # Add remaining configs to updates\n if xpaths_cfg:\n updates.append(xpaths_cfg)\n\n # Combine all xpath configs of each update if possible\n xpaths = []\n compressed_updates = []\n for update in updates:\n xpath_consolidated = {}\n config_compressed = []\n for seg in update:\n xpath, config = seg\n if xpath in xpath_consolidated:\n xpath_consolidated[xpath].update(config)\n else:\n xpath_consolidated[xpath] = config\n config_compressed.append((xpath, xpath_consolidated[xpath]))\n xpaths.append(xpath)\n\n # Now get the update path for this batch of configs\n common_xpath = os.path.commonprefix(xpaths)\n cfg_compressed = []\n keys = []\n\n # Need to reverse the configs to build the dict correctly\n config_compressed.reverse()\n compressed_count = 0\n for seg in config_compressed:\n is_key = False\n prepend_path = \"\"\n xpath, config = seg\n end_path = xpath[len(common_xpath):]\n if not end_path:\n prepend_path = common_xpath\n elif end_path.startswith(\"[\"):\n # Don't start 
payload with a list\n tmp = common_xpath.split(\"/\")\n prepend_path = \"/\" + tmp.pop()\n common_xpath = \"/\".join(tmp)\n end_path = prepend_path + end_path\n\n # Building json, need to identify configs that set keys\n for key in keys:\n if [k for k in config.keys() if k in key]:\n is_key = True\n keys += re.findall(RE_FIND_KEYS, end_path)\n cfg_compressed.append((end_path, config, is_key))\n compressed_count += 1\n\n update = (common_xpath, cfg_compressed)\n compressed_updates.append(update)\n\n updates = []\n if compressed_count == 1:\n common_xpath, cfg = compressed_updates[0]\n xpath, payload, is_key = cfg[0]\n updates.append({xpath: payload})\n else:\n for update in compressed_updates:\n common_xpath, cfgs = update\n payload = xpath_to_json(cfgs)\n updates.append({common_xpath: payload})\n return updates\n\n\n
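# Illustrative sketch (comments only; not part of the original module):\n# get_payload() consolidates per-leaf configs sharing a common path into\n# one gNMI update, e.g. the two entries below share the prefix\n# '/acl/acl-sets/acl-set' and are folded into a single update:\n#\n# [{'/acl/acl-sets/acl-set': {'name': 'testacl'}},\n# {'/acl/acl-sets/acl-set': {'type': 'ACL_IPV4'}}]\n#\n# See the __main__ demo at the bottom of this module for a full worked\n# example.\n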
message[\"replace\"].append({xpath: {name: value}})\n else:\n if not message[\"update\"]:\n message[\"update\"] = [{xpath: {name: value}}]\n else:\n message[\"update\"].append({xpath: {name: value}})\n elif edit_op in [\"delete\", \"remove\"]:\n if message[\"delete\"]:\n message[\"delete\"].add(xpath)\n else:\n message[\"delete\"] = set(xpath)\n else:\n message[\"get\"].append(xpath)\n return namespace_modules, message, origin\n\n\nif __name__ == \"__main__\":\n from pprint import pprint as pp\n\n request = {\n \"namespace\": {\"oc-acl\": \"http://openconfig.net/yang/acl\"},\n \"nodes\": [\n {\n \"value\": \"testacl\",\n \"xpath\": \"/oc-acl:acl/oc-acl:acl-sets/oc-acl:acl-set/name\",\n \"edit-op\": \"merge\",\n },\n {\n \"value\": \"ACL_IPV4\",\n \"xpath\": \"/oc-acl:acl/oc-acl:acl-sets/oc-acl:acl-set/type\",\n \"edit-op\": \"merge\",\n },\n {\n \"value\": \"10\",\n \"xpath\": '/oc-acl:acl/oc-acl:acl-sets/oc-acl:acl-set[name=\"testacl\"][type=\"ACL_IPV4\"]/oc-acl:acl-entries/oc-acl:acl-entry/oc-acl:sequence-id',\n \"edit-op\": \"merge\",\n },\n {\n \"value\": \"20.20.20.1/32\",\n \"xpath\": '/oc-acl:acl/oc-acl:acl-sets/oc-acl:acl-set[name=\"testacl\"][type=\"ACL_IPV4\"]/oc-acl:acl-entries/oc-acl:acl-entry[sequence-id=\"10\"]/oc-acl:ipv4/oc-acl:config/oc-acl:destination-address',\n \"edit-op\": \"merge\",\n },\n {\n \"value\": \"IP_TCP\",\n \"xpath\": '/oc-acl:acl/oc-acl:acl-sets/oc-acl:acl-set[name=\"testacl\"][type=\"ACL_IPV4\"]/oc-acl:acl-entries/oc-acl:acl-entry[sequence-id=\"10\"]/oc-acl:ipv4/oc-acl:config/oc-acl:protocol',\n \"edit-op\": \"merge\",\n },\n {\n \"value\": \"10.10.10.10/32\",\n \"xpath\": '/oc-acl:acl/oc-acl:acl-sets/oc-acl:acl-set[name=\"testacl\"][type=\"ACL_IPV4\"]/oc-acl:acl-entries/oc-acl:acl-entry[sequence-id=\"10\"]/oc-acl:ipv4/oc-acl:config/oc-acl:source-address',\n \"edit-op\": \"merge\",\n },\n {\n \"value\": \"DROP\",\n \"xpath\": '/oc-acl:acl/oc-acl:acl-sets/oc-acl:acl-set[name=\"testacl\"][type=\"ACL_IPV4\"]/oc-acl:acl-entries/oc-acl:acl-entry[sequence-id=\"10\"]/oc-acl:actions/oc-acl:config/oc-acl:forwarding-action',\n \"edit-op\": \"merge\",\n },\n ],\n }\n modules, message, origin = xml_path_to_path_elem(request)\n pp(modules)\n pp(message)\n pp(origin)\n \"\"\"\n # Expected output\n =================\n {'oc-acl': 'openconfig-acl'}\n {'delete': [],\n 'get': [],\n 'replace': [],\n 'update': [{'/acl/acl-sets/acl-set': {'name': 'testacl'}},\n {'/acl/acl-sets/acl-set': {'type': 'ACL_IPV4'}},\n {'/acl/acl-sets/acl-set[name=\"testacl\"][type=\"ACL_IPV4\"]/acl-entries/acl-entry': {'sequence-id': '10'}},\n {'/acl/acl-sets/acl-set[name=\"testacl\"][type=\"ACL_IPV4\"]/acl-entries/acl-entry[sequence-id=\"10\"]/ipv4/config': {'destination-address': '20.20.20.1/32'}},\n {'/acl/acl-sets/acl-set[name=\"testacl\"][type=\"ACL_IPV4\"]/acl-entries/acl-entry[sequence-id=\"10\"]/ipv4/config': {'protocol': 'IP_TCP'}},\n {'/acl/acl-sets/acl-set[name=\"testacl\"][type=\"ACL_IPV4\"]/acl-entries/acl-entry[sequence-id=\"10\"]/ipv4/config': {'source-address': '10.10.10.10/32'}},\n {'/acl/acl-sets/acl-set[name=\"testacl\"][type=\"ACL_IPV4\"]/acl-entries/acl-entry[sequence-id=\"10\"]/actions/config': {'forwarding-action': 'DROP'}}]}\n 'openconfig'\n \"\"\"\n", "id": "3154763", "language": "Python", "matching_score": 2.230100154876709, "max_stars_count": 15, "path": "connector/src/yang/connector/xpath_util.py" }, { "content": "import logging\nimport traceback\nfrom copy import deepcopy\nfrom jinja2 import Template\nfrom ncclient.operations import RaiseMode\nfrom ats.log.utils import 
banner\nfrom .rpcbuilder import YSNetconfRPCBuilder\nfrom .rpcverify import RpcVerify\nfrom .utility import DataRetriever\n\nlog = logging.getLogger(__name__)\nlog.setLevel(logging.DEBUG)\n\n\ndef insert_variables(text, variables):\n if not text or not variables:\n # no op\n return text\n tplt_standard = Template(text)\n # standard identifiers for template\n new_text = tplt_standard.render(variables)\n # replay generator uses special identifiers in template\n tplt_special = Template(new_text,\n variable_start_string='_-',\n variable_end_string='-_')\n\n return tplt_special.render(variables)\n\n\ndef try_lock(uut, target, timer=30, sleeptime=1):\n \"\"\"Tries to lock the datastore to perform edit-config operation.\n\n Attempts to acquire the lock on the datastore. If the lock is denied,\n retries until the specified timer expires.\n\n Helper function to :func:`lock_datastore`.\n\n Args:\n uut (object): Device with an active NETCONF session.\n target (str): Datastore to be locked.\n timer (int): Lock retry counter.\n sleeptime (int): Seconds to sleep between retries.\n\n Returns:\n bool: True if datastore was successfully locked, else False.\n \"\"\"\n lock_retry_errors = ['lock-denied', 'resource-denied',\n 'in-use', 'operation-failed']\n for counter in range(1, timer+1):\n ret = uut.nc.lock(target=target)\n if ret.ok:\n return True\n retry = False\n if ret.error.tag in lock_retry_errors:\n retry = True\n if not retry:\n log.error('ERROR - CANNOT ACQUIRE LOCK - {0}'.format(\n ret.error.tag))\n break\n elif counter < timer:\n log.info(\"RETRYING LOCK - {0}\".format(counter))\n sleep(sleeptime)\n else:\n log.error('ERROR - LOCKING FAILED. RETRY TIMER EXCEEDED!!!')\n return False\n\n\ndef netconf_send(uut, rpcs, lock=True, lock_retry=40, timeout=30):\n \"\"\"Handle NETCONF messaging with exceptions caught by pyATS.\"\"\"\n if not uut.nc.connected:\n uut.nc.connect()\n\n result = []\n\n for nc_op, kwargs in rpcs:\n\n try:\n ret = ''\n\n if nc_op == 'edit-config':\n if lock:\n try_lock(uut, kwargs['target'], timer=lock_retry)\n\n ret = uut.nc.edit_config(**kwargs)\n if ret.ok and kwargs.get('target', '') == 'candidate':\n ret = uut.nc.commit()\n if lock:\n uut.nc.unlock(target=kwargs['target'])\n\n elif nc_op == 'commit':\n ret = uut.nc.commit()\n\n elif nc_op == 'get-config':\n ret = uut.nc.get_config(**kwargs)\n\n elif nc_op == 'get':\n ret = uut.nc.get(**kwargs)\n\n elif nc_op == 'rpc':\n target = 'running'\n if 'edit-config' in rpcs and lock:\n if 'candidate/>' in rpcs:\n target = 'candidate'\n try_lock(uut, target, timer=lock_retry)\n\n # raw return\n reply = uut.nc.request(rpcs)\n\n if 'edit-config' in rpcs and lock:\n uut.nc.unlock(target)\n return reply\n\n if ret.ok:\n result.append((nc_op, str(ret)))\n\n else:\n log.error(\"NETCONF Reply with error(s):\")\n\n for rpcerror in ret.errors:\n if rpcerror.message:\n log.error(\"ERROR MESSAGE - {0}\".format(\n rpcerror.message))\n\n if hasattr(ret, 'xml') and ret.xml is not None:\n result.append((nc_op, ret.xml))\n except Exception:\n if lock:\n uut.nc.unlock(target=kwargs['target'])\n log.error(traceback.format_exc())\n result.append(('traceback', ''))\n continue\n\n return result\n\n\n
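# Illustrative usage sketch (comments only; not part of the original\n# module), assuming 'uut' is a connected pyATS device with a NETCONF\n# alias (uut.nc):\n#\n# result = netconf_send(uut, [('get-config', {'source': 'running'})],\n# lock=False)\n# for op, reply in result:\n# log.info('{0}: {1}'.format(op, reply[:80]))\n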
def gen_ncclient_rpc(rpc_data, prefix_type=\"minimal\"):\n \"\"\"Construct the XML Element(s) needed for the given config dict.\n\n Helper function to :func:`gen_rpc_api`.\n\n Creates lxml Element instances specific to what :mod:`ncclient` is looking\n for per netconf protocol operation.\n\n .. note::\n Unlike :func:`gen_raw_rpc`, the XML generated here will NOT be declared\n to the netconf 1.0 namespace but instead any NETCONF XML elements\n will be left un-namespaced.\n\n This is so that :mod:`ncclient` can select the appropriate\n namespace (1.0, 1.1, etc.) as needed for the session.\n\n Args:\n rpc_data (dict): Relevant keys - 'operation', 'datastore', 'nodes'.\n prefix_type (str): One of \"always\" (prefer namespace prefixes) or\n \"minimal\" (prefer unprefixed namespaces)\n\n Returns:\n list: of lists [protocol operation, kwargs], or None\n\n Raises:\n ysnetconf.RpcInputError: if rpc_data is invalid;\n see :meth:`YSNetconfRPCBuilder.get_payload`.\n \"\"\"\n if not rpc_data:\n log.warning(\"No configuration sent for RPC generation\")\n return None\n\n datastore = rpc_data.get('datastore')\n prt_op = rpc_data['operation']\n with_defaults = rpc_data.get('with-defaults', '')\n\n # Add prefixes for all NETCONF containers\n rpcbuilder = YSNetconfRPCBuilder(prefix_namespaces=\"always\")\n\n container = None\n\n if prt_op == 'edit-config':\n container = rpcbuilder.netconf_element('config')\n elif prt_op == 'get-config':\n container = rpcbuilder.netconf_element('filter')\n elif prt_op == 'get':\n container = rpcbuilder.netconf_element('filter')\n elif prt_op == 'action':\n container = rpcbuilder.yang_element('action')\n else:\n container = rpcbuilder.netconf_element('TEMPORARY')\n\n # Now create the builder for the payload\n rpcbuilder = YSNetconfRPCBuilder(\n prefix_namespaces=prefix_type,\n nsmap=rpc_data.get('namespace', {}),\n netconf_ns=None\n )\n # XML so all the values must be string or bytes type\n nodes = []\n for node in rpc_data['nodes']:\n if 'value' in node:\n node['value'] = str(node['value'])\n nodes.append(node)\n\n rpcbuilder.get_payload(nodes, container)\n\n kwargs = {}\n if prt_op == \"rpc\":\n # The outer container is temporary - the child element(s) created\n # should be the actual raw RPC(s), which is what we want to return\n return [[prt_op, {'rpc_command': elem}] for elem in container]\n\n if prt_op == 'edit-config':\n kwargs['target'] = datastore\n if len(container):\n kwargs['config'] = container\n elif prt_op == 'get-config':\n kwargs['source'] = datastore\n if len(container):\n kwargs['filter'] = container\n if with_defaults:\n kwargs['with_defaults'] = with_defaults\n elif prt_op == 'get':\n if len(container):\n kwargs['filter'] = container\n if with_defaults:\n kwargs['with_defaults'] = with_defaults\n elif prt_op == 'action':\n kwargs['rpc_command'] = container\n\n return prt_op, kwargs\n\n\n
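# Illustrative sketch (comments only; not part of the original module):\n# minimal rpc_data accepted by gen_ncclient_rpc(); the xpath and\n# namespace values are assumptions for illustration:\n#\n# rpc_data = {\n# 'operation': 'edit-config',\n# 'datastore': 'running',\n# 'namespace': {'ios': 'http://cisco.com/ns/yang/Cisco-IOS-XE-native'},\n# 'nodes': [{'xpath': '/ios:native/ios:hostname',\n# 'value': 'router1', 'edit-op': 'merge'}],\n# }\n# prt_op, kwargs = gen_ncclient_rpc(rpc_data)\n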
\"running\"')\n ds = 'running'\n\n rpc_data['datastore'] = ds\n rpc_data['operation'] = action['operation']\n # TODO: add custom rpc support?\n prt_op, kwargs = gen_ncclient_rpc(rpc_data)\n\n result = netconf_send(uut, [(prt_op, kwargs)])\n\n # rpc-reply should show up in NETCONF log\n if not result:\n log.error(banner('NETCONF rpc-reply NOT RECIEVED'))\n return False\n\n errors = [(op, res) for op, res in result if '<rpc-error>' in res]\n\n if errors:\n log.error(banner('NETCONF MESSAGE ERRORED'))\n return False\n\n if rpc_data['operation'] == 'edit-config':\n # Verify the get-config TODO: what do we do with custom rpc's?\n rpc_clone = deepcopy(rpc_data)\n rpc_clone['operation'] = 'get-config'\n rpc_clone['datastore'] = 'running'\n for node in rpc_clone.get('nodes'):\n if 'value' in node:\n node.pop('value')\n if 'edit-op' in node:\n node.pop('edit-op')\n prt_op, kwargs = gen_ncclient_rpc(rpc_clone)\n resp_xml = netconf_send(uut, [(prt_op, kwargs)])\n resp_elements = rpc_verify.process_rpc_reply(resp_xml)\n return rpc_verify.verify_rpc_data_reply(resp_elements, rpc_data)\n elif rpc_data['operation'] == 'get':\n if not opfields:\n log.error(banner('No NETCONF data to compare rpc-reply to.'))\n return False\n resp_elements = rpc_verify.process_rpc_reply(resp_xml)\n return rpc_verify.process_operational_state(resp_elements, opfields)\n\n return True\n", "id": "9428617", "language": "Python", "matching_score": 3.8016200065612793, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/pipeline/yangexec.py" }, { "content": "\"\"\"netconf.py module is a wrapper around the ncclient package.\"\"\"\n\nimport re\nimport atexit\nimport logging\nimport subprocess\nimport datetime\nimport lxml.etree as et\nfrom time import sleep\nfrom threading import Thread, Event\nfrom ncclient import manager\nfrom ncclient import operations\nfrom ncclient import transport\nfrom ncclient.operations.retrieve import GetReply\nfrom ncclient.devices.default import DefaultDeviceHandler\nfrom ncclient.operations.errors import TimeoutExpiredError\n\ntry:\n from pyats.connections import BaseConnection\n from pyats.utils.secret_strings import to_plaintext\n from pyats.log.utils import banner\nexcept ImportError:\n class BaseConnection:\n pass\n\n# create a logger for this module\nlogger = logging.getLogger(__name__)\n\nnccl = logging.getLogger(\"ncclient\")\n# The 'Sending' messages are logged at level INFO.\n# The 'Received' messages are logged at level DEBUG.\n\n\nclass NetconfSessionLogHandler(logging.Handler):\n \"\"\"Logging handler that pretty prints ncclient XML.\"\"\"\n\n parser = et.XMLParser(recover=True)\n\n def emit(self, record):\n if hasattr(record, 'session'):\n try:\n # If the message contains XML, pretty-print it\n record.args = list(record.args)\n\n for i in range(len(record.args)):\n try:\n arg = None\n if isinstance(record.args[i], str):\n arg = record.args[i].encode(\"utf-8\")\n elif isinstance(record.args[i], bytes):\n arg = record.args[i]\n if not arg:\n continue\n start = arg.find(b\"<\")\n end = arg.rfind(b\"]]>]]>\") # NETCONF 1.0 terminator\n if end == -1:\n end = arg.rfind(b\">\")\n if end != -1:\n # Include the '>' character in our range\n end += 1\n if start != -1 and end != -1:\n elem = et.fromstring(arg[start:end], self.parser)\n if elem is None:\n continue\n\n text = et.tostring(elem, pretty_print=True,\n encoding=\"utf-8\")\n record.args[i] = (arg[:start] +\n text +\n arg[end:]).decode()\n except Exception:\n # Pretty print issue so leave record unchanged\n continue\n\n 
record.args = tuple(record.args)\n except Exception:\n # Unable to handle record so leave it unchanged\n pass\n\n\nnccl.addHandler(NetconfSessionLogHandler())\n\n\nclass Netconf(manager.Manager, BaseConnection):\n '''Netconf\n\n Implementation of NetConf connection to devices (NX-OS, IOS-XR or IOS-XE),\n based on pyATS BaseConnection and ncclient.\n\n YAML Example::\n\n devices:\n asr22:\n type: 'ASR'\n tacacs:\n login_prompt: \"login:\"\n password_prompt: \"Password:\"\n username: \"admin\"\n passwords:\n tacacs: admin\n enable: admin\n line: admin\n connections:\n a:\n protocol: telnet\n ip: \"1.2.3.4\"\n port: 2004\n vty:\n protocol : telnet\n ip : \"2.3.4.5\"\n netconf:\n class: yang.connector.Netconf\n ip : \"2.3.4.5\"\n port: 830\n username: admin\n password: <PASSWORD>\n\n Code Example::\n\n >>> from pyats.topology import loader\n >>> testbed = loader.load('/users/xxx/xxx/asr22.yaml')\n >>> device = testbed.devices['asr22']\n >>> device.connect(alias='nc', via='netconf')\n >>> device.nc.connected\n True\n >>> netconf_request = \"\"\"\n ... <rpc message-id=\"101\"\n ... xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\">\n ... <get>\n ... <filter>\n ... <native xmlns=\"http://cisco.com/ns/yang/ned/ios\">\n ... <version>\n ... </version>\n ... </native>\n ... </filter>\n ... </get>\n ... </rpc>\n ... \"\"\"\n >>> reply = device.nc.request(netconf_request)\n >>> print(reply)\n <?xml version=\"1.0\" encoding=\"UTF-8\"?>\n <rpc-reply xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\"\n message-id=\"101\"><data>\n <native xmlns=\"http://cisco.com/ns/yang/ned/ios\">\n <version>16.3</version></native></data></rpc-reply>\n >>> device.nc.disconnect()\n >>> device.nc.connected\n False\n >>>\n\n Attributes\n ----------\n timeout : `int`\n Timeout value in seconds which is used by paramiko channel. By\n default this value is 30 seconds.\n\n client_capabilities : `object`\n Object ncclient.capabilities.Capabilities representing the client's\n capabilities.\n\n server_capabilities : `object`\n Object ncclient.capabilities.Capabilities representing the server's\n capabilities, and it has a list of data models the server supports.\n\n async_mode : `boolean`\n Specify whether operations are executed asynchronously (True) or\n synchronously (False). The default value is False.\n '''\n\n def __init__(self, *args, **kwargs):\n '''\n __init__ instantiates a single connection instance.\n '''\n # set defaults\n kwargs.setdefault('timeout', 30)\n\n # instantiate BaseConnection\n # (could use super...)\n BaseConnection.__init__(self, *args, **kwargs)\n\n # shortwire Ncclient device handling portion\n # and create just the DeviceHandler\n device_handler = DefaultDeviceHandler()\n\n # create the session instance\n session = transport.SSHSession(device_handler)\n\n # load known_hosts file (if available)\n if kwargs.get('hostkey_verify'):\n session.load_known_hosts()\n\n # instantiate ncclient Manager\n # (can't use super due to mro change)\n manager.Manager.__init__(\n self, session=session, device_handler=device_handler,\n timeout=self.timeout)\n\n self.active_notifications = {}\n\n @property\n def session(self):\n '''session\n\n High-level api: return the SSH session object.\n\n Returns\n -------\n\n object\n The SSH session that was created by ncclient.transport.SSHSession.\n '''\n\n return self._session\n\n def connect(self):\n '''connect\n\n High-level api: opens the NetConf connection and exchanges\n capabilities. 
Since the topology YAML file is parsed by BaseConnection,\n the following parameters can be specified in your YAML file.\n\n Parameters\n ----------\n\n host : `string`\n Hostname or IP address to connect to.\n port : `int`, optional\n By default port is 830, but some devices use the default SSH port\n of 22 so this may need to be specified.\n timeout : `int`, optional\n An optional keyed argument to set timeout value in seconds. By\n default this value is 30 seconds.\n username : `string`\n The username to use for SSH authentication.\n password : `string`\n The password used if using password authentication, or the\n passphrase to use for unlocking keys that require it.\n key_filename : `string`\n a filename where the private key to be used can be found.\n allow_agent : `boolean`\n Enables querying SSH agent (if found) for keys. The default value\n is True.\n hostkey_verify : `boolean`\n Enables hostkey verification from ~/.ssh/known_hosts. The default\n value is False.\n look_for_keys : `boolean`\n Enables looking in the usual locations for ssh keys\n (e.g. ~/.ssh/id_*). The default value is True.\n ssh_config : `string`\n Enables parsing of an OpenSSH configuration file, if set to its\n path, e.g. ~/.ssh/config or to True. If the value is True,\n ncclient uses ~/.ssh/config. The default value is None.\n\n Raises\n ------\n\n Exception\n If the YAML file does not have a correct connections section,\n establishing transport to ip:port fails, ssh authentication fails,\n or there are other transport failures.\n\n Note\n ----\n\n There is no return from this method. If something goes wrong, an\n exception will be raised.\n\n\n YAML Example::\n\n devices:\n asr22:\n type: 'ASR'\n tacacs:\n login_prompt: \"login:\"\n password_prompt: \"Password:\"\n username: \"admin\"\n passwords:\n tacacs: admin\n enable: admin\n line: admin\n connections:\n a:\n protocol: telnet\n ip: \"172.16.17.32\"\n port: 2004\n vty:\n protocol : telnet\n ip : \"2.3.4.5\"\n netconf:\n class: yang.connector.Netconf\n ip : \"172.16.17.32\"\n port: 830\n username: admin\n password: <PASSWORD>\n\n Code Example::\n\n >>> from pyats.topology import loader\n >>> testbed = loader.load('/users/xxx/xxx/asr22.yaml')\n >>> device = testbed.devices['asr22']\n >>> device.connect(alias='nc', via='netconf')\n >>>\n\n Expected Results::\n\n >>> device.nc.connected\n True\n >>> for iter in device.nc.server_capabilities:\n ... 
print(iter)\n ...\n urn:ietf:params:xml:ns:yang:smiv2:RFC-1215?module=RFC-1215\n urn:ietf:params:xml:ns:yang:smiv2:SNMPv2-TC?module=SNMPv2-TC\n ...\n >>>\n '''\n\n if self.connected:\n return\n\n logger.debug(self.session)\n if not self.session.is_alive():\n self._session = transport.SSHSession(self._device_handler)\n\n # default values\n defaults = {\n 'host': None,\n 'port': 830,\n 'timeout': 30,\n 'username': None,\n 'password': <PASSWORD>,\n 'key_filename': None,\n 'allow_agent': False,\n 'hostkey_verify': False,\n 'look_for_keys': False,\n 'ssh_config': None,\n }\n defaults.update(self.connection_info)\n\n # remove items\n disregards = ['class', 'model', 'protocol',\n 'async_mode', 'raise_mode', 'credentials']\n defaults = {k: v for k, v in defaults.items() if k not in disregards}\n\n # rename ip -> host, cast to str type\n if 'ip' in defaults:\n defaults['host'] = str(defaults.pop('ip'))\n\n # rename user -> username\n if 'user' in defaults:\n defaults['username'] = str(defaults.pop('user'))\n\n # check credentials\n if self.connection_info.get('credentials'):\n try:\n defaults['username'] = str(\n self.connection_info['credentials']['netconf']['username'])\n except Exception:\n pass\n try:\n defaults['password'] = to_plaintext(\n self.connection_info['credentials']['netconf']['password'])\n except Exception:\n pass\n\n # support sshtunnel\n if 'sshtunnel' in defaults:\n from unicon.sshutils import sshtunnel\n try:\n tunnel_port = sshtunnel.auto_tunnel_add(self.device, self.via)\n if tunnel_port:\n defaults['host'] = self.device.connections[self.via] \\\n .sshtunnel.tunnel_ip\n defaults['port'] = tunnel_port\n except AttributeError as err:\n raise AttributeError(\"Cannot add ssh tunnel. \\\n Connection %s may not have ip/host or port.\\n%s\"\n % (self.via, err))\n del defaults['sshtunnel']\n\n defaults = {k: getattr(self, k, v) for k, v in defaults.items()}\n\n try:\n self.session.connect(**defaults)\n logger.info('NETCONF CONNECTED')\n except Exception:\n if self.session.transport:\n self.session.close()\n raise\n\n @atexit.register\n def cleanup():\n if self.session.transport:\n self.session.close()\n\n def disconnect(self):\n '''disconnect\n\n High-level api: closes the NetConf connection.\n '''\n\n self.session.close()\n\n def subscribe(self, request):\n \"\"\" Creates a notification listener and marks it as active \"\"\"\n notifier = Notification(self, request=request)\n notifier.start()\n if request['format']['request_mode'] == 'ON_CHANGE':\n # Get ready for trigger event\n notifier.event_triggered = True\n self.active_notifications[self] = notifier\n\n def notify_wait(self, steps):\n \"\"\" Activate notification listener and check results \"\"\"\n notifier = self.active_notifications.get(self)\n if notifier:\n if steps.result.code != 1:\n notifier.stop()\n del self.active_notifications[self]\n return\n notifier.event_triggered = True\n logger.info(banner('NOTIFICATION EVENT TRIGGERED'))\n wait_for_sample = notifier.sample_interval - 1\n cntr = 1.0\n while cntr < float(notifier.stream_max):\n logger.info('Listening for notifications from subscribe stream, {} seconds elapsed'.format(\n cntr)\n )\n cntr += 1\n if notifier.result is not None and wait_for_sample <= 0:\n notifier.stop()\n if notifier.result is True:\n steps.passed(\n '\\n' + banner('NOTIFICATION RESPONSE PASSED')\n )\n else:\n steps.failed(\n '\\n' + banner('NOTIFICATION RESPONSE FAILED')\n )\n break\n sleep(1)\n wait_for_sample -= 1\n else:\n notifier.stop()\n steps.failed(\n '\\n' + banner('STREAM TIMED OUT WITHOUT RESPONSE')\n )\n\n if self in self.active_notifications:\n del self.active_notifications[self]\n
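\n # Illustrative sketch (comments only; not part of the original class):\n # subscribe() spawns a Notification thread; notify_wait() then polls\n # the thread result from a pyATS test step, e.g.:\n #\n # device.nc.subscribe(request_dict) # request_dict keys assumed\n # device.nc.notify_wait(steps)\n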
\n def configure(self, msg):\n '''configure\n\n High-level api: configure is a common method of console, vty and ssh\n sessions, however it is not supported by this Netconf class. This is\n just a placeholder in case someone mistakenly calls config method in a\n netconf session. An Exception is raised with an explanation.\n\n Parameters\n ----------\n\n msg : `str`\n Any config CLI that needs to be sent out.\n\n Raises\n ------\n\n Exception\n configure is not a supported method of this Netconf class.\n '''\n\n raise Exception('configure is not a supported method of this Netconf '\n 'class, since a more suitable method, edit_config, is '\n 'recommended. There are nine netconf operations '\n 'defined by RFC 6241, and edit-config is one of them. '\n 'Also users can build any netconf request, including '\n 'invalid netconf request as negative test cases, in '\n 'XML format and send it by method request.')\n\n def execute(self, operation, *args, **kwargs):\n '''execute\n\n High-level api: The fact that most connection classes implement\n execute method led us to add this method here as well.\n Supported operations are get, get_config, get_schema, dispatch,\n edit_config, copy_config, validate, commit, discard_changes,\n delete_config, lock, unlock, close_session, kill_session,\n poweroff_machine and reboot_machine. Refer to ncclient document for\n more details.\n '''\n\n # allow for operation string type\n if type(operation) is str:\n try:\n cls = manager.OPERATIONS[operation]\n except KeyError:\n raise ValueError('No such operation \"%s\".\\n'\n 'Supported operations are: %s' %\n (operation, list(manager.OPERATIONS.keys())))\n else:\n cls = operation\n\n time1 = datetime.datetime.now()\n reply = super().execute(cls, *args, **kwargs)\n time2 = datetime.datetime.now()\n reply.elapsed = time2 - time1\n return reply\n\n def request(self, msg, timeout=30, return_obj=False):\n '''request\n\n High-level api: sends message through NetConf session and returns with\n a reply. An exception is raised if the reply is in the wrong format\n or a timeout occurs. Users can modify timeout value (in seconds) by\n passing parameter timeout. Users may want to set a larger timeout when\n making a large query.\n\n Parameters\n ----------\n\n msg : `str`\n Any message need to be sent out in XML format. The message can be\n in wrong format if it is a negative test case. Because ncclient\n tracks same message-id in both rpc and rpc-reply, missing\n message-id in your rpc may cause exception when receiving\n rpc-reply. Most other wrong format rpc's can be sent without\n exception.\n timeout : `int`, optional\n An optional keyed argument to set timeout value in seconds. Its\n default value is 30 seconds.\n return_obj : `boolean`, optional\n Normally a string is returned as a reply. In other cases, we may\n want to return an RPCReply object, so we can access some attributes,\n e.g., reply.ok or reply.elapsed.\n\n Returns\n -------\n\n str or RPCReply\n The reply from the device as a string. If something goes wrong, an\n exception will be raised. 
If return_obj=True, the reply is an\n RPCReply object.\n\n\n Raises\n ------\n\n Exception\n If NetConf is not connected, or there is a timeout when receiving\n reply.\n\n\n Code Example::\n\n >>> from pyats.topology import loader\n >>> testbed = loader.load('/users/xxx/xxx/asr_20_22.yaml')\n >>> device = testbed.devices['asr22']\n >>> device.connect(alias='nc', via='netconf')\n >>> netconf_request = \"\"\"\n ... <rpc message-id=\"101\"\n ... xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\">\n ... <get>\n ... <filter>\n ... <native xmlns=\"http://cisco.com/ns/yang/Cisco-IOS-XE-native\">\n ... <version>\n ... </version>\n ... </native>\n ... </filter>\n ... </get>\n ... </rpc>\n ... \"\"\"\n >>> reply = device.nc.request(netconf_request)\n >>>\n\n Expected Results::\n\n >>> print(reply)\n <?xml version=\"1.0\" encoding=\"UTF-8\"?>\n <rpc-reply xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\"\n message-id=\"101\"><data>\n <native xmlns=\"http://cisco.com/ns/yang/ned/ios\">\n <version>16.3</version></native></data></rpc-reply>\n >>>\n '''\n\n rpc = RawRPC(session=self.session,\n device_handler=self._device_handler,\n timeout=timeout,\n raise_mode=operations.rpc.RaiseMode.NONE)\n\n # identify message-id\n m = re.search(r'message-id=\"([A-Za-z0-9_\\-:# ]*)\"', msg)\n if m:\n rpc._id = m.group(1)\n rpc._listener.register(rpc._id, rpc)\n logger.debug(\n 'Found message-id=\"%s\" in your rpc, which is good.', rpc._id)\n else:\n logger.warning('Cannot find message-id in your rpc. You may '\n 'expect an exception when receiving rpc-reply '\n 'due to missing message-id.')\n\n if return_obj:\n return rpc._request(msg)\n else:\n return rpc._request(msg).xml\n\n def __getattr__(self, method):\n # avoid the __getattr__ from Manager class\n if hasattr(manager, 'VENDOR_OPERATIONS') and method \\\n in manager.VENDOR_OPERATIONS or method in manager.OPERATIONS:\n return super().__getattr__(method)\n else:\n raise AttributeError(\"'%s' object has no attribute '%s'\"\n % (self.__class__.__name__, method))\n\n\nclass NetconfEnxr():\n \"\"\"Subclass using POSIX pipes to Communicate NETCONF messaging.\"\"\"\n\n chunk = re.compile('(\\n#+\\\\d+\\n)')\n rpc_pipe_err = \"\"\"\n <rpc-reply xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\">\n <rpc-error>\n <error-type>transport</error-type>\n <error-tag>resource-denied</error-tag>\n <error-severity>error</error-severity>\n <error-message>No pipe data returned</error-message>\n </rpc-error>\n </rpc-reply>\"\"\"\n\n def __init__(self, *args, **kwargs):\n self.manager = None\n self.proc = None\n self.buf = None\n self.server_capabilities = None\n\n def get_rpc(self, elements):\n \"\"\"Return string representation of lxml element with rpc.\"\"\"\n rpc_element = et.Element(\n 'rpc',\n attrib={'message-id': '101'},\n nsmap={None: \"urn:ietf:params:xml:ns:netconf:base:1.0\"}\n )\n rpc_element.append(elements)\n return et.tostring(rpc_element,\n pretty_print=True).decode()\n\n def recv_data(self):\n \"\"\"Retrieve data from process pipe.\"\"\"\n if not self.proc:\n logger.info('Not connected.')\n else:\n buf = ''\n while True:\n # TODO: Could be better...1 byte at a time...\n # but, too much buffer and it deadlocks!!\n data = self.proc.stdout.read(1)\n\n if not data:\n return GetReply(self.rpc_pipe_err)\n\n buf += data\n\n if buf.endswith('\\n##'):\n buf = buf[:-3]\n break\n\n logger.info(buf)\n buf = buf[buf.find('<'):]\n reply = re.sub(self.chunk, '', buf)\n return GetReply(reply)\n\n def request(self, rpc):\n return self.send_cmd(rpc)\n
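\n # Illustrative note (comments only; not part of the original class):\n # send_cmd() below frames messages with NETCONF 1.1 chunked encoding\n # (RFC 6242), i.e. '\\n#<length>\\n<rpc>\\n##\\n'; recv_data() strips the\n # chunk markers before wrapping the XML in a GetReply.\n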
\n def configure(self, msg):\n '''configure\n\n High-level api: configure is a common method of console, vty and ssh\n sessions, however it is not supported by this NetconfEnxr class. This is\n just a placeholder in case someone mistakenly calls config method in a\n netconf session. An Exception is raised with an explanation.\n\n Parameters\n ----------\n\n msg : `str`\n Any config CLI that needs to be sent out.\n\n Raises\n ------\n\n Exception\n configure is not a supported method of this Netconf class.\n '''\n\n raise Exception('configure is not a supported method of this NetconfEnxr '\n 'class, since a more suitable method, edit_config, is '\n 'recommended. There are nine netconf operations '\n 'defined by RFC 6241, and edit-config is one of them. '\n 'Also users can build any netconf request, including '\n 'invalid netconf request as negative test cases, in '\n 'XML format and send it by method request.')\n\n def send_cmd(self, rpc):\n \"\"\"Send a message to process pipe.\"\"\"\n if not self.proc:\n logger.info('Not connected.')\n else:\n if et.iselement(rpc):\n if not rpc.tag.endswith('rpc'):\n rpc = self.get_rpc(rpc)\n else:\n rpc = et.tostring(rpc, pretty_print=True).decode()\n rpc_str = '\\n#' + str(len(rpc)) + '\\n' + rpc + '\\n##\\n'\n logger.info(rpc_str)\n self.proc.stdin.write(rpc_str)\n self.proc.stdin.flush()\n\n return self.recv_data()\n\n def edit_config(self, target=None, config=None, **kwargs):\n \"\"\"Send edit-config.\"\"\"\n target = target\n config = config\n target_element = et.Element('target')\n et.SubElement(target_element, target)\n edit_config_element = et.Element('edit-config')\n edit_config_element.append(target_element)\n edit_config_element.append(config)\n return self.send_cmd(self.get_rpc(edit_config_element))\n\n def get_config(self, source=None, filter=None, **kwargs):\n \"\"\"Send get-config.\"\"\"\n source = source\n filter = filter\n source_element = et.Element('source')\n et.SubElement(source_element, source)\n get_config_element = et.Element('get-config')\n get_config_element.append(source_element)\n get_config_element.append(filter)\n return self.send_cmd(self.get_rpc(get_config_element))\n\n def get(self, filter=None, **kwargs):\n filter_arg = filter\n get_element = et.Element('get')\n if isinstance(filter_arg, tuple):\n type, filter_content = filter_arg\n if type == \"xpath\":\n get_element.attrib[\"select\"] = filter_content\n elif type == \"subtree\":\n filter_element = et.Element('filter')\n filter_element.append(filter_content)\n get_element.append(filter_element)\n else:\n get_element.append(filter_arg)\n return self.send_cmd(self.get_rpc(get_element))\n\n def commit(self, **kwargs):\n commit_element = et.Element('commit')\n return self.send_cmd(self.get_rpc(commit_element))\n\n def discard_changes(self, **kwargs):\n discard_element = et.Element('discard-changes')\n return self.send_cmd(self.get_rpc(discard_element))\n\n def lock(self, target=None, **kwargs):\n target = target\n store_element = et.Element(target)\n target_element = et.Element('target')\n target_element.append(store_element)\n lock_element = et.Element('lock')\n lock_element.append(target_element)\n return self.send_cmd(self.get_rpc(lock_element))\n\n def unlock(self, target=None, **kwargs):\n target = target\n store_element = et.Element(target)\n target_element = et.Element('target')\n target_element.append(store_element)\n unlock_element = et.Element('unlock')\n unlock_element.append(target_element)\n return self.send_cmd(self.get_rpc(unlock_element))\n\n def dispatch(self, rpc_command=None, **kwargs):\n rpc 
= rpc_command\n return self.send_cmd(rpc)\n\n @property\n def connected(self):\n \"\"\"Check for active connection.\"\"\"\n\n return self.server_capabilities is not None and self.proc.poll() \\\n is None\n\n def connect(self, timeout=None):\n \"\"\"Connect to ENXR pipe.\"\"\"\n if self.connected:\n msg = 'Already connected'\n\n CMD = ['netconf_sshd_proxy', '-i', '0', '-o', '1', '-u', 'lab']\n BUFSIZE = 8192\n\n p = subprocess.Popen(CMD, bufsize=BUFSIZE,\n stdin=subprocess.PIPE,\n stdout=subprocess.PIPE,\n stderr=subprocess.STDOUT,\n universal_newlines=True)\n\n buf = ''\n try:\n while True:\n data = p.stdout.read(1)\n if not data:\n logger.info('No data received for hello')\n p.terminate()\n return\n\n buf += data\n if buf.endswith(']]>]]>'):\n buf = buf[buf.find('<'):-6]\n logger.info('Hello received')\n break\n\n p.stdin.write(\n '<?xml version=\"1.0\" encoding=\"UTF-8\"?><hello '\n 'xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\"><capabilities>'\n '<capability>urn:ietf:params:netconf:base:1.1</capability>'\n '</capabilities></hello>]]>]]>'\n )\n p.stdin.flush()\n self.proc = p\n self.buf = ''\n elements = et.fromstring(buf)\n self.server_capabilities = [e.text for e in elements.iter()\n if hasattr(e, 'text')]\n # TODO: Notification stream interferes with get-schema\n msg = \"NETCONF CONNECTED PIPE\"\n except:\n msg = 'Not connected, Something went wrong'\n return msg\n\n def disconnect(self):\n \"\"\"Disconnect from ENXR pipe.\"\"\"\n if self.connected:\n self.proc.terminate()\n logger.info(\"NETCONF DISCONNECT PIPE\")\n\n\nclass RawRPC(operations.rpc.RPC):\n '''RawRPC\n\n A modified ncclient.operations.rpc.RPC class. This is for internal use\n only.\n '''\n\n def _request(self, msg):\n '''_request\n\n Override method _request in class ncclient.operations.RPC, so it can\n handle raw rpc requests in string format without validating your rpc\n request syntax. When your rpc-reply is received, in most cases, it\n simply returns rpc-reply again in string format, except one scenario:\n If message-id is missing or message-id received does not match that in\n rpc request, ncclient will raise an OperationError.\n '''\n\n logger.debug('Requesting %r' % self.__class__.__name__)\n logger.info('Sending rpc...')\n logger.info(msg)\n time1 = datetime.datetime.now()\n self._session.send(msg)\n if not self._async:\n logger.debug('Sync request, will wait for timeout=%r' %\n self._timeout)\n self._event.wait(self._timeout)\n if self._event.isSet():\n time2 = datetime.datetime.now()\n self._reply.elapsed = time2 - time1\n logger.info('Receiving rpc-reply after {:.3f} sec...'.\n format(self._reply.elapsed.total_seconds()))\n logger.info(self._reply)\n return self._reply\n else:\n logger.info('Timeout. 
No rpc-reply received.')\n raise TimeoutExpiredError('ncclient timed out while waiting '\n 'for an rpc-reply.')\n\n\nclass Notification(Thread):\n \"\"\" Listens for notifications, decodes, and verifies if any exists \"\"\"\n def __init__(self, device, **request):\n Thread.__init__(self)\n self.device = device\n self.log = logging.getLogger(__name__)\n self.log.setLevel(logging.DEBUG)\n self._stop_event = Event()\n self.request = request\n self._event_triggered = False\n self._stopped = False\n\n @property\n def event_triggered(self):\n return self._event_triggered\n\n @event_triggered.setter\n def event_triggered(self, event_triggered):\n self._event_triggered = event_triggered\n\n @property\n def request(self):\n return self._request\n\n @request.setter\n def request(self, request={}):\n \"\"\" Sets the request property and propagates request's properties to the class \"\"\"\n request_data = request['request']\n self.returns = request_data.get('returns')\n self.response_verify = request_data.get('verifier')\n self.decode_response = request_data.get('decode')\n self.namespace = request_data.get('namespace')\n self.sub_mode = request_data['format'].get('sub_mode', 'SAMPLE')\n self.encoding = request_data['format'].get('encoding', 'PROTO')\n self.sample_interval = request_data['format'].get('sample_interval', 10)\n if self.sub_mode == 'ON_CHANGE':\n self.sample_interval = 0\n self.stream_max = request_data['format'].get('stream_max', 0)\n self.time_delta = 0\n self.result = None\n self._event_triggered = False\n self._request = request_data\n\n def run(self):\n \"\"\" Start taking notifications until subscribe stream times out.\"\"\"\n t1 = datetime.datetime.now()\n t2 = datetime.datetime.now()\n td = t2 - t1\n # Wait until after first sample period if sampling\n wait_for_sample = self.sample_interval - 1\n\n try:\n while self.time_delta < self.stream_max:\n t2 = datetime.datetime.now()\n td = t2 - t1\n\n if self.stopped():\n self.time_delta = self.stream_max\n self.log.info(\"Terminating notification thread\")\n break\n if self.stream_max:\n t2 = datetime.datetime.now()\n td = t2 - t1\n self.time_delta = td.seconds\n if td.seconds > self.stream_max:\n self.stop()\n break\n\n notif = self.device.take_notification(timeout=1)\n\n if notif and wait_for_sample <= 0:\n resp_elements = self.decode_response(\n notif.notification_xml\n )\n if resp_elements and self.returns:\n self.result = self.response_verify(\n resp_elements, self.returns\n )\n self.stop()\n break\n wait_for_sample -= 1\n except Exception as exc:\n self.result = str(exc)\n self.log.error(str(exc))\n\n def stop(self):\n self.log.info(\"Stopping notification stream\")\n self._stop_event.set()\n\n def stopped(self):\n return self._stop_event.is_set()\n", "id": "3422247", "language": "Python", "matching_score": 4.099145889282227, "max_stars_count": 0, "path": "connector/src/yang/connector/netconf.py" }, { "content": "import os\nimport re\nimport logging\nfrom lxml import etree\nfrom ncclient import manager, operations, transport, xml_\nfrom yang.connector import Netconf\n\nfrom .model import Model, ModelDownloader, ModelCompiler\nfrom .config import Config\nfrom .errors import ModelError, ModelMissing\nfrom .composer import Tag, Composer\n\n# create a logger for this module\nlogger = logging.getLogger(__name__)\n\nnc_url = xml_.BASE_NS_1_0\nyang_url = 'urn:ietf:params:xml:ns:yang:1'\ntailf_url = 'http://tail-f.com/ns/netconf/params/1.1'\nncEvent_url = xml_.NETCONF_NOTIFICATION_NS\nconfig_tag = '{' + nc_url + '}config'\nfilter_tag = 
'{' + nc_url + '}filter'\nspecial_prefixes = {\n nc_url: 'nc',\n yang_url: 'yang',\n tailf_url: 'tailf',\n ncEvent_url: 'ncEvent',\n }\n\n\nclass ModelDevice(Netconf):\n '''ModelDevice\n\n Abstraction of a device that supports the NETCONF protocol and YANG models.\n This is a subclass of yang.connector.Netconf with some enhancements.\n\n Attributes\n ----------\n namespaces : `list`\n A list of tuples. Each tuple has three elements: model name, model\n prefix, and model URL. This attribute is only available after\n scan_models() is called.\n\n models_loadable : `list`\n A list of models this ModelDevice instance supports. The information is\n retrieved from attribute server_capabilities.\n\n models_loaded : `list`\n A list of models this ModelDevice instance has loaded. Loading a model\n means the ModelDevice instance has obtained schema information of the\n model.\n\n compiler : `ModelCompiler`\n An instance of ModelCompiler.\n\n models : `dict`\n A dictionary of loaded models. Dictionary keys are model names, and\n values are Model instances.\n\n roots : `dict`\n A dictionary of roots in loaded models. Dictionary keys are roots in\n `{url}tagname` notation, and values are model names.\n '''\n\n def __init__(self, *args, **kwargs):\n '''\n __init__ instantiates a ModelDevice instance.\n '''\n\n Netconf.__init__(self, *args, **kwargs)\n self.models = {}\n self.nodes = {}\n self.compiler = None\n self._models_loadable = None\n\n def __repr__(self):\n return '<{}.{} object at {}>'.format(self.__class__.__module__,\n self.__class__.__name__,\n hex(id(self)))\n\n @property\n def namespaces(self):\n if self.compiler is None:\n raise ValueError('please first call scan_models() to build '\n 'up supported namespaces of a device')\n else:\n device_namespaces = []\n for m in self.compiler.dependencies.findall('./module'):\n device_namespaces.append((m.get('id'),\n m.get('prefix'),\n m.findtext('namespace')))\n return device_namespaces\n\n @property\n def models_loadable(self):\n if self._models_loadable is not None:\n return self._models_loadable\n NC_MONITORING = xml_.NETCONF_MONITORING_NS\n YANG_LIB = 'urn:ietf:params:netconf:capability:yang-library'\n YANG_LIB_1_0 = YANG_LIB + ':1.0'\n NC_MONITORING_FILTER = \"\"\"\n <filter xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\" type=\"subtree\">\n <netconf-state xmlns=\"urn:ietf:params:xml:ns:yang:ietf-netconf-monitoring\">\n <schemas/>\n </netconf-state>\n </filter>\n \"\"\"\n YANG_LIB_FILTER = \"\"\"\n <filter xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\" type=\"subtree\">\n <modules-state xmlns=\"urn:ietf:params:xml:ns:yang:ietf-yang-library\">\n <module/>\n </modules-state>\n </filter>\n \"\"\"\n\n # RFC6022 ietf-netconf-monitoring\n if [c for c in self.server_capabilities\n if c[:len(NC_MONITORING)] == NC_MONITORING]:\n n = {'nc': nc_url, 'ncm': NC_MONITORING}\n p = '/nc:rpc-reply/nc:data/ncm:netconf-state/ncm:schemas' \\\n '/ncm:schema/ncm:identifier'\n try:\n reply = super().execute(operations.retrieve.Get,\n filter=NC_MONITORING_FILTER)\n if reply.ok:\n self._models_loadable = \\\n [n.text for n in reply.data.xpath(p, namespaces=n)]\n self._models_loadable.sort()\n except Exception as e:\n logger.warning(\n \"Error when sending Netconf GET of /netconf-state/schemas \"\n \"from YANG module 'ietf-netconf-monitoring':\\n{}\"\n .format(e))\n else:\n if reply.ok:\n return self._models_loadable\n else:\n logger.warning(\n \"Error in Netconf reply when getting \"\n \"/netconf-state/schemas from YANG module \"\n \"'ietf-netconf-monitoring':\\n{}\".format(reply))\n\n # RFC7950 
section 5.6.4\n if [c for c in self.server_capabilities\n if c[:len(YANG_LIB_1_0)] == YANG_LIB_1_0]:\n n = {'nc': nc_url, 'yanglib': YANG_LIB}\n p = '/nc:rpc-reply/nc:data/yanglib:modules-state' \\\n '/yanglib:module/yanglib:name'\n try:\n reply = super().execute(operations.retrieve.Get,\n filter=YANG_LIB_FILTER)\n if reply.ok:\n self._models_loadable = \\\n [n.text for n in reply.data.xpath(p, namespaces=n)]\n self._models_loadable.sort()\n except Exception as e:\n logger.warning(\n \"Error when sending Netconf GET of /modules-state/module \"\n \"from YANG module 'ietf-yang-library':\\n{}\".format(e))\n else:\n if reply.ok:\n return self._models_loadable\n else:\n logger.warning(\n \"Error in Netconf reply when getting \"\n \"/modules-state/module from YANG module \"\n \"'ietf-yang-library':\\n{}\".format(reply))\n\n # RFC6020 section 5.6.4\n regexp_str = r'module=([a-zA-Z0-9-]+)\\&{0,1}'\n modules = []\n for capability in iter(self.server_capabilities):\n match = re.search(regexp_str, capability)\n if match:\n modules.append(match.group(1))\n self._models_loadable = sorted(modules)\n return self._models_loadable\n\n @property\n def models_loaded(self):\n return sorted(self.models.keys())\n\n @property\n def roots(self):\n roots = {}\n for model in self.models.values():\n roots.update({r: model.name for r in model.roots})\n return roots\n\n def scan_models(self, folder='./yang', download='check'):\n '''scan_models\n\n High-level api: Download models from the device by <get-schema>\n operation defined in RFC6022, and analyze dependencies among models\n using pyang package.\n\n Parameters\n ----------\n\n folder : `str`\n A path to a folder that stores YANG files downloaded.\n\n download : `str`\n A string is either `check` or `force`. If it is `check`, the content\n in the folder is compared with self.server_capabilities. Downloading\n will be skipped if the checking says good. 
If it is `force`,\n downloading starts without checking.\n\n Returns\n -------\n\n None\n Nothing returns.\n\n\n YAML Example::\n\n devices:\n asr22:\n type: 'ASR'\n tacacs:\n login_prompt: \"login:\"\n password_prompt: \"Password:\"\n username: \"admin\"\n passwords:\n tacacs: admin\n enable: admin\n line: admin\n connections:\n a:\n protocol: telnet\n ip: \"1.2.3.4\"\n port: 2004\n vty:\n protocol : telnet\n ip : \"2.3.4.5\"\n netconf:\n class: yang.ncdiff.ModelDevice\n ip : \"2.3.4.5\"\n port: 830\n username: admin\n password: <PASSWORD>\n\n Code Example::\n\n >>> from pyats.topology import loader\n >>> testbed = loader.load('/users/yuekyang/projects/asr21.yaml')\n >>> device = testbed.devices['asr21']\n >>> device.connect(alias='nc', via='netconf')\n >>>\n >>> device.nc.scan_models()\n ...\n >>>\n '''\n\n d = ModelDownloader(self, folder)\n if download == 'force':\n d.download_all(check_before_download=False)\n elif download == 'check':\n d.download_all(check_before_download=True)\n self.compiler = ModelCompiler(folder)\n\n def load_model(self, model):\n '''load_model\n\n High-level api: Load schema information by compiling the model using\n pyang package.\n\n Parameters\n ----------\n\n model : `str`\n Model name.\n\n Returns\n -------\n\n Model\n An instance of Model.\n\n\n YAML Example::\n\n devices:\n asr22:\n type: 'ASR'\n tacacs:\n login_prompt: \"login:\"\n password_prompt: \"Password:\"\n username: \"admin\"\n passwords:\n tacacs: admin\n enable: admin\n line: admin\n connections:\n a:\n protocol: telnet\n ip: \"1.2.3.4\"\n port: 2004\n vty:\n protocol : telnet\n ip : \"2.3.4.5\"\n netconf:\n class: yang.ncdiff.ModelDevice\n ip : \"2.3.4.5\"\n port: 830\n username: admin\n password: <PASSWORD>\n\n Code Example::\n\n >>> from pyats.topology import loader\n >>> testbed = loader.load('/users/yuekyang/projects/asr21.yaml')\n >>> device = testbed.devices['asr21']\n >>> device.connect(alias='nc', via='netconf')\n >>> device.nc.scan_models()\n >>>\n >>> m = device.nc.load_model('openconfig-system')\n >>> print(m)\n ...\n >>>\n '''\n\n if os.path.isfile(model):\n file_name, file_ext = os.path.splitext(model)\n if file_ext.lower() == '.xml':\n logger.debug('Read model file {}'.format(model))\n with open(model, 'r') as f:\n xml = f.read()\n parser = etree.XMLParser(remove_blank_text=True)\n tree = etree.XML(xml, parser)\n m = Model(tree)\n else:\n raise ValueError(\"'{}' is not a file with extension 'xml'\"\n .format(model))\n elif model in self.models_loadable:\n if self.compiler is None:\n raise ValueError('please first call scan_models() to build '\n 'up supported namespaces of a device')\n else:\n m = self.compiler.compile(model)\n else:\n raise ValueError(\"argument 'model' {} needs to be either a model \"\n \"name or a compiled model xml file\".format(model))\n if m.name in self.models:\n self.nodes = {k: v for k, v in self.nodes.items()\n if self.roots[k.split(' ')[0]] != m.name}\n logger.info('Model {} is reloaded'.format(m.name))\n else:\n logger.info('Model {} is loaded'.format(m.name))\n self.models[m.name] = m\n return m\n\n def execute(self, operation, *args, **kwargs):\n '''execute\n\n High-level api: Supported operations are get, get_config, get_schema,\n dispatch, edit_config, copy_config, validate, commit, discard_changes,\n delete_config, lock, unlock, close_session, kill_session,\n poweroff_machine and reboot_machine. Since ModelDevice is a subclass of\n manager in ncclient package, any method supported by ncclient is\n available here. 
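For example, an operation can be given either as an ncclient operations class or by its string name in manager.OPERATIONS; for get and get_config, a 'models' argument may also be supplied, and a subtree filter is then built automatically from the loaded models. The call below is illustrative only, assuming a device connected as in the other examples of this module, with model openconfig-interfaces already loaded:\n\n Code Example::\n\n >>> reply = device.nc.execute('get_config', models='openconfig-interfaces')\n >>> assert(reply.ok)\n >>>\n\n 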
Refer to ncclient document for more details.\n '''\n\n def pop_models():\n models = kwargs.pop('models', None)\n if models is None:\n return None\n else:\n if isinstance(models, str):\n return [models]\n else:\n return models\n\n def check_models(models):\n missing_models = set(models) - set(self.models_loaded)\n if missing_models:\n raise ModelMissing('please load model {} by calling '\n 'method load_model() of device {}'\n .format(str(list(missing_models))[1:-1],\n self))\n\n def build_filter(models, roots):\n if 'filter' in kwargs:\n logger.warning(\"argument 'filter' is ignored as argument \"\n \"'models' is specified\")\n if isinstance(models, str):\n models = [models]\n check_models(models)\n filter_ele = etree.Element(filter_tag, type='subtree')\n for root in roots:\n etree.SubElement(filter_ele, root)\n filter_xml = etree.tostring(filter_ele,\n encoding='unicode',\n pretty_print=False)\n logger.debug(\"argument 'filter' is set to '{}'\".format(filter_xml))\n return filter_ele\n\n def get_access_type(model_name, root):\n check_models([model_name])\n node = list(self.models[model_name].tree.iterchildren(tag=root))[0]\n return node.get('access')\n\n # allow for operation string type\n if type(operation) is str:\n try:\n cls = manager.OPERATIONS[operation]\n except KeyError:\n supported_operations = list(manager.OPERATIONS.keys())\n raise ValueError(\"supported operations are {}, but not '{}'\"\n .format(str(supported_operations)[1:-1],\n operation))\n else:\n cls = operation\n if cls == operations.retrieve.Get:\n models = pop_models()\n if models is not None:\n check_models(models)\n roots = [k for k, v in self.roots.items()\n if v in models and\n (get_access_type(v, k) == 'read-write' or\n get_access_type(v, k) == 'read-only')]\n if not roots:\n raise ValueError('no readable roots found in your '\n 'models: {}'.format(str(models)[1:-1]))\n kwargs['filter'] = build_filter(models, roots)\n elif cls == operations.retrieve.GetConfig:\n if not args and 'source' not in kwargs:\n args = tuple(['running'])\n models = pop_models()\n if models is not None:\n check_models(models)\n roots = [k for k, v in self.roots.items()\n if v in models and\n get_access_type(v, k) == 'read-write']\n if not roots:\n raise ValueError('no writable roots found in your '\n 'models: {}'.format(str(models)[1:-1]))\n kwargs['filter'] = build_filter(models, roots)\n elif cls == operations.edit.EditConfig:\n if args and isinstance(args[0], Config):\n args_list = list(args)\n args_list[0] = args[0].ele\n args = tuple(args_list)\n if 'target' not in kwargs and \\\n 'urn:ietf:params:netconf:capability:candidate:1.0' not in \\\n self.server_capabilities and \\\n 'urn:ietf:params:netconf:capability:writable-running:1.0' in \\\n self.server_capabilities:\n kwargs['target'] = 'running'\n reply = super().execute(cls, *args, **kwargs)\n if isinstance(reply, operations.rpc.RPCReply):\n reply.ns = self._get_ns(reply._root)\n if getattr(transport, 'notify', None) and \\\n isinstance(reply, transport.notify.Notification):\n reply.ns = self._get_ns(reply._root_ele)\n return reply\n\n def take_notification(self, block=True, timeout=None):\n '''take_notification\n\n High-level api: Receive notification messages.\n\n Parameters\n ----------\n\n block : `bool`\n True if this is a blocking call.\n\n timeout : `int`\n Timeout value in seconds.\n\n Returns\n -------\n\n Notification\n An instance of Notification in ncclient package.\n\n\n Code Example::\n\n >>> reply = device.nc.take_notification(block=True, timeout=60)\n >>> 
assert(reply.ok)\n >>> print(reply)\n >>>\n '''\n\n reply = super().take_notification(block=block, timeout=timeout)\n if isinstance(reply, operations.rpc.RPCReply):\n reply.ns = self._get_ns(reply._root)\n if getattr(transport, 'notify', None) and \\\n isinstance(reply, transport.notify.Notification):\n reply.ns = self._get_ns(reply._root_ele)\n return reply\n\n def extract_config(self, reply, type='netconf'):\n '''extract_config\n\n High-level api: Extract config from a rpc-reply of get-config or get\n message.\n\n Parameters\n ----------\n\n reply : `RPCReply`\n An instance of RPCReply in ncclient package. It has to be a\n successful reply in order to extract config, since there is no\n config data in an errored reply.\n\n Returns\n -------\n\n Config\n An instance of Config, which represents a config state of the\n device.\n\n\n Code Example::\n\n >>> reply = device.nc.get_config(models='openconfig-interfaces')\n >>> assert(reply.ok)\n >>> config1 = device.nc.extract_config(reply)\n >>>\n >>> reply = device.nc.get(models='openconfig-interfaces')\n >>> assert(reply.ok)\n >>> config2 = device.nc.extract_config(reply)\n >>>\n >>> config1 == config2\n True\n >>>\n '''\n\n def remove_read_only(parent):\n for child in parent.getchildren():\n schema_node = self.get_schema_node(child)\n if schema_node.get('access') == 'read-only':\n parent.remove(child)\n elif len(child) > 0:\n remove_read_only(child)\n\n config = Config(self, reply)\n remove_read_only(config.ele)\n return config\n\n def get_schema_node(self, config_node):\n '''get_schema_node\n\n High-level api: Given an Element node in config, get_schema_node returns\n a schema node (defined in RFC 6020), which is an Element node in the\n schema tree.\n\n Parameters\n ----------\n\n config_node : `Element`\n An Element node in config tree.\n\n Returns\n -------\n\n Element\n A schema node, or None when nothing can be found.\n\n Raises\n ------\n\n ModelError\n If identifier is not unique in a namespace.\n\n\n Code Example::\n\n >>> device.nc.load_model('openconfig-interfaces')\n >>> reply = device.nc.get_config(models='openconfig-interfaces')\n >>> config = device.nc.extract_config(reply)\n >>> print(config)\n ...\n >>> config.ns\n ...\n >>> config_nodes = config.xpath('/nc:config/oc-if:interfaces/oc-if:interface[oc-if:name=\"GigabitEthernet0/0\"]')\n >>> config_node = config_nodes[0]\n >>>\n >>> device.nc.get_schema_node(config_node)\n <Element {http://openconfig.net/yang/interfaces}interface at 0xf11acfcc>\n >>>\n '''\n\n def get_child(parent, tag):\n children = [i for i in parent.iter(tag=tag)\n if i.attrib['type'] != 'choice' and\n i.attrib['type'] != 'case' and\n is_parent(parent, i)]\n if len(children) == 1:\n return children[0]\n elif len(children) > 1:\n if parent.getparent() is None:\n raise ModelError(\"more than one root has tag '{}'\"\n .format(tag))\n else:\n raise ModelError(\"node {} has more than one child with \"\n \"tag '{}'\"\n .format(self.get_xpath(parent), tag))\n else:\n return None\n\n def is_parent(node1, node2):\n ancestors = {id(a): a for a in node2.iterancestors()}\n ids_1 = set([id(a) for a in node1.iterancestors()])\n ids_2 = set([id(a) for a in node2.iterancestors()])\n if not ids_1 < ids_2:\n return False\n for i in ids_2 - ids_1:\n if ancestors[i] is not node1 and \\\n ancestors[i].attrib['type'] != 'choice' and \\\n ancestors[i].attrib['type'] != 'case':\n return False\n return True\n\n n = Composer(self, config_node)\n config_path = n.path\n if ' '.join(config_path) in self.nodes:\n return self.nodes[' 
'.join(config_path)]\n if len(config_path) > 1:\n parent = self.get_schema_node(config_node.getparent())\n if parent is None:\n return None\n else:\n child = get_child(parent, config_node.tag)\n if child is not None:\n self.nodes[' '.join(config_path)] = child\n return child\n else:\n tree = self.models[n.model_name].tree\n child = get_child(tree, config_node.tag)\n if child is not None:\n self.nodes[' '.join(config_path)] = child\n return child\n\n def get_model_name(self, node):\n '''get_model_name\n\n High-level api: Given an Element node in config tree or schema tree,\n get_model_name returns the model name that the node belongs to.\n\n Parameters\n ----------\n\n node : `Element`\n an Element node in config tree or schema tree.\n\n Returns\n -------\n\n str\n Model name.\n\n\n Code Example::\n\n >>> device.nc.get_model_name(config_node)\n 'openconfig-interfaces'\n >>>\n '''\n\n return Composer(self, node).model_name\n\n def get_xpath(self, node, type=Tag.XPATH, instance=True):\n '''get_xpath\n\n High-level api: Given a config or schema node, get_xpath returns an\n xpath of the node, which starts from the model root. Each identifier\n uses the `prefix:tagname` notation if argument 'type' is not specified.\n\n Parameters\n ----------\n\n node : `Element`\n A config or schema node.\n\n type : `tuple`\n A tuple constant defined in yang.ncdiff.Tag. Most commonly it could\n be Tag.XPATH or Tag.LXML_XPATH.\n\n instance : `bool`\n True if the xpath returned points to an instance. The xpath could\n point to a list or leaf-list when instance=False.\n\n Returns\n -------\n\n str\n An xpath of the config or schema node, which starts from the model\n root.\n\n\n Code Example::\n\n >>> device.nc.get_xpath(config_node)\n '/oc-if:interfaces/interface[name=\"GigabitEthernet0/0\"]'\n >>> device.nc.get_xpath(config_node, type=Tag.LXML_XPATH)\n '/oc-if:interfaces/oc-if:interface[oc-if:name=\"GigabitEthernet0/0\"]'\n >>>\n >>> device.nc.get_xpath(schema_node)\n '/oc-if:interfaces/interface'\n >>>\n '''\n\n return Composer(self, node).get_xpath(type, instance=instance)\n\n def convert_tag(self, default_ns, tag, src=Tag.LXML_ETREE, dst=Tag.YTOOL):\n '''convert_tag\n\n High-level api: Convert a tag or an identifier from one notation to\n another. Notations are defined by tuple constants in yang.ncdiff.Tag.\n\n Parameters\n ----------\n\n default_ns : `str`\n The default namespace. Usually it's the namespace of parent node. It\n could be a model name, a model prefix, or a model URL, depending on\n your argument 'src'. An empty string is considered as none default\n namespace.\n\n tag : `str`\n A tag or an identifier of a config node or a schema node.\n\n src : `tuple`\n The type of notation the input tag is, which is a tuple constant\n defined in yang.ncdiff.Tag. Most commonly it could be Tag.XPATH or\n Tag.LXML_XPATH.\n\n dst : `tuple`\n The type of notation we want, which is a tuple constant defined in\n yang.ncdiff.Tag. Most commonly it could be Tag.XPATH or\n Tag.LXML_XPATH.\n\n Returns\n -------\n\n tuple\n A tuple that has two elements: The first element is the namespace of\n argument 'tag'. It could be a model name, a model prefix, or a model\n URL, depending on your argument 'src'. 
The second element is the\n converted tag or identifier, which is in notation specified by\n argument 'dst'.\n\n\n Code Example::\n\n >>> device.nc.convert_tag('',\n '{http://openconfig.net/yang/interfaces}interface',\n dst=Tag.JSON_NAME)\n ('http://openconfig.net/yang/interfaces', 'openconfig-interfaces:interface')\n >>>\n '''\n\n def possible_part1():\n if src[0] == Tag.NAME:\n return [i[0] for i in self.namespaces]\n elif src[0] == Tag.PREFIX:\n return [i[1] for i in self.namespaces] + \\\n list(special_prefixes.values())\n else:\n return [i[2] for i in self.namespaces] + \\\n list(special_prefixes.keys())\n\n def split_tag(tag):\n ret = re.search(src[2][0], tag)\n if ret:\n if ret.group(1) in possible_part1():\n return (ret.group(1), ret.group(2))\n else:\n return ('', tag)\n else:\n return ('', tag)\n\n def format_tag(tag_ns, tag_name):\n if tag_ns:\n return dst[2][1].format(tag_ns, tag_name)\n else:\n return tag_name\n\n def convert(ns):\n matches = [i for i in self.namespaces if i[src[0]] == ns]\n c = len(matches)\n if c > 1:\n raise ModelError(\"device supports more than one {} '{}': {}\"\n .format(Tag.STR[src[0]], ns, matches))\n if c == 1:\n return matches[0][dst[0]]\n if src[0] != Tag.NAME and dst[0] != Tag.NAME:\n special = [('', v, k) for k, v in special_prefixes.items()]\n matches = [i for i in special if i[src[0]] == ns]\n if len(matches) == 1:\n return matches[0][dst[0]]\n raise ValueError(\"device does not support {} '{}' \"\n \"when parsing tag '{}'\"\n .format(Tag.STR[src[0]], ns, tag))\n\n tag_ns, tag_name = split_tag(tag)\n if src[1] == Tag.NO_OMIT and not tag_ns:\n raise ValueError(\"tag '{}' does not contain prefix or namespace \"\n \"but it is supposed to be Tag.NO_OMIT\"\n .format(tag))\n elif not tag_ns:\n tag_ns = default_ns\n if dst[1] == Tag.NO_OMIT:\n return tag_ns, format_tag(convert(tag_ns), tag_name)\n elif dst[1] == Tag.OMIT_BY_INHERITANCE:\n if default_ns == tag_ns:\n return default_ns, format_tag('', tag_name)\n else:\n return tag_ns, format_tag(convert(tag_ns), tag_name)\n elif dst[1] == Tag.OMIT_BY_MODULE:\n if default_ns == tag_ns:\n return default_ns, format_tag('', tag_name)\n else:\n return default_ns, format_tag(convert(tag_ns), tag_name)\n else:\n raise ValueError(\"unknown value '{}' in class Tag\".format(dst[1]))\n\n def _get_ns(self, reply):\n '''_get_ns\n\n Low-level api: Return a dict of nsmap.\n\n Parameters\n ----------\n\n reply : `Element`\n rpc-reply as an instance of Element.\n\n Returns\n -------\n\n dict\n A dict of nsmap.\n '''\n\n def get_prefix(url):\n if url in special_prefixes:\n return special_prefixes[url]\n for i in self.namespaces:\n if url == i[2]:\n return i[1]\n return None\n\n root = reply.getroottree()\n urls = set()\n for node in root.iter():\n urls.update([u for p, u in node.nsmap.items()])\n ret = {url: get_prefix(url) for url in urls}\n i = 0\n for url in [url for url in ret if ret[url] is None]:\n logger.warning('{} cannot be found in namespaces of any '\n 'models'.format(url))\n ret[url] = 'ns{:02d}'.format(i)\n i += 1\n return {p: u for u, p in ret.items()}\n", "id": "10879806", "language": "Python", "matching_score": 4.967522621154785, "max_stars_count": 15, "path": "ncdiff/src/yang/ncdiff/device.py" }, { "content": "\"\"\"yang.ncdiff module defines a set of classes that calculate diff of two\nconfigs, and predict config when a diff is applied on a config. 
A config is the\npayload of a Netconf get-config reply, and a diff is the payload of a\nedit-config message.\"\"\"\n\n# metadata\n__version__ = '21.10'\n__author__ = '<NAME> <<EMAIL>>'\n__contact__ = '<EMAIL>'\n__copyright__ = 'Cisco Systems, Inc.'\n\n\nimport json\nimport pprint\nimport requests\nfrom lxml import etree\nfrom ncclient import operations, transport\nfrom collections import OrderedDict\n\nfrom .model import Model, ModelDownloader, ModelCompiler, ModelDiff\nfrom .device import ModelDevice\nfrom .config import Config, ConfigDelta\nfrom .composer import Tag\nfrom .runningconfig import RunningConfigDiff\nfrom cisco_gnmi.proto import gnmi_pb2, gnmi_pb2_grpc\n\ndef _repr_rpcreply(self):\n return '<{}.{} {} at {}>'.format(self.__class__.__module__,\n self.__class__.__name__,\n self._root.tag,\n hex(id(self)))\n\ndef _repr_notification(self):\n return '<{}.{} {} at {}>'.format(self.__class__.__module__,\n self.__class__.__name__,\n self._root_ele.tag,\n hex(id(self)))\n\ndef _str_rpcreply(self):\n self.parse()\n xml_str = etree.tostring(self._root, encoding='unicode')\n xml_ele = etree.XML(xml_str, etree.XMLParser(remove_blank_text=True))\n return etree.tostring(xml_ele, encoding='unicode', pretty_print=True)\n\ndef _str_notification(self):\n xml_str = etree.tostring(self._root_ele, encoding='unicode')\n xml_ele = etree.XML(xml_str, etree.XMLParser(remove_blank_text=True))\n return etree.tostring(xml_ele, encoding='unicode', pretty_print=True)\n\ndef _str_request(self):\n ret = '{} {}'.format(self.method, self.url)\n headers = {k.lower(): v.lower() for k, v in self.headers.items()}\n if 'content-type' in headers:\n ret += '\\nContent-Type: {}'.format(headers['content-type'])\n if self.method in ['POST', 'PUT', 'PATCH']:\n if headers['content-type'] == 'application/yang-data+json':\n d = json.loads(self.data, object_pairs_hook=OrderedDict)\n ret += '\\n\\n{}'.format(json.dumps(d, indent=2))\n if headers['content-type'] == 'application/yang-data+xml':\n e = etree.XML(self.data,\n etree.XMLParser(remove_blank_text=True))\n ret += '\\n\\n{}'.format(etree.tostring(e, encoding='unicode',\n pretty_print=True))\n return ret\n\ndef _str_response(self):\n http_versions = {10: 'HTTP/1.0', 11: 'HTTP/1.1'}\n ret = '{} {}'.format(self.status_code, self.reason)\n if self.raw.version in http_versions:\n ret = http_versions[self.raw.version] + ' ' + ret\n for k, v in self.headers.items():\n ret += '\\n{}: {}'.format(k, v)\n if self.text:\n ret += '\\n\\n' + self.text\n return ret\n\ndef xpath_rpcreply(self, *args, **kwargs):\n if 'namespaces' not in kwargs:\n kwargs['namespaces'] = self.ns\n return self._root.xpath(*args, **kwargs)\n else:\n return self._root.xpath(*args, **kwargs)\n\ndef xpath_notification(self, *args, **kwargs):\n if 'namespaces' not in kwargs:\n kwargs['namespaces'] = self.ns\n return self._root_ele.xpath(*args, **kwargs)\n else:\n return self._root_ele.xpath(*args, **kwargs)\n\ndef ns_help(self):\n pprint.pprint(self.ns)\n\noperations.rpc.RPCReply.__repr__ = _repr_rpcreply\noperations.rpc.RPCReply.__str__ = _str_rpcreply\noperations.rpc.RPCReply.xpath = xpath_rpcreply\noperations.rpc.RPCReply.ns_help = ns_help\n\nif getattr(transport, 'notify', None):\n transport.notify.Notification.__repr__ = _repr_notification\n transport.notify.Notification.__str__ = _str_notification\n transport.notify.Notification.xpath = xpath_notification\n transport.notify.Notification.ns_help = ns_help\n\nrequests.Request.__str__ = _str_request\nrequests.Response.__str__ = _str_response\n", "id": 
"2975273", "language": "Python", "matching_score": 2.709398031234741, "max_stars_count": 0, "path": "ncdiff/src/yang/ncdiff/__init__.py" }, { "content": "#! /usr/bin/env python\nimport sys\nfrom pprint import pprint as pp\nimport json\nfrom collections import OrderedDict\nimport yaml\nimport yamlordereddictloader\n\n\ndef load(filename):\n #\n # Load test YAML file as OrderedDict:\n #\n test = yaml.load(\n open(filename),\n Loader=yamlordereddictloader.Loader\n )\n\n print('\\n\\nYAML to DICT\\n\\n')\n pp(test)\n #\n # Dump test to a JSON file:\n #\n with open(filename.replace('.yaml', '.json'), 'w') as fd:\n json.dump(test, fd, indent=2)\n #\n # Load test JSON file as OrderedDict:\n #\n with open(filename.replace('.yaml', '.json')) as fd:\n test = json.load(fd, object_pairs_hook=OrderedDict)\n print('\\n\\nJSON to DICT\\n\\n')\n pp(test)\n\n\nif __name__ == '__main__':\n if len(sys.argv) > 1:\n filename = sys.argv[1]\n else:\n filename = 'test-schema-example.yaml'\n\n load(filename)\n", "id": "227725", "language": "Python", "matching_score": 0.3611917793750763, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/pipeline/tests/test_yaml.py" }, { "content": "from .topology_mapper import *\n\n# import the loader\n# this forces the appearance of topology_mapper.loader\n# because it's a class obj appearing as a module\nfrom . import loader\n", "id": "11349076", "language": "Python", "matching_score": 0.7488078474998474, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/topology_mapper/__init__.py" }, { "content": "'''\nIOSXE Genie Conf using YANG for feature Standby.\n'''\n\n# Python\nfrom abc import ABC\nimport warnings\nimport string\n\n# Genie\nfrom genie.conf.base import Interface\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.config import YangConfig\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning,\\\n AttributesHelper\n\n# YDK\ntry:\n from ydk.models.cisco_iosxe_native import Cisco_IOS_XE_native as ned\n from ydk.types import DELETE, Empty\n from ydk.services import CRUDService\n\n # patch a netconf provider\n from ydk.providers import NetconfServiceProvider as _NetconfServiceProvider\n from ydk.providers._provider_plugin import _ClientSPPlugin\n from ydk.services import CodecService\n from ydk.providers import CodecServiceProvider\n\n class NetconfServiceProvider(_NetconfServiceProvider):\n def __init__(self, device):\n if 'yang' not in device.mapping:\n # Want it, but dont have a connection? 
\n raise Exception(\"Missing connection of \"\n \"type 'yang' in the device \"\n \"mapping '{map}'\".format(map=device.mapping))\n alias = device.mapping['yang']\n dev = device.connectionmgr.connections[alias]\n\n super().__init__(address=str(dev.connection_info.ip),\n port=dev.connection_info.port,\n username=dev.connection_info.username,\n password=dev.connection_info.password,\n protocol = 'ssh')\n\n self.sp_instance = _ClientSPPlugin(self.timeout,\n use_native_client=False)\n\n self.sp_instance._nc_manager = dev\n def _connect(self, *args, **kwargs): pass\nexcept Exception:\n pass\n\n\nclass Hsrp(ABC):\n\n class DeviceAttributes(ABC):\n\n def build_config(self, devices=None, apply=True, attributes=None,\n unconfig=False, **kwargs):\n\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n hsrp_config = []\n\n # loop over all interfaces\n for sub, attributes2 in attributes.mapping_values(\n 'interface_attr', keys=self.interface_attr.keys()):\n\n hsrp_config.append(sub.build_config(apply=False,\n attributes=attributes2, unconfig=unconfig, **kwargs))\n\n if apply:\n for interface in hsrp_config:\n interface.apply()\n else:\n ydks = []\n for interface in hsrp_config:\n ydks.append(interface)\n\n return ydks\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n class InterfaceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n\n assert not apply\n crud_service = CRUDService()\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n intf_name = attributes.value('interface_name')\n\n if intf_name.startswith('Gig'):\n ydk_obj = ned.Native.Interface.Gigabitethernet()\n elif intf_name.startswith('Ten'):\n ydk_obj = ned.Native.Interface.Tengigabitethernet()\n else:\n raise ValueError('unsupported interface type in name {!r}'\n .format(intf_name))\n\n # Set the interface name, keeping only digits and '/'\n keep = set(string.digits + '/')\n ydk_obj.name = ''.join(i for i in attributes.value(\n 'interface_name') if i in keep)\n\n # Standby Object\n standby_obj = ydk_obj.Standby()\n ydk_obj.standby = standby_obj\n\n # =============\n # leaf: version\n # =============\n # interface <interface_name>\n # standby version <version>\n if attributes.value('version') == 2:\n version_obj = ydk_obj.standby.VersionEnum.Y_2\n standby_obj.version = version_obj\n elif attributes.value('version') == 1:\n version_obj = ydk_obj.standby.VersionEnum.Y_1\n standby_obj.version = version_obj\n\n # =========\n # leaf: bfd\n # =========\n # interface <interface_name>\n # standby <bfd>\n if attributes.value('bfd'):\n standby_obj.bfd = Empty()\n\n # ===========\n # leaf: delay\n # ===========\n # interface <interface_name>\n # standby delay minimum <minimum_delay>\\\n # reload <reload_delay>\n if attributes.value('minimum_delay') and \\\n attributes.value('reload_delay'):\n delay_obj = standby_obj.Delay()\n delay_obj.minimum = int(attributes.value('minimum_delay'))\n delay_obj.reload = int(attributes.value('reload_delay'))\n standby_obj.delay = delay_obj\n elif attributes.value('minimum_delay'):\n delay_obj = standby_obj.Delay()\n delay_obj.minimum = int(attributes.value('minimum_delay'))\n standby_obj.delay = delay_obj\n\n # =================\n # leaf: mac-refresh\n # =================\n # interface <interface_name>\n # standby 
mac-refresh <mac_refresh>\n if attributes.value('mac_refresh'):\n standby_obj.mac_refresh = \\\n int(attributes.value('mac_refresh'))\n\n # =============\n # leaf: use-bia\n # =============\n # interface <interface_name>\n # standby use-bia scope interface\n if attributes.value('use_bia'):\n use_bia_obj = ydk_obj.standby.UseBia()\n scope_obj = ydk_obj.standby.UseBia.Scope()\n scope_obj.interface = Empty()\n use_bia_obj.scope = scope_obj\n standby_obj.use_bia = use_bia_obj\n\n # ==================\n # leaf: standby-list\n # ==================\n if attributes.value('group_number') is not None:\n\n # ==================\n # leaf: group_number\n # ==================\n standby_list_obj = ydk_obj.standby.StandbyList()\n standby_list_obj.group_number = \\\n int(attributes.value('group_number'))\n\n # ====================\n # leaf: authentication\n # ====================\n if attributes.value('authentication_word'):\n # interface <interface_name>\n # standby <group_number> authentication \\\n # <authentication_word>\n authentication_obj = standby_list_obj.Authentication()\n authentication_obj.word = \\\n attributes.value('authentication_word')\n standby_list_obj.authentication = authentication_obj\n \n elif attributes.value('authentication_text'):\n # interface <interface_name>\n # standby <group_number> authentication text \\\n # <authentication_text>\n authentication_obj = standby_list_obj.Authentication()\n authentication_obj.word = \\\n attributes.value('authentication_text')\n standby_list_obj.authentication = authentication_obj\n\n elif attributes.value('authentication_md5_keychain'):\n # interface <interface_name>\n # standby <group_number> authentication \\\n # md5 key-chain <authentication_md5_keychain>\n authentication_obj = standby_list_obj.Authentication()\n md5_obj = authentication_obj.Md5()\n md5_obj.key_chain = \\\n attributes.value('authentication_md5_keychain')\n authentication_obj.md5 = md5_obj\n standby_list_obj.authentication = authentication_obj\n\n elif attributes.value('authentication_md5_keystring'):\n # interface <interface_name>\n # standby <group_number> authentication \\\n # md5 key-string <authentication_md5_keystring>\n authentication_obj = standby_list_obj.Authentication()\n md5_obj = authentication_obj.Md5()\n key_string_obj = md5_obj.KeyString()\n key_string_obj.string = \\\n attributes.value('authentication_md5_keystring')\n md5_obj.key_string = key_string_obj\n authentication_obj.md5 = md5_obj\n standby_list_obj.authentication = authentication_obj\n\n # ============\n # leaf: follow\n # ============\n # interface <interface_name>\n # standby <group_number> follow <follow>\n if attributes.value('follow'):\n standby_list_obj.follow = attributes.value('follow')\n\n # ========\n # leaf: ip\n # ========\n # interface <interface_name>\n # standby <group_number> ip <ip_address>\n if attributes.value('ip_address'):\n ip_obj = standby_list_obj.Ip()\n ip_obj.address = attributes.value('ip_address')\n standby_list_obj.ip = ip_obj\n\n # ==========\n # leaf: ipv6\n # ==========\n # interface <interface_name>\n # standby <group_number> ip <ip_address>\n if attributes.value('ipv6_address') == 'autoconfig':\n ipv6_obj = standby_list_obj.Ipv6Enum.autoconfig\n standby_list_obj.ipv6 = ipv6_obj\n else:\n standby_list_obj.ipv6 = \\\n attributes.value('ipv6_address')\n\n # =================\n # leaf: mac-address\n # =================\n # interface <interface_name>\n # standby <group_number> mac-address <mac_address>\n if attributes.value('mac_address'):\n standby_list_obj.mac_address = 
\\\n attributes.value('mac_address')\n\n # ==========\n # leaf: name\n # ==========\n # interface <interface_name>\n # standby <group_number> name <group_name>\n if attributes.value('group_name'):\n standby_list_obj.name = attributes.value('group_name')\n\n # =============\n # leaf: preempt\n # =============\n if attributes.value('preempt') and \\\n attributes.value('preempt_minimum_delay') and \\\n attributes.value('preempt_reload_delay') and \\\n attributes.value('preempt_sync_delay'):\n # interface <interface_name>\n # standby <group_number> preempt delay \\\n # minimum <preempt_minimum_delay> reload \\\n # <preempt_reload> sync <preempt_sync>\n preempt_obj = standby_list_obj.Preempt()\n preempt_obj_delay = preempt_obj.Delay()\n preempt_obj_delay.minimum = \\\n int(attributes.value('preempt_minimum_delay'))\n preempt_obj_delay.reload = \\\n int(attributes.value('preempt_reload_delay'))\n preempt_obj_delay.sync = \\\n int(attributes.value('preempt_sync_delay'))\n preempt_obj.delay = preempt_obj_delay\n standby_list_obj.preempt = preempt_obj\n elif attributes.value('preempt') and \\\n attributes.value('preempt_minimum_delay') and \\\n attributes.value('preempt_reload_delay'):\n # interface <interface_name>\n # standby <group_number> preempt delay \\\n # minimum <preempt_minimum_delay> \\\n # reload <preempt_reload>\n preempt_obj = standby_list_obj.Preempt()\n preempt_obj_delay = preempt_obj.Delay()\n preempt_obj_delay.minimum = \\\n int(attributes.value('preempt_minimum_delay'))\n preempt_obj_delay.reload = \\\n int(attributes.value('preempt_reload_delay'))\n preempt_obj.delay = preempt_obj_delay\n standby_list_obj.preempt = preempt_obj\n elif attributes.value('preempt') and \\\n attributes.value('preempt_minimum_delay'):\n # interface <interface_name>\n # standby <group_number> preempt delay \\\n # minimum <preempt_minimum_delay>\n preempt_obj = standby_list_obj.Preempt()\n preempt_obj_delay = preempt_obj.Delay()\n preempt_obj_delay.minimum = \\\n int(attributes.value('preempt_minimum_delay'))\n preempt_obj.delay = preempt_obj_delay\n standby_list_obj.preempt = preempt_obj\n elif attributes.value('preempt'):\n # interface <interface_name>\n # standby <group_number> preempt delay\n preempt_obj = standby_list_obj.Preempt()\n preempt_obj_delay = preempt_obj.Delay()\n preempt_obj.delay = preempt_obj_delay\n standby_list_obj.preempt = preempt_obj\n\n # ==============\n # leaf: priority\n # ==============\n # interface <interface_name>\n # standby <group_number> priority <priority>\n if attributes.value('priority'):\n standby_list_obj.priority = \\\n int(attributes.value('priority'))\n\n # ==============\n # leaf: redirect\n # ==============\n # TODO: this is a bug\n\n # ============\n # leaf: timers\n # ============\n if attributes.value('hello_interval_seconds') and \\\n attributes.value('holdtime_seconds'):\n # interface <interface_name>\n # standby <group_number> timers \\\n # <hello_interval_seconds> <holdtime_seconds>\n timers_obj = standby_list_obj.Timers()\n hello_interval_obj = timers_obj.HelloInterval()\n hello_interval_obj.seconds = \\\n int(attributes.value('hello_interval_seconds'))\n timers_obj.hello_interval = hello_interval_obj\n hold_time_obj = timers_obj.HoldTime()\n hold_time_obj.seconds = \\\n attributes.value('holdtime_seconds')\n timers_obj.hold_time = hold_time_obj\n standby_list_obj.timers = timers_obj\n elif attributes.value('hello_interval_msec') and \\\n attributes.value('holdtime_msec'):\n # interface <interface_name>\n # standby <group_number> timers msec \\\n # 
<hello_interval_msec> msec <holdtime_msec>\n timers_obj = standby_list_obj.Timers()\n hello_interval_obj = timers_obj.HelloInterval()\n hello_interval_obj.msec = \\\n int(attributes.value('hello_interval_msec'))\n timers_obj.hello_interval = hello_interval_obj\n hold_time_obj = timers_obj.HoldTime()\n hold_time_obj.msec = attributes.value('holdtime_msec')\n timers_obj.hold_time = hold_time_obj\n standby_list_obj.timers = timers_obj\n\n # ===========\n # leaf: track\n # ===========\n if attributes.value('track_object') and \\\n attributes.value('priority_decrement'):\n # interface <interface_name>\n # standby <group_number> track object <track_object> \\\n # decrement <priority_decrement>\n track_obj = standby_list_obj.Track()\n track_obj.number = attributes.value('track_object')\n track_obj.decrement = \\\n int(attributes.value('priority_decrement'))\n standby_list_obj.track.append(track_obj)\n elif attributes.value('track_object') and \\\n attributes.value('track_shutdown'):\n # interface <interface_name>\n # standby <group_number> track object <track_object> \\\n # shutdown\n track_obj = standby_list_obj.Track()\n track_obj.number = attributes.value('track_object')\n track_obj.shutdown = Empty()\n standby_list_obj.track.append(track_obj)\n\n # Add standby_list_obj to standby_obj\n standby_obj.standby_list.append(standby_list_obj)\n\n if unconfig:\n return YangConfig(device=self.device,\n ydk_obj=ydk_obj,\n ncp=NetconfServiceProvider,\n crud_service=crud_service.delete)\n else:\n return YangConfig(device=self.device,\n ydk_obj=ydk_obj,\n ncp=NetconfServiceProvider,\n crud_service=crud_service.create)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, \n unconfig=True, **kwargs)\n\n", "id": "10067386", "language": "Python", "matching_score": 5.483813762664795, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/hsrp/iosxe/yang/hsrp.py" }, { "content": "\"\"\"Implement IOS-XR (iosxr) Specific Configurations for Vrf objects.\n\"\"\"\n\n# Table of contents:\n# class Vrf:\n# class DeviceAttributes:\n# def build_config/build_unconfig:\n# class AddressFamilyAttributes:\n# def build_config/build_unconfig:\n\nfrom abc import ABC\nimport warnings\n\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning,\\\n AttributesHelper\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.libs.conf.address_family import AddressFamily,\\\n AddressFamilySubAttributes\nfrom genie.conf.base.config import YangConfig\ntry:\n from ydk.models.ydkmodels import Cisco_IOS_XR_infra_rsi_cfg as xr_infra_rsi_cfg\n from ydk.types import DELETE, Empty\n from ydk.services import CRUDService\n\n from ydk.services import CodecService\n from ydk.providers import CodecServiceProvider\n\n # patch a netconf provider\n from ydk.providers import NetconfServiceProvider as _NetconfServiceProvider\n from ydk.providers._provider_plugin import _ClientSPPlugin\n\n class NetconfServiceProvider(_NetconfServiceProvider):\n\n def __init__(self, device):\n if 'yang' not in device.mapping:\n # Want it, but dont have a connection? 
\n raise Exception(\"Missing connection of \"\n \"type 'yang' in the device \"\n \"mapping '{map}'\".format(map=device.mapping))\n alias = device.mapping['yang']\n dev = device.connectionmgr.connections[alias]\n\n super().__init__(address=str(dev.connection_info.ip),\n port=dev.connection_info.port,\n username=dev.connection_info.username,\n password=dev.connection_info.password,\n protocol = 'ssh')\n\n self.sp_instance = _ClientSPPlugin(self.timeout,\n use_native_client=False)\n\n self.sp_instance._nc_manager = dev\n\n def _connect(self, *args, **kwargs): pass\nexcept Exception:\n pass\n\nclass Vrf(object):\n\n class DeviceAttributes(object):\n\n def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n assert not kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n vrfs = xr_infra_rsi_cfg.Vrfs()\n if not unconfig:\n vrf = vrfs.Vrf()\n if attributes.value('name'):\n vrf.vrf_name = attributes.value('name')\n vrf.create = Empty()\n\n vrfs.vrf.append(vrf)\n #crud_service = CRUDService()\n #ncp = NetconfServiceProvider(self.device)\n #x = crud_service.read(ncp, vrf)\n #abc = YangConfig(device=self.device, ydk_obj=x, ncp=ncp, crud_service=crud_service)\n #print(abc)\n # iosxr: vrf vrf1 / address-family ipv4 unicast (config-vrf-af)\n for key, sub, attributes2 in attributes.mapping_items(\n 'address_family_attr', keys=self.address_families, sort=True):\n sub.build_config(apply=False, attributes=attributes2, unconfig=unconfig, vrf=vrf)\n\n # instantiate crud service\n crud_service = CRUDService()\n if apply:\n\n # create netconf connection\n ncp = NetconfServiceProvider(self.device)\n\n crud_service.create(ncp, vrfs)\n\n if unconfig:\n crud_service.delete(ncp, vrfs)\n else:\n if unconfig:\n return YangConfig(device=self.device,\n ydk_obj=vrfs,\n ncp=NetconfServiceProvider,\n crud_service=crud_service.delete)\n else:\n return YangConfig(device=self.device,\n ydk_obj=vrfs,\n ncp=NetconfServiceProvider,\n crud_service=crud_service.create)\n\n #return YangConfig(device=self.device,\n # ydk_obj=ydk_obj,\n # ncp=NetconfServiceProvider,\n # crud_service=crud_service.delete)\n\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n class AddressFamilyAttributes(object):\n\n def build_config(self, vrf, apply=True, attributes=None, unconfig=False, **kwargs):\n assert not apply\n assert not kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n af = vrf.afs.Af()\n af.topology_name = \"default\"\n af.create = Empty()\n if attributes.value('address_family').value == 'ipv4 unicast':\n af.af_name = xr_infra_rsi_cfg.VrfAddressFamilyEnum.ipv4\n af.saf_name = xr_infra_rsi_cfg.VrfSubAddressFamilyEnum.unicast\n\n if attributes.value('address_family').value == 'ipv6 unicast':\n af.af_name = xr_infra_rsi_cfg.VrfAddressFamilyEnum.ipv6\n af.saf_name = xr_infra_rsi_cfg.VrfSubAddressFamilyEnum.unicast\n\n if unconfig and attributes.iswildcard:\n if attributes.value('address_family').value == 'ipv4 unicast':\n af.af_name = None\n af.saf_name = None\n\n if attributes.value('address_family').value == 'ipv6 unicast':\n af.af_name = None\n af.saf_name = None\n\n vrf.afs.af.append(af)\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n", "id": "4000910", "language": "Python", 
"matching_score": 4.924057483673096, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/vrf/iosxr/yang/vrf.py" }, { "content": "\n# Python\nfrom abc import ABC\n\n# xBU_shared genie package\nfrom genie.libs.conf.vrf import VrfSubAttributes\n\n# Genie package\nfrom genie.conf.base.attributes import AttributesHelper\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base import Interface\nfrom genie.conf.base.config import YangConfig\ntry:\n from ydk.models.ned import ned\n from ydk.types import DELETE, Empty\n from ydk.services import CRUDService\n\n # patch a netconf provider\n from ydk.providers import NetconfServiceProvider as _NetconfServiceProvider\n from ydk.providers._provider_plugin import _ClientSPPlugin\n from ydk.services import CodecService\n from ydk.providers import CodecServiceProvider\n\n class NetconfServiceProvider(_NetconfServiceProvider):\n def __init__(self, device):\n if 'yang' not in device.mapping:\n # Want it, but dont have a connection? \n raise Exception(\"Missing connection of \"\n \"type 'yang' in the device \"\n \"mapping '{map}'\".format(map=device.mapping))\n alias = device.mapping['yang']\n dev = device.connectionmgr.connections[alias]\n\n super().__init__(address=str(dev.connection_info.ip),\n port=dev.connection_info.port,\n username=dev.connection_info.username,\n password=dev.connection_info.password,\n protocol = 'ssh')\n\n self.sp_instance = _ClientSPPlugin(self.timeout,\n use_native_client=False)\n\n self.sp_instance._nc_manager = dev\n def _connect(self, *args, **kwargs): pass\nexcept:\n pass\n\n\nclass Vlan(ABC):\n\n\n class DeviceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, \n unconfig=False, **kwargs):\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n # No yang models found for NXOS so it has been built following\n # the IOSXE ned models for now.\n ydk_obj = ned.Native.Vlan()\n\n vlan_config = []\n for sub, attributes2 in attributes.mapping_values(\n 'access_map_attr', \n keys=self.vlan_configuration_attr.keys()):\n vlan_config.extend(sub.build_config(apply=False, \n attributes=attributes2,\n unconfig=unconfig, \n ydk_obj=ydk_obj, \n **kwargs))\n\n for sub, attributes2 in attributes.mapping_values(\n 'vlan_configuration_attr', \n keys=self.vlan_configuration_attr.keys()):\n vlan_config.extend(sub.build_config(apply=False, \n attributes=attributes2,\n unconfig=unconfig, \n ydk_obj=ydk_obj, \n **kwargs))\n\n for sub, attributes2 in attributes.mapping_values(\n 'interface_attr', \n keys=self.vlan_configuration_attr.keys()):\n vlan_config.extend(sub.build_config(apply=False, \n attributes=attributes2,\n unconfig=unconfig, \n ydk_obj=ydk_obj, \n **kwargs))\n\n # iosxe: vlan 1000 (config-vlan)\n id = attributes.value('vlan_id', force = True)\n\n if id:\n vlan = ydk_obj.VlanList()\n vlan.id = int(id)\n ydk_obj.vlan_list.append(vlan)\n\n # instantiate crud service\n crud_service = CRUDService()\n\n if apply:\n\n # create netconf connection\n ncp = NetconfServiceProvider(self.device)\n\n if unconfig:\n crud_service.delete(ncp,ydk_obj)\n else:\n crud_service.create(ncp, ydk_obj)\n else:\n ydks = []\n\n if unconfig:\n ydks.append(YangConfig(device=self.device,\n ydk_obj=ydk_obj,\n ncp=NetconfServiceProvider,\n crud_service=crud_service.delete))\n else:\n ydks.append(YangConfig(device=self.device,\n ydk_obj=ydk_obj,\n ncp=NetconfServiceProvider,\n crud_service=crud_service.create))\n\n return ydks\n\n def build_unconfig(self, apply=True, 
attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n class AccessMapAttributes(ABC):\n\n def build_config(self, ydk_obj, apply=True, \n attributes=None, unconfig=False, **kwargs):\n assert not apply\n # instantiate crud service\n crud_service = CRUDService()\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n if unconfig:\n return YangConfig(device=self.device,\n ydk_obj=ydk_obj,\n ncp=NetconfServiceProvider,\n crud_service=crud_service.delete)\n else:\n return YangConfig(device=self.device,\n ydk_obj=ydk_obj,\n ncp=NetconfServiceProvider,\n crud_service=crud_service.create)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, \n unconfig=True, **kwargs)\n\n\n class VlanConfigurationAttributes(ABC):\n\n def build_config(self, ydk_obj, apply=True, \n attributes=None, unconfig=False, **kwargs):\n assert not apply\n # instantiate crud service\n crud_service = CRUDService()\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n if unconfig:\n return YangConfig(device=self.device,\n ydk_obj=ydk_obj,\n ncp=NetconfServiceProvider,\n crud_service=crud_service.delete)\n else:\n return YangConfig(device=self.device,\n ydk_obj=ydk_obj,\n ncp=NetconfServiceProvider,\n crud_service=crud_service.create)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, \n unconfig=True, **kwargs)\n\n\n class InterfaceAttributes(ABC):\n\n def build_config(self, ydk_obj, apply=True, \n attributes=None, unconfig=False, **kwargs):\n assert not apply\n # instantiate crud service\n crud_service = CRUDService()\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n if unconfig:\n return YangConfig(device=self.device,\n ydk_obj=ydk_obj,\n ncp=NetconfServiceProvider,\n crud_service=crud_service.delete)\n else:\n return YangConfig(device=self.device,\n ydk_obj=ydk_obj,\n ncp=NetconfServiceProvider,\n crud_service=crud_service.create)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, \n unconfig=True, **kwargs)\n\n", "id": "2559251", "language": "Python", "matching_score": 5.443981170654297, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/vlan/nxos/yang/vlan.py" }, { "content": "\n# Python\nfrom abc import ABC\n\n# Genie package\nfrom genie.conf.base.attributes import AttributesHelper\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base import Interface\nfrom genie.conf.base.config import YangConfig\ntry:\n from ydk.models.ydkmodels import Cisco_IOS_XR_ifmgr_cfg as xr_ifmgr_cfg\n from ydk.providers._provider_plugin import _ClientSPPlugin\n from ydk.types import DELETE, Empty\n from ydk.services import CRUDService\n from ydk.services import CodecService\n from ydk.providers import CodecServiceProvider\n # patch a netconf provider\n from ydk.providers import NetconfServiceProvider as _NetconfServiceProvider\n\n\n class NetconfServiceProvider(_NetconfServiceProvider):\n def __init__(self, device):\n if 'yang' not in device.mapping:\n # Want it, but dont have a connection? 
\n                raise Exception(\"Missing connection of \"\n                                \"type 'yang' in the device \"\n                                \"mapping '{map}'\".format(map=device.mapping))\n            alias = device.mapping['yang']\n            dev = device.connectionmgr.connections[alias]\n\n            super().__init__(address=str(dev.connection_info.ip),\n                             port=dev.connection_info.port,\n                             username=dev.connection_info.username,\n                             password=dev.connection_info.password,\n                             protocol = 'ssh')\n\n            self.sp_instance = _ClientSPPlugin(self.timeout,\n                                               use_native_client=False)\n\n            self.sp_instance._nc_manager = dev\n        def _connect(self, *args, **kwargs): pass\nexcept Exception:\n    pass\n\n\nclass Vlan(ABC):\n\n\n    class DeviceAttributes(ABC):\n\n        def build_config(self, apply=True, attributes=None, unconfig=False, \n                         **kwargs):\n            attributes = AttributesHelper(self, attributes)\n            configurations = CliConfigBuilder(unconfig=unconfig)\n            ydk_obj = xr_ifmgr_cfg.InterfaceConfigurations()\n\n            vlan_config = []\n\n            for sub, attributes2 in attributes.mapping_values(\n                'interface_attr', \n                keys=self.interface_attr):\n                vlan_config.extend(sub.build_config(apply=False, \n                                                    attributes=attributes2,\n                                                    unconfig=unconfig, \n                                                    ydk_obj=ydk_obj, \n                                                    **kwargs))\n\n            # instantiate crud service\n            crud_service = CRUDService()\n\n            if apply:\n\n                # create netconf connection\n                ncp = NetconfServiceProvider(self.device)\n\n                if unconfig:\n                    crud_service.delete(ncp,ydk_obj)\n                else:\n                    crud_service.create(ncp, ydk_obj)\n\n                for cfg in vlan_config:\n                    cfg.apply()\n            else:\n                ydks = []\n                for cfg in vlan_config:\n                    ydks.append(cfg)\n\n                return ydks\n\n        def build_unconfig(self, apply=True, attributes=None, **kwargs):\n            return self.build_config(apply=apply, attributes=attributes,\n                                     unconfig=True, **kwargs)\n\n    class AccessMapAttributes(ABC):\n\n        def build_config(self, ydk_obj, apply=True, \n                         attributes=None, unconfig=False, **kwargs):\n            assert not apply\n            # instantiate crud service\n            crud_service = CRUDService()\n            attributes = AttributesHelper(self, attributes)\n            configurations = CliConfigBuilder(unconfig=unconfig)\n\n            return str(configurations)\n\n        def build_unconfig(self, apply=True, attributes=None, **kwargs):\n            return self.build_config(apply=apply, attributes=attributes, \n                                     unconfig=True, **kwargs)\n\n\n    class VlanConfigurationAttributes(ABC):\n\n        def build_config(self, ydk_obj, apply=True, attributes=None, \n                         unconfig=False, **kwargs):\n            assert not apply\n            # instantiate crud service\n            crud_service = CRUDService()\n            attributes = AttributesHelper(self, attributes)\n            configurations = CliConfigBuilder(unconfig=unconfig)\n\n            return str(configurations)\n\n        def build_unconfig(self, apply=True, attributes=None, **kwargs):\n            return self.build_config(apply=apply, attributes=attributes, \n                                     unconfig=True, **kwargs)\n\n\n    class InterfaceAttributes(ABC):\n\n        def build_config(self, ydk_obj, apply=True, \n                         attributes=None, unconfig=False, **kwargs):\n            assert not apply\n            # instantiate crud service\n            crud_service = CRUDService()\n            attributes = AttributesHelper(self, attributes)\n            configurations = CliConfigBuilder(unconfig=unconfig)\n\n            if attributes.value('eth_encap_type1'):\n                self.interface.eth_encap_type1 = \\\n                    attributes.value('eth_encap_type1')\n\n            if attributes.value('eth_encap_val1'):\n                self.interface.eth_encap_val1 = \\\n                    attributes.value('eth_encap_val1')\n\n            if attributes.value('eth_encap_type2'):\n                self.interface.eth_encap_type2 = \\\n                    attributes.value('eth_encap_type2')\n\n            if attributes.value('eth_encap_val2'):\n                self.interface.eth_encap_val2 = \\\n                    attributes.value('eth_encap_val2')\n\n            if unconfig:\n                if attributes.attributes != None:\n                    
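# Only the explicitly requested attributes are unconfigured here; the else branch below clears all four encapsulation values\n                    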
vlan_config = self.interface.build_unconfig(apply=False, \n attributes=\\\n attributes.attributes)\n else:\n vlan_config = self.interface.build_unconfig(apply=False,\n attributes=\\\n {'eth_encap_type1': None,\n 'eth_encap_val1': None,\n 'eth_encap_type2': None, \n 'eth_encap_val2': None})\n else:\n vlan_config = self.interface.build_config(apply=False)\n\n return (vlan_config)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, \n unconfig=True, **kwargs)\n\n", "id": "9995671", "language": "Python", "matching_score": 5.411381244659424, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/vlan/iosxr/yang/vlan.py" }, { "content": "'''\n Interface classes for iosxe OS.\n'''\n\n__all__ = (\n 'Interface',\n 'PhysicalInterface',\n 'VirtualInterface',\n 'LoopbackInterface',\n 'EthernetInterface'\n)\n\nimport re\nimport contextlib\nimport abc\nimport weakref\nimport string\nfrom enum import Enum\n\nfrom genie.decorator import managedattribute\nfrom genie.conf.base import ConfigurableBase\nfrom genie.conf.base.exceptions import UnknownInterfaceTypeError\nfrom genie.conf.base.attributes import SubAttributes, KeyedSubAttributes, SubAttributesDict,\\\n AttributesHelper\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.config import YangConfig\n\nfrom genie.libs.conf.base import \\\n MAC, \\\n IPv4Address, IPv4Interface, \\\n IPv6Address, IPv6Interface\n\nfrom genie.libs.conf.l2vpn import PseudowireNeighbor\nfrom genie.libs.conf.l2vpn.pseudowire import EncapsulationType\n\nimport genie.libs.conf.interface\n\ntry:\n from ydk.models.ydkmodels import Cisco_IOS_XR_ifmgr_cfg as xr_ifmgr_cfg\n from ydk.types import DELETE, Empty\n from ydk.services import CRUDService\n from ydk.services import CodecService\n from ydk.providers import CodecServiceProvider\n # patch a netconf provider\n from ydk.providers import NetconfServiceProvider as _NetconfServiceProvider\n\n from ydk.providers._provider_plugin import _ClientSPPlugin\n\n class NetconfServiceProvider(_NetconfServiceProvider):\n\n def __init__(self, device):\n\n if 'yang' not in device.mapping:\n # Want it, but dont have a connection?\n raise Exception(\"Missing connection of \"\n \"type 'yang' in the device \"\n \"mapping '{map}'\".format(map=device.mapping))\n alias = device.mapping['yang']\n dev = device.connectionmgr.connections[alias]\n\n super().__init__(address=str(dev.connection_info.ip),\n port=dev.connection_info.port,\n username=dev.connection_info.username,\n password=dev.connection_info.password,\n protocol = 'ssh')\n\n self.sp_instance = _ClientSPPlugin(self.timeout,\n use_native_client=False)\n\n self.sp_instance._nc_manager = dev\n\n def _connect(self, *args, **kwargs): pass\nexcept Exception:\n pass\n\nclass ConfigurableInterfaceNamespace(ConfigurableBase):\n\n def __init__(self, interface=None):\n assert interface\n self._interface = interface\n\n _interface = None\n\n @property\n def interface(self):\n return self._interface\n\n @property\n def testbed(self):\n return self.interface.testbed\n\n @property\n def device(self):\n return self.interface.device\n\n\nclass Interface(genie.libs.conf.interface.Interface):\n \"\"\" base Interface class for IOS-XE devices\n \"\"\"\n\n def __new__(cls, *args, **kwargs):\n\n factory_cls = cls\n if cls is Interface:\n try:\n name = kwargs['name']\n except KeyError:\n raise TypeError('\\'name\\' argument missing')\n d_parsed = genie.libs.conf.interface.ParsedInterfaceName(\n name, 
kwargs.get('device', None))\n try:\n factory_cls = cls._name_to_class_map[d_parsed.type]\n except KeyError:\n pass\n\n if factory_cls is not cls:\n self = factory_cls.__new__(factory_cls, *args, **kwargs)\n elif super().__new__ is object.__new__:\n self = super().__new__(factory_cls)\n else:\n self = super().__new__(factory_cls, *args, **kwargs)\n return self\n\n bandwidth = managedattribute(\n name='bandwidth',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n description = managedattribute(\n name='description',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n mtu = managedattribute(\n name='mtu',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n shutdown = managedattribute(\n name='shutdown',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n with self._build_config_create_interface_submode_context(configurations):\n self._build_config_interface_submode(configurations=configurations, attributes=attributes, unconfig=unconfig)\n\n if apply:\n if configurations:\n self.device.configure(configurations, fail_invalid=True)\n else:\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply,\n attributes=attributes,\n unconfig=True, **kwargs)\n\n @abc.abstractmethod\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\nclass PhysicalInterface(Interface, genie.libs.conf.interface.PhysicalInterface):\n\n @abc.abstractmethod\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n\nclass VirtualInterface(Interface, genie.libs.conf.interface.VirtualInterface):\n\n @abc.abstractmethod\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n\nclass LoopbackInterface(VirtualInterface, genie.libs.conf.interface.LoopbackInterface):\n\n _interface_name_types = (\n 'Loopback',\n )\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n interface_configurations = xr_ifmgr_cfg.InterfaceConfigurations()\n interface_configuration = interface_configurations.InterfaceConfiguration()\n interface_configuration.active = \"act\"\n interface_configuration.interface_name = attributes.value('name')\n interface_configuration.interface_virtual = Empty()\n # name is a mandatory arguments\n #keep = string.digits\n #ydk_obj.name = int(''.join(i for i in attributes.value('name') if i in keep))\n\n if unconfig and attributes.iswildcard:\n pass\n else:\n vrf = attributes.value('vrf')\n if vrf:\n interface_configuration.vrf = vrf.name\n \n ipv4 = attributes.value('ipv4')\n if ipv4:\n primary = interface_configuration.ipv4_network.addresses.Primary()\n primary.address = str(ipv4.ip)\n primary.netmask = str(ipv4.netmask)\n interface_configuration.ipv4_network.addresses.primary = primary\n # instantiate crud service\n crud_service = CRUDService()\n if apply:\n\n # create netconf connection\n ncp = NetconfServiceProvider(self.device)\n\n\n if unconfig:\n crud_service.delete(ncp, interface_configuration)\n else:\n crud_service.create(ncp, interface_configuration)\n else:\n if unconfig:\n 
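# apply=False here: hand back a YangConfig wrapper so the caller can run the CRUD delete later\n                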
return YangConfig(device=self.device, unconfig=unconfig,\n ncp=NetconfServiceProvider,\n ydk_obj=interface_configuration,\n crud_service=crud_service.delete)\n else:\n return YangConfig(device=self.device, unconfig=unconfig,\n ncp=NetconfServiceProvider,\n ydk_obj=interface_configuration,\n crud_service=crud_service.create)\n\nclass EthernetInterface(PhysicalInterface, genie.libs.conf.interface.EthernetInterface):\n\n _interface_name_types = (\n 'Ethernet', # TODO verify\n 'FastEthernet',\n # TODO more?\n )\n\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n interface_configurations = xr_ifmgr_cfg.InterfaceConfigurations()\n interface_configuration = interface_configurations.InterfaceConfiguration()\n interface_configuration.active = \"act\"\n interface_configuration.interface_name = attributes.value('name')\n if unconfig and attributes.iswildcard:\n interface_configuration = DELETE()\n else:\n shutdown = attributes.value('shutdown')\n if shutdown is not None:\n if unconfig:\n # Special case: unconfiguring always applies shutdown\n interface_configuration.shutdown = Empty()\n elif shutdown:\n interface_configuration.shutdown = Empty()\n else:\n interface_configuration.shutdown = DELETE()\n\n ipv4 = attributes.value('ipv4')\n primary = interface_configuration.ipv4_network.addresses.Primary()\n if ipv4:\n primary.address = str(ipv4.ip)\n primary.netmask = str(ipv4.netmask)\n\n vrf = attributes.value('vrf')\n if vrf:\n interface_configuration.vrf = vrf.name\n\n # instantiate crud service\n crud_service = CRUDService()\n if apply:\n\n # create netconf connection\n ncp = NetconfServiceProvider(self.device)\n\n return crud_service.create(ncp, interface_configuration)\n else:\n return YangConfig(device=self.device, unconfig=unconfig,\n ncp=NetconfServiceProvider,\n interface_configuration=interface_configuration,\n crud_service=crud_service.create)\n\nclass GigabitEthernetInterface(PhysicalInterface):\n\n _interface_name_types = (\n 'GigabitEthernet',\n )\n\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n interface_configurations = xr_ifmgr_cfg.InterfaceConfigurations()\n # crud_service = CRUDService()\n # ncp = NetconfServiceProvider(self.device)\n # x = crud_service.read(ncp, interface_configurations)\n # abc = YangConfig(device=self.device, ydk_obj=x, ncp=ncp, crud_service=crud_service)\n # print(abc)\n interface_configuration = interface_configurations.InterfaceConfiguration()\n interface_configuration.active = \"act\"\n interface_configuration.interface_name = attributes.value('name')\n shutdown = attributes.value('shutdown')\n\n if shutdown is not None:\n if unconfig:\n # Special case: unconfiguring always applies shutdown\n interface_configuration.shutdown = Empty()\n elif shutdown:\n interface_configuration.shutdown = Empty()\n\n else:\n interface_configuration.shutdown = DELETE()\n\n vrf = attributes.value('vrf')\n if vrf:\n if unconfig:\n interface_configuration.vrf = DELETE()\n else:\n interface_configuration.vrf = vrf.name\n\n ipv4 = attributes.value('ipv4')\n if ipv4:\n primary = interface_configuration.ipv4_network.addresses.Primary()\n if unconfig:\n 
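# DELETE() flags these leaves for removal when the change is pushed over NETCONF\n                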
primary.address = DELETE()\n primary.netmask = DELETE()\n interface_configuration.ipv4_network.addresses.primary = primary\n else:\n primary.address = str(ipv4.ip)\n primary.netmask = str(ipv4.netmask)\n interface_configuration.ipv4_network.addresses.primary = primary\n\n # In Cisco-IOS-XR-l2-eth-infra-cfg.yang, augmentation section\n # augment \"/a1:interface-configurations/a1:interface-configuration\"\n # container ethernet-service is defined\n eth_encap_type1 = attributes.value('eth_encap_type1')\n eth_encap_val1 = attributes.value('eth_encap_val1')\n # eth_encap_type2 = attributes.value('eth_encap_type2')\n eth_encap_val2 = attributes.value('eth_encap_val2')\n\n if eth_encap_type1:\n interface_configuration.ethernet_service\\\n .local_traffic_default_encapsulation\\\n .outer_tag_type = eth_encap_type1\n\n if eth_encap_val1:\n interface_configuration.ethernet_service\\\n .local_traffic_default_encapsulation\\\n .outer_vlan_id = eth_encap_val1\n\n # if eth_encap_type2:\n # interface_configuration.encapsulation.encapsulation = \\\n # eth_encap_type2\n\n if eth_encap_val2:\n interface_configuration.ethernet_service\\\n .local_traffic_default_encapsulation\\\n .inner_vlan_id = eth_encap_val2\n\n # instantiate crud service\n crud_service = CRUDService()\n\n if apply:\n\n # create netconf connection\n ncp = NetconfServiceProvider(self.device)\n\n crud_service.create(ncp, interface_configuration)\n else:\n return YangConfig(device=self.device,\n ydk_obj=interface_configuration,\n ncp=NetconfServiceProvider,\n crud_service=crud_service.create)\n\nInterface._build_name_to_class_map()\n\n", "id": "12565388", "language": "Python", "matching_score": 6.735452651977539, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/interface/iosxr/yang/interface.py" }, { "content": "'''\n Interface classes for ios OS.\n'''\n\n__all__ = (\n 'Interface',\n 'PhysicalInterface',\n 'VirtualInterface',\n 'LoopbackInterface',\n 'EthernetInterface',\n 'SubInterface',\n)\n\nimport re\nimport contextlib\nimport abc\nfrom enum import Enum\n\nfrom genie.decorator import managedattribute\nfrom genie.conf.base import ConfigurableBase\nfrom genie.conf.base.exceptions import UnknownInterfaceTypeError\nfrom genie.conf.base.attributes import SubAttributes, KeyedSubAttributes, SubAttributesDict,\\\n AttributesHelper\nfrom genie.conf.base.config import CliConfig\nfrom genie.conf.base.cli import CliConfigBuilder\n\nfrom genie.libs.conf.base import \\\n MAC, \\\n IPv4Address, IPv4Interface, \\\n IPv6Address, IPv6Interface\n\nimport genie.libs.conf.interface\n\nclass ConfigurableInterfaceNamespace(ConfigurableBase):\n\n def __init__(self, interface=None):\n assert interface\n self._interface = interface\n\n _interface = None\n\n @property\n def interface(self):\n return self._interface\n\n @property\n def testbed(self):\n return self.interface.testbed\n\n @property\n def device(self):\n return self.interface.device\n\n\nclass Interface(genie.libs.conf.interface.Interface):\n \"\"\" base Interface class for IOS devices\n \"\"\"\n\n def __new__(cls, *args, **kwargs):\n\n factory_cls = cls\n if cls is Interface:\n try:\n name = kwargs['name']\n except KeyError:\n raise TypeError('\\'name\\' argument missing')\n d_parsed = genie.libs.conf.interface.ParsedInterfaceName(\n name, kwargs.get('device', None))\n if d_parsed.subintf:\n factory_cls = SubInterface\n else:\n try:\n factory_cls = cls._name_to_class_map[d_parsed.type]\n except KeyError:\n pass\n\n if factory_cls is not cls:\n self = factory_cls.__new__(factory_cls, 
*args, **kwargs)\n elif super().__new__ is object.__new__:\n self = super().__new__(factory_cls)\n else:\n self = super().__new__(factory_cls, *args, **kwargs)\n return self\n\n description = managedattribute(\n name='description',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n shutdown = managedattribute(\n name='shutdown',\n default=False,\n type=(None, managedattribute.test_istype(bool)))\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n with self._build_config_create_interface_submode_context(configurations):\n self._build_config_interface_submode(configurations=configurations, attributes=attributes, unconfig=unconfig)\n\n if apply:\n if configurations:\n self.device.configure(configurations, fail_invalid=True)\n else:\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply,\n attributes=attributes,\n unconfig=True, **kwargs)\n\n def _build_config_create_interface_submode_context(self, configurations):\n return configurations.submode_context('interface {}'.format(self.name))\n\n def _build_config_interface_submode(self, configurations, attributes, unconfig):\n\n # ios: interface {name} / description some line data\n v = attributes.value('description')\n if v:\n if v is True:\n pass # TODO Create a usefull default description\n configurations.append_line('description {}'.format(v))\n\n # ios: interface {name} / ip address 1.1.1.1 255.255.255.255 \n configurations.append_line(\n attributes.format('ip address {ipv4.ip} {ipv4.netmask}'))\n\n # ios: interface {name} / shutdown\n # enabled\n enabled = attributes.value('enabled')\n if enabled is not None:\n if enabled:\n config_cmd = 'no shutdown'\n unconfig_cmd = 'shutdown'\n else:\n config_cmd = 'shutdown'\n unconfig_cmd = 'no shutdown'\n configurations.append_line(\n attributes.format(config_cmd),\n unconfig_cmd=unconfig_cmd)\n # Compatibility\n else:\n shutdown = attributes.value('shutdown')\n if shutdown is not None:\n if unconfig:\n # Special case: unconfiguring always applies shutdown\n configurations.append_line('shutdown', raw=True)\n elif shutdown:\n configurations.append_line('shutdown', raw=True)\n else:\n configurations.append_line('no shutdown', raw=True)\n \n @abc.abstractmethod\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n\nclass PhysicalInterface(Interface, genie.libs.conf.interface.PhysicalInterface):\n\n @abc.abstractmethod\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n\nclass VirtualInterface(Interface, genie.libs.conf.interface.VirtualInterface):\n\n def _build_config_interface_submode(self, configurations, attributes, unconfig):\n\n # Virtual interfaces can be fully unconfigured\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n super()._build_config_interface_submode(configurations, attributes, unconfig)\n\n @abc.abstractmethod\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n\nclass LoopbackInterface(VirtualInterface, genie.libs.conf.interface.LoopbackInterface):\n\n _interface_name_types = (\n 'Loopback',\n )\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n\nclass EthernetInterface(PhysicalInterface, 
genie.libs.conf.interface.EthernetInterface):\n\n _interface_name_types = (\n 'Ethernet', # TODO verify\n 'FastEthernet',\n 'GigabitEthernet', # TODO verify\n 'TenGigabitEthernet', # TODO verify\n 'TwentyFiveGigabitEthernet', # TODO verify\n 'HundredGigabitEthernet', # TODO verify\n 'FortyGigabitEthernet', # TODO verify\n # TODO more?\n )\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n if unconfig and attributes.iswildcard:\n configurations.append_line('default interface {}'.format(self.name),raw=True)\n\n else:\n with self._build_config_create_interface_submode_context(configurations):\n self._build_config_interface_submode(configurations=configurations, attributes=attributes, unconfig=unconfig)\n\n if apply:\n if configurations:\n self.device.configure(configurations, fail_invalid=True)\n else:\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations)\n\n def _build_config_interface_submode(self, configurations, attributes, unconfig):\n\n super()._build_config_interface_submode(configurations=configurations,\n attributes=attributes,\n unconfig=unconfig)\n\n # ios: interface {name} / mac-address aaaa.bbbb.cccc\n configurations.append_line(attributes.format('mac-address {mac_address}'))\n\n # ios: interface {name} / negotiation auto\n v = attributes.value('auto_negotiation')\n if v is not None:\n if v:\n configurations.append_line('negotiation auto',unconfig_cmd = 'default negotiation auto')\n else:\n if not unconfig:\n configurations.append_line('no negotiation auto',unconfig_cmd = 'default negotiation auto')\n\nclass SubInterface(VirtualInterface,\n genie.libs.conf.interface.SubInterface):\n '''Class for ios sub-interfaces'''\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n\nInterface._build_name_to_class_map()", "id": "9631010", "language": "Python", "matching_score": 2.4121761322021484, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/interface/ios/interface.py" }, { "content": "#!/usr/bin/env python\n\nimport unittest\nfrom unittest.mock import Mock\n\nfrom ats.topology import Device as AtsDevice\nfrom ats.topology import Interface as AtsInterface\nfrom genie.conf.tests import TestCase\nfrom genie.conf import Genie\nfrom genie.conf.base import Testbed, Device, Link, Interface\nfrom genie.conf.base.interface import\\\n PhysicalInterface as geniePhysicalInterface,\\\n VirtualInterface as genieVirtualInterface,\\\n PseudoInterface as geniePseudoInterface,\\\n LoopbackInterface as genieLoopbackInterface\nfrom genie.libs.conf.interface import\\\n ParsedInterfaceName, \\\n Interface as xbuInterface,\\\n PhysicalInterface, PhysicalInterface as xbuPhysicalInterface,\\\n SubInterface, SubInterface as xbuSubInterface,\\\n VirtualInterface, VirtualInterface as xbuVirtualInterface,\\\n PseudoInterface, PseudoInterface as xbuPseudoInterface,\\\n EthernetInterface, EthernetInterface as xbuEthernetInterface,\\\n TunnelInterface, TunnelInterface as xbuTunnelInterface,\\\n TunnelTeInterface, TunnelTeInterface as xbuTunnelTeInterface,\\\n NamedTunnelTeInterface, NamedTunnelTeInterface as xbuNamedTunnelTeInterface\nfrom genie.libs.conf.interface.iosxr import\\\n Interface as iosxrInterface,\\\n PhysicalInterface as iosxrPhysicalInterface,\\\n SubInterface as iosxrSubInterface,\\\n VirtualInterface as 
iosxrVirtualInterface,\\\n PseudoInterface as iosxrPseudoInterface,\\\n EthernetInterface as iosxrEthernetInterface\nfrom genie.libs.conf.base import MAC, IPv4Interface, IPv6Interface\n\n\nclass test_interface(TestCase):\n\n def test_ParsedInterfaceName(self):\n\n # TODO net_module module rack slot instance port subport cpu rsip\n d_parsed = ParsedInterfaceName(' Loopback 0 ')\n self.assertEqual(d_parsed.type, 'Loopback')\n self.assertEqual(d_parsed.number, '0')\n self.assertEqual(d_parsed.subintf_sep, None)\n self.assertEqual(d_parsed.subintf, None)\n self.assertEqual(d_parsed.reconstruct(), 'Loopback0')\n d_parsed = ParsedInterfaceName('Ethernet0/0')\n self.assertEqual(d_parsed.type, 'Ethernet')\n self.assertEqual(d_parsed.number, '0/0')\n self.assertEqual(d_parsed.subintf_sep, None)\n self.assertEqual(d_parsed.subintf, None)\n self.assertEqual(d_parsed.reconstruct(), 'Ethernet0/0')\n d_parsed = ParsedInterfaceName('GigabitEthernet0/0/0/0')\n self.assertEqual(d_parsed.type, 'GigabitEthernet')\n self.assertEqual(d_parsed.number, '0/0/0/0')\n self.assertEqual(d_parsed.subintf_sep, None)\n self.assertEqual(d_parsed.subintf, None)\n self.assertEqual(d_parsed.reconstruct(), 'GigabitEthernet0/0/0/0')\n d_parsed = ParsedInterfaceName('MgmtEth0/RP0/CPU0/0')\n self.assertEqual(d_parsed.type, 'MgmtEth')\n self.assertEqual(d_parsed.number, '0/RP0/CPU0/0')\n self.assertEqual(d_parsed.subintf_sep, None)\n self.assertEqual(d_parsed.subintf, None)\n self.assertEqual(d_parsed.reconstruct(), 'MgmtEth0/RP0/CPU0/0')\n d_parsed = ParsedInterfaceName(' GigabitEthernet 0/0/0/0.0 ')\n self.assertEqual(d_parsed.type, 'GigabitEthernet')\n self.assertEqual(d_parsed.number, '0/0/0/0')\n self.assertEqual(d_parsed.subintf_sep, '.')\n self.assertEqual(d_parsed.subintf, '0')\n self.assertEqual(d_parsed.reconstruct(), 'GigabitEthernet0/0/0/0.0')\n d_parsed = ParsedInterfaceName('GigabitEthernet0/0/0/0:0')\n self.assertEqual(d_parsed.type, 'GigabitEthernet')\n self.assertEqual(d_parsed.number, '0/0/0/0')\n self.assertEqual(d_parsed.subintf_sep, ':')\n self.assertEqual(d_parsed.subintf, '0')\n self.assertEqual(d_parsed.reconstruct(), 'GigabitEthernet0/0/0/0:0')\n d_parsed = ParsedInterfaceName('tunnel-te1')\n self.assertEqual(d_parsed.type, 'tunnel-te')\n self.assertEqual(d_parsed.number, '1')\n self.assertEqual(d_parsed.subintf_sep, None)\n self.assertEqual(d_parsed.subintf, None)\n self.assertEqual(d_parsed.reconstruct(), 'tunnel-te1')\n d_parsed = ParsedInterfaceName('GCC0')\n self.assertEqual(d_parsed.type, 'GCC0')\n self.assertEqual(d_parsed.number, None)\n self.assertEqual(d_parsed.subintf_sep, None)\n self.assertEqual(d_parsed.subintf, None)\n self.assertEqual(d_parsed.reconstruct(), 'GCC0')\n d_parsed = ParsedInterfaceName('g0')\n self.assertEqual(d_parsed.type, 'g0')\n self.assertEqual(d_parsed.number, None)\n self.assertEqual(d_parsed.subintf_sep, None)\n self.assertEqual(d_parsed.subintf, None)\n self.assertEqual(d_parsed.reconstruct(), 'g0')\n d_parsed = ParsedInterfaceName('GCC1')\n self.assertEqual(d_parsed.type, 'GCC1')\n self.assertEqual(d_parsed.number, None)\n self.assertEqual(d_parsed.subintf_sep, None)\n self.assertEqual(d_parsed.subintf, None)\n self.assertEqual(d_parsed.reconstruct(), 'GCC1')\n d_parsed = ParsedInterfaceName('OTU3E20/0/0/0')\n self.assertEqual(d_parsed.type, 'OTU3E2')\n self.assertEqual(d_parsed.number, '0/0/0/0')\n self.assertEqual(d_parsed.subintf_sep, None)\n self.assertEqual(d_parsed.subintf, None)\n self.assertEqual(d_parsed.reconstruct(), 'OTU3E20/0/0/0')\n\n def 
test_init(self):\n\n Genie.testbed = Testbed()\n dev1 = Device(name='PE1', os='iosxr')\n\n with self.assertRaises(TypeError):\n intf1 = Interface()\n with self.assertRaises(TypeError):\n intf1 = Interface(device=dev1)\n intf1 = Interface(device=dev1, name='GigabitEthernet0/0/0/1')\n intf1.device = None # forget it!\n\n #ats_dev1 = AtsDevice(name='PE1')\n ats_dev1 = None\n ats_intf1 = AtsInterface(device=ats_dev1,\n name='GigabitEthernet0/0/0/1',\n os='iosxr',\n type='ethernet')\n intf1 = Interface(device=dev1,\n name='GigabitEthernet0/0/0/1')\n\n self.assertTrue(isinstance(intf1, Interface))\n self.assertTrue(isinstance(intf1, xbuInterface))\n self.assertTrue(isinstance(intf1, iosxrInterface))\n self.assertTrue(isinstance(intf1, geniePhysicalInterface))\n self.assertTrue(isinstance(intf1, xbuPhysicalInterface))\n self.assertTrue(isinstance(intf1, iosxrPhysicalInterface))\n #self.assertTrue(isinstance(intf1, EthernetInterface))\n self.assertTrue(isinstance(intf1, xbuEthernetInterface))\n self.assertTrue(isinstance(intf1, iosxrEthernetInterface))\n\n intf2 = Interface(device=intf1.device,\n name=intf1.name + '.1')\n\n self.assertTrue(isinstance(intf2, Interface))\n self.assertTrue(isinstance(intf2, xbuInterface))\n self.assertTrue(isinstance(intf2, iosxrInterface))\n self.assertTrue(isinstance(intf2, genieVirtualInterface))\n self.assertTrue(isinstance(intf2, xbuVirtualInterface))\n self.assertTrue(isinstance(intf2, iosxrVirtualInterface))\n #self.assertTrue(isinstance(intf2, genieSubInterface))\n self.assertTrue(isinstance(intf2, xbuSubInterface))\n self.assertTrue(isinstance(intf2, iosxrSubInterface))\n\n self.assertEqual(intf1.ipv4, None)\n self.assertEqual(intf1.ipv6, None)\n self.assertEqual(intf1.mac_address, None)\n self.assertEqual(intf1.burnin_mac_address, None)\n self.assertEqual(intf1.effective_mac_address, None)\n\n intf1.ipv4 = '1.2.3.4/24'\n self.assertEqual(intf1.ipv4, IPv4Interface('1.2.3.4/24'))\n self.assertTrue(isinstance(intf1.ipv4, IPv4Interface))\n\n intf1.ipv4 = None\n self.assertIs(intf1.ipv4, None)\n\n with self.assertRaises(ValueError):\n intf1.ipv4 = 'abc'\n if False:\n # TODO -- Setting without a prefix uses 32 bits!\n with self.assertRaises(ValueError):\n intf1.ipv4 = '1.2.3.4'\n else:\n intf1.ipv4 = '1.2.3.4'\n self.assertEqual(intf1.ipv4, IPv4Interface('1.2.3.4/32'))\n\n intf1.ipv6 = 'fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b/80'\n self.assertEqual(intf1.ipv6, IPv6Interface('fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b/80'))\n self.assertTrue(isinstance(intf1.ipv6, IPv6Interface))\n\n intf1.burnin_mac_address = 'a.b.c'\n self.assertTrue(isinstance(intf1.burnin_mac_address, MAC))\n self.assertEqual(intf1.mac_address, None)\n self.assertEqual(intf1.burnin_mac_address, MAC('a.b.c'))\n self.assertEqual(intf1.effective_mac_address, intf1.burnin_mac_address)\n del intf1.burnin_mac_address\n\n intf1.mac_address = 'a.b.c2'\n self.assertTrue(isinstance(intf1.mac_address, MAC))\n self.assertEqual(intf1.mac_address, MAC('a.b.c2'))\n self.assertEqual(intf1.burnin_mac_address, None)\n self.assertEqual(intf1.effective_mac_address, intf1.mac_address)\n del intf1.mac_address\n\n intf1.mac_address = 'a.b.c3'\n intf1.burnin_mac_address = 'a.b.c4'\n self.assertEqual(intf1.mac_address, MAC('a.b.c3'))\n self.assertEqual(intf1.burnin_mac_address, MAC('a.b.c4'))\n self.assertEqual(intf1.effective_mac_address, intf1.mac_address)\n del intf1.mac_address\n del intf1.burnin_mac_address\n\n\nif __name__ == '__main__':\n unittest.main()\n\n", "id": "3297774", "language": "Python", 
"matching_score": 2.628756284713745, "max_stars_count": 0, "path": "pkgs/conf-pkg/src/genie/libs/conf/interface/tests/test_interface.py" }, { "content": "#!/usr/bin/env python\n\nimport collections\nimport types\nimport unittest\nfrom unittest.mock import Mock\n\nfrom genie.conf import Genie\n\nfrom genie.libs.conf.address_family import AddressFamily\nfrom genie.libs.conf.base import MAC, IPv4Address, IPv6Address, IPv4Network\nfrom genie.libs.conf.base import RouteDistinguisher\nfrom genie.libs.conf.bgp import BgpIpv4UnicastRoute\nfrom genie.libs.conf.bgp import BgpL2vpnEvpnRoute\nfrom genie.libs.conf.evpn import ESI\n\n\nclass test_bgp_ipv4_unicast_route(unittest.TestCase):\n\n def test_init(self):\n\n rte_min = BgpIpv4UnicastRoute('0.0.0.0/0')\n rte_max = BgpIpv4UnicastRoute('255.255.255.255/32')\n\n rte = BgpIpv4UnicastRoute('1.2.3.4')\n self.assertIs(rte.af, AddressFamily.ipv4_unicast)\n self.assertEqual(rte.type, rte.Type.ip)\n self.assertEqual(rte.ip, IPv4Address('1.2.3.4'))\n self.assertEqual(rte.ip_network, IPv4Network('1.2.3.4/32'))\n self.assertEqual(rte.prefix_length, 32)\n self.assertEqual(str(rte), '1.2.3.4/32')\n rte2 = BgpIpv4UnicastRoute(rte)\n self.assertEqual(rte, rte2)\n self.assertTrue(rte_min < rte < rte_max)\n self.assertFalse(rte != rte2)\n\n rte = BgpIpv4UnicastRoute('1.2.3.0/24')\n self.assertIs(rte.af, AddressFamily.ipv4_unicast)\n self.assertEqual(rte.type, rte.Type.ip)\n self.assertEqual(rte.ip, IPv4Address('1.2.3.0'))\n self.assertEqual(rte.prefix_length, 24)\n self.assertEqual(rte.ip_network, IPv4Network('1.2.3.0/24'))\n self.assertEqual(str(rte), '1.2.3.0/24')\n rte2 = BgpIpv4UnicastRoute(rte)\n self.assertEqual(rte, rte2)\n self.assertTrue(rte_min < rte < rte_max)\n self.assertFalse(rte != rte2)\n\nclass test_bgp_l2vpn_evpn_route(unittest.TestCase):\n\n def test_init(self):\n\n rte_min = BgpL2vpnEvpnRoute('[1][0.0.0.0.0][0]')\n rte_max = BgpL2vpnEvpnRoute('[5][65535][128][ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff]')\n\n rte = BgpL2vpnEvpnRoute('[1][1.2.3.4.5][3]')\n self.assertIs(rte.af, AddressFamily.l2vpn_evpn)\n self.assertEqual(rte.type, rte.Type.ethernet_ad)\n self.assertEqual(rte.esi, ESI('0.1.0.2.0.3.0.4.0.5'))\n self.assertEqual(rte.rd, None)\n self.assertEqual(rte.eth_tag, 3)\n self.assertEqual(rte.mac_length, 0)\n self.assertEqual(rte.mac, None)\n self.assertEqual(rte.ip_length, 0)\n self.assertEqual(rte.ip, None)\n self.assertEqual(rte.prefix_length, 120)\n self.assertEqual(str(rte), '[1][0001.0002.0003.0004.0005][3]/120')\n rte2 = BgpL2vpnEvpnRoute(rte)\n self.assertEqual(rte, rte2)\n self.assertTrue(rte_min < rte < rte_max)\n self.assertFalse(rte != rte2)\n\n rte = BgpL2vpnEvpnRoute('[1][100:1][1.2.3.4.5][3]')\n self.assertIs(rte.af, AddressFamily.l2vpn_evpn)\n self.assertEqual(rte.type, rte.Type.ethernet_ad)\n self.assertEqual(rte.esi, ESI('0.1.0.2.0.3.0.4.0.5'))\n self.assertEqual(rte.rd, RouteDistinguisher('100:1'))\n self.assertEqual(rte.eth_tag, 3)\n self.assertEqual(rte.mac_length, 0)\n self.assertEqual(rte.mac, None)\n self.assertEqual(rte.ip_length, 0)\n self.assertEqual(rte.ip, None)\n self.assertEqual(rte.prefix_length, 184)\n self.assertEqual(str(rte), '[1][100:1][0001.0002.0003.0004.0005][3]/184')\n rte2 = BgpL2vpnEvpnRoute(rte)\n self.assertEqual(rte, rte2)\n self.assertTrue(rte_min < rte < rte_max)\n self.assertFalse(rte != rte2)\n\n rte = BgpL2vpnEvpnRoute('[2][3][48][a.b.c][0]')\n self.assertIs(rte.af, AddressFamily.l2vpn_evpn)\n self.assertEqual(rte.type, rte.Type.mac)\n self.assertEqual(rte.esi, None)\n self.assertEqual(rte.rd, 
None)\n        self.assertEqual(rte.eth_tag, 3)\n        self.assertEqual(rte.mac_length, 48)\n        self.assertEqual(rte.mac, MAC('a.b.c'))\n        self.assertEqual(rte.ip_length, 0)\n        self.assertEqual(rte.ip, None)\n        self.assertEqual(rte.prefix_length, 104)\n        self.assertEqual(str(rte), '[2][3][48][000a.000b.000c][0]/104')\n        rte2 = BgpL2vpnEvpnRoute(rte)\n        self.assertEqual(rte, rte2)\n        self.assertTrue(rte_min < rte < rte_max)\n        self.assertFalse(rte != rte2)\n\n        rte = BgpL2vpnEvpnRoute('[2][3][48][a.b.c][32][1.2.3.4]')\n        self.assertIs(rte.af, AddressFamily.l2vpn_evpn)\n        self.assertEqual(rte.type, rte.Type.mac)\n        self.assertEqual(rte.esi, None)\n        self.assertEqual(rte.rd, None)\n        self.assertEqual(rte.eth_tag, 3)\n        self.assertEqual(rte.mac_length, 48)\n        self.assertEqual(rte.mac, MAC('a.b.c'))\n        self.assertEqual(rte.ip_length, 32)\n        self.assertEqual(rte.ip, IPv4Address('1.2.3.4'))\n        self.assertEqual(rte.prefix_length, 136)\n        self.assertEqual(str(rte), '[2][3][48][000a.000b.000c][32][1.2.3.4]/136')\n        rte2 = BgpL2vpnEvpnRoute(rte)\n        self.assertEqual(rte, rte2)\n        self.assertTrue(rte_min < rte < rte_max)\n        self.assertFalse(rte != rte2)\n\n        rte = BgpL2vpnEvpnRoute('[2][3][48][a.b.c][128][1::2]')\n        self.assertIs(rte.af, AddressFamily.l2vpn_evpn)\n        self.assertEqual(rte.type, rte.Type.mac)\n        self.assertEqual(rte.esi, None)\n        self.assertEqual(rte.rd, None)\n        self.assertEqual(rte.eth_tag, 3)\n        self.assertEqual(rte.mac_length, 48)\n        self.assertEqual(rte.mac, MAC('a.b.c'))\n        self.assertEqual(rte.ip_length, 128)\n        self.assertEqual(rte.ip, IPv6Address('1::2'))\n        self.assertEqual(rte.prefix_length, 232)\n        self.assertEqual(str(rte), '[2][3][48][000a.000b.000c][128][1::2]/232')\n        rte2 = BgpL2vpnEvpnRoute(rte)\n        self.assertEqual(rte, rte2)\n        self.assertTrue(rte_min < rte < rte_max)\n        self.assertFalse(rte != rte2)\n\n        rte = BgpL2vpnEvpnRoute('[3][3][32][1.2.3.4]')\n        self.assertIs(rte.af, AddressFamily.l2vpn_evpn)\n        self.assertEqual(rte.type, rte.Type.inclusive_multicast)\n        self.assertEqual(rte.esi, None)\n        self.assertEqual(rte.rd, None)\n        self.assertEqual(rte.eth_tag, 3)\n        self.assertEqual(rte.mac_length, 0)\n        self.assertEqual(rte.mac, None)\n        self.assertEqual(rte.ip_length, 32)\n        self.assertEqual(rte.ip, IPv4Address('1.2.3.4'))\n        self.assertEqual(rte.prefix_length, 80)\n        self.assertEqual(str(rte), '[3][3][32][1.2.3.4]/80')\n        rte2 = BgpL2vpnEvpnRoute(rte)\n        self.assertEqual(rte, rte2)\n        self.assertTrue(rte_min < rte < rte_max)\n        self.assertFalse(rte != rte2)\n\n        rte = BgpL2vpnEvpnRoute('[3][3][128][1::2]')\n        self.assertIs(rte.af, AddressFamily.l2vpn_evpn)\n        self.assertEqual(rte.type, rte.Type.inclusive_multicast)\n        self.assertEqual(rte.esi, None)\n        self.assertEqual(rte.rd, None)\n        self.assertEqual(rte.eth_tag, 3)\n        self.assertEqual(rte.mac_length, 0)\n        self.assertEqual(rte.mac, None)\n        self.assertEqual(rte.ip_length, 128)\n        self.assertEqual(rte.ip, IPv6Address('1::2'))\n        self.assertEqual(rte.prefix_length, 176)\n        self.assertEqual(str(rte), '[3][3][128][1::2]/176')\n        rte2 = BgpL2vpnEvpnRoute(rte)\n        self.assertEqual(rte, rte2)\n        self.assertTrue(rte_min < rte < rte_max)\n        self.assertFalse(rte != rte2)\n\n        rte = BgpL2vpnEvpnRoute('[4][1.2.3.4.5]')\n        self.assertIs(rte.af, AddressFamily.l2vpn_evpn)\n        self.assertEqual(rte.type, rte.Type.ethernet_segment)\n        self.assertEqual(rte.esi, ESI('0.1.0.2.0.3.0.4.0.5'))\n        self.assertEqual(rte.rd, None)\n        self.assertEqual(rte.eth_tag, None)\n        self.assertEqual(rte.mac_length, 0)\n        self.assertEqual(rte.mac, None)\n        
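# A bare type-4 (Ethernet Segment) route encodes only the ESI; the MAC and IP fields stay empty\n        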
self.assertEqual(rte.ip_length, 0)\n        self.assertEqual(rte.ip, None)\n        self.assertEqual(rte.prefix_length, 88)\n        self.assertEqual(str(rte), '[4][0001.0002.0003.0004.0005]/88')\n        rte2 = BgpL2vpnEvpnRoute(rte)\n        self.assertEqual(rte, rte2)\n        self.assertTrue(rte_min < rte < rte_max)\n        self.assertFalse(rte != rte2)\n\n        rte = BgpL2vpnEvpnRoute('[4][1.2.3.4.5][32][1.2.3.4]')\n        self.assertIs(rte.af, AddressFamily.l2vpn_evpn)\n        self.assertEqual(rte.type, rte.Type.ethernet_segment)\n        self.assertEqual(rte.esi, ESI('0.1.0.2.0.3.0.4.0.5'))\n        self.assertEqual(rte.rd, None)\n        self.assertEqual(rte.eth_tag, None)\n        self.assertEqual(rte.mac_length, 0)\n        self.assertEqual(rte.mac, None)\n        self.assertEqual(rte.ip_length, 32)\n        self.assertEqual(rte.ip, IPv4Address('1.2.3.4'))\n        self.assertEqual(rte.prefix_length, 120)\n        self.assertEqual(str(rte), '[4][0001.0002.0003.0004.0005][32][1.2.3.4]/120')\n        rte2 = BgpL2vpnEvpnRoute(rte)\n        self.assertEqual(rte, rte2)\n        self.assertTrue(rte_min < rte < rte_max)\n        self.assertFalse(rte != rte2)\n\n        rte = BgpL2vpnEvpnRoute('[4][1.2.3.4.5][128][1::2]')\n        self.assertIs(rte.af, AddressFamily.l2vpn_evpn)\n        self.assertEqual(rte.type, rte.Type.ethernet_segment)\n        self.assertEqual(rte.esi, ESI('0.1.0.2.0.3.0.4.0.5'))\n        self.assertEqual(rte.rd, None)\n        self.assertEqual(rte.eth_tag, None)\n        self.assertEqual(rte.mac_length, 0)\n        self.assertEqual(rte.mac, None)\n        self.assertEqual(rte.ip_length, 128)\n        self.assertEqual(rte.ip, IPv6Address('1::2'))\n        self.assertEqual(rte.prefix_length, 216)\n        self.assertEqual(str(rte), '[4][0001.0002.0003.0004.0005][128][1::2]/216')\n        rte2 = BgpL2vpnEvpnRoute(rte)\n        self.assertEqual(rte, rte2)\n        self.assertTrue(rte_min < rte < rte_max)\n        self.assertFalse(rte != rte2)\n\n        rte = BgpL2vpnEvpnRoute('[5][3][32][1.2.3.4]')\n        self.assertIs(rte.af, AddressFamily.l2vpn_evpn)\n        self.assertEqual(rte.type, rte.Type.ip)\n        self.assertEqual(rte.esi, None)\n        self.assertEqual(rte.rd, None)\n        self.assertEqual(rte.eth_tag, 3)\n        self.assertEqual(rte.mac_length, 0)\n        self.assertEqual(rte.mac, None)\n        self.assertEqual(rte.ip_length, 32)\n        self.assertEqual(rte.ip, IPv4Address('1.2.3.4'))\n        self.assertEqual(rte.prefix_length, 80)\n        self.assertEqual(str(rte), '[5][3][32][1.2.3.4]/80')\n        rte2 = BgpL2vpnEvpnRoute(rte)\n        self.assertEqual(rte, rte2)\n        self.assertTrue(rte_min < rte < rte_max)\n        self.assertFalse(rte != rte2)\n\n        rte = BgpL2vpnEvpnRoute('[5][3][24][1.2.3.0]')\n        self.assertIs(rte.af, AddressFamily.l2vpn_evpn)\n        self.assertEqual(rte.type, rte.Type.ip)\n        self.assertEqual(rte.esi, None)\n        self.assertEqual(rte.rd, None)\n        self.assertEqual(rte.eth_tag, 3)\n        self.assertEqual(rte.mac_length, 0)\n        self.assertEqual(rte.mac, None)\n        self.assertEqual(rte.ip_length, 24)\n        self.assertEqual(rte.ip, IPv4Address('1.2.3.0'))\n        self.assertEqual(rte.prefix_length, 80)\n        self.assertEqual(str(rte), '[5][3][24][1.2.3.0]/80')\n        rte2 = BgpL2vpnEvpnRoute(rte)\n        self.assertEqual(rte, rte2)\n        self.assertTrue(rte_min < rte < rte_max)\n        self.assertFalse(rte != rte2)\n\n        rte = BgpL2vpnEvpnRoute('[5][3][128][1::2]')\n        self.assertIs(rte.af, AddressFamily.l2vpn_evpn)\n        self.assertEqual(rte.type, rte.Type.ip)\n        self.assertEqual(rte.esi, None)\n        self.assertEqual(rte.rd, None)\n        self.assertEqual(rte.eth_tag, 3)\n        self.assertEqual(rte.mac_length, 0)\n        self.assertEqual(rte.mac, None)\n        self.assertEqual(rte.ip_length, 128)\n        self.assertEqual(rte.ip, IPv6Address('1::2'))\n        self.assertEqual(rte.prefix_length, 
176)\n self.assertEqual(str(rte), '[5][3][128][1::2]/176')\n rte2 = BgpL2vpnEvpnRoute(rte)\n self.assertEqual(rte, rte2)\n self.assertTrue(rte_min < rte < rte_max)\n self.assertFalse(rte != rte2)\n\nif __name__ == '__main__':\n unittest.main()\n\n", "id": "5181403", "language": "Python", "matching_score": 4.365505218505859, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/bgp/tests/test_bgp_prefix.py" }, { "content": "\"\"\"BgpPrefix type implementation\n\"\"\"\n\n__all__ = (\n 'BgpRoute',\n 'BgpIpv4UnicastRoute',\n 'BgpL2vpnEvpnRoute',\n)\n\nimport abc\nfrom copy import copy\nimport re\nfrom enum import IntEnum, Enum\nimport functools\n\nfrom genie.decorator import managedattribute\nfrom genie.libs.conf.address_family import AddressFamily\nfrom genie.libs.conf.base import MAC\nfrom genie.libs.conf.base import RouteDistinguisher\nfrom genie.libs.conf.base import \\\n ip_address, \\\n ip_network, ip_network as _ip_network, \\\n IPv4Address, IPv6Address, \\\n IPv4Network\nfrom genie.libs.conf.evpn.esi import ESI\n\n\n@functools.total_ordering\nclass BgpRoute(abc.ABC):\n\n @staticmethod\n def create(af, *args, **kwargs):\n if af is AddressFamily.ipv4_unicast:\n cls = BgpIpv4UnicastRoute\n elif af is AddressFamily.l2vpn_evpn:\n cls = BgpL2vpnEvpnRoute\n else:\n raise NotImplementedError(af)\n return cls(*args, **kwargs)\n\n @property\n @abc.abstractmethod\n def af(self):\n raise NotImplementedError\n\n @property\n @abc.abstractmethod\n def type(self):\n raise NotImplementedError\n\n @property\n @abc.abstractmethod\n def prefix_length(self):\n raise NotImplementedError\n\n @abc.abstractmethod\n def __eq__(self, other):\n if not isinstance(other, BgpRoute):\n try:\n other = BgpRoute(other)\n except Exception:\n return NotImplemented\n return self.af == other.af\n\n @abc.abstractmethod\n def __lt__(self, other):\n if not isinstance(other, BgpRoute):\n try:\n other = BgpRoute(other)\n except Exception:\n return NotImplemented\n return self.af < other.af\n\n @abc.abstractmethod\n def __hash__(self):\n # TODO rest is mutable!\n return hash(self.af)\n\n def __repr__(self):\n return '%s(%r)' % (self.__class__.__name__, str(self))\n\n def __copy__(self):\n return self.__class__(self)\n\n @abc.abstractmethod\n def __str__(self):\n raise NotImplementedError\n\n\nclass BgpIpv4UnicastRouteType(IntEnum):\n ip = 1\n\n\nclass BgpIpv4UnicastRoute(BgpRoute):\n\n af = managedattribute(\n name='af',\n default=AddressFamily.ipv4_unicast,\n read_only=True)\n\n Type = BgpIpv4UnicastRouteType\n\n type = managedattribute(\n name='type',\n type=Type)\n\n ip = managedattribute(\n name='ip',\n default=None,\n type=(None, IPv4Address))\n\n prefix_length = managedattribute(\n name='prefix_length',\n type=int)\n\n @property\n def ip_network(self):\n return IPv4Network('{}/{}'.format(self.ip, self.prefix_length))\n\n @ip_network.setter\n def ip_network(self, value):\n ip_network = IPv4Network(value)\n self.ip = ip_network.network_address\n self.prefix_length = ip_network.prefixlen\n\n def __init__(self, value=None, **kwargs):\n if value is None:\n if 'type' not in kwargs:\n raise TypeError('type argument mandatory.')\n for attr in (\n 'type',\n 'prefix_length',\n 'ip',\n ):\n v = kwargs.pop(attr, None)\n if v is not None:\n setattr(self, attr, v)\n if kwargs:\n raise TypeError('Unexpected keyword arguments: {}'\\\n .format(', '.join(kwargs.keys())))\n return\n\n if kwargs:\n raise TypeError('Provide either value or kwargs, not both.')\n\n if isinstance(value, BgpIpv4UnicastRoute):\n # Copy constructor\n for 
attr in (\n 'type',\n 'prefix_length',\n 'ip',\n ):\n v = getattr(value, attr)\n if v is not None:\n setattr(self, attr, v)\n return\n\n if isinstance(value, (str, IPv4Address, IPv4Network)):\n # '1.2.3.4', '1.2.3.0/24'\n self.ip_network = IPv4Network(value)\n self.type = BgpIpv4UnicastRouteType.ip\n return\n\n raise TypeError(value)\n\n def __eq__(self, other):\n if not isinstance(other, BgpRoute):\n try:\n other = BgpRoute(other)\n except Exception:\n return NotImplemented\n supeq = super().__eq__(other)\n if not supeq or supeq is NotImplemented:\n return supeq\n assert isinstance(other, BgpIpv4UnicastRoute)\n return (\n self.type,\n self.prefix_length,\n self.ip,\n ) == (\n other.type,\n other.prefix_length,\n other.ip,\n )\n\n def __lt__(self, other):\n if not isinstance(other, BgpRoute):\n try:\n other = BgpRoute(other)\n except Exception:\n return NotImplemented\n suplt = super().__lt__(other)\n if suplt or not isinstance(other, BgpIpv4UnicastRoute):\n return suplt\n return (\n self.type,\n self.prefix_length,\n self.ip is not None, self.ip,\n ) < (\n other.type,\n other.prefix_length,\n other.ip is not None, other.ip,\n )\n\n def __hash__(self):\n return super().__hash__()\n\n def __str__(self):\n s = ''\n if self.type is BgpIpv4UnicastRouteType.ip:\n # 1.2.3.4/32\n s += '{self.ip_network}'\\\n .format(self=self)\n else:\n raise RuntimeError(self.type)\n return s\n\n\nclass BgpL2vpnEvpnRouteType(IntEnum):\n ethernet_ad = 1\n mac = 2\n inclusive_multicast = 3\n ethernet_segment = 4\n ip = 5\n\n\nclass BgpL2vpnEvpnRoute(BgpRoute):\n\n af = managedattribute(\n name='af',\n default=AddressFamily.l2vpn_evpn,\n read_only=True)\n\n Type = BgpL2vpnEvpnRouteType\n\n type = managedattribute(\n name='type',\n type=Type)\n\n esi = managedattribute(\n name='esi',\n default=None,\n type=(None, ESI))\n\n rd = managedattribute(\n name='rd',\n default=None,\n type=(None, RouteDistinguisher))\n\n eth_tag = managedattribute(\n name='eth_tag',\n default=None,\n type=(None, int))\n\n mac_length = managedattribute(\n name='mac_length',\n type=int)\n\n @mac_length.defaulter\n def mac_length(self):\n mac = self.mac\n if mac is None:\n return 0\n return 48\n\n mac = managedattribute(\n name='mac',\n default=None,\n type=(None, MAC))\n\n ip_length = managedattribute(\n name='ip_length',\n type=int)\n\n @ip_length.defaulter\n def ip_length(self):\n return self.ip_max_length\n\n @property\n def ip_max_length(self):\n ip = self.ip\n if ip is None:\n return 0\n if isinstance(ip, IPv4Address):\n return 32\n if isinstance(ip, IPv6Address):\n return 128\n raise RuntimeError(ip)\n\n ip = managedattribute(\n name='ip',\n default=None,\n type=(None, ip_address))\n\n @property\n def ip_network(self):\n return _ip_network('{}/{}'.format(self.ip, self.ip_length))\n\n @ip_network.setter\n def ip_network(self, value):\n ip_network = _ip_network(value)\n self.ip = ip_network.network_address\n self.ip_length = ip_network.prefixlen\n\n prefix_length = managedattribute(\n name='prefix_length',\n type=managedattribute.test_istype(int))\n\n @prefix_length.defaulter\n def prefix_length(self):\n if self.type is BgpL2vpnEvpnRouteType.ethernet_ad:\n return 120 + (64 if self.rd is not None else 0)\n if self.type is BgpL2vpnEvpnRouteType.mac:\n return 104 + self.ip_max_length\n if self.type is BgpL2vpnEvpnRouteType.inclusive_multicast:\n return 48 + self.ip_max_length\n if self.type is BgpL2vpnEvpnRouteType.ethernet_segment:\n return 88 + self.ip_max_length\n if self.type is BgpL2vpnEvpnRouteType.ip:\n return 48 + self.ip_max_length\n 
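# An unhandled route type here is a programming error in this module, not bad user input\n        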
raise RuntimeError(self.type)\n\n def __init__(self, value=None, **kwargs):\n if value is None:\n if 'type' not in kwargs:\n raise TypeError('type argument mandatory.')\n for attr in (\n 'type',\n 'esi',\n 'rd',\n 'eth_tag',\n 'ip_length',\n 'ip',\n 'mac_length',\n 'mac',\n 'prefix_length',\n ):\n v = kwargs.pop(attr, None)\n if v is not None:\n setattr(self, attr, v)\n if kwargs:\n raise TypeError('Unexpected keyword arguments: {}'\\\n .format(', '.join(kwargs.keys())))\n return\n\n if kwargs:\n raise TypeError('Provide either value or kwargs, not both.')\n\n if isinstance(value, BgpL2vpnEvpnRoute):\n # Copy constructor\n for attr in (\n 'type',\n 'esi',\n 'rd',\n 'eth_tag',\n 'ip_length',\n 'ip',\n 'mac_length',\n 'mac',\n 'prefix_length',\n ):\n v = getattr(value, attr)\n if v is not None:\n setattr(self, attr, v)\n return\n\n if isinstance(value, str):\n m = re.match(r'^\\[(?P<type>[0-9]+)\\]', value)\n if not m:\n raise ValueError(value)\n self.type = BgpL2vpnEvpnRouteType(int(m.group('type')))\n m = re.match(r'^(?P<value>.+)/(?P<prefix_length>\\d+)$', value)\n if m:\n value = m.group('value')\n self.prefix_length = int(m.group('prefix_length'))\n if self.type is BgpL2vpnEvpnRouteType.ethernet_ad:\n # [Type][ESI][ETag] -----> EVI AD Route\n # [Type][RD][ESI][ETag] -----> Per ES AD Route\n m = re.match(r'^'\n r'\\[[0-9]+\\]'\n r'(?:\\[(?P<rd>[^\\]]+)\\])?'\n r'\\[(?P<esi>[^\\]]+)\\]'\n r'\\[(?P<eth_tag>[0-9]+)\\]'\n r'$', value)\n if not m:\n raise ValueError(value)\n for attr, v in m.groupdict().items():\n setattr(self, attr, v)\n elif self.type is BgpL2vpnEvpnRouteType.mac:\n # [Type][ETag][MAC Len][MAC Addr][IP Addr Len][IP Addr]\n m = re.match(r'^'\n r'\\[[0-9]+\\]'\n r'\\[(?P<eth_tag>[0-9]+)\\]'\n r'\\[(?P<mac_length>[0-9]+)\\]'\n r'\\[(?P<mac>[A-Fa-f0-9.]+)\\]'\n r'\\[(?P<ip_length>[0-9]+)\\]'\n r'(?:\\[(?P<ip>[A-Fa-f0-9:.]+)\\])?'\n r'$', value)\n if not m:\n raise ValueError(value)\n for attr, v in m.groupdict().items():\n setattr(self, attr, v)\n elif self.type is BgpL2vpnEvpnRouteType.inclusive_multicast:\n # [Type][ETag][IP Addr Len][IP Addr]\n m = re.match(r'^'\n r'\\[[0-9]+\\]'\n r'\\[(?P<eth_tag>[0-9]+)\\]'\n r'\\[(?P<ip_length>[0-9]+)\\]'\n r'\\[(?P<ip>[A-Fa-f0-9:.]+)\\]'\n r'$', value)\n if not m:\n raise ValueError(value)\n for attr, v in m.groupdict().items():\n setattr(self, attr, v)\n elif self.type is BgpL2vpnEvpnRouteType.ethernet_segment:\n # [Type][ESI][IP Addr Len][IP Addr]\n m = re.match(r'^'\n r'\\[[0-9]+\\]'\n r'\\[(?P<esi>[^\\]]+)\\]'\n r'(?:'\n r'\\[(?P<ip_length>[0-9]+)\\]'\n r'\\[(?P<ip>[A-Fa-f0-9:.]+)\\]'\n r')?'\n r'$', value)\n if not m:\n raise ValueError(value)\n d = m.groupdict()\n if d['ip_length'] is None:\n d['ip_length'] = 0\n for attr, v in d.items():\n setattr(self, attr, v)\n elif self.type is BgpL2vpnEvpnRouteType.ip:\n # [Type][ETag][IP Addr Len][IP Addr]\n m = re.match(r'^'\n r'\\[[0-9]+\\]'\n r'\\[(?P<eth_tag>[0-9]+)\\]'\n r'\\[(?P<ip_length>[0-9]+)\\]'\n r'\\[(?P<ip>[A-Fa-f0-9:.]+)\\]'\n r'$', value)\n if not m:\n raise ValueError(value)\n for attr, v in m.groupdict().items():\n setattr(self, attr, v)\n else:\n raise RuntimeError(self.type)\n return\n\n raise TypeError(value)\n\n def __eq__(self, other):\n if not isinstance(other, BgpRoute):\n try:\n other = BgpRoute(other)\n except Exception:\n return NotImplemented\n supeq = super().__eq__(other)\n if not supeq or supeq is NotImplemented:\n return supeq\n assert isinstance(other, BgpL2vpnEvpnRoute)\n return (\n self.type,\n self.esi, self.rd, self.eth_tag,\n self.ip_length, self.ip,\n 
self.mac_length, self.mac,\n ) == (\n other.type,\n other.esi, other.rd, other.eth_tag,\n other.ip_length, other.ip,\n other.mac_length, other.mac,\n )\n\n def __lt__(self, other):\n if not isinstance(other, BgpRoute):\n try:\n other = BgpRoute(other)\n except Exception:\n return NotImplemented\n suplt = super().__lt__(other)\n if suplt or not isinstance(other, BgpL2vpnEvpnRoute):\n return suplt\n return (\n self.type,\n self.esi is not None, self.esi,\n self.rd is not None, self.rd,\n self.eth_tag is not None, self.eth_tag,\n self.ip_length, self.ip.version if self.ip is not None else 0, self.ip,\n self.mac_length, self.mac is not None, self.mac,\n ) < (\n other.type,\n other.esi is not None, other.esi,\n other.rd is not None, other.rd,\n other.eth_tag is not None, other.eth_tag,\n other.ip_length, other.ip.version if other.ip is not None else 0, other.ip,\n other.mac_length, other.mac is not None, other.mac,\n )\n\n def __hash__(self):\n return super().__hash__()\n\n def __str__(self):\n s = '[{self.type.value}]'.format(self=self)\n if self.type is BgpL2vpnEvpnRouteType.ethernet_ad:\n # [Type][ESI][ETag] -----> EVI AD Route\n # [Type][RD][ESI][ETag] -----> Per ES AD Route\n if self.rd is not None:\n s += '[{self.rd}]'.format(self=self)\n s += '[{self.esi:tx.xx.xx.xx.xx}][{self.eth_tag}]'\\\n .format(self=self)\n elif self.type is BgpL2vpnEvpnRouteType.mac:\n # [Type][ETag][MAC Len][MAC Addr][IP Addr Len][IP Addr]\n s += '[{self.eth_tag}][{self.mac_length}][{self.mac}]'\\\n '[{self.ip_length}]'.format(self=self)\n if self.ip_length:\n s += '[{self.ip}]'.format(self=self)\n elif self.type is BgpL2vpnEvpnRouteType.inclusive_multicast:\n # [Type][ETag][IP Addr Len][IP Addr]\n s += '[{self.eth_tag}]'\\\n '[{self.ip_length}][{self.ip}]'\\\n .format(self=self)\n elif self.type is BgpL2vpnEvpnRouteType.ethernet_segment:\n # [Type][ESI][IP Addr Len][IP Addr]\n s += '[{self.esi:tx.xx.xx.xx.xx}]'.format(self=self)\n if self.ip_length:\n s += '[{self.ip_length}][{self.ip}]'\\\n .format(self=self)\n elif self.type is BgpL2vpnEvpnRouteType.ip:\n # [Type][ETag][IP Addr Len][IP Addr]\n s += '[{self.eth_tag}]'\\\n '[{self.ip_length}][{self.ip}]'\\\n .format(self=self)\n else:\n raise RuntimeError(self.type)\n s += '/{}'.format(self.prefix_length)\n return s\n\n", "id": "3311642", "language": "Python", "matching_score": 3.070499897003174, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/bgp/bgp_prefix.py" }, { "content": "\nfrom enum import Enum\nimport functools\n\nfrom genie.decorator import managedattribute\nfrom genie.conf.base.attributes import KeyedSubAttributes\n\n\n@functools.total_ordering\nclass AFI(Enum):\n '''IANA Address Family Identifiers.\n\n value: Printable/readable/Cisco-ized string.\n iana_number: IANA-assigned numerical value.\n\n See: http://www.iana.org/assignments/address-family-numbers/address-family-numbers.xhtml\n '''\n\n ipv4 = ('ipv4', 1)\n ipv6 = ('ipv6', 2)\n l2vpn = ('l2vpn', 25)\n link_state = ('link-state', 16388)\n\n def __new__(cls, value, iana_number):\n e = object.__new__(cls)\n e._value_ = value\n e.iana_number = iana_number\n return e\n\n def __repr__(self):\n return '%s.%s' % (\n self.__class__.__name__, self._name_)\n\n def __int__(self):\n return self.iana_number\n\n def __eq__(self, other):\n if not isinstance(other, AddressFamily):\n return NotImplemented\n return self is other\n\n def __lt__(self, other):\n if not isinstance(other, AddressFamily):\n return NotImplemented\n # Alphabetical\n return self.value < other.value\n\n __hash__ = 
Enum.__hash__\n\n\n@functools.total_ordering\nclass SAFI(Enum):\n '''IANA Subsequent Address Family Identifiers.\n\n value: Printable/readable/Cisco-ized string.\n iana_number: IANA-assigned numerical value.\n\n See: http://www.iana.org/assignments/safi-namespace/safi-namespace.xhtml\n '''\n\n unicast = ('unicast', 1)\n multicast = ('multicast', 2)\n bothcast = ('bothcast', 3)\n labeled_unicast = ('labeled-unicast', 4)\n mvpn = ('mvpn', 5)\n l2vpn_mspw = ('l2vpn mspw', 6)\n tunnel = ('tunnel', 64)\n l2vpn_vpls_vpws = ('l2vpn vpls-vpws', 65)\n l2vpn_vpls = ('l2vpn vpls', 65)\n mdt = ('mdt', 66)\n l2vpn_evpn = ('l2vpn evpn', 70)\n link_state = ('link-state', 71)\n vpn_unicast = ('vpn unicast', 128)\n vpn_multicast = ('vpn multicast', 129)\n rt_filter = ('rt-filter', 132)\n flowspec = ('flowspec', 133)\n vpn_flowspec = ('vpn flowspec', 134)\n\n def __new__(cls, value, iana_number):\n e = object.__new__(cls)\n e._value_ = value\n e.iana_number = iana_number\n return e\n\n def __repr__(self):\n return '%s.%s' % (\n self.__class__.__name__, self._name_)\n\n def __int__(self):\n return self.iana_number\n\n def __eq__(self, other):\n if not isinstance(other, AddressFamily):\n return NotImplemented\n return self is other\n\n def __lt__(self, other):\n if not isinstance(other, AddressFamily):\n return NotImplemented\n # Alphabetical\n return self.value < other.value\n\n __hash__ = Enum.__hash__\n\n\n@functools.total_ordering\nclass AddressFamily(Enum):\n\n ipv4 = ('ipv4', AFI.ipv4, None)\n ipv6 = ('ipv6', AFI.ipv6, None)\n\n ipv4_flowspec = ('ipv4 flowspec', AFI.ipv4, SAFI.flowspec)\n ipv4_labeled_unicast = ('ipv4 labeled-unicast', AFI.ipv4, SAFI.labeled_unicast)\n ipv4_mdt = ('ipv4 mdt', AFI.ipv4, SAFI.mdt)\n ipv4_multicast = ('ipv4 multicast', AFI.ipv4, SAFI.multicast)\n ipv4_mvpn = ('ipv4 mvpn', AFI.ipv4, SAFI.mvpn)\n ipv4_rt_filter = ('ipv4 rt-filter', AFI.ipv4, SAFI.rt_filter)\n ipv4_tunnel = ('ipv4 tunnel', AFI.ipv4, SAFI.tunnel)\n ipv4_unicast = ('ipv4 unicast', AFI.ipv4, SAFI.unicast)\n ipv6_flowspec = ('ipv6 flowspec', AFI.ipv6, SAFI.flowspec)\n ipv6_labeled_unicast = ('ipv6 labeled-unicast', AFI.ipv6, SAFI.labeled_unicast)\n ipv6_multicast = ('ipv6 multicast', AFI.ipv6, SAFI.multicast)\n ipv6_mvpn = ('ipv6 mvpn', AFI.ipv6, SAFI.mvpn)\n ipv6_unicast = ('ipv6 unicast', AFI.ipv6, SAFI.unicast)\n l2vpn_evpn = ('l2vpn evpn', AFI.l2vpn, SAFI.l2vpn_evpn)\n l2vpn_mspw = ('l2vpn mspw', AFI.l2vpn, SAFI.l2vpn_mspw)\n l2vpn_vpls_vpws = ('l2vpn vpls-vpws', AFI.l2vpn, SAFI.l2vpn_vpls_vpws)\n l2vpn_vpls = ('l2vpn vpls', AFI.l2vpn, SAFI.l2vpn_vpls)\n link_state_link_state = ('link-state link-state', AFI.link_state, SAFI.link_state)\n link_state = ('link-state', AFI.link_state, None)\n vpnv4_flowspec = ('vpnv4 flowspec', AFI.ipv4, SAFI.vpn_flowspec)\n vpnv4_multicast = ('vpnv4 multicast', AFI.ipv4, SAFI.vpn_multicast)\n vpnv4_unicast = ('vpnv4 unicast', AFI.ipv4, SAFI.vpn_unicast)\n vpnv6_flowspec = ('vpnv6 flowspec', AFI.ipv6, SAFI.vpn_flowspec)\n vpnv6_multicast = ('vpnv6 multicast', AFI.ipv6, SAFI.vpn_multicast)\n vpnv6_unicast = ('vpnv6 unicast', AFI.ipv6, SAFI.vpn_unicast)\n\n def __new__(cls, value, afi, safi):\n e = object.__new__(cls)\n e._value_ = value\n e.afi = afi\n e.safi = safi\n return e\n\n def __repr__(self):\n return '%s.%s' % (\n self.__class__.__name__, self._name_)\n\n def __eq__(self, other):\n if not isinstance(other, AddressFamily):\n return NotImplemented\n return self is other\n\n def __lt__(self, other):\n if not isinstance(other, AddressFamily):\n return NotImplemented\n # 
Alphabetical\n return self.value < other.value\n\n __hash__ = Enum.__hash__\n\n\nclass AddressFamilySubAttributes(KeyedSubAttributes):\n\n address_family = managedattribute(\n name='address_family',\n read_only=True) # key\n\n def __init__(self, parent, key):\n self._address_family = key\n super().__init__(parent=parent)\n\n @classmethod\n def _sanitize_key(cls, key):\n if key is None:\n # allow indexing with a address_family=None\n pass\n else:\n key = AddressFamily(key)\n return key\n\n @classmethod\n def _assert_key_allowed(cls, key):\n if key is None:\n # allow indexing with a address_family=None\n pass\n elif key not in AddressFamily:\n raise KeyError(\n '{cls} only accepts AddressFamily values, not {key!r}'.\n format(cls=cls.__name__, key=key))\n allowed_keys = getattr(cls, 'allowed_keys', None)\n if allowed_keys is not None:\n if key not in allowed_keys:\n raise KeyError(\n '{cls} only accepts {allowed_keys}, not {key!r}'.\\\n format(cls=cls.__name__,\n allowed_keys=allowed_keys,\n key=key))\n\n", "id": "7387391", "language": "Python", "matching_score": 1.907874584197998, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/address_family/__init__.py" }, { "content": "\n__all__ = (\n 'IPv4Neighbor',\n 'IPv4LsrNeighbor',\n 'IPv6Neighbor',\n # Abstract classes:\n 'Neighbor',\n 'IPNeighbor',\n)\n\nimport functools\nimport abc\nimport ipaddress\nimport re\n\nfrom genie.decorator import managedattribute\nfrom genie.conf.base.attributes import KeyedSubAttributes\n\nfrom genie.libs.conf.base import ABCBase\n\n\n@functools.total_ordering\nclass Neighbor(ABCBase):\n '''Base for all Neighbor subclasses.'''\n\n @abc.abstractmethod\n def _neighbor_comparison_tokens(self):\n return (self.testbed,)\n\n def __hash__(self):\n '''Subclasses are encouraged to override.'''\n return 0\n\n def __eq__(self, other):\n if not isinstance(other, Neighbor):\n return NotImplemented\n return self._neighbor_comparison_tokens() \\\n == other._neighbor_comparison_tokens()\n\n def __lt__(self, other):\n if not isinstance(other, Neighbor):\n return NotImplemented\n return self._neighbor_comparison_tokens() \\\n < other._neighbor_comparison_tokens()\n\n\nclass IPNeighbor(Neighbor):\n\n ip = managedattribute(\n name='ip',\n read_only=True, # read-only hash key\n doc='''IPv4Address or IPv6Address (mandatory)''')\n\n def __new__(cls, *args, **kwargs):\n\n factory_cls = cls\n if cls is IPNeighbor:\n if not kwargs and len(args) == 1 \\\n and isinstance(args[0], IPNeighbor):\n # Copy constructor\n factory_cls = type(args[0])\n else:\n if not kwargs and len(args) == 1:\n ip = args[0]\n else:\n try:\n ip = kwargs['ip']\n except KeyError:\n raise TypeError('\\'ip\\' argument missing')\n if isinstance(ip, (ipaddress.IPv4Interface,\n ipaddress.IPv6Interface)):\n ip = ip.ip\n elif isinstance(ip, (ipaddress.IPv4Address,\n ipaddress.IPv6Address)):\n pass\n else:\n ip = ipaddress.ip_address(ip)\n if isinstance(ip, ipaddress.IPv4Address):\n factory_cls = IPv4Neighbor\n elif isinstance(ip, ipaddress.IPv6Address):\n factory_cls = IPv6Neighbor\n else:\n raise ValueError(ip)\n\n if factory_cls is not cls:\n self = factory_cls.__new__(factory_cls, *args, **kwargs)\n elif super().__new__ is object.__new__:\n self = super().__new__(factory_cls)\n else:\n self = super().__new__(factory_cls, *args, **kwargs)\n return self\n\n def __str__(self):\n return str(self.ip)\n\n def __repr__(self):\n return '{}(\\'{}\\')'.format(\n self.__class__.__name__,\n self.ip)\n\n def __init__(self, ip, **kwargs):\n if isinstance(ip, 
(ipaddress.IPv4Interface, ipaddress.IPv6Interface)):\n ip = ip.ip\n elif isinstance(ip, (ipaddress.IPv4Address, ipaddress.IPv6Address)):\n pass\n else:\n ip = ipaddress.ip_address(ip)\n self._ip = ip\n super().__init__(**kwargs)\n\n def _neighbor_comparison_tokens(self):\n return super()._neighbor_comparison_tokens() + (\n 'ip', self.ip.version, self.ip,\n )\n\n def __hash__(self):\n return hash(self.ip)\n\n\nclass IPv4Neighbor(IPNeighbor):\n\n def __init__(self, ip, **kwargs):\n if not kwargs:\n if isinstance(ip, IPv4Neighbor):\n # Copy constructor\n ip = ip.ip\n if type(ip) is not ipaddress.IPv4Address:\n if isinstance(ip, ipaddress.IPv4Interface):\n ip = ip.ip\n else:\n ip = ipaddress.IPv4Address(ip)\n super().__init__(ip=ip, **kwargs)\n\n\nclass IPv6Neighbor(IPNeighbor):\n\n def __init__(self, ip, **kwargs):\n if not kwargs:\n if isinstance(ip, IPv6Neighbor):\n # Copy constructor\n ip = ip.ip\n if type(ip) is not ipaddress.IPv6Address:\n if isinstance(ip, ipaddress.IPv6Interface):\n ip = ip.ip\n else:\n ip = ipaddress.IPv6Address(ip)\n super().__init__(ip=ip, **kwargs)\n\n\nclass IPv4LsrNeighbor(IPv4Neighbor):\n\n label_space = managedattribute(\n name='label_space',\n read_only=True)\n\n def __init__(self, ip, label_space=None, **kwargs):\n if label_space is None and not kwargs:\n if isinstance(ip, IPv4LsrNeighbor):\n # Copy constructor\n ip, label_space = ip.ip, ip.label_space\n elif type(ip) is str:\n m = re.match(r'^(?P<ip>\\d+\\.\\d+\\.\\d+\\.\\d+)(?::(?P<label_space>\\d+))?$', ip)\n if m:\n ip, label_space = m.groups()\n # all other cases should be handled in super().__init__\n if label_space is None:\n label_space = 0\n self._label_space = int(label_space)\n super().__init__(ip=ip, **kwargs)\n\n def __str__(self):\n return super().__str__() + ':' + str(self.label_space)\n\n def __repr__(self):\n return '{}(\\'{}:{}\\')'.format(\n self.__class__.__name__,\n self.ip,\n self.label_space)\n\n def _neighbor_comparison_tokens(self):\n return super()._neighbor_comparison_tokens() + (\n 'label_space', self.label_space,\n )\n\n\nclass IPv4NeighborSubAttributes(KeyedSubAttributes):\n\n neighbor = managedattribute(\n name='neighbor',\n read_only=True, # key\n doc='''IPv4Neighbor key''')\n\n def __init__(self, parent, key):\n self._neighbor = key\n super().__init__(parent=parent)\n\n @classmethod\n def _sanitize_key(cls, key):\n try:\n key = IPv4Neighbor(key)\n except ValueError:\n pass\n return key\n\n @classmethod\n def _assert_key_allowed(cls, key):\n if type(key) is not IPv4Neighbor:\n raise KeyError(\n '{cls} only accepts IPv4Neighbor types, not {key!r}'.\n format(cls=cls.__name__, key=key))\n\n\nclass IPv4LsrNeighborSubAttributes(KeyedSubAttributes):\n\n neighbor = managedattribute(\n name='neighbor',\n read_only=True, # key\n doc='''IPv4LsrNeighbor key''')\n\n def __init__(self, parent, key):\n self._neighbor = key\n super().__init__(parent=parent)\n\n @classmethod\n def _sanitize_key(cls, key):\n try:\n key = IPv4LsrNeighbor(key)\n except ValueError:\n pass\n return key\n\n @classmethod\n def _assert_key_allowed(cls, key):\n if type(key) is not IPv4LsrNeighbor:\n raise KeyError(\n '{cls} only accepts IPv4LsrNeighbor types, not {key!r}'.\n format(cls=cls.__name__, key=key))\n\n\nclass IPv6NeighborSubAttributes(KeyedSubAttributes):\n\n neighbor = managedattribute(\n name='neighbor',\n read_only=True, # key\n doc='''IPv6Neighbor key''')\n\n def __init__(self, parent, key):\n self._neighbor = key\n super().__init__(parent=parent)\n\n @classmethod\n def _sanitize_key(cls, key):\n try:\n 
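# Best-effort coercion: plain strings or ipaddress objects become an\n # IPv6Neighbor key; values that fail coercion are passed through unchanged\n # so that _assert_key_allowed below can reject them with a clear KeyError.\n 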
key = IPv6Neighbor(key)\n except ValueError:\n pass\n return key\n\n @classmethod\n def _assert_key_allowed(cls, key):\n if type(key) is not IPv6Neighbor:\n raise KeyError(\n '{cls} only accepts IPv6Neighbor types, not {key!r}'.\n format(cls=cls.__name__, key=key))\n\n\nclass IPNeighborSubAttributes(KeyedSubAttributes):\n\n neighbor = managedattribute(\n name='neighbor',\n read_only=True, # key\n doc='''IPNeighbor (IPv4Neighbor or IPv6Neighbor) key''')\n\n def __init__(self, parent, key):\n self._neighbor = key\n super().__init__(parent=parent)\n\n @classmethod\n def _sanitize_key(cls, key):\n try:\n key = IPv4Neighbor(key)\n except ValueError:\n try:\n key = IPv6Neighbor(key)\n except ValueError:\n pass\n return key\n\n @classmethod\n def _assert_key_allowed(cls, key):\n if type(key) not in (IPv4Neighbor, IPv6Neighbor):\n raise KeyError(\n '{cls} only accepts IPNeighbor (v4/v6) types, not {key!r}'.\n format(cls=cls.__name__, key=key))\n\n\nclass IPLsrNeighborSubAttributes(KeyedSubAttributes):\n\n neighbor = managedattribute(\n name='neighbor',\n read_only=True, # key\n doc='''IPv4LsrNeighbor or IPv6Neighbor key''')\n\n def __init__(self, parent, key):\n self._neighbor = key\n super().__init__(parent=parent)\n\n @classmethod\n def _sanitize_key(cls, key):\n try:\n key = IPv4LsrNeighbor(key)\n except ValueError:\n try:\n key = IPv6Neighbor(key)\n except ValueError:\n pass\n return key\n\n @classmethod\n def _assert_key_allowed(cls, key):\n if type(key) not in (IPv4LsrNeighbor, IPv6Neighbor):\n raise KeyError(\n '{cls} only accepts IPv4LsrNeighbor and IPv6Neighbor types, not {key!r}'.\n format(cls=cls.__name__, key=key))\n\n", "id": "6048333", "language": "Python", "matching_score": 2.617820978164673, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/base/neighbor.py" }, { "content": "\n__all__ = (\n 'IPv4Address',\n 'IPv4Interface',\n 'IPv4Network',\n 'IPv6Address',\n 'IPv6Interface',\n 'IPv6Network',\n 'ip_address',\n 'ip_interface',\n 'ip_network',\n 'IPv4AddressRange',\n 'IPv6AddressRange',\n 'IPv4InterfaceRange',\n 'IPv6InterfaceRange',\n)\n\nfrom ipaddress import IPv4Address, IPv4Interface, IPv4Network\nfrom ipaddress import IPv6Address, IPv6Interface, IPv6Network\nfrom ipaddress import ip_address, ip_interface, ip_network\n\nfrom genie.conf.base.utils import IPv4AddressRange, IPv6AddressRange\nfrom genie.conf.base.utils import IPv4InterfaceRange, IPv6InterfaceRange\n\n", "id": "7263572", "language": "Python", "matching_score": 1.3173409700393677, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/base/ipaddress.py" }, { "content": "\n__all__ = (\n 'MAC',\n 'MACRange',\n)\n\nfrom genie.conf.base.utils import MAC, MACRange\n\n", "id": "3462418", "language": "Python", "matching_score": 0.4207169711589813, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/base/mac.py" }, { "content": "\n__all__ = (\n 'MulticastGroup',\n )\n\nfrom genie.conf.base import Base\n\n\nclass MulticastGroup(Base):\n\n def __init__(self):\n raise NotImplementedError\n\n", "id": "5823698", "language": "Python", "matching_score": 0.9378209710121155, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/mcast/mcast_group.py" }, { "content": "__all__ = (\n #'ABCBaseMeta',\n 'ABCBase',\n)\n\nfrom genie.conf.base.base import Base as genieBase\nfrom abc import ABCMeta as _ABCMeta, ABC as _ABC\n\n\nclass ABCBaseMeta(_ABCMeta, type(genieBase)):\n '''Metaclass for declaring Abstract Base Classes (ABCs) derived from\n genie.conf.base's Base.'''\n pass\n\n\nclass 
ABCBase(genieBase, _ABC, metaclass=ABCBaseMeta):\n '''Class for declaring Abstract Base Classes (ABCs) derived from\n genie.conf.base's Base.'''\n pass\n\n", "id": "3719341", "language": "Python", "matching_score": 1.375993251800537, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/base/abc.py" }, { "content": "__all__ = (\n 'Routing',\n)\n\nfrom genie.conf.base import ConfigurableBase\n\nclass Routing(ConfigurableBase):\n \"\"\"Common base class for all routing features (Ospf, Isis, Bgp, ...)\"\"\"\n pass\n\n", "id": "7911121", "language": "Python", "matching_score": 1.0637032985687256, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/base/routing.py" }, { "content": "\n# Python\nimport unittest\nfrom unittest.mock import Mock\n\n# ATS\nfrom ats.topology import Device\n\n# Genie\nfrom genie.libs.ops.ospf.iosxr.ospf import Ospf\nfrom genie.libs.ops.ospf.iosxr.tests.ospf_output import OspfOutput\n\n# iosxr show_ospf\nfrom genie.libs.parser.iosxr.show_ospf import ShowOspfVrfAllInclusiveInterface,\\\n ShowOspfVrfAllInclusiveNeighborDetail,\\\n ShowOspfVrfAllInclusive,\\\n ShowOspfVrfAllInclusiveShamLinks,\\\n ShowOspfVrfAllInclusiveVirtualLinks,\\\n ShowOspfMplsTrafficEngLink,\\\n ShowOspfVrfAllInclusiveDatabaseRouter,\\\n ShowOspfVrfAllInclusiveDatabaseExternal,\\\n ShowOspfVrfAllInclusiveDatabaseNetwork,\\\n ShowOspfVrfAllInclusiveDatabaseSummary,\\\n ShowOspfVrfAllInclusiveDatabaseOpaqueArea\n\n# iosxr show_protocol\nfrom genie.libs.parser.iosxr.show_protocol import ShowProtocolsAfiAllAll\n\noutputs = {}\n\n# Set values\noutputs['show protocols afi-all all'] = OspfOutput.ShowProtocolsAfiAllAll\noutputs['show ospf vrf all-inclusive'] = OspfOutput.ShowOspfVrfAllInclusive\noutputs['show ospf vrf all-inclusive sham-links'] = OspfOutput.ShowOspfVrfAllInclusiveShamLinks\noutputs['show ospf vrf all-inclusive virtual-links'] = OspfOutput.ShowOspfVrfAllInclusiveVirtualLinks\noutputs['show ospf mpls traffic-eng link'] = OspfOutput.ShowOspfMplsTrafficEngLink\noutputs['show ospf vrf all-inclusive database router'] = OspfOutput.ShowOspfVrfAllInclusiveDatabaseRouter\noutputs['show ospf vrf all-inclusive database external'] = OspfOutput.ShowOspfVrfAllInclusiveDatabaseExternal\noutputs['show ospf vrf all-inclusive database summary'] = OspfOutput.ShowOspfVrfAllInclusiveDatabaseSummary\noutputs['show ospf vrf all-inclusive database network'] = OspfOutput.ShowOspfVrfAllInclusiveDatabaseNetwork\noutputs['show ospf vrf all-inclusive database opaque-area'] = OspfOutput.ShowOspfVrfAllInclusiveDatabaseOpaqueArea\noutputs['show ospf vrf all-inclusive interface'] = OspfOutput.ShowOspfVrfAllInclusiveInterface\noutputs['show ospf vrf all-inclusive neighbors detail'] = OspfOutput.ShowOspfVrfAllInclusiveNeighborDetail\n\n\ndef mapper(key):\n return outputs[key]\n\n\nclass test_ospf(unittest.TestCase):\n\n def setUp(self):\n self.device = Device(name='aDevice')\n self.device.os = 'iosxr'\n self.device.mapping={}\n self.device.mapping['cli']='cli'\n # Give the device as a connection type\n # This is done in order to call the parser on the output provided\n self.device.connectionmgr.connections['cli'] = self.device\n\n def test_custom_output(self):\n self.maxDiff = None\n ospf = Ospf(device=self.device)\n\n # Set outputs\n ospf.maker.outputs[ShowProtocolsAfiAllAll] = {\n '': OspfOutput.ShowProtocolsAfiAllAll}\n ospf.maker.outputs[ShowOspfVrfAllInclusive] = {\n \"{'vrf':'VRF1'}\": OspfOutput.ShowOspfVrfAllInclusive_custom}\n ospf.maker.outputs[ShowOspfVrfAllInclusiveShamLinks] = {\n 
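# Note: maker.outputs is keyed by parser class, and each value maps the\n # stringified parser arguments to canned output; these \"{'vrf':'VRF1'}\"\n # keys must line up with the arguments passed to ospf.learn() below.\n 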
\"{'vrf':'VRF1'}\": OspfOutput.ShowOspfVrfAllInclusiveShamLinks}\n ospf.maker.outputs[ShowOspfVrfAllInclusiveVirtualLinks] = {\n \"{'vrf':'VRF1'}\": OspfOutput.ShowOspfVrfAllInclusiveVirtualLinks}\n ospf.maker.outputs[ShowOspfMplsTrafficEngLink] = {\n '': OspfOutput.ShowOspfMplsTrafficEngLink}\n ospf.maker.outputs[ShowOspfVrfAllInclusiveDatabaseRouter] = {\n \"{'vrf':'VRF1'}\": OspfOutput.ShowOspfVrfAllInclusiveDatabaseRouter_custom}\n ospf.maker.outputs[ShowOspfVrfAllInclusiveDatabaseExternal] = {\n \"{'vrf':'VRF1'}\": OspfOutput.ShowOspfVrfAllInclusiveDatabaseExternal_custom}\n ospf.maker.outputs[ShowOspfVrfAllInclusiveDatabaseNetwork] = {\n \"{'vrf':'VRF1'}\": OspfOutput.ShowOspfVrfAllInclusiveDatabaseNetwork_custom}\n ospf.maker.outputs[ShowOspfVrfAllInclusiveDatabaseSummary] = {\n \"{'vrf':'VRF1'}\": OspfOutput.ShowOspfVrfAllInclusiveDatabaseSummary_custom}\n ospf.maker.outputs[ShowOspfVrfAllInclusiveDatabaseOpaqueArea] = {\n \"{'vrf':'VRF1'}\": OspfOutput.ShowOspfVrfAllInclusiveDatabaseOpaqueArea_custom}\n ospf.maker.outputs[ShowOspfVrfAllInclusiveInterface] = {\n \"{'interface':'GigabitEthernet0/0/0/1','vrf':'VRF1'}\": OspfOutput.ShowOspfVrfAllInclusiveInterface_custom}\n ospf.maker.outputs[ShowOspfVrfAllInclusiveNeighborDetail] = {\n \"{'interface':'GigabitEthernet0/0/0/1','neighbor':'10.36.3.3','vrf':'VRF1'}\":\n OspfOutput.ShowOspfVrfAllInclusiveNeighborDetail_custom}\n\n # Return outputs above as inputs to parser when called\n self.device.execute = Mock()\n self.device.execute.side_effect = mapper\n\n # Learn the feature\n ospf.learn(vrf='VRF1', interface='GigabitEthernet0/0/0/1', neighbor='10.36.3.3')\n\n # Verify Ops was created successfully\n self.assertEqual(ospf.info, OspfOutput.OspfInfo_custom)\n\n def test_complete_output(self):\n self.maxDiff = None\n ospf = Ospf(device=self.device)\n \n # Set outputs\n ospf.maker.outputs[ShowProtocolsAfiAllAll] = {'':OspfOutput.ShowProtocolsAfiAllAll}\n ospf.maker.outputs[ShowOspfVrfAllInclusive] = {\"{'vrf':''}\":OspfOutput.ShowOspfVrfAllInclusive}\n ospf.maker.outputs[ShowOspfVrfAllInclusiveShamLinks] = {\"{'vrf':''}\":OspfOutput.ShowOspfVrfAllInclusiveShamLinks}\n ospf.maker.outputs[ShowOspfVrfAllInclusiveVirtualLinks] = {\"{'vrf':''}\":OspfOutput.ShowOspfVrfAllInclusiveVirtualLinks}\n ospf.maker.outputs[ShowOspfMplsTrafficEngLink] = {'':OspfOutput.ShowOspfMplsTrafficEngLink}\n ospf.maker.outputs[ShowOspfVrfAllInclusiveDatabaseRouter] = {\"{'vrf':''}\":OspfOutput.ShowOspfVrfAllInclusiveDatabaseRouter}\n ospf.maker.outputs[ShowOspfVrfAllInclusiveDatabaseExternal] = {\"{'vrf':''}\":OspfOutput.ShowOspfVrfAllInclusiveDatabaseExternal}\n ospf.maker.outputs[ShowOspfVrfAllInclusiveDatabaseNetwork] = {\"{'vrf':''}\":OspfOutput.ShowOspfVrfAllInclusiveDatabaseNetwork}\n ospf.maker.outputs[ShowOspfVrfAllInclusiveDatabaseSummary] = {\"{'vrf':''}\":OspfOutput.ShowOspfVrfAllInclusiveDatabaseSummary}\n ospf.maker.outputs[ShowOspfVrfAllInclusiveDatabaseOpaqueArea] = {\"{'vrf':''}\":OspfOutput.ShowOspfVrfAllInclusiveDatabaseOpaqueArea}\n ospf.maker.outputs[ShowOspfVrfAllInclusiveInterface] = {\"{'interface':'','vrf':''}\":OspfOutput.ShowOspfVrfAllInclusiveInterface}\n ospf.maker.outputs[ShowOspfVrfAllInclusiveNeighborDetail] = {\"{'interface':'','neighbor':'','vrf':''}\":OspfOutput.ShowOspfVrfAllInclusiveNeighborDetail}\n\n # Return outputs above as inputs to parser when called\n self.device.execute = Mock()\n self.device.execute.side_effect = mapper\n\n # Learn the feature\n ospf.learn()\n\n # Verify Ops was created successfully\n 
self.assertEqual(ospf.info, OspfOutput.OspfInfo)\n\n def test_selective_attribute(self):\n self.maxDiff = None\n ospf = Ospf(device=self.device)\n \n # Set outputs\n ospf.maker.outputs[ShowProtocolsAfiAllAll] = {'':OspfOutput.ShowProtocolsAfiAllAll}\n ospf.maker.outputs[ShowOspfVrfAllInclusive] = {\"{'vrf':''}\":OspfOutput.ShowOspfVrfAllInclusive}\n ospf.maker.outputs[ShowOspfVrfAllInclusiveShamLinks] = {\"{'vrf':''}\":OspfOutput.ShowOspfVrfAllInclusiveShamLinks}\n ospf.maker.outputs[ShowOspfVrfAllInclusiveVirtualLinks] = {\"{'vrf':''}\":OspfOutput.ShowOspfVrfAllInclusiveVirtualLinks}\n ospf.maker.outputs[ShowOspfMplsTrafficEngLink] = {'':OspfOutput.ShowOspfMplsTrafficEngLink}\n ospf.maker.outputs[ShowOspfVrfAllInclusiveDatabaseRouter] = {\"{'vrf':''}\":OspfOutput.ShowOspfVrfAllInclusiveDatabaseRouter}\n ospf.maker.outputs[ShowOspfVrfAllInclusiveDatabaseExternal] = {\"{'vrf':''}\":OspfOutput.ShowOspfVrfAllInclusiveDatabaseExternal}\n ospf.maker.outputs[ShowOspfVrfAllInclusiveDatabaseNetwork] = {\"{'vrf':''}\":OspfOutput.ShowOspfVrfAllInclusiveDatabaseNetwork}\n ospf.maker.outputs[ShowOspfVrfAllInclusiveDatabaseSummary] = {\"{'vrf':''}\":OspfOutput.ShowOspfVrfAllInclusiveDatabaseSummary}\n ospf.maker.outputs[ShowOspfVrfAllInclusiveDatabaseOpaqueArea] = {\"{'vrf':''}\":OspfOutput.ShowOspfVrfAllInclusiveDatabaseOpaqueArea}\n ospf.maker.outputs[ShowOspfVrfAllInclusiveInterface] = {\"{'interface':'','vrf':''}\":OspfOutput.ShowOspfVrfAllInclusiveInterface}\n ospf.maker.outputs[ShowOspfVrfAllInclusiveNeighborDetail] = {\"{'interface':'','neighbor':'','vrf':''}\":OspfOutput.ShowOspfVrfAllInclusiveNeighborDetail}\n\n # Return outputs above as inputs to parser when called\n self.device.execute = Mock()\n self.device.execute.side_effect = mapper\n\n # Learn the feature\n ospf.learn()\n\n # Check selective attribute\n self.assertEqual(1713, ospf.info['vrf']['VRF1']['address_family']\\\n ['ipv4']['instance']['1']['areas']\\\n ['0.0.0.1']['database']['lsa_types'][1]\\\n ['lsas']['10.229.11.11 10.229.11.11']\\\n ['ospfv2']['header']['age'])\n\n def test_empty_output(self):\n self.maxDiff = None\n ospf = Ospf(device=self.device)\n \n # Set outputs\n ospf.maker.outputs[ShowProtocolsAfiAllAll] = {'':{}}\n ospf.maker.outputs[ShowOspfVrfAllInclusive] = {\"{'vrf':''}\":{}}\n ospf.maker.outputs[ShowOspfVrfAllInclusiveShamLinks] = {\"{'vrf':''}\":{}}\n ospf.maker.outputs[ShowOspfVrfAllInclusiveVirtualLinks] = {\"{'vrf':''}\":{}}\n ospf.maker.outputs[ShowOspfMplsTrafficEngLink] = {'':{}}\n ospf.maker.outputs[ShowOspfVrfAllInclusiveDatabaseRouter] = {\"{'vrf':''}\":{}}\n ospf.maker.outputs[ShowOspfVrfAllInclusiveDatabaseExternal] = {\"{'vrf':''}\":{}}\n ospf.maker.outputs[ShowOspfVrfAllInclusiveDatabaseNetwork] = {\"{'vrf':''}\":{}}\n ospf.maker.outputs[ShowOspfVrfAllInclusiveDatabaseSummary] = {\"{'vrf':''}\":{}}\n ospf.maker.outputs[ShowOspfVrfAllInclusiveDatabaseOpaqueArea] = {\"{'vrf':''}\":{}}\n ospf.maker.outputs[ShowOspfVrfAllInclusiveInterface] = {\"{'interface':'','vrf':''}\":{}}\n ospf.maker.outputs[ShowOspfVrfAllInclusiveNeighborDetail] = {\"{'interface':'','neighbor':'','vrf':''}\":{}}\n\n # Return outputs above as inputs to parser when called\n self.device.execute = Mock()\n self.device.execute.side_effect = mapper\n\n # Learn the feature\n ospf.learn()\n\n # Verify attribute is missing\n with self.assertRaises(AttributeError):\n ospf.info['vrf']\n\n\n def test_missing_attributes(self):\n self.maxDiff = None\n ospf = Ospf(device=self.device)\n \n # Set outputs\n 
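# ShowProtocolsAfiAllAll alone is left empty ({'':{}}); without that output\n # the 'preference' keys are never built, which is what the assertRaises\n # check at the end of this test exercises.\n 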
ospf.maker.outputs[ShowProtocolsAfiAllAll] = {'':{}}\n ospf.maker.outputs[ShowOspfVrfAllInclusive] = {\"{'vrf':''}\":OspfOutput.ShowOspfVrfAllInclusive}\n ospf.maker.outputs[ShowOspfVrfAllInclusiveShamLinks] = {\"{'vrf':''}\":OspfOutput.ShowOspfVrfAllInclusiveShamLinks}\n ospf.maker.outputs[ShowOspfVrfAllInclusiveVirtualLinks] = {\"{'vrf':''}\":OspfOutput.ShowOspfVrfAllInclusiveVirtualLinks}\n ospf.maker.outputs[ShowOspfMplsTrafficEngLink] = {'':OspfOutput.ShowOspfMplsTrafficEngLink}\n ospf.maker.outputs[ShowOspfVrfAllInclusiveDatabaseRouter] = {\"{'vrf':''}\":OspfOutput.ShowOspfVrfAllInclusiveDatabaseRouter}\n ospf.maker.outputs[ShowOspfVrfAllInclusiveDatabaseExternal] = {\"{'vrf':''}\":OspfOutput.ShowOspfVrfAllInclusiveDatabaseExternal}\n ospf.maker.outputs[ShowOspfVrfAllInclusiveDatabaseNetwork] = {\"{'vrf':''}\":OspfOutput.ShowOspfVrfAllInclusiveDatabaseNetwork}\n ospf.maker.outputs[ShowOspfVrfAllInclusiveDatabaseSummary] = {\"{'vrf':''}\":OspfOutput.ShowOspfVrfAllInclusiveDatabaseSummary}\n ospf.maker.outputs[ShowOspfVrfAllInclusiveDatabaseOpaqueArea] = {\"{'vrf':''}\":OspfOutput.ShowOspfVrfAllInclusiveDatabaseOpaqueArea}\n ospf.maker.outputs[ShowOspfVrfAllInclusiveInterface] = {\"{'interface':'','vrf':''}\":OspfOutput.ShowOspfVrfAllInclusiveInterface}\n ospf.maker.outputs[ShowOspfVrfAllInclusiveNeighborDetail] = {\"{'interface':'','neighbor':'','vrf':''}\":OspfOutput.ShowOspfVrfAllInclusiveNeighborDetail}\n\n # Return outputs above as inputs to parser when called\n self.device.execute = Mock()\n self.device.execute.side_effect = mapper\n\n # Learn the feature\n ospf.learn()\n\n # Verify key not created due to output missing\n with self.assertRaises(KeyError):\n single_value_preference = ospf.info['vrf']['default']\\\n ['address_family']['ipv4']['instance']\\\n ['1']['preference']['single_value']['all']\n\n\nif __name__ == '__main__':\n unittest.main()\n", "id": "5343240", "language": "Python", "matching_score": 2.8465120792388916, "max_stars_count": 0, "path": "pkgs/ops-pkg/src/genie/libs/ops/ospf/iosxr/tests/test_ospf.py" }, { "content": "\"\"\"Common get info functions for OSPF\"\"\"\n\n# Python\nimport os\nimport logging\nimport re\n\n# pyATS\nfrom ats.easypy import runtime\nfrom ats.utils.objects import find, R\n\n# Genie\nfrom genie.utils.config import Config\nfrom genie.metaparser.util.exceptions import SchemaEmptyParserError\nfrom genie.utils.timeout import Timeout\nfrom genie.libs.sdk.libs.utils.normalize import GroupKeys\n\n# LOGGING\nfrom genie.libs.sdk.apis.iosxe.logging.get import get_logging_logs\n\n# Utils\nfrom genie.libs.sdk.apis.iosxe.startup_config.get import (\n get_startup_config_dict,\n)\nfrom genie.libs.sdk.apis.iosxe.running_config.get import (\n get_running_config_section_dict,\n)\n\nlog = logging.getLogger(__name__)\n\n\ndef get_ospf_router_self_originate_metric(device, vrf, ospf_process_id):\n \"\"\" Get the OSPF advertised metric value\n\n Args:\n device ('obj'): Device object\n vrf (`str`): Vrf value - Default\n ospf_process_id (`int`): OSPF process ID\n\n Returns:\n Dictionary for metric\n\n Raises:\n SchemaEmptyParserError\n KeyError\n \"\"\"\n metric_dict = {}\n\n try:\n out = device.parse(\"show ip ospf database router self-originate\")\n except SchemaEmptyParserError as e:\n return metric_dict\n\n if out:\n try:\n for area in out[\"vrf\"][vrf][\"address_family\"][\"ipv4\"][\"instance\"][\n ospf_process_id\n ][\"areas\"]:\n for lsa_type in out[\"vrf\"][vrf][\"address_family\"][\"ipv4\"][\n \"instance\"\n 
][ospf_process_id][\"areas\"][area][\"database\"][\"lsa_types\"]:\n for lsa in out[\"vrf\"][vrf][\"address_family\"][\"ipv4\"][\n \"instance\"\n ][ospf_process_id][\"areas\"][area][\"database\"][\"lsa_types\"][\n lsa_type\n ][\n \"lsas\"\n ]:\n for link in out[\"vrf\"][vrf][\"address_family\"][\"ipv4\"][\n \"instance\"\n ][ospf_process_id][\"areas\"][area][\"database\"][\n \"lsa_types\"\n ][\n lsa_type\n ][\n \"lsas\"\n ][\n lsa\n ][\n \"ospfv2\"\n ][\n \"body\"\n ][\n \"router\"\n ][\n \"links\"\n ]:\n if (\n out[\"vrf\"][vrf][\"address_family\"][\"ipv4\"][\n \"instance\"\n ][ospf_process_id][\"areas\"][area][\"database\"][\n \"lsa_types\"\n ][\n lsa_type\n ][\n \"lsas\"\n ][\n lsa\n ][\n \"ospfv2\"\n ][\n \"body\"\n ][\n \"router\"\n ][\n \"links\"\n ][\n link\n ][\n \"type\"\n ]\n == \"stub network\"\n ):\n continue\n metric_dict.setdefault(link, {})\n for topology in out[\"vrf\"][vrf][\"address_family\"][\n \"ipv4\"\n ][\"instance\"][ospf_process_id][\"areas\"][area][\n \"database\"\n ][\n \"lsa_types\"\n ][\n lsa_type\n ][\n \"lsas\"\n ][\n lsa\n ][\n \"ospfv2\"\n ][\n \"body\"\n ][\n \"router\"\n ][\n \"links\"\n ][\n link\n ][\n \"topologies\"\n ]:\n metric = out[\"vrf\"][vrf][\"address_family\"][\n \"ipv4\"\n ][\"instance\"][ospf_process_id][\"areas\"][area][\n \"database\"\n ][\n \"lsa_types\"\n ][\n lsa_type\n ][\n \"lsas\"\n ][\n lsa\n ][\n \"ospfv2\"\n ][\n \"body\"\n ][\n \"router\"\n ][\n \"links\"\n ][\n link\n ][\n \"topologies\"\n ][\n topology\n ][\n \"metric\"\n ]\n metric_dict[link].setdefault(topology, metric)\n return metric_dict\n except KeyError as e:\n log.error(\n \"Failed in retrieving OSPF advertised metric value, \"\n \"Error: {}\".format(str(e))\n )\n return metric_dict\n return metric_dict\n\n\ndef get_ospf_area_of_interface(\n device,\n interface,\n process_id,\n vrf=\"default\",\n address_family=\"ipv4\",\n output=None,\n):\n \"\"\" Get area value of an interface\n Args:\n device ('obj'): Device object\n interface ('str'): Interface name\n vrf ('str'): VRF name\n process_id ('str'): Process id\n address_family ('str'): Address family\n Returns:\n area ('str'): If area found\n None: If area not found\n Raises:\n ValueError: Command found more than one area\n \"\"\"\n\n log.info(\n \"Getting area of interface {interface}\".format(interface=interface)\n )\n if not output:\n try:\n output = device.parse(\n \"show ip ospf interface {interface}\".format(\n interface=interface\n )\n )\n except SchemaEmptyParserError:\n log.info(\"Could not find any area\")\n return None\n\n if process_id:\n areas = list(\n output[\"vrf\"]\n .get(vrf, {})\n .get(\"address_family\", {})\n .get(address_family, {})\n .get(\"instance\", {})\n .get(process_id, {})\n .get(\"areas\", {})\n .keys()\n )\n\n if len(areas) > 1:\n raise ValueError(\n \"Command has returned more than one area. 
The following \"\n \"areas have been returned:\\n{areas}\".format(\n areas=\"\\n\".join(areas)\n )\n )\n\n area = areas[0]\n\n log.info(\"Found area {area}\".format(area=area))\n\n return area\n\n\ndef get_ospf_process_number(\n device, vrf=\"default\", interface=None, output=None\n):\n \"\"\" Get ospf process number\n\n Args:\n device ('obj'): device to run on\n vrf ('str'): vrf to search under\n interface ('str') interface to serach under\n output ('dict'): Output from parser otherwise will get from device\n\n Returns:\n None if error occured\n str: ospf process number\n\n Raises:\n SchemaEmptyParserError\n \"\"\"\n if not output:\n try:\n if interface:\n output = device.parse(\n \"show ip ospf interface {interface}\".format(\n interface=interface\n )\n )\n else:\n output = device.parse(\"show ip ospf\")\n except SchemaEmptyParserError:\n return None\n\n if (\n output\n and \"vrf\" in output\n and vrf in output[\"vrf\"]\n and \"address_family\" in output[\"vrf\"][vrf]\n and \"ipv4\" in output[\"vrf\"][vrf][\"address_family\"]\n ):\n for number in output[\"vrf\"][vrf][\"address_family\"][\"ipv4\"].get(\n \"instance\", {}\n ):\n return number\n\n return None\n\n\ndef get_ospf_neighbors_in_state(\n device, state, neighbor_interface=None, in_state=True\n):\n \"\"\" Get ospf neighbor ip_addresses that are in {state} - show\n ip ospf neighbor\n\n Args:\n device ('obj'): device to run on\n neighbor_interface ('str'): Neighbor interface name\n state ('str'): full/sub-string of the state you want \n search for\n in_state ('bool'): Check if state is in state provided\n\n Returns:\n list of ospf neighbor ip_addresses\n ex: ['192.168.0.1', '192.168.0.2', ...]\n\n Raises:\n SchemaEmptyParserError\n \"\"\"\n\n neighbor_addresses = []\n\n if state:\n state = state.lower()\n if in_state:\n log.info(\n \"Getting all ospf neighbors that are\"\n \" in state: '{}'.\".format(state)\n )\n else:\n log.info(\n \"Getting all ospf neighbors that are\"\n \" not in state: '{}'.\".format(state)\n )\n else:\n log.info(\"Getting all ospf neighbors\")\n\n try:\n if neighbor_interface:\n out = device.parse(\n \"show ip ospf neighbor {intf}\".format(intf=neighbor_interface)\n )\n else:\n out = device.parse(\"show ip ospf neighbor\")\n except SchemaEmptyParserError:\n return neighbor_addresses\n\n if out and \"interfaces\" in out:\n for neighbor_interface in out[\"interfaces\"]:\n for neighbor in out[\"interfaces\"][neighbor_interface].get(\n \"neighbors\", {}\n ):\n if not state:\n neighbor_addresses.append(neighbor)\n else:\n output_state = (\n out[\"interfaces\"][neighbor_interface][\"neighbors\"][\n neighbor\n ].get(\"state\", \"\")\n ).lower()\n if not output_state:\n continue\n if in_state:\n if state in output_state:\n neighbor_addresses.append(neighbor)\n else:\n if state not in output_state:\n neighbor_addresses.append(neighbor)\n\n return neighbor_addresses\n\n\ndef get_ospf_neighbors_not_in_state(device, state):\n \"\"\" Get ospf neighbor ip_addresses that are in {state} - show\n ip ospf neighbor\n\n Args:\n device ('obj'): device to run on\n state ('str'): full/sub-string of the state you want \n search against\n\n Returns:\n list of ospf neighbor ip_addresses\n ex: ['192.168.0.1', '192.168.0.2', ...]\n\n \"\"\"\n return get_ospf_neighbors_in_state(\n device=device, state=state, in_state=False\n )\n\n\ndef get_ospf_neighbors(device, neighbor_interface=None):\n \"\"\" Get ospf neighbor ip_addresses - show\n ip ospf neighbor\n\n Args:\n device ('obj'): device to run on\n neighbor_interface ('str'): Neighbor 
interface name\n\n Returns:\n list of ospf neighbor ip_addresses\n ex: ['192.168.0.1', '192.168.0.2', ...]\n\n \"\"\"\n return get_ospf_neighbors_in_state(\n device=device, neighbor_interface=neighbor_interface, state=None\n )\n\n\ndef get_ospf_neighbors_using_interface(device, interface):\n \"\"\" Get ospf neighbor ip_addresses that are under the specified interface\n\n Args:\n device ('obj'): device to use\n interface ('str'): interface to search under\n\n Returns:\n list of ospf neighbor ip_addresses\n\n Raises:\n SchemaEmptyParserError\n \"\"\"\n neighbor_addresses = []\n\n log.info(\"Getting all ospf neighbors under {}\".format(interface))\n\n try:\n out = device.parse(\"show ip ospf neighbor\")\n except SchemaEmptyParserError:\n return neighbor_addresses\n\n if out and \"interfaces\" in out and interface in out[\"interfaces\"]:\n\n for neighbor in out[\"interfaces\"][interface].get(\"neighbors\", {}):\n neighbor_addresses.append(neighbor)\n\n return neighbor_addresses\n\n\ndef get_router_ospf_section_running_config(device, ospf_process_id):\n \"\"\" Get router OSPF section from running-config\n Args:\n device ('obj'): Device object\n ospf_process_id ('int'): OSPF router process id\n Returns:\n Dict with section\n \"\"\"\n\n section = \"router ospf {ospf_process_id}\".format(\n ospf_process_id=ospf_process_id\n )\n return get_running_config_section_dict(\n device=device, section=section\n )\n\n\ndef get_router_ospf_section_startup_config(device, ospf_process_id):\n \"\"\" Get router OSPF section from startup-config\n Args:\n device ('obj'): Device object\n ospf_process_id ('int'): OSPF router process id\n Returns:\n Dict with section\n \"\"\"\n\n section = \"router ospf {ospf_process_id}\".format(\n ospf_process_id=ospf_process_id\n )\n return get_startup_config_dict(\n device=device, section=section\n )\n\n\ndef get_ospf_session_count(device):\n \"\"\" Get ospf session count\n\n Args:\n device ('obj'): Device object\n \n Returns:\n integer: ospf session count\n \n Raises:\n SchemaEmptyParserError\n \"\"\"\n ospf_session_count = 0\n\n try:\n output_ospf = device.parse(\"show ip ospf neighbor\")\n except SchemaEmptyParserError:\n return ospf_session_count\n\n for intf in output_ospf[\"interfaces\"]:\n ospf_session_count += len(\n output_ospf[\"interfaces\"].get(intf, {}).get(\"neighbors\", {}).keys()\n )\n\n return ospf_session_count\n\n\ndef get_ospf_interfaces(device, bgp_as):\n \"\"\" Retrieve interface for ospf using BGP AS number\n\n Args:\n device ('obj'): Device object\n bgp_as ('int'): BGP AS number\n\n Returns:\n List of interfaces\n\n Raises:\n SchemaEmptyParserError\n \"\"\"\n try:\n out = device.parse(\"show ip ospf interface brief\")\n except SchemaEmptyParserError:\n return None\n\n try:\n areas_dict = out[\"instance\"][str(bgp_as)][\"areas\"]\n except KeyError:\n return None\n\n interfaces = []\n if areas_dict:\n for area in areas_dict.keys():\n interfaces.extend(areas_dict[area][\"interfaces\"].keys())\n return interfaces\n\n\ndef get_ospf_process_id_and_area(\n device, vrf=\"default\", interface=None, address_family=\"ipv4\"\n):\n \"\"\" Get ospf process id and area\n\n Args:\n device ('obj'): device to run on\n vrf ('str'): vrf to search under\n interface ('str'): interface to search under\n address_family (`str`): Address family name\n\n Returns:\n None if error occurred\n tuple: ospf process number and area\n ex.) 
(1234, ['0.0.0.4', '0.0.0.8'])\n First element that is 1234 is process number\n Second element that is ['0.0.0.4', '0.0.0.8'] is list of areas\n Raises:\n None\n \"\"\"\n\n try:\n if interface:\n out = device.parse(\n \"show ip ospf interface {interface}\".format(\n interface=interface\n )\n )\n else:\n out = device.parse(\"show ip ospf\")\n except SchemaEmptyParserError:\n return None, None\n\n if (\n out\n and \"vrf\" in out\n and vrf in out[\"vrf\"]\n and \"address_family\" in out[\"vrf\"][vrf]\n and address_family in out[\"vrf\"][vrf][\"address_family\"]\n ):\n for number, areas in out[\"vrf\"][vrf][\"address_family\"][address_family][\n \"instance\"\n ].items():\n if \"areas\" in areas:\n return number, list(areas[\"areas\"])\n\n return None, None\n\ndef get_ospf_global_block_range(device, process_id, output=None):\n ''' Get global block range from segment-routing\n Args:\n device ('obj'): Device object\n process_id ('str'): Ospf process id\n output ('dict'): Optional. Parsed output of command 'show ip ospf segment-routing'\n Returns:\n tuple: (\n int: Global range minimum\n int: Global range maximum\n )\n Raises:\n None\n '''\n\n log.info('Getting global block range from segment-routing')\n\n if not output:\n try:\n output = device.parse('show ip ospf segment-routing')\n except SchemaEmptyParserError:\n log.info('Could not find any block range '\n 'information for process {id}'.format(id=process_id))\n return None, None\n\n srgb_min=output['process_id'].get(process_id, {}).get('global_block_srgb', {}).get('range', {}).get('start', None)\n srgb_max=output['process_id'].get(process_id, {}).get('global_block_srgb', {}).get('range', {}).get('end', None)\n\n if (srgb_min and srgb_max):\n log.info('Found range {rmin} - {rmax}'.format(rmin=srgb_min, rmax=srgb_max))\n\n elif not (srgb_min or srgb_max):\n log.info('Could not find any range information')\n elif not srgb_min:\n log.info('Could not find minimum range information')\n elif not srgb_max:\n log.info('Could not find maximum range information')\n\n return srgb_min, srgb_max\n\ndef get_ospf_local_block_range(device, process_id, output=None):\n ''' Get local block range from segment-routing\n Args:\n device ('obj'): Device object\n process_id ('str'): Ospf process id\n output ('dict'): Optional. 
Parsed output of command 'show ip ospf segment-routing'\n Returns:\n tuple: (\n int: Local range minimum\n int: Local range maximum\n )\n Raises:\n None\n '''\n\n log.info('Getting local block range from segment-routing')\n\n if not output:\n try:\n output = device.parse('show ip ospf segment-routing')\n except SchemaEmptyParserError:\n log.info('Could not find any block range '\n 'information for process {id}'.format(id=process_id))\n return None, None\n\n srlb_min=output['process_id'].get(process_id, {}).get('local_block_srlb', {}).get('range', {}).get('start', None)\n srlb_max=output['process_id'].get(process_id, {}).get('local_block_srlb', {}).get('range', {}).get('end', None)\n\n if (srlb_min and srlb_max):\n log.info('Found range {rmin} - {rmax}'.format(rmin=srlb_min, rmax=srlb_max))\n\n elif not (srlb_min or srlb_max):\n log.info('Could not find any range information')\n elif not srlb_min:\n log.info('Could not find min range information')\n elif not srlb_max:\n log.info('Could not find max range information')\n\n return srlb_min, srlb_max\n\ndef get_ospf_segment_routing_lb_srlb_base_and_range(\n device, process_id, router_id\n):\n \"\"\" Gets 'SRLB Base' and 'SRLB Range' values\n\n Args:\n device ('obj'): Device to use\n process_id ('str'): Ospf process_id\n router_id ('str'): Which router_id entry to use\n\n Returns:\n if can filter down to one result:\n (('int'): SRLB Base value, ('dict'): Output from parser)\n\n Raises:\n None\n \"\"\"\n try:\n output = device.parse(\"show ip ospf segment-routing local-block\")\n except SchemaEmptyParserError:\n return None, None\n\n reqs_base = R(\n [\n \"instance\",\n process_id,\n \"areas\",\n \"(?P<area>.*)\",\n \"router_id\",\n router_id,\n \"srlb_base\",\n \"(?P<srlb_base>.*)\",\n ]\n )\n found_base = find(output, reqs_base, filter_=False, all_keys=True)\n if not found_base:\n return None, None\n\n reqs_range = R(\n [\n \"instance\",\n process_id,\n \"areas\",\n \"(?P<area>.*)\",\n \"router_id\",\n router_id,\n \"srlb_range\",\n \"(?P<srlb_range>.*)\",\n ]\n )\n found_range = find(output, reqs_range, filter_=False, all_keys=True)\n if not found_range:\n return None, None\n\n return found_base[0][0], found_range[0][0]\n\n\ndef get_ospf_segment_routing_gb_srgb_base_and_range(\n device,\n process_id,\n router_id\n):\n \"\"\" Gets 'SRGB Base' and 'SRGB Range' values\n\n Args:\n device ('obj'): Device to use\n process_id ('int'): Ospf process_id\n router_id ('str'): Which router_id entry to use\n\n Returns:\n if can filter down to one result:\n (('int'): SRGB Base value, ('dict'): Output from parser)\n if cannot filter due to lack of arguments:\n ([{key:value},{key:value}], ('dict'): Output from parser)\n\n Raises:\n None\n \"\"\"\n try:\n output = device.parse(\"show ip ospf segment-routing global-block\")\n except SchemaEmptyParserError:\n return None, None\n\n reqs_base = R(\n [\n \"process_id\",\n process_id,\n \"routers\",\n router_id,\n 'srgb_base',\n \"(?P<srgb_base>.*)\",\n ]\n )\n found_base = find(output, reqs_base, filter_=False, all_keys=True)\n if not found_base:\n return None, None\n\n reqs_range = R(\n [\n \"process_id\",\n process_id,\n \"routers\",\n router_id,\n 'srgb_range',\n \"(?P<srgb_range>.*)\",\n ]\n )\n found_range = find(output, reqs_range, filter_=False, all_keys=True)\n if not found_range:\n return None, None\n\n return found_base[0][0], found_range[0][0]\n\n\ndef get_ospf_neighbor_address_in_state(device, state=None):\n \"\"\" Gets the ospf neighbors address' in state\n\n Args:\n device ('obj'): Device to use\n 
state ('str'): full/sub-string of the state you want\n search for\n\n Returns:\n ('list'): of ospf neighbor address' in state\n\n Raises:\n N/A\n \"\"\"\n try:\n out = device.parse(\"show ip ospf neighbor\")\n except SchemaEmptyParserError:\n return []\n\n addresses = []\n\n for intf in out.get(\"interfaces\", {}):\n for neighbor in out[\"interfaces\"][intf].get(\"neighbors\", {}):\n if not state:\n addresses.append(out[\"interfaces\"][intf][\"neighbors\"][neighbor].get(\"address\"))\n elif state.lower() in out[\"interfaces\"][intf][\"neighbors\"][neighbor].get(\"state\", \"\").lower():\n addresses.append(out[\"interfaces\"][intf][\"neighbors\"][neighbor].get(\"address\"))\n\n return addresses\n\n\ndef get_ospf_sr_adj_sid_and_neighbor_address(device, process_id, neighbor_addresses=None):\n \"\"\" Gets adjacency sids and corresponding neighbor address.\n\n Args:\n device ('obj'): Device to use\n process_id ('str'): Ospf process id\n neighbor_addresses ('list'): If provided, function will only return adj-sid/neighbor_address\n pairs that exist in the list\n\n Returns:\n {(192.168.0.1, 123), (192.168.0.2, 231), ...}\n \"\"\"\n try:\n out = device.parse('show ip ospf segment-routing adjacency-sid')\n except SchemaEmptyParserError:\n return {}\n\n ret_dict = {}\n\n for sid in out.get(\"process_id\", {}).get(process_id, {}).get(\"adjacency_sids\", {}):\n neighbor_address = out[\"process_id\"][process_id][\"adjacency_sids\"][sid].get(\"neighbor_address\")\n\n if neighbor_addresses:\n if neighbor_address and neighbor_address in neighbor_addresses:\n ret_dict.update({neighbor_address: sid})\n else:\n if neighbor_address:\n ret_dict.update({neighbor_address: sid})\n\n return ret_dict\n\n\ndef get_ospf_interface_affinity_bits(device, interface):\n \"\"\" Get affinity bits value of an ospf interface\n Args:\n device ('obj'): Device object\n interface ('str'): Interface name\n Returns:\n bits ('str'): Affinity bits\n \"\"\"\n\n log.info(\"Getting Affinity bits of interface {intf}\".format(intf=interface))\n\n cmd = 'show ip ospf interface {intf}'.format(intf=interface)\n try:\n out = device.parse(cmd)\n except Exception as e:\n log.error(\"Failed to parse '{cmd}': {e}\".format(cmd=cmd, e=e))\n return None\n\n reqs = R(['vrf','(.*)',\n 'address_family','(.*)',\n 'instance','(.*)','areas','(.*)',\n 'interfaces','(.*)','teapp','(.*)',\n 'affinity','bits','(?P<bits>.*)'])\n found = find([out], reqs, filter_=False, all_keys=True)\n if found:\n keys = GroupKeys.group_keys(reqs=reqs.args, ret_num={}, \n source=found, all_keys=True)\n bits = keys[0]['bits']\n log.info(\"Get affinity bits '{bits}' on {intf}\".format(bits=bits, intf=interface))\n return bits\n else:\n log.error(\"Failed to get affinity bits on {intf}\".format(intf=interface))\n return None\n", "id": "8045035", "language": "Python", "matching_score": 4.560178279876709, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/ospf/get.py" }, { "content": "\"\"\"Common get info functions for interface\"\"\"\n\n# Python\nimport os\nimport logging\nimport re\nimport time\nfrom ipaddress import IPv4Network, IPv4Address\n\n# unicon\nfrom unicon.core.errors import SubCommandFailure\n\n# pyATS\nfrom ats.easypy import runtime\nfrom ats.utils.objects import find, R\nfrom pyats.datastructures.logic import Not\n\n# Genie\nfrom genie.utils.config import Config\nfrom genie.libs.parser.utils.common import Common\nfrom genie.libs.sdk.libs.utils.normalize import GroupKeys\nfrom genie.metaparser.util.exceptions import SchemaEmptyParserError\n\n# 
libs\nfrom genie.libs.sdk.apis.utils import (\n int_to_mask,\n get_config_dict,\n question_mark_retrieve,\n get_delta_time_from_outputs,\n)\n\nfrom genie.libs.sdk.apis.iosxe.running_config.get import (\n get_running_config_section_dict,\n)\n\nlog = logging.getLogger(__name__)\n\n\ndef get_neighbor_interface_and_device(device, interface_alias):\n \"\"\" Get neighbor interface and device from topology\n\n Args:\n device (`obj`): Device object\n interface_alias (`str`): interface alias\n\n Returns:\n Tuple: (str: neighbor interface, obj: neighbor device)\n\n Raises:\n None\n \"\"\"\n interface = device.interfaces[interface_alias].name\n link = device.interfaces[interface_alias].link\n interface_list = link.find_interfaces(device__name=Not(device.name))\n\n if interface_list:\n neighbor_interface = interface_list[0]\n log.info(\n \"Found interface {intf} on {device.name} has neighbor \"\n \"interface {neighbor_intf.name} on {neighbor_intf.device.name}\".format(\n intf=interface, device=device, neighbor_intf=neighbor_interface\n )\n )\n return neighbor_interface.name, neighbor_interface.device\n else:\n return None, None\n\n\ndef get_interface_mtu_size(device, interface):\n \"\"\" Get interface MTU\n\n Args:\n device (`obj`): Device object\n interface (`str`): Interface name\n\n Returns:\n None\n mtu (`int`): mtu bytes\n\n Raises:\n None\n \"\"\"\n try:\n out = device.parse(\n \"show interfaces {interface}\".format(interface=interface)\n )\n except SchemaEmptyParserError:\n return\n\n return out[interface][\"mtu\"]\n\n\ndef get_interface_mtu_config_range(device, interface):\n \"\"\" Get MTU config range\n\n Args:\n device (`obj`): Device object\n interface (`str`): Interface name\n\n Returns:\n None\n range_dict (`dict`): MTU range dict\n ex: {'min':30, 'max':360, range: '30-360'}\n\n Raises:\n None\n \"\"\"\n range_dict = {}\n\n cmd = \"conf t\\ninterface {interface}\\nmtu \".format(interface=interface)\n out = question_mark_retrieve(device, cmd, state=\"config\")\n\n p = re.compile(r\"<(?P<min>\\d+)-(?P<max>\\d+)>\")\n m = p.search(out)\n if m:\n range_dict.update({\"range\": m.group()})\n range_dict.update({\"min\": int(m.groupdict()[\"min\"])})\n range_dict.update({\"max\": int(m.groupdict()[\"max\"])})\n return range_dict\n else:\n return\n\n\ndef get_interface_mac_address(device, interface):\n \"\"\" Get interface mac address from device\n\n Args:\n device (`obj`): Device object\n interface(`str`): Interface name\n\n Returns:\n None\n interface mac address\n\n Raises:\n None\n \"\"\"\n log.info(\"Getting mac address for {} on {}\".format(interface, device.name))\n\n try:\n out = device.parse(\"show interfaces {}\".format(interface))\n except SchemaEmptyParserError:\n return\n\n return out[interface][\"mac_address\"]\n\n\ndef get_interface_without_service_policy(\n device, interface_type, virtual_interface=False\n):\n \"\"\" Find a interface without service-policy\n\n Args:\n device (`obj`): Device object\n interface_type (`str`): Interface type\n virtual_interface ('bool'): flag for matching virtual interfaces\n\n Returns:\n None\n interface (`str`): Interface name\n\n Raises:\n None\n \"\"\"\n if not virtual_interface:\n p = re.compile(r\"interface +(?P<intf>(?!\\S+\\.\\S*)\\S+)\")\n else:\n p = re.compile(r\"interface +(?P<intf>\\S+)\")\n\n config_dict = get_running_config_section_dict(\n device, \"interface\"\n )\n for intf, config in config_dict.items():\n if intf.startswith(\"interface \" + interface_type):\n cfg = \"\\n\".join(config)\n if \"service-policy\" not in cfg:\n try:\n 
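# A failed regex match makes groupdict() raise AttributeError (search()\n # returned None); the except clause below simply moves on to the next\n # configuration section.\n 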
return p.search(intf).groupdict()[\"intf\"]\n except AttributeError:\n continue\n else:\n return\n\n\ndef get_interface_qlimit_bytes(device, interface):\n \"\"\" Get interface qlimit in bytes\n\n Args:\n device (`obj`): Device object\n interface (`str`): Interface name\n\n Returns:\n None\n qlimit_bytes (`int`): Interface qlimit_bytes\n\n Raises:\n None\n \"\"\"\n try:\n out = device.parse(\n \"show platform hardware qfp active infrastructure bqs \"\n \"queue output default interface {interface}\".format(\n interface=interface\n )\n )\n except SchemaEmptyParserError:\n return\n\n reqs = R(\n [\n interface,\n \"index\",\n \"(?P<index>.*)\",\n \"software_control_info\",\n \"qlimit_bytes\",\n \"(?P<qlimit>.*)\",\n ]\n )\n found = find([out], reqs, filter_=False, all_keys=True)\n if found:\n keys = GroupKeys.group_keys(\n reqs=reqs.args, ret_num={}, source=found, all_keys=True\n )\n return keys[0][\"qlimit\"]\n else:\n return\n\n\ndef get_interface_ip_address(device, interface):\n \"\"\" Get interface ip_address from device\n\n Args:\n interface('str'): Interface to get address\n device ('obj'): Device object\n\n Returns:\n None\n interface ip_address ('str')\n\n Raises:\n None\n \"\"\"\n log.info(\n \"Getting interface address for {interface} on {device}\".format(\n interface=interface, device=device.name\n )\n )\n\n cmd = \"show ip interface brief {i}\".format(i=interface)\n try:\n out = device.parse(cmd)\n except SubCommandFailure:\n log.error(\"Invalid command\")\n except Exception as e:\n log.error(\"Failed to parse '{cmd}': {e}\".format(cmd=cmd, e=e))\n return\n\n address = out[\"interface\"].get(interface, {}).get(\"ip_address\", None)\n if interface not in out[\"interface\"]:\n return\n elif (\n \"ip_address\" not in out[\"interface\"][interface]\n or address == \"unassigned\"\n ):\n return\n\n return out[\"interface\"][interface][\"ip_address\"]\n\n\ndef get_interface_loopback_ip_address(device, num=1):\n \"\"\" Gets all loopback interface ip_address' up to number specified\n\n Args:\n device ('obj'): device to use\n num ('int'): max number of loopback interfaces to get ip_address from\n\n Returns:\n list: [(interface with Loopback address, ip_address), ...]\n\n Raises:\n None\n \"\"\"\n try:\n out = device.parse(\"show ip interface brief | include Loopback\")\n except SchemaEmptyParserError:\n return []\n\n count = 0\n ip_addresses = []\n for intf in sorted(out[\"interface\"].keys()):\n if \"Loopback\" in intf:\n count += 1\n ip_addresses.append((intf, out[\"interface\"][intf][\"ip_address\"]))\n if count == num:\n break\n\n return ip_addresses\n\n\ndef get_unused_loopback_interface(device):\n \"\"\" Gets the first un-used loopback interface\n\n Args:\n device ('obj'): device to use\n\n returns:\n string: first unused loopback\n\n Raises:\n None\n \"\"\"\n try:\n out = device.parse(\"show ip interface brief | include Loopback\")\n except SchemaEmptyParserError:\n return \"Loopback0\"\n\n if out:\n # Get last used loopback address\n loopback = sorted(out[\"interface\"].keys())[-1]\n\n # get loopback number and increment by 1\n return \"Loopback{}\".format(int(loopback[len(\"Loopback\") :]) + 1)\n\n\ndef get_interface_with_mask(device, netmask=\"30\", address_family=\"ipv4\"):\n \"\"\" Gets interface:ip_address with specific mask\n\n Args:\n device('obj'): device to use\n netmask('str'): netmask the interface must have\n address_family('str'): address_family to search under\n\n Returns:\n (None, None)\n (interface('str'), ip_address('str'))\n\n Raises:\n None\n \"\"\"\n try:\n out = 
device.parse(\"show interfaces\")\n except SchemaEmptyParserError:\n return None, None\n\n for intf in out:\n if address_family in out[intf]:\n for ip in out[intf][address_family]:\n if out[intf][address_family][ip][\"prefix_length\"] == str(\n netmask\n ):\n ip_address = out[intf][address_family][ip][\"ip\"]\n interface = intf\n return interface, ip_address\n\n return None, None\n\n\ndef get_interface_with_up_state(\n device, interface_type, virtual_interface=None\n):\n \"\"\" Get a interface which is up\n\n Args:\n device ('obj'): Device object\n interface_type ('str'): Interface type\n virtual_interface ('bool'): Flag for logical interface\n if is None, return physical or logical\n if is True, return only logical\n if is False, return only physical\n Returns:\n None\n interface name ('str')\n\n Raises:\n None\n \"\"\"\n cmd = \"show ip interface | include ^{type}\".format(type=interface_type)\n try:\n out = device.parse(cmd)\n except SchemaEmptyParserError:\n return\n\n for interface, interface_data in out.items():\n if interface_data[\"oper_status\"] == \"up\":\n if virtual_interface is None:\n return interface\n elif virtual_interface == (\".\" in interface):\n return interface\n\n\ndef get_interface_carrier_delay(device, interface, delay_type):\n \"\"\" Get interface carrier delay\n\n Args:\n device ('obj'): Device object\n interface ('str'): Interface name\n delay_type ('str'): Carrier delay type: 'up', 'down'\n\n Returns:\n None\n delay in seconds ('str')\n\n Raises:\n None\n \"\"\"\n try:\n out = device.parse(\"show interfaces {intf}\".format(intf=interface))\n except SchemaEmptyParserError:\n return\n\n intf_dict = out[interface]\n key = \"carrier_delay_\" + delay_type\n if key in intf_dict:\n return intf_dict[key]\n\n\ndef get_interface_ip_and_mask(device, interface, prefix=False):\n \"\"\" Get interface ip address and mask\n\n Args:\n device (`obj`): Device object\n interface (`str`): Interface name\n prefix (`bool`): return ip with prefix if True\n otherwise return ip and mask\n\n Returns:\n Tuple: (None, None)\n Tuple: (str: interface ip address, str: interface mask)\n\n Raises:\n None\n \"\"\"\n ip = mask = None\n try:\n out = device.parse(\"show interfaces {}\".format(interface))\n except SchemaEmptyParserError:\n return None, None\n\n for interface, interface_data in out.items():\n if \"ipv4\" in interface_data.keys():\n for ipv4, ip_data in interface_data[\"ipv4\"].items():\n if prefix:\n ip = ipv4\n else:\n ip = ip_data[\"ip\"]\n mask = int_to_mask(ip_data[\"prefix_length\"])\n break\n\n return ip, mask\n\n\ndef get_interface_interfaces(device, link_name=None, opposite=False, num=0):\n \"\"\" Get interface and device\n\n Args:\n device ('obj'): Device object\n link_name ('str'): link name\n opposite ('bool'): find opposite device interface\n num ('int'): num of interface to return\n\n Returns:\n topology dictionary\n\n Raises:\n None\n \"\"\"\n if link_name:\n link = device.interfaces[link_name].link\n intf_list = link.find_interfaces(\n device__name=Not(device.name) if opposite else device.name\n )\n else:\n intf_list = device.find_interfaces()\n\n if intf_list:\n intf_list.sort()\n\n if num > 0 and num <= len(intf_list):\n return intf_list[num - 1]\n\n return intf_list\n else:\n return {}\n\n\ndef get_interface_interfaces_under_vrf(device, vrf):\n \"\"\" Get interfaces configured under specific Vrf\n\n Args:\n device (`obj`): Device object\n vrf (`str`): Vrf name\n\n Returns:\n interfaces (`list`): List of interfaces\n\n Raises:\n None\n \"\"\"\n\n try:\n out = 
device.parse(\"show vrf {}\".format(vrf))\n except SchemaEmptyParserError:\n return []\n\n if (\n out\n and \"vrf\" in out\n and vrf in out[\"vrf\"]\n and \"interfaces\" in out[\"vrf\"][vrf]\n ):\n interfaces = out[\"vrf\"][vrf][\"interfaces\"]\n return interfaces\n\n return []\n\n\ndef get_interface_running_config(device, interface):\n \"\"\" Get interface configuration from show running-config interface {interface}\n \n Args:\n device ('obj'): Device object\n interface ('str'): interface name\n\n Return:\n Dictionary with running interface configuration\n\n Raises:\n None\n \"\"\"\n\n interface = Common.convert_intf_name(interface)\n\n try:\n output = device.execute(\n \"show running-config interface {interface}\".format(\n interface=interface\n )\n )\n except SubCommandFailure:\n return {}\n\n return get_config_dict(output)\n\n\ndef get_interface_packet_counter(\n device, interface, counter_field, output=None\n):\n \"\"\" Returns packet counters for given interface\n\n Args:\n device ('obj') : Device object\n interface ('str'): Interface name\n output ('dict'): Parsed output from 'show interfaces' command\n\n Returns\n counter: number of output packet\n\n if any error or no counter_field was found return None\n - to separate 0 packet and None value\n\n Raises:\n None\n \"\"\"\n if not output:\n try:\n output = device.parse(\n \"show interfaces {intf}\".format(intf=interface)\n )\n except SchemaEmptyParserError:\n return\n\n counter = output[interface].get(\"counters\", {}).get(counter_field, None)\n return counter\n\n\ndef get_neighboring_device_interface(device, testbed, interface):\n \"\"\" Get neighbor device interface\n\n Args:\n device ('obj'): Device object\n testbed ('obj'): Testbed object\n interface ('str'): interface name\n\n Returns:\n Dictionary: topology\n\n Raises:\n None\n \"\"\"\n log.info(\n \"Finding the neighbor device of the uplink interface : {interface}\".format(\n interface=interface\n )\n )\n\n topology_devices = {}\n link = testbed.find_links().pop()\n for it in link.find_interfaces():\n if it.device == device:\n uplink_var = \"uplink1\"\n else:\n uplink_var = \"uplink2\"\n topology_devices.setdefault(uplink_var, {}).setdefault(\"name\", it.name)\n topology_devices[uplink_var][\"device\"] = it.device\n\n if topology_devices[\"uplink2\"][\"name\"]:\n log.info(\n \"Successfully found neighbor device : {neighbor_device} \"\n \"for interface : {interface}\".format(\n neighbor_device=topology_devices[\"uplink2\"][\"name\"],\n interface=topology_devices[\"uplink2\"][\"device\"],\n )\n )\n else:\n return {}\n return topology_devices\n\n\ndef get_interface_connected_adjacent_router_interfaces(\n device, link_name, num=1\n):\n \"\"\" Get list of connected interfaces from adjacents routers\n\n Args:\n device ('obj'): Device object\n link_name ('str'): Interface alias in topology\n num ('int'): Number of interfaces to return\n\n Returns:\n List: EthernetInterface objects\n\n Raises:\n None\n \"\"\"\n\n try:\n interface = device.interfaces[link_name]\n except KeyError:\n return\n\n remote_interfaces = list(interface.remote_interfaces)\n\n if not remote_interfaces:\n return\n\n return remote_interfaces[0:num]\n\n\ndef get_bundled_interface(device, port_channel, exclude_interface=None):\n \"\"\" Pick up Port channel bundled interface\n\n Args:\n device (`obj`): Device object\n port_channel (`str`): Port Channel Interface\n exclude_interface ('str'): interface to skip\n\n Returns:\n String: Interface\n\n Raises:\n None\n \"\"\"\n out = device.parse(\"show etherchannel 
summary\")\n\n if (\n out\n and \"interfaces\" in out\n and port_channel.capitalize() in out[\"interfaces\"]\n and \"members\" in out[\"interfaces\"][port_channel.capitalize()]\n ):\n for intf in out[\"interfaces\"][port_channel.capitalize()][\"members\"]:\n if out[\"interfaces\"][port_channel.capitalize()][\"members\"][intf][\n \"bundled\"\n ]:\n if exclude_interface and intf == exclude_interface:\n continue\n return intf\n\n\ndef get_interface_address_mask_running_config(device, interface, address_family=None):\n \"\"\" Get interface address and mask from show running-config interface {interface}\n Args:\n device ('obj'): Device object\n interface ('str'): Interface name\n address_family ('str'): Not used in IOSXE. Address family\n\n Returns:\n (Interface IP address, Interface Mask)\n\n Raise:\n None\n \"\"\"\n\n # ip address 192.168.10.254 255.255.255.0\n r1 = re.compile(r\"ip\\s+address\\s+(?P<address>\\S+)\\s+(?P<mask>\\S+)\")\n\n interface = Common.convert_intf_name(interface)\n\n try:\n output = device.execute(\n \"show running-config interface {interface}\".format(\n interface=interface\n )\n )\n except SubCommandFailure:\n return None, None\n\n if not output:\n return None, None\n\n for line in output.splitlines():\n line = line.strip()\n\n result = r1.match(line)\n if result:\n group = result.groupdict()\n ip_address = group[\"address\"]\n mask = group[\"mask\"]\n return ip_address, mask\n\n\ndef get_interface_packet_output_rate(device, interface, seconds=60):\n \"\"\" Get rate of output packet in interface seconds apart\n\n Args:\n device ('obj'): Device object\n interface ('str'): Interface name\n seconds ('int'): Seconds to wait between show commands\n\n Returns:\n Traffic rate\n\n if any error return None\n - to separate rate 0.0 and None value\n\n Raises:\n None\n \"\"\"\n\n if seconds <= 0:\n return\n\n try:\n\n output_before = device.execute(\n \"show interfaces {intf}\".format(intf=interface)\n )\n\n log.info(\"Waiting {secs} seconds\".format(secs=seconds))\n time.sleep(seconds)\n\n output_after = device.execute(\n \"show interfaces {intf}\".format(intf=interface)\n )\n\n parsed_output_before = device.parse(\n \"show interfaces {intf}\".format(intf=interface),\n output=output_before,\n )\n parsed_output_after = device.parse(\n \"show interfaces {intf}\".format(intf=interface),\n output=output_after,\n )\n\n delta_time = get_delta_time_from_outputs(\n output_before=output_before, output_after=output_after\n )\n\n counter_before = get_interface_packet_counter(\n device=device,\n interface=interface,\n counter_field=\"out_pkts\",\n output=parsed_output_before,\n )\n if counter_before is None:\n return\n\n counter_after = get_interface_packet_counter(\n device=device,\n interface=interface,\n counter_field=\"out_pkts\",\n output=parsed_output_after,\n )\n if counter_after is None:\n return\n\n output_rate = round((counter_after - counter_before) / delta_time, 2)\n\n except SchemaEmptyParserError as e:\n return\n except ValueError as e:\n return\n\n log.info(\n \"Packet output rate for interface {intf} is {count}\".format(\n intf=interface, count=output_rate\n )\n )\n\n return output_rate\n\n\ndef get_interface_switchport_access_vlan(device, interface):\n \"\"\" Returns access_vlan id for interface on device\n\n Args:\n device ('obj'): device to run on\n interface ('str'): interface to search under\n\n Returns:\n str access_vlan\n\n Raise:\n None\n \"\"\"\n log.info(\"Getting access_vlan under {}\".format(interface))\n\n try:\n out = device.parse(\"show interfaces 
switchport\")\n except SchemaEmptyParserError:\n return None\n\n if out and interface in out and \"access_vlan\" in out[interface]:\n return out[interface][\"access_vlan\"]\n\n\ndef get_interface_netmask(ip_address):\n \"\"\" Get netmask of ip address' class\n\n Args:\n ip_address ('str'): ipv4 address\n\n Returns:\n ip address mask\n\n Raises:\n None\n \"\"\"\n\n class_a = IPv4Address(\"127.0.0.0\")\n class_b = IPv4Address(\"172.16.17.32\")\n class_c = IPv4Address(\"172.16.58.3\")\n ip_addr = IPv4Address(ip_address)\n ip_class = [(\"/8\", class_a), (\"/16\", class_b), (\"/24\", class_c)]\n\n for e in ip_class:\n if ip_addr < e[1]:\n return e[0]\n\n\ndef get_interface_port_channel_members(device, interface):\n \"\"\" Get interface members\n\n Args:\n device ('obj'): Device object\n interface ('str'): interface to search member for\n\n Returns:\n interface members\n\n Raises:\n None\n \"\"\"\n try:\n out = device.parse(\"show interfaces {}\".format(interface))\n except SchemaEmptyParserError:\n return\n\n try:\n return out[interface][\"port_channel\"][\"port_channel_member_intfs\"]\n except KeyError:\n return\n", "id": "773242", "language": "Python", "matching_score": 4.037642955780029, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/interface/get.py" }, { "content": "\"\"\"Common get info functions for OSPF\"\"\"\n\n# Python\nimport re\nimport logging\n\n# unicon\nfrom unicon.core.errors import SubCommandFailure\n\nlog = logging.getLogger(__name__)\n\n\ndef get_interface_address_mask_running_config(device, interface, address_family):\n \"\"\" Get interface address and mask from show running-config interface {interface}\n Args:\n device ('obj'): Device object\n interface ('str'): Interface name\n address_family ('str'): Address family\n\n Returns:\n (Interface IP address, Interface Mask)\n\n Raise:\n None\n \"\"\"\n try:\n output = device.execute('show configuration interfaces {interface}'\n .format(interface=interface))\n except SubCommandFailure:\n return None, None\n\n if not output:\n return None, None\n\n if address_family in ['ipv4', 'inet']:\n # address 192.168.0.1/32\n p1 = re.compile(r'address +(?P<ip>[\\d\\.]+)/(?P<mask>\\d+);')\n elif address_family in ['ipv6', 'inet6']:\n # address fc00:e968:6179::de52:7100/128\n p1 = re.compile(r'address +(?P<ip>[\\w\\:]+)/(?P<mask>\\d+);')\n else:\n log.info('Must provide one of the following address families: \"ipv4\", \"ipv6\", \"inet\", \"inet6\"')\n return None, None\n\n match = p1.findall(output)\n if match:\n return match[0][0], device.api.int_to_mask(int(match[0][1]))\n\n return None, None\n", "id": "70824", "language": "Python", "matching_score": 3.3438096046447754, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/junos/interface/get.py" }, { "content": "\"\"\"Common configure functions for junos interface\"\"\"\n\n# Python\nimport logging\n\n# unicon\nfrom unicon.eal.dialogs import Dialog, Statement\nfrom unicon.core.errors import SubCommandFailure\n\nlog = logging.getLogger(__name__)\n\n\ndef default_interface(device, interfaces):\n \"\"\" Reset junos interface configuration \n\n Args:\n device (`obj`): Device object\n interfaces (`list`): List of interfaces to be defaulted\n Returns:\n None\n \"\"\"\n dialog = Dialog(\n [\n Statement(\n pattern=r\"Delete everything under this level?.*\",\n action=\"sendline(yes)\",\n loop_continue=True,\n continue_timer=False,\n )\n ]\n )\n\n for intf in interfaces:\n config_cmd = [\"edit interfaces {}\".format(intf), \"delete\"]\n\n try:\n 
device.configure(config_cmd, reply=dialog)\n log.info(\"Successfully defaulted {}\".format(intf))\n except SubCommandFailure as e:\n raise SubCommandFailure(\n \"Couldn't default {interface}. Error:\\n{error}\".format(\n interface=intf, error=e\n )\n )\n\n\ndef shut_interface(device, interface):\n \"\"\" Shut interface on device\n\n Args:\n device (`obj`): Device object\n interface (`str`): Interface name\n\n Returns:\n None\n\n Raises:\n SubCommandFailure\n \"\"\"\n\n config_command = \"set interfaces {interface} disable\".format(\n interface=interface\n )\n\n log.info(\n \"Disabling interface {interface} on device {device}\".format(\n interface=interface, device=device.name\n )\n )\n\n try:\n device.configure(config_command)\n except SubCommandFailure:\n raise SubCommandFailure(\n \"Could not disable interface {interface} on device {device}\".format(\n interface=interface, device=device.name\n )\n )\n\n log.info(\n \"Interface {interface} is disabled on device {device}\".format(\n interface=interface, device=device.name\n )\n )\n\n\ndef unshut_interface(device, interface):\n \"\"\" Unshut interface on device\n\n Args:\n device (`obj`): Device object\n interface (`str`): Interface name\n\n Returns:\n None\n\n Raises:\n SubCommandFailure\n \"\"\"\n config_command = \"delete interfaces {interface} disable\".format(\n interface=interface\n )\n\n log.info(\n \"Enabling interface {interface} on device {device}\".format(\n interface=interface, device=device.name\n )\n )\n\n try:\n device.configure(config_command)\n except SubCommandFailure:\n raise SubCommandFailure(\n \"Could not enable interface {interface} on device {device}\".format(\n interface=interface, device=device.name\n )\n )\n\n log.info(\n \"Interface {interface} is enabled on device {device}\".format(\n interface=interface, device=device.name\n )\n )\n", "id": "6069742", "language": "Python", "matching_score": 2.417609930038452, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/junos/interface/configure.py" }, { "content": "'''HA useful function'''\n\n# Python\nimport time\n\n# unicon\nfrom unicon.eal.dialogs import Statement, Dialog\n\nfrom ..ha import HA as HA_iosxe\n\n\nclass HA(HA_iosxe):\n\n def _reloadLc(self, lc):\n \"\"\"Do the reload LC action for c3850(edison) devices.\n\n Args:\n Mandatory:\n lc (`str`) : LC slot number need to reload.\n\n Raises:\n Unicon errors\n\n Example:\n >>> _reloadLc(lc='1')\n \"\"\"\n # unicon\n dialog = Dialog([\n Statement(pattern=r'\\[yes\\/no\\].*',\n action='sendline(y)',\n loop_continue=True,\n continue_timer=False),\n Statement(pattern=r'\\(y\\/n\\)\\?.*',\n action='sendline(y)',\n loop_continue=True,\n continue_timer=False)\n ])\n # Execute command to reload LC\n self.device.execute('reload slot {}'.format(lc), reply=dialog)\n time.sleep(5)\n", "id": "2607584", "language": "Python", "matching_score": 0.8725467920303345, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/libs/abstracted_libs/iosxe/c3850/ha.py" }, { "content": "__all__ = (\n 'DeviceFeature',\n 'LinkFeature',\n 'InterfaceFeature',\n 'consolidate_feature_args',\n)\n\nfrom genie.conf.base import DeviceFeature, LinkFeature, InterfaceFeature\n\ndef consolidate_feature_args(feature, devices=None, interfaces=None, links=None):\n '''Consolidate feature arguments.\n\n Example::\n\n devices, interfaces, links = consolidate_feature_args(\n feature_obj, devices=devices, links=links)\n\n Returns:\n 3-`tuple` of `set` of devices, interfaces, and links\n '''\n\n if devices is None and interfaces is None 
and links is None:\n # No parameters specified, use defaults from feature.\n devices = feature.devices if isinstance(feature, DeviceFeature) else ()\n interfaces = feature.interfaces if isinstance(feature, InterfaceFeature) else ()\n links = feature.links if isinstance(feature, LinkFeature) else ()\n else:\n # Some parameters specified, just replace None values\n devices = devices or ()\n interfaces = interfaces or ()\n links = links or ()\n\n # Convert all to sets\n devices = set(devices)\n interfaces = set(interfaces)\n links = set(links)\n\n # Expand links into interfaces and then into devices\n interfaces.update(*(link.interfaces for link in links))\n devices.update(interface.device for interface in interfaces)\n\n return devices, interfaces, links\n\n", "id": "8253294", "language": "Python", "matching_score": 0.46291157603263855, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/base/feature.py" }, { "content": "'''ANSI constants'''\n\nre_CSIesc = r'(?:\\x1B\\[)' # Control Sequence Initiator (ANSI X3.64)\nre_CSIiso = r'(?:\\x9B)' # Control Sequence Introducer (ISO 8859)\nre_CSI = r'(?:' + re_CSIesc + r'|' + re_CSIiso + r')'\nre_arg = r'(?:\\d*|\"[^\"]*\")'\nre_generic = r'(?:' + re_CSI + re_arg + r'(?:;' + re_arg + r')*[A-Za-z])'\n\n", "id": "9986556", "language": "Python", "matching_score": 0.39332085847854614, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/utils/ansi.py" }, { "content": "\"\"\"Common verification functions for l2vpn\"\"\"\n\n# Python\nimport logging\nimport copy\nfrom prettytable import PrettyTable\n\n\n# import Steps\nfrom pyats.aetest.steps import Steps\n\n# Genie\nfrom genie.utils.timeout import Timeout\nfrom genie.metaparser.util.exceptions import SchemaEmptyParserError\nfrom genie.libs.parser.utils.common import Common\n\n# Common\nfrom genie.libs.parser.utils.common import Common\n\nlog = logging.getLogger(__name__)\n\n\ndef verify_l2vpn_storm_control_configuration(\n device, interface, service_instance_id, storm_control\n):\n \"\"\" Verify storm-control configuration is applied\n\n Args:\n device ('obj'): device object\n interface ('str'): interface name\n service_instance_id:('int'): service instance id\n storm_control('list'): list of storm control configurations\n ex.)\n [\n {\n 'traffic_flow': 'unicast',\n 'name': 'cir',\n 'val': 8000\n },\n {\n 'traffic_flow': 'broadcast',\n 'name': 'cir',\n 'val': 8000\n },\n {\n 'traffic_flow': 'multicast',\n 'name': 'cir',\n 'val': 8000\n }\n ]\n Returns:\n None\n Raises:\n None\n \"\"\"\n try:\n out = device.parse(\n \"show ethernet service instance id \"\n \"{} interface {} detail\".format(\n service_instance_id, Common.convert_intf_name(interface)\n )\n )\n except SchemaEmptyParserError:\n return False\n\n x = PrettyTable()\n x.field_names = [\n \"Traffic flow\",\n \"Configuration expected\",\n \"Configuration found\",\n ]\n config_found = True\n for sc in storm_control:\n\n sc_traffic_flow = sc.get(\"traffic_flow\", \"\")\n sc_name = sc.get(\"name\", \"\")\n sc_val = sc.get(\"val\", \"\")\n\n row_val = [sc_traffic_flow.title()]\n row_val.append(\n \"{}\".format(\n \"storm-control {} {} {}\".format(\n sc_traffic_flow, sc_name, sc_val\n )\n )\n )\n\n try:\n val = out[\"service_instance\"][service_instance_id][\"interfaces\"][\n interface\n ][\"micro_block_type\"][\"Storm-Control\"][\n \"storm_control_{}_cir\".format(sc_traffic_flow)\n ]\n if sc_val != int(val):\n config_found = False\n row_val.append(\n \"{}\".format(\n \"storm-control {} {} {}\".format(\n sc_traffic_flow, sc_name, 
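# mismatch: record the value actually found on the device\n 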
int(val)\n )\n )\n )\n else:\n row_val.append(\n \"{}\".format(\n \"storm-control {} {} {}\".format(\n sc_traffic_flow, sc_name, sc_val\n )\n )\n )\n\n except KeyError as e:\n row_val.append(\n \"Configuration not found for {}:\".format(sc_traffic_flow)\n )\n log.error(\n \"Key: '{}' is not found from the command output\".format(str(e))\n )\n config_found = False\n x.add_row(row_val)\n\n log.info(x)\n return config_found\n\n\ndef is_l2vpn_storm_control_packet_count_increased(\n intial_discard_packets, current_discard_packets\n):\n \"\"\" Verify packet count has increased\n \n Args:\n intial_discard_packets ('dict'): previous dictionary of packet counts for flow groups\n ex.) \n {\n 'broadcast': 234234,\n 'unicast': 123123\n }\n current_discard_packets ('dict'): current dictionary of packet counts for flow groups\n ex.) \n {\n 'broadcast': 234534,\n 'unicast': 123523\n }\n Returns:\n True\n False\n Raises:\n None\n \"\"\"\n\n result = True\n for k, v in current_discard_packets.items():\n log.info(\n \"Getting current StormControl Discard packet\"\n \" count for {}\".format(k)\n )\n\n key = k.lower().replace(\" \", \"_\")\n val = intial_discard_packets.get(k, 0)\n\n if v <= val:\n result = False\n log.info(\"Packet count has not increased\")\n else:\n log.info(\n \"Packet count for {}: \\n\"\n \" Initial: {}\\n\"\n \" Current: {}\".format(k.title().replace(\"_\", \" \"), val, v)\n )\n return result\n", "id": "11650003", "language": "Python", "matching_score": 2.972902536392212, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/l2vpn/verify.py" }, { "content": "\"\"\"Common configure functions for bgp\"\"\"\n\n# Python\nimport logging\nimport re\n\n# Unicon\nfrom unicon.core.errors import SubCommandFailure\n\nlog = logging.getLogger(__name__)\n\n\ndef configure_l2vpn_storm_control(\n device, interface, service_instance_id, storm_control\n):\n \"\"\" Configures storm control under service instance\n\n Args:\n device('obj'): device to configure\n interface('str'): interface name\n service_instance_id:('int'): service instance id\n storm_control('list'): list of storm control configurations\n ex.)\n [\n {\n 'traffic_flow': 'unicast',\n 'name': 'cir',\n 'val': 8000\n },\n {\n 'traffic_flow': 'broadcast',\n 'name': 'cir',\n 'val': 8000\n },\n {\n 'traffic_flow': 'multicast',\n 'name': 'cir',\n 'val': 8000\n }\n ]\n Returns:\n N/A\n Raises:\n SubCommandFailure\n \"\"\"\n log.info(\n \"Configuring storm control under service \"\n \"instance: {} and interface: {}\".format(service_instance_id, interface)\n )\n\n config = []\n config.append(\"interface {}\\n\".format(interface))\n\n config.append(\"service instance {} ethernet\\n\".format(service_instance_id))\n\n for sc in storm_control:\n traffic_flow = sc[\"traffic_flow\"]\n name = sc[\"name\"]\n val = sc[\"val\"]\n\n config.append(\n \"storm-control {} {} {}\\n\".format(traffic_flow, name, val)\n )\n try:\n device.configure(\"\".join(config))\n except SubCommandFailure as e:\n raise SubCommandFailure(\n \"Configuration failed for storm control under service \"\n \"instance: {} and interface: {} with exception: {}\".format(\n service_instance_id, interface, str(e)\n )\n )\n", "id": "6429453", "language": "Python", "matching_score": 1.8014824390411377, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/l2vpn/configure.py" }, { "content": "\"\"\"Common configure functions for mpls on junos\"\"\"\n\n# Python\nimport logging\n\n# Unicon\nfrom unicon.core.errors import SubCommandFailure\n\nlog = 
logging.getLogger(__name__)\n\n\ndef config_mpls_ldp_on_interface(device, interface):\n \"\"\" Config ldp on interface on junos device\n\n Args:\n device (`obj`): Device object\n interface (`str`): Interface name\n Return:\n None\n \"\"\"\n log.info(\"Configuring ldp on {} on {}\".format(interface, device.name))\n\n try:\n device.configure(\"set protocols ldp interface {}\".format(interface))\n except SubCommandFailure as e:\n raise SubCommandFailure(\n \"Could not configure ldp on {interface}. Error:\\n{error}\".format(\n interface=interface, error=e\n )\n )\n\n\ndef remove_mpls_ldp_from_interface(device, interface):\n \"\"\" Remove ldp on interface on junos device\n\n Args:\n device (`obj`): Device object\n interface (`str`): Interface name\n Return:\n None\n \"\"\"\n log.info(\"Removing ldp on {} on {}\".format(interface, device.name))\n\n try:\n device.configure(\"delete protocols ldp interface {}\".format(interface))\n except SubCommandFailure as e:\n raise SubCommandFailure(\n \"Could not remove ldp configuration on {interface}. Error:\\n{error}\".format(\n interface=interface, error=e\n )\n )\n", "id": "4485849", "language": "Python", "matching_score": 4.733667850494385, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/junos/mpls/configure.py" }, { "content": "\"\"\"Common configure functions for mpls\"\"\"\n\n# Python\nimport logging\n\n# Unicon\nfrom unicon.core.errors import SubCommandFailure\n\n# Genie\nfrom genie.metaparser.util.exceptions import SchemaEmptyParserError\n\nlog = logging.getLogger(__name__)\n\n\ndef config_mpls_ldp_on_interface(device, interface):\n \"\"\" Config ldp on interface\n\n Args:\n device (`obj`): Device object\n interface (`str`): Interface name\n Return:\n None\n Raise:\n SubCommandFailure: Failed configuring interface\n \"\"\"\n log.info(\"Configuring ldp on {} on {}\".format(interface, device.name))\n\n try:\n device.configure([\"interface {}\".format(interface), \"mpls ip\"])\n except SubCommandFailure:\n raise SubCommandFailure(\n 'Could not activate \"mpls ip\" on interface {interface}'.format(\n interface=interface\n )\n )\n\n\ndef remove_mpls_ldp_from_interface(device, interface):\n \"\"\" Remove ldp on interface\n\n Args:\n device (`obj`): Device object\n interface (`str`): Interface name\n Return:\n None\n Raise:\n SubCommandFailure: Failed configuring interface\n \"\"\"\n log.info(\"Removing ldp on {} on {}\".format(interface, device.name))\n\n try:\n device.configure([\"interface {}\".format(interface), \"no mpls ip\"])\n except SubCommandFailure:\n raise SubCommandFailure(\n 'Could not remove \"mpls ip\" from interface {interface}'.format(\n interface=interface\n )\n )\n", "id": "6826915", "language": "Python", "matching_score": 0.5604486465454102, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/mpls/configure.py" }, { "content": "import jinja2\r\n\r\n# Unicon\r\nfrom unicon.core.errors import SubCommandFailure\r\n\r\n# Eem\r\nfrom genie.libs.sdk.apis.iosxe.eem.verify import is_eem_applet_configured\r\n\r\n\r\n# Jinja2\r\nfrom jinja2.exceptions import TemplateNotFound\r\n\r\n\r\ndef remove_eem_applet(device, applet_name):\r\n \"\"\" Remove EEM applet from running config\r\n\r\n Args:\r\n device ('obj'): Device object\r\n applet_name ('str'): Applet name\r\n Raises:\r\n SubCommandFailure\r\n Returns:\r\n None\r\n \"\"\"\r\n\r\n if is_eem_applet_configured(device=device, applet_name=applet_name):\r\n\r\n try:\r\n device.configure(\r\n \"no event manager applet 
{name}\".format(name=applet_name)\r\n )\r\n except Exception:\r\n raise SubCommandFailure(\r\n \"Could not remove EEM applet {name} from device \"\r\n \"{dev} configuration\".format(name=applet_name, dev=device.name)\r\n )\r\n", "id": "6368875", "language": "Python", "matching_score": 2.9907400608062744, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/eem/configure.py" }, { "content": "# Running config\r\nfrom genie.libs.sdk.apis.iosxe.running_config.get import (\r\n get_running_config,\r\n)\r\n\r\n\r\ndef is_eem_applet_configured(device, applet_name):\r\n \"\"\" Verify if EEM applet is configured in running config\r\n\r\n Args:\r\n device ('obj') : Device object\r\n applet_name ('str') : Applet name\r\n Returns:\r\n True\r\n False\r\n Raises:\r\n None\r\n \"\"\"\r\n\r\n keyword = \"event manager applet {name}\".format(name=applet_name)\r\n\r\n try:\r\n\r\n running_config = get_running_config(\r\n device=device, keyword=keyword\r\n )\r\n except Exception:\r\n return False\r\n\r\n return True\r\n", "id": "6198279", "language": "Python", "matching_score": 0.16637521982192993, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/eem/verify.py" }, { "content": "'''Common verify functions for controllers'''\r\n# Python\r\nimport re\r\nimport logging\r\n\r\n# Genie\r\nfrom genie.utils.timeout import Timeout\r\n\r\nlog = logging.getLogger(__name__)\r\n\r\n\r\ndef verify_optics_in_state(device, optics, rx_power, controller_state='up', led_state='green', \r\n max_time=60, check_interval=20):\r\n ''' Verify optics state\r\n\r\n Args:\r\n device (`obj`): Device object\r\n optics (`str`): Optics port\r\n rx_power (`float`): Expected RX power\r\n controller_state (`str`): Expected controller state\r\n led_state (`str`): Expected LED state\r\n max_time (`int`): Max time\r\n check_interval (`int`): Check interval\r\n Returns:\r\n result (`bool`): Verified result\r\n '''\r\n cmd = 'show controllers optics {}'.format(optics)\r\n timeout = Timeout(max_time, check_interval)\r\n\r\n while timeout.iterate():\r\n try:\r\n out = device.parse(cmd)\r\n except Exception as e:\r\n log.error(\"Failed to parse '{}':\\n{}\".format(cmd, e))\r\n timeout.sleep()\r\n continue\r\n\r\n cs = out.get(optics, {}).get('controller_state', '').lower()\r\n ls = out.get(optics, {}).get('led_state', '').lower()\r\n rx = out.get(optics, {}).get('optics_status', {}).get('rx_power', '')\r\n\r\n # -30 dBm\r\n p = re.compile(r'(?P<power>\\S+) +dBm')\r\n m = p.search(rx)\r\n if m:\r\n rx = float(m.groupdict()['power'])\r\n\r\n log.info(\"Optics {} Controller State is {}, expected value is {}\"\r\n .format(optics, cs, controller_state))\r\n\r\n log.info(\"Optics {} LED State is {}, expected value is {}\"\r\n .format(optics, ls, led_state))\r\n\r\n log.info(\"Optics {} RX Power is {} dBm, expected value is higher than {} dBm\"\r\n .format(optics, rx, rx_power))\r\n \r\n # if rx is not float\r\n if (m and cs == controller_state.lower() and \r\n ls == led_state.lower() and \r\n rx >= rx_power):\r\n return True\r\n\r\n timeout.sleep()\r\n\r\n return False\r\n\r\n\r\ndef verify_coherentDSP_in_state(device, dsp, controller_state='up', derived_state='in service', \r\n max_time=60, check_interval=20):\r\n ''' Verify coherentDSP state\r\n\r\n Args:\r\n device (`obj`): Device object\r\n dsp (`str`): CoherentDSP port\r\n controller_state (`str`): Expected controller state\r\n derived_state (`str`): Expected derived state\r\n max_time (`int`): Max time\r\n check_interval (`int`): Check interval\r\n 
Returns:\r\n result (`bool`): Verified result\r\n '''\r\n cmd = 'show controllers coherentDSP {}'.format(dsp)\r\n timeout = Timeout(max_time, check_interval)\r\n\r\n while timeout.iterate():\r\n try:\r\n out = device.parse(cmd)\r\n except Exception as e:\r\n log.error(\"Failed to parse '{}':\\n{}\".format(cmd, e))\r\n timeout.sleep()\r\n continue\r\n\r\n cs = out.get(dsp, {}).get('controller_state', '').lower()\r\n ds = out.get(dsp, {}).get('derived_state', '').lower()\r\n\r\n log.info(\"CoherentDSP {} controller state is {}, expected value is {}\"\r\n .format(dsp, cs, controller_state))\r\n\r\n log.info(\"CoherentDSP {} derived state is {}, expected value is {}\"\r\n .format(dsp, ds, derived_state))\r\n\r\n if cs == controller_state.lower() and ds == derived_state.lower():\r\n return True\r\n \r\n timeout.sleep()\r\n \r\n return False\r\n", "id": "7958912", "language": "Python", "matching_score": 2.4621713161468506, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxr/controllers/verify.py" }, { "content": "'''Common verify functions for interface'''\r\n# Python\r\nimport logging\r\nfrom ipaddress import IPv4Interface\r\n\r\n# Genie\r\nfrom genie.utils.timeout import Timeout\r\n\r\nlog = logging.getLogger(__name__)\r\n\r\n\r\ndef verify_bundle_in_state(device, bundle, interface, status='up', link_state='active', \r\n max_time=60, check_interval=20):\r\n ''' Verify bundle state\r\n\r\n Args:\r\n device (`obj`): Device object\r\n bundle (`str`): Bundle name\r\n interface (`bool`): Bundle interface\r\n status (`str`): Expected bundle status\r\n link_state (`str`): Expected line state\r\n max_time (`int`): Max time\r\n check_interval (`int`): Check interval\r\n Returns:\r\n result (`bool`): Verified result\r\n '''\r\n cmd = 'show bundle {}'.format(bundle)\r\n timeout = Timeout(max_time, check_interval)\r\n\r\n while timeout.iterate():\r\n try:\r\n out = device.parse(cmd)\r\n except Exception as e:\r\n log.error(\"Failed to parse '{}':\\n{}\".format(cmd, e))\r\n timeout.sleep()\r\n continue\r\n\r\n oper_status = out.get('interfaces', {}).get(bundle, {})\\\r\n .get('oper_status', '').lower()\r\n\r\n ls = out.get('interfaces', {}).get(bundle, {}).get('port', {}).get(interface, {})\\\r\n .get('link_state', '').lower()\r\n\r\n log.info(\"Bundle {} status is {}, expected value is {}\"\r\n .format(bundle, oper_status, status))\r\n\r\n log.info(\"Interface {} link state is {}, expected value is {}\"\r\n .format(interface, ls, link_state))\r\n\r\n if oper_status == status.lower() and ls == link_state.lower():\r\n return True\r\n \r\n timeout.sleep()\r\n \r\n return False\r\n\r\n\r\ndef verify_interface_in_state(device, interface, verify_status=True, oper_status='up', \r\n line_protocol='up', verify_ip=True, ip='', subnet='', \r\n max_time=60, check_interval=20):\r\n ''' Verify interface state and ip\r\n\r\n Args:\r\n device (`obj`): Device object\r\n interface (`str`): Interfaces name\r\n verify_status (`bool`): To verify interface status\r\n oper_status (`str`): Expected oper status\r\n line_protocol (`str`): Expected line protocol status\r\n verify_ip (`bool`): To verify interface ip\r\n ip (`str`): Expected interface ip\r\n subnet (`str`): Expected interface ip subnet\r\n max_time (`int`): Max time\r\n check_interval (`int`): Check interval\r\n Returns:\r\n result (`bool`): Verified result\r\n '''\r\n cmd = 'show interfaces {}'.format(interface)\r\n timeout = Timeout(max_time, check_interval)\r\n\r\n while timeout.iterate():\r\n try:\r\n out = device.parse(cmd)\r\n except 
Exception as e:\r\n log.error(\"Failed to parse '{}':\\n{}\".format(cmd, e))\r\n timeout.sleep()\r\n continue\r\n \r\n result = True\r\n if verify_status:\r\n oper_status_v = out.get(interface, {}).get('oper_status', '').lower()\r\n line_protocol_v = out.get(interface, {}).get('line_protocol', '').lower()\r\n\r\n log.info(\"Interface {} status is {}, expected value is {}\"\r\n .format(interface, oper_status_v, oper_status))\r\n log.info(\"Interface {} line protocol is {}, expected value is {}\"\r\n .format(interface, line_protocol_v, line_protocol))\r\n\r\n if (oper_status_v != oper_status.lower() or \r\n line_protocol_v != line_protocol.lower()):\r\n result = False\r\n\r\n if verify_ip:\r\n ipaddr = out.get(interface, {}).get('ipv4', {}).keys()\r\n if ipaddr:\r\n ipv4 = IPv4Interface(list(ipaddr)[0])\r\n else:\r\n log.error(\"Interface {} doesn't have ipv4 address\".format(interface))\r\n timeout.sleep()\r\n continue\r\n\r\n log.info(\"Interface {} IP is {}, expected value is {}\"\r\n .format(interface, ipv4.ip, ip))\r\n if str(ipv4.ip) != ip:\r\n result = False\r\n\r\n if subnet:\r\n log.info(\"Interface {} IP netmask is {}, expected value is {}\"\r\n .format(interface, ipv4.netmask, subnet))\r\n if str(ipv4.netmask) != subnet:\r\n result = False\r\n\r\n if result:\r\n return True\r\n\r\n timeout.sleep()\r\n\r\n return False\r\n", "id": "10157238", "language": "Python", "matching_score": 1.0839216709136963, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxr/interface/verify.py" }, { "content": "'''Common verify functions for running-config'''\r\n# Python\r\nimport logging\r\n\r\n# Genie\r\nfrom genie.utils.timeout import Timeout\r\n\r\n# running-config get\r\nfrom genie.libs.sdk.apis.iosxr.running_config.get import \\\r\n get_running_config_hostname\r\n\r\nlog = logging.getLogger(__name__)\r\n\r\n\r\ndef verify_hostname(device, expected_hostname, max_time=60, check_interval=20):\r\n ''' Verify hostname is matched with expected name\r\n\r\n Args:\r\n device (`obj`): Device object\r\n expected_hostname (`str`): Expected hostname\r\n max_time (`int`): Max time\r\n check_interval (`int`): Check interval\r\n Returns:\r\n result (`bool`): Verified result\r\n '''\r\n timeout = Timeout(max_time, check_interval)\r\n\r\n while timeout.iterate():\r\n try:\r\n name = get_running_config_hostname(device)\r\n except Exception as e:\r\n log.error(e)\r\n timeout.sleep()\r\n continue\r\n\r\n log.info(\"Device hostname is {}, expected value is {}\"\r\n .format(name, expected_hostname))\r\n if name == expected_hostname:\r\n return True\r\n \r\n timeout.sleep()\r\n \r\n return False\r\n", "id": "10316200", "language": "Python", "matching_score": 1.2612769603729248, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxr/running_config/verify.py" }, { "content": "'''Common verify functions for platform'''\r\n# Python\r\nimport logging\r\n\r\n# Genie\r\nfrom genie.utils.timeout import Timeout\r\n\r\n# Platform get\r\nfrom genie.libs.sdk.apis.iosxr.platform.get import get_module_info\r\n\r\nlog = logging.getLogger(__name__)\r\n\r\n\r\ndef verify_module_serial_num(device, module, expected_serial_num, \r\n max_time=60, check_interval=20):\r\n ''' Verify module serial number is matched with expected number\r\n\r\n Args:\r\n device (`obj`): Device object\r\n module (`str`): Module name\r\n expected_serial_num (`str`): Expected serial number\r\n max_time (`int`): Max time\r\n check_interval (`int`): Check interval\r\n Returns:\r\n result (`bool`): Verified result\r\n '''\r\n 
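# Poll 'show inventory' (via get_module_info) until the serial number matches or max_time expires.\r\n 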
timeout = Timeout(max_time, check_interval)\r\n\r\n while timeout.iterate():\r\n try:\r\n sn = get_module_info(device, module, key='sn')\r\n except Exception as e:\r\n log.error(e)\r\n timeout.sleep()\r\n continue\r\n\r\n log.info(\"Module {} serial number is {}, expected value is {}\"\r\n .format(module, sn, expected_serial_num))\r\n if sn == expected_serial_num:\r\n return True\r\n \r\n timeout.sleep()\r\n \r\n return False\r\n", "id": "9147085", "language": "Python", "matching_score": 2.3724865913391113, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxr/platform/verify.py" }, { "content": "'''Common get info functions for platform'''\r\n# Python\r\nimport re\r\nimport logging\r\n\r\nlog = logging.getLogger(__name__)\r\n\r\n\r\ndef get_module_info(device, module, key='sn'):\r\n ''' Get a module's infomation\r\n\r\n Args:\r\n device (`obj`): Device object\r\n module (`str`): Module name\r\n key (`str`): Key name\r\n Returns:\r\n field (`str`): Field value\r\n '''\r\n log.info(\"Getting module '{}' key '{}' from {}\".format(\r\n module, key, device.name))\r\n try:\r\n out = device.parse('show inventory')\r\n except Exception as e:\r\n log.error(\"Failed to parse 'show inventory' on {}:\\n{}\"\r\n .format(device.name, e))\r\n raise Exception from e\r\n\r\n if module in out['module_name']:\r\n if key in out['module_name'][module]:\r\n return out['module_name'][module][key]\r\n else:\r\n raise Exception(\"module '{}' doesn't have a key named '{}'\"\r\n .format(module, key))\r\n else:\r\n raise Exception(\"Can not find a module name '{}' on device {}\"\r\n .format(module, device.name))\r\n", "id": "1154635", "language": "Python", "matching_score": 0.7006308436393738, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxr/platform/get.py" }, { "content": "# Python\r\nimport logging\r\nimport re\r\n\r\n# PyATS\r\nfrom ats.utils.objects import R, find\r\n\r\n# Genie\r\nfrom genie.utils.diff import Diff\r\nfrom genie.metaparser.util.exceptions import SchemaEmptyParserError\r\nfrom genie.utils.timeout import Timeout\r\n\r\nlog = logging.getLogger(__name__)\r\n\r\n\r\ndef get_platform_standby_rp(device, max_time=1200, interval=120):\r\n \"\"\" Get standby router slot on device\r\n Args:\r\n device ('obj'): Device object\r\n max_time ('int'): Max time in seconds retrieving router information\r\n interval ('int'): Interval in seconds retrieving router information\r\n Raise:\r\n None\r\n Return:\r\n Integer: Number of RP\r\n \"\"\"\r\n\r\n log.info(\"Getting standby slot\")\r\n\r\n rs = R(\r\n [\"slot\", \"(?P<val1>.*)\", \"rp\", \"(?P<val2>.*)\", \"state\", \"ok, standby\"]\r\n )\r\n\r\n timeout = Timeout(max_time=max_time, interval=interval)\r\n while timeout.iterate():\r\n try:\r\n output = device.parse(\"show platform\")\r\n except SchemaEmptyParserError:\r\n timeout.sleep()\r\n continue\r\n\r\n ret = find([output], rs, filter_=False, all_keys=True)\r\n if ret:\r\n standby_rp = ret[0][1][1]\r\n srp = re.search(\"(?P<srp>(\\d))\", standby_rp).groupdict()[\"srp\"]\r\n if srp:\r\n log.info(\r\n \"Standby RP on '{dev}' is: '{standby_rp}'\".format(\r\n dev=device.name, standby_rp=standby_rp\r\n )\r\n )\r\n return srp\r\n\r\n timeout.sleep()\r\n\r\n return None\r\n\r\n\r\ndef get_diffs_platform(platform_before, platform_after):\r\n \"\"\" Check differences between two parsed outputs from 'show platform'\r\n\r\n Args:\r\n platform_before ('str'): Parsed output from 'show platform'\r\n platform_after ('str'): Parsed output from 'show platform'\r\n Return:\r\n 
True\r\n False\r\n Raises:\r\n None\r\n \"\"\"\r\n\r\n dd = Diff(platform_before, platform_after, exclude=[\"insert_time\"])\r\n dd.findDiff()\r\n\r\n for slot in platform_after[\"slot\"]:\r\n for rp_lc in platform_after[\"slot\"][slot]:\r\n for type_ in platform_after[\"slot\"][slot][rp_lc]:\r\n state_after = platform_after[\"slot\"][slot][rp_lc][type_][\r\n \"state\"\r\n ]\r\n\r\n state_before = (\r\n platform_before[\"slot\"]\r\n .get(slot, {})\r\n .get(rp_lc, {})\r\n .get(type_)\r\n .get(\"state\", False)\r\n )\r\n\r\n if not state_before:\r\n log.info(\r\n \"Found differences between outputs:\\n{out}\".format(\r\n out=dd\r\n )\r\n )\r\n return False\r\n\r\n for subslot in platform_before[\"slot\"][slot][rp_lc].get(\r\n \"subslot\", []\r\n ):\r\n\r\n subslot_state = (\r\n platform_after[\"slot\"][slot][rp_lc]\r\n .get(subslot, {})\r\n .get(\"state\", False)\r\n )\r\n\r\n if not subslot_state:\r\n log.info(\r\n \"Found differences between outputs:\\n{out}\".format(\r\n out=dd\r\n )\r\n )\r\n\r\n if \"ok\" in state_after and \"ok\" in state_before:\r\n continue\r\n else:\r\n log.info(\r\n \"Found differences between outputs:\\n{out}\".format(\r\n out=dd\r\n )\r\n )\r\n return False\r\n return True\r\n", "id": "296952", "language": "Python", "matching_score": 4.283339023590088, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/platform/get.py" }, { "content": "import logging\r\n\r\n# Genie\r\nfrom genie.utils.timeout import Timeout\r\nfrom genie.metaparser.util.exceptions import SchemaEmptyParserError\r\n\r\n# PyATS\r\nfrom ats.utils.objects import R, find\r\n\r\n# PLATFORM\r\nfrom genie.libs.sdk.apis.iosxe.platform.get import get_diffs_platform\r\n\r\nlog = logging.getLogger(__name__)\r\n\r\n\r\ndef is_platform_slot_in_state(\r\n device, slot, state=\"ok, active\", max_time=1200, interval=120\r\n):\r\n \"\"\" Verify if slot is in state\r\n\r\n Args:\r\n device ('obj'): Device object\r\n slot ('str'): Slot number\r\n state ('str'): State being checked\r\n max_time ('int'): Max time checking\r\n interval ('int'): Interval checking\r\n Return:\r\n True\r\n False\r\n Raises:\r\n None\r\n \"\"\"\r\n log.info(\"Verifying state of slot {slot}\".format(slot=slot))\r\n timeout = Timeout(max_time=max_time, interval=interval)\r\n\r\n rs = R([\"slot\", slot, \"rp\", \"(?P<val2>.*)\", \"state\", state])\r\n\r\n while timeout.iterate():\r\n try:\r\n output = device.parse(\"show platform\")\r\n except SchemaEmptyParserError:\r\n timeout.sleep()\r\n continue\r\n\r\n ret = find([output], rs, filter_=False, all_keys=True)\r\n if ret:\r\n log.info(\r\n \"Slot {slot} reached state '{state}'\".format(\r\n slot=slot, state=state\r\n )\r\n )\r\n return True\r\n\r\n timeout.sleep()\r\n\r\n return False\r\n\r\n\r\ndef verify_changes_platform(\r\n device, platform_before, platform_after, max_time=1200, interval=120\r\n):\r\n \"\"\" Verify if there are changes between outputs from 'show platform'\r\n Args:\r\n device ('obj'): Device object\r\n platform_before ('str'): Parsed output from 'show platform'\r\n platform_after ('str'): Parsed output from 'show platform'\r\n max_time ('int'): Max time in seconds retrying\r\n interval ('int'): Interval of each retry\r\n Return:\r\n True\r\n False\r\n Raises:\r\n None\r\n \"\"\"\r\n\r\n timeout = Timeout(max_time=max_time, interval=interval)\r\n while timeout.iterate():\r\n if get_diffs_platform(\r\n platform_before=platform_before, platform_after=platform_after\r\n ):\r\n return True\r\n else:\r\n try:\r\n platform_after = device.parse(\"show 
platform\")\r\n except SchemaEmptyParserError:\r\n pass\r\n\r\n timeout.sleep()\r\n\r\n return False\r\n", "id": "3289728", "language": "Python", "matching_score": 2.117271900177002, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/platform/verify.py" }, { "content": "\"\"\"Common verify functions for mpls\"\"\"\n\n# Python\nimport logging\nfrom netaddr import IPNetwork\n\n# pyats\nfrom pyats.utils.objects import find, R\n\n# Genie\nfrom genie.utils.timeout import Timeout\nfrom genie.libs.sdk.libs.utils.normalize import GroupKeys\nfrom genie.metaparser.util.exceptions import SchemaEmptyParserError\n\n# Mpls\nfrom genie.libs.sdk.apis.iosxe.mpls.get import get_mpls_ldp_peer_state\n\nlog = logging.getLogger(__name__)\n\n\ndef verify_mpls_forwarding_table_outgoing_label(\n device, ip, expected_label=\"\", same_as_local=False,\n max_time=30, check_interval=10):\n \"\"\" Verify local and remote binding labels for ipv4\n\n Args:\n device (`obj`): Device object\n ip (`str`): IP address\n expected_label (`str`): Expected label\n same_as_local (`bool`):\n True if verify outgoing labels with local label\n False if verify outgoing labels with expected label\n max_time (`int`): Max time, default: 30\n check_interval (`int`): Check interval, default: 10\n Returns:\n result (`bool`): Verified result\n \"\"\"\n timeout = Timeout(max_time, check_interval)\n while timeout.iterate():\n result = True\n\n try:\n out = device.parse('show mpls forwarding-table {}'.format(ip))\n except SchemaEmptyParserError:\n log.info(\"Device output is empty.\")\n result = False\n timeout.sleep()\n continue\n\n reqs = R(['vrf', '(.*)',\n 'local_label', '(?P<local_label>.*)',\n 'outgoing_label_or_vc', '(?P<outgoing_label>.*)',\n 'prefix_or_tunnel_id', '(?P<prefix>.*)',\n 'outgoing_interface', '(?P<interface>.*)',\n 'next_hop', '(?P<next_hop>.*)'])\n found = find([out], reqs, filter_=False, all_keys=True)\n\n if found:\n keys = GroupKeys.group_keys(reqs=reqs.args, ret_num={},\n source=found, all_keys=True)\n for route in keys:\n if same_as_local:\n log.info(\"Interface {route[interface]} has local label \"\n \"'{route[local_label]}' and outgoing label \"\n \"'{route[outgoing_label]}'\".format(route=route))\n if str(route['outgoing_label']) != str(route['local_label']):\n result = False\n else:\n log.info(\"Interface {route[interface]} outgoing label is \"\n \"'{route[outgoing_label]}', exepected to have label \"\n \"'{expected}'\".format(route=route, expected=expected_label))\n if str(route['outgoing_label']) != str(expected_label):\n result = False\n else:\n log.error(\"Could not find any mpls route for {}\".format(ip))\n result = False\n\n if result is True:\n return result\n\n timeout.sleep()\n\n return result\n\n\ndef is_interface_igp_sync_mpls_enabled(\n interface, device, vrf=\"\", parsed_output=\"\"\n):\n \"\"\" Verifies if interface has LDP IGP sync enabled \n from command 'show mpls ldp igp sync'\n \n Args:\n parsed_output ('dict') : Output from parser\n interface ('str') : Interface being checked\n vrf ('str') : vrf name\n device ('str') : Device to be executed commands\n Raises:\n None\n\n Returns\n True\n False\n\n \"\"\"\n\n if not parsed_output:\n try:\n parsed_output = device.parse(\n \"show mpls ldp igp sync interface {intf}\".format(\n intf=interface\n )\n )\n except SchemaEmptyParserError:\n raise SchemaEmptyParserError(\n \"Fail to parse 'show mpls ldp igp sync \"\n \"interface {intf}' command\".format(intf=interface)\n )\n\n vrf = vrf if vrf else \"default\"\n\n try:\n 
igp_synchronization_enabled = (\n parsed_output[\"vrf\"]\n .get(vrf, {})\n .get(\"interface\", {})\n .get(interface, {})\n .get(\"ldp\", {})\n .get(\"igp_synchronization_enabled\", False)\n )\n\n sync_achieved = (\n parsed_output[\"vrf\"]\n .get(vrf, {})\n .get(\"interface\", {})\n .get(interface, {})\n .get(\"sync\", {})\n .get(\"status\", {})\n .get(\"sync_achieved\", False)\n )\n except KeyError:\n return False\n\n return igp_synchronization_enabled and sync_achieved\n\n\ndef verify_mpls_binding_label(device, ipv4, vrf=None):\n \"\"\" Verify local and remote binding labels for ipv4\n\n Args:\n device (`obj`): Device object\n vrf (`str`): Vrf name\n ipv4 (`str`): ipv4 with prefix\n Returns:\n verified result\n Raises:\n None\n \"\"\"\n result = []\n try:\n out = device.parse(\"show mpls ldp bindings\")\n except SchemaEmptyParserError:\n return result\n vrf = vrf if vrf else \"default\"\n lib_dict = None\n try:\n lib_dict = out[\"vrf\"][vrf][\"lib_entry\"]\n except KeyError as ke:\n log.error(\"Could not find key, error: {}\".format(str(ke)))\n return False\n\n if lib_dict and ipv4 in lib_dict:\n local = lib_dict[ipv4].get(\"label_binding\").get(\"label\")\n remote = lib_dict[ipv4].get(\"remote_binding\").get(\"label\")\n if local and remote:\n result.append(\n \"Local label for {ipv4} is {local}\".format(\n ipv4=ipv4, local=list(local)\n )\n )\n result.append(\n \"Remote label for {ipv4} is {remote}\".format(\n ipv4=ipv4, remote=list(remote)\n )\n )\n else:\n return result\n\n return \"\\n\".join(result)\n\n\ndef is_mpls_ldp_neighbor_in_state(\n device, interface, state, max_time=60, check_interval=10\n):\n \"\"\" Checks if ldp neighbor is in state\n\n Args:\n device ('obj'): device to use\n interface ('str'): interface to search under\n state ('str'): state\n\n return:\n True\n False\n Raises:\n None\n \"\"\"\n log.info(\"Checking if ldp neighbor is in state: {}\".format(state))\n timeout = Timeout(max_time, check_interval)\n\n while timeout.iterate():\n current_state = get_mpls_ldp_peer_state(device, interface)\n if current_state and state in current_state:\n return True\n\n timeout.sleep()\n\n return False\n\n\ndef verify_mpls_forwarding_table_has_prefix_in_subnet_range(device, subnet, max_time=30, check_interval=10):\n \"\"\" Verifies local label for entries with a prefix inside subnet\n\n Args:\n device ('obj'): Device to use\n subnet ('str'): Subnet to verify inside\n max_time ('int'): Max time to check\n check_interval ('int'): How often to check\n\n returns:\n True/False\n\n raises:\n N/A\n \"\"\"\n log.info('Checking atleast one entry has a prefix in subnet {subnet} range'\n .format(subnet=subnet))\n\n try:\n subnet = IPNetwork(subnet)\n except Exception:\n log.info('Bad subnet provided')\n return False\n\n timeout = Timeout(max_time, check_interval)\n while timeout.iterate():\n try:\n out = device.parse('show mpls forwarding-table')\n except SchemaEmptyParserError:\n log.info('Parser output is empty')\n timeout.sleep()\n continue\n\n reqs = R(['vrf',\n '(.*)',\n 'local_label',\n '(?P<local_label>.*)',\n 'outgoing_label_or_vc',\n '(.*)',\n 'prefix_or_tunnel_id',\n '(?P<prefix>.*)',\n 'outgoing_interface',\n '(.*)', ])\n\n found = find([out], reqs, filter_=False, all_keys=True)\n if found:\n keys = GroupKeys.group_keys(reqs=reqs.args, ret_num={}, source=found, all_keys=True)\n\n for key in keys:\n try:\n prefix = IPNetwork(key['prefix'])\n except Exception:\n continue\n\n if prefix in subnet:\n return True\n\n return False\n\n\ndef 
verify_mpls_forwarding_table_local_label_for_subnet(device, subnet, min_range, max_range, in_range=True, max_time=60, check_interval=15):\n \"\"\" Verifies local label for entries with a prefix inside subnet\n\n Args:\n device ('obj'): Device to use\n subnet ('str'): Subnet to verify inside\n min_range ('int'): Minimum label\n max_range ('int'): Maximum label\n in_range ('bool'): True to verify between min_range/max_range, False to verify outside\n max_time ('int'): Max time to check\n check_interval ('int'): How often to check\n\n returns:\n True/False\n\n raises:\n N/A\n \"\"\"\n\n log.info('Checking all entries where the prefix falls inside subnet {subnet} range'\n .format(subnet=subnet))\n\n try:\n subnet = IPNetwork(subnet)\n except Exception:\n log.info('Bad subnet provided')\n return False\n\n timeout = Timeout(max_time, check_interval)\n while timeout.iterate():\n result = True\n\n try:\n out = device.parse('show mpls forwarding-table')\n except SchemaEmptyParserError:\n log.info('Parser output is empty')\n timeout.sleep()\n continue\n\n reqs = R(['vrf',\n '(.*)',\n 'local_label',\n '(?P<local_label>.*)',\n 'outgoing_label_or_vc',\n '(.*)',\n 'prefix_or_tunnel_id',\n '(?P<prefix>.*)',\n 'outgoing_interface',\n '(.*)'])\n\n found = find([out], reqs, filter_=False, all_keys=True)\n if found:\n keys = GroupKeys.group_keys(reqs=reqs.args, ret_num={}, source=found, all_keys=True)\n\n for key in keys:\n try:\n prefix = IPNetwork(key['prefix'])\n except Exception:\n continue\n\n if prefix in subnet:\n if in_range and min_range <= key['local_label'] <= max_range:\n continue\n elif in_range and not min_range <= key['local_label'] <= max_range:\n log.info('Entry with prefix {prefix} has label {label} which is outside '\n 'given range {range}. Expected to be inside.'\n .format(prefix=prefix,\n label=key['local_label'],\n range='{}-{}'.format(min_range, max_range)))\n result = False\n elif not in_range and min_range <= key['local_label'] <= max_range:\n log.info('Entry with prefix {prefix} has label {label} which is inside '\n 'given range {range}. 
Expected to be outside.'\n .format(prefix=prefix,\n label=key['local_label'],\n range='{}-{}'.format(min_range, max_range)))\n result = False\n elif not in_range and not min_range <= key['local_label'] <= max_range:\n continue\n\n if result:\n return True\n\n timeout.sleep()\n\n return False\n", "id": "11407980", "language": "Python", "matching_score": 6.3398895263671875, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/mpls/verify.py" }, { "content": "\"\"\"Common verify functions for MPLS\"\"\"\n\n# Python\nimport logging\n\n# pyats\nfrom pyats.utils.objects import find, R\n\n# Genie\nfrom genie.utils.timeout import Timeout\nfrom genie.libs.sdk.libs.utils.normalize import GroupKeys\nfrom genie.metaparser.util.exceptions import SchemaEmptyParserError\n\nlog = logging.getLogger(__name__)\n\n\ndef verify_segment_routing_gb_range(\n device, expected_label_min, expected_label_max, max_time=30, check_interval=10):\n \"\"\" Verify segment routing global block range\n\n Args:\n device (`obj`): Device object\n expected_label_min (`int`): Segment routing global block start\n expected_label_max (`int`): Segment routing global block end\n max_time (`int`): Max time, default: 30\n check_interval (`int`): Check interval, default: 10\n Returns:\n result (`bool`): Verified result\n \"\"\"\n\n try:\n out = device.parse('show mpls label table detail')\n except SchemaEmptyParserError:\n log.info(\"Device output is empty.\")\n return False\n\n reqs = R(['table', '(.*)',\n 'label', '(?P<label>.*)',\n 'label_type', '(?P<label_type>[Lbl\\-blk.*])',\n '(?P<start_label>.*)'])\n\n found = find([out], reqs, filter_=False, all_keys=True)\n\n if found:\n for item in found:\n if 'Lbl-blk SRGB' in item[1]:\n # Those are the reachability checks\n if expected_label_min == item[0]['start_label'] and \\\n item[0]['start_label'] + item[0]['size'] > expected_label_max:\n return True\n return False\n\n\ndef is_interface_igp_sync_mpls_enabled(\n interface, device, vrf=\"\", parsed_output=\"\"\n):\n \"\"\" Verifies if interface has LDP IGP sync enabled \n from command 'show mpls ldp igp sync'\n \n Args:\n parsed_output ('dict') : Output from parser\n interface ('str') : Interface being checked\n vrf ('str') : vrf name\n device ('str') : Device to be executed commands\n Raises:\n None\n\n Returns\n True\n False\n\n \"\"\"\n\n if not parsed_output:\n try:\n parsed_output = device.parse(\n \"show mpls ldp igp sync interface {intf}\".format(\n intf=interface\n )\n )\n except SchemaEmptyParserError:\n raise SchemaEmptyParserError(\n \"Fail to parse 'show mpls ldp igp sync \"\n \"interface {intf}' command\".format(intf=interface)\n )\n\n vrf = vrf if vrf else \"default\"\n\n try:\n igp_synchronization_enabled = (\n parsed_output[\"vrf\"]\n .get(vrf, {})\n .get(\"interface\", {})\n .get(interface, {})\n .get(\"ldp\", {})\n .get(\"igp_synchronization_enabled\", False)\n )\n\n sync_achieved = (\n parsed_output[\"vrf\"]\n .get(vrf, {})\n .get(\"interface\", {})\n .get(interface, {})\n .get(\"sync\", {})\n .get(\"status\", {})\n .get(\"sync_achieved\", False)\n )\n except KeyError:\n return False\n\n return igp_synchronization_enabled and sync_achieved\n\n\ndef verify_mpls_binding_label(device, prefix, vrf=None):\n \"\"\" Verify local and remote binding labels for prefix\n\n Args:\n device (`obj`): Device object\n vrf (`str`): Vrf name\n prefix (`str`): ipv4/ipv6\n Returns:\n verified result\n Raises:\n None\n \"\"\"\n result = []\n try:\n out = device.parse(\"show mpls ldp bindings\")\n except 
SchemaEmptyParserError:\n return result\n vrf = vrf if vrf else \"default\"\n lib_dict = None\n try:\n lib_dict = out[\"vrf\"][vrf][\"lib_entry\"]\n except KeyError as ke:\n log.error(\"Could not find key, error: {}\".format(str(ke)))\n return False\n\n if lib_dict and prefix in lib_dict:\n local = lib_dict[prefix].get(\"label_binding\").get(\"label\")\n remote = lib_dict[prefix].get(\"remote_binding\").get(\"label\")\n if local and remote:\n result.append(\n \"Local label for {prefix} is {local}\".format(\n prefix=prefix, local=list(local)\n )\n )\n result.append(\n \"Remote label for {prefix} is {remote}\".format(\n prefix=prefix, remote=list(remote)\n )\n )\n else:\n return result\n\n return \"\\n\".join(result)\n\n\ndef is_mpls_ldp_neighbor_in_state(\n device, interface, state, max_time=60, check_interval=10\n):\n \"\"\" Checks if ldp neighbor is in state\n\n Args:\n device ('obj'): device to use\n interface ('str'): interface to search under\n state ('str'): state\n\n return:\n True\n False\n Raises:\n None\n \"\"\"\n log.info(\"Checking if ldp neighbor is in state: {}\".format(state))\n timeout = Timeout(max_time, check_interval)\n\n while timeout.iterate():\n current_state = device.apis.get_mpls_ldp_peer_state(interface)\n if current_state and state in current_state:\n return True\n\n timeout.sleep()\n\n return False\n", "id": "5739681", "language": "Python", "matching_score": 3.3750295639038086, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxr/mpls/verify.py" }, { "content": "\"\"\"Common verification functions for Segment-Routing\"\"\"\n\n# Python\nimport re\nimport logging\n\n# pyATS\nfrom genie.utils.timeout import Timeout\nfrom ats.utils.objects import find, R\n\n# Genie\nfrom genie.utils.timeout import Timeout\nfrom genie.metaparser.util.exceptions import SchemaEmptyParserError\nfrom genie.libs.sdk.libs.utils.normalize import GroupKeys\n\nlog = logging.getLogger(__name__) \n\n\ndef verify_segment_routing_policy_attributes(device, policy, expected_bsid=None, \n expected_mode='dynamic', expected_state='programmed', policy_dict=None, \n max_time=30, check_interval=10):\n \"\"\" Verify segment-routing policy attributes is as expected\n using 'show segment-routing traffic-eng policy name {policy}'\n \n Args:\n device (`obj`): Device object\n policy (`str`): Policy name\n expected_bsid (`int`): Expected Binding SID\n expected_mode (`str`): Expected allocation mode\n expected_state (`str`): Expected binding state\n policy_dict (`dict`): Policy dict from parser output \n IOSXE Parser - ShowSegmentRoutingTrafficEngPolicy\n cmd - show segment-routing traffic-eng policy all\n max_time (`int`): Max time, default: 30\n check_interval (`int`): Check interval, default: 10\n Returns\n result (`bool`): Verified result\n sid (`int`): Binding sid\n \"\"\"\n cmd = 'show segment-routing traffic-eng policy name {policy}'.format(policy=policy)\n timeout = Timeout(max_time, check_interval)\n\n while timeout.iterate():\n\n if policy_dict is None:\n try:\n out = device.parse(cmd)\n except Exception as e:\n log.error(\"Failed to parse '{cmd}': {e}\".format(cmd=cmd, e=e))\n timeout.sleep()\n continue\n else:\n out = policy_dict\n\n bsid_dict = out.get(policy, {}).get('attributes', {}).get('binding_sid', {})\n\n if bsid_dict:\n for key, value in bsid_dict.items():\n sid = key\n mode = value.get('allocation_mode', '')\n state = value.get('state', '')\n else:\n log.error(\"No binding SID was found in policy '{policy}'\"\n .format(policy=policy))\n timeout.sleep()\n continue\n\n 
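# No expected BSID supplied: only sanity-check that the allocated SID is numeric.\n 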
check_sid = True\n if expected_bsid is None:\n log.info(\"Policy {policy} binding SID is {sid}, expected it to \"\n \"be an integer\".format(policy=policy, sid=sid))\n check_sid = isinstance(sid, int)\n else:\n log.info(\"Policy {policy} binding SID is {sid}, expected value \"\n \"is {expected_bsid}\".format(policy=policy,\n sid=sid, expected_bsid=expected_bsid))\n check_sid = str(sid) == str(expected_bsid)\n\n log.info(\"Policy {policy} allocation mode is {mode}, expected value \"\n \"is {expected_mode}\".format(policy=policy,\n mode=mode, expected_mode=expected_mode))\n log.info(\"Policy {policy} binding state is {state}, expected value \"\n \"is {expected_state}\".format(policy=policy,\n state=state, expected_state=expected_state))\n\n if (mode.lower() == expected_mode.lower() and \n state.lower() == expected_state.lower() and check_sid):\n return True, sid\n\n timeout.sleep()\n\n return False, None\n\n\ndef verify_segment_routing_policy_state(device, policy, expected_admin='up', \n expected_oper='up', max_time=30, check_interval=10):\n \"\"\" Verify segment-routing policy state is as expected (Admin/Operational)\n using 'show segment-routing traffic-eng policy name {policy}'\n \n Args:\n device (`obj`): Device object\n policy (`str`): Policy name\n expected_admin (`str`): Expected admin state\n expected_oper (`str`): Expected operational state\n max_time (`int`): Max time, default: 30\n check_interval (`int`): Check interval, default: 10\n Returns\n result (`bool`): Verified result\n \"\"\"\n cmd = 'show segment-routing traffic-eng policy name {policy}'.format(policy=policy)\n timeout = Timeout(max_time, check_interval)\n\n while timeout.iterate():\n try:\n out = device.parse(cmd)\n except Exception as e:\n log.error(\"Failed to parse '{cmd}': {e}\".format(cmd=cmd, e=e))\n timeout.sleep()\n continue\n\n admin = out.get(policy, {}).get('status', {}).get('admin', '')\n oper = out.get(policy, {}).get('status', {}).\\\n get('operational', {}).get('state', '')\n\n log.info(\"Policy {policy} Admin state is {admin}, expected state \"\n \"is {expected_admin}\".format(policy=policy, admin=admin,\n expected_admin=expected_admin))\n log.info(\"Policy {policy} Operational state is {oper}, expected \"\n \"state is {expected_oper}\".format(policy=policy,\n oper=oper, expected_oper=expected_oper))\n\n if (admin.lower() == expected_admin.lower() and \n oper.lower() == expected_oper.lower()):\n return True\n\n timeout.sleep()\n\n return False\n\n\ndef verify_segment_routing_policy_hops(device, policy, segment_list, \n max_time=30, check_interval=10):\n \"\"\" Verify segment-routing policy hops with order and extract labels\n using 'show segment-routing traffic-eng policy name {policy}'\n \n Args:\n device (`obj`): Device object\n policy (`str`): Policy name\n segment_list (`list`): Segment list to verify\n max_time (`int`): Max time, default: 30\n check_interval (`int`): Check interval, default: 10\n Returns\n result (`bool`): Verified result\n labels (`list`): Hops labels\n \"\"\"\n cmd = 'show segment-routing traffic-eng policy name {policy}'.format(policy=policy)\n timeout = Timeout(max_time, check_interval)\n\n while timeout.iterate():\n try:\n out = device.parse(cmd)\n except Exception as e:\n log.error(\"Failed to parse '{cmd}': {e}\".format(cmd=cmd, e=e))\n timeout.sleep()\n continue\n\n # Get label or ip value to verify (with order)\n slist = []\n p = re.compile(r'[\\d\\.]+$')\n for line in segment_list:\n value = p.search(line).group()\n if value:\n slist.append(value)\n\n reqs = 
R([policy,'candidate_paths',\n 'preference','(?P<preference>.*)',\n 'path_type','explicit',\n '(?P<category>.*)','(?P<name>.*)',\n 'hops','(?P<hops>.*)'])\n found = find([out], reqs, filter_=False, all_keys=True)\n\n labels = []\n result = True\n if found:\n item = found[0][0]\n if len(item) == len(slist):\n for index, dct in sorted(item.items()):\n s_value = slist[index-1]\n sid = dct.get('sid', '')\n local_address = dct.get('local_address', '')\n remote_address = dct.get('remote_address', '')\n log.info(\"Expect '{val}' is present in label '{sid}', \"\n \"or local_address '{local_address}', or \"\n \"remote_address '{remote_address}'\".format(\n val=s_value, sid=sid, \n local_address=local_address, \n remote_address=remote_address))\n if (s_value == str(sid) or \n s_value == local_address or \n s_value == remote_address):\n labels.append(sid)\n else:\n result = False\n else:\n log.error(\"The length of segment list does not match:\\n\"\n \"Configured value: {conf} Operational value: {oper}\"\n .format(conf=len(item), oper=len(slist)))\n result = False\n else:\n log.error(\"Failed to find any hop in policy '{policy}'\".format(policy=policy))\n result = False\n\n if result:\n return result, labels\n\n timeout.sleep()\n\n return False, None\n\n\ndef verify_segment_routing_dynamic_metric_type(device, policy, expected_type='TE', \n max_time=30, check_interval=10):\n \"\"\" Verify segment-routing metric type under dynamic path with active state\n using 'show segment-routing traffic-eng policy name {policy}'\n \n Args:\n device (`obj`): Device object\n policy (`str`): Policy name\n expected_type (`str`): Expected metric type\n max_time (`int`): Max time, default: 30\n check_interval (`int`): Check interval, default: 10\n Returns\n result (`bool`): Verified result\n \"\"\"\n cmd = 'show segment-routing traffic-eng policy name {policy}'.format(policy=policy)\n timeout = Timeout(max_time, check_interval)\n\n while timeout.iterate():\n try:\n out = device.parse(cmd)\n except Exception as e:\n log.error(\"Failed to parse '{cmd}': {e}\".format(cmd=cmd, e=e))\n timeout.sleep()\n continue\n\n reqs = R([policy,'candidate_paths',\n 'preference','(?P<preference>.*)',\n 'path_type','dynamic','(?P<path>.*)'])\n found = find([out], reqs, filter_=False, all_keys=True)\n\n for item in found:\n if item[0]['status'].lower() == 'active':\n metric_type = item[0]['metric_type']\n break\n else:\n log.error(\"Failed to find a dynamic path in active state\")\n timeout.sleep()\n continue\n\n log.info(\"Policy {policy} active dynamic path's metric_type is \"\n \"{metric_type}, expected type is {expected_type}\".format(\n policy=policy, metric_type=metric_type, \n expected_type=expected_type))\n\n if (metric_type.lower() == expected_type.lower()):\n return True\n\n timeout.sleep()\n \n return False\n\n\ndef verify_sid_in_segment_routing(device, address_family=\"ipv4\", local=False):\n \"\"\" Verifies if SID is found in segment-routing\n from command 'show segment-routing mpls connected-prefix-sid-map ipv4' or\n from command 'show segment-routing mpls connected-prefix-sid-map local ipv4'\n \n Args:\n device (`obj`): Device to be executed command\n address_family (`str`): Address family name\n local (`bool`): Flag to check command with local\n\n Raises:\n None\n Returns\n True/False\n\n \"\"\"\n\n try:\n if local:\n out = device.parse(\n \"show segment-routing mpls connected-prefix-sid-map local {}\".format(\n address_family\n ),\n local=True,\n )\n else:\n out = device.parse(\n \"show segment-routing mpls 
connected-prefix-sid-map {}\".format(\n address_family\n )\n )\n except SchemaEmptyParserError:\n return False\n sid_count = 0\n try:\n sid_count = len(\n out[\"segment_routing\"][\"bindings\"][\"connected_prefix_sid_map\"][\n address_family\n ][\n \"ipv4_prefix_sid\"\n if address_family == \"ipv4\"\n else \"ipv6_prefix_sid\"\n ].keys()\n )\n except KeyError:\n pass\n return sid_count != 0\n\n\ndef verify_status_of_segment_routing(device, state=\"ENABLED\"):\n \"\"\" Verifies if state matches the expected state in segment-routing\n from command 'show segment-routing mpls state'\n\n Args:\n device (`obj`): Device to be executed command\n state (`str`): Expected state\n Raises:\n None\n Returns\n True/False\n\n \"\"\"\n\n state_found = None\n try:\n out = device.parse(\"show segment-routing mpls state\")\n except SchemaEmptyParserError:\n return False\n try:\n state_found = out[\"sr_mpls_state\"]\n except KeyError:\n return False\n return state_found.upper() == state.upper()\n\n\ndef verify_ip_and_sid_in_segment_routing(device, address_sid_dict, algorithm, \n address_family='ipv4', local=False, max_time=90, check_interval=10, \n expected_result=True):\n \"\"\" Verifies if IP address and SID are present in Segment Routing\n from command 'show segment-routing mpls connected-prefix-sid-map local <address_family>' or\n from command 'show segment-routing mpls connected-prefix-sid-map <address_family>'\n Args:\n device (`obj`): Device to be executed command\n address_sid_dict (`dict`): Dictionary containing ip address and SID as key and value pair\n ex.)\n {\n '10.4.1.1/32': 1,\n '10.4.1.2/32': 2,\n } \n algorithm (`str`): Algorithm to check\n ex.) \n algorithm = 'ALGO_0'\n address_family (`str`): Address family\n local (`bool`): Flag to check command with local\n max_time ('int'): maximum time to wait\n check_interval ('int'): how often to check\n expected_result ('bool'): Expected result\n set expected_result = False if method should fail\n set expected_result = True if method should pass (default value)\n \n Raises:\n None\n Returns\n True/False\n\n \"\"\"\n\n prefix_mapping = {\n 'ipv4': 'ipv4_prefix_sid',\n 'ipv6': 'ipv6_prefix_sid'\n }\n\n prefix_mapping_local = {\n 'ipv4': 'ipv4_prefix_sid_local',\n 'ipv6': 'ipv6_prefix_sid_local'\n }\n \n timeout = Timeout(max_time, check_interval)\n while timeout.iterate():\n out = None\n try:\n if local:\n out = device.parse(\n \"show segment-routing mpls connected-prefix-sid-map local {}\".format(address_family)\n )\n else:\n out = device.parse(\n \"show segment-routing mpls connected-prefix-sid-map {}\".format(address_family)\n )\n except SchemaEmptyParserError:\n pass\n \n found_local = None\n found = None \n \n for ip_address, sid in address_sid_dict.items():\n \n # find using Prefix SID local\n # It will use ipv4_prefix_sid_local or ipv6_prefix_sid_local as key for search data\n # based on address_family provided\n sid = str(sid)\n if out:\n reqs_local = R(\n ['segment_routing',\n 'bindings',\n 'local_prefix_sid',\n address_family,\n prefix_mapping_local[address_family],\n ip_address,\n 'algorithm',\n algorithm,\n 'sid',\n sid]\n )\n \n # find using just Prefix SID\n # It will use ipv4_prefix_sid or ipv6_prefix_sid as key for search data\n # based on address_family provided\n reqs = R(\n ['segment_routing',\n 'bindings',\n 'connected_prefix_sid_map',\n address_family,\n prefix_mapping[address_family],\n ip_address,\n 'algorithm',\n algorithm,\n 'sid',\n sid]\n )\n \n found_local = find([out], reqs_local, filter_=False, all_keys=True)\n found 
= find([out], reqs, filter_=False, all_keys=True)\n \n # Returns false if SID is not found Prefix SID or Prefix SID local\n if not expected_result and (not found_local or not found):\n return expected_result\n \n if expected_result and found_local and found:\n return expected_result\n \n timeout.sleep()\n\n return False\n\n\ndef verify_segment_routing_lb_range(\n device,\n expected_minimum=None,\n expected_maximum=None,\n max_time=30,\n check_interval=10,\n):\n \"\"\" Verifies the segment routing lb range is as expected\n\n Args:\n device ('obj'): device to use\n expected_minimum ('int'): expected label range minimum to compare against. Ignored if None\n expected_maximum ('int'): expected label range maximum to compare against. Ignored if None\n max_time ('int'): maximum time to keep checking\n check_interval ('int'): how often to check\n\n Returns:\n True/False\n\n Raises:\n None\n \"\"\"\n timeout = Timeout(max_time, check_interval)\n while timeout.iterate():\n minimum, maximum = device.api.get_segment_routing_lb_range(\n device=device\n )\n if not (\n (expected_minimum and expected_minimum != minimum)\n or (expected_maximum and expected_maximum != maximum)\n ):\n return True\n\n if expected_minimum and expected_minimum != minimum:\n log.info(\n \"Actual minimum of {actual} does not equal expected minimum of {expected}\".format(\n actual=minimum, expected=expected_minimum\n )\n )\n\n if expected_maximum and expected_maximum != maximum:\n log.info(\n \"Actual maximum of {actual} does not equal expected maximum of {expected}\".format(\n actual=maximum, expected=expected_maximum\n )\n )\n\n timeout.sleep()\n\n return False\n\n\ndef verify_segment_routing_gb_range(\n device,\n expected_minimum=None,\n expected_maximum=None,\n max_time=30,\n check_interval=10,\n):\n \"\"\" Verifies the segment routing gb range is as expected\n\n Args:\n device ('obj'): device to use\n expected_minimum ('int'): expected label range minimum to compare against. Ignored if None\n expected_maximum ('int'): expected label range maximum to compare against. 
Ignored if None\n max_time ('int'): maximum time to keep checking\n check_interval ('int'): how often to check\n\n Returns:\n True/False\n\n Raises:\n None\n \"\"\"\n timeout = Timeout(max_time, check_interval)\n while timeout.iterate():\n minimum, maximum = device.api.get_segment_routing_gb_range(\n device=device\n )\n if not (\n (expected_minimum and expected_minimum != minimum)\n or (expected_maximum and expected_maximum != maximum)\n ):\n return True\n\n if expected_minimum and expected_minimum != minimum:\n log.info(\n \"Actual minimum of {actual} does not equal expected minimum of {expected}\".format(\n actual=minimum, expected=expected_minimum\n )\n )\n\n if expected_maximum and expected_maximum != maximum:\n log.info(\n \"Actual maximum of {actual} does not equal expected maximum of {expected}\".format(\n actual=maximum, expected=expected_maximum\n )\n )\n\n timeout.sleep()\n\n return False\n\n\ndef verify_ip_and_sid_in_segment_routing_mapping_server(device, address_sid_dict, address_family, \n algorithm, mapping_server, max_time=300, check_interval=30, expected_result=True, output=None):\n \"\"\" Verifies if IP address and SID is present in Segment Routing mapping server\n from show segment-routing mpls mapping-server {address_family}'\n Args:\n device (`obj`): Device to be executed command\n address_family (`str`): Address family\n address_sid_dict (`dict`): Dictionary containing ip address and SID as key and value pair\n ex.)\n {\n '10.4.1.1/32': 1,\n '10.4.1.2/32': 2,\n } \n algorithm (`str`): Algorithm to check\n ex.) \n algorithm = 'ALGO_0' \n mapping_server (`str`): mapping server to check\n ex.)\n mapping_server = 'PREFIX_SID_EXPORT_MAP' or\n mapping_server = 'PREFIX_SID_REMOTE_EXPORT_MAP'\n max_time ('int'): maximum time to wait\n check_interval ('int'): how often to check\n expected_result ('bool'): Expected result\n set expected_result = False if method should fail\n set expected_result = True if method should pass (default value)\n \n Raises:\n None\n Returns\n True/False\n\n \"\"\"\n\n mapping_dict_export = {\n 'ipv4': 'ipv4_prefix_sid_export_map',\n 'ipv6': 'ipv6_prefix_sid_export_map',\n }\n\n mapping_dict_remote_export = {\n 'ipv4': 'ipv4_prefix_sid_remote_export_map',\n 'ipv6': 'ipv6_prefix_sid_remote_export_map',\n }\n \n timeout = Timeout(max_time, check_interval)\n while timeout.iterate():\n try:\n out = None\n if output:\n out = device.parse(\n \"show segment-routing mpls mapping-server {}\".format(address_family),\n output=output\n )\n else:\n out = device.parse(\n \"show segment-routing mpls mapping-server {}\".format(address_family)\n )\n output = None\n except (SchemaEmptyParserError):\n pass\n \n found = None\n\n for ip_address, sid in address_sid_dict.items():\n \n # find using Prefix SID local\n # It will use ipv4_prefix_sid_local or ipv6_prefix_sid_local as key for search data\n # based on address_family provided\n if out:\n reqs = R(\n ['segment_routing',\n 'bindings',\n 'mapping_server',\n 'policy',\n mapping_server.lower(),\n address_family,\n 'mapping_entry',\n ip_address,\n 'algorithm',\n algorithm,\n 'sid',\n sid]\n )\n \n found = find([out], reqs, filter_=False, all_keys=True)\n \n # Returns false if SID is not found Prefix SID or Prefix SID local\n if not expected_result and not found:\n return expected_result\n if expected_result and found:\n return expected_result\n \n if not found:\n timeout.sleep()\n\n return False\n\n\ndef verify_segment_routing_traffic_eng_policies(device, admin_status=None, operational_status=None,\n metric_type=None, 
path_accumulated_metric=None, path_status=None,\n max_time=30, check_interval=10):\n \"\"\" Verifies all configured traffic_eng policies have specific configurations\n\n Args:\n device ('obj'): Device to use\n admin_status ('str'): Admin status to verify\n operational_status ('str'): Operational status to verify\n metric_type ('str'): Metric type to verify\n path_accumulated_metric ('int'): Path accumulated metric to verify\n path_status ('str'): Path status to verify\n max_time ('int'): Maximum amount of time to keep checking\n check_interval ('int'): How often to check\n\n Returns:\n True/False\n\n Raises:\n N/A\n \"\"\"\n if (not admin_status and\n not operational_status and\n not metric_type and\n not path_status and\n not path_accumulated_metric):\n log.info('Must provide at least one optional argument to verify')\n return False\n\n timeout = Timeout(max_time, check_interval)\n while timeout.iterate():\n try:\n out = device.parse('show segment-routing traffic-eng policy all')\n except SchemaEmptyParserError:\n log.info('Parser output is empty')\n timeout.sleep()\n continue\n\n for policy in out:\n admin = out[policy].get('status', {}).get('admin', '')\n if admin_status and admin_status not in admin:\n log.info('Expected admin status is \"{admin_status}\" actual is \"{admin}\"'\n .format(admin_status=admin_status,\n admin=admin))\n break\n\n operational = out[policy].get('status', {}).get('operational', {}).get('state', '')\n if operational_status and operational_status not in operational:\n log.info('Expected operational status is \"{operational_status}\" actual is \"{operational}\"'\n .format(operational_status=operational_status,\n operational=operational))\n break\n\n for preference in out[policy].get('candidate_paths', {}).get('preference', {}):\n if out[policy]['candidate_paths']['preference'][preference].get('path_type'):\n\n path_type_dict = out[policy]['candidate_paths']['preference'][preference]['path_type']\n\n if 'dynamic' in path_type_dict:\n metric = path_type_dict['dynamic'].get('metric_type', '')\n status = path_type_dict['dynamic'].get('status', '')\n accumulated_metric = path_type_dict['dynamic'].get('path_accumulated_metric', '')\n elif 'explicit' in path_type_dict:\n segment = list(path_type_dict['explicit'].get('segment_list', {}))[0]\n metric = path_type_dict['explicit'].get('segment_list', {}).get(segment, {}).get('metric_type', '')\n status = path_type_dict['explicit'].get('segment_list', {}).get(segment, {}).get('status', '')\n accumulated_metric = None # Not possible from schema perspective but needed for logic\n else:\n log.info('Path type not defined in api call.')\n break\n\n if metric_type and metric_type not in metric:\n log.info('Expected metric type is \"{expected}\" actual is \"{actual}\"'\n .format(expected=metric_type,\n actual=metric))\n break\n\n if path_status and path_status not in status:\n log.info('Expected path status is \"{expected}\" actual is \"{actual}\"'\n .format(expected=path_status,\n actual=status))\n break\n\n if (path_accumulated_metric and\n type(accumulated_metric) is int and\n path_accumulated_metric != accumulated_metric):\n log.info('Expected path accumulated metric is \"{expected}\" '\n 'actual is \"{actual}\"'\n .format(expected=path_accumulated_metric,\n actual=accumulated_metric))\n break\n else:\n continue\n break\n\n else:\n return True\n timeout.sleep()\n\n return False\n", "id": "9831665", "language": "Python", "matching_score": 5.660412788391113, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/segment_routing/verify.py" }, { "content": "\"\"\"Common get functions 
for segment-routing \"\"\"\n\n# Python\nimport re\nimport logging\n\n# pyATS\nfrom pyats.utils.objects import find, R\n\n# Genie\nfrom genie.libs.sdk.libs.utils.normalize import GroupKeys\nfrom genie.metaparser.util.exceptions import SchemaEmptyParserError\n\n# Running-Config\nfrom genie.libs.sdk.apis.iosxe.running_config.get import (\n get_running_config_section_dict,\n)\n\nlog = logging.getLogger(__name__)\n\n\ndef get_segment_routing_policy_active_path_hop_labels(device, policy, policy_dict=None):\n \"\"\" Get the hop labels of the active path of a segment-routing policy\n\n Args:\n device ('obj'): Device object\n policy ('str'): Policy name\n policy_dict ('dict'): Policy dict from parser output\n IOSXE Parser - ShowSegmentRoutingTrafficEngPolicy\n cmd - show segment-routing traffic-eng policy all\n Returns:\n labels ('list'): Hop labels\n \"\"\"\n labels = []\n cmd = 'show segment-routing traffic-eng policy name {policy}'.format(policy=policy)\n if policy_dict is None:\n try:\n out = device.parse(cmd)\n except Exception as e:\n log.error(\"Failed to parse '{cmd}': {e}\".format(cmd=cmd, e=e))\n return labels\n else:\n out = policy_dict\n\n # Check explicit path\n reqs = R([policy,'candidate_paths',\n 'preference','(?P<preference>.*)',\n 'path_type','explicit',\n '(?P<category>.*)','(?P<name>.*)',\n 'status','(?P<status>.*)'])\n explicit = find([out], reqs, filter_=False, all_keys=True)\n if explicit:\n keys = GroupKeys.group_keys(reqs=reqs.args, ret_num={},\n source=explicit, all_keys=True)\n \n for item in keys:\n if item['status'] == 'active':\n path_index = item['preference']\n\n reqs2 = R([policy,'candidate_paths',\n 'preference',path_index,\n 'path_type','explicit',\n '(?P<category>.*)','(?P<name>.*)',\n 'hops','(?P<hops>.*)'])\n hops = find([out], reqs2, filter_=False, all_keys=True)\n if hops:\n hop = hops[0][0]\n for value in hop.values():\n sid = value.get('sid', '')\n labels.append(sid)\n\n return labels\n\n # Check dynamic path if no active path in explicit path\n reqs = R([policy,'candidate_paths',\n 'preference','(?P<preference>.*)',\n 'path_type','dynamic',\n 'status','(?P<status>.*)'])\n dynamic = find([out], reqs, filter_=False, all_keys=True)\n if dynamic:\n keys = GroupKeys.group_keys(reqs=reqs.args, ret_num={},\n source=dynamic, all_keys=True)\n\n for item in keys:\n if item['status'] == 'active':\n path_index = item['preference']\n\n reqs2 = R([policy,'candidate_paths',\n 'preference',path_index,\n 'path_type','dynamic',\n 'hops','(?P<hops>.*)'])\n hops = find([out], reqs2, filter_=False, all_keys=True)\n if hops:\n hop = hops[0][0]\n for value in hop.values():\n sid = value.get('sid', '')\n labels.append(sid)\n\n return labels\n\n\ndef get_segment_routing_policy_in_state(device, expected_admin='up', expected_oper='up'):\n \"\"\" Find a segment-routing policy in expected state\n\n Args:\n device ('obj'): Device object\n expected_admin ('str'): Expected admin state\n expected_oper ('str'): Expected operational state\n Returns:\n policy ('str'): Policy name\n \"\"\"\n cmd = 'show segment-routing traffic-eng policy all'\n try:\n out = device.parse(cmd)\n except Exception as e:\n log.error(\"Failed to parse '{cmd}': {e}\".format(cmd=cmd, e=e))\n return None\n\n for policy in out.keys():\n admin = out.get(policy, {}).get('status', {}).get('admin', '')\n oper = out.get(policy, {}).get('status', {}).\\\n get('operational', {}).get('state', '')\n\n if (admin.lower() == expected_admin.lower() and \n oper.lower() == expected_oper.lower()):\n return policy\n else:\n log.info(\"Failed to find a 
policy with admin state {admin} \"\n \"and oper state {oper}\".format(admin=expected_admin,\n oper=expected_oper))\n return None\n\n\ndef get_segment_routing_sid_map_configuration(device, address_family=\"ipv4\"):\n \"\"\" Get Segment routing SID map configuration\n\n Args:\n device ('str'): Device str\n address_family ('str'): Address family\n Returns:\n Dictionary with ip address as key and sid as value\n ex.)\n {\n '192.168.1.1': '1',\n '192.168.1.2': '2'\n }\n \"\"\"\n out = get_running_config_section_dict(\n device=device, section=\"segment-routing\"\n )\n\n sid_dict = {}\n\n if not out:\n return None\n\n p1 = re.compile(r\"^(?P<ip_address>\\S+) index (?P<sid>\\d+) range \\d+$\")\n\n connected_prefix_sid_maps = out[\"segment-routing mpls\"][\n \"connected-prefix-sid-map\"\n ][\"address-family {}\".format(address_family)].keys()\n\n for key in connected_prefix_sid_maps:\n key = key.strip()\n m = p1.match(key)\n if m:\n group = m.groupdict()\n sid_dict.update({group[\"ip_address\"]: group[\"sid\"]})\n continue\n\n return sid_dict\n\n\ndef get_segment_routing_lb_range(device):\n \"\"\" Gets segement-routing local block range\n\n Args:\n device ('obj'): device to use\n\n Returns:\n ('int', 'int'): label_min, label_max\n\n Raises:\n N/A\n \"\"\"\n try:\n out = device.parse(\"show segment-routing mpls lb\")\n except SchemaEmptyParserError:\n return None, None\n\n return out.get(\"label_min\"), out.get(\"label_max\")\n\n\ndef get_segment_routing_gb_range(device):\n \"\"\" Gets segement-routing global block range\n\n Args:\n device ('obj'): device to use\n\n Returns:\n ('int', 'int'): label_min, label_max\n\n Raises:\n None\n \"\"\"\n try:\n out = device.parse(\"show segment-routing mpls gb\")\n except SchemaEmptyParserError:\n return None, None\n\n return out.get(\"label_min\"), out.get(\"label_max\")\n", "id": "8559939", "language": "Python", "matching_score": 2.0979979038238525, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/segment_routing/get.py" }, { "content": "\"\"\"Common get info functions for interface\"\"\"\n\n# Python\nimport os\nimport logging\nimport re\n\n# Genie\nfrom genie.metaparser.util import merge_dict\n\n# pyATS\nfrom genie.metaparser.util.exceptions import (\n SchemaEmptyParserError,\n SchemaUnsupportedKeyError,\n)\n\n# Interface\nfrom genie.libs.sdk.apis.iosxe.interface.get import (\n get_interface_running_config,\n)\n\nlog = logging.getLogger(__name__)\n\n\ndef get_policy_map_running_policy_map(device, policy_map):\n \"\"\" Get running policy-map configuration\n\n Args:\n device (`obj`): Device object\n policy_map (`str`): Policy map name\n\n Returns:\n None\n\n Raises:\n SchemaUnsupportedKeyError\n \"\"\"\n\n try:\n out = device.parse(\"show run policy-map {}\".format(policy_map))\n except SchemaUnsupportedKeyError as e:\n # Need to catch if there is unexpected configuration\n log.error(\n \"Unexpected configuration found under \"\n \"{policy_map}: {e}\".format(policy_map=policy_map, e=e)\n )\n return None\n except SchemaEmptyParserError:\n return None\n except Exception as e:\n log.error(\n \"Failed to parse show run policy-map {}, Error: {}\".format(\n policy_map, str(e)\n )\n )\n return None\n return out\n\n\ndef get_policy_map_configurational_policy_map(device, interfaces):\n \"\"\" Get policy-map running configuration\n\n Args:\n device (`obj`): Device object\n interfaces (`list`): List of interfaces\n\n Returns:\n policy-map configurational dictionary\n \"\"\"\n\n out = {}\n policy_out = {}\n for interface in interfaces:\n 
out[interface] = get_interface_running_config(device, interface)\n\n service_policies = []\n\n for item in out[interface]:\n if interface in item:\n for service_policy_item in out[interface][item]:\n if \"service-policy\" in service_policy_item:\n service_policies.append(service_policy_item[21:].strip())\n\n for service_policy in service_policies:\n if \"in\" in service_policy:\n direction = \"input\"\n else:\n direction = \"output\"\n output = get_policy_map_running_policy_map(device, service_policy)\n if not output:\n continue\n policy_out.setdefault(direction, {})\n policy_out[direction] = output\n for class_name in policy_out[direction][\"policy_map\"][service_policy][\n \"class\"\n ]:\n for item in policy_out[direction][\"policy_map\"][service_policy][\n \"class\"\n ][class_name]:\n if \"service_policy\" in item:\n nested = policy_out[direction][\"policy_map\"][\n service_policy\n ][\"class\"][class_name][item]\n nested_policy_out = get_policy_map_running_policy_map(\n device, nested\n )\n if not nested_policy_out:\n continue\n new_nested_policy_out = {}\n new_nested_policy_out.setdefault(\n \"policy_map\", {}\n ).setdefault(service_policy, {}).setdefault(\n \"child_policy_name\", {}\n )\n new_nested_policy_out[\"policy_map\"][service_policy][\n \"child_policy_name\"\n ] = nested_policy_out[\"policy_map\"]\n merge_dict(policy_out[direction], new_nested_policy_out)\n\n return policy_out\n\n\ndef get_policy_map_operational_policy_map_on_interface(device, interface):\n \"\"\" Get operational policy-map on an interface\n\n Args:\n device (`obj`): Device object\n interface (`str`): Interface name\n\n Returns:\n Device output parsed dictionary\n \"\"\"\n try:\n out = device.parse(\"show policy-map interface {}\".format(interface))\n except SchemaEmptyParserError as e:\n log.error(\n \"Failed to parse show policy-map interface {}\".format(interface)\n )\n return None\n\n return out\n\n\ndef get_policy_map_policy_map_packet_count(\n device, interface, direction=\"output\"\n):\n \"\"\" Get policy-map packet count\n\n Args:\n device (`obj`): Device object\n interface (`str`): Interface name\n direction ('str'): input/output pkt direction\n\n Returns:\n class_map_out_packets: Packet count\n\n Raises:\n KeyError\n \"\"\"\n\n class_map_dict = {}\n\n out = get_policy_map_operational_policy_map_on_interface(device, interface)\n\n if not out:\n return class_map_dict\n\n for policy_name_out in (\n out.get(interface)\n .get(\"service_policy\")\n .get(direction)\n .get(\"policy_name\")\n ):\n for child_policy_name_out in out[interface][\"service_policy\"][\n direction\n ][\"policy_name\"][policy_name_out][\"child_policy_name\"]:\n for class_map in out[interface][\"service_policy\"][direction][\n \"policy_name\"\n ][policy_name_out][\"child_policy_name\"][child_policy_name_out][\n \"class_map\"\n ]:\n try:\n class_map_dict[class_map] = out[interface][\n \"service_policy\"\n ][direction][\"policy_name\"][policy_name_out][\n \"child_policy_name\"\n ][\n child_policy_name_out\n ][\n \"class_map\"\n ][\n class_map\n ][\n \"packets\"\n ]\n except KeyError:\n return None\n\n return class_map_dict\n\n\ndef get_policy_map_policy_map_ip_precedence(\n device, interface, direction=\"input\"\n):\n \"\"\" Gets policy-map ip precedence per stream\n\n Args:\n device (`obj`): Device object\n interface (`str`): Interface name\n direction ('str'): input/output pkt direction\n\n Returns:\n stream ip precedece ('dict')\n \"\"\"\n\n out = get_policy_map_operational_policy_map_on_interface(device, interface)\n\n ipp_dict = 
{}\n\n if not out:\n return ipp_dict\n\n for policy_name_out in (\n out.get(interface)\n .get(\"service_policy\")\n .get(direction)\n .get(\"policy_name\")\n ):\n if (\n \"child_policy_name\"\n in out[interface][\"service_policy\"][direction][\"policy_name\"][\n policy_name_out\n ]\n ):\n for child_policy_name_out in out[interface][\"service_policy\"][\n direction\n ][\"policy_name\"][policy_name_out][\"child_policy_name\"]:\n for class_map in out[interface][\"service_policy\"][direction][\n \"policy_name\"\n ][policy_name_out][\"child_policy_name\"][child_policy_name_out][\n \"class_map\"\n ]:\n if (\n \"qos_set\"\n in out[interface][\"service_policy\"][direction][\n \"policy_name\"\n ][policy_name_out][\"child_policy_name\"][\n child_policy_name_out\n ][\n \"class_map\"\n ][\n class_map\n ]\n ):\n precedence = out[interface][\"service_policy\"][\n direction\n ][\"policy_name\"][policy_name_out][\"child_policy_name\"][\n child_policy_name_out\n ][\n \"class_map\"\n ][\n class_map\n ][\n \"qos_set\"\n ].get(\n \"ip precedence\"\n )\n if precedence:\n ipp_dict[class_map] = next(iter(precedence))\n else:\n for class_map in out[interface][\"service_policy\"][direction][\n \"policy_name\"\n ][policy_name_out][\"class_map\"]:\n if (\n \"qos_set\"\n in out[interface][\"service_policy\"][direction][\n \"policy_name\"\n ][policy_name_out][\"class_map\"][class_map]\n ):\n precedence = out[interface][\"service_policy\"][direction][\n \"policy_name\"\n ][policy_name_out][\"class_map\"][class_map][\"qos_set\"].get(\n \"ip precedence\"\n )\n if precedence:\n ipp_dict[class_map] = next(iter(precedence))\n return ipp_dict\n\n\ndef get_policy_map_class_maps(device, policy_map, control_plane_policy):\n \"\"\" Get class map dictionary\n\n Args:\n device (`obj`): Device object\n control_plane_policy (`str`): Control policy name\n policy_map ('str'): policy map name\n Returns:\n class map dictionary\n \"\"\"\n try:\n out = device.parse(\"show policy-map {}\".format(policy_map))\n except SchemaEmptyParserError as e:\n return None\n try:\n class_maps = out[\"Control Plane\"][\"service_policy\"][\"input\"][\n \"policy_name\"\n ][control_plane_policy][\"class_map\"]\n except KeyError as e:\n return None\n\n return class_maps\n", "id": "12261517", "language": "Python", "matching_score": 2.422961473464966, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/policy_map/get.py" }, { "content": "\"\"\"Common configure functions for control-plane\"\"\"\n# Python\nimport logging\n\n# Unicon\nfrom unicon.core.errors import SubCommandFailure\n\nlog = logging.getLogger(__name__)\n\n\ndef configure_control_plane_control_plane_policy(device, config):\n\n \"\"\" Configure control plane policy\n\n Args:\n device ('obj'): Device object\n config('list'): List of commands to configure\n ex.)\n [\n {\n 'policy_name': 'Control_Plane_In',\n 'remove': False\n },\n {\n 'policy_name': 'Control_Plane_In',\n 'remove': True\n }\n ]\n Returns:\n None\n Raises:\n SubCommandFailure: Failed configuring control plane policy\n\n \"\"\"\n\n conf = []\n conf.append(\"control-plane\\n\")\n for c in config:\n if \"remove\" in c:\n conf.append(\n \"no service-policy input {}\\n\".format(c[\"policy_name\"])\n )\n else:\n conf.append(\"service-policy input {}\\n\".format(c[\"policy_name\"]))\n try:\n device.configure(\"\".join(conf))\n except SubCommandFailure:\n raise SubCommandFailure(\n \"Could not configure control plane policy on device {device}\".format(\n device=device.name\n )\n )\n", "id": "8867744", "language": 
"Python", "matching_score": 0.9859715700149536, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/control_plane/configure.py" }, { "content": "\"\"\"Common configure functions for ISIS\"\"\"\n\n# Python\nimport logging\n\n# Unicon\nfrom unicon.core.errors import SubCommandFailure\n\nlog = logging.getLogger(__name__)\n\n\ndef configure_isis_net_address(device, process_id, net_address):\n \"\"\" Configure ISIS network address\n\n Args:\n device ('obj'): Device object\n process_id ('str'): Router ISIS process ID\n net_address ('str'): Net Address\n Returns:\n None\n Raises:\n SubCommandFailure\n \"\"\"\n\n try:\n device.configure(\n \"router isis {process_id}\\n\"\n \" net {net_address}\\n\"\n \" !\\n\".format(\n process_id=process_id,\n net_address=net_address,\n )\n )\n except SubCommandFailure:\n raise SubCommandFailure(\n \"Could not configure ISIS on {device}\".format(\n device=device.name,\n )\n )\n\ndef configure_isis_metric_style(device, process_id,\n address_family, metric_style):\n \"\"\" Configure ISIS metric style\n\n Args:\n device ('obj'): Device object\n process_id ('str'): Router ISIS process ID\n address_family ('str'): Address family to be configured\n metric_style ('str'): Metric style\n Returns:\n None\n Raises:\n SubCommandFailure\n \"\"\"\n\n try:\n device.configure(\n \"router isis {process_id}\\n\"\n \" address-family {address_family}\\n\"\n \" metric-style {metric_style}\\n\"\n \" !\\n\".format(\n process_id=process_id,\n address_family=address_family,\n metric_style=metric_style,\n )\n )\n except SubCommandFailure:\n raise SubCommandFailure(\n \"Could not configure ISIS on {device}\".format(\n device=device.name,\n )\n )\n\ndef configure_isis_prefix_sid(device, process_id, interface, \n prefix_sid, address_family):\n \"\"\" Configure ISIS prefix-sid\n\n Args:\n device ('obj'): Device object\n process_id ('str'): Router ISIS process ID\n interface ('str'): Interface to configure\n prefix_sid ('str'): Prefix-Sid\n address_family ('str'): Address family to be configured\n Returns:\n None\n Raises:\n SubCommandFailure\n \"\"\"\n\n try:\n device.configure(\n \"router isis {process_id}\\n\"\n \" interface {interface}\\n\"\n \" address-family {address_family}\\n\"\n \" prefix-sid absolute {prefix_sid}\\n\"\n \" !\\n\"\n \" !\\n\".format(\n process_id=process_id,\n interface=interface,\n prefix_sid=prefix_sid,\n address_family=address_family,\n )\n )\n except SubCommandFailure:\n raise SubCommandFailure(\n \"Could not configure ISIS on {device}\".format(\n device=device.name,\n )\n )\n\ndef configure_isis_metric(device, process_id, interface, metric, address_family):\n \"\"\" Configure ISIS metric\n\n Args:\n device ('obj'): Device object\n process_id ('str'): Router ISIS process ID\n interface ('str'): Interface to configure\n metric ('str'): Metric value\n address_family ('str'): Address family to be configured\n Returns:\n None\n Raises:\n SubCommandFailure\n \"\"\"\n\n try:\n device.configure(\n \"router isis {process_id}\\n\"\n \" interface {interface}\\n\"\n \" address-family {address_family}\\n\"\n \" metric {metric}\\n\"\n \" !\\n\"\n \" !\\n\".format(\n process_id=process_id,\n interface=interface,\n metric=metric,\n address_family=address_family,\n )\n )\n except SubCommandFailure:\n raise SubCommandFailure(\n \"Could not configure ISIS on {device}\".format(\n device=device.name,\n )\n )\n\ndef configure_isis_md5_authentication(device, process_id, interface, hello_password):\n \"\"\" Configure MD5 authentication\n\n Args:\n device ('obj'): 
Device object\n process_id ('str'): Router ISIS process ID\n interface ('str'): Interface to configure\n hello_password ('str'): Authentication password\n Returns:\n None\n Raises:\n SubCommandFailure\n \"\"\"\n\n try:\n device.configure(\n \"router isis {process_id}\\n\"\n \" interface {interface}\\n\"\n \" hello-password hmac-md5 {hello_password}\\n\"\n \" !\\n\"\n \"!\\n\".format(\n process_id=process_id,\n interface=interface,\n hello_password=hello_password,\n )\n )\n except SubCommandFailure:\n raise SubCommandFailure(\n \"Could not configure MD5 authentication under interface {interface}\".format(\n interface=interface,\n )\n )\n", "id": "10211150", "language": "Python", "matching_score": 2.605604410171509, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxr/isis/configure.py" }, { "content": "\"\"\"Common configure functions for Segment Routing\"\"\"\n\n# Python\nimport logging\n\n# Unicon\nfrom unicon.core.errors import SubCommandFailure\n\nlog = logging.getLogger(__name__)\n\n\ndef configure_segment_routing_gb_range(device, label_min, label_max):\n \"\"\" Add segment routing\n\n Args:\n device ('obj'): Device object\n label_min (`int`): Segment routing global block start\n label_max (`int`): Segment routing global block end\n\n Returns:\n None\n\n Raises:\n SubCommandFailure\n \"\"\"\n\n try:\n device.configure(\n \"segment-routing\\n\"\n \" global-block {label_min} {label_max}\\n\"\n \"!\\n\".format(\n label_min=label_min,\n label_max=label_max,\n )\n )\n except SubCommandFailure:\n raise SubCommandFailure(\n \"Could not add segment routing\")\n\ndef configure_segment_routing_sr_prefer(device, process_id, address_family):\n \"\"\" Configure segment routing sr prefer\n\n Args:\n device ('obj'): Device object\n process_id ('str'): Router ISIS process ID\n address_family ('str'): Address family to be configured\n\n Returns:\n None\n\n Raises:\n SubCommandFailure\n \"\"\"\n\n try:\n device.configure(\n \"router isis {process_id}\\n\"\n \" address-family {address_family}\\n\"\n \" segment-routing mpls sr-prefer\\n\"\n \" !\\n\".format(\n process_id=process_id,\n address_family=address_family,\n )\n )\n except SubCommandFailure:\n raise SubCommandFailure(\n \"Could not configure segment routing\")\n\ndef configure_segment_routing_prefix_sid_index(device, process_id, interface,\n prefix_sid_index, address_family):\n \"\"\" Configure segment routing prefix-sid index\n\n Args:\n device ('obj'): Device object\n process_id ('str'): Router ISIS process ID\n interface (`str`): Interface to configure\n prefix_sid_index (`int`): Prefix-sid index\n address_family ('str'): Address family to be configured\n\n Returns:\n None\n\n Raises:\n SubCommandFailure\n \"\"\"\n\n try:\n device.configure(\n \"router isis {process_id}\\n\"\n \" interface {interface}\\n\"\n \" address-family {address_family}\\n\"\n \" prefix-sid index {prefix_sid_index}\\n\"\n \" !\\n\"\n \" !\\n\".format(\n process_id=process_id,\n interface=interface,\n prefix_sid_index=prefix_sid_index,\n address_family=address_family,\n )\n )\n except SubCommandFailure:\n raise SubCommandFailure(\n \"Could not configure segment routing\")\n", "id": "6782141", "language": "Python", "matching_score": 1.2350965738296509, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxr/segment_routing/configure.py" }, { "content": "\"\"\"Common configure functions for bgp\"\"\"\n\n# Python\nimport logging\nimport re\nimport jinja2\n\n# Genie\nfrom genie.utils.timeout import Timeout\n\n# Utils\nfrom 
genie.libs.sdk.apis.iosxe.running_config.get import (\n get_running_config_section_dict,\n)\n\n# Steps\nfrom pyats.aetest.steps import Steps\n\n# Unicon\nfrom unicon.core.errors import SubCommandFailure\n\nlog = logging.getLogger(__name__)\n\n\ndef configure_bgp_import_path_selection(\n device, bgp_as, address_family, vrf, selection_type\n):\n \"\"\" Configures import path selection on BGP router\n Args:\n device('obj'): device to configure\n bgp_as('str'): bgp id\n address_family:('str'): address family\n vrf('str'): vrf name\n type('str'): type of selection to configure\n Returns:\n N/A\n Raises:\n SubCommandFailure: Failed executing command\n \"\"\"\n log.info('Configuring \"import path selection {}\"'.format(selection_type))\n try:\n device.configure(\n \"router bgp {bgp_as}\\n\"\n \"address-family {address_family} vrf {vrf}\\n\"\n \"import path selection {selection_type}\".format(\n bgp_as=bgp_as,\n address_family=address_family,\n vrf=vrf,\n selection_type=selection_type,\n )\n )\n except SubCommandFailure:\n raise SubCommandFailure(\n \"Could not configure import path selection on BGP router {bgp_as}\".format(\n bgp_as=bgp_as\n )\n )\n\n\ndef configure_bgp_router_id(device, bgp_as, router_id):\n \"\"\" Configures router-id on BGP router\n\n Args:\n device('obj'): device to configure on\n bgp_as('str'): bgp_as to configure\n router_id('str'): router_id of device\n Return:\n N/A\n Raises:\n SubCommandFailure: Failed executing command\n \"\"\"\n\n log.info(\n \"Configuring router BGP on {hostname}\\n\"\n \" -local AS number: {bgp_as}\\n\"\n \" -bgp router-id: {router_id}\".format(\n hostname=device.hostname, bgp_as=bgp_as, router_id=router_id\n )\n )\n\n try:\n device.configure(\n \"router bgp {bgp_as}\\n\"\n \"bgp router-id {router_id}\\n\".format(\n bgp_as=bgp_as, router_id=router_id\n )\n )\n except SubCommandFailure:\n raise SubCommandFailure(\n \"Could not configure router-id {router_id} on \"\n \"BGP router {bgp_as}\".format(router_id=router_id, bgp_as=bgp_as)\n )\n\n\ndef configure_bgp_neighbor(\n device,\n bgp_as,\n neighbor_as,\n neighbor_address,\n source_interface=None,\n ebgp=None,\n):\n \"\"\" Configures bgp neighbor on bgp router\n\n Args:\n device('obj'): device to configure on\n bgp_as('str'): bgp_as to configure\n neighbor_as('str'): neighbor_as to configure\n neighbor_address('str'): address of neighbor\n source_interface('str'): used to configure update-source on neighbor\n Returns:\n N/A\n Raises:\n SubCommandFailure: Failed executing command\n \"\"\"\n log_msg = (\n \"Configuring BGP on {hostname}\\n\"\n \" -local AS number: {bgp_as}\\n\"\n \" -remote AS number: {neighbor_as}\\n\"\n \" -neighbor: {neighbor_address}\".format(\n hostname=device.hostname,\n bgp_as=bgp_as,\n neighbor_as=neighbor_as,\n neighbor_address=neighbor_address,\n )\n )\n\n cmd = (\n \"router bgp {bgp_as}\\n\"\n \"neighbor {neighbor_address} remote-as {neighbor_as}\".format(\n bgp_as=bgp_as,\n neighbor_as=neighbor_as,\n neighbor_address=neighbor_address,\n )\n )\n\n if source_interface:\n log_msg += \"\\n -update-source: {}\".format(source_interface)\n cmd += \"\\nneighbor {neighbor_address} update-source {source_interface}\".format(\n neighbor_address=neighbor_address,\n source_interface=source_interface,\n )\n\n if ebgp:\n log_msg += \"\\n -ebgp-multihop: {}\".format(ebgp)\n cmd += \"\\nneighbor {neighbor_address} ebgp-multihop {ebgp}\".format(\n neighbor_address=neighbor_address, ebgp=ebgp\n )\n\n log.info(log_msg)\n try:\n device.configure(cmd)\n except SubCommandFailure:\n raise 
SubCommandFailure(\n \"Could not configure bgp neighbor {neighbor_as} \"\n \"on router {bgp_as}\".format(neighbor_as=neighbor_as, bgp_as=bgp_as)\n )\n\n\ndef configure_bgp_soo_on_inbound_from_neighbor(\n device, soo_rt, bgp_as, vrf, neighbor_address\n):\n \"\"\" Configures extended community SoO on inbound from neighbor using soo_rt\n\n Args:\n device('obj'): device to execute on\n soo_rt('str'): route to configure SoO with\n bgp_as('str'): what router bgp to configure on\n vrf('str'): what vrf to configure on\n neighbor_address('str'): what neighbor to configure on\n Returns:\n N/A\n Raises:\n SubCommandFailure: Failed executing configure commands\n\n \"\"\"\n log.info(\n \"Configuring extended community SoO with {} value on the \"\n \"inbound from {}\".format(soo_rt, neighbor_address)\n )\n try:\n device.configure(\n \"route-map setsoo permit 10\\n\"\n \"set extcommunity soo {}\".format(soo_rt)\n )\n except SubCommandFailure:\n raise SubCommandFailure(\n \"Could not configure extended community SoO {}\".format(soo_rt)\n )\n try:\n device.configure(\n \"router bgp {}\\n\"\n \"address-family ipv4 vrf {}\\n\"\n \"neighbor {} route-map \"\n \"setsoo in\".format(bgp_as, vrf, neighbor_address)\n )\n except SubCommandFailure:\n raise SubCommandFailure(\n \"Could not configure SoO in from neighbor {}\".format(\n neighbor_address\n )\n )\n\n\ndef configure_prefix_list_prefix_list_to_bgp_neighbor(\n device, bgp_as, address_family, vrf, prefix_list=None\n):\n \"\"\" Configure prefix list to bgp neighbor\n\n Args:\n device ('obj'): Device object\n bgp_as (str): bgp AS number\n vrf ('str'): vrf name\n address_family ('str'): address family\n prefix_list ('list'): A list of dictionaries following below format:\n [{\n 'neighbor': neighbor address,\n 'prefix_list': prefix,\n 'direction': direction\n }]\n ex.) \n [\n {\n 'neighbor': '192.168.1.4',\n 'prefix_list': 'in',\n 'direction': 'in'\n },\n {\n 'neighbor': '192.168.1.5',\n 'prefix_list': 'out',\n 'direction': 'out'\n }\n ]\n Returns:\n N/A\n Raises:\n SubCommandFailure: Failed executing configure commands\n TypeError: prefix_list is not a list\n \"\"\"\n config = []\n\n config.append(\"router bgp {bgp_as}\".format(bgp_as=bgp_as))\n\n config.append(\n \"\\naddress-family {address_family} vrf {vrf}\".format(\n address_family=address_family, vrf=vrf\n )\n )\n\n if not isinstance(prefix_list, list):\n raise TypeError(\"prefix_list must be a list\")\n\n for pf in prefix_list:\n config.append(\n \"\\nneighbor {neighbor} prefix-list {prefix_list} {direction}\".format(\n neighbor=pf[\"neighbor\"],\n prefix_list=pf[\"prefix_list\"],\n direction=pf[\"direction\"],\n )\n )\n\n try:\n device.configure(\"\".join(config))\n except SubCommandFailure:\n raise SubCommandFailure(\n \"Could not configure prefix-list on router bgp \"\n \"{bgp_as}\".format(bgp_as=bgp_as)\n )\n\n\ndef configure_maximum_prefix_to_bgp_neighbor(\n device, bgp_as, address_family, vrf, maximum_prefix=None\n):\n \"\"\" Configure maximum prefix to router bgp neighbor\n\n Args:\n device ('obj'): Device object\n bgp_as (str): bgp AS number\n vrf ('str'): vrf name\n address_family ('str'): address family\n maximum_prefix ('list'): A list of dictionaries following below format:\n [{\n 'neighbor': neighbor address,\n 'maximum_prefix': maximum prefix number\n }]\n ex.) 
\n [\n {\n 'neighbor': '192.168.1.6',\n 'maximum_prefix': 5,\n }\n ]\n Returns:\n None\n Raises:\n SubCommandFailure: Failed executing configure commands\n TypeError: maximum_prefix is not a list\n\n \"\"\"\n config = []\n\n config.append(\"router bgp {bgp_as}\".format(bgp_as=bgp_as))\n\n config.append(\n \"\\naddress-family {address_family} vrf {vrf}\".format(\n address_family=address_family, vrf=vrf\n )\n )\n\n if not isinstance(maximum_prefix, list):\n raise TypeError(\"maximum_prefix must be a list\")\n\n for pf in maximum_prefix:\n config.append(\n \"\\nneighbor {neighbor} maximum-prefix {maximum_prefix}\".format(\n neighbor=pf[\"neighbor\"], maximum_prefix=pf[\"maximum_prefix\"]\n )\n )\n\n try:\n device.configure(\"\".join(config))\n except SubCommandFailure:\n raise SubCommandFailure(\n \"Could not configure maximum prefixes on bgp \"\n \"router {bgp_as}\".format(bgp_as=bgp_as)\n )\n\n\ndef configure_route_map_route_map_to_bgp_neighbor(\n device,\n bgp_as,\n address_family=\"\",\n route_map=None,\n vrf=\"\",\n vrf_address_family=\"\",\n):\n \"\"\" Configure route map to bgp neighbors\n\n Args:\n device ('obj'): Device object\n bgp_as ('int'): BGP AS number\n address_family ('str'): address family\n vrf ('str'): vrf name\n vrf_address_family ('str'): address family for vrf\n route_map ('list'): route map list which contains dictionary\n dictionary contains following 3 keys:\n neighbor ('str'): neighbor value\n route_map ('str'): route-map name\n direction ('str'): direction type\n ex.)\n [\n {\n 'neighbor': '192.168.60.10',\n 'route_map': 'community_test_out',\n 'direction': 'out'\n },\n {\n 'neighbor': '192.168.60.11',\n 'route_map': 'community_test_out',\n 'direction': 'out'\n },\n {\n 'neighbor': '192.168.6.10',\n 'route_map': 'community_test_in',\n 'direction': 'in'\n },\n ]\n Returns:\n N/A\n Raises:\n SubCommandFailure: Failed executing configure commands\n TypeError: route_map is not a list\n \"\"\"\n\n # router bgp 65109\n # address-family vpnv4\n # neighbor 192.168.36.119 route-map community_test_out out\n # neighbor 192.168.36.120 route-map community_test_out out\n # address-family ipv4 vrf\n # neighbor 192.168.10.253 route-map community_test_in in\n\n if route_map is None:\n route_map = []\n\n config = []\n config.append(\"router bgp {bgp_as}\\n\".format(bgp_as=bgp_as))\n\n if address_family:\n config.append(\n \"address-family {address_family}\\n\".format(\n address_family=address_family\n )\n )\n\n if route_map:\n\n if not isinstance(route_map, list):\n raise TypeError(\"route_map must be a list\")\n\n for routemap in route_map:\n direction = routemap[\"direction\"]\n if direction == \"in\" and vrf and vrf_address_family:\n config.append(\n \"address-family {vrf_address_family} vrf {vrf}\\n\".format(\n vrf_address_family=vrf_address_family, vrf=vrf\n )\n )\n\n config.append(\n \"neighbor {neighbor} route-map {route_map_name} {route_map_direction}\\n\".format(\n neighbor=routemap[\"neighbor\"],\n route_map_name=routemap[\"route_map\"],\n route_map_direction=direction,\n )\n )\n try:\n device.configure(\"\".join(config))\n except SubCommandFailure as e:\n raise SubCommandFailure(\n \"Failed to configure route map to bgp neighbors\"\n )\n\n\ndef configure_bgp_neighbor_activate(\n device, address_family, bgp_as, neighbor_address, steps=Steps()\n):\n \"\"\" Activate bgp neighbor on bgp router \n\n Args:\n device ('obj') : Device to be configured\n bgp_as ('str') : Bgp Id to be added to configuration\n neighbor_address ('str') : Address of neighbor to be added to configuration\n 
address_family ('str') : Address family to be configured\n steps('obj') : Context manager steps\n Returns:\n N/A\n Raises:\n SubCommandFailure: Failed executing configure commands\n \n \"\"\"\n with steps.start(\n \"Configure device {dev}\".format(dev=device.name), continue_=True\n ) as step:\n\n try:\n device.configure(\n [\n \"router bgp {bgp_as}\".format(bgp_as=bgp_as),\n \"address-family {address_family}\".format(\n address_family=address_family\n ),\n \"neighbor {device_address} activate\".format(\n device_address=neighbor_address\n ),\n ]\n )\n except SubCommandFailure as e:\n raise SubCommandFailure(\n \"Could not activate bgp neighbor on bgp \"\n \"router {bgp_as}\".format(bgp_as=bgp_as)\n )\n\n\ndef configure_shut_bgp_neighbors(\n device, bgp_as, neighbors=None, address_family=None, vrf=None, noshut=False\n):\n \"\"\" Configures shut/enable on bgp neighbors if provided otherwise the ones found in running config\n\n Args:\n device ('obj'): device under test\n bgp_as ('int'): router bgp_as to configure on\n address_family ('str'): address_family to configure under\n vrf ('str'): vrf to configure under\n neighbors ('list'): List of neighbors to shut/enable\n noshut ('bool'): does the opposite of shut if True\n Returns: \n N/A\n Raises:\n SubCommandFailure: Failed executing configure commands\n ValueError: Some information is missing\n\n \"\"\"\n search = None\n if noshut:\n if neighbors:\n log.info(\"Enabling router bgp neighbors {}\".format(neighbors))\n elif address_family and vrf:\n log.info(\n \"Enabling router bgp neighbors under address_family {} and vrf {}\".format(\n address_family, vrf\n )\n )\n search = \"address-family {} vrf {}\".format(address_family, vrf)\n elif address_family:\n log.info(\n \"Enabling router bgp neighbors under address_family {}\".format(\n address_family\n )\n )\n search = \"address-family {}\".format(address_family)\n else:\n if neighbors:\n log.info(\"Shutting down router bgp neighbors {}\".format(neighbors))\n elif address_family and vrf:\n log.info(\n \"Shutting down router bgp neighbors under address_family {} and vrf {}\".format(\n address_family, vrf\n )\n )\n search = \"address-family {} vrf {}\".format(address_family, vrf)\n elif address_family:\n log.info(\n \"Shutting down router bgp neighbors under address_family {}\".format(\n address_family\n )\n )\n search = \"address-family {}\".format(address_family)\n\n p1_active_neighbor = re.compile(\n r\"^neighbor +(?P<neighbor>[\\d\\.]+) +activate\"\n )\n p2_shutdown_neighbor = re.compile(\n r\"^neighbor +(?P<neighbor>[\\d\\.]+) +shutdown\"\n )\n p3_neighbor = re.compile(r\"^neighbor +(?P<neighbor>[\\d\\.]+)\")\n cmd = \"router bgp {}\\n\".format(bgp_as)\n\n if neighbors:\n if noshut:\n for neighbor in neighbors:\n cmd += \"no neighbor {} shutdown\\n\".format(neighbor)\n else:\n for neighbor in neighbors:\n cmd += \"neighbor {} shutdown\\n\".format(neighbor)\n try:\n device.configure(cmd)\n except SubCommandFailure:\n if noshut:\n raise SubCommandFailure(\"Could not enable bgp neighbors\")\n else:\n raise SubCommandFailure(\"Could not shut bgp neighbors\")\n\n else:\n already_shut = []\n config_dict = get_running_config_section_dict(\n device, \"router bgp\"\n )\n if config_dict:\n for sub_level in config_dict.get(\n \"router bgp {}\".format(bgp_as), {}\n ):\n\n # Following if/else block is used for neighbors under 'router bgp id' level\n if noshut:\n m = p2_shutdown_neighbor.match(sub_level)\n if m:\n cmd += \"no neighbor {} shutdown\\n\".format(\n m.groupdict()[\"neighbor\"]\n )\n else:\n m = 
p3_neighbor.match(sub_level)\n if m:\n if m.groupdict()[\"neighbor\"] not in already_shut:\n already_shut.append(m.groupdict()[\"neighbor\"])\n cmd += \"neighbor {} shutdown\\n\".format(\n m.groupdict()[\"neighbor\"]\n )\n\n # Following if block is used for neighbors under address_family level\n if search and search in sub_level:\n\n # enter address-family\n cmd += sub_level + \"\\n\"\n\n # shut / no shut neighbor\n for command in config_dict[\"router bgp {}\".format(bgp_as)][\n sub_level\n ]:\n if noshut:\n m = p2_shutdown_neighbor.match(command)\n if m:\n cmd += \"no neighbor {} shutdown\\n\".format(\n m.groupdict()[\"neighbor\"]\n )\n else:\n m = p1_active_neighbor.match(command)\n if m:\n cmd += \"neighbor {} shutdown\\n\".format(\n m.groupdict()[\"neighbor\"]\n )\n\n # exit address-family\n cmd += \"exit-address-family\\n\"\n\n if \"neighbor\" in cmd:\n try:\n device.configure(cmd)\n except SubCommandFailure:\n if noshut:\n raise SubCommandFailure(\n \"Could not enable bgp neighbors\"\n )\n else:\n raise SubCommandFailure(\"Could not shut bgp neighbors\")\n\n else:\n if vrf:\n raise ValueError(\n \"No neighbors found in running config \"\n \"under {} address_family and {} vrf.\".format(\n address_family, vrf\n )\n )\n else:\n raise ValueError(\n \"No neighbors found in running config \"\n \"under {} address_family.\".format(address_family)\n )\n else:\n raise ValueError(\"No running configuration under router bgp.\")\n\n\ndef configure_no_shut_bgp_neighbors(\n device, bgp_as, neighbors=None, address_family=None, vrf=None\n):\n \"\"\" Enables bgp neighbors if provided otherwise it enables the ones found in running config\n\n Args:\n device ('obj'): device under test\n bgp_as ('int'): router bgp_as to configure on\n address_family ('str'): address_family to configure under\n vrf ('str'): vrf to configure under\n neighbors ('list'): List of BGP neighbors\n Returns:\n N/A\n Raises:\n SubCommandFailure: Failed executing configure commands\n ValueError: Some information is missing\n \"\"\"\n try:\n configure_shut_bgp_neighbors(\n device, bgp_as, neighbors, address_family, vrf, noshut=True\n )\n except SubCommandFailure as e:\n raise SubCommandFailure(e)\n except ValueError as e:\n raise ValueError(str(e))\n\n\ndef configure_bgp_neighbor_remote_as(\n device, bgp_as, vrf, neighbor_as, neighbor_address, address_family\n):\n \"\"\" Configure destination in vrf\n Args:\n device ('obj'): Device object\n bgp_as ('str'): Router bgp\n vrf ('str'): Vrf name\n neighbor_as ('str'): Destination\n neighbor_address ('str'): Neighbor address\n address_family ('str'): Address family\n Returns:\n N/A\n Raises:\n SubCommandFailure: Failed executing configure commands\n \"\"\"\n\n try:\n device.configure(\n [\n \"router bgp {as_n}\".format(as_n=bgp_as),\n \"address-family {address_family} vrf {vrf}\".format(\n vrf=vrf, address_family=address_family\n ),\n \"neighbor {neighbor} remote-as {neighbor_as}\".format(\n neighbor=neighbor_address, neighbor_as=neighbor_as\n ),\n ]\n )\n\n except SubCommandFailure:\n raise SubCommandFailure(\n \"Could not configure destination {dest} \"\n \"on device {dev}\".format(dest=neighbor_as, dev=device.name)\n )\n\n\ndef remove_bgp_configuration(device, bgp_as):\n \"\"\" Remove bgp configuration\n Args:\n device ('obj'): Device object\n bgp_as ('str'): Router bgp\n Returns:\n N/A\n Raises:\n SubCommandFailure: Failed executing configure commands\n \"\"\"\n\n try:\n device.configure(\"no router bgp {as_n}\".format(as_n=bgp_as))\n except SubCommandFailure:\n raise 
SubCommandFailure(\n \"Could not remove BGP router {bgp_as} \"\n \"configuration from device {dev}\".format(\n bgp_as=bgp_as, dev=device.name\n )\n )\n\n\ndef configure_bgp_neighbor_as_override(\n device, vrf, bgp_as, address_family, neighbor_address\n):\n \"\"\" Configure AS override in VRF\n Args:\n device ('obj'): Device object\n bgp_as ('str'): Router bgp\n vrf ('str'): Vrf name\n neighbor_address ('str'): Neighbor address\n address_family ('str'): Address family\n Returns:\n N/A\n Raises:\n SubCommandFailure: Failed executing configure commands\n \"\"\"\n\n try:\n device.configure(\n [\n \"router bgp {as_n}\".format(as_n=bgp_as),\n \"address-family {address_family} vrf {vrf}\".format(\n vrf=vrf, address_family=address_family\n ),\n \"neighbor {neighbor} as-override\".format(\n neighbor=neighbor_address\n ),\n ]\n )\n\n except SubCommandFailure:\n raise SubCommandFailure(\n \"Could not configure as-override on {dev} \"\n \"for vrf {vrf}\".format(vrf=vrf, dev=device.name)\n )\n\n\ndef configure_bgp_additional_paths(device, bgp_as):\n \"\"\" Configure additional_paths on bgp router\n\n Args:\n device ('obj'): device to use\n bgp_as ('int'): bgp router to configure\n Returns:\n N/A\n Raises:\n SubCommandFailure: Failed executing configure commands\n \"\"\"\n log.info(\"Configuring bgp router {} with additional paths\".format(bgp_as))\n try:\n device.configure(\n [\n \"router bgp {}\".format(bgp_as),\n \"bgp additional-paths select all\",\n \"bgp additional-paths send receive\",\n \"bgp additional-paths install\",\n ]\n )\n except SubCommandFailure:\n raise SubCommandFailure(\n \"Could not configure additional paths on bgp \"\n \"router {}\".format(bgp_as)\n )\n\n\ndef configure_bgp_advertise_additional_paths(device, bgp_as, neighbor):\n \"\"\" Configures advertisement for additional paths\n\n Args:\n device ('obj'): device to configure\n bgp_as ('str'): router bgp number\n neighbor ('str'): neighbor to advertise to\n Returns:\n N/A\n Raises:\n SubCommandFailure: Failed executing configure commands\n \"\"\"\n log.info(\n \"Configuring bgp router {} with advertisement of additional-paths\".format(\n bgp_as\n )\n )\n try:\n device.configure(\n [\n \"router bgp {}\".format(bgp_as),\n \"neighbor {} advertise additional-paths all\".format(neighbor),\n ]\n )\n except SubCommandFailure:\n raise SubCommandFailure(\n \"Could not configure advertisement for \"\n \"additional paths on bgp router {}\".format(bgp_as)\n )\n\n\ndef configure_bgp_address_advertisement(\n device, bgp_as, address_family, ip_address, mask\n):\n \"\"\" Configure address advertisement on router bgp\n\n Args:\n device ('obj'): device to use\n bgp_as ('int'): bgp router to configure\n address_family ('str'): address family to configure under\n ip_address ('str'): ip address\n mask ('str'): mask\n Returns:\n N/A\n Raises:\n SubCommandFailure: Failed executing configure commands\n \"\"\"\n log.info(\n \"Enabling address advertisement for ip: {} mask {}\".format(\n ip_address, mask\n )\n )\n try:\n device.configure(\n [\n \"router bgp {}\".format(bgp_as),\n \"address-family {}\".format(address_family),\n \"network {} mask {}\".format(ip_address, mask),\n ]\n )\n except SubCommandFailure:\n raise SubCommandFailure(\n \"Could not configure address advertisement on \"\n \"router bgp {bgp_as}\".format(bgp_as=bgp_as)\n )\n", "id": "5191522", "language": "Python", "matching_score": 2.956875801086426, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/bgp/configure.py" }, { "content": "\"\"\"Common clear functions for 
bgp\"\"\"\r\n\r\n# Python\r\nimport logging\r\n\r\n# Unicon\r\nfrom unicon.core.errors import SubCommandFailure\r\n\r\nlog = logging.getLogger(__name__)\r\n\r\n\r\ndef clear_ip_bgp_vrf_af_soft(device, vrf, address_family, bgp_as):\r\n \"\"\" BGP soft clear using vrf, address family and AS number\r\n\r\n Args:\r\n device ('obj'): device object\r\n vrf ('str'): vrf name\r\n address_family ('str'): address family\r\n as_number ('int'): BGP AS number\r\n Returns:\r\n N/A\r\n Raises:\r\n SubCommandFailure: Failed executing command\r\n \"\"\"\r\n try:\r\n device.execute(\r\n \"clear ip bgp vrf {vrf} {address_family} {as_number} soft\".format(\r\n vrf=vrf, address_family=address_family, as_number=bgp_as\r\n )\r\n )\r\n except SubCommandFailure:\r\n raise SubCommandFailure(\r\n \"Could not reset bgp connections on device {device}\".format(\r\n device=device.name\r\n )\r\n )\r\n\r\n\r\ndef clear_bgp_neighbors_soft(device, direction=\"\", neighbor_address=\"ALL\"):\r\n \"\"\" Reset connection to a BGP neighbor or to all neighbors, diretion in or out\r\n Args:\r\n device ('obj') : Device object\r\n direction ('str'): Direction type:\r\n ex.)\r\n direction = \"in\"\r\n direction = \"out\"\r\n neighbor (`str`): Neighbor address\r\n Returns:\r\n N/A\r\n Raises:\r\n SubCommandFailure: Failed executing command\r\n \"\"\"\r\n if neighbor_address.upper() == \"ALL\":\r\n if direction:\r\n cmd = \"clear ip bgp * soft {}\".format(direction)\r\n else:\r\n cmd = \"clear ip bgp * soft\"\r\n else:\r\n if direction:\r\n cmd = \"clear ip bgp {} soft {}\".format(neighbor_address, direction)\r\n else:\r\n cmd = \"clear ip bgp {} soft\".format(neighbor_address)\r\n\r\n try:\r\n device.execute(cmd)\r\n except SubCommandFailure as e:\r\n raise SubCommandFailure(\r\n \"Could not soft clear device {dev}\".format(dev=device.name)\r\n )\r\n", "id": "696127", "language": "Python", "matching_score": 1.6976667642593384, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/bgp/clear.py" }, { "content": "\"\"\"Common get info functions for vrf\"\"\"\n\n# Python\nimport logging\n\n# genie\nfrom genie.metaparser.util.exceptions import SchemaEmptyParserError\n\nlog = logging.getLogger(__name__)\n\n\ndef get_vrf_vrfs(device):\n \"\"\" Get all vrfs from device\n\n Args:\n device ('obj'): Device object\n\n Returns:\n out ('dict'): Vrf dictionary\n\n Raises:\n None\n \"\"\"\n log.info(\n \"Getting all vrfs on '{dev_name}'\".format(dev_name=device.hostname)\n )\n cmd = \"show vrf detail\"\n\n try:\n out = device.parse(cmd)\n except SchemaEmptyParserError:\n return False\n\n return out\n\n\ndef get_vrf_route_distinguisher(vrf, device):\n \"\"\" Get default route distinguisher from show command\n\n Args:\n vrf ('str') : VRF value\n device ('obj') : Device object\n\n Returns:\n route_distinguisher ('str'): Route distinguisher value\n False\n\n Raises:\n Exception\n \"\"\"\n\n log.info(\n \"Getting the route distinguisher value for {vrf} on device \"\n \"{device}\".format(vrf=vrf, device=device.name)\n )\n\n command = \"show vrf {vrf}\".format(vrf=vrf)\n\n try:\n output = device.parse(command)\n except SchemaEmptyParserError:\n return False\n except Exception:\n raise Exception(\n \"Could not execute parser command \" \"'{cmd}'\".format(cmd=command)\n )\n\n route_distinguisher = output[\"vrf\"][vrf].get(\"route_distinguisher\", None)\n\n if not route_distinguisher:\n raise Exception(\n \"Parser did not return any results for \"\n \"vrf {vrf}\".format(vrf=vrf)\n )\n\n log.info(\n \"Found route distinguisher {rd} on 
device {device}\".format(\n device=device.name, rd=route_distinguisher\n )\n )\n\n return route_distinguisher\n\n\ndef get_vrf_interface(device, vrf):\n \"\"\" Gets the subinterfaces for vrf\n\n Args:\n device ('obj'): device to run on\n vrf ('str'): vrf to search under\n\n Returns:\n interfaces('list'): List of interfaces under specified vrf\n None\n\n Raises:\n None\n \"\"\"\n log.info(\"Getting the interfaces under vrf {vrf}\".format(vrf=vrf))\n\n try:\n out = device.parse(\"show vrf {vrf}\".format(vrf=vrf))\n except SchemaEmptyParserError:\n return None\n\n if out and \"vrf\" in out and vrf in out[\"vrf\"]:\n return out[\"vrf\"][vrf].get(\"interfaces\", None)\n\n\ndef get_vrf_route_targets(\n device, address_family, rt_type, vrf=None, route_distinguisher=None\n):\n \"\"\" Get route target value from a device\n\n Args:\n address_family ('str'): address family value\n rt_type ('str'): route target type\n ex.) rt_type = 'import' OR\n rt_type = 'export' OR\n rt_type = 'both'\n vrf('str'): vrf name\n route_distinguisher ('str'): route distinguisher value\n\n Returns:\n Route target value\n None\n\n Raises:\n None\n \"\"\"\n\n log.info(\n \"Getting route target of type {rt_type} for device {dev_name}\".format(\n rt_type=rt_type, dev_name=device.name\n )\n )\n\n cli_command = [\"show vrf detail {vrf}\", \"show vrf detail\"]\n if vrf:\n cmd = cli_command[0].format(vrf=vrf)\n else:\n cmd = cli_command[1]\n\n try:\n raw_out = device.execute(cmd)\n out = device.parse(cmd, output=raw_out)\n except SchemaEmptyParserError:\n return None\n\n if not vrf:\n vrf = \"default\"\n\n try:\n if not route_distinguisher:\n route_distinguisher = out[vrf][\"route_distinguisher\"]\n if \"multicast\" not in raw_out:\n address_family = address_family.split()[0]\n route_targets = out[vrf][\"address_family\"][address_family][\n \"route_targets\"\n ][route_distinguisher]\n\n if (\n route_targets[\"rt_type\"] == rt_type\n or route_targets[\"rt_type\"] == \"both\"\n ):\n return route_targets[\"route_target\"]\n\n except KeyError as e:\n return None\n\n return None\n", "id": "3530840", "language": "Python", "matching_score": 3.042466163635254, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/vrf/get.py" }, { "content": "\"\"\"Common verify functions for vpn\"\"\"\n\n# Python\nimport os\nimport logging\nimport re\nfrom genie.utils.timeout import Timeout\n\n# Common\n\n# VRF\nfrom genie.libs.sdk.apis.iosxe.vrf.get import get_vrf_route_targets\n\nlog = logging.getLogger(__name__)\n\n\ndef verify_vpn_route_targets(\n device,\n route_targets,\n rt_type,\n address_family,\n vrf=None,\n route_distinguisher=None,\n max_time=15,\n check_interval=5,\n):\n \"\"\" Verify route target are imported, exported or both\n\n Args:\n device ('obj'): Device object\n route_targets ('list'): list of route targets to check\n ex.)\n [\n '65109:4005',\n '65109:4006'\n ]\n rt_type ('str'): route target type\n ex.) rt_type = 'import' OR\n rt_type = 'export' OR\n rt_type = 'both'\n address_family ('str'): address family to check\n vrf ('str'): vrf name\n route_distinguisher ('str'): route distinguisher value\n max_time (int): Maximum wait time for the trigger,\n in second. Default: 15\n check_interval (int): Wait time between iterations when looping is needed,\n in second. 
Default: 5\n \n Returns:\n True\n False\n \"\"\"\n\n # Check if both route targets exists on device\n if route_targets:\n if not isinstance(route_targets, list):\n log.error(\"route_targets must be list\")\n return False\n\n result = True\n\n timeout = Timeout(max_time, check_interval)\n while timeout.iterate():\n result = True\n for rt in route_targets:\n current_rt = None\n try:\n current_rt = get_vrf_route_targets(\n device=device,\n rt_type=rt_type,\n address_family=address_family,\n vrf=vrf,\n route_distinguisher=route_distinguisher,\n )\n except Exception as e:\n log.error(str(e))\n\n if not current_rt:\n log.info(\n \"Route target of type {} not found for VRF {} on device {}\".format(\n rt_type, vrf, device.name\n )\n )\n result = False\n if result:\n return result\n timeout.sleep()\n return result\n", "id": "11533680", "language": "Python", "matching_score": 1.8421770334243774, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/vpn/verify.py" }, { "content": "\"\"\" Common verify functions for flow \"\"\"\n\n# Python\nimport logging\n\n# Genie\nfrom genie.utils.timeout import Timeout\nfrom genie.metaparser.util.exceptions import SchemaEmptyParserError\n\nlog = logging.getLogger(__name__)\n\n\ndef verify_flow_with_source_and_destination_exists(\n device,\n flow_monitor,\n source_address,\n destination_address,\n max_time=60,\n check_interval=10,\n):\n \"\"\" Verifies a flow under flow_monitor with specified\n source and destination address' exist\n\n Args:\n device ('obj'): Device to use\n flow_monitor ('str'): Flow monitor to search under\n source_address ('str'): Source address to match\n destination_address ('str'): Destination address to match\n max_time ('int'): Max time to keep checking\n check_interval ('int'): How often to check\n\n Raises:\n N/A\n\n Returns:\n True/False\n \"\"\"\n timeout = Timeout(max_time, check_interval)\n while timeout.iterate():\n flow_address_pairs = device.api.get_flows_src_dst_address_pairs(\n device=device, flow_monitor=flow_monitor\n )\n\n if (source_address, destination_address) in flow_address_pairs:\n return True\n\n timeout.sleep()\n\n return False\n\n\ndef verify_flow_exporter_records_added_and_sent_are_equal(\n device, exporter, max_time=30, check_interval=10\n):\n \"\"\" Verifies that flow exporter records added and sent are equal\n\n Args:\n device ('obj'): Device to use\n exporter ('str'): Exporter name\n max_time ('int'): Max time to keep checking\n check_interval ('int'): How often to check\n\n Raises:\n N/A\n\n Returns:\n True/False\n \"\"\"\n timeout = Timeout(max_time, check_interval)\n while timeout.iterate():\n try:\n output = device.parse(\n \"show flow exporter {exporter} statistics\".format(\n exporter=exporter\n )\n )\n except SchemaEmptyParserError:\n return False\n\n for client in (\n output.get(\"flow_exporter\", {})\n .get(exporter, {})\n .get(\"client_send_stats\", {})\n ):\n if exporter in client:\n added = (\n output[\"flow_exporter\"][exporter][\"client_send_stats\"][\n client\n ]\n .get(\"records_added\", {})\n .get(\"total\")\n )\n\n sent = (\n output[\"flow_exporter\"][exporter][\"client_send_stats\"][\n client\n ]\n .get(\"records_added\", {})\n .get(\"sent\")\n )\n\n log.info(\n \"Records added is: {added}. 
Records sent is {sent}\".format(\n added=added, sent=sent\n )\n )\n\n # Both counters must be present, equal, and non-zero\n if added is not None and added > 0 and added == sent:\n return True\n\n timeout.sleep()\n\n return False\n", "id": "4673524", "language": "Python", "matching_score": 3.4934115409851074, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/flow/verify.py" }, { "content": "\"\"\" Common get info function for flow \"\"\"\n\n# Python\nimport logging\n\n# Genie\nfrom genie.metaparser.util.exceptions import SchemaEmptyParserError\n\nlog = logging.getLogger(__name__)\n\n\ndef get_flows_src_dst_address_pairs(device, flow_monitor):\n \"\"\" Gets flows under flow_monitor and returns source and destination address pairs\n\n Args:\n device ('obj'): Device to use\n flow_monitor ('str'): Flow monitor name\n\n Raises:\n N/A\n\n Returns:\n [('source_address', 'destination_address'), ...]\n \"\"\"\n log.info('Getting all source and destination address pairs under flow monitor {name}'\n .format(name=flow_monitor))\n\n try:\n output = device.parse('show flow monitor {name} cache format table'\n .format(name=flow_monitor))\n except SchemaEmptyParserError:\n return []\n\n pairs = []\n\n # All hardcoded keys are mandatory in the parser\n for src in output.get('ipv4_src_addr', {}):\n for dst in output['ipv4_src_addr'][src]['ipv4_dst_addr']:\n pairs.append((src, dst))\n\n return pairs\n", "id": "12093071", "language": "Python", "matching_score": 0.936393678188324, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/flow/get.py" }, { "content": "\"\"\"Common get info functions for UDP\"\"\"\n\n# Python\nimport logging\n\n\nlog = logging.getLogger(__name__)\n\n\ndef analyze_udp_in_mpls_packets(\n packets, ip_address, ttl, packet_count, destination_port\n):\n \"\"\" Analyze passed packets\n\n Args:\n packets('obj'): Packets to analyze\n ip_address ('str'): Destination IP address\n ttl (`int`): Time to live\n packet_count (`int`): Packet count to check during\n packet analysis\n destination_port (`int`): Destination port used to start \"traceroute\"\n Returns:\n Tuple of (matched packet or None, matched packet count)\n Raises:\n None\n \"\"\"\n try:\n from scapy.all import load_contrib, UDP\n from scapy.contrib.mpls import MPLS\n except ImportError:\n raise ImportError('scapy is not installed, please install it by running: '\n 'pip install scapy') from None\n count = 0\n\n for pkt in packets:\n # Analyze MPLS packets (EtherType 34887 == 0x8847, MPLS unicast)\n if pkt.type == 34887:\n load_contrib(\"mpls\")\n mpls_pkt = MPLS(pkt[\"Raw\"])\n\n if (\n mpls_pkt.haslayer(\"IP\")\n and mpls_pkt[\"IP\"].dst == ip_address\n and mpls_pkt[\"IP\"].ttl == ttl\n and mpls_pkt.haslayer(\"UDP\")\n ):\n\n # Catch the start of source and destination ports\n if count == 0:\n sport_count = mpls_pkt[\"UDP\"].sport\n dport_count = destination_port\n log.info(\n \"Found a packet that meets the requirement:\\nDestination:\\t{\"\n \"ip_pkt_dst}\\nDestination Port:\\t{dst_port}\\nSource:\\t\\t{\"\n \"ip_pkt_src}\\nSource Port:\\t{src_port}\\nUDP Packet:\\t{\"\n \"mpls_pkt}\\n\".format(\n ip_pkt_dst=mpls_pkt[\"IP\"].dst,\n dst_port=dport_count,\n ip_pkt_src=mpls_pkt[\"IP\"].src,\n src_port=sport_count,\n mpls_pkt=\"True\" if mpls_pkt[\"UDP\"] else \"False\",\n )\n )\n count += 1\n continue\n\n # Verify source and destination ports are incrementing\n if mpls_pkt[\"UDP\"].sport != sport_count + 1:\n log.info(\n \"Source port didn't increment to \"\n \"{source_port} as expected; instead it is {sp}\".format(\n source_port=sport_count + 1,\n sp=mpls_pkt[\"UDP\"].sport,\n )\n )\n return None, 0\n elif mpls_pkt[\"UDP\"].dport != dport_count + 1:\n
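 # Destination ports must advance in lockstep with the source ports,\n # one increment per traceroute probe.\n log.info(\n 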
\"destination port didn't increment to \"\n \"{destination_port} as expected; instead \"\n \"it is {dp}\".format(\n source_port=sport_count + 1,\n destination_port=dport_count + 1,\n dp=mpls_pkt[\"UDP\"].dport,\n )\n )\n return None, 0\n else:\n count += 1\n sport_count += 1\n dport_count += 1\n log.info(\n 'Found a packet that \"meets\" the requirement:\\nDestination:\\t{'\n \"ip_pkt_dst}\\nDestination Port:\\t{dst_port}\\nSource:\\t\\t{\"\n \"ip_pkt_src}\\nSource Port:\\t{src_port}\\nUDP Packet:\\t{\"\n \"mpls_pkt}\\n\".format(\n ip_pkt_dst=mpls_pkt[\"IP\"].dst,\n dst_port=dport_count,\n ip_pkt_src=mpls_pkt[\"IP\"].src,\n src_port=sport_count,\n mpls_pkt=\"True\" if mpls_pkt[\"UDP\"] else \"False\",\n )\n )\n\n if count == packet_count:\n return pkt, count\n\n return None, count\n", "id": "5351874", "language": "Python", "matching_score": 3.373854637145996, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/udp/get.py" }, { "content": "\"\"\"Common verify info functions for IP Precedence\"\"\"\n\n# Python\nimport logging\n\nlog = logging.getLogger(__name__)\n\n\ndef verify_ip_precedence_ip_precedence(packets):\n \"\"\"Verify that all packets have mapped IP precedence value to EXP\n\n Args:\n packets('obj'): Packets to analyze\n\n Returns:\n True / False\n \n Raises:\n None\n \"\"\"\n try:\n from scapy.all import load_contrib\n except ImportError:\n raise ImportError('scapy is not installed, please install it by running: '\n 'pip install scapy') from None\n log.info(\n \"Verifying that all the packets have mapped IP precedence value to EXP\"\n )\n load_contrib(\"mpls\")\n not_matched = False\n no_check = True\n for pkt in packets:\n if pkt.haslayer(\"Raw\"):\n mpls_pkt = MPLS(pkt[\"Raw\"])\n if mpls_pkt.haslayer(\"IP\"):\n no_check = False\n log.info(\n \"Analyzing the following packet:\"\n \"\\n-------------------------------\\n{}\".format(\n mpls_pkt.show(dump=True)\n )\n )\n tos = \"{0:08b}\".format(mpls_pkt[\"IP\"].tos)\n cos = \"{0:03b}\".format(mpls_pkt[\"MPLS\"].cos)\n if tos[0:3] != cos:\n not_matched = True\n log.info(\n \"MPLS EXP 'COS' value didn't match the IP Precedence 'TOS'\"\n )\n else:\n log.info(\n \"MPLS EXP 'COS' value matched the IP Precedence 'TOS'\"\n )\n\n if no_check:\n log.info(\n \"Didn't find any 'IPv4' protocol packets to \"\n \"analyze out of the {} packets\".format(len(packets))\n )\n\n if not_matched:\n return False\n\n return True\n", "id": "3750531", "language": "Python", "matching_score": 1.8642995357513428, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/ip_precedence/verify.py" }, { "content": "\"\"\"Common get info functions for DNS\"\"\"\n\n# Python\nimport os\nimport logging\nimport re\n\nlog = logging.getLogger(__name__)\n\n\ndef verify_dns_packets(packets, ip_address):\n \"\"\" Analyze the DNS packets\n\n Args:\n packets('str'): Packets to analyze\n ip_address('str'): The captured IP address\n\n Returns:\n True\n False\n \"\"\"\n\n for pkt in packets:\n if (\n pkt.haslayer(\"IP\")\n and pkt.haslayer(\"DNS\")\n and pkt[\"DNS\"].haslayer(\"DNS Resource Record\")\n ):\n if (\n pkt[\"DNS\"][\"DNS Resource Record\"].rdata\n and ip_address == pkt[\"DNS\"][\"DNS Resource Record\"].rdata\n ):\n log.info(\n \"Here is the packet meeting \"\n \"the requirements:\\n{pkt}\".format(pkt=pkt.show(dump=True))\n )\n return True\n\n return False\n", "id": "6277804", "language": "Python", "matching_score": 1.482900619506836, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/dns/verify.py" }, { 
"content": "\"\"\"Common verify functions for TACACS server\"\"\"\n\n# Python\nimport logging\nimport re\n\nlog = logging.getLogger(__name__)\n\n\ndef verify_ping_from_server(server, ip_address, count, interface):\n \"\"\"Verify ping from server\n\n Args:\n server (`obj`): Server Device object\n ip_address (`str`): IP address to ping\n count (`int`): repeat ping count\n interface (`str`): source ip/interface value\n Returns:\n True\n False\n Raises:\n None\n \"\"\"\n try:\n out = server.ping(addr=ip_address, count=count, interface=interface)\n except Exception as e:\n log.error(\n \"Ping from server failed with address\"\n \" {} and source ip {}\".format(ip_address, interface)\n )\n return False\n\n p1 = re.compile(\n r\"{} +packets +transmitted, +{} +received, +0%\"\n \" +packet +loss, +time +\\w+\".format(count, count)\n )\n m = p1.search(out)\n if m:\n return True\n else:\n return False\n", "id": "3541578", "language": "Python", "matching_score": 1.4461575746536255, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/server/verify.py" }, { "content": "# Steps\r\nfrom pyats.aetest.steps import Steps\r\n\r\n# Unicon\r\nfrom unicon.core.errors import SubCommandFailure\r\n\r\n\r\ndef configure_syslog_server(device, server):\r\n \"\"\" Configure Syslog servers\r\n\r\n Args:\r\n device ('obj') : Device to be configured server\r\n server ('str'): Syslog server to be configured \r\n steps ('obj'): Context manager object\r\n Returns:\r\n None\r\n Raises:\r\n SubCommandFailure\r\n \"\"\"\r\n\r\n try:\r\n device.configure(\"logging host {ip_address}\".format(ip_address=server))\r\n except SubCommandFailure as e:\r\n raise SubCommandFailure(\r\n \"Failed to configure syslog server \"\r\n \"with ip address {ip} on device {dev}\".format(\r\n ip=server, dev=device.name\r\n )\r\n )\r\n", "id": "3846221", "language": "Python", "matching_score": 1.2571890354156494, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/syslog/configure.py" }, { "content": "\"\"\"Common verify functions for syslog\"\"\"\n\nimport re\nimport logging\n\nlog = logging.getLogger(__name__)\n\n\ndef verify_syslog_interface_shut(interface, server, output):\n \"\"\" Verify log for interface shut message\n\n Args:\n interface ('str'): Interface to be verified\n output ('obj'): Tcpdump output\n server ('str'): Syslog server address\n Returns:\n True\n False\n Raises:\n None\n \"\"\"\n\n # <133>2143: Jun 7 02:45:48.206 EST: %LINK-5-CHANGED: Interface GigabitEthernet5, changed state to administratively down\n r1 = re.compile(\n r\".+\\%LINK\\-\\d+\\-CHANGED\\:\\s+Interface\\s+(?P<interface>\\S+)\"\n \"\\,\\s+changed\\s+state\\s+to\\s+(administratively\\s+down)\"\n )\n for line in output:\n result = r1.match(line.load.decode())\n\n if line.dst == server and result:\n group = result.groupdict()\n\n if group[\"interface\"] == interface:\n log.info(line.show(dump=True))\n return True\n else:\n log.error(\n \"Interface shutdown information not received \"\n \"on server {srv} for interface {intf}\".format(\n intf=interface, srv=server\n )\n )\n\n return False\n\n\ndef verify_syslog_interface_up(interface, server, output):\n \"\"\" Verify log for interface up message\n\n Args:\n interface ('str'): Interface to be verified\n output ('obj'): Tcpdump output\n server ('str'): Syslog server address\n Returns:\n True\n False\n Raises:\n None\n \"\"\"\n\n # <133>2143: Jun 7 02:53:26.222 EST: %LINK-3-UPDOWN: Interface GigabitEthernet4, changed state to up\n r1 = re.compile(\n 
\".+\\%LINK\\-3\\-UPDOWN\\:\\s+Interface\\s+(?P<interface>\\S+)\\,\\s\"\n \"+changed\\s+state\\s+to\\s+(up)\"\n )\n\n for line in output:\n result = r1.match(line.load.decode())\n\n if line.dst == server and result:\n group = result.groupdict()\n\n if group[\"interface\"] == interface:\n log.info(line.show(dump=True))\n return True\n\n log.error(\n \"Interface no shutdown information not received \"\n \"on server {srv} for interface {intf}\".format(\n intf=interface, srv=server\n )\n )\n\n return False\n\n\ndef verify_syslog_ospf_neighbor_up(interface, server, output):\n \"\"\" Verify log for ospf neighbor up message\n\n Args:\n interface ('str'): Interface to be verified\n output ('obj'): Tcpdump output\n server ('str'): Syslog server address\n Returns:\n True\n False\n Raises:\n None\n\n \"\"\"\n\n # <133>2143: Jun 7 02:49:10.999 EST: %OSPF-5-ADJCHG: Process 65109, Nbr 10.169.197.252 on GigabitEthernet2 from LOADING to FULL, Loading Done\n r1 = re.compile(\n r\".+\\%OSPF\\-\\d+\\-ADJCHG\\:\\s+Process\\s+\\d+\\,\\s+Nbr\\s+\\S+\\s+\"\n \"on\\s+(?P<interface>\\S+)\\s+from\\s+LOADING\\s+to\\s+FULL\\,\\s\"\n \"+Loading\\s+Done\"\n )\n\n for line in output:\n result = r1.match(line.load.decode())\n\n if line.dst == server and result:\n group = result.groupdict()\n\n if group[\"interface\"] == interface:\n log.info(line.show(dump=True))\n return True\n log.error(\n \"Interface no shutdown information not received \"\n \"on server {srv} for interface {intf}\".format(\n intf=interface, srv=server\n )\n )\n\n return False\n\n\ndef verify_syslog_ospf_neighbor_down(interface, server, output):\n \"\"\" Verify log for ospf neighbor down message\n\n Args:\n interface ('str'): Interface to be verified\n output ('obj'): Tcpdump output\n server ('str'): Syslog server address\n Returns:\n True\n False\n Raises:\n None\n \"\"\"\n\n # <133>2143: Jun 7 02:45:47.289 EST: %OSPF-5-ADJCHG: Process 65109, Nbr 10.169.197.252 on GigabitEthernet2 from FULL to DOWN, Neighbor Down: Interface down or detached\n r1 = re.compile(\n r\".+\\%OSPF\\-\\d+\\-ADJCHG\\:\\s+Process\\s+\\d+\\,\\s+Nbr\\s+\\S+\\s+\"\n \"on\\s+(?P<interface>\\S+)\\s+from\\s+FULL\\s+to\\s+DOWN\\,\\s+\"\n \"Neighbor\\s+Down\\:\\s+Interface\\s+down\\s+or\\s+detached\"\n )\n\n for line in output:\n result = r1.match(line.load.decode())\n\n if line.dst == server and result:\n group = result.groupdict()\n\n if group[\"interface\"] == interface:\n log.info(line.show(dump=True))\n return True\n log.error(\n \"OSPF neighbor state information not received \"\n \"on server {srv} for interface {intf}\".format(\n intf=interface, srv=server\n )\n )\n\n return False\n\n\ndef verify_syslog_bgp_peer_up(server, output):\n \"\"\" Verify log for bgp peer up message\n\n Args:\n output ('obj'): Tcpdump output\n server ('str'): Syslog server address\n Returns:\n True\n False\n Raises:\n None\n\n \"\"\"\n\n # <133>2143: Jun 7 02:49:14.705 EST: %BGP-5-ADJCHANGE: neighbor 192.168.36.220 Up\n r1 = re.compile(r\".+\\%BGP\\-\\d+\\-ADJCHANGE\\:\\s+neighbor\\s+\\S+\\s+Up\")\n\n for line in output:\n result = r1.match(line.load.decode())\n\n if line.dst == server and result:\n log.info(line.show(dump=True))\n return True\n else:\n log.error(\n \"BGP peer state information not received \"\n \"on server {srv}\".format(srv=server)\n )\n\n return False\n\n\ndef verify_syslog_bgp_peer_down(server, output):\n \"\"\" Verify log for bgp peer down message\n\n Args:\n output ('obj'): Tcpdump output\n server ('str'): Syslog server address\n Returns:\n True\n False\n Raises:\n None\n\n 
\"\"\"\n\n # <133>2143: Jun 7 02:49:14.705 EST: %BGP-5-ADJCHANGE: neighbor 192.168.36.220 Down\n r1 = re.compile(r\".+\\%BGP\\-\\d+\\-ADJCHANGE\\:\\s+neighbor\\s+\\S+\\s+Down.*\")\n\n for line in output:\n\n result = r1.match(line.load.decode())\n\n if line.dst == server and result:\n log.info(line.load.decode())\n return True\n log.error(\n \"BGP peer state information not received \"\n \"on server {srv}\".format(srv=server)\n )\n\n return False\n\n\ndef verify_syslog_interface_link_up(device, interface, server, output):\n \"\"\" Verify link up message in syslog server\n\n Args:\n device ('obj'): Device object\n interface ('str'): Interface to be verified\n output ('obj'): Tcpdump output\n server ('str'): Syslog server address\n Returns:\n True\n False\n Raises:\n None\n \"\"\"\n\n # Jun 7 02:53:26.222 EST: %LINK-3-UPDOWN: Interface GigabitEthernet4, changed state to up\n r1 = re.compile(\n \".+\\%LINK\\-3\\-UPDOWN\\:\\s+Interface\\s+(?P<interface>\\S+)\\,\\s\"\n \"+changed\\s+state\\s+to\\s+up\"\n )\n\n for line in output:\n\n result = r1.match(line.load.decode())\n\n if line.dst == server and result:\n\n group = result.groupdict()\n\n if group[\"interface\"] == interface:\n log.info(line.show(dump=True))\n return True\n\n log.error(\n \"Link state information not received \"\n \"on server {srv} for device {uut}\".format(uut=device.name, srv=server)\n )\n\n return False\n\n\ndef verify_syslog_interface_link_down(device, interface, server, output):\n \"\"\" Verify link down message in syslog server\n\n Args:\n device ('obj'): Device object\n interface ('str'): Interface to be verified\n output ('obj'): Tcpdump output\n server ('str'): Syslog server address\n Returns:\n True\n False\n Raises:\n None\n\n \"\"\"\n # Jun 7 02:53:26.222 EST: %LINK-3-UPDOWN: Interface GigabitEthernet4, changed state to down\n r1 = re.compile(\n \".+\\%LINK\\-3\\-UPDOWN\\:\\s+Interface\\s+(?P<interface>\\S+)\\,\\s\"\n \"+changed\\s+state\\s+to\\s+down\"\n )\n\n for line in output:\n result = r1.match(line.load.decode())\n\n if line.dst == server and result:\n group = result.groupdict()\n\n if group[\"interface\"] == interface:\n log.info(line.show(dump=True))\n return True\n log.error(\n \"Link state information not received \"\n \"on server {srv} for device {uut}\".format(uut=device.name, srv=server)\n )\n\n return False\n\n\ndef is_syslog_message_received(message, server, output):\n \"\"\" Verify if a message was received in Syslog server.\n It needs a output from a tcpdump command\n\n Args: \n device ('obj'): Device object\n output ('obj'): Tcpdump output\n server ('str'): Syslog server address\n message ('str'): Message to be verified in Syslog server\n Returns:\n True\n False\n Raises:\n None\n \"\"\"\n\n for packet in output:\n try:\n if packet.dst == server and message in packet.load.decode():\n log.info(packet.show(dump=True))\n log.info(\n \"Message '{message}' has been found in Syslog \"\n \"server {ip}\".format(message=message, ip=server)\n )\n return True\n\n # Some packets raise UnicodeDecodeError\n # It will continue to next packet\n except UnicodeDecodeError:\n continue\n\n log.info(\n \"Message '{message}' has not been found in Syslog \"\n \"server {ip}\".format(message=message, ip=server)\n )\n\n return False\n", "id": "4330197", "language": "Python", "matching_score": 1.6605026721954346, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/syslog/verify.py" }, { "content": "\"\"\"Common configure functions for OSPF\"\"\"\n\n# Python\nimport logging\n\n# Genie\nfrom 
genie.libs.parser.utils.common import Common\n\n# Unicon\nfrom unicon.core.errors import SubCommandFailure\n\n# Ospf\nfrom genie.libs.sdk.apis.iosxe.ospf.get import (\n get_router_ospf_section_running_config,\n)\n\nfrom genie.libs.sdk.apis.utils import has_configuration\n\nlog = logging.getLogger(__name__)\n\n\ndef configure_ospf_max_metric_router_lsa_on_startup(\n device, ospf_process_id, metric_value\n):\n \"\"\"Configure max-metric router-lsa on start-up\n\n Args:\n device (`obj`): Device object\n ospf_process_id (`int`): OSPF process id\n metric_value (`int`): Metric value to be configured\n\n Returns:\n None\n\n Raises:\n SubCommandFailure\n \"\"\"\n cfg_cmd = [\n \"router ospf {}\".format(ospf_process_id),\n \"max-metric router-lsa on-startup {}\".format(metric_value),\n ]\n\n try:\n device.configure(cfg_cmd)\n except SubCommandFailure as e:\n raise SubCommandFailure(\n \"Failed in configuring max-metric {metric_value}\"\n \" on device {device}, Error: {error}\".format(\n metric_value=metric_value, device=device, error=e\n )\n ) from e\n\n\ndef configure_shut_ospf(device, ospf_process_id):\n \"\"\" Configure shut on ospf process\n\n Args:\n device (`obj`): device to execute on\n ospf_process_id (`int`): ospf process number\n\n Return:\n None\n\n Raises:\n SubCommandFailure\n \"\"\"\n log.info(\n 'Configuring \"shutdown\" on \"router ospf {}\"'.format(ospf_process_id)\n )\n try:\n device.configure(\"router ospf {}\\n\" \"shutdown\".format(ospf_process_id))\n except SubCommandFailure as e:\n raise SubCommandFailure(\n 'Failed in configuring \"shutdown\" with '\n \"ospf process {ospf_process} on device {device}, \"\n \"Error: {error}\".format(\n ospf_process=ospf_process_id, device=device, error=e\n )\n ) from e\n\n\ndef configure_no_shut_ospf(device, ospf_process_id):\n \"\"\" Configure no shut on ospf process\n\n Args:\n device (`obj`): device to execute on\n ospf_process_id (`int`): ospf process number\n\n Return:\n None\n\n Raises:\n SubCommandFailure\n \"\"\"\n log.info(\n 'Configuring \"no shutdown\" on \"router ospf {}\"'.format(ospf_process_id)\n )\n try:\n device.configure(\n \"router ospf {}\\n\" \"no shutdown\".format(ospf_process_id)\n )\n except SubCommandFailure as e:\n raise SubCommandFailure(\n 'Failed in configuring \"no shutdown\" with '\n \"ospf process {ospf_process} on device {device}, \"\n \"Error: {error}\".format(\n ospf_process=ospf_process_id, device=device, error=e\n )\n ) from e\n\n\ndef remove_ospf_max_metric_configuration(device, ospf_process_id):\n \"\"\" Remove max-metric from running-config under ospf\n\n Args:\n device (`obj`): Device object \n ospf_process_id (`str`): Router OSPF id\n\n Return:\n None\n \n Raises:\n SubCommandFailure\n \"\"\"\n\n section_dict = get_router_ospf_section_running_config(\n device=device, ospf_process_id=ospf_process_id\n )\n if section_dict:\n if has_configuration(\n configuration_dict=section_dict,\n configuration=\"max-metric router-lsa\",\n ):\n try:\n device.configure(\n [\n \"router ospf {ospf_process_id}\".format(\n ospf_process_id=ospf_process_id\n ),\n \"no max-metric router-lsa\",\n ]\n )\n except SubCommandFailure as e:\n raise SubCommandFailure(\n \"Failed in removing max-metric from \"\n \"running-config under ospf {ospf_process_id} on device {device}, \"\n \"Error: {e}\".format(\n ospf_process_id=ospf_process_id,\n device=device.name,\n e=str(e),\n )\n ) from e\n else:\n raise Exception(\n \"Router OSPD id {ospf_process_id} is not \"\n \"configured in device {device}\".format(\n 
ospf_process_id=ospf_process_id, device=device.name\n )\n )\n\n\ndef configure_ospf_passive_interface(device, interface, ospf_process_id):\n \"\"\"Configure passive interface\n\n Args:\n device (`obj`): Device object\n ospf_process_id (`int`): ospf process id\n interface (`list`): interfaces to configure\n ex.)\n interface = ['tenGigabitEthernet0/4/0']\n\n Return:\n None\n \n Raises:\n SubCommandFailure\n \"\"\"\n\n config = [\"router ospf {}\".format(ospf_process_id)]\n\n if not isinstance(interface, list):\n interface = [interface]\n\n for intf in interface:\n config.append(\n \"passive-interface {}\".format(Common.convert_intf_name(intf))\n )\n\n try:\n device.configure(config)\n except SubCommandFailure as e:\n raise SubCommandFailure(\n \"Failed in configuring passive interfaces {interface} \"\n \"with OSPF process id {ospf_process_id} on device {device}, \"\n \"Error: {e}\".format(\n interface=interface,\n ospf_process_id=ospf_process_id,\n device=device.name,\n e=str(e),\n )\n ) from e\n\n\ndef remove_ospf_passive_interface(device, interface, ospf_process_id):\n \"\"\"Remove passive interface\n\n Args:\n device (`obj`): Device object\n ospf_process_id (`int`): OSPF process id\n interface (`list`): interfaces to configure\n ex.)\n interface = ['tenGigabitEthernet0/4/0']\n\n Return:\n None\n \n Raises:\n SubCommandFailure\n \"\"\"\n config = [\"router ospf {}\".format(ospf_process_id)]\n\n if not isinstance(interface, list):\n interface = [interface]\n\n for intf in interface:\n config.append(\n \"no passive-interface {}\".format(Common.convert_intf_name(intf))\n )\n\n try:\n device.configure(config)\n except SubCommandFailure as e:\n raise SubCommandFailure(\n \"Failed in removing passive interfaces {interface}\"\n \"with OSPF process id {ospf_process_id}\"\n \" on device {device}, Error: {e}\".format(\n interface=interface,\n ospf_process_id=ospf_process_id,\n device=device.name,\n e=str(e),\n )\n ) from e\n\n\ndef configure_ospf_cost(device, interface, ospf_cost):\n \"\"\"configure ospf cost\n\n Args:\n device (`obj`): Device object\n ospf_cost (`int`): Ospf cost value\n interface (`str`): interface to configure\n ex.)\n interface = 'tenGigabitEthernet0/4/0'\n\n Return:\n None\n \n Raises:\n SubCommandFailure\n \"\"\"\n config = []\n config.append(\"interface {}\".format(interface))\n config.append(\"ip ospf cost {}\".format(ospf_cost))\n try:\n device.configure(\"\\n\".join(config))\n except SubCommandFailure as e:\n raise SubCommandFailure(\n \"Ospf cost {ospf_cost} is not configured on device\"\n \" {device} for interface {interface}\".format(\n ospf_cost=ospf_cost, device=device, interface=interface\n )\n ) from e\n\n\ndef configure_ospf_networks(device, ospf_process_id, ip_address, netmask, area):\n \"\"\" Configures ospf on networks\n\n Args:\n device ('obj'): Device to use\n ospf_process_id ('str'): Process id for ospf process\n ip_address ('list'): List of ip_address' to configure\n netmask ('str'): Netmask to use\n area ('str'): Area to configure under\n\n Returns:\n N/A\n\n Raises:\n SubCommandFailure\n \"\"\"\n cmd = ['router ospf {pid}'.format(pid=ospf_process_id)]\n\n for ip in ip_address:\n cmd.append('network {ip_address} {netmask} area {area}'\n .format(ip_address=ip, netmask=netmask, area=area))\n\n device.configure(cmd)", "id": "2668815", "language": "Python", "matching_score": 4.584869384765625, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/ospf/configure.py" }, { "content": "\"\"\"Common configure functions for OSPF\"\"\"\n\n# 
Python\nimport logging\n\n# Unicon\nfrom unicon.core.errors import SubCommandFailure\n\nlog = logging.getLogger(__name__)\n\n\ndef configure_ospf_passive_interface(device, interface, area):\n \"\"\"configure passive interface\n\n Args:\n device (`obj`): Device object\n interface (`str`): interface to configure\n ex.)\n interface = 'tenGigabitEthernet0/4/0'\n area (`str`): IP address of area\n\n Returns:\n None\n \n Raise:\n SubCommandFailure\n \"\"\"\n config = []\n config.append(\n \"set protocols ospf area {} interface \"\n \"{} passive\\n\".format(area, interface)\n )\n\n try:\n device.configure(\"\".join(config))\n except SubCommandFailure as e:\n raise SubCommandFailure(\n \"Could not configure passive on {interface}. Error:\\n{error}\".format(\n interface=interface, error=e\n )\n )\n\n\ndef remove_ospf_passive_interface(device, interface, area):\n \"\"\"remove passive interface on junos device\n\n Args:\n device (`obj`): Device object\n interface (`str`): interface to configure\n ex.)\n interface = 'tenGigabitEthernet0/4/0'\n area (`str`): IP address of area\n\n Returns:\n None\n \n Raise:\n SubCommandFailure\n \"\"\"\n config = []\n config.append(\n \"delete protocols ospf area {} interface \"\n \"{} passive\\n\".format(area, interface)\n )\n\n try:\n device.configure(\"\".join(config))\n except SubCommandFailure as e:\n raise SubCommandFailure(\n \"Could not remove passive configuration on {interface}. Error:\\n{error}\".format(\n interface=interface, error=e\n )\n )\n", "id": "4668571", "language": "Python", "matching_score": 2.860736131668091, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/junos/ospf/configure.py" }, { "content": "\"\"\"Common configure functions for bfd\"\"\"\n\n# Python\nimport logging\n\n# Unicon\nfrom unicon.core.errors import SubCommandFailure\n\nlog = logging.getLogger(__name__)\n\n\ndef configure_bfd_on_interface(\n device, interface, interval, min_rx, multiplier\n):\n \"\"\" Configures bfd on interface\n\n Args:\n device ('obj'): device to use\n interface ('str'): interface to configure\n interval ('str'): interval\n min_rx ('str'): min_rx\n multiplier ('str'): multiplier\n Returns:\n None\n Raises:\n SubCommandFailure: Failed configuring bfd on interface\n\n \"\"\"\n log.info(\n \"Configuring bfd with interval={}, min_rx={}, multiplier={}, on \"\n \"interface {}\".format(interval, min_rx, multiplier, interface)\n )\n\n try:\n device.configure(\n [\n \"interface {}\".format(interface),\n \"bfd interval {} min_rx {} multiplier {}\".format(\n interval, min_rx, multiplier\n ),\n ]\n )\n\n except SubCommandFailure:\n raise SubCommandFailure(\n \"Could not configure bfd on interface {interface}\".format(\n interface=interface\n )\n )\n\n\ndef enable_bfd_on_ospf(device, interface):\n \"\"\" Enabled bfd on ospf protocol on interface\n\n Args:\n device ('obj'): device to use\n interface ('str'): interface to configure under\n Returns:\n None\n Raises:\n SubCommandFailure: Failed configuring bfd on ospf protocol\n \"\"\"\n log.info(\"Enabling bfd on ospf protocol\")\n try:\n device.configure([\"interface {}\".format(interface), \"ip ospf bfd\"])\n except SubCommandFailure:\n raise SubCommandFailure(\n \"Could not enable bfd on ospf protocol on interface {interface}\".format(\n interface=interface\n )\n )\n\n\ndef disable_bfd_on_ospf(device, interface):\n \"\"\" Disables bfd on ospf protocol\n\n Args:\n device ('obj'): device to use\n interface ('str'): interface to configure under\n Returns:\n None\n Raises:\n SubCommandFailure: Failed 
disabling bfd on ospf protocol\n \"\"\"\n log.info(\"Disabling bfd on ospf protocol\")\n try:\n device.configure([\"interface {}\".format(interface), \"no ip ospf bfd\"])\n except SubCommandFailure:\n raise SubCommandFailure(\n \"Could not disable bfd on ospf protocol on interface {interface}\".format(\n interface=interface\n )\n )\n\n\ndef enable_bfd_static_route(device, interface, ip_address):\n \"\"\" Enables bfd static route on device\n\n Args:\n device ('obj'): device to use\n interface ('str'): interface to configure under\n ip_address ('str'): ip address of destination\n Returns:\n None\n Raises:\n SubCommandFailure: Failed enabling bfd static rout on device\n \"\"\"\n log.info(\n \"Enabling bfd static route on {} to {}\".format(interface, ip_address)\n )\n try:\n device.configure(\n [\"ip route static bfd {} {}\".format(interface, ip_address)]\n )\n except SubCommandFailure:\n raise SubCommandFailure(\n \"Could not configure bfd static route on interface {interface}\".format(\n interface=interface\n )\n )\n", "id": "6690062", "language": "Python", "matching_score": 1.5538541078567505, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/bfd/configure.py" }, { "content": "\"\"\"Common configure functions for arp\"\"\"\n\n# Python\nimport logging\n\n# Unicon\nfrom unicon.core.errors import SubCommandFailure\n\nlog = logging.getLogger(__name__)\n\n\ndef configure_arp_timeout(device, interface, timeout):\n \"\"\" Config arp timeout on interface\n\n Args:\n device (`obj`): Device object\n interface (`str`): Interface name\n timeout (`int`): timeout in second\n Returns:\n None\n Raises:\n SubCommandFailure\n \"\"\"\n log.info(\n \"Configuring arp timeout on interface {} with value {}\".format(\n interface, timeout\n )\n )\n try:\n device.configure(\n [\n \"interface {}\".format(interface),\n \"arp timeout {}\".format(timeout),\n ]\n )\n except SubCommandFailure as e:\n raise SubCommandFailure(\n \"Failed in configuring arp timeout \"\n \"on interface {interface} with value {timeout} \"\n \"on device {device}, \"\n \"Error: {e}\".format(\n interface=interface,\n timeout=timeout,\n device=device.name,\n e=str(e),\n )\n ) from e\n\n\ndef remove_arp_timeout(device, interface):\n \"\"\" Remove arp timeout configuration\n\n Args:\n device (`obj`): Device object\n interface (`str`): Interface name\n Returns:\n None\n Raises:\n SubCommandFailure\n \"\"\"\n log.info(\"Removing arp timeout on interface {}\".format(interface))\n try:\n device.configure([\"interface {}\".format(interface), \"no arp timeout\"])\n except SubCommandFailure as e:\n raise SubCommandFailure(\n \"Failed in removing arp timeout \"\n \"on interface {interface} \"\n \"on device {device}, \"\n \"Error: {e}\".format(\n interface=interface, device=device.name, e=str(e)\n )\n ) from e\n\n\ndef configure_static_arp(device, ip_address, mac_address):\n \"\"\" Configure static arp\n\n Args:\n device (`obj`): Device object\n ip_address (`str`): IP address\n mac_address (`str`): MAC address\n Returns:\n None\n Raises:\n SubCommandFailure\n \"\"\"\n log.info(\"Configuring static arp\")\n try:\n device.configure(\n \"arp {ip_address} {mac_address} arpa\".format(\n ip_address=ip_address, mac_address=mac_address\n )\n )\n except SubCommandFailure as e:\n raise SubCommandFailure(\n \"Failed in configuring static arp \"\n \"with IP address {ip_address} \"\n \"and MAC address {mac_address} \"\n \"on device {device}, \"\n \"Error: {e}\".format(\n ip_address=ip_address,\n mac_address=mac_address,\n device=device.name,\n 
e=str(e),\n )\n ) from e\n", "id": "11467857", "language": "Python", "matching_score": 1.6717021465301514, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/arp/configure.py" }, { "content": "\"\"\"Utility type functions for interacting with Jinja templates\"\"\"\n\n# Python\nimport logging\n\n# Unicon\nfrom unicon.core.errors import SubCommandFailure\n\nlog = logging.getLogger(__name__)\n\ndef change_configuration_using_jinja_templates(device, template, **kwargs):\n \"\"\"Use Jinja templates to change the device configuration\n\n Args:\n device (`obj`): Device object\n template (`obj`): Jinja template to be used in configuration\n kwargs (`dict`): Parameters to be passed to the Jinja template\n\n Returns:\n None\n Raises:\n SubCommandFailure: Failed configuring device\n \"\"\"\n\n out = template.render(**kwargs)\n\n try:\n device.configure(out)\n except SubCommandFailure as e:\n raise SubCommandFailure(\n \"Failed in applying the following \"\n \"configuration:\\n{config}, error:\\n{e}\".format(config=out, e=e)\n )\n\n log.info(\"Successfully changed configuration using the jinja template\")\n", "id": "11341538", "language": "Python", "matching_score": 0.7433637380599976, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/jinja/configure.py" }, { "content": "\"\"\"Common verify functions for archive\"\"\"\n\n# Python\nimport logging\n\nlog = logging.getLogger(__name__)\n\n\ndef compare_archive_config_dicts(dict1, dict2, exclude=None):\n \"\"\" Checks for differences between two 'show archive' dictionaries\n\n Args:\n dict1('dict'): first dictionary\n dict2('dict'): second dictionary\n exclude('list'): commands to leave out of the returned diff\n\n Returns:\n list of commands that are different between the two configs\n Raises:\n None\n \"\"\"\n if not exclude:\n exclude = []\n\n log.info(\"Gathering differences between archive configs\")\n\n # Default to empty lists so inputs missing both keys do not raise\n # UnboundLocalError below\n list_of_commands1 = []\n list_of_commands2 = []\n\n if \"list_of_commands\" in dict1:\n list_of_commands1 = dict1[\"list_of_commands\"]\n elif \"diff\" in dict1:\n list_of_commands1 = dict1[\"diff\"]\n\n if \"list_of_commands\" in dict2:\n list_of_commands2 = dict2[\"list_of_commands\"]\n elif \"diff\" in dict2:\n list_of_commands2 = dict2[\"diff\"]\n\n diff = list(set(list_of_commands2) - set(list_of_commands1))\n diff.extend(list(set(list_of_commands1) - set(list_of_commands2)))\n for exc in exclude:\n try:\n diff.remove(exc)\n except ValueError:\n continue\n\n return diff\n", "id": "6407337", "language": "Python", "matching_score": 2.2044994831085205, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/archive/verify.py" }, { "content": "\"\"\"Common get info functions for archive\"\"\"\n\n# Python\nimport logging\n\nfrom genie.metaparser.util.exceptions import SchemaEmptyParserError\n\nlog = logging.getLogger(__name__)\n\n\ndef get_archive_config_incremental_diffs(device, file):\n \"\"\" Gets the archive incremental diffs from file\n\n Args:\n device ('obj'): Device object\n file('str'): file to get diff from\n Returns:\n Parser output\n None\n Raises:\n None\n\n \"\"\"\n log.info(\"Getting archive config incremental-diffs from {}\".format(file))\n try:\n out = device.parse(\n \"show archive config incremental-diffs \" \"{}\".format(file)\n )\n except SchemaEmptyParserError:\n return None\n\n return out\n\n\ndef get_archive_config_differences(device, file1, file2):\n \"\"\" Gets the archive differences between {file1} and {file2}\n\n Args:\n device ('obj'): Device object\n file1('str'): file1 to get diff from\n file2('str'): file2 to get diff from\n\n Returns:\n 
Parser output\n None\n Raises:\n None\n \"\"\"\n log.info(\n \"Getting archive differences between {} and {}\".format(file1, file2)\n )\n try:\n out = device.parse(\n \"show archive config differences {} \" \"{}\".format(file1, file2)\n )\n except SchemaEmptyParserError:\n return None\n\n return out\n", "id": "8334923", "language": "Python", "matching_score": 0.6343634724617004, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/archive/get.py" }, { "content": "import logging\r\n\r\nfrom genie.metaparser.util.exceptions import SchemaEmptyParserError\r\n\r\nlog = logging.getLogger(__name__)\r\n\r\n\r\ndef is_version_image_installed(device, image):\r\n \"\"\" Verify if image is installed on device\r\n Args:\r\n device ('str'): Device object\r\n image ('str'): Image being verified\r\n Raise:\r\n None\r\n Return:\r\n True\r\n False\r\n \"\"\"\r\n\r\n try:\r\n output = device.parse(\"show version\")\r\n except SchemaEmptyParserError as e:\r\n return False\r\n\r\n installed_image = output[\"version\"][\"system_image\"]\r\n\r\n if installed_image in image:\r\n return True\r\n else:\r\n log.error(\"Running image is {image}\".format(image=installed_image))\r\n\r\n return False\r\n", "id": "9193372", "language": "Python", "matching_score": 2.960688591003418, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/version/verify.py" }, { "content": "\"\"\" Common get info functions for system \"\"\"\n\n# Python\nimport logging\n\n# Genie\nfrom genie.metaparser.util.exceptions import SchemaEmptyParserError\n\nlog = logging.getLogger(__name__)\n\n\ndef get_current_system_datetime(device):\n \"\"\" Returns current time of system\n\n Args:\n device ('obj'): device to use\n\n Returns:\n current time ('str')\n Raises:\n None\n \"\"\"\n log.info(\"Getting current system time\")\n\n try:\n out = device.parse(\"show clock\")\n except SchemaEmptyParserError:\n return None\n\n if out and \"time\" in out and \"month\" in out and \"day\" in out:\n return \"{} {} {}\".format(out[\"month\"], out[\"day\"], out[\"time\"])\n", "id": "3911961", "language": "Python", "matching_score": 1.4489980936050415, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/system/get.py" }, { "content": "\"\"\"Common get info functions for routing\"\"\"\n\n# Python\nimport os\nimport logging\nimport re\n\n# Genie\nfrom genie.metaparser.util.exceptions import SchemaEmptyParserError\n\nlog = logging.getLogger(__name__)\n\n\ndef get_arp_table_count(device):\n \"\"\"\n Get arp table count\n\n Args:\n device(`obj`): Device str\n Returns:\n arp table count\n \"\"\"\n try:\n output_arp = device.parse(\"show arp summary\")\n except SchemaEmptyParserError as e:\n log.error(\n \"Failed to parse 'show arp summary', Error: {}\".format(str(e))\n )\n return None\n\n arp_count = output_arp[\"total_num_of_entries\"].get(\"arp_table_entries\", 0)\n\n return arp_count\n", "id": "1398898", "language": "Python", "matching_score": 1.3879115581512451, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/arp/get.py" }, { "content": "from genie.metaparser.util.exceptions import SchemaEmptyParserError\r\n\r\n\r\ndef get_processes_five_seconds_cpu_usage(device):\r\n \"\"\" Get average CPU usage for last 5 seconds\r\n\r\n Args:\r\n device ('obj'): Device objecte\r\n\r\n Returns:\r\n CPU usage for last 5 seconds\r\n None\r\n Raises:\r\n None\r\n \"\"\"\r\n\r\n try:\r\n output = device.parse(\"show processes cpu\")\r\n except SchemaEmptyParserError:\r\n return None\r\n\r\n return 
output[\"five_sec_cpu_total\"]\r\n", "id": "10230663", "language": "Python", "matching_score": 0.4532235860824585, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/processes/get.py" }, { "content": "'''IOSXE ASR1K implementation for ISSU triggers'''\n\n# Python\nimport sys\nimport argparse\nimport logging\n\n# ATS\nfrom ats import aetest\nfrom ats.utils.objects import R\n\n# Genie Libs\nfrom genie.libs.sdk.libs.utils.mapping import Mapping\nfrom genie.libs.sdk.triggers.ha.ha import TriggerIssu as CommonIssu\n\nlog = logging.getLogger(__name__)\n\n# Trigger required data settings\n# Which key to exclude for Platform Ops comparison\nplatform_exclude = ['maker', 'rp_uptime', 'sn', 'main_mem',\n 'switchover_reason', 'config_register']\n\n\nclass TriggerIssu(CommonIssu):\n \"\"\"Do ISSU on device.\"\"\"\n\n __description__ = \"\"\"\"Do ISSU on device.\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n active_rp: `str`\n standby_rp: `str`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn Platform Ops object and store the state of active rp ans standby rp\n if has any, otherwise, SKIP the trigger\n 2. Do Issue on device.\n 3. 
Learn Platform Ops again and the ops are the same as the Ops in step 1\n\n \"\"\"\n\n # Parse argv for '--issu_upgrade_image'\n def parse_args(self, argv):\n parser = argparse.ArgumentParser()\n parser.add_argument('--issu_upgrade_image',\n default=None,\n help='URL path of the ISSU upgrade image')\n self.parameters['upgrade_image'] = parser.parse_args(argv).issu_upgrade_image\n\n mapping = Mapping(\\\n requirements={\\\n 'ops.platform.platform.Platform':{\n 'requirements': [\\\n [['slot', 'rp', '(?P<active_rp>.*)', 'state', 'ok, active'],\n ['slot', 'rp', '(?P<active_rp>.*)', 'issu', 'in_progress', False]],\n [['slot', 'rp', '(?P<standby_rp>.*)', 'state', 'ok, standby'],\n ['slot', 'rp', '(?P<standby_rp>.*)', 'issu', 'in_progress', False]]],\n 'all_keys': True,\n 'exclude': platform_exclude}},\n verify_ops={\\\n 'ops.platform.platform.Platform':{\n 'requirements': [\\\n ['slot', 'rp', '(?P<active_rp>.*)', 'state', 'ok, active'],\n ['slot', 'rp', '(?P<standby_rp>.*)', 'state', 'ok, standby']],\n 'exclude': platform_exclude}},\n num_values={'active_rp':1, 'standby_rp':1})\n", "id": "11933127", "language": "Python", "matching_score": 4.105739116668701, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/ha/issu/iosxe/asr1k/issu.py" }, { "content": "'''IOSXR implementation for Reload triggers'''\n\n# import python\nimport logging\n\n# import ats\nfrom ats import aetest\nfrom ats.utils.objects import R\n\n# Genie Libs\nfrom genie.libs.sdk.libs.utils.mapping import Mapping\nfrom genie.libs.sdk.triggers.ha.ha import \\\n TriggerReload as CommonReload, \\\n TriggerReloadLc\n\nlog = logging.getLogger(__name__)\n\n# Trigger required data settings\n# Which key to exclude for Platform Ops comparison\nplatform_exclude = ['maker', 'total_free_bytes', 'rp_uptime']\n\n\nclass TriggerReload(CommonReload):\n \"\"\"Reload the whole device.\"\"\"\n \n __description__ = \"\"\"Reload the whole device.\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n\n steps:\n 1. Learn Platform Ops object and store the \"IOS XR RUN|OK\" slot(s)\n if has any, otherwise, SKIP the trigger\n 2. Do reload by command \"admin reload location all\"\n 3. Learn Platform Ops again and verify the state of RP(s) is \n \"Active|Standby\", verify every LC status is \"IOS XR RUN|OK\",\n and verify left attributes from the ops are the same as the Ops in step 1\n 4. 
Update platform PTS if feature pts is enabled,\n Update global/local veirifications if enabled\n\n \"\"\"\n\n # Mapping of Information between Ops and Conf\n # Also permit to dictates which key to verify\n mapping = Mapping(requirements={'ops.platform.platform.Platform':{\n 'requirements': [\\\n [['slot', 'rp', '(?P<rp>.*)',\n 'state', 'IOS XR RUN']],\n [['slot', 'lc', '(?P<lc>.*)',\n 'subslot', '(?P<subslot>.*)',\n 'state', '(?P<state>IOS XR RUN|OK)']],\n [['virtual_device', '(?P<virtual_device>.*)',\n 'membership', '(?P<member>.*)',\n 'vd_ms_red_state', '(?P<vd_ms_red_state>Primary|Backup)']],\n ],\n 'all_keys': True,\n 'exclude': platform_exclude}},\n verify_ops={'ops.platform.platform.Platform':{\n 'requirements': [\\\n ['slot', 'rp', '(?P<rp>.*)',\n 'state', 'IOS XR RUN'],\n ['slot', 'rp', '(?P<rp>.*)',\n 'redundancy_state', '(Active|Standby)'],\n ['virtual_device', '(?P<virtual_device>.*)',\n 'membership', '(?P<member>.*)',\n 'vd_ms_red_state', '(Primary|Backup)'],\n ['virtual_device', '(?P<virtual_device>.*)',\n 'vd_dSDRsc_nod', '([\\w\\/]+)'],\n ['virtual_device', '(?P<virtual_device>.*)',\n 'vd_dSDRsc_partner_node', '([\\w\\/]+)'],\n ['slot', 'lc', '(?P<lc>.*)',\n 'subslot', '(?P<subslot>.*)',\n 'state', '(IOS XR RUN|OK)']],\n 'exclude': platform_exclude}},\n num_values={'rp': 'all', 'lc': 'all', 'subslot': 'all',\n 'virtual_device': 'all', 'member': 'all'})\n\n\nclass TriggerReloadActiveRP(TriggerReloadLc):\n \"\"\"Reload active supervisor node on device.\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n lcRole (`str`): The role of LC which is 'active'\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n\n steps:\n 1. Learn Platform Ops object and store the \"active\" RP and \"standby\" RP\n if has any, otherwise, SKIP the trigger\n 2. Do reload by command \"admin reload location <lc>\"\n 3. Learn Platform Ops again and verify the roles of \n \"active\" RP and \"standby\" RP are swapped,\n and verify left attributes from the ops are the same as the Ops in step 1\n 4. 
Update platform PTS if feature pts is enabled,\n Update global/local veirifications if enabled\n\n \"\"\"\n # Mapping of Information between Ops and Conf\n # Also permit to dictates which key to verify\n mapping = Mapping(requirements={'ops.platform.platform.Platform':{\n 'requirements': [\\\n [['slot', 'rp', '(?P<active_rp>.*)',\n 'redundancy_state', 'Active'],\n ['slot', 'rp', '(?P<active_rp>.*)',\n 'state', 'IOS XR RUN']],\n [['slot', 'rp', '(?P<standby_rp>.*)',\n 'redundancy_state', 'Standby'],\n ['slot', 'rp', '(?P<standby_rp>.*)',\n 'state', 'IOS XR RUN']],\n [['virtual_device', '(?P<virtual_device>.*)',\n 'membership', '(?P<active_device>.*)',\n 'vd_ms_red_state', 'Primary'],\n ['virtual_device', '(?P<virtual_device>.*)',\n 'membership', '(?P<standby_device>.*)',\n 'vd_ms_red_state', 'Backup']],\n [['redundancy_communication', True]],\n ],\n 'all_keys': True,\n 'exclude': platform_exclude}},\n verify_ops={'ops.platform.platform.Platform':{\n 'requirements': [\\\n ['slot', 'rp', '(?P<active_rp>.*)',\n 'redundancy_state', 'Standby'],\n ['slot', 'rp', '(?P<active_rp>.*)',\n 'state', 'IOS XR RUN'],\n ['slot', 'rp', '(?P<standby_rp>.*)',\n 'redundancy_state', 'Active'],\n ['slot', 'rp', '(?P<standby_rp>.*)',\n 'state', 'IOS XR RUN'],\n\n ['virtual_device', '(?P<virtual_device>.*)',\n 'membership', '(?P<active_device>.*)',\n 'vd_ms_red_state', 'Backup'],\n ['virtual_device', '(?P<virtual_device>.*)',\n 'membership', '(?P<standby_device>.*)',\n 'vd_ms_red_state', 'Primary'],\n\n ['virtual_device', '(?P<virtual_device>.*)',\n 'vd_dSDRsc_nod', '(?P<standby_device>.*)'],\n ['virtual_device', '(?P<virtual_device>.*)',\n 'vd_dSDRsc_partner_node', '(?P<active_device>.*)']],\n 'exclude': platform_exclude}},\n num_values={'active_rp':1, 'standby_rp':1,\n 'virtual_device': 1, 'active_device':1,\n 'standby_device': 1})\n\n\nclass TriggerReloadStandbyRP(TriggerReloadLc):\n \"\"\"Reload standby supervisor node on device.\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n lcRole (`str`): The role of LC which is 'standby'\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n\n steps:\n 1. Learn Platform Ops object and store the \"standby\" RP\n if has any, otherwise, SKIP the trigger\n 2. Do reload by command \"admin reload location <lc>\"\n 3. Learn Platform Ops again and the ops are the same as the Ops in step 1\n 4. 
Update platform PTS if feature pts is enabled,\n Update global/local verifications if enabled\n\n \"\"\"\n\n # Mapping of Information between Ops and Conf\n # Also permits dictating which keys to verify\n mapping = Mapping(requirements={'ops.platform.platform.Platform':{\n 'requirements': [\\\n ['virtual_device', '(?P<virtual_device>.*)',\n 'membership', '(?P<standby_device>.*)',\n 'vd_ms_red_state', 'Backup'],\n ['virtual_device', '(?P<virtual_device>.*)',\n 'membership', '(?P<standby_device>.*)',\n 'vd_ms_status', 'IOS XR RUN'],\n ],\n 'exclude': platform_exclude}},\n verify_ops={'ops.platform.platform.Platform':{\n 'requirements': [\\\n ['virtual_device', '(?P<virtual_device>.*)',\n 'membership', '(?P<standby_device>.*)',\n 'vd_ms_red_state', 'Backup']],\n 'exclude': platform_exclude}},\n num_values={'virtual_device':1, 'standby_device': 1})\n\n\nclass TriggerReloadOirEdge(TriggerReloadLc):\n \"\"\"Reload MPA node on device.\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iterations when looping is needed,\n in second. Default: 15\n lcRole (`str`): The role of LC which is 'lc'\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n\n steps:\n 1. Learn Platform Ops object and store the \"MPA\" LC\n if it has any, otherwise, SKIP the trigger\n 2. Do reload by command \"admin reload location <lc>\"\n 3. Learn Platform Ops again and verify the ops are the same as the Ops in step 1\n 4. Update platform PTS if feature pts is enabled,\n Update global/local verifications if enabled\n\n \"\"\"\n\n # Mapping of Information between Ops and Conf\n # Also permits dictating which keys to verify\n mapping = Mapping(requirements={'ops.platform.platform.Platform':{\n 'requirements': [\\\n ['virtual_device', '(?P<virtual_device>.*)',\n 'membership', '(?P<lc>.*)',\n 'vd_ms_type', 'LC'],\n ['virtual_device', '(?P<virtual_device>.*)',\n 'membership', '(?P<lc>.*)',\n 'vd_ms_status', 'IOS XR RUN'],\n ],\n 'exclude': platform_exclude}},\n verify_ops={'ops.platform.platform.Platform':{\n 'requirements': [\\\n ['virtual_device', '(?P<virtual_device>.*)',\n 'membership', '(?P<lc>.*)',\n 'vd_ms_status', 'IOS XR RUN'],],\n 'exclude': platform_exclude}},\n num_values={'lc':'all', 'virtual_device':'all'})\n", "id": "9490860", "language": "Python", "matching_score": 2.9108033180236816, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/ha/reload/iosxr/reload.py" }, { "content": "'''HA Main function'''\n\n# Python\nimport time\nimport logging\n\n# genie\nfrom genie.utils.timeout import TempResult\nfrom genie.harness.utils import connect_device, disconnect_device\n\n# unicon\nfrom unicon.core.errors import SubCommandFailure\nfrom unicon.eal.dialogs import Statement, Dialog\n\n# module logger\nlog = logging.getLogger(__name__)\n\n\nclass HA(object):\n \"\"\"Class to handle HA related actions\"\"\"\n\n def __init__(self, device=None, filetransfer=None):\n \"\"\"built-in __init__\n\n instantiates each HA.\n\n Arguments\n ---------\n device (`obj`): Device object\n filetransfer (`obj`): filetransferutils object\n \"\"\"\n self.device = device\n self.filetransfer = filetransfer\n\n def get_debug_plugin(self, debug_plugin):\n pass\n\n def switchover(self, steps, timeout):\n \"\"\"Do the switchover action 
and reconnect to router after switchover.\n\n Args:\n Mandatory:\n steps (`obj`) : Step object to represent each step taken.\n timeout (`obj`) : \n max_time (int): Maximum wait time for the trigger,\n in second. Default: 180\n interval (int): Wait time between iterations when looping is needed,\n in second. Default: 15\n\n Returns:\n AETEST Step Result\n\n\n Raises:\n None\n\n Example:\n >>> switchover(steps=ats.aetest.Steps(),\n timeout=genie.utils.timeout.Timeout(\n max_time=180,\n interval=15))\n \"\"\"\n with steps.start('Switchover', continue_=True) as step:\n try:\n self._switchover()\n except SubCommandFailure:\n pass\n except NotImplementedError:\n step.failed('Failed to switchover: Switchover Aborted')\n except Exception as e:\n raise Exception(str(e))\n \n self._reconnect(steps=steps, timeout=timeout)\n\n def reload(self, steps, timeout, reload_timeout, config_lock_retry_sleep, config_lock_retries):\n \"\"\"Do the reload the whole box action and\n reconnect to router after reload.\n\n Args:\n Mandatory:\n steps (`obj`) : Step object to represent each step taken.\n timeout (`obj`) : \n max_time (int): Maximum wait time for the trigger,\n in second. Default: 180\n interval (int): Wait time between iterations when looping is needed,\n in second. Default: 15\n\n Returns:\n AETEST Step Result\n\n\n Raises:\n None\n\n Example:\n >>> reload(steps=ats.aetest.Steps(),\n timeout=genie.utils.timeout.Timeout(\n max_time=180,\n interval=15))\n \"\"\"\n with steps.start('Reloading the device {}'.format(self.device.name),\n continue_=True) as step:\n # unicon\n dialog = Dialog([\n Statement(pattern=r'\\(y\\/n\\) +\\[n\\].*',\n action='sendline(y)',\n loop_continue=True,\n continue_timer=False),\n Statement(pattern=r'Save\\? *\\[yes\\/no\\]:.*',\n action='sendline(y)',\n loop_continue=True,\n continue_timer=False)\n ])\n\n # store original error pattern\n origin_err_pat = self.device.settings.ERROR_PATTERN.copy()\n\n # TODO - update more with issues when seeing\n # update the error pattern\n self.device.settings.ERROR_PATTERN.append(\"Write failed: Broken pipe\")\n \n try:\n self.device.reload(dialog=dialog, \n timeout=reload_timeout,\n config_lock_retry_sleep=config_lock_retry_sleep,\n config_lock_retries=config_lock_retries)\n \n except SubCommandFailure:\n # read the capture setting errors\n try:\n out = self.device.spawn.read()\n except Exception:\n out = getattr(getattr(self.device, 'spawn', None), 'buffer', None)\n\n if 'Write failed' in str(out):\n log.warning('Please Notice failures during reload: %s' % str(out))\n except Exception as e:\n raise Exception(str(e))\n\n # revert the error pattern\n self.device.settings.ERROR_PATTERN = origin_err_pat.copy()\n \n self._reconnect(steps=steps, timeout=timeout)\n\n def reloadLc(self, steps, timeout, lc):\n \"\"\"Do the reload the LC action and reconnect to router\n after reload if lost connection.\n\n Args:\n Mandatory:\n steps (`obj`) : Step object to represent each step taken.\n timeout (`obj`) : \n max_time (int): Maximum wait time for the trigger,\n in second. Default: 180\n interval (int): Wait time between iterations when looping is needed,\n in second. 
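# --- Illustrative sketch (hypothetical helper, not part of this module): the
# reload() above saves device.settings.ERROR_PATTERN, appends to it, and
# restores it by hand. The same guarantee can be expressed as a context
# manager so the pattern list is reverted even when the reload raises:
from contextlib import contextmanager

@contextmanager
def extra_error_patterns(device, patterns):
    # snapshot the current pattern list, extend it, and always restore it
    original = device.settings.ERROR_PATTERN.copy()
    device.settings.ERROR_PATTERN.extend(patterns)
    try:
        yield device
    finally:
        device.settings.ERROR_PATTERN = original

# usage sketch:
# with extra_error_patterns(uut, ["Write failed: Broken pipe"]):
#     uut.reload(dialog=dialog, timeout=reload_timeout)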
Default: 15\n lc (`str`) : LC slot number need to reload.\n\n Returns:\n AETEST Step Result\n\n\n Raises:\n None\n\n Example:\n >>> reloadLc(steps=ats.aetest.Steps(),\n timeout=genie.utils.timeout.Timeout(\n max_time=180,\n interval=15),\n lc = '27')\n \"\"\"\n\n with steps.start('Reload LC {}'.format(lc), continue_=True) as step:\n try:\n self._reloadLc(lc=lc)\n except SubCommandFailure:\n pass\n except Exception as e:\n raise Exception(str(e))\n\n # check if reload the active supervisor LC\n # if so, need reconnection\n try:\n self.device.execute('show clock')\n except:\n self._reconnect(steps=steps, timeout=timeout)\n\n def reloadFabric(self, steps, timeout, fabric):\n \"\"\"\n Args:\n Mandatory:\n steps (`obj`) : Step object to represent each step taken.\n timeout (`obj`) : \n max_time (int): Maximum wait time for the trigger,\n in second. Default: 180\n interval (int): Wait time between iterations when looping is needed,\n in second. Default: 15\n fabric (`str`) : Fabric module number need to reload.\n\n Returns:\n AETEST Step Result\n\n\n Raises:\n None\n\n Example:\n >>> reloadFabric(steps=ats.aetest.Steps(),\n timeout=genie.utils.timeout.Timeout(\n max_time=180,\n interval=15),\n fabric = '1')\n \"\"\"\n\n with steps.start('Reload Fabric {}'.format(fabric), continue_=True) as step:\n try:\n self._reloadFabric(fabric=fabric)\n time.sleep(10)\n except SubCommandFailure:\n pass\n except Exception as e:\n raise Exception(str(e))\n\n # check if reload the active fabric\n # if so, need reconnection\n try:\n self.device.execute('show clock')\n except:\n self._reconnect(steps=steps, timeout=timeout)\n\n def prepare_issu(self, steps, upgrade_image):\n \"\"\"Prepare the device to be ready to perform ISSU.\n\n Args:\n Mandatory:\n steps (`obj`) : Step object to represent each step taken.\n timeout (`obj`) :\n max_time (int): Maximum wait time for the trigger,\n in second. Default: 180\n interval (int): Wait time between iterations when looping is needed,\n in second. Default: 15\n\n Returns:\n AETEST Step Result\n\n Raises:\n None\n\n Example:\n >>> prepare_issu(steps=ats.aetest.Steps(),\n timeout=genie.utils.timeout.Timeout(\n max_time=180,\n interval=15))\n \"\"\"\n\n with steps.start('Prepare device for ISSU', continue_=True) as step:\n try:\n self._prepare_issu(steps=steps, upgrade_image=upgrade_image)\n except SubCommandFailure:\n pass\n except Exception as e:\n raise Exception(str(e))\n\n def perform_issu(self, steps, upgrade_image):\n \"\"\"Perform all the ISSU steps in sequence and reconnect to the device\n\n Args:\n Mandatory:\n steps (`obj`) : Step object to represent each step taken.\n timeout (`obj`) :\n max_time (int): Maximum wait time for the trigger,\n in second. Default: 180\n interval (int): Wait time between iterations when looping is needed,\n in second. Default: 15\n\n Returns:\n AETEST Step Result\n\n Raises:\n None\n\n Example:\n >>> perform_issu(steps=ats.aetest.Steps(),\n timeout=genie.utils.timeout.Timeout(\n max_time=180,\n interval=15))\n \"\"\"\n\n with steps.start('Perform ISSU', continue_=True) as step:\n try:\n self._perform_issu(steps=steps, upgrade_image=upgrade_image)\n except Exception as e:\n raise Exception(str(e))\n\n def _reconnect(self, steps, timeout, sleep_disconnect=30):\n \"\"\"Disconnect and reconnect to router within given timeout.\n\n Args:\n Mandatory:\n steps (`obj`) : Step object to represent each step taken.\n timeout (`obj`) : \n max_time (int): Maximum wait time for the trigger,\n in second. 
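# --- Illustrative sketch (hypothetical helper): reloadLc() and reloadFabric()
# above probe the console with a harmless command and only reconnect when the
# probe fails (i.e. the reloaded module took the session down). Factored out,
# the idiom looks like this:
def reconnect_if_dead(ha, steps, timeout, probe='show clock'):
    try:
        # cheap command just to see whether the console is still alive
        ha.device.execute(probe)
    except Exception:
        # console went away with the reloaded module; rebuild the connection
        ha._reconnect(steps=steps, timeout=timeout)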
Default: 180\n interval (int): Wait time between iterations when looping is needed,\n in second. Default: 15\n Optional:\n sleep_disconnect (`int`) : Break between issue the command and the\n HA action really take place,\n in second. Default: 30\n\n Returns:\n AETEST Step Result\n\n\n Raises:\n None\n\n Example:\n >>> _reconnect(steps=ats.aetest.Steps(),\n timeout=genie.utils.timeout.Timeout(\n max_time=180,\n interval=15))\n \"\"\"\n with steps.start('Disconnecting device {}'.format(self.device.name),\n continue_=True) as step:\n disconnect_device(self.device)\n time.sleep(sleep_disconnect)\n\n with steps.start('Reconnecting to device {}'.format(self.device.name),\n continue_=True) as step:\n temp = TempResult(container=step)\n while timeout.iterate():\n try:\n connect_device(self.device)\n except Exception as e:\n temp.failed('Could not reconnect to the device',\n from_exception=e)\n # incase console is existed but cannot enable the device.\n # conf mode is not active when standby RP is coming up\n try:\n disconnect_device(self.device)\n except:\n pass\n timeout.sleep()\n continue\n temp.passed('Reconnected to the device')\n break\n temp.result()\n", "id": "10108681", "language": "Python", "matching_score": 5.661049842834473, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/libs/abstracted_libs/ha.py" }, { "content": "'''HA IOSXR implement function'''\n\n# import python\nimport time\n\n# Genie Libs\nfrom ..ha import HA as HA_main\n\n# unicon\nfrom unicon.eal.dialogs import Statement, Dialog\nfrom unicon.core.errors import SubCommandFailure\n\n\nclass HA(HA_main):\n\n def _switchover(self):\n \"\"\"Do the switchover action for IOSXR devices.\n\n Raises:\n Unicon errors\n\n Example:\n >>> _switchover()\n \"\"\"\n # unicon\n self.device.execute('redundancy switchover')\n\n def reload(self, steps, timeout):\n \"\"\"Do the reload the whole box action and\n reconnect to router after reload.\n\n Args:\n Mandatory:\n steps (`obj`) : Step object to represent each step taken.\n timeout (`obj`) : \n max_time (int): Maximum wait time for the trigger,\n in second. Default: 180\n interval (int): Wait time between iterations when looping is needed,\n in second. 
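# --- Illustrative sketch (simplified, assumptions noted): callers typically
# obtain the platform-specific HA class through genie's abstraction Lookup,
# so an IOSXR device resolves to the subclass below ('admin reload location
# all') while other platforms keep the generic behaviour. This mirrors the
# Lookup pattern used elsewhere in this package; the exact module path under
# abstracted_libs is an assumption here:
from genie.abstract import Lookup
from genie.libs import sdk

def build_ha(device, filetransfer=None):
    abstract = Lookup.from_device(device, packages={'sdk': sdk})
    return abstract.sdk.libs.abstracted_libs.ha.HA(device=device,
                                                   filetransfer=filetransfer)

# usage sketch:
# ha = build_ha(uut)
# ha.reload(steps=steps, timeout=timeout)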
Default: 15\n\n Raises:\n Exception\n\n Example:\n >>> reload(steps=ats.aetest.Steps(),\n timeout=genie.utils.timeout.Timeout(\n max_time=180,\n interval=15))\n \"\"\"\n with steps.start('Reloading the device {}'.format(self.device.name),\n continue_=True) as step:\n try:\n self.device.execute('admin reload location all')\n except SubCommandFailure:\n pass\n except Exception as e:\n raise Exception(str(e))\n \n self._reconnect(steps=steps, timeout=timeout)\n\n def _reloadLc(self, lc):\n \"\"\"Do the reload LC action for asr1k devices.\n\n Args:\n Mandatory:\n lc (`str`) : LC slot number need to reload.\n\n Raises:\n Unicon errors\n\n Example:\n >>> _reloadLc(lc='27')\n \"\"\"\n # unicon\n dialog = Dialog([\n Statement(pattern=r'Proceed\\[y\\/n\\]\\?.*',\n action='sendline(y)',\n loop_continue=True,\n continue_timer=False),\n Statement(pattern=r'\\(y\\/n\\)\\?.*',\n action='sendline(y)',\n loop_continue=True,\n continue_timer=False)\n ])\n # Execute command to reload LC\n self.device.execute('admin reload location {}'.format(lc), reply=dialog)\n time.sleep(5)\n", "id": "9492153", "language": "Python", "matching_score": 5.677226543426514, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/libs/abstracted_libs/iosxr/ha.py" }, { "content": "'''HA IOSXRv implement function'''\n\n# Genie Libs\nfrom ..ha import HA as HA_main\n\n# unicon\nfrom unicon.eal.dialogs import Statement, Dialog\nfrom unicon.core.errors import SubCommandFailure\n\n\nclass HA(HA_main):\n\n def reload(self, steps, timeout):\n \"\"\"Do the reload the whole box action and\n reconnect to router after reload.\n\n Args:\n Mandatory:\n steps (`obj`) : Step object to represent each step taken.\n timeout (`obj`) : \n max_time (int): Maximum wait time for the trigger,\n in second. Default: 180\n interval (int): Wait time between iterations when looping is needed,\n in second. 
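# --- Illustrative note (toy classes, not the real ones): the XRv class below
# overrides only reload(); switchover() and _reconnect() are inherited through
# the iosxr class from the generic HA. A minimal sketch of the same layering:
class GenericHA:
    def reload(self):
        return 'generic reload'
    def reconnect(self):
        return 'shared reconnect'

class XrvHA(GenericHA):
    def reload(self):  # platform-specific command path, everything else shared
        return 'admin reload location 0/RP0 all'

assert XrvHA().reconnect() == 'shared reconnect'
assert XrvHA().reload().startswith('admin reload')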
Default: 15\n\n Returns:\n AETEST Step Result\n\n\n Raises:\n None\n\n Example:\n >>> reload(steps=ats.aetest.Steps(),\n timeout=genie.utils.timeout.Timeout(\n max_time=180,\n interval=15))\n \"\"\"\n # unicon\n dialog = Dialog([\n Statement(pattern=r'\\[no,yes\\].*',\n action='sendline(y)',\n loop_continue=True,\n continue_timer=False),\n Statement(pattern=r'\\(y\\/n\\)\\?.*',\n action='sendline(y)',\n loop_continue=True,\n continue_timer=False)\n ])\n with steps.start('Reloading the device {}'.format(self.device.name),\n continue_=True) as step:\n try:\n self.device.execute('admin reload location 0/RP0 all', reply=dialog)\n except SubCommandFailure:\n pass\n except Exception as e:\n raise Exception(str(e))\n \n self._reconnect(steps=steps, timeout=timeout)\n\n\n\n\n", "id": "1107995", "language": "Python", "matching_score": 1.5688283443450928, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/libs/abstracted_libs/iosxr/iosxrv/ha.py" }, { "content": "# python\nimport re\nimport logging\nfrom os import path\n\n# unicon\nfrom unicon.eal.dialogs import Statement, Dialog\n\n# Genie\nfrom genie.utils.timeout import Timeout\n\n# Abstract\nfrom genie.abstract import Lookup\n\n# Parser\nfrom genie.libs import parser\nfrom genie.metaparser.util.exceptions import SchemaEmptyParserError\nfrom genie.libs.parser.iosxe.show_platform import ShowRedundancy, \\\n ShowVersion, \\\n ShowPlatform\n# import ats\nfrom ats import aetest\nfrom ats.log.utils import banner\nfrom ats.utils.objects import find, R\n\n# import genie.libs\nfrom genie.libs import sdk\nfrom genie.libs.sdk.triggers.xe_sanity.checkcommands.libs\\\n .iosxe.checkcommands import get_requirements\n\n# module logger\nlog = logging.getLogger(__name__)\n\n\ndef enter_shell(device, timeout=60):\n \"\"\"Enter shell prompt on IOSXE deivces by using command\n \"request platform software system shell switch active R0\"\n\n Args:\n Mandatory:\n device (`obj`) : Device object.\n Optional:\n timeout (`int`) : Command execution timeout.\n\n Returns:\n None\n\n Raises:\n None\n\n Example:\n >>> enter_shell(device=Device())\n \"\"\"\n\n # Run workon.sh on the RP shell\n dialog = Dialog([\n Statement(pattern=r'continue\\? 
+\\[y\\/n\\]',\n action='sendline(y)',\n loop_continue=True,\n continue_timer=False)\n ])\n # store original pattern\n enable_state = device.state_machine.get_state('enable')\n device.origin_pattern = enable_state._pattern\n # Completely remove the enable state pattern and update it with new pattern.\n enable_state._pattern = [r'(.*)\\:\\/]\\$']\n # enter shell\n device.execute('request platform software system shell switch active R0', reply=dialog)\n \n\ndef exit_shell(device):\n \"\"\"Exit shell prompt on IOSXE deivces by using command\n \"exit\"\n\n Args:\n Mandatory:\n device (`obj`) : Device object.\n\n Returns:\n None\n\n Raises:\n None\n\n Example:\n >>> exit_shell(device=Device())\n \"\"\"\n\n enable_state = device.state_machine.get_state('enable')\n\n # revert back the pattern\n enable_state._pattern = device.origin_pattern\n\n # get out of shell\n device.execute('exit')\n\n\ndef save_device_information(device, **kwargs):\n \"\"\"Save running-configuration to startup-config.\n This is for general IOSXE devices.\n\n Args:\n Mandatory:\n device (`obj`) : Device object.\n\n Raises:\n None\n\n Example:\n >>> save_device_information(device=Device())\n \"\"\"\n # configure config-register\n device.configure('config-register 0x2102')\n\n # save all configuration to startup for all slots \n dialog = Dialog([\n Statement(pattern=r'Destination +filename +\\[.*\\].*',\n action='sendline()',\n loop_continue=True,\n continue_timer=False)\n ])\n device.execute('copy running-config nvram:startup-config',\n reply=dialog)\n device.execute('write memory')\n\ndef stack_ha_redundancy_state(device, timeout, platform_pts=None):\n \"\"\"Stack HA Redundancy SSO State Check.\n\n Args:\n Mandatory:\n device (`obj`) : Device object.\n\n Returns:\n None\n\n Raises:\n Exception: When show redundancy parser is errored\n AssertionError: When expected status is not reached\n\n Example:\n >>> early_stack_ha_redundancy_state(device=Device('R1'),\n timeout=timeout)\n \"\"\"\n # check first from pts to see if the redundancy is ready\n if platform_pts and hasattr(platform_pts, 'redundancy_mode') and \\\n 'sso' in platform_pts.redundancy_mode and \\\n hasattr(platform_pts, 'redundancy_communication') and \\\n platform_pts.redundancy_communication:\n log.info('System redundancy mode is sso and redundancy_communication is enabled')\n pass_flag = True\n else:\n # learn it from show version in loop\n while timeout.iterate():\n\n # initial flag\n pass_flag = True\n\n # get output from show redundancy\n try:\n output = ShowRedundancy(device).parse()\n except Exception as e:\n raise Exception('Cannot get output from \"show redundancy\"',\n from_exception=e)\n\n # Check if HA redundancy status is sso\n try:\n if 'sso' not in output['red_sys_info']['conf_red_mode'] or \\\n 'Up' not in output['red_sys_info']['communications'] or \\\n 'Duplex' not in output['red_sys_info']['hw_mode']:\n log.warning('The System does not reach to \"sso\"')\n timeout.sleep()\n pass_flag = False\n continue\n for slot in output['slot']:\n if 'ACTIVE' not in output['slot'][slot]['curr_sw_state'] and \\\n 'STANDBY HOT' not in output['slot'][slot]['curr_sw_state']:\n log.warning('The {} does not reach to \"ACTIVE|STANDBY HOT\"'\n .format(slot))\n timeout.sleep()\n pass_flag = False\n continue\n except Exception as e:\n log.warning(e)\n timeout.sleep()\n pass_flag = False\n raise AssertionError('Redundancy status does not reach to \"SSO\"',\n from_exception=e)\n continue\n break\n\ndef process_check(device):\n \"\"\"Verify that all critical process are 
up and running\n\n Args:\n Mandatory:\n device (`obj`) : Device object.\n\n Returns:\n None\n\n Raises:\n Exception: Cannot enter into shell get the output\n AssertionError: When expected status is not reached\n AttributeError: Process is not in the output\n\n Example:\n >>> process_check(device=Device())\n \"\"\"\n \n def parse_shell_process_output(output):\n \"\"\"parse the output from shell command \"workon.sh RP 0 status\"\n\n Args:\n Mandatory:\n output (`str`) : Device output.\n\n Returns:\n Dictionary\n\n Raises:\n None\n\n Example:\n >>> parse_shell_process_output(output=\n '''run_caf_sh unavail (respawn:global:optional)''')\n >>> {'run_caf_sh': {'state': 'up',\n 'options': 'respawn:global:optional'}}\n \"\"\"\n # initial return dictionary\n ret_dict = {}\n\n # set pattern\n p = re.compile(r'^(?P<name>[\\w\\_\\-]+) +(?P<state>(up|unavail)) +'\n '\\((?P<options>[\\w\\:\\_\\-]+)\\)$')\n\n for line in output.splitlines():\n line = line.strip()\n # auto_upgrade_client_sh up (respawn:global:optional)\n # run_caf_sh unavail (respawn:global:optional)\n m = p.match(line)\n if m:\n group = m.groupdict()\n name = group.pop('name')\n ret_dict.setdefault(name, {}).update(\n {k:v for k,v in group.items()})\n return ret_dict\n\n # get into shell\n try:\n enter_shell(device)\n except Exception as e:\n raise Exception(e)\n\n # get output from shell\n try:\n out = device.execute('workon.sh RP 0 status', timeout=60)\n except Exception as e:\n raise Exception(e)\n\n # exit shell\n try:\n exit_shell(device)\n except Exception as e:\n raise Exception(e)\n\n # parse shell output\n ret = parse_shell_process_output(out)\n if not ret:\n raise Exception('No Processes found in the output')\n\n # check below processes\n process_list = ['fman_rp', 'hman', 'linux_iosd_image', 'sif_mgr',\n 'periodic_sh', 'platform_mgr', 'plogd', 'psd',\n 'sort_files_by_inode_sh','stack_mgr']\n\n # check each process's status is up\n for process in process_list:\n if process in ret:\n if 'up' not in ret[process]['state']:\n log.warning('Process {p} is {s}, should be up!!!'\n .format(p=process, s=ret[process]['state']))\n raise AssertionError('Process {p} is {s}, should be up!!!'\n .format(p=process, s=ret[process]['state']))\n else:\n log.info('Process {p} is {s} as expected.'\n .format(p=process, s=ret[process]['state']))\n else:\n log.warning('Process {p} is not found'.format(p=process))\n raise AttributeError('Process {p} is not found'.format(p=process))\n\n for process in ['btrace_rotate_sh', 'btrace_rotate', 'btman', 'btrace_manager']:\n try:\n if 'up' not in ret[process]['state']:\n raise AssertionError('Process {p} is {s}, should be up!!!'\n .format(p=process, s=ret[process]['state']))\n else:\n btrace_failed = False\n log.info('Process {p} is {s} as expected.'\n .format(p=process, s=ret[process]['state']))\n break\n except Exception:\n btrace_failed = True\n continue\n\n # check btrace process states\n if btrace_failed:\n log.warning('Process btrace_rotate_sh is not found in the output')\n raise AttributeError('Process btrace_rotate_sh is not found in the output')\n\ndef chasfs_properties(device, timeout):\n \"\"\"Verify that Chasfs properties are updated by Stack Manager\n\n Args:\n Mandatory:\n device (`obj`) : Device object.\n\n Returns:\n None\n\n Raises:\n AssertionError: When expected status is not reached\n AttributeError: Active/Standby switch missing from the list\n Exception: Cannot get into shell\n\n Example:\n >>> chasfs_properties(device=Device())\n \"\"\"\n # get full stack/active stacks/active/standby\n if 
device.active and device.standby:\n # print out the expected stack information\n log.info('Expected Stack State:\\n'\n 'Master(Active RP): {}\\nStandby: {}\\nMembers:{}'\n .format(device.active, device.standby,device.members))\n else:\n raise AttributeError('There is no Active/Standby '\n 'switches on device {}'.format(device.name))\n\n # get into shell\n try:\n enter_shell(device)\n except Exception as e:\n raise Exception(e)\n\n # create check mapping for what should check for each command\n # {command: check_pattern}\n switch_check_mapping = {\n 'mode': {\n 'command': 'echo -n chassis_role{n}:; cat /tmp/chassis/{n}/chassis_role; echo;',\n 'pattern': 'chassis_role{n}\\:(?P<value>{role})'\n },\n 'state': {\n 'command': 'echo -n chassis_state{n}:; cat /tmp/chassis/{n}/state; echo;',\n 'pattern': 'chassis_state{n}\\:(?P<value>compatible)'\n },\n 'MAC address': {\n 'command': 'echo -n macaddr{n}:; cat /tmp/chassis/{n}/macaddr; echo;',\n 'pattern': 'macaddr{n}: *(?P<value>{mac_p})'\n }\n }\n system_check_mapping = {\n 'Local comm_up': {\n 'command': 'echo -n local_comm_up:;cat /tmp/chassis/local/stack_mgr/comm_up; echo;',\n 'pattern': 'local_comm_up:(?P<value>success)' \n },\n 'active controller': {\n 'command': 'echo -n local_rp_mastership_global-active-rp:;cat /tmp/chassis/'\\\n 'local/rp/chasfs/mastership/global-active-rp; echo;',\n 'pattern': 'local_rp_mastership_global\\-active\\-rp:(?P<value>[0-9]\\/rp)' \n },\n 'standby controller': {\n 'command': 'echo -n local_rp_mastership_global-standby-rp:;cat /tmp/chassis/'\\\n 'local/rp/chasfs/mastership/global-standby-rp; echo;',\n 'pattern': 'local_rp_mastership_global\\-standby\\-rp:(?P<value>[0-9]\\/rp)'\n },\n 'standby': {\n 'command': 'echo -n local_rp_mastership_standby-rp-ehsa-state:;cat /tmp/'\\\n 'chassis/local/rp/chasfs/mastership/standby-rp-ehsa-state; echo;',\n 'pattern': 'local_rp_mastership_standby\\-rp\\-ehsa\\-state:(?P<value>standby)' \n },\n 'sso-ready': {\n 'command': 'echo -n local_rp_mastership_standby-rp-state:;cat /tmp/chassis/local/'\\\n 'rp/chasfs/mastership/standby-rp-state; echo;',\n 'pattern': 'local_rp_mastership_standby\\-rp\\-state:(?P<value>sso\\-ready)' \n } \n }\n mac_pattern = '(?:(?:[0-9A-Fa-f]){2}[:-]){5}(?:[0-9A-Fa-f]){2}'\n\n while timeout.iterate():\n\n # initial flag\n pass_flag = True\n\n # Check active|standby|members: role, state, mac\n for key, item in switch_check_mapping.items():\n\n # compose the slots dictionary\n slots = {'controller': device.active, 'controller': device.standby}\n slots.update({'member': v for v in device.members})\n\n # check each slots\n for role, switch in slots.items():\n # get the output from shell \n try:\n out = device.execute(item['command'].format(n=switch), timeout=60)\n out = out.splitlines()[-2].strip()\n except Exception:\n out = ''\n\n # match the pattern\n if 'MAC' in key:\n p = re.compile(r'{}'.format(item['pattern']\\\n .format(n=switch, mac_p=mac_pattern)))\n elif 'mode' in key:\n p = re.compile(r'{}'.format(item['pattern']\\\n .format(n=switch, role=role)))\n else:\n p = re.compile(r'{}'.format(item['pattern']\\\n .format(n=switch)))\n m = p.match(out)\n if m:\n log.info('{k} is as expected: {v}'\n .format(k=key, v=m.groupdict()['value']))\n else:\n log.warning('{k} is not as expected.\\nexpected: {e}\\npoll value: {v}'\n .format(k=key, e=p.pattern, v=out))\n pass_flag = False\n break\n\n # Check system: state, local common_up, controller, sso-ready\n for key, item in system_check_mapping.items():\n # get the output from shell\n try:\n out = 
device.execute(item['command'], timeout=60)\n out = out.splitlines()[-2].strip()\n except Exception:\n out = ''\n p = re.compile(r'{}'.format(item['pattern']))\n m = p.match(out)\n if m:\n log.info('{k} is as expected: {v}'\n .format(k=key, v=m.groupdict()['value']))\n else:\n log.warning('{k} is not as expected.\\nexpected: {e}\\npoll value: {v}'\n .format(k=key, e=p.pattern, v=out))\n pass_flag = False\n break\n if pass_flag:\n break\n else: \n timeout.sleep()\n continue\n\n # exit shell\n try:\n exit_shell(device)\n except Exception as e:\n raise Exception(e)\n\n if not pass_flag:\n raise AssertionError('Chasfs state Cannot stabilized')\n\n\ndef learn_system(device, steps, platform_pts=None):\n \"\"\"Learn and store the system properties\n\n Args:\n testbed (`obj`): Testbed object\n\n Returns:\n None\n\n Raises:\n pyATS Results\n \"\"\"\n\n # learn show version\n if not platform_pts:\n with steps.start(\"Store image type/router type from 'show version' on {}\"\n .format(device.name)) as step:\n try:\n output = ShowVersion(device=device).parse()\n device.image_type = output['version']['image_type']\n device.router_type = output['version']['rtr_type']\n except Exception as e:\n log.warning(e)\n step.passx('Cannot get required router info on {}'.format(device.name))\n\n log.info('Image type: {}\\nRouter type: {}'\n .format(device.image_type, device.router_type))\n\n # learn show version\n with steps.start(\"Store switches info from 'show platform' on {}\"\n .format(device.name)) as step:\n\n req = {'active': [['slot', '(?P<switch>.*)', 'rp', '(.*)', 'role', 'Active']],\n 'standby': [['slot', '(?P<switch>.*)', 'rp', '(.*)', 'role', 'Standby']],\n 'members': [['slot', '(?P<switch>.*)', 'rp', '(.*)', 'role', 'Member']]}\n\n try:\n output = ShowPlatform(device=device).parse()\n ret = get_requirements(requirements=req, output=output)\n device.active = ret['active'][0]['switch']\n device.standby = ret['standby'][0]['switch']\n device.members = [i['switch'] for i in ret['members']]\n except Exception as e:\n log.warning(e)\n step.passx('Cannot get required router info on {}'.format(device.name))\n\n log.info('Active Switch: {}\\nStandby Switch: {}\\nMember Switch: {}'\n .format(device.active, device.standby, device.members))\n else:\n with steps.start(\"Store image type/router type from PTS on {}\"\n .format(device.name)) as step:\n try:\n # device.image_type = platform_pts.image_type\n device.image_type = 'developer image'\n device.router_type = platform_pts.rtr_type\n except Exception as e:\n log.warning(e)\n step.passx('Cannot get required router info on {}'.format(device.name))\n\n log.info('Image type: {}\\nRouter type: {}'\n .format(device.image_type, device.router_type))\n\n with steps.start(\"Store switches info from PTS on {}\"\n .format(device.name)) as step:\n\n req = {'active': [['slot', 'rp', '(?P<switch>.*)', 'swstack_role', 'Active'],\n ['slot', 'rp', '(?P<switch>.*)', 'state', 'Ready']],\n 'standby': [['slot', 'rp', '(?P<switch>.*)', 'swstack_role', 'Standby'],\n ['slot', 'rp', '(?P<switch>.*)', 'state', 'Ready']],\n 'members': [['slot', 'rp', '(?P<switch>.*)', 'swstack_role', 'Member'],\n ['slot', 'rp', '(?P<switch>.*)', 'state', 'Ready']]}\n\n try:\n ret = get_requirements(requirements=req, output=platform_pts)\n device.active = ret['active'][0]['switch']\n device.standby = ret['standby'][0]['switch']\n device.members = [i['switch'] for i in ret['members']]\n except Exception as e:\n log.warning(e)\n step.passx('Cannot get required switches info on {}'.format(device.name))\n\n 
log.info('Active Switch: {}\nStandby Switch: {}\nMember Switch: {}'\n .format(device.active, device.standby, device.members))\n\ndef get_default_dir(device):\n \"\"\" Get the default directory of this device\n\n Args:\n Mandatory:\n device (`obj`) : Device object.\n\n Returns:\n default_dir (`str`): Default directory of the system\n\n Raises:\n Exception\n\n Example:\n >>> get_default_dir(device=device)\n \"\"\"\n\n try:\n lookup = Lookup.from_device(device)\n parsed_dict = lookup.parser.show_platform.Dir(device=device).parse()\n default_dir = parsed_dict['dir']['dir'].replace('/', '')\n except SchemaEmptyParserError as e:\n raise Exception(\"No output when executing 'dir' command\") from e\n except Exception as e:\n raise Exception(\"Unable to execute or parse 'dir' command\") from e\n\n # Return default_dir to caller\n log.info(\"Default directory on '{d}' is '{dir}'\".format(d=device.name,\n dir=default_dir))\n return default_dir\n\n\n@aetest.subsection\ndef check_xe_sanity_device_ready(self, testbed, steps,\n max_time=60, interval=10):\n \"\"\"Check redundancy status, critical processes status and chassis properties\n\n Args:\n testbed (`obj`): Testbed object\n steps (`obj`): aetest steps object\n\n Returns:\n None\n\n Raises:\n pyATS Results\n \"\"\" \n log.info(banner('Check redundancy status,\n'\n 'critical processes status,\n'\n 'chassis properties'))\n # get uut\n devices = testbed.find_devices(alias='uut')\n\n for uut in devices:\n lookup = Lookup.from_device(uut)\n # get platform pts\n \n platform_pts = self.parameters.get('pts', {}).get('platform', {}).get('uut', None)\n\n # check redundancy\n with steps.start(\"Perform Redundancy Check on device {} - \"\n \"to check if device reaches 'SSO' status\".format(uut.name)) as step:\n\n # create timeout object\n timeout = Timeout(max_time=int(max_time),\n interval=int(interval))\n try:\n lookup.sdk.libs.abstracted_libs\\\n .subsection.stack_ha_redundancy_state(\n device=uut, timeout=timeout, platform_pts=platform_pts)\n except Exception as e:\n step.passx('Redundancy state SSO not reached in the stack',\n from_exception=e)\n\n if hasattr(uut, 'workers'):\n with uut.allocate() as worker:\n # check Process\n with steps.start(\"Verify that all critical processes are up \"\n \"and running on device {}\".format(uut.name)) as step:\n try:\n lookup.sdk.libs.abstracted_libs\\\n .subsection.process_check(device=worker)\n except Exception as e:\n step.passx('Processes verification test failed')\n\n # check Chasfs\n with steps.start(\"Verify that Chasfs properties are updated \"\n \"by Stack Manager on device {}\".format(uut.name)) as step:\n try:\n lookup.sdk.libs.abstracted_libs\\\n .subsection.chasfs_properties(device=worker, timeout=timeout)\n except Exception as e:\n step.passx('Chasfs verification test failed\n{}'.format(e))\n else:\n # check Process\n with steps.start(\"Verify that all critical processes are up \"\n \"and running on device {}\".format(uut.name)) as step:\n try:\n lookup.sdk.libs.abstracted_libs\\\n .subsection.process_check(device=uut)\n except Exception as e:\n step.passx('Processes verification test failed')\n\n # check Chasfs\n with steps.start(\"Verify that Chasfs properties are updated \"\n \"by Stack Manager on device {}\".format(uut.name)) as step:\n try:\n lookup.sdk.libs.abstracted_libs\\\n .subsection.chasfs_properties(device=uut, timeout=timeout)\n except Exception as e:\n step.passx('Chasfs verification test failed\n{}'.format(e))\n\n\ndef configure_replace(device, file_location, timeout=60):\n    
device.execute('configure replace {} force'.format(file_location), timeout=timeout)\n", "id": "9418609", "language": "Python", "matching_score": 2.0490918159484863, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/libs/abstracted_libs/ios/subsection.py" }, { "content": "'''Common implementation for ProcessRestart Cli trigger'''\n\nimport time\nimport logging\n\nfrom ats import aetest\n\nfrom genie.utils.diff import Diff\nfrom genie.libs.sdk.triggers.template.processrestart import \\\n TriggerProcessRestart as ProcessRestartTemplate\n\nlog = logging.getLogger(__name__)\n\nclass TriggerProcessRestart(ProcessRestartTemplate):\n '''Trigger class for ProcessCliRestart action'''\n\n @aetest.setup\n def verify_prerequisite(self, uut, abstract, steps):\n '''Learn Ops object and verify the requirements.\n\n If the requirements are not satisfied, then skip to the next\n testcase.\n\n Args:\n uut (`obj`): Device object.\n abstract (`obj`): Abstract object.\n steps (`step obj`): aetest step object\n\n Returns:\n None\n\n Raises:\n pyATS Results\n '''\n\n self.lib = abstract.sdk.triggers.processrestart.\\\n libs.processrestart.ProcessRestartLib(\\\n device=uut,\n process=self.process,\n abstract=abstract,\n verify_exclude=self.verify_exclude,\n obj=self)\n\n try:\n self.lib.process_information()\n except Exception as e:\n self.skipped(\"Issue getting information about '{p}' \"\n \"process\".format(p=self.process), from_exception=e,\n goto=['next_tc'])\n\n self.print_local_verifications()\n\n @aetest.test\n def restart(self, uut, abstract, steps, repeat_restart=1, sleep_restart=0):\n '''Restart the process using the configured method\n\n Args:\n uut (`obj`): Device object.\n abstract (`obj`): Abstract object.\n steps (`step obj`): aetest step object\n\n Returns:\n None\n\n Raises:\n pyATS Results\n '''\n\n try:\n if self.method == 'crash':\n if hasattr(self.lib, \"crash_restart\"):\n output = self.lib.crash_restart()\n else:\n self.skipped(\"ProcessCrashRestart is not supported on \"\n \"the device type: '{p}'\".format(p=uut.type))\n elif self.method == 'cli':\n if hasattr(self.lib, \"cli_restart\"):\n output = self.lib.cli_restart()\n else:\n self.skipped(\"ProcessCliRestart is not supported on \"\n \"the device type: '{p}'\".format(p=uut.type))\n except Exception as e:\n self.failed(\"Failed to restart '{p}' \".\\\n format(p=self.process), from_exception=e)\n\n if sleep_restart:\n log.info(\"Sleeping for {s} before next \"\n \"restart\".format(s=sleep_restart))\n time.sleep(sleep_restart)\n\n @aetest.test\n def verify_restart(self, uut, abstract, steps, timeout, repeat_restart=1):\n '''Verify that the process restarted correctly and\n as expected\n\n Args:\n uut (`obj`): Device object.\n abstract (`obj`): Abstract object.\n steps (`step obj`): aetest step object\n\n Returns:\n None\n\n Raises:\n pyATS Results\n '''\n try:\n if self.method == 'crash':\n output = self.lib.verify_restart(repeat_restart=repeat_restart,\n steps=steps, timeout=timeout)\n elif self.method == 'cli':\n output = self.lib.verify_restart(repeat_restart=repeat_restart,\n steps=steps, timeout=timeout)\n except Exception as e:\n self.failed(\"Issue getting information about '{p}' \"\n \"process\".format(p=self.process), from_exception=e)\n", "id": "11720178", "language": "Python", "matching_score": 1.7833912372589111, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/processrestart/processrestart.py" }, { "content": "'''Common implementation for clear triggers'''\n\n# python import\nimport time\nimport logging\n\n# 
ats import\nfrom ats import aetest\nfrom ats.utils.objects import R\n\n# Genie Libs import\nfrom genie.libs.sdk.triggers.template.clear import TriggerClear\n\nlog = logging.getLogger(__name__)\n\n\nclass TriggerClear(TriggerClear):\n '''Trigger class for Clear action'''\n\n @aetest.setup\n def verify_prerequisite(self, uut, abstract, steps, timeout):\n '''Learn Ops object and verify the requirements.\n\n If the requirements are not satisfied, then skip to the next\n testcase.\n\n Args:\n uut (`obj`): Device object.\n abstract (`obj`): Abstract object.\n steps (`step obj`): aetest step object\n timeout (`timeout obj`): Timeout Object\n\n Returns:\n None\n\n Raises:\n pyATS Results\n '''\n\n try:\n self.pre_snap = self.mapping.learn_ops(device=uut,\n abstract=abstract,\n steps=steps,\n timeout=timeout)\n except Exception as e:\n self.errored(\"Section failed due to: '{e}'\".format(e=e))\n\n for stp in steps.details:\n if stp.result.name == 'skipped':\n self.skipped('Cannot learn the feature', goto=['next_tc'])\n\n self.print_local_verifications()\n\n @aetest.test\n def clear(self, uut):\n '''Actual clear action by using cli\n\n Args:\n uut (`obj`): Device object\n\n Returns:\n None\n\n Raises:\n Failed: When cli command could not be sent correctly to the\n device\n '''\n # Take a timestamp\n # This timestamp is used to calculate amount of time that learn_poll\n # takes\n self.pre_time = time.time()\n \n # replace the regexp when there are some\n for cmd in self.clear_cmd:\n if '(?P' in cmd:\n req = self.mapping._path_population([cmd.split()], uut)\n else:\n req = [cmd]\n for cmd in req:\n # combine command\n if isinstance(cmd, list):\n exec_cmd = ''\n for item in cmd:\n exec_cmd += '%s ' % str(item)\n cmd = exec_cmd\n\n # execute commands \n log.info(\"Execute Clear Command '{}'\".format(cmd))\n try:\n uut.execute(cmd)\n except Exception as e:\n self.failed(\"Issue while sending '{c}'\".format(c=cmd),\n from_exception=e)\n\n @aetest.test\n def verify_clear(self, uut, abstract, steps, timeout=None):\n '''Compares the snapshot from save_snapshot and this section,\n then verifies the clear action if needed\n\n Args:\n uut (`obj`): Device object\n\n Returns:\n None\n\n Raises:\n Failed: When state of the device did not revert to what it was\n before clear command\n Errored: When looking for particular mapping_extra_args which\n does not exists\n '''\n # update the verify_ops callable with required information\n for req in self.mapping._verify_ops_dict.values():\n for item in req.get('requirements', {}):\n if not callable(item[0]):\n continue\n ret = item[0]\n ret.keywords.update({'uut': uut, 'pre_time': self.pre_time})\n\n try:\n self.mapping.verify_ops(device=uut, abstract=abstract,\n steps=steps)\n except Exception as e:\n self.failed(\"The clear verification has failed\", from_exception=e)\n\n\ndef verify_clear_callable(ops, uut, pre_time, verify_func, mapping, **kwargs):\n\n # If no verify attribute\n if not kwargs.get('verify_func_args', None):\n return\n\n # in case of inherit, introduce local var\n # for holding class glob vars\n verify_func_args = kwargs['verify_func_args'].copy()\n \n # populate r_object path\n reqs = []\n if 'r_obj' in verify_func_args:\n reqs.extend(mapping._path_population(verify_func_args['r_obj'], uut))\n\n # store the populate path back to self.verify_func_args as R object\n extra_args = {}\n if reqs:\n verify_func_args['r_obj'] = []\n for req in reqs:\n verify_func_args['r_obj'].append(R(req))\n\n # diff the pre and post time to compare the uptime\n # + 1 is 
fuzzy time that may diff from routers timing and script\n compare_time = int(time.time() - pre_time + 1)\n\n # update the mapping extra_args with variables\n for key, value in verify_func_args.items():\n if isinstance(value, str):\n # get the value from the inital ops to compare\n if value.startswith('(?P'):\n value = mapping._populate_path([[value]], ops.device, mapping.keys)\n verify_func_args[key] = value[0][0]\n else:\n if locals().get(value) or locals().get(value) == 0:\n verify_func_args[key] = locals().get(value)\n else:\n verify_func_args[key] = value\n\n # compare the attributes that may changed as expected\n if 'r_obj' in verify_func_args:\n back_up = verify_func_args['r_obj'].copy()\n for r in back_up:\n verify_func_args['r_obj'] = [r]\n verify_func(**verify_func_args)\n", "id": "6839856", "language": "Python", "matching_score": 4.401546955108643, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/clear/clear.py" }, { "content": "'''Common implementation for disable enable triggers for\n extra handling when disable the features'''\n\n# import python\nimport time\nimport logging\n\n# import ats\nfrom ats import aetest\nfrom ats.utils.objects import find, R\n\n# Genie Libs\nfrom genie.libs.sdk.triggers.template.disableenable import \\\n TriggerDisableEnableReqHandler\nfrom genie.libs.sdk.libs.utils.triggeractions import CheckFeatureStatus\n\nlog = logging.getLogger(__name__)\n\n\nclass TriggerDisableEnableReqHandler(TriggerDisableEnableReqHandler):\n '''Trigger class for DisableEnable extra handling\n before/after disable/enable action'''\n\n @aetest.setup\n def verify_prerequisite(self, uut, abstract, steps, timeout):\n '''Learn Ops object and verify the requirements.\n\n If the requirements are not satisfied, then skip to the next\n testcase.\n\n Args:\n uut (`obj`): Device object.\n abstract (`obj`): Abstract object.\n steps (`step obj`): aetest step object\n timeout (`timeout obj`): Timeout Object\n\n Returns:\n None\n\n Raises:\n pyATS Results\n '''\n\n try:\n self.pre_snap = self.mapping.learn_ops(device=uut,\n abstract=abstract,\n steps=steps,\n timeout=timeout)\n except Exception as e:\n self.errored(\"Section failed due to: '{e}'\".format(e=e))\n\n for stp in steps.details:\n if stp.result.name == 'skipped':\n self.skipped('Cannot learn the feature', goto=['next_tc'])\n\n self.print_local_verifications()\n\n @aetest.test\n def save_configuration(self, uut, method, abstract):\n '''Save current configuration\n\n Can be either done via TFTP or checkpoint feature (If exists for OS)\n\n Args:\n uut (`obj`): Device object.\n method (`str`): Save method to save the configuration.\n For now accpet \"local\" and \"checkpoint\"\n\n Returns:\n None\n\n Raises:\n pyATS Results\n '''\n self.lib = abstract.sdk.libs.abstracted_libs.restore.Restore()\n try:\n self.lib.save_configuration(uut, method, abstract)\n except Exception as e:\n self.failed('Saving the configuration failed', from_exception=e,\n goto=['next_tc'])\n\n @aetest.test\n def pre_handle_dependency(self, uut):\n ''' Handle the required steps before can disable the feature\n Ex. 
LACP feature needs remove the port-channel interfaces\n before disable the feaure\n\n Args:\n uut (`obj`): Device object.\n\n Returns:\n None\n\n Raises:\n None\n '''\n log.info('Handle feature dependencies *before* disabling feature')\n if hasattr(self, 'handler'):\n if 'pre' in self.handler:\n try:\n self.handler['pre'](uut)\n except Exception as e:\n self.failed('Failed to do pre handle due to',\n from_exception=e)\n\n @aetest.test\n def disable(self, uut, abstract, steps):\n ''' Disable the feature on the uut device\n\n Args:\n uut (`obj`): Device object.\n abstract (`obj`): Abstract object.\n steps (`step obj`): aetest step object\n\n Returns:\n None\n\n Raises:\n pyATS Results\n '''\n\n try:\n self.mapping.unconfigure(device=uut, abstract=abstract, steps=steps)\n except Exception as e:\n self.failed('Failed to disable the feature', from_exception=e)\n\n @aetest.test\n def verify_disable(self, uut, abstract):\n ''' Verify if the feature is disabled\n\n Args:\n uut (`obj`): Device object.\n abstract (`obj`): Abstract object.\n\n Returns:\n None\n\n Raises:\n Failed: The feature is not disabled as expected\n '''\n # check if the feature is disabled\n # Not using Ops but parsers because two reason\n # 1) disable/enable feature and 'show feature' only support by NXOS\n # 2) 'show feature' is more accurate command to check the feature\n # status rather then Ops commands.\n try:\n CheckFeatureStatus.check_feature_status(device=uut, expect='disabled',\n feature_name=self.feature_name,\n abstract=abstract)\n except Exception as e:\n self.failed('{n} is not disabled'\n .format(n=self.feature_name), from_exception=e)\n\n @aetest.test\n def post_handle_dependency(self, uut):\n ''' Handle the lost configurations which not in the running-config\n after enable the feature.\n Ex. 
Fabricpath feature needs delete profiles\n after enable the feaure\n\n Args:\n uut (`obj`): Device object.\n\n Returns:\n None\n\n Raises:\n None\n '''\n log.info('Handle feature dependencies after disabling feature')\n if hasattr(self, 'handler') and 'post' in self.handler:\n try:\n self.handler['post'](uut)\n except Exception as e:\n self.failed('Failed to do post handle due to',\n from_exception=e)\n\n @aetest.test\n def restore_configuration(self, uut, method, abstract):\n '''Rollback the configuration\n\n Can be either done via TFTP or checkpoint feature (If exists for OS)\n\n Args:\n uut (`obj`): Device object.\n method (`str`): Save method from trigger datafile.\n Only accpet \"local\" and \"checkpoint\"\n\n Returns:\n None\n\n Raises:\n pyATS Results\n '''\n try:\n self.lib.restore_configuration(uut, method, abstract)\n except Exception as e:\n self.failed('Failed to restore the configuration', from_exception=e)\n\n @aetest.test\n def verify_initial_state(self, uut, abstract, steps):\n '''Verify if the feature is enabled And\n Verify the configuration and device state is back to what\n it was at the begining of the trigger\n\n Args:\n uut (`obj`): Device object.\n steps (`step obj`): aetest step object\n\n Returns:\n None\n\n Raises:\n pyATS Results\n '''\n try:\n CheckFeatureStatus.check_feature_status(device=uut, expect='enabled',\n feature_name=self.feature_name,\n abstract=abstract)\n except Exception as e:\n self.failed('{n} is not enabled'\n .format(n=self.feature_name), from_exception=e)\n\n try:\n self.post_snap = self.mapping.verify_with_initial(\\\n device=uut,\n abstract=abstract,\n steps=steps)\n except Exception as e:\n self.failed(\"Failed to restore\", from_exception=e)\n", "id": "2836907", "language": "Python", "matching_score": 3.915531635284424, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/disableenable/disableenablereqhandler.py" }, { "content": "# python\nimport logging\n\n# abstract\nfrom genie.abstract import Lookup\n\n# Genie Libs\nfrom genie.libs import sdk, parser\n\nlog = logging.getLogger(__name__)\n\n\nclass Restore(object):\n\n def __init__(self, device=None):\n self.abstract = Lookup.from_device(device, packages={'sdk': sdk, 'parser': parser})\n self.lib = self.abstract.sdk.libs.abstracted_libs.restore.Restore()\n\n def save_configuration(self, device, method, abstract, default_dir):\n try:\n self.lib.save_configuration(device, method, self.abstract, default_dir)\n except Exception as e:\n self.failed('Saving the configuration failed', from_exception=e)\n\n def restore_configuration(self, device, method, abstract, iteration=10,\n interval=60):\n try:\n self.lib.restore_configuration(device, method, self.abstract)\n except Exception as e:\n self.failed('Restoring the configuration failed', from_exception=e)\n\n def create_delete_checkpoint(self, device, name, action):\n self.lib.create_delete_checkpoint(device, name, action)\n\n def rollback_checkpoint(self, device, name):\n self.lib.rollback_checkpoint(device, name)\n\n def check_checkpoint_status(self, device, name, abstract, expect='create'):\n self.lib.check_checkpoint_status(device, name, self.abstract, expect)", "id": "3228854", "language": "Python", "matching_score": 0.8136244416236877, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/libs/abstracted_libs/restore.py" }, { "content": "# python\nimport logging\n\nlog = logging.getLogger(__name__)\n\n\nclass ClearLogging(object):\n\n def clear_logging(self, device):\n try:\n device.execute('clear logging')\n 
except Exception as e:\n self.failed('Failed to clear logging', from_exception=e)\n", "id": "11011383", "language": "Python", "matching_score": 2.6368956565856934, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/libs/abstracted_libs/clear_logging.py" }, { "content": "# python\nimport logging\n\nlog = logging.getLogger(__name__)\n\n\nclass ShowLogging(object):\n\n def show_logging(self, device):\n try:\n return device.execute('show logging')\n except Exception as e:\n self.failed('Failed to show logging', from_exception=e)\n", "id": "12136316", "language": "Python", "matching_score": 1, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/libs/abstracted_libs/show_logging.py" }, { "content": "from .device import *\n", "id": "5606426", "language": "Python", "matching_score": 1, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/device/tgen/__init__.py" }, { "content": "from .device import *", "id": "5087965", "language": "Python", "matching_score": 0.037907861173152924, "max_stars_count": 0, "path": "pkgs/conf-pkg/src/genie/libs/conf/device/ixiarestpy/__init__.py" }, { "content": "\"\"\"Common utils functions for VIRL STD\"\"\"\n\n# Python\nimport logging\n\n# Unicon\nfrom unicon.core.errors import SubCommandFailure\n\nlog = logging.getLogger(__name__)\n\n\ndef launch_simulation(\n device, simulation_name, simulation_data, connection_alias=\"rest\"\n):\n \"\"\"launch simulation\n\n Args:\n device (`obj`): Device object\n simulation_name (`str`): simulation name\n simulation_data (`str`): simulation data \n connection_alias (`str`): connection_alias\n\n Returns:\n result: result of launching simulation\n \n Raise:\n None\n \"\"\"\n try:\n device.connect(via=connection_alias)\n except Exception as e:\n log.info(\"Failed to connect to device {}: {}\".format(device.name, e))\n\n try:\n out = device.post(\n \"/simengine/rest/launch?session={}\".format(simulation_name),\n simulation_data,\n )\n except Exception as e:\n out = e\n\n return out\n\n\ndef stop_simulation(device, simulation_name, connection_alias=\"rest\"):\n \"\"\"Stop simulation\n\n Args:\n device (`obj`): Device object\n simulation_name ('str'): simulation name\n connection_alias (`str`): connection alias\n\n Returns:\n\n result: result of GET command\n \n Raise:\n None\n \"\"\"\n try:\n device.connect(via=connection_alias)\n except Exception as e:\n log.info(\"Failed to connect to device {}: {}\".format(device.name, e))\n\n try:\n out = device.get(\"/simengine/rest/stop/{}\".format(simulation_name))\n except Exception as e:\n out = e\n\n return out\n", "id": "7953135", "language": "Python", "matching_score": 2.3930070400238037, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/virl/std/utils.py" }, { "content": "\"\"\"Common get functions for VIRL STD\"\"\"\n\n# Python\nimport logging\n\n# Unicon\nfrom unicon.core.errors import SubCommandFailure\n\nlog = logging.getLogger(__name__)\n\n\ndef get_simulations(device, connection_alias=\"rest\"):\n \"\"\"Get simulations list\n\n Args:\n device (`obj`): Device object\n connection_alias (`str`): connection alias\n\n Returns:\n list: list of simulation names\n \n Raise:\n None\n \"\"\"\n try:\n device.connect(via=connection_alias)\n except Exception as e:\n log.info(\"Failed to connect to device {}: {}\".format(device.name, e))\n\n try:\n out = device.get(\"/simengine/rest/list\")\n except Exception as e:\n log.info(\"Failed to get simulation list: {}\".format(e))\n\n sim_list = []\n for sim in out.get(\"simulations\", 
{}).keys():\n sim_list.append(sim)\n\n return sim_list\n\n\ndef get_node_summary(device, simulation_name, connection_alias=\"rest\"):\n \"\"\"Get node summary\n\n Args:\n device (`obj`): Device object\n simulation_name (`str`): simulation name\n connection_alias (`str`): connection alias\n\n Returns:\n dict: node summary info\n \n Raise:\n None\n \"\"\"\n try:\n device.connect(via=connection_alias)\n except Exception as e:\n log.info(\"Failed to connect to device {}: {}\".format(device.name, e))\n\n try:\n out = device.get(\"/simengine/rest/nodes/{}\".format(simulation_name))\n except Exception as e:\n log.info(\"Failed to get node summary: {}\".format(e))\n\n nodes = out[simulation_name]\n return nodes\n\n\ndef get_node_list(device, simulation_name):\n \"\"\"Get node list\n\n Args:\n device (`obj`): Device object\n simulation_name: simulation name\n\n Returns:\n list: node list\n \n Raise:\n None\n \"\"\"\n nodes = get_node_summary(device, simulation_name)\n return nodes.keys()\n", "id": "354457", "language": "Python", "matching_score": 2.0597848892211914, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/virl/std/get.py" }, { "content": "'''Common implementation for VIRL triggers'''\n# python import\nimport logging\nimport time\n\n# ats import\nfrom ats import aetest\n\n# Genie\nfrom genie.utils.timeout import Timeout\nfrom genie.harness.base import Trigger\nfrom genie.libs.sdk.apis.virl.std.utils import (launch_simulation,\n stop_simulation)\nfrom genie.libs.sdk.apis.virl.std.get import get_simulations\nfrom genie.libs.sdk.apis.virl.std.verify import (verify_simulation,\n verify_node_state)\n\nlog = logging.getLogger(__name__)\n\n\nclass TriggerStopStartSimulation(Trigger):\n '''Trigger class to start/stop simulation'''\n\n @aetest.setup\n def get_simulations(self, uut):\n ''' Get simulations\n\n Args:\n uut (`obj`): Device object\n\n Returns:\n None\n\n Raises:\n pyATS Results\n '''\n\n log.info(\"Getting list of simulations.\")\n try:\n self.sim_list = get_simulations(uut)\n log.info(\"Successfully got the list of simulations: {}\".format(self.sim_list))\n except Exception as e:\n raise Exception(\"Unable to get list of simulations.\")\n\n @aetest.test\n def stop_simulation(self, uut, simulation_name):\n ''' Stop simulations\n\n Args:\n uut (`obj`): Device object\n simulation_name (`str`): simulation name\n\n Returns:\n None\n\n Raises:\n pyATS Results\n '''\n\n log.info(\"stopping simulation {}\".format(simulation_name))\n try:\n result = stop_simulation(uut, simulation_name)\n if 'instead of the expected status code' not in str(result):\n log.info(\"Successfully stopped simulation {}\".format(simulation_name))\n elif '404' in str(result):\n log.info(\"Simulation {} is not running. 
No need to stop.\".format(simulation_name))\n else:\n self.failed(\"Failed to stop simulation {}\".format(simulation_name))\n \n except Exception as e:\n self.failed(\"Unable to stop simulation {}: {}\".format(simulation_name, e))\n\n # make sure the simulation is stopped\n log.info(\"Checking if the simulation {} is stopped\".format(simulation_name))\n try:\n result = verify_simulation(uut, simulation_name, exist = False)\n if 'instead of the expected status code' not in str(result):\n log.info(\"Successfully verified simulation {} doesn't exist.\".format(simulation_name))\n else:\n self.failed(\"Failed to verify if simulation {} is stopped\".format(simulation_name))\n \n except Exception as e:\n self.failed(\"Unable to verify simulation {}: {}\".format(simulation_name, e))\n \n @aetest.test\n def launch_simulation(self, uut, simulation_name, virl_file):\n ''' Launch simulation\n\n Args:\n uut (`obj`): Device object\n simulation_name (`str`): simulation name\n\n Returns:\n None\n\n Raises:\n pyATS Results\n '''\n\n log.info(\"Opening virl file: {}.\".format(virl_file))\n try:\n with open(virl_file) as f:\n virl_data = f.read()\n except Exception as e:\n self.failed(\"Failed to open virl file {}: {}\".format(virl_file, e))\n \n log.info(\"Launching simulation {}\".format(simulation_name))\n try:\n result = launch_simulation(uut, simulation_name, virl_data)\n if 'instead of the expected status code' not in str(result):\n log.info(\"Successfully launched simulation {}\".format(simulation_name))\n else:\n self.failed(\"Failed to launch simulation {}\".format(simulation_name))\n \n except Exception as e:\n self.failed(\"Unable to launch simulation {}: {}\".format(simulation_name, e))\n\n # Verify if all nodes are ACTIVE\n log.info(\"Getting list of all nodes on simulation {}\".format(simulation_name))\n try:\n result = verify_node_state(uut, simulation_name)\n if 'instead of the expected status code' not in str(result):\n log.info(\"Successfully verified all nodes are ACTIVE on simulation {}\".format(simulation_name))\n else:\n self.failed(\"Failed to verified all nodes are ACTIVE on simulation {}\".format(simulation_name))\n except Exception as e:\n self.failed(\"Unable to verify all nodes are ACTIVE on simulation {}: {}\".format(simulation_name, e))\n \n \n\n \n", "id": "11082963", "language": "Python", "matching_score": 2.1723344326019287, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/virl/virl.py" }, { "content": "\"\"\"Common get functions for VIRL STD\"\"\"\n\n# Python\nimport logging\n\n# Unicon\nfrom unicon.core.errors import SubCommandFailure\n\n# Genie\nfrom genie.utils.timeout import Timeout\nfrom genie.libs.sdk.apis.virl.std.get import get_node_summary, get_simulations\n\nlog = logging.getLogger(__name__)\n\n\ndef verify_node_reachable(\n device, simulation_name, node_name=None, max_time=300, check_interval=15\n):\n \"\"\"Verify node reachable\n\n Args:\n device (`obj`): Device object\n simulation_name ('str'): simulation name\n node_name ('str'): node name\n max_time (`int`): maximum wait time in seconds. Default: 300\n check_interval (`int`): Wait time between iterations when looping\\\n is needed in secnods. 
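# --- Illustrative sketch (hypothetical helper): the verify_* functions in
# this module all share the same poll-until-true shape around genie's Timeout;
# the common core can be written once and reused:
from genie.utils.timeout import Timeout

def poll_until(check, max_time=300, check_interval=15):
    """Re-run check() until it returns True or max_time expires."""
    timeout = Timeout(max_time, check_interval)
    while timeout.iterate():
        if check():
            return True
        timeout.sleep()
    return False

# usage sketch:
# poll_until(lambda: simulation_name in get_simulations(device),
#            max_time=60, check_interval=10)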
Default: 15\n\n        Returns:\n            boolean: True/False\n\n        Raise:\n            None\n    \"\"\"\n    timeout = Timeout(max_time, check_interval)\n    while timeout.iterate():\n        summary = get_node_summary(device, simulation_name)\n        # check either the single requested node or every node in the summary\n        nodes = [node_name] if node_name else summary.keys()\n        for node in nodes:\n            if summary[node][\"state\"] != \"reachable\":\n                break\n        else:\n            return True\n        timeout.sleep()\n    return False\n\n\ndef verify_node_state(\n    device,\n    simulation_name,\n    node_name=None,\n    max_time=300,\n    check_interval=15,\n    state=\"ACTIVE\",\n):\n    \"\"\"Verify node state\n\n        Args:\n            device (`obj`): Device object\n            simulation_name ('str'): simulation name\n            node_name ('str'): node name\n            max_time (`int`): maximum wait time in seconds. Default: 300\n            check_interval (`int`): Wait time between iterations when looping\\\n                is needed, in seconds. Default: 15\n            state (`str`): state to verify\n\n        Returns:\n            boolean: True/False\n\n        Raise:\n            None\n    \"\"\"\n    timeout = Timeout(max_time, check_interval)\n    while timeout.iterate():\n        summary = get_node_summary(device, simulation_name)\n        nodes = [node_name] if node_name else summary.keys()\n        for node in nodes:\n            if summary[node][\"state\"] != state:\n                break\n        else:\n            return True\n        timeout.sleep()\n    return False\n\n\ndef verify_simulation(\n    device, simulation_name, max_time=60, check_interval=10, exist=True\n):\n    \"\"\"Verify if simulation exists\n\n        Args:\n            device (`obj`): Device object\n            simulation_name (`str`): simulation name\n            max_time (`int`): maximum wait time in seconds. Default: 60\n            check_interval (`int`): Wait time between iterations when looping\\\n                is needed, in seconds. Default: 10\n            exist (`Bool`): True if simulation exists. False for opposite check.\n\n        Returns:\n            Bool: True/False\n        Raise:\n            None\n    \"\"\"\n    timeout = Timeout(max_time, check_interval)\n    while timeout.iterate():\n        simulations = get_simulations(device)\n        if exist:\n            if simulation_name in simulations:\n                return True\n        else:\n            if simulation_name not in simulations:\n                return True\n        timeout.sleep()\n    return False\n
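\n\n# A minimal usage sketch (assumption: 'device' is the VIRL STD server object\n# from the testbed):\n#\n#     if verify_simulation(device, 'sim1') and verify_node_state(device, 'sim1'):\n#         log.info('simulation sim1 is up and all nodes are ACTIVE')\n", "id": "1167922", "language": "Python", "matching_score": 1.5375031232833862, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/virl/std/verify.py" }, { "content": "'''Common implementation for sleep triggers'''\n# python import\nimport logging\nimport time\n\n# ats import\nfrom ats import aetest\n\n# Genie Libs import\nfrom genie.libs.sdk.triggers.template.sleep import TriggerSleep\n\nlog = logging.getLogger(__name__)\n\n\nclass TriggerSleep(TriggerSleep):\n    '''Trigger class for Sleep action'''\n\n    @aetest.test\n    def sleep(self, uut, sleep_time, message_time):\n        ''' Trigger will sleep for 'sleep_time' seconds given by the user\n            and will populate a message with the remaining sleep time every \n            'message_time' seconds.\n        '''\n\n        log.info('Sleeping for {f} seconds'.format(f=sleep_time))\n\n        for iteration in range(sleep_time, 0, -1):\n            time.sleep(1)\n            if (iteration % message_time) == 0 and (iteration != sleep_time):\n                log.info('{f} seconds remaining'.format(f=iteration))\n", "id": "4155831", "language": "Python", "matching_score": 2.183272123336792, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/sleep/sleep.py" }, { "content": "''' triggerSleep template'''\n\n# import ats\nfrom ats import aetest\n\n# import genie infra\nfrom genie.harness.base import Trigger\n\n\nclass TriggerSleep(Trigger):\n    ''' Template for all Sleep triggers\n\n        TriggerSleep will sleep with no action.\n    '''\n\n    @aetest.test\n    def sleep(self):\n        raise 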
NotImplementedError\n", "id": "497879", "language": "Python", "matching_score": 1.4644943475723267, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/template/sleep.py" }, { "content": "'''TriggerShutNoShut template'''\n\nfrom ats import aetest\nfrom genie.harness.base import Trigger\n\n# Genie Libs\nfrom genie.libs.sdk.libs.abstracted_libs import sleep_processor\n\n@aetest.processors(pre=[sleep_processor])\nclass TriggerShutNoShut(Trigger):\n ''' Template for all TriggerShutNoShut triggers\n\n A ShutNoShut trigger is defined in 5 main steps:\n\n 1. Verify if we should execute the trigger. This is known this by\n verifying if certain requirements are satisfied.\n 2. Shut a particular feature/interface/configuration.\n 3. Verify it has been shut correctly and Operational state is as\n expected.\n 4. Unshut the particular feature/interface/configuration.\n 5. Verify it has been unshut correctly and operation state is\n back to what it was at the begining.\n '''\n\n @aetest.setup\n def verify_prerequisite(self):\n raise NotImplementedError\n\n @aetest.test\n def shut(self):\n raise NotImplementedError\n\n @aetest.test\n def verify_shut(self):\n raise NotImplementedError\n\n @aetest.test\n def unshut(self):\n raise NotImplementedError\n\n @aetest.test\n def verify_initial_state(self):\n raise NotImplementedError\n\n", "id": "5514844", "language": "Python", "matching_score": 4.821462631225586, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/template/shutnoshut.py" }, { "content": "'''TriggerUnconfigConfig template'''\n\nfrom ats import aetest\nfrom genie.harness.base import Trigger\n\n# Genie Libs\nfrom genie.libs.sdk.libs.abstracted_libs import sleep_processor\n\n@aetest.processors(pre=[sleep_processor])\nclass TriggerUnconfigConfig(Trigger):\n ''' Template for all TriggerUnconfigConfig triggers\n\n A UnconfigConfig trigger is defined in 6 main steps:\n\n 1. Verify if we should execute the trigger. This is known by\n verifying if certain requirements are satisfied.\n 2. Prepare mechanism to revert configuration for the end step.\n 3. Unconfigure certain configuration.\n 4. Verify it has been unconfigured correctly and Operational state is\n as expected.\n 5. Revert the configuration.\n 6. 
Verify it has been reverted correctly and operational state is\n           back to what it was at the beginning.\n    '''\n\n    @aetest.setup\n    def verify_prerequisite(self):\n        raise NotImplementedError\n\n    @aetest.test\n    def save_configuration(self):\n        raise NotImplementedError\n\n    @aetest.test\n    def unconfigure(self):\n        raise NotImplementedError\n\n    @aetest.test\n    def verify_unconfigure(self):\n        raise NotImplementedError\n\n    @aetest.test\n    def restore_configuration(self):\n        raise NotImplementedError\n\n    @aetest.test\n    def verify_initial_state(self):\n        raise NotImplementedError\n", "id": "4316579", "language": "Python", "matching_score": 1.4803036451339722, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/template/unconfigconfig.py" }, { "content": "'''IOSXE implementation for Reload triggers'''\n\n# import ats\nfrom ats import aetest\n\n# Genie Libs\nfrom genie.libs.sdk.triggers.ha.ha import \\\n    TriggerReload as CommonReload, \\\n    TriggerReloadLc\n\n\nclass TriggerReload(CommonReload):\n\n    @aetest.setup\n    def verify_prerequisite(self, uut, abstract, steps, timeout):\n        '''Learn Ops object and verify the requirements.\n\n        If the requirements are not satisfied, then skip to the next\n        testcase.\n\n        Args:\n            uut (`obj`): Device object.\n            abstract (`obj`): Abstract object.\n            steps (`step obj`): aetest step object\n            timeout (`timeout obj`): Timeout Object\n\n        Returns:\n            None\n\n        Raises:\n            pyATS Results\n        '''\n        self.skipped('No implementation for generic iosxe HA reload',\n                     goto=['next_tc'])\n\n\nclass TriggerReloadActiveRP(CommonReload):\n\n    @aetest.setup\n    def verify_prerequisite(self, uut, abstract, steps, timeout):\n        '''Learn Ops object and verify the requirements.\n\n        If the requirements are not satisfied, then skip to the next\n        testcase.\n\n        Args:\n            uut (`obj`): Device object.\n            abstract (`obj`): Abstract object.\n            steps (`step obj`): aetest step object\n            timeout (`timeout obj`): Timeout Object\n\n        Returns:\n            None\n\n        Raises:\n            pyATS Results\n        '''\n        self.skipped('No implementation for generic iosxe HA reload',\n                     goto=['next_tc'])\n\n\nclass TriggerReloadStandbyRP(CommonReload):\n\n    @aetest.setup\n    def verify_prerequisite(self, uut, abstract, steps, timeout):\n        '''Learn Ops object and verify the requirements.\n\n        If the requirements are not satisfied, then skip to the next\n        testcase.\n\n        Args:\n            uut (`obj`): Device object.\n            abstract (`obj`): Abstract object.\n            steps (`step obj`): aetest step object\n            timeout (`timeout obj`): Timeout Object\n\n        Returns:\n            None\n\n        Raises:\n            pyATS Results\n        '''\n        self.skipped('No implementation for generic iosxe HA reload',\n                     goto=['next_tc'])\n\n\nclass TriggerReloadMember(TriggerReloadLc):\n\n    @aetest.setup\n    def verify_prerequisite(self, uut, abstract, steps, timeout):\n        '''Learn Ops object and verify the requirements.\n\n        If the requirements are not satisfied, then skip to the next\n        testcase.\n\n        Args:\n            uut (`obj`): Device object.\n            abstract (`obj`): Abstract object.\n            steps (`step obj`): aetest step object\n            timeout (`timeout obj`): Timeout Object\n\n        Returns:\n            None\n\n        Raises:\n            pyATS Results\n        '''\n        self.skipped('No implementation for generic iosxe HA reload',\n                     goto=['next_tc'])\n\n\nclass TriggerReloadActiveFP(TriggerReloadLc):\n\n    @aetest.setup\n    def verify_prerequisite(self, uut, abstract, steps, timeout):\n        '''Learn Ops object and verify the requirements.\n\n        If the requirements are not satisfied, then skip to the next\n        testcase.\n\n        Args:\n            uut (`obj`): Device object.\n            abstract (`obj`): Abstract object.\n            steps (`step obj`): aetest 
step object\n timeout (`timeout obj`): Timeout Object\n\n Returns:\n None\n\n Raises:\n pyATS Results\n '''\n self.skipped('No implementation for generic iosxe HA reload',\n goto=['next_tc'])\n ", "id": "342405", "language": "Python", "matching_score": 4.003129005432129, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/ha/reload/iosxe/reload.py" }, { "content": "'''IOSXE implementation for Switchover triggers'''\n\n# import ats\nfrom ats import aetest\n\n# Genie Libs\nfrom genie.libs.sdk.triggers.ha.ha import \\\n TriggerSwitchover as CommonSwitchover\n\n\nclass TriggerSwitchover(CommonSwitchover):\n\n @aetest.setup\n def verify_prerequisite(self, uut, abstract, steps, timeout):\n '''Learn Ops object and verify the requirements.\n\n If the requirements are not satisfied, then skip to the next\n testcase.\n\n Args:\n uut (`obj`): Device object.\n abstract (`obj`): Abstract object.\n steps (`step obj`): aetest step object\n timeout (`timeout obj`): Timeout Object\n\n Returns:\n None\n\n Raises:\n pyATS Results\n '''\n self.skipped('No implementation for generic iosxe HA switchover',\n goto=['next_tc'])\n", "id": "3130093", "language": "Python", "matching_score": 0.5931317806243896, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/ha/switchover/iosxe/switchover.py" }, { "content": "'''\nImplementation for Mrib Restart triggers\n'''\n\n# Genie Libs\nfrom genie.libs.sdk.triggers.processrestart.processrestart import TriggerProcessRestart\n\nexclude = ['last_restart_date', 'state_start_date',\n 'last_terminate_reason', 'reboot_state',\n 'previous_pid']\n\n\nclass TriggerProcessCrashRestartMrib(TriggerProcessRestart):\n \"\"\"Restart the running Mrib process(es) with linux command \"kill -6 <process>\",\n expecting process crashes and generates a core. This will cause device to switchover.\"\"\"\n \n __description__ = \"\"\"Restart the running Mrib process(es) with linux command \"kill -6 <process>\",\n expecting process crashes and generates a core. This will cause device to switchover.\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n\n steps:\n 1. Learn Mrib process(es) with command \"show system internal sysmgr service name mrib\",\n and store the \"running\" process(es) if has any, otherwise, SKIP the trigger\n 2. Restart the learned Mrib process(es) from step 1 with command \"kill -6 <process>\"\n in linux shell mode\n 3. 
Reconnect to the router due to the router switchover after restarting process mrib.\n Verify the pid of Mrib process(es) from step 2 is changed,\n restart count of Mrib process(es) from step 2 is increased by 1,\n the count of \"SYSMGR-2-SERVICE_CRASHED:\" in log is 1 per Mrib process from step 2,\n and only 1 core generated on mrib per Mrib process from step 2\n\n \"\"\"\n process = 'mrib'\n method = 'crash'\n crash_method = '6'\n verify_exclude = exclude\n\n\nclass TriggerProcessKillRestartMrib(TriggerProcessRestart):\n \"\"\"Restart the running Mrib process(es) with Linux command \"kill -9 <process>\".\"\"\"\n\n __description__ = \"\"\"Restart the running Mrib process(es) with Linux command \"kill -9 <process>\".\n This will cause device to switchover.\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n\n steps:\n 1. Learn Mrib process(es) with command \"show system internal sysmgr service name mrib\",\n and store the \"running\" process(es) if has any, otherwise, SKIP the trigger\n 2. Restart the learned Mrib process(es) from step 1 with command \"kill -9 <process>\"\n in linux shell mode\n 3. Reconnect to the router due to the router switchover after restarting process mrib.\n Verify the pid of Mrib process(es) from step 2 is changed,\n and restart count of Mrib process(es) from step 2 is increased by 1\n\n \"\"\"\n process = 'mrib'\n method = 'crash'\n crash_method = '9'\n verify_exclude = exclude\n", "id": "2002618", "language": "Python", "matching_score": 5.38026762008667, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/processrestart/mrib/nxos/processrestart.py" }, { "content": "'''\nImplementation for Sysmgr Restart triggers\n'''\n\n# Genie Libs\nfrom genie.libs.sdk.triggers.processrestart.processrestart import TriggerProcessRestart\n\nexclude = ['last_restart_date', 'state_start_date',\n 'last_terminate_reason', 'reboot_state',\n 'previous_pid']\n\n\nclass TriggerProcessCrashRestartSysmgr(TriggerProcessRestart):\n \"\"\"Restart the running Sysmgr process(es) with linux command \"kill -6 <process>\",\n expecting process crashes and generates a core. This will cause device to switchover.\"\"\"\n \n __description__ = \"\"\"Restart the running Sysmgr process(es) with linux command \"kill -6 <process>\",\n expecting process crashes and generates a core. This will cause device to switchover.\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n\n steps:\n 1. Learn Sysmgr process(es) with command \"show processes | include sysmgr\",\n and store the \"running\" process(es) if has any, otherwise, SKIP the trigger\n 2. Restart the learned Sysmgr process(es) from step 1 with command \"kill -6 <process>\"\n in linux shell mode\n 3. 
Reconnect to the router due to the router switchover after restarting process sysmgr.\n Verify the pid of Sysmgr process(es) from step 2 is changed,\n restart count of Sysmgr process(es) from step 2 is increased by 1,\n and only 1 core generated on sysmgr per Sysmgr process from step 2\n\n \"\"\"\n process = 'sysmgr'\n method = 'crash'\n crash_method = '6'\n verify_exclude = exclude\n\n\nclass TriggerProcessKillRestartSysmgr(TriggerProcessRestart):\n \"\"\"Restart the running Sysmgr process(es) with Linux command \"kill -9 <process>\".\n This will cause device to switchover.\"\"\"\n\n __description__ = \"\"\"Restart the running Sysmgr process(es) with Linux command \"kill -9 <process>\".\n This will cause device to switchover.\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n\n steps:\n 1. Learn Sysmgr process(es) with command \"show processes | include sysmgr\",\n and store the \"running\" process(es) if has any, otherwise, SKIP the trigger\n 2. Restart the learned Sysmgr process(es) from step 1 with command \"kill -9 <process>\"\n in linux shell mode.\n 3. Reconnect to the router due to the router switchover after restarting process sysmgr.\n Verify the pid of Sysmgr process(es) from step 2 is changed,\n and restart count of Sysmgr process(es) from step 2 is increased by 1\n\n \"\"\"\n process = 'sysmgr'\n method = 'crash'\n crash_method = '9'\n verify_exclude = exclude\n", "id": "3403839", "language": "Python", "matching_score": 4.343227386474609, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/processrestart/sysmgr/nxos/processrestart.py" }, { "content": "'''\nImplementation for FeatureMgr Restart triggers\n'''\n\n# Genie Libs\nfrom genie.libs.sdk.triggers.processrestart.processrestart import TriggerProcessRestart\n\nexclude = ['last_restart_date', 'state_start_date',\n 'last_terminate_reason', 'reboot_state',\n 'previous_pid']\n\nclass TriggerProcessCrashRestartFeatureMgr(TriggerProcessRestart):\n \"\"\"Restart the running feature-mgr process(es) with linux command \"kill -6 <process>\",\n expecting process crashes and generates a core.\"\"\"\n \n __description__ = \"\"\"Restart the running feature-mgr process(es) with linux command \"kill -6 <process>\",\n expecting process crashes and generates a core.\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n\n steps:\n 1. Learn feature-mgr process(es) with command \"show system internal sysmgr service name feature-mgr\",\n and store the \"running\" process(es) if has any, otherwise, SKIP the trigger\n 2. Restart the learned feature-mgr process(es) from step 1 with command \"kill -6 <process>\"\n in linux shell mode\n 3. 
Verify the pid of feature-mgr process(es) from step 2 is changed,\n restart count of feature-mgr process(es) from step 2 is increased by 1,\n the count of \"SYSMGR-2-SERVICE_CRASHED:\" in log is 1 per feature-mgr process from step 2,\n and only 1 core generated on feature-mgr per feature-mgr process from step 2\n\n \"\"\"\n process = 'feature-mgr'\n method = 'crash'\n crash_method = '6'\n verify_exclude = exclude\n\nclass TriggerProcessKillRestartFeatureMgr(TriggerProcessRestart):\n \"\"\"Restart the running feature-mgr process(es) with Linux command \"kill -9 <process>\".\"\"\"\n\n __description__ = \"\"\"Restart the running feature-mgr process(es) with Linux command \"kill -9 <process>\"\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n\n steps:\n 1. Learn feature-mgr process(es) with command \"show system internal sysmgr service name feature-mgr\",\n and store the \"running\" process(es) if has any, otherwise, SKIP the trigger\n 2. Restart the learned feature-mgr process(es) from step 1 with command \"kill -9 <process>\"\n in linux shell mode\n 3. Verify the pid of feature-mgr process(es) from step 2 is changed,\n and restart count of feature-mgr process(es) from step 2 is increased by 1\n\n \"\"\"\n process = 'feature-mgr'\n method = 'crash'\n crash_method = '9'\n verify_exclude = exclude\n", "id": "3666037", "language": "Python", "matching_score": 1.8550093173980713, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/processrestart/feature_mgr/nxos/processrestart.py" }, { "content": "'''NXOS implementation for trm disable/enable triggers'''\n\n# import python\nimport time\n\nfrom ats import aetest\n# Genie Libs\nfrom genie.libs.sdk.libs.utils.mapping import Mapping\nfrom genie.libs.sdk.triggers.disableenable.disableenable import \\\n TriggerDisableEnable\nfrom ats.utils.objects import NotExists\n\n# Which key to exclude Ops comparison\ntrm_exclude = ['maker','uptime']\n\nclass TriggerDisableEnableNgmvpn(TriggerDisableEnable):\n \"\"\"Disable and enable feature ngmvpn when it is enabled.\"\"\"\n\n __description__ = \"\"\"Disable and enable feature ngmvpn when it is enabled.\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery:\n Buffer recovery timeout make sure devices are recovered at the end\n of the trigger execution. Used when previous timeouts have been exhausted.\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. 
Default: 180\n                interval (`int`): Wait time between iteration when looping is needed,\n                                in second. Default: 15\n\n    steps:\n        1. Learn Vxlan Ops object and verify if ngmvpn is enabled, if not, SKIP the trigger\n        2. Save the current device configurations through \"method\" which user uses\n        3. Disable the ngmvpn feature with command \"no feature ngmvpn\"\n           via Vxlan Conf object\n        4. Verify the state of feature ngmvpn is \"disabled\"\n        5. Recover the device configurations to the one in step 2\n        6. Verify the state of feature ngmvpn is \"enabled\" and\n           learn Vxlan Ops again and verify it is the same as the Ops in step 1\n\n    \"\"\"\n\n
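    # A reading of the Mapping structure (sketch, not an official spec):\n    # 'requirements' lists the Ops keys that must match before the trigger\n    # runs, 'config_info' is the Conf change applied by the disable step, and\n    # 'verify_ops' lists the Ops values expected after the change.\n    mapping = Mapping(requirements={'ops.vxlan.vxlan.Vxlan': {\n                                        'requirements': [['nve', 'enabled_ngmvpn', True]],\n                                        'kwargs': {'attributes': ['nve[enabled_ngmvpn]']},\n                                        'all_keys': True,\n                                        'exclude': trm_exclude}},\n                      config_info={'conf.vxlan.Vxlan': {\n                                       'requirements': [['device_attr', '{uut}', 'enabled_ngmvpn', True]],\n                                       'verify_conf': False}},\n                      verify_ops={'ops.vxlan.vxlan.Vxlan': {\n                                      'requirements': [['nve', 'enabled_ngmvpn', False]],\n                                      'kwargs': {'attributes': ['nve[enabled_ngmvpn]']},\n                                      'exclude': trm_exclude}},\n                      num_values={})\n\n    feature_name = 'ngmvpn'\n", "id": "4058054", "language": "Python", "matching_score": 6.548811435699463, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/disableenable/trm/nxos/disableenable.py" }, { "content": "'''NXOS implementation for Vxlan disable/enable triggers'''\n\n# import python\nimport time\n\nfrom ats import aetest\n# Genie Libs\nfrom genie.libs.sdk.libs.utils.mapping import Mapping\nfrom genie.libs.sdk.triggers.disableenable.disableenable import \\\n    TriggerDisableEnable\n\n# Which key to exclude Ops comparison\nvxlan_exclude = ['maker','uptime','up_time']\n\nmultisite_exclude = ['elapsedtime','keepalive','remoteport',\n                     'keepaliverecvd','keepalivesent','lastread','lastwrite',\n                     'msgrecvd','msgsent','neighbortableversion',\n                     'tableversion','rtrefreshsent','updatesrecvd','updatessent',\n                     'bytessent','bytesrecvd','localport','connsdropped',\n                     'connsestablished','opensrecvd','openssent','prefixversion','fd']\n\n# (The keys above are per-session counters and timers that naturally drift\n# between Ops snapshots, so they are excluded from the before/after comparison.)\n\n\nclass TriggerDisableEnableNveOverlay(TriggerDisableEnable):\n    \"\"\"Disable and enable feature nv overlay when it is enabled.\"\"\"\n\n    __description__ = \"\"\"Disable and enable feature nv overlay when it is enabled.\n\n    trigger_datafile:\n        Mandatory:\n            timeout:\n                max_time (`int`): Maximum wait time for the trigger,\n                                in second. Default: 180\n                interval (`int`): Wait time between iteration when looping is needed,\n                                in second. Default: 15\n                method (`str`): Method to recover the device configuration,\n                              Support methods:\n                                'checkpoint': Rollback the configuration by\n                                              checkpoint (nxos),\n                                              archive file (iosxe),\n                                              load the saved running-config file on disk (iosxr)\n        Optional:\n            tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n                                 restored to the reference rate,\n                                 in second. Default: 60\n            tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n                               in second. Default: 10\n            timeout_recovery:\n                Buffer recovery timeout make sure devices are recovered at the end\n                of the trigger execution. Used when previous timeouts have been exhausted.\n\n                max_time (`int`): Maximum wait time for the last step of the trigger,\n                                in second. Default: 180\n                interval (`int`): Wait time between iteration when looping is needed,\n                                in second. Default: 15\n\n    steps:\n        1. Learn Vxlan Ops object and verify if nv overlay is enabled, if not, SKIP the trigger\n        2. 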
Save the current device configurations through \"method\" which user uses\n        3. Disable nv overlay feature with command \"no feature nv overlay\"\n           via Vxlan Conf object\n        4. Verify the state of feature nv overlay is \"disabled\"\n        5. Recover the device configurations to the one in step 2\n        6. Verify the state of feature nv overlay is \"enabled\" and\n           learn Vxlan Ops again and verify it is the same as the Ops in step 1\n\n    \"\"\"\n\n    mapping = Mapping(requirements={'ops.vxlan.vxlan.Vxlan': {\n                                        'requirements': [['nve', 'enabled_nv_overlay', True]],\n                                        'kwargs': {'attributes': ['nve','l2route']},\n                                        'all_keys': True,\n                                        'exclude': vxlan_exclude + ['tx_id','peer_id']}},\n                      config_info={'conf.vxlan.Vxlan': {\n                                       'requirements': [['device_attr', '{uut}', 'enabled_nv_overlay', True]],\n                                       'verify_conf': False}},\n                      verify_ops={'ops.vxlan.vxlan.Vxlan': {\n                                      'requirements': [['nve', 'enabled_nv_overlay', False]],\n                                      'kwargs': {'attributes': ['nve','l2route']},\n                                      'exclude': vxlan_exclude + ['l2route']}},\n                      num_values={})\n\n    feature_name = 'nve'\n\nclass TriggerDisableEnableVnSegmentVlanWithNvOverlay(TriggerDisableEnable):\n    \"\"\"Disable and enable feature vn-segment-vlan-based when it is enabled.\"\"\"\n\n    __description__ = \"\"\"Disable and enable feature vn-segment-vlan-based when it is enabled.\n\n    trigger_datafile:\n        Mandatory:\n            timeout:\n                max_time (`int`): Maximum wait time for the trigger,\n                                in second. Default: 180\n                interval (`int`): Wait time between iteration when looping is needed,\n                                in second. Default: 15\n                method (`str`): Method to recover the device configuration,\n                              Support methods:\n                                'checkpoint': Rollback the configuration by\n                                              checkpoint (nxos),\n                                              archive file (iosxe),\n                                              load the saved running-config file on disk (iosxr)\n        Optional:\n            tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n                                 restored to the reference rate,\n                                 in second. Default: 60\n            tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n                               in second. Default: 10\n            timeout_recovery:\n                Buffer recovery timeout make sure devices are recovered at the end\n                of the trigger execution. Used when previous timeouts have been exhausted.\n\n                max_time (`int`): Maximum wait time for the last step of the trigger,\n                                in second. Default: 180\n                interval (`int`): Wait time between iteration when looping is needed,\n                                in second. Default: 15\n\n    steps:\n        1. Learn Vxlan Ops object and verify if vn segment vlan is enabled, if not, SKIP the trigger\n        2. Save the current device configurations through \"method\" which user uses\n        3. Disable vn segment vlan feature with these two commands \"no feature nv overlay\" and\n           \"no feature vn-segment-vlan-based\" via Vxlan Conf object\n        4. Verify the state of feature vn-segment-vlan-based is \"disabled\"\n        5. Recover the device configurations to the one in step 2\n        6. 
Verify the state of feature vn-segment-vlan-based is \"enabled\" and\n learn Vxlan Ops again and verify it is the same as the Ops in step 1\n\n \"\"\"\n mapping = Mapping(requirements={'ops.vxlan.vxlan.Vxlan': {\n 'requirements': [['nve', 'enabled_vn_segment_vlan_based', True]],\n 'kwargs': {'attributes': ['nve','l2route','bgp_l2vpn_evpn']},\n 'exclude': vxlan_exclude + multisite_exclude + ['tx_id','peer_id','flags','pathnr',\n 'bestpathnr','totalpaths','prefix','advertisedto',\n 'resettime','resetreason','memoryused','prefixreceived',\n 'totalnetworks']}},\n config_info={'conf.vxlan.Vxlan': {\n 'requirements': [['device_attr', '{uut}', 'enabled_vn_segment_vlan_based', True]],\n 'verify_conf': False}},\n verify_ops={'ops.vxlan.vxlan.Vxlan': {\n 'requirements': [['nve', 'enabled_vn_segment_vlan_based', False]],\n 'kwargs': {'attributes': ['nve','l2route','bgp_l2vpn_evpn']},\n 'exclude': vxlan_exclude + ['l2route','bgp_l2vpn_evpn']}},\n num_values={})\n\n feature_name = 'vnseg_vlan'\n", "id": "2312498", "language": "Python", "matching_score": 5.74399471282959, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/disableenable/vxlan/nxos/disableenable.py" }, { "content": "'''NXOS implementation for Vlan disable/enable triggers'''\n\n# import python\nimport time\n\n# Genie Libs\nfrom genie.libs.sdk.libs.utils.mapping import Mapping\nfrom genie.libs.sdk.triggers.disableenable.disableenable import \\\n TriggerDisableEnable\n\n\n# Which key to exclude for Mcast Ops comparison\nvlan_exclude = ['maker']\n\n\nclass TriggerDisableEnableVlanInterface(TriggerDisableEnable):\n \"\"\"Disable and enable feature interface-vlan when it is enabled.\"\"\"\n\n __description__ = \"\"\"Disable and enable feature interface-vlan when it is enabled.\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery: \n Buffer recovery timeout make sure devices are recovered at the end\n of the trigger execution. Used when previous timeouts have been exhausted.\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n\n steps:\n 1. Learn Vlan Ops object and verify if interface-vlan is enabled, if not, SKIP the trigger\n 2. Save the current device configurations through \"method\" which user uses\n 3. Disable interface-vlan feature with command \"no feature interface-vlan\"\n via Vlan Conf object\n 4. Verify the state of feature interface-vlan is \"disabled\"\n 5. Recover the device configurations to the one in step 2\n 6. 
Verify the state of feature interface-vlan is \"enabled\" and \n learn Vlan Ops again and verify it is the same as the Ops in step 1\n\n \"\"\"\n mapping = Mapping(requirements={'ops.vlan.vlan.Vlan':{\n 'requirements': [['info', 'vlans', 'interface_vlan_enabled', True]],\n 'kwargs': {'attributes':['info']},\n 'exclude': vlan_exclude}},\n config_info={'conf.vlan.Vlan':{\n 'requirements':[['device_attr', '{uut}','interface_vlan_enabled', 'disabled']],\n 'verify_conf': False}},\n verify_ops={'ops.vlan.vlan.Vlan':{\n 'requirements': [['info', 'vlans', 'interface_vlan_enabled', False]],\n 'kwargs': {'attributes': ['info']},\n 'exclude': vlan_exclude}})\n\n\n feature_name = 'interface-vlan'\n\n\nclass TriggerDisableEnableVnSegmentVlan(TriggerDisableEnable):\n \"\"\"Disable and enable feature vn-segment-vlan-based when it is enabled.\"\"\"\n \n __description__ = \"\"\"Disable and enable feature vn-segment-vlan-based when it is enabled.\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery: \n Buffer recovery timeout make sure devices are recovered at the end\n of the trigger execution. Used when previous timeouts have been exhausted.\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n\n steps:\n 1. Learn Vlan Ops object and verify if vn-segment-vlan-based is enabled,\n if not, SKIP the trigger\n 2. Save the current device configurations through \"method\" which user uses\n 3. Disable vn-segment-vlan-based feature with command \"no feature vn-segment-vlan-based\"\n via Vlan Conf object\n 4. Verify the state of feature vn-segment-vlan-based is \"disabled\"\n 5. Recover the device configurations to the one in step 2\n 6. 
Verify the state of feature vn-segment-vlan-based is \"enabled\" and \n learn Vlan Ops again and verify it is the same as the Ops in step 1\n\n\n \"\"\"\n\n mapping = Mapping(requirements={'ops.vlan.vlan.Vlan':{\n 'requirements': [['info', 'vlans', 'vn_segment_vlan_based_enabled', True]],\n 'kwargs': {'attributes':['info']},\n 'exclude': vlan_exclude}},\n config_info={'conf.vlan.Vlan':{\n 'requirements':[['device_attr', '{uut}','vn_segment_vlan_based_enabled', 'disabled']],\n 'verify_conf': False}},\n verify_ops={'ops.vlan.vlan.Vlan':{\n 'requirements': [['info', 'vlans', 'vn_segment_vlan_based_enabled', False]],\n 'kwargs': {'attributes': ['info']},\n 'exclude': vlan_exclude}})\n\n feature_name = 'vnseg_vlan'", "id": "7207028", "language": "Python", "matching_score": 4.743884086608887, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/disableenable/vlan/nxos/disableenable.py" }, { "content": "'''NXOS implementation for hsrp disable/enable triggers'''\n\n# import python\nimport time\n\n# Genie Libs\nfrom genie.libs.sdk.libs.utils.mapping import Mapping\nfrom genie.libs.sdk.triggers.disableenable.disableenable import \\\n TriggerDisableEnable\n\n\n# Which key to exclude for HSRP Ops comparison\nhsrp_exclude = ['maker']\n\n\nclass TriggerDisableEnableHsrp(TriggerDisableEnable):\n \"\"\"Disable and enable feature hsrp when there is hsrp group(s).\"\"\"\n \n __description__ = \"\"\"Disable and enable feature hsrp when there is hsrp group(s).\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery: \n Buffer recovery timeout make sure devices are recovered at the end\n of the trigger execution. Used when previous timeouts have been exhausted.\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n interface: `str`\n address_family: `str`\n version: `int`\n groups: `int`\n standby_router: `str`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n\n steps:\n 1. Learn HSRP Ops object and store the HSRP group(s)\n if has any, otherwise, SKIP the trigger\n 2. Save the current device configurations through \"method\" which user uses\n 3. Disable HSRP feature with command \"no feature hsrp\"\n via HSRP Conf object\n 4. Verify the state of feature hsrp is \"disabled\"\n 5. Recover the device configurations to the one in step 2\n 6. 
Verify the state of feature hsrp is \"enabled\" and \n           learn HSRP Ops again and verify it is the same as the Ops in step 1\n\n    \"\"\"\n\n    # Mapping of Information between Ops and Conf\n    # Also permit to dictate which key to verify\n    mapping = Mapping(requirements={'ops.hsrp.hsrp.Hsrp':{\n                                        'requirements':[\n                                            ['info', '(?P<interface>.*)',\\\n                                             'address_family', '(?P<address_family>.*)',\\\n                                             'version', '(?P<version>.*)',\\\n                                             'groups', '(?P<groups>.*)',\\\n                                             'standby_router',\\\n                                             '(?P<standby_router>.*)'],\n                                            ],\n                                        'exclude': hsrp_exclude}},\n                      config_info={'conf.hsrp.Hsrp':{\n                                       'requirements':[\n                                           ['device_attr', '{uut}',\\\n                                            'enabled', True]],\n                                       'verify_conf':False}})\n\n    # feature name\n    # used for creating checkpoint name and log information\n    feature_name = 'hsrp_engine'\n", "id": "1428497", "language": "Python", "matching_score": 3.7783780097961426, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/disableenable/hsrp/nxos/disableenable.py" }, { "content": "'''NXOS Implementation for Pim modify triggers'''\n\n# Genie Libs\nfrom genie.libs.sdk.libs.utils.mapping import Mapping\nfrom genie.libs.sdk.triggers.modify.modify import TriggerModify\n\n# Which key to exclude for Pim Ops comparison\npim_exclude = ['maker']\n\n\nclass TriggerModifyPimNeighborFilter(TriggerModify):\n    \"\"\"Modify dynamically learned PIM interface(s)'s neighbor-filter then restore the\n    configuration by reapplying the whole running configuration.\"\"\"\n\n    __description__ = \"\"\"Modify dynamically learned PIM interface(s)'s neighbor-filter\n    then restore the configuration by reapplying the whole running configuration.\n\n    trigger_datafile:\n        Mandatory:\n            timeout:\n                max_time (`int`): Maximum wait time for the trigger,\n                                in second. Default: 180\n                interval (`int`): Wait time between iteration when looping is needed,\n                                in second. Default: 15\n                method (`str`): Method to recover the device configuration,\n                              Support methods:\n                                'checkpoint': Rollback the configuration by\n                                              checkpoint (nxos),\n                                              archive file (iosxe),\n                                              load the saved running-config file on disk (iosxr)\n        Optional:\n            tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n                                 restored to the reference rate,\n                                 in second. Default: 60\n            tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n                               in second. Default: 10\n            timeout_recovery:\n                Buffer recovery timeout make sure devices are recovered at the end\n                of the trigger execution. Used when previous timeouts have been exhausted.\n\n                max_time (`int`): Maximum wait time for the last step of the trigger,\n                                in second. Default: 180\n                interval (`int`): Wait time between iteration when looping is needed,\n                                in second. Default: 15\n            static:\n                The keys below are dynamically learnt by default.\n                However, they can also be set to a custom value when provided in the trigger datafile.\n\n                vrf: `str`\n                address_family: `str`\n                interface: `str`\n                neighbor_filter: `str`\n\n                (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n                      OR\n                      interface: 'Ethernet1/1/1' (Specific value)\n    steps:\n        1. Learn Pim Ops object and store the PIM interface(s)'s neighbor-filter\n           if has any, otherwise, SKIP the trigger\n        2. Save the current device configurations through \"method\" which user uses\n        3. Modify the learned PIM interface(s)'s neighbor-filter from step 1\n           with Pim Conf object\n        4. Verify the PIM interface(s)'s neighbor-filter from step 3 is\n           reflected in device configuration\n        5. Recover the device configurations to the one in step 2\n        6. 
Learn Pim Ops again and verify it is the same as the Ops in step 1\n\n \"\"\"\n MODIFY_NAME = 'modified_pim_neighbor_policy'\n\n # Mapping of Information between Ops and Conf\n # Also permit to dictate which key to verify\n mapping = Mapping(requirements={\n 'ops.pim.pim.Pim':{\n 'requirements':[\\\n ['info', 'vrf', '(?P<vrf>.*)', 'interfaces',\n '(?P<interface>.*)', 'address_family', '(?P<address_family>.*)',\n 'neighbor_filter', '(?P<neighbor_filter>.*)'],\n ['info', 'vrf', '(?P<vrf>.*)', 'interfaces',\n '(?P<interface>.*)', 'address_family', '(?P<address_family>.*)',\n 'oper_status', 'up']],\n 'kwargs':{'attributes': [\n 'info[vrf][(.*)][interfaces][(.*)][address_family][(.*)][oper_status]',\n 'info[vrf][(.*)][interfaces][(.*)][address_family][(.*)][neighbor_filter]']},\n 'exclude': pim_exclude}},\n config_info={'conf.pim.Pim':{\n 'requirements':[\n ['device_attr', '{uut}', 'vrf_attr', '(?P<vrf>.*)',\n 'address_family_attr', '(?P<address_family>.*)', 'interface_attr',\n '(?P<interface>.*)', 'neighbor_filter', MODIFY_NAME]],\n 'verify_conf':False,\n 'kwargs':{}}},\n verify_ops={\n 'ops.pim.pim.Pim':{\n 'requirements':[\\\n ['info', 'vrf', '(?P<vrf>.*)', 'interfaces',\n '(?P<interface>.*)', 'address_family', '(?P<address_family>.*)',\n 'neighbor_filter', MODIFY_NAME],\n ['info', 'vrf', '(?P<vrf>.*)', 'interfaces',\n '(?P<interface>.*)', 'address_family', '(?P<address_family>.*)',\n 'oper_status', 'up']],\n 'kwargs':{'attributes': [\n 'info[vrf][(.*)][interfaces][(.*)][address_family][(.*)][oper_status]',\n 'info[vrf][(.*)][interfaces][(.*)][address_family][(.*)][neighbor_filter]']},\n 'exclude': pim_exclude}},\n num_values={'vrf': 1, 'address_family': 1, 'interface': 1})", "id": "5998660", "language": "Python", "matching_score": 7.1715312004089355, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/modify/pim/nxos/modify.py" }, { "content": "'''NXOS Implementation for Igmp modify triggers'''\n\n# python\nfrom copy import deepcopy \n\n# pyats\nfrom ats import aetest\n\n# Genie Libs\nfrom genie.libs.sdk.libs.utils.mapping import Mapping\nfrom genie.libs.sdk.triggers.modify.modify import TriggerModify\n\n# pyats\nfrom ats.utils.objects import Not, NotExists\n\n# Which key to exclude for Igmp Ops comparison\nigmp_exclude = ['maker', 'expire', 'up_time']\n\n\nclass TriggerModifyIgmpVersion(TriggerModify):\n \"\"\"Modify dynamically learned enabled Igmp interface(s) version then restore the\n configuration by reapplying the whole running configuration.\"\"\"\n\n __description__ = \"\"\"Modify dynamically learned enabled Igmp interface(s) version\n then restore the configuration by reapplying the whole running configuration.\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery: \n Buffer recovery timeout make sure devices are recovered at the end\n of the trigger execution. 
Used when previous timeouts have been exhausted.\n\n                max_time (`int`): Maximum wait time for the last step of the trigger,\n                                in second. Default: 180\n                interval (`int`): Wait time between iteration when looping is needed,\n                                in second. Default: 15\n            static:\n                The keys below are dynamically learnt by default.\n                However, they can also be set to a custom value when provided in the trigger datafile.\n\n                interface: `str`\n                vrf: `str`\n\n                (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n                      OR\n                      interface: 'Ethernet1/1/1' (Specific value)\n\n    steps:\n        1. Learn Igmp Ops object and store the Igmp interface version\n           if has any, otherwise, SKIP the trigger\n        2. Save the current device configurations through \"method\" which user uses\n        3. Modify the learned Igmp interface version from step 1\n           with Igmp Conf object\n        4. Verify the Igmp interface version from step 3 is reflected in device configuration\n        5. Recover the device configurations to the one in step 2\n        6. Learn Igmp Ops again and verify it is the same as the Ops in step 1\n\n    \"\"\"\n    # Mapping of Information between Ops and Conf\n    # Also permit to dictate which key to verify\n    mapping = Mapping(requirements={\n                    'ops.igmp.igmp.Igmp':{\n                          'requirements':[\\\n                              ['info', 'vrfs', '(?P<vrf>.*)', 'interfaces',\n                               '(?P<interface>.*)', 'enable', True],\n                              ['info', 'vrfs', '(?P<vrf>.*)', 'interfaces',\n                               '(?P<interface>.*)', 'version', '(?P<version>2)']],\n                          'all_keys': True,\n                          'kwargs':{'attributes': [\n                              'info[vrfs][(.*)]']},\n                          'exclude': igmp_exclude}},\n                      config_info={'conf.igmp.Igmp':{\n                          'requirements':[\n                              ['device_attr', '{uut}', 'vrf_attr', '(?P<vrf>.*)',\n                               'interface_attr', '(?P<interface>.*)', 'version',\n                               3]],\n                          'verify_conf':False,\n                          'kwargs':{}}},\n                      verify_ops={\n                          'ops.igmp.igmp.Igmp':{\n                                'requirements':[\\\n                                    ['info', 'vrfs', '(?P<vrf>.*)', 'interfaces',\n                                     '(?P<interface>.*)', 'version', 3]],\n                                'kwargs':{'attributes': [\n                                    'info[vrfs][(.*)]']},\n                                'exclude': igmp_exclude}},\n                      num_values={'vrf': 1, 'interface': 1})\n", "id": "1809368", "language": "Python", "matching_score": 5.378241539001465, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/modify/igmp/nxos/modify.py" }, { "content": "'''Implementation for vlan modify triggers'''\nimport re\nimport logging\n\n\nlog = logging.getLogger(__name__)\n\n# Genie Libs\nfrom genie.libs.sdk.libs.utils.mapping import Mapping\nfrom genie.libs.sdk.triggers.modify.modify import TriggerModify\nfrom ats import aetest\n\n# Which key to exclude for VXLAN Ops comparison\nvlan_exclude = ['maker', 'uptime']\n\n\nclass TriggerModifyVlanVnsegment(TriggerModify):\n    \"\"\"Modify and revert the vn-segment for dynamically learned vlan(s).\"\"\"\n\n    __description__ = \"\"\"Modify and revert the vn-segment for dynamically learned vlan(s).\n\n    trigger_datafile:\n        Mandatory:\n            timeout:\n                max_time (`int`): Maximum wait time for the trigger,\n                                in second. Default: 180\n                interval (`int`): Wait time between iteration when looping is needed,\n                                in second. Default: 15\n                method (`str`): Method to recover the device configuration,\n                              Support methods:\n                                'checkpoint': Rollback the configuration by\n                                              checkpoint (nxos),\n                                              archive file (iosxe),\n                                              load the saved running-config file on disk (iosxr)\n        Optional:\n            tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n                                 restored to the reference rate,\n                                 in second. Default: 60\n            tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n                               in second. 
Default: 10\n            timeout_recovery:\n                Buffer recovery timeout make sure devices are recovered at the end\n                of the trigger execution. Used when previous timeouts have been exhausted.\n\n                max_time (`int`): Maximum wait time for the last step of the trigger,\n                                in second. Default: 180\n                interval (`int`): Wait time between iteration when looping is needed,\n                                in second. Default: 15\n            static:\n                The keys below are dynamically learnt by default.\n                However, they can also be set to a custom value when provided in the trigger datafile.\n\n                vlan: `str`\n                vn_segment: `int`\n\n                (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n                      OR\n                      interface: 'Ethernet1/1/1' (Specific value)\n\n    steps:\n        1. Learn Vlan Ops object and store the vlan which is active and has a vn-segment.\n           SKIP the trigger if there is no VLAN(s) found\n        2. Save the current device configurations through \"method\" which user uses\n        3. Modify the vn-segment of the learned VLAN from step 1 with VLAN Conf object\n        4. Verify the vn-segment of the learned VLAN from step 1\n           changes to the modified value in step 3.\n        5. Recover the device configurations to the one in step 2\n        6. Learn VLAN Ops again and verify it is the same as the Ops in step 1\n\n    \"\"\"\n\n    VN_SEGMENT = 1111\n\n    @aetest.test\n    def modify_configuration(self, uut, abstract, steps):\n        '''Modify configuration on the uut device\n\n        Args:\n            uut (`obj`): Device object.\n            abstract (`obj`): Abstract object.\n            steps (`step obj`): aetest step object\n\n        Returns:\n            None\n\n        Raises:\n            pyATS Results\n        '''\n
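        # Sketch of what follows (a reading of the code, not a documented API):\n        # the CLI template keeps the Mapping's (?P<vlan>.*) placeholder, and\n        # self.mapping._path_population() substitutes the learned vlan id into\n        # the token list; the rejoined command then flaps vn-segment under\n        # that vlan so the change takes effect.\n        # Flap vlan for config change to take effect\n        cmd = \"vlan (?P<vlan>.*)\\n\" \\\n              \" no vn-segment\\n\" \\\n              \" vn-segment {}\\n\".format(self.VN_SEGMENT)\n        x = re.findall(r'\\S+|\\n', cmd)\n        req = self.mapping._path_population([x], uut)\n        req_str = []\n        for item in req[0]:\n            req_str.append(str(item))\n\n        cmd = ' '.join(req_str)\n        try:\n            uut.configure(cmd)\n        except Exception as e:\n            self.failed('Failed to modify the configuration', from_exception=e)\n\n    # Mapping of Information between Ops and Conf\n    # Also permit to dictate which key to verify\n    mapping = Mapping(requirements={'ops.vlan.vlan.Vlan':{\n                                        'requirements':[['info', 'vlans', '(?P<vlan>.*)', 'state', 'active'],\n                                                        ['info', 'vlans', '(?P<vlan>.*)', 'vn_segment_id', '(?P<vn_segment>^(?!1111).*)$']],\n                                        'kwargs':{'attributes':['info[vlans][(.*)][vn_segment_id]',\n                                                                'info[vlans][(.*)][state]']},\n                                        'all_keys':True,\n                                        'exclude': vlan_exclude}},\n                      config_info={'conf.vlan.Vlan':{\n                                       'requirements':[['device_attr', '{uut}', 'vlan_attr', '(?P<vlan>.*)',\\\n                                                        'vn_segment_id', VN_SEGMENT]],\n                                       'verify_conf':False,\n                                       'kwargs':{}}},\n                      verify_ops={'ops.vlan.vlan.Vlan':{\n                                      'requirements':[['info','vlans','(?P<vlan>.*)','state','active'],\n                                                      ['info','vlans','(?P<vlan>.*)','vn_segment_id',VN_SEGMENT]],\n                                      'kwargs':{'attributes':['info[vlans][(.*)][vn_segment_id]',\n                                                              'info[vlans][(.*)][state]']},\n                                      'exclude': vlan_exclude}},\n                      num_values={'vlan': 1})\n", "id": "9412270", "language": "Python", "matching_score": 5.205382347106934, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/modify/vlan/nxos/modify.py" }, { "content": "'''NXOS Implementation for Msdp modify triggers'''\n\n# python\nfrom copy import deepcopy\n\n# pyats\nfrom ats import aetest\n\n# Genie Libs\nfrom genie.libs.sdk.libs.utils.mapping import Mapping\nfrom genie.libs.sdk.triggers.modify.modify import TriggerModify\n\n# pyats\nfrom ats.utils.objects import Not, NotExists\n\n# Which key to exclude for Msdp Ops comparison\nmsdp_exclude = ['maker', 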
'elapsed_time', 'discontinuity_time',\n 'keepalive', 'total', 'up_time', 'expire', 'remote',\n 'last_message_received', 'num_of_comparison', 'rpf_failure',\n 'total_accept_count', 'total_reject_count', 'notification']\n\n# Which key to exclude for Interface Ops comparison\ninterface_exclude = ['maker', 'last_change','in_rate','in_rate_pkts',\n 'out_rate', 'out_rate_pkts', 'in_octets',\n 'in_pkts', 'in_unicast_pkts', 'out_octets',\n 'out_pkts', 'out_unicast_pkts', 'out_multicast_pkts',\n 'in_multicast_pkts', 'last_clear', 'in_broadcast_pkts',\n 'out_broadcast_pkts', 'bandwidth', 'load_interval',\n 'port_speed', 'in_crc_errors', 'in_discards',\n 'unnumbered', '(Tunnel.*)', 'accounting']\n\n\nclass TriggerModifyMsdpOriginatorId(TriggerModify):\n \"\"\"Modify dynamically learned MSDP peer(s) originator-id then restore the\n configuration by reapplying the whole running configuration.\"\"\"\n\n __description__ = \"\"\"Modify dynamically learned MSDP peer(s) originator-id then restore the\n configuration by reapplying the whole running configuration.\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery: \n Buffer recovery timeout make sure devices are recovered at the end\n of the trigger execution. Used when previous timeouts have been exhausted.\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n vrf: `str`\n originator_id: `str`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn Msdp Ops object and store the MSDP originator-id\n if has any, otherwise, SKIP the trigger\n 2. Save the current device configurations through \"method\" which user uses\n 3. Modify the learned MSDP originator-id from step 1 \n with Msdp Conf object\n 4. Verify the MSDP originator-id from step 3 is reflected in device configuration\n 5. Recover the device configurations to the one in step 2\n 6. 
Learn Msdp Ops again and verify it is the same as the Ops in step 1\n\n \"\"\"\n # Mapping of Information between Ops and Conf\n # Also permit to dictate which key to verify\n mapping = Mapping(requirements={'ops.msdp.msdp.Msdp':{\n 'requirements':[\\\n ['info', 'vrf', '(?P<vrf>.*)', 'global',\n 'originator_id', '(?P<originator_id>.*)']],\n 'kwargs':{'attributes': [\n 'info[vrf][(.*)][global][originator_id]',\n 'info[vrf][(.*)][peer][(.*)][session_state]']},\n 'exclude': msdp_exclude},\n 'ops.interface.interface.Interface':{\n 'requirements':[\\\n ['info', '(?P<modify_originator_id>.*)', 'ipv4',\n '(?P<ipv4>.*)', 'ip', '(?P<diff_originator_ip>.*)'],\n ['info', '(?P<modify_originator_id>.*)', 'vrf',\n '(?P<vrf>.*)'],\n ['info', '(?P<modify_originator_id>.*)', 'oper_status',\n 'up']],\n 'all_keys': True,\n 'kwargs':{'attributes': [\n 'info[(.*)][ipv4][(.*)][ip]',\n 'info[(.*)][vrf]',\n 'info[(.*)][oper_status]']},\n 'exclude': interface_exclude}},\n config_info={'conf.msdp.Msdp':{\n 'requirements':[\n ['device_attr', '{uut}', 'vrf_attr', '(?P<vrf>.*)',\n 'originating_rp', '(?P<modify_originator_id>.*)']],\n 'verify_conf':False,\n 'kwargs':{}}},\n verify_ops={'ops.msdp.msdp.Msdp':{\n 'requirements':[\\\n ['info', 'vrf', '(?P<vrf>.*)', 'global',\n 'originator_id', '(?P<diff_originator_ip>.*)']],\n 'kwargs':{'attributes': [\n 'info[vrf][(.*)][global][originator_id]',\n 'info[vrf][(.*)][peer][(.*)][session_state]']},\n 'exclude': msdp_exclude},\n 'ops.interface.interface.Interface':{\n 'requirements':[\\\n ['info', '(?P<modify_originator_id>.*)', 'oper_status',\n 'up']],\n 'all_keys': True,\n 'kwargs':{'attributes': [\n 'info[(.*)][ipv4][(.*)][ip]',\n 'info[(.*)][vrf]',\n 'info[(.*)][oper_status]']},\n 'exclude': interface_exclude}},\n num_values={'vrf': 1, 'peer': 1, 'modify_originator_id': 1})\n\n\nclass TriggerModifyMsdpSaFilterIn(TriggerModify):\n \"\"\"Modify dynamically learned MSDP peer(s) sa-filter in then restore the\n configuration by reapplying the whole running configuration.\"\"\"\n\n __description__ = \"\"\"Modify dynamically learned MSDP peer(s) sa-filter in then restore the\n configuration by reapplying the whole running configuration.\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery: \n Buffer recovery timeout make sure devices are recovered at the end\n of the trigger execution. Used when previous timeouts have been exhausted.\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. 
static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n vrf: `str`\n peer: `str`\n sa_filter_in: `str`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn Msdp Ops object and store the "established" MSDP peer(s) sa-filter in\n if has any, otherwise, SKIP the trigger\n 2. Save the current device configurations through "method" which user uses\n 3. Modify the learned MSDP peer(s) sa-filter in from step 1 \n with Msdp Conf object\n 4. Verify the MSDP peer(s) sa-filter in from step 3 is reflected in device configuration\n 5. Recover the device configurations to the one in step 2\n 6. Learn Msdp Ops again and verify it is the same as the Ops in step 1\n\n """\n MODIFY_NAME = 'modified_sa_filter_in'\n # Mapping of Information between Ops and Conf\n # Also permit to dictate which key to verify\n mapping = Mapping(requirements={'ops.msdp.msdp.Msdp':{\n 'requirements':[\\\n ['info', 'vrf', '(?P<vrf>.*)', 'peer',\n '(?P<peer>.*)', 'session_state', 'established'],\n ['info', 'vrf', '(?P<vrf>.*)', 'peer', '(?P<peer>.*)', \n 'sa_filter', 'in', '(?P<sa_filter_in>.*)']],\n 'kwargs':{'attributes': [\n 'info[vrf][(.*)][peer][(.*)][session_state]',\n 'info[vrf][(.*)][peer][(.*)][sa_filter]']},\n 'exclude': msdp_exclude}},\n config_info={'conf.msdp.Msdp':{\n 'requirements':[\n ['device_attr', '{uut}', 'vrf_attr', '(?P<vrf>.*)',\n 'peer_attr', '(?P<peer>.*)', 'sa_filter_in', MODIFY_NAME]],\n 'verify_conf':False,\n 'kwargs':{}}},\n verify_ops={'ops.msdp.msdp.Msdp':{\n 'requirements':[\\\n ['info', 'vrf', '(?P<vrf>.*)', 'peer', '(?P<peer>.*)', \n 'sa_filter', 'in', MODIFY_NAME]],\n 'kwargs':{'attributes': ['info[vrf][(.*)][peer][(.*)][session_state]',\n 'info[vrf][(.*)][peer][(.*)][sa_filter]']},\n 'exclude': msdp_exclude}},\n num_values={'vrf': 1, 'peer': 1})\n\n\nclass TriggerModifyMsdpSaFilterOut(TriggerModify):\n """Modify dynamically learned MSDP peer(s) sa-filter out then restore the\n configuration by reapplying the whole running configuration."""\n\n __description__ = """Modify dynamically learned MSDP peer(s) sa-filter out then restore the\n configuration by reapplying the whole running configuration.\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery: \n Buffer recovery timeout make sure devices are recovered at the end\n of the trigger execution. Used when previous timeouts have been exhausted.\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. 
Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n vrf: `str`\n peer: `str`\n sa_filter_out: `str`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n\n steps:\n 1. Learn Msdp Ops object and store the "established" MSDP peer(s) sa-filter out\n if has any, otherwise, SKIP the trigger\n 2. Save the current device configurations through "method" which user uses\n 3. Modify the learned MSDP peer(s) sa-filter out from step 1 \n with Msdp Conf object\n 4. Verify the MSDP peer(s) sa-filter out from step 3 is reflected in device configuration\n 5. Recover the device configurations to the one in step 2\n 6. Learn Msdp Ops again and verify it is the same as the Ops in step 1\n\n """\n MODIFY_NAME = 'modified_sa_filter_out'\n # Mapping of Information between Ops and Conf\n # Also permit to dictate which key to verify\n mapping = Mapping(requirements={'ops.msdp.msdp.Msdp':{\n 'requirements':[\\\n ['info', 'vrf', '(?P<vrf>.*)', 'peer',\n '(?P<peer>.*)', 'session_state', 'established'],\n ['info', 'vrf', '(?P<vrf>.*)', 'peer', '(?P<peer>.*)', \n 'sa_filter', 'out', '(?P<sa_filter_out>.*)']],\n 'kwargs':{'attributes': [\n 'info[vrf][(.*)][peer][(.*)][session_state]',\n 'info[vrf][(.*)][peer][(.*)][sa_filter]']},\n 'exclude': msdp_exclude}},\n config_info={'conf.msdp.Msdp':{\n 'requirements':[\n ['device_attr', '{uut}', 'vrf_attr', '(?P<vrf>.*)',\n 'peer_attr', '(?P<peer>.*)', 'sa_filter_out', MODIFY_NAME]],\n 'verify_conf':False,\n 'kwargs':{}}},\n verify_ops={'ops.msdp.msdp.Msdp':{\n 'requirements':[\\\n ['info', 'vrf', '(?P<vrf>.*)', 'peer', '(?P<peer>.*)', \n 'sa_filter', 'out', MODIFY_NAME]],\n 'kwargs':{'attributes': ['info[vrf][(.*)][peer][(.*)][session_state]',\n 'info[vrf][(.*)][peer][(.*)][sa_filter]']},\n 'exclude': msdp_exclude}},\n num_values={'vrf': 1, 'peer': 1})\n\n\nclass TriggerModifyMsdpSaLimit(TriggerModify):\n """Modify dynamically learned MSDP peer(s) sa-limit then restore the\n configuration by reapplying the whole running configuration."""\n\n __description__ = """Modify dynamically learned MSDP peer(s) sa-limit then restore the\n configuration by reapplying the whole running configuration.\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery: \n Buffer recovery timeout make sure devices are recovered at the end\n of the trigger execution. Used when previous timeouts have been exhausted.\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. 
Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n vrf: `str`\n peer: `str`\n sa_limit: `int`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n\n steps:\n 1. Learn Msdp Ops object and store the \"established\" MSDP peer(s) sa-limit\n if has any, otherwise, SKIP the trigger\n 2. Save the current device configurations through \"method\" which user uses\n 3. Modify the learned MSDP peer(s) sa-limit from step 1 \n with Msdp Conf object\n 4. Verify the MSDP peer(s) sa-limit from step 3 is reflected in device configuration\n 5. Recover the device configurations to the one in step 2\n 6. Learn Msdp Ops again and verify it is the same as the Ops in step 1\n\n \"\"\"\n MODIFY_NAME = 12345\n # Mapping of Information between Ops and Conf\n # Also permit to dictate which key to verify\n mapping = Mapping(requirements={'ops.msdp.msdp.Msdp':{\n 'requirements':[\\\n ['info', 'vrf', '(?P<vrf>.*)', 'peer',\n '(?P<peer>.*)', 'session_state', 'established'],\n ['info', 'vrf', '(?P<vrf>.*)', 'peer',\n '(?P<peer>.*)', 'sa_limit', '(?P<sa_limit>\\d+)']],\n 'kwargs':{'attributes': [\n 'info[vrf][(.*)][peer][(.*)][session_state]',\n 'info[vrf][(.*)][peer][(.*)][sa_limit]']},\n 'exclude': msdp_exclude}},\n config_info={'conf.msdp.Msdp':{\n 'requirements':[\n ['device_attr', '{uut}', 'vrf_attr', '(?P<vrf>.*)',\n 'peer_attr', '(?P<peer>.*)', 'sa_limit', MODIFY_NAME]],\n 'verify_conf':False,\n 'kwargs':{}}},\n verify_ops={'ops.msdp.msdp.Msdp':{\n 'requirements':[\\\n ['info', 'vrf', '(?P<vrf>.*)', 'peer',\n '(?P<peer>.*)', 'sa_limit', str(MODIFY_NAME)]],\n 'kwargs':{'attributes': [\n 'info[vrf][(.*)][peer][(.*)][session_state]',\n 'info[vrf][(.*)][peer][(.*)][sa_limit]']},\n 'exclude': msdp_exclude}},\n num_values={'vrf': 1, 'peer': 1})\n\n\nclass TriggerModifyMsdpMeshGroup(TriggerModify):\n \"\"\"Modify dynamically learned MSDP peer(s) mesh-group then restore the\n configuration by reapplying the whole running configuration.\"\"\"\n\n __description__ = \"\"\"Modify dynamically learned MSDP peer(s) mesh-group then restore the\n configuration by reapplying the whole running configuration.\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery: \n Buffer recovery timeout make sure devices are recovered at the end\n of the trigger execution. Used when previous timeouts have been exhausted.\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. 
Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n vrf: `str`\n peer: `str`\n mesh_group: `str`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n\n steps:\n 1. Learn Msdp Ops object and store the 'established' MSDP peer(s) mesh group\n if has any, otherwise, SKIP the trigger\n 2. Save the current device configurations through \"method\" which user uses\n 3. Modify the learned MSDP peer(s) mesh group from step 1 \n with Msdp Conf object\n 4. Verify the MSDP peer(s) mesh group from step 3 is reflected in device configuration\n 5. Recover the device configurations to the one in step 2\n 6. Learn Msdp Ops again and verify it is the same as the Ops in step 1\n\n \"\"\"\n MODIFY_NAME = 'modified_mesh_group'\n # Mapping of Information between Ops and Conf\n # Also permit to dictate which key to verify\n mapping = Mapping(requirements={'ops.msdp.msdp.Msdp':{\n 'requirements':[\\\n ['info', 'vrf', '(?P<vrf>.*)', 'peer',\n '(?P<peer>.*)', 'session_state', 'established'],\n ['info', 'vrf', '(?P<vrf>.*)', 'peer',\n '(?P<peer>.*)', 'mesh_group', '(?P<mesh_group>.*)']],\n 'kwargs':{'attributes': [\n 'info[vrf][(.*)][peer][(.*)][session_state]',\n 'info[vrf][(.*)][peer][(.*)][mesh_group]']},\n 'exclude': msdp_exclude}},\n config_info={'conf.msdp.Msdp':{\n 'requirements':[\n ['device_attr', '{uut}', 'vrf_attr', '(?P<vrf>.*)',\n 'peer_attr', '(?P<peer>.*)', 'mesh_group', MODIFY_NAME]],\n 'verify_conf':False,\n 'kwargs':{}}},\n verify_ops={'ops.msdp.msdp.Msdp':{\n 'requirements':[\\\n ['info', 'vrf', '(?P<vrf>.*)', 'peer',\n '(?P<peer>.*)', 'mesh_group', MODIFY_NAME]],\n 'kwargs':{'attributes': [\n 'info[vrf][(.*)][peer][(.*)][session_state]',\n 'info[vrf][(.*)][peer][(.*)][mesh_group]']},\n 'exclude': msdp_exclude}},\n num_values={'vrf': 1, 'peer': 1})\n\n\nclass TriggerModifyMsdpKeepaliveHoldtime(TriggerModify):\n \"\"\"Modify dynamically learned MSDP peer(s) keepalive&holdtime interval\n then restore the configuration by reapplying the whole running configuration.\"\"\"\n\n __description__ = \"\"\"Modify dynamically learned MSDP peer(s) keepalive&holdtime interval\n then restore the configuration by reapplying the whole running configuration.\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery: \n Buffer recovery timeout make sure devices are recovered at the end\n of the trigger execution. Used when previous timeouts have been exhausted.\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. 
Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n vrf: `str`\n peer: `str`\n keepalive_interval: `int`\n holdtime_interval: `int`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn Msdp Ops object and store the 'established' MSDP peer(s)\n keepalive&holdtime interval if has any, otherwise, SKIP the trigger\n 2. Save the current device configurations through \"method\" which user uses\n 3. Modify the learned MSDP peer(s) keepalive&holdtime interval from step 1 \n with Msdp Conf object\n 4. Verify the MSDP peer(s) keepalive&holdtime interval from step 3\n is reflected in device configuration\n 5. Recover the device configurations to the one in step 2\n 6. Learn Msdp Ops again and verify it is the same as the Ops in step 1\n\n \"\"\"\n # <1-60> Keepalive interval in seconds\n keepalive_interval = 33\n # <1-90> Keepalive timeout in seconds\n holdtime_interval = 66\n\n # Mapping of Information between Ops and Conf\n # Also permit to dictate which key to verify\n mapping = Mapping(requirements={'ops.msdp.msdp.Msdp':{\n 'requirements':[\\\n ['info', 'vrf', '(?P<vrf>.*)', 'peer', '(?P<peer>.*)',\n 'timer', 'keepalive_interval', '(?P<keepalive_interval>.*)'],\n ['info', 'vrf', '(?P<vrf>.*)', 'peer', '(?P<peer>.*)',\n 'timer', 'holdtime_interval', '(?P<holdtime_interval>.*)']],\n 'all_keys': True,\n 'kwargs':{'attributes': [\n 'info[vrf][(.*)][peer][(.*)][timer]',\n 'info[vrf][(.*)][peer][(.*)][session_state]']},\n 'exclude': msdp_exclude}},\n config_info={'conf.msdp.Msdp':{\n 'requirements':[\n ['device_attr', '{uut}', 'vrf_attr', '(?P<vrf>.*)', 'peer_attr',\n '(?P<peer>.*)', 'keepalive_interval', keepalive_interval],\n ['device_attr', '{uut}', 'vrf_attr', '(?P<vrf>.*)', 'peer_attr',\n '(?P<peer>.*)', 'holdtime_interval', holdtime_interval]],\n 'verify_conf':False,\n 'kwargs':{}}},\n verify_ops={'ops.msdp.msdp.Msdp':{\n 'requirements':[\\\n ['info', 'vrf', '(?P<vrf>.*)', 'peer', '(?P<peer>.*)',\n 'timer', 'keepalive_interval', keepalive_interval],\n ['info', 'vrf', '(?P<vrf>.*)', 'peer', '(?P<peer>.*)',\n 'timer', 'holdtime_interval', holdtime_interval]],\n 'kwargs':{'attributes': ['info[vrf][(.*)][peer][(.*)][timer]',\n 'info[vrf][(.*)][peer][(.*)][session_state]']},\n 'exclude': msdp_exclude}},\n num_values={'vrf': 1, 'peer': 1})\n\n\nclass TriggerModifyMsdpReconnectInterval(TriggerModify):\n \"\"\"Modify dynamically learned MSDP peer(s) reconnect interval\n then restore the configuration by reapplying the whole running configuration.\"\"\"\n\n __description__ = \"\"\"Modify dynamically learned MSDP peer(s) reconnect interval\n then restore the configuration by reapplying the whole running configuration.\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. 
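Default: 10\n\n (e.g.) note: `tgn_timeout`/`tgn_delay` only matter when traffic\n generators are in use; conceptually the traffic rate is polled every\n `tgn_delay` seconds until it is back at the reference rate or\n `tgn_timeout` is exhausted (a description of the semantics, not of\n the exact genie implementation).\n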
timeout_recovery: \n Buffer recovery timeout make sure devices are recovered at the end\n of the trigger execution. Used when previous timeouts have been exhausted.\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n vrf: `str`\n peer: `str`\n connect_retry_interval: `int`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn Msdp Ops object and store the MSDP reconnect interval\n if has any, otherwise, SKIP the trigger\n 2. Save the current device configurations through "method" which user uses\n 3. Modify the learned MSDP reconnect interval from step 1 \n with Msdp Conf object\n 4. Verify the MSDP reconnect interval from step 3 is reflected in device configuration\n 5. Recover the device configurations to the one in step 2\n 6. Learn Msdp Ops again and verify it is the same as the Ops in step 1\n\n """\n # <1-60> Interval in seconds\n connect_retry_interval = 33\n # Mapping of Information between Ops and Conf\n # Also permit to dictate which key to verify\n mapping = Mapping(requirements={'ops.msdp.msdp.Msdp':{\n 'requirements':[\\\n ['info', 'vrf', '(?P<vrf>.*)', 'peer', '(?P<peer>.*)', 'timer',\n 'connect_retry_interval', '(?P<connect_retry_interval>.*)']],\n 'kwargs':{'attributes': [\n 'info[vrf][(.*)][peer][(.*)][timer]',\n 'info[vrf][(.*)][peer][(.*)][session_state]']},\n 'exclude': msdp_exclude}},\n config_info={'conf.msdp.Msdp':{\n 'requirements':[\n # the retry interval is configured once at the vrf level\n # ('global_connect_retry_interval') and verified against\n # each peer's connect_retry_interval timer above\n ['device_attr', '{uut}', 'vrf_attr', '(?P<vrf>.*)',\n 'global_connect_retry_interval', connect_retry_interval]],\n 'verify_conf':False,\n 'kwargs':{}}},\n verify_ops={'ops.msdp.msdp.Msdp':{\n 'requirements':[\\\n ['info', 'vrf', '(?P<vrf>.*)', 'peer', '(?P<peer>.*)', 'timer',\n 'connect_retry_interval', connect_retry_interval]], \n 'kwargs':{'attributes': ['info[vrf][(.*)][peer][(.*)][timer]',\n 'info[vrf][(.*)][peer][(.*)][session_state]']},\n 'exclude': msdp_exclude}},\n num_values={'vrf': 1, 'peer': 'all'})\n\n\nclass TriggerModifyMsdpDescription(TriggerModify):\n """Modify dynamically learned MSDP peer(s) description\n then restore the configuration by reapplying the whole running configuration."""\n\n __description__ = """Modify dynamically learned MSDP peer(s) description\n then restore the configuration by reapplying the whole running configuration.\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery: \n Buffer recovery timeout make sure devices are recovered at the end\n of the trigger execution. 
Used when previous timeouts have been exhausted.\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n vrf: `str`\n peer: `str`\n description: `str`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn Msdp Ops object and store the MSDP peer(s) description\n if has any, otherwise, SKIP the trigger\n 2. Save the current device configurations through \"method\" which user uses\n 3. Modify the learned MSDP peer(s) description from step 1 \n with Msdp Conf object\n 4. Verify the MSDP peer(s) description from step 3 is reflected in device configuration\n 5. Recover the device configurations to the one in step 2\n 6. Learn Msdp Ops again and verify it is the same as the Ops in step 1\n\n \"\"\"\n MODIFY_NAME = 'Modified Description'\n # Mapping of Information between Ops and Conf\n # Also permit to dictate which key to verify\n mapping = Mapping(requirements={'ops.msdp.msdp.Msdp':{\n 'requirements':[\\\n ['info', 'vrf', '(?P<vrf>.*)', 'peer',\n '(?P<peer>.*)', 'session_state', 'established'],\n ['info', 'vrf', '(?P<vrf>.*)', 'peer',\n '(?P<peer>.*)', 'description', '(?P<description>.*)']],\n 'kwargs':{'attributes': [\n 'info[vrf][(.*)][peer][(.*)][description]',\n 'info[vrf][(.*)][peer][(.*)][session_state]']},\n 'exclude': msdp_exclude}},\n config_info={'conf.msdp.Msdp':{\n 'requirements':[\n ['device_attr', '{uut}', 'vrf_attr', '(?P<vrf>.*)',\n 'peer_attr', '(?P<peer>.*)', 'description', MODIFY_NAME]],\n 'verify_conf':False,\n 'kwargs':{}}},\n verify_ops={'ops.msdp.msdp.Msdp':{\n 'requirements':[\\\n ['info', 'vrf', '(?P<vrf>.*)', 'peer',\n '(?P<peer>.*)', 'description', MODIFY_NAME]],\n 'kwargs':{'attributes': ['info[vrf][(.*)][peer][(.*)][description]',\n 'info[vrf][(.*)][peer][(.*)][session_state]']},\n 'exclude': msdp_exclude}},\n num_values={'vrf': 1, 'peer': 1})\n\n\nclass TriggerModifyMsdpPeerConnectedSource(TriggerModify):\n \"\"\"Modify dynamically learned MSDP 'established' peer(s) connect-source \n then restore the configuration by reapplying the whole running configuration.\"\"\"\n\n __description__ = \"\"\"Modify dynamically learned MSDP 'established' peer(s) connect-source \n then restore the configuration by reapplying the whole running configuration.\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery: \n Buffer recovery timeout make sure devices are recovered at the end\n of the trigger execution. Used when previous timeouts have been exhausted.\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. 
Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n vrf: `str`\n peer: `str`\n connect_source: `str`\n modify_connect_source: `str`\n ip: `str`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn Msdp Ops object and store the MSDP 'established' peer(s) connect-source \n if has any, otherwise, SKIP the trigger\n 2. Save the current device configurations through "method" which user uses\n 3. Modify the learned MSDP peer(s) connect-source from step 1 \n with Msdp Conf object\n 4. Verify the MSDP peer(s) connect-source from step 3 is reflected in device configuration\n 5. Recover the device configurations to the one in step 2\n 6. Learn Msdp Ops again and verify it is the same as the Ops in step 1\n\n """\n @aetest.test\n def modify_configuration(self, uut, abstract, steps):\n # shut/no-shut the msdp peer so the connect-source change takes effect\n original_conf = deepcopy(self.mapping.config_info['conf.msdp.Msdp']['requirements'])\n\n # shut down the peer first\n self.mapping.config_info['conf.msdp.Msdp']['requirements'] = \\\n [['device_attr', '{uut}', 'vrf_attr', '(?P<vrf>.*)', 'peer_attr', '(?P<peer>.*)', 'enable', False]]\n super().modify_configuration(uut, abstract, steps)\n\n # modify connected-source\n self.mapping.config_info['conf.msdp.Msdp']['requirements'] = deepcopy(original_conf)\n super().modify_configuration(uut, abstract, steps)\n\n # unshut the peer\n self.mapping.config_info['conf.msdp.Msdp']['requirements'] = \\\n [['device_attr', '{uut}', 'vrf_attr', '(?P<vrf>.*)', 'peer_attr', '(?P<peer>.*)', 'enable', True]]\n super().modify_configuration(uut, abstract, steps)\n\n # revert the requirements\n self.mapping.config_info['conf.msdp.Msdp']['requirements'] = original_conf\n\n @aetest.test\n def restore_configuration(self, uut, method, abstract, steps):\n # before rollback, shut down the modified peer so the rollback succeeds\n self.mapping.config_info['conf.msdp.Msdp']['requirements'] = \\\n [['device_attr', '{uut}', 'vrf_attr', '(?P<vrf>.*)', 'peer_attr', '(?P<peer>.*)', 'enable', False]]\n super().modify_configuration(uut, abstract, steps)\n\n # restore the router\n super().restore_configuration(uut, method, abstract)\n\n\n # Mapping of Information between Ops and Conf\n # Also permit to dictate which key to verify\n mapping = Mapping(requirements={'ops.msdp.msdp.Msdp':{\n 'requirements':[\\\n ['info', 'vrf', '(?P<vrf>.*)', 'peer',\n '(?P<peer>.*)', 'session_state', 'established'],\n ['info', 'vrf', '(?P<vrf>.*)', 'peer',\n '(?P<peer>.*)', 'connect_source', '(?P<connect_source>.*)']],\n 'all_keys': True,\n 'kwargs':{'attributes': [\n 'info[vrf][(.*)][peer][(.*)]']},\n 'exclude': msdp_exclude},\n 'ops.interface.interface.Interface':{\n 'requirements':[\\\n ['info', '(?P<modify_connect_source>.*)', 'ipv4',\n '(?P<ipv4>.*)', 'ip', '(?P<ip>.*)'],\n ['info', '(?P<modify_connect_source>.*)', 'vrf',\n '(?P<vrf>.*)'],\n ['info', '(?P<modify_connect_source>.*)', 'oper_status',\n 'up']],\n 'all_keys': True,\n 'kwargs':{'attributes': [\n 'info[(.*)][ipv4][(.*)][ip]',\n 'info[(.*)][vrf]',\n 'info[(.*)][oper_status]']},\n 'exclude': interface_exclude},\n 'conf.msdp.Msdp':{\n 'requirements':[\\\n ['device_attr', '{uut}', '_vrf_attr', '(?P<vrf>.*)',\n '_peer_attr', '(?P<peer>.*)', 
NotExists('peer_as')]],\n 'kwargs':{'attributes': [\n 'msdp[vrf_attr][(.*)][peer_attr][(.*)][peer_as]',\n 'msdp[vrf_attr][(.*)][peer_attr][(.*)][connected_source]']},\n 'exclude': msdp_exclude}},\n config_info={'conf.msdp.Msdp':{\n 'requirements':[\n ['device_attr', '{uut}', 'vrf_attr', '(?P<vrf>.*)',\n 'peer_attr', '(?P<peer>.*)', 'connected_source',\n '(?P<modify_connect_source>.*)']],\n 'verify_conf':False,\n 'kwargs':{}}},\n verify_ops={'ops.msdp.msdp.Msdp':{\n 'requirements':[\\\n ['info', 'vrf', '(?P<vrf>.*)', 'peer',\n '(?P<peer>.*)', 'connect_source', '(?P<modify_connect_source>.*)'],\n ['info', 'vrf', '(?P<vrf>.*)', 'peer',\n '(?P<peer>.*)', 'session_state', '(^(?!established).*)']],\n 'kwargs':{'attributes': ['info[vrf][(.*)][peer][(.*)]']},\n 'exclude': msdp_exclude},\n 'ops.interface.interface.Interface':{\n 'requirements':[\\\n ['info', '(?P<modify_connect_source>.*)', 'oper_status',\n 'up']],\n 'all_keys': True,\n 'kwargs':{'attributes': [\n 'info[(.*)][ipv4][(.*)][ip]',\n 'info[(.*)][vrf]',\n 'info[(.*)][oper_status]']},\n 'exclude': interface_exclude}},\n num_values={'vrf': 1, 'peer': 1, 'modify_connect_source': 1})\n\n\nclass TriggerModifyMsdpPeerAs(TriggerModify):\n \"\"\"Modify dynamically learned MSDP 'established' peer(s) remote-as\n then restore the configuration by reapplying the whole running configuration.\"\"\"\n\n __description__ = \"\"\"Modify dynamically learned MSDP 'established' peer(s) remote-as\n then restore the configuration by reapplying the whole running configuration.\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery: \n Buffer recovery timeout make sure devices are recovered at the end\n of the trigger execution. Used when previous timeouts have been exhausted.\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n vrf: `str`\n peer: `str`\n peer_as: `int`\n connected_source: `str`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn Msdp Ops object and store the MSDP 'established' peer(s) remote-as\n if has any, otherwise, SKIP the trigger\n 2. Save the current device configurations through \"method\" which user uses\n 3. Modify the learned MSDP peer(s) remote-as from step 1 \n with Msdp Conf object\n 4. Verify the MSDP peer(s) remote-as from step 3 is reflected in device configuration\n 5. Recover the device configurations to the one in step 2\n 6. 
Learn Msdp Ops again and verify it is the same as the Ops in step 1\n\n \"\"\"\n # change 0xx to xx\n MODIFY_NAME = '333'\n # Mapping of Information between Ops and Conf\n # Also permit to dictate which key to verify\n mapping = Mapping(requirements={'ops.msdp.msdp.Msdp':{\n 'requirements':[\\\n ['info', 'vrf', '(?P<vrf>.*)', 'peer',\n '(?P<peer>.*)', 'session_state', 'established']],\n 'kwargs':{'attributes': [\n 'info[vrf][(.*)][peer][(.*)]']},\n 'exclude': msdp_exclude},\n 'conf.msdp.Msdp':{\n 'requirements':[\\\n ['device_attr', '{uut}', '_vrf_attr', '(?P<vrf>.*)',\n '_peer_attr', '(?P<peer>.*)', 'peer_as', '(?P<peer_as>.*)'],\n ['device_attr', '{uut}', '_vrf_attr', '(?P<vrf>.*)',\n '_peer_attr', '(?P<peer>.*)', 'connected_source',\n '(?P<connected_source>.*)']],\n 'all_keys': True,\n 'kwargs':{'attributes': [\n 'msdp[vrf_attr][(.*)][peer_attr][(.*)][connected_source]',\n 'msdp[vrf_attr][(.*)][peer_attr][(.*)][peer_as]']},\n 'exclude': msdp_exclude}},\n config_info={'conf.msdp.Msdp':{\n 'requirements':[\n ['device_attr', '{uut}', 'vrf_attr', '(?P<vrf>.*)',\n 'peer_attr', '(?P<peer>.*)', 'peer_as', MODIFY_NAME],\n ['device_attr', '{uut}', 'vrf_attr', '(?P<vrf>.*)',\n 'peer_attr', '(?P<peer>.*)', 'connected_source',\n '(?P<connected_source>.*)']],\n 'verify_conf':False,\n 'kwargs':{}}},\n verify_ops={'ops.msdp.msdp.Msdp':{\n 'requirements':[\\\n ['info', 'vrf', '(?P<vrf>.*)', 'peer',\n '(?P<peer>.*)', 'peer_as', MODIFY_NAME]],\n 'kwargs':{'attributes': ['info[vrf][(.*)][peer][(.*)]']},\n 'exclude': msdp_exclude}},\n num_values={'vrf': 1, 'peer': 1})\n", "id": "8314742", "language": "Python", "matching_score": 6.248793125152588, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/modify/msdp/nxos/modify.py" }, { "content": "''' implementation for Msdp shut/noshut triggers'''\n\n# import python\nimport time\n\n# Genie Libs\nfrom genie.libs.sdk.libs.utils.mapping import Mapping\nfrom genie.libs.sdk.triggers.shutnoshut.shutnoshut import \\\n TriggerShutNoShut\n\n\nexclude = ['maker', 'elapsed_time', 'discontinuity_time',\n 'keepalive', 'total', 'up_time', 'expire', 'remote',\n 'last_message_received', 'num_of_comparison', 'rpf_failure',\n 'total_accept_count', 'total_reject_count', 'notification']\n\n\nclass TriggerShutNoShutMsdp(TriggerShutNoShut):\n \"\"\"Shut and unshut the dynamically learned Msdp peer(s).\"\"\"\n \n __description__ = \"\"\"Shut and unshut the dynamically learned Msdp peer(s).\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery: \n Buffer recovery timeout make sure devices are recovered at the end\n of the trigger execution. Used when previous timeouts have been exhausted.\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. 
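Default: 15\n\n (e.g.) the shut step amounts to disabling the learned peer through the\n Msdp Conf object, roughly as below (a minimal sketch only; the import\n path and the `uut`/`vrf`/`peer` values are assumptions, supplied by\n the trigger at run time):\n\n from genie.libs.conf.msdp import Msdp\n msdp = Msdp()\n msdp.device_attr[uut].vrf_attr[vrf].peer_attr[peer].enable = False\n msdp.build_config(apply=True)\n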
static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n vrf: `str`\n peer: `str`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn Msdp Ops object and store the Msdp 'established' peer(s)\n if has any, otherwise, SKIP the trigger\n 2. Shut the learned Msdp peer(s) from step 1 with Msdp Conf object\n 3. Verify the state of learned Msdp peer(s) from step 2 is "down"\n 4. Unshut the Msdp peer(s) with Msdp Conf object\n 5. Learn Msdp Ops again and verify it is the same as the Ops in step 1\n\n """\n mapping = Mapping(requirements={'ops.msdp.msdp.Msdp':{\n 'requirements':[\\\n ['info', 'vrf', '(?P<vrf>.*)', 'peer',\n '(?P<peer>.*)', 'session_state', 'established']],\n 'kwargs':{'attributes': [\n 'info[vrf][(.*)][peer][(.*)]']},\n 'exclude': exclude}},\n config_info={'conf.msdp.Msdp':{\n 'requirements':[\n ['device_attr', '{uut}', 'vrf_attr', '(?P<vrf>.*)',\n 'peer_attr', '(?P<peer>.*)', 'enable', False]],\n 'verify_conf':False,\n 'kwargs':{}}},\n verify_ops={'ops.msdp.msdp.Msdp':{\n 'requirements': [\\\n \t['info', 'vrf', '(?P<vrf>.*)', 'peer',\n '(?P<peer>.*)', 'session_state', 'admin-shutdown'],\n \t['info', 'vrf', '(?P<vrf>.*)', 'peer',\n '(?P<peer>.*)', 'enable', False]],\n 'kwargs':{'attributes':['info[vrf][(.*)][peer][(.*)]']},\n 'exclude': exclude}},\n num_values={'vrf': 1, 'peer': 1})\n\n", "id": "10298361", "language": "Python", "matching_score": 3.0942602157592773, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/shutnoshut/msdp/nxos/shutnoshut.py" }, { "content": "'''Nxos Implementation for Vxlan shutnoshut triggers'''\n\n# ats\nfrom genie.libs.sdk.libs.utils.mapping import Mapping\nfrom genie.libs.sdk.triggers.shutnoshut.shutnoshut import TriggerShutNoShut\n\n# Which key to exclude for Vxlan Ops comparison\n\ninterface_exclude = ['maker', 'last_change','in_rate','in_rate_pkts',\n 'out_rate', 'out_rate_pkts', 'in_octets',\n 'in_pkts', 'in_unicast_pkts', 'out_octets',\n 'out_pkts', 'out_unicast_pkts', 'out_multicast_pkts',\n 'in_multicast_pkts', 'last_clear', 'in_broadcast_pkts',\n 'out_broadcast_pkts', 'accounting']\n\nmultisite_exclude = ['elapsedtime','keepalive',\n 'keepaliverecvd','keepalivesent','lastread','lastwrite',\n 'msgrecvd','msgsent','neighbortableversion',\n 'tableversion','rtrefreshsent','updatesrecvd','updatessent',\n 'bytessent','bytesrecvd','localport','remoteport','connsdropped',\n 'connsestablished','fd','opensrecvd','openssent','prefixversion',\n 'bestpathnr','pathnr','advertisedto','tx_id','bytesattrs','memoryused','prefixreceived']\n\nnve_exclude = ['maker', 'uptime','up_time']\nl2route_exclude = ['total_memory','memory']\n\nclass TriggerShutNoShutNveOverlayInterface(TriggerShutNoShut):\n """Shut and unshut the dynamically learned Nve overlay interface(s)."""\n\n __description__ = """Shut and unshut the dynamically learned Nve overlay interface(s).\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. 
Default: 10\n timeout_recovery:\n Buffer recovery timeout make sure devices are recovered at the end\n of the trigger execution. Used when previous timeouts have been exhausted.\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n nve_name: `str`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn VxLan Ops object and verify if has any \"up\" Nve interface(s),\n otherwise, SKIP the trigger\n 2. Shut the learned Nve interface(s) from step 1 with Interface Conf object\n 3. Verify the state of learned Nve interface(s) from step 2 is \"down\"\n 4. Unshut the Nve interface(s) with Interface Conf object\n 5. Learn VxLan Ops again and verify it is the same as the Ops in step 1\n\n \"\"\"\n\n # Mapping of Information between Ops and Conf\n # Also permit to dictate which key to verify\n mapping = Mapping(requirements={'ops.vxlan.vxlan.Vxlan': {\n 'requirements': [['nve', '(?P<nve_name>.*)', 'if_state', 'up']],\n 'kwargs': {'attributes': [\n 'nve[(.*)][if_state]',\n 'nve[(.*)][vni][(.*)][vni]','l2route']},\n 'exclude': nve_exclude + ['peer_id','tx_id','total_memory','mac','prefix',\n 'memory','objects','total_mem','total_obj']}},\n config_info={'conf.interface.Interface': {\n 'requirements': [['enabled', False]],\n 'verify_conf': False,\n 'kwargs': {'mandatory': {'name': '(?P<nve_name>.*)',\n 'attach': False}}}},\n verify_ops={'ops.vxlan.vxlan.Vxlan': {\n 'requirements': [['nve', '(?P<nve_name>.*)', 'if_state', 'down']],\n 'kwargs': {'attributes': [\n 'nve[(.*)][if_state]',\n 'nve[(.*)][vni][(.*)][vni]','l2route']},\n 'exclude': nve_exclude + ['l2route']}},\n num_values={'nve_name': 1})\n\n\nclass TriggerShutNoShutNveLoopbackInterface(TriggerShutNoShut):\n \"\"\"Shut and unshut the dynamically learned Nve loopback interface(s).\"\"\"\n\n __description__ = \"\"\"Shut and unshut the dynamically learned Nve loopback interface(s).\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery:\n Buffer recovery timeout make sure devices are recovered at the end\n of the trigger execution. Used when previous timeouts have been exhausted.\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n nve_name: `str`\n source_if: `str`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn VxLan Ops object and verify if has any \"up\" Nve interface(s),\n otherwise, SKIP the trigger\n 2. 
Shut the learned Nve interface(s) from step 1 with Interface Conf object\n 3. Verify the state of learned Nve interface(s) from step 2 is \"down\"\n 4. Unshut the Nve interface(s) with Interface Conf object\n 5. Learn VxLan Ops again and verify it is the same as the Ops in step 1\n\n \"\"\"\n\n # Mapping of Information between Ops and Conf\n # Also permit to dictate which key to verify\n mapping = Mapping(requirements={'ops.vxlan.vxlan.Vxlan': {\n 'requirements': [[['nve', '(?P<nve_name>.*)', 'src_if_state', 'up']],\n [['nve', '(?P<nve_name>.*)', 'source_if', '(?P<source_if>(L|l)oopback.*)']]],\n 'all_keys': True,\n 'kwargs': {'attributes': ['nve', 'l2route', 'bgp_l2vpn_evpn']},\n 'exclude': nve_exclude + multisite_exclude + l2route_exclude + ['peer_id','resetreason','resettime','totalpaths']}},\n config_info={'conf.interface.Interface': {\n 'requirements': [['enabled', False]],\n 'verify_conf': False,\n 'kwargs': {'mandatory': {'name': '(?P<source_if>.*)',\n 'attach': False}}}},\n verify_ops={'ops.vxlan.vxlan.Vxlan': {\n 'requirements': [['nve', '(?P<nve_name>.*)', 'src_if_state', 'down'],\n ['nve', '(?P<nve_name>.*)', 'if_state', 'down'],\n ['nve', '(?P<nve_name>.*)', 'multisite_bgw_if_oper_state',\n 'down'],\n ['nve', '(?P<nve_name>.*)', 'sm_state', 'nve-intf-init'],\n ['nve', '(?P<nve_name>.*)', 'vni', '(.*)', 'vni_state',\n 'down'],\n ['nve', 'vni', 'summary', 'cp_vni_up', 0]],\n 'kwargs': {'attributes': ['nve', 'l2route', 'bgp_l2vpn_evpn']},\n 'exclude': nve_exclude + ['l2route', 'bgp_l2vpn_evpn','cp_vni_down'] }},\n num_values={'nve_name': 'all', 'source_if': 1})\n", "id": "1404244", "language": "Python", "matching_score": 5.120712757110596, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/shutnoshut/vxlan/nxos/shutnoshut.py" }, { "content": "'''Implementation for vxlan unconfigconfig triggers'''\n\nimport re\nimport logging\n# Genie Libs\nfrom genie.libs.sdk.libs.utils.mapping import Mapping\nfrom genie.libs.sdk.triggers.unconfigconfig.unconfigconfig import TriggerUnconfigConfig\nfrom ats.utils.objects import Not,NotExists\nfrom ats import aetest\nfrom collections import OrderedDict\n\n# Which key to exclude for Vxlan Ops comparison\nvxlan_base_exclude = ['maker','up_time']\nevpn_exclude =['bytesrecvd','bytesent','elapsedtime','keepalive','tableversion',\n 'keepaliverecvd','keepalivesent','lastread','totalnetworks',\n 'lastwrite','msgrecvd','msgsent','updatesrecvd','rtrefreshsent',\n 'totalpaths','numberattrs','updatesent','neighbortableversion',\n 'memoryused','byteattrs','bytessent','updatessent', 'openssent']\n\ninterface_exclude = ['maker', 'last_change','in_rate','in_rate_pkts',\n 'out_rate', 'out_rate_pkts', 'in_octets',\n 'in_pkts', 'in_unicast_pkts', 'out_octets',\n 'out_pkts', 'out_unicast_pkts', 'out_multicast_pkts',\n 'in_multicast_pkts', 'last_clear', 'in_broadcast_pkts',\n 'out_broadcast_pkts']\n\nmultisite_exclude = ['elapsedtime','keepalive','remoteport',\n 'keepaliverecvd','keepalivesent','lastread','lastwrite',\n 'msgrecvd','msgsent','neighbortableversion',\n 'tableversion','rtrefreshsent','updatesrecvd','updatessent',\n 'bytessent','bytesrecvd','localport','connsdropped',\n 'connsestablished','opensrecvd','openssent','prefixversion']\n\nl2vpn_exclude = ['bytesattrs','memoryused','numberattrs','bestpathnr','totalnetworks','totalpaths','total_memory','prefixreceived']\n\n\nclass TriggerUnconfigConfigEvpn(TriggerUnconfigConfig):\n \"\"\"Unconfigure evpn and reapply the whole configurations of dynamically learned Vxlan(s).\"\"\"\n \n 
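# Note on the mapping below: path elements written like '(?P<rd>.*)' are\n # regular expressions. Genie matches them against the learned Ops data and\n # the named groups ('instance', 'vrf', 'rd', 'rd_vniid', ...) become keys\n # that config_info/verify_ops reuse. A rough illustration of the idea only\n # (not the actual genie matcher):\n #     import re\n #     re.fullmatch('(?P<rd>.*)', '65000:1001').groupdict()\n #     # -> {'rd': '65000:1001'}\n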
__description__ = """Unconfigure evpn and reapply the whole configurations of dynamically learned Vxlan(s).\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery:\n Buffer recovery timeout when the previous timeout has been exhausted,\n to make sure the devices are recovered before ending the trigger\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n instance: `str`\n vrf: `str`\n address_family: `str`\n rd: `str`\n rd_vniid: `str`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn Vxlan Ops object and store the vni id under bgp_l2vpn_evpn\n if has any, otherwise, SKIP the trigger\n 2. Save the current device configurations through "method" which user uses\n 3. Unconfigure the Evpn with Vxlan Conf object\n 4. Verify the evpn from step 3 no longer exists\n 5. Recover the device configurations to the one in step 2\n 6. Learn Vxlan Ops again and verify it is the same as the Ops in step 1\n\n """\n\n mapping = Mapping(requirements={'ops.vxlan.vxlan.Vxlan':{\n 'requirements': [['bgp_l2vpn_evpn', 'instance', '(?P<instance>.*)','vrf',\n '(?P<vrf>.*)', 'address_family', '(?P<address_family>.*)','rd','(?P<rd>.*)',\n 'rd_vniid','(?P<rd_vniid>.*)']],\n 'kwargs': {'attributes': ['bgp_l2vpn_evpn']},\n 'all_keys': True,\n 'exclude': vxlan_base_exclude + evpn_exclude + ['prefixversion','pathnr','bestpathnr',\n 'advertisedto','prefix'] }},\n config_info={'conf.vxlan.Vxlan':{\n 'requirements':[['device_attr', '{uut}', 'evpn_attr','(.*)']],\n 'verify_conf':False,\n 'kwargs':{}}},\n verify_ops={'ops.vxlan.vxlan.Vxlan':{\n 'requirements': [['bgp_l2vpn_evpn', 'instance', '(.*)']],\n 'kwargs': {'attributes': ['bgp_l2vpn_evpn']},\n 'exclude': vxlan_base_exclude + evpn_exclude }},\n num_values={'instance':1 , 'vrf':1 , 'address_family':1 , 'rd':1})\n\n\nclass TriggerUnconfigConfigEvpnVni(TriggerUnconfigConfig):\n """Unconfigure evpn vni and reapply the whole configurations of dynamically learned Vxlan(s)."""\n\n __description__ = """Unconfigure evpn vni and reapply the whole configurations of dynamically learned Vxlan(s).\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. 
Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery:\n Buffer recovery timeout when the previous timeout has been exhausted,\n to make sure the devices are recovered before ending the trigger\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n nve_name: `str`\n nve_vni: `str`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn Vxlan Ops object and store the vni state under nve\n if has any, otherwise, SKIP the trigger\n 2. Save the current device configurations through \"method\" which user uses\n 3. Unconfigure the Evpn with Vxlan Conf object\n 4. Verify the evpn from step 3 are no longer existed\n 5. Recover the device configurations to the one in step 2\n 6. Learn Vxlan Ops again and verify it is the same as the Ops in step 1\n\n \"\"\"\n mapping = Mapping(requirements={'ops.vxlan.vxlan.Vxlan': {\n 'requirements': [['nve', '(?P<nve_name>.*)', 'vni', '(?P<nve_vni>.*)',\n 'vni_state', 'up']],\n 'kwargs': {'attributes': ['nve']},\n 'all_keys':True,\n 'exclude': vxlan_base_exclude + ['uptime']}},\n config_info={'conf.interface.Interface': {\n 'requirements': [['nve_vni','(?P<nve_vni>.*)']],\n 'verify_conf': False,\n 'kwargs': {'mandatory': {'name': '(?P<nve_name>.*)','attach': False}}}},\n verify_ops={'ops.vxlan.vxlan.Vxlan': {\n 'requirements': [['nve', '(?P<nve_name>.*)', 'vni', NotExists('(?P<nve_vni>.*)')]],\n 'kwargs': {'attributes': ['nve']},\n 'exclude': vxlan_base_exclude +['uptime','active_vnis','cp_vni_up','cp_vni_count']}},\n num_values={'nve_name': 1, 'nve_vni': 1})\n\n\nclass TriggerUnconfigConfigEvpnMsiteBgwDelayRestoreTime(TriggerUnconfigConfig):\n \"\"\"Unconfigure evpn msite bgw delay restore time and reapply\n the whole configurations of dynamically learned Vxlan(s).\"\"\"\n\n __description__ = \"\"\"Unconfigure evpn msite bgw delay restore time and reapply\n the whole configurations of dynamically learned Vxlan(s).\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. 
Default: 10\n timeout_recovery:\n Buffer recovery timeout when the previous timeout has been exhausted,\n to make sure the devices are recovered before ending the trigger\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n nve_name: `str`\n evpn_multisite_border_gateway: `int`\n delay_restore_time: `int`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn Vxlan Ops object and store the evpn msite bgw delay restore time under nve\n if has any, otherwise, SKIP the trigger\n 2. Save the current device configurations through \"method\" which user uses\n 3. Unconfigure the evpn msite bgw delay restore time with Vxlan Conf object\n 4. Verify the evpn msite bgw delay restore time from step 3 are no longer existed\n 5. Recover the device configurations to the one in step 2\n 6. Learn Vxlan Ops again and verify it is the same as the Ops in step 1\n\n \"\"\"\n\n mapping = Mapping(requirements={'ops.vxlan.vxlan.Vxlan': {\n 'requirements': [[['nve', 'evpn_multisite_border_gateway', '(?P<evpn_multisite_border_gateway>.*)']],\n [['nve', '(?P<nve_name>.*)','multisite_convergence_time',\n '(?P<delay_restore_time>.*)']]],\n 'kwargs': {'attributes': ['nve']},\n 'exclude': vxlan_base_exclude + ['uptime','prefixversion','pathnr','bestpathnr']}},\n config_info={'conf.vxlan.Vxlan': {\n 'requirements': [['device_attr','{uut}', 'evpn_msite_attr', '(?P<evpn_multisite_border_gateway>.*)',\\\n 'evpn_msite_bgw_delay_restore_time', '(?P<delay_restore_time>.*)']],\n 'verify_conf': False,\n 'kwargs': {}}},\n verify_ops={'ops.vxlan.vxlan.Vxlan': {\n 'requirements': [['nve', '(?P<nve_name>.*)', 'multisite_convergence_time', 180]],\n 'kwargs': {'attributes': ['nve']},\n 'exclude': vxlan_base_exclude + ['uptime']}},\n num_values={'nve_name': 1,'evpn_multisite_border_gateway': 1})\n\n\nclass TriggerUnconfigConfigEvpnMsiteDciTracking(TriggerUnconfigConfig):\n \"\"\"Unconfigure evpn msite dci tracking and reapply\n the whole configurations of dynamically learned Vxlan(s).\"\"\"\n\n __description__ = \"\"\"Unconfigure evpn msite dci tracking and reapply\n the whole configurations of dynamically learned Vxlan(s).\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery:\n Buffer recovery timeout when the previous timeout has been exhausted,\n to make sure the devices are recovered before ending the trigger\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. 
Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n nve_name: `str`\n dci_link: `str`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn Vxlan Ops object and store the evpn msite dci tracking under nve\n if has any, otherwise, SKIP the trigger\n 2. Save the current device configurations through \"method\" which user uses\n 3. Unconfigure the evpn msite dci tracking with Interface Conf object\n 4. Verify the evpn msite dci tracking from step 3 are no longer existed\n 5. Recover the device configurations to the one in step 2\n 6. Learn Vxlan Ops again and verify it is the same as the Ops in step 1\n \"\"\"\n\n mapping = Mapping(requirements={'ops.vxlan.vxlan.Vxlan': {\n 'requirements': [[['nve', 'multisite', 'dci_links', '(?P<dci_link>.*)', 'if_state', 'up']],\n [['nve', '(?P<nve_name>.*)',\\\n 'multisite_bgw_if_oper_state', 'up']]],\n 'kwargs': {'attributes': ['nve[(.*)][vni][(.*)][vni]',\n 'nve[(.*)][multisite_bgw_if_oper_state]',\n 'nve[multisite]']},\n 'exclude': vxlan_base_exclude}},\n config_info={'conf.interface.Interface': {\n 'requirements': [['evpn_multisite_dci_tracking',True]],\n 'verify_conf': False,\n 'kwargs': {'mandatory': {'name': '(?P<dci_link>.*)', 'attach': False}}}},\n verify_ops={'ops.vxlan.vxlan.Vxlan': {\n 'requirements': [['nve','multisite', NotExists('dci_links')],\n ['nve', '(?P<nve_name>.*)', 'multisite_bgw_if_oper_state', 'down']],\n 'kwargs': {'attributes': ['nve[(.*)][vni][(.*)][vni]',\n 'nve[(.*)][multisite_bgw_if_oper_state]',\n 'nve[multisite]']},\n 'exclude': vxlan_base_exclude}},\n num_values={'dci_link': 'all','nve_name': 1 })\n\nclass TriggerUnconfigConfigEvpnMsiteFabricTracking(TriggerUnconfigConfig):\n \"\"\"Unconfigure evpn msite fabric tracking and reapply\n the whole configurations of dynamically learned Vxlan(s).\"\"\"\n\n __description__ = \"\"\"Unconfigure evpn msite Fabric tracking and reapply\n the whole configurations of dynamically learned Vxlan(s).\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery:\n Buffer recovery timeout when the previous timeout has been exhausted,\n to make sure the devices are recovered before ending the trigger\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n nve_name: `str`\n fabric_link: `str`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. 
Learn Vxlan Ops object and store the evpn msite fabric tracking under nve\n if has any, otherwise, SKIP the trigger\n 2. Save the current device configurations through \"method\" which user uses\n 3. Unconfigure the evpn msite fabric tracking with Interface Conf object\n 4. Verify the evpn msite fabric tracking from step 3 are no longer existed\n 5. Recover the device configurations to the one in step 2\n 6. Learn Vxlan Ops again and verify it is the same as the Ops in step 1\n \"\"\"\n\n mapping = Mapping(requirements={'ops.vxlan.vxlan.Vxlan': {\n 'requirements': [[['nve', 'multisite', 'fabric_links', '(?P<fabric_link>.*)', 'if_state', 'up']],\n [['nve', '(?P<nve_name>.*)', \\\n 'multisite_bgw_if_oper_state', 'up']]],\n 'kwargs': {'attributes': ['nve[(.*)][vni][(.*)][vni]',\n 'nve[(.*)][multisite_bgw_if_oper_state]',\n 'nve[multisite]',\n 'bgp_l2vpn_evpn']},\n 'exclude': vxlan_base_exclude + multisite_exclude +['fd','resetreason','resettime','prefixreceived',\n 'bestpathnr','pathnr','advertisedto']}},\n config_info={'conf.interface.Interface': {\n 'requirements': [['evpn_multisite_fabric_tracking',True]],\n 'verify_conf': False,\n 'kwargs': {'mandatory': {'name': '(?P<fabric_link>.*)', 'attach': False}}}},\n verify_ops={'ops.vxlan.vxlan.Vxlan': {\n 'requirements': [['nve','multisite',NotExists('fabric_links')],\n ['nve', '(?P<nve_name>.*)', 'multisite_bgw_if_oper_state', 'down']],\n 'kwargs': {'attributes': ['nve[(.*)][vni][(.*)][vni]',\n 'nve[(.*)][multisite_bgw_if_oper_state]',\n 'nve[multisite]',\n 'bgp_l2vpn_evpn']},\n 'exclude': vxlan_base_exclude +['bgp_l2vpn_evpn']}},\n num_values={'fabric_link': 'all', 'nve_name': 1})\n\nclass TriggerUnconfigConfigNveAdvertiseVirtualRmac(TriggerUnconfigConfig):\n \"\"\"Unconfigure virtual rmac advertised and reapply\n the whole configurations of dynamically learned Vxlan(s).\"\"\"\n\n __description__ = \"\"\"Unconfigure virtual rmac advertised and reapply\n the whole configurations of dynamically learned Vxlan(s).\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery:\n Buffer recovery timeout when the previous timeout has been exhausted,\n to make sure the devices are recovered before ending the trigger\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n nve_name: `str`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn Vxlan Ops object and store the virtual rmac advertised under nve\n if has any, otherwise, SKIP the trigger\n 2. Save the current device configurations through \"method\" which user uses\n 3. 
Unconfigure the virtual rmac advertised with Interface Conf object\n 4. Verify the virtual rmac advertised from step 3 no longer exists\n 5. Recover the device configurations to the one in step 2\n 6. Learn Vxlan Ops again and verify it is the same as the Ops in step 1\n \"\"\"\n\n mapping = Mapping(requirements={'ops.vxlan.vxlan.Vxlan': {\n 'requirements': [['nve', '(?P<nve_name>.*)','if_state','up'],\n ['nve','(?P<nve_name>.*)','adv_vmac',True]],\n 'kwargs': {'attributes': ['nve[(.*)][adv_vmac]',\n 'nve[(.*)][if_state]',\n 'nve[(.*)][vni]','l2route','bgp_l2vpn_evpn']},\n 'all_keys':True,\n 'exclude': vxlan_base_exclude + multisite_exclude + l2vpn_exclude +\\\n ['peer_id','tx_id','client_nfn','prefixversion']}},\n config_info={'conf.interface.Interface': {\n 'requirements': [['nve_adv_virtual_rmac', True]],\n 'verify_conf': False,\n 'kwargs': {'mandatory': {'name': '(?P<nve_name>.*)', 'attach': False}}}},\n verify_ops={'ops.vxlan.vxlan.Vxlan': {\n 'requirements': [['nve', '(?P<nve_name>.*)','if_state','up'],\n ['nve','(?P<nve_name>.*)','adv_vmac',False]],\n 'kwargs': {'attributes': ['nve[(.*)][adv_vmac]',\n 'nve[(.*)][if_state]',\n 'nve[(.*)][vni]','l2route','bgp_l2vpn_evpn']},\n 'exclude': vxlan_base_exclude + ['l2route','bgp_l2vpn_evpn']}},\n num_values={'nve_name': 1})\n\nclass TriggerUnconfigConfigNveVniAssociateVrf(TriggerUnconfigConfig):\n \"\"\"Unconfigure the nve vni associated vrf and reapply\n the whole configurations of dynamically learned Vxlan(s).\"\"\"\n\n __description__ = \"\"\"Unconfigure the nve vni associated vrf and reapply\n the whole configurations of dynamically learned Vxlan(s).\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in seconds. Default: 180\n interval (`int`): Wait time between iterations when looping is needed,\n in seconds. Default: 15\n method (`str`): Method to recover the device configuration,\n Supported methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in seconds. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in seconds. Default: 10\n timeout_recovery:\n Buffer recovery timeout when the previous timeout has been exhausted,\n to make sure the devices are recovered before ending the trigger\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in seconds. Default: 180\n interval (`int`): Wait time between iterations when looping is needed,\n in seconds. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n nve_name: `str`\n nve_vni: `str`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn Vxlan Ops object and store the nve vni associated vrf under nve\n if it has any; otherwise, SKIP the trigger\n 2. Save the current device configurations through the \"method\" the user specifies\n 3. Unconfigure the nve vni associated vrf with Interface Conf object\n 4. Verify the nve vni associated vrf from step 3 no longer exists\n 5. Recover the device configurations to the one in step 2\n 6. 
Learn Vxlan Ops again and verify it is the same as the Ops in step 1\n \"\"\"\n\n # associated vrf is shown as L3\n mapping = Mapping(requirements={'ops.vxlan.vxlan.Vxlan': {\n 'requirements': [['nve', '(?P<nve_name>.*)','vni','(?P<nve_vni>.*)',\\\n 'type','(?P<type>L3.*)']],\n 'kwargs': {'attributes': ['nve[(.*)][vni][(.*)]']},\n 'all_keys':True,\n 'exclude': vxlan_base_exclude }},\n config_info={'conf.interface.Interface': {\n 'requirements': [['nve_vni','(?P<nve_vni>.*)']],\n 'verify_conf': False,\n 'kwargs': {'mandatory': {'name': '(?P<nve_name>.*)', 'attach': False}}}},\n verify_ops={'ops.vxlan.vxlan.Vxlan': {\n 'requirements': [['nve', '(?P<nve_name>.*)','vni', NotExists('(?P<nve_vni>.*)')]],\n 'kwargs': {'attributes': ['nve[(.*)][vni][(.*)]']},\n 'exclude': vxlan_base_exclude}},\n num_values={'nve_name': 1, 'nve_vni':1})\n\nclass TriggerUnconfigConfigNveSourceInterfaceLoopback(TriggerUnconfigConfig):\n \"\"\"Unconfigure nve source interface and reapply the whole configurations of dynamically learned Vxlan(s).\"\"\"\n\n __description__ = \"\"\"Unconfigure nve source interface and reapply the whole configurations of dynamically learned Vxlan(s).\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in seconds. Default: 180\n interval (`int`): Wait time between iterations when looping is needed,\n in seconds. Default: 15\n method (`str`): Method to recover the device configuration,\n Supported methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in seconds. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in seconds. Default: 10\n timeout_recovery:\n Buffer recovery timeout when the previous timeout has been exhausted,\n to make sure the devices are recovered before ending the trigger\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in seconds. Default: 180\n interval (`int`): Wait time between iterations when looping is needed,\n in seconds. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n nve_name: `str`\n source_if: `str`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn Vxlan Ops object and store the nve source interface under nve\n if it has any; otherwise, SKIP the trigger\n 2. Save the current device configurations through the \"method\" the user specifies\n 3. Unconfigure the nve source interface with Interface Conf object\n 4. Verify the nve source interface from step 3 no longer exists\n 5. Recover the device configurations to the one in step 2\n 6. Learn Vxlan Ops again and verify it is the same as the Ops in step 1\n \"\"\"
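\n # --- Editor's note: illustrative sketch, not part of the original trigger ---\n # The unconfigure() override below builds its CLI from a template whose tokens\n # are '(?P<...>)' capture groups; Genie's Mapping._path_population() then\n # substitutes the values learned in step 1. The tokenization it relies on can\n # be reproduced with plain re and no Genie imports:\n #\n # import re\n # cmd = ('interface (?P<nve_name>.*)\\n'\n # ' shutdown\\n'\n # ' no source-interface (?P<source_if>.*)')\n # re.findall(r'\\S+|\\n', cmd)\n # # -> ['interface', '(?P<nve_name>.*)', '\\n', 'shutdown', '\\n',\n # # 'no', 'source-interface', '(?P<source_if>.*)']\n #\n # With learned values such as nve_name='nve1' and source_if='loopback0'\n # (hypothetical examples), the space-joined command sent to the device reads\n # 'interface nve1 \\n shutdown \\n no source-interface loopback0'.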
\n\n @aetest.test\n def unconfigure(self, uut, abstract, steps):\n\n cmd = \"interface (?P<nve_name>.*)\\n\" \\\n \" shutdown\\n\" \\\n \" no source-interface (?P<source_if>.*)\"\n x = re.findall(r'\\S+|\\n', cmd)\n req = self.mapping._path_population([x], uut)\n req_str = []\n for item in req[0]:\n req_str.append(str(item))\n\n cmd = ' '.join(req_str)\n try:\n uut.configure(cmd)\n except Exception as e:\n self.failed(\"Unable to configure: '{c}'\".format(c=cmd),\n from_exception=e)\n\n mapping = Mapping(requirements={'ops.vxlan.vxlan.Vxlan': {\n 'requirements': [['nve', '(?P<nve_name>.*)', 'source_if', '(?P<source_if>(L|l)oopback.*)'],\n ['nve', '(?P<nve_name>.*)', 'src_if_state', 'up']],\n 'kwargs': {'attributes': ['nve[(.*)][vni][(.*)][vni]',\n 'nve[(.*)][source_if]',\n 'nve[(.*)][src_if_state]']},\n 'all_keys':True,\n 'exclude': vxlan_base_exclude}},\n config_info={'conf.interface.Interface': {\n 'requirements': [],\n 'verify_conf': False,\n 'kwargs': {'mandatory': {'name': '(?P<nve_name>.*)', 'attach': False}}}},\n verify_ops={'ops.vxlan.vxlan.Vxlan': {\n 'requirements': [['nve', '(?P<nve_name>.*)', NotExists('source_if')]],\n 'kwargs': {'attributes': ['nve[(.*)][vni][(.*)][vni]',\n 'nve[(.*)][source_if]',\n 'nve[(.*)][src_if_state]']},\n 'exclude': vxlan_base_exclude}},\n num_values={'nve_name': 1, 'source_if': 1})\n\nclass TriggerUnconfigConfigNvOverlayEvpn(TriggerUnconfigConfig):\n \"\"\"Unconfigure nv overlay evpn and reapply the whole configurations of dynamically learned Vxlan(s).\"\"\"\n\n __description__ = \"\"\"Unconfigure nv overlay evpn and reapply the whole configurations of dynamically learned Vxlan(s).\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in seconds. Default: 180\n interval (`int`): Wait time between iterations when looping is needed,\n in seconds. Default: 15\n method (`str`): Method to recover the device configuration,\n Supported methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in seconds. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in seconds. Default: 10\n timeout_recovery:\n Buffer recovery timeout when the previous timeout has been exhausted,\n to make sure the devices are recovered before ending the trigger\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in seconds. Default: 180\n interval (`int`): Wait time between iterations when looping is needed,\n in seconds. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n instance: `str`\n vrf: `str`\n address_family: `str`\n rd: `str`\n rd_vniid: `str`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn Vxlan Ops object and store the vni id under bgp_l2vpn_evpn\n if it has any; otherwise, SKIP the trigger\n 2. Save the current device configurations through the \"method\" the user specifies\n 3. Unconfigure nv overlay evpn with Vxlan Conf object\n 4. Verify the nv overlay evpn from step 3 no longer exists\n 5. Recover the device configurations to the one in step 2\n 6. 
Learn Vxlan Ops again and verify it is the same as the Ops in step 1\n \"\"\"\n\n mapping = Mapping(requirements={'ops.vxlan.vxlan.Vxlan': {\n 'requirements': [['bgp_l2vpn_evpn', 'instance', '(?P<instance>.*)', 'vrf',\n '(?P<vrf>.*)', 'address_family', '(?P<address_family>.*)', 'rd', '(?P<rd>.*)',\n 'rd_vniid', '(?P<rd_vniid>.*)']],\n 'kwargs': {'attributes': ['bgp_l2vpn_evpn[instance][(.*)][vrf][(.*)]'\n '[address_family][(.*)][rd]','l2route']},\n 'all_keys': True,\n 'exclude': vxlan_base_exclude + ['sent_to','prefixversion','pathnr',\n 'bestpathnr','advertisedto','client_nfn',\n 'prefix','memory','objects','total_mem',\n 'total_obj','total_memory','mac','mac_ip','seq_num']}},\n config_info={'conf.vxlan.Vxlan': {\n 'requirements': [['device_attr', '{uut}', 'enabled_nv_overlay_evpn', True]],\n 'verify_conf': False,\n 'kwargs': {}}},\n verify_ops={'ops.vxlan.vxlan.Vxlan': {\n 'requirements': [[NotExists('bgp_l2vpn_evpn'), '']],\n 'kwargs': {'attributes': ['bgp_l2vpn_evpn[instance][(.*)][vrf][(.*)][address_family][(.*)][rd]','l2route']},\n 'exclude': vxlan_base_exclude +['bgp_l2vpn_evpn','l2route']}},\n num_values={'instance': 1, 'vrf': 1, 'address_family': 1, 'rd': 'all','rd_vniid': 'all'})\n\nclass TriggerUnconfigConfigNveVniMcastGroup(TriggerUnconfigConfig):\n \"\"\"Unconfigure mcast group and reapply the whole configurations of dynamically learned Vxlan(s).\"\"\"\n\n __description__ = \"\"\"Unconfigure mcast group and reapply the whole configurations of dynamically learned Vxlan(s).\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery:\n Buffer recovery timeout when the previous timeout has been exhausted,\n to make sure the devices are recovered before ending the trigger\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n nve_name: `str`\n nve_vni: `str`\n mcast: `str`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn Vxlan Ops object and store the mcast group under nve\n if has any, otherwise, SKIP the trigger\n 2. Save the current device configurations through \"method\" which user uses\n 3. Unconfigure the mcast group with Interface Conf object\n 4. Verify the mcast from step 3 are no longer existed\n 5. Recover the device configurations to the one in step 2\n 6. 
Learn Vxlan Ops again and verify it is the same as the Ops in step 1\n \"\"\"\n\n mapping = Mapping(requirements={'ops.vxlan.vxlan.Vxlan':{\n 'requirements':[['nve', '(?P<nve_name>.*)', 'vni', '(?P<nve_vni>.*)', 'mcast', '(?P<mcast>.*)'],\n ['nve', '(?P<nve_name>.*)', 'vni', '(?P<nve_vni>.*)', 'vni_state', 'up'],\n ['nve', '(?P<nve_name>.*)', 'vni', '(?P<nve_vni>.*)', 'type', '(?P<type>L2.*)']],\n 'kwargs':{'attributes':['nve[(.*)][vni]']},\n 'all_keys': True,\n 'exclude': vxlan_base_exclude}},\n config_info={'conf.interface.Interface': {\n 'requirements': [['nve_vni','(?P<nve_vni>.*)'],\n ['nve_vni_mcast_group','(?P<mcast>.*)'],\n ['nve_vni_associate_vrf',False]],\n 'verify_conf': False,\n 'kwargs': {'mandatory': {'name': '(?P<nve_name>.*)', 'attach': False}}}},\n verify_ops={'ops.vxlan.vxlan.Vxlan':{\n 'requirements':[['nve', '(?P<nve_name>.*)', 'vni', '(?P<nve_vni>.*)', 'mcast', 'unconfigured'],\n ['nve', '(?P<nve_name>.*)', 'vni', '(?P<nve_vni>.*)', 'vni_state', 'down'],\n ['nve', '(?P<nve_name>.*)', 'vni', '(?P<nve_vni>.*)', NotExists('repl_ip')]],\n 'kwargs':{'attributes':['nve[(.*)][vni]']},\n 'all_keys': True,\n 'exclude': vxlan_base_exclude}},\n num_values={'nve_name':1 , 'nve_vni':1 , 'mcast':1})\n\nclass TriggerUnconfigConfigNveVniMultisiteIngressReplication(TriggerUnconfigConfig):\n \"\"\"Unconfigure multisite ingress replication under vxlan and then restore the\n configuration by reapplying the whole running configuration\"\"\"\n\n __description__ = \"\"\"Unconfigure multisite ingress replication under Vxlan and then restore the\n configuration by reapplying the whole running configuration\n\n trigger_datafile:\n Mandatory Arguments:\n timeout:\n max_time (`int`): Maximum wait time for the trigger in seconds.\n Default: 180\n interval (`int`): Wait time between iterations when looping is\n needed in seconds. Default: 15\n method (`str`): Method to recover the device configuration.\n Supported methods:\n 'checkpoint': Rollback the configuration\n using checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config\n file on disk (iosxr)\n Optional Arguments:\n tgn_timeout (`int`): Maximum wait time for all traffic streams to be\n restored to the reference rate in seconds.\n Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic\n is resumed in seconds. Default: 10\n timeout_recovery:\n Buffer recovery timeout to make sure devices are recovered at the\n end of the trigger execution. Used when previous timeouts have\n been exhausted.\n max_time (`int`): Maximum wait time for the last step of the\n trigger in seconds. Default: 180\n interval (`int`): Wait time between iterations when looping is\n needed in seconds. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n nve_name: `str`\n nve_vni: `str`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n Steps:\n 1. Learn Vxlan Ops configured on device. SKIP the trigger if there\n is no vxlan configured on the device.\n 2. Save the current device configurations using the \"method\" specified.\n 3. Unconfigure multisite ingress replication using the Genie Interface Conf object.\n 4. Verify the multisite ingress replication from step 3 is removed from the\n device configuration.\n 5. Restore the device configuration to the original configuration saved\n in step 2.\n 6. Learn Vxlan Ops again and verify it is the same as the Ops in step 1.\n \"\"\"
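\n # --- Editor's note: illustrative sketch, not Genie's implementation ---\n # Each 'requirements' path in the mapping below is a list of keys into the\n # learned Ops structure; a segment written as '(?P<name>...)' is a regex that\n # fans out over every key at that level and binds what it matched. The bound\n # names ('nve_name', 'nve_vni') are what num_values and the config_info\n # requirements reuse. Run standalone, a minimal re-based walker behaves like:\n #\n # import re\n #\n # def match_path(struct, path, bound=None):\n # bound = bound or {}\n # if not path:\n # yield bound\n # return\n # seg, rest = path[0], path[1:]\n # if isinstance(struct, dict):\n # if isinstance(seg, str) and seg.startswith('(?P<'):\n # regex = re.compile(seg)\n # for key, sub in struct.items():\n # m = regex.match(str(key))\n # if m:\n # yield from match_path(sub, rest, {**bound, **m.groupdict()})\n # elif seg in struct:\n # yield from match_path(struct[seg], rest, bound)\n # elif not rest and struct == seg: # leaf: literal value check (e.g. True)\n # yield bound\n #\n # ops = {'nve': {'nve1': {'vni': {'10101': {'multisite_ingress_replication': True}}}}}\n # req = ['nve', '(?P<nve_name>.*)', 'vni', '(?P<nve_vni>.*)',\n # 'multisite_ingress_replication', True]\n # list(match_path(ops, req)) # -> [{'nve_name': 'nve1', 'nve_vni': '10101'}]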
\n mapping = Mapping(requirements={'ops.vxlan.vxlan.Vxlan': {\n 'requirements': [['nve', '(?P<nve_name>.*)', 'vni', '(?P<nve_vni>.*)',\\\n 'multisite_ingress_replication',True],\n ['nve', '(?P<nve_name>.*)', 'vni', '(?P<nve_vni>.*)', 'associated_vrf', False]],\n 'kwargs': {'attributes': ['nve[(.*)][vni][(.*)]']},\n 'all_keys': True,\n 'exclude': vxlan_base_exclude}},\n config_info={'conf.interface.Interface': {\n 'requirements': [['nve_vni','(?P<nve_vni>.*)'],\n ['nve_vni_multisite_ingress_replication',True],\n ['nve_vni_associate_vrf',False]],\n 'verify_conf': False,\n 'kwargs': {'mandatory': {'name': '(?P<nve_name>.*)', 'attach': False}}}},\n verify_ops={'ops.vxlan.vxlan.Vxlan': {\n 'requirements': [['nve', '(?P<nve_name>.*)', 'vni','(?P<nve_vni>.*)',\\\n NotExists('multisite_ingress_replication')]],\n 'kwargs': {'attributes': ['nve[(.*)][vni][(.*)]']},\n 'exclude': vxlan_base_exclude}},\n num_values={'nve_name': 1 ,'nve_vni':1 })\n\nclass TriggerUnconfigConfigEvpnMsiteBgw(TriggerUnconfigConfig):\n \"\"\"Unconfigure evpn msite bgw and reapply the whole\n configurations of dynamically learned Vxlan(s).\"\"\"\n\n __description__ = \"\"\"Unconfigure evpn msite bgw and reapply the whole configurations of\n dynamically learned Vxlan(s).\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in seconds. Default: 180\n interval (`int`): Wait time between iterations when looping is needed,\n in seconds. Default: 15\n method (`str`): Method to recover the device configuration,\n Supported methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in seconds. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in seconds. Default: 10\n timeout_recovery:\n Buffer recovery timeout when the previous timeout has been exhausted,\n to make sure the devices are recovered before ending the trigger\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in seconds. Default: 180\n interval (`int`): Wait time between iterations when looping is needed,\n in seconds. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n evpn_multisite_border_gateway: `int`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn Vxlan Ops object and store the evpn msite bgw under nve\n if it has any; otherwise, SKIP the trigger\n 2. Save the current device configurations through the \"method\" the user specifies\n 3. Unconfigure the evpn msite bgw with Vxlan Conf object\n 4. Verify the evpn msite bgw from step 3 no longer exists\n 5. Recover the device configurations to the one in step 2\n 6. 
Learn Vxlan Ops again and verify it is the same as the Ops in step 1\n \"\"\"\n mapping = Mapping(requirements={'ops.vxlan.vxlan.Vxlan': {\n 'requirements': [['nve', 'evpn_multisite_border_gateway', '(?P<evpn_multisite_border_gateway>.*)']],\n 'kwargs': {'attributes': ['nve','l2route']},\n 'all_keys': True,\n 'exclude': vxlan_base_exclude + ['uptime','peer_id','tx_id','flags']}},\n config_info={'conf.vxlan.Vxlan': {\n 'requirements': [['device_attr', '{uut}', 'evpn_msite_attr', '(?P<evpn_multisite_border_gateway>.*)']],\n 'verify_conf': False,\n 'kwargs': {}}},\n verify_ops={'ops.vxlan.vxlan.Vxlan': {\n 'requirements': [['nve', NotExists('evpn_multisite_border_gateway')]],\n 'kwargs': {'attributes': ['nve','l2route']},\n 'exclude': vxlan_base_exclude + ['l2route','uptime']}},\n num_values={'evpn_multisite_border_gateway': 1 })\n\n\nclass TriggerUnconfigConfigNveMultisiteBgwInterface(TriggerUnconfigConfig):\n \"\"\"Unconfigure multisite bgw interface and reapply the whole\n configurations of dynamically learned Vxlan(s).\"\"\"\n\n __description__ = \"\"\"Unconfigure multisite bgw interface and reapply the whole configurations of\n dynamically learned Vxlan(s).\n\n trigger_datafile:\n Mandatory:\n timeout:\n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery:\n Buffer recovery timeout when the previous timeout has been exhausted,\n to make sure the devices are recovered before ending the trigger\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n nve_name: `str`\n multisite_bgw_if: `str`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn Vxlan Ops object and store the multisite bgw interface under nve.\n if has any, otherwise, SKIP the trigger\n 2. Save the current device configurations through \"method\" which user uses\n 3. Unconfigure the multisite bgw interface with Vxlan Conf object\n 4. Verify the multisite bgw interface from step 3 are no longer existed\n 5. Recover the device configurations to the one in step 2\n 6. 
Learn Vxlan Ops again and verify it is the same as the Ops in step 1\n \"\"\"\n\n requirements = OrderedDict()\n requirements['ops.vxlan.vxlan.Vxlan'] = {\n 'requirements': [['nve', '(?P<nve_name>.*)', 'multisite_bgw_if',\n '(?P<multisite_bgw_if>.*)']],\n 'kwargs': {'attributes': ['nve']},\n 'exclude': vxlan_base_exclude + interface_exclude}\n\n requirements['ops.interface.interface.Interface'] = {\n 'requirements': [['info', '(?P<multisite_bgw_if>(L|l)oopback.*)', 'oper_status', 'up']],\n 'kwargs': {'attributes': ['info']},\n 'exclude': vxlan_base_exclude + interface_exclude}\n\n mapping = Mapping(requirements=requirements,\n config_info={'conf.interface.Interface': {\n 'requirements': [['nve_multisite_bgw_intf', '(?P<multisite_bgw_if>.*)']],\n 'verify_conf': False,\n 'kwargs': {'mandatory': {'name': '(?P<nve_name>.*)', 'attach': False}}}},\n verify_ops={'ops.vxlan.vxlan.Vxlan': {\n 'requirements': [['nve', '(?P<nve_name>.*)', NotExists('multisite_bgw_if')]],\n 'kwargs': {'attributes': ['nve']},\n 'exclude': vxlan_base_exclude + interface_exclude }},\n num_values={'nve_name':1, 'multisite_bgw_if': 1 })\n", "id": "7493143", "language": "Python", "matching_score": 6.10146427154541, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/unconfigconfig/vxlan/nxos/unconfigconfig.py" }, { "content": "# Genie\r\nfrom genie.ops.base import Base\r\n\r\n\r\nclass Vxlan(Base):\r\n exclude = ['bytesrecvd',\r\n 'bytessent',\r\n 'capabilitiesrecvd',\r\n 'capabilitiessent',\r\n 'connsdropped',\r\n 'connsestablished',\r\n 'elapsedtime',\r\n 'fd',\r\n 'keepalive',\r\n 'keepaliverecvd',\r\n 'keepalivesent',\r\n 'lastread',\r\n 'lastwrite',\r\n 'msgrecvd',\r\n 'msgsent',\r\n 'neighbortableversion',\r\n 'notificationssent',\r\n 'opensrecvd',\r\n 'openssent',\r\n 'tableversion',\r\n 'remoteport',\r\n 'rtrefreshsent',\r\n 'updatesrecvd',\r\n 'updatessent',\r\n 'prefixversion',\r\n 'tx_id',\r\n 'uptime',\r\n 'up_time',\r\n 'localport',\r\n 'resetreason',\r\n 'resettime',\r\n 'client_nfn',\r\n 'pathnr',\r\n 'bestpathnr',\r\n 'peer_id',\r\n 'bytesattrs',\r\n 'memoryused',\r\n 'prefixreceived',\r\n 'numberattrs',\r\n 'advertisedto',\r\n 'totalnetworks',\r\n 'totalpaths',\r\n 'flags',\r\n 'index',\r\n 'total_mem',\r\n 'memory',\r\n 'total_memory',\r\n 'mac',\r\n 'mac_ip',\r\n 'oif_index',\r\n '(0.0.0.0.*)',\r\n 'prefix',\r\n 'objects',\r\n 'total_obj',\r\n 'table_version',\r\n 'l2_oiflist_index',\r\n 'num_of_oifs',\r\n 'oifs',\r\n 'numof_converged_tables',\r\n 'rmac',\r\n 'vmac',\r\n 'local_rmac',\r\n 'router_mac',\r\n 'rpf_ifname']", "id": "5904749", "language": "Python", "matching_score": 1.349458932876587, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/vxlan/vxlan.py" }, { "content": "# Genie\nfrom genie.ops.base import Base\n\nclass RoutePolicy(Base):\n exclude = []", "id": "4939053", "language": "Python", "matching_score": 1.762895107269287, "max_stars_count": 0, "path": "pkgs/ops-pkg/src/genie/libs/ops/route_policy/route_policy.py" }, { "content": "# Genie\nfrom genie.ops.base import Base\n\n\nclass Vlan(Base):\n exclude = []", "id": "9968475", "language": "Python", "matching_score": 1.1459747552871704, "max_stars_count": 0, "path": "pkgs/ops-pkg/src/genie/libs/ops/vlan/vlan.py" }, { "content": "'''\nVlan Genie Ops Object for IOS - CLI.\n'''\nfrom ..iosxe.vlan import Vlan as VlanXE\n\nclass Vlan(VlanXE):\n pass", "id": "330000", "language": "Python", "matching_score": 1, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/vlan/ios/vlan.py" }, { "content": "from 
.vlan import *\n", "id": "7292365", "language": "Python", "matching_score": 0.862936794757843, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/vlan/__init__.py" }, { "content": "# python\nimport unittest\nfrom unittest.mock import Mock\n\n# Genie package\nfrom genie.conf.tests import TestCase\nfrom genie.conf import Genie\nfrom genie.conf.base import Testbed, Device, Link, Interface\n\n# Genie XBu_shared\nfrom genie.libs.conf.vlan import Vlan\nfrom genie.libs.conf.interface import Layer, L2_type\n\nclass test_vlan(TestCase):\n def test_init(self):\n testbed = Genie.testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='nxos')\n dev2 = Device(testbed=testbed, name='PE2', os='nxos')\n intf1 = Interface(name='Ethernet0/0/1', device=dev1)\n intf2 = Interface(name='Ethernet0/0/2', device=dev2)\n link = Link(name='1_2_1', testbed=testbed)\n link.connect_interface(interface=intf1)\n link.connect_interface(interface=intf2)\n self.assertSetEqual(\n set(link.find_interfaces()),\n set([intf1, intf2]))\n self.assertSetEqual(\n set(dev1.find_interfaces()),\n set([intf1]))\n self.assertSetEqual(\n set(dev2.find_interfaces()),\n set([intf2]))\n\n vlan = Vlan()\n\n self.assertSetEqual(set(vlan.devices), set([]))\n self.assertSetEqual(set(vlan.links), set([]))\n\n link.add_feature(vlan)\n self.assertCountEqual(vlan.devices, [dev1, dev2])\n self.assertSetEqual(set(vlan.links), set([link]))\n self.assertSetEqual(set(vlan.interfaces), set([intf1, intf2]))\n\n with self.assertRaises(AttributeError):\n vlan.access_map_id\n\n with self.assertRaises(AttributeError):\n vlan.vlan_configuration_id\n\n def test_basic_cfg(self):\n testbed = Genie.testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='nxos')\n dev2 = Device(testbed=testbed, name='PE2', os='nxos')\n intf1 = Interface(name='Ethernet0/0/1', device=dev1, layer=Layer.L3)\n intf2 = Interface(name='Ethernet0/0/2', device=dev2, layer=Layer.L2)\n link = Link(name='1_2_1', testbed=testbed)\n link.connect_interface(interface=intf1)\n link.connect_interface(interface=intf2)\n vlan = Vlan()\n link.add_feature(vlan)\n vlan.vlan_id = 100\n access_map_id = 'ed'\n vlan_configuration_id = '3'\n\n vlan.device_attr[dev1]\n vlan.device_attr[dev2]\n vlan.device_attr[dev1].interface_attr[intf1]\n vlan.device_attr[dev2].interface_attr[intf2]\n vlan.device_attr[dev2].interface_attr[intf2].switchport_mode = \\\n L2_type.TRUNK\n vlan.device_attr[dev2].interface_attr[intf2].sw_trunk_allowed_vlan = \\\n '200-201'\n vlan.device_attr[dev1].access_map_attr[access_map_id]\n vlan.device_attr[dev2].access_map_attr[access_map_id]\n vlan.device_attr[dev1].vlan_configuration_attr[vlan_configuration_id]\n vlan.device_attr[dev2].vlan_configuration_attr[vlan_configuration_id]\n\n # Defining attributes section\n vlan.shutdown = False\n with self.assertRaises(ValueError):\n vlan.media = 'invalid'\n vlan.media = 'enet'\n self.assertIs(type(vlan.media), Vlan.Media)\n vlan.egress_load_balance = True\n vlan.device_attr[dev1].access_map_action = 'drop'\n vlan.datalink_flow_monitor = True\n\n intf1.ipv4 = '192.168.3.11'\n intf1.ipv4.netmask = '255.255.255.0'\n intf1.speed = 1000\n intf1.mtu = 500\n intf1.ipv6 = '2001::12:1'\n\n cfg1 = vlan.build_config(apply=False)\n cfg2 = intf1.build_config(apply=False)\n self.assertCountEqual(cfg1.keys(), ['PE1', 'PE2'])\n\n self.assertMultiLineEqual(\n str(cfg1['PE1']),\n '\\n'.join([\n 'vlan 100',\n ' media enet',\n ' no shutdown',\n ' exit',\n 'vlan access-map ed',\n ' action drop',\n ' exit',\n 'vlan configuration 
3',\n ' egress port-channel load-balance random',\n ' exit',\n 'interface Ethernet0/0/1',\n ' mtu 500',\n ' ip address 192.168.3.11 255.255.255.0',\n ' ipv6 address 2001::12:1/128',\n ' speed 1000',\n ' exit',\n ]))\n\n self.assertMultiLineEqual(\n str(cfg1['PE2']),\n '\\n'.join([\n 'vlan 100',\n ' media enet',\n ' no shutdown',\n ' exit',\n 'vlan access-map ed',\n ' exit',\n 'vlan configuration 3',\n ' egress port-channel load-balance random',\n ' exit',\n 'interface Ethernet0/0/2',\n ' switchport mode trunk',\n ' switchport trunk allowed vlan 200-201',\n ' exit',\n ]))\n\n self.assertMultiLineEqual(\n str(cfg2),\n '\\n'.join([\n 'interface Ethernet0/0/1',\n ' mtu 500',\n ' ip address 192.168.3.11 255.255.255.0',\n ' ipv6 address 2001::12:1/128',\n ' speed 1000',\n ' exit',\n ]))\n\n def test_basic_uncfg(self):\n testbed = Genie.testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='nxos')\n dev2 = Device(testbed=testbed, name='PE2', os='nxos')\n intf1 = Interface(name='Ethernet0/0/1', device=dev1, layer=Layer.L3)\n intf2 = Interface(name='Ethernet0/0/2', device=dev2, layer=Layer.L2)\n link = Link(name='1_2_1', testbed=testbed)\n link.connect_interface(interface=intf1)\n link.connect_interface(interface=intf2)\n vlan = Vlan()\n link.add_feature(vlan)\n vlan.vlan_id = 100\n access_map_id = 'ed'\n vlan_configuration_id = '3'\n\n vlan.device_attr[dev1]\n vlan.device_attr[dev2]\n vlan.device_attr[dev1].access_map_attr[access_map_id]\n vlan.device_attr[dev2].access_map_attr[access_map_id]\n vlan.device_attr[dev1].interface_attr[intf1]\n vlan.device_attr[dev2].interface_attr[intf2]\n vlan.device_attr[dev2].interface_attr[intf2].switchport_mode = \\\n L2_type.TRUNK\n vlan.device_attr[dev2].interface_attr[intf2].sw_trunk_allowed_vlan = \\\n '200-201'\n vlan.device_attr[dev1].vlan_configuration_attr[vlan_configuration_id]\n vlan.device_attr[dev2].vlan_configuration_attr[vlan_configuration_id]\n\n # Defining attributes section\n vlan.shutdown = False\n with self.assertRaises(ValueError):\n vlan.media = 'invalid'\n vlan.media = 'enet'\n self.assertIs(type(vlan.media), Vlan.Media)\n vlan.egress_port_channel_load_balance_random = True\n vlan.device_attr[dev1].access_map_action = 'drop'\n vlan.datalink_flow_monitor = True\n\n # Unconfig testing\n # Set a mock\n dev1.cli = Mock()\n dev1.configure = Mock()\n dev2.cli = Mock()\n dev2.configure = Mock()\n dev1.add_feature(vlan)\n dev2.add_feature(vlan)\n # Mock config\n\n output = vlan.build_config(apply=True)\n\n uncfg1 = vlan.build_unconfig(apply=False)\n self.assertCountEqual(uncfg1.keys(), ['PE1', 'PE2'])\n self.assertMultiLineEqual(\n str(uncfg1['PE1']),\n '\\n'.join([\n 'no vlan 100',\n 'no vlan access-map ed',\n 'no vlan configuration 3',\n ]))\n\n self.assertMultiLineEqual(\n str(uncfg1['PE2']),\n '\\n'.join([\n 'no vlan 100',\n 'no vlan access-map ed',\n 'no vlan configuration 3',\n 'interface Ethernet0/0/2',\n ' no switchport mode trunk',\n ' no switchport trunk allowed vlan 200-201',\n ' exit',\n ]))\n\n partial_uncfg1 = vlan.build_unconfig(apply=False,\n attributes={'device_attr': \\\n {'*': \"media\"}})\n self.assertCountEqual(partial_uncfg1.keys(), ['PE1', 'PE2'])\n self.assertMultiLineEqual(\n str(partial_uncfg1['PE1']),\n '\\n'.join([\n 'vlan 100',\n ' no media enet',\n ' exit',\n ]))\n\n partial_unconfigure = vlan.build_unconfig(apply=False,\n attributes={'device_attr': \\\n {'*': {'access_map_attr': \\\n {'*': \"access_map_action\"}}}})\n self.assertCountEqual(partial_unconfigure.keys(), ['PE1', 'PE2'])\n 
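\n # --- Editor's note: toy sketch, not part of the original test ---\n # The nested 'attributes' dict above is a selection filter for\n # build_unconfig(): '*' fans out over every key at that level and a string\n # leaf names the single attribute to unconfigure. A standalone walk looks\n # roughly like this (hypothetical helper, not Genie's implementation):\n #\n # def select(tree, spec):\n # if isinstance(spec, str): # leaf: attribute name\n # yield (spec,)\n # return\n # for key, sub in spec.items():\n # for t in (tree.keys() if key == '*' else [key]):\n # for rest in select(tree[t], sub):\n # yield (t,) + rest\n #\n # tree = {'device_attr': {'PE1': {'access_map_attr': {'ed': {}}}}}\n # spec = {'device_attr': {'*': {'access_map_attr': {'*': 'access_map_action'}}}}\n # list(select(tree, spec))\n # # -> [('device_attr', 'PE1', 'access_map_attr', 'ed', 'access_map_action')]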
self.assertMultiLineEqual(\n str(partial_unconfigure['PE1']),\n '\\n'.join([\n 'vlan access-map ed',\n ' no action drop',\n ' exit',\n ]))\n\n all_vlan_interface_uncfg = vlan.build_unconfig(apply=False,\n attributes={ \\\n 'device_attr': {'*': { \\\n 'interface_attr': '*'}}})\n self.assertCountEqual(all_vlan_interface_uncfg.keys(), ['PE1', 'PE2'])\n self.assertMultiLineEqual(\n str(all_vlan_interface_uncfg['PE2']),\n '\\n'.join([\n 'interface Ethernet0/0/2',\n ' no switchport mode trunk',\n ' no switchport trunk allowed vlan 200-201',\n ' exit',\n ]))\n\n partial_vlan_interface_uncfg = vlan.build_unconfig(apply=False,\n attributes={ \\\n 'device_attr': {'*': \\\n {'interface_attr': \\\n {\n '*': \"sw_trunk_allowed_vlan\"}}}})\n self.assertCountEqual(partial_vlan_interface_uncfg.keys(), ['PE1', 'PE2'])\n self.assertMultiLineEqual(\n str(partial_vlan_interface_uncfg['PE2']),\n '\\n'.join([\n 'interface Ethernet0/0/2',\n ' no switchport trunk allowed vlan 200-201',\n ' exit',\n ]))\n\n def test_cfg_with_args(self):\n testbed = Genie.testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='nxos')\n dev2 = Device(testbed=testbed, name='PE2', os='nxos')\n intf1 = Interface(name='Ethernet0/0/1', device=dev1, layer=Layer.L3)\n intf2 = Interface(name='Ethernet0/0/2', device=dev2, layer=Layer.L2)\n link = Link(name='1_2_1', testbed=testbed)\n link.connect_interface(interface=intf1)\n link.connect_interface(interface=intf2)\n vlan = Vlan(vlan_id=100)\n link.add_feature(vlan)\n\n access_map_id = 'ed'\n vlan_configuration_id = '3'\n\n vlan.device_attr[dev1]\n vlan.device_attr[dev2]\n vlan.device_attr[dev1].interface_attr[intf1]\n vlan.device_attr[dev2].interface_attr[intf2]\n vlan.device_attr[dev2].interface_attr[intf2].switchport_mode = \\\n L2_type.TRUNK\n vlan.device_attr[dev2].interface_attr[intf2].sw_trunk_allowed_vlan = \\\n '200-201'\n vlan.device_attr[dev1].access_map_attr[access_map_id]\n vlan.device_attr[dev2].access_map_attr[access_map_id]\n vlan.device_attr[dev1].vlan_configuration_attr[vlan_configuration_id]\n vlan.device_attr[dev2].vlan_configuration_attr[vlan_configuration_id]\n\n # Defining attributes section\n vlan.shutdown = False\n with self.assertRaises(ValueError):\n vlan.media = 'invalid'\n vlan.media = 'enet'\n self.assertIs(type(vlan.media), Vlan.Media)\n vlan.egress_load_balance = True\n vlan.device_attr[dev1].access_map_action = 'drop'\n vlan.datalink_flow_monitor = True\n\n intf1.ipv4 = '192.168.3.11'\n intf1.ipv4.netmask = '255.255.255.0'\n intf1.speed = 1000\n intf1.mtu = 500\n intf1.ipv6 = '2001::12:1'\n\n cfg1 = vlan.build_config(apply=False)\n cfg2 = intf1.build_config(apply=False)\n self.assertCountEqual(cfg1.keys(), ['PE1', 'PE2'])\n\n self.assertMultiLineEqual(\n str(cfg1['PE1']),\n '\\n'.join([\n 'vlan 100',\n ' media enet',\n ' no shutdown',\n ' exit',\n 'vlan access-map ed',\n ' action drop',\n ' exit',\n 'vlan configuration 3',\n ' egress port-channel load-balance random',\n ' exit',\n 'interface Ethernet0/0/1',\n ' mtu 500',\n ' ip address 192.168.3.11 255.255.255.0',\n ' ipv6 address 2001::12:1/128',\n ' speed 1000',\n ' exit',\n ]))\n\n self.assertMultiLineEqual(\n str(cfg1['PE2']),\n '\\n'.join([\n 'vlan 100',\n ' media enet',\n ' no shutdown',\n ' exit',\n 'vlan access-map ed',\n ' exit',\n 'vlan configuration 3',\n ' egress port-channel load-balance random',\n ' exit',\n 'interface Ethernet0/0/2',\n ' switchport mode trunk',\n ' switchport trunk allowed vlan 200-201',\n ' exit',\n ]))\n\n self.assertMultiLineEqual(\n str(cfg2),\n '\\n'.join([\n 
'interface Ethernet0/0/1',\n ' mtu 500',\n ' ip address 192.168.3.11 255.255.255.0',\n ' ipv6 address 2001::12:1/128',\n ' speed 1000',\n ' exit',\n ]))\n\n uncfg = vlan.build_unconfig(apply=False, attributes={'device_attr': {\n dev1: {\n 'shutdown': None}\n }})\n self.assertMultiLineEqual(\n str(uncfg['PE1']),\n '\\n'.join([\n 'vlan 100',\n ' shutdown',\n ' exit',\n ]))\n\n def test_new_vlan_cfg(self):\n Genie.testbed = testbed = Testbed()\n dev1 = Device(testbed=testbed, name='N95', os='nxos')\n\n with self.assertNoWarnings():\n Genie.testbed = None\n with self.assertRaises(TypeError):\n vlan = Vlan()\n Genie.testbed = testbed\n\n vlan = Vlan(vlan='100')\n vlan.device_attr[dev1].vlan_attr['100'].name = 'new_vlan'\n vlan.device_attr[dev1].vlan_attr['100'].state = 'active'\n\n self.assertIs(vlan.testbed, testbed)\n\n dev1.add_feature(vlan)\n\n cfgs = vlan.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join([\n 'vlan 100',\n ' name new_vlan',\n ' state active',\n ' exit',\n ]))\n\n def test_cfg(self):\n Genie.testbed = testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='nxos')\n\n with self.assertNoWarnings():\n Genie.testbed = None\n with self.assertRaises(TypeError):\n vlan = Vlan()\n Genie.testbed = testbed\n\n vlan = Vlan(vlan='102')\n vlan.device_attr[dev1].vlan_attr['102'].name = 'vlan2'\n vlan.device_attr[dev1].vlan_attr['102'].state = 'active'\n vlan.device_attr[dev1].vlan_attr['102'].mode = 'ce'\n\n self.assertIs(vlan.testbed, testbed)\n\n dev1.add_feature(vlan)\n\n cfgs = vlan.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join([\n 'vlan 102',\n ' name vlan2',\n ' state active',\n ' mode ce',\n ' exit',\n ]))\n\n def test_uncfg(self):\n Genie.testbed = testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='nxos')\n\n with self.assertNoWarnings():\n Genie.testbed = None\n with self.assertRaises(TypeError):\n vlan = Vlan()\n Genie.testbed = testbed\n\n vlan = Vlan(vlan='102')\n vlan.device_attr[dev1].vlan_attr['102'].name = 'vlan2'\n vlan.device_attr[dev1].vlan_attr['102'].state = 'active'\n vlan.device_attr[dev1].vlan_attr['102'].mode = 'ce'\n\n self.assertIs(vlan.testbed, testbed)\n\n dev1.add_feature(vlan)\n\n cfgs = vlan.build_unconfig(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join([\n 'no vlan 102',\n ]))\n\n def test_cfg_with_igmp(self):\n Genie.testbed = testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='nxos')\n\n with self.assertNoWarnings():\n Genie.testbed = None\n with self.assertRaises(TypeError):\n vlan = Vlan()\n Genie.testbed = testbed\n\n vlan = Vlan(vlan='102')\n vlan.device_attr[dev1].vlan_attr['102'].name = 'vlan2'\n vlan.device_attr[dev1].vlan_attr['102'].state = 'active'\n vlan.device_attr[dev1].vlan_attr['102'].mode = 'ce'\n\n vlan.device_attr[dev1].config_vlan_attr['102'].config_vlan_id = '102'\n vlan.device_attr[dev1].config_vlan_attr['102'].ip_igmp_snooping = True\n\n self.assertIs(vlan.testbed, testbed)\n\n dev1.add_feature(vlan)\n\n cfgs = vlan.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join([\n 'vlan 102',\n ' name vlan2',\n ' state active',\n ' mode ce',\n ' exit',\n 'vlan configuration 102',\n ' ip igmp snooping',\n ' exit'\n ]))\n\n un_cfgs = 
vlan.build_unconfig(apply=False)\n self.assertMultiLineEqual(str(un_cfgs[dev1.name]), '\\n'.join([\n 'no vlan 102',\n 'no vlan configuration 102',\n ]))\n\n un_cfgs = vlan.build_unconfig(apply=False,\n attributes={'device_attr': {\n dev1: {\n 'vlan_attr': {\n '102': {\"mode\": None}\n },\n 'config_vlan_attr': {\n '102': {\"ip_igmp_snooping\": None}\n }\n }\n }\n })\n self.assertCountEqual(un_cfgs.keys(), [dev1.name])\n self.assertMultiLineEqual(str(un_cfgs[dev1.name]), '\\n'.join([\n 'vlan 102',\n ' no mode ce',\n ' exit',\n 'vlan configuration 102',\n ' no ip igmp snooping',\n ' exit',\n ]))\n\n def test_cfg_without_igmp(self):\n Genie.testbed = testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='nxos')\n\n with self.assertNoWarnings():\n Genie.testbed = None\n with self.assertRaises(TypeError):\n vlan = Vlan()\n Genie.testbed = testbed\n\n vlan = Vlan(vlan='102')\n vlan.device_attr[dev1].vlan_attr['102'].name = 'vlan2'\n vlan.device_attr[dev1].vlan_attr['102'].state = 'active'\n vlan.device_attr[dev1].vlan_attr['102'].mode = 'ce'\n\n vlan.device_attr[dev1].config_vlan_attr['102'].config_vlan_id = '102'\n\n self.assertIs(vlan.testbed, testbed)\n\n dev1.add_feature(vlan)\n\n cfgs = vlan.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join([\n 'vlan 102',\n ' name vlan2',\n ' state active',\n ' mode ce',\n ' exit',\n 'vlan configuration 102',\n ' exit',\n ]))\n\n def test_enable_disable(self):\n\n # For failures\n self.maxDiff = None\n\n # Set testbed\n Genie.testbed = testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='nxos')\n\n # Create Vlan object\n vlan1 = Vlan()\n dev1.add_feature(vlan1)\n vlan1.device_attr[dev1].enabled = True\n\n # Check config strings built correctly\n cfgs = vlan1.build_config(apply=False)\n self.assertMultiLineEqual(\n str(cfgs[dev1.name]),\n '\\n'.join([\n 'feature interface-vlan',\n 'feature vn-segment-vlan-based',\n ]))\n\n # Unconfigure\n uncfgs = vlan1.build_unconfig(apply=False)\n self.assertMultiLineEqual(\n str(uncfgs[dev1.name]),\n '\\n'.join([\n 'no feature interface-vlan',\n 'no feature vn-segment-vlan-based',\n ]))\n\n dev1.remove_feature(vlan1)\n\n # Create Vlan object\n vlan2 = Vlan()\n dev1.add_feature(vlan2)\n vlan2.device_attr[dev1].enabled_interface_vlan = True\n\n # Check config strings built correctly\n cfgs = vlan2.build_config(apply=False)\n self.assertMultiLineEqual(\n str(cfgs[dev1.name]),\n '\\n'.join([\n 'feature interface-vlan',\n ]))\n\n # Unconfigure\n uncfgs = vlan2.build_unconfig(apply=False)\n self.assertMultiLineEqual(\n str(uncfgs[dev1.name]),\n '\\n'.join([\n 'no feature interface-vlan',\n ]))\n\n # Remove feature\n dev1.remove_feature(vlan2)\n\n # Create Vlan object\n vlan3 = Vlan()\n dev1.add_feature(vlan3)\n vlan3.device_attr[dev1].enabled_vn_segment_vlan_based = True\n\n # Build config\n cfgs = vlan3.build_config(apply=False)\n self.assertMultiLineEqual(\n str(cfgs[dev1.name]),\n '\\n'.join([\n 'feature vn-segment-vlan-based',\n ]))\n\n # Unconfigure\n uncfgs = vlan3.build_unconfig(apply=False)\n self.assertMultiLineEqual(\n str(uncfgs[dev1.name]),\n '\\n'.join([\n 'no feature vn-segment-vlan-based',\n ]))\n\nif __name__ == '__main__':\n unittest.main()", "id": "1881100", "language": "Python", "matching_score": 6.871676445007324, "max_stars_count": 0, "path": "pkgs/conf-pkg/src/genie/libs/conf/vlan/nxos/tests/test_vlan.py" }, { "content": "#!/usr/bin/env python\n\n#python\nimport unittest\nfrom unittest.mock 
import Mock\n\n# Genie package\nfrom genie.conf.tests import TestCase\nfrom genie.conf import Genie\nfrom genie.conf.base import Testbed, Device, Link, Interface\n\n# Genie XBu_shared\nfrom genie.libs.conf.vlan import Vlan\nfrom genie.libs.conf.base import Routing, \\\n IPv4Neighbor, \\\n IPv4Address, \\\n IPv6Address\n\n\nclass test_vlan(TestCase):\n\n def test_init(self):\n testbed = Genie.testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxe')\n dev2 = Device(testbed=testbed, name='PE2', os='iosxe')\n intf1 = Interface(name='GigabitEthernet0/0/1',device=dev1)\n intf2 = Interface(name='GigabitEthernet0/0/2',device=dev2)\n intf3 = Interface(name='Vlan100',device=dev1)\n link = Link(name='1_2_1',testbed=testbed)\n link.connect_interface(interface=intf1)\n link.connect_interface(interface=intf2)\n self.assertSetEqual(\n set(link.find_interfaces()),\n set([intf1, intf2]))\n self.assertSetEqual(\n set(dev1.find_interfaces()),\n set([intf1, intf3]))\n self.assertSetEqual(\n set(dev2.find_interfaces()),\n set([intf2]))\n\n vlan = Vlan()\n\n self.assertSetEqual(set(vlan.devices), set([]))\n self.assertSetEqual(set(vlan.links), set([]))\n\n link.add_feature(vlan)\n self.assertCountEqual(vlan.devices, [dev1, dev2])\n self.assertSetEqual(set(vlan.links), set([link]))\n self.assertSetEqual(set(vlan.interfaces), set([intf1, intf2]))\n\n with self.assertRaises(AttributeError):\n vlan.access_map_id\n\n with self.assertRaises(AttributeError):\n vlan.vlan_configuration_id\n\n def test_basic_cfg(self):\n testbed = Genie.testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxe')\n dev2 = Device(testbed=testbed, name='PE2', os='iosxe')\n intf1 = Interface(name='GigabitEthernet0/0/1',device=dev1)\n intf2 = Interface(name='GigabitEthernet0/0/2',device=dev2)\n intf3 = Interface(name='Vlan100',device=dev1)\n link = Link(name='1_2_1',testbed=testbed)\n link.connect_interface(interface=intf1)\n link.connect_interface(interface=intf2)\n vlan = Vlan()\n link.add_feature(vlan)\n vlan.vlan_id = 100\n access_map_id = 'ed'\n vlan_configuration_id = '3'\n\n vlan.device_attr[dev1]\n vlan.device_attr[dev2]\n vlan.device_attr[dev1].interface_attr[intf1].switchport_mode = \\\n 'trunk'\n vlan.device_attr[dev1].interface_attr[intf1].sw_trunk_allowed_vlan = \\\n '200-201'\n vlan.device_attr[dev1].access_map_attr[access_map_id]\n vlan.device_attr[dev2].access_map_attr[access_map_id]\n vlan.device_attr[dev1].vlan_configuration_attr[vlan_configuration_id]\n vlan.device_attr[dev2].vlan_configuration_attr[vlan_configuration_id]\n\n # Defining attributes section\n vlan.shutdown = False\n with self.assertRaises(ValueError):\n vlan.media = 'invalid'\n vlan.media = 'enet'\n self.assertIs(type(vlan.media), Vlan.Media)\n vlan.accounting_type = 'input'\n vlan.device_attr[dev1].access_map_action = 'drop'\n vlan.datalink_flow_monitor = True\n vlan.private_vlan_type = 'association'\n vlan.private_vlan_association_action = 'add'\n vlan.private_vlan_association_ids = '201,205'\n\n cfg1 = vlan.build_config(apply=False)\n\n self.assertCountEqual(cfg1.keys(), ['PE1', 'PE2'])\n\n self.assertMultiLineEqual(\n str(cfg1['PE1']),\n '\\n'.join([\n 'vlan 100',\n ' media enet',\n ' no shutdown',\n ' private-vlan association add 201,205',\n ' exit',\n 'vlan accounting input',\n 'vlan access-map ed',\n ' action drop',\n ' exit',\n 'vlan configuration 3',\n ' datalink flow monitor',\n ' exit',\n 'interface GigabitEthernet0/0/1',\n ' switchport mode trunk',\n ' switchport trunk allowed vlan 200-201',\n ' exit',\n 
]))\n\n self.assertMultiLineEqual(\n str(cfg1['PE2']),\n '\\n'.join([\n 'vlan 100',\n ' media enet',\n ' no shutdown',\n ' private-vlan association add 201,205',\n ' exit',\n 'vlan accounting input',\n 'vlan access-map ed',\n ' exit',\n 'vlan configuration 3',\n ' datalink flow monitor',\n ' exit',\n ]))\n\n def test_basic_uncfg(self):\n testbed = Genie.testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxe')\n dev2 = Device(testbed=testbed, name='PE2', os='iosxe')\n intf1 = Interface(name='GigabitEthernet0/0/1',device=dev1)\n intf1.switchport = True\n intf2 = Interface(name='GigabitEthernet0/0/2',device=dev2)\n intf3 = Interface(name='Vlan100',device=dev1)\n vlan = Vlan()\n vlan.device_attr[dev1].vlan_id = 100\n vlan.device_attr[dev2].vlan_id = 300\n access_map_id = 'ed'\n vlan_configuration_id = '3'\n vlan.device_attr[dev1]\n vlan.device_attr[dev2]\n vlan.device_attr[dev1].interface_attr[intf1].switchport_mode = \\\n 'trunk'\n vlan.device_attr[dev1].interface_attr[intf1].sw_trunk_allowed_vlan = \\\n '200-201'\n\n vlan.device_attr[dev1].access_map_attr[access_map_id]\n vlan.device_attr[dev2].access_map_attr[access_map_id]\n vlan.device_attr[dev1].vlan_configuration_attr[vlan_configuration_id]\n vlan.device_attr[dev2].vlan_configuration_attr[vlan_configuration_id]\n\n # Defining attributes section\n with self.assertRaises(ValueError):\n vlan.media = 'invalid'\n vlan.media = 'enet'\n self.assertIs(type(vlan.media), Vlan.Media)\n vlan.accounting_type = 'input'\n vlan.device_attr[dev1].access_map_action = 'drop'\n\n # Unconfig testing\n # Set a mock\n dev1.configure = Mock()\n dev2.configure = Mock()\n dev1.add_feature(vlan)\n dev2.add_feature(vlan)\n # Mock config\n\n uncfg1 = vlan.build_unconfig(apply=False)\n self.assertCountEqual(uncfg1.keys(), ['PE1', 'PE2'])\n self.assertMultiLineEqual(\n str(uncfg1['PE1']),\n '\\n'.join([\n 'no vlan 100',\n 'no vlan accounting input',\n 'no vlan access-map ed',\n 'no vlan configuration 3',\n 'interface GigabitEthernet0/0/1',\n ' no switchport mode trunk',\n ' no switchport trunk allowed vlan 200-201',\n ' exit',\n ]))\n\n self.assertMultiLineEqual(\n str(uncfg1['PE2']),\n '\\n'.join([\n 'no vlan 300',\n 'no vlan accounting input',\n 'no vlan access-map ed',\n 'no vlan configuration 3',\n ]))\n\n uncfg_intf = intf1.build_unconfig(apply=False, attributes=\"switchport\")\n self.assertMultiLineEqual(\n str(uncfg_intf),\n '\\n'.join([\n 'interface GigabitEthernet0/0/1',\n ' no switchport',\n ' exit',\n ]))\n\n partial_uncfg1 = vlan.build_unconfig(apply=False,\n attributes={'device_attr':\\\n {'*':\"media\"}})\n self.assertCountEqual(partial_uncfg1.keys(), ['PE1', 'PE2'])\n self.assertMultiLineEqual(\n str(partial_uncfg1['PE1']),\n '\\n'.join([\n 'vlan 100',\n ' no media enet',\n ' exit',\n ]))\n\n partial_unconfigure = vlan.build_unconfig(apply=False,\n attributes={'device_attr':\\\n {'*':{'access_map_attr':\\\n {'*':\"access_map_action\"}}}})\n self.assertCountEqual(partial_unconfigure.keys(), ['PE1', 'PE2'])\n self.assertMultiLineEqual(\n str(partial_unconfigure['PE1']),\n '\\n'.join([\n 'vlan access-map ed',\n ' no action drop',\n ' exit',\n ]))\n\n all_vlan_interface_uncfg = vlan.build_unconfig(apply=False,\n attributes={'device_attr':\\\n {'*':{'interface_attr':'*'}}})\n self.assertCountEqual(all_vlan_interface_uncfg.keys(), ['PE1', 'PE2'])\n self.assertMultiLineEqual(\n str(all_vlan_interface_uncfg['PE1']),\n '\\n'.join([\n 'interface GigabitEthernet0/0/1',\n ' no switchport mode trunk',\n ' no switchport trunk allowed vlan 
200-201',\n ' exit',\n ]))\n\n partial_vlan_interface_uncfg = vlan.build_unconfig(apply=False,\n attributes={'device_attr':\\\n {'*':{'interface_attr':\\\n {'*':\"sw_trunk_allowed_vlan\"}}}})\n self.assertCountEqual(partial_vlan_interface_uncfg.keys(), ['PE1', 'PE2'])\n self.assertMultiLineEqual(\n str(partial_vlan_interface_uncfg['PE1']),\n '\\n'.join([\n 'interface GigabitEthernet0/0/1',\n ' no switchport trunk allowed vlan 200-201',\n ' exit',\n ]))\n\n def test_new_vlan_cfg(self):\n Genie.testbed = testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxe')\n\n with self.assertNoWarnings():\n Genie.testbed = None\n with self.assertRaises(TypeError):\n vlan = Vlan()\n Genie.testbed = testbed\n\n vlan = Vlan(vlan='100')\n vlan.device_attr[dev1].vlan_attr['100'].name = 'new_vlan'\n vlan.device_attr[dev1].vlan_attr['100'].state = 'active'\n\n self.assertIs(vlan.testbed, testbed)\n\n dev1.add_feature(vlan)\n\n cfgs = vlan.build_config(apply=False)\n self.assertCountEqual(cfgs.keys(), [dev1.name])\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.join([\n 'vlan 100',\n ' name new_vlan',\n ' state active',\n ' exit',\n ]))\n\n def test_new_vlan_uncfg(self):\n Genie.testbed = testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxe')\n with self.assertNoWarnings():\n vlan = Vlan(vlan='100')\n vlan.device_attr[dev1].vlan_attr['100']\n\n self.assertIs(vlan.testbed, testbed)\n\n dev1.add_feature(vlan)\n\n uncfgs = vlan.build_unconfig(apply=False)\n self.assertCountEqual(uncfgs.keys(), [dev1.name])\n self.assertMultiLineEqual(str(uncfgs[dev1.name]), '\\n'.join([\n 'no vlan 100',\n ]))\n\n def test_uncfg_new_vlan_name(self):\n Genie.testbed = testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxe')\n with self.assertNoWarnings():\n vlan = Vlan(vlan='100')\n vlan.device_attr[dev1].vlan_attr['100'].name = \"new_vlan\"\n\n self.assertIs(vlan.testbed, testbed)\n\n dev1.add_feature(vlan)\n\n uncfgs = vlan.build_unconfig(apply=False, attributes={'device_attr': \\\n {'*':\n {'vlan_attr': \\\n {'*': \"name\"}}}})\n self.assertCountEqual(uncfgs.keys(), [dev1.name])\n self.assertMultiLineEqual(str(uncfgs[dev1.name]), '\\n'.join([\n 'vlan 100',\n ' no name new_vlan',\n ' exit',\n ]))\n\n def test_new_vlan_unshut(self):\n Genie.testbed = testbed = Testbed()\n dev1 = Device(testbed=testbed, name='PE1', os='iosxe')\n with self.assertNoWarnings():\n vlan = Vlan(vlan='100')\n vlan.device_attr[dev1].vlan_attr['100']\n vlan.device_attr[dev1].vlan_attr['100'].shutdown = False\n self.assertIs(vlan.testbed, testbed)\n\n dev1.add_feature(vlan)\n\n partial_uncfg = vlan.build_config(apply=False)\n self.assertCountEqual(partial_uncfg.keys(), [dev1.name])\n self.assertMultiLineEqual(str(partial_uncfg[dev1.name]), '\\n'.join([\n 'vlan 100',\n ' no shutdown',\n ' exit',\n ]))\n\nif __name__ == '__main__':\n unittest.main()\n", "id": "7406713", "language": "Python", "matching_score": 4.397848129272461, "max_stars_count": 0, "path": "pkgs/conf-pkg/src/genie/libs/conf/vlan/iosxe/tests/test_vlan.py" }, { "content": "\"\"\"Implement IOSXE (iosxe) Specific Configurations for Vlan objects.\n\"\"\"\n\n# Table of contents:\n# class Vlan:\n# class DeviceAttributes:\n# class AccessMapAttributes:\n# class VlanConfigurationAttributes:\n# class InterfaceAttributes:\n\n# Python\nfrom abc import ABC\n\n# Genie package\nfrom genie.conf.base.attributes import AttributesHelper\nfrom genie.conf.base import Interface\nfrom genie.conf.base.config import CliConfig\nfrom 
genie.conf.base.cli import CliConfigBuilder\n\n\nclass Vlan(ABC):\n\n class DeviceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # iosxe: vlan 1000 (config-vlan)\n with configurations.submode_context(\n attributes.format('vlan {vlan_id}', force=True)):\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n # iosxe: vlan 1000 / media enet\n # iosxe: vlan 1000 / media ethernet\n # iosxe: vlan 1000 / media fd-net\n # iosxe: vlan 1000 / media fddi\n # iosxe: vlan 1000 / media tokenring\n configurations.append_line(attributes.format(\n 'media {media.value}'))\n\n # iosxe: vlan 1000 / name vlan1000\n configurations.append_line(attributes.format('name {name}'))\n\n # iosxe: vlan 1000 / shutdown\n # iosxe: vlan 1000 / no shutdown\n v = attributes.value('shutdown')\n if v is not None:\n if v:\n configurations.append_line('shutdown')\n else:\n configurations.append_line('no shutdown')\n\n # iosxe: vlan 1000 / state active\n # iosxe: vlan 1000 / state suspend\n configurations.append_line(attributes.format(\n 'state {status.value}'))\n\n # iosxe: vlan 1000 / are <0-13>\n configurations.append_line(attributes.format('are {are}'))\n\n # iosxe: vlan 1000 / backupcrf enable\n # iosxe: vlan 1000 / backupcrf disable\n configurations.append_line(attributes.format(\n 'backupcrf {backupcrf.value}'))\n\n # iosxe: vlan 1000 / bridge <0-15>\n configurations.append_line(attributes.format(\n 'bridge {bridge}'))\n\n # iosxe: vlan 1000 / bridge type srb\n # iosxe: vlan 1000 / bridge type srt\n configurations.append_line(attributes.format(\n 'bridge type {bridge_type}'))\n\n # iosxe: vlan 1000 / parent <0-1005>\n configurations.append_line(attributes.format(\n 'parent {parent_id}'))\n\n # iosxe: vlan 1000 / private-vlan community\n # iosxe: vlan 1000 / private-vlan isolated\n # iosxe: vlan 1000 / private-vlan primary\n # iosxe: vlan 1000 / private-vlan association \\\n # <private_vlan_association_ids>\n # iosxe: vlan 1000 / private-vlan association add \\\n # <private_vlan_association_ids>\n # iosxe: vlan 1000 / private-vlan association remove \\\n # <private_vlan_association_ids>\n if attributes.value('private_vlan_type'):\n cfg = attributes.format('private-vlan {private_vlan_type}',\n force=True)\n v = attributes.value('private_vlan_association_action')\n if v is not None:\n cfg += ' {}'.format(v)\n association_id = \\\n attributes.value(\n 'private_vlan_association_ids', force=True)\n if association_id is not None:\n cfg += attributes.format(\n ' {private_vlan_association_ids}',\n force=True)\n\n configurations.append_line(cfg)\n\n # iosxe: vlan 1000 / remote-span\n if attributes.value('remote_span'):\n configurations.append_line(attributes.format(\n 'remote-span'))\n\n # iosxe: vlan 1000 / ring <1-1005>\n configurations.append_line(attributes.format('ring {ring}'))\n\n # iosxe: vlan 1000 / said <1-4294967294>\n configurations.append_line(attributes.format('said {said}'))\n\n # iosxe: vlan 1000 / ste <0-13>\n configurations.append_line(attributes.format('ste {ste}'))\n\n # iosxe: vlan 1000 / stp type auto\n # iosxe: vlan 1000 / stp type ibm\n # iosxe: vlan 1000 / stp type ieee\n configurations.append_line(attributes.format(\n 'stp type {stp_type}'))\n\n # iosxe: vlan 1000 / tb-vlan1 <0-1005>\n configurations.append_line(attributes.format(\n 'tb-vlan1 {tb_vlan1}'))\n\n # iosxe: vlan 1000 / tb-vlan2 <0-1005>\n 
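# e.g. setting vlan.tb_vlan2 = 1003 (hypothetical value) makes the next line emit 'tb-vlan2 1003'\n            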
configurations.append_line(attributes.format(\n 'tb-vlan2 {tb_vlan2}'))\n\n # iosxe: vlan accounting\n # iosxe: vlan accounting input\n # iosxe: vlan accounting output\n configurations.append_line(\n attributes.format('vlan accounting {accounting_type}'))\n\n # iosxe: vlan dot1q tag native\n if attributes.value('dot1q_tag_native'):\n configurations.append_line(\n attributes.format('vlan dot1q tag native'))\n\n # iosxe: vlan configuration <Vlan id list>\n configurations.append_line(\n attributes.format(\n 'vlan configuration {configuration_id_list}'))\n\n # iosxe: vlan group <group name> <Vlan id list>\n configurations.append_line(\n attributes.format('vlan group {group_name} {group_id_list}'))\n\n for sub, attributes2 in attributes.mapping_values(\n 'access_map_attr',\n keys=self.access_map_attr):\n configurations.append_block(\n sub.build_config(apply=False, attributes=attributes2,\n unconfig=unconfig, **kwargs))\n\n for sub, attributes2 in attributes.mapping_values(\n 'vlan_configuration_attr',\n keys=self.vlan_configuration_attr):\n configurations.append_block(\n sub.build_config(apply=False, attributes=attributes2,\n unconfig=unconfig, **kwargs))\n\n for sub, attributes2 in attributes.mapping_values(\n 'interface_attr',\n keys=self.interface_attr):\n configurations.append_block(\n sub.build_config(apply=False, attributes=attributes2,\n unconfig=unconfig, **kwargs))\n\n # new vlan structure for vlans\n for sub, attributes2 in attributes.mapping_values(\n 'vlan_attr', keys=self.vlan_attr):\n configurations.append_block(sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig,\n **kwargs))\n\n # new vlan structure for vlan_configs\n for sub, attributes2 in attributes.mapping_values(\n 'config_vlan_attr', keys=self.config_vlan_attr.keys()):\n configurations.append_block(sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig,\n **kwargs))\n\n\n if apply:\n if configurations:\n self.device.configure(str(configurations),\n fail_invalid=True)\n else:\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations, fail_invalid=True)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n class AccessMapAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n with configurations.submode_context(\n attributes.format('vlan access-map {access_map_id}',\n force=True)):\n if unconfig and attributes.iswildcard:\n # Never reached!\n configurations.submode_unconfig()\n\n # iosxe: vlan access-map <access_map_id> \\\n # <vlan_access_map_sequence>\n # A workaround that needs to be better handled\n if attributes.value('access_map_sequence'):\n configurations.append_line(\n attributes.format(\n 'no vlan access-map {access_map_id}'))\n configurations.append_line(\n attributes.format(\n 'vlan access-map {access_map_id}\\\n {access_map_sequence}'))\n\n # iosxe: vlan access-map <access_map_id> / action drop\n # iosxe: vlan access-map <access_map_id> / \\\n # action forward\n configurations.append_line(\n attributes.format('action {access_map_action}'))\n\n # iosxe: vlan access-map <access_map_id> / default\n\n # iosxe: vlan access-map <access_map_id> / \\\n # match ip address <access-list name>\n # iosxe: vlan access-map <access_map_id> / \\\n # match ipv6 address <access-list 
name>\n                # iosxe: vlan access-map <access_map_id> / \\\n                # match mac address <access-list name>\n                if attributes.value('access_map_match'):\n                    if attributes.value('access_list'):\n                        configurations.append_line(\n                            attributes.format('match {access_map_match}\\\n                                address {access_list}'))\n\n                # iosxe: vlan access-map <access_map_id> / no\n\n            return str(configurations)\n\n        def build_unconfig(self, apply=True, attributes=None, **kwargs):\n            return self.build_config(apply=apply, attributes=attributes,\n                                     unconfig=True, **kwargs)\n\n    class VlanConfigurationAttributes(ABC):\n\n        def build_config(self, apply=True, attributes=None, unconfig=False,\n                         **kwargs):\n            assert not apply\n            attributes = AttributesHelper(self, attributes)\n            configurations = CliConfigBuilder(unconfig=unconfig)\n            with configurations.submode_context(\n                    attributes.format(\n                        'vlan configuration {vlan_configuration_id}',\n                        force=True)):\n                if unconfig and attributes.iswildcard:\n                    # Never reached!\n                    configurations.submode_unconfig()\n\n                # iosxe: vlan configuration <vlan_configuration_id> / \\\n                # datalink flow monitor\n                if attributes.value('datalink_flow_monitor'):\n                    configurations.append_line(\n                        attributes.format('datalink flow monitor'))\n                # iosxe: vlan configuration <vlan_configuration_id> / \\\n                # device-tracking\n                # iosxe: vlan configuration <vlan_configuration_id> / \\\n                # action\n                # iosxe: vlan configuration <vlan_configuration_id> / exit\n                # iosxe: vlan configuration <vlan_configuration_id> / ip\n                # iosxe: vlan configuration <vlan_configuration_id> / ipv6\n                # iosxe: vlan configuration <vlan_configuration_id> / no\n\n            return str(configurations)\n\n        def build_unconfig(self, apply=True, attributes=None, **kwargs):\n            return self.build_config(apply=apply, attributes=attributes,\n                                     unconfig=True, **kwargs)\n\n    class InterfaceAttributes(ABC):\n\n        def build_config(self, apply=True, attributes=None, unconfig=False,\n                         **kwargs):\n            assert not apply\n            attributes = AttributesHelper(self, attributes)\n            configurations = CliConfigBuilder(unconfig=unconfig)\n            # \"Set the interface to access or trunk mode for VLANs\"\n            if attributes.value('switchport_mode'):\n                self.interface.switchport_mode = \\\n                    attributes.value('switchport_mode')\n\n            if attributes.value('sw_trunk_allowed_vlan'):\n                self.interface.sw_trunk_allowed_vlan = \\\n                    attributes.value('sw_trunk_allowed_vlan')\n\n            if attributes.value('sw_trunk_native_vlan'):\n                self.interface.sw_trunk_native_vlan = \\\n                    attributes.value('sw_trunk_native_vlan')\n\n\n            if unconfig:\n                if attributes.attributes is not None:\n                    configurations = \\\n                        self.interface.build_unconfig(\n                            apply=False,\n                            attributes=attributes.attributes)\n                else:\n                    configurations = \\\n                        self.interface.build_unconfig(\n                            apply=False,\n                            attributes={'switchport_mode': None,\n                                        'sw_trunk_allowed_vlan': None,\n                                        'sw_trunk_native_vlan': None})\n            else:\n                configurations = self.interface.build_config(apply=False)\n\n            return str(configurations)\n\n        def build_unconfig(self, apply=True, attributes=None, **kwargs):\n            return self.build_config(apply=apply, attributes=attributes,\n                                     unconfig=True, **kwargs)\n\n    # ===================================================\n    # new vlan structure\n    # ====================================================\n\n    class VlanAttributes(ABC):\n        def build_config(self, devices=None, apply=True, attributes=None,\n                         unconfig=False, **kwargs):\n            assert not apply\n            attributes = AttributesHelper(self, attributes)\n            configurations = CliConfigBuilder(unconfig=unconfig)\n\n            with configurations.submode_context(\n                    attributes.format('vlan {vlan}', 
force=True)):\n if unconfig and attributes.iswildcard:\n # Never reached!\n configurations.submode_unconfig()\n\n # shutdown\n if attributes.value('shutdown') == False:\n configurations.append_line(\n attributes.format('no shutdown'))\n elif attributes.value('shutdown') == True:\n configurations.append_line(\n attributes.format('shutdown'))\n\n # name\n if attributes.value('name'):\n configurations.append_line(\n attributes.format('name {name}'))\n\n # state\n if attributes.value('state'):\n state_value = attributes.value('state').value\n configurations.append_line(\n attributes.format('state {state}'.format(state=state_value)))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n", "id": "11357026", "language": "Python", "matching_score": 5.490806579589844, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/vlan/iosxe/vlan.py" }, { "content": "\"\"\"Implement Nexus (nxos) Specific Configurations for Vlan objects.\"\"\"\n\n# Table of contents:\n# class Vlan:\n# class DeviceAttributes:\n# class AccessMapAttributes:\n# class VlanConfigurationAttributes:\n# class InterfaceAttributes:\n\n# Python\nfrom abc import ABC\n\n# Genie package\nfrom genie.conf.base.attributes import AttributesHelper\nfrom genie.conf.base import Interface\nfrom genie.conf.base.config import CliConfig\nfrom genie.conf.base.cli import CliConfigBuilder\n\n\nclass Vlan(ABC):\n\n class DeviceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # enabled\n if attributes.value('enabled'):\n configurations.append_line(attributes.format(\n 'feature interface-vlan'))\n configurations.append_line(attributes.format(\n 'feature vn-segment-vlan-based'))\n\n # enabled_interface_vlan\n elif attributes.value('enabled_interface_vlan'):\n configurations.append_line('feature interface-vlan')\n \n # enabled_vn_segment_vlan_based\n elif attributes.value('enabled_vn_segment_vlan_based'):\n configurations.append_line('feature vn-segment-vlan-based')\n\n # nxos: vlan 1000 (config-vlan)\n with configurations.submode_context(\n attributes.format('vlan {vlan_id}', force=True)):\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n # nxos: vlan 1000 / media enet\n configurations.append_line(\n attributes.format('media {media.value}'))\n\n # nxos: vlan 1000 / name vlan1000\n configurations.append_line(attributes.format('name {name}'))\n\n # nxos: vlan 1000 / shutdown\n # nxos: vlan 1000 / no shutdown\n v = attributes.value('shutdown')\n if v is not None:\n if v:\n configurations.append_line('shutdown', unconfig_cmd='no shutdown')\n else:\n configurations.append_line('no shutdown', unconfig_cmd='shutdown')\n\n # nxos: vlan 1000 / remote-span\n if attributes.value('remote_span'):\n configurations.append_line(\n attributes.format('remote-span'))\n\n # nxos: vlan 1000 / state active\n # nxos: vlan 1000 / state suspend\n configurations.append_line(\n attributes.format('state {status.value}'))\n\n # nxos: vlan 1000 / vn-segment 4096\n if attributes.value('vn_segment_id'):\n configurations.append_line(\n attributes.format('vn-segment {vn_segment.vni_id}'))\n\n # nxos: vlan dot1q tag native\n # nxos: vlan dot1q tag native exclude control\n # nxos: vlan dot1q tag native fabricpath\n # nxos: vlan 
dot1q tag native fabricpath exclude control\n if attributes.value('dot1q_tag_native'):\n configurations.append_line(\n attributes.format('vlan dot1q tag native'))\n\n # nxos: vlan configuration <Vlan id list>\n configurations.append_line(\n attributes.format('vlan configuration'\n ' {configuration_id_list}'))\n\n # nxos: vlan filter <list name> <list of vlans>\n\n for sub, attributes2 in attributes.mapping_values(\n 'access_map_attr',\n keys=self.access_map_attr):\n configurations.append_block(\n sub.build_config(apply=False, attributes=attributes2,\n unconfig=unconfig, **kwargs))\n\n for sub, attributes2 in attributes.mapping_values(\n 'vlan_configuration_attr',\n keys=self.vlan_configuration_attr):\n configurations.append_block(\n sub.build_config(apply=False, attributes=attributes2,\n unconfig=unconfig, **kwargs))\n\n for sub, attributes2 in attributes.mapping_values(\n 'interface_attr',\n keys=self.interface_attr):\n configurations.append_block(\n sub.build_config(apply=False, attributes=attributes2,\n unconfig=unconfig, **kwargs))\n\n # new vlan structure for all vlans\n for sub, attributes2 in attributes.mapping_values(\n 'vlan_attr', keys=self.vlan_attr):\n configurations.append_block(sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig,\n **kwargs))\n\n # new vlan structure for vlan_configs\n for sub, attributes2 in attributes.mapping_values(\n 'config_vlan_attr', keys=self.config_vlan_attr.keys()):\n configurations.append_block(sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig,\n **kwargs))\n\n if apply:\n if configurations:\n self.device.configure(str(configurations),\n fail_invalid=True)\n else:\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations, fail_invalid=True)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n class AccessMapAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n with configurations.submode_context(\n attributes.format('vlan access-map {access_map_id}',\n force=True)):\n if unconfig and attributes.iswildcard:\n # Never reached!\n configurations.submode_unconfig()\n\n # No point of configuring access_map_sequence\n # nxos: vlan access-map <access_map_id> \\\n # <access_map_sequence>\n # A workaround that needs to be better handled\n if attributes.value('access_map_sequence'):\n configurations.append_line(\n attributes.format(\n 'no vlan access-map {access_map_id}'))\n configurations.append_line(\n attributes.format(\n 'vlan access-map {access_map_id} '\n '{access_map_sequence}'))\n\n # nxos: vlan access-map <access_map_id> / action drop\n # nxos: vlan access-map <access_map_id> /action forward\n # nxos: vlan access-map <access_map_id> / action \\\n # redirect <redirect_interface>\n if attributes.value('access_map_action') and \\\n attributes.value('redirect_interface'):\n configurations.append_line(\n attributes.format(\n 'action {access_map_action} '\n '{redirect_interface}'))\n else:\n configurations.append_line(\n attributes.format(\n 'action {access_map_action}'))\n\n # nxos: vlan access-map <access_map_id> / statistics\n # nxos: vlan access-map <access_map_id> / exit\n # nxos: vlan access-map <access_map_id> / match\n if attributes.value('access_map_match'):\n if 
attributes.value('access_list'):\n                        configurations.append_line(\n                            attributes.format('match {access_map_match}'\n                                              ' address {access_list}'))\n\n                # nxos: vlan access-map <access_map_id> / no\n                # nxos: vlan access-map <access_map_id> / this\n                # nxos: vlan access-map <access_map_id> / pop\n                # nxos: vlan access-map <access_map_id> / push\n                # nxos: vlan access-map <access_map_id> / where\n\n            return str(configurations)\n\n        def build_unconfig(self, apply=True, attributes=None, **kwargs):\n            return self.build_config(apply=apply, attributes=attributes,\n                                     unconfig=True, **kwargs)\n\n    class VlanConfigurationAttributes(ABC):\n\n        def build_config(self, apply=True, attributes=None, unconfig=False,\n                         **kwargs):\n            assert not apply\n            attributes = AttributesHelper(self, attributes)\n            configurations = CliConfigBuilder(unconfig=unconfig)\n            with configurations.submode_context(\n                    attributes.format(\n                        'vlan configuration {vlan_configuration_id}',\n                        force=True)):\n                if unconfig and attributes.iswildcard:\n                    # Never reached!\n                    configurations.submode_unconfig()\n\n                # iosxe: vlan configuration <vlan_configuration_id> / \\\n                # egress port-channel load-balance random\n                if attributes.value('egress_load_balance'):\n                    configurations.append_line(\n                        attributes.format(\n                            'egress port-channel load-balance random'))\n                # nxos: vlan configuration <vlan_configuration_id> / \\\n                # device-tracking\n                # nxos: vlan configuration <vlan_configuration_id> / action\n                # nxos: vlan configuration <vlan_configuration_id> / exit\n                # nxos: vlan configuration <vlan_configuration_id> / ip\n                # nxos: vlan configuration <vlan_configuration_id> / ipv6\n                # nxos: vlan configuration <vlan_configuration_id> / no\n                # nxos: vlan configuration <vlan_configuration_id> / egress\n                # nxos: vlan configuration <vlan_configuration_id> / layer-2\n                # nxos: vlan configuration <vlan_configuration_id> / \\\n                # service-policy\n                # nxos: vlan configuration <vlan_configuration_id> / this\n                # nxos: vlan configuration <vlan_configuration_id> / pop\n                # nxos: vlan configuration <vlan_configuration_id> / push\n                # nxos: vlan configuration <vlan_configuration_id> / where\n\n            return str(configurations)\n\n        def build_unconfig(self, apply=True, attributes=None, **kwargs):\n            return self.build_config(apply=apply, attributes=attributes,\n                                     unconfig=True, **kwargs)\n\n    class InterfaceAttributes(ABC):\n\n        def build_config(self, apply=True, attributes=None, unconfig=False,\n                         **kwargs):\n            assert not apply\n            attributes = AttributesHelper(self, attributes)\n            # \"Set the interface to access or trunk mode for VLANs\"\n            if attributes.value('switchport_mode'):\n                self.interface.switchport_mode = \\\n                    attributes.value('switchport_mode')\n\n            if attributes.value('sw_trunk_allowed_vlan'):\n                self.interface.sw_trunk_allowed_vlan = \\\n                    attributes.value('sw_trunk_allowed_vlan')\n\n            if unconfig:\n                if attributes.attributes is not None:\n                    configurations = \\\n                        self.interface.build_unconfig(\n                            apply=False,\n                            attributes=attributes.attributes)\n                else:\n                    configurations = \\\n                        self.interface.build_unconfig(\n                            apply=False,\n                            attributes={'switchport_mode': None,\n                                        'sw_trunk_allowed_vlan': None})\n            else:\n                configurations = self.interface.build_config(apply=False)\n\n            return str(configurations)\n\n        def build_unconfig(self, apply=True, attributes=None, **kwargs):\n            return self.build_config(apply=apply, attributes=attributes,\n                                     unconfig=True, **kwargs)\n\n    # ============== new vlan structure ===========================\n    class VlanAttributes(ABC):\n        def build_config(self, devices=None, apply=True, attributes=None,\n                         
unconfig=False, **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n with configurations.submode_context(\n attributes.format('vlan {vlan}', force=True)):\n if unconfig and attributes.iswildcard:\n # Never reached!\n configurations.submode_unconfig()\n\n # shutdown\n if attributes.value('shutdown') == False:\n configurations.append_line(\n attributes.format('no shutdown'))\n elif attributes.value('shutdown') == True:\n configurations.append_line(\n attributes.format('shutdown'))\n\n # name\n if attributes.value('name'):\n configurations.append_line(\n attributes.format('name {name}'))\n\n # state\n if attributes.value('state'):\n state_value = attributes.value('state').value\n configurations.append_line(\n attributes.format('state {state}'.format(state=state_value)))\n\n # mode\n if attributes.value('mode'):\n state_value = attributes.value('mode').value\n configurations.append_line(\n attributes.format('mode {mode.value}'))\n # vn_segment_id\n if attributes.value('vn_segment_id'):\n configurations.append_line(\n attributes.format('vn-segment {vn_segment_id}'))\n\n return str(configurations)\n\n class VlanConfigAttributes(ABC):\n def build_config(self, devices=None, apply=True, attributes=None,\n unconfig=False, **kwargs):\n assert not apply\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n with configurations.submode_context(\n attributes.format(\n 'vlan configuration {config_vlan_id}', force=True)):\n if unconfig and attributes.iswildcard:\n # Never reached!\n configurations.submode_unconfig()\n\n # ip_igmp_snooping\n if attributes.value('ip_igmp_snooping'):\n configurations.append_line('ip igmp snooping')\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n\n", "id": "3123696", "language": "Python", "matching_score": 2.2468631267547607, "max_stars_count": 0, "path": "pkgs/conf-pkg/src/genie/libs/conf/vlan/nxos/vlan.py" }, { "content": "# Genie package\nfrom genie.libs.ops.vlan.vlan import Vlan as SuperVlan\n\n# genie.libs\n#from genie.libs.parser.nxos import show_vlan, \\\n# show_interface\nfrom genie.libs.parser.nxos import show_vlan,\\\n show_feature,\\\n show_igmp,\\\n show_interface\n\n\nclass Vlan(SuperVlan):\n '''Vlan Ops Object'''\n \n def set_enable(self, item):\n try:\n for inst in item:\n if item[inst]['state'] == 'enabled':\n ret_val = True\n break\n else:\n ret_val = False\n continue\n except:\n ret_val = False\n return ret_val\n\n def transfer_to_bool(self, item):\n if 'active' not in item:\n return True\n else:\n return False\n\n def learn(self):\n '''Learn Vlan object'''\n\n # new vlan structure\n # Place holder to make it more readable\n src_vlan = '[feature]'\n dest_vlan = 'info[vlans]'\n\n self.add_leaf(cmd=show_feature.ShowFeature,\n src=src_vlan + '[interface-vlan][instance]',\n dest=dest_vlan + '[interface_vlan_enabled]',\n action=self.set_enable)\n\n self.add_leaf(cmd=show_feature.ShowFeature,\n src=src_vlan + '[vnseg_vlan][instance]',\n dest=dest_vlan + '[vn_segment_vlan_based_enabled]',\n action=self.set_enable)\n\n src_vlan = '[vlans][(?P<vlan_id>.*)]'\n dest_vlan = 'info[vlans][(?P<vlan_id>.*)]'\n\n req_keys = ['vlan_id','name','state','interfaces','mode']\n for key in req_keys:\n self.add_leaf(cmd=show_vlan.ShowVlan,\n src=src_vlan + '[{}]'.format(key),\n dest=dest_vlan + 
'[{}]'.format(key))\n\n # shutdown\n self.add_leaf(cmd=show_vlan.ShowVlan,\n src=src_vlan + '[state]',\n dest=dest_vlan + '[shutdown]',\n action=self.transfer_to_bool)\n\n self.add_leaf(cmd=show_vlan.ShowVlanIdVnSegment,\n src=src_vlan + '[vn_segment_id]',\n dest=dest_vlan + '[vn_segment_id]')\n\n src_igmp = '[vlans]'\n dest_igmp = 'info[vlans]'\n self.add_leaf(cmd=show_igmp.ShowIpIgmpSnooping,\n src=src_igmp + '[(?P<configuration_vlan_id>.*)][ip_igmp_snooping]',\n dest=dest_igmp + '[configuration][(?P<configuration_vlan_id>.*)][ip_igmp_snooping]')\n\n self.make(final_call=True)\n", "id": "11877111", "language": "Python", "matching_score": 3.865440607070923, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/vlan/nxos/vlan.py" }, { "content": "# Genie package\nfrom genie.libs.ops.vlan.vlan import Vlan as SuperVlan\n\n\nclass Vlan(SuperVlan):\n '''Vlan Ops Object'''\n\n def learn(self):\n '''Learn Vlan object'''\n\n # new vlan structure\n # Place holder to make it more readable\n src_vlan = '[vlans][(?P<vlan_id>.*)]'\n dest_vlan = 'info' + src_vlan\n\n # interface_vlan_enabled N/A\n # vn_segment_vlan_based_enabled N/A\n # ip_igmp_snooping N/A\n # mode N/A\n # vn_segment_id N/A\n\n req_key =['vlan_id','name','state','shutdown']\n for key in req_key:\n self.add_leaf(cmd='show vlan',\n src=src_vlan + '[{}]'.format(key),\n dest=dest_vlan + '[{}]'.format(key))\n\n self.make(final_call=True)", "id": "8948870", "language": "Python", "matching_score": 1.0660731792449951, "max_stars_count": 0, "path": "pkgs/ops-pkg/src/genie/libs/ops/vlan/iosxe/vlan.py" }, { "content": "''' \nACL Genie Ops Object for IOSXR - CLI.\n'''\n# # super class\nfrom genie.libs.ops.acl.acl import Acl as SuperAcl\nfrom genie.ops.base import Context\n\n# Parser\nfrom genie.libs.parser.iosxr.show_acl import ShowAclAfiAll, \\\n ShowAclEthernetServices\n\nclass Acl(SuperAcl):\n '''ACL Genie Ops Object'''\n def actions_forwarding(self, item):\n '''return accept when forwarding is permit and return drop if forwarding is deny'''\n if 'permit' == item['forwarding']:\n return 'accept'\n elif 'deny' == item['forwarding']:\n return 'drop'\n else:\n return 'reject'\n\n # ethernet types\n ether_types = {\n '0800' : 'ipv4',\n '0806' : 'arp',\n '0842' : 'wlan',\n '22F3' : 'trill',\n '22EA' : 'srp',\n '6003' : 'decnet',\n '8035' : 'rarp',\n '809B' : 'appletalk',\n '80F3' : 'aarp',\n '8100' : 'vlan',\n '8137' : 'ipx',\n '8204' : 'qnx',\n '86DD' : 'ipv6',\n '8808' : 'efc',\n '8809' : 'esp',\n '8819' : 'cobranet',\n '8847' : 'mpls-unicast',\n '8848' : 'mpls-multicast',\n '8863' : 'pppoe-discovery',\n '8864' : 'pppoe-session',\n '886D' : 'intel-ans', \n '887B' : 'homeplug', \n '888E' : 'eap',\n '8892' : 'profinet',\n '889A' : 'hyperscsi', \n '88A2' : 'aoe', \n '88A4' : 'ethercat', \n '88A8' : 'provider-bridging',\n '88AB' : 'ethernet-powerlink',\n '88B8' : 'goose',\n '88B9' : 'gse',\n '88BA' : 'sv',\n '88CC' : 'lldp',\n '88CD' : 'sercos',\n '88DC' : 'wsmp', \n '88E1' : 'homeplug-av-mme',\n '88E3' : 'mrp',\n '88E5' : 'macsec',\n '88E7' : 'pbb',\n '8902' : 'cfm',\n '8906' : 'fcoe',\n '8914' : 'fcoe-ip',\n '8915' : 'roce',\n '891D' : 'tte',\n '892F' : 'hsr'\n }\n\n def ethernet_type_conversion(self, item):\n item = item.upper()\n val = self.ether_types.get(item, \"\")\n if val:\n return val\n return item\n\n def learn(self):\n '''Learn access-list Ops'''\n\n ########################################################################\n # info\n ########################################################################\n\n # acls\n # 
acl_name\n # name\n # type\n # aces\n # seq\n # name\n # matches\n # l2\n # eth\n # destination_mac_address\n # source_mac_address\n # ether_type\n # l3\n # ipv4\n # dscp - N/A\n # ecn - N/A\n # length - N/A\n # ttl\n # ttl_operator\n # protocol - N/A\n # ihl - N/A\n # flags - N/A\n # offset - N/A\n # precedence\n # identification - N/A\n # destination_ipv4_network\n # dst\n # destination_ipv4_network\n # source_ipv4_network\n # src\n # source_ipv4_network\n # ipv6 \n # dscp - N/A\n # ecn - N/A\n # length - N/A\n # ttl - N/A\n # ttl_operator - N/A\n # protocol - N/A\n # destination_ipv6_network\n # dst\n # destination_ipv6_network\n # source_ipv6_network\n # src\n # source_ipv6_network\n # flow_label - N/A\n # l4\n # tcp \n # sequence_number - N/A\n # acknowledgement_number - N/A\n # data_offset - N/A\n # reserved - N/A\n # flags - N/A\n # window_size - N/A\n # urgent_pointer - N/A\n # options - N/A\n # source-port - N/A\n # range - N/A\n # lower_port - N/A\n # upper_port - N/A\n # operator\n # operator\n # port\n # destination_port - N/A\n # range - N/A\n # lower_port - N/A\n # upper_port - N/A\n # operator\n # operator\n # port\n # udp - N/A\n # length - N/A\n # source-port - N/A\n # range - N/A\n # lower_port - N/A\n # upper_port - N/A\n # operator - N/A\n # operator - N/A\n # port - N/A\n # destination_port - N/A\n # range - N/A\n # lower_port - N/A\n # upper_port - N/A\n # operator - N/A\n # operator - N/A\n # port - N/A\n # icmp - N/A\n # type - N/A\n # code - N/A\n # rest_of_header - N/A\n # egress_interface - N/A\n # ingress_interface - N/A\n # statistics - N/A\n # matched_packets - N/A\n # matched_octets - N/A\n # attachment_points - N/A\n # interface_id - N/A\n # interface_id - N/A\n # ingress - N/A\n # acl_sets - N/A\n # acl_name - N/A\n # name - N/A\n # ace_statistics - N/A\n # matched_packets - N/A\n # matched_octets - N/A\n # egress - N/A\n # acl_sets - N/A\n # acl_name - N/A\n # name - N/A\n # ace_statistics - N/A\n # matched_packets - N/A\n # matched_octets - N/A\n\n # l2\n l2_src = '[(?P<name>.*)][aces][(?P<aces>.*)][matches][l2][eth]'\n l2_dest = 'info[acls]' + l2_src\n\n # l3\n l3_v4_src = '[(?P<name>.*)][aces][(?P<aces>.*)][matches][l3][ipv4]'\n l3_v4_dest = 'info[acls]' + l3_v4_src\n l3_v6_src = '[(?P<name>.*)][aces][(?P<aces>.*)][matches][l3][ipv6]'\n l3_v6_dest = 'info[acls]' + l3_v6_src\n\n # l4\n l4_tcp_src = '[(?P<name>.*)][aces][(?P<aces>.*)][matches][l4][tcp]'\n l4_tcp_dest = 'info[acls]' + l4_tcp_src\n\n action_src = '[(?P<name>.*)][aces][(?P<aces>.*)][actions]'\n action_dest = 'info[acls][(?P<name>.*)][aces][(?P<aces>.*)][actions]'\n\n # prepare the keys\n # dictonary as {src: dest,}\n keys_show_acl_afi_all = {\n '[(?P<name>.*)][name]': 'info[acls][(?P<name>.*)][name]',\n '[(?P<name>.*)][type]': 'info[acls][(?P<name>.*)][type]',\n '[(?P<name>.*)][aces][(?P<aces>.*)][name]': \n 'info[acls][(?P<name>.*)][aces][(?P<aces>.*)][name]',\n action_src + '[logging]' : action_dest + '[logging]',\n l3_v4_src + '[ttl]': l3_v4_dest + '[ttl]',\n l3_v4_src + '[ttl_operator]': l3_v4_dest + '[ttl_operator]',\n l3_v4_src + '[precedence]': l3_v4_dest + '[precedence]',\n l3_v4_src + '[destination_ipv4_network][(?P<ipv4>.*)][destination_ipv4_network]': \n l3_v4_dest + '[destination_ipv4_network][(?P<ipv4>.*)][destination_ipv4_network]',\n l3_v4_src + '[source_ipv4_network][(?P<ipv4_s>.*)][source_ipv4_network]': \n l3_v4_dest + '[source_ipv4_network][(?P<ipv4_s>.*)][source_ipv4_network]',\n \n l3_v6_src + '[ttl]': l3_v6_dest + '[ttl]',\n l3_v6_src + '[ttl_operator]': l3_v6_dest + 
'[ttl_operator]',\n l3_v6_src + '[destination_ipv6_network][(?P<ipv6>.*)][destination_ipv6_network]': \n l3_v6_dest + '[destination_ipv6_network][(?P<ipv6>.*)][destination_ipv6_network]',\n l3_v6_src + '[source_ipv6_network][(?P<ipv6_s>.*)][source_ipv6_network]': \n l3_v6_dest + '[source_ipv6_network][(?P<ipv6_s>.*)][source_ipv6_network]',\n \n l4_tcp_src + '[established]': l4_tcp_dest + '[established]',\n l4_tcp_src + '[source-port]': l4_tcp_dest + '[source-port]',\n l4_tcp_src + '[destination_port]': l4_tcp_dest + '[destination_port]',\n }\n\n for src, dst in keys_show_acl_afi_all.items(): \n self.add_leaf(cmd=ShowAclAfiAll,\n src=src,\n dest=dst)\n # enabled\n self.add_leaf(cmd=ShowAclAfiAll,\n src=action_src,\n dest=action_dest + '[forwarding]',\n action=self.actions_forwarding)\n # prepare the keys\n # dictonary as {src: dest,}\n keys_ethernet_services = {\n '[(?P<name>.*)][name]': 'info[acls][(?P<name>.*)][name]',\n '[(?P<name>.*)][type]': 'info[acls][(?P<name>.*)][type]',\n '[(?P<name>.*)][aces][(?P<aces>.*)][name]': \n 'info[acls][(?P<name>.*)][aces][(?P<aces>.*)][name]',\n action_src + '[logging]' : action_dest + '[logging]',\n l2_src + '[destination_mac_address]': l2_dest + '[destination_mac_address]',\n l2_src + '[source_mac_address]': l2_dest + '[source_mac_address]',\n }\n\n for src, dst in keys_ethernet_services.items(): \n self.add_leaf(cmd=ShowAclEthernetServices,\n src=src,\n dest=dst)\n \n # enabled\n self.add_leaf(cmd=ShowAclEthernetServices,\n src=action_src,\n dest=action_dest + '[forwarding]',\n action=self.actions_forwarding)\n\n # ether_type conversion\n self.add_leaf(cmd=ShowAclEthernetServices,\n src=l2_src + '[ether_type]',\n dest=l2_dest + '[ether_type]',\n action=self.ethernet_type_conversion)\n # make to write in cache\n self.make(final_call=True)", "id": "3722177", "language": "Python", "matching_score": 5.130746364593506, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/acl/iosxr/acl.py" }, { "content": "''' \nACL Genie Ops Object for IOSXE - CLI.\n'''\n# # super class\nfrom genie.libs.ops.acl.acl import Acl as SuperAcl\n# Parser\nfrom genie.libs.parser.iosxe.show_acl import ShowAccessLists\n\n\nclass Acl(SuperAcl):\n '''ACL Genie Ops Object'''\n\n def learn(self):\n '''Learn access-list Ops'''\n \n ########################################################################\n # info\n ########################################################################\n\n # unsupported keys\n # ecn, length, ihl, flags, offset, identification\n # flow_label, acknowledgement_number, data_offset\n # reserved, flags, window_size, urgent_pointer\n # rest_of_header, egress_interface, ingress_interface\n # ingress_interface, attachment_points, sequence_number\n\n # l2\n l2_src = '[(?P<name>.*)][aces][(?P<aces>.*)][matches][l2][eth]'\n l2_dest = 'info[acls]' + l2_src\n\n # l3\n l3_v4_src = '[(?P<name>.*)][aces][(?P<aces>.*)][matches][l3][ipv4]'\n l3_v4_dest = 'info[acls]' + l3_v4_src\n l3_v6_src = '[(?P<name>.*)][aces][(?P<aces>.*)][matches][l3][ipv6]'\n l3_v6_dest = 'info[acls]' + l3_v6_src\n\n # l4\n l4_tcp_src = '[(?P<name>.*)][aces][(?P<aces>.*)][matches][l4][tcp]'\n l4_tcp_dest = 'info[acls]' + l4_tcp_src\n l4_udp_src = '[(?P<name>.*)][aces][(?P<aces>.*)][matches][l4][udp]'\n l4_udp_dest = 'info[acls]' + l4_udp_src\n l4_icmp_src = '[(?P<name>.*)][aces][(?P<aces>.*)][matches][l4][icmp]'\n l4_icmp_dest = 'info[acls]' + l4_icmp_src\n\n # prepare the keys\n # dictonary as {src: dest,}\n keys = {\n '[(?P<name>.*)][name]': 'info[acls][(?P<name>.*)][name]',\n 
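# each entry maps a parser result path (src) to the corresponding Ops info path (dest);\n            # the (?P<name>.*)/(?P<aces>.*) groups keep ACL and ACE keys aligned between both trees\n            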
'[(?P<name>.*)][type]': 'info[acls][(?P<name>.*)][type]',\n '[(?P<name>.*)][aces][(?P<aces>.*)][name]': \n 'info[acls][(?P<name>.*)][aces][(?P<aces>.*)][name]',\n '[(?P<name>.*)][aces][(?P<aces>.*)][actions]': \n 'info[acls][(?P<name>.*)][aces][(?P<aces>.*)][actions]',\n '[(?P<name>.*)][aces][(?P<aces>.*)][statistics]': \n 'info[acls][(?P<name>.*)][aces][(?P<aces>.*)][statistics]',\n l2_src + '[destination_mac_address]': l2_dest + '[destination_mac_address]',\n l2_src + '[source_mac_address]': l2_dest + '[source_mac_address]',\n l2_src + '[ether_type]': l2_dest + '[ether_type]',\n l3_v4_src + '[dscp]': l3_v4_dest + '[dscp]',\n l3_v4_src + '[ttl]': l3_v4_dest + '[ttl]',\n l3_v4_src + '[ttl_operator]': l3_v4_dest + '[ttl_operator]',\n l3_v4_src + '[protocol]': l3_v4_dest + '[protocol]',\n l3_v4_src + '[precedence]': l3_v4_dest + '[precedence]',\n l3_v4_src + '[destination_network][(?P<ipv4>.*)][destination_network]': \n l3_v4_dest + '[destination_ipv4_network][(?P<ipv4>.*)][destination_ipv4_network]',\n l3_v4_src + '[source_network][(?P<ipv4_s>.*)][source_network]': \n l3_v4_dest + '[source_ipv4_network][(?P<ipv4_s>.*)][source_ipv4_network]',\n l3_v6_src + '[dscp]': l3_v6_dest + '[dscp]',\n l3_v6_src + '[ttl]': l3_v6_dest + '[ttl]',\n l3_v6_src + '[ttl_operator]': l3_v6_dest + '[ttl_operator]',\n l3_v6_src + '[destination_network][(?P<ipv6>.*)][destination_network]': \n l3_v6_dest + '[destination_ipv6_network][(?P<ipv6>.*)][destination_ipv6_network]',\n l3_v6_src + '[source_network][(?P<ipv6_s>.*)][source_network]': \n l3_v6_dest + '[source_ipv6_network][(?P<ipv6_s>.*)][source_ipv6_network]',\n l3_v6_src + '[protocol]': l3_v6_dest + '[protocol]',\n l4_tcp_src + '[options]': l4_tcp_dest + '[options]',\n l4_tcp_src + '[established]': l4_tcp_dest + '[established]',\n l4_tcp_src + '[source_port]': l4_tcp_dest + '[source_port]',\n l4_tcp_src + '[destination_port]': l4_tcp_dest + '[destination_port]',\n l4_udp_src + '[source_port]': l4_udp_dest + '[source_port]',\n l4_udp_src + '[destination_port]': l4_udp_dest + '[destination_port]',\n l4_icmp_src + '[type]': l4_icmp_dest + '[type]',\n l4_icmp_src + '[code]': l4_icmp_dest + '[code]',\n }\n\n for src, dst in keys.items(): \n\n self.add_leaf(cmd='show access-lists',\n src=src,\n dest=dst)\n\n # make to write in cache\n self.make(final_call=True)\n", "id": "2142347", "language": "Python", "matching_score": 1.7668664455413818, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/acl/iosxe/acl.py" }, { "content": "\"\"\"Common configure functions for interface\"\"\"\n\n# Python\nimport logging\nfrom ipaddress import IPv4Address\n\n# Unicon\nfrom unicon.core.errors import SubCommandFailure\n\nlog = logging.getLogger(__name__)\n\n\ndef config_extended_acl(\n device,\n acl_name,\n permission,\n protocol,\n src_ip,\n src_step,\n src_wildcard,\n dst_ip,\n dst_step,\n dst_wildcard,\n dst_port,\n entries,\n):\n \"\"\" Configure extended ACL on device\n\n Args:\n device ('obj'): device object\n acl_name ('str'): acl name\n permission ('str'): (permit | deny)\n protocol ('str'): protocol\n src_ip ('str'): source start ip\n src_step ('str'): increment step for source ip\n src_wildcard ('str'): source wildcard\n dst_ip ('str'): destination start ip\n dst_step ('str'): increment step for destination ip\n dst_wildcard ('str'): destination wildcard\n dst_port ('str'): Acl destination port\n entries ('int'): Acl entries\n\n Returns:\n config\n Raises:\n SubCommandFailure: Failed to configure access-list\n \"\"\"\n config = \"ip access-list extended 
{}\\n\".format(acl_name)\n src_ip = IPv4Address(src_ip)\n src_step = IPv4Address(src_step)\n dst_ip = IPv4Address(dst_ip)\n dst_step = IPv4Address(dst_step)\n\n if dst_wildcard != \"0.0.0.0\":\n cmd = (\n \" {sequence} {permission} {protocol} {src_ip} {src_wildcard} \"\n \"{dst_ip} {dst_wildcard} eq {dst_port}\\n\"\n )\n else:\n cmd = (\n \" {sequence} {permission} {protocol} {src_ip} {src_wildcard} \"\n \"host {dst_ip} eq {dst_port}\\n\"\n )\n\n for i in range(entries):\n src_ip += int(src_step)\n dst_ip += int(dst_step)\n\n config += cmd.format(\n sequence=i + 1,\n permission=permission,\n protocol=protocol,\n src_ip=src_ip,\n src_wildcard=src_wildcard,\n dst_ip=dst_ip,\n dst_wildcard=dst_wildcard,\n dst_port=dst_port,\n )\n\n config += \" {} permit ip any any\\n\".format(i + 2)\n out = device.configure(config)\n\n if \"% Duplicate sequence\" in out or \"%Failed\" in out:\n raise SubCommandFailure(\"Failed to configure access-list\")\n\n return out\n\n\ndef config_acl_on_interface(device, interface, acl_name, inbound=True):\n \"\"\" Configures acl on interface \n\n Args:\n device ('obj'): device to use\n interface ('str'): interface to configure\n acl_name ('str'): acl to apply\n \"\"\"\n if inbound:\n log.info(\n \"Configure inbound {acl} on {intf}\".format(\n acl=acl_name, intf=interface\n )\n )\n direction = \"in\"\n else:\n log.info(\n \"Configure outbound {acl} on {intf}\".format(\n acl=acl_name, intf=interface\n )\n )\n direction = \"out\"\n\n try:\n device.configure(\n \"interface {intf}\\nip access-group {acl} {direction}\".format(\n intf=interface, acl=acl_name, direction=direction\n )\n )\n except SubCommandFailure:\n raise SubCommandFailure(\n \"Could not configure acl {acl} on interface {interface}\".format(\n acl=acl_name, interface=interface\n )\n )\n", "id": "9988598", "language": "Python", "matching_score": 2.0272340774536133, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/acl/configure.py" }, { "content": "\"\"\"Common verify functions for acl\"\"\"\n\n# Python\nimport logging\n\nfrom genie.libs.sdk.apis.utils import compare_config_dicts\n\nlog = logging.getLogger(__name__)\n\n\ndef verify_acl_applied(device, acl_name, applied_config):\n \"\"\" Verify if access list is correctly applied\n Args:\n device ('obj'): Device object\n acl_name ('str'): Access list name\n applied_config ('str'): Output from acl.configure.config_extended_acl\n Raises:\n None\n Returns:\n True\n False\n\n \"\"\"\n log.info(\"Verify access-list {} is correctly programmed\".format(acl_name))\n parsed_config = device.parse(\"show ip access-lists\", output=applied_config)\n parsed_output = device.parse(\"show ip access-lists {}\".format(acl_name))\n\n result = compare_config_dicts(parsed_config, parsed_output)\n if result:\n log.info(\"Diff:\\n{}\".format(result))\n return False\n\n return True\n", "id": "2274264", "language": "Python", "matching_score": 0.9422338604927063, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/acl/verify.py" }, { "content": "\"\"\"Common verification functions for bfd\"\"\"\n\n# Python\nimport logging\n\n# Genie\nfrom genie.utils.timeout import Timeout\nfrom genie.metaparser.util.exceptions import SchemaEmptyParserError\n\n\nlog = logging.getLogger(__name__)\n\n\ndef verify_bfd_configuration(\n device,\n interface,\n interval,\n state,\n protocols,\n max_time=60,\n check_interval=10,\n):\n \"\"\" Verifies bfd configuration is applied with correct interval and protocol\n\n Args:\n device ('obj'): device to use\n interface 
('str'): interface bfd is configured on\n interval ('str'): interval to verify\n state ('str'): state to verify\n protocols ('list'): protocols to verify\n Returns:\n True\n False\n Raises:\n None\n \"\"\"\n timeout = Timeout(max_time, check_interval)\n log.info(\n \"Verifying BFD is {} and has the following applied: interval {}, protocols {}\".format(\n state, interval, protocols\n )\n )\n while timeout.iterate():\n try:\n out = device.parse(\n \"show bfd neighbors interface {} details\".format(interface)\n )\n except SchemaEmptyParserError:\n return False\n\n if out:\n for our_address in out.get(\"our_address\", {}):\n for neighbor_address in out[\"our_address\"][our_address].get(\n \"neighbor_address\", {}\n ):\n sub_dict = out[\"our_address\"][our_address][\n \"neighbor_address\"\n ][neighbor_address]\n if (\n \"state\" in sub_dict\n and state.lower() in sub_dict[\"state\"].lower()\n and \"registered_protocols\" in sub_dict\n and set(x.lower() for x in protocols).issubset(\n x.lower() for x in sub_dict[\"registered_protocols\"]\n )\n and \"session\" in sub_dict\n ):\n session_dict = sub_dict[\"session\"]\n if (\n \"state\" in session_dict\n and state.lower() in session_dict[\"state\"].lower()\n and \"echo_function\" in session_dict\n and session_dict[\"echo_function\"]\n and \"echo_interval_ms\" in session_dict\n and session_dict[\"echo_interval_ms\"] == interval\n ):\n return True\n timeout.sleep()\n\n return False\n", "id": "9860808", "language": "Python", "matching_score": 1.5606632232666016, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/bfd/verify.py" }, { "content": "'''Common verify functions for Segment routing'''\n# Python\nimport logging\n\n# Metaparser\nfrom genie.metaparser.util.exceptions import SchemaEmptyParserError\n\nfrom genie.utils.timeout import Timeout\nfrom pyats.utils.objects import find, R\nfrom genie.libs.sdk.libs.utils.normalize import GroupKeys\n\nlog = logging.getLogger(__name__)\n\n\ndef verify_segment_routing_operation(device, loopback_interface, label_min,\n prefix_sid_index, max_time=60, check_interval=20):\n ''' Verify Segment routing operation\n\n Args:\n device (`obj`): Device object\n loopback_interface (`str`): Loopback interface\n label_min (`int`): Segment routing global block start\n prefix_sid_index (`int`): Prefix-sid index\n max_time (`int`): Max time\n check_interval (`int`): Check interval\n Returns:\n result (`bool`): Verified result\n '''\n timeout = Timeout(max_time, check_interval)\n\n while timeout.iterate():\n result = True\n\n try:\n out = device.parse('show isis segment-routing label table')\n except SchemaEmptyParserError:\n log.info(\"Device output is empty.\")\n result = False\n timeout.sleep()\n continue\n\n reqs = R(['instance', '(?P<segment_routing>.*)',\n 'label', '(?P<label>.*)',\n 'prefix_interface', '(?P<prefix_interface>.*)'])\n\n found = find([out], reqs, filter_=False, all_keys=True)\n\n if found:\n for item in found:\n if item[0] == loopback_interface:\n if item[1][3] == label_min+prefix_sid_index:\n result = True\n else:\n log.error(\"Could not find any mpls route\")\n result = False\n\n if result is True:\n return result\n\n timeout.sleep()\n\n return result\n\ndef verify_segment_routing_label_by_traceroute(device, traceroute_address,\n process_id):\n ''' Verify Segment routing label by traceroute\n\n Args:\n device (`obj`): Device object\n traceroute_address ('str): Traceroute address\n process_id ('str'): Router ISIS process ID\n Returns:\n result (`bool`): Verified result\n '''\n\n try:\n 
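# an empty or unparsable 'show isis segment-routing label table' output raises SchemaEmptyParserError, handled below\n            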
out = device.parse('show isis segment-routing label table')\n except SchemaEmptyParserError:\n log.info(\"Couldn't retrieve segment routing label details\")\n return False\n\n traceroute_result = device.api.get_traceroute_parsed_output(\n device=device, addr=traceroute_address)\n if not traceroute_result:\n log.info(\"Couldn't retrieve traceroute result\")\n return False\n\n for hop in traceroute_result['traceroute'][traceroute_address]['hops']:\n for next_hop in traceroute_result['traceroute'][traceroute_address]\\\n ['hops'][hop]['paths']:\n if 'label_info' in traceroute_result['traceroute'][traceroute_address]\\\n ['hops'][hop]['paths'][next_hop]:\n traceroute_label = traceroute_result['traceroute'][traceroute_address]\\\n ['hops'][hop]['paths'][next_hop]['label_info']['MPLS']['label']\n for label in out['instance'][process_id]['label']:\n if label == int(traceroute_label):\n log.info(\"Verified segment routing label\")\n return True\n\n return False\n", "id": "10616349", "language": "Python", "matching_score": 3.2982728481292725, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxr/segment_routing/verify.py" }, { "content": "\"\"\"Common verify functions for routing\"\"\"\n\n# Python\nimport re\nimport logging\nfrom prettytable import PrettyTable\n\n# pyATS\nfrom pyats.utils.objects import find, R\n\n# Genie\nfrom genie.utils.timeout import Timeout\nfrom genie.libs.sdk.libs.utils.normalize import GroupKeys\nfrom genie.metaparser.util.exceptions import SchemaEmptyParserError\n\n# BGP\nfrom genie.libs.sdk.apis.iosxe.bgp.get import (\n get_ip_bgp_summary,\n get_bgp_route_from_neighbors,\n get_bgp_neighbors_advertised_routes,\n)\n\n# ROUTING\nfrom genie.libs.sdk.apis.iosxe.routing.get import get_routing_routes\nfrom genie.libs.sdk.apis.iosxe.routing.get import (\n get_routing_repair_path_information,\n)\n\nlog = logging.getLogger(__name__)\n\n\ndef verify_ip_cef_nexthop_label(device, ip, table=None, expected_label=None, vrf=None,\n max_time=30, check_interval=10):\n \"\"\" Verify nexthop does (not) have expected label\n\n Args:\n device (`obj`): Device object\n ip (`str`): IP address\n expected_label (`str`): Expected label. None if no label expected\n vrf (`str`): Not used on JuniperOS\n table (`str`): Route table\n max_time (`int`): Max time, default: 30\n check_interval (`int`): Check interval, default: 10\n Returns:\n result (`bool`): Verified result\n Raises:\n N/A\n \"\"\"\n\n timeout = Timeout(max_time, check_interval)\n while timeout.iterate():\n try:\n if table:\n out = device.parse('show route table {table} {ip}'.format(table=table, ip=ip))\n else:\n out = device.parse('show route {ip}'.format(ip=ip))\n except SchemaEmptyParserError:\n log.info('Failed to parse. Device output might contain nothing.')\n timeout.sleep()\n continue\n\n for table_name in out.get('table_name', {}):\n if table and table_name not in table:\n continue\n\n for route in out['table_name'][table_name].get('routes', {}):\n if ip in route:\n hop_index_dict = out['table_name'][table_name]['routes'][route]\n for index in hop_index_dict.get('next_hop', {}).get('next_hop_list', {}):\n if '>' in hop_index_dict['next_hop']['next_hop_list'][index].get('best_route', ''):\n found_label = hop_index_dict['next_hop']['next_hop_list'][index].get('mpls_label')\n if expected_label and expected_label in found_label:\n log.info('Found the expected label \"{}\"'.format(found_label))\n return True\n elif not expected_label and not found_label:\n log.info('No label found. 
No label is expected')\n return True\n\n timeout.sleep()\n return False\n", "id": "1853043", "language": "Python", "matching_score": 3.3265697956085205, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/junos/routing/verify.py" }, { "content": "# Genie\nfrom genie.utils.timeout import Timeout\nfrom genie.metaparser.util.exceptions import SchemaEmptyParserError\n\n# CEF\nfrom genie.libs.sdk.apis.iosxe.cef.get import get_cef_repair_path_of_route\n\n\ndef is_routing_repair_path_in_cef(\n device,\n prefix,\n max_time=60,\n check_interval=10,\n vrf='default',\n address_family='ipv4',\n):\n \"\"\" Verify 'repair path' is presente in express forwarding\n\n Args:\n device ('obj'): Device object\n route ('str'): Route address\n max_time ('int'): Max time in seconds retrieving and checking output\n check_interval ('int')\n vrf ('str'): VRF name\n address_family ('str'): Address family\n Raises:\n None\n Returns:\n True\n False\n \"\"\"\n\n timeout = Timeout(max_time=max_time, interval=check_interval)\n\n while timeout.iterate():\n is_present = get_cef_repair_path_of_route(\n device=device,\n prefix=prefix,\n vrf=vrf,\n address_family=address_family,\n )\n if is_present:\n return True\n\n timeout.sleep()\n\n return False\n", "id": "1797912", "language": "Python", "matching_score": 1.9683010578155518, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/cef/verify.py" }, { "content": "# Python\nimport logging\nimport re\n\n# Genie\nfrom genie.metaparser.util.exceptions import SchemaEmptyParserError\n\nlog = logging.getLogger(__name__)\n\n\ndef get_cef_repair_path_of_route(device, prefix, vrf=\"default\", address_family=\"ipv4\"):\n \"\"\" Get 'repair path' of route\n Args:\n device ('obj'): Device object\n prefix ('str'): Prefix address\n vrf ('str'): VRF name\n address_family ('str'): Address family\n Return:\n tuple: (\n String: Repair path IP address,\n String: Repair path Interface\n )\n Raises:\n None\n \"\"\"\n try:\n output = device.parse(\"show ip cef {prefix} detail\".format(prefix=prefix))\n except SchemaEmptyParserError:\n log.info(\"Could not find cef repair path information\")\n return None, None\n\n r1 = re.compile(\n r\"attached\\-nexthop\\s+(?P<repair_next_hop_ip>\\S+)\\s+(?P<repair_next_hop_interface>\\S+)\"\n )\n\n prefixes_dict = (\n output[\"vrf\"]\n .get(vrf, {})\n .get(\"address_family\", {})\n .get(address_family, {})\n .get(\"prefix\", {})\n )\n for prefix_address in prefixes_dict:\n if prefix in prefix_address:\n for hop in prefixes_dict[prefix_address].get(\"nexthop\", {}):\n for out_interface in (\n prefixes_dict[prefix_address][\"nexthop\"]\n .get(hop, {})\n .get(\"outgoing_interface\", {})\n ):\n repair_path = (\n prefixes_dict[prefix_address][\"nexthop\"]\n .get(hop, {})[\"outgoing_interface\"][out_interface]\n .get(\"repair\", None)\n )\n\n if repair_path is None:\n continue\n\n result = r1.match(repair_path)\n if result:\n group = result.groupdict()\n repair_next_hop_ip = group[\"repair_next_hop_ip\"]\n repair_next_hop_interface = group[\"repair_next_hop_interface\"]\n\n log.info(\n \"Found repair path to address {address} and interface {interface}\".format(\n address=repair_next_hop_ip,\n interface=repair_next_hop_interface,\n )\n )\n return repair_next_hop_ip, repair_next_hop_interface\n\n log.info(\"Could not find cef repair path information\")\n return None, None\n\n\ndef get_cef_next_hop_ip_address(device, prefix, vrf=None, address_family=None):\n \"\"\" Get next hop ip address from Express Forwarding\n Args:\n device ('obj'): 
Device object\n        prefix ('str'): Prefix address\n        vrf ('str'): VRF name\n        address_family ('str'): Address family\n    Returns:\n        String: Next hop IP address\n    Raises:\n        None\n    \"\"\"\n\n    try:\n        output = device.parse(\"show ip cef {prefix} detail\".format(prefix=prefix))\n    except SchemaEmptyParserError as e:\n        log.info(\n            \"Could not find any next hop information for prefix {prefix}\".format(\n                prefix=prefix\n            )\n        )\n        return None\n\n    vrf = vrf if vrf else \"default\"\n    address_family = address_family if address_family else \"ipv4\"\n\n    prefixes_dict = (\n        output[\"vrf\"]\n        .get(vrf, {})\n        .get(\"address_family\", {})\n        .get(address_family, {})\n        .get(\"prefix\", {})\n    )\n    for prefix_address in prefixes_dict:\n        if prefix in prefix_address:\n            for hop in prefixes_dict[prefix_address].get(\"nexthop\", {}):\n                log.info(\"Found next hop address {address}\".format(address=hop))\n                return hop\n\n    log.info(\n        \"Could not find any next hop information for prefix {prefix}\".format(\n            prefix=prefix\n        )\n    )\n    return None\n\n\ndef get_cef_registred_label_to_prefix(\n    device,\n    prefix,\n    vrf=\"default\",\n    address_family=\"ipv4\",\n    interface=None,\n    nexthop_address=None,\n    output=None,\n):\n    \"\"\" Get registered label to prefix in CEF\n        Args:\n            device ('obj'): Device object\n            prefix ('str'): Prefix address\n            output ('dict'): Optional. Parsed output from command 'show ip cef {prefix} detail'\n            vrf ('str'): Optional. VRF name. Default: 'default'\n            address_family ('str'): Optional. Family name. Default: 'ipv4'\n            interface ('str'): Optional. Interface name\n            nexthop_address ('str'): Optional. Nexthop address\n        Returns:\n            int: Registered label\n        Raises:\n            ValueError: Found more than one registered label\n    \"\"\"\n\n    log.info(\"Getting registered label to prefix {prefix} in CEF\".format(prefix=prefix))\n\n    if not output:\n        try:\n            if vrf and vrf != \"default\":\n                output = device.parse(\n                    \"show ip cef vrf {vrf} {prefix} detail\".format(\n                        vrf=vrf, prefix=prefix\n                    )\n                )\n            else:\n                output = device.parse(\n                    \"show ip cef {prefix} detail\".format(prefix=prefix)\n                )\n        except SchemaEmptyParserError:\n            log.info(\n                \"Could not find any registered label for prefix {prefix}\".format(\n                    prefix=prefix\n                )\n            )\n            return None\n\n    prefixes_dict = (\n        output[\"vrf\"]\n        .get(vrf, {})\n        .get(\"address_family\", {})\n        .get(address_family, {})\n        .get(\"prefix\", {})\n    )\n\n    label = []\n\n    for pfx in prefixes_dict:\n        if prefix in pfx:\n            if interface and nexthop_address:\n                label = (\n                    prefixes_dict[pfx]\n                    .get(\"nexthop\", {})\n                    .get(nexthop_address, {})\n                    .get(\"outgoing_interface\", {})\n                    .get(interface, {})\n                    .get(\"outgoing_label\", [])\n                )\n            elif nexthop_address:\n                for interface in (\n                    prefixes_dict[pfx]\n                    .get(\"nexthop\", {})\n                    .get(nexthop_address, {})\n                    .get(\"outgoing_interface\", {})\n                ):\n                    label = (\n                        prefixes_dict[pfx][\"nexthop\"][nexthop_address]\n                        .get(\"outgoing_interface\", {})\n                        .get(interface, {})\n                        .get(\"outgoing_label\", [])\n                    )\n\n            elif interface:\n                for hop in prefixes_dict[pfx].get(\"nexthop\", {}):\n                    label = (\n                        prefixes_dict[pfx][\"nexthop\"][hop]\n                        .get(\"outgoing_interface\", {})\n                        .get(interface, {})\n                        .get(\"outgoing_label\", [])\n                    )\n            else:\n                for hop in prefixes_dict[pfx].get(\"nexthop\", {}):\n                    for interface in prefixes_dict[pfx][\"nexthop\"][hop].get(\n                        \"outgoing_interface\", {}\n                    ):\n                        label = (\n                            prefixes_dict[pfx][\"nexthop\"][hop]\n                            .get(\"outgoing_interface\", {})[interface]\n                            .get(\"outgoing_label\", [])\n                        )\n\n    if not label:\n        log.info(\n            \"Could not find any registered label for prefix {prefix}\".format(\n                prefix=prefix\n            )\n        )\n    elif len(label) > 1:\n        raise ValueError(\n            \"Command has returned 
more than one label. The following \"\n            \"labels have been returned:\\n{areas}\".format(areas=\"\\n\".join(label))\n        )\n    else:\n        label = int(label[0])\n        log.info(\"Found label {label}\".format(label=label))\n\n    return label\n", "id": "537410", "language": "Python", "matching_score": 2.392446517944336, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/cef/get.py" }, { "content": "\"\"\"Common verify functions for vrf\"\"\"\r\n\r\n# Python\r\nimport logging\r\n\r\n# Genie\r\nfrom genie.metaparser.util.exceptions import SchemaEmptyParserError\r\n\r\n# BGP\r\nfrom genie.libs.sdk.apis.iosxe.bgp.verify import (\r\n    verify_bgp_route_is_advertised,\r\n    is_route_in_bgp_table,\r\n    verify_bgp_route_is_received,\r\n)\r\n\r\n\r\n# Utils\r\nfrom genie.libs.sdk.apis.iosxe.running_config.get import (\r\n    get_running_config_section_dict,\r\n)\r\n\r\nlog = logging.getLogger(__name__)\r\n\r\n\r\ndef verify_vrf_advertised_routes(\r\n    device,\r\n    neighbor,\r\n    address_family,\r\n    config_vrf_list,\r\n    vrf_name_routes,\r\n    advertised_routes=None,\r\n):\r\n    \"\"\" Verify if each VRF/route distinguisher has at least one advertised route\r\n\r\n        Args:\r\n            device ('obj'): Device object\r\n            neighbor ('str'): Neighbor\r\n            address_family ('address_family'): Address family\r\n            config_vrf_list ('dict'): Dictionary with VRF config from command 'show config'\r\n            vrf_name_routes ('str'): VRF name to be searched under in advertised routes\r\n            advertised_routes ('dict'): Advertised routes from command:\r\n                'show bgp {address_family} all neighbors {neighbor} advertised-routes'\r\n\r\n        Returns:\r\n            True\r\n            False\r\n\r\n        Raises:\r\n            SchemaEmptyParserError\r\n    \"\"\"\r\n\r\n    if not advertised_routes:\r\n        try:\r\n            advertised_routes = device.parse(\r\n                \"show bgp {address_family} all \"\r\n                \"neighbors {neighbor} \"\r\n                \"advertised-routes\".format(\r\n                    neighbor=neighbor, address_family=address_family\r\n                )\r\n            )\r\n        except SchemaEmptyParserError as e:\r\n            return False\r\n\r\n    for vrf, vrf_data in config_vrf_list.get(\"vrf\", {}).items():\r\n        default_rd = vrf_data.get(\"route_distinguisher\", \"\")\r\n\r\n        if not default_rd:\r\n            continue\r\n\r\n        log.info(\r\n            \"VRF {vrf} and route distinguisher {rd}\".format(\r\n                vrf=vrf, rd=default_rd\r\n            )\r\n        )\r\n\r\n        rd_address_family = \"{address_family} RD {rd}\".format(\r\n            address_family=address_family, rd=default_rd\r\n        )\r\n\r\n        if not verify_bgp_route_is_advertised(\r\n            device=device,\r\n            advertised_routes=advertised_routes,\r\n            address_family=rd_address_family,\r\n            neighbor_address=neighbor,\r\n            vrf=vrf_name_routes,\r\n            default_vrf=vrf,\r\n            default_rd=default_rd,\r\n        ):\r\n\r\n            log.error(\r\n                \"VRF {vrf} and route distinguisher {rd} do not \"\r\n                \"have any advertised route on device {dev}\".format(\r\n                    vrf=vrf, rd=default_rd, dev=device.name\r\n                )\r\n            )\r\n            return False\r\n\r\n        else:\r\n            log.info(\r\n                \"VRF {vrf} and route distinguisher {rd} have \"\r\n                \"advertised routes on device {dev}\".format(\r\n                    vrf=vrf, rd=default_rd, dev=device.name\r\n                )\r\n            )\r\n\r\n    return True\r\n\r\n\r\ndef verify_vrf_routes_in_bgp_table(\r\n    device, config_vrf_list, address_family, routes_dict=None\r\n):\r\n    \"\"\" Verify that each VRF and route distinguisher has at least one route in BGP table\r\n\r\n        Args:\r\n            device ('obj'): Device object\r\n            config_vrf_list ('dict'): Dictionary with VRF config from command 'show config'\r\n            address_family ('address_family'): Address family\r\n            routes_dict ('dict'): Parsed output from command 'show ip bgp all'\r\n\r\n        Returns:\r\n            True\r\n            
\r\n\r\ndef verify_vrf_routes_in_bgp_table(\r\n    device, config_vrf_list, address_family, routes_dict=None\r\n):\r\n    \"\"\" Verify that each VRF and route distinguisher has at least one route in BGP table\r\n\r\n        Args:\r\n            device ('obj'): Device object\r\n            config_vrf_list ('dict'): Dictionary with VRF config from command 'show config'\r\n            address_family ('address_family'): Address family\r\n            routes_dict ('dict'): Parsed output from command 'show ip bgp all'\r\n\r\n        Returns:\r\n            True\r\n            False\r\n\r\n        Raises:\r\n            SchemaEmptyParserError\r\n    \"\"\"\r\n\r\n    if routes_dict is None:\r\n        try:\r\n            routes_dict = device.parse(\"show ip bgp all\")\r\n        except SchemaEmptyParserError:\r\n            return False\r\n\r\n    for vrf, vrf_data in config_vrf_list.get(\"vrf\", {}).items():\r\n\r\n        default_rd = vrf_data.get(\"route_distinguisher\", \"\")\r\n\r\n        if not default_rd:\r\n            continue\r\n\r\n        rd_address_family = \"{address_family} RD {rd}\".format(\r\n            address_family=address_family, rd=default_rd\r\n        )\r\n\r\n        log.info(\r\n            \"VRF {vrf} and route distinguisher {rd}\".format(\r\n                vrf=vrf, rd=default_rd\r\n            )\r\n        )\r\n\r\n        if not is_route_in_bgp_table(\r\n            device=device,\r\n            routes_dict=routes_dict,\r\n            address_family=rd_address_family,\r\n            default_rd=default_rd,\r\n            default_vrf=vrf,\r\n        ):\r\n\r\n            log.error(\r\n                \"VRF {vrf} and route distinguisher {rd} do not \"\r\n                \"have any route on BGP table in address family \"\r\n                \"{address_family} on device {dev}\".format(\r\n                    vrf=vrf,\r\n                    rd=default_rd,\r\n                    address_family=address_family,\r\n                    dev=device.name,\r\n                )\r\n            )\r\n            return False\r\n        else:\r\n            log.info(\r\n                \"VRF {vrf} and Route Distinguisher {rd} have routes \"\r\n                \"in BGP table on device {dev}\".format(\r\n                    vrf=vrf, rd=default_rd, dev=device.name\r\n                )\r\n            )\r\n\r\n    return True\r\n\r\n\r\ndef verify_vrf_received_routes(\r\n    device,\r\n    address_family,\r\n    neighbor,\r\n    config_vrf_list,\r\n    vrf_name_routes,\r\n    received_routes,\r\n):\r\n\r\n    \"\"\" Verify that each neighbor has at least one received route\r\n\r\n        Args:\r\n            device ('obj'): Device object\r\n            neighbor ('str'): Neighbor\r\n            address_family ('address_family'): Address family\r\n            config_vrf_list ('dict'): Dictionary with VRF config from command 'show config'\r\n            vrf_name_routes ('str'): VRF name to be searched under in received routes\r\n            received_routes ('dict'): Received routes from command:\r\n                'show bgp {address_family} all neighbors {neighbor} routes'\r\n\r\n        Returns:\r\n            True\r\n            False\r\n\r\n        Raises:\r\n            None\r\n    \"\"\"\r\n\r\n    for vrf, vrf_data in config_vrf_list.get(\"vrf\", {}).items():\r\n\r\n        default_rd = vrf_data.get(\"route_distinguisher\", \"\")\r\n\r\n        if not default_rd:\r\n            continue\r\n\r\n        rd_address_family = \"{address_family} RD {rd}\".format(\r\n            address_family=address_family, rd=default_rd\r\n        )\r\n        log.info(\r\n            \"VRF {vrf} and route distinguisher {rd}\".format(\r\n                vrf=vrf, rd=default_rd\r\n            )\r\n        )\r\n        if not verify_bgp_route_is_received(\r\n            device=device,\r\n            neighbor_address=neighbor,\r\n            vrf=vrf_name_routes,\r\n            address_family=rd_address_family,\r\n            default_rd=default_rd,\r\n            default_vrf=vrf,\r\n            received_routes=received_routes,\r\n        ):\r\n\r\n            log.error(\r\n                \"VRF {vrf} and route distinguisher {rd} do not \"\r\n                \"have any received route on device {dev}\".format(\r\n                    vrf=vrf, rd=default_rd, dev=device.name\r\n                )\r\n            )\r\n            return False\r\n        else:\r\n            log.info(\r\n                \"VRF {vrf} and Route Distinguisher {rd} have received \"\r\n                \"routes on device {dev}\".format(\r\n                    vrf=vrf, rd=default_rd, dev=device.name\r\n                )\r\n            )\r\n\r\n    return True\r\n
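\r\n\r\n# Illustrative sketch only (function name and values are hypothetical): the\r\n# running-config description check below reduces to two dictionary lookups\r\n# once the config has been parsed into a section dict.\r\ndef _example_description_lookup():\r\n    section = {\"vrf definition VRF1\": {\"description customer one\": {}}}\r\n    vrf_key = \"vrf definition {vrf}\".format(vrf=\"VRF1\")\r\n    desc_key = \"description {description}\".format(description=\"customer one\")\r\n    return isinstance(section.get(vrf_key, {}).get(desc_key), dict)\r\n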
\r\n\r\ndef verify_vrf_description_in_show_ip_vrf_detail(device, vrf, description):\r\n    \"\"\"Verify vrf description in show ip vrf detail <vrf>\r\n\r\n        Args:\r\n            device (`obj`): Device object\r\n            vrf (`str`): Vrf name\r\n            description (`str`): Description\r\n\r\n        Returns:\r\n            True\r\n            False\r\n\r\n        Raises:\r\n            SchemaEmptyParserError\r\n            Exception\r\n    \"\"\"\r\n\r\n    cmd = \"show ip vrf detail {vrf}\".format(vrf=vrf)\r\n    try:\r\n\r\n        output = device.parse(cmd)\r\n    except SchemaEmptyParserError:\r\n        raise SchemaEmptyParserError(\r\n            \"Command '{cmd}' has not returned any \" \"results\".format(cmd=cmd)\r\n        )\r\n    except Exception as e:\r\n        log.error(str(e))\r\n        raise Exception(\"Failed to execute '{cmd}'\".format(cmd=cmd))\r\n\r\n    if output[vrf].get(\"description\", \"\") == description:\r\n        return True\r\n\r\n    return False\r\n\r\n\r\ndef verify_vrf_description_in_running_config(device, vrf, description):\r\n    \"\"\"Verify vrf description in show running-config\r\n\r\n        Args:\r\n            device (`obj`): Device object\r\n            vrf (`str`): VRF name\r\n            description (`str`): Description\r\n\r\n        Returns:\r\n            True\r\n            False\r\n\r\n        Raises:\r\n            Exception\r\n            KeyError\r\n    \"\"\"\r\n    try:\r\n        output = get_running_config_section_dict(\r\n            device, vrf + \"$\"\r\n        )\r\n    except Exception as e:\r\n        log.error(str(e))\r\n        raise Exception(\r\n            \"Failed to find vrf {vrf} through show running-config\".format(\r\n                vrf=vrf\r\n            )\r\n        )\r\n\r\n    vrf = \"vrf definition {vrf}\".format(vrf=vrf)\r\n    desc = \"description {description}\".format(description=description)\r\n    try:\r\n        result = isinstance(output[vrf][desc], dict)\r\n    except KeyError:\r\n        return False\r\n\r\n    return result\r\n", "id": "9813532", "language": "Python", "matching_score": 1.6788060665130615, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/vrf/verify.py" }, { "content": "\"\"\"Utility type functions that do not fit into another category\"\"\"\r\n\r\n# Python\r\nimport logging\r\nimport time\r\n\r\n# Running-Config\r\nfrom genie.libs.sdk.apis.utils import get_config_dict\r\n\r\nlog = logging.getLogger(__name__)\r\n\r\n\r\ndef get_startup_config_dict(device, section=None, options=None):\r\n    \"\"\" Get section information from show startup-config\r\n\r\n        Args:\r\n            device ('obj'): Device object\r\n            section ('str'): Section string\r\n            options ('str'): Options for the show command\r\n        Returns:\r\n            Configuration dict\r\n    \"\"\"\r\n    if options and section:\r\n        cmd = \"show startup-config {options} | section {section}\".format(\r\n            options=options, section=section\r\n        )\r\n    elif options:\r\n        cmd = \"show startup-config {options}\".format(options=options)\r\n\r\n    elif section:\r\n        cmd = \"show startup-config | section {section}\".format(section=section)\r\n    else:\r\n        cmd = \"show startup-config\"\r\n\r\n    try:\r\n        output = device.execute(cmd)\r\n    except Exception as e:\r\n        raise Exception(\r\n            \"Could not execute command {cmd}\\nError:{e}\".format(\r\n                cmd=cmd, e=str(e)\r\n            )\r\n        )\r\n    config_dict = get_config_dict(output)\r\n\r\n    return config_dict\r\n", "id": "1069124", "language": "Python", "matching_score": 0.43638768792152405, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/startup_config/get.py" }, { "content": "import re\nimport time\nimport pprint\nimport logging\nfrom lxml import etree\nfrom copy import deepcopy\nfrom ncclient import operations, xml_\nfrom requests import Request, Response\n\nfrom .gnmi import gNMIParser, gNMICalculator\nfrom .model import ModelDiff\nfrom .errors import ConfigError, ModelMissing, ModelIncompatible\nfrom .netconf import NetconfParser, NetconfCalculator\nfrom .composer import Composer\nfrom cisco_gnmi.proto.gnmi_pb2 import SetRequest, GetResponse\nfrom .restconf import RestconfParser, RestconfCalculator\nfrom .calculator import BaseCalculator\n\n# create a logger for this module\nlogger = logging.getLogger(__name__)\n\nnc_url = xml_.BASE_NS_1_0\nyang_url = 'urn:ietf:params:xml:ns:yang:1'\nconfig_tag = '{' + nc_url + '}config'\nfilter_tag = '{' + nc_url + '}filter'\noperation_tag = '{' + nc_url + '}operation'\nkey_tag = '{' + yang_url + '}key'\nvalue_tag = '{' + yang_url + '}value'\ninsert_tag = '{' + yang_url + '}insert'\n
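\n# Illustrative note (helper name is ours, not part of this module): the tag\n# constants above use lxml's Clark notation, '{namespace-url}localname'.\ndef _example_clark_tag():\n    root = etree.Element(config_tag, nsmap={'nc': nc_url})\n    # root.tag equals '{' + nc_url + '}config', i.e. the namespace-qualified\n    # form that etree reports regardless of the prefix mapping\n    return root.tag == config_tag\n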
\ndef _cmperror(x, y):\n    raise TypeError(\"can't compare '%s' to '%s'\" % (\n        type(x).__name__, type(y).__name__))\n\n\nclass Config(object):\n    '''Config\n\n    Abstraction of a config state of a device.\n\n    Attributes\n    ----------\n    device : `object`\n        An instance of yang.ncdiff.ModelDevice, which represents a modeled\n        device.\n\n    ele : `Element`\n        A lxml Element which contains the config.\n\n    xml : `str`\n        A string presentation of self.ele, not in pretty-print.\n\n    ns : `dict`\n        A dictionary of namespaces used by the config. Keys are prefixes and\n        values are URLs.\n\n    models : `list`\n        A list of model names that self.roots belong to.\n\n    roots : `dict`\n        A dictionary of roots of self.ele. Dictionary keys are tags of roots in\n        `{url}tagname` notation, and values are corresponding model names.\n    '''\n\n    def __init__(self, ncdevice, config=None):\n        '''\n        __init__ instantiates a Config instance.\n        '''\n\n        self.device = ncdevice\n        self.parser = None\n        if config is None:\n            self.ele = etree.Element(config_tag, nsmap={'nc': nc_url})\n        elif isinstance(config, operations.rpc.RPCReply) or \\\n             isinstance(config, str) or \\\n             etree.iselement(config):\n            self.parser = NetconfParser(self.device, config)\n            self.ele = self.parser.ele\n        elif isinstance(config, Response):\n            self.parser = RestconfParser(self.device, config)\n            self.ele = self.parser.ele\n        elif isinstance(config, GetResponse):\n            self.parser = gNMIParser(self.device, config)\n            self.ele = self.parser.ele\n        else:\n            raise TypeError(\"argument 'config' must be None, an RPCReply, \" \\\n                            \"XML string, Element, requests Response, or \" \\\n                            \"gNMI GetResponse, but not '{}'\" \\\n                            .format(type(config)))\n        self.validate_config()\n\n    def __repr__(self):\n        return '<{}.{} {} at {}>'.format(self.__class__.__module__,\n                                         self.__class__.__name__,\n                                         self.ele.tag,\n                                         hex(id(self)))\n\n    def __str__(self):\n        return etree.tostring(self.ele,\n                              encoding='unicode',\n                              pretty_print=True)\n\n    def __bool__(self):\n        d = Config(self.device, None)\n        if self == d:\n            return False\n        else:\n            return True\n\n    def __add__(self, other):\n        if isinstance(other, Config):\n            if ConfigCompatibility(self, other).is_compatible:\n                return Config(self.device,\n                              NetconfCalculator(self.device,\n                                                self.ele, other.ele).add)\n        elif isinstance(other, ConfigDelta):\n            if ConfigCompatibility(self, other).is_compatible:\n                return Config(self.device,\n                              NetconfCalculator(self.device,\n                                                self.ele, other.nc).add)\n        elif etree.iselement(other):\n            return Config(self.device,\n                          NetconfCalculator(self.device, self.ele, other).add)\n        elif isinstance(other, Request):\n            return Config(self.device,\n                          RestconfCalculator(self.device, self.ele, other).add)\n        elif isinstance(other, SetRequest):\n            return Config(self.device,\n                          gNMICalculator(self.device, self.ele, other).add)\n        else:\n            return NotImplemented\n\n    def __sub__(self, other):\n        if type(other) == Config:\n            if self == other:\n                return None\n            else:\n                return ConfigDelta(config_src=other, config_dst=self)\n        elif isinstance(other, ConfigDelta):\n            return self.__add__(-other)\n        else:\n            return NotImplemented\n\n    def __le__(self, other):\n        if isinstance(other, Config):\n            return BaseCalculator(self.device, self.ele, other.ele).le\n        else:\n            _cmperror(self, other)\n\n    def __lt__(self, other):\n        if isinstance(other, Config):\n            return BaseCalculator(self.device, self.ele, other.ele).lt\n        else:\n            _cmperror(self, other)\n\n    def __ge__(self, other):\n        if isinstance(other, Config):\n            return
BaseCalculator(self.device, self.ele, other.ele).ge\n        else:\n            _cmperror(self, other)\n\n    def __gt__(self, other):\n        if isinstance(other, Config):\n            return BaseCalculator(self.device, self.ele, other.ele).gt\n        else:\n            _cmperror(self, other)\n\n    def __eq__(self, other):\n        if isinstance(other, Config):\n            return BaseCalculator(self.device, self.ele, other.ele).eq\n        else:\n            _cmperror(self, other)\n\n    def __ne__(self, other):\n        if isinstance(other, Config):\n            return BaseCalculator(self.device, self.ele, other.ele).ne\n        else:\n            _cmperror(self, other)\n\n    @property\n    def xml(self):\n        return etree.tostring(self.ele,\n                              encoding='unicode',\n                              pretty_print=False)\n\n    @property\n    def ns(self):\n        return self.device._get_ns(self.ele)\n\n    @property\n    def models(self):\n        return sorted(list(set([v for k, v in self.roots.items()])))\n\n    @property\n    def roots(self):\n        roots = {}\n        for child in self.ele.getchildren():\n            if child.tag in self.device.roots:\n                roots[child.tag] = self.device.roots[child.tag]\n            else:\n                ret = re.search('^{(.+)}(.+)$', child.tag)\n                if not ret:\n                    raise ConfigError(\"unknown root '{}'\".format(child.tag))\n                url_to_name = {i[2]: i[0] for i in self.device.namespaces\n                               if i[1] is not None}\n                if ret.group(1) in url_to_name:\n                    raise ModelMissing(\"please load model '{0}' by calling \" \\\n                                       \"method load_model('{0}') of device \" \\\n                                       \"{1}\" \\\n                                       .format(url_to_name[ret.group(1)],\n                                               self.device))\n                else:\n                    raise ConfigError(\"unknown model url '{}'\".format(ret.group(1)))\n        return roots\n\n    def get_schema_node(self, node):\n        '''get_schema_node\n\n        High-level api: Return schema node of a config node.\n\n        Parameters\n        ----------\n\n        node : `Element`\n            An Element node in config tree.\n\n        Returns\n        -------\n\n        Element\n            A schema node of the config node.\n        '''\n\n        return self.device.get_schema_node(node)\n\n    def get_model_name(self, node):\n        '''get_model_name\n\n        High-level api: Return model name of a config node.\n\n        Parameters\n        ----------\n\n        node : `Element`\n            An Element node in config tree.\n\n        Returns\n        -------\n\n        str\n            Model name the config node belongs to.\n        '''\n\n        return self.device.get_model_name(node)\n\n    def validate_config(self):\n        '''validate_config\n\n        High-level api: Validate config against models. ConfigError is raised\n        if the config has issues.\n\n        Returns\n        -------\n\n        None\n            There is no return of this method.\n\n        Raises\n        ------\n\n        ConfigError\n            If config contains error.\n        '''\n\n        self.roots\n        for child in self.ele.getchildren():\n            self._validate_node(child)\n\n    def ns_help(self):\n        '''ns_help\n\n        High-level api: Print known namespaces to make writing xpath easier.\n\n        Returns\n        -------\n\n        None\n            There is no return of this method.\n        '''\n\n        pprint.pprint(self.ns)\n\n    def xpath(self, *args, **kwargs):\n        '''xpath\n\n        High-level api: It is a wrapper of xpath method in lxml package. If\n        namespaces is not given, self.ns is used by default.\n\n        Returns\n        -------\n\n        boolean or float or str or list\n            Refer to http://lxml.de/xpathxslt.html#xpath-return-values\n        '''\n\n        if 'namespaces' not in kwargs:\n            kwargs['namespaces'] = {i[1]: i[2] for i in self.device.namespaces\n                                    if i[1] is not None}\n        return self.ele.xpath(*args, **kwargs)\n\n    def filter(self, *args, **kwargs):\n        '''filter\n\n        High-level api: Filter the config using xpath method. 
If namespaces is\n        not given, self.ns is used by default.\n\n        Returns\n        -------\n\n        Config\n            A new Config instance which has less content according to your\n            filter xpath expression.\n        '''\n\n        ancestors = set()\n        filtrates = set()\n        config = type(self)(self.device, deepcopy(self.ele))\n        results = config.xpath(*args, **kwargs)\n        if isinstance(results, list):\n            for node in results:\n                if etree.iselement(node):\n                    ancestors |= set(list(node.iterancestors()))\n                    filtrates.add(node)\n            if filtrates:\n                config._node_filter(config.ele, ancestors, filtrates)\n            else:\n                config.ele = etree.Element(config_tag, nsmap={'nc': nc_url})\n        return config\n\n    def _validate_node(self, node):\n        '''_validate_node\n\n        Low-level api: Validate one config node. This is a recursive method. An\n        exception will be raised if validation fails.\n\n        Parameters\n        ----------\n\n        node : `Element`\n            An Element node in config tree.\n\n        Returns\n        -------\n\n        None\n            There is no return of this method.\n        '''\n\n        c = Composer(self.device, node)\n        if c.schema_node is None:\n            p = self.device.get_xpath(node, instance=False)\n            raise ConfigError('schema node of the config node not ' \\\n                              'found: {}'.format(p))\n        if c.schema_node.get('type') == 'list':\n            for key in c.keys:\n                if node.find(key) is None:\n                    p = self.device.get_xpath(node, instance=False)\n                    raise ConfigError(\"missing key '{}' of the config \" \\\n                                      \"node {}\".format(key, p))\n\n        for tag in operation_tag, insert_tag, value_tag, key_tag:\n            if node.get(tag):\n                raise ConfigError(\"the config node contains invalid \" \\\n                                  \"attribute '{}': {}\" \\\n                                  .format(tag, self.device.get_xpath(node)))\n\n        for child in node.getchildren():\n            if len(child) > 0:\n                self._validate_node(child)\n\n            # clean up empty containers\n            child_schema_node = self.device.get_schema_node(child)\n            if child_schema_node is None:\n                raise ConfigError(\"schema node of the config node {} cannot \" \\\n                                  \"be found:\\n{}\" \\\n                                  .format(self.device.get_xpath(child), self))\n            if len(child) == 0 and \\\n               child_schema_node.get('type') == 'container' and \\\n               child_schema_node.get('presence') != 'true':\n                node.remove(child)\n\n    def _node_filter(self, node, ancestors, filtrates):\n        '''_node_filter\n\n        Low-level api: Remove unrelated nodes in config. This is a recursive\n        method.\n\n        Parameters\n        ----------\n\n        node : `Element`\n            A node to be processed.\n\n        ancestors : `set`\n            A set of ancestors of filtrates.\n\n        filtrates : `set`\n            A set of filtrates which are the result of xpath evaluation.\n\n        Returns\n        -------\n\n        None\n            There is no return of this method.\n        '''\n\n        if node in filtrates:\n            return\n        elif node in ancestors:\n            if node.tag != config_tag:\n                s_node = self.get_schema_node(node)\n            if node.tag != config_tag and \\\n               s_node.get('type') == 'list':\n                for child in node.getchildren():\n                    s_node = self.get_schema_node(child)\n                    if s_node.get('is_key') or child in filtrates:\n                        continue\n                    elif child in ancestors:\n                        self._node_filter(child, ancestors, filtrates)\n                    else:\n                        node.remove(child)\n            else:\n                for child in node.getchildren():\n                    if child in filtrates:\n                        continue\n                    elif child in ancestors:\n                        self._node_filter(child, ancestors, filtrates)\n                    else:\n                        node.remove(child)\n        else:\n            node.getparent().remove(node)\n
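\n\n# Illustrative sketch only (function name is ours): Config.xpath() and\n# Config.filter() fall back to the prefixes known to the device, so a\n# prefixed path can be evaluated without passing a namespaces dict. The\n# 'oc-if' prefix here is hypothetical and must exist on the device.\ndef _example_filter_config(config):\n    # keep only the matched root subtree; everything else is removed\n    filtered = config.filter('oc-if:interfaces')\n    return filtered.xml\n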
\n\nclass ConfigDelta(object):\n    '''ConfigDelta\n\n    Abstraction of a delta of two Config instances. This delta could be\n    considered as a config state transition, from a source state to a\n    destination state.\n\n    Attributes\n    ----------\n    config_src : `Config`\n        An instance of yang.ncdiff.Config, which is the source config state of\n        a transition.\n\n    config_dst : `Config`\n        An instance of yang.ncdiff.Config, which is the destination config state\n        of a transition.\n\n    nc : `Element`\n        A lxml Element which contains the delta. This attribute can be used by\n        ncclient edit_config() directly. It is the Netconf presentation of a\n        ConfigDelta instance.\n\n    rc : `list`\n        A list of requests.models.Request instances. Each Request instance can\n        be used by prepare_request() in requests package. It is the Restconf\n        presentation of a ConfigDelta instance.\n\n    gnmi : `SetRequest`\n        A gnmi_pb2.SetRequest instance. It is the gNMI presentation of a\n        ConfigDelta instance.\n\n    ns : `dict`\n        A dictionary of namespaces used by the attribute 'nc'. Keys are prefixes\n        and values are URLs.\n\n    models : `list`\n        A list of model names that self.roots belong to.\n\n    roots : `dict`\n        A dictionary of roots of self.nc. Dictionary keys are tags of roots in\n        `{url}tagname` notation, and values are corresponding model names.\n\n    preferred_create : `str`\n        Preferred operation of creating a new element. Choice of 'merge',\n        'create' or 'replace'.\n\n    preferred_replace : `str`\n        Preferred operation of replacing an existing element. Choice of\n        'merge' or 'replace'.\n\n    preferred_delete : `str`\n        Preferred operation of deleting an existing element. Choice of\n        'delete' or 'remove'.\n    '''\n\n    def __init__(self, config_src, config_dst=None, delta=None,\n                 preferred_create='merge',\n                 preferred_replace='merge',\n                 preferred_delete='delete'):\n        '''\n        __init__ instantiates a ConfigDelta instance.\n        '''\n\n        if not isinstance(config_src, Config):\n            raise TypeError(\"argument 'config_src' must be \" \\\n                            \"yang.ncdiff.Config, but not '{}'\" \\\n                            .format(type(config_src)))\n        if preferred_create in ['merge', 'create', 'replace']:\n            self.preferred_create = preferred_create\n        else:\n            raise ValueError(\"only 'merge', 'create' or 'replace' are valid \" \\\n                             \"values of 'preferred_create'\")\n        if preferred_replace in ['merge', 'replace']:\n            self.preferred_replace = preferred_replace\n        else:\n            raise ValueError(\"only 'merge' or 'replace' are valid \" \\\n                             \"values of 'preferred_replace'\")\n        if preferred_delete in ['delete', 'remove']:\n            self.preferred_delete = preferred_delete\n        else:\n            raise ValueError(\"only 'delete' or 'remove' are valid \" \\\n                             \"values of 'preferred_delete'\")\n        self.config_src = config_src\n        if delta is not None:\n            if isinstance(delta, str) or etree.iselement(delta):\n                delta = NetconfParser(self.device, delta).ele\n            elif isinstance(delta, Request) or isinstance(delta, SetRequest):\n                pass\n            else:\n                raise TypeError(\"argument 'delta' must be XML string, \" \\\n                                \"Element, requests.Request, or \" \\\n                                \"gnmi_pb2.SetRequest, but not \" \\\n                                \"'{}'\".format(type(delta)))\n        if not isinstance(config_dst, Config) and config_dst is not None:\n            raise TypeError(\"argument 'config_dst' must be \" \\\n                            \"yang.ncdiff.Config or None, but not '{}'\" \\\n                            .format(type(config_dst)))\n        self.config_dst = config_dst\n        if self.config_dst is None and delta is None:\n            raise ValueError(\"either 'config_dst' or 'delta' must be present\")\n        if delta is not None:\n            if self.config_dst is not None:\n                logger.warning(\"argument 'config_dst' is ignored as 'delta' \" \\\n                               \"is provided\")\n            self.config_dst = self.config_src + delta\n        else:\n
ConfigCompatibility(self.config_src, self.config_dst).is_compatible\n\n    @property\n    def device(self):\n        return self.config_src.device\n\n    @property\n    def nc(self):\n        return NetconfCalculator(self.device,\n                                 self.config_dst.ele, self.config_src.ele,\n                                 preferred_create=self.preferred_create,\n                                 preferred_replace=self.preferred_replace,\n                                 preferred_delete=self.preferred_delete).sub\n\n    @property\n    def rc(self):\n        return RestconfCalculator(self.device,\n                                  self.config_dst.ele, self.config_src.ele).sub\n\n    @property\n    def gnmi(self):\n        return gNMICalculator(self.device,\n                              self.config_dst.ele, self.config_src.ele).sub\n\n    @property\n    def ns(self):\n        return self.device._get_ns(self.nc)\n\n    @property\n    def models(self):\n        return sorted(list(set(self.config_src.models + \\\n                               self.config_dst.models)))\n\n    @property\n    def roots(self):\n        roots = {}\n        roots.update(self.config_src.roots)\n        roots.update(self.config_dst.roots)\n        return roots\n\n    def __str__(self):\n        return etree.tostring(self.nc, encoding='unicode', pretty_print=True)\n\n    def __neg__(self):\n        return ConfigDelta(config_src=self.config_dst,\n                           config_dst=self.config_src,\n                           preferred_create=self.preferred_create,\n                           preferred_replace=self.preferred_replace,\n                           preferred_delete=self.preferred_delete)\n\n    def __pos__(self):\n        return self\n\n    def __bool__(self):\n        if self.config_src == self.config_dst:\n            return False\n        else:\n            return True\n\n    def __add__(self, other):\n        if isinstance(other, Config):\n            return other + self.nc\n        else:\n            return NotImplemented\n\n    def __sub__(self, other):\n        return NotImplemented\n\n    def __lt__(self, other):\n        _cmperror(self, other)\n\n    def __gt__(self, other):\n        _cmperror(self, other)\n\n    def __le__(self, other):\n        _cmperror(self, other)\n\n    def __ge__(self, other):\n        _cmperror(self, other)\n\n    def __eq__(self, other):\n        _cmperror(self, other)\n\n    def __ne__(self, other):\n        _cmperror(self, other)\n
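\n\n# Illustrative sketch only (function name is ours, not part of this module):\n# a typical delta round-trip with the classes above.\ndef _example_delta_roundtrip(device, reply_before, reply_after):\n    running = Config(device, reply_before)\n    candidate = Config(device, reply_after)\n    delta = candidate - running  # ConfigDelta, or None when equal\n    if delta is None:\n        return None\n    # delta.nc is an lxml Element suitable for ncclient edit_config();\n    # applying the delta to the source should reproduce the destination.\n    assert running + delta == candidate\n    return delta.nc\n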
\n\nclass ConfigCompatibility(object):\n    '''ConfigCompatibility\n\n    A class to check model compatibility between two Config instances. The\n    prerequisite of calculating ConfigDelta is that two instances of Config are\n    based on the same model schema definitions.\n\n    Attributes\n    ----------\n    config1 : `object`\n        An instance of Config.\n\n    config2 : `object`\n        Another instance of Config.\n\n    models : `list`\n        A list of model names that are in self.config1 and self.config2.\n\n    models_compatible : `bool`\n        True if all models in self.models are the same in self.config1 as in\n        self.config2.\n\n    namespaces_compatible : `bool`\n        True if all models in self.models have the same prefix and URL in\n        self.config1 as in self.config2.\n\n    is_compatible : `bool`\n        True if self.models_compatible is True and self.namespaces_compatible is\n        True.\n    '''\n\n    def __init__(self, config1, config2):\n        '''\n        __init__ instantiates a ConfigCompatibility instance.\n        '''\n\n        self.config1 = config1\n        self.config2 = config2\n\n    @property\n    def models(self):\n        return sorted(list(set(self.config1.models + self.config2.models)))\n\n    @property\n    def models_compatible(self):\n\n        def check_models(models):\n            for device in [self.config1.device, self.config2.device]:\n                missing_models = set(models) - set(device.models_loaded)\n                if missing_models:\n                    raise ModelMissing('please load model {} by calling ' \\\n                                       'method load_model() of device {}' \\\n                                       .format(str(list(missing_models))[1:-1],\n                                               device))\n\n        check_models(self.models)\n        for model in self.models:\n            diff = ModelDiff(self.config1.device.models[model],\n                             self.config2.device.models[model])\n            if diff:\n                logger.debug(str(self))\n                raise ModelIncompatible(\"model '{}' on device {} is \" \\\n                                        \"different from the one on device {}\" \\\n                                        .format(model, self.config1.device,\n                                                self.config2.device))\n        return True\n\n    @property\n    def namespaces_compatible(self):\n\n        def check_models(models):\n            for device in [self.config1.device, self.config2.device]:\n                missing_models = set(models) - set(device.models_loadable)\n                if missing_models:\n                    raise ModelMissing('model {} does not exist on device {}' \\\n                                       .format(str(list(missing_models))[1:-1],\n                                               device))\n\n        check_models(self.models)\n        for model in self.models:\n            prefix1 = [i[1] for i in self.config1.device.namespaces\n                       if i[0] == model][0]\n            prefix2 = [i[1] for i in self.config2.device.namespaces\n                       if i[0] == model][0]\n            if prefix1 != prefix2:\n                raise ModelIncompatible(\"model '{}' uses prefix '{}' on \" \\\n                                        \"device {}, but uses prefix '{}' on \" \\\n                                        \"device {}\" \\\n                                        .format(model,\n                                                prefix1, self.config1.device,\n                                                prefix2, self.config2.device))\n            url1 = [i[2] for i in self.config1.device.namespaces\n                    if i[0] == model][0]\n            url2 = [i[2] for i in self.config2.device.namespaces\n                    if i[0] == model][0]\n            if url1 != url2:\n                raise ModelIncompatible(\"model '{}' uses url '{}' on device \" \\\n                                        \"{}, but uses url '{}' on device {}\" \\\n                                        .format(model,\n                                                url1, self.config1.device,\n                                                url2, self.config2.device))\n        return True\n\n    @property\n    def is_compatible(self):\n        return self.namespaces_compatible and self.models_compatible\n", "id": "10872071", "language": "Python", "matching_score": 5.526732921600342, "max_stars_count": 15, "path": "ncdiff/src/yang/ncdiff/config.py" }, { "content": "import re\nimport json\nimport logging\nimport requests\nfrom lxml import etree\nfrom copy import deepcopy\nfrom xmljson import Parker\nfrom ncclient import xml_\nfrom xml.etree import ElementTree\nfrom collections import OrderedDict, defaultdict\nfrom urllib.parse import quote, unquote\n\nfrom .composer import Tag, Composer\nfrom .calculator import BaseCalculator\n\n# create a logger for this
module\nlogger = logging.getLogger(__name__)\n\nnc_url = xml_.BASE_NS_1_0\nconfig_tag = '{' + nc_url + '}config'\nheader_json = {'Content-type': 'application/yang-data+json',\n 'Accept': 'application/yang-data+json, application/yang-data.errors+json'}\n\n\ndef _tostring(value):\n '''_tostring\n\n Convert value to XML compatible string.\n '''\n\n if value is True:\n return 'true'\n elif value is False:\n return 'false'\n elif value is None:\n return None\n else:\n return str(value)\n\ndef _fromstring(value):\n '''_fromstring\n\n Convert XML string value to None, boolean, int or float.\n '''\n\n if not value:\n return [None]\n std_value = value.strip().lower()\n if std_value == 'true':\n return True\n elif std_value == 'false':\n return False\n try:\n return int(std_value)\n except ValueError:\n pass\n try:\n return float(std_value)\n except ValueError:\n pass\n return value\n\n\nclass RestconfParser(object):\n '''RestconfParser\n\n A parser to convert a Restconf GET reply to an lxml Element object.\n\n Attributes\n ----------\n ele : `Element`\n An lxml Element object which is the root of the config tree.\n\n config_node : `Element`\n An Element node in the config tree, which is corresponding to the URL in\n the Restconf GET reply.\n\n xpath : `str`\n An xpath of attribute 'config_node', which is corresponding to the URL\n in the Restconf GET reply.\n '''\n\n def __init__(self, device, restconf_get_reply):\n self.device = device\n self.reply = restconf_get_reply\n self._config_node = None\n self._config_node_parent = None\n self._ele = None\n self._convert_tag = defaultdict(dict)\n\n self._name_to_prefix = {i[0]: i[1] for i in self.device.namespaces\n if i[1] is not None}\n self._name_to_url = {i[0]: i[2] for i in self.device.namespaces\n if i[1] is not None}\n\n @property\n def ele(self):\n if self._ele is None:\n pk = Parker(xml_tostring=_tostring, element=ElementTree.Element)\n for ele in pk.etree(self._json_data):\n self.parse_json(self.config_node_parent, ele)\n self._ele = self.root(self.config_node_parent)\n return self._ele\n\n @property\n def config_node(self):\n if self._config_node is None:\n self._config_node_parent, self._config_node = self.get_config_node()\n return self._config_node\n\n @property\n def xpath(self):\n if self.parse_url_piece(self._url_pieces[-1])[1]:\n return self.device.get_xpath(self.config_node,\n type=Tag.LXML_XPATH, instance=True)\n else:\n return self.device.get_xpath(self.config_node,\n type=Tag.LXML_XPATH, instance=False)\n\n @property\n def _url_pieces(self):\n regexp_str = '^https?://.+/data/(.*)'\n m = re.search(regexp_str, self.reply.url)\n if m:\n return m.group(1).split('/')\n else:\n raise ValueError(\"invalid url '{}'\".format(self.reply.url))\n\n @property\n def _json_data(self):\n return json.loads(self.reply.text, object_pairs_hook=OrderedDict)\n\n @property\n def config_node_parent(self):\n if self._config_node_parent is None:\n self._config_node_parent, self._config_node = self.get_config_node()\n return self._config_node_parent\n\n @staticmethod\n def get_ns(tag):\n m = re.search('{(.*)}.*', tag)\n return m.group(1)\n\n @staticmethod\n def parse_url_piece(url_piece):\n regexp_str = '(.*)=(.*)'\n m = re.search(regexp_str, url_piece)\n if m:\n return unquote(m.group(1)), \\\n [unquote(v) for v in m.group(2).split(',')]\n else:\n return unquote(url_piece), ''\n\n @staticmethod\n def root(node):\n ancestors = list(node.iterancestors())\n if ancestors:\n return ancestors[-1]\n else:\n return node\n\n @staticmethod\n def copy(node):\n def 
find_node(node1, node2):\n if node1 == node:\n return node2\n for child1, child2 in zip(node1.getchildren(), node2.getchildren()):\n if child1 == node:\n return child2\n elif child1.getchildren():\n ret = find_node(child1, child2)\n if ret is not None:\n return ret\n return None\n\n node1_root = RestconfParser.root(node)\n node2_root = deepcopy(node1_root)\n return find_node(node1_root, node2_root)\n\n def convert_tag(self, default_ns, tag, src=Tag.LXML_ETREE, dst=Tag.YTOOL):\n if src == Tag.JSON_NAME and dst == Tag.LXML_ETREE:\n if default_ns not in self._convert_tag or \\\n tag not in self._convert_tag[default_ns]:\n self._convert_tag[default_ns][tag] = \\\n self.device.convert_tag(default_ns, tag, src=src, dst=dst)\n return self._convert_tag[default_ns][tag]\n else:\n return self.device.convert_tag(default_ns, tag, src=src, dst=dst)\n\n def get_name(self, text):\n if text is None:\n return '', None\n m = re.search('^(.*):(.*)$', text)\n if m:\n if m.group(1) in self.device.models_loadable:\n return m.group(1), m.group(2)\n else:\n return '', text\n else:\n return '', text\n\n def subelement(self, parent, tag, text):\n default_url = self.get_ns(tag)\n nsmap = {None: default_url}\n model_name, text_value = self.get_name(text)\n if model_name:\n model_url = self._name_to_url[model_name]\n if model_url == default_url:\n e = etree.SubElement(parent, tag, nsmap=nsmap)\n e.text = text_value\n else:\n nsmap.update({self._name_to_prefix[model_name]: model_url})\n e = etree.SubElement(parent, tag, nsmap=nsmap)\n e.text = '{}:{}'.format(self._name_to_prefix[model_name],\n text_value)\n else:\n e = etree.SubElement(parent, tag, nsmap=nsmap)\n e.text = text_value\n return e\n\n def parse_json(self, lxml_parent, xml_child, default_ns=''):\n default_ns, tag = self.convert_tag(default_ns, xml_child.tag,\n src=Tag.JSON_NAME,\n dst=Tag.LXML_ETREE)\n lxml_child = self.subelement(lxml_parent, tag, xml_child.text)\n for child in xml_child:\n self.parse_json(lxml_child, child, default_ns)\n\n def get_config_node(self):\n '''get_config_node\n\n High-level api: get_config_node returns an Element node in the config\n tree, which is corresponding to the URL in the Restconf GET reply.\n\n Returns\n -------\n\n Element\n A config node.\n '''\n\n default_ns = ''\n config_node = etree.Element(config_tag, nsmap={'nc': nc_url})\n for index, url_piece in enumerate(self._url_pieces):\n if index == len(self._url_pieces)-1:\n config_node_parent = self.copy(config_node)\n node_name, values = self.parse_url_piece(url_piece)\n default_ns, tag = self.convert_tag(default_ns, node_name,\n src=Tag.JSON_NAME,\n dst=Tag.LXML_ETREE)\n config_node = self.subelement(config_node, tag, None)\n schema_node = self.device.get_schema_node(config_node)\n if schema_node.get('type') == 'leaf-list' and len(values) > 0:\n model_name, text_value = self.get_name(values[0])\n if model_name:\n prefix = self._name_to_prefix[model_name]\n config_node.text = '{}:{}'.format(prefix, text_value)\n else:\n config_node.text = text_value\n elif schema_node.get('type') == 'list' and len(values) > 0:\n key_tags = BaseCalculator._get_list_keys(schema_node)\n for key_tag, value in zip(key_tags, values):\n self.subelement(config_node, key_tag, value)\n return config_node_parent, config_node\n\n\nclass RestconfComposer(Composer):\n '''RestconfComposer\n\n A composer to convert an lxml Element object to Restconf JSON format.\n '''\n\n def get_json(self, instance=True, child_tag=None):\n '''get_json\n\n High-level api: get_json returns json_val of the config 
node.\n\n        Parameters\n        ----------\n\n        instance : `bool`\n            True if only one instance of list or leaf-list is required. False if\n            all instances of list or leaf-list are needed.\n\n        child_tag : `str`\n            Optional. When specified, only children of the node whose tag\n            matches child_tag are kept in the returned JSON.\n\n        Returns\n        -------\n\n        str\n            A string in JSON format.\n        '''\n\n        def get_json_instance(node):\n            pk = Parker(xml_fromstring=_fromstring, dict_type=OrderedDict)\n            default_ns = {}\n            nodes = [node] + node.findall('.//')\n            for item in nodes:\n                parents = [p for p in node.findall('.//{}/..'.format(item.tag))\n                           if item in p.findall('*')]\n                if parents and id(parents[0]) in default_ns:\n                    default_url = default_ns[id(parents[0])]\n                    ns, tag = self.device.convert_tag(default_url, item.tag,\n                                                      dst=Tag.JSON_NAME)\n                else:\n                    ns, tag = self.device.convert_tag('', item.tag,\n                                                      dst=Tag.JSON_NAME)\n                default_ns[id(item)] = ns\n                item.tag = tag\n            return pk.data(node, preserve_root=True)\n\n        def convert_node(node, child_tag=None):\n            # lxml.etree does not allow tag name like oc-if:enable\n            # so it is converted to xml.etree.ElementTree\n            string = etree.tostring(node, encoding='unicode',\n                                    pretty_print=False)\n            xml_node = ElementTree.fromstring(string)\n            if child_tag is not None:\n                for child in list(xml_node):\n                    if child.tag != child_tag:\n                        xml_node.remove(child)\n            return xml_node\n\n        if instance:\n            return json.dumps(get_json_instance(convert_node(self.node, \n                child_tag=child_tag)))\n        else:\n            nodes = [n for n in self.node.getparent() \\\n                                         .iterchildren(tag=self.node.tag)]\n            if len(nodes) > 1:\n                return json.dumps([get_json_instance(convert_node(n))\n                                   for n in nodes])\n            else:\n                return json.dumps(get_json_instance(convert_node(nodes[0])))\n\n    def get_url(self, instance=True):\n        '''get_url\n\n        High-level api: get_url returns a Restconf URL of the config node.\n\n        Parameters\n        ----------\n\n        instance : `bool`\n            True if the Restconf URL refers to only one instance of a list or\n            leaf-list. False if the Restconf URL refers to all instances of a\n            list or leaf-list.\n\n        Returns\n        -------\n\n        str\n            A Restconf URL.\n        '''\n\n        def convert(default_ns, nodes):\n            ret = ''\n            for node in nodes:\n                default_ns, id = self.device.convert_tag(default_ns, node.tag,\n                                                         dst=Tag.JSON_NAME)\n                ret += '/' + quote(id, safe='')\n                if self.is_config:\n                    n = Composer(self.device, node)\n                    if n.schema_node.get('type') == 'leaf-list':\n                        if node != self.node or instance:\n                            ret += '={}'.format(quote(node.text, safe=''))\n                    elif n.schema_node.get('type') == 'list':\n                        if node != self.node or instance:\n                            values = []\n                            for key in n.keys:\n                                values.append(quote(node.find(key).text,\n                                                    safe=''))\n                            ret += '={}'.format(','.join(values))\n            return ret\n\n        nodes = list(reversed(list(self.node.iterancestors())))[1:] + \\\n                [self.node]\n        return '/restconf/data' + convert('', nodes)\n
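\n\n# Illustrative sketch only (function name and address are ours): how a config\n# node becomes a Restconf URL plus JSON payload. RestconfCalculator below\n# builds its DELETE/PUT/PATCH requests from exactly this pattern.\ndef _example_compose_url(device, config_node):\n    composer = RestconfComposer(device, config_node)\n    url = 'https://198.51.100.1:443' + composer.get_url(instance=True)\n    data_json = composer.get_json(instance=True)\n    return url, data_json\n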
\n\nclass RestconfCalculator(BaseCalculator):\n    '''RestconfCalculator\n\n    A Restconf calculator to do subtraction and addition. A subtraction is to\n    compute the delta between two Config instances in the form of Restconf\n    Requests. An addition is to apply one Restconf Request to a Config instance\n    (TBD).\n\n    Attributes\n    ----------\n    sub : `list`\n        A list of Restconf Requests which can achieve a transition from one\n        config, i.e., self.etree2, to another config, i.e., self.etree1.\n    '''\n\n    def __init__(self, device, etree1, etree2, request=None):\n        '''\n        __init__ instantiates a RestconfCalculator instance.\n        '''\n\n        self.device = device\n        self.etree1 = etree1\n        self.etree2 = etree2\n        self.request = request\n        self.port = '443'\n\n    @property\n    def ip(self):\n        return self.device.connection_info['ip']\n\n    @property\n    def sub(self):\n        deletes, puts, patches = self.node_sub(self.etree1, self.etree2)\n        return deletes + puts + patches\n\n    def node_sub(self, node_self, node_other):\n        '''node_sub\n\n        High-level api: Compute the delta of two config nodes. This method is\n        recursive. Assume two config nodes are different.\n\n        Parameters\n        ----------\n\n        node_self : `Element`\n            A config node in the destination config that is being processed.\n            node_self cannot be a leaf node.\n\n        node_other : `Element`\n            A config node in the source config that is being processed.\n\n        Returns\n        -------\n\n        tuple\n            There are three elements in the tuple: a list of Restconf DELETE\n            Requests, a list of Restconf PUT Requests, and a list of Restconf\n            PATCH Requests.\n        '''\n\n        deletes = []\n        puts = []\n        patches = []\n\n        # if a leaf-list node, delete the leaf-list totally\n        # if a list node, by default delete the list instance\n        # if a list node and instance=False, delete the list totally\n        def generate_delete(node, instance=True):\n            composer = RestconfComposer(self.device, node)\n            url = 'https://{}:{}'.format(self.ip, self.port)\n            url += composer.get_url(instance=instance)\n            deletes.append(requests.Request('DELETE', url, headers=header_json))\n\n        # if a leaf-list node, replace the leaf-list totally\n        # if a list node, replace the list totally\n        def generate_put(node, instance=True):\n            composer = RestconfComposer(self.device, node)\n            url = 'https://{}:{}'.format(self.ip, self.port)\n            url += composer.get_url(instance=instance)\n            data_json = composer.get_json(instance=instance)\n            puts.append(requests.Request('PUT', url, headers=header_json,\n                                         data=data_json))\n\n        # if a leaf-list node, update the leaf-list totally\n        # if a list node, by default update the list instance\n        # if a list node and instance=False, update the list totally\n        def generate_patch(node, instance=True):\n            composer = RestconfComposer(self.device, node.getparent())\n            url = 'https://{}:{}'.format(self.ip, self.port)\n            url += composer.get_url(instance=instance)\n            data_json = composer.get_json(instance=instance, child_tag=node.tag)\n            patches.append(requests.Request('PATCH', url, headers=header_json,\n                                            data=data_json))\n\n        # the sequence of list instances under node_self is different from the\n        # one under node_other\n        def list_seq_is_different(tag):\n            s_list = [i for i in node_self.iterchildren(tag=tag)]\n            o_list = [i for i in node_other.iterchildren(tag=tag)]\n            if [self.device.get_xpath(n) for n in s_list] == \\\n               [self.device.get_xpath(n) for n in o_list]:\n                return False\n            else:\n                return True\n
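\n\n        # For example, with instances identified by their xpaths: the checks\n        # here compare the instance sequences under node_self and node_other;\n        # ordered-by 'user' lists whose sequences do not line up are rewritten\n        # with a wholesale PUT instead of per-instance requests.\n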
\n\n        # all list instances under node_self have peers under node_other, and\n        # the sequence of list instances under node_self that have peers under\n        # node_other is the same as the sequence of list instances under node_other\n        def list_seq_is_inclusive(tag):\n            s_list = [i for i in node_self.iterchildren(tag=tag)]\n            o_list = [i for i in node_other.iterchildren(tag=tag)]\n            s_seq = [self.device.get_xpath(n) for n in s_list]\n            o_seq = [self.device.get_xpath(n) for n in o_list]\n            if set(s_seq) <= set(o_seq) and \\\n               [i for i in s_seq if i in o_seq] == o_seq:\n                return True\n            else:\n                return False\n\n        in_s_not_in_o, in_o_not_in_s, in_s_and_in_o = \\\n            self._group_kids(node_self, node_other)\n        for child_s in in_s_not_in_o:\n            schema_node = self.device.get_schema_node(child_s)\n            if schema_node.get('type') == 'leaf' or \\\n               schema_node.get('type') == 'container':\n                generate_patch(child_s)\n            elif schema_node.get('type') == 'leaf-list' or \\\n                 schema_node.get('type') == 'list':\n                if schema_node.get('ordered-by') == 'user':\n                    generate_put(node_self, instance=True)\n                    return ([], [puts[-1]], [])\n                else:\n                    generate_put(child_s, instance=True)\n        for child_o in in_o_not_in_s:\n            schema_node = self.device.get_schema_node(child_o)\n            if schema_node.get('type') == 'leaf' or \\\n               schema_node.get('type') == 'container':\n                generate_delete(child_o)\n            elif schema_node.get('type') == 'leaf-list' or \\\n                 schema_node.get('type') == 'list':\n                if schema_node.get('ordered-by') == 'user':\n                    if list_seq_is_inclusive(child_o.tag):\n                        generate_delete(child_o, instance=True)\n                    else:\n                        generate_put(node_self, instance=True)\n                        return ([], [puts[-1]], [])\n                else:\n                    generate_delete(child_o, instance=True)\n        for child_s, child_o in in_s_and_in_o:\n            schema_node = self.device.get_schema_node(child_s)\n            if schema_node.get('type') == 'leaf':\n                if child_s.text != child_o.text:\n                    generate_patch(child_s)\n            elif schema_node.get('type') == 'leaf-list':\n                if schema_node.get('ordered-by') == 'user':\n                    if list_seq_is_different(child_s.tag):\n                        generate_put(node_self, instance=True)\n                        return ([], [puts[-1]], [])\n            elif schema_node.get('type') == 'container':\n                if BaseCalculator(self.device, child_s, child_o).ne:\n                    x, y, z = self.node_sub(child_s, child_o)\n                    deletes += x\n                    puts += y\n                    patches += z\n            elif schema_node.get('type') == 'list':\n                if schema_node.get('ordered-by') == 'user':\n                    if list_seq_is_different(child_s.tag):\n                        generate_put(node_self, instance=True)\n                        return ([], [puts[-1]], [])\n                    else:\n                        if BaseCalculator(self.device, child_s, child_o).ne:\n                            x, y, z = self.node_sub(child_s, child_o)\n                            deletes += x\n                            puts += y\n                            patches += z\n                else:\n                    if BaseCalculator(self.device, child_s, child_o).ne:\n                        x, y, z = self.node_sub(child_s, child_o)\n                        deletes += x\n                        puts += y\n                        patches += z\n        return (deletes, puts, patches)\n", "id": "11729833", "language": "Python", "matching_score": 1.3796825408935547, "max_stars_count": 15, "path": "ncdiff/src/yang/ncdiff/restconf.py" }, { "content": "''' \nLISP Genie Ops Object for IOSXE - CLI.\n'''\n\n# Genie\nfrom genie.libs.ops.lisp.lisp import Lisp as SuperLisp\nfrom genie.ops.base import Context\n\n\nclass Lisp(SuperLisp):\n    '''Lisp Ops Object'''\n\n    def learn(self):\n        '''Learn Lisp object'''\n\n        ########################################################################\n        # info\n        ########################################################################\n\n        # lisp_router_instances\n        #    lisp_router_instance_id\n        info_src = '[lisp_router_instances][(?P<lisp_router_instance>.*)]'\n        info_dest = 'info[lisp_router_instances][(?P<lisp_router_instance>.*)]'\n\n        # lisp_router_instance_id\n        self.add_leaf(cmd='show lisp all service {service}'.format(service='ipv4'),\n                      src=info_src+'[lisp_router_instance_id]',\n                      dest=info_dest+'[lisp_router_instance_id]',\n                      service='ipv4')\n\n        # locator_sets\n        #    locator_set_name\n        #        local_interface - N/A\n        #        ls_interface - N/A\n        #            interface - N/A\n        #            interface_type - N/A\n        #            priority - N/A\n        #            weight - 
N/A\n # multicast_priority - N/A\n # multicast_weight -N/A\n\n # lisp_role - N/A\n # lisp_role_type - N/A\n # lisp_role_type -N/A\n\n # lisp_router_id\n # site_id\n # xtr_id\n self.add_leaf(cmd='show lisp all service {service}'.format(service='ipv4'),\n src=info_src+'[lisp_router_id]',\n dest=info_dest+'[lisp_router_id]',\n service='ipv4')\n\n ####################################################################\n # service\n ####################################################################\n\n # Loop over all services\n for service in ['ipv4', 'ipv6', 'ethernet']:\n\n service_src = info_src + '[service][{service}]'.format(service=service)\n service_dest = info_dest + '[service][{service}]'.format(service=service)\n\n # Get all instance_id's in Lisp configuration\n self.add_leaf(cmd='show lisp all service {service} summary'.format(service=service),\n src=service_src+'[virtual_network_ids]',\n dest='info[{service}][instance_ids]'.format(service=service),\n service=service)\n self.make() ; self.make()\n\n # service\n self.add_leaf(cmd='show lisp all service {service}'.format(service=service),\n src=service_src+'[service]',\n dest=service_dest+'[service]',\n service=service)\n\n # lisp_role - N/A\n # lisp_role_type - N/A\n # lisp_role_type -N/A\n\n # virtual_network_ids\n # instance_id\n # lisp_role\n # lisp_role_type\n # lisp_role_type\n self.add_leaf(cmd='show lisp all service {service} summary'.format(service=service),\n src=service_src+'[virtual_network_ids][(?P<vni>.*)][lisp_role]',\n dest=service_dest+'[virtual_network_ids][(?P<vni>.*)][lisp_role]',\n service=service)\n\n # Get instance_ids for this service\n if hasattr(self, 'info') and service in self.info and \\\n 'instance_ids' in self.info[service]:\n \n # Loop over all instance_ids\n for instance_id in sorted(self.info[service]['instance_ids']):\n\n # locator_sets\n # locator_set_name\n # locator_set_name\n self.add_leaf(cmd='show lisp all instance-id {instance_id} {service} database'.format(service=service, instance_id=instance_id),\n src=info_src+'[locator_sets][(?P<ls>.*)][locator_set_name]',\n dest=info_dest+'[locator_sets][(?P<ls>.*)][locator_set_name]',\n service=service, instance_id=instance_id)\n\n # ==========\n # map_server\n # ==========\n ms_src = service_src + '[map_server]'\n ms_dest = service_dest + '[map_server]'\n\n # map_server\n # enabled\n self.add_leaf(cmd='show lisp all instance-id {instance_id} {service}'.format(instance_id=instance_id, service=service),\n src=ms_src+'[enabled]',\n dest=ms_dest+'[enabled]',\n service=service, instance_id=instance_id)\n\n # map_server\n # sites\n # ms_site_id\n # site_id\n # auth_key - N/A\n # auth_key_value - N/A\n # auth_key_type - N/A\n self.add_leaf(cmd='show lisp all instance-id {instance_id} {service} server detail internal'.format(service=service, instance_id=instance_id),\n src=ms_src+'[sites][(?P<site>.*)][site_id]',\n dest=ms_dest+'[sites][(?P<site>.*)][site_id]',\n service=service, instance_id=instance_id)\n\n # =====================\n # map_server\n # virtual_network_ids\n # =====================\n vni_src = ms_src+'[virtual_network_ids][(?P<iid>.*)]'\n vni_dest = ms_dest+'[virtual_network_ids][(?P<iid>.*)]'\n\n # map_server\n # virtual_network_ids\n # instance_id\n # vni\n # extranets\n # ms_extranet\n # extranet\n # provider\n # ms_extranet_provider_eid\n # eid_record\n # bidirectional\n # subscriber\n # ms_extranet_provider_eid\n # eid_record\n # bidirectional\n for key in ['vni', 'extranets']:\n self.add_leaf(cmd='show lisp all extranet {extranet} 
instance-id {instance_id}'.format(instance_id=instance_id, extranet='ext1'),\n src=vni_src+'[{key}]'.format(key=key),\n dest=vni_dest+'[{key}]'.format(key=key),\n instance_id=instance_id, extranet='ext1')\n\n # map_server\n # virtual_network_ids\n # counters\n # map_registers_in\n # map_notify_records_out\n # proxy_reply_records_out\n # map_requests_forwarded_out\n # map_registers_in_auth_failed\n for key in ['map_register_records_in',\n 'map_notify_records_out',\n 'map_server_proxy_reply_records_out',\n 'map_server_map_requests_forwarded',\n 'map_registers_in_auth_failed',\n ]:\n # reset key names\n if key == 'map_server_proxy_reply_records_out':\n key2 = 'proxy_reply_records_out'\n elif key == 'map_server_map_requests_forwarded':\n key2 = 'map_requests_forwarded_out'\n elif key == 'map_register_records_in':\n key2 = 'map_registers_in'\n else:\n key2 = key\n # add the leaf\n self.add_leaf(cmd='show lisp all instance-id {instance_id} {service} statistics'.format(service=service, instance_id=instance_id),\n src=service_src+'[statistics][EID][control][{key}]'.format(key=key),\n dest=ms_dest+'[virtual_network_ids][{iid}][counters][{key2}]'.format(iid=instance_id, key2=key2),\n service=service, instance_id=instance_id)\n\n # =====================\n # map_server\n # virtual_network_ids\n # mappings\n # =====================\n mapping_src = vni_src+'[mappings][(?P<ms_eid>.*)]'\n mapping_dest = vni_dest+'[mappings][(?P<ms_eid>.*)]'\n\n # mappings\n # ms_eid_id\n # eid_id\n # site_id\n # more_specifics_accepted\n # mapping_expiration_timeout - N/A\n for key in ['eid_id', 'site_id', 'more_specifics_accepted']:\n self.add_leaf(cmd='show lisp all instance-id {instance_id} {service} server detail internal'.format(service=service, instance_id=instance_id),\n src=mapping_src+'[{key}]'.format(key=key),\n dest=mapping_dest+'[{key}]'.format(key=key),\n service=service, instance_id=instance_id)\n\n # mappings\n # ms_eid_id\n # eid_address\n # address_type\n # virtual_network_id\n # ipv4\n # ipv4\n # ipv4_prefix\n # ipv4_prefix\n # ipv6\n # ipv6\n # ipv6_prefix\n # ipv6_prefix\n for key in ['address_type', 'virtual_network_id', 'ipv4',\n 'ipv4_prefix', 'ipv6', 'ipv6_prefix']:\n self.add_leaf(cmd='show lisp all instance-id {instance_id} {service} server detail internal'.format(service=service, instance_id=instance_id),\n src=mapping_src+'[eid_address][{key}]'.format(key=key),\n dest=mapping_dest+'[eid_address][{key}]'.format(key=key),\n service=service, instance_id=instance_id)\n\n # mappings\n # ms_eid_id\n # mapping_records\n # xtr_id\n # site_id\n # time_to_live\n # creation_time\n # authoritative - N/A\n # static - N/A\n for key in ['xtr_id', 'site_id', 'time_to_live', 'creation_time']:\n self.add_leaf(cmd='show lisp all instance-id {instance_id} {service} server detail internal'.format(service=service, instance_id=instance_id),\n src=mapping_src+'[mapping_records][(?P<xtr>.*)][{key}]'.format(key=key),\n dest=mapping_dest+'[mapping_records][(?P<xtr>.*)][{key}]'.format(key=key),\n service=service, instance_id=instance_id)\n\n # mappings\n # ms_eid_id\n # mapping_records\n # eid\n # address_type\n # virtual_network_id\n # ipv4\n # ipv4\n # ipv4_prefix\n # ipv4_prefix\n # ipv6\n # ipv6\n # ipv6_prefix\n # ipv6_prefix\n for key in ['address_type', 'virtual_network_id', 'ipv4',\n 'ipv4_prefix', 'ipv6', 'ipv6_prefix']:\n self.add_leaf(cmd='show lisp all instance-id {instance_id} {service} server detail internal'.format(service=service, instance_id=instance_id),\n 
src=mapping_src+'[mapping_records][(?P<xtr>.*)][eid][{key}]'.format(key=key),\n dest=mapping_dest+'[mapping_records][(?P<xtr>.*)][eid][{key}]'.format(key=key),\n service=service, instance_id=instance_id)\n\n # mappings\n # ms_eid_id\n # mapping_records\n # xtr_id\n # negative_mapping - N/A\n # map_reply_action - N/A\n # positive_mapping - N/A\n # rlocs - N/A\n # id - N/A\n # id - N/A\n # locator_address - N/A\n # address_type - N/A\n # virtual_network_id - N/A\n # ipv4 - N/A\n # ipv4_prefix - N/A\n # ipv6 - N/A\n # ipv6_prefix - N/A\n # priority - N/A\n # weight - N/A\n # multicast_priority - N/A\n # multicast_weight - N/A\n\n # map_server\n # mapping_system_type - N/A\n\n # map_server\n # summary\n # number_configured_sites\n # number_registered_sites\n # af_datum\n # address_type\n # address_type\n # number_configured_eids\n # number_registered_eids\n for key in ['number_configured_sites', \n 'number_registered_sites', 'af_datum']:\n self.add_leaf(cmd='show lisp all instance-id {instance_id} {service} server summary'.format(service=service, instance_id=instance_id),\n src=service_src+'[instance_id][(?P<iid>.*)][map_server][summary][{key}]'.format(key=key),\n dest=ms_dest+'[summary][{key}]'.format(key=key),\n service=service, instance_id=instance_id)\n\n # map_server\n # counters\n # map_registers_in - N/A\n # map_registers_in_auth_failed - N/A\n # map_notify_records_out - N/A\n # proxy_reply_records_out - N/A\n # map_requests_forwarded_out - N/A\n\n # ============\n # map_resolver\n # ============\n mr_src = service_src + '[map_resolver]'\n mr_dest = service_dest + '[map_resolver]'\n\n # map_resolver\n # enabled\n self.add_leaf(cmd='show lisp all instance-id {instance_id} {service}'.format(service=service, instance_id=instance_id),\n src=mr_src+'[enabled]',\n dest=mr_src+'[enabled]',\n service=service, instance_id=instance_id)\n\n # map_resolver\n # mapping_system_type - N/A\n # ms_address - N/A\n\n # ===\n # itr\n # ===\n itr_src = service_src + '[itr]'\n itr_dest = service_dest + '[itr]'\n\n # itr\n # enabled\n # rloc_probing - N/A\n # interval - N/A\n # retries - N/A\n # retries_interval - N/A\n # itr_rlocs - N/A\n # map_resolvers\n # itr_map_resolver\n # map_resolver\n # proxy_itrs\n # proxy_itr\n # proxy_etr_address\n for key in ['enabled', 'map_resolvers', 'proxy_itrs']:\n self.add_leaf(cmd='show lisp all instance-id {instance_id} {service}'.format(service=service, instance_id=instance_id),\n src=itr_src+'[{key}]'.format(key=key),\n dest=itr_dest+'[{key}]'.format(key=key),\n service=service, instance_id=instance_id)\n\n # ===========\n # itr\n # map_cache\n # ===========\n map_cache_src = itr_src + '[map_cache][(?P<iid>.*)]' \n map_cache_dest = itr_dest + '[map_cache][(?P<iid>.*)]'\n\n # itr\n # map_cache\n # instance_id\n # vni\n self.add_leaf(cmd='show lisp all instance-id {instance_id} {service} map-cache'.format(service=service, instance_id=instance_id),\n src=map_cache_src+'[vni]',\n dest=map_cache_dest+'[vni]',\n service=service, instance_id=instance_id)\n\n # ========================\n # itr\n # map_cache\n # instance_id\n # mappings\n # itr_map_id\n # ========================\n mappings_src = map_cache_src+'[mappings][(?P<map_id>.*)]'\n mappings_dest = map_cache_dest+'[mappings][(?P<map_id>.*)]'\n\n # itr\n # map_cache\n # instance_id\n # mappings\n # itr_map_id\n # id\n # time_to_live\n # creation_time\n # authoritative - N/A\n # static - N/A\n for key in ['id', 'time_to_live', 'creation_time']:\n self.add_leaf(cmd='show lisp all instance-id {instance_id} {service} 
map-cache'.format(service=service, instance_id=instance_id),\n                                      src=mappings_src+'[{key}]'.format(key=key),\n                                      dest=mappings_dest+'[{key}]'.format(key=key),\n                                      service=service, instance_id=instance_id)\n\n                    # itr\n                    #   map_cache\n                    #     instance_id\n                    #       mappings\n                    #         itr_map_id\n                    #           eid\n                    #             address_type\n                    #             vrf\n                    #             ipv4\n                    #               ipv4\n                    #             ipv4_prefix\n                    #               ipv4_prefix\n                    #             ipv6\n                    #               ipv6\n                    #             ipv6_prefix\n                    #               ipv6_prefix\n                    for key in ['address_type', 'vrf', 'ipv4', 'ipv4_prefix',\n                                'ipv6', 'ipv6_prefix']:\n                        self.add_leaf(cmd='show lisp all instance-id {instance_id} {service} map-cache'.format(service=service, instance_id=instance_id),\n                                      src=mappings_src+'[eid][{key}]'.format(key=key),\n                                      dest=mappings_dest+'[eid][{key}]'.format(key=key),\n                                      service=service, instance_id=instance_id)\n\n                    # itr\n                    #   map_cache\n                    #     instance_id\n                    #       mappings\n                    #         itr_map_id\n                    #           negative_mapping\n                    #             map_reply_action\n                    self.add_leaf(cmd='show lisp all instance-id {instance_id} {service} map-cache'.format(service=service, instance_id=instance_id),\n                                  src=mappings_src+'[negative_mapping][map_reply_action]',\n                                  dest=mappings_dest+'[negative_mapping][map_reply_action]',\n                                  service=service, instance_id=instance_id)\n\n                    # itr\n                    #   map_cache\n                    #     instance_id\n                    #       mappings\n                    #         itr_map_id\n                    #           positive_mapping\n                    #             rlocs\n                    #               id\n                    #                 id\n                    #                 priority\n                    #                 weight\n                    #                 multicast_priority\n                    #                 multicast_weight\n                    for key in ['id', 'priority', 'weight', 'multicast_priority',\n                                'multicast_weight']:\n                        self.add_leaf(cmd='show lisp all instance-id {instance_id} {service} map-cache'.format(service=service, instance_id=instance_id),\n                                      src=mappings_src+'[positive_mapping][rlocs][(?P<id>.*)][{key}]'.format(key=key),\n                                      dest=mappings_dest+'[positive_mapping][rlocs][(?P<id>.*)][{key}]'.format(key=key),\n                                      service=service, instance_id=instance_id)\n\n                    # itr\n                    #   map_cache\n                    #     instance_id\n                    #       mappings\n                    #         itr_map_id\n                    #           positive_mapping\n                    #             rlocs\n                    #               id\n                    #                 locator_address\n                    #                   address_type\n                    #                   virtual_network_id\n                    #                   ipv4\n                    #                     ipv4\n                    #                   ipv4_prefix\n                    #                     ipv4_prefix\n                    #                   ipv6\n                    #                     ipv6\n                    #                   ipv6_prefix\n                    #                     ipv6_prefix\n                    for key in ['address_type', 'virtual_network_id', 'ipv4',\n                                'ipv4_prefix', 'ipv6', 'ipv6_prefix']:\n                        self.add_leaf(cmd='show lisp all instance-id {instance_id} {service} map-cache'.format(service=service, instance_id=instance_id),\n                                      src=mappings_src+'[positive_mapping][rlocs][(?P<id>.*)][locator_address][{key}]'.format(key=key),\n                                      dest=mappings_dest+'[positive_mapping][rlocs][(?P<id>.*)][locator_address][{key}]'.format(key=key),\n                                      service=service, instance_id=instance_id)\n\n                    # ===\n                    # etr\n                    # ===\n                    etr_src = service_src + '[etr]'\n                    etr_dest = service_dest + '[etr]'\n\n                    # etr\n                    #   enabled\n                    #   encapsulation\n                    for key in ['enabled', 'encapsulation']:\n                        self.add_leaf(cmd='show lisp all instance-id {instance_id} {service}'.format(service=service, instance_id=instance_id),\n                                      src=etr_src+'[{key}]'.format(key=key),\n                                      dest=etr_dest+'[{key}]'.format(key=key),\n                                      service=service, instance_id=instance_id)\n\n                    # etr\n                    #   mapping_servers\n                    #     etr_map_server\n                    #       ms_address\n                    #       auth_key - N/A\n                    #       auth_key_type - N/A\n                    self.add_leaf(cmd='show lisp all instance-id {instance_id} {service}'.format(service=service, instance_id=instance_id),\n                                  src=etr_src+'[mapping_servers][(?P<ms>.*)][ms_address]',\n                                  dest=etr_dest+'[mapping_servers][(?P<ms>.*)][ms_address]',\n                                  service=service, instance_id=instance_id)\n\n                    # etr\n                    #   local_eids\n                    #     instance_id\n                    #       vni\n                    self.add_leaf(cmd='show lisp all instance-id {instance_id} {service} database'.format(service=service, instance_id=instance_id),\n                                  
src=etr_src+'[local_eids][(?P<iid>.*)][vni]',\n dest=etr_dest+'[local_eids][(?P<iid>.*)][vni]',\n service=service, instance_id=instance_id)\n\n # etr\n # local_eids\n # instance_id\n # use_petrs\n # etr_use_ptr\n # use_petr\n # priority - N/A\n # weight - N/A\n self.add_leaf(cmd='show lisp all instance-id {instance_id} {service}'.format(service=service, instance_id=instance_id),\n src=etr_src+'[use_petrs]',\n dest=etr_dest+'[local_eids][{iid}][use_petrs]'.format(iid=instance_id),\n service=service, instance_id=instance_id)\n\n # etr\n # local_eids\n # instance_id\n # dynamic_eids\n # etr_dyn_eid_id\n # id\n # rlocs\n # loopback_address\n # priority\n # weight\n # record_ttl - N/A\n # want_map_notify - N/A\n # proxy_reply - N/A\n # registration_interval - N/A\n # eids\n # etr_eid_id\n # id\n # rlocs\n # loopback_address\n # priority\n # weight\n # record_ttl - N/A\n # want_map_notify - N/A\n # proxy_reply - N/A\n # registration_interval - N/A\n for etr_type in ['dynamic_eids', 'eids']:\n for key in ['id', 'rlocs', 'loopback_address', 'priority',\n 'weight']:\n self.add_leaf(cmd='show lisp all instance-id {instance_id} {service} database'.format(service=service, instance_id=instance_id),\n src=etr_src+'[local_eids][(?P<iid>.*)][{etr_type}][(?P<dyn_id>.*)][{key}]'.format(etr_type=etr_type, key=key),\n dest=etr_dest+'[local_eids][(?P<iid>.*)][{etr_type}][(?P<dyn_id>.*)][{key}]'.format(etr_type=etr_type, key=key),\n service=service, instance_id=instance_id)\n\n # etr\n # local_eids\n # instance_id\n # dynamic_eids\n # etr_dyn_eid_id\n # eid_address\n # address_type\n # vrf\n # ipv4\n # ipv4\n # ipv4_prefix\n # ipv4_prefix\n # ipv6\n # ipv6\n # ipv6_prefix\n # ipv6_prefix\n # eids\n # etr_dyn_eid_id\n # eid_address\n # address_type\n # vrf\n # ipv4\n # ipv4\n # ipv4_prefix\n # ipv4_prefix\n # ipv6\n # ipv6\n # ipv6_prefix\n # ipv6_prefix\n for etr_type in ['dynamic_eids', 'eids']:\n for key in ['address_type', 'vrf', 'ipv4', 'ipv6',\n 'ipv4_prefix', 'ipv6_prefix']:\n self.add_leaf(cmd='show lisp all instance-id {instance_id} {service} database'.format(service=service, instance_id=instance_id),\n src=etr_src+'[local_eids][(?P<iid>.*)][{etr_type}][(?P<dyn_id>.*)][eid_address][{key}]'.format(etr_type=etr_type, key=key),\n dest=etr_dest+'[local_eids][(?P<iid>.*)][{etr_type}][(?P<dyn_id>.*)][eid_address][{key}]'.format(etr_type=etr_type, key=key),\n service=service, instance_id=instance_id)\n\n\n # Delete instance_ids for this service\n try:\n del self.info[service]\n except KeyError:\n pass\n\n ########################################################################\n # Final Structure\n ########################################################################\n\n # Make final Ops structure\n self.make(final_call=True)\n", "id": "6381998", "language": "Python", "matching_score": 5.255950927734375, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/lisp/iosxe/lisp.py" }, { "content": "''' \nLisp Genie Ops Object Outputs for IOSXE.\n'''\n\n\nclass LispOutput(object):\n\n ############################################################################\n # LISP INFO OUTPUTS\n ############################################################################\n\n # --------------------------------------------------------------------------\n # 'show lisp all service <service> summary'\n # --------------------------------------------------------------------------\n\n # 'show lisp all service ipv4 summary'\n ShowLispServiceIpv4Summary = '''\\\n 202-XTR#show lisp all service ipv4 summary \n\n 
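# --- Editorial sketch, not part of the original sources ---\n# Minimal consumption of the Ops object assembled by the add_leaf() calls\n# above; assumes an already-connected pyATS/Genie 'device' object. The\n# attribute path mirrors the LispInfo structure shown in the fixtures below.\nfrom genie.libs.ops.lisp.iosxe.lisp import Lisp\n\nlisp = Lisp(device=device)  # 'device' is an assumed connected device\nlisp.learn()                # runs the show commands and populates lisp.info\nprint(lisp.info['lisp_router_instances'][0]['service']['ipv4']['itr']['enabled'])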
=====================================================\n Output for router lisp 0\n =====================================================\n Router-lisp ID: 0\n Instance count: 2\n Key: DB - Local EID Database entry count (@ - RLOC check pending\n * - RLOC consistency problem),\n DB no route - Local EID DB entries with no matching RIB route,\n Cache - Remote EID mapping cache size, IID - Instance ID,\n Role - Configured Role\n\n Interface DB DB no Cache Incom Cache \n EID VRF name (.IID) size route size plete Idle Role\n red LISP0.101 1 0 2 0.0% 0.0% ITR-ETR\n\n Number of eid-tables: 2\n Total number of database entries: 2 (inactive 0)\n EID-tables with inconsistent locators: 0\n Total number of map-cache entries: 3\n EID-tables with incomplete map-cache entries: 0\n EID-tables pending map-cache update to FIB: 0\n '''\n\n # 'show lisp all service ipv6 summary'\n ShowLispServiceIpv6Summary = '''\\\n 202-XTR#show lisp all service ipv6 summary \n =====================================================\n Output for router lisp 0\n =====================================================\n Router-lisp ID: 0\n Instance count: 2\n Key: DB - Local EID Database entry count (@ - RLOC check pending\n * - RLOC consistency problem),\n DB no route - Local EID DB entries with no matching RIB route,\n Cache - Remote EID mapping cache size, IID - Instance ID,\n Role - Configured Role\n\n Interface DB DB no Cache Incom Cache \n EID VRF name (.IID) size route size plete Idle Role\n red LISP0.101 1 0 2 0.0% 0.0% ITR-ETR\n\n Number of eid-tables: 1\n Total number of database entries: 1 (inactive 0)\n EID-tables with inconsistent locators: 0\n Total number of map-cache entries: 2\n EID-tables with incomplete map-cache entries: 0\n EID-tables pending map-cache update to FIB: 0\n '''\n\n # 'show lisp all service ethernet summary'\n ShowLispServiceEthernetSummary = '''\n 202-XTR#show lisp all service ethernet summary\n =================================================\n Output for router lisp 0\n =================================================\n Router-lisp ID: 0\n Instance count: 69\n Key: DB - Local EID Database entry count (@ - RLOC check pending\n * - RLOC consistency problem),\n DB no route - Local EID DB entries with no matching RIB route,\n Cache - Remote EID mapping cache size, IID - Instance ID,\n Role - Configured Role\n\n Interface DB DB no Cache Incom Cache\n EID VRF name (.IID) size route size plete Idle Role\n LISP0.101 2 0 4 0.0% 100% NONE\n\n Number of eid-tables: 2\n Total number of database entries: 4 (inactive 0)\n Maximum database entries: 5120\n EID-tables with inconsistent locators: 0\n Total number of map-cache entries: 4\n Maximum map-cache entries: 5120\n EID-tables with incomplete map-cache entries: 0\n EID-tables pending map-cache update to FIB: 0\n '''\n\n # --------------------------------------------------------------------------\n # 'show lisp all service <service>'\n # --------------------------------------------------------------------------\n\n # 'show lisp all service ipv4'\n ShowLispServiceIpv4 = '''\\\n 202-XTR#show lisp all service ipv4\n =================================================\n Output for router lisp 0\n =================================================\n Router-lisp ID: 0\n Locator table: default\n Ingress Tunnel Router (ITR): enabled\n Egress Tunnel Router (ETR): enabled\n Proxy-ITR Router (PITR): enabled RLOCs: 10.10.10.10\n Proxy-ETR Router (PETR): disabled\n NAT-traversal Router (NAT-RTR): disabled\n Mobility First-Hop Router: disabled\n Map Server (MS): disabled\n 
Map Resolver (MR): disabled\n Delegated Database Tree (DDT): disabled\n ITR Map-Resolver(s): 10.64.4.4, 10.166.13.13\n ETR Map-Server(s): 10.64.4.4, 10.166.13.13\n xTR-ID: 0x730E0861-0x12996F6D-0xEFEA2114-0xE1C951F7\n site-ID: unspecified\n ITR local RLOC (last resort): *** NOT FOUND ***\n ITR Solicit Map Request (SMR): accept and process\n Max SMRs per map-cache entry: 8 more specifics\n Multiple SMR suppression time: 20 secs\n ETR accept mapping data: disabled, verify disabled\n ETR map-cache TTL: 1d00h\n Locator Status Algorithms:\n RLOC-probe algorithm: disabled\n RLOC-probe on route change: N/A (periodic probing disabled)\n RLOC-probe on member change: disabled\n LSB reports: process\n IPv4 RLOC minimum mask length: /0\n IPv6 RLOC minimum mask length: /0\n Map-cache: \n Map-cache limit: 1000\n Map-cache activity check period: 60 secs\n Persistent map-cache: disabled\n Database: \n Dynamic database mapping limit: 1000\n '''\n\n # 'show lisp all service ipv6'\n ShowLispServiceIpv6 = '''\\\n 202-XTR#show lisp all service ipv6\n =================================================\n Output for router lisp 0\n =================================================\n Router-lisp ID: 0\n Locator table: default\n Ingress Tunnel Router (ITR): enabled\n Egress Tunnel Router (ETR): enabled\n Proxy-ITR Router (PITR): disabled\n Proxy-ETR Router (PETR): disabled\n NAT-traversal Router (NAT-RTR): disabled\n Mobility First-Hop Router: disabled\n Map Server (MS): disabled\n Map Resolver (MR): disabled\n Delegated Database Tree (DDT): disabled\n ITR Map-Resolver(s): 10.64.4.4, 10.166.13.13\n ETR Map-Server(s): 10.64.4.4, 10.166.13.13\n xTR-ID: 0x730E0861-0x12996F6D-0xEFEA2114-0xE1C951F7\n site-ID: unspecified\n ITR local RLOC (last resort): *** NOT FOUND ***\n ITR Solicit Map Request (SMR): accept and process\n Max SMRs per map-cache entry: 8 more specifics\n Multiple SMR suppression time: 20 secs\n ETR accept mapping data: disabled, verify disabled\n ETR map-cache TTL: 1d00h\n Locator Status Algorithms:\n RLOC-probe algorithm: disabled\n RLOC-probe on route change: N/A (periodic probing disabled)\n RLOC-probe on member change: disabled\n LSB reports: process\n IPv4 RLOC minimum mask length: /0\n IPv6 RLOC minimum mask length: /0\n Map-cache: \n Map-cache limit: 1000\n Map-cache activity check period: 60 secs\n Persistent map-cache: disabled\n Database: \n Dynamic database mapping limit: 1000\n '''\n\n # 'show lisp all service ethernet'\n ShowLispServiceEthernet = '''\\\n OTT-LISP-C3K-3-xTR1#show lisp all service ethernet\n\n =================================================\n Output for router lisp 0\n =================================================\n Router-lisp ID: 0\n Locator table: default\n Ingress Tunnel Router (ITR): enabled\n Egress Tunnel Router (ETR): enabled\n Proxy-ITR Router (PITR): disabled\n Proxy-ETR Router (PETR): disabled\n NAT-traversal Router (NAT-RTR): disabled\n Mobility First-Hop Router: disabled\n Map Server (MS): disabled\n Map Resolver (MR): disabled\n Mr-use-petr: disabled\n Delegated Database Tree (DDT): disabled\n ITR Map-Resolver(s): 10.94.44.44\n 10.84.66.66\n ETR Map-Server(s): 10.94.44.44\n 10.84.66.66\n xTR-ID: 0x730E0861-0x12996F6D-0xEFEA2114-0xE1C951F7\n site-ID: unspecified\n ITR local RLOC (last resort): *** NOT FOUND ***\n ITR Solicit Map Request (SMR): accept and process\n Max SMRs per map-cache entry: 8 more specifics\n Multiple SMR suppression time: 20 secs\n ETR accept mapping data: disabled, verify disabled\n ETR map-cache TTL: 1d00h\n Locator Status 
Algorithms:\n RLOC-probe algorithm: disabled\n RLOC-probe on route change: N/A (periodic probing disabled)\n RLOC-probe on member change: disabled\n LSB reports: process\n IPv4 RLOC minimum mask length: /0\n IPv6 RLOC minimum mask length: /0\n Map-cache:\n Map-cache limit: 5120\n Map-cache activity check period: 60 secs\n Persistent map-cache: disabled\n Source locator configuration:\n Vlan100: 10.229.11.1 (Loopback0)\n Vlan101: 10.229.11.1 (Loopback0)\n Database:\n Dynamic database mapping limit: 5120\n '''\n\n # --------------------------------------------------------------------------\n # 'show lisp all instance-id <instance_id> <service>'\n # --------------------------------------------------------------------------\n\n # 'show lisp all instance-id 101 service ipv4'\n ShowLispInstance101ServiceIpv4 = '''\n 202-XTR#show lisp all instance-id 101 ipv4\n\n =================================================\n Output for router lisp 0\n =================================================\n Instance ID: 101\n Router-lisp ID: 0\n Locator table: default\n EID table: vrf red\n Ingress Tunnel Router (ITR): enabled\n Egress Tunnel Router (ETR): enabled\n Proxy-ITR Router (PITR): enabled RLOCs: 10.10.10.10\n Proxy-ETR Router (PETR): disabled\n NAT-traversal Router (NAT-RTR): disabled\n Mobility First-Hop Router: disabled\n Map Server (MS): disabled\n Map Resolver (MR): disabled\n Delegated Database Tree (DDT): disabled\n Site Registration Limit: 0\n Map-Request source: derived from EID destination\n ITR Map-Resolver(s): 10.64.4.4, 10.166.13.13\n ETR Map-Server(s): 10.64.4.4 (17:49:58), 10.166.13.13 (00:00:35)\n xTR-ID: 0x730E0861-0x12996F6D-0xEFEA2114-0xE1C951F7\n site-ID: unspecified\n ITR local RLOC (last resort): 10.16.2.2\n ITR use proxy ETR RLOC(s): 10.10.10.10\n ITR Solicit Map Request (SMR): accept and process\n Max SMRs per map-cache entry: 8 more specifics\n Multiple SMR suppression time: 20 secs\n ETR accept mapping data: disabled, verify disabled\n ETR map-cache TTL: 1d00h\n Locator Status Algorithms:\n RLOC-probe algorithm: disabled\n RLOC-probe on route change: N/A (periodic probing disabled)\n RLOC-probe on member change: disabled\n LSB reports: process\n IPv4 RLOC minimum mask length: /0\n IPv6 RLOC minimum mask length: /0\n Map-cache: \n Static mappings configured: 0\n Map-cache size/limit: 2/1000\n Imported route count/limit: 0/1000\n Map-cache activity check period: 60 secs\n Map-cache FIB updates: established\n Persistent map-cache: disabled\n Database: \n Total database mapping size: 1\n static database size/limit: 1/65535\n dynamic database size/limit: 0/65535\n route-import database size/limit: 0/1000\n Inactive (deconfig/away) size: 0\n Encapsulation type: lisp\n '''\n\n # 'show lisp all instance-id 101 service ipv6'\n ShowLispInstance101ServiceIpv6 = '''\\\n 202-XTR#show lisp all instance-id 101 ipv6\n\n =================================================\n Output for router lisp 0\n =================================================\n Instance ID: 101\n Router-lisp ID: 0\n Locator table: default\n EID table: vrf red\n Ingress Tunnel Router (ITR): enabled\n Egress Tunnel Router (ETR): enabled\n Proxy-ITR Router (PITR): disabled\n Proxy-ETR Router (PETR): disabled\n NAT-traversal Router (NAT-RTR): disabled\n Mobility First-Hop Router: disabled\n Map Server (MS): disabled\n Map Resolver (MR): disabled\n Delegated Database Tree (DDT): disabled\n Site Registration Limit: 0\n Map-Request source: derived from EID destination\n ITR Map-Resolver(s): 10.100.5.5, 10.66.12.12\n ETR 
Map-Server(s): 10.100.5.5 (17:49:58), 10.66.12.12 (00:00:35)\n xTR-ID: 0x730E0861-0x12996F6D-0xEFEA2114-0xE1C951F7\n site-ID: unspecified\n ITR local RLOC (last resort): 10.16.2.2\n ITR use proxy ETR RLOC(s): 10.10.10.10\n ITR Solicit Map Request (SMR): accept and process\n Max SMRs per map-cache entry: 8 more specifics\n Multiple SMR suppression time: 20 secs\n ETR accept mapping data: disabled, verify disabled\n ETR map-cache TTL: 1d00h\n Locator Status Algorithms:\n RLOC-probe algorithm: disabled\n RLOC-probe on route change: N/A (periodic probing disabled)\n RLOC-probe on member change: disabled\n LSB reports: process\n IPv4 RLOC minimum mask length: /0\n IPv6 RLOC minimum mask length: /0\n Map-cache: \n Static mappings configured: 0\n Map-cache size/limit: 2/1000\n Imported route count/limit: 0/1000\n Map-cache activity check period: 60 secs\n Map-cache FIB updates: established\n Persistent map-cache: disabled\n Database: \n Total database mapping size: 1\n static database size/limit: 1/65535\n dynamic database size/limit: 0/65535\n route-import database size/limit: 0/1000\n Inactive (deconfig/away) size: 0\n Encapsulation type: lisp\n '''\n\n # 'show lisp all instance-id 101 service ethernet'\n ShowLispInstance101ServiceEthernet = '''\\\n '''\n\n # --------------------------------------------------------------------------\n # 'show lisp all instance-id <instance_id> <service> server detail internal'\n # --------------------------------------------------------------------------\n\n # 'show lisp all instance-id 101 service ipv4 server detail internal'\n ShowLispInstance101Ipv4ServerDetailInternal = '''\\\n 204-MSMR#show lisp all instance-id 101 ipv4 server detail internal\n =====================================================\n Output for router lisp 0\n =====================================================\n LISP Site Registration Information\n\n Site name: provider\n Allowed configured locators: any\n Allowed EID-prefixes:\n\n Site name: xtr1_1\n Allowed configured locators: any\n Allowed EID-prefixes:\n\n EID-prefix: 192.168.0.0/24 instance-id 101\n First registered: 1w4d\n Last registered: 02:41:22\n Routing table tag: 0\n Origin: Configuration, accepting more specifics\n Merge active: No\n Proxy reply: No\n TTL: 00:00:00\n State: unknown\n Registration errors:\n Authentication failures: 0\n Allowed locators mismatch: 0\n No registrations.\n\n EID-prefix: 192.168.0.1/32 instance-id 101\n First registered: 01:12:41\n Last registered: 01:12:41\n Routing table tag: 0\n Origin: Dynamic, more specific of 192.168.0.0/24\n Merge active: No\n Proxy reply: Yes\n TTL: 1d00h\n State: complete\n Registration errors:\n Authentication failures: 0\n Allowed locators mismatch: 0\n ETR 10.16.2.2, last registered 01:12:41, proxy-reply, map-notify\n TTL 1d00h, no merge, hash-function sha1, nonce 0x70D18EF4-0x3A605D67\n state complete, no security-capability\n xTR-ID 0x21EDD25F-0x7598784C-0x769C8E4E-0xC04926EC\n site-ID unspecified\n sourced by reliable transport\n Locator Local State Pri/Wgt Scope\n 10.16.2.2 yes up 50/50 IPv4 none\n\n Site name: xtr1_2\n Allowed configured locators: any\n Allowed EID-prefixes:\n\n Site name: xtr2\n Allowed configured locators: any\n Allowed EID-prefixes:\n\n EID-prefix: 192.168.9.0/24 instance-id 101\n First registered: 01:55:47\n Last registered: 01:55:47\n Routing table tag: 0\n Origin: Configuration\n Merge active: No\n Proxy reply: Yes\n TTL: 1d00h\n State: complete\n Registration errors:\n Authentication failures: 0\n Allowed locators mismatch: 0\n ETR 
10.1.8.8, last registered 01:55:47, proxy-reply, map-notify\n TTL 1d00h, no merge, hash-function sha1, nonce 0xB06AE31D-0x6ADB0BA5\n state complete, no security-capability\n xTR-ID 0x77200484-0xD134DC48-0x0FBAD9DC-0x4A46CA5D\n site-ID unspecified\n sourced by reliable transport\n Locator Local State Pri/Wgt Scope\n 10.1.8.8 yes up 50/50 IPv4 none\n '''\n\n # 'show lisp all instance-id 101 service ipv6 server detail internal'\n ShowLispInstance101Ipv6ServerDetailInternal = '''\\\n '''\n\n # 'show lisp all instance-id 101 service ethernet server detail internal'\n ShowLispInstance101EthernetServerDetailInternal = '''\\\n '''\n\n # --------------------------------------------------------------------------\n # 'show lisp all extranet <extranet> instance-id <instance_id>'\n # --------------------------------------------------------------------------\n\n # 'show lisp all extranet ext1 instance-id 101'\n ShowLispExtranet101 = '''\\\n 204-MSMR#show lisp all extranet ext1 instance-id 101\n Output for router lisp 0\n\n -----------------------------------------------------\n LISP Extranet table\n Home Instance ID: 101\n Total entries: 6\n Provider/Subscriber Inst ID EID prefix\n Provider 103 10.121.88.0/24\n Provider 103 10.220.100.0/24\n Provider 103 192.168.195.0/24\n Subscriber 102 172.16.1.0/24\n Subscriber 101 192.168.0.0/24\n Subscriber 101 192.168.9.0/24\n '''\n\n # --------------------------------------------------------------------------\n # 'show lisp all instance-id <instance_id> <service> statistics'\n # --------------------------------------------------------------------------\n\n # 'show lisp all instance-id 101 ipv4 statistics'\n ShowLispInstance101Ipv4Stats = '''\n 202-XTR#show lisp all instance-id 101 ipv4 statistics \n =====================================================\n Output for router lisp 0\n =====================================================\n LISP EID Statistics for instance ID 101 - last cleared: never\n Control Packets:\n Map-Requests in/out: 0/4\n Encapsulated Map-Requests in/out: 0/3\n RLOC-probe Map-Requests in/out: 0/1\n SMR-based Map-Requests in/out: 0/0\n Map-Requests expired on-queue/no-reply: 0/0\n Map-Resolver Map-Requests forwarded: 0\n Map-Server Map-Requests forwarded: 0\n Map-Reply records in/out: 2/1\n Authoritative records in/out: 1/1\n Non-authoritative records in/out: 1/0\n Negative records in/out: 0/0\n RLOC-probe records in/out: 1/1\n Map-Server Proxy-Reply records out: 0\n WLC Map-Subscribe records in/out: 0/1\n Map-Subscribe failures in/out: 0/0\n WLC Map-Unsubscribe records in/out: 0/0\n Map-Unsubscribe failures in/out: 0/0\n Map-Register records in/out: 0/2857\n Map-Server AF disabled: 0\n Authentication failures: 0\n WLC Map-Register records in/out: 0/0\n WLC AP Map-Register in/out: 0/0\n WLC Client Map-Register in/out: 0/0\n WLC Map-Register failures in/out: 0/0\n Map-Notify records in/out: 4/0\n Authentication failures: 0\n WLC Map-Notify records in/out: 0/0\n WLC AP Map-Notify in/out: 0/0\n WLC Client Map-Notify in/out: 0/0\n WLC Map-Notify failures in/out: 0/0\n Dropped control packets in input queue: 0\n Deferred packet transmission: 0/0\n DDT referral deferred/dropped: 0/0\n DDT request deferred/dropped: 0/0\n Map-Reply deferred/dropped: 0/0\n MR negative Map-Reply deferred/dropped: 0/0\n MR Map-Request fwd deferred/dropped: 0/0\n MS Map-Request fwd deferred/dropped: 0/0\n MS proxy Map-Reply deferred/dropped: 0/0\n xTR mcast Map-Notify deferred/dropped: 0/0\n MS Info-Reply deferred/dropped: 0/0\n RTR Map-Register fwd deferred/dropped: 
0/0\n RTR Map-Notify fwd deferred/dropped: 0/0\n ETR Info-Request deferred/dropped: 0/0\n Errors:\n Map-Request invalid source rloc drops: 0\n Map-Register invalid source rloc drops: 0\n DDT ITR Map-Requests dropped: 0 (nonce-collision: 0, bad-xTR-nonce: 0)\n Cache Related:\n Cache entries created/deleted: 3/1\n NSF CEF replay entry count 0\n Number of EID-prefixes in map-cache: 2\n Number of negative entries in map-cache: 1\n Total number of RLOCs in map-cache: 1\n Average RLOCs per EID-prefix: 1\n Forwarding:\n Number of data signals processed: 1 (+ dropped 0)\n Number of reachability reports: 0 (+ dropped 0)\n ITR Map-Resolvers:\n Map-Resolver LastReply Metric ReqsSent Positive Negative No-Reply\n 10.64.4.4 03:13:58 4 1 1 0 0\n 10.166.13.13 03:13:58 26 2 0 0 1\n LISP RLOC Statistics - last cleared: never\n Control Packets:\n RTR Map-Requests forwarded: 0\n RTR Map-Notifies forwarded: 0\n DDT-Map-Requests in/out: 0/0\n DDT-Map-Referrals in/out: 0/0\n Errors:\n Map-Request format errors: 0\n Map-Reply format errors: 0\n Map-Referral format errors: 0\n Mapping record TTL alerts: 0\n DDT Requests failed: 0\n LISP Miscellaneous Statistics - last cleared: never\n Errors:\n Invalid IP version drops: 0\n Invalid IP header drops: 0\n Invalid IP proto field drops: 0\n Invalid packet size dropss: 0\n Invalid LISP control port drops: 0\n Invalid LISP checksum drops: 0\n Unsupported LISP packet type drops: 0\n Unknown packet drops: 0\n '''\n\n # 'show lisp all instance-id 101 ipv6 statistics'\n ShowLispInstance101Ipv6Stats = '''\n 202-XTR#show lisp all instance-id 101 ipv6 statistics \n =====================================================\n Output for router lisp 0\n =====================================================\n LISP EID Statistics for instance ID 101 - last cleared: never\n Control Packets:\n Map-Requests in/out: 0/6\n Encapsulated Map-Requests in/out: 0/5\n RLOC-probe Map-Requests in/out: 0/1\n SMR-based Map-Requests in/out: 0/0\n Map-Requests expired on-queue/no-reply 0/1\n Map-Resolver Map-Requests forwarded: 0\n Map-Server Map-Requests forwarded: 0\n Map-Reply records in/out: 2/1\n Authoritative records in/out: 1/1\n Non-authoritative records in/out: 1/0\n Negative records in/out: 0/0\n RLOC-probe records in/out: 1/1\n Map-Server Proxy-Reply records out: 0\n WLC Map-Subscribe records in/out: 0/2\n Map-Subscribe failures in/out: 0/0\n WLC Map-Unsubscribe records in/out: 0/0\n Map-Unsubscribe failures in/out: 0/0\n Map-Register records in/out: 0/52\n Map-Server AF disabled: 0\n Authentication failures: 0\n WLC Map-Register records in/out: 0/0\n WLC AP Map-Register in/out: 0/0\n WLC Client Map-Register in/out: 0/0\n WLC Map-Register failures in/out: 0/0\n Map-Notify records in/out: 2/0\n Authentication failures: 0\n WLC Map-Notify records in/out: 0/0\n WLC AP Map-Notify in/out: 0/0\n WLC Client Map-Notify in/out: 0/0\n WLC Map-Notify failures in/out: 0/0\n Dropped control packets in input queue: 0\n Deferred packet transmission: 0/0\n DDT referral deferred/dropped: 0/0\n DDT request deferred/dropped: 0/0\n Map-Reply deferred/dropped: 0/0\n MR negative Map-Reply deferred/dropped: 0/0\n MR Map-Request fwd deferred/dropped: 0/0\n MS Map-Request fwd deferred/dropped: 0/0\n MS proxy Map-Reply deferred/dropped: 0/0\n xTR mcast Map-Notify deferred/dropped: 0/0\n MS Info-Reply deferred/dropped: 0/0\n RTR Map-Register fwd deferred/dropped: 0/0\n RTR Map-Notify fwd deferred/dropped: 0/0\n ETR Info-Request deferred/dropped: 0/0\n Errors:\n Map-Request invalid source rloc drops: 0\n Map-Register 
invalid source rloc drops: 0\n DDT ITR Map-Requests dropped: 0 (nonce-collision: 0, bad-xTR-nonce: 0)\n Cache Related:\n Cache entries created/deleted: 4/2\n NSF CEF replay entry count 0\n Number of EID-prefixes in map-cache: 2\n Number of negative entries in map-cache: 1\n Total number of RLOCs in map-cache: 1\n Average RLOCs per EID-prefix: 1\n Forwarding:\n Number of data signals processed: 2 (+ dropped 0)\n Number of reachability reports: 0 (+ dropped 0)\n ITR Map-Resolvers:\n Map-Resolver LastReply Metric ReqsSent Positive Negative No-Reply\n 10.64.4.4 00:15:36 19 2 1 0 1\n 10.166.13.13 00:17:11 31 3 0 0 2\n LISP RLOC Statistics - last cleared: never\n Control Packets:\n RTR Map-Requests forwarded: 0\n RTR Map-Notifies forwarded: 0\n DDT-Map-Requests in/out: 0/0\n DDT-Map-Referrals in/out: 0/0\n Errors:\n Map-Request format errors: 0\n Map-Reply format errors: 0\n Map-Referral format errors: 0\n Mapping record TTL alerts: 0\n DDT Requests failed: 0\n LISP Miscellaneous Statistics - last cleared: never\n Errors:\n Invalid IP version drops: 0\n Invalid IP header drops: 0\n Invalid IP proto field drops: 0\n Invalid packet size dropss: 0\n Invalid LISP control port drops: 0\n Invalid LISP checksum drops: 0\n Unsupported LISP packet type drops: 0\n Unknown packet drops: 0\n '''\n\n # 'show lisp all instance-id 101 ethernet statistics'\n ShowLispInstance101EthernetStats = '''\\\n '''\n\n # --------------------------------------------------------------------------\n # 'show lisp all instance-id <instance_id> <service> server summary'\n # --------------------------------------------------------------------------\n\n ShowLispInstance101Ipv4ServerSummary = '''\n 204-MSMR#show lisp all instance-id 101 ipv4 server summary \n =====================================================\n Output for router lisp 0\n =====================================================\n ----------- IPv4 ----------- \n Site name Configured Registered Incons\n xtr1_1 1 1 0\n xtr2 1 1 0\n\n Number of configured sites: 2\n Number of registered sites: 2\n Sites with inconsistent registrations: 0\n IPv4\n Number of configured EID prefixes: 2\n Number of registered EID prefixes: 2\n '''\n\n ShowLispInstance101Ipv6ServerSummary = '''\\\n 204-MSMR#show lisp all instance-id 101 ipv6 server summary \n =====================================================\n Output for router lisp 0\n =====================================================\n ----------- IPv6 ----------- \n Site name Configured Registered Incons\n xtr1_1 1 1 0\n xtr2 1 1 0\n\n Number of configured sites: 2\n Number of registered sites: 2\n Sites with inconsistent registrations: 0\n IPv6\n Number of configured EID prefixes: 2\n Number of registered EID prefixes: 2\n '''\n\n ShowLispInstance101EthernetServerSummary = '''\\\n '''\n\n # --------------------------------------------------------------------------\n # 'show lisp all instance-id <instance-d> <service> map-cache'\n # --------------------------------------------------------------------------\n\n # 'show lisp all instance-id 101 ipv4 map-cache'\n ShowLispInstance101Ipv4MapCache= '''\\\n 202-XTR#show lisp all instance-id 101 ipv4 map-cache \n =====================================================\n Output for router lisp 0\n =====================================================\n LISP IPv4 Mapping Cache for EID-table vrf red (IID 101), 2 entries\n\n 0.0.0.0/0, uptime: 15:23:50, expires: never, via static-send-map-request\n Negative cache entry, action: send-map-request\n 192.168.9.0/24, uptime: 00:04:02, expires: 
23:55:57, via map-reply, complete\n Locator Uptime State Pri/Wgt Encap-IID\n 10.1.8.8 00:04:02 up 50/50 -\n '''\n\n # 'show lisp all instance-id 101 ipv6 map-cache'\n ShowLispInstance101Ipv6MapCache = '''\\\n 202-XTR#show lisp all instance-id 101 ipv6 map-cache \n\n =====================================================\n Output for router lisp 0\n =====================================================\n LISP IPv6 Mapping Cache for EID-table vrf red (IID 101), 2 entries\n\n ::/0, uptime: 00:11:28, expires: never, via static-send-map-request\n Negative cache entry, action: send-map-request\n 2001:192:168:9::/64, uptime: 00:06:51, expires: 23:53:08, via map-reply, complete\n Locator Uptime State Pri/Wgt Encap-IID\n 10.1.8.8 00:06:51 up 50/50 -\n 172.16.10.0/24, uptime: 00:00:00, expires: 23:59:59, via map-reply, complete\n Locator Uptime State Pri/Wgt\n 172.16.156.134 00:00:00 up 1/50\n 192.168.65.94 00:00:00 up 1/50\n 2001:DB8:BBED:2829::80DF:9C86 00:00:00 up 2/100\n '''\n\n # 'show lisp all instance-id 101 ethernet map-cache'\n ShowLispInstance101EthernetMapCache = '''\\\n '''\n\n # --------------------------------------------------------------------------\n # 'show lisp all instance-id <instance_id> <service> dabatase'\n # --------------------------------------------------------------------------\n\n # 'show lisp all instance-id 101 ipv4 database'\n ShowLispInstance101Ipv4Database = '''\\\n 202-XTR#show lisp all instance-id 101 ipv4 database \n =====================================================\n Output for router lisp 0\n =====================================================\n LISP ETR IPv4 Mapping Database for EID-table vrf red (IID 101), LSBs: 0x1\n Entries total 1, no-route 0, inactive 0\n\n 192.168.0.0/24, locator-set RLOC\n Locator Pri/Wgt Source State\n 10.16.2.2 50/50 cfg-intf site-self, reachable\n '''\n\n # 'show lisp all instance-id 101 ipv6 database'\n ShowLispInstance101Ipv6Database = '''\\\n 202-XTR#show lisp all instance-id 101 ipv6 database \n =====================================================\n Output for router lisp 0\n =====================================================\n LISP ETR IPv6 Mapping Database for EID-table vrf red (IID 101), LSBs: 0x1\n Entries total 1, no-route 0, inactive 0\n\n 2001:192:168::/64, locator-set RLOC\n Locator Pri/Wgt Source State\n 10.16.2.2 50/50 cfg-intf site-self, reachable\n '''\n\n # 'show lisp all instance-id 101 ethernet database'\n ShowLispInstance101EthernetDatabase = '''\n 202-XTR#show lisp all instance-id 101 ethernet database\n\n =================================================\n Output for router lisp 0\n =================================================\n LISP ETR MAC Mapping Database for EID-table Vlan 101 (IID 101), LSBs: 0x1\n Entries total 2, no-route 0, inactive 0\n\n 0050.56b0.6a0e/48, dynamic-eid Auto-L2-group-1, inherited from default locator-set RLOC\n Locator Pri/Wgt Source State\n 10.229.11.1 1/100 cfg-intf site-self, reachable\n cafe.cafe.cafe/48, dynamic-eid Auto-L2-group-1, inherited from default locator-set RLOC\n Locator Pri/Wgt Source State\n 10.229.11.1 1/100 cfg-intf site-self, reachable\n '''\n\n ############################################################################\n # LISP INFO STRUCTURE\n ############################################################################\n\n LispInfo = {\n 'lisp_router_instances': \n {0: \n {'lisp_router_id': \n {'site_id': 'unspecified',\n 'xtr_id': '0x730E0861-0x12996F6D-0xEFEA2114-0xE1C951F7'},\n 'lisp_router_instance_id': 0,\n 'locator_sets': \n {'RLOC': \n 
{'locator_set_name': 'RLOC'}},\n 'service': \n {'ethernet':\n {'etr':\n {'local_eids':\n {'101':\n {'dynamic_eids':\n {'0050.56b0.6a0e/48':\n {'eid_address':\n {'address_type': 'ethernet',\n 'vrf': '101'},\n 'id': '0050.56b0.6a0e/48',\n 'loopback_address': '10.229.11.1',\n 'priority': 1,\n 'rlocs': 'RLOC',\n 'weight': 100},\n 'cafe.cafe.cafe/48':\n {'eid_address':\n {'address_type': 'ethernet',\n 'vrf': '101'},\n 'id': 'cafe.cafe.cafe/48',\n 'loopback_address': '10.229.11.1',\n 'priority': 1,\n 'rlocs': 'RLOC',\n 'weight': 100}},\n 'vni': '101'}}},\n 'service': 'ethernet',\n 'virtual_network_ids':\n {'101':\n {'lisp_role':\n {'none':\n {'lisp_role_type': 'none'}}}}},\n 'ipv4':\n {'etr': \n {'enabled': True,\n 'encapsulation': 'lisp',\n 'local_eids': \n {'101': \n {'eids': \n {'192.168.0.0/24': \n {'eid_address': \n {'address_type': 'ipv4',\n 'vrf': 'red'},\n 'id': '192.168.0.0/24',\n 'loopback_address': '10.16.2.2',\n 'priority': 50,\n 'rlocs': 'RLOC',\n 'weight': 50}},\n 'use_petrs': \n {'10.10.10.10': \n {'use_petr': '10.10.10.10',\n },\n },\n 'vni': '101'}},\n 'mapping_servers': \n {'10.166.13.13': \n {'ms_address': '10.166.13.13'},\n '10.64.4.4': \n {'ms_address': '10.64.4.4'}}},\n 'itr': \n {'enabled': True,\n 'map_cache': \n {'101': \n {'mappings': \n {'0.0.0.0/0': \n {'creation_time': '15:23:50',\n 'eid': \n {'address_type': 'ipv4-afi',\n 'ipv4': \n {'ipv4': '0.0.0.0/0'},\n 'vrf': 'red'},\n 'id': '0.0.0.0/0',\n 'negative_mapping': \n {'map_reply_action': 'send-map-request'},\n 'time_to_live': 'never'},\n '192.168.9.0/24': \n {'creation_time': '00:04:02',\n 'eid': \n {'address_type': 'ipv4-afi',\n 'ipv4': \n {'ipv4': '192.168.9.0/24'},\n 'vrf': 'red'},\n 'id': '192.168.9.0/24',\n 'positive_mapping': \n {'rlocs': \n {1: \n {'id': '1',\n 'locator_address': \n {'address_type': 'ipv4-afi',\n 'ipv4': \n {'ipv4': '10.1.8.8'},\n 'virtual_network_id': '101'},\n 'priority': 50,\n 'weight': 50}}},\n 'time_to_live': '23:55:57'}},\n 'vni': '101'}},\n 'map_resolvers': \n {'10.166.13.13': \n {'map_resolver': '10.166.13.13'},\n '10.64.4.4': \n {'map_resolver': '10.64.4.4'}},\n 'proxy_itrs': \n {'10.10.10.10': \n {'proxy_etr_address': '10.10.10.10'}}},\n 'map_server': \n {'enabled': False,\n 'sites': \n {'provider': \n {'site_id': 'provider'},\n 'xtr1_1': \n {'site_id': 'xtr1_1'},\n 'xtr1_2': \n {'site_id': 'xtr1_2'},\n 'xtr2': \n {'site_id': 'xtr2'}},\n 'summary': \n {'af_datum': \n {'ipv4-afi': \n {'address_type': 'ipv4-afi',\n 'number_configured_eids': 2,\n 'number_registered_eids': 2}},\n 'number_configured_sites': 2,\n 'number_registered_sites': 2},\n 'virtual_network_ids': \n {'101': \n {'counters': \n {'map_notify_records_out': '0',\n 'map_registers_in': '0',\n 'map_registers_in_auth_failed': '0',\n 'map_requests_forwarded_out': '0',\n 'proxy_reply_records_out': '0'},\n 'extranets': \n {'ext1': \n {'extranet': 'ext1',\n 'home_instance_id': 101,\n 'subscriber': \n {'192.168.0.0/24': \n {'bidirectional': True,\n 'eid_record': '192.168.0.0/24'},\n '192.168.9.0/24': \n {'bidirectional': True,\n 'eid_record': '192.168.9.0/24'}}}},\n 'mappings': \n {'192.168.0.0/24': \n {'eid_address': \n {'address_type': 'ipv4-afi',\n 'ipv4': \n {'ipv4': '192.168.0.0/24'},\n 'virtual_network_id': '101'},\n 'eid_id': '192.168.0.0/24',\n 'more_specifics_accepted': True,\n 'site_id': 'xtr1_1'},\n '192.168.0.1/32': \n {'eid_address': \n {'address_type': 'ipv4-afi',\n 'ipv4': \n {'ipv4': '192.168.0.1/32'},\n 'virtual_network_id': '101'},\n 'eid_id': '192.168.0.1/32',\n 'mapping_records': \n 
{'0x21EDD25F-0x7598784C-0x769C8E4E-0xC04926EC': \n {'creation_time': '01:12:41',\n 'eid': \n {'address_type': 'ipv4-afi',\n 'ipv4': \n {'ipv4': '192.168.0.1/32'},\n 'virtual_network_id': '101'},\n 'site_id': 'unspecified',\n 'time_to_live': 86400,\n 'xtr_id': '0x21EDD25F-0x7598784C-0x769C8E4E-0xC04926EC'}},\n 'site_id': 'xtr1_1'},\n '192.168.9.0/24': \n {'eid_address': \n {'address_type': 'ipv4-afi',\n 'ipv4': \n {'ipv4': '192.168.9.0/24'},\n 'virtual_network_id': '101'},\n 'eid_id': '192.168.9.0/24',\n 'mapping_records': \n {'0x77200484-0xD134DC48-0x0FBAD9DC-0x4A46CA5D': \n {'creation_time': '01:55:47',\n 'eid': \n {'address_type': 'ipv4-afi',\n 'ipv4': \n {'ipv4': '192.168.9.0/24'},\n 'virtual_network_id': '101'},\n 'site_id': 'unspecified',\n 'time_to_live': 86400,\n 'xtr_id': '0x77200484-0xD134DC48-0x0FBAD9DC-0x4A46CA5D'}},\n 'site_id': 'xtr2'}},\n 'vni': '101'},\n '102': \n {'extranets': \n {'ext1': \n {'extranet': 'ext1',\n 'home_instance_id': 101,\n 'subscriber': \n {'172.16.1.0/24': \n {'bidirectional': True,\n 'eid_record': '172.16.1.0/24'}}}},\n 'vni': '102'},\n '103': \n {'extranets': \n {'ext1': \n {'extranet': 'ext1',\n 'home_instance_id': 101,\n 'provider': \n {'10.220.100.0/24': \n {'bidirectional': True,\n 'eid_record': '10.220.100.0/24'},\n '192.168.195.0/24': \n {'bidirectional': True,\n 'eid_record': '192.168.195.0/24'},\n '10.121.88.0/24': \n {'bidirectional': True,\n 'eid_record': '10.121.88.0/24'}}}},\n 'vni': '103'}}},\n 'service': 'ipv4',\n 'virtual_network_ids': \n {'101': \n {'lisp_role': \n {'itr-etr': \n {'lisp_role_type': 'itr-etr'}}}}},\n 'ipv6': \n {'etr': \n {'enabled': True,\n 'encapsulation': 'lisp',\n 'local_eids': \n {'101': \n {'eids': \n {'2001:192:168::/64': \n {'eid_address': \n {'address_type': 'ipv6',\n 'vrf': 'red'},\n 'id': '2001:192:168::/64',\n 'loopback_address': '10.16.2.2',\n 'priority': 50,\n 'rlocs': 'RLOC',\n 'weight': 50}},\n 'use_petrs': \n {'10.10.10.10': \n {'use_petr': '10.10.10.10',\n },\n },\n 'vni': '101'}},\n 'mapping_servers': \n {'10.66.12.12': \n {'ms_address': '10.66.12.12'},\n '10.100.5.5': \n {'ms_address': '10.100.5.5'}}},\n 'itr': \n {'enabled': True,\n 'map_cache': \n {'101': \n {'mappings': \n {'172.16.10.0/24': \n {'creation_time': '00:00:00',\n 'eid': \n {'address_type': 'ipv4-afi',\n 'ipv4': \n {'ipv4': '172.16.10.0/24'},\n 'vrf': 'red'},\n 'id': '172.16.10.0/24',\n 'positive_mapping': \n {'rlocs': \n {1: \n {'id': '1',\n 'locator_address': \n {'address_type': 'ipv4-afi',\n 'ipv4': \n {'ipv4': '172.16.156.134'},\n 'virtual_network_id': '101'},\n 'priority': 1,\n 'weight': 50},\n 2: \n {'id': '2',\n 'locator_address': \n {'address_type': 'ipv4-afi',\n 'ipv4': \n {'ipv4': '192.168.65.94'},\n 'virtual_network_id': '101'},\n 'priority': 1,\n 'weight': 50},\n 3: \n {'id': '3',\n 'locator_address': \n {'address_type': 'ipv6-afi',\n 'ipv6': \n {'ipv6': '2001:DB8:BBED:2829::80DF:9C86'},\n 'virtual_network_id': '101'},\n 'priority': 2,\n 'weight': 100}}},\n 'time_to_live': '23:59:59'},\n '2001:192:168:9::/64': \n {'creation_time': '00:06:51',\n 'eid': \n {'address_type': 'ipv6-afi',\n 'vrf': 'red'},\n 'id': '2001:192:168:9::/64',\n 'positive_mapping': \n {'rlocs': \n {1: \n {'id': '1',\n 'locator_address': \n {'address_type': 'ipv4-afi',\n 'ipv4': \n {'ipv4': '10.1.8.8'},\n 'virtual_network_id': '101'},\n 'priority': 50,\n 'weight': 50}}},\n 'time_to_live': '23:53:08'},\n '::/0': \n {'creation_time': '00:11:28',\n 'eid': \n {'address_type': 'ipv6-afi',\n 'vrf': 'red'},\n 'id': '::/0',\n 'negative_mapping': \n 
{'map_reply_action': 'send-map-request'},\n 'time_to_live': 'never'}},\n 'vni': '101'}},\n 'map_resolvers': \n {'10.66.12.12': \n {'map_resolver': '10.66.12.12'},\n '10.100.5.5': \n {'map_resolver': '10.100.5.5'}}},\n 'map_server': \n {'enabled': False,\n 'summary': \n {'af_datum': \n {'ipv6-afi': \n {'address_type': 'ipv6-afi',\n 'number_configured_eids': 2,\n 'number_registered_eids': 2}},\n 'number_configured_sites': 2,\n 'number_registered_sites': 2},\n 'virtual_network_ids': \n {'101': \n {'counters': \n {'map_notify_records_out': '0',\n 'map_registers_in': '0',\n 'map_registers_in_auth_failed': '0',\n 'map_requests_forwarded_out': '0',\n 'proxy_reply_records_out': '0',\n },\n },\n },\n },\n 'service': 'ipv6',\n 'virtual_network_ids': \n {'101': \n {'lisp_role': \n {'itr-etr': \n {'lisp_role_type': 'itr-etr'}}}}}}}}}\n\n", "id": "1374937", "language": "Python", "matching_score": 3.8441805839538574, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/lisp/iosxe/tests/lisp_output.py" }, { "content": "\n# Python\nimport unittest\nfrom unittest.mock import Mock\n\n# ATS\nfrom ats.topology import Device\n\n# Genie\nfrom genie.libs.ops.lisp.ios.lisp import Lisp\nfrom genie.libs.ops.lisp.ios.tests.lisp_output import LispOutput\n\n# iosxe show_lisp\nfrom genie.libs.parser.ios.show_lisp import ShowLispSession,\\\n ShowLispPlatform,\\\n ShowLispExtranet,\\\n ShowLispDynamicEidDetail,\\\n ShowLispService,\\\n ShowLispServiceMapCache,\\\n ShowLispServiceRlocMembers,\\\n ShowLispServiceSmr,\\\n ShowLispServiceSummary,\\\n ShowLispServiceDatabase,\\\n ShowLispServiceServerSummary,\\\n ShowLispServiceServerDetailInternal,\\\n ShowLispServiceStatistics\n\n\n# Set values\noutput = {}\n\n# 'show lisp all service <service>'\noutput['show lisp all service ipv4'] = LispOutput.ShowLispServiceIpv4\noutput['show lisp all service ipv6'] = LispOutput.ShowLispServiceIpv6\noutput['show lisp all service ethernet'] = LispOutput.ShowLispServiceEthernet\n\n# 'show lisp all service <service> summary'\noutput['show lisp all service ipv4 summary'] = LispOutput.ShowLispServiceIpv4Summary\noutput['show lisp all service ipv6 summary'] = LispOutput.ShowLispServiceIpv6Summary\noutput['show lisp all service ethernet summary'] = LispOutput.ShowLispServiceEthernetSummary\n\n# 'show lisp all instance-id <instance_id> <service>'\noutput['show lisp all instance-id 101 ipv4'] = LispOutput.ShowLispInstance101ServiceIpv4\noutput['show lisp all instance-id 101 ipv6'] = LispOutput.ShowLispInstance101ServiceIpv6\noutput['show lisp all instance-id 101 ethernet'] = LispOutput.ShowLispInstance101ServiceEthernet\n\n# 'show lisp all instance-id <instance_id> <service> server detail internal'\noutput['show lisp all instance-id 101 ipv4 server detail internal'] = LispOutput.ShowLispInstance101Ipv4ServerDetailInternal\noutput['show lisp all instance-id 101 ipv6 server detail internal'] = LispOutput.ShowLispInstance101Ipv6ServerDetailInternal\noutput['show lisp all instance-id 101 ethernet server detail internal'] = LispOutput.ShowLispInstance101EthernetServerDetailInternal\n\n# 'show lisp all extranet <extranet> instance-id <instance_id>'\noutput['show lisp all extranet ext1 instance-id 101'] = LispOutput.ShowLispExtranet101\n\n# 'show lisp all instance-id <instance_id> <service> statistics'\noutput['show lisp all instance-id 101 ipv4 statistics'] = LispOutput.ShowLispInstance101Ipv4Stats\noutput['show lisp all instance-id 101 ipv6 statistics'] = LispOutput.ShowLispInstance101Ipv6Stats\noutput['show lisp all instance-id 101 
ethernet statistics'] = LispOutput.ShowLispInstance101EthernetStats\n\n# 'show lisp all instance-id <instance_id> <service> server summary'\noutput['show lisp all instance-id 101 ipv4 server summary'] = LispOutput.ShowLispInstance101Ipv4ServerSummary\noutput['show lisp all instance-id 101 ipv6 server summary'] = LispOutput.ShowLispInstance101Ipv6ServerSummary\noutput['show lisp all instance-id 101 ethernet server summary'] = LispOutput.ShowLispInstance101EthernetServerSummary\n\n# 'show lisp all instance-id <instance_id> <service> map-cache'\noutput['show lisp all instance-id 101 ipv4 map-cache'] = LispOutput.ShowLispInstance101Ipv4MapCache\noutput['show lisp all instance-id 101 ipv6 map-cache'] = LispOutput.ShowLispInstance101Ipv6MapCache\noutput['show lisp all instance-id 101 ethernet map-cache'] = LispOutput.ShowLispInstance101EthernetMapCache\n\n# 'show lisp all instance-id <instance_id> <service> database'\noutput['show lisp all instance-id 101 ipv4 database'] = LispOutput.ShowLispInstance101Ipv4Database\noutput['show lisp all instance-id 101 ipv6 database'] = LispOutput.ShowLispInstance101Ipv6Database\noutput['show lisp all instance-id 101 ethernet database'] = LispOutput.ShowLispInstance101EthernetDatabase\n\n\n\ndef mapper(key):\n return output[key]\n\n\ndef empty_mapper(key):\n return ''\n\n\ndef incomplete_mapper(key):\n if key == 'show lisp all instance-id 101 ipv4':\n return ''\n else:\n return output[key]\n\n\nclass test_lisp(unittest.TestCase):\n\n def setUp(self):\n self.device = Device(name='aDevice')\n self.device.os = 'ios'\n self.device.custom['abstraction'] = {'order':['os']}\n self.device.mapping={}\n self.device.mapping['cli']='cli'\n # Give the device as a connection type\n # This is done in order to call the parser on the output provided\n self.device.connectionmgr.connections['cli'] = self.device\n\n\n def test_complete_output(self):\n self.maxDiff = None\n lisp = Lisp(device=self.device)\n\n # Return outputs above as inputs to parser when called\n self.device.execute = Mock()\n self.device.execute.side_effect = mapper\n\n # Learn the feature\n lisp.learn()\n\n # Verify Ops was created successfully\n self.assertEqual(lisp.info, LispOutput.LispInfo)\n\n\n def test_selective_attribute(self):\n self.maxDiff = None\n lisp = Lisp(device=self.device)\n\n # Return outputs above as inputs to parser when called\n self.device.execute = Mock()\n self.device.execute.side_effect = mapper\n\n # Learn the feature\n lisp.learn()\n\n # Check selective attribute\n self.assertEqual('10.166.13.13', lisp.info['lisp_router_instances'][0]\\\n ['service']['ipv4']['etr']\\\n ['mapping_servers']['10.166.13.13']\\\n ['ms_address'])\n\n\n def test_empty_output(self):\n self.maxDiff = None\n lisp = Lisp(device=self.device)\n\n # Return outputs above as inputs to parser when called\n self.device.execute = Mock()\n self.device.execute.side_effect = empty_mapper\n\n # Learn the feature\n lisp.learn()\n\n # Verify attribute is missing\n with self.assertRaises(AttributeError):\n lisp.info['lisp_router_instances']\n\n\n def test_missing_attributes(self):\n self.maxDiff = None\n lisp = Lisp(device=self.device)\n\n # Return outputs above as inputs to parser when called\n self.device.execute = Mock()\n self.device.execute.side_effect = incomplete_mapper\n\n # Learn the feature\n lisp.learn()\n\n # Verify key not created due to output missing\n with self.assertRaises(KeyError):\n ms_address = lisp.info['lisp_router_instances'][0]\\\n ['service']['ipv4']['etr']\\\n ['mapping_servers']['10.166.13.13']\\\n 
['ms_address']\n\n\nif __name__ == '__main__':\n unittest.main()\n", "id": "131575", "language": "Python", "matching_score": 1.3171952962875366, "max_stars_count": 0, "path": "pkgs/ops-pkg/src/genie/libs/ops/lisp/ios/tests/test_lisp.py" }, { "content": "'''\nLisp Genie Ops Object for IOS - CLI\n'''\nfrom ..iosxe.lisp import Lisp as LispXE\n\nclass Lisp(LispXE):\n pass", "id": "6814646", "language": "Python", "matching_score": 1, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/lisp/ios/lisp.py" }, { "content": "from .lisp import *", "id": "10751101", "language": "Python", "matching_score": 0, "max_stars_count": 0, "path": "pkgs/conf-pkg/src/genie/libs/conf/lisp/__init__.py" }, { "content": "from .nd import *\n", "id": "9188061", "language": "Python", "matching_score": 0, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/nd/__init__.py" }, { "content": "from .ldp import *\n", "id": "11986158", "language": "Python", "matching_score": 0, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/ldp/__init__.py" }, { "content": "\n__all__ = (\n 'G8032Ring',\n )\n\nfrom genie.conf.base import DeviceFeature\nimport genie.conf.base.attributes\nfrom genie.conf.base.attributes import SubAttributes, SubAttributesDict, AttributesInheriter\n\n\nclass G8032Ring(DeviceFeature):\n\n # iosxr: l2vpn / ethernet ring g8032 someword (config-l2vpn)\n # iosxr: l2vpn / ethernet ring g8032 someword / exclusion-list vlan-ids 100-200,300\n # iosxr: l2vpn / ethernet ring g8032 someword / instance 1 (config-l2vpn)\n # iosxr: l2vpn / ethernet ring g8032 someword / instance 1 / aps-channel (config-l2vpn)\n # iosxr: l2vpn / ethernet ring g8032 someword / instance 1 / aps-channel / level <0-7>\n # iosxr: l2vpn / ethernet ring g8032 someword / instance 1 / aps-channel / port0 interface Bundle-Ether1\n # iosxr: l2vpn / ethernet ring g8032 someword / instance 1 / aps-channel / port1 bridge-domain someword2\n # iosxr: l2vpn / ethernet ring g8032 someword / instance 1 / aps-channel / port1 interface Bundle-Ether1\n # iosxr: l2vpn / ethernet ring g8032 someword / instance 1 / aps-channel / port1 none\n # iosxr: l2vpn / ethernet ring g8032 someword / instance 1 / aps-channel / port1 xconnect someword2\n # iosxr: l2vpn / ethernet ring g8032 someword / instance 1 / description someword2\n # iosxr: l2vpn / ethernet ring g8032 someword / instance 1 / inclusion-list vlan-ids someword2\n # iosxr: l2vpn / ethernet ring g8032 someword / instance 1 / profile someword2\n # iosxr: l2vpn / ethernet ring g8032 someword / instance 1 / rpl port0 neighbor\n # iosxr: l2vpn / ethernet ring g8032 someword / instance 1 / rpl port0 next-neighbor\n # iosxr: l2vpn / ethernet ring g8032 someword / instance 1 / rpl port0 owner\n # iosxr: l2vpn / ethernet ring g8032 someword / instance 1 / rpl port1 neighbor\n # iosxr: l2vpn / ethernet ring g8032 someword / instance 1 / rpl port1 next-neighbor\n # iosxr: l2vpn / ethernet ring g8032 someword / instance 1 / rpl port1 owner\n # iosxr: l2vpn / ethernet ring g8032 someword / open-ring\n # iosxr: l2vpn / ethernet ring g8032 someword / port0 interface Bundle-Ether1 (config-l2vpn)\n # iosxr: l2vpn / ethernet ring g8032 someword / port0 interface Bundle-Ether1 / monitor interface Bundle-Ether1\n # iosxr: l2vpn / ethernet ring g8032 someword / port1 interface Bundle-Ether1 (config-l2vpn)\n # iosxr: l2vpn / ethernet ring g8032 someword / port1 interface Bundle-Ether1 / monitor interface Bundle-Ether1\n # iosxr: l2vpn / ethernet ring g8032 someword / port1 none\n # iosxr: 
l2vpn / ethernet ring g8032 someword / port1 virtual\n\n def __init__(self):\n raise NotImplementedError\n\n", "id": "3169185", "language": "Python", "matching_score": 1.5513031482696533, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/l2vpn/g8032.py" }, { "content": "from .l2vpn import *\nfrom .bridge_domain import *\nfrom .xconnect import *\nfrom .pseudowire import *\nfrom .vfi import *\n#from .g8032 import *\nfrom .iccp_group import *\n\n", "id": "1850555", "language": "Python", "matching_score": 1.3336458206176758, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/l2vpn/__init__.py" }, { "content": "\"\"\"Common configure functions for mac\"\"\"\n\n# Python\nimport logging\n\n# Unicon\nfrom unicon.core.errors import SubCommandFailure\n\nlog = logging.getLogger(__name__)\n\n\ndef config_mac_aging_time(device, bridge_domain, aging_time):\n \"\"\" Config mac-aging time under bridge domain\n\n Args:\n device (`obj`): device object\n bridge_domain (`int`): bridge domain id\n aging_time (`int`): mac aging-time\n Return:\n None\n Raises:\n SubCommandFailure: Failed configuring device\n \"\"\"\n log.info(\n \"Configuring mac aging-time to {} seconds under \"\n \"bridge domain {}\".format(aging_time, bridge_domain)\n )\n try:\n device.configure(\n [\n \"bridge-domain {}\".format(bridge_domain),\n \"mac aging-time {}\".format(aging_time),\n ]\n )\n except SubCommandFailure:\n raise SubCommandFailure(\n \"Could not configure aging time under bridge domain {bridge_domain}\".format(\n bridge_domain=bridge_domain\n )\n )\n\n\ndef unconfig_mac_aging_time(device, bridge_domain):\n \"\"\" Unconfig mac-aging time under bridge domain\n\n Args:\n device (`obj`): device object\n bridge_domain (`int`): bridge domain id\n Return:\n None\n Raises:\n SubCommandFailure: Failed configuring device\n \"\"\"\n log.info(\n \"Removing mac aging-time configuration under \"\n \"bridge domain {}\".format(bridge_domain)\n )\n try:\n device.configure(\n [\"bridge-domain {}\".format(bridge_domain), \"no mac aging-time\"]\n )\n except SubCommandFailure:\n raise SubCommandFailure(\n \"Could not unconfigure aging time under bridge domain {bridge_domain}\".format(\n bridge_domain=bridge_domain\n )\n )\n\n\ndef config_mac_learning(device, bridge_domain):\n \"\"\" Config mac learning under bridge domain\n\n Args:\n device (`obj`): device object\n bridge_domain (`int`): bridge domain id\n Return:\n None\n Raises:\n SubCommandFailure: Failed configuring device\n \"\"\"\n log.info(\n \"Configuring mac learning under bridge domain {}\".format(bridge_domain)\n )\n try:\n device.configure(\n [\"bridge-domain {}\".format(bridge_domain), \"mac learning\"]\n )\n except SubCommandFailure:\n raise SubCommandFailure(\n \"Could not configure mac learning under bridge domain {bridge_domain}\".format(\n bridge_domain=bridge_domain\n )\n )\n\n\ndef unconfig_mac_learning(device, bridge_domain):\n \"\"\" Unconfig mac learning under bridge domain\n\n Args:\n device (`obj`): device object\n bridge_domain (`int`): bridge domain id\n Return:\n None\n Raises:\n SubCommandFailure: Failed configuring device\n \"\"\"\n log.info(\n \"Removing mac learning under bridge domain {}\".format(bridge_domain)\n )\n try:\n device.configure(\n [\"bridge-domain {}\".format(bridge_domain), \"no mac learning\"]\n )\n except SubCommandFailure:\n raise SubCommandFailure(\n \"Could not unconfigure mac learning under bridge domain {bridge_domain}\".format(\n bridge_domain=bridge_domain\n )\n )\n", "id": "4020034", "language": "Python", 
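# --- Editorial sketch, not part of the original sources ---\n# Minimal usage of the bridge-domain MAC helpers defined just above; assumes\n# a connected pyATS 'device' object. The bridge-domain id (100) and aging\n# time (300 seconds) below are illustrative values only.\nconfig_mac_learning(device, bridge_domain=100)\nconfig_mac_aging_time(device, bridge_domain=100, aging_time=300)\n# ... later, to restore the defaults:\nunconfig_mac_aging_time(device, bridge_domain=100)\nunconfig_mac_learning(device, bridge_domain=100)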
"matching_score": 2.2236177921295166, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/mac/configure.py" }, { "content": "\"\"\"Common get info functions for mac\"\"\"\n\n# Python\nimport logging\n\n# Genie\nfrom genie.metaparser.util.exceptions import SchemaEmptyParserError\n\nlog = logging.getLogger(__name__)\n\n\ndef get_mac_aging_timer(device, bridge_domain):\n \"\"\" Get Aging-Timer from bridge domain\n\n Args:\n device (`obj`): device object\n bridge_domain (`int`): bridge domain id\n Return:\n aging_time (`int`): aging-time in second\n None \n Raises:\n None\n \"\"\"\n try:\n out = device.parse(\"show bridge-domain {}\".format(bridge_domain))\n except SchemaEmptyParserError as e:\n return None\n\n aging_time = out[\"bridge_domain\"][bridge_domain][\"aging_timer\"]\n\n return aging_time\n\n\ndef get_mac_table(device, bridge_domain):\n \"\"\" Get mac table from bridge domain\n\n Args:\n device (`obj`): device object\n bridge_domain (`int`): bridge domain id\n Return:\n mac_table (`dict`): mac table dict\n None: When nothing has been found\n Raises:\n None\n \"\"\"\n try:\n out = device.parse(\"show bridge-domain {}\".format(bridge_domain))\n except SchemaEmptyParserError as e:\n return None\n\n mac_table = out[\"bridge_domain\"][bridge_domain].get(\"mac_table\")\n\n return mac_table\n", "id": "8284160", "language": "Python", "matching_score": 2.2212581634521484, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/mac/get.py" }, { "content": "\"\"\"Common get info functions for routing\"\"\"\n\n# Python\nimport os\nimport logging\nimport re\n\nfrom genie.metaparser.util.exceptions import SchemaEmptyParserError\n\nlog = logging.getLogger(__name__)\n\n\ndef get_bridge_domain_bridge_domain_mac_count(device, timeout_parse=500):\n \"\"\" Get bridge domain mac count\n\n Args:\n device(`obj`): Device object \n timeout_parse ('int'): Timeout in seconds for command device.parse('show bridge-domain')\n Returns:\n integer: mac count\n Raises:\n None\n \"\"\"\n mac_count = 0\n\n log.info(\"Getting MAC count\")\n device.execute.timeout = timeout_parse\n try:\n output_bd = device.parse(\"show bridge-domain\")\n except SchemaEmptyParserError as e:\n return mac_count\n\n device.execute.timeout = 60\n\n for bd in output_bd.get(\"bridge_domain\", []):\n for intf in (\n output_bd[\"bridge_domain\"].get(bd, {}).get(\"mac_table\", [])\n ):\n mac_count += len(\n output_bd[\"bridge_domain\"][bd]\n .get(\"mac_table\", {})\n .get(intf, {})\n .get(\"mac_address\", {})\n .keys()\n )\n\n log.info(\"MAC count is {}\".format(mac_count))\n\n return mac_count\n\n\ndef get_bridge_domain_bridge_domain_interfaces(device, bridge_domain_id):\n \"\"\" Get list of interfaces using bridge-domain id\n\n Args:\n bridge_domain_id('int'): bridge-domain id to get interfaces\n device ('obj'): Device object\n\n Returns:\n list of interfaces\n Raises:\n None\n \"\"\"\n out = {}\n interfaces = []\n\n try:\n out = device.parse(\n \"show bridge-domain {bridge_domain_id}\".format(\n bridge_domain_id=bridge_domain_id\n )\n )\n\n except SchemaEmptyParserError as e:\n return interfaces\n\n try:\n interfaces = out[\"bridge_domain\"][bridge_domain_id][\n \"split-horizon_group\"\n ][\"0\"][\"interfaces\"]\n\n except KeyError as e:\n pass\n\n return interfaces\n", "id": "8562474", "language": "Python", "matching_score": 2.4333722591400146, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/bridge_domain/get.py" }, { "content": "\"\"\"Common verification functions for 
bgp\"\"\"\n\n# Python\nimport logging\n\n# Genie\nfrom genie.utils.timeout import Timeout\n\n# Pyats\nfrom genie.metaparser.util.exceptions import SchemaEmptyParserError\n\n# VRF\nfrom genie.libs.sdk.apis.iosxe.vrf.get import get_vrf_interface\n\n# BRIDGE-DOMAIN\nfrom genie.libs.sdk.apis.iosxe.bridge_domain.get import (\n get_bridge_domain_bridge_domain_interfaces,\n)\n\nlog = logging.getLogger(__name__)\n\n\ndef verify_vpls_same_interface_for_l2vpn_and_l3vpn(\n device, vrf, bridge_domain_id\n):\n \"\"\" Verify that subinterface for L3VPN vrf and service instance for L2VPN bridge-domain belong to same interface\n\n Args:\n device('obj'): device object \n vrf ('str'): vrf name\n bridge_domain_id ('int'): bridge-domain id\n Returns:\n True\n False\n Raises:\n None\n \"\"\"\n\n # Get Interfaces using L2VPN vrf name on device uut\n l2vpn_interfaces = []\n\n l2vpn_interfaces = get_vrf_interface(device=device, vrf=vrf)\n\n if not l2vpn_interfaces:\n return False\n\n # Get Interfaces using bridge-domain id on device uut\n l3vpn_interfaces = []\n try:\n l3vpn_interfaces = get_bridge_domain_bridge_domain_interfaces(\n device=device, bridge_domain_id=bridge_domain_id\n )\n\n except SchemaEmptyParserError as e:\n return False\n\n if l2vpn_interfaces and l3vpn_interfaces:\n\n for intf_1 in l2vpn_interfaces:\n # Get interface name by spliting the subinterface name by '.'\n intf_1 = intf_1.split(\".\")[0]\n\n for intf_2 in l3vpn_interfaces:\n # Get interface name by spliting the string using space\n intf_2 = intf_2.split(\" \")[0]\n\n if intf_1 == intf_2:\n\n log.info(\n \"Subinterface for L3VPN vrf {}\"\n \" and service instance for L2VPN bridge-domain {}\"\n \" belong to same interface {} for the device {}\".format(\n vrf, bridge_domain_id, intf_1, device.name\n )\n )\n\n return True\n\n return False\n", "id": "9136718", "language": "Python", "matching_score": 1.865397334098816, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/l2vpn/vpls/verify.py" }, { "content": "\"\"\"Common configure functions for subscriber\"\"\"\n# Python\nimport os\nimport logging\n\n# Unicon\nfrom unicon.core.errors import SubCommandFailure\n\nlog = logging.getLogger(__name__)\n\n\ndef remove_subscriber(\n device, bgp_as, bridge_id, bridge_interface, vpn_id, vpn_interface\n):\n \"\"\" Remove subscriber\n\n Args:\n device ('obj'): Device object\n bgp_as ('str'): BGP AS\n bridge_id ('str'): Bridge Id\n bridge_interface ('str'): Bridge interface\n vpn_id ('str'): VPN id\n vpn_interface ('str'): VPN interface\n Returns:\n None\n Raise:\n SubCommandFailure\n \"\"\"\n\n config_cmd = (\n \"interface {bridge_interface}\\n\"\n \"no service instance {bridge_id} ethernet\\n\"\n \"bridge-domain {bridge_id}\\n\"\n \"no member vfi VPLS-{bridge_id}\\n\"\n \"no bridge-domain {bridge_id}\\n\"\n \"no l2vpn vfi context VPLS-{bridge_id}\\n\"\n \"no vrf definition L3VPN-{vpn_id}\\n\"\n \"no interface {vpn_interface}.{vpn_id}\\n\"\n \"router bgp {as_n}\\n\"\n \"no address-family ipv4 vrf L3VPN-{vpn_id}\".format(\n bridge_interface=bridge_interface,\n bridge_id=bridge_id,\n vpn_id=vpn_id,\n vpn_interface=vpn_interface,\n as_n=bgp_as,\n )\n )\n\n try:\n out = device.configure(config_cmd)\n except SubCommandFailure as e:\n raise SubCommandFailure(\n \"Failed in unconfiguring subscriber \"\n \"on interface {bridge_interface} \"\n \"with bridge id {bridge_id} \"\n \"on device {device}, \"\n \"Error: {e}\".format(\n bridge_interface=bridge_interface,\n bridge_id=bridge_id,\n device=device.name,\n e=str(e),\n )\n ) from e\n", 
"id": "5497099", "language": "Python", "matching_score": 3.1581170558929443, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/subscriber/configure.py" }, { "content": "\"\"\"Common configure functions for vrf\"\"\"\n\n# Python\nimport logging\n\n# Unicon\nfrom unicon.core.errors import SubCommandFailure\n\n\nlog = logging.getLogger(__name__)\n\n\ndef configure_vrf_description(device, vrf, description):\n \"\"\"Configure vrf description\n\n Args:\n device (`obj`): Device object\n vrf (`str`): VRF name\n description(`str`): Description\n\n Returns:\n None\n Raises:\n SubCommandFailure\n \"\"\"\n try:\n device.configure(\n [\n \"vrf definition {vrf}\".format(vrf=vrf),\n \"description {description}\".format(description=description),\n ]\n )\n except SubCommandFailure:\n raise SubCommandFailure(\n \"Could not configure description '{desc}' on \"\n \"vrf {vrf}\".format(desc=description, vrf=vrf)\n )\n\n\ndef unconfigure_vrf_description(device, vrf):\n \"\"\"Unconfigure vrf description\n\n Args:\n device (`obj`): Device object\n vrf (`str`): VRF name\n\n Returns:\n None\n\n Raises:\n SubCommandFailure \n \"\"\"\n try:\n device.configure(\n [\"vrf definition {vrf}\".format(vrf=vrf), \"no description\"]\n )\n except SubCommandFailure:\n raise SubCommandFailure(\n \"Could not remove description on \" \"vrf {vrf}\".format(vrf=vrf)\n )\n", "id": "10647689", "language": "Python", "matching_score": 0.4968910813331604, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/vrf/configure.py" }, { "content": "import re\r\n\r\nfrom unicon.core.errors import SubCommandFailure\r\n\r\n\r\ndef get_issu_space_info_on_disk(device, disk, output=\"\"):\r\n \"\"\" Get free and total space on disk\r\n Args:\r\n device ('obj'): Device object\r\n disk ('str'): Disk name\r\n output ('str'): Output from command 'dir {disk}'\r\n Return: \r\n list: \r\n bytes_total ('int'): Total space on disk in bytes\r\n bytes_free ('int'): Free space on disk in bytes\r\n Raise:\r\n SubCommandFailure: Failed executing dir command\r\n \"\"\"\r\n\r\n if not output:\r\n try:\r\n output = device.execute(\"dir {disk}:\".format(disk=disk))\r\n except SubCommandFailure as e:\r\n raise SubCommandFailure(\r\n \"Unable to execute 'dir {disk}'\".format(disk)\r\n )\r\n\r\n m = re.search(\r\n \"(?P<total>(\\d+)) +bytes +total +\\((?P<free>(\\d+)) \" \"+bytes +free\\)\",\r\n output,\r\n )\r\n\r\n bytes_total = int(m.groupdict()[\"total\"])\r\n bytes_free = int(m.groupdict()[\"free\"])\r\n\r\n return bytes_total, bytes_free\r\n\r\n\r\ndef get_issu_free_space_on_disk(device, disk, output=\"\"):\r\n \"\"\" Get free space information on disk\r\n Args:\r\n device ('obj'): Device object\r\n disk ('str'): Disk name\r\n output ('str'): Output from command 'dir {disk}'\r\n Return: \r\n Integer: Free space on disk in bytes\r\n Raises: \r\n SubCommandFailure: Failed getting free space info on device\r\n \"\"\"\r\n try:\r\n return get_issu_space_info_on_disk(\r\n device=device, disk=disk, output=output\r\n )[1]\r\n except SubCommandFailure as e:\r\n raise SubCommandFailure(str(e))\r\n", "id": "5570519", "language": "Python", "matching_score": 1.0177109241485596, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/issu/get.py" }, { "content": "\n__all__ = (\n 'ESI',\n)\n\nimport functools\nfrom copy import copy\nimport collections.abc\n\nfrom genie.libs.conf.base import MAC, IPv4Address\n\n\n@functools.total_ordering\nclass ESI(object):\n '''Representation of a EVPN Ethernet Segment Identifier 
(ESI).\n '''\n\n __slots__ = {'_type', '_bytes'}\n\n @classmethod\n def create_type_0(cls, value):\n return cls(value, type=0)\n\n @classmethod\n def create_type_1(cls, system_mac, port_key):\n return cls(\n (1,)\n + tuple(MAC(system_mac).packed)\n + tuple(int(port_key).to_bytes(2, 'big'))\n + (0,)\n )\n\n @classmethod\n def create_type_2(cls, root_bridge_mac, root_bridge_priority):\n return cls(\n (2,)\n + tuple(MAC(root_bridge_mac).packed)\n + tuple(int(root_bridge_priority).to_bytes(2, 'big'))\n + (0,)\n )\n\n @classmethod\n def create_type_3(cls, system_mac, local_discriminator):\n return cls(\n (3,)\n + tuple(MAC(system_mac).packed)\n + tuple(int(local_discriminator).to_bytes(3, 'big'))\n )\n\n @classmethod\n def create_type_4(cls, router_id, local_discriminator):\n return cls(\n (4,)\n + tuple(IPv4Address(router_id).packed)\n + tuple(int(local_discriminator).to_bytes(4, 'big'))\n + (0,)\n )\n\n @classmethod\n def create_type_5(cls, asn, local_discriminator):\n return cls(\n (5,)\n + tuple(int(asn).to_bytes(4, 'big'))\n + tuple(int(local_discriminator).to_bytes(4, 'big'))\n + (0,)\n )\n\n def __init__(self, value, type=None):\n '''\n Args:\n value (:obj:`ESI` to copy, :obj:`str`, sequence of :obj:`int` bytes, or :obj:`int`): The ESI value.\n type (int): The ESI type (Default 0).\n '''\n _type = type\n del type\n super().__init__()\n\n if isinstance(value, ESI):\n # Copy constructor\n if _type is not None and _type != value.type:\n raise ValueError('cannot switch ESI type '\n 'using copy constructor!')\n self.bytes = copy(value.bytes)\n self.type = value.type\n return\n\n if isinstance(value, str):\n if value == '::':\n self.bytes = [0] * 9\n else:\n if '::' in value:\n try:\n prefix, suffix = value.split('::')\n except ValueError:\n raise ValueError('too many :: in ESI: %r' % (value,))\n prefix = prefix.split(':') if prefix else []\n suffix = suffix.split(':') if suffix else []\n if len(prefix) + len(suffix) >= 9:\n raise ValueError('compressed ESI must contain fewer than 9 explicit bytes: %r' % (value,))\n bytes = (\n [int(v, 16) for v in prefix]\n + [0] * (9 - len(prefix) - len(suffix))\n + [int(v, 16) for v in suffix])\n else:\n if '.' 
in value:\n bytes = [int(v, 16) for v in value.split('.')]\n if len(bytes) == 5:\n # dotted_hex5words_with_type\n words = bytes\n bytes = []\n for word in words:\n bytes += [word >> 8, word & 0xFF]\n else:\n bytes = [int(v, 16) for v in value.split(':')]\n if len(bytes) == 10:\n if _type is not None and _type != int(bytes[0]):\n raise ValueError('type argument does not match value')\n _type, *self.bytes = bytes\n else:\n self.bytes = bytes\n\n elif isinstance(value, collections.abc.Sequence) and len(value) == 10:\n if _type is not None and _type != int(value[0]):\n raise ValueError('type argument does not match value')\n _type, *self.bytes = value\n\n elif isinstance(value, collections.abc.Sequence) and len(value) == 9:\n self.bytes = value\n\n elif isinstance(value, int):\n self.value = value\n\n else:\n raise TypeError('bad ESI format: %r' % (value,))\n\n if _type is None:\n _type = 0\n self.type = _type\n\n @property\n def bytes(self):\n '''`tuple` of `int`: ESI value as a sequence of bytes (w/o type)'''\n return self._bytes\n\n @bytes.setter\n def bytes(self, value):\n if isinstance(value, str) \\\n or not isinstance(value, collections.abc.Sequence):\n raise ValueError('ESI bytes sequence expected: %r' % (value,))\n value = tuple(\n (int(byte, 16) if isinstance(byte, str) else int(byte))\n for byte in value)\n if len(value) != 9:\n raise ValueError('ESI not composed of 9 bytes: %r' % (value,))\n for byte in value:\n if not (0 <= byte <= 0xff):\n raise ValueError('ESI bytes out of range: %r' % (value,))\n self._bytes = value\n\n @property\n def bytes_with_type(self):\n return (self.type,) + self.bytes\n\n @property\n def words_with_type(self):\n bytes = self.bytes_with_type\n words = []\n while bytes:\n b1, b2, *bytes = bytes\n words.append(b1 << 8 | b2)\n return tuple(words)\n\n @property\n def type(self):\n '''`int`: Type byte of the ESI'''\n return self._type\n\n @type.setter\n def type(self, value):\n if not isinstance(value, int) or not (0 <= int(value) <= 5):\n raise ValueError('invalid ESI type: %r' % (value,))\n self._type = int(value)\n\n @property\n def value(self):\n '''`int`: ESI value as an integer (w/o type)'''\n return sum([\n v << (8 * shift)\n for shift, v in enumerate(reversed(self.bytes))])\n\n @value.setter\n def value(self, value):\n if not isinstance(value, int):\n raise ValueError('ESI integer value expected: %r' % (value,))\n if not (0 <= value <= 0xffffffffffffffffff):\n raise ValueError('ESI integer value out of range: %r' % (value,))\n self.bytes = [\n value >> (8 * shift) & 0xff\n for shift in reversed(range(9))]\n\n @property\n def system_mac(self):\n if self.type == 1:\n return MAC(':'.join(str(octet) for octet in self.bytes[0:6]))\n if self.type == 3:\n return MAC(':'.join(str(octet) for octet in self.bytes[0:6]))\n raise AttributeError\n\n @property\n def root_bridge_mac(self):\n if self.type == 2:\n return MAC(':'.join(str(octet) for octet in self.bytes[0:6]))\n raise AttributeError\n\n @property\n def router_id(self):\n if self.type == 4:\n return IPv4Address('.'.join(str(octet) for octet in self.bytes[0:4]))\n raise AttributeError\n\n @property\n def asn(self):\n if self.type == 5:\n return int.from_bytes(self.bytes[0:4], 'big')\n raise AttributeError\n\n @property\n def port_key(self):\n if self.type == 1:\n return int.from_bytes(self.bytes[6:8], 'big')\n raise AttributeError\n\n @property\n def root_bridge_priority(self):\n if self.type == 2:\n return int.from_bytes(self.bytes[6:8], 'big')\n raise AttributeError\n\n @property\n def 
local_discriminator(self):\n if self.type == 3:\n return int.from_bytes(self.bytes[6:9], 'big')\n if self.type == 4:\n return int.from_bytes(self.bytes[4:8], 'big')\n if self.type == 5:\n return int.from_bytes(self.bytes[4:8], 'big')\n raise AttributeError\n\n def __eq__(self, other):\n if not isinstance(other, ESI):\n try:\n other = self.__class__(other)\n except Exception:\n return NotImplemented\n return (self.type, self.bytes) == (other.type, other.bytes)\n\n def __lt__(self, other):\n if not isinstance(other, ESI):\n try:\n other = self.__class__(other)\n except Exception:\n return NotImplemented\n return (self.type, self.bytes) < (other.type, other.bytes)\n\n def __hash__(self):\n # TODO mutable! return hash((self.type, self.bytes))\n return 0\n\n def __repr__(self):\n return 'ESI(%r, type=%r)' % (str(self), self.type)\n\n def __str__(self):\n '''`str`: String representation of the ESI bytes (xx:xx:...:xx).'''\n return ':'.join('%02x' % (byte,) for byte in self.bytes)\n\n def __copy__(self):\n return self.__class__(self)\n\n @property\n def dotted(self):\n '''`str`: String representation of the ESI bytes (xx.xx.....xx).'''\n return '.'.join('%02x' % (byte,) for byte in self.bytes)\n\n @property\n def dotted_with_type(self):\n '''`str`: String representation of the ESI bytes (tt.xx.xx.....xx).'''\n return '.'.join('%02x' % (byte,) for byte in self.bytes_with_type)\n\n @property\n def dotted_hex5words_with_type(self):\n '''`str`: String representation of the ESI bytes (ttxx.....xxxx).'''\n return '.'.join('%04x' % (word,) for word in self.words_with_type)\n\n def __format__(self, format_spec):\n if len(format_spec) == 0 or format_spec == 'x:x:x:x:x:x:x:x:x':\n return str(self)\n if format_spec == 'x.x.x.x.x.x.x.x.x':\n return self.dotted\n if format_spec == 't.x.x.x.x.x.x.x.x.x':\n return self.dotted_with_type\n if format_spec == 'tx.xx.xx.xx.xx':\n return self.dotted_hex5words_with_type\n raise ValueError('Invalid format specifier: ' + format_spec)\n\n", "id": "4769254", "language": "Python", "matching_score": 2.0464589595794678, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/evpn/esi.py" }, { "content": "\"\"\"OSI/ISIS Network Entity Title (NET) implementation\n\"\"\"\n\n__all__ = (\n 'IsisAreaAddress',\n 'IsisSystemID',\n 'IsisNET',\n)\n\nfrom copy import copy\nimport re\nimport functools\n\nfrom genie.decorator import managedattribute\n\nre_area_address = r'(?:[A-Za-z0-9]{2}(?:\\.[A-Za-z0-9]{4}){0,6})'\nre_system_id = r'(?:[A-Za-z0-9]{4}\\.[A-Za-z0-9]{4}\\.[A-Za-z0-9]{4})'\nre_nsel = r'(?:[A-Za-z0-9]{2})'\n\n\n@functools.total_ordering\nclass IsisAreaAddress(object):\n\n value = managedattribute(\n name='value',\n fdel=None)\n\n @value.setter\n def value(self, value):\n m = re.match(r'^' + re_area_address + '$', value)\n if not m:\n raise ValueError(value)\n self._value = value.upper()\n\n def __init__(self, value=None):\n\n if isinstance(value, IsisAreaAddress):\n # Copy constructor\n self.value = value.value\n return\n\n if isinstance(value, str):\n self.value = value\n return\n\n raise TypeError(value)\n\n def __eq__(self, other):\n if not isinstance(other, IsisAreaAddress):\n try:\n other = self.__class__(other)\n except Exception:\n return NotImplemented\n return self.value == other.value\n\n def __lt__(self, other):\n if not isinstance(other, IsisAreaAddress):\n try:\n other = self.__class__(other)\n except Exception:\n return NotImplemented\n return self.value < other.value\n\n def __hash__(self):\n # TODO mutable!\n return 0\n\n def __repr__(self):\n 
return '%s(%r)' % (self.__class__.__name__, str(self))\n\n def __str__(self):\n return self.value\n\n def __copy__(self):\n return self.__class__(self)\n\n\n@functools.total_ordering\nclass IsisSystemID(object):\n\n value = managedattribute(\n name='value',\n fdel=None)\n\n @value.setter\n def value(self, value):\n m = re.match(r'^' + re_system_id + '$', value)\n if not m:\n raise ValueError(value)\n self._value = value.upper()\n\n def __init__(self, value=None):\n\n if isinstance(value, IsisSystemID):\n # Copy constructor\n self.value = value.value\n return\n\n if isinstance(value, str):\n self.value = value\n return\n\n raise TypeError(value)\n\n def __eq__(self, other):\n if not isinstance(other, IsisSystemID):\n try:\n other = self.__class__(other)\n except Exception:\n return NotImplemented\n return self.value == other.value\n\n def __lt__(self, other):\n if not isinstance(other, IsisSystemID):\n try:\n other = self.__class__(other)\n except Exception:\n return NotImplemented\n return self.value < other.value\n\n def __hash__(self):\n # TODO mutable!\n return 0\n\n def __repr__(self):\n return '%s(%r)' % (self.__class__.__name__, str(self))\n\n def __str__(self):\n return self.value\n\n def __copy__(self):\n return self.__class__(self)\n\n\n@functools.total_ordering\nclass IsisNET(object):\n\n area_address = managedattribute(\n name='area_address',\n fdel=None,\n type=IsisAreaAddress)\n\n system_id = managedattribute(\n name='system_id',\n fdel=None,\n type=IsisSystemID)\n\n nsel = managedattribute(\n name='nsel',\n fdel=None)\n\n @nsel.setter\n def nsel(self, value):\n m = re.match(r'^' + re_nsel + '$', value)\n if not m:\n raise ValueError(value)\n self._nsel = value.upper()\n\n def __init__(self, value=None, **kwargs):\n if value is None:\n if 'area_address' not in kwargs or 'system_id' not in kwargs:\n raise TypeError('area_address and system_id arguments mandatory.')\n kwargs.setdefault('nsel', '00')\n for attr in (\n 'area_address',\n 'system_id',\n 'nsel',\n ):\n v = kwargs.pop(attr)\n setattr(self, attr, v)\n if kwargs:\n raise TypeError('Unexpected keyword arguments: {}'\\\n .format(', '.join(kwargs.keys())))\n return\n\n if kwargs:\n raise TypeError('Provide either value or kwargs, not both.')\n\n if isinstance(value, IsisNET):\n # Copy constructor\n for attr in (\n 'area_address',\n 'system_id',\n 'nsel',\n ):\n v = getattr(value, attr)\n setattr(self, attr, v)\n return\n\n if isinstance(value, str):\n m = re.match(r'^'\n r'(?P<area_address>' + re_area_address + r')'\n r'\\.(?P<system_id>' + re_system_id + r')'\n r'\\.(?P<nsel>' + re_nsel + r')'\n r'$', value)\n if not m:\n raise ValueError(value)\n for k, v in m.groupdict().items():\n setattr(self, k, v)\n return\n\n raise TypeError(value)\n\n def __eq__(self, other):\n if not isinstance(other, IsisNET):\n try:\n other = self.__class__(other)\n except Exception:\n return NotImplemented\n return \\\n (self.area_address, self.system_id, self.nsel) \\\n == \\\n (other.area_address, other.system_id, other.nsel)\n\n def __lt__(self, other):\n if not isinstance(other, IsisNET):\n try:\n other = self.__class__(other)\n except Exception:\n return NotImplemented\n return \\\n (self.area_address, self.system_id, self.nsel) \\\n < \\\n (other.area_address, other.system_id, other.nsel)\n\n def __hash__(self):\n # TODO mutable!\n return 0\n\n def __repr__(self):\n return '%s(%r)' % (self.__class__.__name__, str(self))\n\n def __str__(self):\n return '{}.{}.{}'.format(\n self.area_address,\n self.system_id,\n self.nsel)\n\n def 
__copy__(self):\n return self.__class__(self)\n\n", "id": "4432164", "language": "Python", "matching_score": 1.1652214527130127, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/isis/isis_net.py" }, { "content": "from .isis import *\nfrom .isis_net import *\n", "id": "2098806", "language": "Python", "matching_score": 0, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/isis/__init__.py" }, { "content": "from .rsvp import *\n", "id": "8198085", "language": "Python", "matching_score": 0, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/rsvp/__init__.py" }, { "content": "from .te import *\n", "id": "8809032", "language": "Python", "matching_score": 0, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/te/__init__.py" }, { "content": "from .pim import *\nfrom .rp_address import *\n", "id": "5204786", "language": "Python", "matching_score": 0.04542161524295807, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/pim/__init__.py" }, { "content": "'''\n Nd Genie Ops Object Outputs for NXOS.\n'''\n\nclass NdOutput(object):\n\n showIpv6NeighborDetail = {\n 'interfaces':{\n 'Ethernet1/1':{\n 'interface': 'Ethernet1/1',\n 'neighbors': {\n '2001:db8:c56d:4::2': {\n 'ip': '2001:db8:c56d:4::2',\n 'link_layer_address': 'fa16.3e82.6320',\n 'age': '00:09:27',\n 'preference': 50,\n 'origin': 'other',\n 'physical_interface': 'Ethernet1/1',\n 'packet_count': 0,\n 'byte_count': 0,\n 'best': 'Yes',\n 'throttled': 'No',\n },\n },\n },\n 'Ethernet1/2':{\n 'interface': 'Ethernet1/2',\n 'neighbors':{\n '2001:db8:c8d1:4::33': {\n 'ip': '2001:db8:c8d1:4::33',\n 'link_layer_address': 'aaaa.bbbb.cccc',\n 'age': '2d15h',\n 'preference': 1,\n 'origin': 'static',\n 'physical_interface': 'Ethernet1/2',\n 'packet_count': 0,\n 'byte_count': 0,\n 'best': 'Yes',\n 'throttled': 'No',\n },\n },\n },\n },\n }\n showIpv6NdInterface = {\n \"vrf\": {\n \"vrf1\": {\n \"interfaces\": {\n \"Ethernet1/2\": {\n \"router_advertisement\": {\n \"default_router_preference\": \"medium\",\n \"interval\": 600,\n \"retrans_timer\": 0,\n \"suppress_mtu\": False,\n \"current_hop_limit\": 64,\n \"reachable_time\": 0,\n \"mtu\": 1500,\n \"suppress\": False,\n \"other_stateful_configuration\": False,\n \"suppress_route_information\": False,\n \"lifetime\": 1800,\n \"managed_address_configuration\": False\n },\n \"oper_status\": \"up\",\n \"neighbor_solicitation\": {\n \"interval\": 1000,\n \"retry_interval\": 1000,\n \"retry_base\": 1,\n \"retry_attempts\": 3\n },\n \"dad\": {\n \"maximum_attempts\": 1,\n \"current_attempt\": 1\n },\n \"local_address\": \"fe80::5c01:c0ff:fe02:7\",\n \"error_message\": {\n \"unreachables\": False,\n \"redirects\": True\n },\n \"enable\": True,\n \"link_status\": \"up\",\n \"ip\": \"2001:db8:c8d1:4::3/64\",\n \"mac_extract\": \"disabled\",\n \"active_timers\": {\n \"last_router_advertisement\": \"00:05:42\",\n \"last_neighbor_advertisement\": \"00:01:07\",\n \"last_neighbor_solicitation\": \"00:09:34\",\n \"next_router_advertisement\": \"00:01:46\"\n },\n \"interface\": \"Ethernet1/2\"\n },\n }\n },\n \"default\": {\n \"interfaces\": {\n \"Ethernet1/1\": {\n \"router_advertisement\": {\n \"default_router_preference\": \"medium\",\n \"interval\": 201,\n \"retrans_timer\": 0,\n \"suppress_mtu\": False,\n \"current_hop_limit\": 64,\n \"reachable_time\": 0,\n \"mtu\": 1500,\n \"suppress\": True,\n \"other_stateful_configuration\": False,\n \"suppress_route_information\": False,\n \"lifetime\": 1801,\n \"managed_address_configuration\": False\n 
},\n \"oper_status\": \"up\",\n \"neighbor_solicitation\": {\n \"interval\": 1000,\n \"retry_interval\": 1000,\n \"retry_base\": 1,\n \"retry_attempts\": 3\n },\n \"dad\": {\n \"maximum_attempts\": 1,\n \"current_attempt\": 1\n },\n \"local_address\": \"fe80::5c01:c0ff:fe02:7\",\n \"error_message\": {\n \"unreachables\": False,\n \"redirects\": True\n },\n \"enable\": True,\n \"link_status\": \"up\",\n \"ip\": \"2001:db8:c56d:4::3/64\",\n \"mac_extract\": \"disabled\",\n \"active_timers\": {\n \"last_router_advertisement\": \"1d18h\",\n \"last_neighbor_advertisement\": \"00:02:12\",\n \"last_neighbor_solicitation\": \"00:06:16\",\n \"next_router_advertisement\": \"0.000000\"\n },\n \"interface\": \"Ethernet1/1\"\n },\n }\n }\n }\n }\n showIpv6IcmpNeighborDetail = {\n \"interfaces\": {\n \"Ethernet1/2\": {\n \"neighbors\": {\n \"2001:db8:c8d1:4::33\": {\n \"neighbor_state\": \"stale\",\n \"age\": \"00:03:30\",\n \"ip\": \"2001:db8:c8d1:4::33\",\n \"link_layer_address\": \"fa16.3e8b.59c9\",\n \"physical_interface\": \"Ethernet1/2\"\n },\n },\n \"interface\": \"Ethernet1/2\"\n },\n \"Ethernet1/1\": {\n \"neighbors\": {\n \"2001:db8:c56d:4::2\": {\n \"neighbor_state\": \"stale\",\n \"age\": \"00:15:02\",\n \"ip\": \"2001:db8:c56d:4::2\",\n \"link_layer_address\": \"fa16.3e82.6320\",\n \"physical_interface\": \"Ethernet1/1\"\n }\n },\n \"interface\": \"Ethernet1/1\"\n },\n }\n }\n showIpv6Routers = {\n \"interfaces\": {\n \"Ethernet1/1\": {\n \"neighbors\": {\n \"2001:db8:c56d:4::2\": {\n \"autonomous_flag\": 1,\n \"homeagent_flag\": 0,\n \"valid_lifetime\": 2592000,\n \"is_router\": True,\n \"addr_flag\": 0,\n \"ip\": \"2001:db8:c56d:4::2\",\n \"lifetime\": 1800,\n \"onlink_flag\": 1,\n \"current_hop_limit\": 64,\n \"prefix\": \"2001:db8:c56d:4::/64\",\n \"retransmission_time\": 0,\n \"preferred_lifetime\": 604800,\n \"last_update\": \"3.2\",\n \"mtu\": 1500,\n \"preference\": \"medium\",\n \"other_flag\": 0,\n \"reachable_time\": 0\n }\n },\n \"interface\": \"Ethernet1/1\"\n },\n \"Ethernet1/2\": {\n \"neighbors\": {\n \"2001:db8:c8d1:4::33\": {\n \"autonomous_flag\": 1,\n \"homeagent_flag\": 0,\n \"valid_lifetime\": 2592000,\n \"is_router\": True,\n \"addr_flag\": 0,\n \"ip\": \"2001:db8:c8d1:4::33\",\n \"lifetime\": 1800,\n \"onlink_flag\": 1,\n \"current_hop_limit\": 64,\n \"prefix\": \"2001:db8:c8d1:4::/64\",\n \"retransmission_time\": 0,\n \"preferred_lifetime\": 604800,\n \"last_update\": \"1.5\",\n \"mtu\": 1500,\n \"preference\": \"medium\",\n \"other_flag\": 0,\n \"reachable_time\": 0\n }\n },\n \"interface\": \"Ethernet1/2\"\n }\n }\n }\n\n ndOpsOutput = {\n 'interfaces':{\n 'Ethernet1/1':{\n 'interface': 'Ethernet1/1',\n \"router_advertisement\": {\n \"interval\": 201,\n \"suppress\": True,\n \"lifetime\": 1801,\n },\n 'neighbors': {\n '2001:db8:c56d:4::2': {\n 'ip': '2001:db8:c56d:4::2',\n 'link_layer_address': 'fa16.3e82.6320',\n 'age': '00:09:27',\n 'origin': 'other',\n \"is_router\": True,\n \"neighbor_state\": \"stale\",\n },\n },\n },\n 'Ethernet1/2':{\n 'interface': 'Ethernet1/2',\n \"router_advertisement\": {\n \"interval\": 600,\n \"suppress\": False,\n \"lifetime\": 1800,\n },\n 'neighbors':{\n '2001:db8:c8d1:4::33': {\n 'ip': '2001:db8:c8d1:4::33',\n 'link_layer_address': 'aaaa.bbbb.cccc',\n 'age': '2d15h',\n 'origin': 'static',\n \"is_router\": True,\n \"neighbor_state\": \"stale\",\n },\n },\n },\n },\n }\n\n showIpv6NeighborDetail_custom = {\n 'adjacency_hit': {\n 'INVALID': {\n 'packet_count': 0,\n 'byte_count': 0,\n },\n 'GLOBAL DROP': {\n 'packet_count': 0,\n 
'byte_count': 0,\n },\n 'GLOBAL PUNT': {\n 'packet_count': 0,\n 'byte_count': 0,\n },\n 'GLOBAL GLEAN': {\n 'packet_count': 0,\n 'byte_count': 0,\n },\n 'GLEAN': {\n 'packet_count': 0,\n 'byte_count': 0,\n },\n 'NORMAL': {\n 'packet_count': 0,\n 'byte_count': 0,\n },\n },\n 'adjacency_statistics_last_updated_before': 'never',\n 'total_number_of_entries': 5,\n 'interfaces': {\n 'Ethernet1/1.390': {\n 'interface': 'Ethernet1/1.390',\n 'neighbors': {\n 'fe80::f816:3eff:fe59:8f2e': {\n 'ip': 'fe80::f816:3eff:fe59:8f2e',\n 'age': '00:08:47',\n 'link_layer_address': 'fa16.3e59.8f2e',\n 'origin': 'other',\n 'preference': 50,\n 'physical_interface': 'Ethernet1/1.390',\n 'packet_count': 0,\n 'byte_count': 0,\n 'best': 'Yes',\n 'throttled': 'No',\n },\n },\n },\n 'Ethernet1/1.410': {\n 'interface': 'Ethernet1/1.410',\n 'neighbors': {\n 'fe80::f816:3eff:fe59:8f2e': {\n 'ip': 'fe80::f816:3eff:fe59:8f2e',\n 'age': '00:00:45',\n 'link_layer_address': 'fa16.3e59.8f2e',\n 'origin': 'other',\n 'preference': 50,\n 'physical_interface': 'Ethernet1/1.410',\n 'packet_count': 0,\n 'byte_count': 0,\n 'best': 'Yes',\n 'throttled': 'No',\n },\n },\n },\n 'Ethernet1/2.390': {\n 'interface': 'Ethernet1/2.390',\n 'neighbors': {\n 'fe80::f816:3eff:fe5b:cf97': {\n 'ip': 'fe80::f816:3eff:fe5b:cf97',\n 'age': '3w0d',\n 'link_layer_address': 'fa16.3e5b.cf97',\n 'origin': 'other',\n 'preference': 50,\n 'physical_interface': 'Ethernet1/2.390',\n 'packet_count': 0,\n 'byte_count': 0,\n 'best': 'Yes',\n 'throttled': 'No',\n },\n },\n },\n 'Ethernet1/2.415': {\n 'interface': 'Ethernet1/2.415',\n 'neighbors': {\n 'fe80::f816:3eff:fe5b:cf97': {\n 'ip': 'fe80::f816:3eff:fe5b:cf97',\n 'age': '3w0d',\n 'link_layer_address': 'fa16.3e5b.cf97',\n 'origin': 'other',\n 'preference': 50,\n 'physical_interface': 'Ethernet1/2.415',\n 'packet_count': 0,\n 'byte_count': 0,\n 'best': 'Yes',\n 'throttled': 'No',\n },\n },\n },\n 'Ethernet1/2.420': {\n 'interface': 'Ethernet1/2.420',\n 'neighbors': {\n 'fe80::f816:3eff:fe5b:cf97': {\n 'ip': 'fe80::f816:3eff:fe5b:cf97',\n 'age': '02:30:05',\n 'link_layer_address': 'fa16.3e5b.cf97',\n 'origin': 'static',\n 'preference': 50,\n 'physical_interface': 'Ethernet1/2.420',\n 'packet_count': 0,\n 'byte_count': 0,\n 'best': 'Yes',\n 'throttled': 'No',\n },\n },\n },\n },\n }\n showIpv6NdInterface_custom = {\n 'vrf': {\n 'VRF1': {\n 'interfaces': {\n 'Ethernet1/2.420': {\n 'interface': 'Ethernet1/2.420',\n 'oper_status': 'up',\n 'link_status': 'up',\n 'enable': True,\n 'ip': '2001:10:13:120::3/64',\n 'local_address': 'fe80::5c00:40ff:fe02:7',\n 'mac_extract': 'disabled',\n 'active_timers': {\n 'last_neighbor_solicitation': '00:12:56',\n 'last_neighbor_advertisement': '00:12:51',\n 'last_router_advertisement': '00:06:28',\n 'next_router_advertisement': '00:01:06',\n },\n 'router_advertisement': {\n 'interval': 600,\n 'managed_address_configuration': False,\n 'other_stateful_configuration': False,\n 'default_router_preference': 'medium',\n 'current_hop_limit': 64,\n 'mtu': 1500,\n 'lifetime': 1800,\n 'reachable_time': 0,\n 'retrans_timer': 0,\n 'suppress': False,\n 'suppress_mtu': False,\n 'suppress_route_information': False,\n },\n 'neighbor_solicitation': {\n 'interval': 1000,\n 'retry_base': 1,\n 'retry_interval': 1000,\n 'retry_attempts': 3,\n },\n 'error_message': {\n 'redirects': True,\n 'unreachables': False,\n },\n 'dad': {\n 'maximum_attempts': 1,\n 'current_attempt': 1,\n },\n },\n },\n },\n },\n }\n showIpv6IcmpNeighborDetail_custom = {\n 'interfaces': {\n 'Ethernet1/2.420': {\n 'interface': 
'Ethernet1/2.420',\n 'neighbors': {\n 'fe80::f816:3eff:fe5b:cf97': {\n 'ip': 'fe80::f816:3eff:fe5b:cf97',\n 'link_layer_address': 'fa16.3e5b.cf97',\n 'neighbor_state': 'stale',\n 'age': '00:14:18',\n 'physical_interface': 'Ethernet1/2.420',\n },\n },\n },\n },\n }\n showIpv6Routers_custom = {\n 'interfaces': {\n 'Ethernet1/1.390': {\n 'interface': 'Ethernet1/1.390',\n 'neighbors': {\n 'fe80::f816:3eff:fe59:8f2e': {\n 'is_router': True,\n 'last_update': '2.2',\n 'ip': 'fe80::f816:3eff:fe59:8f2e',\n 'current_hop_limit': 64,\n 'lifetime': 1800,\n 'addr_flag': 0,\n 'other_flag': 0,\n 'mtu': 1500,\n 'homeagent_flag': 0,\n 'preference': 'medium',\n 'reachable_time': 0,\n 'retransmission_time': 0,\n 'prefix': {\n '2001:10:23:90::/64': {\n 'onlink_flag': 1,\n 'autonomous_flag': 1,\n 'valid_lifetime': 2592000,\n 'preferred_lifetime': 604800,\n },\n },\n },\n },\n },\n 'Ethernet1/1.410': {\n 'interface': 'Ethernet1/1.410',\n 'neighbors': {\n 'fe80::f816:3eff:fe59:8f2e': {\n 'is_router': True,\n 'last_update': '0.6',\n 'ip': 'fe80::f816:3eff:fe59:8f2e',\n 'current_hop_limit': 64,\n 'lifetime': 1800,\n 'addr_flag': 0,\n 'other_flag': 0,\n 'mtu': 1500,\n 'homeagent_flag': 0,\n 'preference': 'medium',\n 'reachable_time': 0,\n 'retransmission_time': 0,\n 'prefix': {\n '2001:10:23:110::/64': {\n 'onlink_flag': 1,\n 'autonomous_flag': 1,\n 'valid_lifetime': 2592000,\n 'preferred_lifetime': 604800,\n },\n },\n },\n },\n },\n 'Ethernet1/1.415': {\n 'interface': 'Ethernet1/1.415',\n 'neighbors': {\n 'fe80::f816:3eff:fe59:8f2e': {\n 'is_router': True,\n 'last_update': '1.3',\n 'ip': 'fe80::f816:3eff:fe59:8f2e',\n 'current_hop_limit': 64,\n 'lifetime': 1800,\n 'addr_flag': 0,\n 'other_flag': 0,\n 'mtu': 1500,\n 'homeagent_flag': 0,\n 'preference': 'medium',\n 'reachable_time': 0,\n 'retransmission_time': 0,\n 'prefix': {\n '2001:10:23:115::/64': {\n 'onlink_flag': 1,\n 'autonomous_flag': 1,\n 'valid_lifetime': 2592000,\n 'preferred_lifetime': 604800,\n },\n },\n },\n },\n },\n 'Ethernet1/1.420': {\n 'interface': 'Ethernet1/1.420',\n 'neighbors': {\n 'fe80::f816:3eff:fe59:8f2e': {\n 'is_router': True,\n 'last_update': '0.9',\n 'ip': 'fe80::f816:3eff:fe59:8f2e',\n 'current_hop_limit': 64,\n 'lifetime': 1800,\n 'addr_flag': 0,\n 'other_flag': 0,\n 'mtu': 1500,\n 'homeagent_flag': 0,\n 'preference': 'medium',\n 'reachable_time': 0,\n 'retransmission_time': 0,\n 'prefix': {\n '2001:10:23:120::/64': {\n 'onlink_flag': 1,\n 'autonomous_flag': 1,\n 'valid_lifetime': 2592000,\n 'preferred_lifetime': 604800,\n },\n },\n },\n },\n },\n 'Ethernet1/2.390': {\n 'interface': 'Ethernet1/2.390',\n 'neighbors': {\n 'fe80::f816:3eff:fe5b:cf97': {\n 'is_router': True,\n 'last_update': '0.6',\n 'ip': 'fe80::f816:3eff:fe5b:cf97',\n 'current_hop_limit': 64,\n 'lifetime': 1800,\n 'addr_flag': 0,\n 'other_flag': 0,\n 'mtu': 1500,\n 'homeagent_flag': 0,\n 'preference': 'medium',\n 'reachable_time': 0,\n 'retransmission_time': 0,\n 'prefix': {\n '2001:10:13:90::/64': {\n 'onlink_flag': 1,\n 'autonomous_flag': 1,\n 'valid_lifetime': 2592000,\n 'preferred_lifetime': 604800,\n },\n },\n },\n },\n },\n 'Ethernet1/2.410': {\n 'interface': 'Ethernet1/2.410',\n 'neighbors': {\n 'fe80::f816:3eff:fe5b:cf97': {\n 'is_router': True,\n 'last_update': '1.4',\n 'ip': 'fe80::f816:3eff:fe5b:cf97',\n 'current_hop_limit': 64,\n 'lifetime': 1800,\n 'addr_flag': 0,\n 'other_flag': 0,\n 'mtu': 1500,\n 'homeagent_flag': 0,\n 'preference': 'medium',\n 'reachable_time': 0,\n 'retransmission_time': 0,\n 'prefix': {\n '2001:10:13:110::/64': {\n 'onlink_flag': 
1,\n 'autonomous_flag': 1,\n 'valid_lifetime': 2592000,\n 'preferred_lifetime': 604800,\n },\n },\n },\n },\n },\n 'Ethernet1/2.415': {\n 'interface': 'Ethernet1/2.415',\n 'neighbors': {\n 'fe80::f816:3eff:fe5b:cf97': {\n 'is_router': True,\n 'last_update': '0.5',\n 'ip': 'fe80::f816:3eff:fe5b:cf97',\n 'current_hop_limit': 64,\n 'lifetime': 1800,\n 'addr_flag': 0,\n 'other_flag': 0,\n 'mtu': 1500,\n 'homeagent_flag': 0,\n 'preference': 'medium',\n 'reachable_time': 0,\n 'retransmission_time': 0,\n 'prefix': {\n '2001:10:13:115::/64': {\n 'onlink_flag': 1,\n 'autonomous_flag': 1,\n 'valid_lifetime': 2592000,\n 'preferred_lifetime': 604800,\n },\n },\n },\n },\n },\n 'Ethernet1/2.420': {\n 'interface': 'Ethernet1/2.420',\n 'neighbors': {\n 'fe80::f816:3eff:fe5b:cf97': {\n 'is_router': True,\n 'last_update': '2.5',\n 'ip': 'fe80::f816:3eff:fe5b:cf97',\n 'current_hop_limit': 64,\n 'lifetime': 1800,\n 'addr_flag': 0,\n 'other_flag': 0,\n 'mtu': 1500,\n 'homeagent_flag': 0,\n 'preference': 'medium',\n 'reachable_time': 0,\n 'retransmission_time': 0,\n 'prefix': {\n '2001:10:13:120::/64': {\n 'onlink_flag': 1,\n 'autonomous_flag': 1,\n 'valid_lifetime': 2592000,\n 'preferred_lifetime': 604800,\n },\n },\n },\n },\n },\n },\n }\n ndOpsOutput_custom = {\n 'interfaces': {\n 'Ethernet1/2.420': {\n 'interface': 'Ethernet1/2.420',\n 'router_advertisement': {\n 'interval': 600,\n 'lifetime': 1800,\n 'suppress': False,\n },\n 'neighbors': {\n 'fe80::f816:3eff:fe5b:cf97': {\n 'ip': 'fe80::f816:3eff:fe5b:cf97',\n 'link_layer_address': 'fa16.3e5b.cf97',\n 'origin': 'static',\n 'age': '02:30:05',\n 'is_router': True,\n 'neighbor_state': 'stale',\n },\n },\n },\n },\n }\n", "id": "4471283", "language": "Python", "matching_score": 1.509200930595398, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/nd/nxos/tests/nd_output.py" }, { "content": "''' \nInterface Genie Ops Object Outputs for NXOS.\n'''\n\n\nclass InterfaceOutput(object):\n\n ShowInterface_all = '''\n mgmt0 is up\n admin state is up\n Hardware: Ethernet, address: 5254.00c9.d26e (bia 5254.00c9.d26e)\n MTU 1500 bytes, BW 1000000 Kbit, DLY 10 usec\n reliability 255/255, txload 1/255, rxload 1/255\n Encapsulation ARPA, medium is broadcast\n Port mode is routed\n full-duplex, 1000 Mb/s\n Auto-Negotiation is turned on\n Auto-mdix is turned off\n EtherType is 0x0000 \n 1 minute input rate 0 bits/sec, 0 packets/sec\n 1 minute output rate 24 bits/sec, 0 packets/sec\n Rx\n 2 input packets 0 unicast packets 2 multicast packets\n 0 broadcast packets 168 bytes\n Tx\n 22 output packets 0 unicast packets 18 multicast packets\n 4 broadcast packets 4726 bytes\n Ethernet2/1 is up\n admin state is up, Dedicated Interface\n Hardware: 10/100/1000 Ethernet, address: aaaa.bbbb.cccc (bia 5254.003b.4aca)\n Description: desc\n Internet Address is 10.4.4.4/24 secondary tag 10\n MTU 1600 bytes, BW 768 Kbit, DLY 3330 usec\n reliability 255/255, txload 1/255, rxload 1/255\n Encapsulation ARPA, medium is broadcast\n Port mode is routed\n full-duplex, 1000 Mb/s\n Beacon is turned off\n Auto-Negotiation is turned off\n Input flow-control is off, output flow-control is off\n Auto-mdix is turned off\n Switchport monitor is off \n EtherType is 0x8100 \n Members in this channel: Po1\n EEE (efficient-ethernet) : n/a\n Last link flapped 00:00:29\n Last clearing of \"show interface\" counters never\n 1 interface resets\n Load-Interval #1: 0 seconds\n 0 seconds input rate 0 bits/sec, 0 packets/sec\n 0 seconds output rate 0 bits/sec, 0 packets/sec\n input rate 0 bps, 0 pps; 
output rate 0 bps, 0 pps\n Load-Interval #2: 0 seconds\n 0 seconds input rate 0 bits/sec, 0 packets/sec\n 0 seconds output rate 0 bits/sec, 0 packets/sec\n input rate 0 bps, 0 pps; output rate 0 bps, 0 pps\n RX\n 0 unicast packets 0 multicast packets 0 broadcast packets\n 0 input packets 0 bytes\n 0 jumbo packets 0 storm suppression packets\n 0 runts 0 giants 0 CRC/FCS 0 no buffer\n 0 input error 0 short frame 0 overrun 0 underrun 0 ignored\n 0 watchdog 0 bad etype drop 0 bad proto drop 0 if down drop\n 0 input with dribble 0 input discard\n 0 Rx pause\n TX\n 0 unicast packets 0 multicast packets 0 broadcast packets\n 0 output packets 0 bytes\n 0 jumbo packets\n 0 output error 0 collision 0 deferred 0 late collision\n 0 lost carrier 0 no carrier 0 babble 0 output discard\n 0 Tx pause\n Ethernet2/1.10 is down (Administratively down)\n admin state is down, Dedicated Interface, [parent interface is Ethernet2/1]\n Hardware: 10/100/1000 Ethernet, address: 5254.003b.4af8 (bia 5254.003b.4aca)\n MTU 1600 bytes, BW 768 Kbit, DLY 10 usec\n reliability 255/255, txload 1/255, rxload 1/255\n Encapsulation 802.1Q Virtual LAN, Vlan ID 10, medium is broadcast\n Port mode is routed\n Auto-mdix is turned off\n EtherType is 0x8100 \n Ethernet2/1.20 is up\n admin state is up, Dedicated Interface, [parent interface is Ethernet2/1]\n Hardware: 10/100/1000 Ethernet, address: 5254.003b.4af8 (bia 5254.003b.4aca)\n MTU 1600 bytes, BW 768 Kbit, DLY 10 usec\n reliability 255/255, txload 1/255, rxload 1/255\n Encapsulation 802.1Q Virtual LAN, Vlan ID 20, medium is p2p\n Port mode is routed\n Auto-mdix is turned off\n EtherType is 0x8100 \n Ethernet2/2 is up\n admin state is up, Dedicated Interface\n Hardware: 10/100/1000 Ethernet, address: 5254.00ac.b52e (bia 5254.00ac.b52e)\n MTU 1500 bytes, BW 1000000 Kbit, DLY 10 usec\n reliability 255/255, txload 1/255, rxload 1/255\n Encapsulation ARPA, medium is broadcast\n Port mode is trunk\n full-duplex, 1000 Mb/s\n Beacon is turned off\n Auto-Negotiation is turned off\n Input flow-control is off, output flow-control is off\n Auto-mdix is turned off\n Switchport monitor is off \n EtherType is 0x8100\n Members in this channel: Po1\n EEE (efficient-ethernet) : n/a\n Last link flapped 00:07:28\n Last clearing of \"show interface\" counters never\n 1 interface resets\n Load-Interval #1: 0 seconds\n 0 seconds input rate 0 bits/sec, 0 packets/sec\n 0 seconds output rate 0 bits/sec, 0 packets/sec\n input rate 0 bps, 0 pps; output rate 0 bps, 0 pps\n Load-Interval #2: 0 seconds\n 0 seconds input rate 0 bits/sec, 0 packets/sec\n 0 seconds output rate 0 bits/sec, 0 packets/sec\n input rate 0 bps, 0 pps; output rate 0 bps, 0 pps\n RX\n 0 unicast packets 0 multicast packets 0 broadcast packets\n 0 input packets 0 bytes\n 0 jumbo packets 0 storm suppression packets\n 0 runts 0 giants 0 CRC/FCS 0 no buffer\n 0 input error 0 short frame 0 overrun 0 underrun 0 ignored\n 0 watchdog 0 bad etype drop 0 bad proto drop 0 if down drop\n 0 input with dribble 0 input discard\n 0 Rx pause\n TX\n 0 unicast packets 0 multicast packets 0 broadcast packets\n 0 output packets 0 bytes\n 0 jumbo packets\n 0 output error 0 collision 0 deferred 0 late collision\n 0 lost carrier 0 no carrier 0 babble 0 output discard\n 0 Tx pause\n '''\n\n ShowInterface_eth2 = '''\n Ethernet2/1 is up\n admin state is up, Dedicated Interface\n Hardware: 10/100/1000 Ethernet, address: aaaa.bbbb.cccc (bia 5254.003b.4aca)\n Description: desc\n Internet Address is 10.4.4.4/24 secondary tag 10\n MTU 1600 bytes, BW 768 Kbit, DLY 3330 
usec\n reliability 255/255, txload 1/255, rxload 1/255\n Encapsulation ARPA, medium is broadcast\n Port mode is routed\n full-duplex, 1000 Mb/s\n Beacon is turned off\n Auto-Negotiation is turned off\n Input flow-control is off, output flow-control is off\n Auto-mdix is turned off\n Switchport monitor is off \n EtherType is 0x8100 \n Members in this channel: Po1\n EEE (efficient-ethernet) : n/a\n Last link flapped 00:00:29\n Last clearing of \"show interface\" counters never\n 1 interface resets\n Load-Interval #1: 0 seconds\n 0 seconds input rate 0 bits/sec, 0 packets/sec\n 0 seconds output rate 0 bits/sec, 0 packets/sec\n input rate 0 bps, 0 pps; output rate 0 bps, 0 pps\n Load-Interval #2: 0 seconds\n 0 seconds input rate 0 bits/sec, 0 packets/sec\n 0 seconds output rate 0 bits/sec, 0 packets/sec\n input rate 0 bps, 0 pps; output rate 0 bps, 0 pps\n RX\n 0 unicast packets 0 multicast packets 0 broadcast packets\n 0 input packets 0 bytes\n 0 jumbo packets 0 storm suppression packets\n 0 runts 0 giants 0 CRC/FCS 0 no buffer\n 0 input error 0 short frame 0 overrun 0 underrun 0 ignored\n 0 watchdog 0 bad etype drop 0 bad proto drop 0 if down drop\n 0 input with dribble 0 input discard\n 0 Rx pause\n TX\n 0 unicast packets 0 multicast packets 0 broadcast packets\n 0 output packets 0 bytes\n 0 jumbo packets\n 0 output error 0 collision 0 deferred 0 late collision\n 0 lost carrier 0 no carrier 0 babble 0 output discard\n 0 Tx pause'''\n ShowIpInterfaceVrfAll = {\n 'Ethernet2/1': {'directed_broadcast': 'disabled',\n 'icmp_port_unreachable': 'enabled',\n 'icmp_redirects': 'disabled',\n 'icmp_unreachable': 'disabled',\n 'int_stat_last_reset': 'never',\n 'interface_status': 'protocol-up/link-up/admin-up',\n 'iod': 36,\n 'ip_forwarding': 'disabled',\n 'ip_mtu': 1600,\n 'ipv4': {'10.2.2.2/24': {'ip': '10.2.2.2',\n 'ip_subnet': '10.2.2.0',\n 'prefix_length': '24',\n 'secondary': True},\n '10.3.3.3/24': {'broadcast_address': '255.255.255.255',\n 'ip': '10.3.3.3',\n 'ip_subnet': '10.3.3.0',\n 'prefix_length': '24',\n 'route_preference': '0',\n 'route_tag': '0',\n 'secondary': True},\n '10.4.4.4/24': {'ip': '10.4.4.4',\n 'ip_subnet': '10.4.4.0',\n 'prefix_length': '24',\n 'route_preference': 'None',\n 'route_tag': 'None'},\n 'unnumbered':{'interface_ref': 'loopback0'},\n 'counters': {'broadcast_bytes_consumed': 0,\n 'broadcast_bytes_forwarded': 0,\n 'broadcast_bytes_originated': 0,\n 'broadcast_bytes_received': 0,\n 'broadcast_bytes_sent': 0,\n 'broadcast_packets_consumed': 0,\n 'broadcast_packets_forwarded': 0,\n 'broadcast_packets_originated': 0,\n 'broadcast_packets_received': 0,\n 'broadcast_packets_sent': 0,\n 'labeled_bytes_consumed': 0,\n 'labeled_bytes_forwarded': 0,\n 'labeled_bytes_originated': 0,\n 'labeled_bytes_received': 0,\n 'labeled_bytes_sent': 0,\n 'labeled_packets_consumed': 0,\n 'labeled_packets_forwarded': 0,\n 'labeled_packets_originated': 0,\n 'labeled_packets_received': 0,\n 'labeled_packets_sent': 0,\n 'multicast_bytes_consumed': 0,\n 'multicast_bytes_forwarded': 0,\n 'multicast_bytes_originated': 0,\n 'multicast_bytes_received': 0,\n 'multicast_bytes_sent': 0,\n 'multicast_packets_consumed': 0,\n 'multicast_packets_forwarded': 0,\n 'multicast_packets_originated': 0,\n 'multicast_packets_received': 0,\n 'multicast_packets_sent': 0,\n 'unicast_bytes_consumed': 0,\n 'unicast_bytes_forwarded': 0,\n 'unicast_bytes_originated': 0,\n 'unicast_bytes_received': 0,\n 'unicast_bytes_sent': 0,\n 'unicast_packets_consumed': 0,\n 'unicast_packets_forwarded': 0,\n 
'unicast_packets_originated': 0,\n 'unicast_packets_received': 0,\n 'unicast_packets_sent': 0}},\n 'load_sharing': 'none',\n 'local_proxy_arp': 'disabled',\n 'multicast_groups': ['172.16.58.3', '172.16.31.10', '172.16.17.32'],\n 'multicast_routing': 'disabled',\n 'proxy_arp': 'disabled',\n 'unicast_reverse_path': 'none',\n 'vrf': 'VRF1',\n 'wccp_redirect_exclude': 'disabled',\n 'wccp_redirect_inbound': 'disabled',\n 'wccp_redirect_outbound': 'disabled'}\n }\n ShowIpInterfaceVrfAll_all = '''\n IP Interface Status for VRF \"default\"\n\n IP Interface Status for VRF \"management\"\n\n IP Interface Status for VRF \"VRF1\"\n Ethernet2/1, Interface status: protocol-up/link-up/admin-up, iod: 36,\n IP address: 10.4.4.4, IP subnet: 10.4.4.0/24 secondary\n IP address: 10.2.2.2, IP subnet: 10.2.2.0/24 secondary\n IP address: 10.3.3.3, IP subnet: 10.3.3.0/24 secondary\n IP broadcast address: 255.255.255.255\n IP multicast groups locally joined: \n 172.16.58.3 172.16.31.10 172.16.17.32\n IP MTU: 1600 bytes (using link MTU)\n IP primary address route-preference: 0, tag: 0\n IP unnumbered interface (loopback0)\n IP proxy ARP : disabled\n IP Local Proxy ARP : disabled\n IP multicast routing: disabled\n IP icmp redirects: disabled\n IP directed-broadcast: disabled \n IP Forwarding: disabled \n IP icmp unreachables (except port): disabled\n IP icmp port-unreachable: enabled\n IP unicast reverse path forwarding: none\n IP load sharing: none \n IP interface statistics last reset: never\n IP interface software stats: (sent/received/forwarded/originated/consumed)\n Unicast packets : 0/0/0/0/0\n Unicast bytes : 0/0/0/0/0\n Multicast packets : 0/0/0/0/0\n Multicast bytes : 0/0/0/0/0\n Broadcast packets : 0/0/0/0/0\n Broadcast bytes : 0/0/0/0/0\n Labeled packets : 0/0/0/0/0\n Labeled bytes : 0/0/0/0/0\n WCCP Redirect outbound: disabled\n WCCP Redirect inbound: disabled\n WCCP Redirect exclude: disabled\n '''\n ShowIpInterfaceVrfAll_vrf1_eth2='''\n IP Interface Status for VRF \"VRF1\"\n Ethernet2/1, Interface status: protocol-up/link-up/admin-up, iod: 36,\n IP address: 10.4.4.4, IP subnet: 10.4.4.0/24 secondary\n IP address: 10.2.2.2, IP subnet: 10.2.2.0/24 secondary\n IP address: 10.3.3.3, IP subnet: 10.3.3.0/24 secondary\n IP broadcast address: 255.255.255.255\n IP multicast groups locally joined: \n 172.16.58.3 172.16.31.10 172.16.17.32\n IP MTU: 1600 bytes (using link MTU)\n IP primary address route-preference: 0, tag: 0\n IP unnumbered interface (loopback0)\n IP proxy ARP : disabled\n IP Local Proxy ARP : disabled\n IP multicast routing: disabled\n IP icmp redirects: disabled\n IP directed-broadcast: disabled \n IP Forwarding: disabled \n IP icmp unreachables (except port): disabled\n IP icmp port-unreachable: enabled\n IP unicast reverse path forwarding: none\n IP load sharing: none \n IP interface statistics last reset: never\n IP interface software stats: (sent/received/forwarded/originated/consumed)\n Unicast packets : 0/0/0/0/0\n Unicast bytes : 0/0/0/0/0\n Multicast packets : 0/0/0/0/0\n Multicast bytes : 0/0/0/0/0\n Broadcast packets : 0/0/0/0/0\n Broadcast bytes : 0/0/0/0/0\n Labeled packets : 0/0/0/0/0\n Labeled bytes : 0/0/0/0/0\n WCCP Redirect outbound: disabled\n WCCP Redirect inbound: disabled\n WCCP Redirect exclude: disabled\n '''\n ShowVrfAllInterface = {\n 'Ethernet2/1': {'site_of_origin': '--', 'vrf': 'VRF1', 'vrf_id': 3},\n 'Ethernet2/1.10': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet2/1.20': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet2/10': 
{'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet2/11': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet2/12': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet2/13': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet2/14': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet2/15': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet2/16': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet2/17': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet2/18': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet2/19': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet2/20': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet2/21': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet2/22': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet2/23': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet2/24': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet2/25': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet2/26': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet2/27': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet2/28': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet2/29': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet2/30': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet2/31': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet2/32': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet2/33': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet2/34': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet2/35': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet2/36': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet2/37': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet2/38': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet2/39': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet2/4': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet2/40': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet2/41': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet2/42': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet2/43': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet2/44': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet2/45': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet2/46': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet2/47': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet2/48': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet2/5': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet2/6': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet2/7': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet2/8': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet2/9': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet3/1': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet3/10': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet3/11': {'site_of_origin': '--', 'vrf': 
'default', 'vrf_id': 1},\n 'Ethernet3/12': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet3/13': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet3/14': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet3/15': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet3/16': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet3/17': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet3/18': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet3/19': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet3/2': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet3/20': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet3/21': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet3/22': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet3/23': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet3/24': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet3/25': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet3/26': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet3/27': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet3/28': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet3/29': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet3/3': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet3/30': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet3/31': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet3/32': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet3/33': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet3/34': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet3/35': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet3/36': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet3/37': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet3/38': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet3/39': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet3/4': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet3/40': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet3/41': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet3/42': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet3/43': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet3/44': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet3/45': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet3/46': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet3/47': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet3/48': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet3/5': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet3/6': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet3/7': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet3/8': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet3/9': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet4/1': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet4/10': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 
'Ethernet4/11': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet4/12': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet4/13': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet4/14': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet4/15': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet4/16': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet4/17': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet4/18': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet4/19': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet4/2': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet4/20': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet4/21': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet4/22': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet4/23': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet4/24': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet4/25': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet4/26': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet4/27': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet4/28': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet4/29': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet4/3': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet4/30': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet4/31': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet4/32': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet4/33': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet4/34': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet4/35': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet4/36': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet4/37': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet4/38': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet4/39': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet4/4': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet4/40': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet4/41': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet4/42': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet4/43': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet4/44': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet4/45': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet4/46': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet4/47': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet4/48': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet4/5': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet4/6': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet4/7': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet4/8': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Ethernet4/9': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'Null0': {'site_of_origin': '--', 'vrf': 'default', 'vrf_id': 1},\n 'mgmt0': {'site_of_origin': '--', 'vrf': 
'management', 'vrf_id': 2}\n }\n ShowVrfAllInterface_all = '''\n\n Interface VRF-Name VRF-ID Site-of-Origin\n Ethernet2/1 VRF1 3 --\n Null0 default 1 --\n Ethernet2/1.10 default 1 --\n Ethernet2/1.20 default 1 --\n Ethernet2/4 default 1 --\n Ethernet2/5 default 1 --\n Ethernet2/6 default 1 --\n Ethernet2/7 default 1 --\n Ethernet2/8 default 1 --\n Ethernet2/9 default 1 --\n Ethernet2/10 default 1 --\n Ethernet2/11 default 1 --\n Ethernet2/12 default 1 --\n Ethernet2/13 default 1 --\n Ethernet2/14 default 1 --\n Ethernet2/15 default 1 --\n Ethernet2/16 default 1 --\n Ethernet2/17 default 1 --\n Ethernet2/18 default 1 --\n Ethernet2/19 default 1 --\n Ethernet2/20 default 1 --\n Ethernet2/21 default 1 --\n Ethernet2/22 default 1 --\n Ethernet2/23 default 1 --\n Ethernet2/24 default 1 --\n Ethernet2/25 default 1 --\n Ethernet2/26 default 1 --\n Ethernet2/27 default 1 --\n Ethernet2/28 default 1 --\n Ethernet2/29 default 1 --\n Ethernet2/30 default 1 --\n Ethernet2/31 default 1 --\n Ethernet2/32 default 1 --\n Ethernet2/33 default 1 --\n Ethernet2/34 default 1 --\n Ethernet2/35 default 1 --\n Ethernet2/36 default 1 --\n Ethernet2/37 default 1 --\n Ethernet2/38 default 1 --\n Ethernet2/39 default 1 --\n Ethernet2/40 default 1 --\n Ethernet2/41 default 1 --\n Ethernet2/42 default 1 --\n Ethernet2/43 default 1 --\n Ethernet2/44 default 1 --\n Ethernet2/45 default 1 --\n Ethernet2/46 default 1 --\n Ethernet2/47 default 1 --\n Ethernet2/48 default 1 --\n Ethernet3/1 default 1 --\n Ethernet3/2 default 1 --\n Ethernet3/3 default 1 --\n Ethernet3/4 default 1 --\n Ethernet3/5 default 1 --\n Ethernet3/6 default 1 --\n Ethernet3/7 default 1 --\n Ethernet3/8 default 1 --\n Ethernet3/9 default 1 --\n Ethernet3/10 default 1 --\n Ethernet3/11 default 1 --\n Ethernet3/12 default 1 --\n Ethernet3/13 default 1 --\n Ethernet3/14 default 1 --\n Ethernet3/15 default 1 --\n Ethernet3/16 default 1 --\n Ethernet3/17 default 1 --\n Ethernet3/18 default 1 --\n Ethernet3/19 default 1 --\n Ethernet3/20 default 1 --\n Ethernet3/21 default 1 --\n Ethernet3/22 default 1 --\n Ethernet3/23 default 1 --\n Ethernet3/24 default 1 --\n Ethernet3/25 default 1 --\n Ethernet3/26 default 1 --\n Ethernet3/27 default 1 --\n Ethernet3/28 default 1 --\n Ethernet3/29 default 1 --\n Ethernet3/30 default 1 --\n Ethernet3/31 default 1 --\n Ethernet3/32 default 1 --\n Ethernet3/33 default 1 --\n Ethernet3/34 default 1 --\n Ethernet3/35 default 1 --\n Ethernet3/36 default 1 --\n Ethernet3/37 default 1 --\n Ethernet3/38 default 1 --\n Ethernet3/39 default 1 --\n Ethernet3/40 default 1 --\n Ethernet3/41 default 1 --\n Ethernet3/42 default 1 --\n Ethernet3/43 default 1 --\n Ethernet3/44 default 1 --\n Ethernet3/45 default 1 --\n Ethernet3/46 default 1 --\n Ethernet3/47 default 1 --\n Ethernet3/48 default 1 --\n Ethernet4/1 default 1 --\n Ethernet4/2 default 1 --\n Ethernet4/3 default 1 --\n Ethernet4/4 default 1 --\n Ethernet4/5 default 1 --\n Ethernet4/6 default 1 --\n Ethernet4/7 default 1 --\n Ethernet4/8 default 1 --\n Ethernet4/9 default 1 --\n Ethernet4/10 default 1 --\n Ethernet4/11 default 1 --\n Ethernet4/12 default 1 --\n Ethernet4/13 default 1 --\n Ethernet4/14 default 1 --\n Ethernet4/15 default 1 --\n Ethernet4/16 default 1 --\n Ethernet4/17 default 1 --\n Ethernet4/18 default 1 --\n Ethernet4/19 default 1 --\n Ethernet4/20 default 1 --\n Ethernet4/21 default 1 --\n Ethernet4/22 default 1 --\n Ethernet4/23 default 1 --\n Ethernet4/24 default 1 --\n Ethernet4/25 default 1 --\n Ethernet4/26 default 1 --\n Ethernet4/27 default 1 --\n 
Ethernet4/28 default 1 --\n Ethernet4/29 default 1 --\n Ethernet4/30 default 1 --\n Ethernet4/31 default 1 --\n Ethernet4/32 default 1 --\n Ethernet4/33 default 1 --\n Ethernet4/34 default 1 --\n Ethernet4/35 default 1 --\n Ethernet4/36 default 1 --\n Ethernet4/37 default 1 --\n Ethernet4/38 default 1 --\n Ethernet4/39 default 1 --\n Ethernet4/40 default 1 --\n Ethernet4/41 default 1 --\n Ethernet4/42 default 1 --\n Ethernet4/43 default 1 --\n Ethernet4/44 default 1 --\n Ethernet4/45 default 1 --\n Ethernet4/46 default 1 --\n Ethernet4/47 default 1 --\n Ethernet4/48 default 1 --\n mgmt0 management 2 --\n\n '''\n ShowVrfAllInterface_vrf1_eth2='''\n Interface VRF-Name VRF-ID Site-of-Origin\n Ethernet2/1 VRF1 3 --\n '''\n ShowInterfaceSwitchport = {\n 'Ethernet2/2': {'access_vlan': 1,\n 'access_vlan_mode': 'default',\n 'admin_priv_vlan_primary_host_assoc': 'none',\n 'admin_priv_vlan_primary_mapping': 'none',\n 'admin_priv_vlan_secondary_host_assoc': 'none',\n 'admin_priv_vlan_secondary_mapping': 'none',\n 'admin_priv_vlan_trunk_encapsulation': 'dot1q',\n 'admin_priv_vlan_trunk_native_vlan': 'none',\n 'admin_priv_vlan_trunk_normal_vlans': 'none',\n 'admin_priv_vlan_trunk_private_vlans': 'none',\n 'native_vlan': 1,\n 'native_vlan_mode': 'default',\n 'operational_private_vlan': 'none',\n 'switchport_mode': 'trunk',\n 'switchport_monitor': 'Not enabled',\n 'switchport_status': 'enabled',\n 'switchport_enable': True,\n 'trunk_vlans': '100,300'},\n 'Ethernet2/3': {'access_vlan': 100,\n 'access_vlan_mode': 'Vlan not created',\n 'admin_priv_vlan_primary_host_assoc': 'none',\n 'admin_priv_vlan_primary_mapping': 'none',\n 'admin_priv_vlan_secondary_host_assoc': 'none',\n 'admin_priv_vlan_secondary_mapping': 'none',\n 'admin_priv_vlan_trunk_encapsulation': 'dot1q',\n 'admin_priv_vlan_trunk_native_vlan': 'none',\n 'admin_priv_vlan_trunk_normal_vlans': 'none',\n 'admin_priv_vlan_trunk_private_vlans': 'none',\n 'native_vlan': 1,\n 'native_vlan_mode': 'default',\n 'operational_private_vlan': 'none',\n 'switchport_mode': 'access',\n 'switchport_monitor': 'Not enabled',\n 'switchport_status': 'enabled',\n 'switchport_enable': True,\n 'trunk_vlans': '1-4094'}\n }\n ShowInterfaceSwitchport_all = '''\n Name: Ethernet2/2\n Switchport: Enabled\n Switchport Monitor: Not enabled \n Operational Mode: trunk\n Access Mode VLAN: 1 (default)\n Trunking Native Mode VLAN: 1 (default)\n Trunking VLANs Allowed: 100,300\n Administrative private-vlan primary host-association: none\n Administrative private-vlan secondary host-association: none\n Administrative private-vlan primary mapping: none\n Administrative private-vlan secondary mapping: none\n Administrative private-vlan trunk native VLAN: none\n Administrative private-vlan trunk encapsulation: dot1q\n Administrative private-vlan trunk normal VLANs: none\n Administrative private-vlan trunk private VLANs: none\n Operational private-vlan: none\n Name: Ethernet2/3\n Switchport: Enabled\n Switchport Monitor: Not enabled \n Operational Mode: access\n Access Mode VLAN: 100 (Vlan not created)\n Trunking Native Mode VLAN: 1 (default)\n Trunking VLANs Allowed: 1-4094\n Administrative private-vlan primary host-association: none\n Administrative private-vlan secondary host-association: none\n Administrative private-vlan primary mapping: none\n Administrative private-vlan secondary mapping: none\n Administrative private-vlan trunk native VLAN: none\n Administrative private-vlan trunk encapsulation: dot1q\n Administrative private-vlan trunk normal VLANs: none\n Administrative 
private-vlan trunk private VLANs: none\n Operational private-vlan: none \n '''\n ShowInterfaceSwitchport_eth2='''\n Name: Ethernet2/1\n Switchport: Enabled\n Switchport Monitor: Not enabled \n Operational Mode: trunk\n Access Mode VLAN: 1 (default)\n Trunking Native Mode VLAN: 1 (default)\n Trunking VLANs Allowed: 100,300\n Administrative private-vlan primary host-association: none\n Administrative private-vlan secondary host-association: none\n Administrative private-vlan primary mapping: none\n Administrative private-vlan secondary mapping: none\n Administrative private-vlan trunk native VLAN: none\n Administrative private-vlan trunk encapsulation: dot1q\n Administrative private-vlan trunk normal VLANs: none\n Administrative private-vlan trunk private VLANs: none\n Operational private-vlan: none\n '''\n ShowIpv6InterfaceVrfAll = {\n 'Ethernet2/1': {'enabled': True,\n 'interface_status': 'protocol-up/link-up/admin-up',\n 'iod': 36,\n 'ipv6': {'2001:db8:1:1::1/64': {'ip': '2001:db8:1:1::1',\n 'prefix_length': '64',\n 'status': 'valid'},\n '2001:db8:2:2::2/64': {'anycast': True,\n 'ip': '2001:db8:2:2::2',\n 'prefix_length': '64',\n 'status': 'valid'},\n '2001:db8:3:3::3/64': {'ip': '2001:db8:3:3::3',\n 'prefix_length': '64',\n 'status': 'valid'},\n '2001:db8:4:4:a8aa:bbff:febb:cccc/64': {'ip': '2001:db8:4:4:a8aa:bbff:febb:cccc',\n 'prefix_length': '64',\n 'status': 'valid'},\n 'counters': {'multicast_bytes_consumed': 640,\n 'multicast_bytes_forwarded': 0,\n 'multicast_bytes_originated': 1144,\n 'multicast_packets_consumed': 9,\n 'multicast_packets_forwarded': 0,\n 'multicast_packets_originated': 12,\n 'unicast_bytes_consumed': 0,\n 'unicast_bytes_forwarded': 0,\n 'unicast_bytes_originated': 0,\n 'unicast_packets_consumed': 0,\n 'unicast_packets_forwarded': 0,\n 'unicast_packets_originated': 0},\n 'ipv6_forwarding_feature': 'disabled',\n 'ipv6_last_reset': 'never',\n 'ipv6_link_local': 'fe80::a8aa:bbff:febb:cccc ',\n 'ipv6_link_local_state': 'default',\n 'ipv6_ll_state': 'valid',\n 'ipv6_load_sharing': 'none',\n 'ipv6_mtu': 1600,\n 'ipv6_multicast_entries': 'none',\n 'ipv6_multicast_groups': ['fc00:db20:35b:7399::5:ffbb:cccc',\n 'fc00:db20:35b:7399::5:ff00:3',\n 'fc00:db20:35b:7399::5:ff00:2',\n 'fc00:e968:6179::de52:7100',\n 'fc00:db20:35b:7399::5',\n 'fc00:db20:35b:7399::5:ff00:1',\n 'fc00:db20:35b:7399::5:ffbb:cccc',\n 'fc00:db20:35b:7399::5:ff00:0'],\n 'ipv6_multicast_routing': 'disabled',\n 'ipv6_report_link_local': 'disabled',\n 'ipv6_subnet': '2001:db8:1:1::/64',\n 'ipv6_unicast_rev_path_forwarding': 'none',\n 'ipv6_virtual_add': 'none',\n 'multicast_groups': True},\n 'vrf': 'VRF1'}\n }\n ShowIpv6InterfaceVrfAll_all = '''\n IPv6 Interface Status for VRF \"default\"\n\n IPv6 Interface Status for VRF \"management\"\n\n IPv6 Interface Status for VRF \"VRF1\"\n Ethernet2/1, Interface status: protocol-up/link-up/admin-up, iod: 36\n IPv6 address: \n 2001:db8:1:1::1/64 [VALID]\n 2001:db8:3:3::3/64 [VALID]\n 2fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b/64 [VALID]\n 2001:db8:2:2::2/64 [VALID]\n IPv6 subnet: 2001:db8:1:1::/64\n Anycast configured addresses:\n 2001:db8:2:2::2/64 [VALID]\n IPv6 link-local address: fe80::a8aa:bbff:febb:cccc (default) [VALID]\n IPv6 virtual addresses configured: none\n IPv6 multicast routing: disabled\n IPv6 report link local: disabled\n IPv6 Forwarding feature: disabled\n IPv6 multicast groups locally joined: \n fc00:db20:35b:7399::5:ffbb:cccc fc00:db20:35b:7399::5:ff00:3 fc00:db20:35b:7399::5:ff00:2 fc00:e968:6179::de52:7100 \n fc00:db20:35b:7399::5 
fc00:db20:35b:7399::5:ff00:1 fc00:db20:35b:7399::5:ffbb:cccc fc00:db20:35b:7399::5:ff00:0 \n IPv6 multicast (S,G) entries joined: none\n IPv6 MTU: 1600 (using link MTU)\n IPv6 unicast reverse path forwarding: none\n IPv6 load sharing: none \n IPv6 interface statistics last reset: never\n IPv6 interface RP-traffic statistics: (forwarded/originated/consumed)\n Unicast packets: 0/0/0\n Unicast bytes: 0/0/0\n Multicast packets: 0/12/9\n Multicast bytes: 0/1144/640\n '''\n ShowIpv6InterfaceVrfAll_eth1='''\n IPv6 Interface Status for VRF \"VRF1\"\n Ethernet2/1, Interface status: protocol-up/link-up/admin-up, iod: 36\n IPv6 address: \n 2001:db8:1:1::1/64 [VALID]\n 2001:db8:3:3::3/64 [VALID]\n 2fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b/64 [VALID]\n 2fdf8:f53e:61e4::18/64 [VALID]\n IPv6 subnet: 2001:db8:1:1::/64\n Anycast configured addresses:\n 2001:db8:2:2::2/64 [VALID]\n IPv6 link-local address: fe80::a8aa:bbff:febb:cccc (default) [VALID]\n IPv6 virtual addresses configured: none\n IPv6 multicast routing: disabled\n IPv6 report link local: disabled\n IPv6 Forwarding feature: disabled\n IPv6 multicast groups locally joined: \n fc00:db20:35b:7399::5:ffbb:cccc fc00:db20:35b:7399::5:3 fc00:db20:35b:7399::5:2 fc00:e968:6179::de52:7100 \n fc00:db20:35b:7399::5 fc00:db20:35b:7399::5:1 fc00:db20:35b:7399::5:ffbb:cccc fc00:db20:35b:7399::5:0 \n IPv6 multicast (S,G) entries joined: none\n IPv6 MTU: 1600 (using link MTU)\n IPv6 unicast reverse path forwarding: none\n IPv6 load sharing: none \n IPv6 interface statistics last reset: never\n IPv6 interface RP-traffic statistics: (forwarded/originated/consumed)\n Unicast packets: 0/0/0\n Unicast bytes: 0/0/0\n Multicast packets: 0/12/9\n Multicast bytes: 0/1144/640\n '''\n ShowRoutingVrfAll = {\n 'vrf':\n {'VRF1':\n {'address_family':\n {'vpnv4 unicast':\n {'bgp_distance_internal_as': 33,\n 'bgp_distance_local': 55,\n 'ip':\n {'10.2.2.2/24':\n {'ubest_num': '1',\n 'mbest_num': '0',\n 'best_route':\n {'unicast':\n {'nexthop':\n {'Null0':\n {'protocol':\n {'bgp':\n {'uptime': '5w0d',\n 'preference': '55',\n 'metric': '0',\n 'protocol_id': '100',\n 'interface':\n 'Ethernet2/1',\n 'attribute':\n 'discard',\n 'tag':\n '100'}}}}}}}}}}},\n 'default':\n {'address_family':\n {'ipv4 unicast':\n {'bgp_distance_internal_as': 33,\n 'bgp_distance_local': 55,\n 'ip':\n {'10.169.2.2/24':\n {'ubest_num': '1',\n 'mbest_num': '0',\n 'best_route':\n {'unicast':\n {'nexthop':\n {'Null0':\n {'protocol':\n {'bgp':\n {'uptime': '5w0d',\n 'preference': '55',\n 'metric': '0',\n 'protocol_id': '100',\n 'interface': 'Ethernet2/1',\n 'attribute': 'discard',\n 'tag': '100'}}}}}}}}}}}}}\n ShowRoutingVrfAll_vrf1 = {\n 'vrf':\n {'VRF1':\n {'address_family':\n {'vpnv4 unicast':\n {'bgp_distance_internal_as': 33,\n 'bgp_distance_local': 55,\n 'ip':\n {'10.2.2.2/24':\n {'ubest_num': '1',\n 'mbest_num': '0',\n 'best_route':\n {'unicast':\n {'nexthop':\n {'Null0':\n {'protocol':\n {'bgp':\n {'uptime': '5w0d',\n 'preference': '55',\n 'metric': '0',\n 'protocol_id': '100',\n 'interface':\n 'Ethernet2/1',\n 'attribute':\n 'discard',\n 'tag':\n '100'}}}}}}}}}}}, }}\n ShowRoutingIpv6VrfAll = {\n \"vrf\": {\n \"VRF1\": {\n \"address_family\": {\n \"ipv6 unicast\": {\n \"ip\": {\n \"2001:db8:1:1::1/128\": {\n \"attach\": \"attached\",\n \"best_route\": {\n \"unicast\": {\n \"nexthop\": {\n \"2001:db8:1:1::1\": {\n \"protocol\": {\n \"local\": {\n \"interface\": \"Ethernet1/1\",\n \"metric\": \"0\",\n \"uptime\": \"00:15:46\",\n \"preference\": \"0\"\n }\n }\n }\n }\n }\n },\n \"mbest_num\": \"0\",\n 
\"ubest_num\": \"1\"\n },\n \"2001:db8:1:1::/64\": {\n \"attach\": \"attached\",\n \"best_route\": {\n \"unicast\": {\n \"nexthop\": {\n \"2001:db8:1:1::1\": {\n \"protocol\": {\n \"direct\": {\n \"interface\": \"Ethernet1/1\",\n \"metric\": \"0\",\n \"uptime\": \"00:15:46\",\n \"preference\": \"0\"\n }\n }\n }\n }\n }\n },\n \"mbest_num\": \"0\",\n \"ubest_num\": \"1\"\n },\n \"2001:db8:2:2::2/128\": {\n \"attach\": \"attached\",\n \"best_route\": {\n \"unicast\": {\n \"nexthop\": {\n \"2001:db8:2:2::2\": {\n \"protocol\": {\n \"local\": {\n \"interface\": \"Ethernet1/1\",\n \"metric\": \"0\",\n \"tag\": \"222\",\n \"uptime\": \"00:15:46\",\n \"preference\": \"0\"\n }\n }\n }\n }\n }\n },\n \"mbest_num\": \"0\",\n \"ubest_num\": \"1\"\n },\n \"2fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b/64\": {\n \"attach\": \"attached\",\n \"best_route\": {\n \"unicast\": {\n \"nexthop\": {\n \"2001:db8::5054:ff:fed5:63f9\": {\n \"protocol\": {\n \"local\": {\n \"interface\": \"Ethernet2/1\",\n \"metric\": \"0\",\n \"uptime\": \"00:15:46\",\n \"preference\": \"0\"\n }\n }\n }\n }\n }\n },\n \"mbest_num\": \"0\",\n \"ubest_num\": \"1\"\n },\n \"2001:db8::/64\": {\n \"attach\": \"attached\",\n \"best_route\": {\n \"unicast\": {\n \"nexthop\": {\n \"2001:db8::5054:ff:fed5:63f9\": {\n \"protocol\": {\n \"direct\": {\n \"interface\": \"Ethernet1/1\",\n \"metric\": \"0\",\n \"uptime\": \"00:15:46\",\n \"preference\": \"0\"\n }\n }\n }\n }\n }\n },\n \"mbest_num\": \"0\",\n \"ubest_num\": \"1\"\n },\n \"2001:db8:3:3::3/64\": {\n \"attach\": \"attached\",\n \"best_route\": {\n \"unicast\": {\n \"nexthop\": {\n \"2001:db8:2:2::2\": {\n \"protocol\": {\n \"direct\": {\n \"interface\": \"Ethernet2/1\",\n \"metric\": \"0\",\n \"tag\": \"222\",\n \"uptime\": \"00:15:46\",\n \"preference\": \"0\"\n }\n }\n }\n }\n }\n },\n \"mbest_num\": \"0\",\n \"ubest_num\": \"1\"\n }}}}}}\n }\n InterfaceOpsOutput_custom_info = {\n 'Ethernet2/1': {'access_vlan': 1,\n 'auto_negotiate': False,\n 'bandwidth': 768,\n 'counters': {'in_broadcast_pkts': 0,\n 'in_crc_errors': 0,\n 'in_errors': 0,\n 'in_mac_pause_frames': 0,\n 'in_multicast_pkts': 0,\n 'in_octets': 0,\n 'in_pkts': 0,\n 'in_unicast_pkts': 0,\n 'in_unknown_protos': 0,\n 'last_clear': 'never',\n 'out_broadcast_pkts': 0,\n 'out_discard': 0,\n 'out_errors': 0,\n 'out_mac_pause_frames': 0,\n 'out_multicast_pkts': 0,\n 'out_octets': 0,\n 'out_pkts': 0,\n 'out_unicast_pkts': 0,\n 'rate': {'in_rate': 0,\n 'in_rate_pkts': 0,\n 'load_interval': 0,\n 'out_rate': 0,\n 'out_rate_pkts': 0}},\n 'delay': 3330,\n 'description': 'desc',\n 'duplex_mode': 'full',\n 'enabled': True,\n 'encapsulation': {'encapsulation': 'arpa'},\n 'flow_control': {'receive': False, 'send': False},\n 'port_channel': {'port_channel_member': True,\n 'port_channel_member_intfs': ['Port-channel1']},\n 'ipv4': {'10.2.2.2/24': {'ip': '10.2.2.2',\n 'prefix_length': '24',\n 'origin': 'bgp',\n 'route_tag': '100',\n 'secondary': True},\n '10.3.3.3/24': {'ip': '10.3.3.3',\n 'prefix_length': '24',\n 'secondary': True},\n '10.4.4.4/24': {'ip': '10.4.4.4',\n 'prefix_length': '24',\n 'route_tag': '10',\n 'secondary': True},\n 'unnumbered': {'interface_ref': 'loopback0'}},\n 'mac_address': 'aaaa.bbbb.cccc',\n 'medium': 'broadcast',\n 'mtu': 1600,\n 'oper_status': 'up',\n 'port_speed': '1000',\n 'last_change': '00:00:29',\n 'phys_address': '5254.003b.4aca',\n 'switchport_mode': 'trunk',\n 'switchport_enable': True,\n 'trunk_vlans': '100,300',\n 'type': '10/100/1000 Ethernet',\n 'vrf': 'VRF1'}, }\n\n InterfaceOpsOutput_info = {\n 
'Ethernet2/1': {'auto_negotiate': False,\n 'bandwidth': 768,\n 'counters': {'in_broadcast_pkts': 0,\n 'in_crc_errors': 0,\n 'in_errors': 0,\n 'in_mac_pause_frames': 0,\n 'in_multicast_pkts': 0,\n 'in_octets': 0,\n 'in_pkts': 0,\n 'in_unicast_pkts': 0,\n 'in_unknown_protos': 0,\n 'last_clear': 'never',\n 'out_broadcast_pkts': 0,\n 'out_discard': 0,\n 'out_errors': 0,\n 'out_mac_pause_frames': 0,\n 'out_multicast_pkts': 0,\n 'out_octets': 0,\n 'out_pkts': 0,\n 'out_unicast_pkts': 0,\n 'rate': {'in_rate': 0,\n 'in_rate_pkts': 0,\n 'load_interval': 0,\n 'out_rate': 0,\n 'out_rate_pkts': 0}},\n 'delay': 3330,\n 'description': 'desc',\n 'duplex_mode': 'full',\n 'enabled': True,\n 'encapsulation': {'encapsulation': 'arpa'},\n 'flow_control': {'receive': False, 'send': False},\n 'port_channel': {'port_channel_member': True,\n 'port_channel_member_intfs': ['Port-channel1']},\n 'ipv4': {'10.2.2.2/24': {'ip': '10.2.2.2',\n 'prefix_length': '24',\n 'origin': 'bgp',\n 'route_tag': '100',\n 'secondary': True},\n '10.3.3.3/24': {'ip': '10.3.3.3',\n 'prefix_length': '24',\n 'secondary': True},\n '10.4.4.4/24': {'ip': '10.4.4.4',\n 'prefix_length': '24',\n 'route_tag': '10',\n 'secondary': True},\n 'unnumbered':{'interface_ref': 'loopback0'}},\n 'ipv6': {'2001:db8:1:1::1/64': {'ip': '2001:db8:1:1::1',\n 'prefix_length': '64',\n 'status': 'valid'},\n '2001:db8:2:2::2/64': {'anycast': True,\n 'ip': '2001:db8:2:2::2',\n 'prefix_length': '64',\n 'status': 'valid'},\n '2001:db8:3:3::3/64': {'ip': '2001:db8:3:3::3',\n 'prefix_length': '64',\n 'origin': 'direct',\n 'route_tag': '222',\n 'status': 'valid'},\n '2001:db8:4:4:a8aa:bbff:febb:cccc/64': {'ip': '2001:db8:4:4:a8aa:bbff:febb:cccc',\n 'prefix_length': '64',\n 'eui_64': True,\n 'origin': 'local',\n 'status': 'valid'}},\n 'mac_address': 'aaaa.bbbb.cccc',\n 'medium': 'broadcast',\n 'mtu': 1600,\n 'oper_status': 'up',\n 'port_speed': '1000',\n 'last_change': '00:00:29',\n 'phys_address': '5254.003b.4aca',\n 'type': '10/100/1000 Ethernet',\n 'vrf': 'VRF1'},\n 'Ethernet2/1.10': {'bandwidth': 768,\n 'delay': 10,\n 'enabled': False,\n 'encapsulation': {'encapsulation': 'dot1q',\n 'first_dot1q': '10'},\n 'mac_address': '5254.003b.4af8',\n 'medium': 'broadcast',\n 'mtu': 1600,\n 'oper_status': 'down',\n 'phys_address': '5254.003b.4aca',\n 'port_channel': {'port_channel_member': False},\n 'type': '10/100/1000 Ethernet',\n 'vlan_id': '10',\n 'vrf': 'default'},\n 'Ethernet2/1.20': {'bandwidth': 768,\n 'delay': 10,\n 'enabled': True,\n 'encapsulation': {'encapsulation': 'dot1q',\n 'first_dot1q': '20'},\n 'mac_address': '5254.003b.4af8',\n 'medium': 'p2p',\n 'mtu': 1600,\n 'oper_status': 'up',\n 'phys_address': '5254.003b.4aca',\n 'port_channel': {'port_channel_member': False},\n 'type': '10/100/1000 Ethernet',\n 'vlan_id': '20',\n 'vrf': 'default'},\n 'Ethernet2/10': {'vrf': 'default'},\n 'Ethernet2/11': {'vrf': 'default'},\n 'Ethernet2/12': {'vrf': 'default'},\n 'Ethernet2/13': {'vrf': 'default'},\n 'Ethernet2/14': {'vrf': 'default'},\n 'Ethernet2/15': {'vrf': 'default'},\n 'Ethernet2/16': {'vrf': 'default'},\n 'Ethernet2/17': {'vrf': 'default'},\n 'Ethernet2/18': {'vrf': 'default'},\n 'Ethernet2/19': {'vrf': 'default'},\n 'Ethernet2/2': {'access_vlan': 1,\n 'auto_negotiate': False,\n 'bandwidth': 1000000,\n 'counters': {'in_broadcast_pkts': 0,\n 'in_crc_errors': 0,\n 'in_errors': 0,\n 'in_mac_pause_frames': 0,\n 'in_multicast_pkts': 0,\n 'in_octets': 0,\n 'in_pkts': 0,\n 'in_unicast_pkts': 0,\n 'in_unknown_protos': 0,\n 'last_clear': 'never',\n 'out_broadcast_pkts': 
0,\n 'out_discard': 0,\n 'out_errors': 0,\n 'out_mac_pause_frames': 0,\n 'out_multicast_pkts': 0,\n 'out_octets': 0,\n 'out_pkts': 0,\n 'out_unicast_pkts': 0,\n 'rate': {'in_rate': 0,\n 'in_rate_pkts': 0,\n 'load_interval': 0,\n 'out_rate': 0,\n 'out_rate_pkts': 0}},\n 'delay': 10,\n 'duplex_mode': 'full',\n 'enabled': True,\n 'encapsulation': {'encapsulation': 'arpa'},\n 'flow_control': {'receive': False, 'send': False},\n 'port_channel': {'port_channel_member': True,\n 'port_channel_member_intfs': ['Port-channel1']},\n 'mac_address': '5254.00ac.b52e',\n 'medium': 'broadcast',\n 'mtu': 1500,\n 'oper_status': 'up',\n 'phys_address': '5254.00ac.b52e',\n 'port_speed': '1000',\n 'last_change': '00:07:28',\n 'switchport_mode': 'trunk',\n 'switchport_enable': True,\n 'trunk_vlans': '100,300',\n 'type': '10/100/1000 Ethernet'},\n 'Ethernet2/20': {'vrf': 'default'},\n 'Ethernet2/21': {'vrf': 'default'},\n 'Ethernet2/22': {'vrf': 'default'},\n 'Ethernet2/23': {'vrf': 'default'},\n 'Ethernet2/24': {'vrf': 'default'},\n 'Ethernet2/25': {'vrf': 'default'},\n 'Ethernet2/26': {'vrf': 'default'},\n 'Ethernet2/27': {'vrf': 'default'},\n 'Ethernet2/28': {'vrf': 'default'},\n 'Ethernet2/29': {'vrf': 'default'},\n 'Ethernet2/3': {'access_vlan': 100,\n 'switchport_mode': 'access',\n 'switchport_enable': True,\n 'trunk_vlans': '1-4094'},\n 'Ethernet2/30': {'vrf': 'default'},\n 'Ethernet2/31': {'vrf': 'default'},\n 'Ethernet2/32': {'vrf': 'default'},\n 'Ethernet2/33': {'vrf': 'default'},\n 'Ethernet2/34': {'vrf': 'default'},\n 'Ethernet2/35': {'vrf': 'default'},\n 'Ethernet2/36': {'vrf': 'default'},\n 'Ethernet2/37': {'vrf': 'default'},\n 'Ethernet2/38': {'vrf': 'default'},\n 'Ethernet2/39': {'vrf': 'default'},\n 'Ethernet2/4': {'vrf': 'default'},\n 'Ethernet2/40': {'vrf': 'default'},\n 'Ethernet2/41': {'vrf': 'default'},\n 'Ethernet2/42': {'vrf': 'default'},\n 'Ethernet2/43': {'vrf': 'default'},\n 'Ethernet2/44': {'vrf': 'default'},\n 'Ethernet2/45': {'vrf': 'default'},\n 'Ethernet2/46': {'vrf': 'default'},\n 'Ethernet2/47': {'vrf': 'default'},\n 'Ethernet2/48': {'vrf': 'default'},\n 'Ethernet2/5': {'vrf': 'default'},\n 'Ethernet2/6': {'vrf': 'default'},\n 'Ethernet2/7': {'vrf': 'default'},\n 'Ethernet2/8': {'vrf': 'default'},\n 'Ethernet2/9': {'vrf': 'default'},\n 'Ethernet3/1': {'vrf': 'default'},\n 'Ethernet3/10': {'vrf': 'default'},\n 'Ethernet3/11': {'vrf': 'default'},\n 'Ethernet3/12': {'vrf': 'default'},\n 'Ethernet3/13': {'vrf': 'default'},\n 'Ethernet3/14': {'vrf': 'default'},\n 'Ethernet3/15': {'vrf': 'default'},\n 'Ethernet3/16': {'vrf': 'default'},\n 'Ethernet3/17': {'vrf': 'default'},\n 'Ethernet3/18': {'vrf': 'default'},\n 'Ethernet3/19': {'vrf': 'default'},\n 'Ethernet3/2': {'vrf': 'default'},\n 'Ethernet3/20': {'vrf': 'default'},\n 'Ethernet3/21': {'vrf': 'default'},\n 'Ethernet3/22': {'vrf': 'default'},\n 'Ethernet3/23': {'vrf': 'default'},\n 'Ethernet3/24': {'vrf': 'default'},\n 'Ethernet3/25': {'vrf': 'default'},\n 'Ethernet3/26': {'vrf': 'default'},\n 'Ethernet3/27': {'vrf': 'default'},\n 'Ethernet3/28': {'vrf': 'default'},\n 'Ethernet3/29': {'vrf': 'default'},\n 'Ethernet3/3': {'vrf': 'default'},\n 'Ethernet3/30': {'vrf': 'default'},\n 'Ethernet3/31': {'vrf': 'default'},\n 'Ethernet3/32': {'vrf': 'default'},\n 'Ethernet3/33': {'vrf': 'default'},\n 'Ethernet3/34': {'vrf': 'default'},\n 'Ethernet3/35': {'vrf': 'default'},\n 'Ethernet3/36': {'vrf': 'default'},\n 'Ethernet3/37': {'vrf': 'default'},\n 'Ethernet3/38': {'vrf': 'default'},\n 'Ethernet3/39': {'vrf': 'default'},\n 'Ethernet3/4': 
{'vrf': 'default'},\n 'Ethernet3/40': {'vrf': 'default'},\n 'Ethernet3/41': {'vrf': 'default'},\n 'Ethernet3/42': {'vrf': 'default'},\n 'Ethernet3/43': {'vrf': 'default'},\n 'Ethernet3/44': {'vrf': 'default'},\n 'Ethernet3/45': {'vrf': 'default'},\n 'Ethernet3/46': {'vrf': 'default'},\n 'Ethernet3/47': {'vrf': 'default'},\n 'Ethernet3/48': {'vrf': 'default'},\n 'Ethernet3/5': {'vrf': 'default'},\n 'Ethernet3/6': {'vrf': 'default'},\n 'Ethernet3/7': {'vrf': 'default'},\n 'Ethernet3/8': {'vrf': 'default'},\n 'Ethernet3/9': {'vrf': 'default'},\n 'Ethernet4/1': {'vrf': 'default'},\n 'Ethernet4/10': {'vrf': 'default'},\n 'Ethernet4/11': {'vrf': 'default'},\n 'Ethernet4/12': {'vrf': 'default'},\n 'Ethernet4/13': {'vrf': 'default'},\n 'Ethernet4/14': {'vrf': 'default'},\n 'Ethernet4/15': {'vrf': 'default'},\n 'Ethernet4/16': {'vrf': 'default'},\n 'Ethernet4/17': {'vrf': 'default'},\n 'Ethernet4/18': {'vrf': 'default'},\n 'Ethernet4/19': {'vrf': 'default'},\n 'Ethernet4/2': {'vrf': 'default'},\n 'Ethernet4/20': {'vrf': 'default'},\n 'Ethernet4/21': {'vrf': 'default'},\n 'Ethernet4/22': {'vrf': 'default'},\n 'Ethernet4/23': {'vrf': 'default'},\n 'Ethernet4/24': {'vrf': 'default'},\n 'Ethernet4/25': {'vrf': 'default'},\n 'Ethernet4/26': {'vrf': 'default'},\n 'Ethernet4/27': {'vrf': 'default'},\n 'Ethernet4/28': {'vrf': 'default'},\n 'Ethernet4/29': {'vrf': 'default'},\n 'Ethernet4/3': {'vrf': 'default'},\n 'Ethernet4/30': {'vrf': 'default'},\n 'Ethernet4/31': {'vrf': 'default'},\n 'Ethernet4/32': {'vrf': 'default'},\n 'Ethernet4/33': {'vrf': 'default'},\n 'Ethernet4/34': {'vrf': 'default'},\n 'Ethernet4/35': {'vrf': 'default'},\n 'Ethernet4/36': {'vrf': 'default'},\n 'Ethernet4/37': {'vrf': 'default'},\n 'Ethernet4/38': {'vrf': 'default'},\n 'Ethernet4/39': {'vrf': 'default'},\n 'Ethernet4/4': {'vrf': 'default'},\n 'Ethernet4/40': {'vrf': 'default'},\n 'Ethernet4/41': {'vrf': 'default'},\n 'Ethernet4/42': {'vrf': 'default'},\n 'Ethernet4/43': {'vrf': 'default'},\n 'Ethernet4/44': {'vrf': 'default'},\n 'Ethernet4/45': {'vrf': 'default'},\n 'Ethernet4/46': {'vrf': 'default'},\n 'Ethernet4/47': {'vrf': 'default'},\n 'Ethernet4/48': {'vrf': 'default'},\n 'Ethernet4/5': {'vrf': 'default'},\n 'Ethernet4/6': {'vrf': 'default'},\n 'Ethernet4/7': {'vrf': 'default'},\n 'Ethernet4/8': {'vrf': 'default'},\n 'Ethernet4/9': {'vrf': 'default'},\n 'Null0': {'vrf': 'default'},\n 'Mgmt0': \n {'auto_negotiate': True,\n 'bandwidth': 1000000,\n 'counters': \n {'in_broadcast_pkts': 4,\n 'in_multicast_pkts': 2,\n 'in_octets': 4726,\n 'in_pkts': 2,\n 'in_unicast_pkts': 0,\n 'rate':\n {'in_rate': 0,\n 'in_rate_pkts': 0,\n 'load_interval': 1,\n 'out_rate': 24,\n 'out_rate_pkts': 0}},\n 'delay': 10,\n 'duplex_mode': 'full',\n 'enabled': True,\n 'encapsulation': {'encapsulation': 'arpa'},\n 'mac_address': '5254.00c9.d26e',\n 'medium': 'broadcast',\n 'mtu': 1500,\n 'oper_status': 'up',\n 'phys_address': '5254.00c9.d26e',\n 'port_channel': {'port_channel_member': False},\n 'port_speed': '1000',\n 'type': 'Ethernet',\n 'vrf': 'management'}}\n", "id": "4921024", "language": "Python", "matching_score": 6.29628849029541, "max_stars_count": 0, "path": "pkgs/ops-pkg/src/genie/libs/ops/interface/nxos/tests/interface_output.py" }, { "content": "''' \nArp Genie Ops Object Outputs for NXOS.\n'''\n\n\nclass ArpOutput(object):\n\n\tShowIpArpDetailVrfAll = {\n 'interfaces': {\n 'Ethernet1/1': {\n 'ipv4': {\n 'neighbors': {\n '10.1.3.5': {\n 'age': '-',\n 'ip': '10.1.3.5',\n 'link_layer_address': 'aaaa.bbbb.cccc',\n 'origin': 'static',\n 
'physical_interface': 'Ethernet1/1'}\n }\n }\n },\n 'Ethernet1/1.1': {\n 'ipv4': {\n 'neighbors': {\n '192.168.4.2': {\n 'age': '00:01:53',\n 'ip': '192.168.4.2',\n 'link_layer_address': '000c.292a.1eaf',\n 'origin': 'dynamic',\n 'physical_interface': 'Ethernet1/1.1'}\n }\n }\n },\n 'Ethernet1/1.2': {\n 'ipv4': {\n 'neighbors': {\n '192.168.154.2': {\n 'age': '00:00:47',\n 'ip': '192.168.154.2',\n 'link_layer_address': '000c.292a.1eaf',\n 'origin': 'dynamic',\n 'physical_interface': 'Ethernet1/1.2'}\n }\n }\n },\n 'Ethernet1/1.4': {\n 'ipv4': {\n 'neighbors': {\n '192.168.106.2': {\n 'age': '00:08:42',\n 'ip': '192.168.106.2',\n 'link_layer_address': '000c.292a.1eaf',\n 'origin': 'dynamic',\n 'physical_interface': 'Ethernet1/1.4'}\n }\n }\n },\n 'Ethernet1/2.1': {\n 'ipv4': {\n 'neighbors': {\n '192.168.154.2': {\n 'age': '00:18:24',\n 'ip': '192.168.154.2',\n 'link_layer_address': '000c.2904.5840',\n 'origin': 'dynamic',\n 'physical_interface': 'Ethernet1/2.1'}\n }\n }\n },\n 'Ethernet1/2.2': {\n 'ipv4': {\n 'neighbors': {\n '192.168.51.2': {\n 'age': '00:05:21',\n 'ip': '192.168.51.2',\n 'link_layer_address': '000c.2904.5840',\n 'origin': 'dynamic',\n 'physical_interface': 'Ethernet1/2.2'}\n }\n }\n },\n 'Ethernet1/2.4': {\n 'ipv4': {\n 'neighbors': {\n '192.168.9.2': {\n 'age': '00:10:51',\n 'ip': '192.168.9.2',\n 'link_layer_address': '000c.2904.5840',\n 'origin': 'dynamic',\n 'physical_interface': 'Ethernet1/2.4'}\n }\n }\n },\n 'Ethernet1/4.100': {\n 'ipv4': {\n 'neighbors': {\n '10.51.1.101': {\n 'age': '00:01:28',\n 'ip': '10.51.1.101',\n 'link_layer_address': '0000.71c7.6e61',\n 'origin': 'dynamic',\n 'physical_interface': 'Ethernet1/4.100'}\n }\n }\n },\n 'Ethernet1/4.101': {\n 'ipv4': {\n 'neighbors': {\n '10.154.1.101': {\n 'age': '00:01:28',\n 'ip': '10.154.1.101',\n 'link_layer_address': '0000.71c7.75c1',\n 'origin': 'dynamic',\n 'physical_interface': 'Ethernet1/4.101'}\n }\n }\n },\n 'Ethernet1/4.200': {\n 'ipv4': {\n 'neighbors': {\n '10.76.1.101': {\n 'age': '00:01:28',\n 'ip': '10.76.1.101',\n 'link_layer_address': '0000.0068.ce6f',\n 'origin': 'dynamic',\n 'physical_interface': 'Ethernet1/4.200'}\n }\n }\n },\n 'mgmt0': {\n 'ipv4': {\n 'neighbors': {\n '10.1.7.1': {\n 'age': '00:17:15',\n 'ip': '10.1.7.1',\n 'link_layer_address': '0012.7f57.ac80',\n 'origin': 'dynamic',\n 'physical_interface': 'mgmt0'},\n '10.1.7.250': {\n 'age': '00:14:24',\n 'ip': '10.1.7.250',\n 'link_layer_address': '0050.5682.7915',\n 'origin': 'dynamic',\n 'physical_interface': 'mgmt0'},\n '10.1.7.253': {\n 'age': '00:10:22',\n 'ip': '10.1.7.253',\n 'link_layer_address': '0050.56a4.a9fc',\n 'origin': 'dynamic',\n 'physical_interface': 'mgmt0'}\n }\n }\n }\n }\n }\n\n\tShowIpArpSummaryVrfAll = {\n 'incomplete': 0,\n 'throttled': 0,\n 'resolved': 12,\n 'total': 12,\n 'unknown': 0}\n\n\tShowIpArpstatisticsVrfAll = {\n 'statistics': {\n 'adjacency': {\n 'adjacency_adds': 43,\n 'adjacency_deletes': 12,\n 'adjacency_timeouts': 12,\n 'failed_due_to_limits': 0},\n 'received': {\n 'anycast_proxy_arp': 0,\n 'dropped': 28218,\n 'dropped_server_port': 0,\n 'drops_details': {\n 'appeared_on_a_wrong_interface': 0,\n 'arp_refresh_requests_received_from_clients': 0,\n 'context_not_created': 0,\n 'directed_broadcast_source': 0,\n 'dropping_due_to_tunneling_failures': 0,\n 'glean_requests_recv_count': 71,\n 'grat_arp_received_on_proxy': 0,\n 'incorrect_length': 0,\n 'invalid_context': 0,\n 'invalid_destination_ip_address': 0,\n 'invalid_hardwaretype': 0,\n 'invalid_layer2_address_length': 0,\n 
'invalid_layer3_address_length': 0,\n 'invalid_protocol_packet': 0,\n 'invalid_source_ip_address': 28,\n 'invalid_source_mac_address': 0,\n 'l2_packet_on_untrusted_l2_port': 0,\n 'l2fm_query_failed_for_a_l2address': 0,\n 'no_mem_to_create_per_intf_structure': 0,\n 'non_active_fhrp_dest_ip': 0,\n 'non_local_destination_ip_address': 20421,\n 'number_of_signals_received_from_l2rib': 0,\n 'packet_with_vip_on_standby_fhrp': 0,\n 'received_before_arp_initialization': 0,\n 'requests_came_for_exising_entries': 15,\n 'requests_came_on_a_l2_interface': 0,\n 'source_address_mismatch_with_subnet': 0,\n 'source_mac_address_is_our_own': 0},\n 'enhanced_proxy_arp': 0,\n 'fastpath': 0,\n 'l2_port_track_proxy_arp': 0,\n 'l2_replies': 0,\n 'l2_requests': 0,\n 'local_proxy_arp': 0,\n 'proxy_arp': 0,\n 'replies': 6582,\n 'requests': 22632,\n 'snooped': 0,\n 'total': 0,\n 'tunneled': 0},\n 'sent': {\n 'dropped': 0,\n 'drops_details': {\n 'adjacency_couldnt_be_added': 0,\n 'arp_refresh_skipped_over_core_and_flooded': 0,\n 'client_enqueue_failed': 0,\n 'context_not_created': 0,\n 'dest_not_reachable_for_proxy_arp': 0,\n 'dest_unreachable_for_enhanced_proxy': 0,\n 'destnination_is_our_own_ip': 26,\n 'destnination_on_l2_port_tracked': 0,\n 'invalid_context': 0,\n 'invalid_dest_ip': 0,\n 'invalid_ifindex': 0,\n 'invalid_local_proxy_arp': 0,\n 'invalid_proxy_arp': 0,\n 'invalid_src_ip': 0,\n 'mbuf_operation_failed': 0,\n 'null_source_ip': 0,\n 'null_source_mac': 0,\n 'unattached_ip': 0,\n 'vip_is_not_active': 0},\n 'gratuitous': 58,\n 'l2_replies': 0,\n 'l2_requests': 0,\n 'replies': 998,\n 'requests': 2102,\n 'total': 3158,\n 'tunneled': 0}\n }\n }\n\n\tShowIpInterfaceVrfAll = {\n \"Ethernet2/11\": {\n \"icmp_port_unreachable\": \"enabled\",\n \"multicast_groups_address\": \"none\",\n \"proxy_arp\": \"disabled\",\n \"interface_status\": \"protocol-down/link-down/admin-down\",\n \"load_sharing\": \"none\",\n \"ipv4\": {\n \"counters\": {\n \"multicast_bytes_received\": 0,\n \"labeled_packets_forwarded\": 0,\n \"multicast_bytes_sent\": 0,\n \"unicast_bytes_sent\": 0,\n \"labeled_packets_received\": 0,\n \"labeled_packets_originated\": 0,\n \"multicast_bytes_consumed\": 0,\n \"multicast_packets_sent\": 0,\n \"unicast_bytes_consumed\": 0,\n \"broadcast_packets_originated\": 0,\n \"multicast_packets_originated\": 0,\n \"multicast_bytes_originated\": 0,\n \"multicast_packets_received\": 0,\n \"multicast_packets_consumed\": 0,\n \"broadcast_packets_forwarded\": 0,\n \"broadcast_bytes_originated\": 0,\n \"labeled_bytes_originated\": 0,\n \"broadcast_bytes_consumed\": 0,\n \"broadcast_packets_sent\": 0,\n \"labeled_packets_consumed\": 0,\n \"unicast_packets_consumed\": 0,\n \"labeled_bytes_forwarded\": 0,\n \"broadcast_packets_consumed\": 0,\n \"unicast_packets_sent\": 0,\n \"broadcast_bytes_received\": 0,\n \"labeled_packets_sent\": 0,\n \"labeled_bytes_consumed\": 0,\n \"unicast_bytes_received\": 0,\n \"multicast_bytes_forwarded\": 0,\n \"multicast_packets_forwarded\": 0,\n \"unicast_packets_forwarded\": 0,\n \"unicast_packets_received\": 0,\n \"broadcast_packets_received\": 0,\n \"broadcast_bytes_sent\": 0,\n \"broadcast_bytes_forwarded\": 0,\n \"labeled_bytes_sent\": 0,\n \"unicast_bytes_forwarded\": 0,\n \"unicast_packets_originated\": 0,\n \"labeled_bytes_received\": 0,\n \"unicast_bytes_originated\": 0\n },\n \"10.64.4.4/24\": {\n \"ip\": \"10.64.4.4\",\n \"prefix_length\": \"24\",\n \"broadcast_address\": \"255.255.255.255\",\n \"secondary\": False,\n \"ip_subnet\": \"10.64.4.0\"\n },\n \"unnumbered\": {\n 
\"interface_ref\": \"loopback0\"\n }\n },\n \"icmp_unreachable\": \"disabled\",\n \"wccp_redirect_inbound\": \"disabled\",\n \"unicast_reverse_path\": \"none\",\n \"icmp_redirects\": \"enabled\",\n \"multicast_routing\": \"disabled\",\n \"wccp_redirect_outbound\": \"disabled\",\n \"iod\": 46,\n \"directed_broadcast\": \"disabled\",\n \"ip_mtu\": 1500,\n \"vrf\": \"default\",\n \"local_proxy_arp\": \"disabled\",\n \"ip_forwarding\": \"disabled\",\n \"int_stat_last_reset\": \"never\",\n \"wccp_redirect_exclude\": \"disabled\"\n },\n \"loopback0\": {\n \"icmp_port_unreachable\": \"enabled\",\n \"multicast_groups_address\": \"none\",\n \"proxy_arp\": \"disabled\",\n \"interface_status\": \"protocol-up/link-up/admin-up\",\n \"load_sharing\": \"none\",\n \"ipv4\": {\n \"counters\": {\n \"multicast_bytes_received\": 0,\n \"labeled_packets_forwarded\": 0,\n \"multicast_bytes_sent\": 0,\n \"unicast_bytes_sent\": 0,\n \"labeled_packets_received\": 0,\n \"labeled_packets_originated\": 0,\n \"multicast_bytes_consumed\": 0,\n \"multicast_packets_sent\": 0,\n \"unicast_bytes_consumed\": 5612014,\n \"broadcast_packets_originated\": 0,\n \"multicast_packets_originated\": 0,\n \"multicast_bytes_originated\": 0,\n \"multicast_packets_received\": 0,\n \"multicast_packets_consumed\": 0,\n \"broadcast_packets_forwarded\": 0,\n \"broadcast_bytes_originated\": 0,\n \"labeled_bytes_originated\": 0,\n \"broadcast_bytes_consumed\": 0,\n \"broadcast_packets_sent\": 0,\n \"labeled_packets_consumed\": 0,\n \"unicast_packets_consumed\": 92391,\n \"labeled_bytes_forwarded\": 0,\n \"broadcast_packets_consumed\": 0,\n \"unicast_packets_sent\": 0,\n \"broadcast_bytes_received\": 0,\n \"labeled_packets_sent\": 0,\n \"labeled_bytes_consumed\": 0,\n \"unicast_bytes_received\": 0,\n \"multicast_bytes_forwarded\": 0,\n \"multicast_packets_forwarded\": 0,\n \"unicast_packets_forwarded\": 0,\n \"unicast_packets_received\": 0,\n \"broadcast_packets_received\": 0,\n \"broadcast_bytes_sent\": 0,\n \"broadcast_bytes_forwarded\": 0,\n \"labeled_bytes_sent\": 0,\n \"unicast_bytes_forwarded\": 0,\n \"unicast_packets_originated\": 0,\n \"labeled_bytes_received\": 0,\n \"unicast_bytes_originated\": 0\n },\n \"10.64.4.4/24\": {\n \"route_preference\": \"0\",\n \"prefix_length\": \"24\",\n \"broadcast_address\": \"255.255.255.255\",\n \"ip_subnet\": \"10.64.4.0\",\n \"ip\": \"10.64.4.4\",\n \"secondary\": False,\n \"route_tag\": \"0\"\n }\n },\n \"icmp_unreachable\": \"disabled\",\n \"wccp_redirect_inbound\": \"disabled\",\n \"unicast_reverse_path\": \"none\",\n \"icmp_redirects\": \"enabled\",\n \"multicast_routing\": \"disabled\",\n \"wccp_redirect_outbound\": \"disabled\",\n \"iod\": 180,\n \"directed_broadcast\": \"disabled\",\n \"ip_mtu\": 1500,\n \"vrf\": \"default\",\n \"local_proxy_arp\": \"disabled\",\n \"ip_forwarding\": \"disabled\",\n \"int_stat_last_reset\": \"never\",\n \"wccp_redirect_exclude\": \"disabled\"\n },\n \"Ethernet2/1\": {\n \"icmp_port_unreachable\": \"enabled\",\n \"load_sharing\": \"none\",\n \"proxy_arp\": \"disabled\",\n \"interface_status\": \"protocol-up/link-up/admin-up\",\n \"ipv4\": {\n \"counters\": {\n \"multicast_bytes_received\": 13421700,\n \"labeled_packets_forwarded\": 0,\n \"multicast_bytes_sent\": 17167084,\n \"unicast_bytes_sent\": 9499793,\n \"labeled_packets_received\": 0,\n \"labeled_packets_originated\": 0,\n \"multicast_bytes_consumed\": 13421700,\n \"multicast_packets_sent\": 208673,\n \"unicast_bytes_consumed\": 2804558,\n \"broadcast_packets_originated\": 0,\n 
\"multicast_packets_originated\": 208673,\n \"multicast_bytes_originated\": 17167084,\n \"multicast_packets_received\": 208601,\n \"multicast_packets_consumed\": 417202,\n \"broadcast_packets_forwarded\": 0,\n \"broadcast_bytes_originated\": 0,\n \"labeled_bytes_originated\": 0,\n \"broadcast_bytes_consumed\": 0,\n \"broadcast_packets_sent\": 0,\n \"labeled_packets_consumed\": 0,\n \"unicast_packets_consumed\": 46150,\n \"labeled_bytes_forwarded\": 0,\n \"broadcast_packets_consumed\": 0,\n \"unicast_packets_sent\": 53942,\n \"broadcast_bytes_received\": 0,\n \"labeled_packets_sent\": 0,\n \"labeled_bytes_consumed\": 0,\n \"unicast_bytes_received\": 2803426,\n \"multicast_bytes_forwarded\": 0,\n \"multicast_packets_forwarded\": 0,\n \"unicast_packets_forwarded\": 0,\n \"unicast_packets_received\": 46139,\n \"broadcast_packets_received\": 0,\n \"broadcast_bytes_sent\": 0,\n \"broadcast_bytes_forwarded\": 0,\n \"labeled_bytes_sent\": 0,\n \"unicast_bytes_forwarded\": 0,\n \"unicast_packets_originated\": 53942,\n \"labeled_bytes_received\": 0,\n \"unicast_bytes_originated\": 9499793\n },\n \"10.3.4.4/24\": {\n \"route_preference\": \"0\",\n \"prefix_length\": \"24\",\n \"broadcast_address\": \"255.255.255.255\",\n \"ip_subnet\": \"10.3.4.0\",\n \"ip\": \"10.3.4.4\",\n \"secondary\": False,\n \"route_tag\": \"0\"\n }\n },\n \"icmp_unreachable\": \"disabled\",\n \"wccp_redirect_inbound\": \"disabled\",\n \"unicast_reverse_path\": \"none\",\n \"icmp_redirects\": \"enabled\",\n \"multicast_routing\": \"disabled\",\n \"wccp_redirect_outbound\": \"disabled\",\n \"iod\": 36,\n \"directed_broadcast\": \"disabled\",\n \"ip_mtu\": 1500,\n \"vrf\": \"default\",\n \"local_proxy_arp\": \"disabled\",\n \"wccp_redirect_exclude\": \"disabled\",\n \"ip_forwarding\": \"disabled\",\n \"int_stat_last_reset\": \"never\",\n \"multicast_groups\": [\n \"192.168.3.11\",\n \"192.168.3.11\",\n \"172.16.17.32\"\n ]\n },\n \"Ethernet2/10.12\": {\n \"icmp_port_unreachable\": \"enabled\",\n \"multicast_groups_address\": \"none\",\n \"proxy_arp\": \"disabled\",\n \"interface_status\": \"protocol-down/link-down/admin-down\",\n \"load_sharing\": \"none\",\n \"ipv4\": {\n \"counters\": {\n \"multicast_bytes_received\": 0,\n \"labeled_packets_forwarded\": 0,\n \"multicast_bytes_sent\": 0,\n \"unicast_bytes_sent\": 0,\n \"labeled_packets_received\": 0,\n \"labeled_packets_originated\": 0,\n \"multicast_bytes_consumed\": 0,\n \"multicast_packets_sent\": 0,\n \"unicast_bytes_consumed\": 0,\n \"broadcast_packets_originated\": 0,\n \"multicast_packets_originated\": 0,\n \"multicast_bytes_originated\": 0,\n \"multicast_packets_received\": 0,\n \"multicast_packets_consumed\": 0,\n \"broadcast_packets_forwarded\": 0,\n \"broadcast_bytes_originated\": 0,\n \"labeled_bytes_originated\": 0,\n \"broadcast_bytes_consumed\": 0,\n \"broadcast_packets_sent\": 0,\n \"labeled_packets_consumed\": 0,\n \"unicast_packets_consumed\": 0,\n \"labeled_bytes_forwarded\": 0,\n \"broadcast_packets_consumed\": 0,\n \"unicast_packets_sent\": 0,\n \"broadcast_bytes_received\": 0,\n \"labeled_packets_sent\": 0,\n \"labeled_bytes_consumed\": 0,\n \"unicast_bytes_received\": 0,\n \"multicast_bytes_forwarded\": 0,\n \"multicast_packets_forwarded\": 0,\n \"unicast_packets_forwarded\": 0,\n \"unicast_packets_received\": 0,\n \"broadcast_packets_received\": 0,\n \"broadcast_bytes_sent\": 0,\n \"broadcast_bytes_forwarded\": 0,\n \"labeled_bytes_sent\": 0,\n \"unicast_bytes_forwarded\": 0,\n \"unicast_packets_originated\": 0,\n \"labeled_bytes_received\": 0,\n 
\"unicast_bytes_originated\": 0\n },\n \"10.66.12.12/24\": {\n \"route_preference\": \"0\",\n \"prefix_length\": \"24\",\n \"broadcast_address\": \"255.255.255.255\",\n \"ip_subnet\": \"10.66.12.0\",\n \"ip\": \"10.66.12.12\",\n \"secondary\": False,\n \"route_tag\": \"0\"\n }\n },\n \"icmp_unreachable\": \"disabled\",\n \"wccp_redirect_inbound\": \"disabled\",\n \"unicast_reverse_path\": \"none\",\n \"icmp_redirects\": \"enabled\",\n \"multicast_routing\": \"disabled\",\n \"wccp_redirect_outbound\": \"disabled\",\n \"iod\": 184,\n \"directed_broadcast\": \"disabled\",\n \"ip_mtu\": 1500,\n \"vrf\": \"default\",\n \"local_proxy_arp\": \"disabled\",\n \"ip_forwarding\": \"disabled\",\n \"int_stat_last_reset\": \"never\",\n \"wccp_redirect_exclude\": \"disabled\"\n },\n \"Ethernet2/12\": {\n \"icmp_port_unreachable\": \"enabled\",\n \"multicast_groups_address\": \"none\",\n \"proxy_arp\": \"disabled\",\n \"interface_status\": \"protocol-down/link-down/admin-down\",\n \"load_sharing\": \"none\",\n \"ipv4\": {\n \"counters\": {\n \"multicast_bytes_received\": 0,\n \"labeled_packets_forwarded\": 0,\n \"multicast_bytes_sent\": 0,\n \"unicast_bytes_sent\": 0,\n \"labeled_packets_received\": 0,\n \"labeled_packets_originated\": 0,\n \"multicast_bytes_consumed\": 0,\n \"multicast_packets_sent\": 0,\n \"unicast_bytes_consumed\": 0,\n \"broadcast_packets_originated\": 0,\n \"multicast_packets_originated\": 0,\n \"multicast_bytes_originated\": 0,\n \"multicast_packets_received\": 0,\n \"multicast_packets_consumed\": 0,\n \"broadcast_packets_forwarded\": 0,\n \"broadcast_bytes_originated\": 0,\n \"labeled_bytes_originated\": 0,\n \"broadcast_bytes_consumed\": 0,\n \"broadcast_packets_sent\": 0,\n \"labeled_packets_consumed\": 0,\n \"unicast_packets_consumed\": 0,\n \"labeled_bytes_forwarded\": 0,\n \"broadcast_packets_consumed\": 0,\n \"unicast_packets_sent\": 0,\n \"broadcast_bytes_received\": 0,\n \"labeled_packets_sent\": 0,\n \"labeled_bytes_consumed\": 0,\n \"unicast_bytes_received\": 0,\n \"multicast_bytes_forwarded\": 0,\n \"multicast_packets_forwarded\": 0,\n \"unicast_packets_forwarded\": 0,\n \"unicast_packets_received\": 0,\n \"broadcast_packets_received\": 0,\n \"broadcast_bytes_sent\": 0,\n \"broadcast_bytes_forwarded\": 0,\n \"labeled_bytes_sent\": 0,\n \"unicast_bytes_forwarded\": 0,\n \"unicast_packets_originated\": 0,\n \"labeled_bytes_received\": 0,\n \"unicast_bytes_originated\": 0\n },\n \"10.66.12.12/24\": {\n \"ip\": \"10.66.12.12\",\n \"prefix_length\": \"24\",\n \"broadcast_address\": \"255.255.255.255\",\n \"secondary\": False,\n \"ip_subnet\": \"10.66.12.0\"\n },\n \"unnumbered\": {\n \"interface_ref\": \"Ethernet2/10.12\"\n }\n },\n \"icmp_unreachable\": \"disabled\",\n \"wccp_redirect_inbound\": \"disabled\",\n \"unicast_reverse_path\": \"none\",\n \"icmp_redirects\": \"enabled\",\n \"multicast_routing\": \"disabled\",\n \"wccp_redirect_outbound\": \"disabled\",\n \"iod\": 47,\n \"directed_broadcast\": \"disabled\",\n \"ip_mtu\": 1500,\n \"vrf\": \"default\",\n \"local_proxy_arp\": \"disabled\",\n \"ip_forwarding\": \"disabled\",\n \"int_stat_last_reset\": \"never\",\n \"wccp_redirect_exclude\": \"disabled\"}\n }\n\n\tArp_info = {\n 'interfaces': {\n 'Ethernet1/1': {\n 'ipv4': {\n 'neighbors': {\n '10.1.3.5': {\n 'ip': '10.1.3.5',\n 'link_layer_address': 'aaaa.bbbb.cccc',\n 'origin': 'static'}\n }\n }\n },\n 'Ethernet1/1.1': {\n 'ipv4': {\n 'neighbors': {\n '192.168.4.2': {\n 'ip': '192.168.4.2',\n 'link_layer_address': '000c.292a.1eaf',\n 'origin': 'dynamic'}\n }\n }\n },\n 
'Ethernet1/1.2': {\n 'ipv4': {\n 'neighbors': {\n '192.168.154.2': {\n 'ip': '192.168.154.2',\n 'link_layer_address': '000c.292a.1eaf',\n 'origin': 'dynamic'}\n }\n }\n },\n 'Ethernet1/1.4': {\n 'ipv4': {\n 'neighbors': {\n '192.168.106.2': {\n 'ip': '192.168.106.2',\n 'link_layer_address': '000c.292a.1eaf',\n 'origin': 'dynamic'}\n }\n }\n },\n 'Ethernet1/2.1': {\n 'ipv4': {\n 'neighbors': {\n '192.168.154.2': {\n 'ip': '192.168.154.2',\n 'link_layer_address': '000c.2904.5840',\n 'origin': 'dynamic'}\n }\n }\n },\n 'Ethernet1/2.2': {\n 'ipv4': {\n 'neighbors': {\n '192.168.51.2': {\n 'ip': '192.168.51.2',\n 'link_layer_address': '000c.2904.5840',\n 'origin': 'dynamic'}\n }\n }\n },\n 'Ethernet1/2.4': {\n 'ipv4': {\n 'neighbors': {\n '192.168.9.2': {\n 'ip': '192.168.9.2',\n 'link_layer_address': '000c.2904.5840',\n 'origin': 'dynamic'}\n }\n }\n },\n 'Ethernet1/4.100': {\n 'ipv4': {\n 'neighbors': {\n '10.51.1.101': {\n 'ip': '10.51.1.101',\n 'link_layer_address': '0000.71c7.6e61',\n 'origin': 'dynamic'}\n }\n }\n },\n 'Ethernet1/4.101': {\n 'ipv4': {\n 'neighbors': {\n '10.154.1.101': {\n 'ip': '10.154.1.101',\n 'link_layer_address': '0000.71c7.75c1',\n 'origin': 'dynamic'}\n }\n }\n },\n 'Ethernet1/4.200': {\n 'ipv4': {\n 'neighbors': {\n '10.76.1.101': {\n 'ip': '10.76.1.101',\n 'link_layer_address': '0000.0068.ce6f',\n 'origin': 'dynamic'}\n }\n }\n },\n 'Ethernet2/1': {\n 'arp_dynamic_learning': {\n 'local_proxy_enable': False,\n 'proxy_enable': False}\n },\n 'Ethernet2/10.12': {\n 'arp_dynamic_learning': {\n 'local_proxy_enable': False,\n 'proxy_enable': False}\n },\n 'Ethernet2/11': {\n 'arp_dynamic_learning': {\n 'local_proxy_enable': False,\n 'proxy_enable': False}\n },\n 'Ethernet2/12': {\n 'arp_dynamic_learning': {\n 'local_proxy_enable': False,\n 'proxy_enable': False}\n },\n 'loopback0': {\n 'arp_dynamic_learning': {\n 'local_proxy_enable': False,\n 'proxy_enable': False}\n },\n 'mgmt0': {\n 'ipv4': {\n 'neighbors': {\n '10.1.7.1': {\n 'ip': '10.1.7.1',\n 'link_layer_address': '0012.7f57.ac80',\n 'origin': 'dynamic'},\n '10.1.7.250': {\n 'ip': '10.1.7.250',\n 'link_layer_address': '0050.5682.7915',\n 'origin': 'dynamic'},\n '10.1.7.253': {\n 'ip': '10.1.7.253',\n 'link_layer_address': '0050.56a4.a9fc',\n 'origin': 'dynamic'}\n }\n }\n }\n },\n 'statistics': {\n 'entries_total': 12,\n 'in_drops': 28218,\n 'in_replies_pkts': 6582,\n 'in_requests_pkts': 22632,\n 'in_total': 0,\n 'incomplete_total': 0,\n 'out_drops': 0,\n 'out_gratuitous_pkts': 58,\n 'out_replies_pkts': 998,\n 'out_requests_pkts': 2102,\n 'out_total': 3158}\n }", "id": "10996361", "language": "Python", "matching_score": 4.355809688568115, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/arp/nxos/tests/arp_output.py" }, { "content": "''' \nArp Genie Ops Object Outputs for IOSXR.\n'''\n\n\nclass ArpOutput(object):\n\n ShowArpDetail = {\n 'interfaces': {\n 'GigabitEthernet0/0/0/0': {\n 'ipv4': {\n 'neighbors': {\n '10.1.2.1': {\n 'age': '02:55:43',\n 'ip': '10.1.2.1',\n 'link_layer_address': 'fa16.3e4c.b963',\n 'origin': 'dynamic',\n 'type': 'ARPA'},\n '10.1.2.2': {\n 'age': '-',\n 'ip': '10.1.2.2',\n 'link_layer_address': 'fa16.3ee4.1462',\n 'origin': 'static',\n 'type': 'ARPA'}\n }\n }\n },\n 'GigabitEthernet0/0/0/1': {\n 'ipv4': {\n 'neighbors': {\n '10.2.3.2': {\n 'age': '-',\n 'ip': '10.2.3.2',\n 'link_layer_address': 'fa16.3e8f.3468',\n 'origin': 'static',\n 'type': 'ARPA'},\n '10.2.3.3': {\n 'age': '00:13:12',\n 'ip': '10.2.3.3',\n 'link_layer_address': '5e00.8002.0007',\n 'origin': 
'dynamic',\n 'type': 'ARPA'}\n }\n }\n }\n }\n }\n\n ShowArpTrafficDetail = {\n '0/0/CPU0': {\n 'cache': {\n 'alias': 0,\n 'dhcp': 0,\n 'dynamic': 2,\n 'interface': 2,\n 'ip_packet_drop_count': 0,\n 'standby': 0,\n 'static': 0,\n 'total_arp_entries': 4,\n 'total_arp_idb': 2},\n 'statistics': {\n 'in_replies_pkts': 8,\n 'in_requests_pkts': 108,\n 'no_buffers_errors': 0,\n 'out_gratuitous_pkts': 2,\n 'out_local_proxy': 0,\n 'out_of_memory_errors': 0,\n 'out_of_subnet_errors': 0,\n 'out_proxy': 0,\n 'out_replies_pkts': 108,\n 'out_requests_pkts': 8,\n 'resolve_dropped_requests': 0,\n 'resolve_rcvd_requests': 0,\n 'subscriber_intf_gratuitous': 0,\n 'subscriber_intf_replies': 0,\n 'subscriber_intf_requests': 0}\n },\n '0/RP0/CPU0': {\n 'cache': {\n 'alias': 0,\n 'dhcp': 0,\n 'dynamic': 0,\n 'interface': 0,\n 'ip_packet_drop_count': 0,\n 'standby': 0,\n 'static': 0,\n 'total_arp_entries': 0,\n 'total_arp_idb': 0},\n 'statistics': {\n 'in_replies_pkts': 0,\n 'in_requests_pkts': 0,\n 'no_buffers_errors': 0,\n 'out_gratuitous_pkts': 0,\n 'out_local_proxy': 0,\n 'out_of_memory_errors': 0,\n 'out_of_subnet_errors': 0,\n 'out_proxy': 0,\n 'out_replies_pkts': 0,\n 'out_requests_pkts': 0,\n 'resolve_dropped_requests': 0,\n 'resolve_rcvd_requests': 0,\n 'subscriber_intf_gratuitous': 0,\n 'subscriber_intf_replies': 0,\n 'subscriber_intf_requests': 0}\n }\n }\n\n ShowIpv4VrfAllInterface = {\n 'GigabitEthernet0/0/0/0': {\n 'int_status': 'up',\n 'ipv4': {\n '10.1.3.1/24': {\n 'ip': '10.1.3.1',\n 'prefix_length': '24'},\n 'broadcast_forwarding': 'disabled',\n 'icmp_redirects': 'never sent',\n 'icmp_replies': 'never sent',\n 'icmp_unreachables': 'always sent',\n 'mtu': 1514,\n 'mtu_available': 1500,\n 'proxy_arp': 'disabled',\n 'table_id': '0xe0000000'},\n 'multicast_groups': ['172.16.17.32',\n '192.168.3.11',\n '172.16.17.32',\n '192.168.3.11',\n '192.168.3.11'],\n 'oper_status': 'up',\n 'vrf': 'default',\n 'vrf_id': '0x60000000'},\n 'GigabitEthernet0/0/0/1': {\n 'int_status': 'up',\n 'ipv4': {\n '10.1.5.1/24': {\n 'ip': '10.1.5.1',\n 'prefix_length': '24',\n 'route_tag': 50},\n '10.2.2.2/24': {\n 'ip': '10.2.2.2',\n 'prefix_length': '24',\n 'secondary': True},\n 'broadcast_forwarding': 'disabled',\n 'icmp_redirects': 'never sent',\n 'icmp_replies': 'never sent',\n 'icmp_unreachables': 'always sent',\n 'mtu': 1514,\n 'mtu_available': 1500,\n 'proxy_arp': 'disabled',\n 'table_id': '0xe0000010'},\n 'multicast_groups': ['172.16.17.32', '192.168.3.11'],\n 'oper_status': 'up',\n 'vrf': 'VRF1',\n 'vrf_id': '0x60000001'},\n 'GigabitEthernet0/0/0/2': {\n 'int_status': 'up',\n 'ipv4': {\n '10.186.5.1/24': {\n 'ip': '10.186.5.1',\n 'prefix_length': '24'},\n 'broadcast_forwarding': 'disabled',\n 'icmp_redirects': 'never sent',\n 'icmp_replies': 'never sent',\n 'icmp_unreachables': 'always sent',\n 'mtu': 1514,\n 'mtu_available': 1500,\n 'proxy_arp': 'disabled',\n 'table_id': '0xe0000011'},\n 'multicast_groups': ['172.16.17.32', '192.168.3.11'],\n 'oper_status': 'up',\n 'vrf': 'VRF2',\n 'vrf_id': '0x60000002'},\n 'GigabitEthernet0/0/0/3': {\n 'int_status': 'up',\n 'ipv4': {\n '10.1.2.1/24': {\n 'ip': '10.1.2.1',\n 'prefix_length': '24'},\n 'broadcast_forwarding': 'disabled',\n 'icmp_redirects': 'never sent',\n 'icmp_replies': 'never sent',\n 'icmp_unreachables': 'always sent',\n 'mtu': 1514,\n 'mtu_available': 1500,\n 'proxy_arp': 'disabled',\n 'table_id': '0xe0000000'},\n 'multicast_groups': ['172.16.17.32',\n '192.168.3.11',\n '172.16.17.32',\n '192.168.3.11',\n '192.168.3.11'],\n 'oper_status': 'up',\n 'vrf': 
'default',\n 'vrf_id': '0x60000000'},\n 'GigabitEthernet0/0/0/4': {\n 'int_status': 'up',\n 'ipv4': {\n '10.69.111.111/32': {\n 'ip': '10.69.111.111',\n 'prefix_length': '32'},\n 'broadcast_forwarding': 'disabled',\n 'icmp_redirects': 'never sent',\n 'icmp_replies': 'never sent',\n 'icmp_unreachables': 'always sent',\n 'mtu': 1514,\n 'mtu_available': 1500,\n 'proxy_arp': 'disabled',\n 'table_id': '0xe0000000',\n 'unnumbered': {'unnumbered_intf_ref': 'Loopback11'}},\n 'multicast_groups': ['172.16.17.32', '192.168.3.11'],\n 'oper_status': 'up',\n 'vrf': 'default',\n 'vrf_id': '0x60000000'},\n 'GigabitEthernet0/0/0/5': {\n 'int_status': 'shutdown',\n 'oper_status': 'down',\n 'vrf': 'default',\n 'vrf_id': '0x60000000'},\n 'GigabitEthernet0/0/0/6': {\n 'int_status': 'shutdown',\n 'oper_status': 'down',\n 'vrf': 'default',\n 'vrf_id': '0x60000000'},\n 'Loopback0': {\n 'int_status': 'up',\n 'ipv4': {\n '10.4.1.1/32': {\n 'ip': '10.4.1.1',\n 'prefix_length': '32'},\n 'broadcast_forwarding': 'disabled',\n 'icmp_redirects': 'never sent',\n 'icmp_replies': 'never sent',\n 'icmp_unreachables': 'always sent',\n 'mtu': 1500,\n 'mtu_available': 1500,\n 'proxy_arp': 'disabled',\n 'table_id': '0xe0000000'},\n 'oper_status': 'up',\n 'vrf': 'default',\n 'vrf_id': '0x60000000'},\n 'Loopback11': {\n 'int_status': 'up',\n 'ipv4': {\n '10.69.111.111/32': {\n 'ip': '10.69.111.111',\n 'prefix_length': '32'},\n 'broadcast_forwarding': 'disabled',\n 'icmp_redirects': 'never sent',\n 'icmp_replies': 'never sent',\n 'icmp_unreachables': 'always sent',\n 'mtu': 1500,\n 'mtu_available': 1500,\n 'proxy_arp': 'disabled',\n 'table_id': '0xe0000000'},\n 'oper_status': 'up',\n 'vrf': 'default',\n 'vrf_id': '0x60000000'},\n 'MgmtEth0/0/CPU0/0': {\n 'int_status': 'shutdown',\n 'oper_status': 'down',\n 'vrf': 'default',\n 'vrf_id': '0x60000000'}\n }\n\n Arp_info = {\n 'interfaces': {\n 'GigabitEthernet0/0/0/0': {\n 'arp_dynamic_learning': {\n 'proxy_enable': False},\n 'ipv4': {\n 'neighbors': {\n '10.1.2.1': {\n 'ip': '10.1.2.1',\n 'link_layer_address': 'fa16.3e4c.b963',\n 'origin': 'dynamic'},\n '10.1.2.2': {\n 'ip': '10.1.2.2',\n 'link_layer_address': 'fa16.3ee4.1462',\n 'origin': 'static'}\n }\n }\n },\n 'GigabitEthernet0/0/0/1': {\n 'arp_dynamic_learning': {\n 'proxy_enable': False},\n 'ipv4': {\n 'neighbors': {\n '10.2.3.2': {\n 'ip': '10.2.3.2',\n 'link_layer_address': 'fa16.3e8f.3468',\n 'origin': 'static'},\n '10.2.3.3': {\n 'ip': '10.2.3.3',\n 'link_layer_address': '5e00.8002.0007',\n 'origin': 'dynamic'}\n }\n }\n },\n 'GigabitEthernet0/0/0/2': {\n 'arp_dynamic_learning': {\n 'proxy_enable': False}\n },\n 'GigabitEthernet0/0/0/3': {\n 'arp_dynamic_learning': {\n 'proxy_enable': False}\n },\n 'GigabitEthernet0/0/0/4': {\n 'arp_dynamic_learning': {\n 'proxy_enable': False}\n },\n 'Loopback0': {\n 'arp_dynamic_learning': {\n 'proxy_enable': False}\n },\n 'Loopback11': {\n 'arp_dynamic_learning': {\n 'proxy_enable': False}\n }\n },\n 'statistics': {\n 'in_replies_pkts': 8,\n 'in_requests_pkts': 108,\n 'out_gratuitous_pkts': 2,\n 'out_replies_pkts': 108,\n 'out_requests_pkts': 8}\n }", "id": "610245", "language": "Python", "matching_score": 3.0689635276794434, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/arp/iosxr/tests/arp_output.py" }, { "content": "# Genie\r\nfrom genie.ops.base import Base\r\n\r\n\r\nclass Arp(Base):\r\n exclude = ['in_requests_pkts',\r\n 'in_replies_pkts',\r\n 'in_requests_pkts',\r\n 'out_replies_pkts',\r\n 'out_requests_pkts',\r\n 'in_drops',\r\n 'in_replies_pkts',\r\n 
'out_total']\r\n", "id": "10277292", "language": "Python", "matching_score": 1.6758325099945068, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/arp/arp.py" }, { "content": "# Genie\nfrom genie.ops.base import Base\n\n\nclass Stp(Base):\n exclude = []", "id": "2369005", "language": "Python", "matching_score": 1, "max_stars_count": 0, "path": "pkgs/ops-pkg/src/genie/libs/ops/stp/stp.py" }, { "content": "from .stp import *\n", "id": "8570457", "language": "Python", "matching_score": 0.8516729474067688, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/stp/__init__.py" }, { "content": "'''\nSpanning-tree Genie Ops Object for IOS - CLI.\n'''\nfrom ..iosxe.stp import Stp as StpXE\n\nclass Stp(StpXE):\n pass", "id": "2080837", "language": "Python", "matching_score": 1.3993382453918457, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/stp/ios/stp.py" }, { "content": "# Python\nimport unittest\nfrom copy import deepcopy\nfrom unittest.mock import Mock\n\n# ATS\nfrom ats.topology import Device\n\n# Genie\nfrom genie.libs.ops.stp.iosxr.stp import Stp\nfrom genie.libs.ops.stp.iosxr.tests.stp_output import StpOutput\n\n# Parser\nfrom genie.libs.parser.iosxr.show_spanning_tree import ShowSpanningTreeMst, \\\n ShowSpanningTreeMstag, \\\n ShowSpanningTreePvrst, \\\n ShowSpanningTreePvrsTag, \\\n ShowSpanningTreePvsTag\n\noutputs = {}\noutputs.update({'show spanning-tree mst test': StpOutput.ShowSpanningTreeMst_output})\noutputs.update({'show spanning-tree mstag risc': StpOutput.ShowSpanningTreeMstag_output})\noutputs.update({'show spanning-tree pvrst a': StpOutput.ShowSpanningTreePvrst_output})\noutputs.update({'show spanning-tree pvrstag foo': StpOutput.ShowSpanningTreePvrsTag_output})\noutputs.update({'show spanning-tree pvstag foo': StpOutput.ShowSpanningTreePvsTag_output})\n\n\ndef mapper(key):\n return outputs[key]\n\nclass test_stp(unittest.TestCase):\n\n def setUp(self):\n self.device = Device(name='aDevice')\n self.device.os = 'iosxr'\n self.device.custom['abstraction'] = {'order':['os']}\n self.device.mapping={}\n self.device.mapping['cli']='cli'\n # Give the device as a connection type\n # This is done in order to call the parser on the output provided\n self.device.connectionmgr.connections['cli'] = self.device\n \n\n def test_complete_output(self):\n stp = Stp(device=self.device)\n # Get outputs\n stp.maker.outputs[ShowSpanningTreeMst] = \\\n {'': StpOutput.ShowSpanningTreeMst}\n\n stp.maker.outputs[ShowSpanningTreeMstag] = \\\n {'': StpOutput.ShowSpanningTreeMstag}\n\n stp.maker.outputs[ShowSpanningTreePvrst] = \\\n {'': StpOutput.ShowSpanningTreePvrst}\n\n stp.maker.outputs[ShowSpanningTreePvrsTag] = \\\n {'': StpOutput.ShowSpanningTreePvrsTag}\n\n stp.maker.outputs[ShowSpanningTreePvsTag] = \\\n {'': StpOutput.ShowSpanningTreePvsTag}\n \n self.device.execute = Mock()\n self.device.execute.side_effect = mapper\n\n # Learn the feature\n stp.learn(mst_domain='test', mstag_domain='risc', pvst_id='a', pvrstag_domain='foo', pvstag_domain='foo')\n self.maxDiff = None\n # Verify Ops was created successfully\n self.assertEqual(stp.info, StpOutput.stpOutput)\n \n # Check Selected Attributes\n self.assertEqual(stp.info['mstp']['test']['mst_instances']['0']['vlan'],\n '1-4094')\n\n\n def test_empty_output(self):\n self.maxDiff = None\n stp = Stp(device=self.device)\n\n stp.maker.outputs[ShowSpanningTreeMst] = \\\n {'': {}}\n\n stp.maker.outputs[ShowSpanningTreeMstag] = \\\n {'': {}}\n\n stp.maker.outputs[ShowSpanningTreePvrst] = 
\\\n {'': {}}\n\n stp.maker.outputs[ShowSpanningTreePvrsTag] = \\\n {'': {}}\n\n stp.maker.outputs[ShowSpanningTreePvsTag] = \\\n {'': {}}\n\n\n outputs.update({'show spanning-tree mst test': ''})\n outputs.update({'show spanning-tree mstag risc': ''})\n outputs.update({'show spanning-tree pvrst a': ''})\n outputs.update({'show spanning-tree pvrstag foo': ''})\n outputs.update({'show spanning-tree pvstag foo': ''})\n \n self.device.execute = Mock()\n self.device.execute.side_effect = mapper\n \n # Learn the feature\n stp.learn(mst_domain='test', mstag_domain='risc', pvst_id='a', pvrstag_domain='foo', pvstag_domain='foo')\n\n outputs.update({'show spanning-tree mst test': StpOutput.ShowSpanningTreeMst_output})\n outputs.update({'show spanning-tree mstag risc': StpOutput.ShowSpanningTreeMstag_output})\n outputs.update({'show spanning-tree pvrst a': StpOutput.ShowSpanningTreePvrst_output})\n outputs.update({'show spanning-tree pvrstag foo': StpOutput.ShowSpanningTreePvrsTag_output})\n outputs.update({'show spanning-tree pvstag foo': StpOutput.ShowSpanningTreePvsTag_output})\n \n # Check that the attribute is not found\n with self.assertRaises(AttributeError):\n stp.info['mstp']['test']['mst_instances']['0']['vlan']\n\n def test_incomplete_output(self):\n self.maxDiff = None\n \n stp = Stp(device=self.device)\n\n self.device.execute = Mock()\n self.device.execute.side_effect = mapper\n\n # Get outputs\n stp.maker.outputs[ShowSpanningTreeMst] = \\\n {'': StpOutput.ShowSpanningTreeMst}\n\n stp.maker.outputs[ShowSpanningTreeMstag] = \\\n {'': StpOutput.ShowSpanningTreeMstag}\n\n stp.maker.outputs[ShowSpanningTreePvrst] = \\\n {'': StpOutput.ShowSpanningTreePvrst}\n\n stp.maker.outputs[ShowSpanningTreePvrsTag] = \\\n {'': StpOutput.ShowSpanningTreePvrsTag}\n\n stp.maker.outputs[ShowSpanningTreePvsTag] = \\\n {'': StpOutput.ShowSpanningTreePvsTag}\n\n # Learn the feature\n stp.learn(mst_domain='test', mstag_domain='risc', pvst_id='a', pvrstag_domain='foo', pvstag_domain='foo')\n \n # delete keys from input\n del(stp.info['mstp']['test']['mst_instances']['0']['mst_id'])\n \n # Delete the corresponding keys from the expected output\n expect_dict = deepcopy(StpOutput.stpOutput)\n del(expect_dict['mstp']['test']['mst_instances']['0']['mst_id']) \n\n # Verify Ops was created successfully\n self.assertEqual(stp.info, expect_dict)\n\nif __name__ == '__main__':\n unittest.main()\n", "id": "10616650", "language": "Python", "matching_score": 1.5242840051651, "max_stars_count": 0, "path": "pkgs/ops-pkg/src/genie/libs/ops/stp/iosxr/tests/test_stp.py" }, { "content": "# Python\nimport unittest\nfrom copy import deepcopy\nfrom unittest.mock import Mock\n\n# ATS\nfrom ats.topology import Device\n\n# Genie\nfrom genie.libs.ops.acl.iosxr.acl import Acl\nfrom genie.libs.ops.acl.iosxr.tests.acl_output import AclOutput\n\n# Parser\nfrom genie.libs.parser.iosxr.show_acl import ShowAclAfiAll, \\\n ShowAclEthernetServices\n\nclass test_acl(unittest.TestCase):\n\n def setUp(self):\n self.device = Device(name='aDevice')\n self.device.os = 'iosxr'\n self.device.mapping={}\n self.device.mapping['cli']='cli'\n # Give the device as a connection type\n # This is done in order to call the parser on the output provided\n self.device.connectionmgr.connections['cli'] = self.device\n\n def test_complete_output(self):\n acl = Acl(device=self.device)\n # Get outputs\n acl.maker.outputs[ShowAclAfiAll] = \\\n {'': AclOutput.ShowAclAfiAll}\n\n acl.maker.outputs[ShowAclEthernetServices] = \\\n {'': AclOutput.ShowAclEthernetServices}\n # Learn the feature\n acl.learn()\n # Verify Ops was created successfully\n self.assertEqual(acl.info, AclOutput.aclOutput)\n \n # Check Selected Attributes\n self.assertEqual(acl.info['acls']['acl_name']['name'], 'acl_name')\n # info - ipv4_acl\n self.assertEqual(acl.info['acls']['test22']['aces'][30]\\\n ['actions']['forwarding'], 'drop')\n\n\n def test_empty_output(self):\n self.maxDiff = None\n acl = Acl(device=self.device)\n\n acl.maker.outputs[ShowAclAfiAll] = \\\n {'': {}}\n\n acl.maker.outputs[ShowAclEthernetServices] = \\\n {'': {}}\n # Learn the feature\n acl.learn()\n\n # Check that the attribute is not found\n with self.assertRaises(AttributeError):\n acl.info['acls']['acl_name']['name']\n\n def test_incomplete_output(self):\n self.maxDiff = None\n \n acl = Acl(device=self.device)\n # Get outputs\n acl.maker.outputs[ShowAclAfiAll] = \\\n {'': AclOutput.ShowAclAfiAll}\n\n acl.maker.outputs[ShowAclEthernetServices] = \\\n {'': AclOutput.ShowAclEthernetServices}\n # delete keys from input\n del(AclOutput.ShowAclAfiAll['acl_name']['name'])\n del(AclOutput.ShowAclEthernetServices['eth_acl']['name'])\n # Learn the feature\n acl.learn()\n\n # Delete the corresponding keys from the expected output\n expect_dict = deepcopy(AclOutput.aclOutput)\n del(expect_dict['acls']['acl_name']['name'])\n del(expect_dict['acls']['eth_acl']['name']) \n # Verify Ops was created successfully\n self.assertEqual(acl.info, expect_dict)\n\n\nif __name__ == '__main__':\n unittest.main()\n", "id": "8000440", "language": "Python", "matching_score": 2.699767827987671, "max_stars_count": 0, "path": "pkgs/ops-pkg/src/genie/libs/ops/acl/iosxr/tests/test_acl.py" }, { "content": "# Python\nimport unittest\nfrom copy import deepcopy\nfrom unittest.mock import Mock\n\n# ATS\nfrom ats.topology import Device\n\n# Genie\nfrom genie.libs.ops.lldp.iosxe.lldp import Lldp\nfrom genie.libs.ops.lldp.iosxe.tests.lldp_output import LldpOutput\n\n# Parser\nfrom genie.libs.parser.iosxe.show_lldp import ShowLldp, \\\n ShowLldpEntry, \\\n ShowLldpNeighborsDetail,\\\n ShowLldpTraffic, \\\n ShowLldpInterface\n\n\nclass test_lldp(unittest.TestCase):\n\n def setUp(self):\n self.device = Device(name='aDevice')\n self.device.os = 'iosxe'\n self.device.custom['abstraction'] = {'order':['os']}\n self.device.mapping={}\n self.device.mapping['cli']='cli'\n # Give the device as a connection type\n # This is done in order to call the parser on the output provided\n self.device.connectionmgr.connections['cli'] = self.device\n\n def test_complete_output(self):\n self.maxDiff = None\n lldp = Lldp(device=self.device)\n # Get outputs\n lldp.maker.outputs[ShowLldp] = \\\n {'': LldpOutput.ShowLldp}\n\n lldp.maker.outputs[ShowLldpEntry] = \\\n {'': LldpOutput.ShowLldpEntry}\n\n lldp.maker.outputs[ShowLldpNeighborsDetail] = \\\n {'': LldpOutput.ShowLldpNeighborsDetail}\n\n lldp.maker.outputs[ShowLldpTraffic] = \\\n {'': LldpOutput.ShowLldpTraffic}\n\n lldp.maker.outputs[ShowLldpInterface] = \\\n {'': LldpOutput.ShowLldpInterface}\n\n # Learn the feature\n lldp.learn()\n # Verify Ops was created successfully\n self.assertEqual(lldp.info, LldpOutput.Lldp_info)\n\n # Check Selected Attributes\n self.assertEqual(lldp.info['enabled'], True)\n # info - lldp default\n self.assertEqual(lldp.info['interfaces']['GigabitEthernet1/0/16']\\\n ['enabled'], True)\n\n def test_empty_output(self):\n self.maxDiff = None\n lldp = Lldp(device=self.device)\n\n lldp.maker.outputs[ShowLldp] = \\\n {'': {}}\n\n lldp.maker.outputs[ShowLldpEntry] = \\\n {'': {}}\n\n lldp.maker.outputs[ShowLldpNeighborsDetail] = \\\n {'': 
{}}\n\n lldp.maker.outputs[ShowLldpTraffic] = \\\n {'': {}}\n\n lldp.maker.outputs[ShowLldpInterface] = \\\n {'': LldpOutput.ShowLldpInterface}\n # Learn the feature\n lldp.learn()\n\n # Check no attribute not found\n with self.assertRaises(KeyError):\n lldp.info['enabled']\n\n\n def test_incomplete_output(self):\n self.maxDiff = None\n \n lldp = Lldp(device=self.device)\n # Get outputs\n lldp.maker.outputs[ShowLldp] = \\\n {'': LldpOutput.ShowLldp}\n\n lldp.maker.outputs[ShowLldpEntry] = \\\n {'': LldpOutput.ShowLldpEntry}\n \n lldp.maker.outputs[ShowLldpNeighborsDetail] = \\\n {'': LldpOutput.ShowLldpNeighborsDetail}\n\n lldp.maker.outputs[ShowLldpTraffic] = \\\n {'': LldpOutput.ShowLldpTraffic}\n\n lldp.maker.outputs[ShowLldpInterface] = \\\n {'': {}}\n\n # Learn the feature\n lldp.learn()\n\n # Delete missing specific attribute values\n expect_dict = deepcopy(LldpOutput.Lldp_info)\n del(expect_dict['interfaces']['GigabitEthernet1/0/15']['enabled'])\n del(expect_dict['interfaces']['GigabitEthernet1/0/16']['enabled'])\n del(expect_dict['interfaces']['GigabitEthernet1/0/17']['enabled'])\n del(expect_dict['interfaces']['GigabitEthernet2/0/15']['enabled'])\n \n # Verify Ops was created successfully\n self.assertEqual(lldp.info, expect_dict)\n\n\nif __name__ == '__main__':\n unittest.main()\n", "id": "1225816", "language": "Python", "matching_score": 1.4279930591583252, "max_stars_count": 0, "path": "pkgs/ops-pkg/src/genie/libs/ops/lldp/iosxe/tests/test_lldp.py" }, { "content": "'''Lldp Genie Ops Object Outputs for IOS'''\n\n\nclass LldpOutput(object):\n\n ShowLldp = {\n \"hello_timer\": 30,\n \"enabled\": True,\n \"hold_timer\": 120,\n \"status\": \"active\",\n \"reinit_timer\": 2\n\n }\n \n ShowLldpEntry = {\n 'interfaces': {\n 'GigabitEthernet2/0/15': {\n 'if_name': 'GigabitEthernet2/0/15',\n 'port_id': {\n 'GigabitEthernet1/0/4': {\n 'neighbors': {\n 'R5': {\n 'neighbor_id': 'R5',\n 'chassis_id': '843d.c638.b980',\n 'port_id': 'GigabitEthernet1/0/4',\n 'port_description': 'GigabitEthernet1/0/4',\n 'system_name': 'R5',\n 'time_remaining': 112,\n 'capabilities': {\n 'mac_bridge': {\n 'name': 'mac_bridge',\n 'system': True,\n 'enabled': True,\n },\n 'router': {\n 'name': 'router',\n 'system': True,\n 'enabled': True,\n },\n },\n 'management_address': '10.9.1.1',\n 'auto_negotiation': 'supported, enabled',\n 'physical_media_capabilities': ['1000baseT(FD)', '100base-TX(FD)', '100base-TX(HD)', '10base-T(FD)', '10base-T(HD)'],\n 'unit_type': 30,\n 'vlan_id': 1,\n },\n },\n },\n },\n },\n 'GigabitEthernet1/0/16': {\n 'if_name': 'GigabitEthernet1/0/16',\n 'port_id': {\n 'GigabitEthernet1/0/2': {\n 'neighbors': {\n 'R5': {\n 'neighbor_id': 'R5',\n 'chassis_id': '843d.c638.b980',\n 'port_id': 'GigabitEthernet1/0/2',\n 'port_description': 'GigabitEthernet1/0/2',\n 'system_name': 'R5',\n 'time_remaining': 111,\n 'capabilities': {\n 'mac_bridge': {\n 'name': 'mac_bridge',\n 'system': True,\n 'enabled': True,\n },\n 'router': {\n 'name': 'router',\n 'system': True,\n 'enabled': True,\n },\n },\n 'management_address': '10.9.1.1',\n 'auto_negotiation': 'supported, enabled',\n 'physical_media_capabilities': ['1000baseT(FD)', '100base-TX(FD)', '100base-TX(HD)', '10base-T(FD)', '10base-T(HD)'],\n 'unit_type': 30,\n 'vlan_id': 1,\n },\n },\n },\n },\n },\n 'GigabitEthernet1/0/17': {\n 'if_name': 'GigabitEthernet1/0/17',\n 'port_id': {\n 'GigabitEthernet1/0/3': {\n 'neighbors': {\n 'R5': {\n 'neighbor_id': 'R5',\n 'chassis_id': '843d.c638.b980',\n 'port_id': 'GigabitEthernet1/0/3',\n 'port_description': 
'GigabitEthernet1/0/3',\n 'system_name': 'R5',\n 'time_remaining': 108,\n 'capabilities': {\n 'mac_bridge': {\n 'name': 'mac_bridge',\n 'system': True,\n 'enabled': True,\n },\n 'router': {\n 'name': 'router',\n 'system': True,\n 'enabled': True,\n },\n },\n 'management_address': '10.9.1.1',\n 'auto_negotiation': 'supported, enabled',\n 'physical_media_capabilities': ['1000baseT(FD)', '100base-TX(FD)', '100base-TX(HD)', '10base-T(FD)', '10base-T(HD)'],\n 'unit_type': 30,\n 'vlan_id': 1,\n },\n },\n },\n },\n },\n 'GigabitEthernet1/0/15': {\n 'if_name': 'GigabitEthernet1/0/15',\n 'port_id': {\n 'GigabitEthernet1/0/1': {\n 'neighbors': {\n 'R5': {\n 'neighbor_id': 'R5',\n 'chassis_id': '843d.c638.b980',\n 'port_id': 'GigabitEthernet1/0/1',\n 'port_description': 'GigabitEthernet1/0/1',\n 'system_name': 'R5',\n 'time_remaining': 108,\n 'capabilities': {\n 'mac_bridge': {\n 'name': 'mac_bridge',\n 'system': True,\n 'enabled': True,\n },\n 'router': {\n 'name': 'router',\n 'system': True,\n 'enabled': True,\n },\n },\n 'management_address': '10.9.1.1',\n 'auto_negotiation': 'supported, enabled',\n 'physical_media_capabilities': ['1000baseT(FD)', '100base-TX(FD)', '100base-TX(HD)', '10base-T(FD)', '10base-T(HD)'],\n 'unit_type': 30,\n 'vlan_id': 1,\n },\n },\n },\n },\n },\n },\n 'total_entries': 4,\n }\n\n ShowLldpNeighborsDetail = {\n 'interfaces': {\n 'GigabitEthernet2/0/15': {\n 'if_name': 'GigabitEthernet2/0/15',\n 'port_id': {\n 'GigabitEthernet1/0/4': {\n 'neighbors': {\n 'R5': {\n 'neighbor_id': 'R5',\n 'chassis_id': '843d.c638.b980',\n 'port_id': 'GigabitEthernet1/0/4',\n 'port_description': 'GigabitEthernet1/0/4',\n 'system_name': 'R5',\n 'time_remaining': 101,\n 'capabilities': {\n 'mac_bridge': {\n 'name': 'mac_bridge',\n 'system': True,\n 'enabled': True,\n },\n 'router': {\n 'name': 'router',\n 'system': True,\n 'enabled': True,\n },\n },\n 'management_address': '10.9.1.1',\n 'auto_negotiation': 'supported, enabled',\n 'physical_media_capabilities': ['1000baseT(FD)', '100base-TX(FD)', '100base-TX(HD)', '10base-T(FD)', '10base-T(HD)'],\n 'unit_type': 30,\n 'vlan_id': 1,\n },\n },\n },\n },\n },\n 'GigabitEthernet1/0/16': {\n 'if_name': 'GigabitEthernet1/0/16',\n 'port_id': {\n 'GigabitEthernet1/0/2': {\n 'neighbors': {\n 'R5': {\n 'neighbor_id': 'R5',\n 'chassis_id': '843d.c638.b980',\n 'port_id': 'GigabitEthernet1/0/2',\n 'port_description': 'GigabitEthernet1/0/2',\n 'system_name': 'R5',\n 'time_remaining': 99,\n 'capabilities': {\n 'mac_bridge': {\n 'name': 'mac_bridge',\n 'system': True,\n 'enabled': True,\n },\n 'router': {\n 'name': 'router',\n 'system': True,\n 'enabled': True,\n },\n },\n 'management_address': '10.9.1.1',\n 'auto_negotiation': 'supported, enabled',\n 'physical_media_capabilities': ['1000baseT(FD)', '100base-TX(FD)', '100base-TX(HD)', '10base-T(FD)', '10base-T(HD)'],\n 'unit_type': 30,\n 'vlan_id': 1,\n },\n },\n },\n },\n },\n 'GigabitEthernet1/0/17': {\n 'if_name': 'GigabitEthernet1/0/17',\n 'port_id': {\n 'GigabitEthernet1/0/3': {\n 'neighbors': {\n 'R5': {\n 'neighbor_id': 'R5',\n 'chassis_id': '843d.c638.b980',\n 'port_id': 'GigabitEthernet1/0/3',\n 'port_description': 'GigabitEthernet1/0/3',\n 'system_name': 'R5',\n 'time_remaining': 94,\n 'capabilities': {\n 'mac_bridge': {\n 'name': 'mac_bridge',\n 'system': True,\n 'enabled': True,\n },\n 'router': {\n 'name': 'router',\n 'system': True,\n 'enabled': True,\n },\n },\n 'management_address': '10.9.1.1',\n 'auto_negotiation': 'supported, enabled',\n 'physical_media_capabilities': ['1000baseT(FD)', 
'100base-TX(FD)', '100base-TX(HD)', '10base-T(FD)', '10base-T(HD)'],\n 'unit_type': 30,\n 'vlan_id': 1,\n },\n },\n },\n },\n },\n 'GigabitEthernet1/0/15': {\n 'if_name': 'GigabitEthernet1/0/15',\n 'port_id': {\n 'GigabitEthernet1/0/1': {\n 'neighbors': {\n 'R5': {\n 'neighbor_id': 'R5',\n 'chassis_id': '843d.c638.b980',\n 'port_id': 'GigabitEthernet1/0/1',\n 'port_description': 'GigabitEthernet1/0/1',\n 'system_name': 'R5',\n 'time_remaining': 98,\n 'capabilities': {\n 'mac_bridge': {\n 'name': 'mac_bridge',\n 'system': True,\n 'enabled': True,\n },\n 'router': {\n 'name': 'router',\n 'system': True,\n 'enabled': True,\n },\n },\n 'management_address': '10.9.1.1',\n 'auto_negotiation': 'supported, enabled',\n 'physical_media_capabilities': ['1000baseT(FD)', '100base-TX(FD)', '100base-TX(HD)', '10base-T(FD)', '10base-T(HD)'],\n 'unit_type': 30,\n 'vlan_id': 1,\n },\n },\n },\n },\n },\n },\n 'total_entries': 4,\n }\n\n ShowLldpTraffic = {\n \"frame_in\": 13315,\n \"frame_out\": 20372,\n \"frame_error_in\": 0,\n \"frame_discard\": 14,\n \"tlv_discard\": 0,\n 'tlv_unknown': 0,\n 'entries_aged_out': 34\n }\n\n ShowLldpInterface = {\n 'interfaces': {\n 'GigabitEthernet1/0/15': {\n 'tx': 'enabled',\n 'rx': 'enabled',\n 'tx_state': 'idle',\n 'rx_state': 'wait for frame',\n },\n 'GigabitEthernet1/0/16': {\n 'tx': 'enabled',\n 'rx': 'enabled',\n 'tx_state': 'idle',\n 'rx_state': 'wait for frame',\n },\n 'GigabitEthernet1/0/17': {\n 'tx': 'enabled',\n 'rx': 'enabled',\n 'tx_state': 'idle',\n 'rx_state': 'wait for frame',\n },\n 'GigabitEthernet2/0/15': {\n 'tx': 'enabled',\n 'rx': 'enabled',\n 'tx_state': 'idle',\n 'rx_state': 'wait for frame',\n },\n } \n }\n\n Lldp_info = {\n 'enabled': True,\n 'hello_timer': 30,\n 'hold_timer': 120,\n 'counters': {\n 'frame_in': 13315,\n 'frame_out': 20372,\n 'frame_error_in': 0,\n 'frame_discard': 14,\n 'tlv_discard': 0,\n 'tlv_unknown': 0,\n 'entries_aged_out': 34,\n },\n 'interfaces': {\n 'GigabitEthernet1/0/15': {\n 'if_name': 'GigabitEthernet1/0/15',\n 'port_id': {\n 'GigabitEthernet1/0/1': {\n 'neighbors': {\n 'R5': {\n 'chassis_id': '843d.c638.b980',\n 'port_id': 'GigabitEthernet1/0/1',\n 'neighbor_id': 'R5',\n 'system_name': 'R5',\n 'port_description': 'GigabitEthernet1/0/1',\n 'management_address': '10.9.1.1',\n 'capabilities': {\n 'router': {\n 'enabled': True,\n 'name': 'router',\n },\n 'mac_bridge': {\n 'enabled': True,\n 'name': 'mac_bridge',\n },\n },\n },\n },\n },\n },\n 'enabled': True,\n },\n 'GigabitEthernet1/0/17': {\n 'if_name': 'GigabitEthernet1/0/17',\n 'port_id': {\n 'GigabitEthernet1/0/3': {\n 'neighbors': {\n 'R5': {\n 'chassis_id': '843d.c638.b980',\n 'port_id': 'GigabitEthernet1/0/3',\n 'neighbor_id': 'R5',\n 'system_name': 'R5',\n 'port_description': 'GigabitEthernet1/0/3',\n 'management_address': '10.9.1.1',\n 'capabilities': {\n 'router': {\n 'enabled': True,\n 'name': 'router',\n },\n 'mac_bridge': {\n 'enabled': True,\n 'name': 'mac_bridge',\n },\n },\n },\n },\n },\n },\n 'enabled': True,\n },\n 'GigabitEthernet1/0/16': {\n 'if_name': 'GigabitEthernet1/0/16',\n 'port_id': {\n 'GigabitEthernet1/0/2': {\n 'neighbors': {\n 'R5': {\n 'chassis_id': '843d.c638.b980',\n 'port_id': 'GigabitEthernet1/0/2',\n 'neighbor_id': 'R5',\n 'system_name': 'R5',\n 'port_description': 'GigabitEthernet1/0/2',\n 'management_address': '10.9.1.1',\n 'capabilities': {\n 'router': {\n 'enabled': True,\n 'name': 'router',\n },\n 'mac_bridge': {\n 'enabled': True,\n 'name': 'mac_bridge',\n },\n },\n },\n },\n },\n },\n 'enabled': True,\n },\n 
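# fourth and final interface entry in the expected merged Lldp_info structure\n 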
'GigabitEthernet2/0/15': {\n 'if_name': 'GigabitEthernet2/0/15',\n 'port_id': {\n 'GigabitEthernet1/0/4': {\n 'neighbors': {\n 'R5': {\n 'chassis_id': '843d.c638.b980',\n 'port_id': 'GigabitEthernet1/0/4',\n 'neighbor_id': 'R5',\n 'system_name': 'R5',\n 'port_description': 'GigabitEthernet1/0/4',\n 'management_address': '10.9.1.1',\n 'capabilities': {\n 'router': {\n 'enabled': True,\n 'name': 'router',\n },\n 'mac_bridge': {\n 'enabled': True,\n 'name': 'mac_bridge',\n },\n },\n },\n },\n },\n },\n 'enabled': True,\n },\n },\n }", "id": "10616175", "language": "Python", "matching_score": 4.289865016937256, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/lldp/ios/tests/lldp_output.py" }, { "content": "''' \nLLDP Genie Ops Object for IOSXE - CLI.\n'''\n# Genie\nfrom genie.ops.base import Base\nfrom genie.ops.base import Context\n\n\nclass Lldp(Base):\n '''LLDP Genie Ops Object'''\n\n def tx_rx_both_enabled(self, item):\n '''return True when logic and for tx and rx is True'''\n try:\n if 'enabled' in item['tx'] and 'enabled' in item['rx']:\n return True\n else:\n return False\n except Exception:\n return False\n\n def learn(self):\n '''Learn lldp Ops'''\n ########################################################################\n # info\n ########################################################################\n \n # unsupported keys\n # enabled\n # hello_timer\n # hold_timer\n # suppress_tlv_advertisement: - NA\n # chassis_id - N/A\n # port_id - N/A\n # port_description - N/A\n # system_name - N/A\n # system_description - N/A\n # system_capabilities - N/A\n # management_address - N/A\n # system_name - N/A\n # system_description - N/A\n # chassis_id - N/A\n # chassis_id_type - N/A\n # counters\n # frame_in\n # frame_out\n # frame_error_in\n # frame_discard\n # tlv_discard - N/A\n # tlv_unknown - N/A\n # last_clear - N/A\n # tlv_accepted - N/A\n # entries_aged_out\n # interfaces\n # if_name\n # if_name\n # enabled\n # counters\n # frame_in\n # frame_out\n # frame_error_in\n # frame_discard\n # tlv_discard\n # tlv_unknown\n # last_clear - N/A\n # frame_error_out - N/A\n # entries_aged_out\n # pord_id\n # neighbors\n # neighbor_id\n # neighbor_id\n # system_name\n # system_description\n # chassis_id\n # chassis_id_type - N/A\n # id\n # age\n # last_update\n # port_id\n # port_id_type - N/A\n # port_description\n # management_address\n # management_address_type - N/A\n # custom_tlvs' - N/A\n # [type oui oui_subtype] - N/A\n # type - N/A\n # oui - N/A\n # oui_subtype - N/A\n # value - N/A\n # capabilities\n # name\n # name - N/A\n # enabled\n \n for key in ['enabled', 'hello_timer', 'hold_timer']:\n self.add_leaf(cmd='show lldp',\n src='[{}]'.format(key),\n dest='info[{}]'.format(key))\n\n for key in ['frame_in', 'frame_out', 'frame_error_in', 'frame_discard',\n 'tlv_discard', 'tlv_unknown', 'entries_aged_out']:\n self.add_leaf(cmd='show lldp traffic',\n src='[{}]'.format(key),\n dest='info[counters][{}]'.format(key))\n\n intf_src = '[interfaces][(?P<intf>.*)]'\n intf_dest = 'info[interfaces][(?P<intf>.*)]'\n\n nbr_src = '[interfaces][(?P<intf>.*)][port_id][(?P<p_id>.*)][neighbors][(?P<nei>.*)]'\n nbr_dest = 'info[interfaces][(?P<intf>.*)][port_id][(?P<p_id>.*)][neighbors][(?P<nei>.*)]'\n\n self.add_leaf(cmd='show lldp entry *',\n src=intf_src + '[if_name]',\n dest=intf_dest + '[if_name]')\n\n self.add_leaf(cmd='show lldp neighbors detail',\n src=intf_src + '[if_name]',\n dest=intf_dest + '[if_name]')\n\n for key in ['[chassis_id]', '[port_id]', '[neighbor_id]', '[system_name]',\n 
'[system_description]', '[port_description]', '[management_address]',\n '[capabilities][(?P<cap>.*)][enabled]','[capabilities][(?P<cap>.*)][name]' ]:\n self.add_leaf(cmd='show lldp entry *',\n src=nbr_src + key,\n dest=nbr_dest + key)\n \n for key in ['[chassis_id]', '[port_id]', '[neighbor_id]', '[system_name]',\n '[system_description]', '[port_description]', '[management_address]',\n '[capabilities][(?P<cap>.*)][enabled]','[capabilities][(?P<cap>.*)][name]' ]:\n self.add_leaf(cmd='show lldp neighbors detail',\n src=nbr_src + key,\n dest=nbr_dest + key)\n # enabled\n self.add_leaf(cmd='show lldp interface',\n src=intf_src,\n dest=intf_dest + '[enabled]',\n action=self.tx_rx_both_enabled)\n\n # make to write in cache\n self.make(final_call=True)\n", "id": "9591615", "language": "Python", "matching_score": 2.9381103515625, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/lldp/iosxe/lldp.py" }, { "content": "# Genie\r\nfrom genie.ops.base import Base\r\n\r\n\r\nclass Lldp(Base):\r\n exclude = ['frame_in',\r\n 'frame_out',\r\n 'tlv_discard',\r\n 'tlv_unknown',\r\n 'frame_discard']", "id": "2006379", "language": "Python", "matching_score": 1.212247371673584, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/lldp/lldp.py" }, { "content": "'''\nLldp Genie Ops Object for IOS - CLI.\n'''\nfrom ..iosxe.lldp import Lldp as LldpXE\n\nclass Lldp(LldpXE):\n pass", "id": "2105324", "language": "Python", "matching_score": 1, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/lldp/ios/lldp.py" }, { "content": "from .lldp import *\n", "id": "4267155", "language": "Python", "matching_score": 0, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/lldp/__init__.py" }, { "content": "from .evpn import *\nfrom .evi import *\nfrom .vni import *\nfrom .esi import *\n\n", "id": "3580764", "language": "Python", "matching_score": 0.9061760306358337, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/evpn/__init__.py" }, { "content": "\n__all__ = (\n 'Evi',\n 'EviNeighbor',\n)\n\nimport functools\nimport weakref\n\nfrom genie.utils.cisco_collections import typedset\n\nfrom genie.decorator import managedattribute\nfrom genie.conf.base import ConfigurableBase\nimport genie.conf.base.attributes\nfrom genie.conf.base.attributes import SubAttributes, KeyedSubAttributes, AttributesHelper\n\nfrom genie.libs.conf.base import Neighbor\nfrom genie.libs.conf.base import RouteDistinguisher, RouteTarget\n#from .evpn import Evpn\n\n\n@functools.total_ordering\nclass EviNeighbor(Neighbor):\n '''An EVI Neighbor class.'''\n\n evi = managedattribute(\n name='evi',\n read_only=True,\n doc='Evi: The EVI neighbor (read-only hash key)')\n\n ac_id = managedattribute(\n name='ac_id',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n def __init__(self, evi, **kwargs):\n '''\n Args:\n evi (Evi): The EVI Neighbor.\n '''\n if not isinstance(evi, Evi):\n raise TypeError(evi)\n self._evi = evi\n super().__init__(**kwargs)\n\n def _neighbor_comparison_tokens(self):\n return super()._neighbor_comparison_tokens() + (\n 'evi', self.evi,\n 'ac_id', self.ac_id,\n )\n\n def __hash__(self):\n return hash((self.evi, self.ac_id))\n\n def __repr__(self):\n s = '<{}'.format(\n self.__class__.__name__,\n )\n s += ' EVI {} AC {}'.format(\n self.evi.evi_id,\n self.ac_id,\n )\n s += '>'\n return s\n\n\nclass EviSubAttributes(KeyedSubAttributes):\n '''SubAttributes class keyed by EVI ID.'''\n\n evi_id = managedattribute(\n name='evi_id',\n read_only=True,\n doc='''int: EVI ID read-only 
key''')\n\n @property\n def evi(self):\n '''Evi: The Evi object associated with the EVI ID.'''\n evi_id = self.evi_id\n for evi in self.parent.evis:\n if evi.evi_id == evi_id:\n return evi\n raise AttributeError('evi: no Evi found matching evi_id=%r' % (evi_id,))\n\n @classmethod\n def _sanitize_key(cls, key):\n if isinstance(key, Evi):\n return key.evi_id\n if isinstance(key, (str, int)):\n return int(key)\n return key\n\n def __init__(self, parent, key):\n '''\n Args:\n parent: Parent object to inherit attributes from.\n key (int): EVI ID key.\n '''\n self._evi_id = int(key)\n super().__init__(parent=parent)\n\n\n@functools.total_ordering\nclass Evi(ConfigurableBase):\n\n evi_id = managedattribute(\n name='evi_id',\n read_only=True,\n doc='int: EVI ID (read-only hash key)')\n\n evi_mode = managedattribute(\n name='evi_mode',\n default='vlan-based',\n type=(None,str))\n\n device = managedattribute(\n name='device',\n read_only=True,\n gettype=managedattribute.auto_unref)\n\n @property\n def testbed(self):\n return self.device.testbed\n\n @property\n def evpn(self):\n return self.device.evpn\n\n advertise_mac = managedattribute(\n name='advertise_mac',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n control_word_disable = managedattribute(\n name='control_word_disable',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n class BgpAttributes(SubAttributes):\n\n enabled = managedattribute(\n name='enabled',\n default=False,\n type=managedattribute.test_istype(bool))\n\n rd = managedattribute(\n name='rd',\n default=None,\n type=(None, RouteDistinguisher))\n\n export_route_targets = managedattribute(\n name='export_route_targets',\n finit=typedset(RouteTarget.ImportExport).copy,\n type=typedset(RouteTarget.ImportExport)._from_iterable)\n\n export_route_target_none = managedattribute(\n name='export_route_target_none',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n import_route_targets = managedattribute(\n name='import_route_targets',\n finit=typedset(RouteTarget.ImportExport).copy,\n type=typedset(RouteTarget.ImportExport)._from_iterable)\n\n import_route_target_none = managedattribute(\n name='import_route_target_none',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n bgp = managedattribute(\n name='bgp',\n read_only=True,\n doc=BgpAttributes.__doc__)\n\n @bgp.initter\n def bgp(self):\n return self.BgpAttributes(parent=self)\n\n class LoadBalancingAttributes(SubAttributes):\n\n def __init__(self, _evi):\n self._evi = _evi\n super().__init__(\n # Evpn.device_attr[].load_balancing\n parent=None)\n\n @property\n def parent(self):\n return self._evi.evpn.device_attr[self.device].load_balancing\n\n @property\n def testbed(self):\n return self._evi.testbed\n\n @property\n def device_name(self):\n return self._evi.device_name\n\n @property\n def device(self):\n return self._evi.device\n\n load_balancing = managedattribute(\n name='load_balancing',\n read_only=True,\n doc=LoadBalancingAttributes.__doc__)\n\n @load_balancing.initter\n def load_balancing(self):\n return self.LoadBalancingAttributes(_evi=self)\n\n def __eq__(self, other):\n if not isinstance(other, Evi):\n return NotImplemented\n # return (self.device, self.evi_id) == (other.device, other.evi_id)\n return (self.evi_id, self.device) == (other.evi_id, other.device)\n\n def __lt__(self, other):\n if not isinstance(other, Evi):\n return NotImplemented\n return (self.device, self.evi_id) < (other.device, other.evi_id)\n\n def __hash__(self):\n return 
hash(self.evi_id)\n\n def __init__(self, device, evi_id, *args, **kwargs):\n self._evi_id = evi_id\n assert getattr(device, 'evpn', None)\n self._device = weakref.ref(device)\n self.evpn.add_evi(self)\n super().__init__(*args, **kwargs)\n\n def remove(self):\n try:\n self.evpn.remove_evi(self)\n except:\n pass\n self._device = None\n\n def __repr__(self):\n return '<%s object %r on %r at 0x%x>' % (\n self.__class__.__name__,\n self.evi_id,\n self.device.name,\n id(self))\n\n", "id": "10198424", "language": "Python", "matching_score": 3.7254741191864014, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/evpn/evi.py" }, { "content": "\n__all__ = (\n 'Evpn',\n)\n\nfrom genie.utils.cisco_collections import typedset\n\nfrom genie.decorator import managedattribute\nfrom genie.conf.base import Base, Interface\nimport genie.conf.base.attributes\nfrom genie.conf.base.attributes import SubAttributes, SubAttributesDict, AttributesHelper\n\nfrom genie.libs.conf.base import MAC\nfrom genie.libs.conf.base import RouteDistinguisher, RouteTarget\nfrom genie.libs.conf.base.feature import *\nfrom genie.libs.conf.l2vpn.pseudowire import PseudowireNeighborSubAttributes\n\nfrom .esi import ESI\nfrom .vni import Vni, VniSubAttributes\nfrom .evi import Evi\n\n# evpn.\n# # Defaults:\n# bgp.\n# ethernet_segment.\n# bgp.\n# load_balancing.\n# # Actual:\n# device_attr[]. (parent = evpn)\n# # Defaults:\n# ethernet_segment. (parent = evpn.ethernet_segment)\n# bgp.\n# # Actual:\n# interface_attr[]. (parent = evpn.device_attr[])\n# ethernet_segment. (parent = evpn.device_attr[].ethernet_segment)\n# bgp.\n# vni_attr[]. (parent = evpn.device_attr[])\n# ethernet_segment. (parent = evpn.device_attr[].ethernet_segment)\n# bgp.\n# pw_neighbor_attr[]. (parent = evpn.device_attr[])\n# ethernet_segment. (parent = evpn.device_attr[].ethernet_segment)\n# bgp.\n# vfi_attr[]. (parent = evpn.device_attr[])\n# ethernet_segment. (parent = evpn.device_attr[].ethernet_segment)\n# bgp.\n# bgp. (parent = evpn.bgp)\n# load_balancing. 
(parent = evpn.load_balancing)\n\n\nclass BaseNamespace(Base):\n '''A simple namespace that inherits some attributes from a Base-like object.\n\n Attributes inherited:\n - testbed\n '''\n\n base = managedattribute(\n name='base',\n read_only=True,\n doc='''Object that is either a Base or inherits from it.''')\n\n def __init__(self, base, **kwargs):\n self._base = base\n super().__init__(**kwargs)\n\n @property\n def testbed(self):\n return self.base.testbed\n\nclass DeviceNamespace(BaseNamespace):\n '''A simple namespace that inherits some attributes from a Device-like object.\n\n Attributes inherited:\n - testbed (BaseNamespace)\n - device_name (if base is a DeviceSubAttributes)\n - device\n '''\n\n @property\n def device_name(self):\n return self.base.device_name\n\n @property\n def device(self):\n return self.base.device\n\nclass InterfaceNamespace(DeviceNamespace):\n '''A simple namespace that inherits some attributes from a Interface-like object.\n\n Attributes inherited:\n - testbed (BaseNamespace)\n - device_name (DeviceNamespace)\n - device (DeviceNamespace)\n - interface_name (if base is a InterfaceSubAttributes)\n - interface\n '''\n\n @property\n def interface_name(self):\n return self.base.interface_name\n\n @property\n def interface(self):\n return self.base.interface\n\nclass PseudowireNeighborNamespace(BaseNamespace):\n pass\n\nclass VfiNamespace(BaseNamespace):\n pass\n\nclass VniNamespace(BaseNamespace):\n '''A simple namespace that inherits some attributes from a Vni-like object.\n\n Attributes inherited:\n - testbed (BaseNamespace)\n - vni (if base is a VniAttributes)\n - vni_id\n '''\n\n @property\n def vni_id(self):\n return self.base.vni_id\n\n @property\n def vni(self):\n return self.base.vni\n\nclass VfiSubAttributes(genie.conf.base.attributes.KeyedSubAttributes):\n\n vfi_name = managedattribute(\n name='vfi_name',\n read_only=True)\n\n def __init__(self, parent, key):\n assert isinstance(key, str)\n self._vfi_name = key\n super().__init__(parent=parent)\n\nclass Evpn(DeviceFeature, InterfaceFeature):\n\n class DefaultDeviceBgpAttributes(BaseNamespace):\n\n enabled = managedattribute(\n name='enabled',\n default=False,\n type=managedattribute.test_istype(bool))\n\n rd = managedattribute(\n name='rd',\n default=None,\n type=(None, RouteDistinguisher))\n\n bgp = managedattribute(\n name='bgp',\n read_only=True,\n doc=DefaultDeviceBgpAttributes.__doc__)\n\n @bgp.initter\n def bgp(self):\n return self.DefaultDeviceBgpAttributes(base=self)\n\n evis = managedattribute(\n name='evis',\n finit=typedset(managedattribute.test_isinstance(Evi)).copy,\n type=typedset(managedattribute.test_isinstance(Evi))._from_iterable,\n doc='A `set` of Evi associated objects')\n\n def add_evi(self, evi): # TODO DEPRECATE\n self.evis.add(evi)\n\n def remove_evi(self, evi): # TODO DEPRECATE\n self.evis.remove(evi)\n\n class InterfaceAttributes(genie.conf.base.attributes.InterfaceSubAttributes):\n\n class EthernetSegmentAttributes(InterfaceNamespace, SubAttributes):\n\n esi = managedattribute(\n name='esi',\n default=None,\n type=(None, ESI))\n\n class BgpAttributes(InterfaceNamespace, SubAttributes):\n\n def __init__(self, base):\n super().__init__(\n base=base,\n # Evpn.device_attr[].ethernet_segment.bgp\n parent=base.parent.ethernet_segment.bgp)\n\n bgp = managedattribute(\n name='bgp',\n read_only=True,\n doc=BgpAttributes.__doc__)\n\n @bgp.initter\n def bgp(self):\n return self.BgpAttributes(base=self.base)\n\n def __init__(self, base):\n super().__init__(\n base=base,\n # 
Evpn.device_attr[].ethernet_segment\n parent=base.parent.ethernet_segment)\n\n ethernet_segment = managedattribute(\n name='ethernet_segment',\n read_only=True,\n doc=EthernetSegmentAttributes.__doc__)\n\n @ethernet_segment.initter\n def ethernet_segment(self):\n return self.EthernetSegmentAttributes(base=self)\n\n def __init__(self, parent, key):\n super().__init__(parent, key)\n\n class PseudowireNeighborAttributes(PseudowireNeighborSubAttributes):\n\n class EthernetSegmentAttributes(PseudowireNeighborNamespace, SubAttributes):\n\n esi = managedattribute(\n name='esi',\n default=None,\n type=(None, ESI))\n\n class BgpAttributes(PseudowireNeighborNamespace, SubAttributes):\n\n def __init__(self, base):\n super().__init__(\n base=base,\n # Evpn.device_attr[].ethernet_segment.bgp\n parent=base.parent.ethernet_segment.bgp)\n\n bgp = managedattribute(\n name='bgp',\n read_only=True,\n doc=BgpAttributes.__doc__)\n\n @bgp.initter\n def bgp(self):\n return self.BgpAttributes(base=self.base)\n\n def __init__(self, base):\n super().__init__(\n base=base,\n # Evpn.device_attr[].ethernet_segment\n parent=base.parent.ethernet_segment)\n\n ethernet_segment = managedattribute(\n name='ethernet_segment',\n read_only=True,\n doc=EthernetSegmentAttributes.__doc__)\n\n @ethernet_segment.initter\n def ethernet_segment(self):\n return self.EthernetSegmentAttributes(base=self)\n\n def __init__(self, parent, key):\n super().__init__(parent, key)\n\n class VfiAttributes(VfiSubAttributes):\n\n class EthernetSegmentAttributes(VfiNamespace, SubAttributes):\n\n esi = managedattribute(\n name='esi',\n default=None,\n type=(None, ESI))\n\n class BgpAttributes(VfiNamespace, SubAttributes):\n\n def __init__(self, base):\n super().__init__(\n base=base,\n # Evpn.device_attr[].ethernet_segment.bgp\n parent=base.parent.ethernet_segment.bgp)\n\n bgp = managedattribute(\n name='bgp',\n read_only=True,\n doc=BgpAttributes.__doc__)\n\n @bgp.initter\n def bgp(self):\n return self.BgpAttributes(base=self.base)\n\n def __init__(self, base):\n super().__init__(\n base=base,\n # Evpn.device_attr[].ethernet_segment\n parent=base.parent.ethernet_segment)\n\n ethernet_segment = managedattribute(\n name='ethernet_segment',\n read_only=True,\n doc=EthernetSegmentAttributes.__doc__)\n\n @ethernet_segment.initter\n def ethernet_segment(self):\n return self.EthernetSegmentAttributes(base=self)\n\n def __init__(self, parent, key):\n super().__init__(parent, key)\n\n class VniAttributes(VniSubAttributes):\n\n class EthernetSegmentAttributes(VniNamespace, SubAttributes):\n\n esi = managedattribute(\n name='esi',\n default=None,\n type=(None, ESI))\n\n class BgpAttributes(VniNamespace, SubAttributes):\n\n def __init__(self, base):\n super().__init__(\n base=base,\n # Evpn.device_attr[].ethernet_segment.bgp\n parent=base.parent.ethernet_segment.bgp)\n\n bgp = managedattribute(\n name='bgp',\n read_only=True,\n doc=BgpAttributes.__doc__)\n\n @bgp.initter\n def bgp(self):\n return self.BgpAttributes(base=self.base)\n\n def __init__(self, base):\n super().__init__(\n base=base,\n # Evpn.device_attr[].ethernet_segment\n parent=base.parent.ethernet_segment)\n\n ethernet_segment = managedattribute(\n name='ethernet_segment',\n read_only=True,\n doc=EthernetSegmentAttributes.__doc__)\n\n @ethernet_segment.initter\n def ethernet_segment(self):\n return self.EthernetSegmentAttributes(base=self)\n\n def __init__(self, parent, key):\n super().__init__(parent, key)\n\n class DefaultInterfaceEthernetSegmentAttributes(BaseNamespace):\n\n enabled = 
managedattribute(\n name='enabled',\n default=False,\n type=managedattribute.test_istype(bool))\n\n backbone_source_mac = managedattribute(\n name='backbone_source_mac',\n default=None,\n type=(None, MAC))\n\n class DefaultBgpAttributes(BaseNamespace):\n\n enabled = managedattribute(\n name='enabled',\n default=False,\n type=managedattribute.test_istype(bool))\n\n import_route_target = managedattribute(\n name='import_route_target',\n default=None,\n type=(None, MAC)) # Yes, this corresponds to the system_mac/root_bridge_mac of type 1/2/3 ESIs\n\n bgp = managedattribute(\n name='bgp',\n read_only=True,\n doc=DefaultBgpAttributes.__doc__)\n\n @bgp.initter\n def bgp(self):\n return self.DefaultBgpAttributes(base=self.base)\n\n force_single_homed = managedattribute(\n name='force_single_homed',\n default=False,\n type=managedattribute.test_istype(bool))\n\n esi = managedattribute(\n name='esi',\n default=None,\n type=(None, ESI))\n\n load_balancing_mode = managedattribute(\n name='load_balancing_mode',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n ethernet_segment = managedattribute(\n name='ethernet_segment',\n read_only=True,\n doc=DefaultInterfaceEthernetSegmentAttributes.__doc__)\n\n @ethernet_segment.initter\n def ethernet_segment(self):\n return self.DefaultInterfaceEthernetSegmentAttributes(base=self)\n\n mac_flush = managedattribute(\n name='mac_flush',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n recovery_timer = managedattribute(\n name='recovery_timer',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n peering_timer = managedattribute(\n name='peering_timer',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n source_interface = managedattribute(\n name='source_interface',\n default=None,\n type=(None, managedattribute.test_isinstance(Interface)))\n\n class DefaultDeviceLoadBalancingAttributes(BaseNamespace):\n\n enabled = managedattribute(\n name='enabled',\n default=False,\n type=(None, managedattribute.test_istype(bool)))\n\n flow_label_static = managedattribute(\n name='flow_label_static',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n load_balancing = managedattribute(\n name='load_balancing',\n read_only=True,\n doc=DefaultDeviceLoadBalancingAttributes.__doc__)\n\n @load_balancing.initter\n def load_balancing(self):\n return self.DefaultDeviceLoadBalancingAttributes(base=self)\n\n class DeviceAttributes(genie.conf.base.attributes.DeviceSubAttributes):\n\n replication_type = managedattribute(\n name='replication_type',\n default=None,\n type=(None,str))\n\n label_mode = managedattribute(\n name='label_mode',\n default=None,\n type=(None,str))\n\n arp_flooding_suppression = managedattribute(\n name='arp_flooding_suppression',\n default=None,\n type=(None,bool))\n\n interface_attr = managedattribute(\n name='interface_attr',\n read_only=True,\n #doc=Evpn.InterfaceAttributes.__doc__,\n doc='Interface-specific attributes. See Evpn.InterfaceAttributes')\n\n @interface_attr.initter\n def interface_attr(self):\n return SubAttributesDict(Evpn.InterfaceAttributes, parent=self)\n\n pw_neighbor_attr = managedattribute(\n name='pw_neighbor_attr',\n read_only=True,\n doc='pw-neighbor-specific attributes. See Evpn.PseudowireNeighborAttributes')\n\n @pw_neighbor_attr.initter\n def pw_neighbor_attr(self):\n return SubAttributesDict(Evpn.PseudowireNeighborAttributes, parent=self)\n\n vfi_attr = managedattribute(\n name='vfi_attr',\n read_only=True,\n doc='vfi-specific attributes. 
See Evpn.VfiAttributes')\n\n @vfi_attr.initter\n def vfi_attr(self):\n return SubAttributesDict(Evpn.VfiAttributes, parent=self)\n\n vni_attr = managedattribute(\n name='vni_attr',\n read_only=True,\n #doc=Evpn.VniAttributes.__doc__,\n doc='VNI-specific attributes. See Evpn.VniAttributes')\n\n @vni_attr.initter\n def vni_attr(self):\n return SubAttributesDict(Evpn.VniAttributes, parent=self)\n\n class BgpAttributes(DeviceNamespace, SubAttributes):\n\n def __init__(self, base):\n super().__init__(\n base=base,\n # Evpn.bgp\n parent=base.parent.bgp)\n\n bgp = managedattribute(\n name='bgp',\n read_only=True,\n doc=BgpAttributes.__doc__)\n\n @bgp.initter\n def bgp(self):\n return self.BgpAttributes(base=self)\n\n class DefaultInterfaceEthernetSegmentAttributes(DeviceNamespace, SubAttributes):\n\n class BgpAttributes(DeviceNamespace, SubAttributes):\n\n def __init__(self, base):\n super().__init__(\n base=base,\n # Evpn.ethernet_segment.bgp\n parent=base.parent.ethernet_segment.bgp)\n\n bgp = managedattribute(\n name='bgp',\n read_only=True,\n doc=BgpAttributes.__doc__)\n\n @bgp.initter\n def bgp(self):\n return self.BgpAttributes(base=self.base)\n\n esi = managedattribute(\n name='esi',\n default=None,\n type=(None, ESI))\n\n def __init__(self, base):\n super().__init__(\n base=base,\n # Evpn.ethernet_segment\n parent=base.parent.ethernet_segment)\n\n ethernet_segment = managedattribute(\n name='ethernet_segment',\n read_only=True,\n doc=DefaultInterfaceEthernetSegmentAttributes.__doc__)\n\n @ethernet_segment.initter\n def ethernet_segment(self):\n return self.DefaultInterfaceEthernetSegmentAttributes(base=self)\n\n class LoadBalancingAttributes(DeviceNamespace, SubAttributes):\n\n def __init__(self, base):\n super().__init__(\n base=base,\n # Evpn.load_balancing\n parent=base.parent.load_balancing)\n\n load_balancing = managedattribute(\n name='load_balancing',\n read_only=True,\n doc=LoadBalancingAttributes.__doc__)\n\n @load_balancing.initter\n def load_balancing(self):\n return self.LoadBalancingAttributes(base=self)\n\n @property\n def evis(self):\n '''EVIs on this device'''\n device = self.device\n for evi in self.parent.evis:\n if evi.device is device:\n yield evi\n\n def __init__(self, parent, key):\n super().__init__(parent, key)\n\n device_attr = managedattribute(\n name='device_attr',\n read_only=True,\n doc=DeviceAttributes.__doc__)\n\n @device_attr.initter\n def device_attr(self):\n return SubAttributesDict(self.DeviceAttributes, parent=self)\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n def _on_added_from_device(self, device):\n super()._on_added_from_device(device)\n assert getattr(device, 'evpn', None) is None\n device.evpn = self\n\n def _on_removed_from_device(self, device):\n assert getattr(device, 'evpn', None) is self\n super()._on_removed_from_device(device)\n device.evpn = None\n\n def _on_added_from_interface(self, interface):\n super()._on_added_from_interface(interface)\n assert getattr(interface, 'evpn', None) is None\n interface.evpn = self\n\n def _on_removed_from_interface(self, interface):\n assert getattr(interface, 'evpn', None) is self\n super()._on_removed_from_interface(interface)\n interface.evpn = None\n\n def build_config(self, devices=None, interfaces=None,\n apply=True, attributes=None):\n attributes = AttributesHelper(self, attributes)\n cfgs = {}\n\n devices, interfaces, links = consolidate_feature_args(self, devices, interfaces, None)\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr',\n 
keys=devices, sort=True):\n cfgs[key] = sub.build_config(apply=False, attributes=attributes2,\n interfaces=interfaces)\n cfgs = {key: value for key, value in cfgs.items() if value}\n\n if apply:\n self.testbed.config_on_devices(cfgs, fail_invalid=True)\n else:\n return cfgs\n\n def build_unconfig(self, devices=None, interfaces=None,\n apply=True, attributes=None):\n attributes = AttributesHelper(self, attributes)\n cfgs = {}\n\n devices, interfaces, links = consolidate_feature_args(self, devices, interfaces, None)\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr',\n keys=devices, sort=True):\n cfgs[key] = sub.build_unconfig(apply=False, attributes=attributes2,\n interfaces=interfaces)\n cfgs = {key: value for key, value in cfgs.items() if value}\n\n if apply:\n self.testbed.config_on_devices(cfgs, fail_invalid=True)\n else:\n return cfgs\n\n", "id": "9380492", "language": "Python", "matching_score": 3.671700954437256, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/evpn/evpn.py" }, { "content": "\n__all__ = (\n 'L2vpn',\n )\n\nimport collections\n\nfrom genie.utils.cisco_collections import typedset\n\nfrom genie.decorator import managedattribute\nfrom genie.conf.base import DeviceFeature\nimport genie.conf.base.attributes\nfrom genie.conf.base.attributes import SubAttributes, SubAttributesDict, AttributesInheriter, AttributesHelper\n\nfrom genie.libs.conf.base import IPv4Address, MAC\nfrom .bridge_domain import BridgeDomain\nfrom .g8032 import G8032Ring\nfrom .pseudowire import PseudowireClass\nfrom .xconnect import Xconnect\n\nForwardingInjectLocalMac = collections.namedtuple(\n 'ForwardingInjectLocalMac',\n ('mac_address', 'interface', 'location'))\n\nclass L2vpn(DeviceFeature):\n \n ForwardingInjectLocalMac = ForwardingInjectLocalMac\n\n bridge_domains = managedattribute(\n name='bridge_domains',\n finit=typedset(managedattribute.test_isinstance(BridgeDomain)).copy,\n type=typedset(managedattribute.test_isinstance(BridgeDomain))._from_iterable,\n doc='A `set` of BridgeDomain associated objects')\n\n def add_bridge_domain(self, bridge_domain): # TODO DEPRECATE\n self.bridge_domains.add(bridge_domain)\n\n def remove_bridge_domain(self, bridge_domain): # TODO DEPRECATE\n self.bridge_domains.remove(bridge_domain)\n\n g8032_rings = managedattribute(\n name='g8032_rings',\n finit=typedset(managedattribute.test_isinstance(G8032Ring)).copy,\n type=typedset(managedattribute.test_isinstance(G8032Ring))._from_iterable,\n doc='A `set` of G.8032 Ring associated objects')\n\n def add_g8032_ring(self, g8032_ring): # TODO DEPRECATE\n self.g8032_rings.add(g8032_ring)\n\n def remove_g8032_ring(self, g8032_ring): # TODO DEPRECATE\n self.g8032_rings.remove(g8032_ring)\n\n pseudowire_classes = managedattribute(\n name='pseudowire_classes',\n finit=typedset(managedattribute.test_isinstance(PseudowireClass)).copy,\n type=typedset(managedattribute.test_isinstance(PseudowireClass))._from_iterable,\n doc='A `set` of Pseudowire Class associated objects')\n\n def add_pseudowire_class(self, pseudowire_class): # TODO DEPRECATE\n self.pseudowire_classes.add(pseudowire_class)\n\n def remove_pseudowire_class(self, pseudowire_class): # TODO DEPRECATE\n self.pseudowire_classes.remove(pseudowire_class)\n\n xconnects = managedattribute(\n name='xconnects',\n finit=typedset(managedattribute.test_isinstance(Xconnect)).copy,\n type=typedset(managedattribute.test_isinstance(Xconnect))._from_iterable,\n doc='A `set` of Xconnect associated objects')\n\n def add_xconnect(self, xconnect): 
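# convenience wrapper around self.xconnects.add(); 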
# TODO DEPRECATE\n self.xconnects.add(xconnect)\n\n def remove_xconnect(self, xconnect): # TODO DEPRECATE\n self.xconnects.remove(xconnect)\n\n class DefaultDevicePbbAttributes(object):\n\n enabled = managedattribute(\n name='enabled',\n default=False,\n type=managedattribute.test_istype(bool))\n\n backbone_source_mac = managedattribute(\n name='backbone_source_mac',\n default=None,\n type=(None, MAC))\n\n pbb = managedattribute(\n name='pbb',\n read_only=True,\n finit=DefaultDevicePbbAttributes,\n doc=DefaultDevicePbbAttributes.__doc__)\n\n class DeviceAttributes(genie.conf.base.attributes.DeviceSubAttributes):\n\n router_id = managedattribute(\n name='router_id',\n default=None,\n type=(None, IPv4Address))\n\n forwarding_inject_local_macs = managedattribute(\n name='forwarding_inject_local_macs',\n finit=set)\n #finit=typedset(ForwardingInjectLocalMac).copy,\n #type=typedset(ForwardingInjectLocalMac)._from_iterable)\n\n class PbbAttributes(SubAttributes):\n\n def __init__(self, _device_attr):\n self._device_attr = _device_attr\n super().__init__(\n # L2vpn.pbb\n parent=_device_attr.parent.pbb)\n\n @property\n def testbed(self):\n return self._device_attr.testbed\n\n @property\n def device_name(self):\n return self._device_attr.device_name\n\n @property\n def device(self):\n return self._device_attr.device\n\n pbb = managedattribute(\n name='pbb',\n read_only=True,\n doc=PbbAttributes.__doc__)\n\n @pbb.initter\n def pbb(self):\n return self.PbbAttributes(_device_attr=self)\n\n @property\n def bridge_domains(self):\n device = self.device\n for bd in self.parent.bridge_domains:\n if device in bd.devices:\n yield bd\n\n @property\n def g8032_rings(self):\n device = self.device\n for ring in self.parent.g8032_rings:\n if device in ring.devices:\n yield ring\n\n @property\n def pseudowire_classes(self):\n device = self.device\n for pwc in self.parent.pseudowire_classes:\n if device in pwc.devices:\n yield pwc\n\n @property\n def xconnects(self):\n device = self.device\n for xc in self.parent.xconnects:\n if device in xc.devices:\n yield xc\n\n def __init__(self, parent, key):\n super().__init__(parent, key)\n\n device_attr = managedattribute(\n name='device_attr',\n read_only=True,\n doc=DeviceAttributes.__doc__)\n\n @device_attr.initter\n def device_attr(self):\n return SubAttributesDict(self.DeviceAttributes, parent=self)\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n def _on_added_from_device(self, device):\n super()._on_added_from_device(device)\n assert getattr(device, 'l2vpn', None) is None\n device.l2vpn = self\n\n def _on_removed_from_device(self, device):\n assert getattr(device, 'l2vpn', None) is self\n super()._on_removed_from_device(device)\n device.l2vpn = None\n\n def build_config(self, devices=None, apply=True,\n attributes=None,\n **kwargs):\n cfgs = {}\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n\n if devices is None:\n devices = self.devices\n devices = set(devices)\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr',\n keys=devices, sort=True):\n cfgs[key] = sub.build_config(apply=False, attributes=attributes2)\n\n if apply:\n self.testbed.config_on_devices(cfgs, fail_invalid=True)\n else:\n return cfgs\n\n def build_unconfig(self, devices=None, apply=True,\n attributes=None,\n **kwargs):\n cfgs = {}\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n\n if devices is None:\n devices = self.devices\n devices = set(devices)\n\n for key, sub, attributes2 in 
attributes.mapping_items(\n 'device_attr',\n keys=devices, sort=True):\n cfgs[key] = sub.build_unconfig(apply=False, attributes=attributes2)\n\n if apply:\n self.testbed.config_on_devices(cfgs, fail_invalid=True)\n else:\n return cfgs\n\n", "id": "7416943", "language": "Python", "matching_score": 4.723637580871582, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/l2vpn/l2vpn.py" }, { "content": "\n__all__ = (\n 'IccpGroup',\n )\n\nfrom ats.datastructures import WeakList\n\nfrom genie.utils.cisco_collections import typedset\n\nfrom genie.decorator import managedattribute\nfrom genie.conf.base import DeviceFeature, Interface\nimport genie.conf.base.attributes\nfrom genie.conf.base.attributes import SubAttributes, SubAttributesDict, AttributesInheriter, AttributesHelper\n\nfrom genie.libs.conf.base import \\\n MAC, \\\n IPv4Address, IPv6Address\n\nclass IccpGroup(DeviceFeature):\n\n group_id = managedattribute(\n name='group_id',\n read_only=True,\n doc='Group ID (mandatory)')\n\n mlacp_node_id = managedattribute(\n name='mlacp_node_id',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n mlacp_system_mac = managedattribute(\n name='mlacp_system_mac',\n default=None,\n type=(None, MAC))\n\n mlacp_system_priority = managedattribute(\n name='mlacp_system_priority',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n interfaces = managedattribute(\n name='interfaces',\n read_only=True,\n finit=WeakList,\n gettype=frozenset)\n\n def add_interface(self, interface):\n assert isinstance(interface, genie.conf.base.Interface)\n self._interfaces.append(interface)\n\n def remove_interface(self, interface):\n assert isinstance(interface, genie.conf.base.Interface)\n self._interfaces.remove(interface)\n\n backbone_interfaces = managedattribute(\n name='backbone_interfaces',\n finit=typedset(managedattribute.test_isinstance(Interface)).copy,\n type=typedset(managedattribute.test_isinstance(Interface))._from_iterable)\n\n isolation_recovery_delay = managedattribute(\n name='isolation_recovery_delay',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n mac_flush = managedattribute(\n name='mac_flush',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n mode = managedattribute(\n name='mode',\n default=None,\n type=(None, managedattribute.test_istype(str)))\n\n primary_vlan = managedattribute(\n name='primary_vlan',\n default=None,\n type=(None,\n managedattribute.test_istype(int),\n managedattribute.test_istype(str)))\n\n recovery_delay = managedattribute(\n name='recovery_delay',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n secondary_vlan = managedattribute(\n name='secondary_vlan',\n default=None,\n type=(None,\n managedattribute.test_istype(int),\n managedattribute.test_istype(str)))\n\n multi_homing_node_id = managedattribute(\n name='multi_homing_node_id',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n class DeviceAttributes(genie.conf.base.attributes.DeviceSubAttributes):\n\n backbone_interfaces = managedattribute(\n name='backbone_interfaces',\n type=typedset(managedattribute.test_isinstance(Interface))._from_iterable)\n\n @backbone_interfaces.defaulter\n def backbone_interfaces(self):\n device = self.device\n return frozenset(\n interface\n for interface in self.parent.backbone_interfaces\n if interface.device is device)\n\n class InterfaceAttributes(genie.conf.base.attributes.InterfaceSubAttributes):\n\n def __init__(self, parent, key):\n super().__init__(parent, 
key)\n\n interface_attr = managedattribute(\n name='interface_attr',\n read_only=True,\n doc=InterfaceAttributes.__doc__)\n\n @interface_attr.initter\n def interface_attr(self):\n return SubAttributesDict(self.InterfaceAttributes, parent=self)\n\n # interfaces -- See DeviceSubAttributes\n\n def __init__(self, parent, key):\n super().__init__(parent, key)\n\n device_attr = managedattribute(\n name='device_attr',\n read_only=True,\n doc=DeviceAttributes.__doc__)\n\n @device_attr.initter\n def device_attr(self):\n return SubAttributesDict(self.DeviceAttributes, parent=self)\n\n def __init__(self, group_id, *args, **kwargs):\n self._group_id = int(group_id)\n self.interfaces # init!\n super().__init__(*args, **kwargs)\n\n def build_config(self, devices=None, apply=True,\n attributes=None,\n **kwargs):\n cfgs = {}\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n\n if devices is None:\n devices = self.devices\n devices = set(devices)\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr',\n keys=devices, sort=True):\n cfgs[key] = sub.build_config(apply=False, attributes=attributes2)\n\n if apply:\n self.testbed.config_on_devices(cfgs, fail_invalid=True)\n else:\n return cfgs\n\n def build_unconfig(self, devices=None, apply=True,\n attributes=None,\n **kwargs):\n cfgs = {}\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n\n if devices is None:\n devices = self.devices\n devices = set(devices)\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr',\n keys=devices, sort=True):\n cfgs[key] = sub.build_unconfig(apply=False, attributes=attributes2)\n\n if apply:\n self.testbed.config_on_devices(cfgs, fail_invalid=True)\n else:\n return cfgs\n\n", "id": "2782177", "language": "Python", "matching_score": 4.088777542114258, "max_stars_count": 0, "path": "pkgs/conf-pkg/src/genie/libs/conf/l2vpn/iccp_group.py" }, { "content": "__all__ = (\n 'Igmp',\n )\n\n# Genie\nfrom genie.utils.cisco_collections import typedset\nfrom genie.decorator import managedattribute\nfrom genie.conf.base.config import CliConfig\nfrom genie.conf.base.base import DeviceFeature, InterfaceFeature\n\n# genie.libs\nfrom .ssm import Ssm\nfrom .igmp_group import IgmpGroup\nfrom genie.libs.conf.base import Routing\nfrom genie.libs.conf.vrf import VrfSubAttributes\nfrom genie.conf.base.attributes import DeviceSubAttributes, \\\n SubAttributesDict,\\\n InterfaceSubAttributes, \\\n AttributesHelper\n\n# Structure Hierarchy:\n# Igmp\n# +-- DeviceAtributes\n# +-- VrfAttributes\n# +-- InterfaceAttributes\n\n\nclass Igmp(Routing, DeviceFeature, InterfaceFeature):\n \n # require_router_alert\n require_router_alert = managedattribute(\n name='require_router_alert',\n default=None,\n type=(None, managedattribute.test_istype(bool)),\n doc=\"Configure require_router_alert.\")\n\n # global_max_groups\n global_max_groups = managedattribute(\n name='global_max_groups',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc=\"Configure global_max_groups under vrf attribute.\")\n\n # enable\n enable = managedattribute(\n name='enable',\n default=None,\n type=(None, managedattribute.test_istype(bool)),\n doc=\"Configure 'ip pim sparse-mode' under interface.\")\n\n # last_member_query_interval\n last_member_query_interval = managedattribute(\n name='last_member_query_interval',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc=\"Configure last_member_query_interval under interface.\")\n\n # group_policy\n group_policy = 
managedattribute(\n name='group_policy',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc=\"Configure group_policy under interface.\")\n\n # immediate_leave\n immediate_leave = managedattribute(\n name='immediate_leave',\n default=None,\n type=(None, managedattribute.test_istype(bool)),\n doc=\"Configure immediate_leave under interface.\")\n\n # max_groups\n max_groups = managedattribute(\n name='max_groups',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc=\"Configure max_groups under interface.\")\n\n # query_interval\n query_interval = managedattribute(\n name='query_interval',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc=\"Configure query_interval under interface.\")\n\n # query_max_response_time\n query_max_response_time = managedattribute(\n name='query_max_response_time',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc=\"Configure query_max_response_time under interface.\")\n\n # robustness_variable\n robustness_variable = managedattribute(\n name='robustness_variable',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc=\"Configure robustness_variable.\")\n\n # version\n version = managedattribute(\n name='version',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc=\"Configure version under interface.\")\n\n\n class DeviceAttributes(DeviceSubAttributes):\n\n class VrfAttributes(VrfSubAttributes):\n\n def __init__(self, parent, key):\n self.vrf_id = key\n super().__init__(parent, key)\n\n ssm = managedattribute(\n name='ssm',\n finit=typedset(managedattribute.test_isinstance(Ssm)).copy,\n type=typedset(managedattribute.test_isinstance(Ssm))._from_iterable,\n doc='A `set` of ssm associated objects')\n\n def add_ssm(self, ssm):\n self.ssm.add(ssm)\n\n def remove_ssm(self, ssm):\n ssm._device = None\n try:\n self.ssm.remove(ssm)\n except:\n pass\n\n \n class InterfaceAttributes(InterfaceSubAttributes):\n\n def __init__(self, parent, key):\n self.intf = key\n super().__init__(parent, key)\n \n groups = managedattribute(\n name='groups',\n finit=typedset(managedattribute.test_isinstance(IgmpGroup)).copy,\n type=typedset(managedattribute.test_isinstance(IgmpGroup))._from_iterable,\n doc='A `set` of IgmpGroup associated objects')\n\n def add_groups(self, groups):\n self.groups.add(groups)\n\n def remove_groups(self, groups):\n groups._device = None\n try:\n self.groups.remove(groups)\n except:\n pass\n\n interface_attr = managedattribute(\n name='interface_attr',\n read_only=True,\n doc=InterfaceAttributes.__doc__)\n\n @interface_attr.initter\n def interface_attr(self):\n return SubAttributesDict(\n self.InterfaceAttributes, parent=self)\n\n vrf_attr = managedattribute(\n name='vrf_attr',\n read_only=True,\n doc=VrfAttributes.__doc__)\n\n @vrf_attr.initter\n def vrf_attr(self):\n return SubAttributesDict(self.VrfAttributes, parent=self)\n\n device_attr = managedattribute(\n name='device_attr',\n read_only=True,\n doc=DeviceAttributes.__doc__)\n\n @device_attr.initter\n def device_attr(self):\n return SubAttributesDict(self.DeviceAttributes, parent=self)\n\n # ===========================================================\n\n def build_config(self, devices=None, apply=True, attributes=None,\n unconfig=False):\n cfgs = {}\n attributes = AttributesHelper(self, attributes)\n if devices is None:\n devices = self.devices\n devices = set(devices)\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr', sort=True):\n cfgs[key] = 
sub.build_config(apply=False, attributes=attributes2)\n if apply:\n for device_name, cfg in sorted(cfgs.items()):\n self.testbed.config_on_devices(cfg, fail_invalid=True)\n else:\n return cfgs\n\n def build_unconfig(self, devices=None, apply=True, attributes=None):\n cfgs = {}\n attributes = AttributesHelper(self, attributes)\n if devices is None:\n devices = self.devices\n devices = set(devices)\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr', sort=True):\n cfgs[key] = sub.build_unconfig(apply=False, attributes=attributes2)\n\n if apply:\n for device_name, cfg in sorted(cfgs.items()):\n self.testbed.config_on_devices(cfg, fail_invalid=True)\n else:\n return cfgs\n", "id": "10628105", "language": "Python", "matching_score": 4.845114707946777, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/igmp/igmp.py" }, { "content": "__all__ = (\n 'Mcast',\n )\n\n# Genie\nfrom genie.utils.cisco_collections import typedset\nfrom genie.decorator import managedattribute\nfrom genie.conf.base.config import CliConfig\nfrom genie.conf.base.base import DeviceFeature, InterfaceFeature\n\nfrom .mroute import Mroute\nfrom genie.libs.conf.base import Routing\nfrom genie.libs.conf.vrf import Vrf, VrfSubAttributes\nfrom genie.libs.conf.address_family import AddressFamily,\\\n AddressFamilySubAttributes\nfrom genie.conf.base.attributes import DeviceSubAttributes, SubAttributesDict,\\\n InterfaceSubAttributes, AttributesHelper,\\\n KeyedSubAttributes\n\n# Structure Hierarchy:\n# Mcast\n# +--DeviceAttributes\n# +--VrfAttributes\n\n\nclass Mcast(Routing, DeviceFeature, InterfaceFeature):\n\n # ==================== MCAST attributes ====================\n\n # feature pim/pim6\n enabled = managedattribute(\n name='enabled',\n default=False,\n type=(None, managedattribute.test_istype(bool)),\n doc=\"Enable or disable both feature pim and pim6 on the device.\")\n\n # multipath\n multipath = managedattribute(\n name='multipath',\n default=False,\n type=(None, managedattribute.test_istype(bool)),\n doc=\"Configure 'ip multicast multipath' on the device.\")\n\n # if_enable\n if_enable = managedattribute(\n name='if_enable',\n default=False,\n type=(None, managedattribute.test_istype(bool)),\n doc=\"Configure 'enable' on the device interface.\")\n\n # ===========================================================\n\n # Compatibility\n address_families = managedattribute(\n name='address_families',\n finit=typedset(AddressFamily, {AddressFamily.ipv4}).copy,\n type=typedset(AddressFamily)._from_iterable)\n\n # Compatibility\n @property\n def vrfs(self):\n return \\\n self.force_vrfs | \\\n {intf.vrf for intf in self.interfaces}\n\n # Compatibility\n force_vrfs = managedattribute(\n name='force_vrfs',\n read_only=True,\n finit=set,\n gettype=frozenset)\n # XXXJST TODO force_vrfs needs to also be accessible per-device. 
Being read_only, that can't happen\n\n    # Compatibility\n    def add_force_vrf(self, vrf):\n        assert vrf is None or isinstance(vrf, Vrf)\n        self.force_vrfs # init!\n        self._force_vrfs.add(vrf)\n\n    # Compatibility\n    def remove_force_vrf(self, vrf):\n        assert vrf is None or isinstance(vrf, Vrf)\n        self.force_vrfs # init!\n        self._force_vrfs.remove(vrf)\n\n    # ===========================================================\n\n    class DeviceAttributes(DeviceSubAttributes):\n\n        mroutes = managedattribute(\n            name='mroutes',\n            finit=typedset(managedattribute.test_isinstance(Mroute)).copy,\n            type=typedset(managedattribute.test_isinstance(Mroute))._from_iterable,\n            doc='A `set` of Mroute associated objects')\n\n        def add_mroute(self, mroute):\n            self.mroutes.add(mroute)\n\n        def remove_mroute(self, mroute):\n            mroute._device = None\n            try:\n                self.mroutes.remove(mroute)\n            except KeyError:\n                pass\n\n        # Compatibility\n        address_families = managedattribute(\n            name='address_families',\n            type=typedset(AddressFamily)._from_iterable)\n\n        # Compatibility\n        @property\n        def vrfs(self):\n            return \\\n                self.force_vrfs | \\\n                {intf.vrf for intf in self.interfaces}\n\n        # Compatibility\n        @address_families.defaulter\n        def address_families(self):\n            return frozenset(self.parent.address_families)\n\n\n        class VrfAttributes(VrfSubAttributes):\n\n            def __init__(self, parent, key):\n                self.vrf_id = key\n                super().__init__(parent, key)\n\n            mroutes = managedattribute(\n                name='mroutes',\n                finit=typedset(managedattribute.test_isinstance(Mroute)).copy,\n                type=typedset(managedattribute.test_isinstance(Mroute))._from_iterable,\n                doc='A `set` of Mroute associated objects')\n\n            def add_mroute(self, mroute):\n                self.mroutes.add(mroute)\n\n            def remove_mroute(self, mroute):\n                mroute._device = None\n                try:\n                    self.mroutes.remove(mroute)\n                except KeyError:\n                    pass\n\n            # Compatibility\n            address_families = managedattribute(\n                name='address_families',\n                type=typedset(AddressFamily)._from_iterable)\n\n            # Compatibility\n            @address_families.defaulter\n            def address_families(self):\n                return frozenset(self.parent.address_families)\n\n            class AddressFamilyAttributes(AddressFamilySubAttributes):\n\n                mroutes = managedattribute(\n                    name='mroutes',\n                    finit=typedset(managedattribute.test_isinstance(Mroute)).copy,\n                    type=typedset(managedattribute.test_isinstance(Mroute))._from_iterable,\n                    doc='A `set` of Mroute associated objects')\n\n                def add_mroute(self, mroute):\n                    self.mroutes.add(mroute)\n\n                def remove_mroute(self, mroute):\n                    mroute._device = None\n                    try:\n                        self.mroutes.remove(mroute)\n                    except KeyError:\n                        pass\n\n            # Compatibility\n            class InterfaceAttributes(InterfaceSubAttributes):\n                pass\n\n            interface_attr = managedattribute(\n                name='interface_attr',\n                read_only=True,\n                doc=InterfaceAttributes.__doc__)\n\n            @interface_attr.initter\n            def interface_attr(self):\n                return SubAttributesDict(\n                    self.InterfaceAttributes, parent=self)\n\n            address_family_attr = managedattribute(\n                name='address_family_attr',\n                read_only=True,\n                doc=AddressFamilySubAttributes.__doc__)\n\n            @address_family_attr.initter\n            def address_family_attr(self):\n                return SubAttributesDict(self.AddressFamilyAttributes,\n                                         parent=self)\n\n        vrf_attr = managedattribute(\n            name='vrf_attr',\n            read_only=True,\n            doc=VrfAttributes.__doc__)\n\n        @vrf_attr.initter\n        def vrf_attr(self):\n            return SubAttributesDict(self.VrfAttributes, parent=self)\n\n    device_attr = managedattribute(\n        name='device_attr',\n        read_only=True,\n        doc=DeviceAttributes.__doc__)\n\n    @device_attr.initter\n    def device_attr(self):\n        return SubAttributesDict(self.DeviceAttributes, parent=self)\n\n    # ===========================================================\n\n    def build_config(self, devices=None, apply=True, attributes=None,\n                     unconfig=False):\n        cfgs = {}\n        attributes = AttributesHelper(self, attributes)\n        if devices is None:\n            devices = self.devices\n        devices = set(devices)\n\n        for key, sub, attributes2 in attributes.mapping_items(\n                'device_attr', sort=True):\n            cfgs[key] = sub.build_config(apply=False, attributes=attributes2)\n        if apply:\n            for device_name, cfg in sorted(cfgs.items()):\n                self.testbed.config_on_devices(cfg, fail_invalid=True)\n        else:\n            return cfgs\n\n    def build_unconfig(self, devices=None, apply=True, attributes=None):\n        cfgs = {}\n        attributes = AttributesHelper(self, attributes)\n        if devices is None:\n            devices = self.devices\n        devices = set(devices)\n\n        for key, sub, attributes2 in attributes.mapping_items(\n                'device_attr', sort=True):\n            cfgs[key] = sub.build_unconfig(apply=False, attributes=attributes2)\n\n        if apply:\n            for device_name, cfg in sorted(cfgs.items()):\n                self.testbed.config_on_devices(cfg, fail_invalid=True)\n        else:\n            return cfgs\n", "id": "11440321", "language": "Python", "matching_score": 5.06085729598999, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/mcast/mcast.py" }, { "content": "\n__all__ = (\n    'Rip',\n)\n\nfrom genie.utils.cisco_collections import typedset\n\nfrom genie.decorator import managedattribute\nfrom genie.conf.base import DeviceFeature, LinkFeature, InterfaceFeature\nimport genie.conf.base.attributes\nfrom genie.conf.base.attributes import SubAttributes, SubAttributesDict, AttributesHelper\n\nfrom genie.libs.conf.base import Routing\nfrom genie.libs.conf.address_family import AddressFamily, AddressFamilySubAttributes\nfrom genie.libs.conf.vrf import Vrf, VrfSubAttributes\n\n\n
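# Illustrative usage sketch (editor's addition; `dev` stands for an\n# already-connected genie Device object attached to a testbed and is an\n# assumption, not something defined in this module):\n#\n#     rip = Rip(instance_id=1)\n#     dev.add_feature(rip)\n#     cfgs = rip.build_config(apply=False)   # dict of per-device configs\n\n\n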
class Rip(Routing, DeviceFeature, LinkFeature, InterfaceFeature):\n    \"\"\"Rip class\n\n    `Rip` inherits the `Feature` class. The class defines all rip related\n    information and functionalities.\n\n    Args:\n\n\n    Returns:\n            a `Rip` object\n\n    \"\"\"\n\n    # When adding a rip instance to a link, all the interfaces in this link\n    # need to be told about the rip object.\n    # this syntax means : all interfaces\n    register_name = {}\n    register_name['interfaces'] = 'rip'\n\n    instance_id = managedattribute(\n        name='instance_id',\n        read_only=True, # mandatory\n        doc='RIP Instance ID')\n\n    shutdown = managedattribute(\n        name='shutdown',\n        default=None,\n        type=(None, managedattribute.test_istype(bool)))\n\n    address_families = managedattribute(\n        name='address_families',\n        finit=typedset(AddressFamily, {AddressFamily.ipv4_unicast}).copy,\n        type=typedset(AddressFamily)._from_iterable)\n\n    # AddressFamilyAttributes\n\n    distance = managedattribute(\n        name='distance',\n        default=None,\n        type=(None, managedattribute.test_istype(int)))\n\n    maximum_paths = managedattribute(\n        name='maximum_paths',\n        default=None,\n        type=(None, managedattribute.test_istype(int)))\n\n    default_metric = managedattribute(\n        name='default_metric',\n        default=None,\n        type=(None, managedattribute.test_istype(int)))\n\n    redistribute_lisp_rmap = managedattribute(\n        name='redistribute_lisp_rmap',\n        default=None,\n        type=(None, managedattribute.test_istype(str)))\n\n    redistribute_direct_rmap = managedattribute(\n        name='redistribute_direct_rmap',\n        default=None,\n        type=(None, managedattribute.test_istype(str)))\n\n    redistribute_static_rmap = managedattribute(\n        name='redistribute_static_rmap',\n        default=None,\n        type=(None, managedattribute.test_istype(str)))\n\n    @property\n    def vrfs(self):\n        return \\\n            self.force_vrfs | \\\n            {intf.vrf for intf in self.interfaces}\n\n    force_vrfs = managedattribute(\n        name='force_vrfs',\n        read_only=True,\n        finit=set,\n        gettype=frozenset)\n    # XXXJST TODO force_vrfs needs to also be accessible per-device. Being read_only, that can't happen\n\n    def add_force_vrf(self, vrf):\n        assert vrf is None or isinstance(vrf, Vrf)\n        self.force_vrfs # init!\n        self._force_vrfs.add(vrf)\n\n    def remove_force_vrf(self, vrf):\n        assert vrf is None or isinstance(vrf, Vrf)\n        self.force_vrfs # init!\n        self._force_vrfs.remove(vrf)\n\n    class DeviceAttributes(genie.conf.base.attributes.DeviceSubAttributes):\n        \"\"\"DeviceAttributes class\n\n        `DeviceAttributes` contains attributes and functionalities\n        that are specific to a device.\n\n        Args:\n            kwargs (`dict`) : assign attributes to this object while\n                              creating it.\n\n        Returns:\n            a `DeviceAttributes` object\n        \"\"\"\n\n        enabled_feature = managedattribute(\n            name='enabled_feature',\n            default=False,\n            type=managedattribute.test_istype(bool),\n            doc='''Argument to control 'feature rip' CLI''')\n\n        address_families = managedattribute(\n            name='address_families',\n            type=typedset(AddressFamily)._from_iterable)\n\n        @address_families.defaulter\n        def address_families(self):\n            return frozenset(self.parent.address_families)\n\n        @property\n        def vrfs(self):\n            return \\\n                self.force_vrfs | \\\n                {intf.vrf for intf in self.interfaces}\n\n        def __init__(self, **kwargs):\n            super().__init__(**kwargs)\n\n        class VrfAttributes(VrfSubAttributes):\n            \"\"\" VrfAttributes class\n\n            `VrfAttributes` inherits from the `SubAttributes` class.\n            It contains all vrf related attributes and functionalities,\n            and also provides access to its parent attributes.\n\n            Args:\n                kwargs (`dict`) : gives the user ability to assign some or all\n                                  address family attributes while creating the\n                                  object.\n\n            \"\"\"\n\n            def __init__(self, **kwargs):\n                super().__init__(**kwargs)\n\n            address_families = managedattribute(\n                name='address_families',\n                type=typedset(AddressFamily)._from_iterable)\n\n            @address_families.defaulter\n            def address_families(self):\n                return frozenset(self.parent.address_families)\n\n            class AddressFamilyAttributes(AddressFamilySubAttributes):\n                \"\"\" AddressFamilyAttributes class\n\n                `AddressFamilyAttributes` inherits from the `SubAttributes`\n                class. It contains all address family related attributes and\n                functionalities, and also provides access to its parent\n                attributes.\n\n                Args:\n                    kwargs (`dict`) : gives the user ability to assign some or all\n                                      address family attributes while creating the\n                                      object.\n\n                Class variables:\n\n                    allowed_keys (`List`): list of all allowed 'keys' the object can\n                                           access.\n\n                \"\"\"\n\n                allowed_keys = (\n                    AddressFamily.ipv4_unicast,\n                    AddressFamily.ipv6_unicast,\n                )\n\n            address_family_attr = managedattribute(\n                name='address_family_attr',\n                read_only=True,\n                doc=AddressFamilyAttributes.__doc__)\n\n            @address_family_attr.initter\n            def address_family_attr(self):\n                return SubAttributesDict(self.AddressFamilyAttributes, parent=self)\n\n        vrf_attr = managedattribute(\n            name='vrf_attr',\n            read_only=True,\n            doc=VrfAttributes.__doc__)\n\n        @vrf_attr.initter\n        def vrf_attr(self):\n            return SubAttributesDict(self.VrfAttributes, parent=self)\n\n    device_attr = managedattribute(\n        name='device_attr',\n        read_only=True,\n        doc=DeviceAttributes.__doc__)\n\n    @device_attr.initter\n    def device_attr(self):\n        return SubAttributesDict(self.DeviceAttributes, parent=self)\n\n    def __init__(self, instance_id, **kwargs):\n        '''Rip Base class'''\n        self._instance_id = int(instance_id)\n        super().__init__(**kwargs)\n\n    def build_config(self, devices=None, apply=True, attributes=None, **kwargs):\n        \"\"\"method to build the configuration based on attributes\n\n        API to build the configuration of a Rip object.\n        This configuration depends on the configurable attributes of\n        this object.\n\n        If apply is set to True, the configuration is applied on the\n        device(s); if it is set to False, a dictionary is returned.\n\n        If any kwargs are passed, then the configuration that is built\n        will use those kwargs given, and not the object attributes. This\n        is useful for modifying the configuration, without re-applying\n        everything.\n\n\n        Args:\n            apply (`bool`): If True will apply the configuration on the device\n                            and if False will return the configuration in a\n                            dictionary\n            kwargs (`dict`): If kwargs are passed, those attributes are used\n                             to configure the feature. Otherwise the object\n                             attributes are used\n\n        Return:\n            `dict` of configurations keyed by device name (when apply is\n            False)\n        \"\"\"\n        attributes = AttributesHelper(self, attributes)\n\n        # Get devices if none were passed\n        if devices is None:\n            devices = self.devices\n\n        # For each device, loop over device_attr\n        cfgs = {}\n        for key, sub, attributes2 in attributes.mapping_items('device_attr', keys=devices):\n            cfgs[key] = sub.build_config(apply=False, attributes=attributes2, **kwargs)\n\n        if apply:\n            self.testbed.config_on_devices(cfgs, fail_invalid=True)\n        else:\n            # Return configuration\n            return cfgs\n\n    def build_unconfig(self, devices=None, apply=True, attributes=None, **kwargs):\n        \"\"\"method to build the unconfiguration based on attributes\n\n        API to build the unconfiguration of a Rip object.\n        This configuration depends on the configurable attributes of\n        this object.\n\n        If apply is set to True, the configuration is applied on the\n        device(s); if it is set to False, a dictionary is returned.\n\n        If any kwargs are passed, then the configuration that is built\n        will use those kwargs given, and not the object attributes. This\n        is useful for modifying the configuration, without re-applying\n        everything.\n\n\n        Args:\n            apply (`bool`): If True will apply the configuration on the device\n                            and if False will return the configuration in a\n                            dictionary\n            kwargs (`dict`): If kwargs are passed, those attributes are used\n                             to configure the feature. Otherwise the object\n                             attributes are used\n\n        Return:\n            `dict` of configurations keyed by device name (when apply is\n            False)\n        \"\"\"\n        attributes = AttributesHelper(self, attributes)\n\n        # Get devices if none were passed\n        if devices is None:\n            devices = self.devices\n\n        # For each device, loop over device_attr\n        cfgs = {}\n        for key, sub, attributes2 in attributes.mapping_items('device_attr', keys=devices):\n            cfgs[key] = sub.build_unconfig(apply=False, attributes=attributes2, **kwargs)\n\n        if apply:\n            self.testbed.config_on_devices(cfgs, fail_invalid=True)\n        else:\n            # Return configuration\n            return cfgs\n", "id": "12298669", "language": "Python", "matching_score": 4.841239929199219, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/rip/rip.py" }, { "content": "\nfrom abc import ABC\nimport warnings\n\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning,\\\n    AttributesHelper\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base import Interface\nfrom genie.conf.base.config import CliConfig\n\nfrom genie.libs.conf.vrf import VrfSubAttributes\n\n\nclass Rip(ABC):\n\n    class DeviceAttributes(ABC):\n\n        def build_config(self, apply=True, attributes=None, unconfig=False, unconfig_feature=False, **kwargs):\n            \"\"\"method to build the configuration based on attributes\n\n            API to build the configuration of a `DeviceAttributes` object.\n            This configuration depends on the configurable attributes of this\n            object.\n\n            Args:\n                kwargs (`dict`): Argument to drive configuration\n\n            Return:\n                `str`\n            \"\"\"\n            assert not apply # not supported\n            attributes = AttributesHelper(self, attributes)\n            configurations = CliConfigBuilder(unconfig=unconfig)\n\n            # If the whole feature is being removed, send 'no feature rip';\n            # otherwise make sure 'feature rip' has been enabled first.\n            if unconfig and unconfig_feature:\n                # if not self.enabled_feature:\n                configurations.append_line('no feature rip', raw=True)\n                self.enabled_feature = False\n            else:\n                if not self.enabled_feature:\n                    configurations.append_line('feature rip', raw=True)\n                    self.enabled_feature = True\n\n            with configurations.submode_context(attributes.format('router rip {instance_id}', force=True)):\n                if unconfig and attributes.iswildcard:\n                    
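# submode_unconfig() marks this whole submode for removal, i.e. a\n                    # single 'no router rip <instance_id>' line (editor's note).\n                    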
configurations.submode_unconfig()\n\n                v = attributes.value('shutdown')\n                if v is not None:\n                    if v is True:\n                        configurations.append_line('shutdown')\n                    elif not unconfig:\n                        configurations.append_line('no shutdown') # always configure it\n\n                for sub, attributes2 in attributes.mapping_values('vrf_attr', keys=self.vrfs, sort=True):\n                    configurations.append_block(sub.build_config(apply=False, attributes=attributes2, unconfig=unconfig, **kwargs))\n\n            if apply:\n                if configurations:\n                    self.device.configure(str(configurations))\n            else:\n                return CliConfig(device=self.device, unconfig=unconfig,\n                                 cli_config=configurations)\n\n        def build_unconfig(self, apply=True, attributes=None, **kwargs):\n            return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n    class VrfAttributes(ABC):\n\n        def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n            \"\"\"method to build the configuration based on attributes\n\n            API to build the configuration of a `VrfAttributes` object.\n            This configuration depends on the configurable attributes of\n            this object.\n\n            Args:\n                kwargs (`dict`): Argument to drive configuration\n\n            Return:\n                `str`\n            \"\"\"\n            assert not apply # not supported\n            attributes = AttributesHelper(self, attributes)\n            configurations = CliConfigBuilder(unconfig=unconfig)\n\n            with configurations.submode_context(\n                    None if self.vrf_name == 'default' else attributes.format('vrf {vrf_name}', force=True)):\n                if self.vrf_name != 'default' and unconfig and attributes.iswildcard:\n                    configurations.submode_unconfig()\n\n                for sub, attributes2 in attributes.mapping_values('address_family_attr', keys=self.address_families, sort=True):\n                    configurations.append_block(sub.build_config(apply=False, attributes=attributes2, unconfig=unconfig, **kwargs))\n\n            return str(configurations)\n\n        def build_unconfig(self, apply=True, attributes=None, **kwargs):\n            return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n    class AddressFamilyAttributes(ABC):\n\n        def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n            \"\"\"method to build the configuration based on attributes\n\n            API to build the configuration of an `AddressFamilyAttributes` object.\n            This configuration depends on the configurable attributes of\n            this object.\n\n            Args:\n                kwargs (`dict`): Argument to drive configuration\n\n            Return:\n                `str`\n            \"\"\"\n            attributes = AttributesHelper(self, attributes)\n            configurations = CliConfigBuilder(unconfig=unconfig)\n\n            with configurations.submode_context(attributes.format('address-family {address_family.value}', force=True)):\n                if unconfig and attributes.iswildcard:\n                    configurations.submode_unconfig()\n\n                configurations.append_line(attributes.format('default-metric {default_metric}'))\n\n                configurations.append_line(attributes.format('distance {distance}'))\n\n                configurations.append_line(attributes.format('maximum-paths {maximum_paths}'))\n\n                configurations.append_line(attributes.format('redistribute lisp route-map {redistribute_lisp_rmap}'))\n\n                configurations.append_line(attributes.format('redistribute direct route-map {redistribute_direct_rmap}'))\n\n                configurations.append_line(attributes.format('redistribute static route-map {redistribute_static_rmap}'))\n\n            return str(configurations)\n\n        def build_unconfig(self, apply=True, attributes=None, **kwargs):\n            return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n# -- RIP\n# nxos: interface <intf> / ip rip authentication key-chain someword\n# nxos: 
interface <intf> / ip rip authentication mode md5\n# nxos: interface <intf> / ip rip authentication mode text\n# nxos: interface <intf> / ip rip metric-offset 1\n# nxos: interface <intf> / ip rip offset-list 1\n# nxos: interface <intf> / ip rip passive-interface\n# nxos: interface <intf> / ip rip poison-reverse\n# nxos: interface <intf> / ip rip route-filter prefix-list someword in\n# nxos: interface <intf> / ip rip route-filter prefix-list someword out\n# nxos: interface <intf> / ip rip route-filter route-map rpl1 in\n# nxos: interface <intf> / ip rip route-filter route-map rpl1 out\n# nxos: interface <intf> / ip rip summary-address 1.2.3.0/24\n# nxos: interface <intf> / ip router rip someword\n# nxos: interface <intf> / ipv6 rip metric-offset 1\n# nxos: interface <intf> / ipv6 rip offset-list 1\n# nxos: interface <intf> / ipv6 rip passive-interface\n# nxos: interface <intf> / ipv6 rip poison-reverse\n# nxos: interface <intf> / ipv6 rip route-filter prefix-list someword in\n# nxos: interface <intf> / ipv6 rip route-filter prefix-list someword out\n# nxos: interface <intf> / ipv6 rip route-filter route-map rpl1 in\n# nxos: interface <intf> / ipv6 rip route-filter route-map rpl1 out\n# nxos: interface <intf> / ipv6 rip summary-address fdf8:f53e:61e4::18/128\n# nxos: interface <intf> / ipv6 router rip someword\n# nxos: interface <intf> / rip shutdown\n\n", "id": "8777602", "language": "Python", "matching_score": 5.311943531036377, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/rip/nxos/rip.py" }, { "content": "'''\nIOSXR specific configurations for Mcast feature object.\n'''\n\n# Python\nfrom abc import ABC\n\n# Genie\nfrom genie.decorator import managedattribute\nfrom genie.conf.base.config import CliConfig\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning, \\\n AttributesHelper\n\n# Structure Hierarchy:\n# Mcast\n# +--DeviceAttributes\n# +--VrfAttributes\n# +-- AddressFamilyAttributes\n\n\nclass Mcast(ABC):\n\n class DeviceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # multicast-routing\n with configurations.submode_context('multicast-routing'):\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n # VrfAttributes\n for sub, attributes2 in attributes.mapping_values('vrf_attr',\n sort=True, keys=self.vrf_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n\n if apply:\n if configurations:\n self.device.configure(configurations)\n else:\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n \n class VrfAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n with configurations.submode_context(\n None if self.vrf_name == 'default' \\\n else attributes.format('vrf {vrf_name}', force=True)):\n if self.vrf_name != 'default' and unconfig \\\n and attributes.iswildcard:\n configurations.submode_unconfig()\n\n # AddressFamilyAttributes\n for 
sub, attributes2 in attributes.mapping_values('address_family_attr',\n sort=True, keys=self.address_family_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n class AddressFamilyAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # # Set decider key af_name from user set address family\n # self.af_name = self.address_family.value\n\n with configurations.submode_context(attributes.format(\n 'address-family {address_family.name}', force=True)):\n\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n # ======\n # enable\n # ======\n if attributes.value('enable'):\n configurations.append_line(attributes.format(\n 'interface all enable'))\n\n # =========\n # multipath\n # =========\n if attributes.value('multipath'):\n configurations.append_line(\n attributes.format('multipath'))\n\n # Mroute attributes configs\n for mroute, attributes2 in attributes.sequence_values(\n 'mroutes', sort=True):\n if unconfig:\n configurations.append_block(mroute.build_unconfig(\n apply=False, attributes=attributes2, **kwargs))\n else:\n configurations.append_block(mroute.build_config(\n apply=False, attributes=attributes2, **kwargs))\n\n # InterfaceAttribute\n for sub, attributes2 in attributes.mapping_values('interface_attr',\n sort=True, keys=self.interface_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n\n class InterfaceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n with configurations.submode_context(attributes.format(\n 'interface {interface.name}', force=True)):\n\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n # ======\n # if_enable\n # ======\n if attributes.value('if_enable'):\n configurations.append_line(attributes.format(\n 'enable'))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)", "id": "3522199", "language": "Python", "matching_score": 2.8175106048583984, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/mcast/iosxr/mcast.py" }, { "content": "\nfrom abc import ABC\nimport warnings\n\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning,\\\n AttributesHelper\nfrom genie.conf.base import Interface\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.config import CliConfig\nfrom ..isis import Isis as _Isis\n\nfrom genie.libs.conf.address_family import AddressFamily, AddressFamilySubAttributes\n\n\nclass Isis(ABC):\n\n class DeviceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n 
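# Editor's note: each append_line below only emits its CLI when the\n            # corresponding attribute is set; attributes.format() produces\n            # nothing for unset attributes, so unset knobs are skipped.\n            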
assert not kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # iosxe: router isis 100 (config-isis)\n context_cli = attributes.format('router isis {pid}', force=True)\n with configurations.submode_context(context_cli,cancel_empty=True):\n\n if unconfig and attributes.iswildcard:\n configurations.submode_unconfig()\n\n # iosxe: router isis 100 / protocol shutdown\n if attributes.value('shutdown'):\n configurations.append_line(attributes.format('protocol shutdown'))\n\n # iosxe: router isis 100 / is-type level-1\n # iosxe: router isis 100 / is-type level-2-only\n # iosxe: router isis 100 / is-type level-1-2\n configurations.append_line(attributes.format('is-type {is_type}', transform={\n _Isis.IsType.level_1: 'level-1',\n _Isis.IsType.level_2: 'level-2-only',\n _Isis.IsType.level_1_2: 'level-1-2',\n }))\n\n # iosxe: router isis 100 / nsf cisco\n # iosxe: router isis 100 / nsf ietf\n configurations.append_line(attributes.format('nsf {nsf}', transform={\n _Isis.Nsf.cisco: 'cisco',\n _Isis.Nsf.ietf: 'ietf',\n }))\n\n # iosxe: router isis 100 / nsr\n if attributes.value('nsr'):\n configurations.append_line(attributes.format('nsr'))\n\n # iosxe: router isis 100 / distribute link-state\n if attributes.value('distribute_link_state'):\n configurations.append_line(attributes.format('distribute link-state'))\n\n # iosxe: router isis 100 / segment-routing mpls\n if attributes.value('segment_routing_mpls'):\n configurations.append_line(attributes.format('segment-routing mpls'))\n\n # iosxe: router isis 100 / segment-routing prefix-sid-map advertise-local\n if attributes.value('segment_routing_prefix_sid_map_advertise_local'):\n configurations.append_line(attributes.format('segment-routing prefix-sid-map advertise-local'))\n\n # iosxe: router isis 100 / segment-routing prefix-sid-map receive disable \n if attributes.value('segment_routing_prefix_sid_map_receive') is True:\n configurations.append_line(attributes.format('segment-routing prefix-sid-map receive'))\n elif attributes.value('segment_routing_prefix_sid_map_receive') is False:\n configurations.append_line(attributes.format('segment-routing prefix-sid-map receive disable'))\n\n # iosxe: router isis 100 / net 11.0000.0000.0000.0000.00\n for net_id, attributes2 in attributes.sequence_values('net_ids', sort=True):\n configurations.append_line(attributes2.format('net {area_address}.{system_id}.{nsel}'))\n\n # iosxe: router isis 100 / passive-interface Loopback0\n for sub, attributes2 in attributes.mapping_values('interface_attr', keys=self.interfaces, sort=True):\n if attributes2.value('passive'):\n configurations.append_line(attributes2.format('passive-interface {interface.name}',force=True))\n\n for sub, attributes2 in attributes.mapping_values('address_family_attr', keys=self.address_families, sort=True):\n configurations.append_block(\n sub.build_config(apply=False, attributes=attributes2, unconfig=unconfig))\n\n for sub, attributes2 in attributes.mapping_values('interface_attr', keys=self.interfaces, sort=True):\n configurations.append_block(\n sub.build_config(apply=False, attributes=attributes2, unconfig=unconfig))\n\n if apply:\n if configurations:\n self.device.configure(configurations)\n else:\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n class AddressFamilyAttributes(ABC):\n\n 
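# Editor's note: for the ipv4 unicast address-family, iosxe emits most\n        # knobs at router level below; the 'address-family' submode is then\n        # only opened for the remaining per-AF lines (e.g. metric-style).\n        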
def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n            assert not apply\n            assert not kwargs\n            attributes = AttributesHelper(self, attributes)\n            configurations = CliConfigBuilder(unconfig=unconfig)\n\n            if self.address_family.name == 'ipv4_unicast':\n\n                # iosxe: router isis 100 / metric-style wide\n                configurations.append_line(attributes.format('metric-style {metric_style.value}'))\n\n                # iosxe : router isis 100 / ispf level-1\n                # iosxe : router isis 100 / ispf level-2\n                # iosxe : router isis 100 / ispf level-1-2\n                configurations.append_line(attributes.format('ispf {ispf_type}', transform={\n                    _Isis.IsType.level_1: 'level-1',\n                    _Isis.IsType.level_2: 'level-2',\n                    _Isis.IsType.level_1_2: 'level-1-2',\n                }))\n\n                # iosxe: router isis 100 / mpls traffic-eng level-1\n                # iosxe: router isis 100 / mpls traffic-eng level-2\n                # iosxe: router isis 100 / mpls traffic-eng level-1 ; mpls traffic-eng level-2\n                configurations.append_line(attributes.format('mpls traffic-eng {mpls_te_level}', transform={\n                    _Isis.IsType.level_1: 'level-1',\n                    _Isis.IsType.level_2: 'level-2',\n                    _Isis.IsType.level_1_2: 'level-1\nmpls traffic-eng level-2',\n                }))\n\n                # iosxe: router isis 100 / mpls traffic-eng router-id <intf>\n                configurations.append_line(attributes.format('mpls traffic-eng router-id {mpls_te_rtrid.name}'))\n\n                # iosxe : router isis 100 / maximum-paths 32\n                configurations.append_line(attributes.format('maximum-paths {maximum_paths}'))\n\n                # iosxe : router isis 100 / mpls ldp autoconfig\n                if attributes.value('ldp_auto_config'):\n                    configurations.append_line(attributes.format('mpls ldp autoconfig'))\n\n                # iosxe : router isis 100 / mpls ldp sync\n                if attributes.value('ldp_sync'):\n                    configurations.append_line(attributes.format('mpls ldp sync'))\n\n                # iosxe : router isis 100 / mpls ldp sync-igp-shortcut\n                if attributes.value('ldp_sync_shortcut'):\n                    configurations.append_line(attributes.format('mpls ldp sync-igp-shortcut'))\n\n                # iosxe : router isis 100 / mpls ldp ac-igp-shortcut\n                if attributes.value('ldp_auto_config_shortcut'):\n                    configurations.append_line(attributes.format('mpls ldp ac-igp-shortcut'))\n\n            # iosxe: router isis 100 / address-family ipv4|ipv6 unicast|multicast (config-isis-af)\n            with configurations.submode_context(attributes.format('address-family {address_family.value}', force=True),\\\n                cancel_empty=True):\n                if unconfig and attributes.iswildcard:\n                    configurations.submode_unconfig()\n\n                if self.address_family.name != 'ipv4_unicast':\n                    configurations.append_line(attributes.format('metric-style {metric_style.value}'))\n\n            return str(configurations)\n\n        def build_unconfig(self, apply=True, attributes=None, **kwargs):\n            return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n    class InterfaceAttributes(ABC):\n\n        def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):\n            assert not apply\n            assert not kwargs\n            attributes = AttributesHelper(self, attributes)\n            configurations = CliConfigBuilder(unconfig=unconfig)\n\n            # iosxe: interface <intf> (config-if)\n            with configurations.submode_context(attributes.format('interface {interface.name}',force=True),cancel_empty=True):\n                for sub, attributes2 in attributes.mapping_values('address_family_attr',keys=self.address_families, sort=True):\n                    if sub.address_family.name.startswith('ipv4'):\n                        configurations.append_line(attributes.format('ip router isis {pid}',force=True))\n                    if sub.address_family.name.startswith('ipv6'):\n                        configurations.append_line(attributes.format('ipv6 router isis 
{pid}',force=True))\n\n # iosxe: interface <intf> / isis network point-to-point\n if attributes.value('point_to_point'):\n configurations.append_line(attributes.format('isis network point-to-point'))\n\n # iosxe: interface <intf> / isis circuit-type level-1\n # iosxe: interface <intf> / isis circuit-type level-2-only\n # iosxe: interface <intf> / isis circuit-type level-1-2\n configurations.append_line(attributes.format('isis circuit-type {circuit_type}', transform={\n _Isis.IsType.level_1: 'level-1',\n _Isis.IsType.level_2: 'level-2-only',\n _Isis.IsType.level_1_2: 'level-1-2',\n }))\n\n # iosxe: interface <intf> / isis metric 10\n configurations.append_line(attributes.format('isis metric {metric}'))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)\n\n", "id": "5331749", "language": "Python", "matching_score": 3.653733968734741, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/isis/iosxe/isis.py" }, { "content": "\n__all__ = (\n 'Isis',\n )\n\nfrom enum import Enum\nimport ipaddress\nimport binascii\nfrom ipaddress import IPv4Address, IPv4Interface, IPv6Address, IPv6Interface\n\nfrom genie.utils.cisco_collections import typedset\n\nfrom genie.decorator import managedattribute\nfrom genie.conf.base import Base, DeviceFeature, InterfaceFeature, LinkFeature, Interface\nimport genie.conf.base.attributes\nfrom genie.conf.base.attributes import SubAttributes, SubAttributesDict, AttributesHelper\n\nfrom genie.libs.conf.base import Routing\nfrom genie.libs.conf.address_family import AddressFamily, AddressFamilySubAttributes\nfrom .isis_net import IsisAreaAddress, IsisSystemID, IsisNET\n\n\nclass Isis(Routing, DeviceFeature, InterfaceFeature, LinkFeature):\n\n pid = managedattribute(\n name='pid',\n type=str,\n doc='Process ID (mandatory)')\n\n address_families = managedattribute(\n name='address_families',\n finit=typedset(AddressFamily, {AddressFamily.ipv4_unicast}).copy,\n type=typedset(AddressFamily)._from_iterable)\n\n class MetricStyle(Enum):\n narrow = 'narrow'\n wide = 'wide'\n transition = 'transition'\n narrow_transition = 'narrow transition'\n wide_transition = 'wide transition'\n\n metric_style = managedattribute(\n name='metric_style',\n default=None,\n type=(None, MetricStyle))\n\n metric = managedattribute(\n name='metric',\n default=None,\n type=(None, int, str))\n\n class IsType(Enum):\n level_1 = 'level 1'\n level_1_2 = 'level 1 and 2'\n level_2 = 'level 2'\n\n is_type = managedattribute(\n name='is_type',\n default=None,\n type=(None, IsType))\n\n ispf_type = managedattribute(\n name='ispf_type',\n default=None,\n type=(None, IsType))\n\n circuit_type = managedattribute(\n name='circuit_type',\n default=None,\n type=(None, IsType))\n\n maximum_paths = managedattribute(\n name='maximum_paths',\n default=None,\n type=(None, int))\n\n ldp_auto_config = managedattribute(\n name='ldp_auto_config',\n default=None,\n type=(None, bool))\n\n ldp_sync = managedattribute(\n name='ldp_sync',\n default=None,\n type=(None, bool))\n\n ldp_sync_shortcut = managedattribute(\n name='ldp_sync_shortcut',\n default=None,\n type=(None, bool))\n\n ldp_auto_config_shortcut = managedattribute(\n name='ldp_auto_config_shortcut',\n default=None,\n type=(None, bool))\n\n distribute_link_state = managedattribute(\n name='distribute_link_state',\n default=None,\n type=(None, bool))\n\n mpls_te_level = managedattribute(\n name='mpls_te_level',\n 
default=None,\n type=(None, IsType))\n\n mpls_te_rtrid = managedattribute(\n name='mpls_te_rtrid',\n default=None,\n type=(None, managedattribute.test_isinstance(Interface)))\n\n net_id = managedattribute(\n name='net_id',\n read_only=True,\n doc='''Single Network Entity Title (NET). Only meaningful per device.''')\n\n net_ids = managedattribute(\n name='net_ids',\n read_only=True,\n doc='''Set of Network Entity Title (NET). Only meaningful per device.''')\n\n area_addresses = managedattribute(\n name='area_addresses',\n type=(None, managedattribute.test_set_of(IsisAreaAddress)),\n doc='''Set of area address part of Network Entity Title (NET).\n\n Default value is a single area address unique value based on ISIS process ID.\n\n Set to None to trigger each device to have a unique value based on individual device name.\n ''')\n\n @area_addresses.defaulter\n def area_addresses(self):\n unique_int = binascii.crc32(self.pid.encode())\n return frozenset([\n IsisAreaAddress(\n '47.{:04X}.{:04X}'.format(\n (unique_int >> 16) & 0xFFFF,\n unique_int & 0xFFFF,\n ))])\n\n @property\n def area_address(self):\n '''The area address part of the Network Entity Title (NET).\n\n `area_address` can be assigned to and will set `area_addresses` to a\n single item.\n\n `area_address`'s value is a single area address, or None. Use\n `area_addresses` to retrieve all the area addresses. Assign\n `area_addresses` to support multiple area addresses.\n '''\n area_addresses = self.area_addresses\n if area_addresses:\n for area_address in sorted(self.area_addresses):\n return area_address\n return None\n\n @area_address.setter\n def area_address(self, value):\n if value is None:\n self.area_addresses = None\n else:\n self.area_addresses = {value}\n\n @area_address.deleter\n def area_address(self):\n del self.area_addresses\n\n system_id = managedattribute(\n name='system_id',\n default=None,\n type=(None, IsisSystemID),\n doc='''System ID. 
Assign to make all devices use the same System ID for level 1 operation.''')\n\n    class Nsf(Enum):\n        cisco = 'cisco'\n        ietf = 'ietf'\n\n    nsf = managedattribute(\n        name='nsf',\n        default=None,\n        type=(None, Nsf))\n\n    nsf_lifetime = managedattribute(\n        name='nsf_lifetime',\n        default=None,\n        type=(None, int))\n\n    nsr = managedattribute(\n        name='nsr',\n        default=None,\n        type=(None, managedattribute.test_istype(bool)))\n\n    redistribute_connected = managedattribute(\n        name='redistribute_connected',\n        default=None,\n        type=(None, managedattribute.test_istype(bool)))\n\n    passive = managedattribute(\n        name='passive',\n        default=None,\n        type=(None, managedattribute.test_istype(bool)))\n\n    point_to_point = managedattribute(\n        name='point_to_point',\n        default=None,\n        type=(None, managedattribute.test_istype(bool)))\n\n    shutdown = managedattribute(\n        name='shutdown',\n        default=None,\n        type=(None, managedattribute.test_istype(bool)))\n\n    lsp_mtu = managedattribute(\n        name='lsp_mtu',\n        default=None,\n        type=(None, int))\n\n    segment_routing_mpls = managedattribute(\n        name='segment_routing_mpls',\n        default=None,\n        type=(None, managedattribute.test_istype(bool)))\n\n    segment_routing_mpls_sr_prefer = managedattribute(\n        name='segment_routing_mpls_sr_prefer',\n        default=None,\n        type=(None, managedattribute.test_istype(bool)))\n\n    segment_routing_prefix_sid_map_advertise_local = managedattribute(\n        name='segment_routing_prefix_sid_map_advertise_local',\n        default=None,\n        type=(None, managedattribute.test_istype(bool)))\n\n    segment_routing_prefix_sid_map_receive = managedattribute(\n        name='segment_routing_prefix_sid_map_receive',\n        default=None,\n        type=(None, managedattribute.test_istype(bool)))\n\n    # NOTE: prefix_sid and prefix_sid_index are mutually exclusive\n    prefix_sid = managedattribute(\n        name='prefix_sid',\n        default=None,\n        type=(None, managedattribute.test_istype(int)))\n\n    # NOTE: prefix_sid and prefix_sid_index are mutually exclusive\n    prefix_sid_index = managedattribute(\n        name='prefix_sid_index',\n        default=None,\n        type=(None, managedattribute.test_istype(int)))\n\n    prefix_sid_explicit_null = managedattribute(\n        name='prefix_sid_explicit_null',\n        default=None,\n        type=(None, managedattribute.test_istype(bool)))\n\n    prefix_sid_n_flag_clear = managedattribute(\n        name='prefix_sid_n_flag_clear',\n        default=None,\n        type=(None, managedattribute.test_istype(bool)))\n\n    class DeviceAttributes(genie.conf.base.attributes.DeviceSubAttributes):\n\n        address_families = managedattribute(\n            name='address_families',\n            type=typedset(AddressFamily)._from_iterable)\n\n        @address_families.defaulter\n        def address_families(self):\n            return frozenset(self.parent.address_families)\n\n        area_addresses = managedattribute(\n            name='area_addresses',\n            type=managedattribute.test_set_of(IsisAreaAddress),\n            doc='''Set of area address part of Network Entity Title (NET).\n\n            Default value is taken from parent Isis object or, if None, a single area address unique value based on device name.\n            ''')\n\n        @area_addresses.defaulter\n        def area_addresses(self):\n            area_addresses = self.parent.area_addresses\n            if area_addresses is None:\n                unique_int = binascii.crc32(self.device_name.encode())\n                area_addresses = [\n                    IsisAreaAddress(\n                        '47.{:04X}.{:04X}'.format(\n                            (unique_int >> 16) & 0xFFFF,\n                            unique_int & 0xFFFF,\n                        ))]\n            return frozenset(area_addresses)\n\n        @property\n        def area_address(self):\n            '''The area address part of the Network Entity Title (NET).\n\n            `area_address` can be assigned to and will set `area_addresses` to a\n            single item.\n\n            
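For example (illustrative; `isis` and `dev` are assumed objects)::\n\n                isis.device_attr[dev].area_address = IsisAreaAddress('47.ABCD.1234')\n\n            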
`area_address`'s value is a single area address, or None. Use\n `area_addresses` to retrieve all the area addresses. Assign\n `area_addresses` to support multiple area addresses.\n '''\n for area_address in sorted(self.area_addresses):\n return area_address\n return None\n\n @area_address.setter\n def area_address(self, value):\n self.area_addresses = {value}\n\n @area_address.deleter\n def area_address(self):\n del self.area_addresses\n\n system_id = managedattribute(\n name='system_id',\n type=IsisSystemID,\n doc='The system ID. Default is a unique value per device name.')\n\n @system_id.defaulter\n def system_id(self):\n system_id = self.parent.system_id\n if system_id is None:\n unique_int = binascii.crc32(self.device_name.encode())\n system_id = IsisSystemID(\n 'FFFF.{:04X}.{:04X}'.format(\n (unique_int >> 16) & 0xFFFF,\n unique_int & 0xFFFF,\n ))\n return system_id\n\n @property\n def net_ids(self):\n '''The set of Network Entity Titles (NETs).\n\n Please assign using `system_id`, `area_addresses` or `net_id`.\n '''\n system_id = self.system_id\n return frozenset([\n IsisNET(area_address=area_address, system_id=system_id)\n for area_address in self.area_addresses])\n\n @property\n def net_id(self):\n '''The Network Entity Title (NET).\n\n The NET is formatted as `{area_address}.{system_id}.00`\n\n There can be only 1 `system_id` but there can be multiple areas (`area_addresses`).\n\n `net_id` can be assigned to and will set `area_addresses` to a\n single item as well as `system_id` to the desired value.\n\n `net_id`'s value is a single NET, or None. Use `net_ids` to\n retrieve all the NETs. Assign `area_addresses` and `system_id` to\n support multiple NETs.\n '''\n for net_id in sorted(self.net_ids):\n return net_id\n return None\n\n @net_id.setter\n def net_id(self, value):\n if value is None:\n self.area_addresses = ()\n else:\n net_id = IsisNET(value)\n self.system_id = net_id.system_id\n self.area_address = net_id.area_address\n\n @net_id.deleter\n def net_id(self):\n try:\n del self.area_address\n except AttributeError:\n pass\n try:\n del self.system_id\n except AttributeError:\n pass\n\n class AddressFamilyAttributes(AddressFamilySubAttributes):\n\n def __init__(self, parent, key):\n super().__init__(parent, key)\n\n address_family_attr = managedattribute(\n name='address_family_attr',\n read_only=True,\n doc=AddressFamilyAttributes.__doc__)\n\n @address_family_attr.initter\n def address_family_attr(self):\n return SubAttributesDict(self.AddressFamilyAttributes, parent=self)\n\n class InterfaceAttributes(genie.conf.base.attributes.InterfaceSubAttributes):\n\n address_families = managedattribute(\n name='address_families',\n type=typedset(AddressFamily)._from_iterable)\n\n @address_families.defaulter\n def address_families(self):\n return frozenset(self.parent.address_families)\n\n class AddressFamilyAttributes(AddressFamilySubAttributes):\n\n def __init__(self, parent, key):\n super().__init__(parent, key)\n\n address_family_attr = managedattribute(\n name='address_family_attr', read_only=True,\n doc=AddressFamilyAttributes.__doc__)\n\n @address_family_attr.initter\n def address_family_attr(self):\n return SubAttributesDict(self.AddressFamilyAttributes, parent=self)\n\n def __init__(self, parent, key):\n super().__init__(parent, key)\n\n interface_attr = managedattribute(\n name='interface_attr',\n read_only=True,\n doc=InterfaceAttributes.__doc__)\n\n @interface_attr.initter\n def interface_attr(self):\n return SubAttributesDict(self.InterfaceAttributes, parent=self)\n\n def 
__init__(self, parent, key):\n super().__init__(parent, key)\n\n device_attr = managedattribute(\n name='device_attr',\n read_only=True,\n doc=DeviceAttributes.__doc__)\n\n @device_attr.initter\n def device_attr(self):\n return SubAttributesDict(self.DeviceAttributes, parent=self)\n\n def __init__(self, pid, *args, **kwargs):\n self.pid = pid\n super().__init__(*args, **kwargs)\n\n def build_config(self, devices=None, apply=True, attributes=None,\n **kwargs):\n cfgs = {}\n attributes = AttributesHelper(self, attributes)\n\n if devices is None:\n devices = self.devices\n devices = set(devices)\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr',\n keys=devices, sort=True):\n cfgs[key] = sub.build_config(apply=False, attributes=attributes2)\n\n if apply:\n self.testbed.config_on_devices(cfgs, fail_invalid=True)\n else:\n return cfgs\n\n def build_unconfig(self, devices=None, apply=True, attributes=None,\n **kwargs):\n cfgs = {}\n attributes = AttributesHelper(self, attributes)\n\n if devices is None:\n devices = self.devices\n devices = set(devices)\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr',\n keys=devices, sort=True):\n cfgs[key] = sub.build_unconfig(apply=False, attributes=attributes2)\n\n if apply:\n self.testbed.config_on_devices(cfgs, fail_invalid=True)\n else:\n return cfgs\n\n", "id": "4127688", "language": "Python", "matching_score": 6.614814758300781, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/isis/isis.py" }, { "content": "\n__all__ = (\n 'SegmentRouting',\n 'PrefixSidMapEntry',\n )\n\nimport ipaddress\nimport collections\nfrom ipaddress import IPv4Address, IPv4Interface, IPv6Address, IPv6Interface, IPv4Network\n\nfrom genie.utils.cisco_collections import typedset\n\nfrom genie.decorator import managedattribute\nfrom genie.conf.base import Base, DeviceFeature, Interface\nimport genie.conf.base.attributes\nfrom genie.conf.base.attributes import SubAttributes, SubAttributesDict, AttributesHelper\n\nfrom genie.libs.conf.base import Routing\nfrom genie.libs.conf.address_family import AddressFamily, AddressFamilySubAttributes\n\nclass PrefixSidMapEntry(Base):\n\n prefix = managedattribute(\n name='prefix',\n default=None,\n type=(None, managedattribute.test_istype(IPv4Network)))\n\n index = managedattribute(\n name='index',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n range = managedattribute(\n name='range',\n default=None,\n type=(None, managedattribute.test_istype(int)))\n\n attach = managedattribute(\n name='attach',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n def __hash__(self):\n return hash((self.prefix,self.index,self.range,self.attach))\n\nclass SegmentRouting(Routing, DeviceFeature):\n\n address_families = managedattribute(\n name='address_families',\n finit=typedset(AddressFamily, {AddressFamily.ipv4_unicast}).copy,\n type=typedset(AddressFamily)._from_iterable)\n\n shutdown = managedattribute(\n name='shutdown',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n global_block = managedattribute(\n name='global_block',\n default=None,\n type=(None, managedattribute.test_istype(range)))\n\n sr_label_preferred = managedattribute(\n name='sr_label_preferred',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n explicit_null = managedattribute(\n name='explicit_null',\n default=None,\n type=(None, managedattribute.test_istype(bool)))\n\n mapping_server = managedattribute(\n name='mapping_server',\n default=None,\n 
type=(None, managedattribute.test_istype(bool)))\n\n connected_prefix_sid_map = managedattribute(\n name='connected_prefix_sid_map',\n finit=set,\n type=managedattribute.test_set_of(\n managedattribute.test_isinstance(PrefixSidMapEntry)),\n gettype=frozenset,\n doc='A `set` of connected_prefix_sid_map entries')\n\n prefix_sid_map = managedattribute(\n name='prefix_sid_map',\n finit=set,\n type=managedattribute.test_set_of(\n managedattribute.test_isinstance(PrefixSidMapEntry)),\n gettype=frozenset,\n doc='A `set` of prefix_sid_map entries')\n\n class DeviceAttributes(genie.conf.base.attributes.DeviceSubAttributes):\n\n address_families = managedattribute(\n name='address_families',\n type=typedset(AddressFamily)._from_iterable)\n\n @address_families.initter\n def address_families(self):\n return frozenset(self.parent.address_families)\n\n class AddressFamilyAttributes(AddressFamilySubAttributes):\n\n def __init__(self, parent, key):\n super().__init__(parent, key)\n\n def add_prefix_sid_map_entry(self,entry):\n self.prefix_sid_map |= {entry}\n\n def add_connected_prefix_sid_map_entry(self,entry):\n self.connected_prefix_sid_map |= {entry}\n\n def remove_prefix_sid_map_entry(self,entry):\n self.prefix_sid_map.remove(entry)\n\n def remove_connected_prefix_sid_map_entry(self,entry):\n self.connected_prefix_sid_map.remove(entry)\n\n address_family_attr = managedattribute(\n name='address_family_attr',\n read_only=True,\n doc=AddressFamilyAttributes.__doc__)\n\n @address_family_attr.initter\n def address_family_attr(self):\n return SubAttributesDict(self.AddressFamilyAttributes, parent=self)\n\n def __init__(self, parent, key):\n super().__init__(parent, key)\n\n device_attr = managedattribute(\n name='device_attr',\n read_only=True,\n doc=DeviceAttributes.__doc__)\n\n @device_attr.initter\n def device_attr(self):\n return SubAttributesDict(self.DeviceAttributes, parent=self)\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n def build_config(self, devices=None, apply=True, attributes=None,\n **kwargs):\n cfgs = {}\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n\n if devices is None:\n devices = self.devices\n devices = set(devices)\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr',\n keys=devices, sort=True):\n cfgs[key] = sub.build_config(apply=False, attributes=attributes2)\n\n if apply:\n self.testbed.config_on_devices(cfgs, fail_invalid=True)\n else:\n return cfgs\n\n def build_unconfig(self, devices=None, apply=True, attributes=None,\n **kwargs):\n cfgs = {}\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n\n if devices is None:\n devices = self.devices\n devices = set(devices)\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr',\n keys=devices, sort=True):\n cfgs[key] = sub.build_unconfig(apply=False, attributes=attributes2)\n\n if apply:\n self.testbed.config_on_devices(cfgs, fail_invalid=True)\n else:\n return cfgs\n\n", "id": "3263830", "language": "Python", "matching_score": 4.2341156005859375, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/segment_routing/segment_routing.py" }, { "content": "# Python\nfrom enum import Enum\n\n# Genie package\nfrom genie.decorator import managedattribute\nfrom genie.conf.base import Base, \\\n DeviceFeature, \\\n LinkFeature, \\\n Interface\nimport genie.conf.base.attributes\nfrom genie.conf.base.attributes import SubAttributes, \\\n SubAttributesDict, \\\n AttributesHelper, \\\n 
KeyedSubAttributes\n\n# Genie Xbu_shared\nfrom genie.libs.conf.base.feature import consolidate_feature_args\n\n__all__ = (\n 'StaticRouting',\n )\n# Table of contents:\n# class StaticRouting:\n# class DeviceAttributes:\n# class VrfAttributes:\n# class AddressFamilyAttributes:\n# class RouteAttributes:\n# class InterfaceAttributes:\n# class NextHopAttributes:\n\nclass StaticRouting(DeviceFeature, LinkFeature):\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n # =============================================\n # Device attributes\n # =============================================\n class DeviceAttributes(genie.conf.base.attributes.DeviceSubAttributes):\n\n # VrfAttributes\n class VrfAttributes(KeyedSubAttributes):\n def __init__(self, parent, key):\n self.vrf = key\n super().__init__(parent=parent)\n\n # AddressFamilyAttribute\n class AddressFamilyAttributes(KeyedSubAttributes):\n def __init__(self, parent, key):\n self.af = key\n super().__init__(parent)\n\n # RouteAttributes\n class RouteAttributes(KeyedSubAttributes):\n def __init__(self, parent, key):\n self.route = key\n super().__init__(parent)\n\n # InterfaceAttributes\n class InterfaceAttributes(KeyedSubAttributes):\n def __init__(self, parent, key):\n self.interface = key\n super().__init__(parent)\n\n interface_attr = managedattribute(\n name='interface_attr',\n read_only=True,\n doc=InterfaceAttributes.__doc__)\n\n @interface_attr.initter\n def interface_attr(self):\n return SubAttributesDict(self.InterfaceAttributes, parent=self)\n\n # NextHopAttributes\n class NextHopAttributes(KeyedSubAttributes):\n def __init__(self, parent, key):\n self.nexthop = key\n super().__init__(parent)\n\n next_hop_attr = managedattribute(\n name='next_hop_attr',\n read_only=True,\n doc=NextHopAttributes.__doc__)\n\n @next_hop_attr.initter\n def next_hop_attr(self):\n return SubAttributesDict(self.NextHopAttributes, parent=self)\n\n\n route_attr = managedattribute(\n name='route_attr',\n read_only=True,\n doc=RouteAttributes.__doc__)\n\n @route_attr.initter\n def route_attr(self):\n return SubAttributesDict(self.RouteAttributes, parent=self)\n\n\n address_family_attr = managedattribute(\n name='address_family_attr',\n read_only=True,\n doc=AddressFamilyAttributes.__doc__)\n\n @address_family_attr.initter\n def address_family_attr(self):\n return SubAttributesDict(self.AddressFamilyAttributes, parent=self)\n\n vrf_attr = managedattribute(\n name='vrf_attr',\n read_only=True,\n doc=VrfAttributes.__doc__)\n\n @vrf_attr.initter\n def vrf_attr(self):\n return SubAttributesDict(self.VrfAttributes, parent=self)\n\n\n device_attr = managedattribute(\n name='device_attr',\n read_only=True,\n doc=DeviceAttributes.__doc__)\n\n @device_attr.initter\n def device_attr(self):\n return SubAttributesDict(self.DeviceAttributes, parent=self)\n\n # ============ managedattributes ============#\n vrf = managedattribute(\n name='vrf',\n default=None,\n type=managedattribute.test_istype(str),\n doc='Vrf Name')\n\n # address_family\n class ADDRESS_FAMILY(Enum):\n ipv4 = 'ipv4'\n ipv6 = 'ipv6'\n\n af = managedattribute(\n name='address_family',\n default='ipv4',\n type=(None, ADDRESS_FAMILY),\n doc='Configure static routing address family')\n\n route = managedattribute(\n name='route',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc='route name')\n\n interface = managedattribute(\n name='interface',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc='Interface name')\n\n if_nexthop = managedattribute(\n 
name='if_nexthop',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc='Next hop')\n\n if_preference = managedattribute(\n name='if_preference',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc='Preference')\n\n if_tag = managedattribute(\n name='if_tag',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc='Tag')\n\n if_track = managedattribute(\n name='if_track',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc='Track')\n\n if_nh_vrf = managedattribute(\n name='if_nh_vrf',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc='vrf for next hop')\n\n nexthop = managedattribute(\n name='nexthop',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc='Next hop')\n\n preference = managedattribute(\n name='preference',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc='Preference')\n\n tag = managedattribute(\n name='tag',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc='Tag')\n\n track = managedattribute(\n name='track',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc='Track')\n\n nh_vrf = managedattribute(\n name='nh_vrf',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc='vrf for next hop')\n\n # =========================================================\n # build_config\n # =========================================================\n def build_config(self, devices=None, interfaces=None, links=None,\n apply=True, attributes=None, **kwargs):\n attributes = AttributesHelper(self, attributes)\n cfgs = {}\n\n devices, interfaces, links = \\\n consolidate_feature_args(self, devices, interfaces, links)\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr',\n keys=devices, sort=True):\n cfgs[key] = sub.build_config(apply=False, attributes=attributes2)\n if apply:\n for device_name, cfg in sorted(cfgs.items()):\n self.testbed.config_on_devices(cfg, fail_invalid=True)\n else:\n return cfgs\n\n def build_unconfig(self, devices=None, interfaces=None, links=None,\n apply=True, attributes=None, **kwargs):\n attributes = AttributesHelper(self, attributes)\n\n cfgs = {}\n\n devices, interfaces, links = \\\n consolidate_feature_args(self, devices, interfaces, links)\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr',\n keys=devices, sort=True):\n cfgs[key] = sub.build_unconfig(apply=False, attributes=attributes2)\n\n if apply:\n for device_name, cfg in sorted(cfgs.items()):\n self.testbed.config_on_devices(cfg, fail_invalid=True)\n else:\n return cfgs\n", "id": "12604367", "language": "Python", "matching_score": 5.728643417358398, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/static_routing/static_routing.py" }, { "content": "# Genie package\nfrom genie.decorator import managedattribute\nfrom genie.conf.base import Base, \\\n DeviceFeature, \\\n LinkFeature, \\\n Interface\nimport genie.conf.base.attributes\nfrom genie.conf.base.attributes import SubAttributes, \\\n SubAttributesDict, \\\n AttributesHelper, \\\n KeyedSubAttributes\n# Genie Xbu_shared\nfrom genie.libs.conf.base.feature import consolidate_feature_args\n__all__ = (\n 'Nd',\n )\n# Table of contents:\n# class Nd:\n# class DeviceAttributes:\n# class InterfaceAttributes:\n# class NeighborAttributes:\n\nclass Nd(DeviceFeature, LinkFeature):\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n # =============================================\n # 
Device attributes\n # =============================================\n class DeviceAttributes(genie.conf.base.attributes.DeviceSubAttributes):\n\n # InterfaceAttributes\n class InterfaceAttributes(KeyedSubAttributes):\n def __init__(self, parent, key):\n self.interface = key\n super().__init__(parent=parent)\n\n # NeighborAttribute\n class NeighborAttributes(KeyedSubAttributes):\n def __init__(self, parent, key):\n self.ip = key\n super().__init__(parent)\n\n neighbor_attr = managedattribute(\n name='neighbor_attr',\n read_only=True,\n doc=NeighborAttributes.__doc__)\n\n @neighbor_attr.initter\n def neighbor_attr(self):\n return SubAttributesDict(self.NeighborAttributes, parent=self)\n\n interface_attr = managedattribute(\n name='interface_attr',\n read_only=True,\n doc=InterfaceAttributes.__doc__)\n\n @interface_attr.initter\n def interface_attr(self):\n return SubAttributesDict(self.InterfaceAttributes, parent=self)\n\n\n device_attr = managedattribute(\n name='device_attr',\n read_only=True,\n doc=DeviceAttributes.__doc__)\n\n @device_attr.initter\n def device_attr(self):\n return SubAttributesDict(self.DeviceAttributes, parent=self)\n\n # ============ managedattributes ============#\n interface = managedattribute(\n name='interface',\n default=None,\n type=managedattribute.test_istype(str),\n doc='Interface')\n\n if_ra_interval = managedattribute(\n name='if_ra_interval',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc='set interface periodic interval')\n\n if_ra_lifetime = managedattribute(\n name='if_ra_lifetime',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc='set router lifetime')\n\n if_ra_suppress = managedattribute(\n name='if_ra_suppress',\n default=None,\n type=(None, managedattribute.test_istype(bool)),\n doc='Enable suppress RA')\n\n ip = managedattribute(\n name='ip',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc='neighbor ip')\n\n link_layer_address = managedattribute(\n name='link_layer_address',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc='set Mac address')\n\n # =========================================================\n # build_config\n # =========================================================\n def build_config(self, devices=None, interfaces=None, links=None,\n apply=True, attributes=None, **kwargs):\n attributes = AttributesHelper(self, attributes)\n cfgs = {}\n\n devices, interfaces, links = \\\n consolidate_feature_args(self, devices, interfaces, links)\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr',\n keys=devices, sort=True):\n cfgs[key] = sub.build_config(apply=False, attributes=attributes2)\n if apply:\n for device_name, cfg in sorted(cfgs.items()):\n self.testbed.config_on_devices(cfg, fail_invalid=True)\n else:\n return cfgs\n\n def build_unconfig(self, devices=None, interfaces=None, links=None,\n apply=True, attributes=None, **kwargs):\n attributes = AttributesHelper(self, attributes)\n\n cfgs = {}\n\n devices, interfaces, links = \\\n consolidate_feature_args(self, devices, interfaces, links)\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr',\n keys=devices, sort=True):\n cfgs[key] = sub.build_unconfig(apply=False, attributes=attributes2)\n\n if apply:\n for device_name, cfg in sorted(cfgs.items()):\n self.testbed.config_on_devices(cfg, fail_invalid=True)\n else:\n return cfgs\n", "id": "3327907", "language": "Python", "matching_score": 4.461162567138672, "max_stars_count": 94, "path": 
"pkgs/conf-pkg/src/genie/libs/conf/nd/nd.py" }, { "content": "\nfrom enum import Enum\n\n\n# Genie package\nfrom genie.decorator import managedattribute\nfrom genie.conf.base import Base, \\\n DeviceFeature, \\\n LinkFeature, \\\n Interface\nimport genie.conf.base.attributes\nfrom genie.conf.base.attributes import SubAttributes, \\\n SubAttributesDict, \\\n AttributesHelper, \\\n KeyedSubAttributes\nfrom genie.conf.base.attributes import InterfaceSubAttributes\nfrom genie.libs import parser\nfrom genie.abstract import Lookup\nfrom genie.ops.base import Base as ops_Base\nfrom genie.ops.base import Context\n# Genie Xbu_shared\n\nfrom genie.libs.conf.base.feature import consolidate_feature_args\n\n__all__ = (\n 'Arp',\n )\n# Structure Hierarchy:\n# Arp\n# +--DeviceAttributes\n# +-- InterfaceAttributes\n# +-- StaticArpAttributes\n\nclass Arp(DeviceFeature):\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n # =============================================\n # Device attributes\n # =============================================\n class DeviceAttributes(genie.conf.base.attributes.DeviceSubAttributes):\n\n # InterfaceAttributes\n class InterfaceAttributes(KeyedSubAttributes):\n def __init__(self, parent,key):\n self.interface = key\n super().__init__(parent)\n\n # StaticArpAttributes\n class StaticArpAttributes(KeyedSubAttributes):\n def __init__(self, parent,key):\n self.if_static_ip_address = key\n super().__init__(parent)\n\n static_arp_attr = managedattribute(\n name='static_arp_attr',\n read_only=True,\n doc=StaticArpAttributes.__doc__)\n\n @static_arp_attr.initter\n def static_arp_attr(self):\n return SubAttributesDict(self.StaticArpAttributes, parent=self)\n\n interface_attr = managedattribute(\n name='interface_attr',\n read_only=True,\n doc=InterfaceAttributes.__doc__)\n\n @interface_attr.initter\n def interface_attr(self):\n return SubAttributesDict(self.InterfaceAttributes, parent=self)\n\n interface_attr = managedattribute(\n name='interface_attr',\n read_only=True,\n doc=InterfaceAttributes.__doc__)\n\n @interface_attr.initter\n def interface_attr(self):\n return SubAttributesDict(self.InterfaceAttributes, parent=self)\n\n device_attr = managedattribute(\n name='device_attr',\n read_only=True,\n doc=DeviceAttributes.__doc__)\n\n @device_attr.initter\n def device_attr(self):\n return SubAttributesDict(self.DeviceAttributes, parent=self)\n\n # ============ managedattributes ============#\n max_entries = managedattribute(\n name='max_entries',\n default=None,\n type=managedattribute.test_istype(int),\n doc='ARP Entry count limit <1-2147483647>')\n\n interface = managedattribute(\n name='interface',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc='interface')\n\n if_proxy_enable = managedattribute(\n name='if_proxy_enable',\n default=None,\n type=(None, managedattribute.test_istype(bool)),\n doc='enabled interface proxy')\n\n if_local_proxy_enable = managedattribute(\n name='if_local_proxy_enable',\n default=None,\n type=(None, managedattribute.test_istype(bool)),\n doc='enabled interface local proxy')\n\n if_expire_time = managedattribute(\n name='if_expire_time',\n default=None,\n type=(None, managedattribute.test_istype(int)),\n doc='interface expire time')\n\n if_static_ip_address = managedattribute(\n name='if_static_ip_addrtess',\n default=None,\n type=(None, managedattribute.test_istype(str)),\n doc='static ip address')\n\n if_static_mac_address = managedattribute(\n name='if_static_mac_addrtess',\n default=None,\n type=(None, 
managedattribute.test_istype(str)),\n doc='static mac address')\n\n class staticEncapType(Enum):\n ARPA = 'arpa'\n\n if_static_encap_type = managedattribute(\n name='if_static_encap_type',\n default=None,\n type=(None, staticEncapType),\n doc='static encap type')\n\n if_static_alias = managedattribute(\n name='if_static_alias',\n default=None,\n type=managedattribute.test_istype(bool),\n doc='static alias')\n\n if_static_vrf = managedattribute(\n name='if_static_vrf',\n default=None,\n type=managedattribute.test_istype(str),\n doc='static vrf')\n\n # =========================================================\n # build_config\n # =========================================================\n def build_config(self, devices=None, interfaces=None, links=None,\n apply=True, attributes=None, **kwargs):\n attributes = AttributesHelper(self, attributes)\n cfgs = {}\n\n devices, interfaces, links = \\\n consolidate_feature_args(self, devices, interfaces, links)\n\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr',\n keys=devices, sort=True):\n cfgs[key] = sub.build_config(apply=False, attributes=attributes2)\n if apply:\n for device_name, cfg in sorted(cfgs.items()):\n self.testbed.config_on_devices(cfg, fail_invalid=True)\n else:\n return cfgs\n\n def build_unconfig(self, devices=None, interfaces=None, links=None,\n apply=True, attributes=None, **kwargs):\n attributes = AttributesHelper(self, attributes)\n cfgs = {}\n\n devices, interfaces, links = \\\n consolidate_feature_args(self, devices, interfaces, links)\n for key, sub, attributes2 in attributes.mapping_items(\n 'device_attr',\n keys=devices, sort=True):\n cfgs[key] = sub.build_unconfig(apply=False, attributes=attributes2)\n\n if apply:\n for device_name, cfg in sorted(cfgs.items()):\n self.testbed.config_on_devices(cfg, fail_invalid=True)\n else:\n return cfgs\n", "id": "1569597", "language": "Python", "matching_score": 4.908102989196777, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/arp/arp.py" }, { "content": "\"\"\"\nImplement Iosxe Specific Configurations for Arp objects.\n\"\"\"\n\n# Table of contents:\n# class DeviceAttributes\n# class InterfaceAttributes\n# class StaticArpAttributes\n\n# Python\nfrom abc import ABC\n# Genie package\nfrom genie.conf.base.attributes import AttributesHelper\nfrom genie.conf.base.config import CliConfig\nfrom genie.conf.base.cli import CliConfigBuilder\n\nclass Arp(ABC):\n\n class DeviceAttributes(ABC):\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # arp entries interface-limit <max_entries>\n if attributes.value('max_entries'):\n configurations.append_line(attributes.format('arp entries interface-limit {max_entries}'))\n\n # InterfaceAttributes\n for sub, attributes2 in attributes.mapping_values('interface_attr',\n sort=True,\n keys=self.interface_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n\n if apply:\n if configurations:\n self.device.configure(configurations)\n else:\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n # InterfaceAttributes\n class InterfaceAttributes(ABC):\n\n def build_config(self, apply=True, 
attributes=None, unconfig=False,\n **kwargs):\n assert not kwargs, kwargs\n\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n if attributes.value('if_proxy_enable') or \\\n attributes.value('if_local_proxy_enable') or \\\n attributes.value('if_expire_time'):\n with configurations.submode_context(\n attributes.format('interface {interface}', force=True)):\n\n if attributes.value('if_proxy_enable'):\n configurations.append_line(attributes.format('ip proxy-arp'))\n\n if attributes.value('if_local_proxy_enable'):\n configurations.append_line(attributes.format('ip local-proxy-arp'))\n\n if attributes.value('if_expire_time'):\n configurations.append_line(attributes.format('arp timeout {if_expire_time}'))\n\n for sub, attributes2 in attributes.mapping_values('static_arp_attr',\n sort=True,\n keys=self.static_arp_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n # StaticArpAttributes\n class StaticArpAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n vrf = \"\"\n if attributes.value('if_static_vrf'):\n vrf = 'vrf {}'.format(attributes.value('if_static_vrf'))\n\n static_alias = \"\"\n if attributes.value('if_static_alias') == True:\n static_alias = 'alias'\n\n # Command variants, ordered to match the vrf/static_alias checks\n # below: [0] vrf + alias, [1] vrf only, [2] alias only, [3] neither\n cli_cmd = ['arp {vrf} {if_static_ip_address} {if_static_mac_address} {if_static_encap_type.value} {static_alias}',\n 'arp {vrf} {if_static_ip_address} {if_static_mac_address} {if_static_encap_type.value}',\n 'arp {if_static_ip_address} {if_static_mac_address} {if_static_encap_type.value} {static_alias}',\n 'arp {if_static_ip_address} {if_static_mac_address} {if_static_encap_type.value}']\n\n if attributes.value('if_static_ip_address') and \\\n attributes.value('if_static_mac_address') and \\\n attributes.value('if_static_encap_type'):\n if vrf and static_alias:\n cmd = cli_cmd[0]. \\\n format(vrf=vrf,\n if_static_ip_address=attributes.value('if_static_ip_address'),\n if_static_mac_address=attributes.value('if_static_mac_address'),\n if_static_encap_type=attributes.value('if_static_encap_type'),\n static_alias=static_alias)\n if vrf and not static_alias:\n cmd = cli_cmd[1]. \\\n format(vrf=vrf,\n if_static_ip_address=attributes.value('if_static_ip_address'),\n if_static_mac_address=attributes.value('if_static_mac_address'),\n if_static_encap_type=attributes.value('if_static_encap_type'))\n if not vrf and static_alias:\n cmd = cli_cmd[2]. \\\n format(if_static_ip_address=attributes.value('if_static_ip_address'),\n if_static_mac_address=attributes.value('if_static_mac_address'),\n if_static_encap_type=attributes.value('if_static_encap_type'),\n static_alias=static_alias)\n if not vrf and not static_alias:\n cmd = cli_cmd[3]. 
\\\n format(if_static_ip_address=attributes.value('if_static_ip_address'),\n if_static_mac_address=attributes.value('if_static_mac_address'),\n if_static_encap_type=attributes.value('if_static_encap_type'))\n\n configurations.append_line(cmd)\n\n\n if apply:\n if configurations:\n self.device.configure(configurations)\n else:\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n", "id": "12156869", "language": "Python", "matching_score": 4.363008499145508, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/arp/iosxe/arp.py" }, { "content": "\n# import python\nfrom abc import ABC\n\n# import genie\nfrom genie.conf.base.config import CliConfig\nfrom genie.conf.base.cli import CliConfigBuilder\nfrom genie.conf.base.attributes import AttributesHelper\n\n\nclass Fdb(ABC):\n\n class DeviceAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n \n # mac address-table aging-time <mac_aging_time>\n configurations.append_line(\n attributes.format('mac address-table aging-time {mac_aging_time}'))\n\n # vlan attributes\n for sub, attributes2 in attributes.mapping_values('vlan_attr',\n sort=True, keys=self.vlan_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n if apply:\n if configurations:\n self.device.configure(configurations)\n else:\n return CliConfig(device=self.device, unconfig=unconfig,\n cli_config=configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n class VlanAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None, unconfig=False,\n **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n # mac address-table learning vlan <vlan_id>\n if attributes.value('vlan_mac_learning'):\n configurations.append_line(\n attributes.format('mac address-table learning vlan {vlan_id}', force=True))\n\n # mac address-table aging-time <vlan_mac_aging_time> vlan <vlan_id>\n configurations.append_line(\n attributes.format('mac address-table aging-time '\n '{vlan_mac_aging_time} vlan {vlan_id}', force=True))\n\n # mac_address_attr\n for sub, attributes2 in attributes.mapping_values('mac_address_attr',\n sort=True, keys=self.mac_address_attr):\n configurations.append_block(\n sub.build_config(apply=False,\n attributes=attributes2,\n unconfig=unconfig))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None, **kwargs):\n return self.build_config(apply=apply, attributes=attributes,\n unconfig=True, **kwargs)\n\n\n class MacAddressAttributes(ABC):\n\n def build_config(self, apply=True, attributes=None,\n unconfig=False, **kwargs):\n assert not apply\n assert not kwargs, kwargs\n attributes = AttributesHelper(self, attributes)\n configurations = CliConfigBuilder(unconfig=unconfig)\n\n self.vlan_id = self.parent.vlan_id\n\n # mac address-table static <mac_address> vlan <vlan_id> interface <interface>\n if attributes.value('interface'):\n interface = ' 
'.join(attributes.value('interface'))\n cmd = 'mac address-table static {mac_address} vlan {vlan_id}'\n cmd += ' interface {interface}'.format(interface=interface)\n configurations.append_line(attributes.format(cmd, force=True))\n\n # mac address-table static <mac_address> vlan <vlan_id> drop\n if attributes.value('drop'):\n configurations.append_line(attributes.format(\n 'mac address-table static {mac_address} vlan {vlan_id} drop'))\n\n return str(configurations)\n\n def build_unconfig(self, apply=True, attributes=None,\n **kwargs):\n return self.build_config(apply=apply,\n attributes=attributes,\n unconfig=True, **kwargs)\n", "id": "905592", "language": "Python", "matching_score": 2.9109041690826416, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/fdb/iosxe/fdb.py" }, { "content": "#!/usr/bin/env python\n\n# Python\nimport unittest\nfrom unittest.mock import Mock\n\n# Genie\nfrom genie.conf.tests import TestCase\nfrom genie.conf import Genie\nfrom genie.conf.base import Testbed, Device\n\n# Fdb\nfrom genie.libs.conf.fdb import Fdb\n\n\nclass test_fdb(TestCase):\n\n def setUp(self):\n\n # Set Genie Tb\n testbed = Testbed()\n Genie.testbed = testbed\n\n # Device\n self.dev1 = Device(name='PE1', testbed=testbed, os='iosxe')\n\n def test_fdb_config(self):\n\n # For failures\n self.maxDiff = None\n\n # Fdb object\n fdb = Fdb()\n self.dev1.add_feature(fdb)\n\n # Configure Fdb feature attributes\n fdb.device_attr[self.dev1].mac_aging_time = 0\n fdb.device_attr[self.dev1].vlan_attr['10'].vlan_mac_learning = True\n fdb.device_attr[self.dev1].vlan_attr['105'].vlan_mac_learning = True\n fdb.device_attr[self.dev1].vlan_attr['10'].vlan_mac_aging_time = 10\n fdb.device_attr[self.dev1].vlan_attr['10'].mac_address_attr['aaaa.bbbb.cccc'].\\\n interface = ['GigabitEthernet1/0/8', 'GigabitEthernet1/0/9']\n fdb.device_attr[self.dev1].vlan_attr['20'].mac_address_attr['aaaa.bbbb.cccc'].\\\n drop = True\n fdb.device_attr[self.dev1].vlan_attr['30'].mac_address_attr['aaaa.bbbb.cccc'].\\\n drop = True\n\n cfgs = fdb.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[self.dev1.name]), '\\n'.\\\n join([\n 'mac address-table aging-time 0',\n 'mac address-table learning vlan 10',\n 'mac address-table aging-time 10 vlan 10',\n 'mac address-table static aaaa.bbbb.cccc vlan 10 interface GigabitEthernet1/0/8 GigabitEthernet1/0/9',\n 'mac address-table learning vlan 105',\n 'mac address-table static aaaa.bbbb.cccc vlan 20 drop',\n 'mac address-table static aaaa.bbbb.cccc vlan 30 drop',\n ]))\n\n cfgs = fdb.build_unconfig(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[self.dev1.name]), '\\n'.\\\n join([\n 'no mac address-table aging-time 0',\n 'no mac address-table learning vlan 10',\n 'no mac address-table aging-time 10 vlan 10',\n 'no mac address-table static aaaa.bbbb.cccc vlan 10 interface GigabitEthernet1/0/8 GigabitEthernet1/0/9',\n 'no mac address-table learning vlan 105',\n 'no mac address-table static aaaa.bbbb.cccc vlan 20 drop',\n 'no mac address-table static aaaa.bbbb.cccc vlan 30 drop',\n ]))\n\n # uncfg with attributes\n cfgs = fdb.build_unconfig(apply=False,\n attributes={\n 'device_attr': {\n self.dev1: {\n 'mac_aging_time': None,\n 'vlan_attr': {\n '10': {\n 'vlan_mac_learning': None,\n 'mac_address_attr': {\n 'aaaa.bbbb.cccc': {\n 'interface': None\n }\n }\n }\n }\n }}})\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[self.dev1.name]), '\\n'.\\\n join([\n 'no mac 
address-table aging-time 0',\n 'no mac address-table learning vlan 10',\n 'no mac address-table aging-time 10 vlan 10',\n 'no mac address-table static aaaa.bbbb.cccc vlan 10 interface GigabitEthernet1/0/8 GigabitEthernet1/0/9',\n ]))\n\nif __name__ == '__main__':\n unittest.main()\n", "id": "8854682", "language": "Python", "matching_score": 2.171783208847046, "max_stars_count": 0, "path": "pkgs/conf-pkg/src/genie/libs/conf/fdb/iosxe/tests/test_fdb.py" }, { "content": "''' \nFdb Genie Ops Object for IOSXE - CLI.\n'''\n# Genie\nfrom genie.libs.ops.fdb.fdb import Fdb as SuperFdb\nfrom genie.ops.base import Context\n\n\nclass Fdb(SuperFdb):\n '''Fdb Genie Ops Object'''\n\n def learn(self):\n '''Learn Fdb Ops'''\n \n ########################################################################\n # info\n ########################################################################\n\n # unsupported keys\n # maximum_entries, mac_learning (global), age\n\n # mac_aging_time \n self.add_leaf(cmd='show mac address-table aging-time',\n src='[mac_aging_time]',\n dest='info[mac_aging_time]')\n\n vlan_src = '[vlans][(?P<vlan>^\\d+$)]'\n vlan_dst = 'info[mac_table][vlans][(?P<vlan>^\\d+$)]'\n\n self.add_leaf(cmd='show mac address-table aging-time',\n src=vlan_src + '[mac_aging_time]',\n dest=vlan_dst + '[mac_aging_time]')\n\n # mac_learning\n self.add_leaf(cmd='show mac address-table learning',\n src=vlan_src + '[mac_learning]',\n dest=vlan_dst + '[mac_learning]')\n\n # vlan, mac_address, drop, interfaces\n self.add_leaf(cmd='show mac address-table',\n src='[mac_table]' + vlan_src,\n dest='info[mac_table][vlans][(?P<vlan>^\\d+$)]')\n # total_mac_addresses\n self.add_leaf(cmd='show mac address-table',\n src='[total_mac_addresses]',\n dest='info[total_mac_addresses]')\n\n # make to write in cache\n self.make(final_call=True)\n", "id": "3338164", "language": "Python", "matching_score": 1.6858410835266113, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/fdb/iosxe/fdb.py" }, { "content": "''' \nDot1x Genie Ops Object for IOSXE - CLI.\n'''\n# Genie\nfrom genie.libs.ops.dot1x.dot1x import Dot1X as SuperDot1X\nfrom genie.ops.base import Context\n\n\nclass Dot1X(SuperDot1X):\n '''Dot1x Genie Ops Object'''\n\n def learn(self):\n '''Learn Dot1x Ops'''\n \n ########################################################################\n # info\n ########################################################################\n # unsupported keys\n # credentials, critical, test, supplicant\n\n # version, system_auth_control\n for key in ['version', 'system_auth_control']:\n self.add_leaf(cmd='show dot1x all details',\n src='[{}]'.format(key),\n dest='info[{}]'.format(key))\n\n # sessions\n self.add_leaf(cmd='show dot1x all count',\n src='[sessions]',\n dest='info[sessions]')\n\n # -------------- interfaces -----------------\n intf_src = '[interfaces][(?P<intf>.*)]'\n intf_dst = 'info[interfaces][(?P<intf>.*)]'\n\n # 'max_req', 'max_reauth_req', 'pae'\n # interface, credentials, authenticator\n # supplicant, max_start\n for key in ['max_req', 'max_reauth_req', 'interface', 'pae',\n 'credentials', 'authenticator', 'supplicant', 'max_start']:\n self.add_leaf(cmd='show dot1x all details',\n src=intf_src + '[{}]'.format(key),\n dest=intf_dst + '[{}]'.format(key))\n\n # timeout\n for key in ['auth_period', 'held_period', 'quiet_period', 'ratelimit_period',\n 'server_timeout', 'start_period', 'supp_timeout', 'tx_period']:\n self.add_leaf(cmd='show dot1x all details',\n src=intf_src + '[timeout][{}]'.format(key),\n dest=intf_dst + 
'[timeout][{}]'.format(key))\n # statistics\n self.add_leaf(cmd='show dot1x all statistics',\n src=intf_src + '[statistics]',\n dest=intf_dst + '[statistics]')\n # client\n client_src = '[interfaces][(?P<intf>.*)][clients][(?P<clients>.*)]'\n client_dst = 'info[interfaces][(?P<intf>.*)][clients][(?P<clients>.*)]'\n # client, status, pae\n for key in ['client', 'status', 'pae']:\n self.add_leaf(cmd='show dot1x all summary',\n src=client_src + '[{}]'.format(key),\n dest=client_dst + '[{}]'.format(key))\n # client\n # eap_method, session\n for key in ['eap_method', 'session']:\n self.add_leaf(cmd='show dot1x all details',\n src=client_src + '[{}]'.format(key),\n dest=client_dst + '[{}]'.format(key))\n\n # make to write in cache\n self.make(final_call=True)\n", "id": "77771", "language": "Python", "matching_score": 3.230543851852417, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/dot1x/iosxe/dot1x.py" }, { "content": "'''Dot1x Genie Ops Object Outputs for IOSXE.'''\n\n\nclass Dot1xOutput(object):\n\n ShowDot1xAllDetail = {\n \"version\": 3,\n \"interfaces\": {\n \"GigabitEthernet1/0/9\": {\n \"interface\": \"GigabitEthernet1/0/9\",\n \"max_start\": 3,\n \"pae\": \"supplicant\",\n \"credentials\": 'switch4',\n 'supplicant': {\n 'eap': {\n 'profile': 'EAP-METH'\n }\n },\n \"timeout\": {\n \"held_period\": 60,\n \"start_period\": 30,\n \"auth_period\": 30\n }\n }\n },\n \"system_auth_control\": True\n }\n\n ShowDot1xAllStatistics = {\n \"interfaces\": {\n \"GigabitEthernet1/0/9\": {\n \"interface\": \"GigabitEthernet1/0/9\",\n \"statistics\": {\n \"txtotal\": 3,\n \"rxreq\": 0,\n \"txstart\": 3,\n \"rxversion\": 0,\n \"txlogoff\": 0,\n \"rxinvalid\": 0,\n \"rxlenerr\": 0,\n \"lastrxsrcmac\": \"0000.0000.0000\",\n \"rxtotal\": 0,\n \"txresp\": 0\n }\n }\n }\n }\n\n ShowDot1xAllSummary = {\n \"interfaces\": {\n \"GigabitEthernet0/1\": {\n \"clients\": {\n \"fa16.3ede.7048\": {\n \"pae\": \"authenticator\",\n \"status\": \"unauthorized\",\n \"client\": \"fa16.3ede.7048\"\n },\n \"fa16.3ea5.663a\": {\n \"pae\": \"authenticator\",\n \"status\": \"authorized\",\n \"client\": \"fa16.3ea5.663a\"\n },\n \"fa16.3ea5.663b\": {\n \"pae\": \"supplicant\",\n \"status\": \"authorized\",\n \"client\": \"fa16.3ea5.663b\"\n },\n \"fa16.3ede.7049\": {\n \"pae\": \"supplicant\",\n \"status\": \"unauthorized\",\n \"client\": \"fa16.3ede.7049\"\n }\n },\n \"interface\": \"GigabitEthernet0/1\"\n }\n }\n }\n\n ShowDot1xAllCount = {\n 'sessions': {\n 'authorized_clients': 0,\n 'unauthorized_clients': 0,\n 'total': 0,\n }\n }\n\n Dot1x_info = {\n \"version\": 3,\n \"interfaces\": {\n \"GigabitEthernet1/0/9\": {\n \"supplicant\": {\n \"eap\": {\n \"profile\": \"EAP-METH\"\n }\n },\n \"credentials\": \"switch4\",\n \"pae\": \"supplicant\",\n \"max_start\": 3,\n \"interface\": \"GigabitEthernet1/0/9\",\n \"timeout\": {\n \"auth_period\": 30,\n \"start_period\": 30,\n \"held_period\": 60\n },\n \"statistics\": {\n \"rxinvalid\": 0,\n \"rxtotal\": 0,\n \"rxlenerr\": 0,\n \"txresp\": 0,\n \"txtotal\": 3,\n \"txstart\": 3,\n \"rxreq\": 0,\n \"rxversion\": 0,\n \"lastrxsrcmac\": \"0000.0000.0000\",\n \"txlogoff\": 0\n }\n },\n \"GigabitEthernet0/1\": {\n \"clients\": {\n \"fa16.3ea5.663b\": {\n \"pae\": \"supplicant\",\n \"client\": \"fa16.3ea5.663b\",\n \"status\": \"authorized\"\n },\n \"fa16.3ede.7049\": {\n \"pae\": \"supplicant\",\n \"client\": \"fa16.3ede.7049\",\n \"status\": \"unauthorized\"\n },\n \"fa16.3ede.7048\": {\n \"pae\": \"authenticator\",\n \"client\": \"fa16.3ede.7048\",\n \"status\": 
\"unauthorized\"\n },\n \"fa16.3ea5.663a\": {\n \"pae\": \"authenticator\",\n \"client\": \"fa16.3ea5.663a\",\n \"status\": \"authorized\"\n }\n }\n }\n },\n \"sessions\": {\n \"authorized_clients\": 0,\n \"unauthorized_clients\": 0,\n \"total\": 0\n },\n \"system_auth_control\": True\n }\n", "id": "5880575", "language": "Python", "matching_score": 2.4183757305145264, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/dot1x/iosxe/tests/dot1x_output.py" }, { "content": "# python\nimport copy\n\nclass TriggersConfigs():\n \n basic_config = {\n 'devices': {\n 'uut': {\n '1': {\n 'config': 'lldp run\\n'\\\n 'interface GigabitEthernet1/0/13\\n'\\\n 'no shutdown\\n'\\\n 'default interface GigabitEthernet2/0/15\\n'\\\n 'default interface GigabitEthernet3/0/15\\n'\\\n 'no interface vlan 10\\n'\\\n 'no interface vlan 11',\n 'sleep': 300\n }\n },\n 'helper': {\n '1': {\n 'config': 'lldp run'\n }\n }\n }\n }\n \n basic_unconfig = {\n 'devices': {\n 'uut': {\n '1': {\n 'config': 'no lldp run\\n'\\\n 'default interface GigabitEthernet1/0/13\\n'\\\n 'default interface vlan 1\\n'\\\n 'default interface GigabitEthernet1/0/15\\n'\\\n 'default interface GigabitEthernet1/0/16\\n'\\\n 'default interface GigabitEthernet1/0/17\\n'\\\n 'default interface GigabitEthernet2/0/15\\n'\\\n 'default interface GigabitEthernet3/0/15\\n'\\\n 'no vlan 2\\n'\\\n 'no vlan 10\\n'\\\n 'no vlan 11\\n'\\\n 'no interface vlan 2\\n'\\\n 'no interface vlan 10\\n'\\\n 'no interface vlan 11',\n }\n },\n 'helper': {\n '1': {\n 'config': 'no lldp run\\n'\\\n 'default vlan 1\\n'\\\n 'default interface vlan 1\\n'\\\n 'no vlan 2\\n'\\\n 'no vlan 10\\n'\\\n 'no vlan 11\\n'\\\n 'no interface vlan 2\\n'\\\n 'no interface vlan 10\\n'\\\n 'no interface vlan 11\\n'\\\n 'no ip vrf test1\\n'\\\n 'no ip vrf test2\\n'\\\n 'no ip vrf test3\\n'\\\n 'default interface GigabitEthernet1/0/1\\n'\\\n 'default interface GigabitEthernet1/0/2\\n'\\\n 'default interface GigabitEthernet1/0/4\\n'\\\n 'default interface GigabitEthernet1/0/5\\n',\n }\n }\n }\n }\n\n switchover_ping = {\n 'devices': {\n 'uut': {\n '1': {\n 'config': 'default interface GigabitEthernet1/0/15\\n'\\\n 'default interface GigabitEthernet1/0/16\\n'\\\n 'default interface vlan 1\\n'\\\n 'ip routing\\n'\\\n 'vlan 10\\n'\\\n 'state active\\n'\\\n 'interface vlan 10\\n'\\\n 'ip address 1.1.1.2 255.255.255.0\\n'\\\n 'no shut\\n'\\\n 'ipv6 unicast-routing\\n'\\\n 'interface GigabitEthernet2/0/15\\n'\\\n 'switchport access vlan 10\\n'\\\n 'switchport mode access\\n'\\\n 'no shut\\n'\\\n 'vlan 11\\n'\\\n 'state active\\n'\\\n 'interface vlan 11\\n'\\\n 'ip address 2.2.2.2 255.255.255.0\\n'\\\n 'no shut\\n'\\\n 'interface GigabitEthernet3/0/15\\n'\\\n 'switchport access vlan 11\\n'\\\n 'switchport mode access\\n'\\\n 'no shut\\n'\\\n 'do copy running-config startup-config\\n'\\\n 'do write memory',\n 'unconfig': 'default interface GigabitEthernet2/0/15\\n'\\\n 'default interface GigabitEthernet3/0/15\\n'\\\n 'no vlan 10\\n'\\\n 'no vlan 11\\n'\\\n 'no interface vlan 10\\n'\\\n 'no interface vlan 11',\n 'sleep': 45\n }\n },\n 'helper': {\n '1': {\n 'config': '!\\n'\\\n 'ip routing\\n'\\\n '!\\n'\\\n 'ip vrf vrf1\\n'\\\n 'rd 1:1\\n'\\\n '!\\n'\\\n 'ip vrf vrf2\\n'\\\n 'rd 1:2\\n'\\\n '!\\n'\\\n 'interface GigabitEthernet1/0/4\\n'\\\n 'no switchport\\n'\\\n 'no ip address\\n'\\\n 'ip vrf forwarding vrf1\\n'\\\n 'ip address 1.1.1.1 255.255.255.0\\n'\\\n 'no shutdown\\n'\\\n '!\\n'\\\n 'interface GigabitEthernet1/0/5\\n'\\\n 'no switchport\\n'\\\n 'no ip address\\n'\\\n 'ip vrf forwarding 
vrf2\\n'\\\n 'ip address 2.2.2.1 255.255.255.0\\n'\\\n 'no shutdown\\n'\\\n 'ip route vrf vrf2 1.1.1.0 255.255.255.0 GigabitEthernet1/0/5 2.2.2.2\\n'\\\n 'ip route vrf vrf1 2.2.2.0 255.255.255.0 GigabitEthernet1/0/4 1.1.1.2',\n 'unconfig': 'default interface GigabitEthernet1/0/4\\n'\\\n 'default interface GigabitEthernet1/0/5\\n'\\\n 'no ip vrf vrf1\\n'\\\n 'no ip vrf vrf2\\n'\\\n 'no ip route vrf vrf2 1.1.1.0 255.255.255.0 GigabitEthernet1/0/5 2.2.2.2\\n'\\\n 'no ip route vrf vrf1 2.2.2.0 255.255.255.0 GigabitEthernet1/0/4 1.1.1.2',\n }\n }\n }\n }\n\n application_sanity_base = '!enable global cef for ipv4 and vrf forwarding\\n'\\\n 'ip routing\\n'\\\n '!\\n'\\\n 'ip vrf vrf1\\n'\\\n 'rd 1:1\\n'\\\n '!\\n'\\\n 'ip vrf vrf2\\n'\\\n 'rd 1:2\\n'\\\n '!\\n'\\\n 'interface GigabitEthernet1/0/1\\n'\\\n 'no switchport\\n'\\\n 'no ip address\\n'\\\n 'ip vrf forwarding vrf1\\n'\\\n 'ip address 10.0.0.1 255.255.255.0\\n'\\\n 'mac-address 0200.dead.0001\\n'\\\n 'no shut\\n'\\\n '!\\n'\\\n 'interface GigabitEthernet1/0/2\\n'\\\n 'no switchport\\n'\\\n 'no ip address\\n'\\\n 'ip vrf forwarding vrf2\\n'\\\n 'ip address 10.0.0.2 255.255.255.0\\n'\\\n 'mac-address 0200.dead.0002\\n'\\\n 'no shut\\n'\\\n '!'\n\n l2_int_up = {\n 'devices': {\n 'uut': {\n '1': {\n 'config': 'default interface GigabitEthernet1/0/15\\n'\\\n 'default interface GigabitEthernet1/0/16\\n'\\\n 'interface GigabitEthernet1/0/15\\n'\\\n 'no shut\\n'\\\n 'interface GigabitEthernet1/0/16\\n'\\\n 'no shut',\n 'unconfig': 'default interface GigabitEthernet1/0/15\\n'\\\n 'default interface GigabitEthernet1/0/16',\n 'sleep': 30\n }\n },\n 'helper': {\n '1': {\n 'config': application_sanity_base,\n 'unconfig': 'default interface GigabitEthernet1/0/1\\n'\\\n 'default interface GigabitEthernet1/0/2\\n'\\\n 'no ip vrf vrf1\\n'\\\n 'no ip vrf vrf2',\n }\n }\n }\n }\n\n SVI_L3_ping_native_VLAN = {\n 'devices': {\n 'uut': {\n '1': {\n 'config': 'default interface GigabitEthernet1/0/15\\n'\\\n 'default interface GigabitEthernet1/0/16\\n'\\\n 'default interface vlan 1\\n'\\\n 'interface GigabitEthernet1/0/15\\n'\\\n 'switchport\\n'\\\n 'switchport access vlan 1\\n'\\\n 'no shut\\n'\\\n 'interface GigabitEthernet1/0/16\\n'\\\n 'switchport\\n'\\\n 'switchport access vlan 1\\n'\\\n 'no shut\\n'\\\n 'interface vlan 1\\n'\\\n 'ip address 172.16.17.32 255.255.255.0\\n'\\\n 'no shut',\n 'unconfig': 'default interface GigabitEthernet1/0/15\\n'\\\n 'default interface GigabitEthernet1/0/16\\n'\\\n 'default interface vlan 1',\n 'sleep': 30\n }\n },\n 'helper': {\n '1': {\n 'config': application_sanity_base,\n 'unconfig': 'default interface GigabitEthernet1/0/1\\n'\\\n 'default interface GigabitEthernet1/0/2\\n'\\\n 'no ip vrf vrf1\\n'\\\n 'no ip vrf vrf2',\n },\n '2': {\n 'config': 'default interface GigabitEthernet1/0/1\\n'\\\n 'default interface GigabitEthernet1/0/2\\n'\\\n 'default interface vlan 1\\n'\\\n 'interface GigabitEthernet1/0/1\\n'\\\n 'switchport\\n'\\\n 'switchport access vlan 1\\n'\\\n 'no shut\\n'\\\n 'interface GigabitEthernet1/0/2\\n'\\\n 'switchport\\n'\\\n 'switchport access vlan 1\\n'\\\n 'no shut\\n'\\\n 'interface vlan 1\\n'\\\n 'ip address 172.16.31.10 255.255.255.0\\n'\\\n 'no shut',\n 'unconfig': 'default interface GigabitEthernet1/0/1\\n'\\\n 'default interface GigabitEthernet1/0/2\\n'\\\n 'default interface vlan 1',\n }\n }\n }\n }\n\n SVI_L3_ping_VLAN99 = {\n 'devices': {\n 'uut': {\n '1': {\n 'config': 'default interface GigabitEthernet1/0/15\\n'\\\n 'default interface GigabitEthernet1/0/16\\n'\\\n 'default interface vlan 
1\\n'\\\n 'interface vlan 99\\n'\\\n 'ip address 172.16.17.32 255.255.255.0\\n'\\\n 'no shut\\n'\\\n 'interface GigabitEthernet1/0/15\\n'\\\n 'switchport\\n'\\\n 'switchport access vlan 99\\n'\\\n 'no shut\\n'\\\n 'interface GigabitEthernet1/0/16\\n'\\\n 'switchport\\n'\\\n 'switchport access vlan 99\\n'\\\n 'no shut',\n 'unconfig': 'default interface GigabitEthernet1/0/15\\n'\\\n 'default interface GigabitEthernet1/0/16\\n'\\\n 'no interface vlan 99',\n 'sleep': 40\n }\n },\n 'helper': {\n '1': {\n 'config': application_sanity_base,\n 'unconfig': 'default interface GigabitEthernet1/0/1\\n'\\\n 'default interface GigabitEthernet1/0/2\\n'\\\n 'no ip vrf vrf1\\n'\\\n 'no ip vrf vrf2',\n },\n '2': {\n 'config': 'default interface GigabitEthernet1/0/1\\n'\\\n 'default interface GigabitEthernet1/0/2\\n'\\\n 'default interface vlan 1\\n'\\\n 'interface vlan 99\\n'\\\n 'ip address 172.16.31.10 255.255.255.0\\n'\\\n 'no shut\\n'\\\n 'interface GigabitEthernet1/0/1\\n'\\\n 'switchport\\n'\\\n 'switchport access vlan 99\\n'\\\n 'no shut\\n'\\\n 'interface GigabitEthernet1/0/2\\n'\\\n 'switchport\\n'\\\n 'switchport access vlan 99\\n'\\\n 'no shut',\n 'unconfig': 'default interface GigabitEthernet1/0/1\\n'\\\n 'default interface GigabitEthernet1/0/2\\n'\\\n 'no interface vlan 99',\n }\n }\n }\n }\n\n trunk_config = {\n 'devices': {\n 'uut': {\n '1': {\n 'config': 'default interface GigabitEthernet1/0/15\\n'\\\n 'default interface GigabitEthernet1/0/16\\n'\\\n 'default interface vlan 1\\n'\\\n 'vlan 99\\n'\\\n 'state active\\n'\\\n 'interface vlan 99\\n'\\\n 'ip address 172.16.17.32 255.255.255.0\\n'\\\n 'no shut\\n'\\\n 'interface GigabitEthernet1/0/15\\n'\\\n 'switchport mode trunk\\n'\\\n 'switchport trunk allowed vlan 99\\n'\\\n 'no shut\\n'\\\n 'interface GigabitEthernet1/0/16\\n'\\\n 'switchport mode trunk\\n'\\\n 'switchport trunk allowed vlan 99\\n'\\\n 'no shut',\n 'unconfig': 'default interface GigabitEthernet1/0/15\\n'\\\n 'default interface GigabitEthernet1/0/16\\n'\\\n 'no vlan 99\\n'\\\n 'no interface vlan 99',\n 'sleep': 60\n }\n },\n 'helper': {\n '1': {\n 'config': application_sanity_base,\n 'unconfig': 'default interface GigabitEthernet1/0/1\\n'\\\n 'default interface GigabitEthernet1/0/2\\n'\\\n 'no ip vrf vrf1\\n'\\\n 'no ip vrf vrf2',\n },\n '2': {\n 'config': 'default interface GigabitEthernet1/0/1\\n'\\\n 'default interface GigabitEthernet1/0/2\\n'\\\n 'default interface vlan 1\\n'\\\n 'vlan 99\\n'\\\n 'state active\\n'\\\n 'interface vlan 99\\n'\\\n 'ip address 172.16.31.10 255.255.255.0\\n'\\\n 'no shut\\n'\\\n 'interface loopback1\\n'\\\n 'ip address 1.1.1.1 255.255.255.0\\n'\\\n 'interface GigabitEthernet1/0/1\\n'\\\n 'switchport trunk encapsulation dot1q\\n'\\\n 'switchport mode trunk\\n'\\\n 'switchport trunk allowed vlan 99\\n'\\\n 'no shut\\n'\\\n 'interface GigabitEthernet1/0/2\\n'\\\n 'switchport trunk encapsulation dot1q\\n'\\\n 'switchport mode trunk\\n'\\\n 'switchport trunk allowed vlan 99\\n'\\\n 'no shut',\n 'unconfig': 'default interface GigabitEthernet1/0/1\\n'\\\n 'default interface GigabitEthernet1/0/2\\n'\\\n 'no interface loopback1\\n'\\\n 'no vlan 99\\n'\\\n 'no interface vlan 99',\n }\n }\n }\n }\n\n SVI_L3_ping_VLAN99_Trunk = {\n 'devices': {\n 'uut': {\n 'next_hop': '172.16.31.10',\n '1': {\n 'config': 'default interface GigabitEthernet1/0/15\\n'\\\n 'default interface GigabitEthernet1/0/16\\n'\\\n 'default interface vlan 1\\n'\\\n 'vlan 99\\n'\\\n 'state active\\n'\\\n 'interface vlan 99\\n'\\\n 'ip address 172.16.17.32 255.255.255.0\\n'\\\n 'no 
shut\\n'\\\n 'interface GigabitEthernet1/0/15\\n'\\\n 'switchport mode trunk\\n'\\\n 'switchport trunk allowed vlan 99\\n'\\\n 'no shut\\n'\\\n 'interface GigabitEthernet1/0/16\\n'\\\n 'switchport mode trunk\\n'\\\n 'switchport trunk allowed vlan 99\\n'\\\n 'no shut',\n 'unconfig': 'default interface GigabitEthernet1/0/15\\n'\\\n 'default interface GigabitEthernet1/0/16\\n'\\\n 'no vlan 99\\n'\\\n 'no interface vlan 99',\n 'sleep': 50\n }\n },\n 'helper': {\n '1': {\n 'config': application_sanity_base,\n 'unconfig': 'default interface GigabitEthernet1/0/1\\n'\\\n 'default interface GigabitEthernet1/0/2\\n'\\\n 'no ip vrf vrf1\\n'\\\n 'no ip vrf vrf2',\n },\n '2': {\n 'config': 'default interface GigabitEthernet1/0/1\\n'\\\n 'default interface GigabitEthernet1/0/2\\n'\\\n 'default interface vlan 1\\n'\\\n 'vlan 99\\n'\\\n 'state active\\n'\\\n 'interface vlan 99\\n'\\\n 'ip address {next_hop} 255.255.255.0\\n'\\\n 'no shut\\n'\\\n 'interface loopback1\\n'\\\n 'ip address 1.1.1.1 255.255.255.0\\n'\\\n 'interface GigabitEthernet1/0/1\\n'\\\n 'switchport trunk encapsulation dot1q\\n'\\\n 'switchport mode trunk\\n'\\\n 'switchport trunk allowed vlan 99\\n'\\\n 'no shut\\n'\\\n 'interface GigabitEthernet1/0/2\\n'\\\n 'switchport trunk encapsulation dot1q\\n'\\\n 'switchport mode trunk\\n'\\\n 'switchport trunk allowed vlan 99\\n'\\\n 'no shut',\n 'unconfig': 'default interface GigabitEthernet1/0/1\\n'\\\n 'default interface GigabitEthernet1/0/2\\n'\\\n 'no interface loopback1\\n'\\\n 'no vlan 99\\n'\\\n 'no interface vlan 99',\n }\n }\n }\n }\n\n IPv6_Traceroute_VLAN99_Trunk = {\n 'devices': {\n 'uut': {\n 'next_hop': 'fdf8:f53e:61e4::18',\n '1': {\n 'config': 'default interface GigabitEthernet1/0/15\\n'\\\n 'default interface GigabitEthernet1/0/16\\n'\\\n 'default interface vlan 1\\n'\\\n 'vlan 99\\n'\\\n 'state active\\n'\\\n 'interface vlan 99\\n'\\\n 'ipv6 address fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b/64\\n'\\\n 'no shut\\n'\\\n 'ipv6 unicast-routing\\n'\\\n 'interface GigabitEthernet1/0/15\\n'\\\n 'switchport mode trunk\\n'\\\n 'switchport trunk allowed vlan 99\\n'\\\n 'no shut\\n'\\\n 'interface GigabitEthernet1/0/16\\n'\\\n 'switchport mode trunk\\n'\\\n 'switchport trunk allowed vlan 99\\n'\\\n 'no shut',\n 'unconfig': 'default interface GigabitEthernet1/0/15\\n'\\\n 'default interface GigabitEthernet1/0/16\\n'\\\n 'no vlan 99\\n'\\\n 'no interface vlan 99',\n 'sleep': 50\n }\n },\n 'helper': {\n '1': {\n 'config': application_sanity_base,\n 'unconfig': 'default interface GigabitEthernet1/0/1\\n'\\\n 'default interface GigabitEthernet1/0/2\\n'\\\n 'no ip vrf vrf1\\n'\\\n 'no ip vrf vrf2',\n },\n '2': {\n 'config': 'default interface GigabitEthernet1/0/2\\n'\\\n 'default interface GigabitEthernet1/0/1\\n'\\\n 'default interface vlan 1\\n'\\\n 'vlan 99\\n'\\\n 'state active\\n'\\\n 'interface vlan 99\\n'\\\n 'ipv6 address fdf8:f53e:61e4::18/64\\n'\\\n 'no shut\\n'\\\n 'interface loopback1\\n'\\\n 'ipv6 address fc00:db20:35b:7399::5/64\\n'\\\n 'interface GigabitEthernet1/0/1\\n'\\\n 'switchport trunk encapsulation dot1q\\n'\\\n 'switchport mode trunk\\n'\\\n 'switchport trunk allowed vlan 99\\n'\\\n 'no shut\\n'\\\n 'interface GigabitEthernet1/0/2\\n'\\\n 'switchport trunk encapsulation dot1q\\n'\\\n 'switchport mode trunk\\n'\\\n 'switchport trunk allowed vlan 99\\n'\\\n 'no shut',\n 'unconfig': 'default interface GigabitEthernet1/0/1\\n'\\\n 'default interface GigabitEthernet1/0/2\\n'\\\n 'no interface loopback1\\n'\\\n 'no vlan 99\\n'\\\n 'no interface vlan 99',\n }\n }\n }\n }\n\n 
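# NOTE: stp_base_uut and stp_base_helper below are shared CLI preambles, reused by the STP_* root-check and ACL trigger dicts that follow (ip_acl_pacl_permit/_deny are deep copies of ethernet_acl_pacl).\n\n    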
stp_base_uut = 'interface GigabitEthernet1/0/15\\n'\\\n 'shutdown\\n'\\\n 'interface GigabitEthernet1/0/16\\n'\\\n 'shutdown\\n'\\\n 'interface GigabitEthernet1/0/17\\n'\\\n 'shutdown\\n'\\\n 'interface GigabitEthernet1/0/1\\n'\\\n 'shutdown\\n'\\\n 'interface GigabitEthernet1/0/2\\n'\\\n 'shutdown\\n'\\\n 'interface GigabitEthernet1/0/3\\n'\\\n 'shutdown\\n'\\\n 'interface GigabitEthernet1/0/4\\n'\\\n 'shutdown\\n'\\\n 'interface GigabitEthernet1/0/7\\n'\\\n 'shutdown\\n'\\\n 'interface GigabitEthernet1/0/8\\n'\\\n 'shutdown\\n'\\\n 'interface GigabitEthernet1/0/9\\n'\\\n 'shutdown\\n'\\\n 'interface GigabitEthernet2/0/15\\n'\\\n 'shutdown\\n'\\\n 'interface GigabitEthernet3/0/15\\n'\\\n 'shutdown\\n'\\\n 'vlan 1-2\\n'\\\n 'no vlan 3-1000\\n'\\\n 'spanning-tree vlan 1-2\\n'\\\n 'spanning-tree mode pvst\\n'\\\n 'default int GigabitEthernet1/0/15\\n'\\\n 'int GigabitEthernet1/0/15\\n'\\\n 'switchport mode trunk\\n'\\\n 'switchport trunk allowed vlan 1-2\\n'\\\n 'shut\\n'\\\n 'no shut\\n'\\\n 'int GigabitEthernet1/0/16\\n'\\\n 'shut\\n'\\\n 'default int GigabitEthernet1/0/17\\n'\\\n 'int GigabitEthernet1/0/17\\n'\\\n 'switchport mode access\\n'\\\n 'switchport access vlan 1\\n'\\\n 'shut\\n'\\\n 'no shut\\n'\\\n 'default int GigabitEthernet2/0/15\\n'\\\n 'int GigabitEthernet2/0/15\\n'\\\n 'switchport mode access\\n'\\\n 'switchport access vlan 2\\n'\\\n 'shut\\n'\\\n 'no shut'\n\n stp_base_helper = 'ip routing\\n'\\\n 'vlan 1-2\\n'\\\n 'exit\\n'\\\n 'spanning-tree vlan 1-2\\n'\\\n 'spanning-tree mode pvst\\n'\\\n 'ip vrf test1\\n'\\\n 'rd 100:1\\n'\\\n 'route-target export 100:1\\n'\\\n 'route-target import 100:1\\n'\\\n '!\\n'\\\n 'ip vrf test2\\n'\\\n 'rd 100:2\\n'\\\n 'route-target export 100:2\\n'\\\n 'route-target import 100:2\\n'\\\n '!\\n'\\\n 'ip vrf test3\\n'\\\n 'rd 100:3\\n'\\\n 'route-target export 100:3\\n'\\\n 'route-target import 100:3\\n'\\\n 'exit\\n'\\\n '!\\n'\\\n 'default int Vlan1\\n'\\\n 'interface Vlan1\\n'\\\n 'ip vrf forwarding test1\\n'\\\n 'ip address 1.2.1.1 255.255.255.0\\n'\\\n 'no shut\\n'\\\n '!\\n'\\\n 'interface Vlan2\\n'\\\n 'ip vrf forwarding test1\\n'\\\n 'ip address 1.2.2.1 255.255.255.0\\n'\\\n 'no shut\\n'\\\n '!\\n'\\\n 'default int GigabitEthernet1/0/1\\n'\\\n 'interface GigabitEthernet1/0/1\\n'\\\n 'switchport trunk encapsulation dot1q\\n'\\\n 'switchport mode trunk\\n'\\\n 'switchport trunk allowed vlan 1-2\\n'\\\n 'no shut\\n'\\\n '!\\n'\\\n 'interface GigabitEthernet1/0/2\\n'\\\n 'shut\\n'\\\n '!\\n'\\\n 'default int GigabitEthernet1/0/3\\n'\\\n 'interface GigabitEthernet1/0/3\\n'\\\n 'no switchport\\n'\\\n 'ip vrf forwarding test2\\n'\\\n 'ip address 1.2.1.2 255.255.255.0\\n'\\\n 'no shut\\n'\\\n '!\\n'\\\n 'default int GigabitEthernet1/0/4\\n'\\\n 'interface GigabitEthernet1/0/4\\n'\\\n 'no switchport\\n'\\\n 'ip vrf forwarding test3\\n'\\\n 'ip address 1.2.2.2 255.255.255.0\\n'\\\n 'no shut\\n'\\\n '!\\n'\\\n 'loggin buffer 500000 debugging'\n\n STP_root_check = {\n 'devices': {\n 'uut': {\n '1': {\n 'config': stp_base_uut,\n 'unconfig': 'default vlan 1\\n'\\\n 'no vlan 2\\n'\\\n 'no interface vlan2',\n },\n '2': {\n 'config': 'spanning-tree vlan 1\\n'\\\n 'spanning-tree mode pvst\\n'\\\n 'default spanning-tree vlan 1 priority\\n'\\\n 'spanning-tree vlan 1 priority 0',\n 'unconfig': 'default spanning-tree vlan 1\\n'\\\n 'default spanning-tree mode\\n'\\\n 'default spanning-tree vlan 1 priority\\n'\\\n 'default interface GigabitEthernet2/0/15\\n'\\\n 'default interface GigabitEthernet1/0/15\\n'\\\n 'default interface 
GigabitEthernet1/0/16\\n'\\\n 'default interface GigabitEthernet1/0/17\\n'\\\n 'default interface GigabitEthernet1/0/1\\n'\\\n 'default interface GigabitEthernet1/0/2\\n'\\\n 'default interface GigabitEthernet1/0/3\\n'\\\n 'default interface GigabitEthernet1/0/4\\n'\\\n 'default interface GigabitEthernet1/0/7\\n'\\\n 'default interface GigabitEthernet1/0/8\\n'\\\n 'default interface GigabitEthernet1/0/9\\n'\\\n 'default interface GigabitEthernet3/0/15',\n 'sleep': 60\n }\n },\n 'helper': {\n '1': {\n 'config': stp_base_helper,\n 'unconfig': 'no ip vrf test1\\n'\\\n 'no ip vrf test2\\n'\\\n 'no ip vrf test3\\n'\\\n 'default vlan 1\\n'\\\n 'no vlan 2\\n'\\\n 'no interface vlan2',\n },\n '2': {\n 'config': 'spanning-tree vlan 1\\n'\\\n 'spanning-tree mode pvst\\n'\\\n 'default spanning-tree vlan 1 priority',\n 'unconfig': 'default spanning-tree vlan 1\\n'\\\n 'default spanning-tree mode\\n'\\\n 'default spanning-tree vlan 1 priority\\n'\\\n 'default interface GigabitEthernet1/0/1\\n'\\\n 'default interface GigabitEthernet1/0/2\\n'\\\n 'default interface GigabitEthernet1/0/3\\n'\\\n 'default interface GigabitEthernet1/0/4\\n'\\\n 'no interface vlan2',\n }\n }\n }\n }\n\n STP_Rapid_PVST_root_check = {\n 'devices': {\n 'uut': {\n '1': {\n 'config': stp_base_uut,\n 'unconfig': 'default vlan 1\\n'\\\n 'no vlan 2\\n'\\\n 'no interface vlan2',\n },\n '2': {\n 'config': 'vlan 1-2\\n'\\\n 'exit\\n'\\\n 'spanning-tree vlan 1-2\\n'\\\n 'spanning-tree mode rapid-pvst\\n'\\\n 'default spanning-tree vlan 1-2 priority\\n'\\\n 'spanning-tree vlan 1-2 priority 0',\n 'unconfig': 'default spanning-tree vlan 1\\n'\\\n 'default spanning-tree mode\\n'\\\n 'default spanning-tree vlan 1-2 priority\\n'\\\n 'default interface GigabitEthernet2/0/15\\n'\\\n 'default interface GigabitEthernet1/0/15\\n'\\\n 'default interface GigabitEthernet1/0/16\\n'\\\n 'default interface GigabitEthernet1/0/17\\n'\\\n 'default interface GigabitEthernet1/0/1\\n'\\\n 'default interface GigabitEthernet1/0/2\\n'\\\n 'default interface GigabitEthernet1/0/3\\n'\\\n 'default interface GigabitEthernet1/0/4\\n'\\\n 'default interface GigabitEthernet1/0/7\\n'\\\n 'default interface GigabitEthernet1/0/8\\n'\\\n 'default interface GigabitEthernet1/0/9\\n'\\\n 'no vlan 2\\n'\\\n 'default vlan 1\\n'\\\n 'default interface GigabitEthernet3/0/15',\n 'sleep': 60\n }\n },\n 'helper': {\n '1': {\n 'config': stp_base_helper,\n 'unconfig': 'no ip vrf test1\\n'\\\n 'no ip vrf test2\\n'\\\n 'no ip vrf test3\\n'\\\n 'default vlan 1\\n'\\\n 'no vlan 2\\n'\\\n 'no interface vlan2',\n },\n '2': {\n 'config': 'vlan 1-2\\n'\\\n 'exit\\n'\\\n 'spanning-tree vlan 1-2\\n'\\\n 'spanning-tree mode rapid-pvst\\n'\\\n 'default spanning-tree vlan 1-2 priority',\n 'unconfig': 'default spanning-tree vlan 1\\n'\\\n 'default spanning-tree mode\\n'\\\n 'default spanning-tree vlan 1-2 priority\\n'\\\n 'default interface GigabitEthernet1/0/1\\n'\\\n 'default interface GigabitEthernet1/0/2\\n'\\\n 'default interface GigabitEthernet1/0/3\\n'\\\n 'default interface GigabitEthernet1/0/4\\n'\\\n 'no vlan 2\\n'\\\n 'default vlan 1\\n'\\\n 'no interface vlan2',\n }\n }\n }\n }\n\n ethernet_acl_pacl = {\n 'devices': {\n 'uut': {\n '1': {\n 'config': stp_base_uut,\n 'unconfig': 'default vlan 1\\n'\\\n 'no vlan 2\\n'\\\n 'no interface vlan2',\n },\n '2': {\n 'config': 'vlan 1-2\\n'\\\n 'exit\\n'\\\n 'spanning-tree vlan 1-2\\n'\\\n 'spanning-tree mode rapid-pvst\\n'\\\n 'default spanning-tree vlan 1-2 priority',\n 'unconfig': 'default spanning-tree vlan 1\\n'\\\n 'default spanning-tree 
mode\\n'\\\n 'default spanning-tree vlan 1-2 priority\\n'\\\n 'default interface GigabitEthernet2/0/15\\n'\\\n 'default interface GigabitEthernet1/0/15\\n'\\\n 'default interface GigabitEthernet1/0/16\\n'\\\n 'default interface GigabitEthernet1/0/17\\n'\\\n 'default interface GigabitEthernet1/0/1\\n'\\\n 'default interface GigabitEthernet1/0/2\\n'\\\n 'default interface GigabitEthernet1/0/3\\n'\\\n 'default interface GigabitEthernet1/0/4\\n'\\\n 'default interface GigabitEthernet1/0/7\\n'\\\n 'default interface GigabitEthernet1/0/8\\n'\\\n 'default interface GigabitEthernet1/0/9\\n'\\\n 'no vlan 2\\n'\\\n 'default vlan 1\\n'\\\n 'default interface GigabitEthernet3/0/15',\n 'sleep': 50\n }\n },\n 'helper': {\n '1': {\n 'config': stp_base_helper,\n 'unconfig': 'no ip vrf test1\\n'\\\n 'no ip vrf test2\\n'\\\n 'no ip vrf test3\\n'\\\n 'default vlan 1\\n'\\\n 'no vlan 2\\n'\\\n 'no interface vlan2',\n },\n '2': {\n 'config': 'vlan 1-2\\n'\\\n 'exit\\n'\\\n 'spanning-tree vlan 1-2\\n'\\\n 'spanning-tree mode rapid-pvst\\n'\\\n 'default spanning-tree vlan 1-2 priority',\n 'unconfig': 'default spanning-tree vlan 1\\n'\\\n 'default spanning-tree mode\\n'\\\n 'default spanning-tree vlan 1-2 priority\\n'\\\n 'default interface GigabitEthernet1/0/1\\n'\\\n 'default interface GigabitEthernet1/0/2\\n'\\\n 'default interface GigabitEthernet1/0/3\\n'\\\n 'default interface GigabitEthernet1/0/4\\n'\\\n 'no vlan 2\\n'\\\n 'default vlan 1\\n'\\\n 'no interface vlan2',\n }\n }\n }\n }\n\n ip_acl_pacl_permit = copy.deepcopy(ethernet_acl_pacl)\n ip_acl_pacl_deny = copy.deepcopy(ethernet_acl_pacl)\n\n ip_acl_racl = {\n 'devices': {\n 'helper': {\n '1': {\n 'config': \\\n 'interface vlan 10\\n'\\\n 'ip address 172.16.17.32 255.255.255.0\\n'\\\n 'no shut\\n'\\\n '!\\n'\\\n 'default interface GigabitEthernet1/0/2\\n'\\\n 'interface GigabitEthernet1/0/2\\n'\\\n 'switchport mode access\\n'\\\n 'switchport access vlan 10\\n'\\\n 'no shut\\n'\\\n 'exit\\n'\\\n '!\\n'\\\n 'ip routing\\n'\\\n 'ip route 1.5.1.0 255.255.255.0 172.16.17.32',\n 'unconfig': 'no interface vlan 10\\n'\\\n 'default interface GigabitEthernet1/0/2\\n'\\\n 'no ip route 1.5.1.0 255.255.255.0 172.16.17.32',\n }\n },\n 'uut': {\n '1': {\n 'config': \\\n 'interface vlan 10\\n'\\\n 'ip address 172.16.17.32 255.255.255.0\\n'\\\n 'no shut\\n'\\\n '!\\n'\\\n 'default interface GigabitEthernet1/0/16\\n'\\\n 'interface GigabitEthernet1/0/16\\n'\\\n 'switchport mode access\\n'\\\n 'switchport access vlan 10\\n'\\\n 'no shut\\n'\\\n 'exit\\n'\\\n 'interface loopback 10\\n'\\\n 'ip address 1.5.1.1 255.255.255.0\\n'\\\n 'no shut\\n'\\\n '!',\n 'unconfig': 'no interface vlan 10\\n'\\\n 'no vlan 10\\n'\\\n 'default interface GigabitEthernet1/0/16\\n'\\\n 'no interface loopback 10',\n 'sleep': 40\n }\n }\n }\n }\n\n Etherchannel_LACP_ping_check = {\n 'devices': {\n 'helper': {\n '1': {\n 'config': 'vlan 11\\n'\\\n 'spanning-tree mode rapid-pvst\\n'\\\n 'interface Vlan11\\n'\\\n 'ip address 1.2.1.1 255.255.255.0\\n'\\\n 'no shut\\n'\\\n '!\\n'\\\n 'default int GigabitEthernet1/0/1\\n'\\\n 'interface GigabitEthernet1/0/1\\n'\\\n 'switchport trunk encapsulation dot1q\\n'\\\n 'switchport mode trunk\\n'\\\n 'switchport trunk allowed vlan 11\\n'\\\n 'channel-group 10 mode passive\\n'\\\n 'no shut\\n'\\\n '!\\n'\\\n 'default int GigabitEthernet1/0/2\\n'\\\n 'interface GigabitEthernet1/0/2\\n'\\\n 'switchport trunk encapsulation dot1q\\n'\\\n 'switchport mode trunk\\n'\\\n 'switchport trunk allowed vlan 11\\n'\\\n 'channel-group 10 mode passive\\n'\\\n 'no shut\\n'\\\n 
'!\\n'\\\n 'default int GigabitEthernet1/0/3\\n'\\\n 'interface GigabitEthernet1/0/3\\n'\\\n 'switchport trunk encapsulation dot1q\\n'\\\n 'switchport mode trunk\\n'\\\n 'switchport trunk allowed vlan 11\\n'\\\n 'channel-group 10 mode passive\\n'\\\n 'no shut\\n'\\\n '!\\n'\\\n 'loggin buffer 500000 debugging',\n 'unconfig': 'default interface GigabitEthernet1/0/1\\n'\\\n 'default interface GigabitEthernet1/0/2\\n'\\\n 'default interface GigabitEthernet1/0/3\\n'\\\n 'no vlan 11\\n'\\\n 'no interface vlan 11\\n'\\\n 'no interface Port-channel10\\n'\\\n 'no spanning-tree mode rapid-pvst',\n }\n },\n 'uut': {\n '1': {\n 'config': 'no vlan 11\\n'\\\n 'vlan 11\\n'\\\n 'spanning-tree mode rapid-pvst\\n'\\\n 'spanning-tree vlan 11\\n'\\\n 'no interface Vlan11\\n'\\\n 'no interface Po10\\n'\\\n 'interface Vlan11\\n'\\\n 'ip address 1.2.1.2 255.255.255.0\\n'\\\n 'no shut\\n'\\\n 'default interface GigabitEthernet1/0/15\\n'\\\n 'interface GigabitEthernet1/0/15\\n'\\\n 'switchport mode trunk\\n'\\\n 'switchport trunk allowed vlan 11\\n'\\\n 'no shut\\n'\\\n 'default interface GigabitEthernet1/0/16\\n'\\\n 'interface GigabitEthernet1/0/16\\n'\\\n 'switchport mode trunk\\n'\\\n 'switchport trunk allowed vlan 11\\n'\\\n 'no shut\\n'\\\n 'default interface GigabitEthernet1/0/17\\n'\\\n 'interface GigabitEthernet1/0/17\\n'\\\n 'switchport mode trunk\\n'\\\n 'switchport trunk allowed vlan 11\\n'\\\n 'no shut\\n'\\\n '!\\n'\\\n 'loggin buffer 500000 debugging',\n 'unconfig': 'default interface GigabitEthernet1/0/15\\n'\\\n 'default interface GigabitEthernet1/0/16\\n'\\\n 'default interface GigabitEthernet1/0/17\\n'\\\n 'no interface vlan11\\n'\\\n 'no vlan 11\\n'\\\n 'no interface Port-channel10\\n'\\\n 'no spanning-tree mode rapid-pvst',\n 'sleep': 40\n }\n }\n }\n }\n\n Etherchannel_PAGP_ping_check = { \n 'devices': {\n 'helper': {\n '1': {\n 'config': 'vlan 11\\n'\\\n 'spanning-tree mode rapid-pvst\\n'\\\n '!\\n'\\\n 'interface Vlan11\\n'\\\n 'ip address 1.2.1.1 255.255.255.0\\n'\\\n 'no shut\\n'\\\n '!\\n'\\\n 'default int GigabitEthernet1/0/1\\n'\\\n 'interface GigabitEthernet1/0/1\\n'\\\n 'switchport mode access\\n'\\\n 'switchport access vlan 11\\n'\\\n 'channel-group 10 mode desirable non-silent\\n'\\\n 'no shut\\n'\\\n '!\\n'\\\n 'default int GigabitEthernet1/0/2\\n'\\\n 'interface GigabitEthernet1/0/2\\n'\\\n 'switchport mode access\\n'\\\n 'switchport access vlan 11\\n'\\\n 'channel-group 10 mode desirable non-silent\\n'\\\n 'no shut\\n'\\\n '!\\n'\\\n 'default int GigabitEthernet1/0/3\\n'\\\n 'interface GigabitEthernet1/0/3\\n'\\\n 'switchport mode access\\n'\\\n 'switchport access vlan 11\\n'\\\n 'channel-group 10 mode desirable non-silent\\n'\\\n 'no shut\\n'\\\n '!\\n'\\\n 'loggin buffer 500000 debugging',\n 'unconfig': 'default interface GigabitEthernet1/0/1\\n'\\\n 'default interface GigabitEthernet1/0/2\\n'\\\n 'default interface GigabitEthernet1/0/3\\n'\\\n 'no vlan 11\\n'\\\n 'no interface vlan 11\\n'\\\n 'no interface Port-channel10\\n'\\\n 'no spanning-tree mode rapid-pvst',\n }\n },\n 'uut': {\n '1': {\n 'config': 'no vlan 11\\n'\\\n 'vlan 11\\n'\\\n 'spanning-tree mode rapid-pvst\\n'\\\n 'interface Vlan11\\n'\\\n 'ip address 1.2.1.2 255.255.255.0\\n'\\\n 'no shut\\n'\\\n '!\\n'\\\n 'default int GigabitEthernet1/0/15\\n'\\\n 'interface GigabitEthernet1/0/15\\n'\\\n 'switchport mode access\\n'\\\n 'switchport access vlan 11\\n'\\\n 'no shut\\n'\\\n '!\\n'\\\n 'default int GigabitEthernet1/0/16\\n'\\\n 'interface GigabitEthernet1/0/16\\n'\\\n 'switchport mode access\\n'\\\n 'switchport 
access vlan 11\\n'\\\n 'no shut\\n'\\\n '!\\n'\\\n 'default int GigabitEthernet1/0/17\\n'\\\n 'interface GigabitEthernet1/0/17\\n'\\\n 'switchport mode access\\n'\\\n 'switchport access vlan 11\\n'\\\n 'no shut\\n'\\\n '!\\n'\\\n 'loggin buffer 500000 debugging',\n 'unconfig': 'default interface GigabitEthernet1/0/15\\n'\\\n 'default interface GigabitEthernet1/0/16\\n'\\\n 'default interface GigabitEthernet1/0/17\\n'\\\n 'no interface vlan11\\n'\\\n 'no vlan 11\\n'\\\n 'no interface Port-channel10\\n'\\\n 'no spanning-tree mode rapid-pvst',\n 'sleep': 30\n }\n }\n }\n }\n\n L3_Etherchannel_ping_check = { \n 'devices': {\n 'helper': {\n '1': {\n 'config': 'default interface GigabitEthernet1/0/1\\n'\\\n 'interface GigabitEthernet1/0/1\\n'\\\n 'no switchport\\n'\\\n 'channel-group 10 mode desirable non-silent\\n'\\\n 'no shut\\n'\\\n '!\\n'\\\n 'default interface GigabitEthernet1/0/2\\n'\\\n 'interface GigabitEthernet1/0/2\\n'\\\n 'no switchport\\n'\\\n 'channel-group 10 mode desirable non-silent\\n'\\\n 'no shut\\n'\\\n '!\\n'\\\n 'default interface GigabitEthernet1/0/3\\n'\\\n 'interface GigabitEthernet1/0/3\\n'\\\n 'no switchport\\n'\\\n 'channel-group 10 mode desirable non-silent\\n'\\\n 'no shut\\n'\\\n '!\\n'\\\n 'interface Po10\\n'\\\n 'no switchport\\n'\\\n 'ip address 1.2.1.1 255.255.255.0\\n'\\\n 'no shut\\n'\\\n '!\\n'\\\n 'loggin buffer 500000 debugging',\n 'unconfig': 'default interface GigabitEthernet1/0/1\\n'\\\n 'default interface GigabitEthernet1/0/2\\n'\\\n 'default interface GigabitEthernet1/0/3\\n'\\\n 'no interface Port-channel10',\n }\n },\n 'uut': {\n '1': {\n 'config': 'spanning-tree mode rapid-pvst\\n'\\\n '!\\n'\\\n 'default interface Port-channel10\\n'\\\n 'interface Port-channel10\\n'\\\n 'no switchport\\n'\\\n 'no shut\\n'\\\n 'ip address 1.2.1.2 255.255.255.0\\n'\\\n '!\\n'\\\n 'default interface GigabitEthernet1/0/15\\n'\\\n 'interface GigabitEthernet1/0/15\\n'\\\n 'no switchport\\n'\\\n 'no shut\\n'\\\n '!\\n'\\\n 'default interface GigabitEthernet1/0/16\\n'\\\n 'interface GigabitEthernet1/0/16\\n'\\\n 'no switchport\\n'\\\n 'no shut\\n'\\\n '!\\n'\\\n 'default interface GigabitEthernet1/0/17\\n'\\\n 'interface GigabitEthernet1/0/17\\n'\\\n 'no switchport\\n'\\\n 'no shut\\n'\\\n '!\\n'\\\n 'loggin buffer 500000 debugging',\n 'unconfig': 'default interface GigabitEthernet1/0/15\\n'\\\n 'default interface GigabitEthernet1/0/16\\n'\\\n 'default interface GigabitEthernet1/0/17\\n'\\\n 'no interface Port-channel10\\n'\\\n 'no spanning-tree mode rapid-pvst',\n 'sleep': 30\n }\n }\n }\n }\n\n dot1x_base_uut = 'interface vlan 1\\n'\\\n '!name default\\n'\\\n '!\\n'\\\n 'interface vlan 20\\n'\\\n '!name nondefault\\n'\\\n '!\\n'\\\n 'dot1x system-auth-control\\n'\\\n '!\\n'\\\n 'interface GigabitEthernet1/0/15\\n'\\\n 'shut\\n'\\\n 'interface vlan1\\n'\\\n 'no ip address\\n'\\\n 'interface vlan20\\n'\\\n 'no ip address\\n'\\\n 'no shut\\n'\\\n 'policy-map type control subscriber dummy\\n'\\\n '!\\n'\\\n 'no policy-map type control subscriber dummy\\n'\\\n '!\\n'\\\n 'aaa new-model\\n'\\\n 'aaa session-id common\\n'\\\n 'aaa authentication dot1x default local\\n'\\\n 'aaa authorization network default local\\n'\\\n 'aaa local authentication default authorization default\\n'\\\n 'aaa authorization credential-download default local\\n'\\\n '!\\n'\\\n 'eap profile EAP-METH\\n'\\\n 'method md5\\n'\\\n '!\\n'\\\n 'interface GigabitEthernet1/0/15\\n'\\\n 'dot1x authenticator eap profile EAP-METH\\n'\\\n 'username switch password <PASSWORD>'\\\n 'username switch4 password 
<PASSWORD>'\n\n dot1x_base_helper = 'line con 0\\n'\\\n 'exec-timeout 0 0\\n'\\\n '!\\n'\\\n 'ip routing\\n'\\\n 'ip domain-name cisco\\n'\\\n 'cdp run\\n'\\\n 'dot1x system-auth-control\\n'\\\n 'dot1x supplicant force-multicast\\n'\\\n '!\\n'\\\n 'ip vrf ABCD\\n'\\\n '!\\n'\\\n 'eap profile EAP-METH\\n'\\\n 'method md5\\n'\\\n '!\\n'\\\n 'dot1x credentials switch1\\n'\\\n 'username switch\\n'\\\n 'password 0 <PASSWORD>'\\\n '!\\n'\\\n 'dot1x credentials switch2\\n'\\\n 'username switch2\\n'\\\n 'password 0 <PASSWORD>'\\\n '!\\n'\\\n 'dot1x credentials switch3\\n'\\\n 'username switch3\\n'\\\n 'password 0 <PASSWORD>'\\\n '!\\n'\\\n 'dot1x credentials switch4\\n'\\\n 'username switch4\\n'\\\n 'password 0 <PASSWORD>'\\\n '!\\n'\\\n 'dot1x credentials user1\\n'\\\n 'username user1\\n'\\\n 'password 0 <PASSWORD>'\\\n '!\\n'\\\n 'dot1x credentials user2\\n'\\\n 'username aaaaa\\n'\\\n 'password 0 <PASSWORD>'\\\n '!\\n'\\\n '!\\n'\\\n 'dot1x credentials wrong\\n'\\\n 'username wrong\\n'\\\n 'password 0 wrong'\n\n dot1xeapsessiondefaultvlan = {\n 'devices': {\n 'uut': {\n '1': {\n 'config': dot1x_base_uut,\n 'unconfig': 'no interface vlan20\\n'\\\n 'default interface vlan1\\n'\\\n 'no dot1x system-auth-control\\n'\\\n 'default interface GigabitEthernet1/0/15\\n'\\\n 'no aaa authentication dot1x default local\\n'\\\n 'no aaa authorization network default local\\n'\\\n 'no aaa authorization credential-download default local\\n'\\\n 'no username switch\\n'\\\n 'no username switch4\\n'\\\n 'no eap profile EAP-METH\\n'\\\n 'no parameter-map type webauth global\\n'\\\n 'no policy-map type control subscriber DOT1X\\n'\\\n 'no service-template DEFAULT_CRITICAL_VOICE_TEMPLATE\\n'\\\n 'no service-template DEFAULT_LINKSEC_POLICY_MUST_SECURE\\n'\\\n 'no service-template DEFAULT_LINKSEC_POLICY_SHOULD_SECURE\\n'\\\n 'no service-template webauth-global-inactive\\n'\\\n 'no aaa local authentication default authorization default\\n'\\\n 'no aaa new-model',\n },\n '2': {\n 'config': 'interface GigabitEthernet1/0/15\\n'\\\n 'dot1x authenticator eap profile EAP-METH\\n'\\\n 'policy-map type control subscriber DOT1X\\n'\\\n 'event session-started match-all\\n'\\\n '1 class always do-until-failure\\n'\\\n '1 authenticate using dot1x\\n'\\\n 'interface GigabitEthernet1/0/15\\n'\\\n 'no shut\\n'\\\n 'switchport\\n'\\\n 'switchport mode access\\n'\\\n 'switchport access vlan 1\\n'\\\n 'no access-session closed\\n'\\\n 'no switchport port-security\\n'\\\n 'access-session port-control auto\\n'\\\n 'access-session host-mode multi-auth\\n'\\\n 'dot1x pae authenticator\\n'\\\n 'dot1x authenticator eap profile EAP-METH\\n'\\\n 'service-policy type control subscriber DOT1X\\n'\\\n 'interface vlan1\\n'\\\n '!no switchport\\n'\\\n 'ip address 1.1.1.2 255.255.255.0\\n'\\\n 'no shut',\n 'unconfig': 'default interface GigabitEthernet1/0/15\\n'\\\n 'no policy-map type control subscriber DOT1X\\n'\\\n 'default interface vlan 1',\n 'sleep': 45\n }\n },\n 'helper': {\n '1': {\n 'config': dot1x_base_helper,\n 'unconfig': 'no dot1x system-auth-control\\n'\\\n 'no dot1x supplicant force-multicast\\n'\\\n '!\\n'\\\n 'no eap profile EAP-METH\\n'\\\n '!\\n'\\\n 'no dot1x credentials switch\\n'\\\n '!\\n'\\\n 'default interface GigabitEthernet1/0/1\\n'\\\n 'no dot1x credentials switch1\\n'\\\n 'no dot1x credentials switch2\\n'\\\n 'no dot1x credentials switch3\\n'\\\n 'no dot1x credentials switch4\\n'\\\n 'no dot1x credentials user1\\n'\\\n 'no dot1x credentials user2\\n'\\\n 'no ip vrf ABCD\\n'\\\n 'no ip domain-name cisco\\n'\\\n 'no dot1x 
credentials wrong',\n },\n '2': {\n 'config': 'interface GigabitEthernet1/0/1\\n'\\\n 'no switchport\\n'\\\n 'ip address 1.1.1.1 255.255.255.0\\n'\\\n 'dot1x pae supplicant\\n'\\\n 'dot1x credentials switch1\\n'\\\n 'dot1x supplicant eap profile EAP-METH\\n'\\\n 'no shut',\n 'unconfig': 'default interface GigabitEthernet1/0/1',\n }\n }\n }\n }\n\n dot1xeapsessionWrongUser = {\n 'devices': {\n 'uut': {\n 'peer': 'helper',\n 'name': 'GigabitEthernet1/0/15',\n 'peer_intf': 'GigabitEthernet1/0/1',\n '1': {\n 'config': dot1x_base_uut,\n 'unconfig': 'no interface vlan20\\n'\\\n 'default interface vlan1\\n'\\\n 'no dot1x system-auth-control\\n'\\\n 'default interface {name}\\n'\\\n 'no aaa authentication dot1x default local\\n'\\\n 'no aaa authorization network default local\\n'\\\n 'no aaa authorization credential-download default local\\n'\\\n 'no username switch\\n'\\\n 'no username switch4\\n'\\\n 'no eap profile EAP-METH\\n'\\\n 'no parameter-map type webauth global\\n'\\\n 'no policy-map type control subscriber DOT1X\\n'\\\n 'no service-template DEFAULT_CRITICAL_VOICE_TEMPLATE\\n'\\\n 'no service-template DEFAULT_LINKSEC_POLICY_MUST_SECURE\\n'\\\n 'no service-template DEFAULT_LINKSEC_POLICY_SHOULD_SECURE\\n'\\\n 'no service-template webauth-global-inactive\\n'\\\n 'no aaa local authentication default authorization default\\n'\\\n 'no aaa new-model',\n },\n '2': {\n 'config': 'interface {name}\\n'\\\n 'dot1x authenticator eap profile EAP-METH\\n'\\\n 'policy-map type control subscriber DOT1X\\n'\\\n 'event session-started match-all\\n'\\\n '1 class always do-until-failure\\n'\\\n '1 authenticate using dot1x\\n'\\\n 'interface {name}\\n'\\\n 'no shut\\n'\\\n 'switchport\\n'\\\n 'switchport mode access\\n'\\\n 'switchport access vlan 20\\n'\\\n 'access-session closed\\n'\\\n 'no switchport port-security\\n'\\\n 'access-session port-control auto\\n'\\\n 'access-session host-mode single-host\\n'\\\n 'dot1x pae authenticator\\n'\\\n 'dot1x authenticator eap profile EAP-METH\\n'\\\n 'service-policy type control subscriber DOT1X',\n 'unconfig': 'default interface {name}\\n'\\\n 'no policy-map type control subscriber DOT1X\\n'\\\n 'no vlan 20\\n'\\\n 'no interface vlan 20',\n 'sleep': 45\n }\n },\n 'helper': {\n '1': {\n 'config': dot1x_base_helper,\n 'unconfig': 'no dot1x system-auth-control\\n'\\\n 'no dot1x supplicant force-multicast\\n'\\\n '!\\n'\\\n 'no eap profile EAP-METH\\n'\\\n '!\\n'\\\n 'no dot1x credentials switch\\n'\\\n '!\\n'\\\n 'default interface {peer_intf}\\n'\\\n 'no dot1x credentials switch1\\n'\\\n 'no dot1x credentials switch2\\n'\\\n 'no dot1x credentials switch3\\n'\\\n 'no dot1x credentials switch4\\n'\\\n 'no dot1x credentials user1\\n'\\\n 'no dot1x credentials user2\\n'\\\n 'no ip vrf ABCD\\n'\\\n 'no ip domain-name cisco\\n'\\\n 'no dot1x credentials wrong',\n },\n '2': {\n 'config': 'interface {peer_intf}\\n'\\\n 'no switchport\\n'\\\n 'ip address 1.1.1.1 255.255.255.0\\n'\\\n 'dot1x pae supplicant\\n'\\\n 'dot1x credentials switch1\\n'\\\n 'dot1x supplicant eap profile EAP-METH\\n'\\\n 'no shut',\n 'unconfig': 'default interface {peer_intf}',\n }\n }\n }\n }\n \n", "id": "12579771", "language": "Python", "matching_score": 2.2090089321136475, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/xe_sanity/configurations.py" }, { "content": "''' \nInterface Genie Ops Object for IOSXE - CLI.\n'''\n\nimport re\n\n# genie.libs\nfrom ..interface import Interface as CommonInterface\n\n# iosxe show_interface\nfrom genie.libs.parser.iosxe.show_interface 
import ShowInterfacesSwitchport\n\n\nclass Interface(CommonInterface):\n '''Interface Genie Ops Object'''\n\n def learn(self, interface='', address_family='', custom=None):\n '''Learn Interface Ops'''\n \n # ======================================================================\n # common keys\n # ======================================================================\n super().learn(custom=custom, interface=interface, address_family=address_family)\n if hasattr(self, 'info'):\n for intf in self.info:\n if 'switchport_enable' in self.info[intf] and self.info[intf]['switchport_enable']== False:\n self.info[intf].pop('switchport_enable')\n\n # ======================================================================\n # switchport related\n # ======================================================================\n # Global source\n src = '[(?P<interface>.*)]'\n dest = 'info[(?P<interface>.*)]'\n\n # vlan_id\n self.add_leaf(cmd=ShowInterfacesSwitchport,\n src=src + '[access_vlan]',\n dest=dest + '[vlan_id]',\n interface=interface)\n\n req_keys = ['access_vlan', 'trunk_vlans', 'switchport_mode',\n 'switchport_enable', 'operational_mode']\n\n for key in req_keys:\n self.add_leaf(cmd=ShowInterfacesSwitchport,\n src=src + '[{}]'.format(key),\n dest=dest + '[{}]'.format(key),\n interface=interface)\n\n # create overwrite keys for port_channel\n self.add_leaf(cmd=ShowInterfacesSwitchport,\n src=src + '[port_channel]',\n dest=dest + '[overwrite_port_channel]',\n interface=interface)\n\n # ======================================================================\n # encapsulation\n # ======================================================================\n # Global source\n dest = 'info[(?P<interface>.*)][encapsulation]'\n\n # native_vlan\n self.add_leaf(cmd=ShowInterfacesSwitchport,\n src='[(?P<interface>.*)][encapsulation][native_vlan]',\n dest=dest + '[native_vlan]',\n interface=interface)\n # make\n self.make(final_call=True)\n\n # calculate the switchport switch_enable\n if hasattr(self, 'info'):\n for intf in self.info:\n # switchport_enable is False if not in the intf dict\n if 'switchport_enable' not in self.info[intf]:\n self.info[intf]['switchport_enable'] = False\n\n # switchport_enable is False when intf switchport operational_mode is down\n if 'operational_mode' in self.info[intf]:\n\n if self.info[intf]['operational_mode'] == 'down':\n self.info[intf]['switchport_enable'] = False\n\n # delete operational_mode which ops does not need\n del(self.info[intf]['operational_mode'])\n else:\n self.info[intf]['switchport_enable'] = False\n\n # overwrite port_channel from show interface switchport\n if 'overwrite_port_channel' in self.info[intf]:\n\n self.info[intf]['port_channel'].update(self.info[intf]['overwrite_port_channel'])\n\n # delete overwrite_port_channel which ops does not need\n del(self.info[intf]['overwrite_port_channel'])", "id": "6825478", "language": "Python", "matching_score": 2.0268640518188477, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/interface/iosxe/c3850/interface.py" }, { "content": "''' \nPim Genie Ops Object for IOSXE - CLI\n'''\n# super class\nfrom genie.libs.ops.pim.pim import Pim as SuperPim\n\nfrom genie.libs.parser.iosxe.show_mcast import ShowIpMroute,\\\n ShowIpv6Mroute\n\n# iosxe show_vrf\nfrom genie.libs.parser.iosxe.show_vrf import ShowVrfDetail\n\n\nclass Pim(SuperPim):\n '''Pim Genie Ops Object'''\n\n def transfer_to_bool(self, item):\n if item == 'enabled':\n return True\n else:\n return False\n\n def keys(self, item):\n if isinstance(item, 
dict):\n return sorted(list(item.keys()))\n\n def check_exists(self, item):\n if 'bidir' in item:\n return True\n else:\n return False\n\n\n def learn(self):\n '''Learn Pim Ops'''\n\n # get vrf list \n self.add_leaf(cmd=ShowVrfDetail,\n src='',\n dest='list_of_vrfs',\n action=self.keys)\n\n self.make()\n\n vrf_list = ['default']\n try:\n vrf_list.extend(self.list_of_vrfs)\n except:\n pass\n else: \n # delete the list_of_vrfs in the info table\n del self.list_of_vrfs\n\n # loop for vrfs\n for vrf in sorted(vrf_list):\n\n # skip the vrf when it is mgmt-vrf\n if vrf == 'Mgmt-vrf':\n continue\n\n # create kwargs\n vrf_name = '' if vrf == 'default' else vrf\n \n ########################################################################\n # info\n ########################################################################\n\n # --------- topology_tree_info --------------\n # topology_tree_info\n # -- group, , source_address, is_rpt, expiration\n # -- incoming_interface, rpf_neighbor, rp_address\n # -- msdp_learned, rp_bit\n # -- mode is not supported on IOSXE\n \n src = '[vrf][(?P<vrf>.*)][address_family][(?P<af>.*)][multicast_group]'\\\n '[(?P<group>.*)][source_address][(?P<src>.*)]'\n dest = 'info[vrf][(?P<vrf>.*)][address_family][(?P<af>.*)][multicast_group]'\\\n '[(?P<group>.*)][source_address][(?P<src>.*)]'\n\n # up_time\n for cmd in [ShowIpMroute, ShowIpv6Mroute]:\n self.add_leaf(cmd=cmd, src=src, dest=dest, vrf=vrf_name)\n\n # make to get the group and source address\n self.make()\n\n if hasattr(self, 'info'):\n if 'vrf' in self.info:\n for vrf in self.info['vrf']:\n if 'address_family' not in self.info['vrf'][vrf]:\n continue\n for af in self.info['vrf'][vrf]['address_family']:\n\n if 'topology_tree_info' not in \\\n self.info['vrf'][vrf]['address_family'][af]:\n\n self.info['vrf'][vrf]['address_family'][af]['topology_tree_info'] = {}\n\n if 'multicast_group' not in self.info['vrf'][vrf]['address_family'][af]:\n continue\n\n for group in self.info['vrf'][vrf]['address_family'][af]['multicast_group']:\n\n for source in self.info['vrf'][vrf]['address_family']\\\n [af]['multicast_group'][group]['source_address']:\n\n sub_dict = self.info['vrf'][vrf]['address_family']\\\n [af]['multicast_group'][group]['source_address'][source]\n\n # create topo_tree\n is_rpt = True if source == '*' else False\n tp_tree = '{gr} {sr} {rpt}'.format(gr=group, sr=source, rpt=is_rpt)\n\n if tp_tree not in self.info['vrf'][vrf]['address_family'][af]['topology_tree_info']:\n self.info['vrf'][vrf]['address_family'][af]['topology_tree_info'][tp_tree] = {}\n\n tp_dict = self.info['vrf'][vrf]['address_family'][af]['topology_tree_info'][tp_tree]\n\n # group\n tp_dict['group'] = group\n # source_address\n tp_dict['source_address'] = source\n # is_rpt\n tp_dict['is_rpt'] = is_rpt\n\n # expiration\n try:\n tp_dict['expiration'] = sub_dict['expire']\n except:\n pass\n # incoming_interface\n try:\n tp_dict['incoming_interface'] = list(sub_dict['incoming_interface_list'].keys())[0]\n except:\n pass\n # rp_address\n try:\n tp_dict['rp_address'] = sub_dict['rp']\n except:\n pass\n # rpf_neighbor\n try:\n tp_dict['rpf_neighbor'] = sub_dict['rpf_nbr']\n except:\n pass\n # up_time\n try:\n tp_dict['up_time'] = sub_dict['uptime']\n except:\n pass\n # msdp_learned\n try:\n tp_dict['msdp_learned'] = sub_dict['msdp_learned']\n except:\n pass\n # rp_bit\n try:\n tp_dict['rp_bit'] = sub_dict['rp_bit']\n except:\n pass\n # outgoing_interface\n try:\n for intf in sub_dict['outgoing_interface_list']:\n if 'outgoing_interface' not in tp_dict:\n 
tp_dict['outgoing_interface'] = {}\n if intf not in tp_dict['outgoing_interface']:\n tp_dict['outgoing_interface'][intf] = {}\n tp_dict['outgoing_interface'][intf]['up_time'] = \\\n sub_dict['outgoing_interface_list'][intf]['uptime']\n except:\n pass\n\n # delete unused keys\n try:\n del(self.info['vrf'][vrf]['address_family'][af]\\\n ['multicast_group'])\n except:\n pass\n\n\n # --------- rp --------------\n\n # static_rp\n src = '[vrf][(?P<vrf>.*)][address_family][(?P<af>.*)][rp][static_rp][(?P<static>.*)]'\n dest = 'info[vrf][(?P<vrf>.*)][address_family][(?P<af>.*)][rp][static_rp][(?P<static>.*)]'\n\n static_keys = ['[sm][policy_name]', '[sm][override]', '[bidir]']\n\n for key in static_keys:\n self.add_leaf(cmd='show ip pim vrf {vrf} rp mapping'.format(vrf=vrf),\n src=src + key,\n dest=dest + key,\n vrf=vrf_name)\n\n # autorp is not supported on IOSXE\n # listener is not supported on IOSXE\n\n\n # bsr\n # bsr_candidate\n # -- address, hash_mask_length, priority\n # -- if_name, accept_rp_acl not supported on IOSXE\n # bsr\n # -- address, hash_mask_length, priority\n # -- up_time, expires\n #\n # election_state is not supported on IOSXE\n #\n # bsr_next_bootstrap, rp_candidate_next_advertisement\n src = '[vrf][(?P<vrf>.*)][address_family][(?P<af>.*)][rp][bsr]'\n dest = 'info[vrf][(?P<vrf>.*)][address_family][(?P<af>.*)][rp][bsr]'\n\n bsr_keys = ['[bsr_candidate][address]', '[bsr_candidate][hash_mask_length]',\n '[bsr_candidate][priority]', '[bsr_next_bootstrap]',\n '[bsr][address]', '[bsr][hash_mask_length]', '[bsr][priority]',\n '[bsr][up_time]', '[bsr][expires]']\n\n for cmd in ['show ip pim vrf {vrf} bsr-router'.format(vrf=vrf), 'show ipv6 pim vrf {vrf} bsr election'.format(vrf=vrf)]:\n for key in bsr_keys:\n self.add_leaf(cmd=cmd,\n src=src + key,\n dest=dest + key,\n vrf=vrf_name)\n\n # rp_candidate_next_advertisement\n self.add_leaf(cmd='show ipv6 pim vrf {vrf} bsr candidate-rp'.format(vrf=vrf),\n src=src + '[rp_candidate_next_advertisement]',\n dest=dest + '[rp_candidate_next_advertisement]',\n vrf=vrf_name)\n\n # bsr\n # rp\n # -- rp_address, up_time, group_policy\n src = '[vrf][(?P<vrf>.*)][address_family][(?P<af>.*)][rp][bsr][rp]'\n dest = 'info[vrf][(?P<vrf>.*)][address_family][(?P<af>.*)][rp][bsr][rp]'\n self.add_leaf(cmd='show ip pim vrf {vrf} rp mapping'.format(vrf=vrf),\n src=src,\n dest=dest,\n vrf=vrf_name)\n\n # bsr\n # bsr_rp_candidate_interface|bsr_rp_candidate_address:\n # -- address, interface, priority\n # -- mode, interval\n # -- policy, route_map, prefix_list are not supported on IOSXE\n\n src = '[vrf][(?P<vrf>.*)][address_family][(?P<af>.*)][rp][bsr][(?P<bsr>^(?![bsr]).*)]'\n dest = 'info[vrf][(?P<vrf>.*)][address_family][(?P<af>.*)][rp][bsr][(?P<bsr>^(?![bsr]).*)]'\n bsr_keys = ['[address]', '[interface]', '[priority]',\n '[mode]', '[interval]']\n\n for cmd in ['show ip pim vrf {vrf} bsr-router'.format(vrf=vrf), 'show ipv6 pim vrf {vrf} bsr candidate-rp'.format(vrf=vrf)]:\n for key in bsr_keys:\n self.add_leaf(cmd=cmd,\n src=src + key,\n dest=dest + key,\n vrf=vrf_name)\n\n # rp_list\n # -- address, mode, info_source_address\n # -- info_source_type, up_time, expiration\n src = '[vrf][(?P<vrf>.*)][address_family][(?P<af>.*)][rp][rp_list][(?P<rp_list>.*)]'\n dest = 'info[vrf][(?P<vrf>.*)][address_family][(?P<af>.*)][rp][rp_list][(?P<rp_list>.*)]'\n\n rp_list_keys = ['[address]', '[mode]', '[info_source_address]',\n '[info_source_type]', '[up_time]', '[expiration]']\n\n for key in rp_list_keys:\n self.add_leaf(cmd='show ip pim vrf {vrf} rp 
mapping'.format(vrf=vrf),\n src=src + key,\n dest=dest + key,\n vrf=vrf_name)\n\n # rp_mappings\n # -- group, rp_address, protocol\n # -- up_time, expiration\n src = '[vrf][(?P<vrf>.*)][address_family][(?P<af>.*)][rp][rp_mappings][(?P<rp_map>.*)]'\n dest = 'info[vrf][(?P<vrf>.*)][address_family][(?P<af>.*)][rp][rp_mappings][(?P<rp_map>.*)]'\n\n rp_list_keys = ['[group]', '[rp_address]', '[protocol]',\n '[up_time]', '[expiration]']\n\n for key in rp_list_keys:\n self.add_leaf(cmd='show ip pim vrf {vrf} rp mapping'.format(vrf=vrf),\n src=src + key,\n dest=dest + key,\n vrf=vrf_name)\n\n\n # bidir\n # df_election is not supported on IOSXE\n #\n # interface_df_election\n # -- address, interface_name, df_address\n # -- interface_state is not supported on IOSXE\n src = '[vrf][(?P<vrf>.*)][address_family][(?P<af>.*)]'\\\n '[rp][bidir][interface_df_election][(?P<df_key>.*)]'\n dest = 'info[vrf][(?P<vrf>.*)][address_family][(?P<af>.*)]'\\\n '[rp][bidir][interface_df_election][(?P<df_key>.*)]'\n\n df_election_keys = ['[address]', '[interface_name]', '[df_address]']\n\n for key in df_election_keys:\n self.add_leaf(cmd='show ip pim vrf {vrf} interface df'.format(vrf=vrf),\n src=src + key,\n dest=dest + key,\n vrf=vrf_name) \n\n # asm\n # anycast_rp, spt_switch, accept_register, register_source are not supported on IOSXE\n # spt_switch, sg_expiry_timer are not supported on IOSXE\n # log_neighbor_changes not supported on IOSXE\n\n\n # --------- interfaces --------------\n src = '[vrf][(?P<vrf>.*)][interfaces][(?P<intf>.*)][address_family][(?P<af>.*)]'\n dest = 'info[vrf][(?P<vrf>.*)][interfaces][(?P<intf>.*)][address_family][(?P<af>.*)]'\n\n # bfd, , hello_interval, jp_interval\n # address, dr_address, oper_status, bsr_border\n # dm, neighbor_filter, sm-passive\n # propagation_delay, override_interval, hello_expiration, bidir are not supported on IOSXE\n\n intf_keys = ['[bfd][enable]', '[hello_interval]', '[oper_status]',\n '[jp_interval]', '[address]', '[bsr_border]',\n '[neighbor_filter]', '[dm]', '[sm]']\n\n for key in intf_keys:\n self.add_leaf(cmd='show ip pim vrf {vrf} interface detail'.format(vrf=vrf),\n src=src + key,\n dest=dest + key,\n vrf=vrf_name)\n\n # dr_priority\n self.add_leaf(cmd='show ip pim vrf {vrf} interface'.format(vrf=vrf),\n src=src + '[dr_priority]',\n dest=dest + '[dr_priority]',\n vrf=vrf_name)\n\n # mode for dm under ssm\n self.add_leaf(cmd='show ip pim vrf {vrf} interface'.format(vrf=vrf),\n src=src + '[mode]',\n dest=dest + '[mode]',\n vrf=vrf_name)\n\n # ipv6 interface\n src = '[vrf][(?P<vrf>.*)][interface][(?P<intf>.*)]'\n dest = 'info[vrf][(?P<vrf>.*)][interfaces][(?P<intf>.*)][address_family][ipv6]'\n\n intf_keys = ['[hello_interval]', '[address]', '[dr_priority]']\n\n for key in intf_keys:\n self.add_leaf(cmd='show ipv6 pim vrf {vrf} interface'.format(vrf=vrf),\n src=src + key,\n dest=dest + key,\n vrf=vrf_name)\n\n\n # --------- neighbors --------------\n src = '[vrf][(?P<vrf>.*)][interfaces][(?P<intf>.*)]\\\n [address_family][(?P<af>.*)][neighbors][(?P<nei>.*)]'\n dest = 'info[vrf][(?P<vrf>.*)][interfaces][(?P<intf>.*)]\\\n [address_family][(?P<af>.*)][neighbors][(?P<nei>.*)]'\n \n # expiration, dr_priority, up_time\n # interface, bidir_capable\n # bfd_status, gen_id are not supported on IOSXE\n nei_keys = ['[expiration]', '[dr_priority]',\n '[up_time]', '[interface]', '[bidir_capable]']\n\n for cmd in ['show ip pim neighbor', 'show ipv6 pim neighbor detail']:\n for key in nei_keys:\n self.add_leaf(cmd=cmd,\n src=src + key,\n dest=dest + key,\n vrf=vrf_name)\n\n # 
make to write in cache\n self.make(final_call=True)\n\n # --------- dm --------------\n # --------- bidir -----------\n if hasattr(self, 'info'):\n if 'vrf' in self.info:\n for vrf in self.info['vrf']:\n if 'interfaces' not in self.info['vrf'][vrf]:\n continue\n for intf in self.info['vrf'][vrf]['interfaces']:\n if 'address_family' not in self.info['vrf'][vrf]\\\n ['interfaces'][intf]:\n continue\n for af in self.info['vrf'][vrf]\\\n ['interfaces'][intf]['address_family']:\n if 'mode' in self.info['vrf'][vrf]\\\n ['interfaces'][intf]['address_family'] and \\\n 'dense' in self.info['vrf'][vrf]\\\n ['interfaces'][intf]['address_family']['mode']:\n try:\n self.info['vrf'][vrf]['address_family'][af]['dm'] = {}\n except:\n pass\n\n try:\n del(self.info['vrf'][vrf]['address_family']\\\n ['interfaces'][intf]['address_family']['mode'])\n except:\n pass\n\n # bidir\n if af in self.info['vrf'][vrf]['address_family'] and \\\n 'rp' in self.info['vrf'][vrf]['address_family'][af] and \\\n 'bidir' in self.info['vrf'][vrf]['address_family'][af]['rp']:\n try:\n self.info['vrf'][vrf]['address_family'][af]['bidir'] = {}\n except:\n pass\n\n \n\n", "id": "6267494", "language": "Python", "matching_score": 4.50598669052124, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/pim/iosxe/pim.py" }, { "content": "# Genie\r\nfrom genie.ops.base import Base\r\n\r\n\r\nclass Pim(Base):\r\n exclude = ['expiration',\r\n 'hello_expiration',\r\n 'hello_interval',\r\n 'up_time',\r\n 'bsr_next_bootstrap',\r\n 'expires',\r\n 'rp_candidate_next_advertisement',\r\n 'genid',\r\n 'df_address',\r\n 'gen_id',\r\n 'incoming_interface',\r\n 'rpf_neighbor',\r\n 'dr_address',\r\n 'neighbors']", "id": "9912028", "language": "Python", "matching_score": 1.2829346656799316, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/pim/pim.py" }, { "content": "# Genie\r\nfrom genie.ops.base import Base\r\n\r\n\r\nclass Mcast(Base):\r\n exclude = ['expire',\r\n 'uptime',\r\n 'flags',\r\n 'incoming_interface_list']", "id": "10528309", "language": "Python", "matching_score": 1.1844356060028076, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/mcast/mcast.py" }, { "content": "'''\nMcast Genie Ops Object for IOS - CLI.\n'''\nfrom ..iosxe.mcast import Mcast as McastXE\n\nclass Mcast(McastXE):\n pass", "id": "11379529", "language": "Python", "matching_score": 0.6067762970924377, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/mcast/ios/mcast.py" }, { "content": "'''\nMld Genie Ops Object for IOS - CLI.\n'''\nfrom ..iosxe.mld import Mld as MldXE\n\nclass Mld(MldXE):\n pass", "id": "1148029", "language": "Python", "matching_score": 0.7592188119888306, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/mld/ios/mld.py" }, { "content": "from .mld import *\nfrom .mld_group import *\nfrom .ssm import *\n", "id": "2095765", "language": "Python", "matching_score": 1.279608130455017, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/mld/__init__.py" }, { "content": "#!/usr/bin/env python\n\n# Python\nimport unittest\nfrom unittest.mock import Mock\n\n# Genie\nfrom genie.conf.tests import TestCase\nfrom genie.conf import Genie\nfrom genie.conf.base import Testbed, Device, Interface\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning\n\n# Mld\nfrom genie.libs.conf.mld import Mld\nfrom genie.libs.conf.mld.ssm import Ssm\nfrom genie.libs.conf.mld.mld_group import MldGroup\n\n# Vrf\nfrom genie.libs.conf.vrf import Vrf\n\n# Interface\nfrom genie.libs.conf.interface 
import IPv6Addr\nfrom genie.libs.conf.interface import Interface\n\nclass test_mld(TestCase):\n\n def setUp(self):\n \n # Set Genie Tb\n testbed = Testbed()\n Genie.testbed = testbed\n \n # Device\n self.dev1 = Device(name='PE1', testbed=testbed, os='iosxe')\n \n # Mld object\n self.mld = Mld()\n\n def test_mcast_config(self):\n\n # For failures\n self.maxDiff = None\n\n # VRF configuration\n vrf1 = Vrf('VRF1')\n self.mld.device_attr[self.dev1].vrf_attr[vrf1].global_max_groups = 63999\n mld1 = self.mld.device_attr[self.dev1].vrf_attr[vrf1]\n ssm1 = Ssm(device=self.dev1)\n ssm1.ssm_group_policy = 'test'\n ssm1.ssm_source_addr = '2001:DB8:1:1::1'\n mld1.add_ssm(ssm1)\n\n # Interface configuration\n intf1_obj = Interface(device=self.dev1, name='GigabitEthernet2')\n intf1_obj.vrf = vrf1\n intf1 = intf1_obj.name\n self.mld.device_attr[self.dev1].vrf_attr[vrf1].interface_attr[intf1]\\\n .enable = True\n self.mld.device_attr[self.dev1].vrf_attr[vrf1].interface_attr[intf1]\\\n .group_policy = 'test2'\n self.mld.device_attr[self.dev1].vrf_attr[vrf1].interface_attr[intf1]\\\n .max_groups = 6400\n self.mld.device_attr[self.dev1].vrf_attr[vrf1].interface_attr[intf1]\\\n .query_interval = 366\n self.mld.device_attr[self.dev1].vrf_attr[vrf1].interface_attr[intf1]\\\n .query_max_response_time = 16\n\n # join and static groups configuration\n mld_intf1 = self.mld.device_attr[self.dev1].vrf_attr[vrf1].interface_attr[intf1]\n join_group1 = MldGroup(device=self.dev1)\n join_group1.join_group = 'fc00:db20:35b:7399::5'\n join_group2 = MldGroup(device=self.dev1)\n join_group2.join_group = 'fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b'\n join_group2.join_group_source_addr = '2001:DB8:2:2::2'\n static_group1 = MldGroup(device=self.dev1)\n static_group1.static_group = 'fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b'\n static_group2 = MldGroup(device=self.dev1)\n static_group2.static_group = 'fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b'\n static_group2.static_group_source_addr = '2001:DB8:3:3::3'\n mld_intf1.add_groups(join_group1)\n mld_intf1.add_groups(join_group2)\n mld_intf1.add_groups(static_group1)\n mld_intf1.add_groups(static_group2)\n\n \n vrf2 = Vrf('default')\n self.mld.device_attr[self.dev1].vrf_attr[vrf2].global_max_groups = 63999\n mld2 = self.mld.device_attr[self.dev1].vrf_attr[vrf2]\n ssm1 = Ssm(device=self.dev1)\n ssm1.ssm_group_policy = 'test'\n ssm1.ssm_source_addr = '2001:DB8:1:1::1'\n mld2.add_ssm(ssm1)\n\n # Interface configuration\n intf2_obj = Interface(device=self.dev1, name='GigabitEthernet1')\n ipv6a = IPv6Addr(device=self.dev1)\n ipv6a.ipv6 = '2001:DB8:1:1::1'\n ipv6a.ipv6_prefix_length = '64'\n intf2_obj.add_ipv6addr(ipv6a)\n\n intf2 = intf2_obj.name\n self.mld.device_attr[self.dev1].vrf_attr[vrf2].interface_attr[intf2]\\\n .enable = True\n\n # join and static groups configuration\n mld_intf1 = self.mld.device_attr[self.dev1].vrf_attr[vrf2].interface_attr[intf2]\n join_group = MldGroup(device=self.dev1)\n join_group.join_group = 'fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b'\n join_group.join_group_source_addr = '2001:DB8:2:2::2'\n static_group = MldGroup(device=self.dev1)\n static_group.static_group = 'fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b'\n mld_intf1.add_groups(join_group)\n mld_intf1.add_groups(static_group)\n\n # Build interface config for none-default vrfs\n intf_cfgs = intf1_obj.build_config(apply=False)\n self.assertMultiLineEqual(\n str(intf_cfgs),\n '\\n'.join([\n 'interface GigabitEthernet2',\n ' vrf forwarding VRF1',\n ' exit',\n ]))\n\n intf_cfgs = intf2_obj.build_config(apply=False)\n 
self.assertMultiLineEqual(\n str(intf_cfgs),\n '\\n'.join([\n 'interface GigabitEthernet1',\n ' ipv6 address 2001:db8:1:1::1/64',\n ' exit',\n ]))\n\n\n\n # Build mld configuration\n cfgs = self.mld.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'ipv6 mld state-limit 63999',\n 'ipv6 mld ssm-map enable',\n 'ipv6 mld ssm-map static test 2001:DB8:1:1::1',\n 'interface GigabitEthernet1',\n ' ipv6 mld router',\n ' ipv6 mld join-group FFfd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b 2001:DB8:2:2::2',\n ' ipv6 mld static-group FFfc00:db20:35b:7399::5',\n ' exit',\n 'ipv6 mld vrf VRF1 state-limit 63999',\n 'ipv6 mld vrf VRF1 ssm-map enable',\n 'ipv6 mld vrf VRF1 ssm-map static test 2001:DB8:1:1::1',\n 'interface GigabitEthernet2',\n ' ipv6 mld router',\n ' ipv6 mld access-group test2',\n ' ipv6 mld limit 6400',\n ' ipv6 mld query-interval 366',\n ' ipv6 mld query-max-response-time 16',\n ' ipv6 mld join-group FFfd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b 2001:DB8:2:2::2',\n ' ipv6 mld join-group FFfd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b',\n ' ipv6 mld static-group FFfd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b 2001:DB8:3:3::3',\n ' ipv6 mld static-group FFfc00:db20:35b:7399::5',\n ' exit',\n ]))\n\n # Build unconfig\n cfgs = self.mld.build_unconfig(apply=False)\n\n # Check config correctly unconfigured\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'no ipv6 mld state-limit 63999',\n 'no ipv6 mld ssm-map enable',\n 'no ipv6 mld ssm-map static test 2001:DB8:1:1::1',\n 'interface GigabitEthernet1',\n ' no ipv6 mld router',\n ' no ipv6 mld join-group FFfd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b 2001:DB8:2:2::2',\n ' no ipv6 mld static-group FFfc00:db20:35b:7399::5',\n ' exit',\n 'no ipv6 mld vrf VRF1 state-limit 63999',\n 'no ipv6 mld vrf VRF1 ssm-map enable',\n 'no ipv6 mld vrf VRF1 ssm-map static test 2001:DB8:1:1::1',\n 'interface GigabitEthernet2',\n ' no ipv6 mld router',\n ' no ipv6 mld access-group test2',\n ' no ipv6 mld limit 6400',\n ' no ipv6 mld query-interval 366',\n ' no ipv6 mld query-max-response-time 16',\n ' no ipv6 mld join-group FFfd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b 2001:DB8:2:2::2',\n ' no ipv6 mld join-group FFfd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b',\n ' no ipv6 mld static-group FFfd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b 2001:DB8:3:3::3',\n ' no ipv6 mld static-group FFfc00:db20:35b:7399::5',\n ' exit',\n ]))\n\n # Build unconfig with attribute\n cfgs = self.mld.build_unconfig(apply=False,\n attributes={'device_attr': {\n self.dev1: {\n 'vrf_attr': {\n vrf1: {\n 'global_max_groups': None,\n 'ssm': {ssm1: None}},\n vrf2: {\n 'interface_attr': {\n 'GigabitEthernet1': {\n 'enable': True\n }\n }\n }}}}})\n\n # Check config correctly unconfigured\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'interface GigabitEthernet1',\n ' no ipv6 mld router',\n ' exit',\n 'no ipv6 mld vrf VRF1 state-limit 63999',\n 'no ipv6 mld vrf VRF1 ssm-map enable',\n 'no ipv6 mld vrf VRF1 ssm-map static test 2001:DB8:1:1::1',\n ]))\n\n\nif __name__ == '__main__':\n unittest.main()\n", "id": "8728535", "language": "Python", "matching_score": 3.3139078617095947, "max_stars_count": 0, "path": "pkgs/conf-pkg/src/genie/libs/conf/mld/iosxe/tests/test_mld.py" }, { "content": "#!/usr/bin/env python\n\n# Python\nimport unittest\nfrom unittest.mock import Mock\n\n# Genie\nfrom genie.conf.tests import TestCase\nfrom genie.conf import Genie\nfrom genie.conf.base import Testbed, Device, Link, 
Interface\nfrom genie.conf.base.attributes import UnsupportedAttributeWarning\n\n# PIM\nfrom genie.libs.conf.vrf import Vrf\nfrom genie.libs.conf.pim import Pim\n\n\nclass test_pim(TestCase):\n\n def setUp(self):\n \n # Set Genie Tb\n testbed = Testbed()\n Genie.testbed = testbed\n \n # Device\n self.dev1 = Device(name='PE1', testbed=testbed, os='iosxr')\n\n def test_pim_full_config(self):\n\n # For failures\n self.maxDiff = None\n\n # shorten the line\n dev1 = self.dev1\n \n # Pim object\n pim = Pim()\n dev1.add_feature(pim)\n\n # VRF configuration\n vrf1 = Vrf('default')\n pim.device_attr[self.dev1].vrf_attr[vrf1]\n vrf2 = Vrf('red')\n pim.device_attr[self.dev1].vrf_attr[vrf2]\n\n for vrf in [vrf1, vrf2]:\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n rp_address = '2.2.2.2'\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n interface_attr['Loopback0']\n pim.device_attr[dev1].vrf_attr[vrf].address_family_attr['ipv4'].\\\n interface_attr['HundredGigE0/0/0/1']\n \n # Build config\n cfgs = pim.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.\\\n join([\n 'router pim',\n ' address-family ipv4',\n ' interface HundredGigE0/0/0/1',\n ' exit',\n ' interface Loopback0',\n ' exit',\n ' rp-address 2.2.2.2',\n ' exit',\n ' vrf red',\n ' address-family ipv4',\n ' interface HundredGigE0/0/0/1',\n ' exit',\n ' interface Loopback0',\n ' exit',\n ' rp-address 2.2.2.2',\n ' exit',\n ' exit',\n ' exit',\n ]))\n\n cfgs = pim.build_unconfig(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.\\\n join([\n 'no router pim',\n ]))\n\n \n cfgs = pim.build_unconfig(apply=False,\n attributes={'device_attr': {\n self.dev1: {\n 'vrf_attr': {\n 'default': {\n 'address_family_attr': {\n 'ipv4': {\n 'rp_address': None\n }\n }\n },\n 'red': {\n 'address_family_attr': {\n 'ipv4': {\n 'interface_attr': {\n 'Loopback0': None\n }\n }\n } \n }\n }\n }}})\n\n # Check config built correctly\n self.assertMultiLineEqual(str(cfgs[dev1.name]), '\\n'.\\\n join([\n 'router pim',\n ' address-family ipv4',\n ' no rp-address 2.2.2.2',\n ' exit',\n ' vrf red',\n ' address-family ipv4',\n ' no interface Loopback0',\n ' exit',\n ' exit',\n ' exit',\n ]))\n\n\nif __name__ == '__main__':\n unittest.main()\n", "id": "6118229", "language": "Python", "matching_score": 3.1051461696624756, "max_stars_count": 0, "path": "pkgs/conf-pkg/src/genie/libs/conf/pim/iosxr/tests/test_pim.py" }, { "content": "#!/usr/bin/env python\n\nimport unittest\nimport re\nfrom unittest.mock import Mock\n\nfrom genie.conf.tests import TestCase\nfrom genie.conf import Genie\nfrom genie.conf.base import Testbed, Device, Link, Interface\n\nfrom genie.libs.conf.vrf import Vrf\nfrom genie.libs.conf.bgp import RouteTarget\nfrom genie.libs.conf.address_family import AddressFamily\n\n\nclass test_vrf(TestCase):\n\n def setUp(self):\n tb = Genie.testbed = Testbed()\n self.dev1 = Device(testbed=tb, name='PE1', os='iosxr')\n self.dev2 = Device(testbed=tb, name='PE2', os='iosxr')\n self.i1 = Interface(name='GigabitEthernet0/0/0/1', device=self.dev1)\n self.i2 = Interface(name='GigabitEthernet0/0/0/2', device=self.dev2)\n self.i3 = Interface(name='GigabitEthernet0/0/0/3', device=self.dev1)\n self.i4 = Interface(name='GigabitEthernet0/0/0/4', device=self.dev2)\n self.i5 = Interface(name='GigabitEthernet0/0/0/5', device=self.dev1)\n self.i6 = Interface(name='GigabitEthernet0/0/0/6', device=self.dev2)\n self.i7 = 
Interface(name='GigabitEthernet0/0/0/7', device=self.dev1)\n self.i8 = Interface(name='GigabitEthernet0/0/0/8', device=self.dev2)\n self.link = Link(name='1_2_1', testbed=tb)\n self.link.connect_interface(interface=self.i1)\n self.link.connect_interface(interface=self.i2)\n self.link2 = Link(name='1_2_2', testbed=tb)\n self.link2.connect_interface(interface=self.i3)\n self.link2.connect_interface(interface=self.i4)\n self.link3 = Link(name='1_2_3', testbed=tb)\n self.link3.connect_interface(interface=self.i5)\n self.link3.connect_interface(interface=self.i6)\n self.link4 = Link(name='1_2_4', testbed=tb)\n self.link4.connect_interface(interface=self.i7)\n self.link4.connect_interface(interface=self.i8)\n self.assertSetEqual(\n set(self.link.find_interfaces()),\n set([self.i1, self.i2]))\n self.assertSetEqual(\n set(self.dev1.find_interfaces()),\n set([self.i1, self.i3, self.i5, self.i7]))\n self.assertSetEqual(\n set(self.dev2.find_interfaces()),\n set([self.i2, self.i4, self.i6, self.i8]))\n\n def test_init(self):\n\n with self.subTest('name'):\n with self.assertRaises(TypeError):\n vrf = Vrf()\n with self.assertRaises(AssertionError):\n vrf = Vrf(name=123)\n vrf = Vrf(name='red')\n vrf2 = Vrf(name='blue')\n\n with self.subTest('fallback_vrf'):\n # fallback_vrf's type is defined post Vrf class; Make sure it works.\n self.assertIs(vrf.fallback_vrf, None)\n with self.assertRaises(ValueError):\n vrf.fallback_vrf = 123\n self.assertIs(vrf.fallback_vrf, None)\n with self.assertRaises(ValueError):\n vrf.fallback_vrf = 'blue'\n self.assertIs(vrf.fallback_vrf, None)\n vrf.fallback_vrf = None\n self.assertIs(vrf.fallback_vrf, None)\n vrf.fallback_vrf = vrf2\n self.assertIs(vrf.fallback_vrf, vrf2)\n\n def test_1_top_level(self):\n\n vrf = Vrf(name='vrf1')\n self.dev1.add_feature(vrf)\n self.dev2.add_feature(vrf)\n\n vrf.device_attr['PE1'].scale_mode = 'big'\n vrf.description = 'my description'\n cfgs = vrf.build_config(apply=False)\n \n self.assertMultiLineDictEqual(cfgs, {\n self.dev1.name: '\\n'.join([\n 'vrf vrf1',\n ' description my description',\n ' mode big',\n ' exit',\n ]),\n self.dev2.name: '\\n'.join([\n 'vrf vrf1',\n ' description my description',\n ' exit',\n ]),\n })\n\n cfgs = vrf.build_config(apply=False, attributes='device_attr__*__description')\n\n self.assertMultiLineDictEqual(cfgs, {\n self.dev1.name: '\\n'.join([\n 'vrf vrf1',\n ' description my description',\n ' exit',\n ]),\n self.dev2.name: '\\n'.join([\n 'vrf vrf1',\n ' description my description',\n ' exit',\n ]),\n })\n\n cfgs = vrf.build_config(apply=False, attributes={\n 'device_attr': {\n '*': {\n 'description': None,\n },\n },\n })\n self.assertMultiLineDictEqual(cfgs, {\n self.dev1.name: '\\n'.join([\n 'vrf vrf1',\n ' description my description',\n ' exit',\n ]),\n self.dev2.name: '\\n'.join([\n 'vrf vrf1',\n ' description my description',\n ' exit',\n ]),\n })\n\n cfgs = vrf.build_config(apply=False, devices=[self.dev2], attributes={\n 'device_attr': {\n '*': {\n 'description': None,\n },\n },\n })\n self.assertMultiLineDictEqual(cfgs, {\n self.dev2.name: '\\n'.join([\n 'vrf vrf1',\n ' description my description',\n ' exit',\n ]),\n })\n\n cfgs = vrf.build_config(apply=False, devices=[self.dev2], attributes={\n 'device_attr': {\n self.dev1: {\n 'description': None,\n },\n },\n })\n self.assertMultiLineDictEqual(cfgs, {})\n\n cfgs = vrf.build_config(apply=False, devices=[self.dev2], attributes={\n 'device_attr': {\n self.dev2: {\n 'description': None,\n },\n },\n })\n self.assertMultiLineDictEqual(cfgs, {\n 
self.dev2.name: '\\n'.join([\n 'vrf vrf1',\n ' description my description',\n ' exit',\n ]),\n })\n\n cfgs = vrf.build_config(apply=False, devices=[self.dev2], attributes={\n 'device_attr': {\n self.dev2.name: {\n 'description': None,\n },\n },\n })\n self.assertMultiLineDictEqual(cfgs, {\n self.dev2.name: '\\n'.join([\n 'vrf vrf1',\n ' description my description',\n ' exit',\n ]),\n })\n\n def test_4_per_af(self):\n\n vrf = Vrf(name='vrf1')\n self.dev1.add_feature(vrf)\n\n vrf.address_families |= {AddressFamily.ipv6_unicast}\n vrf.device_attr['PE1'].address_family_attr['ipv4 unicast'].export_route_targets = [\n RouteTarget.ImportExport('100:200', stitching=True),\n ]\n\n cfgs = vrf.build_config(apply=False)\n\n self.assertMultiLineDictEqual(cfgs, {\n self.dev1.name: '\\n'.join([\n 'vrf vrf1',\n ' address-family ipv4 unicast',\n ' export route-target 100:200 stitching',\n ' exit',\n ' exit',\n ]),\n })\n\nif __name__ == '__main__':\n unittest.main()\n\n", "id": "583671", "language": "Python", "matching_score": 3.5412185192108154, "max_stars_count": 0, "path": "pkgs/conf-pkg/src/genie/libs/conf/vrf/tests/test_vrf.py" }, { "content": "#!/usr/bin/env python\n\n'''\nIOSXE unit tests for Genie Vrf conf using CLI.\n'''\n\n# Python\nimport re\nimport unittest\nfrom unittest.mock import Mock\n\n# Genie\nfrom genie.conf import Genie\nfrom genie.conf.tests import TestCase\nfrom genie.conf.base import Testbed, Device\nfrom genie.libs.conf.vrf.vrf import Vrf\n\n\nclass test_vrf(TestCase):\n\n def setUp(self):\n # Set Genie Tb\n testbed = Testbed()\n Genie.testbed = testbed\n # Device\n self.dev1 = Device(name='PE1', testbed=testbed, os='iosxe')\n\n def test_cli_config_v4(self): \n # prefix-list conf\n vrf_conf = Vrf('VRF1')\n self.dev1.add_feature(vrf_conf)\n\n # Apply configuration\n vrf_conf.device_attr[self.dev1].rd = '100:1'\n vrf_conf.device_attr[self.dev1].address_family_attr['ipv4 unicast'].\\\n import_from_global_map = 'import_from_global_map'\n vrf_conf.device_attr[self.dev1].address_family_attr['ipv4 unicast'].\\\n export_to_global_map = 'export_to_global_map'\n vrf_conf.device_attr[self.dev1].address_family_attr['ipv4 unicast'].\\\n routing_table_limit_number = 10000\n vrf_conf.device_attr[self.dev1].address_family_attr['ipv4 unicast'].\\\n simple_alert = True\n\n vrf_conf.device_attr[self.dev1].address_family_attr['ipv4 unicast'].\\\n route_target_attr['200:1'].rt_type = 'both'\n vrf_conf.device_attr[self.dev1].address_family_attr['ipv4 unicast'].\\\n route_target_attr['100:1'].rt_type = 'import'\n\n\n # Build config\n cfgs = vrf_conf.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'vrf definition VRF1',\n ' rd 100:1',\n ' address-family ipv4 unicast',\n ' import ipv4 unicast map import_from_global_map',\n ' export ipv4 unicast map export_to_global_map',\n ' maximum routes 10000 warning-only',\n ' route-target import 100:1',\n ' route-target import 200:1',\n ' route-target export 200:1',\n ' exit',\n ' exit',\n ]))\n\n # Build unconfig\n cfgs = vrf_conf.build_unconfig(apply=False)\n\n # Check config correctly unconfigured\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'no vrf definition VRF1',\n ]))\n\n # Build unconfig for selected attributes\n cfgs = vrf_conf.build_unconfig(apply=False,\n attributes={'device_attr': {\n self.dev1: {\n 'rd': None,\n 'address_family_attr': {\n 'ipv4 unicast': {\n 'export_to_global_map': None\n }\n }}}})\n\n # Check config correctly 
unconfigured\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'vrf definition VRF1',\n ' no rd 100:1',\n ' address-family ipv4 unicast',\n ' no export ipv4 unicast map export_to_global_map',\n ' exit',\n ' exit',\n ]))\n\n # Build unconfig for selected attributes\n cfgs = vrf_conf.build_unconfig(apply=False,\n attributes={'device_attr': {self.dev1: {\n 'address_family_attr': {'ipv4 unicast': {\n 'route_target_attr': {'200:1': {\n 'rt_type': None}\n }}}}}})\n\n # Check config correctly unconfigured\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'vrf definition VRF1',\n ' address-family ipv4 unicast',\n ' no route-target import 200:1',\n ' no route-target export 200:1',\n ' exit',\n ' exit',\n ]))\n\n def test_cli_config_v6(self): \n # prefix-list conf\n vrf_conf = Vrf('VRF2')\n self.dev1.add_feature(vrf_conf)\n\n # Apply configuration\n vrf_conf.device_attr[self.dev1].rd = '100:1'\n vrf_conf.device_attr[self.dev1].address_family_attr['ipv6 unicast'].\\\n import_from_global_map = 'test_import'\n vrf_conf.device_attr[self.dev1].address_family_attr['ipv6 unicast'].\\\n export_to_global_map = 'test_export'\n vrf_conf.device_attr[self.dev1].address_family_attr['ipv6 unicast'].\\\n routing_table_limit_number = 10000\n vrf_conf.device_attr[self.dev1].address_family_attr['ipv6 unicast'].\\\n alert_percent_value = 50\n\n vrf_conf.device_attr[self.dev1].address_family_attr['ipv6 unicast'].\\\n route_target_attr['100:1'].rt_type = 'export'\n vrf_conf.device_attr[self.dev1].address_family_attr['ipv6 unicast'].\\\n route_target_attr['200:1'].rt_type = 'export'\n vrf_conf.device_attr[self.dev1].address_family_attr['ipv6 unicast'].\\\n route_target_attr['300:1'].rt_type = 'import'\n vrf_conf.device_attr[self.dev1].address_family_attr['ipv6 unicast'].\\\n route_target_attr['400:1'].rt_type = 'import'\n\n\n # Build config\n cfgs = vrf_conf.build_config(apply=False)\n\n # Check config built correctly\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'vrf definition VRF2',\n ' rd 100:1',\n ' address-family ipv6 unicast',\n ' import ipv6 unicast map test_import',\n ' export ipv6 unicast map test_export',\n ' maximum routes 10000 50',\n ' route-target export 100:1',\n ' route-target export 200:1',\n ' route-target import 300:1',\n ' route-target import 400:1',\n ' exit',\n ' exit',\n ]))\n\n # Build unconfig\n cfgs = vrf_conf.build_unconfig(apply=False)\n\n # Check config correctly unconfigured\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'no vrf definition VRF2',\n ]))\n\n # Build unconfig for selected attributes\n cfgs = vrf_conf.build_unconfig(apply=False,\n attributes={'device_attr': {\n self.dev1: {\n 'rd': None,\n 'address_family_attr': {\n 'ipv6 unicast': {\n 'routing_table_limit_number': None,\n 'alert_percent_value': None,\n }\n }}}})\n\n # Check config correctly unconfigured\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'vrf definition VRF2',\n ' no rd 100:1',\n ' address-family ipv6 unicast',\n ' no maximum routes 10000 50',\n ' exit',\n ' exit',\n ]))\n\n # Build unconfig for selected attributes\n cfgs = vrf_conf.build_unconfig(apply=False,\n attributes={'device_attr': {self.dev1: {\n 'address_family_attr': {'ipv6 unicast': {\n 'route_target_attr': {'200:1': {\n 'rt_type': None}\n }}}}}})\n\n # Check config correctly unconfigured\n self.assertMultiLineEqual(\n str(cfgs[self.dev1.name]),\n '\\n'.join([\n 'vrf definition VRF2',\n ' address-family ipv6 unicast',\n ' 
no route-target export 200:1',\n ' exit',\n ' exit',\n ]))\n\n\nif __name__ == '__main__':\n unittest.main()\n\n", "id": "3956297", "language": "Python", "matching_score": 3.0584986209869385, "max_stars_count": 0, "path": "pkgs/conf-pkg/src/genie/libs/conf/vrf/iosxe/tests/test_vrf.py" }, { "content": "''' \nVrf Genie Ops Object Outputs for IOSXR.\n'''\n\n\nclass VrfOutput(object):\n ShowVrfAllDetail = {\n \"VRF1\": {\n \"description\": \"not set\",\n \"vrf_mode\": \"regular\",\n \"address_family\": {\n \"ipv6 unicast\": {\n \"route_target\": {\n \"400:1\": {\n \"rt_type\": \"import\",\n \"route_target\": \"400:1\"\n },\n \"300:1\": {\n \"rt_type\": \"import\",\n \"route_target\": \"300:1\"\n },\n \"200:1\": {\n \"rt_type\": \"both\",\n \"route_target\": \"200:1\"\n },\n \"200:2\": {\n \"rt_type\": \"import\",\n \"route_target\": \"200:2\"\n }\n }\n },\n \"ipv4 unicast\": {\n \"route_target\": {\n \"400:1\": {\n \"rt_type\": \"import\",\n \"route_target\": \"400:1\"\n },\n \"300:1\": {\n \"rt_type\": \"import\",\n \"route_target\": \"300:1\"\n },\n \"200:1\": {\n \"rt_type\": \"both\",\n \"route_target\": \"200:1\"\n },\n \"200:2\": {\n \"rt_type\": \"import\",\n \"route_target\": \"200:2\"\n }\n }\n }\n },\n \"route_distinguisher\": \"200:1\",\n \"interfaces\": [\n \"GigabitEthernet0/0/0/1\"\n ]\n },\n \"VRF2\": {\n \"description\": \"not set\",\n \"vrf_mode\": \"regular\",\n \"address_family\": {\n \"ipv6 unicast\": {\n \"route_target\": {\n \"200:2\": {\n \"rt_type\": \"both\",\n \"route_target\": \"200:2\"\n }\n }\n },\n \"ipv4 unicast\": {\n \"route_target\": {\n \"200:2\": {\n \"rt_type\": \"both\",\n \"route_target\": \"200:2\"\n }\n }\n }\n },\n \"route_distinguisher\": \"200:2\",\n \"interfaces\": [\n \"GigabitEthernet0/0/0/2\"\n ]}\n }\n ShowVrfAllDetailCustom = {\n \"VRF2\": {\n \"description\": \"not set\",\n \"vrf_mode\": \"regular\",\n \"address_family\": {\n \"ipv6 unicast\": {\n \"route_target\": {\n \"200:2\": {\n \"rt_type\": \"both\",\n \"route_target\": \"200:2\"\n }\n }\n },\n \"ipv4 unicast\": {\n \"route_target\": {\n \"200:2\": {\n \"rt_type\": \"both\",\n \"route_target\": \"200:2\"\n }\n }\n }\n },\n \"route_distinguisher\": \"200:2\",\n \"interfaces\": [\n \"GigabitEthernet0/0/0/2\"\n ]}\n }\n\n VrfCustomInfo = {\n 'vrfs': {\n \"VRF2\": {\n \"route_distinguisher\": \"200:2\",\n \"address_family\": {\n \"ipv4 unicast\": {\n \"route_targets\": {\n \"200:2\": {\n \"rt_type\": \"both\",\n \"route_target\": \"200:2\"\n }\n }\n },\n \"ipv6 unicast\": {\n \"route_targets\": {\n \"200:2\": {\n \"rt_type\": \"both\",\n \"route_target\": \"200:2\"\n }\n }\n }\n }\n },\n }\n }\n showVrfDetail_all='''\n Mon Sep 18 09:36:51.507 PDT\n\n VRF VRF1; RD 200:1; VPN ID not set\n VRF mode: Regular\n Description not set\n Interfaces:\n GigabitEthernet0/0/0/1\n Address family IPV4 Unicast\n Import VPN route-target communities:\n RT:200:1\n RT:200:2\n RT:300:1\n RT:400:1\n Export VPN route-target communities:\n RT:200:1\n No import route policy\n No export route policy\n Address family IPV6 Unicast\n Import VPN route-target communities:\n RT:200:1\n RT:200:2\n RT:300:1\n RT:400:1\n Export VPN route-target communities:\n RT:200:1\n No import route policy\n No export route policy\n\n VRF VRF2; RD 200:2; VPN ID not set\n VRF mode: Regular\n Description not set\n Interfaces:\n GigabitEthernet0/0/0/2\n Address family IPV4 Unicast\n Import VPN route-target communities:\n RT:200:2\n Export VPN route-target communities:\n RT:200:2\n No import route policy\n No export route policy\n Address 
family IPV6 Unicast\n Import VPN route-target communities:\n RT:200:2\n Export VPN route-target communities:\n RT:200:2\n No import route policy\n No export route policy\n '''\n showVrfDetail_vrf2 = '''\nVRF VRF2; RD 200:2; VPN ID not set\n VRF mode: Regular\n Description not set\n Interfaces:\n GigabitEthernet0/0/0/2\n Address family IPV4 Unicast\n Import VPN route-target communities:\n RT:200:2\n Export VPN route-target communities:\n RT:200:2\n No import route policy\n No export route policy\n Address family IPV6 Unicast\n Import VPN route-target communities:\n RT:200:2\n Export VPN route-target communities:\n RT:200:2\n No import route policy\n No export route policy\n '''\n\n VrfInfo = {\n \"vrfs\": {\n \"VRF2\": {\n \"route_distinguisher\": \"200:2\",\n \"address_family\": {\n \"ipv4 unicast\": {\n \"route_targets\": {\n \"200:2\": {\n \"rt_type\": \"both\",\n \"route_target\": \"200:2\"\n }\n }\n },\n \"ipv6 unicast\": {\n \"route_targets\": {\n \"200:2\": {\n \"rt_type\": \"both\",\n \"route_target\": \"200:2\"\n }\n }\n }\n }\n },\n \"VRF1\": {\n \"route_distinguisher\": \"200:1\",\n \"address_family\": {\n \"ipv4 unicast\": {\n \"route_targets\": {\n \"400:1\": {\n \"rt_type\": \"import\",\n \"route_target\": \"400:1\"\n },\n \"300:1\": {\n \"rt_type\": \"import\",\n \"route_target\": \"300:1\"\n },\n \"200:1\": {\n \"rt_type\": \"both\",\n \"route_target\": \"200:1\"\n },\n \"200:2\": {\n \"rt_type\": \"import\",\n \"route_target\": \"200:2\"\n }\n }\n },\n \"ipv6 unicast\": {\n \"route_targets\": {\n \"400:1\": {\n \"rt_type\": \"import\",\n \"route_target\": \"400:1\"\n },\n \"300:1\": {\n \"rt_type\": \"import\",\n \"route_target\": \"300:1\"\n },\n \"200:1\": {\n \"rt_type\": \"both\",\n \"route_target\": \"200:1\"\n },\n \"200:2\": {\n \"rt_type\": \"import\",\n \"route_target\": \"200:2\"\n }\n }\n }\n }\n }\n }\n }\n", "id": "6876880", "language": "Python", "matching_score": 1.7047761678695679, "max_stars_count": 0, "path": "pkgs/ops-pkg/src/genie/libs/ops/vrf/iosxr/tests/vrf_output.py" }, { "content": "from .abc import *\nfrom .neighbor import *\nfrom .routing import *\nfrom .mac import *\nfrom .ipaddress import *\nfrom .password_type import *\nfrom .route_target import *\nfrom .route_distinguisher import *\nfrom .redistribution_attr import *\n", "id": "11460036", "language": "Python", "matching_score": 0.9773238301277161, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/base/__init__.py" }, { "content": "\"\"\"RouteTarget type implementation\n\nA RouteTarget is distinct from a RouteDistinguisher but has the same type 0/1/2\nformat.\n\"\"\"\n\n__all__ = (\n 'RouteTarget',\n )\n\nfrom copy import copy\nimport collections.abc\nimport re\nfrom ipaddress import IPv4Address\nfrom enum import Enum\nimport functools\nimport struct\n\nfrom genie.decorator import managedattribute\n\n\n@functools.total_ordering\nclass RouteTargetType(Enum):\n ASN2_index = (0, \"ASN2:index\", struct.Struct('!BHL'))\n IPv4Address_index = (1, \"IPv4Address:index\", struct.Struct('!BLH'))\n ASN4_index = (2, \"ASN4:index\", struct.Struct('!BLH'))\n\n def __init__(self, type, format_str, struct_format):\n self.__class__._value2member_map_[type] = self\n self.__class__._value2member_map_[format_str] = self\n\n @property\n def type(self):\n return self.value[0]\n\n @property\n def format_str(self):\n return self.value[1]\n\n @property\n def struct_format(self):\n return self.value[2]\n\n def __str__(self):\n return self.format_str\n\n def __int__(self):\n return self.type\n\n def 
__eq__(self, other):\n if not isinstance(other, RouteTargetType):\n try:\n other = self.__class__(other)\n except Exception:\n return NotImplemented\n return self.type is other.type\n\n def __lt__(self, other):\n if not isinstance(other, RouteTargetType):\n try:\n other = self.__class__(other)\n except Exception:\n return NotImplemented\n return self.type < other.type\n\n __hash__ = Enum.__hash__\n\n\n@functools.total_ordering\nclass RouteTargetImportExport(object):\n\n route_target = managedattribute(\n name='route_target',\n read_only=True) # (read-only hash key)\n\n stitching = False\n\n def __init__(self, route_target, *, stitching=None):\n if stitching is None and isinstance(route_target, RouteTargetImportExport):\n # copy constructor\n route_target, stitching = \\\n route_target.route_target, \\\n route_target.stitching\n self._route_target = RouteTarget(route_target)\n self.stitching = bool(stitching)\n\n def __eq__(self, other):\n if not isinstance(other, RouteTargetImportExport):\n return NotImplemented\n return (self.route_target, self.stitching) \\\n == (other.route_target, other.stitching)\n\n def __lt__(self, other):\n if not isinstance(other, RouteTargetImportExport):\n return NotImplemented\n return (self.route_target, self.stitching) \\\n < (other.route_target, other.stitching)\n\n def __hash__(self):\n # return hash((self.route_target, self.stitching))\n return hash(self.route_target)\n\n def __repr__(self):\n return '%s(%r%s)' % (\n self.__class__.__name__,\n self.route_target,\n ', stitching=%r' % (self.stitching,) if self.stitching else '')\n\n\n@functools.total_ordering\nclass RouteTarget(object):\n\n __slots__ = {'_fields'}\n\n Type = RouteTargetType\n\n ImportExport = RouteTargetImportExport\n\n def create_import_export(self, **kwargs):\n return self.ImportExport(route_target=self, **kwargs)\n\n def __init__(self, value, *, type=None):\n type_ = type\n del type\n super().__init__()\n\n if type_ is not None:\n type_ = RouteTargetType(type_)\n\n if isinstance(value, RouteTarget):\n # Copy constructor\n if type_ is not None and type_ is not value.type:\n raise TypeError\n self._fields = value._fields # no need to copy a tuple\n return\n\n if isinstance(value, str):\n\n m = re.match(r'^(\\d+\\.\\d+\\.\\d+\\.\\d+):(\\d+)$', value)\n if m:\n # IPv4Address:index\n if type_ is None:\n type_ = RouteTargetType.IPv4Address_index\n elif type_ is not RouteTargetType.IPv4Address_index:\n raise TypeError\n ip = IPv4Address(m.group(1))\n idx = int(m.group(2))\n self.fields = [type_, ip, idx]\n return\n\n m = re.match(r'^(\\d+):(\\d+)$', value)\n if m:\n # ASN2:index or ASN4:index\n asn = int(m.group(1))\n idx = int(m.group(2))\n if type_ is None:\n type_ = RouteTargetType.ASN2_index if asn <= 0xFFFF else \\\n RouteTargetType.ASN4_index\n elif type_ not in (\n RouteTargetType.ASN2_index,\n RouteTargetType.ASN4_index):\n raise TypeError\n self.fields = [type_, asn, idx]\n return\n\n m = re.match(r'^(\\d+)\\.(\\d+):(\\d+)$', value)\n if m:\n # \"dotted\" ASN4:index\n asnh = int(m.group(1))\n asnl = int(m.group(2))\n idx = int(m.group(3))\n if type_ is None:\n type_ = RouteTargetType.ASN4_index\n elif type_ is not RouteTargetType.ASN4_index:\n raise TypeError\n if asnh > 0xFFFF or asnl > 0xFFFF:\n raise ValueError(value)\n asn = asnh << 16 | asnl\n self.fields = [type_, asn, idx]\n return\n\n m = re.match(r'^([0-9a-fA-F]{1,4})'\n r'\\.([0-9a-fA-F]{1,4})'\n r'\\.([0-9a-fA-F]{1,4})$', value)\n if m:\n # \"dotted_hex3words\" ASN2:index or ASN4:index\n w1 = int(m.group(1), 16)\n w2 = 
int(m.group(2), 16)\n w3 = int(m.group(3), 16)\n if type_ is None:\n type_ = RouteTargetType.ASN2_index \\\n if w1 == 0 \\\n else RouteTargetType.ASN4_index\n if type_ is RouteTargetType.ASN2_index:\n asn = w1\n idx = w2 << 16 | w3\n self.fields = [type_, asn, idx]\n elif type_ is RouteTargetType.IPv4Address_index:\n ip = IPv4Address(w1 << 16 | w2)\n idx = w3\n self.fields = [type_, ip, idx]\n elif type_ is RouteTargetType.ASN4_index:\n asn = w1 << 16 | w2\n idx = w3\n self.fields = [type_, asn, idx]\n else:\n raise TypeError\n return\n\n raise ValueError(value)\n\n if isinstance(value, collections.abc.Sequence):\n if type_ is not None:\n raise TypeError\n self.fields = value\n return\n\n raise ValueError('bad RouteTarget format: %r' % (value,))\n\n @property\n def fields(self):\n return self._fields\n\n @fields.setter\n def fields(self, value):\n if isinstance(value, str):\n raise ValueError(value)\n value = list(value)\n if not value:\n raise ValueError(value)\n value[0] = RouteTargetType(value[0])\n if value[0] is RouteTargetType.ASN2_index:\n if len(value) != 3:\n raise ValueError(value)\n value[1] = int(value[1])\n value[2] = int(value[2])\n if not (0 <= value[1] <= 0xFFFF and 0 <= value[2] <= 0xFFFFFFFF):\n raise ValueError(value)\n elif value[0] is RouteTargetType.IPv4Address_index:\n value[1] = IPv4Address(value[1])\n value[2] = int(value[2])\n if not (0 <= value[2] <= 0xFFFF):\n raise ValueError(value)\n elif value[0] is RouteTargetType.ASN4_index:\n if len(value) != 3:\n raise ValueError(value)\n value[1] = int(value[1])\n value[2] = int(value[2])\n if not (0 <= value[1] <= 0xFFFFFFFF and 0 <= value[2] <= 0xFFFF):\n raise ValueError(value)\n else:\n raise ValueError(value)\n self._fields = tuple(value)\n\n @property\n def type(self):\n return self.fields[0]\n\n @property\n def ip(self):\n if self.type is RouteTargetType.IPv4Address_index:\n return copy(self.fields[1])\n else:\n raise TypeError\n\n @property\n def asn(self):\n if self.type in (\n RouteTargetType.ASN2_index,\n RouteTargetType.ASN4_index,\n ):\n return self.fields[1]\n else:\n raise TypeError\n\n @property\n def index(self):\n if self.type in (\n RouteTargetType.ASN2_index,\n RouteTargetType.IPv4Address_index,\n RouteTargetType.ASN4_index,\n ):\n return self.fields[2]\n else:\n raise TypeError\n\n def __eq__(self, other):\n if not isinstance(other, RouteTarget):\n try:\n other = self.__class__(other, type=self.type)\n except Exception:\n return NotImplemented\n return self.fields == other.fields\n\n def __lt__(self, other):\n if not isinstance(other, RouteTarget):\n try:\n other = self.__class__(other, type=self.type)\n except Exception:\n return NotImplemented\n return self.fields < other.fields\n\n def __hash__(self):\n # TODO mutable! 
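(fields can be reassigned through the fields setter, so hashing them could silently corrupt dict/set membership; a constant hash keeps equal objects hash-equal at the cost of lookup collisions)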
return hash(self.fields)\n return 0\n\n def __repr__(self):\n if (self.type is RouteTargetType.ASN4_index\n and self.asn <= 0xFFFF) \\\n or (self.type is RouteTargetType.ASN2_index\n and self.asn > 0xFFFF):\n return '%s(%r, type=%s)' % (\n self.__class__.__name__,\n str(self),\n self.type)\n else:\n # Unambiguous\n return '%s(%r)' % (\n self.__class__.__name__,\n str(self))\n\n def __str__(self):\n return ':'.join(str(p) for p in self.fields[1:])\n\n def __copy__(self):\n return self.__class__(self)\n\n @property\n def dotted(self):\n if self.type is RouteTargetType.ASN4_index:\n # 1.2:3 \"dotted\" format as seen on Nexus\n # \"The AS number can a 32-bit integer in the form of a higher\n # 16-bit decimal number and a lower 16-bit decimal number in xx.xx\n # format\"\n return \"%d.%d:%d\" % self.value_words\n else:\n return str(self)\n\n @property\n def packed(self):\n return self.type.struct_format.pack(*[int(v) for v in self.fields])\n\n @property\n def value_packed(self):\n return self.packed[1:]\n\n @property\n def bytes(self):\n return struct.unpack('B' * 7, self.packed)\n\n @property\n def value_bytes(self):\n return struct.unpack('B' * 6, self.value_packed)\n\n @property\n def value_words(self):\n return struct.unpack('!HHH', self.value_packed)\n\n @property\n def dotted_hex3words(self):\n return '%04x.%04x.%04x' % self.value_words\n\n def __format__(self, format_spec):\n if len(format_spec) == 0 or format_spec == 'd:d':\n return str(self)\n if format_spec == 'x.x.x':\n return self.dotted_hex3words\n if format_spec == 'd.d:d':\n return self.dotted\n raise ValueError('Invalid format specifier: ' + format_spec)\n\n", "id": "7284678", "language": "Python", "matching_score": 2.6092135906219482, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/base/route_target.py" }, { "content": "from ..base import RouteTarget\nfrom ..base import RouteDistinguisher\nfrom .bgp import *\nfrom .bgp_prefix import *\n", "id": "8418147", "language": "Python", "matching_score": 0.888364851474762, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/bgp/__init__.py" }, { "content": "# Python\nimport unittest\nfrom unittest.mock import Mock\n\n# ATS\nfrom ats.topology import Device\n\n# Genie\nfrom genie.libs.ops.bgp.ios.bgp import Bgp\nfrom genie.libs.ops.bgp.ios.tests.bgp_output import BgpOutput\n\n# ios show_bgp\nfrom genie.libs.parser.ios.show_bgp import ShowBgpAllSummary, ShowBgpAllClusterIds, \\\n ShowBgpAllNeighborsAdvertisedRoutes, \\\n ShowBgpAllNeighborsReceivedRoutes, \\\n ShowBgpAllNeighborsRoutes, \\\n ShowIpBgpTemplatePeerPolicy, \\\n ShowBgpAllNeighbors, \\\n ShowIpBgpAllDampeningParameters, \\\n ShowIpBgpTemplatePeerSession, \\\n ShowBgpAllNeighborsPolicy, \\\n ShowBgpAllDetail, \\\n ShowBgpAll\n\noutputs = {}\n\noutputs['show bgp vpnv4 unicast all neighbors 10.16.2.2 advertised-routes'] = BgpOutput.nbr1_ipv4_advertised_routes\noutputs['show bgp vpnv4 unicast all neighbors 10.16.2.2 routes'] = BgpOutput.nbr1_ipv4_routes\noutputs['show bgp vpnv4 unicast all neighbors 10.16.2.2 received-routes'] = BgpOutput.nbr1_ipv4_received_routes\n\noutputs['show bgp all neighbors 10.16.2.2 policy'] = BgpOutput.nbr1_bgp_policy\noutputs['show bgp all neighbors 10.36.3.3 policy'] = BgpOutput.nbr2_bgp_policy\noutputs['show bgp all neighbors | i BGP neighbor'] = BgpOutput.nbr1_bgp_all_neighbors\noutputs['show bgp all neighbors 10.16.2.2 advertised-routes'] = BgpOutput.nbr1_advertised_routes\noutputs['show bgp all neighbors 10.16.2.2 routes'] = BgpOutput.nbr1_routes\noutputs['show bgp 
all neighbors 10.16.2.2 received-routes'] = BgpOutput.nbr1_received_routes\noutputs['show bgp all neighbors | i BGP neighbor'] = BgpOutput.bgp_all_neighbors\noutputs['show bgp all neighbors 10.36.3.3 advertised-routes'] = BgpOutput.nbr2_advertised_routes\noutputs['show bgp all neighbors 10.36.3.3 routes'] = BgpOutput.nbr2_routes\noutputs['show bgp all neighbors 10.36.3.3 received-routes'] = BgpOutput.nbr2_received_routes\n\n\ndef mapper(key):\n return outputs[key]\n\n\nclass test_bgp(unittest.TestCase):\n\n def setUp(self):\n self.device = Device(name='aDevice')\n self.device.os = 'ios'\n self.device.custom['abstraction'] = {'order':['os']}\n self.device.mapping={}\n self.device.mapping['cli']='cli'\n # Give the device as a connection type\n # This is done in order to call the parser on the output provided\n self.device.connectionmgr.connections['cli'] = self.device\n\n def test_complete_output(self):\n self.maxDiff = None\n bgp = Bgp(device=self.device)\n\n # Get outputs\n bgp.maker.outputs[ShowBgpAllSummary] = \\\n {\"{'address_family':'','vrf':''}\":BgpOutput.ShowBgpAllSummary}\n bgp.maker.outputs[ShowBgpAllClusterIds] = \\\n {'':BgpOutput.ShowBgpAllClusterIds}\n bgp.maker.outputs[ShowIpBgpTemplatePeerPolicy] = \\\n {'':BgpOutput.ShowIpBgpTemplatePeerPolicy}\n bgp.maker.outputs[ShowBgpAllNeighbors] = \\\n {\"{'address_family':'','neighbor':''}\":BgpOutput.ShowBgpAllNeighbors}\n bgp.maker.outputs[ShowIpBgpAllDampeningParameters] = \\\n {'':BgpOutput.ShowIpBgpAllDampeningParameters}\n bgp.maker.outputs[ShowIpBgpTemplatePeerSession] = \\\n {'':BgpOutput.ShowIpBgpTemplatePeerSession}\n bgp.maker.outputs[ShowBgpAllDetail] = \\\n {\"{'address_family':'','vrf':''}\":BgpOutput.ShowBgpAllDetail}\n bgp.maker.outputs[ShowBgpAll] = \\\n {\"{'address_family':''}\":BgpOutput.ShowBgpAll}\n\n # Return outputs above as inputs to parser when called\n self.device.execute = Mock()\n self.device.execute.side_effect = mapper\n\n # Learn the feature\n bgp.learn()\n\n # Verify Ops was created successfully\n self.assertEqual(bgp.info, BgpOutput.BgpOpsOutput_info)\n self.assertDictEqual(bgp.table, BgpOutput.BgpOpsOutput_table)\n self.assertDictEqual(bgp.routes_per_peer, BgpOutput.BgpOpsOutput_routesperpeer)\n\n def test_custom_output(self):\n self.maxDiff = None\n bgp = Bgp(device=self.device)\n outputs[\n 'show bgp all neighbors | i BGP neighbor'] = BgpOutput.nbr1_bgp_all_neighbors\n # Get outputs\n bgp.maker.outputs[ShowBgpAllSummary] = \\\n {\"{'address_family':'vpnv4 unicast','vrf':'VRF1'}\":BgpOutput.ShowBgpAllSummary_custom}\n bgp.maker.outputs[ShowBgpAllClusterIds] = \\\n {'':BgpOutput.ShowBgpAllClusterIds}\n bgp.maker.outputs[ShowIpBgpTemplatePeerPolicy] = \\\n {'':BgpOutput.ShowIpBgpTemplatePeerPolicy}\n bgp.maker.outputs[ShowBgpAllNeighbors] = \\\n {\"{'address_family':'vpnv4 unicast','neighbor':'10.16.2.2'}\":BgpOutput.ShowBgpAllNeighbors_nbr1}\n bgp.maker.outputs[ShowIpBgpAllDampeningParameters] = \\\n {'':BgpOutput.ShowIpBgpAllDampeningParameters}\n bgp.maker.outputs[ShowIpBgpTemplatePeerSession] = \\\n {'':BgpOutput.ShowIpBgpTemplatePeerSession}\n bgp.maker.outputs[ShowBgpAllDetail] = \\\n {\"{'address_family':'vpnv4 unicast','vrf':'VRF1'}\":BgpOutput.ShowBgpAllDetail_custom}\n bgp.maker.outputs[ShowBgpAll] = \\\n {\"{'address_family':'vpnv4 unicast'}\":BgpOutput.ShowBgpAll_custom}\n\n # Return outputs above as inputs to parser when called\n self.device.execute = Mock()\n self.device.execute.side_effect = mapper\n\n # Learn the feature\n bgp.learn(address_family='vpnv4 unicast RD 300:1', 
vrf='VRF1', neighbor='10.16.2.2')\n outputs[\n 'show bgp all neighbors | i BGP neighbor'] = BgpOutput.bgp_all_neighbors\n # Verify Ops was created successfully\n self.assertDictEqual(bgp.info, BgpOutput.BgpOpsOutput_info_custom)\n self.assertDictEqual(bgp.table, BgpOutput.BgpOpsOutput_table_custom)\n self.assertDictEqual(bgp.routes_per_peer, BgpOutput.BgpOpsOutput_routesperpeer_custom)\n\n def test_empty_output(self):\n self.maxDiff = None\n bgp = Bgp(device=self.device)\n # Get outputs\n bgp.maker.outputs[ShowBgpAllSummary] = \\\n {\"{'address_family':'','vrf':''}\":''}\n bgp.maker.outputs[ShowBgpAllClusterIds] = \\\n {'':''}\n bgp.maker.outputs[ShowIpBgpTemplatePeerPolicy] = \\\n {'':''}\n bgp.maker.outputs[ShowBgpAllNeighbors] = \\\n {\"{'address_family':'','neighbor':''}\":''}\n bgp.maker.outputs[ShowIpBgpAllDampeningParameters] = \\\n {'':''}\n bgp.maker.outputs[ShowIpBgpTemplatePeerSession] = \\\n {'':''}\n bgp.maker.outputs[ShowBgpAllDetail] = \\\n {\"{'address_family':'','vrf':''}\":''}\n bgp.maker.outputs[ShowBgpAll] = \\\n {\"{'address_family':''}\":''}\n self.device.execute = Mock()\n self.device.execute.side_effect = mapper\n # Learn the feature\n bgp.learn()\n\n # Check no attribute not found\n # info - bgp_id\n with self.assertRaises(AttributeError):\n bgp_id = (bgp.info['instance']['default']['bgp_id'])\n # table - bgp_table_version\n with self.assertRaises(AttributeError):\n bgp_table_version = (bgp.table['instance']['default']['vrf']\\\n ['default']['address_family']['ipv4 unicast']\\\n ['bgp_table_version'])\n # routes_per_peer - remote_as\n with self.assertRaises(AttributeError):\n remote_as = (bgp.routes_per_peer['instance']['default']['vrf']\\\n ['default']['neighbor']['10.16.2.2']['remote_as'])\n\n def test_selective_attribute(self):\n self.maxDiff = None\n bgp = Bgp(device=self.device)\n\n # Get outputs\n bgp.maker.outputs[ShowBgpAllSummary] = \\\n {\"{'address_family':'','vrf':''}\":BgpOutput.ShowBgpAllSummary}\n bgp.maker.outputs[ShowBgpAllClusterIds] = \\\n {'':BgpOutput.ShowBgpAllClusterIds}\n bgp.maker.outputs[ShowIpBgpTemplatePeerPolicy] = \\\n {'':BgpOutput.ShowIpBgpTemplatePeerPolicy}\n bgp.maker.outputs[ShowBgpAllNeighbors] = \\\n {\"{'address_family':'','neighbor':''}\":BgpOutput.ShowBgpAllNeighbors}\n bgp.maker.outputs[ShowIpBgpAllDampeningParameters] = \\\n {'':BgpOutput.ShowIpBgpAllDampeningParameters}\n bgp.maker.outputs[ShowIpBgpTemplatePeerSession] = \\\n {'':BgpOutput.ShowIpBgpTemplatePeerSession}\n bgp.maker.outputs[ShowBgpAllDetail] = \\\n {\"{'address_family':'','vrf':''}\":BgpOutput.ShowBgpAllDetail}\n bgp.maker.outputs[ShowBgpAll] = \\\n {\"{'address_family':''}\":BgpOutput.ShowBgpAll}\n\n # Return outputs above as inputs to parser when called\n self.device.execute = Mock()\n self.device.execute.side_effect = mapper\n\n # Learn the feature\n bgp.learn()\n\n # Check specific attribute values\n # info - bgp_id\n self.assertEqual(bgp.info['instance']['default']['bgp_id'], 100)\n # table - bgp_table_version\n self.assertEqual(bgp.table['instance']['default']['vrf']['evpn1']\\\n ['address_family']['vpnv4 unicast RD 65535:1']\\\n ['bgp_table_version'], 5)\n # routes_per_peer - localprf\n self.assertEqual(bgp.routes_per_peer['instance']['default']['vrf']\\\n ['default']['neighbor']['10.16.2.2']['address_family']\\\n ['ipv4 unicast']['advertised']['10.1.1.0/24']['index'][1]\\\n ['localprf'], 100)\n\n def test_incomplete_output(self):\n self.maxDiff = None\n bgp = Bgp(device=self.device)\n\n # Get outputs\n bgp.maker.outputs[ShowBgpAllSummary] = \\\n 
{\"{'address_family':'','vrf':''}\":BgpOutput.ShowBgpAllSummary}\n bgp.maker.outputs[ShowBgpAllClusterIds] = \\\n {'':BgpOutput.ShowBgpAllClusterIds}\n bgp.maker.outputs[ShowIpBgpTemplatePeerPolicy] = \\\n {'':BgpOutput.ShowIpBgpTemplatePeerPolicy}\n bgp.maker.outputs[ShowBgpAllNeighbors] = \\\n {\"{'address_family':'','neighbor':''}\":BgpOutput.ShowBgpAllNeighbors}\n bgp.maker.outputs[ShowIpBgpAllDampeningParameters] = \\\n {'':BgpOutput.ShowIpBgpAllDampeningParameters}\n bgp.maker.outputs[ShowIpBgpTemplatePeerSession] = \\\n {'':BgpOutput.ShowIpBgpTemplatePeerSession}\n bgp.maker.outputs[ShowBgpAllDetail] = \\\n {\"{'address_family':'','vrf':''}\":BgpOutput.ShowBgpAllDetail}\n bgp.maker.outputs[ShowBgpAll] = \\\n {\"{'address_family':''}\":BgpOutput.ShowBgpAll}\n\n # Outputs from side_effect set to empty\n bgp.maker.outputs[ShowBgpAllNeighborsPolicy] = {'':''}\n bgp.maker.outputs[ShowBgpAllNeighborsAdvertisedRoutes] = {'':''}\n bgp.maker.outputs[ShowBgpAllNeighborsRoutes] = {'':''}\n bgp.maker.outputs[ShowBgpAllNeighborsReceivedRoutes] = {'':''}\n\n # Return outputs above as inputs to parser when called\n self.device.execute = Mock()\n self.device.execute.side_effect = ['', '', '', '', '', '', '', '', '',\\\n '', '', '', '', '', '', '', '', '',\\\n '', '', '', '', '', '', '', '', '',\\\n '', '', '', '', '', '', '', '', '',\\\n '', '', '', '']\n\n # Learn the feature\n bgp.learn()\n\n # Check attribute values of output provided is found\n \n # bgp.info - bgp_id\n self.assertEqual(bgp.info['instance']['default']['bgp_id'], 100)\n\nif __name__ == '__main__':\n unittest.main()\n", "id": "6249851", "language": "Python", "matching_score": 4.155714988708496, "max_stars_count": 0, "path": "pkgs/ops-pkg/src/genie/libs/ops/bgp/ios/tests/test_bgp.py" }, { "content": "# Python\nimport unittest\nfrom copy import deepcopy\nfrom unittest.mock import Mock\n\n# ATS\nfrom ats.topology import Device\n\n# Genie\nfrom genie.libs.ops.vrf.nxos.vrf import Vrf\nfrom genie.libs.ops.vrf.nxos.tests.vrf_output import VrfOutput\n\n# nxos show_vrf\nfrom genie.libs.parser.nxos.show_vrf import ShowVrfDetail\n\noutputs = {}\noutputs['show vrf default detail'] = VrfOutput.showVrfDetail_default\noutputs['show vrf all detail'] = VrfOutput.showVrfDetail_all\n\ndef mapper(key):\n return outputs[key]\n\n\nclass test_vrf(unittest.TestCase):\n\n def setUp(self):\n self.device = Device(name='aDevice')\n self.device.os = 'nxos'\n self.device.mapping={}\n self.device.mapping['cli']='cli'\n # Give the device as a connection type\n # This is done in order to call the parser on the output provided\n self.device.connectionmgr.connections['cli'] = self.device\n\n\n def test_complete_output(self):\n vrf = Vrf(device=self.device)\n\n # Set outputs\n vrf.maker.outputs[ShowVrfDetail] = {'': VrfOutput.ShowVrfDetail}\n self.device.execute = Mock()\n self.device.execute.side_effect = mapper\n # Learn the feature\n vrf.learn()\n\n # Verify Ops was created successfully\n self.assertEqual(vrf.info, VrfOutput.VrfInfo)\n\n def test_custom_output(self):\n vrf = Vrf(device=self.device)\n vrf.maker.outputs[ShowVrfDetail] = {'': VrfOutput.ShowVrfDetailCustom}\n # Set outputs\n self.device.execute = Mock()\n self.device.execute.side_effect = mapper\n # Learn the feature\n vrf.learn(vrf='default')\n self.maxDiff = None\n\n # Verify Ops was created successfully\n self.assertDictEqual(vrf.info, VrfOutput.VrfCustomInfo)\n\n\n\n def test_selective_attribute(self):\n vrf = Vrf(device=self.device)\n\n # Set outputs\n vrf.maker.outputs[ShowVrfDetail] = {'': 
VrfOutput.ShowVrfDetail}\n self.device.execute = Mock()\n self.device.execute.side_effect = mapper\n # Learn the feature\n vrf.learn()\n\n # Test specific attributes in info\n self.assertEqual(vrf.info['vrfs']['VRF1']['address_family']['ipv6'], {})\n\n def test_empty_output(self):\n vrf = Vrf(device=self.device)\n\n # Set outputs\n vrf.maker.outputs[ShowVrfDetail] = {'': {}}\n outputs['show vrf default detail'] = ''\n outputs['show vrf all detail'] = ''\n self.device.execute = Mock()\n self.device.execute.side_effect = mapper\n # Learn the feature\n vrf.learn()\n\n # revert back\n outputs['show vrf default detail'] = VrfOutput.showVrfDetail_default\n outputs['show vrf all detail'] = VrfOutput.showVrfDetail_all\n # Check no outputs in vrf.info\n with self.assertRaises(AttributeError):\n neighbor_address = vrf.info['vrfs']['default']['route_distinguisher']\n\n\nif __name__ == '__main__':\n unittest.main()", "id": "8242579", "language": "Python", "matching_score": 3.49043345451355, "max_stars_count": 0, "path": "pkgs/ops-pkg/src/genie/libs/ops/vrf/nxos/tests/test_vrf.py" }, { "content": "# Python\nimport unittest\n\n# Ats\nfrom ats.topology import Device\n\n# Genie package\nfrom genie.ops.base import Base\nfrom genie.ops.base.maker import Maker\n\nfrom unittest.mock import Mock\n# genie.libs\nfrom genie.libs.ops.static_routing.ios.static_routing import StaticRouting\nfrom genie.libs.ops.static_routing.ios.tests.static_routing_output import StaticRouteOutput\n\nfrom genie.libs.parser.iosxe.show_vrf import ShowVrfDetail\n\noutputs = {}\noutputs['show ip static route'] = StaticRouteOutput.showIpv4StaticRoute_default\noutputs['show ip static route vrf VRF1'] = StaticRouteOutput.showIpv4StaticRoute_vrf1\noutputs['show ipv6 static detail'] = StaticRouteOutput.showIpv6StaticRoute_default\noutputs['show ipv6 static vrf VRF1 detail'] = StaticRouteOutput.showIpv6StaticRoute_vrf1\n\ndef mapper(key):\n return outputs[key]\n\nclass test_static_route_all(unittest.TestCase):\n\n def setUp(self):\n self.device = Device(name='aDevice')\n self.device.os = 'ios'\n self.device.custom['abstraction'] = {'order':['os']}\n self.device.mapping = {}\n self.device.mapping['cli'] = 'cli'\n self.device.connectionmgr.connections['cli'] = self.device\n\n def test_full_static_route(self):\n f = StaticRouting(device=self.device)\n f.maker.outputs[ShowVrfDetail] = {'': StaticRouteOutput.ShowVrfDetail}\n\n # Get 'show ip static route' output\n self.device.execute = Mock()\n self.device.execute.side_effect = mapper\n # Learn the feature\n f.learn()\n\n self.maxDiff = None\n self.assertEqual(f.info, StaticRouteOutput.staticRouteOpsOutput)\n\n def test_selective_attribute_static_route(self):\n f = StaticRouting(device=self.device)\n f.maker.outputs[ShowVrfDetail] = {'': StaticRouteOutput.ShowVrfDetail}\n\n # Get 'show ip static route' output\n self.device.execute = Mock()\n self.device.execute.side_effect = mapper\n # Learn the feature\n f.learn()\n # Check match\n\n self.assertEqual('GigabitEthernet0/2', f.info['vrf']['VRF1']['address_family']['ipv4']['routes']\\\n ['10.36.3.3/32']['next_hop']['outgoing_interface']['GigabitEthernet0/2']['outgoing_interface'])\n # Check does not match\n self.assertNotEqual('GigabitEthernet0/0', f.info['vrf']['VRF1']['address_family']['ipv4']['routes']\\\n ['10.36.3.3/32']['next_hop']['outgoing_interface']['GigabitEthernet0/2']['outgoing_interface'])\n\n\n def test_missing_attributes_static_route(self):\n f = StaticRouting(device=self.device)\n f.maker.outputs[ShowVrfDetail] = {'': 
StaticRouteOutput.ShowVrfDetail}\n\n # Get 'show ip static route' output\n self.device.execute = Mock()\n self.device.execute.side_effect = mapper\n # Learn the feature\n f.learn()\n\n with self.assertRaises(KeyError):\n interfaces = f.info['vrf']['VRF1']['address_family']['ipv4']['routes']\\\n ['10.36.3.3/32']['next_hop']['interface']\n\n def test_empty_output_static_route(self):\n self.maxDiff = None\n f = StaticRouting(device=self.device)\n # Get outputs\n f.maker.outputs[ShowVrfDetail] = {'': {}}\n\n outputs['show ip static route'] = ''\n outputs['show ip static route vrf VRF1'] = ''\n outputs['show ipv6 static detail'] = ''\n outputs['show ipv6 static vrf VRF1 detail'] = ''\n\n # Return outputs above as inputs to parser when called\n self.device.execute = Mock()\n self.device.execute.side_effect = mapper\n\n # Learn the feature\n f.learn()\n\n # revert back\n outputs['show ip static route'] = StaticRouteOutput.showIpv4StaticRoute_default\n outputs['show ip static route vrf VRF1'] = StaticRouteOutput.showIpv4StaticRoute_vrf1\n outputs['show ipv6 static detail'] = StaticRouteOutput.showIpv6StaticRoute_default\n outputs['show ipv6 static vrf VRF1 detail'] = StaticRouteOutput.showIpv6StaticRoute_vrf1\n\n # Check no attribute not found\n with self.assertRaises(AttributeError):\n f.info['vrf']\n\n\nif __name__ == '__main__':\n unittest.main()\n", "id": "4192609", "language": "Python", "matching_score": 6.664492607116699, "max_stars_count": 0, "path": "pkgs/ops-pkg/src/genie/libs/ops/static_routing/ios/tests/test_static_routing.py" }, { "content": "# Python\nimport unittest\n\n# Ats\nfrom ats.topology import Device\n\n# Genie package\nfrom genie.ops.base import Base\nfrom genie.ops.base.maker import Maker\n\nfrom unittest.mock import Mock\n# genie.libs\nfrom genie.libs.ops.static_routing.iosxr.static_routing import StaticRouting\nfrom genie.libs.ops.static_routing.iosxr.tests.static_routing_output import StaticRouteOutput\n\nfrom genie.libs.parser.iosxr.show_static_routing import ShowStaticTopologyDetail\n\n\nclass test_static_route_all(unittest.TestCase):\n\n def setUp(self):\n self.device = Device(name='aDevice')\n self.device.os = 'iosxr'\n self.device.mapping={}\n self.device.mapping['cli']='cli'\n self.device.connectionmgr.connections['cli'] = '5'\n\n def test_full_static_route(self):\n f = StaticRouting(device=self.device)\n # Get 'show ip static route' output\n f.maker.outputs[ShowStaticTopologyDetail] = {'': StaticRouteOutput.showStaticRouteTopologyDetail}\n self.device.execute = Mock()\n # Learn the feature\n f.learn()\n\n self.maxDiff = None\n self.assertEqual(f.info, StaticRouteOutput.staticRouteOpsOutput)\n\n def test_selective_attribute_static_route(self):\n f = StaticRouting(device=self.device)\n\n # Get 'show ipv4 static route' output\n f.maker.outputs[ShowStaticTopologyDetail] = {'': StaticRouteOutput.showStaticRouteTopologyDetail}\n # Learn the feature\n f.learn()\n # Check match\n\n self.assertEqual('Null0', f.info['vrf']['VRF1']['address_family']['ipv6']['routes']\\\n ['2001:1:1:1::1/128']['next_hop']['outgoing_interface']['Null0']['outgoing_interface'])\n # Check does not match\n self.assertNotEqual(5, f.info['vrf']['default']['address_family']['ipv6']['routes']\\\n ['2001:1:1:1::1/128']['next_hop']['next_hop_list'][1]['index'])\n\n\n\n def test_missing_attributes_static_route(self):\n f = StaticRouting(device=self.device)\n f.maker.outputs[ShowStaticTopologyDetail] = {'': StaticRouteOutput.showStaticRouteTopologyDetail}\n\n # Learn the feature\n f.learn()\n\n with 
self.assertRaises(KeyError):\n interfaces = f.info['vrf']['VRF1']['address_family']['ipv4']['routes']\\\n ['10.4.1.1/32']['next_hop']['next_hop_vrf']\n\n def test_empty_output_static_route(self):\n self.maxDiff = None\n f = StaticRouting(device=self.device)\n\n # Get outputs\n f.maker.outputs[ShowStaticTopologyDetail] = {'': {}}\n\n # Learn the feature\n f.learn()\n\n # Check no attribute not found\n with self.assertRaises(AttributeError):\n f.info['vrf']\n\n\nif __name__ == '__main__':\n unittest.main()\n", "id": "8254201", "language": "Python", "matching_score": 5.970210552215576, "max_stars_count": 0, "path": "pkgs/ops-pkg/src/genie/libs/ops/static_routing/iosxr/tests/test_static_routing.py" }, { "content": "# Python\nimport unittest\n\n# Ats\nfrom ats.topology import Device\n\nfrom unittest.mock import Mock\n# genie.libs\nfrom genie.libs.ops.routing.iosxr.routing import Routing\nfrom genie.libs.ops.routing.iosxr.tests.routing_output import RoutingOutput\n\nfrom genie.libs.parser.iosxr.show_routing import ShowRouteIpv4, ShowRouteIpv6\n\n\nclass test_route_all(unittest.TestCase):\n\n def setUp(self):\n self.device = Device(name='aDevice')\n self.device.os = 'iosxr'\n self.device.mapping={}\n self.device.mapping['cli']='cli'\n self.device.connectionmgr.connections['cli'] = '5'\n\n def test_full_route(self):\n f = Routing(device=self.device)\n # Get 'show ip static route' output\n f.maker.outputs[ShowRouteIpv4] = {'': RoutingOutput.showRouteIpv4}\n f.maker.outputs[ShowRouteIpv6] = {'': RoutingOutput.showRouteIpv6}\n self.device.execute = Mock()\n # Learn the feature\n f.learn()\n self.maxDiff = None\n self.assertEqual(f.info, RoutingOutput.showRouteOpsOutput)\n\n\n def test_selective_attribute_route(self):\n f = Routing(device=self.device)\n\n # Get 'show ipv4 static route' output\n f.maker.outputs[ShowRouteIpv4] = {'': RoutingOutput.showRouteIpv4}\n f.maker.outputs[ShowRouteIpv6] = {'': RoutingOutput.showRouteIpv6}\n # Learn the feature\n f.learn()\n # Check match\n\n self.assertEqual('2001:1:1:1::1/128', f.info['vrf']['default']['address_family']['ipv6']['routes']\\\n ['2001:1:1:1::1/128']['route'])\n # Check does not match\n self.assertNotEqual(5, f.info['vrf']['default']['address_family']['ipv6']['routes']\\\n ['2001:1:1:1::1/128']['next_hop']['next_hop_list'][1]['index'])\n\n\n\n def test_missing_attributes_route(self):\n f = Routing(device=self.device)\n f.maker.outputs[ShowRouteIpv4] = {'': RoutingOutput.showRouteIpv4}\n f.maker.outputs[ShowRouteIpv6] = {'': RoutingOutput.showRouteIpv6}\n\n # Learn the feature\n f.learn()\n\n with self.assertRaises(KeyError):\n interfaces = f.info['vrf']['VRF1']['address_family']['ipv4']['routes']\\\n ['10.4.1.1/32']['next_hop']['next_hop_vrf']\n\n def test_empty_output_route(self):\n self.maxDiff = None\n f = Routing(device=self.device)\n\n # Get outputs\n f.maker.outputs[ShowRouteIpv4] = {'': {}}\n f.maker.outputs[ShowRouteIpv6] = {'': {}}\n\n # Learn the feature\n f.learn()\n\n # Check no attribute not found\n with self.assertRaises(AttributeError):\n f.info['vrf']\n\nif __name__ == '__main__':\n unittest.main()\n", "id": "1396808", "language": "Python", "matching_score": 2.674525022506714, "max_stars_count": 0, "path": "pkgs/ops-pkg/src/genie/libs/ops/routing/iosxr/tests/test_routing.py" }, { "content": "'''IOSXE Implementation for routing addremove triggers'''\n\n# python\nfrom functools import partial\n\n# import genie.libs\nfrom genie.libs.sdk.libs.utils.mapping import Mapping\nfrom genie.libs.sdk.triggers.addremove.addremove import 
TriggerAddRemove\nfrom genie.libs.sdk.libs.abstracted_libs.processors import traceroute_loopback\n\n# ATS\nfrom ats import aetest\nfrom ats.utils.objects import NotExists\n\n# Which key to exclude for BGP Ops comparison\nrouting_exclude = ['maker', 'attributes']\n\n\nclass TriggerAddRemoveIpv4StaticRoutes(TriggerAddRemove):\n \"\"\"Apply the ipv4 static routing to device, and remove the\n added ipv4 static routing.\n \"\"\"\n\n __description__ = \"\"\"Apply the ipv4 static routing to device, and remove the\n added ipv4 static routing.\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iterations when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery: \n Buffer recovery timeout make sure devices are recovered at the end\n of the trigger execution. Used when previous timeouts have been exhausted.\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iterations when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n vrf: `str`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn static_routing Ops object and store the routes info if has any.\n 2. Save the current device configurations through \"method\" which user uses\n 3. Add the configuration of static routing with static_routing Conf object\n 4. Verify the static_routing from step 3 has configured\n 5. Remove the static_routing configurations.\n 6. Recover the device configurations to the one in step 2\n 7. 
Learn static_routing Ops again and verify it is the same as the Ops in step 1\n\n \"\"\"\n ADD_ROUTE = '0.0.0.0/0'\n AF = 'ipv4'\n\n # will move below to datafile when subsection \n # prepost processors passing from datafile suppported\n\n traceroute_info = [{'src': 'uut',\n 'dest': 'helper',\n 'protocol': AF,\n 'dest_route': '{next_hop}',\n 'timeout_interval': 5,\n 'timeout_max_time': 60,\n 'peer_num': 1}]\n\n @aetest.processors.post(partial(traceroute_loopback, traceroute_args=traceroute_info))\n @aetest.test\n def verify_configuration(self, uut, abstract, steps): \n\n # replace the traceroute_info if has any\n if hasattr(self, 'traceroute_info'):\n for item_args in self.traceroute_info:\n for keys in self.mapping.keys:\n item_args['dest_route'] = keys['next_hop'] \n super().verify_configuration(uut, abstract, steps)\n\n\n mapping = Mapping(requirements={'ops.static_routing.static_routing.StaticRoute':{\n 'requirements':[['info', 'vrf', '(?P<vrf>.*)',\n 'address_family', AF,\n 'routes', NotExists(ADD_ROUTE)],\n [NotExists('info')]],\n 'kwargs':{'attributes':['info[vrf][(.*)][address_family][(.*)]']},\n 'exclude': routing_exclude}},\n config_info={'conf.static_routing.StaticRouting':{\n 'requirements':[['device_attr', '{uut}',\n 'vrf_attr', 'default',\n 'address_family_attr',AF,\n 'route_attr', ADD_ROUTE,\n 'next_hop_attr', '(?P<next_hop>.*)']],\n 'verify_conf':False,\n 'kwargs':{}}},\n verify_ops={'ops.static_routing.static_routing.StaticRoute':{\n 'requirements': [['info', 'vrf', 'default', 'address_family', AF,\n 'routes', ADD_ROUTE, 'next_hop',\n 'next_hop_list', 1, 'active', True],\n\n ['info', 'vrf', 'default', 'address_family', AF,\n 'routes', ADD_ROUTE, 'next_hop',\n 'next_hop_list', 1, 'index', 1],\n\n ['info', 'vrf', 'default', 'address_family', AF,\n 'routes', ADD_ROUTE, 'next_hop',\n 'next_hop_list', 1, 'next_hop', '(?P<next_hop>.*)'],\n\n ['info', 'vrf', 'default', 'address_family', AF,\n 'routes', ADD_ROUTE, 'next_hop',\n 'next_hop_list', 1, 'preference', 1],\n\n ['info', 'vrf', 'default',\n 'address_family', AF,\n 'routes', ADD_ROUTE, 'route', ADD_ROUTE]],\n 'kwargs':{'attributes':['info[vrf][(.*)][address_family][(.*)]']},\n 'exclude': routing_exclude}},\n num_values={'vrf':1, 'route': 'all'})\n\n\nclass TriggerAddRemoveIpv6StaticRoutes(TriggerAddRemoveIpv4StaticRoutes):\n \"\"\"Apply the ipv6 static routing to device, and remove the\n added ipv6 static routing.\n \"\"\"\n\n __description__ = \"\"\"Apply the ipv6 static routing to device, and remove the\n added ipv6 static routing.\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iterations when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery: \n Buffer recovery timeout make sure devices are recovered at the end\n of the trigger execution. Used when previous timeouts have been exhausted.\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. 
Default: 180\n interval (`int`): Wait time between iterations when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n vrf: `str`\n route: `str`\n out_intf: `str`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn static_routing Ops object and store the routes info if has any.\n 2. Save the current device configurations through \"method\" which user uses\n 3. Add the configuration of static routing with static_routing Conf object\n 4. Verify the static_routing from step 3 has configured\n 5. Remove the static_routing configurations.\n 6. Recover the device configurations to the one in step 2\n 7. Learn static_routing Ops again and verify it is the same as the Ops in step 1\n\n \"\"\"\n ADD_ROUTE = '::/0'\n AF = 'ipv6'\n\n \n traceroute_info = [{'src': 'uut',\n 'dest': 'helper',\n 'protocol': AF,\n 'dest_route': '(?P<next_hop>.*)',\n 'timeout_interval': 5,\n 'timeout_max_time': 60,\n 'peer_num': 1}]\n \n @aetest.processors.post(partial(traceroute_loopback, traceroute_args=traceroute_info))\n @aetest.test\n def verify_configuration(self, uut, abstract, steps):\n super().verify_configuration(uut, abstract, steps)\n\n mapping = Mapping(requirements={'ops.static_routing.static_routing.StaticRoute':{\n 'requirements':[['info', 'vrf', '(?P<vrf>.*)',\n 'address_family', AF,\n 'routes', NotExists(ADD_ROUTE)],\n [NotExists('info')]],\n 'kwargs':{'attributes':['info[vrf][(.*)][address_family][(.*)]']},\n 'exclude': routing_exclude},\n 'ops.routing.routing.Routing':{\n 'requirements':[['info', 'vrf', '(?P<vrf>.*)',\n 'address_family', AF, 'routes',\n '(?P<route>.*)', 'source_protocol', 'connected'],\n ['info', 'vrf', '(?P<vrf>.*)',\n 'address_family', AF, 'routes',\n '(?P<route>.*)', 'next_hop', 'outgoing_interface',\n '(?P<out_intf>.*)', 'outgoing_interface', '(?P<out_intf>.*)']],\n 'kwargs': {'attributes': ['info[vrf][(.*)][address_family][ipv6][routes][(.*)]']},\n 'exclude': routing_exclude}},\n config_info={'conf.static_routing.StaticRouting':{\n 'requirements':[['device_attr', '{uut}',\n 'vrf_attr', 'default',\n 'address_family_attr',AF,\n 'route_attr', ADD_ROUTE,\n 'next_hop_attr', '(?P<next_hop>.*)']],\n 'verify_conf':False,\n 'kwargs':{}}},\n verify_ops={'ops.static_routing.static_routing.StaticRoute':{\n 'requirements': [['info', 'vrf', 'default', 'address_family', AF,\n 'routes', ADD_ROUTE, 'next_hop',\n 'next_hop_list', 1, 'active', True],\n\n ['info', 'vrf', 'default', 'address_family', AF,\n 'routes', ADD_ROUTE, 'next_hop',\n 'next_hop_list', 1, 'index', 1],\n\n ['info', 'vrf', 'default', 'address_family', AF,\n 'routes', ADD_ROUTE, 'next_hop',\n 'next_hop_list', 1, 'next_hop', '(?P<next_hop>.*)'],\n\n ['info', 'vrf', 'default', 'address_family', AF,\n 'routes', ADD_ROUTE, 'next_hop',\n 'next_hop_list', 1, 'preference', 1],\n\n ['info', 'vrf', 'default',\n 'address_family', AF,\n 'routes', ADD_ROUTE, 'route', ADD_ROUTE]],\n 'kwargs':{'attributes':['info[vrf][(.*)][address_family][(.*)]']},\n 'exclude': routing_exclude}},\n num_values={'vrf':1, 'route': 'all'})", "id": "8682728", "language": "Python", "matching_score": 5.519786357879639, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/addremove/routing/iosxe/addremove.py" }, { "content": "'''NXOS Implementation for Mld add-remove triggers'''\n\n# python\nfrom functools import 
partial\n\n# import ats\nfrom ats import aetest\nfrom ats.utils.objects import Not, NotExists\n\n# Genie Libs\nfrom genie.libs.sdk.libs.utils.mapping import Mapping\nfrom genie.libs.sdk.triggers.addremove.addremove import TriggerAddRemove\nfrom genie.libs.sdk.libs.utils.triggeractions import configure_add_attributes\nfrom genie.libs.conf.mld.mld_group import MldGroup\n\n\n# Which key to exclude for Mld Ops comparison\nmld_exclude = ['maker', 'elapsed_time', 'discontinuity_time',\n 'keepalive', 'total', 'up_time', 'expire', 'remote',\n 'last_message_received', 'num_of_comparison', 'rpf_failure',\n 'total_accept_count', 'total_reject_count', 'notification']\n\n# Which key to exclude for Interface Ops comparison\ninterface_exclude = ['maker', 'last_change','in_rate','in_rate_pkts',\n 'out_rate', 'out_rate_pkts', 'in_octets',\n 'in_pkts', 'in_unicast_pkts', 'out_octets',\n 'out_pkts', 'out_unicast_pkts', 'out_multicast_pkts',\n 'in_multicast_pkts', 'last_clear', 'in_broadcast_pkts',\n 'out_broadcast_pkts', 'bandwidth', 'load_interval',\n 'port_speed', 'in_crc_errors', 'in_discards',\n 'unnumbered', '(Tunnel.*)', 'accounting']\n \n\nclass TriggerAddRemoveMldEnable(TriggerAddRemove):\n \"\"\"Apply the Mld interface enable, add remove added Mld interface enable\"\"\"\n\n __description__ = \"\"\"Apply the Mld interface enable, add remove added Mld interface enable.\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery: \n Buffer recovery timeout make sure devices are recovered at the end\n of the trigger execution. Used when previous timeouts have been exhausted.\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n interface: `str`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn Mld Ops/Conf object and store the Mld interface enable, learn Interface ops to \n get interface with ip address and not same to the existing mld interface.\n 2. Save the current device configurations through \"method\" which user uses\n 3. Add the configuration of interface as mld originator-id with Mld Conf object\n 4. Verify the mld interface enable from step 3 has configured\n 5. Remove the mld interface enable configurations.\n 6. Recover the device configurations to the one in step 2\n 7. 
Learn Mld Ops again and verify it is the same as the Ops in step 1\n \"\"\"\n\n @aetest.setup\n def verify_prerequisite(self, uut, abstract, steps, timeout):\n # learn existing mld interfaces\n super().verify_prerequisite(uut, abstract, steps, timeout)\n mld_keys = self.mapping.keys\n\n\n # learn interafce ops to get ipv6 up interfaces\n self.mapping.requirements = {}\n self.mapping.requirements['ops.interface.interface.Interface'] = \\\n {'requirements':[['info', '(?P<interface>^(?!mgmt).*)', 'ipv6',\n '(?P<ip>.*)', 'ip', '(?P<address>.*)'],\n ['info', '(?P<interface>.*)', 'vrf',\n '(?P<add_mld_intf_vrf>.*)']],\n 'all_keys': True,\n 'kwargs':{'attributes': [\n 'info[(.*)][ipv6][(.*)][ip]',\n 'info[(.*)][vrf]']},\n 'exclude': interface_exclude}\n super().verify_prerequisite(uut, abstract, steps, timeout)\n intf_keys = self.mapping.keys\n\n # find interface\n with steps.start(\"Extracting ipv6 interfaces \"\n \"which are not igmp interfaces\") as step:\n add_keys = {}\n for item in intf_keys:\n if all(item['interface'] not in \\\n i['interface'] for i in mld_keys):\n # attach the add value to mapping keys\n add_keys.update({'add_mld_intf': item['interface'],\n 'add_mld_intf_vrf': item['add_mld_intf_vrf']})\n break\n\n if not add_keys:\n step.skipped('Could not find up ipv6 interface which is '\n 'not existed igmp interface')\n self.skipped('Could not find up ipv6 interface which is not '\n 'existed mld interface', goto=['next_tc'])\n\n self.mapping.keys = [add_keys]\n\n # Mapping of Information between Ops and Conf\n # Also permit to dictate which key to verify\n mapping = Mapping(requirements={'ops.mld.mld.Mld':{\n 'requirements':[\\\n ['info', 'vrfs', '(?P<vrf>.*)', 'interfaces',\n '(?P<interface>.*)']],\n 'kwargs':{'attributes': [\n 'info[vrfs][(.*)][interfaces]']},\n 'exclude': mld_exclude}},\n config_info={'conf.mld.Mld':{\n 'requirements':[\n ['device_attr', '{uut}', 'vrf_attr', '(?P<add_mld_intf_vrf>.*)',\n 'interface_attr', '(?P<add_mld_intf>.*)', 'enable', True]],\n 'verify_conf':False,\n 'kwargs':{}}},\n verify_ops={'ops.mld.mld.Mld':{\n 'requirements':[\\\n ['info', 'vrfs', '(?P<add_mld_intf_vrf>.*)', 'interfaces',\n '(?P<add_mld_intf>.*)', 'enable', True]],\n 'kwargs':{'attributes': ['info[vrfs][(.*)][interfaces]']},\n 'exclude': mld_exclude}},\n num_values={'vrf': 'all', 'interface': 'all'})\n\n\nclass TriggerAddRemoveMldVersion(TriggerAddRemove):\n \"\"\"Apply Mld interface version, and remove added Mld interface version\"\"\"\n\n __description__ = \"\"\"Apply Mld interface version, and remove added Mld interface version.\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery: \n Buffer recovery timeout make sure devices are recovered at the end\n of the trigger execution. Used when previous timeouts have been exhausted.\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. 
Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n interface: `str`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn Mld Ops object and store the Mld interface(s) whose version is the default value.\n 2. Save the current device configurations through \"method\" which user uses\n 3. Add the configuration of mld interface version with Mld Conf object\n 4. Verify the mld interface version from step 3 has configured\n 5. Remove the mld interface version configurations.\n 6. Recover the device configurations to the one in step 2\n 7. Learn Mld Ops again and verify it is the same as the Ops in step 1\n\n \"\"\"\n ADD_VERSION = 1\n # Mapping of Information between Ops and Conf\n # Also permit to dictate which key to verify\n mapping = Mapping(requirements={\n 'ops.mld.mld.Mld':{\n 'requirements':[\\\n ['info', 'vrfs', '(?P<vrf>.*)', 'interfaces',\n '(?P<interface>.*)', 'enable', True],\n ['info', 'vrfs', '(?P<vrf>.*)', 'interfaces',\n '(?P<interface>.*)', 'version', '(?P<version>2)']],\n 'all_keys': True,\n 'kwargs':{'attributes': [\n 'info[vrfs][(.*)]']},\n 'exclude': mld_exclude}},\n config_info={'conf.mld.Mld':{\n 'requirements':[\n ['device_attr', '{uut}', 'vrf_attr', '(?P<vrf>.*)',\n 'interface_attr', '(?P<interface>.*)', 'version',\n ADD_VERSION]],\n 'verify_conf':False,\n 'kwargs':{}}},\n verify_ops={\n 'ops.mld.mld.Mld':{\n 'requirements':[\\\n ['info', 'vrfs', '(?P<vrf>.*)', 'interfaces',\n '(?P<interface>.*)', 'version', ADD_VERSION]],\n 'kwargs':{'attributes': [\n 'info[vrfs][(.*)]']},\n 'exclude': mld_exclude}},\n num_values={'vrf': 1, 'interface': 1})\n\n\nclass TriggerAddRemoveMldJoinGroup(TriggerAddRemove):\n \"\"\"Apply the Mld interface join-group, and remove added Mld interface join-group\"\"\"\n\n __description__ = \"\"\"Apply the Mld interface join-group, and\n remove added Mld interface join-group.\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n add_mld_group: Trigger yaml file customized add mld join-group. \n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery: \n Buffer recovery timeout make sure devices are recovered at the end\n of the trigger execution. Used when previous timeouts have been exhausted.\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. 
Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n interface: `str`\n source: `str`\n join_group: `str`\n group: `str`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn Mld Ops object and store the Mld interface(s) which does not have added join-group.\n 2. Save the current device configurations through \"method\" which user uses\n 3. Add the configuration of mld interface join-group with Mld Conf object\n 4. Verify the mld interface join-group from step 3 has configured\n 5. Remove the mld interface join-group configurations.\n 6. Recover the device configurations to the one in step 2\n 7. Learn Mld Ops again and verify it is the same as the Ops in step 1\n\n \"\"\"\n ADD_JOIN_GROUP = 'fdf8:f53e:61e4::18 *'\n\n @aetest.setup\n def verify_prerequisite(self, uut, abstract, steps, timeout, add_mld_group=None):\n\n self.timeout = timeout\n\n try:\n self.pre_snap = self.mapping.learn_ops(device=uut,\n abstract=abstract,\n steps=steps,\n timeout=timeout)\n except Exception as e:\n self.errored(\"Section failed due to: '{e}'\".format(e=e))\n\n # nothing in the static groups,\n # then learn if any groups to have some vrf interface value\n if any(not item for item in self.mapping.keys):\n self.mapping.requirements['ops.mld.mld.Mld']['requirements'] = \\\n [['info', 'vrfs', '(?P<vrf>.*)', 'interfaces',\n '(?P<interface>.*)', 'group', '(?P<group>.*)', '(?P<dummy>.*)']] # incase there is nothing learned\n\n try:\n self.pre_snap = self.mapping.learn_ops(device=uut,\n abstract=abstract,\n steps=steps,\n timeout=timeout)\n except Exception as e:\n self.errored(\"Section failed due to: '{e}'\".format(e=e))\n\n if any(not item for item in self.mapping.keys):\n self.skipped('Cannot learn the feature', goto=['next_tc'])\n\n with steps.start(\"Check if added group %s not in the \"\n \"existing groups\" % self.ADD_JOIN_GROUP) as step:\n\n # learn mld ops to have existing mld interfaces\n if any(self.ADD_JOIN_GROUP in \\\n item.get('join_group', '') for item in self.mapping.keys):\n if add_mld_group:\n self.ADD_JOIN_GROUP = add_mld_group\n else:\n self.skipped('Could not find join_group does not '\n 'include the added group %s' % self.ADD_JOIN_GROUP, goto=['next_tc'])\n\n # attach the add value to mapping keys\n [item.update({'add_mld_group': self.ADD_JOIN_GROUP.split()[0],\n 'add_mld_source': self.ADD_JOIN_GROUP.split()[1],\n 'add_mld_group_key': self.ADD_JOIN_GROUP}) for item in self.mapping.keys]\n\n step.passed('Will add group %s' % self.ADD_JOIN_GROUP)\n\n # Mapping of Information between Ops and Conf\n # Also permit to dictate which key to verify\n mapping = Mapping(requirements={\n 'ops.mld.mld.Mld':{\n 'requirements':[\\\n ['info', 'vrfs', '(?P<vrf>.*)', 'interfaces',\n '(?P<interface>.*)', 'join_group', '(?P<join_group>.*)',\n 'group', '(?P<group>.*)'],\n ['info', 'vrfs', '(?P<vrf>.*)', 'interfaces',\n '(?P<interface>.*)', 'join_group', '(?P<join_group>.*)',\n 'source', '(?P<source>\\*)'],\n ['info', 'vrfs', '(?P<vrf>.*)', 'interfaces',\n '(?P<interface>.*)', 'group', '(?P<group>.*)', '(?P<dummy>.*)'], # incase there is nothing learned\n ],\n 'all_keys': True,\n 'kwargs':{'attributes': [\n 'info[vrfs][(.*)][interfaces][(.*)][join_group][(.*)]',\n 'info[vrfs][(.*)][interfaces][(.*)][static_group][(.*)]',\n 'info[vrfs][(.*)][interfaces][(.*)][group][(.*)]']},\n 'exclude': mld_exclude}},\n 
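# config_info below builds the added configuration through a callable\n # (configure_add_attributes) that fills join_group and join_group_source_addr\n # from the vrf/interface keys learnt above\n 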
config_info={'conf.mld.Mld':{\n 'requirements':[\n [partial(configure_add_attributes, # callable configuration\n add_obj=MldGroup,\n base=[['device_attr', '{uut}', 'vrf_attr', '(?P<vrf>.*)',\n 'interface_attr', '(?P<interface>.*)']],\n add_attribute=[['join_group', '(?P<add_mld_group>.*)'],\n ['join_group_source_addr', '(?P<add_mld_source>.*)'],],\n add_method='add_groups',\n )]],\n 'verify_conf':False,\n 'kwargs':{}}},\n verify_ops={\n 'ops.mld.mld.Mld':{\n 'requirements':[\\\n ['info', 'vrfs', '(?P<vrf>.*)', 'interfaces',\n '(?P<interface>.*)', 'join_group', '(?P<add_mld_group_key>.*)',\n 'group', '(?P<add_mld_group>.*)'],\n ['info', 'vrfs', '(?P<vrf>.*)', 'interfaces',\n '(?P<interface>.*)', 'join_group', '(?P<add_mld_group_key>.*)',\n 'source', '(?P<add_mld_source>.*)'],\n ['info', 'vrfs', '(?P<vrf>.*)', 'interfaces',\n '(?P<interface>.*)', 'group', '(?P<add_mld_group>.*)', '(.*)']],\n 'kwargs':{'attributes': [\n 'info[vrfs][(.*)][interfaces][(.*)][join_group][(.*)]',\n 'info[vrfs][(.*)][interfaces][(.*)][static_group][(.*)]',\n 'info[vrfs][(.*)][interfaces][(.*)][group][(.*)]']},\n 'exclude': mld_exclude}},\n num_values={'vrf': 1, 'interface': 1, 'join_group': 1, 'group': 1, 'source': 1})\n\n\nclass TriggerAddRemoveMldStaticGroup(TriggerAddRemove):\n \"\"\"Apply the Mld interface static-group, and remove added Mld interface static-group\"\"\"\n\n __description__ = \"\"\"Apply the Mld interface static-group,\n and remove added Mld interface static-group.\n\n trigger_datafile:\n Mandatory:\n timeout: \n max_time (`int`): Maximum wait time for the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n method (`str`): Method to recover the device configuration,\n Support methods:\n 'checkpoint': Rollback the configuration by\n checkpoint (nxos),\n archive file (iosxe),\n load the saved running-config file on disk (iosxr)\n Optional:\n tgn_timeout (`int`): Maximum wait time for all traffic threads to be\n restored to the reference rate,\n in second. Default: 60\n tgn_delay (`int`): Wait time between each poll to verify if traffic is resumed,\n in second. Default: 10\n timeout_recovery: \n Buffer recovery timeout make sure devices are recovered at the end\n of the trigger execution. Used when previous timeouts have been exhausted.\n\n max_time (`int`): Maximum wait time for the last step of the trigger,\n in second. Default: 180\n interval (`int`): Wait time between iteration when looping is needed,\n in second. Default: 15\n static:\n The keys below are dynamically learnt by default.\n However, they can also be set to a custom value when provided in the trigger datafile.\n\n interface: `str`\n source: `str`\n static_group: `str`\n group: `str`\n\n (e.g) interface: '(?P<interface>Ethernet1*)' (Regex supported)\n OR\n interface: 'Ethernet1/1/1' (Specific value)\n steps:\n 1. Learn Mld Ops object and store the Mld interface(s) which does not have added static-group.\n 2. Save the current device configurations through \"method\" which user uses\n 3. Add the configuration of mld interface static-group with Mld Conf object\n 4. Verify the mld interface static-group from step 3 has configured\n 5. Remove the mld interface static-group configurations.\n 6. Recover the device configurations to the one in step 2\n 7. 
Learn Mld Ops again and verify it is the same as the Ops in step 1\n\n \"\"\"\n ADD_STATIC_GROUP = 'fdf8:f53e:61e4::18 *'\n\n @aetest.setup\n def verify_prerequisite(self, uut, abstract, steps, timeout, add_mld_group=None):\n\n self.timeout = timeout\n\n try:\n self.pre_snap = self.mapping.learn_ops(device=uut,\n abstract=abstract,\n steps=steps,\n timeout=timeout)\n except Exception as e:\n self.errored(\"Section failed due to: '{e}'\".format(e=e))\n\n # nothing in the static groups,\n # then learn if any groups to have some vrf interface value\n if any(not item for item in self.mapping.keys):\n self.mapping.requirements['ops.mld.mld.Mld']['requirements'] = \\\n [['info', 'vrfs', '(?P<vrf>.*)', 'interfaces',\n '(?P<interface>.*)', 'group', '(?P<group>.*)', '(?P<dummy>.*)']] # incase there is nothing learned\n\n try:\n self.pre_snap = self.mapping.learn_ops(device=uut,\n abstract=abstract,\n steps=steps,\n timeout=timeout)\n except Exception as e:\n self.errored(\"Section failed due to: '{e}'\".format(e=e))\n\n if any(not item for item in self.mapping.keys):\n self.skipped('Cannot learn the feature', goto=['next_tc'])\n\n with steps.start(\"Check if added group %s not in the \"\n \"existing groups\" % self.ADD_STATIC_GROUP) as step:\n\n # learn mld ops to have existing mld interfaces\n if any(self.ADD_STATIC_GROUP in \\\n item.get('static_group', '') for item in self.mapping.keys):\n if add_mld_group:\n self.ADD_STATIC_GROUP = add_mld_group\n else:\n self.skipped('Could not find static_group does not '\n 'include the added group %s' % self.ADD_STATIC_GROUP, goto=['next_tc'])\n\n # attach the add value to mapping keys\n [item.update({'add_mld_group': self.ADD_STATIC_GROUP.split()[0],\n 'add_mld_source': self.ADD_STATIC_GROUP.split()[1],\n 'add_mld_group_key': self.ADD_STATIC_GROUP}) for item in self.mapping.keys]\n\n step.passed('Will add group %s' % self.ADD_STATIC_GROUP)\n\n # Mapping of Information between Ops and Conf\n # Also permit to dictate which key to verify\n mapping = Mapping(requirements={\n 'ops.mld.mld.Mld':{\n 'requirements':[\\\n ['info', 'vrfs', '(?P<vrf>.*)', 'interfaces',\n '(?P<interface>.*)', 'static_group', '(?P<static_group>.*)',\n 'group', '(?P<group>.*)']],\n 'kwargs':{'attributes': [\n 'info[vrfs][(.*)][interfaces][(.*)][join_group][(.*)]',\n 'info[vrfs][(.*)][interfaces][(.*)][static_group][(.*)]',\n 'info[vrfs][(.*)][interfaces][(.*)][group][(.*)]']},\n 'exclude': mld_exclude}},\n config_info={'conf.mld.Mld':{\n 'requirements':[\n [partial(configure_add_attributes, # callable configuration\n add_obj=MldGroup,\n base=[['device_attr', '{uut}', 'vrf_attr', '(?P<vrf>.*)',\n 'interface_attr', '(?P<interface>.*)']],\n add_attribute=[['static_group', '(?P<add_mld_group>.*)'],\n ['static_group_source_addr', '(?P<add_mld_source>.*)'],],\n add_method='add_groups',\n )]],\n 'verify_conf':False,\n 'kwargs':{}}},\n verify_ops={\n 'ops.mld.mld.Mld':{\n 'requirements':[\\\n ['info', 'vrfs', '(?P<vrf>.*)', 'interfaces',\n '(?P<interface>.*)', 'static_group', '(?P<add_mld_group_key>.*)',\n 'group', '(?P<add_mld_group>.*)'],\n ['info', 'vrfs', '(?P<vrf>.*)', 'interfaces',\n '(?P<interface>.*)', 'static_group', '(?P<add_mld_group_key>.*)',\n 'source', '(?P<add_mld_source>.*)'],\n ['info', 'vrfs', '(?P<vrf>.*)', 'interfaces',\n '(?P<interface>.*)', 'group', '(?P<add_mld_group>.*)', '(.*)']],\n 'kwargs':{'attributes': [\n 'info[vrfs][(.*)][interfaces][(.*)][join_group][(.*)]',\n 'info[vrfs][(.*)][interfaces][(.*)][static_group][(.*)]',\n 
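# the generic group branch is learnt as well so the added static-group\n # can also be matched under info[vrfs][(.*)][interfaces][(.*)][group]\n 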
'info[vrfs][(.*)][interfaces][(.*)][group][(.*)]']},\n 'exclude': mld_exclude}},\n num_values={'vrf': 1, 'interface': 1, 'static_group': 1, 'group': 1, 'source': 1})\n", "id": "2171909", "language": "Python", "matching_score": 3.8687658309936523, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/addremove/mld/nxos/addremove.py" }, { "content": "# Genie\r\nfrom genie.ops.base import Base\r\n\r\n\r\nclass Interface(Base):\r\n exclude = ['in_discards',\r\n 'in_octets',\r\n 'in_pkts',\r\n 'last_clear',\r\n 'out_octets',\r\n 'out_pkts',\r\n 'in_rate',\r\n 'out_rate',\r\n 'in_errors',\r\n 'in_crc_errors',\r\n 'in_rate_pkts',\r\n 'out_rate_pkts',\r\n 'in_broadcast_pkts',\r\n 'out_broadcast_pkts',\r\n 'in_multicast_pkts',\r\n 'out_multicast_pkts',\r\n 'in_unicast_pkts',\r\n 'out_unicast_pkts',\r\n 'last_change',\r\n 'mac_address',\r\n 'phys_address',\r\n '((t|T)unnel.*)',\r\n '(Null.*)',\r\n 'chars_out',\r\n 'chars_in',\r\n 'pkts_out',\r\n 'pkts_in',\r\n 'mgmt0']", "id": "5458383", "language": "Python", "matching_score": 2.290924310684204, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/interface/interface.py" }, { "content": "'''Common implementation for interface clear triggers'''\n\n# python\nfrom functools import partial\n\n# genie libs\nfrom genie.libs.sdk.libs.utils.mapping import Mapping\nfrom genie.libs.sdk.triggers.clear.clear import TriggerClear, verify_clear_callable\nfrom genie.libs.sdk.libs.utils.triggeractions import CompareCounters, CompareUptime\n\n# List of values to exclude for interface ops\ninterface_exclude = ['maker', 'last_change','in_rate','in_rate_pkts',\n 'out_rate', 'out_rate_pkts', 'in_octets',\n 'in_pkts', 'in_unicast_pkts', 'out_octets',\n 'out_pkts', 'out_unicast_pkts', 'out_multicast_pkts',\n 'in_multicast_pkts', 'last_clear', 'in_broadcast_pkts',\n 'out_broadcast_pkts', 'in_crc_errors', 'in_errors',\n 'accounting']\n\n\nclass TriggerClearCounters(TriggerClear):\n\n # Argument with dynamic value for verify callable\n # As verify callable can be re-used in multiple triggers\n # with different variable names. 
This dictionary is used to map\n # dynamic argument name to actual script argument name\n # <expected argument_name for callable>: <script argument name>\n \n verify_func_args_in={'r_obj': [['info', '(?P<interface>.*)',\n 'counters', 'in_pkts', '(.*)']],\n 'relation': '<',\n 'threshold_counter': '(?P<in_pkts>.*)',\n 'ops': 'ops'}\n \n verify_func_args_out={'r_obj': [['info', '(?P<interface>.*)',\n 'counters', 'out_pkts', '(.*)']],\n 'relation': '<',\n 'threshold_counter': '(?P<out_pkts>.*)',\n 'ops': 'ops'}\n\n verify_func_args={'r_obj': [['info', '(?P<interface>.*)',\n 'counters', 'last_clear', '(.*)']],\n 'relation': '<',\n 'threshold_time': 'compare_time',\n 'ops': 'ops'}\n\n mapping = Mapping(requirements={'ops.interface.interface.Interface':{\n 'requirements':[\\\n ['info', '(?P<interface>[\\w\\-\\/]+$)', 'enabled', True],\n ['info', '(?P<interface>[\\w\\-\\/]+$)', 'oper_status', 'up'],\n ['info', '(?P<interface>[\\w\\-\\/]+$)', 'counters',\n 'in_pkts', '(?P<in_pkts>.*)'],\n ['info', '(?P<interface>[\\w\\-\\/]+$)', 'counters',\n 'out_pkts', '(?P<out_pkts>.*)']],\n 'all_keys': True,\n 'kwargs': {'attributes': \\\n ['info[(.*)][enabled]',\n 'info[(.*)][counters]']},\n 'exclude': interface_exclude}},\n verify_ops={'ops.interface.interface.Interface':{\n 'requirements':[[partial(verify_clear_callable,\n verify_func=CompareCounters.compare_counter,\n verify_func_args=verify_func_args_in)],\n [partial(verify_clear_callable,\n verify_func=CompareCounters.compare_counter,\n verify_func_args=verify_func_args_out)],\n [partial(verify_clear_callable,\n verify_func=CompareUptime.compare_uptime,\n verify_func_args=verify_func_args)]],\n 'kwargs': {'attributes': \\\n ['info[(.*)][enabled]',\n 'info[(.*)][counters]']},\n 'exclude': interface_exclude}},\n num_values={'interface': 'all'})\n", "id": "1491493", "language": "Python", "matching_score": 4.346866607666016, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/clear/interface/clear.py" }, { "content": "'''Common implementation for routing clear triggers'''\n\n# python\nfrom functools import partial\n\n# genie libs\nfrom genie.libs.sdk.libs.utils.mapping import Mapping\nfrom genie.libs.sdk.triggers.clear.clear import TriggerClear, verify_clear_callable\nfrom genie.libs.sdk.libs.utils.triggeractions import CompareUptime, CompareCounters\n\n# Ignore keys when doing the diff with Ops objects for save_snapshot and\n# verify_clear, it will be used for LearnPollDiff.ops_diff callable\nexclude = ['maker', 'elapsed_time', 'discontinuity_time',\n 'keepalive', 'total', 'up_time', 'expire', 'remote',\n 'last_message_received', 'num_of_comparison', 'rpf_failure',\n 'total_accept_count', 'total_reject_count', 'notification']\n\n\nclass TriggerClearMsdpPeer(TriggerClear):\n\n # Argument with dynamic value for verify callable\n # As verify callable can be re-used in multiple triggers\n # with different variable names. 
This dictionary is used to map\n # dynamic argument name to actual script argument name\n # <expected argument_name for callable>: <script argument name>\n verify_func_args={'r_obj': [['info', 'vrf', '(?P<vrf>.*)', 'peer', '(?P<peer>.*)', 'elapsed_time', '(.*)']],\n 'relation': '<',\n 'threshold_time': 'compare_time',\n 'ops': 'ops'}\n\n mapping = Mapping(requirements={'ops.msdp.msdp.Msdp':{\n 'requirements':[\\\n ['info', 'vrf', '(?P<vrf>.*)', 'peer',\n '(?P<peer>.*)', 'session_state', 'established']],\n 'kwargs':{'attributes': [\n 'info[vrf][(.*)][peer][(.*)]']},\n 'exclude': exclude}},\n verify_ops={'ops.msdp.msdp.Msdp':{\n 'requirements':[[partial(verify_clear_callable,\n verify_func=CompareUptime.compare_uptime,\n verify_func_args=verify_func_args)]],\n 'kwargs':{'attributes': ['info[vrf][(.*)][peer][(.*)]']},\n 'exclude': exclude}},\n num_values={'vrf': 'all', 'peer':'all'})\n\n\nclass TriggerClearMsdpStatistics(TriggerClear):\n\n # Argument with dynamic value for verify callable\n # As verify callable can be re-used in multiple triggers\n # with different variable names. This dictionary is used to map\n # dynamic argument name to actual script argument name\n # <expected argument_name for callable>: <script argument name>\n \n verify_func_args={'r_obj': [['info', 'vrf', '(?P<vrf>.*)', 'peer',\n '(?P<peer>.*)', 'statistics', 'discontinuity_time', '(.*)']],\n 'relation': '<',\n 'threshold_time': 'compare_time',\n 'ops': 'ops'}\n\n mapping = Mapping(requirements={'ops.msdp.msdp.Msdp':{\n 'requirements':[\\\n ['info', 'vrf', '(?P<vrf>.*)', 'peer',\n '(?P<peer>.*)', 'session_state', 'established']],\n 'kwargs':{'attributes': [\n 'info[vrf][(.*)][peer][(.*)]']},\n 'exclude': exclude}},\n verify_ops={'ops.msdp.msdp.Msdp':{\n 'requirements':[[partial(verify_clear_callable,\n verify_func=CompareUptime.compare_uptime,\n verify_func_args=verify_func_args)]],\n 'kwargs':{'attributes': ['info[vrf][(.*)][peer][(.*)]']},\n 'exclude': exclude}},\n num_values={'vrf': 'all', 'peer':'all'})\n\n\nclass TriggerClearMsdpPolicyStatisticsSaPolicyIn(TriggerClear):\n\n # Argument with dynamic value for verify callable\n # As verify callable can be re-used in multiple triggers\n # with different variable names. 
This dictionary is used to map\n # dynamic argument name to actual script argument name\n # <expected argument_name for callable>: <script argument name>\n \n verify_func_args={'r_obj': [['info', 'vrf', '(?P<vrf>.*)', 'peer',\n '(?P<peer>.*)', 'statistics', 'sa_policy',\n 'in', '(?P<sa_filter_in>.*)', '(?P<match>.*)',\n 'num_of_comparison', '(.*)']],\n 'relation': '<',\n 'threshold_counter': '(?P<num>.*)',\n 'ops': 'ops'}\n\n mapping = Mapping(requirements={'ops.msdp.msdp.Msdp':{\n 'requirements':[\\\n ['info', 'vrf', '(?P<vrf>.*)', 'peer',\n '(?P<peer>.*)', 'session_state', 'established'],\n ['info', 'vrf', '(?P<vrf>.*)', 'peer',\n '(?P<peer>.*)', 'statistics', 'sa_policy', 'in', '(?P<sa_filter_in>.*)',\n '(?P<match>.*)', 'num_of_comparison', '(?P<num>^(?!0).*)']],\n 'all_keys': True,\n 'kwargs':{'attributes': [\n 'info[vrf][(.*)][peer][(.*)][session_state]',\n 'info[vrf][(.*)][peer][(.*)][statistics][sa_policy][in]']},\n 'exclude': exclude}},\n verify_ops={'ops.msdp.msdp.Msdp':{\n 'requirements':[[partial(verify_clear_callable,\n verify_func=CompareCounters.compare_counter,\n verify_func_args=verify_func_args)]],\n 'kwargs':{'attributes': ['info[vrf][(.*)][peer][(.*)][session_state]',\n 'info[vrf][(.*)][peer][(.*)][statistics][sa_policy][in]']},\n 'exclude': exclude}},\n num_values={'vrf': 'all', 'peer':'all', 'num': 1})\n\n\nclass TriggerClearMsdpPolicyStatisticsSaPolicyOut(TriggerClear):\n\n # Argument with dynamic value for verify callable\n # As verify callable can be re-used in multiple triggers\n # with different variable names. This dictionary is used to map\n # dynamic argument name to actual script argument name\n # <expected argument_name for callable>: <script argument name>\n \n verify_func_args={'r_obj': [['info', 'vrf', '(?P<vrf>.*)', 'peer',\n '(?P<peer>.*)', 'statistics', 'sa_policy',\n 'out', '(?P<sa_filter_out>.*)', '(?P<match>.*)',\n 'num_of_comparison', '(.*)']],\n 'relation': '<',\n 'threshold_counter': '(?P<num>.*)',\n 'ops': 'ops'}\n\n mapping = Mapping(requirements={'ops.msdp.msdp.Msdp':{\n 'requirements':[\\\n ['info', 'vrf', '(?P<vrf>.*)', 'peer',\n '(?P<peer>.*)', 'session_state', 'established'],\n ['info', 'vrf', '(?P<vrf>.*)', 'peer',\n '(?P<peer>.*)', 'statistics', 'sa_policy', 'out', '(?P<sa_filter_out>.*)',\n '(?P<match>.*)', 'num_of_comparison', '(?P<num>^(?!0).*)']],\n 'all_keys': True,\n 'kwargs':{'attributes': [\n 'info[vrf][(.*)][peer][(.*)][session_state]',\n 'info[vrf][(.*)][peer][(.*)][statistics][sa_policy][out]']},\n 'exclude': exclude}},\n verify_ops={'ops.msdp.msdp.Msdp':{\n 'requirements':[[partial(verify_clear_callable,\n verify_func=CompareCounters.compare_counter,\n verify_func_args=verify_func_args)]],\n 'kwargs':{'attributes': ['info[vrf][(.*)][peer][(.*)][session_state]',\n 'info[vrf][(.*)][peer][(.*)][statistics][sa_policy][out]']},\n 'exclude': exclude}},\n num_values={'vrf': 'all', 'peer':'all', 'num': 1})\n \n\nclass TriggerClearMsdpSaCache(TriggerClear):\n\n # Argument with dynamic value for verify callable\n # As verify callable can be re-used in multiple triggers\n # with different variable names. 
This dictionary is used to map\n # dynamic argument name to actual script argument name\n # <expected argument_name for callable>: <script argument name>\n \n verify_func_args={'r_obj': [['info', 'vrf', '(?P<vrf>.*)',\n 'sa_cache', '(?P<sa>.*)', 'up_time', '(.*)']],\n 'relation': '<',\n 'threshold_time': 'compare_time',\n 'ops': 'ops'}\n\n mapping = Mapping(requirements={'ops.msdp.msdp.Msdp':{\n 'requirements':[\\\n ['info', 'vrf', '(?P<vrf>.*)', 'sa_cache',\n '(?P<sa>.*)', 'group', '(?P<group>.*)']],\n 'kwargs':{'attributes': [\n 'info[vrf][(.*)][sa_cache][(.*)]']},\n 'exclude': exclude}},\n verify_ops={'ops.msdp.msdp.Msdp':{\n 'requirements':[[partial(verify_clear_callable,\n verify_func=CompareUptime.compare_uptime,\n verify_func_args=verify_func_args)]],\n 'kwargs':{'attributes': ['info[vrf][(.*)][sa_cache][(.*)]']},\n 'exclude': exclude}},\n num_values={'vrf': 'all', 'group':'all'})\n \n\nclass TriggerClearMsdpRoute(TriggerClear):\n\n # Argument with dynamic value for verify callable\n # As verify callable can be re-used in multiple triggers\n # with different variable names. This dictionary is used to map\n # dynamic argument name to actual script argument name\n # <expected argument_name for callable>: <script argument name>\n \n verify_func_args={'r_obj': [['info', 'vrf', '(?P<vrf>.*)',\n 'sa_cache', '(?P<sa>.*)', 'up_time', '(.*)']],\n 'relation': '<',\n 'threshold_time': 'compare_time',\n 'ops': 'ops'}\n\n mapping = Mapping(requirements={'ops.msdp.msdp.Msdp':{\n 'requirements':[\\\n ['info', 'vrf', '(?P<vrf>.*)', 'sa_cache',\n '(?P<sa>.*)', 'group', '(?P<group>.*)']],\n 'kwargs':{'attributes': [\n 'info[vrf][(.*)][sa_cache][(.*)]']},\n 'exclude': exclude}},\n verify_ops={'ops.msdp.msdp.Msdp':{\n 'requirements':[[partial(verify_clear_callable,\n verify_func=CompareUptime.compare_uptime,\n verify_func_args=verify_func_args)]],\n 'kwargs':{'attributes': ['info[vrf][(.*)][sa_cache][(.*)]']},\n 'exclude': exclude}},\n num_values={'vrf': 'all', 'group':'all'})\n", "id": "8868841", "language": "Python", "matching_score": 4.51196813583374, "max_stars_count": 94, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/clear/msdp/clear.py" }, { "content": "'''Common implementation for bgp clear triggers'''\n\n# python\nfrom functools import partial\n\n# genie libs\nfrom genie.libs.sdk.libs.utils.mapping import Mapping\nfrom genie.libs.sdk.triggers.clear.clear import TriggerClear, verify_clear_callable\nfrom genie.libs.sdk.libs.utils.triggeractions import CompareUptime\n\n# Ignore keys when doing the diff with Ops objects for save_snapshot and\n# verify_clear, it will be used for LearnPollDiff.ops_diff callable\nexclude = ['keepalives','total', 'total_bytes', 'up_time', 'opens', 'capability',\n 'updates', 'notifications', 'foreign_port', 'local_port', 'totals',\n 'bgp_table_version', 'route_refresh', 'maker', 'callables',\n 'connections_dropped', 'connections_established', 'last_reset',\n 'bgp_negotiated_keepalive_timers', 'distance_extern_as',\n 'reset_reason', 'holdtime', 'keepalive_interval']\n\nclass TriggerClearBgp(TriggerClear):\n\n # Argument with dynamic value for verify callable\n # As verify callable can be re-used in multiple triggers\n # with different variable names. 
This dictionary is used to map\n # dynamic argument name to actual script argument name\n # <expected argument_name for callable>: <script argument name>\n \n verify_func_args={'r_obj': [['info', 'instance', '(?P<instance>.*)', 'vrf',\n '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)',\n 'up_time', '(.*)']],\n 'relation': '<',\n 'threshold_time': 'compare_time',\n 'ops': 'ops'}\n\n mapping = Mapping(requirements={'ops.bgp.bgp.Bgp':{\n 'requirements':[\\\n ['info', 'instance', '(?P<instance>.*)',\n 'vrf', '(?P<vrf>.*)','neighbor', '(?P<neighbor>.*)',\n 'session_state', 'established']],\n 'kwargs':{'attributes':['info']},\n 'exclude': exclude}},\n verify_ops={'ops.bgp.bgp.Bgp':{\n 'requirements':[[partial(verify_clear_callable,\n verify_func=CompareUptime.compare_uptime,\n verify_func_args=verify_func_args)]],\n 'kwargs':{'attributes':['info']},\n 'exclude': exclude}},\n num_values={'vrf':'all', 'instance':'all',\n 'neighbor':'all'})\n\n\nclass TriggerClearBgpAll(TriggerClearBgp):\n pass\n\nclass TriggerClearIpBgpSoft(TriggerClear):\n\n # Argument with dynamic value for verify callable\n # As verify callable can be re-used in multiple triggers\n # with different variable names. This dictionary is used to map\n # dynamic argument name to actual script argument name\n # <expected argument_name for callable>: <script argument name>\n \n verify_func_args={'r_obj': [['info', 'instance', '(?P<instance>.*)', 'vrf',\n '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)',\n 'up_time', '(.*)']],\n 'relation': '>=',\n 'threshold_time': 'compare_time',\n 'ops': 'ops'}\n\n mapping = Mapping(requirements={'ops.bgp.bgp.Bgp':{\n 'requirements':[\\\n ['info', 'instance', '(?P<instance>.*)',\n 'vrf', '(?P<vrf>.*)','neighbor', '(?P<neighbor>.*)',\n 'session_state', 'established']],\n 'kwargs':{'attributes':['info']},\n 'exclude': exclude}},\n verify_ops={'ops.bgp.bgp.Bgp':{\n 'requirements':[[partial(verify_clear_callable,\n verify_func=CompareUptime.compare_uptime,\n verify_func_args=verify_func_args)]],\n 'kwargs':{'attributes':['info']},\n 'exclude': exclude}},\n num_values={'vrf':'all', 'instance':'all',\n 'neighbor':'all'})\n\nclass TriggerClearBgpNeighbor(TriggerClear):\n\n # Argument with dynamic value for verify callable\n # As verify callable can be re-used in multiple triggers\n # with different variable names. This dictionary is used to map\n # dynamic argument name to actual script argument name\n # <expected argument_name for callable>: <script argument name>\n \n verify_func_args={'r_obj': [['info', 'instance', '(?P<instance>.*)', 'vrf',\n '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)',\n 'up_time', '(.*)']],\n 'relation': '<',\n 'threshold_time': 'compare_time',\n 'ops': 'ops'}\n\n mapping = Mapping(requirements={'ops.bgp.bgp.Bgp':{\n 'requirements':[\\\n ['info', 'instance', '(?P<instance>.*)',\n 'vrf', '(?P<vrf>.*)','neighbor', '(?P<neighbor>.*)',\n 'session_state', 'established']],\n 'kwargs':{'attributes':['info']},\n 'exclude': exclude}},\n verify_ops={'ops.bgp.bgp.Bgp':{\n 'requirements':[[partial(verify_clear_callable,\n verify_func=CompareUptime.compare_uptime,\n verify_func_args=verify_func_args)]],\n 'kwargs':{'attributes':['info']},\n 'exclude': exclude}},\n num_values={'vrf':'1', 'instance':'1',\n 'neighbor':'1'})\n\n\nclass TriggerClearBgpNeighborSoft(TriggerClear):\n\n # Argument with dynamic value for verify callable\n # As verify callable can be re-used in multiple triggers\n # with different variable names. 
This dictionary is used to map\n # dynamic argument name to actual script argument name\n # <expected argument_name for callable>: <script argument name>\n \n verify_func_args={'r_obj': [['info', 'instance', '(?P<instance>.*)', 'vrf',\n '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)',\n 'up_time', '(.*)']],\n 'relation': '>=',\n 'threshold_time': 'compare_time',\n 'ops': 'ops'}\n\n mapping = Mapping(requirements={'ops.bgp.bgp.Bgp':{\n 'requirements':[\\\n ['info', 'instance', '(?P<instance>.*)',\n 'vrf', '(?P<vrf>.*)','neighbor', '(?P<neighbor>.*)',\n 'session_state', 'established']],\n 'kwargs':{'attributes':['info']},\n 'exclude': exclude}},\n verify_ops={'ops.bgp.bgp.Bgp':{\n 'requirements':[[partial(verify_clear_callable,\n verify_func=CompareUptime.compare_uptime,\n verify_func_args=verify_func_args)]],\n 'kwargs':{'attributes':['info']},\n 'exclude': exclude}},\n num_values={'vrf':'1', 'instance':'1',\n 'neighbor':'1'})\n\n\nclass TriggerClearBgpNeighborIpv4(TriggerClear):\n\n # Argument with dynamic value for verify callable\n # As verify callable can be re-used in multiple triggers\n # with different variable names. This dictionary is used to map\n # dynamic argument name to actual script argument name\n # <expected argument_name for callable>: <script argument name>\n \n verify_func_args={'r_obj': [['info', 'instance', '(?P<instance>.*)', 'vrf',\n '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)',\n 'up_time', '(.*)']],\n 'relation': '<',\n 'threshold_time': 'compare_time',\n 'ops': 'ops'}\n\n mapping = Mapping(requirements={'ops.bgp.bgp.Bgp':{\n 'requirements':[\n ['info', 'instance', '(?P<instance>.*)',\n 'vrf', '(?P<vrf>.*)','neighbor', '(?P<neighbor>^[\\d\\.]+$)',\n 'session_state', 'established']],\n 'kwargs':{'attributes':['info']},\n 'exclude': exclude}},\n verify_ops={'ops.bgp.bgp.Bgp':{\n 'requirements':[[partial(verify_clear_callable,\n verify_func=CompareUptime.compare_uptime,\n verify_func_args=verify_func_args)]],\n 'kwargs':{'attributes':['info']},\n 'exclude': exclude}},\n num_values={'vrf':'1', 'instance':'1','neighbor':'1'})\n\n\nclass TriggerClearBgpNeighborIpv6(TriggerClear):\n\n # Argument with dynamic value for verify callable\n # As verify callable can be re-used in multiple triggers\n # with different variable names. This dictionary is used to map\n # dynamic argument name to actual script argument name\n # <expected argument_name for callable>: <script argument name>\n \n verify_func_args={'r_obj': [['info', 'instance', '(?P<instance>.*)', 'vrf',\n '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)',\n 'up_time', '(.*)']],\n 'relation': '<',\n 'threshold_time': 'compare_time',\n 'ops': 'ops'}\n\n mapping = Mapping(requirements={'ops.bgp.bgp.Bgp':{\n 'requirements':[\n ['info', 'instance', '(?P<instance>.*)',\n 'vrf', '(?P<vrf>.*)','neighbor', '(?P<neighbor>^[\\w\\:]+$)',\n 'session_state', 'established']],\n 'kwargs':{'attributes':['info']},\n 'exclude': exclude}},\n verify_ops={'ops.bgp.bgp.Bgp':{\n 'requirements':[[partial(verify_clear_callable,\n verify_func=CompareUptime.compare_uptime,\n verify_func_args=verify_func_args)]],\n 'kwargs':{'attributes':['info']},\n 'exclude': exclude}},\n num_values={'vrf':'1', 'instance':'1','neighbor':'1'})\n\n\nclass TriggerClearBgpNeighborSoftIpv4(TriggerClear):\n\n # Argument with dynamic value for verify callable\n # As verify callable can be re-used in multiple triggers\n # with different variable names. 
This dictionary is used to map\n # dynamic argument name to actual script argument name\n # <expected argument_name for callable>: <script argument name>\n \n verify_func_args={'r_obj': [['info', 'instance', '(?P<instance>.*)', 'vrf',\n '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)',\n 'up_time', '(.*)']],\n 'relation': '>=',\n 'threshold_time': 'compare_time',\n 'ops': 'ops'}\n\n mapping = Mapping(requirements={'ops.bgp.bgp.Bgp':{\n 'requirements':[\n ['info', 'instance', '(?P<instance>.*)',\n 'vrf', '(?P<vrf>.*)','neighbor', '(?P<neighbor>^[\\d\\.]+$)',\n 'session_state', 'established']],\n 'kwargs':{'attributes':['info']},\n 'exclude': exclude}},\n verify_ops={'ops.bgp.bgp.Bgp':{\n 'requirements':[[partial(verify_clear_callable,\n verify_func=CompareUptime.compare_uptime,\n verify_func_args=verify_func_args)]],\n 'kwargs':{'attributes':['info']},\n 'exclude': exclude}},\n num_values={'vrf':'1', 'instance':'1','neighbor':'1'})\n\nclass TriggerClearBgpNeighborSoftIpv6(TriggerClear):\n\n # Argument with dynamic value for verify callable\n # As verify callable can be re-used in multiple triggers\n # with different variable names. This dictionary is used to map\n # dynamic argument name to actual script argument name\n # <expected argument_name for callable>: <script argument name>\n \n verify_func_args={'r_obj': [['info', 'instance', '(?P<instance>.*)', 'vrf',\n '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)',\n 'up_time', '(.*)']],\n 'relation': '>=',\n 'threshold_time': 'compare_time',\n 'ops': 'ops'}\n\n mapping = Mapping(requirements={'ops.bgp.bgp.Bgp':{\n 'requirements':[\n ['info', 'instance', '(?P<instance>.*)',\n 'vrf', '(?P<vrf>.*)','neighbor', '(?P<neighbor>^[\\w\\:]+$)',\n 'session_state', 'established']],\n 'kwargs':{'attributes':['info']},\n 'exclude': exclude}},\n verify_ops={'ops.bgp.bgp.Bgp':{\n 'requirements':[[partial(verify_clear_callable,\n verify_func=CompareUptime.compare_uptime,\n verify_func_args=verify_func_args)]],\n 'kwargs':{'attributes':['info']},\n 'exclude': exclude}},\n num_values={'vrf':'1', 'instance':'1','neighbor':'1'})\n\n\nclass TriggerClearIpRouteCheckBgp(TriggerClearBgp):\n\n # Argument with dynamic value for verify callable\n # As verify callable can be re-used in multiple triggers\n # with different variable names. This dictionary is used to map\n # dynamic argument name to actual script argument name\n # <expected argument_name for callable>: <script argument name>\n \n verify_func_args={'r_obj': [['info', 'instance', '(?P<instance>.*)', 'vrf',\n '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)',\n 'up_time', '(.*)']],\n 'relation': '>=',\n 'threshold_time': 'compare_time',\n 'ops': 'ops'}\n\n mapping = Mapping(requirements={'ops.bgp.bgp.Bgp':{\n 'requirements':[\\\n ['info', 'instance', '(?P<instance>.*)',\n 'vrf', '(?P<vrf>.*)','neighbor', '(?P<neighbor>.*)',\n 'session_state', 'established']],\n 'kwargs':{'attributes':['info']},\n 'exclude': exclude}},\n verify_ops={'ops.bgp.bgp.Bgp':{\n 'requirements':[[partial(verify_clear_callable,\n verify_func=CompareUptime.compare_uptime,\n verify_func_args=verify_func_args)]],\n 'kwargs':{'attributes':['info']},\n 'exclude': exclude}},\n num_values={'vrf':'all', 'instance':'all',\n 'neighbor':'all'})\n\nclass TriggerClearBgpVpnv4UnicastVrfAll(TriggerClear):\n\n # Argument with dynamic value for verify callable\n # As verify callable can be re-used in multiple triggers\n # with different variable names. 
This dictionary is used to map\n # dynamic argument name to actual script argument name\n # <expected argument_name for callable>: <script argument name>\n \n verify_func_args={'r_obj': [['routes_per_peer', 'instance', 'default',\n 'vrf', '(?P<vrf>.*)','neighbor','(?P<neighbor>.*)',\n 'address_family', '(?P<af>vpnv4 unicast.*)',\n 'up_down', '(.*)']],\n 'relation': '<',\n 'threshold_time': 'compare_time',\n 'ops': 'ops'}\n\n mapping = Mapping(requirements={'ops.bgp.bgp.Bgp': {\n 'requirements': [ \\\n [['info', 'instance', '(?P<instance>.*)',\n 'vrf', '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)',\n 'address_family','(?P<af>(vpnv4 unicast).*)',\n 'session_state', 'established']],\n [['routes_per_peer', 'instance', 'default',\\\n 'vrf', '(?P<vrf>.*)','neighbor','(?P<neighbor>.*)',\\\n 'address_family', '(?P<af>(vpnv4 unicast).*)','(.*)']]],\n 'all_keys': True,\n 'kwargs': {'attributes': ['routes_per_peer','info']},\n 'exclude': exclude + ['msg_sent','msg_rcvd','up_down','tbl_ver']}},\n verify_ops={'ops.bgp.bgp.Bgp':{\n 'requirements':[[partial(verify_clear_callable,\n verify_func=CompareUptime.compare_uptime,\n verify_func_args=verify_func_args)]],\n 'kwargs':{'attributes': ['routes_per_peer','info']},\n 'exclude': exclude + ['msg_sent','msg_rcvd','up_down','tbl_ver']}},\n num_values={'vrf': 'all','neighbor': 'all', 'af': 'all'})\n\n\nclass TriggerClearBgpVpnv6UnicastVrfAll(TriggerClear):\n\n # Argument with dynamic value for verify callable\n # As verify callable can be re-used in multiple triggers\n # with different variable names. This dictionary is used to map\n # dynamic argument name to actual script argument name\n # <expected argument_name for callable>: <script argument name>\n \n verify_func_args={'r_obj': [['routes_per_peer', 'instance', 'default',\n 'vrf', '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)',\n 'address_family', '(?P<af>vpnv6 unicast.*)',\n 'up_down', '(.*)']],\n 'relation': '<',\n 'threshold_time': 'compare_time',\n 'ops': 'ops'}\n\n mapping = Mapping(requirements={'ops.bgp.bgp.Bgp': {\n 'requirements': [ \\\n [['info', 'instance', '(?P<instance>.*)',\n 'vrf', '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)',\n 'address_family', '(?P<af>(vpnv6 unicast).*)',\n 'session_state', 'established']],\n [['routes_per_peer', 'instance', 'default', \\\n 'vrf', '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)', \\\n 'address_family', '(?P<af>(vpnv6 unicast).*)', '(.*)']]],\n 'all_keys': True,\n 'kwargs': {'attributes': ['routes_per_peer','info']},\n 'exclude': exclude + ['msg_sent', 'msg_rcvd', 'up_down', 'tbl_ver']}},\n verify_ops={'ops.bgp.bgp.Bgp':{\n 'requirements':[[partial(verify_clear_callable,\n verify_func=CompareUptime.compare_uptime,\n verify_func_args=verify_func_args)]],\n 'kwargs':{'attributes': ['routes_per_peer','info']},\n 'exclude': exclude + ['msg_sent','msg_rcvd','up_down','tbl_ver']}},\n num_values={'vrf': 'all', 'neighbor': 'all', 'af': 'all'})\n\n\nclass TriggerClearIpBgpVrfAll(TriggerClear):\n\n # Argument with dynamic value for verify callable\n # As verify callable can be re-used in multiple triggers\n # with different variable names. 
This dictionary is used to map\n # dynamic argument name to actual script argument name\n # <expected argument_name for callable>: <script argument name>\n \n verify_func_args={'r_obj': [['routes_per_peer', 'instance', 'default',\n 'vrf', '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)',\n 'address_family', '(?P<af>ipv4.*)',\n 'up_down', '(.*)']],\n 'relation': '<',\n 'threshold_time': 'compare_time',\n 'ops': 'ops'}\n\n mapping = Mapping(requirements={'ops.bgp.bgp.Bgp': {\n 'requirements': [ \\\n [['info', 'instance', '(?P<instance>.*)',\n 'vrf', '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)',\n 'address_family', '(?P<af>.*)',\n 'session_state', 'established']],\n [['routes_per_peer', 'instance', '(?P<instance>.*)', \\\n 'vrf', '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)', \\\n 'address_family', '(?P<af>ipv4.*)', '(.*)']]],\n 'all_keys': True,\n 'kwargs': {'attributes': ['info','routes_per_peer']},\n 'exclude': exclude + ['msg_sent', 'msg_rcvd', 'up_down', 'tbl_ver']}},\n verify_ops={'ops.bgp.bgp.Bgp':{\n 'requirements':[[partial(verify_clear_callable,\n verify_func=CompareUptime.compare_uptime,\n verify_func_args=verify_func_args)]],\n 'kwargs':{'attributes': ['info','routes_per_peer']},\n 'exclude': exclude + ['msg_sent','msg_rcvd','up_down','tbl_ver']}},\n num_values={'vrf': 'all', 'neighbor': 'all', 'af': 'all'})\n\n\nclass TriggerRestartBgp(TriggerClear):\n\n # Argument with dynamic value for verify callable\n # As verify callable can be re-used in multiple triggers\n # with different variable names. This dictionary is used to map\n # dynamic argument name to actual script argument name\n # <expected argument_name for callable>: <script argument name>\n \n verify_func_args={'r_obj': [['info', 'instance', '(?P<instance>.*)', 'vrf',\n '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)',\n 'up_time', '(.*)']],\n 'relation': '<',\n 'threshold_time': 'compare_time',\n 'ops': 'ops'}\n\n mapping = Mapping(requirements={'ops.bgp.bgp.Bgp': {\n 'requirements': [ \\\n ['info', 'instance', '(?P<instance>.*)',\n 'vrf', '(?P<vrf>.*)', 'neighbor', '(?P<neighbor>.*)',\n 'address_family', '(?P<af>.*)',\n 'session_state', 'established'],\n ['info','instance','(?P<instance>.*)','bgp_id', '(?P<bgp_id>.*)']\n ],\n 'all_keys': True ,\n 'kwargs': {'attributes': ['info']},\n 'exclude': exclude}},\n verify_ops={'ops.bgp.bgp.Bgp':{\n 'requirements':[[partial(verify_clear_callable,\n verify_func=CompareUptime.compare_uptime,\n verify_func_args=verify_func_args)]],\n 'kwargs':{'attributes': ['info']},\n 'exclude': exclude}},\n num_values={'vrf': 'all', 'instance': 'all', 'neighbor': 'all', 'bgp_id': 'all'})\n", "id": "12366069", "language": "Python", "matching_score": 4.99648380279541, "max_stars_count": 0, "path": "pkgs/sdk-pkg/src/genie/libs/sdk/triggers/clear/bgp/clear.py" }, { "content": "# Genie\r\nfrom genie.ops.base import Base\r\n\r\n\r\nclass Bgp(Base):\r\n exclude = ['if_handle',\r\n 'keepalives',\r\n 'last_reset',\r\n 'reset_reason',\r\n 'foreign_port',\r\n 'local_port',\r\n 'msg_rcvd',\r\n 'msg_sent',\r\n 'up_down',\r\n 'bgp_table_version',\r\n 'routing_table_version',\r\n 'tbl_ver',\r\n 'table_version',\r\n 'memory_usage',\r\n 'updates',\r\n 'mss',\r\n 'total',\r\n 'total_bytes',\r\n 'up_time',\r\n 'bgp_negotiated_keepalive_timers',\r\n 'hold_time',\r\n 'keepalive_interval',\r\n 'sent',\r\n 'received',\r\n 'status_codes',\r\n 'holdtime',\r\n 'router_id',\r\n 'connections_dropped',\r\n 'connections_established',\r\n 'advertised',\r\n 'prefixes',\r\n 'routes',\r\n 'state_pfxrcd']", "id": "11214814", 
"language": "Python", "matching_score": 1.2216482162475586, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/bgp/bgp.py" }, { "content": "# Genie\r\nfrom genie.ops.base import Base\r\n\r\n\r\nclass Msdp(Base):\r\n exclude = ['statistics',\r\n 'elapsed_time',\r\n 'up_time',\r\n 'expire',\r\n 'sa_message',\r\n 'last_message_received',\r\n 'total',\r\n 'keepalive']", "id": "5712274", "language": "Python", "matching_score": 1.7200082540512085, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/msdp/msdp.py" }, { "content": "# Genie\r\nfrom genie.ops.base import Base\r\n\r\n\r\nclass Mld(Base):\r\n exclude = ['expire',\r\n 'up_time',\r\n 'last_reporter',\r\n 'querier']", "id": "4893506", "language": "Python", "matching_score": 2.124422311782837, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/mld/mld.py" }, { "content": "# Genie\r\nfrom genie.ops.base import Base\r\n\r\n\r\nclass Igmp(Base):\r\n\r\n exclude = ['expire', 'up_time']", "id": "9926205", "language": "Python", "matching_score": 2.0328330993652344, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/igmp/igmp.py" }, { "content": "# Genie\r\nfrom genie.ops.base import Base\r\n\r\n\r\nclass Rip(Base):\r\n exclude = ['expire_time']", "id": "2368575", "language": "Python", "matching_score": 1.4027241468429565, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/rip/rip.py" }, { "content": "'''\nRIP Genie Ops Object for IOS - CLI.\n'''\nfrom ..iosxe.rip import Rip as RipXE\n\nclass Rip(RipXE):\n pass", "id": "2856360", "language": "Python", "matching_score": 1, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/rip/ios/rip.py" }, { "content": "from .rip import *\n", "id": "12854399", "language": "Python", "matching_score": 0, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/rip/__init__.py" }, { "content": "# Genie\r\nfrom genie.ops.base import Base\r\n\r\n\r\nclass Ospf(Base):\r\n exclude = ['age',\r\n 'uptime',\r\n 'last_change',\r\n 'cksum',\r\n 'seq',\r\n 'dead_timer',\r\n 'hello_timer',\r\n 'checksum',\r\n 'seq_num',\r\n 'statistics',\r\n 'lsas',\r\n 'last_state_change',\r\n 'bdr_ip_addr',\r\n 'dr_ip_addr',\r\n 'state',\r\n 'bdr_router_id',\r\n 'dr_router_id',\r\n 'area_scope_lsa_cksum_sum']", "id": "5093839", "language": "Python", "matching_score": 2.0816762447357178, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/ospf/ospf.py" }, { "content": "# Genie\r\nfrom genie.ops.base import Base\r\n\r\n\r\nclass Nd(Base):\r\n exclude = ['neighbor_state', 'age']", "id": "3260184", "language": "Python", "matching_score": 1.6834214925765991, "max_stars_count": 94, "path": "pkgs/ops-pkg/src/genie/libs/ops/nd/nd.py" }, { "content": "# Genie\nfrom genie.ops.base import Base\n\nclass Hsrp(Base):\n exclude = []", "id": "5562567", "language": "Python", "matching_score": 1, "max_stars_count": 0, "path": "pkgs/ops-pkg/src/genie/libs/ops/hsrp/hsrp.py" }, { "content": "from .hsrp import *\n", "id": "3102248", "language": "Python", "matching_score": 0.31387531757354736, "max_stars_count": 94, "path": "pkgs/conf-pkg/src/genie/libs/conf/hsrp/__init__.py" } ]
2.761636
MuhengYan
[ { "content": "import os\nimport errno\nfrom config import EMB_DIR\n\nimport pickle\nimport numpy as np\nfrom torch.utils.data import Dataset\n\n#----functions for loading embeddings\n\ndef emb_from_cache(emb_name):\n file_path = os.path.join(EMB_DIR, '{}_cached.pkl'.format(emb_name))\n return pickle.load(open(file_path, 'rb'))\n\n\ndef save_emb_cache(emb_name, data):\n file_path = os.path.join(EMB_DIR, '{}_cached.pkl'.format(emb_name))\n pickle.dump(data, open(file_path, 'wb'), protocol=2)\n return\n\n \ndef load_embeddings(emb_name):\n try:\n cache = emb_from_cache(emb_name)\n print('Loaded {} from cached file!'.format(emb_name))\n return cache\n except OSError:\n pass\n \n file_path = os.path.join(EMB_DIR, '{}.pkl'.format(emb_name))\n print('Loading {} from raw file!'.format(emb_name))\n \n if os.path.exists(file_path):\n emb_data = pickle.load(open(file_path, 'rb'))\n \n print('Indexing {}...'.format(emb_name))\n word2idx = {}\n idx2word = {}\n embeddings = []\n \n #take idx=0 as zero padding\n dim = len(next(iter(emb_data.values())))\n embeddings.append(np.zeros(dim))\n \n #indexing the word vectors\n for _idx, _word in enumerate(emb_data, 1):\n _vector = emb_data[_word]\n word2idx[_word] = _idx\n idx2word[_idx] = _word\n embeddings.append(_vector)\n \n #add UNK token for out of vocab words\n if '<UNK>' not in word2idx:\n word2idx['<UNK>'] = len(word2idx) + 1\n idx2word[len(word2idx) + 1] = '<UNK>'\n embeddings.append(np.random.uniform(low=-0.05, high=0.05, size=dim))\n \n# assert len(set([len(x) for x in embeddings])) == 1\n print('Indexed {} word vectors.'.format(len(word2idx)))\n embeddings = np.array(embeddings, dtype='float32')\n \n #save cache for the indexing\n save_emb_cache(emb_name, (word2idx, idx2word, embeddings))\n return word2idx, idx2word, embeddings\n else:\n print('{} not found'.format(file_path))\n raise OSError(errno.ENOENT, os.strerror(errno.ENOENT), file)\n \n \n#--dataset class\nclass BaseDataset(Dataset):\n def __init__(self, \n X, y, z=None, \n length=None, name=None, \n verbose=True):\n self.X = X\n self.y = y\n self.z = z\n self.name = name\n \n self.set_length(length)\n \n if verbose:\n self.stats()\n \n \n def set_length(self, length):\n if length is None:\n self.length = max([len(item) for item in self.X])\n else:\n self.length = length\n \n \n def stats(self):\n raise NotImplementedError\n \n \n @staticmethod\n def _load_from_cache():\n return\n \n def _save_cache():\n return\n \n \n \nclass LocalDataset(BaseDataset):\n \n def __init__(self, \n X, y, z, \n word2idx,\n length=None, name=None, \n verbose=True):\n self.word2idx = word2idx\n \n BaseDataset.__init__(self, X, y, z, length, name, verbose)\n \n def stats(self):\n vocab = []\n unk = []\n \n for seq in self.X:\n for token in seq:\n vocab.append(token)\n if token not in self.word2idx:\n unk.append(token)\n \n labels = {}\n for lab in self.y:\n if np.argmax(lab) in labels:\n labels[np.argmax(lab)] += 1\n else:\n labels[np.argmax(lab)] = 1 \n \n print()\n print('Dataset:', self.name)\n print('Total #token:', len(vocab), 'Total #UNK:', len(unk), '{:.2f}%'.format(100 * len(unk)/len(vocab)))\n print('Unique #token:', len(set(vocab)), 'Unique #UNK:', len(set(unk)), '{:.2f}%'.format(100 * len(set(unk))/len(set(vocab))))\n print('Label Distribution:')\n for key in labels:\n print(key, ':' ,labels[key])\n print()\n \n \n \n def __len__(self):\n \n assert len(self.X) == len(self.y) and len(self.X) == len(self.z)\n \n return len(self.X)\n \n \n def __getitem__(self, index):\n X, y, z = self.X[index], 
self.y[index], self.z[index]\n \n X = self._to_vector(X, self.word2idx, self.length, unk_pol='random')\n z = self._to_vector(z, self.word2idx, self.length, unk_pol='zero')\n \n if isinstance(y, (list, tuple)):\n y = np.array(y)\n\n return X, y, z, len(self.X[index])\n \n \n @staticmethod\n def _to_vector(sequence, word2idx, length, unk_pol='random'):\n \n seq_vec = np.zeros(length).astype(int)\n \n for i, token in enumerate(sequence[:length]):\n if token in word2idx:\n seq_vec[i] = word2idx[token]\n elif token.lower() in word2idx:\n seq_vec[i] = word2idx[token.lower()]\n else:\n if unk_pol == 'random':\n seq_vec[i] = word2idx['<UNK>']\n elif unk_pol == 'zero':\n seq_vec[i] = 0\n else:\n raise ValueError('UNK policy not recognized!')\n return seq_vec\n\n\nclass GlobalDataset(BaseDataset):\n\n def __init__(self,\n X, y, z,\n word2idx,\n length=None, length_z=None, name=None,\n verbose=True):\n self.word2idx = word2idx\n self.length_z = length_z\n BaseDataset.__init__(self, X, y, z, length, name, verbose)\n\n def stats(self):\n vocab = []\n unk = []\n\n for seq in self.X:\n for token in seq:\n vocab.append(token)\n if token not in self.word2idx:\n unk.append(token)\n\n labels = {}\n for lab in self.y:\n if np.argmax(lab) in labels:\n labels[np.argmax(lab)] += 1\n else:\n labels[np.argmax(lab)] = 1\n\n print()\n print('Dataset:', self.name)\n print('Total #token:', len(vocab), 'Total #UNK:', len(unk), '{:.2f}%'.format(100 * len(unk) / len(vocab)))\n print('Unique #token:', len(set(vocab)), 'Unique #UNK:', len(set(unk)),\n '{:.2f}%'.format(100 * len(set(unk)) / len(set(vocab))))\n print('Label Distribution:')\n for key in labels:\n print(key, ':', labels[key])\n print()\n\n def __len__(self):\n\n assert len(self.X) == len(self.y) and len(self.X) == len(self.z)\n\n return len(self.X)\n\n def __getitem__(self, index):\n X, y, z = self.X[index], self.y[index], self.z[index]\n\n X = self._to_vector(X, self.word2idx, self.length, unk_pol='random')\n z = self._to_vector(z, self.word2idx, self.length_z, unk_pol='zero')\n\n if isinstance(y, (list, tuple)):\n y = np.array(y)\n\n return X, y, z, len(self.X[index])\n\n @staticmethod\n def _to_vector(sequence, word2idx, length, unk_pol='random'):\n\n seq_vec = np.zeros(length).astype(int)\n\n for i, token in enumerate(sequence[:length]):\n if token in word2idx:\n seq_vec[i] = word2idx[token]\n elif token.lower() in word2idx:\n seq_vec[i] = word2idx[token.lower()]\n else:\n if unk_pol == 'random':\n seq_vec[i] = word2idx['<UNK>']\n elif unk_pol == 'zero':\n seq_vec[i] = 0\n else:\n raise ValueError('UNK policy not recognized!')\n return seq_vec\n\n\nclass PlainDataset(BaseDataset):\n \n def __init__(self, \n X, y,\n word2idx,\n length=None, name=None, \n verbose=True):\n self.word2idx = word2idx\n \n BaseDataset.__init__(self, X, y, None, length, name, verbose)\n \n def stats(self):\n vocab = []\n unk = []\n \n for seq in self.X:\n for token in seq:\n vocab.append(token)\n if token not in self.word2idx:\n unk.append(token)\n \n labels = {}\n for lab in self.y:\n if np.argmax(lab) in labels:\n labels[np.argmax(lab)] += 1\n else:\n labels[np.argmax(lab)] = 1 \n \n print()\n print('Dataset:', self.name)\n print('Total #token:', len(vocab), 'Total #UNK:', len(unk), '{:.2f}%'.format(100 * len(unk)/len(vocab)))\n print('Unique #token:', len(set(vocab)), 'Unique #UNK:', len(set(unk)), '{:.2f}%'.format(100 * len(set(unk))/len(set(vocab))))\n print('Label Distribution:')\n for key in labels:\n print(key, ':' ,labels[key])\n print()\n \n \n def __len__(self):\n \n 
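# PlainDataset is constructed with z=None (see BaseDataset.__init__ above), so only X and y need to stay in sync\n 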
assert len(self.X) == len(self.y)\n \n return len(self.X)\n \n \n def __getitem__(self, index):\n X, y = self.X[index], self.y[index]\n \n X = self._to_vector(X, self.word2idx, self.length, unk_pol='random')\n \n if isinstance(y, (list, tuple)):\n y = np.array(y)\n return X, y, len(self.X[index])\n \n \n @staticmethod\n def _to_vector(sequence, word2idx, length, unk_pol='random'):\n \n seq_vec = np.zeros(length).astype(int)\n \n for i, token in enumerate(sequence[:length]):\n if token in word2idx:\n seq_vec[i] = word2idx[token]\n elif token.lower() in word2idx:\n seq_vec[i] = word2idx[token.lower()]\n else:\n if unk_pol == 'random':\n seq_vec[i] = word2idx['<UNK>']\n elif unk_pol == 'zero':\n seq_vec[i] = 0\n else:\n raise ValueError('UNK policy not recognized!')\n return seq_vec\n ", "id": "12255103", "language": "Python", "matching_score": 1.995765209197998, "max_stars_count": 0, "path": "utils/dataloading.py" }, { "content": "import os\n\nBASE_PATH = os.path.dirname(os.path.abspath(__file__))\n\nTRAINED_PATH = os.path.join(BASE_PATH, \"output/trained_model\")\n\nEXPS_PATH = os.path.join(BASE_PATH, \"output/experiments\")\n\nDATA_DIR = os.path.join(BASE_PATH, 'datasets')\n\nEMB_DIR = os.path.join(BASE_PATH, 'embeddings')\n\n\nPAD = 0\n\n", "id": "277982", "language": "Python", "matching_score": 0.3763885498046875, "max_stars_count": 0, "path": "config.py" }, { "content": "#optimizer, train, eval, early_stopping\nimport os\nimport pickle\nimport sys\nimport numpy as np\nimport random\nimport torch\nfrom torch import nn\nimport torch.nn.functional as F\n\nfrom config import TRAINED_PATH\n\n\ndef create_balanced_mask(shape, length):\n assert len(shape) == 3\n\n mask = torch.zeros(shape)\n\n for _idx0 in range(shape[0]):\n for _idx1 in range(shape[1]):\n # try:\n num = int(length[_idx0] / 2) + 1\n # except Exception:\n # print(len(length), _idx0, shape)\n\n _indexes_1st = random.sample(range(int(shape[2] / 2)), num)\n _indexes_2nd = random.sample(range(int(shape[2] / 2)), num)\n for _rand_idx in _indexes_1st:\n mask[_idx0][_idx1][_rand_idx] = 1\n for _rand_idx in _indexes_2nd:\n mask[_idx0][_idx1][-(_rand_idx + 1)] = 1\n\n return mask.view(shape)\n\nclass TransformerOptimizer():\n \n def __init__(self, optimizer, d_model, warmup_steps):\n self.optimizer = optimizer\n self.static_lr = np.power(d_model, -0.5)\n# self.static_lr = 0.1\n self.warmup_steps = warmup_steps\n \n self.num_step = 1\n \n def step(self):\n self._adjust_learning_rate()\n self.optimizer.step()\n \n def zero_grad(self):\n self.optimizer.zero_grad()\n \n def _adjust_learning_rate(self):\n lr = self.static_lr * min(np.power(self.num_step, -0.5), \n (self.num_step * np.power(self.warmup_steps, -1.5)))\n# lr = self.static_lr\n for param_group in self.optimizer.param_groups:\n param_group['lr'] = lr\n \n\ndef print_progress(loss, epoch, batch, batch_size, dataset_size):\n count = batch * batch_size\n bar_len = 40\n filled_len = int(round(bar_len * count / float(dataset_size)))\n\n bar = '+' * filled_len + '-' * (bar_len - filled_len)\n\n status = 'Epoch {}, Batch Loss ({}): {:.4f}'.format(epoch, batch, loss)\n _progress_str = \"\\r \\r [{}] ...{}\".format(bar, status)\n sys.stdout.write(_progress_str)\n sys.stdout.flush()\n \n \ndef train_epoch(model, optimizer, loader, epoch, device, args, mask=False, mask_ratio=0.5):\n \n true = []\n pred = []\n losses = []\n b_s = loader.batch_size\n d_s = len(loader.dataset)\n \n model.train()\n \n for i, batch in enumerate(loader, 1):\n \n X = batch[0].to(device)\n y = batch[1].to(device)\n 
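# nn.DataParallel wraps the model, so the include_lex flag must be read from model.module when the wrapper is used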
\n if isinstance(model, nn.DataParallel):\n inc_lex = model.module.include_lex\n else:\n inc_lex = model.include_lex\n\n if inc_lex:\n z = batch[2].to(device)\n else:\n z = None\n\n lengths = batch[-1].float()\n\n try:\n assert len(lengths) == len(X)\n except Exception:\n print(len(lengths), lengths[0])\n \n optimizer.zero_grad()\n sp = [len(X), args['seq_length'], args['z_length']]\n\n if mask:\n msk = create_balanced_mask(shape=sp,\n length=lengths*mask_ratio).to(device)\n\n else:\n msk = None\n\n logits, _, _, residual = model(X, z, context_mask=msk)\n\n loss = F.cross_entropy(logits, y.argmax(axis=1)) + residual\n loss.backward()\n optimizer.step()\n \n print_progress(loss.item(), epoch, i, b_s, d_s)\n \n pred += logits.argmax(axis=1).tolist()\n true += y.argmax(axis=1).tolist()\n losses.append(loss.item())\n \n accuracy = np.sum([1 for i in range(len(pred)) if pred[i] == true[i]]) / len(pred)\n \n return np.mean(losses), accuracy\n \n \ndef eval_epoch(model, loader, epoch, device):\n \n true = []\n pred = []\n losses = []\n b_s = loader.batch_size\n d_s = len(loader.dataset)\n \n model.eval()\n \n for i, batch in enumerate(loader, 1):\n \n X = batch[0].to(device)\n y = batch[1].to(device)\n# if model.include_lex:\n# z = batch[2].to(device)\n# else:\n# z = None\n\n if isinstance(model, nn.DataParallel):\n inc_lex = model.module.include_lex\n else:\n inc_lex = model.include_lex\n\n if inc_lex:\n z = batch[2].to(device)\n else:\n z = None\n \n logits, con_attn, lex_attn, _ = model(X, z)\n \n loss = F.cross_entropy(logits, y.argmax(axis=1))\n \n pred += logits.argmax(axis=1).tolist()\n true += y.argmax(axis=1).tolist()\n losses.append(loss.item())\n \n accuracy = np.sum([1 for i in range(len(pred)) if pred[i] == true[i]]) / len(pred)\n \n return np.mean(losses), accuracy, con_attn, lex_attn\n\n\n \n \n\n\ndef load_model(name):\n \n return\n\n\nclass TrainingManager():\n \n \n def __init__(self, tolerance):\n self.best_score = 0\n self.best_test_score = 0\n self.tolerance = tolerance\n self.max_tol = tolerance\n \n \n def checkpoint(self, model, args, score, test_score):\n if score > self.best_score:\n \n self.best_score = score\n self.best_test_score = test_score\n self.tolerance = self.max_tol\n print('TOLERANCE:', self.tolerance,'Improved! 
saving model...')\n\n self._save_model(model, args)\n return True\n else:\n self.tolerance -= 1\n print('TOLERANCE:', self.tolerance, 'Not Improved!')\n if self.tolerance <= 0:\n return False\n else:\n return True\n \n @staticmethod\n \n def _save_model(model, args):\n \n\n torch.save(model.state_dict(), \n os.path.join(TRAINED_PATH, \n '{}.model'.format(args['experiment_name'])))\n\n pickle.dump(args, \n open(os.path.join(TRAINED_PATH, \n '{}.args'.format(args['experiment_name'])), 'wb'),\n protocol=2)\n \n \n \n \n", "id": "5463210", "language": "Python", "matching_score": 3.9930224418640137, "max_stars_count": 0, "path": "utils/train.py" }, { "content": "import pickle\nimport os\nimport torch\nimport torch.optim\nimport random\nimport numpy as np\n\nfrom config import PAD\nfrom hyper_params import args\n\nfrom torch import nn\nfrom torch.utils.data import Dataset\nfrom torch.utils.data import DataLoader\n\nfrom utils.dataloading import load_embeddings, LocalDataset, PlainDataset\nfrom utils.train import TransformerOptimizer, TrainingManager\nfrom utils.train import train_epoch, eval_epoch\n\n# from LexTransformer.Modules import Embed, PosEmbed\n# from LexTransformer.Encoders import LexiconTransformerEncoder\nfrom LexTransformer.Models import LexiconTransformerClassifier\n\ndef set_seed(seed):\n random.seed(seed)\n np.random.seed(seed)\n torch.manual_seed(seed)\n #torch.cuda.manual_seed_all(seed)\n \n \n\ngpu_num = args['gpu_num']\n\nos.environ[\"CUDA_VISIBLE_DEVICES\"] = '{}'.format(gpu_num)\n\ndevice = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n\nprint('Model is running on:', device, '!')\nprint()\n\nset_seed(666)\n\nword2idx, idx2word, embeddings = load_embeddings(args['embedding_loc'])\nbatch_size = args['batch_size']\n\n#----------subject to change------------\ndev_dataset = pickle.load(open('datasets/sst-2/train.pkl', 'rb'))\nX = dev_dataset['X']\ny = dev_dataset['y']\n# z = pickle.load(open('datasets/sst_dev/z.pkl', 'rb'))\nz = None\n\n# dataset = LocalDataset(X, y, z, word2idx, name='dev_test', length=args['seq_length'])\ndataset = PlainDataset(X, y, word2idx, name='dev_train', length=args['seq_length'])\n\nloader_train = DataLoader(dataset, batch_size, shuffle=True, drop_last=True)\n\ndev_dataset = pickle.load(open('datasets/sst-2/dev.pkl', 'rb'))\nX = dev_dataset['X']\ny = dev_dataset['y']\n# z = pickle.load(open('datasets/sst_dev/z.pkl', 'rb'))\nz = None\n\ndataset = PlainDataset(X, y, word2idx, name='dev_dev', length=args['seq_length'])\nloader_dev = DataLoader(dataset, batch_size, shuffle=True, drop_last=False)\n\ndev_dataset = pickle.load(open('datasets/sst-2/test.pkl', 'rb'))\nX = dev_dataset['X']\ny = dev_dataset['y']\n# z = pickle.load(open('datasets/sst_dev/z.pkl', 'rb'))\nz = None\n\ndataset = PlainDataset(X, y, word2idx, name='dev_test', length=args['seq_length'])\nloader_test = DataLoader(dataset, batch_size, shuffle=True, drop_last=False)\n\n\n\n#--------------------------------------\n\n\nmodel = LexiconTransformerClassifier(length=args['seq_length'], \n emb_dim=args['embedding_dim'], \n embeddings=embeddings, \n n_transformer=args['transformer_layers'], \n num_head=args['num_heads'], \n d_k=args['sequence_attn_dim'],\n d_linear=args['feed_forward_dim'], \n d_kl=args['lex_attn_dim'], \n alpha=args['alpha'], \n n_logits=args['output_dim'], \n dropout=args['dropout'])\n\nparams = filter(lambda p: p.requires_grad, model.parameters())\n\nbase_optimizer = torch.optim.Adam(params, betas=(0.9, 0.98), eps=1e-9) #copying transformer 
settings\n\noptimizer = TransformerOptimizer(optimizer=base_optimizer, \n d_model=args['embedding_dim'],\n warmup_steps=args['warmup_steps'])\n\nmanager = TrainingManager(tolerance=args['tolerance'])\n\n#if torch.cuda.device_count() > 1:\n# print(\"Let's use\", torch.cuda.device_count() - 1, \"GPUs!\")\n# model = nn.DataParallel(model)\n\nmodel = model.to(device)\nprint(model)\nprint()\n\nfor ep in range(1, args['max_epoch']):\n train_loss, train_acc = train_epoch(model=model,\n optimizer=optimizer,\n loader=loader_train,\n epoch=ep, \n device=device,\n args=args)\n \n dev_loss, dev_acc, _, _ = eval_epoch(model=model, \n loader=loader_dev, \n epoch=ep, \n device=device)\n \n test_loss, test_acc, _, _ = eval_epoch(model=model, \n loader=loader_test, \n epoch=ep, \n device=device)\n print()\n print('----Epoch', ep, 'Summary-----')\n print('Train', 'Loss:', train_loss, 'Acc:', train_acc)\n print('Dev', 'Loss:', dev_loss, 'Acc:', dev_acc)\n print('Test', 'Loss:', test_loss, 'Acc:', test_acc)\n print('------------------------')\n \n passed = manager.checkpoint(model=model, args=args, score=dev_acc, test_score=test_acc)\n print()\n \n if not passed:\n print('early stopping...')\n print('best:', manager.best_score)\n break\n \n\n", "id": "5240264", "language": "Python", "matching_score": 4.819737911224365, "max_stars_count": 0, "path": "run_sst_2.py" }, { "content": "# args = {\n# 'gpu_num': 7,\n#\n# 'dataset_name': 'dev_test',\n# 'experiment_name': 'hint_sst_2_class',\n#\n# #pre-trained embedding\n# 'embedding_loc': 'glove',\n# 'embedding_dim': 300,\n#\n# #sequential data\n# 'seq_length': 63,\n# 'z_length': 194,\n# 'batch_size': 32,\n#\n# #transformer\n# 'transformer_layers': 1,\n# 'num_heads': 4,\n#\n# 'sequence_attn_dim': 300,\n# 'lex_attn_dim': None, #set to None if no lex\n# 'feed_forward_dim': 300,\n#\n# #general\n# 'alpha': 1.0,\n# 'output_dim': 2,\n# 'dropout': 0.3,\n# 'warmup_steps': 1000, #see 'Attention is all you need'\n# 'max_epoch': 100,\n# 'tolerance': 10,\n# 'lr': 0.1\n# }\nargs = {\n 'gpu_num': 0, #assumed default; run_sst_2.py reads this key at import time (the commented template above used 7)\n\n 'dataset_name': 'dev_test',\n 'experiment_name': 'hint_global_repeat',\n\n # pre-trained embedding\n 'embedding_loc': 'glove',\n 'embedding_dim': 300,\n\n # sequential data\n 'seq_length': 63,\n 'z_length': 194,\n 'mask_ratio': 0.5,\n 'batch_size': 64,\n\n # transformer\n 'transformer_layers': 2,\n 'num_heads': 4,\n\n 'sequence_attn_dim': 300,\n 'lex_attn_dim': 300, # set to None if no lex\n 'feed_forward_dim': 300,\n\n # general\n 'alpha': 0.7,\n 'output_dim': 2,\n 'dropout': 0.3,\n 'warmup_steps': 1000, #see 'Attention is all you need'; required by TransformerOptimizer\n 'max_epoch': 100,\n 'tolerance': 10,\n 'lr': 0.1\n }", "id": "6180099", "language": "Python", "matching_score": 1.5372798442840576, "max_stars_count": 0, "path": "hyper_params.py" }, { "content": "import torch\n\nimport torch.nn as nn\nimport numpy as np\n\nfrom .Modules import Embed, PosEmbed\nfrom .Encoders import TransformerEncoder, LexiconTransformerEncoder\n\nclass Encoder(nn.Module):\n def __init__(self, \n length, emb_dim, embeddings, \n num_layer, num_head, d_k, d_linear,\n d_kl = None, alpha=0.5, dropout=.1):\n super(Encoder, self).__init__()\n self.embed = Embed(length=length, emb_dim=emb_dim, \n embeddings=embeddings, \n trainable=False, dropout=dropout)\n \n self.pos_embed = PosEmbed(length=length, \n emb_dim=emb_dim)\n \n if d_kl is None:\n self.include_lex = False\n print('Initializing Plain Transformer!')\n self.transformers = nn.ModuleList([TransformerEncoder(num_head=num_head, \n d_x=emb_dim, d_k=d_k, \n d_linear=d_linear, \n dropout=dropout) for i in range(num_layer)])\n else:\n self.include_lex = True\n print('Initializing LexTransformer!')\n self.transformers = 
nn.ModuleList([LexiconTransformerEncoder(num_head=num_head, \n d_x=emb_dim, d_k=d_k, d_kl=d_kl, \n d_linear=d_linear,\n dropout=dropout) for i in range(num_layer)])\n \n\n \n def forward(self, X, z=None, context_mask=None):\n\n embedded = self.embed(X) + self.pos_embed(X, 'seq')\n #embedded = self.embed(X)\n\n pad_mask = self.masking(X, X)\n \n if z is not None:\n pad_mask_l = self.masking(X, z)\n# embedded_z = self.embed(z) + pos_emb(z, 'set')\n embedded_z = self.embed(z)\n\n \n encoded = embedded\n residual = 0 #stays 0 on the plain (no-lexicon) path, which reports no attention residual\n \n if self.include_lex:\n count = 0\n for _transformer in self.transformers:\n encoded, attn_con, attn_lex, residual = _transformer(encoded, embedded_z,\n pad_mask=pad_mask, \n pad_mask_l=pad_mask_l, \n context_mask=context_mask)\n count += 1\n else:\n for _transformer in self.transformers:\n encoded, attn_con = _transformer(encoded, pad_mask=pad_mask)\n attn_lex = None\n \n return encoded, attn_con, attn_lex, residual\n \n @staticmethod\n def masking(query_seq, key_seq):\n l_q = query_seq.shape[1]\n l_k = key_seq.shape[1]\n mask_q = query_seq.eq(0).unsqueeze(-1).expand(-1, -1, l_k)\n mask_k = key_seq.eq(0).unsqueeze(1).expand(-1, l_q, -1)\n mask = mask_q.masked_fill(mask_k, True)\n return mask\n \n \nclass DenseLayers(nn.Module):\n #task specific dense layer\n def __init__(self, dim, n_logits):\n \n super(DenseLayers, self).__init__()\n self.dense = nn.Linear(in_features=dim,\n out_features=n_logits)\n nn.init.xavier_normal_(self.dense.weight)\n \n def forward(self, X):\n return self.dense(X)\n \nclass LexiconTransformerClassifier(nn.Module):\n #full model\n def __init__(self, length, emb_dim, embeddings, \n n_transformer, num_head, d_k, \n d_linear, d_kl=None, alpha=0.5, \n n_logits=2,\n dropout=.1):\n super(LexiconTransformerClassifier, self).__init__()\n \n self.length = length\n self.emb_dim = emb_dim\n if d_kl is None:\n self.include_lex = False\n else:\n self.include_lex = True\n \n \n self.encoder = Encoder(length=length, \n emb_dim=emb_dim, \n embeddings=embeddings, \n num_layer=n_transformer, \n num_head=num_head, \n d_k=d_k, \n d_linear=d_linear,\n d_kl = d_kl, \n alpha=alpha, \n dropout=dropout)\n\n# self.dense_layer = DenseLayers(dim=length*emb_dim, n_logits=n_logits)\n self.dense_layer = DenseLayers(dim=emb_dim, n_logits=n_logits)\n\n def forward(self, X, z=None, context_mask=None):\n encoded, attn_con, attn_lex, residual = self.encoder(X=X, z=z,\n context_mask=context_mask) #batch * length * emb_dim\n \n# encoded = encoded.view(-1, self.length * self.emb_dim) #batch * (length*emb_dim)\n encoded = torch.mean(encoded, dim=1)\n\n logits = self.dense_layer(encoded)\n \n return logits, attn_con, attn_lex, residual\n \n \n", "id": "9546613", "language": "Python", "matching_score": 3.952836275100708, "max_stars_count": 0, "path": "LexTransformer/Models.py" }, { "content": "#basic components: Embedding Layer, Scaled Dot-Product Attention, Dense Layer\n\nimport numpy as np\nimport torch.nn.functional as F\nfrom torch import nn\nimport torch\n\n\nclass Embed(nn.Module):\n def __init__(self, length, emb_dim, \n embeddings=None, trainable=False, dropout=.1):\n super(Embed, self).__init__()\n \n self.embedding = nn.Embedding(num_embeddings=length, \n embedding_dim=emb_dim, \n padding_idx=0)\n \n if embeddings is not None:\n print(\"Loading pre-trained embeddings!\")\n self.embedding.weight = nn.Parameter(torch.from_numpy(embeddings),\n requires_grad=trainable)\n self.dropout = nn.Dropout(dropout)\n \n \n def forward(self, X):\n embedded = self.embedding(X)\n \n embedded = 
self.dropout(embedded)\n        return embedded\n\n\nclass PosEmbed(nn.Module):\n    def __init__(self, length, emb_dim):\n        super(PosEmbed, self).__init__()\n\n        self.length = length\n        self.emb_dim = emb_dim\n\n        pos_weight = self.position_encoding_init(n_position=length,\n                                                 emb_dim=emb_dim)\n        self.pos_embedding = nn.Embedding.from_pretrained(pos_weight, freeze=True)\n\n    def get_pos(self, word_sequences, mode='seq'):\n        batch = []\n        for word_seq in word_sequences:\n            start_idx = 1.0\n            word_pos = []\n            for pos in word_seq:\n                if mode == 'seq':\n                    if int(pos) == 0:\n                        word_pos.append(0.0)\n                    else:\n                        word_pos.append(start_idx)\n                        start_idx += 1.0\n                elif mode == 'set':\n                    word_pos.append(0.0)\n                else:\n                    raise ValueError('Unrecognized position encoding mode! Should be chosen from \"seq\" or \"set\"!')\n\n            batch.append(torch.from_numpy(np.array(word_pos)).type(torch.LongTensor))\n        batch = torch.cat(batch).view(-1, self.length)\n        # keep positions on the same device as the input instead of assuming CUDA\n        return batch.to(word_sequences.device)\n\n    def forward(self, X, mode='seq'):\n        X = self.get_pos(X, mode=mode)\n        pos_embeded = self.pos_embedding(X)\n        return pos_embeded\n\n    @staticmethod\n    def position_encoding_init(n_position, emb_dim):\n        ''' Init the sinusoid position encoding table '''\n        # keep dim 0 for padding token position encoding zero vector\n        n_position += 1\n        position_enc = np.array([\n            [pos / np.power(10000, 2 * (j // 2) / emb_dim) for j in range(emb_dim)]\n            if pos != 0 else np.zeros(emb_dim) for pos in range(n_position)])\n\n        position_enc[1:, 0::2] = np.sin(position_enc[1:, 0::2])  # apply sin on 0th,2nd,4th...emb_dim\n        position_enc[1:, 1::2] = np.cos(position_enc[1:, 1::2])  # apply cos on 1st,3rd,5th...emb_dim\n        return torch.from_numpy(position_enc).type(torch.FloatTensor)\n\n\nclass ScaledDotProductAttention(nn.Module):\n    def __init__(self, d_k, num_head, dropout=.1):\n        super(ScaledDotProductAttention, self).__init__()\n        self.reg = np.sqrt(d_k)\n        self.num_head = num_head\n        self.dropout = nn.Dropout(dropout)\n        self.softmax = nn.Softmax(dim=2)  # input tensor dim: (batch, seq_length, seq_length)\n\n    def forward(self, q, k, v, pad_mask=None, context_mask=None):\n        attention = torch.bmm(q, k.transpose(1, 2))  # dim of q and k: (batch * n_head, seq_length, d_k)\n        attention /= self.reg\n\n        if pad_mask is not None:\n            attention = attention.masked_fill(pad_mask, -1e9)  # see Attention is all you need 3.2.3\n\n        attention = self.softmax(attention)\n        attention = self.dropout(attention)\n\n        if pad_mask is not None:\n            attention = attention.masked_fill(pad_mask, 0)  # see Attention is all you need 3.2.3\n\n        if context_mask is not None:  # context masking\n            attention *= context_mask\n\n        # attention residual: penalizes overlap between the attention maps of different heads\n        residual = 0\n        if self.num_head > 1:\n            _length_1 = attention.shape[1]\n            _length_2 = attention.shape[2]\n            _attn = attention.contiguous().view(self.num_head, -1, _length_1, _length_2)\n            for m, left in enumerate(_attn):\n                for n, right in enumerate(_attn):\n                    if not m == n:\n                        residual += torch.sum(torch.abs(left * right)) / _length_1\n            residual = residual / self.num_head / self.num_head / 2\n\n        output = torch.bmm(attention, v)\n\n        return output, attention, residual\n\n\nclass MultiHeadAttention(nn.Module):\n    def __init__(self, num_head, d_x, d_k, dropout=.1):\n        super(MultiHeadAttention, self).__init__()\n        self.num_head = num_head\n        self.d_k = d_k\n\n        self.wq = nn.Linear(d_x, num_head * d_k)\n        self.wk = nn.Linear(d_x, num_head * d_k)\n        self.wv = nn.Linear(d_x, num_head * d_k)\n\n        nn.init.xavier_normal_(self.wq.weight)\n        nn.init.xavier_normal_(self.wk.weight)\n
        nn.init.xavier_normal_(self.wv.weight)\n\n        self.sdp_attn = ScaledDotProductAttention(d_k=d_k, num_head=num_head, dropout=dropout)\n\n        self.dropout = nn.Dropout(dropout)\n        self.norm = nn.LayerNorm(d_x)\n\n        self.wo = nn.Linear(num_head * d_k, d_x)\n        nn.init.xavier_normal_(self.wo.weight)\n\n    def forward(self, q, k, v, pad_mask=None):\n        X = q  # batch * length_q * d_x\n        length_q = q.shape[1]\n        assert v.shape[1] == k.shape[1]\n        length_k = k.shape[1]\n\n        q = self.wq(q).view(-1, length_q, self.num_head, self.d_k)  # batch * length * num_head * d_k\n        k = self.wk(k).view(-1, length_k, self.num_head, self.d_k)\n        v = self.wv(v).view(-1, length_k, self.num_head, self.d_k)\n\n        q = q.permute(2, 0, 1, 3).contiguous().view(-1, length_q, self.d_k)  # (batch * num_head) * length * d_k\n        k = k.permute(2, 0, 1, 3).contiguous().view(-1, length_k, self.d_k)\n        v = v.permute(2, 0, 1, 3).contiguous().view(-1, length_k, self.d_k)\n\n        if pad_mask is not None:\n            pad_mask = pad_mask.repeat(self.num_head, 1, 1)  # batch * l_q * l_k -> (batch * num_head) * l_q * l_k\n\n        output, attention, _ = self.sdp_attn(q, k, v,\n                                             pad_mask=pad_mask)\n        # output: (batch*nh) * length_q * d_k\n        # attention: (batch*nh) * length_q * length_k\n\n        output = output.view(self.num_head, -1, length_q, self.d_k)  # nh * batch * l_q * d_k\n        output = output.permute(1, 2, 0, 3).contiguous().view(-1, length_q,\n                                                              self.num_head * self.d_k)  # batch * l_q * (nh * d_k)\n\n        output = self.norm(self.dropout(self.wo(output)) + X)  # batch * l_q * d_x\n\n        attention = attention.view(self.num_head, -1, length_q, length_k).permute(1, 0, 2, 3)  # batch * nh * l_q * l_k\n\n        return output, attention\n\n\nclass LexiconMultiHeadAttention(nn.Module):\n    def __init__(self, num_head, d_x, d_k, d_kl, dropout=.1):\n        super(LexiconMultiHeadAttention, self).__init__()\n        self.num_head = num_head\n        self.d_k = d_k\n        self.d_kl = d_kl\n\n        self.wq = nn.Linear(d_x, num_head * d_k)\n        self.wk = nn.Linear(d_x, num_head * d_k)\n        self.wv = nn.Linear(d_x, num_head * d_k)\n        self.wkl = nn.Linear(d_x, num_head * d_kl)\n        self.wvl = nn.Linear(d_x, num_head * d_kl)\n\n        # initialization problems?\n        nn.init.xavier_normal_(self.wq.weight)\n        nn.init.xavier_normal_(self.wk.weight)\n        nn.init.xavier_normal_(self.wv.weight)\n        nn.init.xavier_normal_(self.wkl.weight)\n        nn.init.xavier_normal_(self.wvl.weight)\n\n        self.sdp_attn_context = ScaledDotProductAttention(d_k=d_k, num_head=num_head, dropout=dropout)\n        self.sdp_attn_lex = ScaledDotProductAttention(d_k=d_kl, num_head=num_head, dropout=dropout)\n\n        self.dropout = nn.Dropout(dropout)\n        self.norm = nn.LayerNorm(d_x)\n\n        self.wo = nn.Linear(num_head * d_k, d_x)\n        nn.init.xavier_normal_(self.wo.weight)\n\n    def forward(self, q, k, v, kl, vl,\n                pad_mask=None, pad_mask_l=None,\n                context_mask=None, alpha=0.5):\n        X = q  # batch * length_q * d_x\n        length_q = q.shape[1]\n\n        assert v.shape[1] == k.shape[1]\n        length_k = k.shape[1]\n        assert vl.shape[1] == kl.shape[1]\n        length_kl = kl.shape[1]\n\n        q = self.wq(q).view(-1, length_q, self.num_head, self.d_k)  # batch * length * num_head * d_k\n        k = self.wk(k).view(-1, length_k, self.num_head, self.d_k)\n        v = self.wv(v).view(-1, length_k, self.num_head, self.d_k)\n\n        kl = self.wkl(kl).view(-1, length_kl, self.num_head, self.d_kl)\n        vl = self.wvl(vl).view(-1, length_kl, self.num_head, self.d_kl)\n\n        q = q.permute(2, 0, 1, 3).contiguous().view(-1, length_q, self.d_k)  # (batch * num_head) * length * d_k\n        k = k.permute(2, 0, 1, 3).contiguous().view(-1, length_k, self.d_k)\n        v = v.permute(2, 0, 1, 3).contiguous().view(-1, length_k, self.d_k)\n\n        # value residual (disabled): cosine-similarity overlap between the heads' value projections\n        residual = 0\n        # if self.num_head > 1:\n        #     _v = v.contiguous().view(self.num_head, -1, length_k, self.d_k)\n        #     _sim = torch.nn.CosineSimilarity(dim=2)\n        #     for m, left in enumerate(_v):\n        #         for n, right in enumerate(_v):\n        #             if not m == n:\n        #                 residual += (torch.sum(torch.abs(_sim(left, right)))) / left.shape[0]\n        #     residual /= 2\n        #     residual = residual/self.num_head/self.num_head\n\n        kl = kl.permute(2, 0, 1, 3).contiguous().view(-1, length_kl, self.d_kl)\n        vl = vl.permute(2, 0, 1, 3).contiguous().view(-1, length_kl, self.d_kl)\n\n        if pad_mask is not None:\n            pad_mask = pad_mask.repeat(self.num_head, 1, 1)  # batch * l_q * l_k -> (batch * num_head) * l_q * l_k\n\n        if pad_mask_l is not None:\n            pad_mask_l = pad_mask_l.repeat(self.num_head, 1, 1)\n\n        if context_mask is not None:\n            context_mask = context_mask.repeat(self.num_head, 1, 1)\n\n        output_context, attention_context, a_res_context = self.sdp_attn_context(q, k, v,\n                                                                                 pad_mask=pad_mask)\n\n        output_lexicon, attention_lexicon, a_res_lexicon = self.sdp_attn_lex(q, kl, vl,\n                                                                             pad_mask=pad_mask_l,\n                                                                             context_mask=context_mask)\n\n        # blend the context and lexicon attention outputs\n        output = alpha * output_context + (1 - alpha) * output_lexicon\n\n        # note: only the context head-overlap residual is accumulated; a_res_lexicon is unused\n        residual += a_res_context\n\n        # output: (batch*nh) * length_q * d_k\n        # attention: (batch*nh) * length_q * length_k\n\n        output = output.view(self.num_head, -1, length_q, self.d_k)  # nh * batch * l_q * d_k\n        output = output.permute(1, 2, 0, 3).contiguous().view(-1, length_q, self.num_head * self.d_k)  # batch * l_q * (nh * d_k)\n\n        output = self.norm(self.dropout(self.wo(output)) + X)  # batch * l_q * d_x\n\n        attention_context = attention_context.view(self.num_head, -1, length_q, length_k).permute(1, 0, 2, 3)  # batch * nh * l_q * l_k\n        attention_lexicon = attention_lexicon.view(self.num_head, -1, length_q, length_kl).permute(1, 0, 2, 3)  # batch * nh * l_q * l_k\n\n        return output, attention_context, attention_lexicon, residual\n\n\nclass PointwiseFF(nn.Module):\n    def __init__(self, d_x, d_ff, dropout=.0):\n        super(PointwiseFF, self).__init__()\n        self.w1 = nn.Conv1d(d_x, d_ff, 1)\n        self.w2 = nn.Conv1d(d_ff, d_x, 1)\n\n        nn.init.xavier_normal_(self.w1.weight)\n        nn.init.xavier_normal_(self.w2.weight)\n\n        self.dropout = nn.Dropout(dropout)\n        self.norm = nn.LayerNorm(d_x)\n\n    def forward(self, X):\n        output = self.w2(F.relu(self.w1(X.transpose(1, 2))))  # dim of X: (batch, seq_length, d_x)\n        output = self.dropout(output.transpose(1, 2))\n        output = self.norm(output + X)  # batch * seq_length * d_x\n        return output\n", "id": "54751", "language": "Python", "matching_score": 4.337634086608887, "max_stars_count": 0, "path": "LexTransformer/Modules.py" }, { "content": "from torch import nn\nimport torch\n\nfrom LexTransformer.Modules import MultiHeadAttention, LexiconMultiHeadAttention, PointwiseFF\n\n\nclass TransformerEncoder(nn.Module):\n    def __init__(self, num_head, d_x, d_k, d_linear, dropout=.1):\n        super(TransformerEncoder, self).__init__()\n        self.mh_attn = MultiHeadAttention(num_head=num_head,\n                                          d_x=d_x, d_k=d_k,\n                                          dropout=dropout)\n        self.linear = PointwiseFF(d_x=d_x,\n                                  d_ff=d_linear,\n                                  dropout=dropout)\n\n    def forward(self, X, pad_mask=None):\n        output, attention = self.mh_attn(X, X, X,\n                                         pad_mask=pad_mask)\n        # output dim: batch * length_X * d_X\n        # attention dim: batch * nh * l_q * l_k\n\n        # zero out positions whose input embedding is all zeros (padding)\n        norm = X.ne(0.0).any(dim=2).type(torch.float).unsqueeze(-1)\n        output *= norm\n\n        output = 
self.linear(output)  # batch * length * d_x\n        output *= norm\n\n        return output, attention\n\n\nclass LexiconTransformerEncoder(nn.Module):\n    def __init__(self, num_head, d_x, d_k, d_kl, d_linear, dropout=.1):\n        super(LexiconTransformerEncoder, self).__init__()\n        self.mh_attn = LexiconMultiHeadAttention(num_head=num_head,\n                                                 d_x=d_x, d_k=d_k, d_kl=d_kl,\n                                                 dropout=dropout)\n\n        self.linear = PointwiseFF(d_x=d_x,\n                                  d_ff=d_linear,\n                                  dropout=dropout)\n\n    def forward(self, X, z, pad_mask=None, pad_mask_l=None, context_mask=None, alpha=0.5):\n        # alpha is forwarded from the Encoder so the context/lexicon blend is configurable\n        output, attention_context, attention_lexicon, residual = self.mh_attn(X, X, X, z, z,\n                                                                              pad_mask=pad_mask,\n                                                                              pad_mask_l=pad_mask_l,\n                                                                              context_mask=context_mask,\n                                                                              alpha=alpha)\n        # output dim: batch * length_X * d_X\n        # attention dim: batch * nh * l_q * l_k\n\n        norm = X.ne(0.0).any(dim=2).type(torch.float).unsqueeze(-1)\n        # output *= norm\n\n        output = self.linear(output)  # batch * length * d_x\n        output *= norm\n\n        return output, attention_context, attention_lexicon, residual\n", "id": "5201294", "language": "Python", "matching_score": 3.2705187797546387, "max_stars_count": 0, "path": "LexTransformer/Encoders.py" } ]
3.611678
lightsourceengine
[ { "content": "{\n \"variables\": {\n \"lse_enable_plugin_platform_sdl%\": \"1\",\n \"lse_enable_plugin_audio_sdl_audio%\": \"1\",\n \"lse_enable_plugin_audio_sdl_mixer%\": \"1\",\n \"lse_sdl_include%\": \"third-party/SDL/include\",\n \"lse_sdl_mixer_include%\": \"third-party/SDL_mixer/include\",\n # note: enable_native_tests variable is set in set-enable-native-tests.gypi\n \"lse_enable_native_tests%\": \"0\",\n },\n \"target_defaults\": {\n \"conditions\": [\n [ \"lse_enable_plugin_platform_sdl==1\", { \"defines\": [ \"LSE_ENABLE_PLUGIN_PLATFORM_SDL\" ] } ],\n [ \"lse_enable_plugin_audio_sdl_audio==1\", { \"defines\": [ \"LSE_ENABLE_PLUGIN_AUDIO_SDL_AUDIO\" ] } ],\n [ \"lse_enable_plugin_audio_sdl_mixer==1\", { \"defines\": [ \"LSE_ENABLE_PLUGIN_AUDIO_SDL_MIXER\" ] } ],\n ],\n \"configurations\": {\n \"Release\": {\n \"defines\": [\n \"NDEBUG\"\n ]\n }\n }\n },\n \"includes\": [\n # move setting enable_native_tests variable to another file so tests can be disabled in npm package builds\n \"addon/set-enable-native-tests.gypi\",\n\n # third party static libraries\n \"third-party/nanosvg/build.gypi\",\n \"third-party/stb/build.gypi\",\n \"third-party/freetype/build.gypi\",\n \"third-party/base64/build.gypi\",\n \"third-party/yoga/build.gypi\",\n\n # static libraries\n \"addon/lse-lib-napi-ext/build.gypi\",\n \"addon/lse-lib-habitat/build.gypi\",\n \"addon/lse-lib-audio/build.gypi\",\n \"addon/lse-lib-platform/build.gypi\",\n \"addon/lse-lib-util/build.gypi\",\n \"addon/lse-lib-logger/build.gypi\",\n \"addon/lse-lib-font/build.gypi\",\n \"addon/lse-plugin-ref/build.gypi\",\n \"addon/lse-plugin-sdl/build.gypi\",\n \"addon/lse-plugin-sdl-audio/build.gypi\",\n \"addon/lse-plugin-sdl-mixer/build.gypi\",\n\n # node modules\n \"addon/lse-core/build.gypi\"\n ]\n}\n", "id": "1767266", "language": "Python", "matching_score": 0, "max_stars_count": 0, "path": "packages/@lse/core/binding.gyp" } ]
0
chessgoose
[ { "content": "\n# Get NBA team IDs\nfrom nba_api.stats.static import teams\nnba_teams = teams.get_teams()\nnba_team_ids = [team[\"id\"] for team in nba_teams]\n\nfrom nba_api.stats.endpoints import leaguegamefinder\nfrom nba_api.stats.library.parameters import SeasonTypeAllStar\n# Game finder for season and season type\ngamefinder = leaguegamefinder.LeagueGameFinder(season_nullable = \"2020-21\",\n season_type_nullable = SeasonTypeAllStar.playoffs, timeout = 10)\n\n# Extract game IDs\ngames_dict = gamefinder.get_normalized_dict()\ngames = games_dict[\"LeagueGameFinderResults\"]\ngame_ids = [game[\"GAME_ID\"] for game in games if game[\"TEAM_ID\"] in nba_team_ids] # filter to nba teams only\n\n# Print game IDs\nprint(len(game_ids))\n\nfrom nba_api.stats.endpoints import playbyplay\nimport pandas as pd\nimport time\n\ndef timeConvert(mins, secs):\n return (60 * mins + secs)\n\ndef getIntByGameID(game_id):\n time.sleep(0.5)\n df = playbyplay.PlayByPlay(game_id).get_data_frames()[0]\n\n # Select rows with scores\n df = df.loc[df[\"SCORE\"].notnull()]\n\n # Clean up columns\n df[[\"minute\", \"second\"]] = df[\"PCTIMESTRING\"].str.split(\":\", expand = True).astype(int)\n df[[\"left_score\", \"right_score\"]] = df[\"SCORE\"].str.split(\" - \", expand = True).astype(int)\n df.rename(columns = {\"PERIOD\":\"period\"}, inplace = True)\n df = df.loc[:, [\"period\", \"minute\", \"second\", \"left_score\", \"right_score\"]]\n\n df.to_excel(\"heat-bucks1.xlsx\")\n\n #Loop through all scores\n totTime = 0\n integral = 0\n\n #Keep track of the previous row to calculate difference\n pMins = 12\n pSecs = 0\n pLeft = 0\n pRight = 0\n for row in df.itertuples():\n dt = abs(timeConvert(pMins, pSecs) - timeConvert(row.minute, row.second))\n\n if dt == 720 or (row.period >= 5 and dt == 300):\n print(\"New quarter shit\")\n dt = 0\n\n totTime += dt\n integral += dt * abs(pLeft - pRight)\n\n pMins = row.minute\n pSecs = row.second\n pLeft = row.left_score\n pRight = row.right_score\n\n print(\"Completed game\" + game_id)\n print(\"GIntegral: \" + str(integral))\n print(\"Total time: \" + str(totTime))\n print(\"Point difference per second: \" + str(float(integral/(totTime))))\n print()\n\n\n return float(integral/totTime)\n\n\noutput_dict = {}\npointspersec = []\n\nfor i in game_ids:\n pointspersec.append(getIntByGameID(i))\n\noutput_dict = dict(zip(game_ids, pointspersec))\n\nkeys = output_dict.keys()\nvalues = output_dict.values()\n\ndf2 = pd.DataFrame({\"GameID\": keys, \"PD\": values})\ndf2.to_excel(\"playoffPD.xlsx\")\n", "id": "3478740", "language": "Python", "matching_score": 0, "max_stars_count": 0, "path": "close-games.py" }, { "content": "import numpy as np\nimport pandas as pd\n\nfrom nba_api.stats.static import players\nfrom nba_api.stats.endpoints import shotchartdetail\nfrom nba_api.stats.endpoints import playercareerstats\n\ndef get_player_shotchartdetail(player_name, season_id):\n nba_players = players.get_players()\n player_dict = [player for player in nba_players if player['full_name'] == player_name][0]\n print(player_dict)\n\nget_player_shotchartdetail(\"<NAME>\", \"2020-21\")\n", "id": "6652910", "language": "Python", "matching_score": 0, "max_stars_count": 0, "path": "shot-chart.py" } ]
0