trunk
parent 5646555b14
commit f712734648

@@ -6,7 +6,6 @@ import logging
import re
import typing as t
import anosql
from anosql.core import from_str, Queries

@@ -12,7 +12,6 @@ import re
from calf.grammar import TOKENS
from calf.io.reader import PeekPosReader
from calf.token import CalfToken
from calf.util import *
class CalfLexer:

@@ -2,11 +2,10 @@
The Calf parser.
"""
from itertools import tee
import logging
from calf.grammar import MATCHING, WHITESPACE_TYPES
from calf.lexer import CalfLexer, lex_buffer, lex_file
from calf.grammar import MATCHING
from calf.lexer import lex_buffer, lex_file
from calf.token import *

@@ -7,7 +7,6 @@ trip through the lexer.
import calf.lexer as cl
from conftest import parametrize
import pytest
def lex_single_token(buffer):

@@ -1,4 +1,4 @@
from setuptools import find_packages, setup
from setuptools import setup
here = path.abspath(path.dirname(__file__))

@@ -59,7 +59,6 @@ from datalog.evaluator import select
from datalog.reader import pr_str, read_command, read_dataset
from datalog.types import (
    CachedDataset,
    Constant,
    Dataset,
    LVar,
    PartlyIndexedDataset,
@@ -67,7 +66,7 @@ from datalog.types import (
    TableIndexedDataset
)
from prompt_toolkit import print_formatted_text, prompt, PromptSession
from prompt_toolkit import print_formatted_text, PromptSession
from prompt_toolkit.formatted_text import FormattedText
from prompt_toolkit.history import FileHistory
from prompt_toolkit.styles import Style

@@ -1,5 +1,9 @@
"""Debris."""
from random import shuffle
from datalog.types import LVar
def shuffled(seq):
"""Because random.shuffle() is in-place >.>"""

@@ -3,11 +3,8 @@ A datalog engine.
"""
from functools import reduce
from itertools import chain
from datalog.parser import parse
from datalog.reader import pr_str, read
from datalog.types import CachedDataset, Constant, Dataset, LVar, Rule, TableIndexedDataset
from datalog.types import CachedDataset, Constant, Dataset, LVar, TableIndexedDataset
def match(tuple, expr, bindings=None):

@@ -3,7 +3,6 @@ A datalog reader.
"""
from collections import defaultdict
from itertools import chain
from datalog.parser import FAILURE, Grammar
from datalog.types import Constant, Dataset, LVar, Rule

@@ -5,9 +5,7 @@ from datalog.types import (
    CachedDataset,
    Constant,
    Dataset,
    LVar,
    PartlyIndexedDataset,
    Rule,
    TableIndexedDataset
)

@@ -1,5 +1,3 @@
#!/usr/bin/env python3
"""
A (toy) tool for emitting Python ASTs as YAML formatted data.
"""
@@ -12,85 +10,85 @@ import yaml
def propnames(node):
    """return names of attributes specific for the current node"""
    props = {x for x in dir(node) if not x.startswith("_")}

    if isinstance(node, ast.Module):
        props -= {"body"}

    if isinstance(node, (ast.Expr, ast.Attribute)):
        props -= {"value"}

    if isinstance(node, ast.Constant):
        props -= {"n", "s"}

    if isinstance(node, ast.ClassDef):
        props -= {"body"}

    return props


# Note that ast.NodeTransformer exists for mutations.
# This is just for reads.
class TreeDumper(ast.NodeVisitor):
    def __init__(self):
        super().__init__()
        self._stack = []

    def dump(self, node):
        self.visit(node)

    def visit(self, node):
        nodetype = type(node)
        nodename = node.__class__.__name__
        indent = " " * len(self._stack) * 2
        print(indent + nodename)
        for n in propnames(node):
            print(indent + "%s: %s" % (n, node.__dict__[n]))

        self._stack.append(node)
        self.generic_visit(node)
        self._stack.pop()


class YAMLTreeDumper(ast.NodeVisitor):
    def __init__(self):
        super().__init__()
        self._stack = []

    def node2yml(self, node):
        try:
            nodetype = type(node)
            nodename = node.__class__.__name__
            return {
                "op": nodename,
                "props": {n: node.__dict__[n] for n in propnames(node)},
                "children": [],
            }
        except:
            print(repr(node), propnames(node), dir(node))

    def visit(self, node):
        yml_node = self.node2yml(node)
        self._stack.append(yml_node)
        old_stack = self._stack
        self._stack = yml_node["children"]
        self.generic_visit(node)
        self._stack = old_stack
        return yml_node


if __name__ == "__main__":
    parser = optparse.OptionParser(usage="%prog [options] <filename.py>")
    opts, args = parser.parse_args()

    if len(args) == 0:
        parser.print_help()
        sys.exit(-1)
    filename = args[0]

    with open(filename) as f:
        root = ast.parse(f.read(), filename)

    print(yaml.dump(YAMLTreeDumper().visit(root), default_flow_style=False, sort_keys=False))
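For orientation on the hunk above: YAMLTreeDumper walks the tree via ast.NodeVisitor, turning each node into a dict with "op", "props", and "children" keys and nesting children as it recurses, which yaml.dump then serializes. A minimal usage sketch, assuming the script is importable under the hypothetical module name astdump (module name and sample source are illustrative, not part of the commit); note that inputs whose remaining props hold other AST objects may not serialize cleanly with a plain yaml.dump:

import ast
import yaml
from astdump import YAMLTreeDumper  # hypothetical module name for the script above

tree = ast.parse("42", "<example>")   # a bare literal keeps every prop YAML-friendly
doc = YAMLTreeDumper().visit(tree)    # nested {"op", "props", "children"} dicts
print(yaml.dump(doc, default_flow_style=False, sort_keys=False))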

@@ -11,7 +11,6 @@ from kazoo.client import KazooClient
from kazoo.exceptions import NodeExistsError
from kazoo.protocol.states import ZnodeStat
from kazoo.recipe.lock import Lock, ReadLock, WriteLock
from kazoo.recipe.watchers import ChildrenWatch, DataWatch
from kook.config import current_config, KookConfig
from toolz.dicttoolz import assoc as _assoc, dissoc as _dissoc, merge as _merge, update_in

@@ -7,9 +7,9 @@ import sys
import traceback
from lilith.interpreter import Bindings, eval as lil_eval, Runtime
from lilith.parser import Apply, Args, parse_expr, Symbol
from lilith.reader import Def, Import, Module, read_buffer, read_file
from prompt_toolkit import print_formatted_text, prompt, PromptSession
from lilith.parser import parse_expr, Symbol
from lilith.reader import Def, Module, read_buffer, read_file
from prompt_toolkit import print_formatted_text, PromptSession
from prompt_toolkit.formatted_text import FormattedText
from prompt_toolkit.history import FileHistory
from prompt_toolkit.styles import Style

@@ -6,7 +6,7 @@ import logging
import typing as t
from warnings import warn
from lilith.parser import Args, Block, parse_buffer, Symbol
from lilith.parser import Block, parse_buffer, Symbol
log = logging.getLogger(__name__)

@@ -2,7 +2,7 @@
Pytest fixtures.
"""
from lilith.parser import Block, GRAMMAR, parser_with_transformer
from lilith.parser import GRAMMAR, parser_with_transformer
import pytest

@@ -1,6 +1,6 @@
"""tests covering the Lilith parser."""
from lilith.parser import Apply, Args, Block, GRAMMAR, parse_buffer, parser_with_transformer, Symbol
from lilith.parser import Apply, Args, Block, parse_buffer, Symbol
import pytest

@@ -21,23 +21,14 @@ import socket
import subprocess
import sys
from sys import exit
import syslog
from telnetlib import Telnet
from threading import Event, Lock, Thread
from time import sleep
from kazoo.client import KazooClient
from kazoo.exceptions import (
    ConnectionLoss,
    LockTimeout,
    NodeExistsError,
    NoNodeError,
    SessionExpiredError
)
from kazoo.exceptions import ConnectionLoss, LockTimeout, SessionExpiredError
from kazoo.handlers.threading import KazooTimeoutError
from kazoo.recipe.lock import Lock as KazooLock
from kook.client import KookClient, lock
import yaml
log = logging.getLogger("arrdem.overwatchd")

@@ -1,4 +1,4 @@
from setuptools import find_packages, setup
from setuptools import setup
here = path.abspath(path.dirname(__file__))

@@ -3,7 +3,6 @@
from hypothesis import given
from hypothesis.strategies import integers
import proquint
import pytest
@given(integers(min_value=0, max_value=1<<16))

@@ -10,7 +10,6 @@ import re
from gandi.client import GandiAPI
import jinja2
import meraki
import pkg_resources
import yaml

@@ -7,8 +7,6 @@ import os
import socket
import sqlite3 as sql
from ratchet import Event, Message, Request
SCHEMA_SCRIPT = """
PRAGMA foreign_keys = ON;
