How to use planet - 10 common examples

To help you get started, we’ve selected ten planet examples based on popular ways it is used in public projects.


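Note that the first example comes from google-research/planet (the PlaNet reinforcement-learning agent), which only shares the name; the remaining nine exercise Planet Venus (rubys/venus), the feed aggregator. The aggregator examples all follow the same basic flow: load a configuration, then spider the subscribed feeds into a local cache. A minimal sketch of that flow, using only calls that appear in the snippets below (the config path is hypothetical):

import planet
from planet import config, spider

planet.getLogger('CRITICAL', None)   # quiet logging, as in capture.py below
config.load('examples/planet.ini')   # hypothetical path to a planet config file
spider.spiderPlanet()                # fetch all subscribed feeds into the cache
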
github google-research / planet / planet / scripts / configs.py
def _training_schedule(config, params):
  config.train_steps = int(params.get('train_steps', 50000))
  config.test_steps = int(params.get('test_steps', 50))
  config.max_steps = int(params.get('max_steps', 5e7))
  config.train_log_every = config.train_steps
  config.train_checkpoint_every = None
  config.test_checkpoint_every = int(
      params.get('checkpoint_every', 10 * config.test_steps))
  config.checkpoint_to_load = None
  config.savers = [tools.AttrDict(exclude=(r'.*_temporary.*',))]
  config.print_metrics_every = config.train_steps // 10
  config.train_dir = os.path.join(params.logdir, 'train_episodes')
  config.test_dir = os.path.join(params.logdir, 'test_episodes')
  config.random_collects = _initial_collection(config, params)
  config.train_collects = _active_collection(
      params.get('train_collects', [{}]), dict(
          prefix='train',
          save_episode_dir=config.train_dir,
          action_noise=params.get('train_action_noise', 0.3),
      ), config, params)
  config.test_collects = _active_collection(
      params.get('test_collects', [{}]), dict(
          prefix='test',
          save_episode_dir=config.test_dir,
          action_noise=0.0,
      ), config, params)
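This schedule builder reads every knob from params with a default fallback, so callers only override what they need. A standalone sketch of that override pattern (the values are hypothetical; the real code wraps params in planet.tools.AttrDict, but any mapping with .get() behaves the same):

# Hypothetical params: only the overridden knobs need to be present.
params = {'train_steps': 1000, 'checkpoint_every': 200}

train_steps = int(params.get('train_steps', 50000))                       # 1000, overridden
test_steps = int(params.get('test_steps', 50))                            # 50, default
checkpoint_every = int(params.get('checkpoint_every', 10 * test_steps))   # 200, overridden
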
github rubys / venus / tests / capture.py
"""
This script captures such output.  It should be run whenever there is
a major change in the contract between stages.
"""

import shutil, os, sys

# move up a directory
sys.path.insert(0, os.path.split(sys.path[0])[0])
os.chdir(sys.path[0])

# copy spider output to splice input
import planet
from planet import spider, config
planet.getLogger('CRITICAL',None)

config.load('tests/data/spider/config.ini')
spider.spiderPlanet()
if os.path.exists('tests/data/splice/cache'):
    shutil.rmtree('tests/data/splice/cache')
shutil.move('tests/work/spider/cache', 'tests/data/splice/cache')

source=open('tests/data/spider/config.ini')
dest1=open('tests/data/splice/config.ini', 'w')
dest1.write(source.read().replace('/work/spider/', '/data/splice/'))
dest1.close()

source.seek(0)
dest2=open('tests/work/apply_config.ini', 'w')
dest2.write(source.read().replace('[Planet]', '''[Planet]
output_theme = asf
output_dir = tests/work/apply'''))
dest2.close()
github rubys / venus / tests / test_spider.py
def tearDown(self):
        shutil.rmtree(workdir)
        os.removedirs(os.path.split(workdir)[0])
        planet.logger = self.original_logger
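The tests swap in a quiet logger (planet.getLogger('CRITICAL', None), as in capture.py above) and restore whatever logger was saved in setUp. A sketch of the same pattern outside a TestCase, assuming getLogger replaces the module-level planet.logger as these snippets rely on:

import planet

original_logger = planet.logger
planet.getLogger('CRITICAL', None)   # swap in a quiet logger
try:
    pass                             # ... run the code under test ...
finally:
    planet.logger = original_logger  # put the original logger back
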
github rubys / venus / tests / test_expunge.py
def test_expunge(self):
        config.load(configfile)

        # create test entries in cache with correct timestamp
        for entry in glob.glob(testentries):
            e=minidom.parse(entry)
            e.normalize()
            eid = e.getElementsByTagName('id')
            eupdated = e.getElementsByTagName('updated')
            # skip entries that lack an id or updated element (check before use)
            if not eid or not eupdated: continue
            efile = filename(workdir, eid[0].childNodes[0].nodeValue)
            eupdated = eupdated[0].childNodes[0].nodeValue
            emtime = time.mktime(feedparser._parse_date_w3dtf(eupdated))
            shutil.copyfile(entry, efile)
            os.utime(efile, (emtime, emtime))
  
        # create test feeds in cache
        sources = config.cache_sources_directory()
        for feed in glob.glob(testfeeds):
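The timestamp trick above is the heart of this test: each cached entry's file mtime is set from its Atom updated value so the expunge logic sees realistic ages. A standalone sketch of that step, using the same private feedparser helper the test relies on (the updated value and file path are hypothetical):

import os, time
import feedparser

updated = '2006-01-01T12:00:00Z'                              # hypothetical <updated> value
mtime = time.mktime(feedparser._parse_date_w3dtf(updated))    # parse W3C-DTF into a time tuple
os.utime('tests/work/expunge/entry.xml', (mtime, mtime))      # hypothetical cache file
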
github rubys / venus / tests / test_spider.py
def test_spiderFeed(self):
        config.load(configfile)
        self.spiderFeed(testfeed % '1b')
        self.verify_spiderFeed()
github rubys / venus / tests / reconstitute.py
    work = reduce(os.path.join, ['tests','work','reconsititute'], venus_base)
    output = os.path.join(work, 'output')
    filters = os.path.join(venus_base,'filters')
    parser.set('Planet','cache_directory',work)
    parser.set('Planet','output_dir',output)
    parser.set('Planet','filter_directories',filters)
    if hide_planet_ns:
        parser.set('Planet','template_files','themes/common/atom.xml.xslt')
    else:
        parser.set('Planet','template_files','tests/data/reconstitute.xslt')

    for name, value in zip(sys.argv[2::2],sys.argv[3::2]):
        parser.set(sys.argv[1], name.lstrip('-'), value)

    from planet import config
    config.parser = parser

    from planet import spider
    spider.spiderPlanet(only_if_new=False)

    import feedparser
    for source in glob.glob(os.path.join(work, 'sources/*')):
        feed = feedparser.parse(source).feed
        if feed.has_key('title'):
            config.parser.set('Planet','name',feed.title_detail.value)
        if feed.has_key('link'):
            config.parser.set('Planet','link',feed.link)
        if feed.has_key('author_detail'):
            if feed.author_detail.has_key('name'):
                config.parser.set('Planet','owner_name',feed.author_detail.name)
            if feed.author_detail.has_key('email'):
                config.parser.set('Planet','owner_email',feed.author_detail.email)
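reconstitute.py shows that planet's configuration does not have to come from config.load(): any ConfigParser-compatible object assigned to config.parser works. A minimal sketch of that in-memory setup (the section values are hypothetical; the ConfigParser import is the Python 2 form that Venus targets):

from ConfigParser import ConfigParser

parser = ConfigParser()
parser.add_section('Planet')
parser.set('Planet', 'cache_directory', 'tests/work/cache')   # hypothetical paths
parser.set('Planet', 'output_dir', 'tests/work/output')
parser.set('Planet', 'template_files', 'themes/common/atom.xml.xslt')

from planet import config, spider
config.parser = parser                   # bypass config.load()
spider.spiderPlanet(only_if_new=False)   # refetch everything, as above
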
github rubys / venus / tests / test_idindex.py
def test_index_spider(self):
        import test_spider
        config.load(test_spider.configfile)

        index = idindex.create()
        self.assertEqual(0, len(index))
        index.close()

        from planet.spider import spiderPlanet
        try:
            spiderPlanet()

            index = idindex.open()
            self.assertEqual(12, len(index))
            self.assertEqual('tag:planet.intertwingly.net,2006:testfeed1', index['planet.intertwingly.net,2006,testfeed1,1'])
            self.assertEqual('http://intertwingly.net/code/venus/tests/data/spider/testfeed3.rss', index['planet.intertwingly.net,2006,testfeed3,1'])
            index.close()
        finally:
            import os, shutil
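The id index behaves like a small persistent mapping from cache keys to entry ids: idindex.create() builds it from the current cache, idindex.open() reopens it, and len() reports how many entries are indexed. A short sketch based on the calls in the test above (it assumes idindex is importable from the planet package, as in the test module, and that a config has already been loaded):

from planet import config, idindex

config.load('tests/data/spider/config.ini')   # any spider config, as in the test
index = idindex.create()                      # build the index from the cache
print len(index)                              # number of indexed entries (Python 2 print)
index.close()
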
github rubys / venus / tests / test_filters.py
def test_excerpt_images1(self):
        config.load('tests/data/filter/excerpt-images.ini')
        self.verify_images()
github rubys / venus / tests / test_filter_xslt.py
def test_xslt_filter(self):
        config.load('tests/data/filter/translate.ini')
        testfile = 'tests/data/filter/category-one.xml'

        input = open(testfile).read()
        output = shell.run(config.filters()[0], input, mode="filter")
        dom = xml.dom.minidom.parseString(output)
        catterm = dom.getElementsByTagName('category')[0].getAttribute('term')
        self.assertEqual('OnE', catterm)
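The same filter can be exercised by hand: config.filters() lists the configured filter scripts and shell.run() pipes a document through one of them. A sketch using the same config and test file as above (assuming shell is importable from the planet package, as in the test module):

from planet import config, shell

config.load('tests/data/filter/translate.ini')
xml_text = open('tests/data/filter/category-one.xml').read()
print shell.run(config.filters()[0], xml_text, mode='filter')   # filtered XML (Python 2 print)
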
github rubys / venus / tests / test_foaf.py
def test_recursive(self):
        config.load('tests/data/config/foaf-deep.ini')
        feeds = config.subscriptions()
        feeds.sort()
        self.assertEqual(['http://api.flickr.com/services/feeds/photos_public.gne?id=77366516@N00',
        'http://del.icio.us/rss/eliast', 'http://del.icio.us/rss/leef',
        'http://del.icio.us/rss/rubys', 'http://intertwingly.net/blog/atom.xml',
        'http://thefigtrees.net/lee/life/atom.xml',
        'http://torrez.us/feed/rdf'], feeds)
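config.subscriptions() flattens everything the configuration points at, including feeds pulled in recursively from FOAF reading lists, into a plain list of URLs. A sketch of inspecting it directly, using the same config file as the test above:

from planet import config

config.load('tests/data/config/foaf-deep.ini')
for url in sorted(config.subscriptions()):
    print url    # one subscribed feed URL per line (Python 2 print)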