prompt (large_string, lengths 72–9.34k) | completion (large_string, lengths 0–7.61k) |
---|---|
<|file_name|>conftest.py<|end_file_name|><|fim▁begin|>import pytest
from tests.base import Author, Post, Comment, Keyword, fake
def make_author():
return Author(
id=fake.random_int(),
first_name=fake.first_name(),
last_name=fake.last_name(),
twitter=fake.domain_word(),
)
def make_post(with_comments=True, with_author=True, with_keywords=True):
comments = [make_comment() for _ in range(2)] if with_comments else []
keywords = [make_keyword() for _ in range(3)] if with_keywords else []
author = make_author() if with_author else None
return Post(
id=fake.random_int(),
title=fake.catch_phrase(),
author=author,
author_id=author.id if with_author else None,
comments=comments,
keywords=keywords,
)
def make_comment(with_author=True):
author = make_author() if with_author else None
return Comment(id=fake.random_int(), body=fake.bs(), author=author)
def make_keyword():
return Keyword(keyword=fake.domain_word())
@pytest.fixture()
def author():
return make_author()
@pytest.fixture()
def authors():
return [make_author() for _ in range(3)]
@pytest.fixture()
def comments():
return [make_comment() for _ in range(3)]
@pytest.fixture()
def post():
return make_post()
@pytest.fixture()
def post_with_null_comment():
return make_post(with_comments=False)
@pytest.fixture()
def post_with_null_author():
return make_post(with_author=False)
@pytest.fixture()
def posts():
<|fim_middle|>
<|fim▁end|> | return [make_post() for _ in range(3)] |
<|file_name|>conftest.py<|end_file_name|><|fim▁begin|>import pytest
from tests.base import Author, Post, Comment, Keyword, fake
def <|fim_middle|>():
return Author(
id=fake.random_int(),
first_name=fake.first_name(),
last_name=fake.last_name(),
twitter=fake.domain_word(),
)
def make_post(with_comments=True, with_author=True, with_keywords=True):
comments = [make_comment() for _ in range(2)] if with_comments else []
keywords = [make_keyword() for _ in range(3)] if with_keywords else []
author = make_author() if with_author else None
return Post(
id=fake.random_int(),
title=fake.catch_phrase(),
author=author,
author_id=author.id if with_author else None,
comments=comments,
keywords=keywords,
)
def make_comment(with_author=True):
author = make_author() if with_author else None
return Comment(id=fake.random_int(), body=fake.bs(), author=author)
def make_keyword():
return Keyword(keyword=fake.domain_word())
@pytest.fixture()
def author():
return make_author()
@pytest.fixture()
def authors():
return [make_author() for _ in range(3)]
@pytest.fixture()
def comments():
return [make_comment() for _ in range(3)]
@pytest.fixture()
def post():
return make_post()
@pytest.fixture()
def post_with_null_comment():
return make_post(with_comments=False)
@pytest.fixture()
def post_with_null_author():
return make_post(with_author=False)
@pytest.fixture()
def posts():
return [make_post() for _ in range(3)]
<|fim▁end|> | make_author |
<|file_name|>conftest.py<|end_file_name|><|fim▁begin|>import pytest
from tests.base import Author, Post, Comment, Keyword, fake
def make_author():
return Author(
id=fake.random_int(),
first_name=fake.first_name(),
last_name=fake.last_name(),
twitter=fake.domain_word(),
)
def <|fim_middle|>(with_comments=True, with_author=True, with_keywords=True):
comments = [make_comment() for _ in range(2)] if with_comments else []
keywords = [make_keyword() for _ in range(3)] if with_keywords else []
author = make_author() if with_author else None
return Post(
id=fake.random_int(),
title=fake.catch_phrase(),
author=author,
author_id=author.id if with_author else None,
comments=comments,
keywords=keywords,
)
def make_comment(with_author=True):
author = make_author() if with_author else None
return Comment(id=fake.random_int(), body=fake.bs(), author=author)
def make_keyword():
return Keyword(keyword=fake.domain_word())
@pytest.fixture()
def author():
return make_author()
@pytest.fixture()
def authors():
return [make_author() for _ in range(3)]
@pytest.fixture()
def comments():
return [make_comment() for _ in range(3)]
@pytest.fixture()
def post():
return make_post()
@pytest.fixture()
def post_with_null_comment():
return make_post(with_comments=False)
@pytest.fixture()
def post_with_null_author():
return make_post(with_author=False)
@pytest.fixture()
def posts():
return [make_post() for _ in range(3)]
<|fim▁end|> | make_post |
<|file_name|>conftest.py<|end_file_name|><|fim▁begin|>import pytest
from tests.base import Author, Post, Comment, Keyword, fake
def make_author():
return Author(
id=fake.random_int(),
first_name=fake.first_name(),
last_name=fake.last_name(),
twitter=fake.domain_word(),
)
def make_post(with_comments=True, with_author=True, with_keywords=True):
comments = [make_comment() for _ in range(2)] if with_comments else []
keywords = [make_keyword() for _ in range(3)] if with_keywords else []
author = make_author() if with_author else None
return Post(
id=fake.random_int(),
title=fake.catch_phrase(),
author=author,
author_id=author.id if with_author else None,
comments=comments,
keywords=keywords,
)
def <|fim_middle|>(with_author=True):
author = make_author() if with_author else None
return Comment(id=fake.random_int(), body=fake.bs(), author=author)
def make_keyword():
return Keyword(keyword=fake.domain_word())
@pytest.fixture()
def author():
return make_author()
@pytest.fixture()
def authors():
return [make_author() for _ in range(3)]
@pytest.fixture()
def comments():
return [make_comment() for _ in range(3)]
@pytest.fixture()
def post():
return make_post()
@pytest.fixture()
def post_with_null_comment():
return make_post(with_comments=False)
@pytest.fixture()
def post_with_null_author():
return make_post(with_author=False)
@pytest.fixture()
def posts():
return [make_post() for _ in range(3)]
<|fim▁end|> | make_comment |
<|file_name|>conftest.py<|end_file_name|><|fim▁begin|>import pytest
from tests.base import Author, Post, Comment, Keyword, fake
def make_author():
return Author(
id=fake.random_int(),
first_name=fake.first_name(),
last_name=fake.last_name(),
twitter=fake.domain_word(),
)
def make_post(with_comments=True, with_author=True, with_keywords=True):
comments = [make_comment() for _ in range(2)] if with_comments else []
keywords = [make_keyword() for _ in range(3)] if with_keywords else []
author = make_author() if with_author else None
return Post(
id=fake.random_int(),
title=fake.catch_phrase(),
author=author,
author_id=author.id if with_author else None,
comments=comments,
keywords=keywords,
)
def make_comment(with_author=True):
author = make_author() if with_author else None
return Comment(id=fake.random_int(), body=fake.bs(), author=author)
def <|fim_middle|>():
return Keyword(keyword=fake.domain_word())
@pytest.fixture()
def author():
return make_author()
@pytest.fixture()
def authors():
return [make_author() for _ in range(3)]
@pytest.fixture()
def comments():
return [make_comment() for _ in range(3)]
@pytest.fixture()
def post():
return make_post()
@pytest.fixture()
def post_with_null_comment():
return make_post(with_comments=False)
@pytest.fixture()
def post_with_null_author():
return make_post(with_author=False)
@pytest.fixture()
def posts():
return [make_post() for _ in range(3)]
<|fim▁end|> | make_keyword |
<|file_name|>conftest.py<|end_file_name|><|fim▁begin|>import pytest
from tests.base import Author, Post, Comment, Keyword, fake
def make_author():
return Author(
id=fake.random_int(),
first_name=fake.first_name(),
last_name=fake.last_name(),
twitter=fake.domain_word(),
)
def make_post(with_comments=True, with_author=True, with_keywords=True):
comments = [make_comment() for _ in range(2)] if with_comments else []
keywords = [make_keyword() for _ in range(3)] if with_keywords else []
author = make_author() if with_author else None
return Post(
id=fake.random_int(),
title=fake.catch_phrase(),
author=author,
author_id=author.id if with_author else None,
comments=comments,
keywords=keywords,
)
def make_comment(with_author=True):
author = make_author() if with_author else None
return Comment(id=fake.random_int(), body=fake.bs(), author=author)
def make_keyword():
return Keyword(keyword=fake.domain_word())
@pytest.fixture()
def <|fim_middle|>():
return make_author()
@pytest.fixture()
def authors():
return [make_author() for _ in range(3)]
@pytest.fixture()
def comments():
return [make_comment() for _ in range(3)]
@pytest.fixture()
def post():
return make_post()
@pytest.fixture()
def post_with_null_comment():
return make_post(with_comments=False)
@pytest.fixture()
def post_with_null_author():
return make_post(with_author=False)
@pytest.fixture()
def posts():
return [make_post() for _ in range(3)]
<|fim▁end|> | author |
<|file_name|>conftest.py<|end_file_name|><|fim▁begin|>import pytest
from tests.base import Author, Post, Comment, Keyword, fake
def make_author():
return Author(
id=fake.random_int(),
first_name=fake.first_name(),
last_name=fake.last_name(),
twitter=fake.domain_word(),
)
def make_post(with_comments=True, with_author=True, with_keywords=True):
comments = [make_comment() for _ in range(2)] if with_comments else []
keywords = [make_keyword() for _ in range(3)] if with_keywords else []
author = make_author() if with_author else None
return Post(
id=fake.random_int(),
title=fake.catch_phrase(),
author=author,
author_id=author.id if with_author else None,
comments=comments,
keywords=keywords,
)
def make_comment(with_author=True):
author = make_author() if with_author else None
return Comment(id=fake.random_int(), body=fake.bs(), author=author)
def make_keyword():
return Keyword(keyword=fake.domain_word())
@pytest.fixture()
def author():
return make_author()
@pytest.fixture()
def <|fim_middle|>():
return [make_author() for _ in range(3)]
@pytest.fixture()
def comments():
return [make_comment() for _ in range(3)]
@pytest.fixture()
def post():
return make_post()
@pytest.fixture()
def post_with_null_comment():
return make_post(with_comments=False)
@pytest.fixture()
def post_with_null_author():
return make_post(with_author=False)
@pytest.fixture()
def posts():
return [make_post() for _ in range(3)]
<|fim▁end|> | authors |
<|file_name|>conftest.py<|end_file_name|><|fim▁begin|>import pytest
from tests.base import Author, Post, Comment, Keyword, fake
def make_author():
return Author(
id=fake.random_int(),
first_name=fake.first_name(),
last_name=fake.last_name(),
twitter=fake.domain_word(),
)
def make_post(with_comments=True, with_author=True, with_keywords=True):
comments = [make_comment() for _ in range(2)] if with_comments else []
keywords = [make_keyword() for _ in range(3)] if with_keywords else []
author = make_author() if with_author else None
return Post(
id=fake.random_int(),
title=fake.catch_phrase(),
author=author,
author_id=author.id if with_author else None,
comments=comments,
keywords=keywords,
)
def make_comment(with_author=True):
author = make_author() if with_author else None
return Comment(id=fake.random_int(), body=fake.bs(), author=author)
def make_keyword():
return Keyword(keyword=fake.domain_word())
@pytest.fixture()
def author():
return make_author()
@pytest.fixture()
def authors():
return [make_author() for _ in range(3)]
@pytest.fixture()
def <|fim_middle|>():
return [make_comment() for _ in range(3)]
@pytest.fixture()
def post():
return make_post()
@pytest.fixture()
def post_with_null_comment():
return make_post(with_comments=False)
@pytest.fixture()
def post_with_null_author():
return make_post(with_author=False)
@pytest.fixture()
def posts():
return [make_post() for _ in range(3)]
<|fim▁end|> | comments |
<|file_name|>conftest.py<|end_file_name|><|fim▁begin|>import pytest
from tests.base import Author, Post, Comment, Keyword, fake
def make_author():
return Author(
id=fake.random_int(),
first_name=fake.first_name(),
last_name=fake.last_name(),
twitter=fake.domain_word(),
)
def make_post(with_comments=True, with_author=True, with_keywords=True):
comments = [make_comment() for _ in range(2)] if with_comments else []
keywords = [make_keyword() for _ in range(3)] if with_keywords else []
author = make_author() if with_author else None
return Post(
id=fake.random_int(),
title=fake.catch_phrase(),
author=author,
author_id=author.id if with_author else None,
comments=comments,
keywords=keywords,
)
def make_comment(with_author=True):
author = make_author() if with_author else None
return Comment(id=fake.random_int(), body=fake.bs(), author=author)
def make_keyword():
return Keyword(keyword=fake.domain_word())
@pytest.fixture()
def author():
return make_author()
@pytest.fixture()
def authors():
return [make_author() for _ in range(3)]
@pytest.fixture()
def comments():
return [make_comment() for _ in range(3)]
@pytest.fixture()
def <|fim_middle|>():
return make_post()
@pytest.fixture()
def post_with_null_comment():
return make_post(with_comments=False)
@pytest.fixture()
def post_with_null_author():
return make_post(with_author=False)
@pytest.fixture()
def posts():
return [make_post() for _ in range(3)]
<|fim▁end|> | post |
<|file_name|>conftest.py<|end_file_name|><|fim▁begin|>import pytest
from tests.base import Author, Post, Comment, Keyword, fake
def make_author():
return Author(
id=fake.random_int(),
first_name=fake.first_name(),
last_name=fake.last_name(),
twitter=fake.domain_word(),
)
def make_post(with_comments=True, with_author=True, with_keywords=True):
comments = [make_comment() for _ in range(2)] if with_comments else []
keywords = [make_keyword() for _ in range(3)] if with_keywords else []
author = make_author() if with_author else None
return Post(
id=fake.random_int(),
title=fake.catch_phrase(),
author=author,
author_id=author.id if with_author else None,
comments=comments,
keywords=keywords,
)
def make_comment(with_author=True):
author = make_author() if with_author else None
return Comment(id=fake.random_int(), body=fake.bs(), author=author)
def make_keyword():
return Keyword(keyword=fake.domain_word())
@pytest.fixture()
def author():
return make_author()
@pytest.fixture()
def authors():
return [make_author() for _ in range(3)]
@pytest.fixture()
def comments():
return [make_comment() for _ in range(3)]
@pytest.fixture()
def post():
return make_post()
@pytest.fixture()
def <|fim_middle|>():
return make_post(with_comments=False)
@pytest.fixture()
def post_with_null_author():
return make_post(with_author=False)
@pytest.fixture()
def posts():
return [make_post() for _ in range(3)]
<|fim▁end|> | post_with_null_comment |
<|file_name|>conftest.py<|end_file_name|><|fim▁begin|>import pytest
from tests.base import Author, Post, Comment, Keyword, fake
def make_author():
return Author(
id=fake.random_int(),
first_name=fake.first_name(),
last_name=fake.last_name(),
twitter=fake.domain_word(),
)
def make_post(with_comments=True, with_author=True, with_keywords=True):
comments = [make_comment() for _ in range(2)] if with_comments else []
keywords = [make_keyword() for _ in range(3)] if with_keywords else []
author = make_author() if with_author else None
return Post(
id=fake.random_int(),
title=fake.catch_phrase(),
author=author,
author_id=author.id if with_author else None,
comments=comments,
keywords=keywords,
)
def make_comment(with_author=True):
author = make_author() if with_author else None
return Comment(id=fake.random_int(), body=fake.bs(), author=author)
def make_keyword():
return Keyword(keyword=fake.domain_word())
@pytest.fixture()
def author():
return make_author()
@pytest.fixture()
def authors():
return [make_author() for _ in range(3)]
@pytest.fixture()
def comments():
return [make_comment() for _ in range(3)]
@pytest.fixture()
def post():
return make_post()
@pytest.fixture()
def post_with_null_comment():
return make_post(with_comments=False)
@pytest.fixture()
def <|fim_middle|>():
return make_post(with_author=False)
@pytest.fixture()
def posts():
return [make_post() for _ in range(3)]
<|fim▁end|> | post_with_null_author |
<|file_name|>conftest.py<|end_file_name|><|fim▁begin|>import pytest
from tests.base import Author, Post, Comment, Keyword, fake
def make_author():
return Author(
id=fake.random_int(),
first_name=fake.first_name(),
last_name=fake.last_name(),
twitter=fake.domain_word(),
)
def make_post(with_comments=True, with_author=True, with_keywords=True):
comments = [make_comment() for _ in range(2)] if with_comments else []
keywords = [make_keyword() for _ in range(3)] if with_keywords else []
author = make_author() if with_author else None
return Post(
id=fake.random_int(),
title=fake.catch_phrase(),
author=author,
author_id=author.id if with_author else None,
comments=comments,
keywords=keywords,
)
def make_comment(with_author=True):
author = make_author() if with_author else None
return Comment(id=fake.random_int(), body=fake.bs(), author=author)
def make_keyword():
return Keyword(keyword=fake.domain_word())
@pytest.fixture()
def author():
return make_author()
@pytest.fixture()
def authors():
return [make_author() for _ in range(3)]
@pytest.fixture()
def comments():
return [make_comment() for _ in range(3)]
@pytest.fixture()
def post():
return make_post()
@pytest.fixture()
def post_with_null_comment():
return make_post(with_comments=False)
@pytest.fixture()
def post_with_null_author():
return make_post(with_author=False)
@pytest.fixture()
def <|fim_middle|>():
return [make_post() for _ in range(3)]
<|fim▁end|> | posts |
<|file_name|>counters.py<|end_file_name|><|fim▁begin|>from baroque.entities.event import Event
class EventCounter:
"""A counter of events."""
def __init__(self):<|fim▁hole|> self.events_count = 0
self.events_count_by_type = dict()
def increment_counting(self, event):
"""Counts an event
Args:
event (:obj:`baroque.entities.event.Event`): the event to be counted
"""
assert isinstance(event, Event)
self.events_count += 1
t = type(event.type)
if t in self.events_count_by_type:
self.events_count_by_type[t] += 1
else:
self.events_count_by_type[t] = 1
def count_all(self):
"""Tells how many events have been counted globally
Returns:
int
"""
return self.events_count
def count(self, eventtype):
"""Tells how many events have been counted of the specified type
Args:
eventtype (:obj:`baroque.entities.eventtype.EventType`): the type of events to be counted
Returns:
int
"""
return self.events_count_by_type.get(type(eventtype), 0)<|fim▁end|> | |
<|file_name|>counters.py<|end_file_name|><|fim▁begin|>from baroque.entities.event import Event
class EventCounter:
<|fim_middle|>
<|fim▁end|> | """A counter of events."""
def __init__(self):
self.events_count = 0
self.events_count_by_type = dict()
def increment_counting(self, event):
"""Counts an event
Args:
event (:obj:`baroque.entities.event.Event`): the event to be counted
"""
assert isinstance(event, Event)
self.events_count += 1
t = type(event.type)
if t in self.events_count_by_type:
self.events_count_by_type[t] += 1
else:
self.events_count_by_type[t] = 1
def count_all(self):
"""Tells how many events have been counted globally
Returns:
int
"""
return self.events_count
def count(self, eventtype):
"""Tells how many events have been counted of the specified type
Args:
eventtype (:obj:`baroque.entities.eventtype.EventType`): the type of events to be counted
Returns:
int
"""
return self.events_count_by_type.get(type(eventtype), 0) |
<|file_name|>counters.py<|end_file_name|><|fim▁begin|>from baroque.entities.event import Event
class EventCounter:
"""A counter of events."""
def __init__(self):
<|fim_middle|>
def increment_counting(self, event):
"""Counts an event
Args:
event (:obj:`baroque.entities.event.Event`): the event to be counted
"""
assert isinstance(event, Event)
self.events_count += 1
t = type(event.type)
if t in self.events_count_by_type:
self.events_count_by_type[t] += 1
else:
self.events_count_by_type[t] = 1
def count_all(self):
"""Tells how many events have been counted globally
Returns:
int
"""
return self.events_count
def count(self, eventtype):
"""Tells how many events have been counted of the specified type
Args:
eventtype (:obj:`baroque.entities.eventtype.EventType`): the type of events to be counted
Returns:
int
"""
return self.events_count_by_type.get(type(eventtype), 0)
<|fim▁end|> | self.events_count = 0
self.events_count_by_type = dict() |
<|file_name|>counters.py<|end_file_name|><|fim▁begin|>from baroque.entities.event import Event
class EventCounter:
"""A counter of events."""
def __init__(self):
self.events_count = 0
self.events_count_by_type = dict()
def increment_counting(self, event):
<|fim_middle|>
def count_all(self):
"""Tells how many events have been counted globally
Returns:
int
"""
return self.events_count
def count(self, eventtype):
"""Tells how many events have been counted of the specified type
Args:
eventtype (:obj:`baroque.entities.eventtype.EventType`): the type of events to be counted
Returns:
int
"""
return self.events_count_by_type.get(type(eventtype), 0)
<|fim▁end|> | """Counts an event
Args:
event (:obj:`baroque.entities.event.Event`): the event to be counted
"""
assert isinstance(event, Event)
self.events_count += 1
t = type(event.type)
if t in self.events_count_by_type:
self.events_count_by_type[t] += 1
else:
self.events_count_by_type[t] = 1 |
<|file_name|>counters.py<|end_file_name|><|fim▁begin|>from baroque.entities.event import Event
class EventCounter:
"""A counter of events."""
def __init__(self):
self.events_count = 0
self.events_count_by_type = dict()
def increment_counting(self, event):
"""Counts an event
Args:
event (:obj:`baroque.entities.event.Event`): the event to be counted
"""
assert isinstance(event, Event)
self.events_count += 1
t = type(event.type)
if t in self.events_count_by_type:
self.events_count_by_type[t] += 1
else:
self.events_count_by_type[t] = 1
def count_all(self):
<|fim_middle|>
def count(self, eventtype):
"""Tells how many events have been counted of the specified type
Args:
eventtype (:obj:`baroque.entities.eventtype.EventType`): the type of events to be counted
Returns:
int
"""
return self.events_count_by_type.get(type(eventtype), 0)
<|fim▁end|> | """Tells how many events have been counted globally
Returns:
int
"""
return self.events_count |
<|file_name|>counters.py<|end_file_name|><|fim▁begin|>from baroque.entities.event import Event
class EventCounter:
"""A counter of events."""
def __init__(self):
self.events_count = 0
self.events_count_by_type = dict()
def increment_counting(self, event):
"""Counts an event
Args:
event (:obj:`baroque.entities.event.Event`): the event to be counted
"""
assert isinstance(event, Event)
self.events_count += 1
t = type(event.type)
if t in self.events_count_by_type:
self.events_count_by_type[t] += 1
else:
self.events_count_by_type[t] = 1
def count_all(self):
"""Tells how many events have been counted globally
Returns:
int
"""
return self.events_count
def count(self, eventtype):
<|fim_middle|>
<|fim▁end|> | """Tells how many events have been counted of the specified type
Args:
eventtype (:obj:`baroque.entities.eventtype.EventType`): the type of events to be counted
Returns:
int
"""
return self.events_count_by_type.get(type(eventtype), 0) |
<|file_name|>counters.py<|end_file_name|><|fim▁begin|>from baroque.entities.event import Event
class EventCounter:
"""A counter of events."""
def __init__(self):
self.events_count = 0
self.events_count_by_type = dict()
def increment_counting(self, event):
"""Counts an event
Args:
event (:obj:`baroque.entities.event.Event`): the event to be counted
"""
assert isinstance(event, Event)
self.events_count += 1
t = type(event.type)
if t in self.events_count_by_type:
<|fim_middle|>
else:
self.events_count_by_type[t] = 1
def count_all(self):
"""Tells how many events have been counted globally
Returns:
int
"""
return self.events_count
def count(self, eventtype):
"""Tells how many events have been counted of the specified type
Args:
eventtype (:obj:`baroque.entities.eventtype.EventType`): the type of events to be counted
Returns:
int
"""
return self.events_count_by_type.get(type(eventtype), 0)
<|fim▁end|> | self.events_count_by_type[t] += 1 |
<|file_name|>counters.py<|end_file_name|><|fim▁begin|>from baroque.entities.event import Event
class EventCounter:
"""A counter of events."""
def __init__(self):
self.events_count = 0
self.events_count_by_type = dict()
def increment_counting(self, event):
"""Counts an event
Args:
event (:obj:`baroque.entities.event.Event`): the event to be counted
"""
assert isinstance(event, Event)
self.events_count += 1
t = type(event.type)
if t in self.events_count_by_type:
self.events_count_by_type[t] += 1
else:
<|fim_middle|>
def count_all(self):
"""Tells how many events have been counted globally
Returns:
int
"""
return self.events_count
def count(self, eventtype):
"""Tells how many events have been counted of the specified type
Args:
eventtype (:obj:`baroque.entities.eventtype.EventType`): the type of events to be counted
Returns:
int
"""
return self.events_count_by_type.get(type(eventtype), 0)
<|fim▁end|> | self.events_count_by_type[t] = 1 |
<|file_name|>counters.py<|end_file_name|><|fim▁begin|>from baroque.entities.event import Event
class EventCounter:
"""A counter of events."""
def <|fim_middle|>(self):
self.events_count = 0
self.events_count_by_type = dict()
def increment_counting(self, event):
"""Counts an event
Args:
event (:obj:`baroque.entities.event.Event`): the event to be counted
"""
assert isinstance(event, Event)
self.events_count += 1
t = type(event.type)
if t in self.events_count_by_type:
self.events_count_by_type[t] += 1
else:
self.events_count_by_type[t] = 1
def count_all(self):
"""Tells how many events have been counted globally
Returns:
int
"""
return self.events_count
def count(self, eventtype):
"""Tells how many events have been counted of the specified type
Args:
eventtype (:obj:`baroque.entities.eventtype.EventType`): the type of events to be counted
Returns:
int
"""
return self.events_count_by_type.get(type(eventtype), 0)
<|fim▁end|> | __init__ |
<|file_name|>counters.py<|end_file_name|><|fim▁begin|>from baroque.entities.event import Event
class EventCounter:
"""A counter of events."""
def __init__(self):
self.events_count = 0
self.events_count_by_type = dict()
def <|fim_middle|>(self, event):
"""Counts an event
Args:
event (:obj:`baroque.entities.event.Event`): the event to be counted
"""
assert isinstance(event, Event)
self.events_count += 1
t = type(event.type)
if t in self.events_count_by_type:
self.events_count_by_type[t] += 1
else:
self.events_count_by_type[t] = 1
def count_all(self):
"""Tells how many events have been counted globally
Returns:
int
"""
return self.events_count
def count(self, eventtype):
"""Tells how many events have been counted of the specified type
Args:
eventtype (:obj:`baroque.entities.eventtype.EventType`): the type of events to be counted
Returns:
int
"""
return self.events_count_by_type.get(type(eventtype), 0)
<|fim▁end|> | increment_counting |
<|file_name|>counters.py<|end_file_name|><|fim▁begin|>from baroque.entities.event import Event
class EventCounter:
"""A counter of events."""
def __init__(self):
self.events_count = 0
self.events_count_by_type = dict()
def increment_counting(self, event):
"""Counts an event
Args:
event (:obj:`baroque.entities.event.Event`): the event to be counted
"""
assert isinstance(event, Event)
self.events_count += 1
t = type(event.type)
if t in self.events_count_by_type:
self.events_count_by_type[t] += 1
else:
self.events_count_by_type[t] = 1
def <|fim_middle|>(self):
"""Tells how many events have been counted globally
Returns:
int
"""
return self.events_count
def count(self, eventtype):
"""Tells how many events have been counted of the specified type
Args:
eventtype (:obj:`baroque.entities.eventtype.EventType`): the type of events to be counted
Returns:
int
"""
return self.events_count_by_type.get(type(eventtype), 0)
<|fim▁end|> | count_all |
<|file_name|>counters.py<|end_file_name|><|fim▁begin|>from baroque.entities.event import Event
class EventCounter:
"""A counter of events."""
def __init__(self):
self.events_count = 0
self.events_count_by_type = dict()
def increment_counting(self, event):
"""Counts an event
Args:
event (:obj:`baroque.entities.event.Event`): the event to be counted
"""
assert isinstance(event, Event)
self.events_count += 1
t = type(event.type)
if t in self.events_count_by_type:
self.events_count_by_type[t] += 1
else:
self.events_count_by_type[t] = 1
def count_all(self):
"""Tells how many events have been counted globally
Returns:
int
"""
return self.events_count
def <|fim_middle|>(self, eventtype):
"""Tells how many events have been counted of the specified type
Args:
eventtype (:obj:`baroque.entities.eventtype.EventType`): the type of events to be counted
Returns:
int
"""
return self.events_count_by_type.get(type(eventtype), 0)
<|fim▁end|> | count |
<|file_name|>fetch_avhrr_calcoeffs.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Satpy developers
#
# This file is part of satpy.
#
# satpy is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Fetch avhrr calibration coefficients."""
import datetime as dt
import os.path
import sys
import h5py
import urllib2
BASE_URL = "http://www.star.nesdis.noaa.gov/smcd/spb/fwu/homepage/" + \
"AVHRR/Op_Cal_AVHRR/"
URLS = {
"Metop-B":
{"ch1": BASE_URL + "Metop1_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "Metop1_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "Metop1_AVHRR_Libya_ch3a.txt"},
"Metop-A":
{"ch1": BASE_URL + "Metop2_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "Metop2_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "Metop2_AVHRR_Libya_ch3a.txt"},
"NOAA-16":
{"ch1": BASE_URL + "N16_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N16_AVHRR_Libya_ch2.txt"},
"NOAA-17":
{"ch1": BASE_URL + "N17_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N17_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "N17_AVHRR_Libya_ch3a.txt"},
"NOAA-18":
{"ch1": BASE_URL + "N18_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N18_AVHRR_Libya_ch2.txt"},
"NOAA-19":
{"ch1": BASE_URL + "N19_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N19_AVHRR_Libya_ch2.txt"}
}
def get_page(url):
"""Retrieve the given page."""
return urllib2.urlopen(url).read()
def get_coeffs(page):
"""Parse coefficients from the page."""
coeffs = {}
coeffs['datetime'] = []
coeffs['slope1'] = []
coeffs['intercept1'] = []
coeffs['slope2'] = []
coeffs['intercept2'] = []
slope1_idx, intercept1_idx, slope2_idx, intercept2_idx = \
None, None, None, None
date_idx = 0
for row in page.lower().split('\n'):
row = row.split()
if len(row) == 0:
continue
if row[0] == 'update':
# Get the column indices from the header line
slope1_idx = row.index('slope_lo')
intercept1_idx = row.index('int_lo')
slope2_idx = row.index('slope_hi')
intercept2_idx = row.index('int_hi')
continue
if slope1_idx is None:
continue
# In some cases the fields are connected, skip those rows
if max([slope1_idx, intercept1_idx,
slope2_idx, intercept2_idx]) >= len(row):
continue
try:
dat = dt.datetime.strptime(row[date_idx], "%m/%d/%Y")
except ValueError:
continue
coeffs['datetime'].append([dat.year, dat.month, dat.day])
coeffs['slope1'].append(float(row[slope1_idx]))
coeffs['intercept1'].append(float(row[intercept1_idx]))
coeffs['slope2'].append(float(row[slope2_idx]))
coeffs['intercept2'].append(float(row[intercept2_idx]))
return coeffs
def get_all_coeffs():
"""Get all available calibration coefficients for the satellites."""
coeffs = {}
for platform in URLS:
if platform not in coeffs:
coeffs[platform] = {}
for chan in URLS[platform].keys():
url = URLS[platform][chan]
print(url)
page = get_page(url)
coeffs[platform][chan] = get_coeffs(page)
return coeffs
def save_coeffs(coeffs, out_dir=''):
"""Save calibration coefficients to HDF5 files."""
for platform in coeffs.keys():
fname = os.path.join(out_dir, "%s_calibration_data.h5" % platform)
fid = h5py.File(fname, 'w')
for chan in coeffs[platform].keys():
fid.create_group(chan)
fid[chan]['datetime'] = coeffs[platform][chan]['datetime']
fid[chan]['slope1'] = coeffs[platform][chan]['slope1']
fid[chan]['intercept1'] = coeffs[platform][chan]['intercept1']
fid[chan]['slope2'] = coeffs[platform][chan]['slope2']
fid[chan]['intercept2'] = coeffs[platform][chan]['intercept2']
fid.close()
print("Calibration coefficients saved for %s" % platform)<|fim▁hole|>
def main():
"""Create calibration coefficient files for AVHRR."""
out_dir = sys.argv[1]
coeffs = get_all_coeffs()
save_coeffs(coeffs, out_dir=out_dir)
if __name__ == "__main__":
main()<|fim▁end|> | |
<|file_name|>fetch_avhrr_calcoeffs.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Satpy developers
#
# This file is part of satpy.
#
# satpy is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Fetch avhrr calibration coefficients."""
import datetime as dt
import os.path
import sys
import h5py
import urllib2
BASE_URL = "http://www.star.nesdis.noaa.gov/smcd/spb/fwu/homepage/" + \
"AVHRR/Op_Cal_AVHRR/"
URLS = {
"Metop-B":
{"ch1": BASE_URL + "Metop1_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "Metop1_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "Metop1_AVHRR_Libya_ch3a.txt"},
"Metop-A":
{"ch1": BASE_URL + "Metop2_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "Metop2_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "Metop2_AVHRR_Libya_ch3a.txt"},
"NOAA-16":
{"ch1": BASE_URL + "N16_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N16_AVHRR_Libya_ch2.txt"},
"NOAA-17":
{"ch1": BASE_URL + "N17_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N17_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "N17_AVHRR_Libya_ch3a.txt"},
"NOAA-18":
{"ch1": BASE_URL + "N18_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N18_AVHRR_Libya_ch2.txt"},
"NOAA-19":
{"ch1": BASE_URL + "N19_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N19_AVHRR_Libya_ch2.txt"}
}
def get_page(url):
<|fim_middle|>
def get_coeffs(page):
"""Parse coefficients from the page."""
coeffs = {}
coeffs['datetime'] = []
coeffs['slope1'] = []
coeffs['intercept1'] = []
coeffs['slope2'] = []
coeffs['intercept2'] = []
slope1_idx, intercept1_idx, slope2_idx, intercept2_idx = \
None, None, None, None
date_idx = 0
for row in page.lower().split('\n'):
row = row.split()
if len(row) == 0:
continue
if row[0] == 'update':
# Get the column indices from the header line
slope1_idx = row.index('slope_lo')
intercept1_idx = row.index('int_lo')
slope2_idx = row.index('slope_hi')
intercept2_idx = row.index('int_hi')
continue
if slope1_idx is None:
continue
# In some cases the fields are connected, skip those rows
if max([slope1_idx, intercept1_idx,
slope2_idx, intercept2_idx]) >= len(row):
continue
try:
dat = dt.datetime.strptime(row[date_idx], "%m/%d/%Y")
except ValueError:
continue
coeffs['datetime'].append([dat.year, dat.month, dat.day])
coeffs['slope1'].append(float(row[slope1_idx]))
coeffs['intercept1'].append(float(row[intercept1_idx]))
coeffs['slope2'].append(float(row[slope2_idx]))
coeffs['intercept2'].append(float(row[intercept2_idx]))
return coeffs
def get_all_coeffs():
"""Get all available calibration coefficients for the satellites."""
coeffs = {}
for platform in URLS:
if platform not in coeffs:
coeffs[platform] = {}
for chan in URLS[platform].keys():
url = URLS[platform][chan]
print(url)
page = get_page(url)
coeffs[platform][chan] = get_coeffs(page)
return coeffs
def save_coeffs(coeffs, out_dir=''):
"""Save calibration coefficients to HDF5 files."""
for platform in coeffs.keys():
fname = os.path.join(out_dir, "%s_calibration_data.h5" % platform)
fid = h5py.File(fname, 'w')
for chan in coeffs[platform].keys():
fid.create_group(chan)
fid[chan]['datetime'] = coeffs[platform][chan]['datetime']
fid[chan]['slope1'] = coeffs[platform][chan]['slope1']
fid[chan]['intercept1'] = coeffs[platform][chan]['intercept1']
fid[chan]['slope2'] = coeffs[platform][chan]['slope2']
fid[chan]['intercept2'] = coeffs[platform][chan]['intercept2']
fid.close()
print("Calibration coefficients saved for %s" % platform)
def main():
"""Create calibration coefficient files for AVHRR."""
out_dir = sys.argv[1]
coeffs = get_all_coeffs()
save_coeffs(coeffs, out_dir=out_dir)
if __name__ == "__main__":
main()
<|fim▁end|> | """Retrieve the given page."""
return urllib2.urlopen(url).read() |
<|file_name|>fetch_avhrr_calcoeffs.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Satpy developers
#
# This file is part of satpy.
#
# satpy is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Fetch avhrr calibration coefficients."""
import datetime as dt
import os.path
import sys
import h5py
import urllib2
BASE_URL = "http://www.star.nesdis.noaa.gov/smcd/spb/fwu/homepage/" + \
"AVHRR/Op_Cal_AVHRR/"
URLS = {
"Metop-B":
{"ch1": BASE_URL + "Metop1_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "Metop1_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "Metop1_AVHRR_Libya_ch3a.txt"},
"Metop-A":
{"ch1": BASE_URL + "Metop2_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "Metop2_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "Metop2_AVHRR_Libya_ch3a.txt"},
"NOAA-16":
{"ch1": BASE_URL + "N16_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N16_AVHRR_Libya_ch2.txt"},
"NOAA-17":
{"ch1": BASE_URL + "N17_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N17_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "N17_AVHRR_Libya_ch3a.txt"},
"NOAA-18":
{"ch1": BASE_URL + "N18_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N18_AVHRR_Libya_ch2.txt"},
"NOAA-19":
{"ch1": BASE_URL + "N19_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N19_AVHRR_Libya_ch2.txt"}
}
def get_page(url):
"""Retrieve the given page."""
return urllib2.urlopen(url).read()
def get_coeffs(page):
<|fim_middle|>
def get_all_coeffs():
"""Get all available calibration coefficients for the satellites."""
coeffs = {}
for platform in URLS:
if platform not in coeffs:
coeffs[platform] = {}
for chan in URLS[platform].keys():
url = URLS[platform][chan]
print(url)
page = get_page(url)
coeffs[platform][chan] = get_coeffs(page)
return coeffs
def save_coeffs(coeffs, out_dir=''):
"""Save calibration coefficients to HDF5 files."""
for platform in coeffs.keys():
fname = os.path.join(out_dir, "%s_calibration_data.h5" % platform)
fid = h5py.File(fname, 'w')
for chan in coeffs[platform].keys():
fid.create_group(chan)
fid[chan]['datetime'] = coeffs[platform][chan]['datetime']
fid[chan]['slope1'] = coeffs[platform][chan]['slope1']
fid[chan]['intercept1'] = coeffs[platform][chan]['intercept1']
fid[chan]['slope2'] = coeffs[platform][chan]['slope2']
fid[chan]['intercept2'] = coeffs[platform][chan]['intercept2']
fid.close()
print("Calibration coefficients saved for %s" % platform)
def main():
"""Create calibration coefficient files for AVHRR."""
out_dir = sys.argv[1]
coeffs = get_all_coeffs()
save_coeffs(coeffs, out_dir=out_dir)
if __name__ == "__main__":
main()
<|fim▁end|> | """Parse coefficients from the page."""
coeffs = {}
coeffs['datetime'] = []
coeffs['slope1'] = []
coeffs['intercept1'] = []
coeffs['slope2'] = []
coeffs['intercept2'] = []
slope1_idx, intercept1_idx, slope2_idx, intercept2_idx = \
None, None, None, None
date_idx = 0
for row in page.lower().split('\n'):
row = row.split()
if len(row) == 0:
continue
if row[0] == 'update':
# Get the column indices from the header line
slope1_idx = row.index('slope_lo')
intercept1_idx = row.index('int_lo')
slope2_idx = row.index('slope_hi')
intercept2_idx = row.index('int_hi')
continue
if slope1_idx is None:
continue
# In some cases the fields are connected, skip those rows
if max([slope1_idx, intercept1_idx,
slope2_idx, intercept2_idx]) >= len(row):
continue
try:
dat = dt.datetime.strptime(row[date_idx], "%m/%d/%Y")
except ValueError:
continue
coeffs['datetime'].append([dat.year, dat.month, dat.day])
coeffs['slope1'].append(float(row[slope1_idx]))
coeffs['intercept1'].append(float(row[intercept1_idx]))
coeffs['slope2'].append(float(row[slope2_idx]))
coeffs['intercept2'].append(float(row[intercept2_idx]))
return coeffs |
<|file_name|>fetch_avhrr_calcoeffs.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Satpy developers
#
# This file is part of satpy.
#
# satpy is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Fetch avhrr calibration coefficients."""
import datetime as dt
import os.path
import sys
import h5py
import urllib2
BASE_URL = "http://www.star.nesdis.noaa.gov/smcd/spb/fwu/homepage/" + \
"AVHRR/Op_Cal_AVHRR/"
URLS = {
"Metop-B":
{"ch1": BASE_URL + "Metop1_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "Metop1_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "Metop1_AVHRR_Libya_ch3a.txt"},
"Metop-A":
{"ch1": BASE_URL + "Metop2_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "Metop2_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "Metop2_AVHRR_Libya_ch3a.txt"},
"NOAA-16":
{"ch1": BASE_URL + "N16_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N16_AVHRR_Libya_ch2.txt"},
"NOAA-17":
{"ch1": BASE_URL + "N17_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N17_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "N17_AVHRR_Libya_ch3a.txt"},
"NOAA-18":
{"ch1": BASE_URL + "N18_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N18_AVHRR_Libya_ch2.txt"},
"NOAA-19":
{"ch1": BASE_URL + "N19_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N19_AVHRR_Libya_ch2.txt"}
}
def get_page(url):
"""Retrieve the given page."""
return urllib2.urlopen(url).read()
def get_coeffs(page):
"""Parse coefficients from the page."""
coeffs = {}
coeffs['datetime'] = []
coeffs['slope1'] = []
coeffs['intercept1'] = []
coeffs['slope2'] = []
coeffs['intercept2'] = []
slope1_idx, intercept1_idx, slope2_idx, intercept2_idx = \
None, None, None, None
date_idx = 0
for row in page.lower().split('\n'):
row = row.split()
if len(row) == 0:
continue
if row[0] == 'update':
# Get the column indices from the header line
slope1_idx = row.index('slope_lo')
intercept1_idx = row.index('int_lo')
slope2_idx = row.index('slope_hi')
intercept2_idx = row.index('int_hi')
continue
if slope1_idx is None:
continue
# In some cases the fields are connected, skip those rows
if max([slope1_idx, intercept1_idx,
slope2_idx, intercept2_idx]) >= len(row):
continue
try:
dat = dt.datetime.strptime(row[date_idx], "%m/%d/%Y")
except ValueError:
continue
coeffs['datetime'].append([dat.year, dat.month, dat.day])
coeffs['slope1'].append(float(row[slope1_idx]))
coeffs['intercept1'].append(float(row[intercept1_idx]))
coeffs['slope2'].append(float(row[slope2_idx]))
coeffs['intercept2'].append(float(row[intercept2_idx]))
return coeffs
def get_all_coeffs():
<|fim_middle|>
def save_coeffs(coeffs, out_dir=''):
"""Save calibration coefficients to HDF5 files."""
for platform in coeffs.keys():
fname = os.path.join(out_dir, "%s_calibration_data.h5" % platform)
fid = h5py.File(fname, 'w')
for chan in coeffs[platform].keys():
fid.create_group(chan)
fid[chan]['datetime'] = coeffs[platform][chan]['datetime']
fid[chan]['slope1'] = coeffs[platform][chan]['slope1']
fid[chan]['intercept1'] = coeffs[platform][chan]['intercept1']
fid[chan]['slope2'] = coeffs[platform][chan]['slope2']
fid[chan]['intercept2'] = coeffs[platform][chan]['intercept2']
fid.close()
print("Calibration coefficients saved for %s" % platform)
def main():
"""Create calibration coefficient files for AVHRR."""
out_dir = sys.argv[1]
coeffs = get_all_coeffs()
save_coeffs(coeffs, out_dir=out_dir)
if __name__ == "__main__":
main()
<|fim▁end|> | """Get all available calibration coefficients for the satellites."""
coeffs = {}
for platform in URLS:
if platform not in coeffs:
coeffs[platform] = {}
for chan in URLS[platform].keys():
url = URLS[platform][chan]
print(url)
page = get_page(url)
coeffs[platform][chan] = get_coeffs(page)
return coeffs |
<|file_name|>fetch_avhrr_calcoeffs.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Satpy developers
#
# This file is part of satpy.
#
# satpy is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Fetch avhrr calibration coefficients."""
import datetime as dt
import os.path
import sys
import h5py
import urllib2
BASE_URL = "http://www.star.nesdis.noaa.gov/smcd/spb/fwu/homepage/" + \
"AVHRR/Op_Cal_AVHRR/"
URLS = {
"Metop-B":
{"ch1": BASE_URL + "Metop1_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "Metop1_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "Metop1_AVHRR_Libya_ch3a.txt"},
"Metop-A":
{"ch1": BASE_URL + "Metop2_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "Metop2_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "Metop2_AVHRR_Libya_ch3a.txt"},
"NOAA-16":
{"ch1": BASE_URL + "N16_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N16_AVHRR_Libya_ch2.txt"},
"NOAA-17":
{"ch1": BASE_URL + "N17_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N17_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "N17_AVHRR_Libya_ch3a.txt"},
"NOAA-18":
{"ch1": BASE_URL + "N18_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N18_AVHRR_Libya_ch2.txt"},
"NOAA-19":
{"ch1": BASE_URL + "N19_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N19_AVHRR_Libya_ch2.txt"}
}
def get_page(url):
"""Retrieve the given page."""
return urllib2.urlopen(url).read()
def get_coeffs(page):
"""Parse coefficients from the page."""
coeffs = {}
coeffs['datetime'] = []
coeffs['slope1'] = []
coeffs['intercept1'] = []
coeffs['slope2'] = []
coeffs['intercept2'] = []
slope1_idx, intercept1_idx, slope2_idx, intercept2_idx = \
None, None, None, None
date_idx = 0
for row in page.lower().split('\n'):
row = row.split()
if len(row) == 0:
continue
if row[0] == 'update':
# Get the column indices from the header line
slope1_idx = row.index('slope_lo')
intercept1_idx = row.index('int_lo')
slope2_idx = row.index('slope_hi')
intercept2_idx = row.index('int_hi')
continue
if slope1_idx is None:
continue
# In some cases the fields are connected, skip those rows
if max([slope1_idx, intercept1_idx,
slope2_idx, intercept2_idx]) >= len(row):
continue
try:
dat = dt.datetime.strptime(row[date_idx], "%m/%d/%Y")
except ValueError:
continue
coeffs['datetime'].append([dat.year, dat.month, dat.day])
coeffs['slope1'].append(float(row[slope1_idx]))
coeffs['intercept1'].append(float(row[intercept1_idx]))
coeffs['slope2'].append(float(row[slope2_idx]))
coeffs['intercept2'].append(float(row[intercept2_idx]))
return coeffs
def get_all_coeffs():
"""Get all available calibration coefficients for the satellites."""
coeffs = {}
for platform in URLS:
if platform not in coeffs:
coeffs[platform] = {}
for chan in URLS[platform].keys():
url = URLS[platform][chan]
print(url)
page = get_page(url)
coeffs[platform][chan] = get_coeffs(page)
return coeffs
def save_coeffs(coeffs, out_dir=''):
<|fim_middle|>
def main():
"""Create calibration coefficient files for AVHRR."""
out_dir = sys.argv[1]
coeffs = get_all_coeffs()
save_coeffs(coeffs, out_dir=out_dir)
if __name__ == "__main__":
main()
<|fim▁end|> | """Save calibration coefficients to HDF5 files."""
for platform in coeffs.keys():
fname = os.path.join(out_dir, "%s_calibration_data.h5" % platform)
fid = h5py.File(fname, 'w')
for chan in coeffs[platform].keys():
fid.create_group(chan)
fid[chan]['datetime'] = coeffs[platform][chan]['datetime']
fid[chan]['slope1'] = coeffs[platform][chan]['slope1']
fid[chan]['intercept1'] = coeffs[platform][chan]['intercept1']
fid[chan]['slope2'] = coeffs[platform][chan]['slope2']
fid[chan]['intercept2'] = coeffs[platform][chan]['intercept2']
fid.close()
print("Calibration coefficients saved for %s" % platform) |
<|file_name|>fetch_avhrr_calcoeffs.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Satpy developers
#
# This file is part of satpy.
#
# satpy is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Fetch avhrr calibration coefficients."""
import datetime as dt
import os.path
import sys
import h5py
import urllib2
BASE_URL = "http://www.star.nesdis.noaa.gov/smcd/spb/fwu/homepage/" + \
"AVHRR/Op_Cal_AVHRR/"
URLS = {
"Metop-B":
{"ch1": BASE_URL + "Metop1_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "Metop1_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "Metop1_AVHRR_Libya_ch3a.txt"},
"Metop-A":
{"ch1": BASE_URL + "Metop2_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "Metop2_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "Metop2_AVHRR_Libya_ch3a.txt"},
"NOAA-16":
{"ch1": BASE_URL + "N16_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N16_AVHRR_Libya_ch2.txt"},
"NOAA-17":
{"ch1": BASE_URL + "N17_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N17_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "N17_AVHRR_Libya_ch3a.txt"},
"NOAA-18":
{"ch1": BASE_URL + "N18_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N18_AVHRR_Libya_ch2.txt"},
"NOAA-19":
{"ch1": BASE_URL + "N19_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N19_AVHRR_Libya_ch2.txt"}
}
def get_page(url):
"""Retrieve the given page."""
return urllib2.urlopen(url).read()
def get_coeffs(page):
"""Parse coefficients from the page."""
coeffs = {}
coeffs['datetime'] = []
coeffs['slope1'] = []
coeffs['intercept1'] = []
coeffs['slope2'] = []
coeffs['intercept2'] = []
slope1_idx, intercept1_idx, slope2_idx, intercept2_idx = \
None, None, None, None
date_idx = 0
for row in page.lower().split('\n'):
row = row.split()
if len(row) == 0:
continue
if row[0] == 'update':
# Get the column indices from the header line
slope1_idx = row.index('slope_lo')
intercept1_idx = row.index('int_lo')
slope2_idx = row.index('slope_hi')
intercept2_idx = row.index('int_hi')
continue
if slope1_idx is None:
continue
# In some cases the fields are connected, skip those rows
if max([slope1_idx, intercept1_idx,
slope2_idx, intercept2_idx]) >= len(row):
continue
try:
dat = dt.datetime.strptime(row[date_idx], "%m/%d/%Y")
except ValueError:
continue
coeffs['datetime'].append([dat.year, dat.month, dat.day])
coeffs['slope1'].append(float(row[slope1_idx]))
coeffs['intercept1'].append(float(row[intercept1_idx]))
coeffs['slope2'].append(float(row[slope2_idx]))
coeffs['intercept2'].append(float(row[intercept2_idx]))
return coeffs
def get_all_coeffs():
"""Get all available calibration coefficients for the satellites."""
coeffs = {}
for platform in URLS:
if platform not in coeffs:
coeffs[platform] = {}
for chan in URLS[platform].keys():
url = URLS[platform][chan]
print(url)
page = get_page(url)
coeffs[platform][chan] = get_coeffs(page)
return coeffs
def save_coeffs(coeffs, out_dir=''):
"""Save calibration coefficients to HDF5 files."""
for platform in coeffs.keys():
fname = os.path.join(out_dir, "%s_calibration_data.h5" % platform)
fid = h5py.File(fname, 'w')
for chan in coeffs[platform].keys():
fid.create_group(chan)
fid[chan]['datetime'] = coeffs[platform][chan]['datetime']
fid[chan]['slope1'] = coeffs[platform][chan]['slope1']
fid[chan]['intercept1'] = coeffs[platform][chan]['intercept1']
fid[chan]['slope2'] = coeffs[platform][chan]['slope2']
fid[chan]['intercept2'] = coeffs[platform][chan]['intercept2']
fid.close()
print("Calibration coefficients saved for %s" % platform)
def main():
<|fim_middle|>
if __name__ == "__main__":
main()
<|fim▁end|> | """Create calibration coefficient files for AVHRR."""
out_dir = sys.argv[1]
coeffs = get_all_coeffs()
save_coeffs(coeffs, out_dir=out_dir) |
<|file_name|>fetch_avhrr_calcoeffs.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Satpy developers
#
# This file is part of satpy.
#
# satpy is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Fetch avhrr calibration coefficients."""
import datetime as dt
import os.path
import sys
import h5py
import urllib2
BASE_URL = "http://www.star.nesdis.noaa.gov/smcd/spb/fwu/homepage/" + \
"AVHRR/Op_Cal_AVHRR/"
URLS = {
"Metop-B":
{"ch1": BASE_URL + "Metop1_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "Metop1_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "Metop1_AVHRR_Libya_ch3a.txt"},
"Metop-A":
{"ch1": BASE_URL + "Metop2_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "Metop2_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "Metop2_AVHRR_Libya_ch3a.txt"},
"NOAA-16":
{"ch1": BASE_URL + "N16_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N16_AVHRR_Libya_ch2.txt"},
"NOAA-17":
{"ch1": BASE_URL + "N17_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N17_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "N17_AVHRR_Libya_ch3a.txt"},
"NOAA-18":
{"ch1": BASE_URL + "N18_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N18_AVHRR_Libya_ch2.txt"},
"NOAA-19":
{"ch1": BASE_URL + "N19_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N19_AVHRR_Libya_ch2.txt"}
}
def get_page(url):
"""Retrieve the given page."""
return urllib2.urlopen(url).read()
def get_coeffs(page):
"""Parse coefficients from the page."""
coeffs = {}
coeffs['datetime'] = []
coeffs['slope1'] = []
coeffs['intercept1'] = []
coeffs['slope2'] = []
coeffs['intercept2'] = []
slope1_idx, intercept1_idx, slope2_idx, intercept2_idx = \
None, None, None, None
date_idx = 0
for row in page.lower().split('\n'):
row = row.split()
if len(row) == 0:
<|fim_middle|>
if row[0] == 'update':
# Get the column indices from the header line
slope1_idx = row.index('slope_lo')
intercept1_idx = row.index('int_lo')
slope2_idx = row.index('slope_hi')
intercept2_idx = row.index('int_hi')
continue
if slope1_idx is None:
continue
# In some cases the fields are connected, skip those rows
if max([slope1_idx, intercept1_idx,
slope2_idx, intercept2_idx]) >= len(row):
continue
try:
dat = dt.datetime.strptime(row[date_idx], "%m/%d/%Y")
except ValueError:
continue
coeffs['datetime'].append([dat.year, dat.month, dat.day])
coeffs['slope1'].append(float(row[slope1_idx]))
coeffs['intercept1'].append(float(row[intercept1_idx]))
coeffs['slope2'].append(float(row[slope2_idx]))
coeffs['intercept2'].append(float(row[intercept2_idx]))
return coeffs
def get_all_coeffs():
"""Get all available calibration coefficients for the satellites."""
coeffs = {}
for platform in URLS:
if platform not in coeffs:
coeffs[platform] = {}
for chan in URLS[platform].keys():
url = URLS[platform][chan]
print(url)
page = get_page(url)
coeffs[platform][chan] = get_coeffs(page)
return coeffs
def save_coeffs(coeffs, out_dir=''):
"""Save calibration coefficients to HDF5 files."""
for platform in coeffs.keys():
fname = os.path.join(out_dir, "%s_calibration_data.h5" % platform)
fid = h5py.File(fname, 'w')
for chan in coeffs[platform].keys():
fid.create_group(chan)
fid[chan]['datetime'] = coeffs[platform][chan]['datetime']
fid[chan]['slope1'] = coeffs[platform][chan]['slope1']
fid[chan]['intercept1'] = coeffs[platform][chan]['intercept1']
fid[chan]['slope2'] = coeffs[platform][chan]['slope2']
fid[chan]['intercept2'] = coeffs[platform][chan]['intercept2']
fid.close()
print("Calibration coefficients saved for %s" % platform)
def main():
"""Create calibration coefficient files for AVHRR."""
out_dir = sys.argv[1]
coeffs = get_all_coeffs()
save_coeffs(coeffs, out_dir=out_dir)
if __name__ == "__main__":
main()
<|fim▁end|> | continue |
<|file_name|>fetch_avhrr_calcoeffs.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Satpy developers
#
# This file is part of satpy.
#
# satpy is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Fetch avhrr calibration coefficients."""
import datetime as dt
import os.path
import sys
import h5py
import urllib2
BASE_URL = "http://www.star.nesdis.noaa.gov/smcd/spb/fwu/homepage/" + \
"AVHRR/Op_Cal_AVHRR/"
URLS = {
"Metop-B":
{"ch1": BASE_URL + "Metop1_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "Metop1_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "Metop1_AVHRR_Libya_ch3a.txt"},
"Metop-A":
{"ch1": BASE_URL + "Metop2_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "Metop2_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "Metop2_AVHRR_Libya_ch3a.txt"},
"NOAA-16":
{"ch1": BASE_URL + "N16_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N16_AVHRR_Libya_ch2.txt"},
"NOAA-17":
{"ch1": BASE_URL + "N17_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N17_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "N17_AVHRR_Libya_ch3a.txt"},
"NOAA-18":
{"ch1": BASE_URL + "N18_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N18_AVHRR_Libya_ch2.txt"},
"NOAA-19":
{"ch1": BASE_URL + "N19_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N19_AVHRR_Libya_ch2.txt"}
}
def get_page(url):
"""Retrieve the given page."""
return urllib2.urlopen(url).read()
def get_coeffs(page):
"""Parse coefficients from the page."""
coeffs = {}
coeffs['datetime'] = []
coeffs['slope1'] = []
coeffs['intercept1'] = []
coeffs['slope2'] = []
coeffs['intercept2'] = []
slope1_idx, intercept1_idx, slope2_idx, intercept2_idx = \
None, None, None, None
date_idx = 0
for row in page.lower().split('\n'):
row = row.split()
if len(row) == 0:
continue
if row[0] == 'update':
# Get the column indices from the header line
<|fim_middle|>
if slope1_idx is None:
continue
# In some cases the fields are connected, skip those rows
if max([slope1_idx, intercept1_idx,
slope2_idx, intercept2_idx]) >= len(row):
continue
try:
dat = dt.datetime.strptime(row[date_idx], "%m/%d/%Y")
except ValueError:
continue
coeffs['datetime'].append([dat.year, dat.month, dat.day])
coeffs['slope1'].append(float(row[slope1_idx]))
coeffs['intercept1'].append(float(row[intercept1_idx]))
coeffs['slope2'].append(float(row[slope2_idx]))
coeffs['intercept2'].append(float(row[intercept2_idx]))
return coeffs
def get_all_coeffs():
"""Get all available calibration coefficients for the satellites."""
coeffs = {}
for platform in URLS:
if platform not in coeffs:
coeffs[platform] = {}
for chan in URLS[platform].keys():
url = URLS[platform][chan]
print(url)
page = get_page(url)
coeffs[platform][chan] = get_coeffs(page)
return coeffs
def save_coeffs(coeffs, out_dir=''):
"""Save calibration coefficients to HDF5 files."""
for platform in coeffs.keys():
fname = os.path.join(out_dir, "%s_calibration_data.h5" % platform)
fid = h5py.File(fname, 'w')
for chan in coeffs[platform].keys():
fid.create_group(chan)
fid[chan]['datetime'] = coeffs[platform][chan]['datetime']
fid[chan]['slope1'] = coeffs[platform][chan]['slope1']
fid[chan]['intercept1'] = coeffs[platform][chan]['intercept1']
fid[chan]['slope2'] = coeffs[platform][chan]['slope2']
fid[chan]['intercept2'] = coeffs[platform][chan]['intercept2']
fid.close()
print("Calibration coefficients saved for %s" % platform)
def main():
"""Create calibration coefficient files for AVHRR."""
out_dir = sys.argv[1]
coeffs = get_all_coeffs()
save_coeffs(coeffs, out_dir=out_dir)
if __name__ == "__main__":
main()
<|fim▁end|> | slope1_idx = row.index('slope_lo')
intercept1_idx = row.index('int_lo')
slope2_idx = row.index('slope_hi')
intercept2_idx = row.index('int_hi')
continue |
<|file_name|>fetch_avhrr_calcoeffs.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Satpy developers
#
# This file is part of satpy.
#
# satpy is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Fetch avhrr calibration coefficients."""
import datetime as dt
import os.path
import sys
import h5py
import urllib2
BASE_URL = "http://www.star.nesdis.noaa.gov/smcd/spb/fwu/homepage/" + \
"AVHRR/Op_Cal_AVHRR/"
URLS = {
"Metop-B":
{"ch1": BASE_URL + "Metop1_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "Metop1_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "Metop1_AVHRR_Libya_ch3a.txt"},
"Metop-A":
{"ch1": BASE_URL + "Metop2_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "Metop2_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "Metop2_AVHRR_Libya_ch3a.txt"},
"NOAA-16":
{"ch1": BASE_URL + "N16_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N16_AVHRR_Libya_ch2.txt"},
"NOAA-17":
{"ch1": BASE_URL + "N17_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N17_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "N17_AVHRR_Libya_ch3a.txt"},
"NOAA-18":
{"ch1": BASE_URL + "N18_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N18_AVHRR_Libya_ch2.txt"},
"NOAA-19":
{"ch1": BASE_URL + "N19_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N19_AVHRR_Libya_ch2.txt"}
}
def get_page(url):
"""Retrieve the given page."""
return urllib2.urlopen(url).read()
def get_coeffs(page):
"""Parse coefficients from the page."""
coeffs = {}
coeffs['datetime'] = []
coeffs['slope1'] = []
coeffs['intercept1'] = []
coeffs['slope2'] = []
coeffs['intercept2'] = []
slope1_idx, intercept1_idx, slope2_idx, intercept2_idx = \
None, None, None, None
date_idx = 0
for row in page.lower().split('\n'):
row = row.split()
if len(row) == 0:
continue
if row[0] == 'update':
# Get the column indices from the header line
slope1_idx = row.index('slope_lo')
intercept1_idx = row.index('int_lo')
slope2_idx = row.index('slope_hi')
intercept2_idx = row.index('int_hi')
continue
if slope1_idx is None:
<|fim_middle|>
# In some cases the fields are connected, skip those rows
if max([slope1_idx, intercept1_idx,
slope2_idx, intercept2_idx]) >= len(row):
continue
try:
dat = dt.datetime.strptime(row[date_idx], "%m/%d/%Y")
except ValueError:
continue
coeffs['datetime'].append([dat.year, dat.month, dat.day])
coeffs['slope1'].append(float(row[slope1_idx]))
coeffs['intercept1'].append(float(row[intercept1_idx]))
coeffs['slope2'].append(float(row[slope2_idx]))
coeffs['intercept2'].append(float(row[intercept2_idx]))
return coeffs
def get_all_coeffs():
"""Get all available calibration coefficients for the satellites."""
coeffs = {}
for platform in URLS:
if platform not in coeffs:
coeffs[platform] = {}
for chan in URLS[platform].keys():
url = URLS[platform][chan]
print(url)
page = get_page(url)
coeffs[platform][chan] = get_coeffs(page)
return coeffs
def save_coeffs(coeffs, out_dir=''):
"""Save calibration coefficients to HDF5 files."""
for platform in coeffs.keys():
fname = os.path.join(out_dir, "%s_calibration_data.h5" % platform)
fid = h5py.File(fname, 'w')
for chan in coeffs[platform].keys():
fid.create_group(chan)
fid[chan]['datetime'] = coeffs[platform][chan]['datetime']
fid[chan]['slope1'] = coeffs[platform][chan]['slope1']
fid[chan]['intercept1'] = coeffs[platform][chan]['intercept1']
fid[chan]['slope2'] = coeffs[platform][chan]['slope2']
fid[chan]['intercept2'] = coeffs[platform][chan]['intercept2']
fid.close()
print("Calibration coefficients saved for %s" % platform)
def main():
"""Create calibration coefficient files for AVHRR."""
out_dir = sys.argv[1]
coeffs = get_all_coeffs()
save_coeffs(coeffs, out_dir=out_dir)
if __name__ == "__main__":
main()
<|fim▁end|> | continue |
<|file_name|>fetch_avhrr_calcoeffs.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Satpy developers
#
# This file is part of satpy.
#
# satpy is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Fetch avhrr calibration coefficients."""
import datetime as dt
import os.path
import sys
import h5py
import urllib2
BASE_URL = "http://www.star.nesdis.noaa.gov/smcd/spb/fwu/homepage/" + \
"AVHRR/Op_Cal_AVHRR/"
URLS = {
"Metop-B":
{"ch1": BASE_URL + "Metop1_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "Metop1_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "Metop1_AVHRR_Libya_ch3a.txt"},
"Metop-A":
{"ch1": BASE_URL + "Metop2_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "Metop2_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "Metop2_AVHRR_Libya_ch3a.txt"},
"NOAA-16":
{"ch1": BASE_URL + "N16_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N16_AVHRR_Libya_ch2.txt"},
"NOAA-17":
{"ch1": BASE_URL + "N17_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N17_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "N17_AVHRR_Libya_ch3a.txt"},
"NOAA-18":
{"ch1": BASE_URL + "N18_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N18_AVHRR_Libya_ch2.txt"},
"NOAA-19":
{"ch1": BASE_URL + "N19_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N19_AVHRR_Libya_ch2.txt"}
}
def get_page(url):
"""Retrieve the given page."""
return urllib2.urlopen(url).read()
def get_coeffs(page):
"""Parse coefficients from the page."""
coeffs = {}
coeffs['datetime'] = []
coeffs['slope1'] = []
coeffs['intercept1'] = []
coeffs['slope2'] = []
coeffs['intercept2'] = []
slope1_idx, intercept1_idx, slope2_idx, intercept2_idx = \
None, None, None, None
date_idx = 0
for row in page.lower().split('\n'):
row = row.split()
if len(row) == 0:
continue
if row[0] == 'update':
# Get the column indices from the header line
slope1_idx = row.index('slope_lo')
intercept1_idx = row.index('int_lo')
slope2_idx = row.index('slope_hi')
intercept2_idx = row.index('int_hi')
continue
if slope1_idx is None:
continue
# In some cases the fields are connected, skip those rows
if max([slope1_idx, intercept1_idx,
slope2_idx, intercept2_idx]) >= len(row):
<|fim_middle|>
try:
dat = dt.datetime.strptime(row[date_idx], "%m/%d/%Y")
except ValueError:
continue
coeffs['datetime'].append([dat.year, dat.month, dat.day])
coeffs['slope1'].append(float(row[slope1_idx]))
coeffs['intercept1'].append(float(row[intercept1_idx]))
coeffs['slope2'].append(float(row[slope2_idx]))
coeffs['intercept2'].append(float(row[intercept2_idx]))
return coeffs
def get_all_coeffs():
"""Get all available calibration coefficients for the satellites."""
coeffs = {}
for platform in URLS:
if platform not in coeffs:
coeffs[platform] = {}
for chan in URLS[platform].keys():
url = URLS[platform][chan]
print(url)
page = get_page(url)
coeffs[platform][chan] = get_coeffs(page)
return coeffs
def save_coeffs(coeffs, out_dir=''):
"""Save calibration coefficients to HDF5 files."""
for platform in coeffs.keys():
fname = os.path.join(out_dir, "%s_calibration_data.h5" % platform)
fid = h5py.File(fname, 'w')
for chan in coeffs[platform].keys():
fid.create_group(chan)
fid[chan]['datetime'] = coeffs[platform][chan]['datetime']
fid[chan]['slope1'] = coeffs[platform][chan]['slope1']
fid[chan]['intercept1'] = coeffs[platform][chan]['intercept1']
fid[chan]['slope2'] = coeffs[platform][chan]['slope2']
fid[chan]['intercept2'] = coeffs[platform][chan]['intercept2']
fid.close()
print("Calibration coefficients saved for %s" % platform)
def main():
"""Create calibration coefficient files for AVHRR."""
out_dir = sys.argv[1]
coeffs = get_all_coeffs()
save_coeffs(coeffs, out_dir=out_dir)
if __name__ == "__main__":
main()
<|fim▁end|> | continue |
<|file_name|>fetch_avhrr_calcoeffs.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Satpy developers
#
# This file is part of satpy.
#
# satpy is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Fetch avhrr calibration coefficients."""
import datetime as dt
import os.path
import sys
import h5py
import urllib2
BASE_URL = "http://www.star.nesdis.noaa.gov/smcd/spb/fwu/homepage/" + \
"AVHRR/Op_Cal_AVHRR/"
URLS = {
"Metop-B":
{"ch1": BASE_URL + "Metop1_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "Metop1_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "Metop1_AVHRR_Libya_ch3a.txt"},
"Metop-A":
{"ch1": BASE_URL + "Metop2_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "Metop2_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "Metop2_AVHRR_Libya_ch3a.txt"},
"NOAA-16":
{"ch1": BASE_URL + "N16_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N16_AVHRR_Libya_ch2.txt"},
"NOAA-17":
{"ch1": BASE_URL + "N17_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N17_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "N17_AVHRR_Libya_ch3a.txt"},
"NOAA-18":
{"ch1": BASE_URL + "N18_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N18_AVHRR_Libya_ch2.txt"},
"NOAA-19":
{"ch1": BASE_URL + "N19_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N19_AVHRR_Libya_ch2.txt"}
}
def get_page(url):
"""Retrieve the given page."""
return urllib2.urlopen(url).read()
def get_coeffs(page):
"""Parse coefficients from the page."""
coeffs = {}
coeffs['datetime'] = []
coeffs['slope1'] = []
coeffs['intercept1'] = []
coeffs['slope2'] = []
coeffs['intercept2'] = []
slope1_idx, intercept1_idx, slope2_idx, intercept2_idx = \
None, None, None, None
date_idx = 0
for row in page.lower().split('\n'):
row = row.split()
if len(row) == 0:
continue
if row[0] == 'update':
# Get the column indices from the header line
slope1_idx = row.index('slope_lo')
intercept1_idx = row.index('int_lo')
slope2_idx = row.index('slope_hi')
intercept2_idx = row.index('int_hi')
continue
if slope1_idx is None:
continue
# In some cases the fields are connected, skip those rows
if max([slope1_idx, intercept1_idx,
slope2_idx, intercept2_idx]) >= len(row):
continue
try:
dat = dt.datetime.strptime(row[date_idx], "%m/%d/%Y")
except ValueError:
continue
coeffs['datetime'].append([dat.year, dat.month, dat.day])
coeffs['slope1'].append(float(row[slope1_idx]))
coeffs['intercept1'].append(float(row[intercept1_idx]))
coeffs['slope2'].append(float(row[slope2_idx]))
coeffs['intercept2'].append(float(row[intercept2_idx]))
return coeffs
def get_all_coeffs():
"""Get all available calibration coefficients for the satellites."""
coeffs = {}
for platform in URLS:
if platform not in coeffs:
<|fim_middle|>
for chan in URLS[platform].keys():
url = URLS[platform][chan]
print(url)
page = get_page(url)
coeffs[platform][chan] = get_coeffs(page)
return coeffs
def save_coeffs(coeffs, out_dir=''):
"""Save calibration coefficients to HDF5 files."""
for platform in coeffs.keys():
fname = os.path.join(out_dir, "%s_calibration_data.h5" % platform)
fid = h5py.File(fname, 'w')
for chan in coeffs[platform].keys():
fid.create_group(chan)
fid[chan]['datetime'] = coeffs[platform][chan]['datetime']
fid[chan]['slope1'] = coeffs[platform][chan]['slope1']
fid[chan]['intercept1'] = coeffs[platform][chan]['intercept1']
fid[chan]['slope2'] = coeffs[platform][chan]['slope2']
fid[chan]['intercept2'] = coeffs[platform][chan]['intercept2']
fid.close()
print("Calibration coefficients saved for %s" % platform)
def main():
"""Create calibration coefficient files for AVHRR."""
out_dir = sys.argv[1]
coeffs = get_all_coeffs()
save_coeffs(coeffs, out_dir=out_dir)
if __name__ == "__main__":
main()
<|fim▁end|> | coeffs[platform] = {} |
<|file_name|>fetch_avhrr_calcoeffs.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Satpy developers
#
# This file is part of satpy.
#
# satpy is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Fetch avhrr calibration coefficients."""
import datetime as dt
import os.path
import sys
import h5py
import urllib2
BASE_URL = "http://www.star.nesdis.noaa.gov/smcd/spb/fwu/homepage/" + \
"AVHRR/Op_Cal_AVHRR/"
URLS = {
"Metop-B":
{"ch1": BASE_URL + "Metop1_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "Metop1_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "Metop1_AVHRR_Libya_ch3a.txt"},
"Metop-A":
{"ch1": BASE_URL + "Metop2_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "Metop2_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "Metop2_AVHRR_Libya_ch3a.txt"},
"NOAA-16":
{"ch1": BASE_URL + "N16_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N16_AVHRR_Libya_ch2.txt"},
"NOAA-17":
{"ch1": BASE_URL + "N17_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N17_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "N17_AVHRR_Libya_ch3a.txt"},
"NOAA-18":
{"ch1": BASE_URL + "N18_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N18_AVHRR_Libya_ch2.txt"},
"NOAA-19":
{"ch1": BASE_URL + "N19_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N19_AVHRR_Libya_ch2.txt"}
}
def get_page(url):
"""Retrieve the given page."""
return urllib2.urlopen(url).read()
def get_coeffs(page):
"""Parse coefficients from the page."""
coeffs = {}
coeffs['datetime'] = []
coeffs['slope1'] = []
coeffs['intercept1'] = []
coeffs['slope2'] = []
coeffs['intercept2'] = []
slope1_idx, intercept1_idx, slope2_idx, intercept2_idx = \
None, None, None, None
date_idx = 0
for row in page.lower().split('\n'):
row = row.split()
if len(row) == 0:
continue
if row[0] == 'update':
# Get the column indices from the header line
slope1_idx = row.index('slope_lo')
intercept1_idx = row.index('int_lo')
slope2_idx = row.index('slope_hi')
intercept2_idx = row.index('int_hi')
continue
if slope1_idx is None:
continue
# In some cases the fields are connected, skip those rows
if max([slope1_idx, intercept1_idx,
slope2_idx, intercept2_idx]) >= len(row):
continue
try:
dat = dt.datetime.strptime(row[date_idx], "%m/%d/%Y")
except ValueError:
continue
coeffs['datetime'].append([dat.year, dat.month, dat.day])
coeffs['slope1'].append(float(row[slope1_idx]))
coeffs['intercept1'].append(float(row[intercept1_idx]))
coeffs['slope2'].append(float(row[slope2_idx]))
coeffs['intercept2'].append(float(row[intercept2_idx]))
return coeffs
def get_all_coeffs():
"""Get all available calibration coefficients for the satellites."""
coeffs = {}
for platform in URLS:
if platform not in coeffs:
coeffs[platform] = {}
for chan in URLS[platform].keys():
url = URLS[platform][chan]
print(url)
page = get_page(url)
coeffs[platform][chan] = get_coeffs(page)
return coeffs
def save_coeffs(coeffs, out_dir=''):
"""Save calibration coefficients to HDF5 files."""
for platform in coeffs.keys():
fname = os.path.join(out_dir, "%s_calibration_data.h5" % platform)
fid = h5py.File(fname, 'w')
for chan in coeffs[platform].keys():
fid.create_group(chan)
fid[chan]['datetime'] = coeffs[platform][chan]['datetime']
fid[chan]['slope1'] = coeffs[platform][chan]['slope1']
fid[chan]['intercept1'] = coeffs[platform][chan]['intercept1']
fid[chan]['slope2'] = coeffs[platform][chan]['slope2']
fid[chan]['intercept2'] = coeffs[platform][chan]['intercept2']
fid.close()
print("Calibration coefficients saved for %s" % platform)
def main():
"""Create calibration coefficient files for AVHRR."""
out_dir = sys.argv[1]
coeffs = get_all_coeffs()
save_coeffs(coeffs, out_dir=out_dir)
if __name__ == "__main__":
<|fim_middle|>
<|fim▁end|> | main() |
<|file_name|>fetch_avhrr_calcoeffs.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Satpy developers
#
# This file is part of satpy.
#
# satpy is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Fetch avhrr calibration coefficients."""
import datetime as dt
import os.path
import sys
import h5py
import urllib2
BASE_URL = "http://www.star.nesdis.noaa.gov/smcd/spb/fwu/homepage/" + \
"AVHRR/Op_Cal_AVHRR/"
URLS = {
"Metop-B":
{"ch1": BASE_URL + "Metop1_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "Metop1_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "Metop1_AVHRR_Libya_ch3a.txt"},
"Metop-A":
{"ch1": BASE_URL + "Metop2_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "Metop2_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "Metop2_AVHRR_Libya_ch3a.txt"},
"NOAA-16":
{"ch1": BASE_URL + "N16_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N16_AVHRR_Libya_ch2.txt"},
"NOAA-17":
{"ch1": BASE_URL + "N17_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N17_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "N17_AVHRR_Libya_ch3a.txt"},
"NOAA-18":
{"ch1": BASE_URL + "N18_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N18_AVHRR_Libya_ch2.txt"},
"NOAA-19":
{"ch1": BASE_URL + "N19_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N19_AVHRR_Libya_ch2.txt"}
}
def <|fim_middle|>(url):
"""Retrieve the given page."""
return urllib2.urlopen(url).read()
def get_coeffs(page):
"""Parse coefficients from the page."""
coeffs = {}
coeffs['datetime'] = []
coeffs['slope1'] = []
coeffs['intercept1'] = []
coeffs['slope2'] = []
coeffs['intercept2'] = []
slope1_idx, intercept1_idx, slope2_idx, intercept2_idx = \
None, None, None, None
date_idx = 0
for row in page.lower().split('\n'):
row = row.split()
if len(row) == 0:
continue
if row[0] == 'update':
# Get the column indices from the header line
slope1_idx = row.index('slope_lo')
intercept1_idx = row.index('int_lo')
slope2_idx = row.index('slope_hi')
intercept2_idx = row.index('int_hi')
continue
if slope1_idx is None:
continue
# In some cases the fields are connected, skip those rows
if max([slope1_idx, intercept1_idx,
slope2_idx, intercept2_idx]) >= len(row):
continue
try:
dat = dt.datetime.strptime(row[date_idx], "%m/%d/%Y")
except ValueError:
continue
coeffs['datetime'].append([dat.year, dat.month, dat.day])
coeffs['slope1'].append(float(row[slope1_idx]))
coeffs['intercept1'].append(float(row[intercept1_idx]))
coeffs['slope2'].append(float(row[slope2_idx]))
coeffs['intercept2'].append(float(row[intercept2_idx]))
return coeffs
def get_all_coeffs():
"""Get all available calibration coefficients for the satellites."""
coeffs = {}
for platform in URLS:
if platform not in coeffs:
coeffs[platform] = {}
for chan in URLS[platform].keys():
url = URLS[platform][chan]
print(url)
page = get_page(url)
coeffs[platform][chan] = get_coeffs(page)
return coeffs
def save_coeffs(coeffs, out_dir=''):
"""Save calibration coefficients to HDF5 files."""
for platform in coeffs.keys():
fname = os.path.join(out_dir, "%s_calibration_data.h5" % platform)
fid = h5py.File(fname, 'w')
for chan in coeffs[platform].keys():
fid.create_group(chan)
fid[chan]['datetime'] = coeffs[platform][chan]['datetime']
fid[chan]['slope1'] = coeffs[platform][chan]['slope1']
fid[chan]['intercept1'] = coeffs[platform][chan]['intercept1']
fid[chan]['slope2'] = coeffs[platform][chan]['slope2']
fid[chan]['intercept2'] = coeffs[platform][chan]['intercept2']
fid.close()
print("Calibration coefficients saved for %s" % platform)
def main():
"""Create calibration coefficient files for AVHRR."""
out_dir = sys.argv[1]
coeffs = get_all_coeffs()
save_coeffs(coeffs, out_dir=out_dir)
if __name__ == "__main__":
main()
<|fim▁end|> | get_page |
<|file_name|>fetch_avhrr_calcoeffs.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Satpy developers
#
# This file is part of satpy.
#
# satpy is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Fetch avhrr calibration coefficients."""
import datetime as dt
import os.path
import sys
import h5py
import urllib2
BASE_URL = "http://www.star.nesdis.noaa.gov/smcd/spb/fwu/homepage/" + \
"AVHRR/Op_Cal_AVHRR/"
URLS = {
"Metop-B":
{"ch1": BASE_URL + "Metop1_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "Metop1_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "Metop1_AVHRR_Libya_ch3a.txt"},
"Metop-A":
{"ch1": BASE_URL + "Metop2_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "Metop2_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "Metop2_AVHRR_Libya_ch3a.txt"},
"NOAA-16":
{"ch1": BASE_URL + "N16_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N16_AVHRR_Libya_ch2.txt"},
"NOAA-17":
{"ch1": BASE_URL + "N17_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N17_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "N17_AVHRR_Libya_ch3a.txt"},
"NOAA-18":
{"ch1": BASE_URL + "N18_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N18_AVHRR_Libya_ch2.txt"},
"NOAA-19":
{"ch1": BASE_URL + "N19_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N19_AVHRR_Libya_ch2.txt"}
}
def get_page(url):
"""Retrieve the given page."""
return urllib2.urlopen(url).read()
def <|fim_middle|>(page):
"""Parse coefficients from the page."""
coeffs = {}
coeffs['datetime'] = []
coeffs['slope1'] = []
coeffs['intercept1'] = []
coeffs['slope2'] = []
coeffs['intercept2'] = []
slope1_idx, intercept1_idx, slope2_idx, intercept2_idx = \
None, None, None, None
date_idx = 0
for row in page.lower().split('\n'):
row = row.split()
if len(row) == 0:
continue
if row[0] == 'update':
# Get the column indices from the header line
slope1_idx = row.index('slope_lo')
intercept1_idx = row.index('int_lo')
slope2_idx = row.index('slope_hi')
intercept2_idx = row.index('int_hi')
continue
if slope1_idx is None:
continue
# In some cases the fields are connected, skip those rows
if max([slope1_idx, intercept1_idx,
slope2_idx, intercept2_idx]) >= len(row):
continue
try:
dat = dt.datetime.strptime(row[date_idx], "%m/%d/%Y")
except ValueError:
continue
coeffs['datetime'].append([dat.year, dat.month, dat.day])
coeffs['slope1'].append(float(row[slope1_idx]))
coeffs['intercept1'].append(float(row[intercept1_idx]))
coeffs['slope2'].append(float(row[slope2_idx]))
coeffs['intercept2'].append(float(row[intercept2_idx]))
return coeffs
def get_all_coeffs():
"""Get all available calibration coefficients for the satellites."""
coeffs = {}
for platform in URLS:
if platform not in coeffs:
coeffs[platform] = {}
for chan in URLS[platform].keys():
url = URLS[platform][chan]
print(url)
page = get_page(url)
coeffs[platform][chan] = get_coeffs(page)
return coeffs
def save_coeffs(coeffs, out_dir=''):
"""Save calibration coefficients to HDF5 files."""
for platform in coeffs.keys():
fname = os.path.join(out_dir, "%s_calibration_data.h5" % platform)
fid = h5py.File(fname, 'w')
for chan in coeffs[platform].keys():
fid.create_group(chan)
fid[chan]['datetime'] = coeffs[platform][chan]['datetime']
fid[chan]['slope1'] = coeffs[platform][chan]['slope1']
fid[chan]['intercept1'] = coeffs[platform][chan]['intercept1']
fid[chan]['slope2'] = coeffs[platform][chan]['slope2']
fid[chan]['intercept2'] = coeffs[platform][chan]['intercept2']
fid.close()
print("Calibration coefficients saved for %s" % platform)
def main():
"""Create calibration coefficient files for AVHRR."""
out_dir = sys.argv[1]
coeffs = get_all_coeffs()
save_coeffs(coeffs, out_dir=out_dir)
if __name__ == "__main__":
main()
<|fim▁end|> | get_coeffs |
<|file_name|>fetch_avhrr_calcoeffs.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Satpy developers
#
# This file is part of satpy.
#
# satpy is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Fetch avhrr calibration coefficients."""
import datetime as dt
import os.path
import sys
import h5py
import urllib2
BASE_URL = "http://www.star.nesdis.noaa.gov/smcd/spb/fwu/homepage/" + \
"AVHRR/Op_Cal_AVHRR/"
URLS = {
"Metop-B":
{"ch1": BASE_URL + "Metop1_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "Metop1_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "Metop1_AVHRR_Libya_ch3a.txt"},
"Metop-A":
{"ch1": BASE_URL + "Metop2_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "Metop2_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "Metop2_AVHRR_Libya_ch3a.txt"},
"NOAA-16":
{"ch1": BASE_URL + "N16_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N16_AVHRR_Libya_ch2.txt"},
"NOAA-17":
{"ch1": BASE_URL + "N17_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N17_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "N17_AVHRR_Libya_ch3a.txt"},
"NOAA-18":
{"ch1": BASE_URL + "N18_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N18_AVHRR_Libya_ch2.txt"},
"NOAA-19":
{"ch1": BASE_URL + "N19_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N19_AVHRR_Libya_ch2.txt"}
}
def get_page(url):
"""Retrieve the given page."""
return urllib2.urlopen(url).read()
def get_coeffs(page):
"""Parse coefficients from the page."""
coeffs = {}
coeffs['datetime'] = []
coeffs['slope1'] = []
coeffs['intercept1'] = []
coeffs['slope2'] = []
coeffs['intercept2'] = []
slope1_idx, intercept1_idx, slope2_idx, intercept2_idx = \
None, None, None, None
date_idx = 0
for row in page.lower().split('\n'):
row = row.split()
if len(row) == 0:
continue
if row[0] == 'update':
# Get the column indices from the header line
slope1_idx = row.index('slope_lo')
intercept1_idx = row.index('int_lo')
slope2_idx = row.index('slope_hi')
intercept2_idx = row.index('int_hi')
continue
if slope1_idx is None:
continue
# In some cases the fields are connected, skip those rows
if max([slope1_idx, intercept1_idx,
slope2_idx, intercept2_idx]) >= len(row):
continue
try:
dat = dt.datetime.strptime(row[date_idx], "%m/%d/%Y")
except ValueError:
continue
coeffs['datetime'].append([dat.year, dat.month, dat.day])
coeffs['slope1'].append(float(row[slope1_idx]))
coeffs['intercept1'].append(float(row[intercept1_idx]))
coeffs['slope2'].append(float(row[slope2_idx]))
coeffs['intercept2'].append(float(row[intercept2_idx]))
return coeffs
def <|fim_middle|>():
"""Get all available calibration coefficients for the satellites."""
coeffs = {}
for platform in URLS:
if platform not in coeffs:
coeffs[platform] = {}
for chan in URLS[platform].keys():
url = URLS[platform][chan]
print(url)
page = get_page(url)
coeffs[platform][chan] = get_coeffs(page)
return coeffs
def save_coeffs(coeffs, out_dir=''):
"""Save calibration coefficients to HDF5 files."""
for platform in coeffs.keys():
fname = os.path.join(out_dir, "%s_calibration_data.h5" % platform)
fid = h5py.File(fname, 'w')
for chan in coeffs[platform].keys():
fid.create_group(chan)
fid[chan]['datetime'] = coeffs[platform][chan]['datetime']
fid[chan]['slope1'] = coeffs[platform][chan]['slope1']
fid[chan]['intercept1'] = coeffs[platform][chan]['intercept1']
fid[chan]['slope2'] = coeffs[platform][chan]['slope2']
fid[chan]['intercept2'] = coeffs[platform][chan]['intercept2']
fid.close()
print("Calibration coefficients saved for %s" % platform)
def main():
"""Create calibration coefficient files for AVHRR."""
out_dir = sys.argv[1]
coeffs = get_all_coeffs()
save_coeffs(coeffs, out_dir=out_dir)
if __name__ == "__main__":
main()
<|fim▁end|> | get_all_coeffs |
<|file_name|>fetch_avhrr_calcoeffs.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Satpy developers
#
# This file is part of satpy.
#
# satpy is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Fetch avhrr calibration coefficients."""
import datetime as dt
import os.path
import sys
import h5py
import urllib2
BASE_URL = "http://www.star.nesdis.noaa.gov/smcd/spb/fwu/homepage/" + \
"AVHRR/Op_Cal_AVHRR/"
URLS = {
"Metop-B":
{"ch1": BASE_URL + "Metop1_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "Metop1_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "Metop1_AVHRR_Libya_ch3a.txt"},
"Metop-A":
{"ch1": BASE_URL + "Metop2_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "Metop2_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "Metop2_AVHRR_Libya_ch3a.txt"},
"NOAA-16":
{"ch1": BASE_URL + "N16_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N16_AVHRR_Libya_ch2.txt"},
"NOAA-17":
{"ch1": BASE_URL + "N17_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N17_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "N17_AVHRR_Libya_ch3a.txt"},
"NOAA-18":
{"ch1": BASE_URL + "N18_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N18_AVHRR_Libya_ch2.txt"},
"NOAA-19":
{"ch1": BASE_URL + "N19_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N19_AVHRR_Libya_ch2.txt"}
}
def get_page(url):
"""Retrieve the given page."""
return urllib2.urlopen(url).read()
def get_coeffs(page):
"""Parse coefficients from the page."""
coeffs = {}
coeffs['datetime'] = []
coeffs['slope1'] = []
coeffs['intercept1'] = []
coeffs['slope2'] = []
coeffs['intercept2'] = []
slope1_idx, intercept1_idx, slope2_idx, intercept2_idx = \
None, None, None, None
date_idx = 0
for row in page.lower().split('\n'):
row = row.split()
if len(row) == 0:
continue
if row[0] == 'update':
# Get the column indices from the header line
slope1_idx = row.index('slope_lo')
intercept1_idx = row.index('int_lo')
slope2_idx = row.index('slope_hi')
intercept2_idx = row.index('int_hi')
continue
if slope1_idx is None:
continue
# In some cases the fields are connected, skip those rows
if max([slope1_idx, intercept1_idx,
slope2_idx, intercept2_idx]) >= len(row):
continue
try:
dat = dt.datetime.strptime(row[date_idx], "%m/%d/%Y")
except ValueError:
continue
coeffs['datetime'].append([dat.year, dat.month, dat.day])
coeffs['slope1'].append(float(row[slope1_idx]))
coeffs['intercept1'].append(float(row[intercept1_idx]))
coeffs['slope2'].append(float(row[slope2_idx]))
coeffs['intercept2'].append(float(row[intercept2_idx]))
return coeffs
def get_all_coeffs():
"""Get all available calibration coefficients for the satellites."""
coeffs = {}
for platform in URLS:
if platform not in coeffs:
coeffs[platform] = {}
for chan in URLS[platform].keys():
url = URLS[platform][chan]
print(url)
page = get_page(url)
coeffs[platform][chan] = get_coeffs(page)
return coeffs
def <|fim_middle|>(coeffs, out_dir=''):
"""Save calibration coefficients to HDF5 files."""
for platform in coeffs.keys():
fname = os.path.join(out_dir, "%s_calibration_data.h5" % platform)
fid = h5py.File(fname, 'w')
for chan in coeffs[platform].keys():
fid.create_group(chan)
fid[chan]['datetime'] = coeffs[platform][chan]['datetime']
fid[chan]['slope1'] = coeffs[platform][chan]['slope1']
fid[chan]['intercept1'] = coeffs[platform][chan]['intercept1']
fid[chan]['slope2'] = coeffs[platform][chan]['slope2']
fid[chan]['intercept2'] = coeffs[platform][chan]['intercept2']
fid.close()
print("Calibration coefficients saved for %s" % platform)
def main():
"""Create calibration coefficient files for AVHRR."""
out_dir = sys.argv[1]
coeffs = get_all_coeffs()
save_coeffs(coeffs, out_dir=out_dir)
if __name__ == "__main__":
main()
<|fim▁end|> | save_coeffs |
<|file_name|>fetch_avhrr_calcoeffs.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Satpy developers
#
# This file is part of satpy.
#
# satpy is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Fetch avhrr calibration coefficients."""
import datetime as dt
import os.path
import sys
import h5py
import urllib2
BASE_URL = "http://www.star.nesdis.noaa.gov/smcd/spb/fwu/homepage/" + \
"AVHRR/Op_Cal_AVHRR/"
URLS = {
"Metop-B":
{"ch1": BASE_URL + "Metop1_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "Metop1_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "Metop1_AVHRR_Libya_ch3a.txt"},
"Metop-A":
{"ch1": BASE_URL + "Metop2_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "Metop2_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "Metop2_AVHRR_Libya_ch3a.txt"},
"NOAA-16":
{"ch1": BASE_URL + "N16_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N16_AVHRR_Libya_ch2.txt"},
"NOAA-17":
{"ch1": BASE_URL + "N17_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N17_AVHRR_Libya_ch2.txt",
"ch3a": BASE_URL + "N17_AVHRR_Libya_ch3a.txt"},
"NOAA-18":
{"ch1": BASE_URL + "N18_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N18_AVHRR_Libya_ch2.txt"},
"NOAA-19":
{"ch1": BASE_URL + "N19_AVHRR_Libya_ch1.txt",
"ch2": BASE_URL + "N19_AVHRR_Libya_ch2.txt"}
}
def get_page(url):
"""Retrieve the given page."""
return urllib2.urlopen(url).read()
def get_coeffs(page):
"""Parse coefficients from the page."""
coeffs = {}
coeffs['datetime'] = []
coeffs['slope1'] = []
coeffs['intercept1'] = []
coeffs['slope2'] = []
coeffs['intercept2'] = []
slope1_idx, intercept1_idx, slope2_idx, intercept2_idx = \
None, None, None, None
date_idx = 0
for row in page.lower().split('\n'):
row = row.split()
if len(row) == 0:
continue
if row[0] == 'update':
# Get the column indices from the header line
slope1_idx = row.index('slope_lo')
intercept1_idx = row.index('int_lo')
slope2_idx = row.index('slope_hi')
intercept2_idx = row.index('int_hi')
continue
if slope1_idx is None:
continue
# In some cases the fields are connected, skip those rows
if max([slope1_idx, intercept1_idx,
slope2_idx, intercept2_idx]) >= len(row):
continue
try:
dat = dt.datetime.strptime(row[date_idx], "%m/%d/%Y")
except ValueError:
continue
coeffs['datetime'].append([dat.year, dat.month, dat.day])
coeffs['slope1'].append(float(row[slope1_idx]))
coeffs['intercept1'].append(float(row[intercept1_idx]))
coeffs['slope2'].append(float(row[slope2_idx]))
coeffs['intercept2'].append(float(row[intercept2_idx]))
return coeffs
def get_all_coeffs():
"""Get all available calibration coefficients for the satellites."""
coeffs = {}
for platform in URLS:
if platform not in coeffs:
coeffs[platform] = {}
for chan in URLS[platform].keys():
url = URLS[platform][chan]
print(url)
page = get_page(url)
coeffs[platform][chan] = get_coeffs(page)
return coeffs
def save_coeffs(coeffs, out_dir=''):
"""Save calibration coefficients to HDF5 files."""
for platform in coeffs.keys():
fname = os.path.join(out_dir, "%s_calibration_data.h5" % platform)
fid = h5py.File(fname, 'w')
for chan in coeffs[platform].keys():
fid.create_group(chan)
fid[chan]['datetime'] = coeffs[platform][chan]['datetime']
fid[chan]['slope1'] = coeffs[platform][chan]['slope1']
fid[chan]['intercept1'] = coeffs[platform][chan]['intercept1']
fid[chan]['slope2'] = coeffs[platform][chan]['slope2']
fid[chan]['intercept2'] = coeffs[platform][chan]['intercept2']
fid.close()
print("Calibration coefficients saved for %s" % platform)
def <|fim_middle|>():
"""Create calibration coefficient files for AVHRR."""
out_dir = sys.argv[1]
coeffs = get_all_coeffs()
save_coeffs(coeffs, out_dir=out_dir)
if __name__ == "__main__":
main()
<|fim▁end|> | main |
<|file_name|>nonFinalPublic.py<|end_file_name|><|fim▁begin|><|fim▁hole|> modifiers = astTuple.ast.modifiers
nonFinalPublic = modifiers.isSet(Modifier.ModifierFlag.Public) and not modifiers.isSet(Modifier.ModifierFlag.Final)
if not nonFinalPublic:
Query.input.remove(astTuple)
Query.result = Query.input<|fim▁end|> | for astTuple in Query.input.tuples('ast'):
if type(astTuple.ast) is Field: |
<|file_name|>nonFinalPublic.py<|end_file_name|><|fim▁begin|>for astTuple in Query.input.tuples('ast'):
if type(astTuple.ast) is Field:
<|fim_middle|>
Query.result = Query.input<|fim▁end|> | modifiers = astTuple.ast.modifiers
nonFinalPublic = modifiers.isSet(Modifier.ModifierFlag.Public) and not modifiers.isSet(Modifier.ModifierFlag.Final)
if not nonFinalPublic:
Query.input.remove(astTuple) |
<|file_name|>nonFinalPublic.py<|end_file_name|><|fim▁begin|>for astTuple in Query.input.tuples('ast'):
if type(astTuple.ast) is Field:
modifiers = astTuple.ast.modifiers
nonFinalPublic = modifiers.isSet(Modifier.ModifierFlag.Public) and not modifiers.isSet(Modifier.ModifierFlag.Final)
if not nonFinalPublic:
<|fim_middle|>
Query.result = Query.input<|fim▁end|> | Query.input.remove(astTuple) |
<|file_name|>smri_freesurfer.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
"""
================
sMRI: FreeSurfer
================
This script, smri_freesurfer.py, demonstrates the ability to call recon-all on
a set of subjects and then make an average subject.
python smri_freesurfer.py
Import necessary modules from nipype.
"""
import os
import nipype.pipeline.engine as pe
import nipype.interfaces.io as nio
from nipype.interfaces.freesurfer.preprocess import ReconAll
from nipype.interfaces.freesurfer.utils import MakeAverageSubject
subject_list = ['s1', 's3']
data_dir = os.path.abspath('data')
subjects_dir = os.path.abspath('amri_freesurfer_tutorial/subjects_dir')
wf = pe.Workflow(name="l1workflow")
wf.base_dir = os.path.abspath('amri_freesurfer_tutorial/workdir')
"""
Grab data
"""
datasource = pe.MapNode(interface=nio.DataGrabber(infields=['subject_id'],
outfields=['struct']),
name='datasource',
iterfield=['subject_id'])
datasource.inputs.base_directory = data_dir
datasource.inputs.template = '%s/%s.nii'
datasource.inputs.template_args = dict(struct=[['subject_id', 'struct']])
datasource.inputs.subject_id = subject_list
"""
Run recon-all
"""
recon_all = pe.MapNode(interface=ReconAll(), name='recon_all',
iterfield=['subject_id', 'T1_files'])
recon_all.inputs.subject_id = subject_list
if not os.path.exists(subjects_dir):
os.mkdir(subjects_dir)
recon_all.inputs.subjects_dir = subjects_dir
<|fim▁hole|>Make average subject
"""
average = pe.Node(interface=MakeAverageSubject(), name="average")
average.inputs.subjects_dir = subjects_dir
wf.connect(recon_all, 'subject_id', average, 'subjects_ids')
wf.run("MultiProc", plugin_args={'n_procs': 4})<|fim▁end|> | wf.connect(datasource, 'struct', recon_all, 'T1_files')
""" |
<|file_name|>smri_freesurfer.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
"""
================
sMRI: FreeSurfer
================
This script, smri_freesurfer.py, demonstrates the ability to call recon-all on
a set of subjects and then make an average subject.
python smri_freesurfer.py
Import necessary modules from nipype.
"""
import os
import nipype.pipeline.engine as pe
import nipype.interfaces.io as nio
from nipype.interfaces.freesurfer.preprocess import ReconAll
from nipype.interfaces.freesurfer.utils import MakeAverageSubject
subject_list = ['s1', 's3']
data_dir = os.path.abspath('data')
subjects_dir = os.path.abspath('amri_freesurfer_tutorial/subjects_dir')
wf = pe.Workflow(name="l1workflow")
wf.base_dir = os.path.abspath('amri_freesurfer_tutorial/workdir')
"""
Grab data
"""
datasource = pe.MapNode(interface=nio.DataGrabber(infields=['subject_id'],
outfields=['struct']),
name='datasource',
iterfield=['subject_id'])
datasource.inputs.base_directory = data_dir
datasource.inputs.template = '%s/%s.nii'
datasource.inputs.template_args = dict(struct=[['subject_id', 'struct']])
datasource.inputs.subject_id = subject_list
"""
Run recon-all
"""
recon_all = pe.MapNode(interface=ReconAll(), name='recon_all',
iterfield=['subject_id', 'T1_files'])
recon_all.inputs.subject_id = subject_list
if not os.path.exists(subjects_dir):
<|fim_middle|>
recon_all.inputs.subjects_dir = subjects_dir
wf.connect(datasource, 'struct', recon_all, 'T1_files')
"""
Make average subject
"""
average = pe.Node(interface=MakeAverageSubject(), name="average")
average.inputs.subjects_dir = subjects_dir
wf.connect(recon_all, 'subject_id', average, 'subjects_ids')
wf.run("MultiProc", plugin_args={'n_procs': 4})
<|fim▁end|> | os.mkdir(subjects_dir) |
<|file_name|>mrp.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# © 2014 Elico Corp (https://www.elico-corp.com)
# Licence AGPL-3.0 or later(http://www.gnu.org/licenses/agpl.html)
import time
from datetime import datetime
import openerp.addons.decimal_precision as dp
from openerp.osv import fields, osv
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT, DATETIME_FORMATS_MAP
from openerp.tools import float_compare
from openerp.tools.translate import _
from openerp import SUPERUSER_ID
from openerp import netsvc
from openerp import tools
class mrp_production(osv.osv):
_inherit = 'mrp.production'
def _action_compute_lines(self, cr, uid, ids, properties=None, context=None):
""" Computes bills of material of a product.
@param properties: List containing dictionaries of properties.
@return: No. of products.
"""
if properties is None:
properties = []
results = []
bom_obj = self.pool.get('mrp.bom')
uom_obj = self.pool.get('product.uom')
prod_line_obj = self.pool.get('mrp.production.product.line')
workcenter_line_obj = self.pool.get('mrp.production.workcenter.line')
for production in self.browse(cr, uid, ids):
#unlink product_lines
prod_line_obj.unlink(cr, SUPERUSER_ID, [line.id for line in production.product_lines], context=context)
#unlink workcenter_lines
workcenter_line_obj.unlink(cr, SUPERUSER_ID, [line.id for line in production.workcenter_lines], context=context)
# search BoM structure and route
bom_point = production.bom_id
bom_id = production.bom_id.id
if not bom_point:
bom_id = bom_obj._bom_find(cr, uid, production.product_id.id, production.product_uom.id, properties)
if bom_id:
bom_point = bom_obj.browse(cr, uid, bom_id)
routing_id = bom_point.routing_id.id or False
self.write(cr, uid, [production.id], {'bom_id': bom_id, 'routing_id': routing_id})
if not bom_id:
continue
# get components and workcenter_lines from BoM structure
factor = uom_obj._compute_qty(cr, uid, production.product_uom.id, production.product_qty, bom_point.product_uom.id)
res = bom_obj._bom_explode(cr, uid, bom_point, factor / bom_point.product_qty, properties, routing_id=production.routing_id.id)
results = res[0] # product_lines
results2 = res[1] # workcenter_lines
# reset product_lines in production order
for line in results:
line['production_id'] = production.id
prod_line_obj.create(cr, uid, line)
#reset workcenter_lines in production order
for line in results2:
line['production_id'] = production.id
workcenter_line_obj.create(cr, uid, line)
return results
def action_ready(self, cr, uid, ids, context=None):
""" Changes the production state to Ready and location id of stock move.
@return: True
"""
move_obj = self.pool.get('stock.move')<|fim▁hole|> if not production.bom_id:
produce_move_id = self._make_production_produce_line(cr, uid, production, context=context)
for (production_id,name) in self.name_get(cr, uid, ids):
production = self.browse(cr, uid, production_id)
if production.move_prod_id and production.move_prod_id.location_id.id != production.location_dest_id.id:
move_obj.write(cr, uid, [production.move_prod_id.id],
{'location_id': production.location_dest_id.id})
return True
def action_produce(self, cr, uid, production_id, production_qty, production_mode, context=None):
production = self.browse(cr, uid, production_id, context=context)
if not production.bom_id and production.state == 'ready':
wf_service = netsvc.LocalService("workflow")
wf_service.trg_validate(uid, 'mrp.production', production_id, 'button_produce', cr)
return super(mrp_production, self).action_produce(cr, uid, production_id, production_qty, production_mode, context=context)
mrp_production()<|fim▁end|> | self.write(cr, uid, ids, {'state': 'ready'})
for production in self.browse(cr, uid, ids, context=context): |
<|file_name|>mrp.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# © 2014 Elico Corp (https://www.elico-corp.com)
# Licence AGPL-3.0 or later(http://www.gnu.org/licenses/agpl.html)
import time
from datetime import datetime
import openerp.addons.decimal_precision as dp
from openerp.osv import fields, osv
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT, DATETIME_FORMATS_MAP
from openerp.tools import float_compare
from openerp.tools.translate import _
from openerp import SUPERUSER_ID
from openerp import netsvc
from openerp import tools
class mrp_production(osv.osv):
_<|fim_middle|>
mrp_production()
<|fim▁end|> | inherit = 'mrp.production'
def _action_compute_lines(self, cr, uid, ids, properties=None, context=None):
""" Computes bills of material of a product.
@param properties: List containing dictionaries of properties.
@return: No. of products.
"""
if properties is None:
properties = []
results = []
bom_obj = self.pool.get('mrp.bom')
uom_obj = self.pool.get('product.uom')
prod_line_obj = self.pool.get('mrp.production.product.line')
workcenter_line_obj = self.pool.get('mrp.production.workcenter.line')
for production in self.browse(cr, uid, ids):
#unlink product_lines
prod_line_obj.unlink(cr, SUPERUSER_ID, [line.id for line in production.product_lines], context=context)
#unlink workcenter_lines
workcenter_line_obj.unlink(cr, SUPERUSER_ID, [line.id for line in production.workcenter_lines], context=context)
# search BoM structure and route
bom_point = production.bom_id
bom_id = production.bom_id.id
if not bom_point:
bom_id = bom_obj._bom_find(cr, uid, production.product_id.id, production.product_uom.id, properties)
if bom_id:
bom_point = bom_obj.browse(cr, uid, bom_id)
routing_id = bom_point.routing_id.id or False
self.write(cr, uid, [production.id], {'bom_id': bom_id, 'routing_id': routing_id})
if not bom_id:
continue
# get components and workcenter_lines from BoM structure
factor = uom_obj._compute_qty(cr, uid, production.product_uom.id, production.product_qty, bom_point.product_uom.id)
res = bom_obj._bom_explode(cr, uid, bom_point, factor / bom_point.product_qty, properties, routing_id=production.routing_id.id)
results = res[0] # product_lines
results2 = res[1] # workcenter_lines
# reset product_lines in production order
for line in results:
line['production_id'] = production.id
prod_line_obj.create(cr, uid, line)
#reset workcenter_lines in production order
for line in results2:
line['production_id'] = production.id
workcenter_line_obj.create(cr, uid, line)
return results
def action_ready(self, cr, uid, ids, context=None):
""" Changes the production state to Ready and location id of stock move.
@return: True
"""
move_obj = self.pool.get('stock.move')
self.write(cr, uid, ids, {'state': 'ready'})
for production in self.browse(cr, uid, ids, context=context):
if not production.bom_id:
produce_move_id = self._make_production_produce_line(cr, uid, production, context=context)
for (production_id,name) in self.name_get(cr, uid, ids):
production = self.browse(cr, uid, production_id)
if production.move_prod_id and production.move_prod_id.location_id.id != production.location_dest_id.id:
move_obj.write(cr, uid, [production.move_prod_id.id],
{'location_id': production.location_dest_id.id})
return True
def action_produce(self, cr, uid, production_id, production_qty, production_mode, context=None):
production = self.browse(cr, uid, production_id, context=context)
if not production.bom_id and production.state == 'ready':
wf_service = netsvc.LocalService("workflow")
wf_service.trg_validate(uid, 'mrp.production', production_id, 'button_produce', cr)
return super(mrp_production, self).action_produce(cr, uid, production_id, production_qty, production_mode, context=context)
|
<|file_name|>mrp.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# © 2014 Elico Corp (https://www.elico-corp.com)
# Licence AGPL-3.0 or later(http://www.gnu.org/licenses/agpl.html)
import time
from datetime import datetime
import openerp.addons.decimal_precision as dp
from openerp.osv import fields, osv
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT, DATETIME_FORMATS_MAP
from openerp.tools import float_compare
from openerp.tools.translate import _
from openerp import SUPERUSER_ID
from openerp import netsvc
from openerp import tools
class mrp_production(osv.osv):
_inherit = 'mrp.production'
def _action_compute_lines(self, cr, uid, ids, properties=None, context=None):
"<|fim_middle|>
def action_ready(self, cr, uid, ids, context=None):
""" Changes the production state to Ready and location id of stock move.
@return: True
"""
move_obj = self.pool.get('stock.move')
self.write(cr, uid, ids, {'state': 'ready'})
for production in self.browse(cr, uid, ids, context=context):
if not production.bom_id:
produce_move_id = self._make_production_produce_line(cr, uid, production, context=context)
for (production_id,name) in self.name_get(cr, uid, ids):
production = self.browse(cr, uid, production_id)
if production.move_prod_id and production.move_prod_id.location_id.id != production.location_dest_id.id:
move_obj.write(cr, uid, [production.move_prod_id.id],
{'location_id': production.location_dest_id.id})
return True
def action_produce(self, cr, uid, production_id, production_qty, production_mode, context=None):
production = self.browse(cr, uid, production_id, context=context)
if not production.bom_id and production.state == 'ready':
wf_service = netsvc.LocalService("workflow")
wf_service.trg_validate(uid, 'mrp.production', production_id, 'button_produce', cr)
return super(mrp_production, self).action_produce(cr, uid, production_id, production_qty, production_mode, context=context)
mrp_production()
<|fim▁end|> | "" Computes bills of material of a product.
@param properties: List containing dictionaries of properties.
@return: No. of products.
"""
if properties is None:
properties = []
results = []
bom_obj = self.pool.get('mrp.bom')
uom_obj = self.pool.get('product.uom')
prod_line_obj = self.pool.get('mrp.production.product.line')
workcenter_line_obj = self.pool.get('mrp.production.workcenter.line')
for production in self.browse(cr, uid, ids):
#unlink product_lines
prod_line_obj.unlink(cr, SUPERUSER_ID, [line.id for line in production.product_lines], context=context)
#unlink workcenter_lines
workcenter_line_obj.unlink(cr, SUPERUSER_ID, [line.id for line in production.workcenter_lines], context=context)
# search BoM structure and route
bom_point = production.bom_id
bom_id = production.bom_id.id
if not bom_point:
bom_id = bom_obj._bom_find(cr, uid, production.product_id.id, production.product_uom.id, properties)
if bom_id:
bom_point = bom_obj.browse(cr, uid, bom_id)
routing_id = bom_point.routing_id.id or False
self.write(cr, uid, [production.id], {'bom_id': bom_id, 'routing_id': routing_id})
if not bom_id:
continue
# get components and workcenter_lines from BoM structure
factor = uom_obj._compute_qty(cr, uid, production.product_uom.id, production.product_qty, bom_point.product_uom.id)
res = bom_obj._bom_explode(cr, uid, bom_point, factor / bom_point.product_qty, properties, routing_id=production.routing_id.id)
results = res[0] # product_lines
results2 = res[1] # workcenter_lines
# reset product_lines in production order
for line in results:
line['production_id'] = production.id
prod_line_obj.create(cr, uid, line)
#reset workcenter_lines in production order
for line in results2:
line['production_id'] = production.id
workcenter_line_obj.create(cr, uid, line)
return results
|
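In _action_compute_lines above, the ordered quantity is first converted into the BoM's unit of measure with product.uom._compute_qty and then divided by the quantity the BoM itself is defined for; that ratio is the multiplier handed to _bom_explode for every component and workcenter line. A small self-contained sketch of that scaling, using illustrative stand-ins for the UoM table and the BoM record rather than OpenERP objects:

from dataclasses import dataclass

# Hypothetical conversion table: each unit expressed in a common reference unit.
UOM_FACTORS = {"Unit": 1.0, "Dozen": 12.0}

def compute_qty(qty, from_uom, to_uom):
    # Same role as product.uom._compute_qty: re-express qty in to_uom.
    return qty * UOM_FACTORS[from_uom] / UOM_FACTORS[to_uom]

@dataclass
class Bom:
    product_qty: float   # quantity the BoM is defined for
    product_uom: str
    lines: dict          # component -> quantity needed per product_qty

def explode(bom, order_qty, order_uom):
    # Mirrors: factor = _compute_qty(...); _bom_explode(bom, factor / bom.product_qty, ...)
    factor = compute_qty(order_qty, order_uom, bom.product_uom) / bom.product_qty
    return {component: qty * factor for component, qty in bom.lines.items()}

bom = Bom(product_qty=1.0, product_uom="Dozen", lines={"screw": 4, "panel": 2})
print(explode(bom, 30, "Unit"))   # {'screw': 10.0, 'panel': 5.0}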
<|file_name|>mrp.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# © 2014 Elico Corp (https://www.elico-corp.com)
# Licence AGPL-3.0 or later(http://www.gnu.org/licenses/agpl.html)
import time
from datetime import datetime
import openerp.addons.decimal_precision as dp
from openerp.osv import fields, osv
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT, DATETIME_FORMATS_MAP
from openerp.tools import float_compare
from openerp.tools.translate import _
from openerp import SUPERUSER_ID
from openerp import netsvc
from openerp import tools
class mrp_production(osv.osv):
_inherit = 'mrp.production'
def _action_compute_lines(self, cr, uid, ids, properties=None, context=None):
""" Computes bills of material of a product.
@param properties: List containing dictionaries of properties.
@return: No. of products.
"""
if properties is None:
properties = []
results = []
bom_obj = self.pool.get('mrp.bom')
uom_obj = self.pool.get('product.uom')
prod_line_obj = self.pool.get('mrp.production.product.line')
workcenter_line_obj = self.pool.get('mrp.production.workcenter.line')
for production in self.browse(cr, uid, ids):
#unlink product_lines
prod_line_obj.unlink(cr, SUPERUSER_ID, [line.id for line in production.product_lines], context=context)
#unlink workcenter_lines
workcenter_line_obj.unlink(cr, SUPERUSER_ID, [line.id for line in production.workcenter_lines], context=context)
# search BoM structure and route
bom_point = production.bom_id
bom_id = production.bom_id.id
if not bom_point:
bom_id = bom_obj._bom_find(cr, uid, production.product_id.id, production.product_uom.id, properties)
if bom_id:
bom_point = bom_obj.browse(cr, uid, bom_id)
routing_id = bom_point.routing_id.id or False
self.write(cr, uid, [production.id], {'bom_id': bom_id, 'routing_id': routing_id})
if not bom_id:
continue
# get components and workcenter_lines from BoM structure
factor = uom_obj._compute_qty(cr, uid, production.product_uom.id, production.product_qty, bom_point.product_uom.id)
res = bom_obj._bom_explode(cr, uid, bom_point, factor / bom_point.product_qty, properties, routing_id=production.routing_id.id)
results = res[0] # product_lines
results2 = res[1] # workcenter_lines
# reset product_lines in production order
for line in results:
line['production_id'] = production.id
prod_line_obj.create(cr, uid, line)
#reset workcenter_lines in production order
for line in results2:
line['production_id'] = production.id
workcenter_line_obj.create(cr, uid, line)
return results
def action_ready(self, cr, uid, ids, context=None):
"<|fim_middle|>
def action_produce(self, cr, uid, production_id, production_qty, production_mode, context=None):
production = self.browse(cr, uid, production_id, context=context)
if not production.bom_id and production.state == 'ready':
wf_service = netsvc.LocalService("workflow")
wf_service.trg_validate(uid, 'mrp.production', production_id, 'button_produce', cr)
return super(mrp_production, self).action_produce(cr, uid, production_id, production_qty, production_mode, context=context)
mrp_production()
<|fim▁end|> | "" Changes the production state to Ready and location id of stock move.
@return: True
"""
move_obj = self.pool.get('stock.move')
self.write(cr, uid, ids, {'state': 'ready'})
for production in self.browse(cr, uid, ids, context=context):
if not production.bom_id:
produce_move_id = self._make_production_produce_line(cr, uid, production, context=context)
for (production_id,name) in self.name_get(cr, uid, ids):
production = self.browse(cr, uid, production_id)
if production.move_prod_id and production.move_prod_id.location_id.id != production.location_dest_id.id:
move_obj.write(cr, uid, [production.move_prod_id.id],
{'location_id': production.location_dest_id.id})
return True
|
<|file_name|>mrp.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# © 2014 Elico Corp (https://www.elico-corp.com)
# Licence AGPL-3.0 or later(http://www.gnu.org/licenses/agpl.html)
import time
from datetime import datetime
import openerp.addons.decimal_precision as dp
from openerp.osv import fields, osv
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT, DATETIME_FORMATS_MAP
from openerp.tools import float_compare
from openerp.tools.translate import _
from openerp import SUPERUSER_ID
from openerp import netsvc
from openerp import tools
class mrp_production(osv.osv):
_inherit = 'mrp.production'
def _action_compute_lines(self, cr, uid, ids, properties=None, context=None):
""" Computes bills of material of a product.
@param properties: List containing dictionaries of properties.
@return: No. of products.
"""
if properties is None:
properties = []
results = []
bom_obj = self.pool.get('mrp.bom')
uom_obj = self.pool.get('product.uom')
prod_line_obj = self.pool.get('mrp.production.product.line')
workcenter_line_obj = self.pool.get('mrp.production.workcenter.line')
for production in self.browse(cr, uid, ids):
#unlink product_lines
prod_line_obj.unlink(cr, SUPERUSER_ID, [line.id for line in production.product_lines], context=context)
#unlink workcenter_lines
workcenter_line_obj.unlink(cr, SUPERUSER_ID, [line.id for line in production.workcenter_lines], context=context)
# search BoM structure and route
bom_point = production.bom_id
bom_id = production.bom_id.id
if not bom_point:
bom_id = bom_obj._bom_find(cr, uid, production.product_id.id, production.product_uom.id, properties)
if bom_id:
bom_point = bom_obj.browse(cr, uid, bom_id)
routing_id = bom_point.routing_id.id or False
self.write(cr, uid, [production.id], {'bom_id': bom_id, 'routing_id': routing_id})
if not bom_id:
continue
# get components and workcenter_lines from BoM structure
factor = uom_obj._compute_qty(cr, uid, production.product_uom.id, production.product_qty, bom_point.product_uom.id)
res = bom_obj._bom_explode(cr, uid, bom_point, factor / bom_point.product_qty, properties, routing_id=production.routing_id.id)
results = res[0] # product_lines
results2 = res[1] # workcenter_lines
# reset product_lines in production order
for line in results:
line['production_id'] = production.id
prod_line_obj.create(cr, uid, line)
#reset workcenter_lines in production order
for line in results2:
line['production_id'] = production.id
workcenter_line_obj.create(cr, uid, line)
return results
def action_ready(self, cr, uid, ids, context=None):
""" Changes the production state to Ready and location id of stock move.
@return: True
"""
move_obj = self.pool.get('stock.move')
self.write(cr, uid, ids, {'state': 'ready'})
for production in self.browse(cr, uid, ids, context=context):
if not production.bom_id:
produce_move_id = self._make_production_produce_line(cr, uid, production, context=context)
for (production_id,name) in self.name_get(cr, uid, ids):
production = self.browse(cr, uid, production_id)
if production.move_prod_id and production.move_prod_id.location_id.id != production.location_dest_id.id:
move_obj.write(cr, uid, [production.move_prod_id.id],
{'location_id': production.location_dest_id.id})
return True
def action_produce(self, cr, uid, production_id, production_qty, production_mode, context=None):
p<|fim_middle|>
mrp_production()
<|fim▁end|> | roduction = self.browse(cr, uid, production_id, context=context)
if not production.bom_id and production.state == 'ready':
wf_service = netsvc.LocalService("workflow")
wf_service.trg_validate(uid, 'mrp.production', production_id, 'button_produce', cr)
return super(mrp_production, self).action_produce(cr, uid, production_id, production_qty, production_mode, context=context)
|
<|file_name|>mrp.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# © 2014 Elico Corp (https://www.elico-corp.com)
# Licence AGPL-3.0 or later(http://www.gnu.org/licenses/agpl.html)
import time
from datetime import datetime
import openerp.addons.decimal_precision as dp
from openerp.osv import fields, osv
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT, DATETIME_FORMATS_MAP
from openerp.tools import float_compare
from openerp.tools.translate import _
from openerp import SUPERUSER_ID
from openerp import netsvc
from openerp import tools
class mrp_production(osv.osv):
_inherit = 'mrp.production'
def _action_compute_lines(self, cr, uid, ids, properties=None, context=None):
""" Computes bills of material of a product.
@param properties: List containing dictionaries of properties.
@return: No. of products.
"""
if properties is None:
p <|fim_middle|>
results = []
bom_obj = self.pool.get('mrp.bom')
uom_obj = self.pool.get('product.uom')
prod_line_obj = self.pool.get('mrp.production.product.line')
workcenter_line_obj = self.pool.get('mrp.production.workcenter.line')
for production in self.browse(cr, uid, ids):
#unlink product_lines
prod_line_obj.unlink(cr, SUPERUSER_ID, [line.id for line in production.product_lines], context=context)
#unlink workcenter_lines
workcenter_line_obj.unlink(cr, SUPERUSER_ID, [line.id for line in production.workcenter_lines], context=context)
# search BoM structure and route
bom_point = production.bom_id
bom_id = production.bom_id.id
if not bom_point:
bom_id = bom_obj._bom_find(cr, uid, production.product_id.id, production.product_uom.id, properties)
if bom_id:
bom_point = bom_obj.browse(cr, uid, bom_id)
routing_id = bom_point.routing_id.id or False
self.write(cr, uid, [production.id], {'bom_id': bom_id, 'routing_id': routing_id})
if not bom_id:
continue
# get components and workcenter_lines from BoM structure
factor = uom_obj._compute_qty(cr, uid, production.product_uom.id, production.product_qty, bom_point.product_uom.id)
res = bom_obj._bom_explode(cr, uid, bom_point, factor / bom_point.product_qty, properties, routing_id=production.routing_id.id)
results = res[0] # product_lines
results2 = res[1] # workcenter_lines
# reset product_lines in production order
for line in results:
line['production_id'] = production.id
prod_line_obj.create(cr, uid, line)
#reset workcenter_lines in production order
for line in results2:
line['production_id'] = production.id
workcenter_line_obj.create(cr, uid, line)
return results
def action_ready(self, cr, uid, ids, context=None):
""" Changes the production state to Ready and location id of stock move.
@return: True
"""
move_obj = self.pool.get('stock.move')
self.write(cr, uid, ids, {'state': 'ready'})
for production in self.browse(cr, uid, ids, context=context):
if not production.bom_id:
produce_move_id = self._make_production_produce_line(cr, uid, production, context=context)
for (production_id,name) in self.name_get(cr, uid, ids):
production = self.browse(cr, uid, production_id)
if production.move_prod_id and production.move_prod_id.location_id.id != production.location_dest_id.id:
move_obj.write(cr, uid, [production.move_prod_id.id],
{'location_id': production.location_dest_id.id})
return True
def action_produce(self, cr, uid, production_id, production_qty, production_mode, context=None):
production = self.browse(cr, uid, production_id, context=context)
if not production.bom_id and production.state == 'ready':
wf_service = netsvc.LocalService("workflow")
wf_service.trg_validate(uid, 'mrp.production', production_id, 'button_produce', cr)
return super(mrp_production, self).action_produce(cr, uid, production_id, production_qty, production_mode, context=context)
mrp_production()
<|fim▁end|> | roperties = []
|
<|file_name|>mrp.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# © 2014 Elico Corp (https://www.elico-corp.com)
# Licence AGPL-3.0 or later(http://www.gnu.org/licenses/agpl.html)
import time
from datetime import datetime
import openerp.addons.decimal_precision as dp
from openerp.osv import fields, osv
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT, DATETIME_FORMATS_MAP
from openerp.tools import float_compare
from openerp.tools.translate import _
from openerp import SUPERUSER_ID
from openerp import netsvc
from openerp import tools
class mrp_production(osv.osv):
_inherit = 'mrp.production'
def _action_compute_lines(self, cr, uid, ids, properties=None, context=None):
""" Computes bills of material of a product.
@param properties: List containing dictionaries of properties.
@return: No. of products.
"""
if properties is None:
properties = []
results = []
bom_obj = self.pool.get('mrp.bom')
uom_obj = self.pool.get('product.uom')
prod_line_obj = self.pool.get('mrp.production.product.line')
workcenter_line_obj = self.pool.get('mrp.production.workcenter.line')
for production in self.browse(cr, uid, ids):
#unlink product_lines
prod_line_obj.unlink(cr, SUPERUSER_ID, [line.id for line in production.product_lines], context=context)
#unlink workcenter_lines
workcenter_line_obj.unlink(cr, SUPERUSER_ID, [line.id for line in production.workcenter_lines], context=context)
# search BoM structure and route
bom_point = production.bom_id
bom_id = production.bom_id.id
if not bom_point:
b <|fim_middle|>
if not bom_id:
continue
# get components and workcenter_lines from BoM structure
factor = uom_obj._compute_qty(cr, uid, production.product_uom.id, production.product_qty, bom_point.product_uom.id)
res = bom_obj._bom_explode(cr, uid, bom_point, factor / bom_point.product_qty, properties, routing_id=production.routing_id.id)
results = res[0] # product_lines
results2 = res[1] # workcenter_lines
# reset product_lines in production order
for line in results:
line['production_id'] = production.id
prod_line_obj.create(cr, uid, line)
#reset workcenter_lines in production order
for line in results2:
line['production_id'] = production.id
workcenter_line_obj.create(cr, uid, line)
return results
def action_ready(self, cr, uid, ids, context=None):
""" Changes the production state to Ready and location id of stock move.
@return: True
"""
move_obj = self.pool.get('stock.move')
self.write(cr, uid, ids, {'state': 'ready'})
for production in self.browse(cr, uid, ids, context=context):
if not production.bom_id:
produce_move_id = self._make_production_produce_line(cr, uid, production, context=context)
for (production_id,name) in self.name_get(cr, uid, ids):
production = self.browse(cr, uid, production_id)
if production.move_prod_id and production.move_prod_id.location_id.id != production.location_dest_id.id:
move_obj.write(cr, uid, [production.move_prod_id.id],
{'location_id': production.location_dest_id.id})
return True
def action_produce(self, cr, uid, production_id, production_qty, production_mode, context=None):
production = self.browse(cr, uid, production_id, context=context)
if not production.bom_id and production.state == 'ready':
wf_service = netsvc.LocalService("workflow")
wf_service.trg_validate(uid, 'mrp.production', production_id, 'button_produce', cr)
return super(mrp_production, self).action_produce(cr, uid, production_id, production_qty, production_mode, context=context)
mrp_production()
<|fim▁end|> | om_id = bom_obj._bom_find(cr, uid, production.product_id.id, production.product_uom.id, properties)
if bom_id:
bom_point = bom_obj.browse(cr, uid, bom_id)
routing_id = bom_point.routing_id.id or False
self.write(cr, uid, [production.id], {'bom_id': bom_id, 'routing_id': routing_id})
|
<|file_name|>mrp.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# © 2014 Elico Corp (https://www.elico-corp.com)
# Licence AGPL-3.0 or later(http://www.gnu.org/licenses/agpl.html)
import time
from datetime import datetime
import openerp.addons.decimal_precision as dp
from openerp.osv import fields, osv
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT, DATETIME_FORMATS_MAP
from openerp.tools import float_compare
from openerp.tools.translate import _
from openerp import SUPERUSER_ID
from openerp import netsvc
from openerp import tools
class mrp_production(osv.osv):
_inherit = 'mrp.production'
def _action_compute_lines(self, cr, uid, ids, properties=None, context=None):
""" Computes bills of material of a product.
@param properties: List containing dictionaries of properties.
@return: No. of products.
"""
if properties is None:
properties = []
results = []
bom_obj = self.pool.get('mrp.bom')
uom_obj = self.pool.get('product.uom')
prod_line_obj = self.pool.get('mrp.production.product.line')
workcenter_line_obj = self.pool.get('mrp.production.workcenter.line')
for production in self.browse(cr, uid, ids):
#unlink product_lines
prod_line_obj.unlink(cr, SUPERUSER_ID, [line.id for line in production.product_lines], context=context)
#unlink workcenter_lines
workcenter_line_obj.unlink(cr, SUPERUSER_ID, [line.id for line in production.workcenter_lines], context=context)
# search BoM structure and route
bom_point = production.bom_id
bom_id = production.bom_id.id
if not bom_point:
bom_id = bom_obj._bom_find(cr, uid, production.product_id.id, production.product_uom.id, properties)
if bom_id:
b <|fim_middle|>
if not bom_id:
continue
# get components and workcenter_lines from BoM structure
factor = uom_obj._compute_qty(cr, uid, production.product_uom.id, production.product_qty, bom_point.product_uom.id)
res = bom_obj._bom_explode(cr, uid, bom_point, factor / bom_point.product_qty, properties, routing_id=production.routing_id.id)
results = res[0] # product_lines
results2 = res[1] # workcenter_lines
# reset product_lines in production order
for line in results:
line['production_id'] = production.id
prod_line_obj.create(cr, uid, line)
#reset workcenter_lines in production order
for line in results2:
line['production_id'] = production.id
workcenter_line_obj.create(cr, uid, line)
return results
def action_ready(self, cr, uid, ids, context=None):
""" Changes the production state to Ready and location id of stock move.
@return: True
"""
move_obj = self.pool.get('stock.move')
self.write(cr, uid, ids, {'state': 'ready'})
for production in self.browse(cr, uid, ids, context=context):
if not production.bom_id:
produce_move_id = self._make_production_produce_line(cr, uid, production, context=context)
for (production_id,name) in self.name_get(cr, uid, ids):
production = self.browse(cr, uid, production_id)
if production.move_prod_id and production.move_prod_id.location_id.id != production.location_dest_id.id:
move_obj.write(cr, uid, [production.move_prod_id.id],
{'location_id': production.location_dest_id.id})
return True
def action_produce(self, cr, uid, production_id, production_qty, production_mode, context=None):
production = self.browse(cr, uid, production_id, context=context)
if not production.bom_id and production.state == 'ready':
wf_service = netsvc.LocalService("workflow")
wf_service.trg_validate(uid, 'mrp.production', production_id, 'button_produce', cr)
return super(mrp_production, self).action_produce(cr, uid, production_id, production_qty, production_mode, context=context)
mrp_production()
<|fim▁end|> | om_point = bom_obj.browse(cr, uid, bom_id)
routing_id = bom_point.routing_id.id or False
self.write(cr, uid, [production.id], {'bom_id': bom_id, 'routing_id': routing_id})
|
<|file_name|>mrp.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# © 2014 Elico Corp (https://www.elico-corp.com)
# Licence AGPL-3.0 or later(http://www.gnu.org/licenses/agpl.html)
import time
from datetime import datetime
import openerp.addons.decimal_precision as dp
from openerp.osv import fields, osv
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT, DATETIME_FORMATS_MAP
from openerp.tools import float_compare
from openerp.tools.translate import _
from openerp import SUPERUSER_ID
from openerp import netsvc
from openerp import tools
class mrp_production(osv.osv):
_inherit = 'mrp.production'
def _action_compute_lines(self, cr, uid, ids, properties=None, context=None):
""" Computes bills of material of a product.
@param properties: List containing dictionaries of properties.
@return: No. of products.
"""
if properties is None:
properties = []
results = []
bom_obj = self.pool.get('mrp.bom')
uom_obj = self.pool.get('product.uom')
prod_line_obj = self.pool.get('mrp.production.product.line')
workcenter_line_obj = self.pool.get('mrp.production.workcenter.line')
for production in self.browse(cr, uid, ids):
#unlink product_lines
prod_line_obj.unlink(cr, SUPERUSER_ID, [line.id for line in production.product_lines], context=context)
#unlink workcenter_lines
workcenter_line_obj.unlink(cr, SUPERUSER_ID, [line.id for line in production.workcenter_lines], context=context)
# search BoM structure and route
bom_point = production.bom_id
bom_id = production.bom_id.id
if not bom_point:
bom_id = bom_obj._bom_find(cr, uid, production.product_id.id, production.product_uom.id, properties)
if bom_id:
bom_point = bom_obj.browse(cr, uid, bom_id)
routing_id = bom_point.routing_id.id or False
self.write(cr, uid, [production.id], {'bom_id': bom_id, 'routing_id': routing_id})
if not bom_id:
c <|fim_middle|>
# get components and workcenter_lines from BoM structure
factor = uom_obj._compute_qty(cr, uid, production.product_uom.id, production.product_qty, bom_point.product_uom.id)
res = bom_obj._bom_explode(cr, uid, bom_point, factor / bom_point.product_qty, properties, routing_id=production.routing_id.id)
results = res[0] # product_lines
results2 = res[1] # workcenter_lines
# reset product_lines in production order
for line in results:
line['production_id'] = production.id
prod_line_obj.create(cr, uid, line)
#reset workcenter_lines in production order
for line in results2:
line['production_id'] = production.id
workcenter_line_obj.create(cr, uid, line)
return results
def action_ready(self, cr, uid, ids, context=None):
""" Changes the production state to Ready and location id of stock move.
@return: True
"""
move_obj = self.pool.get('stock.move')
self.write(cr, uid, ids, {'state': 'ready'})
for production in self.browse(cr, uid, ids, context=context):
if not production.bom_id:
produce_move_id = self._make_production_produce_line(cr, uid, production, context=context)
for (production_id,name) in self.name_get(cr, uid, ids):
production = self.browse(cr, uid, production_id)
if production.move_prod_id and production.move_prod_id.location_id.id != production.location_dest_id.id:
move_obj.write(cr, uid, [production.move_prod_id.id],
{'location_id': production.location_dest_id.id})
return True
def action_produce(self, cr, uid, production_id, production_qty, production_mode, context=None):
production = self.browse(cr, uid, production_id, context=context)
if not production.bom_id and production.state == 'ready':
wf_service = netsvc.LocalService("workflow")
wf_service.trg_validate(uid, 'mrp.production', production_id, 'button_produce', cr)
return super(mrp_production, self).action_produce(cr, uid, production_id, production_qty, production_mode, context=context)
mrp_production()
<|fim▁end|> | ontinue
|
<|file_name|>mrp.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# © 2014 Elico Corp (https://www.elico-corp.com)
# Licence AGPL-3.0 or later(http://www.gnu.org/licenses/agpl.html)
import time
from datetime import datetime
import openerp.addons.decimal_precision as dp
from openerp.osv import fields, osv
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT, DATETIME_FORMATS_MAP
from openerp.tools import float_compare
from openerp.tools.translate import _
from openerp import SUPERUSER_ID
from openerp import netsvc
from openerp import tools
class mrp_production(osv.osv):
_inherit = 'mrp.production'
def _action_compute_lines(self, cr, uid, ids, properties=None, context=None):
""" Computes bills of material of a product.
@param properties: List containing dictionaries of properties.
@return: No. of products.
"""
if properties is None:
properties = []
results = []
bom_obj = self.pool.get('mrp.bom')
uom_obj = self.pool.get('product.uom')
prod_line_obj = self.pool.get('mrp.production.product.line')
workcenter_line_obj = self.pool.get('mrp.production.workcenter.line')
for production in self.browse(cr, uid, ids):
#unlink product_lines
prod_line_obj.unlink(cr, SUPERUSER_ID, [line.id for line in production.product_lines], context=context)
#unlink workcenter_lines
workcenter_line_obj.unlink(cr, SUPERUSER_ID, [line.id for line in production.workcenter_lines], context=context)
# search BoM structure and route
bom_point = production.bom_id
bom_id = production.bom_id.id
if not bom_point:
bom_id = bom_obj._bom_find(cr, uid, production.product_id.id, production.product_uom.id, properties)
if bom_id:
bom_point = bom_obj.browse(cr, uid, bom_id)
routing_id = bom_point.routing_id.id or False
self.write(cr, uid, [production.id], {'bom_id': bom_id, 'routing_id': routing_id})
if not bom_id:
continue
# get components and workcenter_lines from BoM structure
factor = uom_obj._compute_qty(cr, uid, production.product_uom.id, production.product_qty, bom_point.product_uom.id)
res = bom_obj._bom_explode(cr, uid, bom_point, factor / bom_point.product_qty, properties, routing_id=production.routing_id.id)
results = res[0] # product_lines
results2 = res[1] # workcenter_lines
# reset product_lines in production order
for line in results:
line['production_id'] = production.id
prod_line_obj.create(cr, uid, line)
#reset workcenter_lines in production order
for line in results2:
line['production_id'] = production.id
workcenter_line_obj.create(cr, uid, line)
return results
def action_ready(self, cr, uid, ids, context=None):
""" Changes the production state to Ready and location id of stock move.
@return: True
"""
move_obj = self.pool.get('stock.move')
self.write(cr, uid, ids, {'state': 'ready'})
for production in self.browse(cr, uid, ids, context=context):
if not production.bom_id:
p <|fim_middle|>
for (production_id,name) in self.name_get(cr, uid, ids):
production = self.browse(cr, uid, production_id)
if production.move_prod_id and production.move_prod_id.location_id.id != production.location_dest_id.id:
move_obj.write(cr, uid, [production.move_prod_id.id],
{'location_id': production.location_dest_id.id})
return True
def action_produce(self, cr, uid, production_id, production_qty, production_mode, context=None):
production = self.browse(cr, uid, production_id, context=context)
if not production.bom_id and production.state == 'ready':
wf_service = netsvc.LocalService("workflow")
wf_service.trg_validate(uid, 'mrp.production', production_id, 'button_produce', cr)
return super(mrp_production, self).action_produce(cr, uid, production_id, production_qty, production_mode, context=context)
mrp_production()
<|fim▁end|> | roduce_move_id = self._make_production_produce_line(cr, uid, production, context=context)
|
<|file_name|>mrp.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# © 2014 Elico Corp (https://www.elico-corp.com)
# Licence AGPL-3.0 or later(http://www.gnu.org/licenses/agpl.html)
import time
from datetime import datetime
import openerp.addons.decimal_precision as dp
from openerp.osv import fields, osv
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT, DATETIME_FORMATS_MAP
from openerp.tools import float_compare
from openerp.tools.translate import _
from openerp import SUPERUSER_ID
from openerp import netsvc
from openerp import tools
class mrp_production(osv.osv):
_inherit = 'mrp.production'
def _action_compute_lines(self, cr, uid, ids, properties=None, context=None):
""" Computes bills of material of a product.
@param properties: List containing dictionaries of properties.
@return: No. of products.
"""
if properties is None:
properties = []
results = []
bom_obj = self.pool.get('mrp.bom')
uom_obj = self.pool.get('product.uom')
prod_line_obj = self.pool.get('mrp.production.product.line')
workcenter_line_obj = self.pool.get('mrp.production.workcenter.line')
for production in self.browse(cr, uid, ids):
#unlink product_lines
prod_line_obj.unlink(cr, SUPERUSER_ID, [line.id for line in production.product_lines], context=context)
#unlink workcenter_lines
workcenter_line_obj.unlink(cr, SUPERUSER_ID, [line.id for line in production.workcenter_lines], context=context)
# search BoM structure and route
bom_point = production.bom_id
bom_id = production.bom_id.id
if not bom_point:
bom_id = bom_obj._bom_find(cr, uid, production.product_id.id, production.product_uom.id, properties)
if bom_id:
bom_point = bom_obj.browse(cr, uid, bom_id)
routing_id = bom_point.routing_id.id or False
self.write(cr, uid, [production.id], {'bom_id': bom_id, 'routing_id': routing_id})
if not bom_id:
continue
# get components and workcenter_lines from BoM structure
factor = uom_obj._compute_qty(cr, uid, production.product_uom.id, production.product_qty, bom_point.product_uom.id)
res = bom_obj._bom_explode(cr, uid, bom_point, factor / bom_point.product_qty, properties, routing_id=production.routing_id.id)
results = res[0] # product_lines
results2 = res[1] # workcenter_lines
# reset product_lines in production order
for line in results:
line['production_id'] = production.id
prod_line_obj.create(cr, uid, line)
#reset workcenter_lines in production order
for line in results2:
line['production_id'] = production.id
workcenter_line_obj.create(cr, uid, line)
return results
def action_ready(self, cr, uid, ids, context=None):
""" Changes the production state to Ready and location id of stock move.
@return: True
"""
move_obj = self.pool.get('stock.move')
self.write(cr, uid, ids, {'state': 'ready'})
for production in self.browse(cr, uid, ids, context=context):
if not production.bom_id:
produce_move_id = self._make_production_produce_line(cr, uid, production, context=context)
for (production_id,name) in self.name_get(cr, uid, ids):
production = self.browse(cr, uid, production_id)
if production.move_prod_id and production.move_prod_id.location_id.id != production.location_dest_id.id:
m <|fim_middle|>
return True
def action_produce(self, cr, uid, production_id, production_qty, production_mode, context=None):
production = self.browse(cr, uid, production_id, context=context)
if not production.bom_id and production.state == 'ready':
wf_service = netsvc.LocalService("workflow")
wf_service.trg_validate(uid, 'mrp.production', production_id, 'button_produce', cr)
return super(mrp_production, self).action_produce(cr, uid, production_id, production_qty, production_mode, context=context)
mrp_production()
<|fim▁end|> | ove_obj.write(cr, uid, [production.move_prod_id.id],
{'location_id': production.location_dest_id.id})
|
<|file_name|>mrp.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# © 2014 Elico Corp (https://www.elico-corp.com)
# Licence AGPL-3.0 or later(http://www.gnu.org/licenses/agpl.html)
import time
from datetime import datetime
import openerp.addons.decimal_precision as dp
from openerp.osv import fields, osv
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT, DATETIME_FORMATS_MAP
from openerp.tools import float_compare
from openerp.tools.translate import _
from openerp import SUPERUSER_ID
from openerp import netsvc
from openerp import tools
class mrp_production(osv.osv):
_inherit = 'mrp.production'
def _action_compute_lines(self, cr, uid, ids, properties=None, context=None):
""" Computes bills of material of a product.
@param properties: List containing dictionaries of properties.
@return: No. of products.
"""
if properties is None:
properties = []
results = []
bom_obj = self.pool.get('mrp.bom')
uom_obj = self.pool.get('product.uom')
prod_line_obj = self.pool.get('mrp.production.product.line')
workcenter_line_obj = self.pool.get('mrp.production.workcenter.line')
for production in self.browse(cr, uid, ids):
#unlink product_lines
prod_line_obj.unlink(cr, SUPERUSER_ID, [line.id for line in production.product_lines], context=context)
#unlink workcenter_lines
workcenter_line_obj.unlink(cr, SUPERUSER_ID, [line.id for line in production.workcenter_lines], context=context)
# search BoM structure and route
bom_point = production.bom_id
bom_id = production.bom_id.id
if not bom_point:
bom_id = bom_obj._bom_find(cr, uid, production.product_id.id, production.product_uom.id, properties)
if bom_id:
bom_point = bom_obj.browse(cr, uid, bom_id)
routing_id = bom_point.routing_id.id or False
self.write(cr, uid, [production.id], {'bom_id': bom_id, 'routing_id': routing_id})
if not bom_id:
continue
# get components and workcenter_lines from BoM structure
factor = uom_obj._compute_qty(cr, uid, production.product_uom.id, production.product_qty, bom_point.product_uom.id)
res = bom_obj._bom_explode(cr, uid, bom_point, factor / bom_point.product_qty, properties, routing_id=production.routing_id.id)
results = res[0] # product_lines
results2 = res[1] # workcenter_lines
# reset product_lines in production order
for line in results:
line['production_id'] = production.id
prod_line_obj.create(cr, uid, line)
#reset workcenter_lines in production order
for line in results2:
line['production_id'] = production.id
workcenter_line_obj.create(cr, uid, line)
return results
def action_ready(self, cr, uid, ids, context=None):
""" Changes the production state to Ready and location id of stock move.
@return: True
"""
move_obj = self.pool.get('stock.move')
self.write(cr, uid, ids, {'state': 'ready'})
for production in self.browse(cr, uid, ids, context=context):
if not production.bom_id:
produce_move_id = self._make_production_produce_line(cr, uid, production, context=context)
for (production_id,name) in self.name_get(cr, uid, ids):
production = self.browse(cr, uid, production_id)
if production.move_prod_id and production.move_prod_id.location_id.id != production.location_dest_id.id:
move_obj.write(cr, uid, [production.move_prod_id.id],
{'location_id': production.location_dest_id.id})
return True
def action_produce(self, cr, uid, production_id, production_qty, production_mode, context=None):
production = self.browse(cr, uid, production_id, context=context)
if not production.bom_id and production.state == 'ready':
w <|fim_middle|>
return super(mrp_production, self).action_produce(cr, uid, production_id, production_qty, production_mode, context=context)
mrp_production()
<|fim▁end|> | f_service = netsvc.LocalService("workflow")
wf_service.trg_validate(uid, 'mrp.production', production_id, 'button_produce', cr)
|
<|file_name|>mrp.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# © 2014 Elico Corp (https://www.elico-corp.com)
# Licence AGPL-3.0 or later(http://www.gnu.org/licenses/agpl.html)
import time
from datetime import datetime
import openerp.addons.decimal_precision as dp
from openerp.osv import fields, osv
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT, DATETIME_FORMATS_MAP
from openerp.tools import float_compare
from openerp.tools.translate import _
from openerp import SUPERUSER_ID
from openerp import netsvc
from openerp import tools
class mrp_production(osv.osv):
_inherit = 'mrp.production'
def _<|fim_middle|>self, cr, uid, ids, properties=None, context=None):
""" Computes bills of material of a product.
@param properties: List containing dictionaries of properties.
@return: No. of products.
"""
if properties is None:
properties = []
results = []
bom_obj = self.pool.get('mrp.bom')
uom_obj = self.pool.get('product.uom')
prod_line_obj = self.pool.get('mrp.production.product.line')
workcenter_line_obj = self.pool.get('mrp.production.workcenter.line')
for production in self.browse(cr, uid, ids):
#unlink product_lines
prod_line_obj.unlink(cr, SUPERUSER_ID, [line.id for line in production.product_lines], context=context)
#unlink workcenter_lines
workcenter_line_obj.unlink(cr, SUPERUSER_ID, [line.id for line in production.workcenter_lines], context=context)
# search BoM structure and route
bom_point = production.bom_id
bom_id = production.bom_id.id
if not bom_point:
bom_id = bom_obj._bom_find(cr, uid, production.product_id.id, production.product_uom.id, properties)
if bom_id:
bom_point = bom_obj.browse(cr, uid, bom_id)
routing_id = bom_point.routing_id.id or False
self.write(cr, uid, [production.id], {'bom_id': bom_id, 'routing_id': routing_id})
if not bom_id:
continue
# get components and workcenter_lines from BoM structure
factor = uom_obj._compute_qty(cr, uid, production.product_uom.id, production.product_qty, bom_point.product_uom.id)
res = bom_obj._bom_explode(cr, uid, bom_point, factor / bom_point.product_qty, properties, routing_id=production.routing_id.id)
results = res[0] # product_lines
results2 = res[1] # workcenter_lines
# reset product_lines in production order
for line in results:
line['production_id'] = production.id
prod_line_obj.create(cr, uid, line)
#reset workcenter_lines in production order
for line in results2:
line['production_id'] = production.id
workcenter_line_obj.create(cr, uid, line)
return results
def action_ready(self, cr, uid, ids, context=None):
""" Changes the production state to Ready and location id of stock move.
@return: True
"""
move_obj = self.pool.get('stock.move')
self.write(cr, uid, ids, {'state': 'ready'})
for production in self.browse(cr, uid, ids, context=context):
if not production.bom_id:
produce_move_id = self._make_production_produce_line(cr, uid, production, context=context)
for (production_id,name) in self.name_get(cr, uid, ids):
production = self.browse(cr, uid, production_id)
if production.move_prod_id and production.move_prod_id.location_id.id != production.location_dest_id.id:
move_obj.write(cr, uid, [production.move_prod_id.id],
{'location_id': production.location_dest_id.id})
return True
def action_produce(self, cr, uid, production_id, production_qty, production_mode, context=None):
production = self.browse(cr, uid, production_id, context=context)
if not production.bom_id and production.state == 'ready':
wf_service = netsvc.LocalService("workflow")
wf_service.trg_validate(uid, 'mrp.production', production_id, 'button_produce', cr)
return super(mrp_production, self).action_produce(cr, uid, production_id, production_qty, production_mode, context=context)
mrp_production()
<|fim▁end|> | action_compute_lines( |
<|file_name|>mrp.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# © 2014 Elico Corp (https://www.elico-corp.com)
# Licence AGPL-3.0 or later(http://www.gnu.org/licenses/agpl.html)
import time
from datetime import datetime
import openerp.addons.decimal_precision as dp
from openerp.osv import fields, osv
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT, DATETIME_FORMATS_MAP
from openerp.tools import float_compare
from openerp.tools.translate import _
from openerp import SUPERUSER_ID
from openerp import netsvc
from openerp import tools
class mrp_production(osv.osv):
_inherit = 'mrp.production'
def _action_compute_lines(self, cr, uid, ids, properties=None, context=None):
""" Computes bills of material of a product.
@param properties: List containing dictionaries of properties.
@return: No. of products.
"""
if properties is None:
properties = []
results = []
bom_obj = self.pool.get('mrp.bom')
uom_obj = self.pool.get('product.uom')
prod_line_obj = self.pool.get('mrp.production.product.line')
workcenter_line_obj = self.pool.get('mrp.production.workcenter.line')
for production in self.browse(cr, uid, ids):
#unlink product_lines
prod_line_obj.unlink(cr, SUPERUSER_ID, [line.id for line in production.product_lines], context=context)
#unlink workcenter_lines
workcenter_line_obj.unlink(cr, SUPERUSER_ID, [line.id for line in production.workcenter_lines], context=context)
# search BoM structure and route
bom_point = production.bom_id
bom_id = production.bom_id.id
if not bom_point:
bom_id = bom_obj._bom_find(cr, uid, production.product_id.id, production.product_uom.id, properties)
if bom_id:
bom_point = bom_obj.browse(cr, uid, bom_id)
routing_id = bom_point.routing_id.id or False
self.write(cr, uid, [production.id], {'bom_id': bom_id, 'routing_id': routing_id})
if not bom_id:
continue
# get components and workcenter_lines from BoM structure
factor = uom_obj._compute_qty(cr, uid, production.product_uom.id, production.product_qty, bom_point.product_uom.id)
res = bom_obj._bom_explode(cr, uid, bom_point, factor / bom_point.product_qty, properties, routing_id=production.routing_id.id)
results = res[0] # product_lines
results2 = res[1] # workcenter_lines
# reset product_lines in production order
for line in results:
line['production_id'] = production.id
prod_line_obj.create(cr, uid, line)
#reset workcenter_lines in production order
for line in results2:
line['production_id'] = production.id
workcenter_line_obj.create(cr, uid, line)
return results
def a<|fim_middle|>self, cr, uid, ids, context=None):
""" Changes the production state to Ready and location id of stock move.
@return: True
"""
move_obj = self.pool.get('stock.move')
self.write(cr, uid, ids, {'state': 'ready'})
for production in self.browse(cr, uid, ids, context=context):
if not production.bom_id:
produce_move_id = self._make_production_produce_line(cr, uid, production, context=context)
for (production_id,name) in self.name_get(cr, uid, ids):
production = self.browse(cr, uid, production_id)
if production.move_prod_id and production.move_prod_id.location_id.id != production.location_dest_id.id:
move_obj.write(cr, uid, [production.move_prod_id.id],
{'location_id': production.location_dest_id.id})
return True
def action_produce(self, cr, uid, production_id, production_qty, production_mode, context=None):
production = self.browse(cr, uid, production_id, context=context)
if not production.bom_id and production.state == 'ready':
wf_service = netsvc.LocalService("workflow")
wf_service.trg_validate(uid, 'mrp.production', production_id, 'button_produce', cr)
return super(mrp_production, self).action_produce(cr, uid, production_id, production_qty, production_mode, context=context)
mrp_production()
<|fim▁end|> | ction_ready( |
<|file_name|>mrp.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# © 2014 Elico Corp (https://www.elico-corp.com)
# Licence AGPL-3.0 or later(http://www.gnu.org/licenses/agpl.html)
import time
from datetime import datetime
import openerp.addons.decimal_precision as dp
from openerp.osv import fields, osv
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT, DATETIME_FORMATS_MAP
from openerp.tools import float_compare
from openerp.tools.translate import _
from openerp import SUPERUSER_ID
from openerp import netsvc
from openerp import tools
class mrp_production(osv.osv):
_inherit = 'mrp.production'
def _action_compute_lines(self, cr, uid, ids, properties=None, context=None):
""" Computes bills of material of a product.
@param properties: List containing dictionaries of properties.
@return: No. of products.
"""
if properties is None:
properties = []
results = []
bom_obj = self.pool.get('mrp.bom')
uom_obj = self.pool.get('product.uom')
prod_line_obj = self.pool.get('mrp.production.product.line')
workcenter_line_obj = self.pool.get('mrp.production.workcenter.line')
for production in self.browse(cr, uid, ids):
#unlink product_lines
prod_line_obj.unlink(cr, SUPERUSER_ID, [line.id for line in production.product_lines], context=context)
#unlink workcenter_lines
workcenter_line_obj.unlink(cr, SUPERUSER_ID, [line.id for line in production.workcenter_lines], context=context)
# search BoM structure and route
bom_point = production.bom_id
bom_id = production.bom_id.id
if not bom_point:
bom_id = bom_obj._bom_find(cr, uid, production.product_id.id, production.product_uom.id, properties)
if bom_id:
bom_point = bom_obj.browse(cr, uid, bom_id)
routing_id = bom_point.routing_id.id or False
self.write(cr, uid, [production.id], {'bom_id': bom_id, 'routing_id': routing_id})
if not bom_id:
continue
# get components and workcenter_lines from BoM structure
factor = uom_obj._compute_qty(cr, uid, production.product_uom.id, production.product_qty, bom_point.product_uom.id)
res = bom_obj._bom_explode(cr, uid, bom_point, factor / bom_point.product_qty, properties, routing_id=production.routing_id.id)
results = res[0] # product_lines
results2 = res[1] # workcenter_lines
# reset product_lines in production order
for line in results:
line['production_id'] = production.id
prod_line_obj.create(cr, uid, line)
#reset workcenter_lines in production order
for line in results2:
line['production_id'] = production.id
workcenter_line_obj.create(cr, uid, line)
return results
def action_ready(self, cr, uid, ids, context=None):
""" Changes the production state to Ready and location id of stock move.
@return: True
"""
move_obj = self.pool.get('stock.move')
self.write(cr, uid, ids, {'state': 'ready'})
for production in self.browse(cr, uid, ids, context=context):
if not production.bom_id:
produce_move_id = self._make_production_produce_line(cr, uid, production, context=context)
for (production_id,name) in self.name_get(cr, uid, ids):
production = self.browse(cr, uid, production_id)
if production.move_prod_id and production.move_prod_id.location_id.id != production.location_dest_id.id:
move_obj.write(cr, uid, [production.move_prod_id.id],
{'location_id': production.location_dest_id.id})
return True
def a<|fim_middle|>self, cr, uid, production_id, production_qty, production_mode, context=None):
production = self.browse(cr, uid, production_id, context=context)
if not production.bom_id and production.state == 'ready':
wf_service = netsvc.LocalService("workflow")
wf_service.trg_validate(uid, 'mrp.production', production_id, 'button_produce', cr)
return super(mrp_production, self).action_produce(cr, uid, production_id, production_qty, production_mode, context=context)
mrp_production()
<|fim▁end|> | ction_produce( |
<|file_name|>tests.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | #This is where the tests go. |
<|file_name|>noncentral_distr.py<|end_file_name|><|fim▁begin|>from numpy import sqrt
from pacal.standard_distr import NormalDistr, ChiSquareDistr
from pacal.distr import Distr, SumDistr, DivDistr, InvDistr
from pacal.distr import sqrt as distr_sqrt
class NoncentralTDistr(DivDistr):
def __init__(self, df = 2, mu = 0):
d1 = NormalDistr(mu, 1)
d2 = distr_sqrt(ChiSquareDistr(df) / df)
super(NoncentralTDistr, self).__init__(d1, d2)
self.df = df
self.mu = mu<|fim▁hole|>
class NoncentralChiSquareDistr(SumDistr):
def __new__(cls, df, lmbda = 0):
assert df >= 1
d1 = NormalDistr(sqrt(lmbda))**2
if df == 1:
return d1
d2 = ChiSquareDistr(df - 1)
ncc2 = super(NoncentralChiSquareDistr, cls).__new__(cls, d1, d2)
super(NoncentralChiSquareDistr, ncc2).__init__(d1, d2)
ncc2.df = df
ncc2.lmbda = lmbda
return ncc2
def __init__(self, df, lmbda = 0):
pass
def __str__(self):
return "NoncentralChiSquare(df={0},lambda={1})#{2}".format(self.df, self.lmbda, self.id())
def getName(self):
return "NoncChi2({0},{1})".format(self.df, self.lmbda)
class NoncentralBetaDistr(InvDistr):
def __init__(self, alpha = 1, beta = 1, lmbda = 0):
d = 1 + ChiSquareDistr(2.0 * beta) / NoncentralChiSquareDistr(2 * alpha, lmbda)
super(NoncentralBetaDistr, self).__init__(d)
self.alpha = alpha
self.beta = beta
self.lmbda = lmbda
def __str__(self):
return "NoncentralBetaDistr(alpha={0},beta={1},lambda={2})#{3}".format(self.alpha, self.beta, self.lmbda, self.id())
def getName(self):
return "NoncBeta({0},{1},{2})".format(self.alpha, self.beta, self.lmbda)
class NoncentralFDistr(DivDistr):
def __init__(self, df1 = 1, df2 = 1, lmbda = 0):
d1 = NoncentralChiSquareDistr(df1, lmbda) / df1
d2 = ChiSquareDistr(df2) / df2
super(NoncentralFDistr, self).__init__(d1, d2)
self.df1 = df1
self.df2 = df2
self.lmbda = lmbda
def __str__(self):
return "NoncentralFDistr(df1={0},df2={1},lambda={2})#{3}".format(self.df1, self.df2, self.lmbda, self.id())
def getName(self):
return "NoncF({0},{1},{2})".format(self.df1, self.df2, self.lmbda)<|fim▁end|> | def __str__(self):
return "NoncentralTDistr(df={0},mu={1})#{2}".format(self.df, self.mu, self.id())
def getName(self):
return "NoncT({0},{1})".format(self.df, self.mu) |
<|file_name|>noncentral_distr.py<|end_file_name|><|fim▁begin|>from numpy import sqrt
from pacal.standard_distr import NormalDistr, ChiSquareDistr
from pacal.distr import Distr, SumDistr, DivDistr, InvDistr
from pacal.distr import sqrt as distr_sqrt
class NoncentralTDistr(DivDistr):
<|fim_middle|>
class NoncentralChiSquareDistr(SumDistr):
def __new__(cls, df, lmbda = 0):
assert df >= 1
d1 = NormalDistr(sqrt(lmbda))**2
if df == 1:
return d1
d2 = ChiSquareDistr(df - 1)
ncc2 = super(NoncentralChiSquareDistr, cls).__new__(cls, d1, d2)
super(NoncentralChiSquareDistr, ncc2).__init__(d1, d2)
ncc2.df = df
ncc2.lmbda = lmbda
return ncc2
def __init__(self, df, lmbda = 0):
pass
def __str__(self):
return "NoncentralChiSquare(df={0},lambda={1})#{2}".format(self.df, self.lmbda, self.id())
def getName(self):
return "NoncChi2({0},{1})".format(self.df, self.lmbda)
class NoncentralBetaDistr(InvDistr):
def __init__(self, alpha = 1, beta = 1, lmbda = 0):
d = 1 + ChiSquareDistr(2.0 * beta) / NoncentralChiSquareDistr(2 * alpha, lmbda)
super(NoncentralBetaDistr, self).__init__(d)
self.alpha = alpha
self.beta = beta
self.lmbda = lmbda
def __str__(self):
return "NoncentralBetaDistr(alpha={0},beta={1},lambda={2})#{3}".format(self.alpha, self.beta, self.lmbda, self.id())
def getName(self):
return "NoncBeta({0},{1},{2})".format(self.alpha, self.beta, self.lmbda)
class NoncentralFDistr(DivDistr):
def __init__(self, df1 = 1, df2 = 1, lmbda = 0):
d1 = NoncentralChiSquareDistr(df1, lmbda) / df1
d2 = ChiSquareDistr(df2) / df2
super(NoncentralFDistr, self).__init__(d1, d2)
self.df1 = df1
self.df2 = df2
self.lmbda = lmbda
def __str__(self):
return "NoncentralFDistr(df1={0},df2={1},lambda={2})#{3}".format(self.df1, self.df2, self.lmbda, self.id())
def getName(self):
return "NoncF({0},{1},{2})".format(self.df1, self.df2, self.lmbda)
<|fim▁end|> | def __init__(self, df = 2, mu = 0):
d1 = NormalDistr(mu, 1)
d2 = distr_sqrt(ChiSquareDistr(df) / df)
super(NoncentralTDistr, self).__init__(d1, d2)
self.df = df
self.mu = mu
def __str__(self):
return "NoncentralTDistr(df={0},mu={1})#{2}".format(self.df, self.mu, self.id())
def getName(self):
return "NoncT({0},{1})".format(self.df, self.mu) |
<|file_name|>noncentral_distr.py<|end_file_name|><|fim▁begin|>from numpy import sqrt
from pacal.standard_distr import NormalDistr, ChiSquareDistr
from pacal.distr import Distr, SumDistr, DivDistr, InvDistr
from pacal.distr import sqrt as distr_sqrt
class NoncentralTDistr(DivDistr):
def __init__(self, df = 2, mu = 0):
<|fim_middle|>
def __str__(self):
return "NoncentralTDistr(df={0},mu={1})#{2}".format(self.df, self.mu, self.id())
def getName(self):
return "NoncT({0},{1})".format(self.df, self.mu)
class NoncentralChiSquareDistr(SumDistr):
def __new__(cls, df, lmbda = 0):
assert df >= 1
d1 = NormalDistr(sqrt(lmbda))**2
if df == 1:
return d1
d2 = ChiSquareDistr(df - 1)
ncc2 = super(NoncentralChiSquareDistr, cls).__new__(cls, d1, d2)
super(NoncentralChiSquareDistr, ncc2).__init__(d1, d2)
ncc2.df = df
ncc2.lmbda = lmbda
return ncc2
def __init__(self, df, lmbda = 0):
pass
def __str__(self):
return "NoncentralChiSquare(df={0},lambda={1})#{2}".format(self.df, self.lmbda, self.id())
def getName(self):
return "NoncChi2({0},{1})".format(self.df, self.lmbda)
class NoncentralBetaDistr(InvDistr):
def __init__(self, alpha = 1, beta = 1, lmbda = 0):
d = 1 + ChiSquareDistr(2.0 * beta) / NoncentralChiSquareDistr(2 * alpha, lmbda)
super(NoncentralBetaDistr, self).__init__(d)
self.alpha = alpha
self.beta = beta
self.lmbda = lmbda
def __str__(self):
return "NoncentralBetaDistr(alpha={0},beta={1},lambda={2})#{3}".format(self.alpha, self.beta, self.lmbda, self.id())
def getName(self):
return "NoncBeta({0},{1},{2})".format(self.alpha, self.beta, self.lmbda)
class NoncentralFDistr(DivDistr):
def __init__(self, df1 = 1, df2 = 1, lmbda = 0):
d1 = NoncentralChiSquareDistr(df1, lmbda) / df1
d2 = ChiSquareDistr(df2) / df2
super(NoncentralFDistr, self).__init__(d1, d2)
self.df1 = df1
self.df2 = df2
self.lmbda = lmbda
def __str__(self):
return "NoncentralFDistr(df1={0},df2={1},lambda={2})#{3}".format(self.df1, self.df2, self.lmbda, self.id())
def getName(self):
return "NoncF({0},{1},{2})".format(self.df1, self.df2, self.lmbda)
<|fim▁end|> | d1 = NormalDistr(mu, 1)
d2 = distr_sqrt(ChiSquareDistr(df) / df)
super(NoncentralTDistr, self).__init__(d1, d2)
self.df = df
self.mu = mu |
<|file_name|>noncentral_distr.py<|end_file_name|><|fim▁begin|>from numpy import sqrt
from pacal.standard_distr import NormalDistr, ChiSquareDistr
from pacal.distr import Distr, SumDistr, DivDistr, InvDistr
from pacal.distr import sqrt as distr_sqrt
class NoncentralTDistr(DivDistr):
def __init__(self, df = 2, mu = 0):
d1 = NormalDistr(mu, 1)
d2 = distr_sqrt(ChiSquareDistr(df) / df)
super(NoncentralTDistr, self).__init__(d1, d2)
self.df = df
self.mu = mu
def __str__(self):
<|fim_middle|>
def getName(self):
return "NoncT({0},{1})".format(self.df, self.mu)
class NoncentralChiSquareDistr(SumDistr):
def __new__(cls, df, lmbda = 0):
assert df >= 1
d1 = NormalDistr(sqrt(lmbda))**2
if df == 1:
return d1
d2 = ChiSquareDistr(df - 1)
ncc2 = super(NoncentralChiSquareDistr, cls).__new__(cls, d1, d2)
super(NoncentralChiSquareDistr, ncc2).__init__(d1, d2)
ncc2.df = df
ncc2.lmbda = lmbda
return ncc2
def __init__(self, df, lmbda = 0):
pass
def __str__(self):
return "NoncentralChiSquare(df={0},lambda={1})#{2}".format(self.df, self.lmbda, self.id())
def getName(self):
return "NoncChi2({0},{1})".format(self.df, self.lmbda)
class NoncentralBetaDistr(InvDistr):
def __init__(self, alpha = 1, beta = 1, lmbda = 0):
d = 1 + ChiSquareDistr(2.0 * beta) / NoncentralChiSquareDistr(2 * alpha, lmbda)
super(NoncentralBetaDistr, self).__init__(d)
self.alpha = alpha
self.beta = beta
self.lmbda = lmbda
def __str__(self):
return "NoncentralBetaDistr(alpha={0},beta={1},lambda={2})#{3}".format(self.alpha, self.beta, self.lmbda, self.id())
def getName(self):
return "NoncBeta({0},{1},{2})".format(self.alpha, self.beta, self.lmbda)
class NoncentralFDistr(DivDistr):
def __init__(self, df1 = 1, df2 = 1, lmbda = 0):
d1 = NoncentralChiSquareDistr(df1, lmbda) / df1
d2 = ChiSquareDistr(df2) / df2
super(NoncentralFDistr, self).__init__(d1, d2)
self.df1 = df1
self.df2 = df2
self.lmbda = lmbda
def __str__(self):
return "NoncentralFDistr(df1={0},df2={1},lambda={2})#{3}".format(self.df1, self.df2, self.lmbda, self.id())
def getName(self):
return "NoncF({0},{1},{2})".format(self.df1, self.df2, self.lmbda)
<|fim▁end|> | return "NoncentralTDistr(df={0},mu={1})#{2}".format(self.df, self.mu, self.id()) |
<|file_name|>noncentral_distr.py<|end_file_name|><|fim▁begin|>from numpy import sqrt
from pacal.standard_distr import NormalDistr, ChiSquareDistr
from pacal.distr import Distr, SumDistr, DivDistr, InvDistr
from pacal.distr import sqrt as distr_sqrt
class NoncentralTDistr(DivDistr):
def __init__(self, df = 2, mu = 0):
d1 = NormalDistr(mu, 1)
d2 = distr_sqrt(ChiSquareDistr(df) / df)
super(NoncentralTDistr, self).__init__(d1, d2)
self.df = df
self.mu = mu
def __str__(self):
return "NoncentralTDistr(df={0},mu={1})#{2}".format(self.df, self.mu, self.id())
def getName(self):
<|fim_middle|>
class NoncentralChiSquareDistr(SumDistr):
def __new__(cls, df, lmbda = 0):
assert df >= 1
d1 = NormalDistr(sqrt(lmbda))**2
if df == 1:
return d1
d2 = ChiSquareDistr(df - 1)
ncc2 = super(NoncentralChiSquareDistr, cls).__new__(cls, d1, d2)
super(NoncentralChiSquareDistr, ncc2).__init__(d1, d2)
ncc2.df = df
ncc2.lmbda = lmbda
return ncc2
def __init__(self, df, lmbda = 0):
pass
def __str__(self):
return "NoncentralChiSquare(df={0},lambda={1})#{2}".format(self.df, self.lmbda, self.id())
def getName(self):
return "NoncChi2({0},{1})".format(self.df, self.lmbda)
class NoncentralBetaDistr(InvDistr):
def __init__(self, alpha = 1, beta = 1, lmbda = 0):
d = 1 + ChiSquareDistr(2.0 * beta) / NoncentralChiSquareDistr(2 * alpha, lmbda)
super(NoncentralBetaDistr, self).__init__(d)
self.alpha = alpha
self.beta = beta
self.lmbda = lmbda
def __str__(self):
return "NoncentralBetaDistr(alpha={0},beta={1},lambda={2})#{3}".format(self.alpha, self.beta, self.lmbda, self.id())
def getName(self):
return "NoncBeta({0},{1},{2})".format(self.alpha, self.beta, self.lmbda)
class NoncentralFDistr(DivDistr):
def __init__(self, df1 = 1, df2 = 1, lmbda = 0):
d1 = NoncentralChiSquareDistr(df1, lmbda) / df1
d2 = ChiSquareDistr(df2) / df2
super(NoncentralFDistr, self).__init__(d1, d2)
self.df1 = df1
self.df2 = df2
self.lmbda = lmbda
def __str__(self):
return "NoncentralFDistr(df1={0},df2={1},lambda={2})#{3}".format(self.df1, self.df2, self.lmbda, self.id())
def getName(self):
return "NoncF({0},{1},{2})".format(self.df1, self.df2, self.lmbda)
<|fim▁end|> | return "NoncT({0},{1})".format(self.df, self.mu) |
<|file_name|>noncentral_distr.py<|end_file_name|><|fim▁begin|>from numpy import sqrt
from pacal.standard_distr import NormalDistr, ChiSquareDistr
from pacal.distr import Distr, SumDistr, DivDistr, InvDistr
from pacal.distr import sqrt as distr_sqrt
class NoncentralTDistr(DivDistr):
def __init__(self, df = 2, mu = 0):
d1 = NormalDistr(mu, 1)
d2 = distr_sqrt(ChiSquareDistr(df) / df)
super(NoncentralTDistr, self).__init__(d1, d2)
self.df = df
self.mu = mu
def __str__(self):
return "NoncentralTDistr(df={0},mu={1})#{2}".format(self.df, self.mu, self.id())
def getName(self):
return "NoncT({0},{1})".format(self.df, self.mu)
class NoncentralChiSquareDistr(SumDistr):
<|fim_middle|>
class NoncentralBetaDistr(InvDistr):
def __init__(self, alpha = 1, beta = 1, lmbda = 0):
d = 1 + ChiSquareDistr(2.0 * beta) / NoncentralChiSquareDistr(2 * alpha, lmbda)
super(NoncentralBetaDistr, self).__init__(d)
self.alpha = alpha
self.beta = beta
self.lmbda = lmbda
def __str__(self):
return "NoncentralBetaDistr(alpha={0},beta={1},lambda={2})#{3}".format(self.alpha, self.beta, self.lmbda, self.id())
def getName(self):
return "NoncBeta({0},{1},{2})".format(self.alpha, self.beta, self.lmbda)
class NoncentralFDistr(DivDistr):
def __init__(self, df1 = 1, df2 = 1, lmbda = 0):
d1 = NoncentralChiSquareDistr(df1, lmbda) / df1
d2 = ChiSquareDistr(df2) / df2
super(NoncentralFDistr, self).__init__(d1, d2)
self.df1 = df1
self.df2 = df2
self.lmbda = lmbda
def __str__(self):
return "NoncentralFDistr(df1={0},df2={1},lambda={2})#{3}".format(self.df1, self.df2, self.lmbda, self.id())
def getName(self):
return "NoncF({0},{1},{2})".format(self.df1, self.df2, self.lmbda)
<|fim▁end|> | def __new__(cls, df, lmbda = 0):
assert df >= 1
d1 = NormalDistr(sqrt(lmbda))**2
if df == 1:
return d1
d2 = ChiSquareDistr(df - 1)
ncc2 = super(NoncentralChiSquareDistr, cls).__new__(cls, d1, d2)
super(NoncentralChiSquareDistr, ncc2).__init__(d1, d2)
ncc2.df = df
ncc2.lmbda = lmbda
return ncc2
def __init__(self, df, lmbda = 0):
pass
def __str__(self):
return "NoncentralChiSquare(df={0},lambda={1})#{2}".format(self.df, self.lmbda, self.id())
def getName(self):
return "NoncChi2({0},{1})".format(self.df, self.lmbda) |
<|file_name|>noncentral_distr.py<|end_file_name|><|fim▁begin|>from numpy import sqrt
from pacal.standard_distr import NormalDistr, ChiSquareDistr
from pacal.distr import Distr, SumDistr, DivDistr, InvDistr
from pacal.distr import sqrt as distr_sqrt
class NoncentralTDistr(DivDistr):
def __init__(self, df = 2, mu = 0):
d1 = NormalDistr(mu, 1)
d2 = distr_sqrt(ChiSquareDistr(df) / df)
super(NoncentralTDistr, self).__init__(d1, d2)
self.df = df
self.mu = mu
def __str__(self):
return "NoncentralTDistr(df={0},mu={1})#{2}".format(self.df, self.mu, self.id())
def getName(self):
return "NoncT({0},{1})".format(self.df, self.mu)
class NoncentralChiSquareDistr(SumDistr):
def __new__(cls, df, lmbda = 0):
<|fim_middle|>
def __init__(self, df, lmbda = 0):
pass
def __str__(self):
return "NoncentralChiSquare(df={0},lambda={1})#{2}".format(self.df, self.lmbda, self.id())
def getName(self):
return "NoncChi2({0},{1})".format(self.df, self.lmbda)
class NoncentralBetaDistr(InvDistr):
def __init__(self, alpha = 1, beta = 1, lmbda = 0):
d = 1 + ChiSquareDistr(2.0 * beta) / NoncentralChiSquareDistr(2 * alpha, lmbda)
super(NoncentralBetaDistr, self).__init__(d)
self.alpha = alpha
self.beta = beta
self.lmbda = lmbda
def __str__(self):
return "NoncentralBetaDistr(alpha={0},beta={1},lambda={2})#{3}".format(self.alpha, self.beta, self.lmbda, self.id())
def getName(self):
return "NoncBeta({0},{1},{2})".format(self.alpha, self.beta, self.lmbda)
class NoncentralFDistr(DivDistr):
def __init__(self, df1 = 1, df2 = 1, lmbda = 0):
d1 = NoncentralChiSquareDistr(df1, lmbda) / df1
d2 = ChiSquareDistr(df2) / df2
super(NoncentralFDistr, self).__init__(d1, d2)
self.df1 = df1
self.df2 = df2
self.lmbda = lmbda
def __str__(self):
return "NoncentralFDistr(df1={0},df2={1},lambda={2})#{3}".format(self.df1, self.df2, self.lmbda, self.id())
def getName(self):
return "NoncF({0},{1},{2})".format(self.df1, self.df2, self.lmbda)
<|fim▁end|> | assert df >= 1
d1 = NormalDistr(sqrt(lmbda))**2
if df == 1:
return d1
d2 = ChiSquareDistr(df - 1)
ncc2 = super(NoncentralChiSquareDistr, cls).__new__(cls, d1, d2)
super(NoncentralChiSquareDistr, ncc2).__init__(d1, d2)
ncc2.df = df
ncc2.lmbda = lmbda
return ncc2 |
<|file_name|>noncentral_distr.py<|end_file_name|><|fim▁begin|>from numpy import sqrt
from pacal.standard_distr import NormalDistr, ChiSquareDistr
from pacal.distr import Distr, SumDistr, DivDistr, InvDistr
from pacal.distr import sqrt as distr_sqrt
class NoncentralTDistr(DivDistr):
def __init__(self, df = 2, mu = 0):
d1 = NormalDistr(mu, 1)
d2 = distr_sqrt(ChiSquareDistr(df) / df)
super(NoncentralTDistr, self).__init__(d1, d2)
self.df = df
self.mu = mu
def __str__(self):
return "NoncentralTDistr(df={0},mu={1})#{2}".format(self.df, self.mu, self.id())
def getName(self):
return "NoncT({0},{1})".format(self.df, self.mu)
class NoncentralChiSquareDistr(SumDistr):
def __new__(cls, df, lmbda = 0):
assert df >= 1
d1 = NormalDistr(sqrt(lmbda))**2
if df == 1:
return d1
d2 = ChiSquareDistr(df - 1)
ncc2 = super(NoncentralChiSquareDistr, cls).__new__(cls, d1, d2)
super(NoncentralChiSquareDistr, ncc2).__init__(d1, d2)
ncc2.df = df
ncc2.lmbda = lmbda
return ncc2
def __init__(self, df, lmbda = 0):
<|fim_middle|>
def __str__(self):
return "NoncentralChiSquare(df={0},lambda={1})#{2}".format(self.df, self.lmbda, self.id())
def getName(self):
return "NoncChi2({0},{1})".format(self.df, self.lmbda)
class NoncentralBetaDistr(InvDistr):
def __init__(self, alpha = 1, beta = 1, lmbda = 0):
d = 1 + ChiSquareDistr(2.0 * beta) / NoncentralChiSquareDistr(2 * alpha, lmbda)
super(NoncentralBetaDistr, self).__init__(d)
self.alpha = alpha
self.beta = beta
self.lmbda = lmbda
def __str__(self):
return "NoncentralBetaDistr(alpha={0},beta={1},lambda={2})#{3}".format(self.alpha, self.beta, self.lmbda, self.id())
def getName(self):
return "NoncBeta({0},{1},{2})".format(self.alpha, self.beta, self.lmbda)
class NoncentralFDistr(DivDistr):
def __init__(self, df1 = 1, df2 = 1, lmbda = 0):
d1 = NoncentralChiSquareDistr(df1, lmbda) / df1
d2 = ChiSquareDistr(df2) / df2
super(NoncentralFDistr, self).__init__(d1, d2)
self.df1 = df1
self.df2 = df2
self.lmbda = lmbda
def __str__(self):
return "NoncentralFDistr(df1={0},df2={1},lambda={2})#{3}".format(self.df1, self.df2, self.lmbda, self.id())
def getName(self):
return "NoncF({0},{1},{2})".format(self.df1, self.df2, self.lmbda)
<|fim▁end|> | pass |
<|file_name|>noncentral_distr.py<|end_file_name|><|fim▁begin|>from numpy import sqrt
from pacal.standard_distr import NormalDistr, ChiSquareDistr
from pacal.distr import Distr, SumDistr, DivDistr, InvDistr
from pacal.distr import sqrt as distr_sqrt
class NoncentralTDistr(DivDistr):
def __init__(self, df = 2, mu = 0):
d1 = NormalDistr(mu, 1)
d2 = distr_sqrt(ChiSquareDistr(df) / df)
super(NoncentralTDistr, self).__init__(d1, d2)
self.df = df
self.mu = mu
def __str__(self):
return "NoncentralTDistr(df={0},mu={1})#{2}".format(self.df, self.mu, self.id())
def getName(self):
return "NoncT({0},{1})".format(self.df, self.mu)
class NoncentralChiSquareDistr(SumDistr):
def __new__(cls, df, lmbda = 0):
assert df >= 1
d1 = NormalDistr(sqrt(lmbda))**2
if df == 1:
return d1
d2 = ChiSquareDistr(df - 1)
ncc2 = super(NoncentralChiSquareDistr, cls).__new__(cls, d1, d2)
super(NoncentralChiSquareDistr, ncc2).__init__(d1, d2)
ncc2.df = df
ncc2.lmbda = lmbda
return ncc2
def __init__(self, df, lmbda = 0):
pass
def __str__(self):
<|fim_middle|>
def getName(self):
return "NoncChi2({0},{1})".format(self.df, self.lmbda)
class NoncentralBetaDistr(InvDistr):
def __init__(self, alpha = 1, beta = 1, lmbda = 0):
d = 1 + ChiSquareDistr(2.0 * beta) / NoncentralChiSquareDistr(2 * alpha, lmbda)
super(NoncentralBetaDistr, self).__init__(d)
self.alpha = alpha
self.beta = beta
self.lmbda = lmbda
def __str__(self):
return "NoncentralBetaDistr(alpha={0},beta={1},lambda={2})#{3}".format(self.alpha, self.beta, self.lmbda, self.id())
def getName(self):
return "NoncBeta({0},{1},{2})".format(self.alpha, self.beta, self.lmbda)
class NoncentralFDistr(DivDistr):
def __init__(self, df1 = 1, df2 = 1, lmbda = 0):
d1 = NoncentralChiSquareDistr(df1, lmbda) / df1
d2 = ChiSquareDistr(df2) / df2
super(NoncentralFDistr, self).__init__(d1, d2)
self.df1 = df1
self.df2 = df2
self.lmbda = lmbda
def __str__(self):
return "NoncentralFDistr(df1={0},df2={1},lambda={2})#{3}".format(self.df1, self.df2, self.lmbda, self.id())
def getName(self):
return "NoncF({0},{1},{2})".format(self.df1, self.df2, self.lmbda)
<|fim▁end|> | return "NoncentralChiSquare(df={0},lambda={1})#{2}".format(self.df, self.lmbda, self.id()) |
<|file_name|>noncentral_distr.py<|end_file_name|><|fim▁begin|>from numpy import sqrt
from pacal.standard_distr import NormalDistr, ChiSquareDistr
from pacal.distr import Distr, SumDistr, DivDistr, InvDistr
from pacal.distr import sqrt as distr_sqrt
class NoncentralTDistr(DivDistr):
def __init__(self, df = 2, mu = 0):
d1 = NormalDistr(mu, 1)
d2 = distr_sqrt(ChiSquareDistr(df) / df)
super(NoncentralTDistr, self).__init__(d1, d2)
self.df = df
self.mu = mu
def __str__(self):
return "NoncentralTDistr(df={0},mu={1})#{2}".format(self.df, self.mu, self.id())
def getName(self):
return "NoncT({0},{1})".format(self.df, self.mu)
class NoncentralChiSquareDistr(SumDistr):
def __new__(cls, df, lmbda = 0):
assert df >= 1
d1 = NormalDistr(sqrt(lmbda))**2
if df == 1:
return d1
d2 = ChiSquareDistr(df - 1)
ncc2 = super(NoncentralChiSquareDistr, cls).__new__(cls, d1, d2)
super(NoncentralChiSquareDistr, ncc2).__init__(d1, d2)
ncc2.df = df
ncc2.lmbda = lmbda
return ncc2
def __init__(self, df, lmbda = 0):
pass
def __str__(self):
return "NoncentralChiSquare(df={0},lambda={1})#{2}".format(self.df, self.lmbda, self.id())
def getName(self):
<|fim_middle|>
class NoncentralBetaDistr(InvDistr):
def __init__(self, alpha = 1, beta = 1, lmbda = 0):
d = 1 + ChiSquareDistr(2.0 * beta) / NoncentralChiSquareDistr(2 * alpha, lmbda)
super(NoncentralBetaDistr, self).__init__(d)
self.alpha = alpha
self.beta = beta
self.lmbda = lmbda
def __str__(self):
return "NoncentralBetaDistr(alpha={0},beta={1},lambda={2})#{3}".format(self.alpha, self.beta, self.lmbda, self.id())
def getName(self):
return "NoncBeta({0},{1},{2})".format(self.alpha, self.beta, self.lmbda)
class NoncentralFDistr(DivDistr):
def __init__(self, df1 = 1, df2 = 1, lmbda = 0):
d1 = NoncentralChiSquareDistr(df1, lmbda) / df1
d2 = ChiSquareDistr(df2) / df2
super(NoncentralFDistr, self).__init__(d1, d2)
self.df1 = df1
self.df2 = df2
self.lmbda = lmbda
def __str__(self):
return "NoncentralFDistr(df1={0},df2={1},lambda={2})#{3}".format(self.df1, self.df2, self.lmbda, self.id())
def getName(self):
return "NoncF({0},{1},{2})".format(self.df1, self.df2, self.lmbda)
<|fim▁end|> | return "NoncChi2({0},{1})".format(self.df, self.lmbda) |
<|file_name|>noncentral_distr.py<|end_file_name|><|fim▁begin|>from numpy import sqrt
from pacal.standard_distr import NormalDistr, ChiSquareDistr
from pacal.distr import Distr, SumDistr, DivDistr, InvDistr
from pacal.distr import sqrt as distr_sqrt
class NoncentralTDistr(DivDistr):
def __init__(self, df = 2, mu = 0):
d1 = NormalDistr(mu, 1)
d2 = distr_sqrt(ChiSquareDistr(df) / df)
super(NoncentralTDistr, self).__init__(d1, d2)
self.df = df
self.mu = mu
def __str__(self):
return "NoncentralTDistr(df={0},mu={1})#{2}".format(self.df, self.mu, self.id())
def getName(self):
return "NoncT({0},{1})".format(self.df, self.mu)
class NoncentralChiSquareDistr(SumDistr):
def __new__(cls, df, lmbda = 0):
assert df >= 1
d1 = NormalDistr(sqrt(lmbda))**2
if df == 1:
return d1
d2 = ChiSquareDistr(df - 1)
ncc2 = super(NoncentralChiSquareDistr, cls).__new__(cls, d1, d2)
super(NoncentralChiSquareDistr, ncc2).__init__(d1, d2)
ncc2.df = df
ncc2.lmbda = lmbda
return ncc2
def __init__(self, df, lmbda = 0):
pass
def __str__(self):
return "NoncentralChiSquare(df={0},lambda={1})#{2}".format(self.df, self.lmbda, self.id())
def getName(self):
return "NoncChi2({0},{1})".format(self.df, self.lmbda)
class NoncentralBetaDistr(InvDistr):
<|fim_middle|>
class NoncentralFDistr(DivDistr):
def __init__(self, df1 = 1, df2 = 1, lmbda = 0):
d1 = NoncentralChiSquareDistr(df1, lmbda) / df1
d2 = ChiSquareDistr(df2) / df2
super(NoncentralFDistr, self).__init__(d1, d2)
self.df1 = df1
self.df2 = df2
self.lmbda = lmbda
def __str__(self):
return "NoncentralFDistr(df1={0},df2={1},lambda={2})#{3}".format(self.df1, self.df2, self.lmbda, self.id())
def getName(self):
return "NoncF({0},{1},{2})".format(self.df1, self.df2, self.lmbda)
<|fim▁end|> | def __init__(self, alpha = 1, beta = 1, lmbda = 0):
d = 1 + ChiSquareDistr(2.0 * beta) / NoncentralChiSquareDistr(2 * alpha, lmbda)
super(NoncentralBetaDistr, self).__init__(d)
self.alpha = alpha
self.beta = beta
self.lmbda = lmbda
def __str__(self):
return "NoncentralBetaDistr(alpha={0},beta={1},lambda={2})#{3}".format(self.alpha, self.beta, self.lmbda, self.id())
def getName(self):
return "NoncBeta({0},{1},{2})".format(self.alpha, self.beta, self.lmbda) |
<|file_name|>noncentral_distr.py<|end_file_name|><|fim▁begin|>from numpy import sqrt
from pacal.standard_distr import NormalDistr, ChiSquareDistr
from pacal.distr import Distr, SumDistr, DivDistr, InvDistr
from pacal.distr import sqrt as distr_sqrt
class NoncentralTDistr(DivDistr):
def __init__(self, df = 2, mu = 0):
d1 = NormalDistr(mu, 1)
d2 = distr_sqrt(ChiSquareDistr(df) / df)
super(NoncentralTDistr, self).__init__(d1, d2)
self.df = df
self.mu = mu
def __str__(self):
return "NoncentralTDistr(df={0},mu={1})#{2}".format(self.df, self.mu, self.id())
def getName(self):
return "NoncT({0},{1})".format(self.df, self.mu)
class NoncentralChiSquareDistr(SumDistr):
def __new__(cls, df, lmbda = 0):
assert df >= 1
d1 = NormalDistr(sqrt(lmbda))**2
if df == 1:
return d1
d2 = ChiSquareDistr(df - 1)
ncc2 = super(NoncentralChiSquareDistr, cls).__new__(cls, d1, d2)
super(NoncentralChiSquareDistr, ncc2).__init__(d1, d2)
ncc2.df = df
ncc2.lmbda = lmbda
return ncc2
def __init__(self, df, lmbda = 0):
pass
def __str__(self):
return "NoncentralChiSquare(df={0},lambda={1})#{2}".format(self.df, self.lmbda, self.id())
def getName(self):
return "NoncChi2({0},{1})".format(self.df, self.lmbda)
class NoncentralBetaDistr(InvDistr):
def __init__(self, alpha = 1, beta = 1, lmbda = 0):
<|fim_middle|>
def __str__(self):
return "NoncentralBetaDistr(alpha={0},beta={1},lambda={2})#{3}".format(self.alpha, self.beta, self.lmbda, self.id())
def getName(self):
return "NoncBeta({0},{1},{2})".format(self.alpha, self.beta, self.lmbda)
class NoncentralFDistr(DivDistr):
def __init__(self, df1 = 1, df2 = 1, lmbda = 0):
d1 = NoncentralChiSquareDistr(df1, lmbda) / df1
d2 = ChiSquareDistr(df2) / df2
super(NoncentralFDistr, self).__init__(d1, d2)
self.df1 = df1
self.df2 = df2
self.lmbda = lmbda
def __str__(self):
return "NoncentralFDistr(df1={0},df2={1},lambda={2})#{3}".format(self.df1, self.df2, self.lmbda, self.id())
def getName(self):
return "NoncF({0},{1},{2})".format(self.df1, self.df2, self.lmbda)
<|fim▁end|> | d = 1 + ChiSquareDistr(2.0 * beta) / NoncentralChiSquareDistr(2 * alpha, lmbda)
super(NoncentralBetaDistr, self).__init__(d)
self.alpha = alpha
self.beta = beta
self.lmbda = lmbda |
<|file_name|>noncentral_distr.py<|end_file_name|><|fim▁begin|>from numpy import sqrt
from pacal.standard_distr import NormalDistr, ChiSquareDistr
from pacal.distr import Distr, SumDistr, DivDistr, InvDistr
from pacal.distr import sqrt as distr_sqrt
class NoncentralTDistr(DivDistr):
def __init__(self, df = 2, mu = 0):
d1 = NormalDistr(mu, 1)
d2 = distr_sqrt(ChiSquareDistr(df) / df)
super(NoncentralTDistr, self).__init__(d1, d2)
self.df = df
self.mu = mu
def __str__(self):
return "NoncentralTDistr(df={0},mu={1})#{2}".format(self.df, self.mu, self.id())
def getName(self):
return "NoncT({0},{1})".format(self.df, self.mu)
class NoncentralChiSquareDistr(SumDistr):
def __new__(cls, df, lmbda = 0):
assert df >= 1
d1 = NormalDistr(sqrt(lmbda))**2
if df == 1:
return d1
d2 = ChiSquareDistr(df - 1)
ncc2 = super(NoncentralChiSquareDistr, cls).__new__(cls, d1, d2)
super(NoncentralChiSquareDistr, ncc2).__init__(d1, d2)
ncc2.df = df
ncc2.lmbda = lmbda
return ncc2
def __init__(self, df, lmbda = 0):
pass
def __str__(self):
return "NoncentralChiSquare(df={0},lambda={1})#{2}".format(self.df, self.lmbda, self.id())
def getName(self):
return "NoncChi2({0},{1})".format(self.df, self.lmbda)
class NoncentralBetaDistr(InvDistr):
def __init__(self, alpha = 1, beta = 1, lmbda = 0):
d = 1 + ChiSquareDistr(2.0 * beta) / NoncentralChiSquareDistr(2 * alpha, lmbda)
super(NoncentralBetaDistr, self).__init__(d)
self.alpha = alpha
self.beta = beta
self.lmbda = lmbda
def __str__(self):
<|fim_middle|>
def getName(self):
return "NoncBeta({0},{1},{2})".format(self.alpha, self.beta, self.lmbda)
class NoncentralFDistr(DivDistr):
def __init__(self, df1 = 1, df2 = 1, lmbda = 0):
d1 = NoncentralChiSquareDistr(df1, lmbda) / df1
d2 = ChiSquareDistr(df2) / df2
super(NoncentralFDistr, self).__init__(d1, d2)
self.df1 = df1
self.df2 = df2
self.lmbda = lmbda
def __str__(self):
return "NoncentralFDistr(df1={0},df2={1},lambda={2})#{3}".format(self.df1, self.df2, self.lmbda, self.id())
def getName(self):
return "NoncF({0},{1},{2})".format(self.df1, self.df2, self.lmbda)
<|fim▁end|> | return "NoncentralBetaDistr(alpha={0},beta={1},lambda={2})#{3}".format(self.alpha, self.beta, self.lmbda, self.id()) |
<|file_name|>noncentral_distr.py<|end_file_name|><|fim▁begin|>from numpy import sqrt
from pacal.standard_distr import NormalDistr, ChiSquareDistr
from pacal.distr import Distr, SumDistr, DivDistr, InvDistr
from pacal.distr import sqrt as distr_sqrt
class NoncentralTDistr(DivDistr):
def __init__(self, df = 2, mu = 0):
d1 = NormalDistr(mu, 1)
d2 = distr_sqrt(ChiSquareDistr(df) / df)
super(NoncentralTDistr, self).__init__(d1, d2)
self.df = df
self.mu = mu
def __str__(self):
return "NoncentralTDistr(df={0},mu={1})#{2}".format(self.df, self.mu, self.id())
def getName(self):
return "NoncT({0},{1})".format(self.df, self.mu)
class NoncentralChiSquareDistr(SumDistr):
def __new__(cls, df, lmbda = 0):
assert df >= 1
d1 = NormalDistr(sqrt(lmbda))**2
if df == 1:
return d1
d2 = ChiSquareDistr(df - 1)
ncc2 = super(NoncentralChiSquareDistr, cls).__new__(cls, d1, d2)
super(NoncentralChiSquareDistr, ncc2).__init__(d1, d2)
ncc2.df = df
ncc2.lmbda = lmbda
return ncc2
def __init__(self, df, lmbda = 0):
pass
def __str__(self):
return "NoncentralChiSquare(df={0},lambda={1})#{2}".format(self.df, self.lmbda, self.id())
def getName(self):
return "NoncChi2({0},{1})".format(self.df, self.lmbda)
class NoncentralBetaDistr(InvDistr):
def __init__(self, alpha = 1, beta = 1, lmbda = 0):
d = 1 + ChiSquareDistr(2.0 * beta) / NoncentralChiSquareDistr(2 * alpha, lmbda)
super(NoncentralBetaDistr, self).__init__(d)
self.alpha = alpha
self.beta = beta
self.lmbda = lmbda
def __str__(self):
return "NoncentralBetaDistr(alpha={0},beta={1},lambda={2})#{3}".format(self.alpha, self.beta, self.lmbda, self.id())
def getName(self):
<|fim_middle|>
class NoncentralFDistr(DivDistr):
def __init__(self, df1 = 1, df2 = 1, lmbda = 0):
d1 = NoncentralChiSquareDistr(df1, lmbda) / df1
d2 = ChiSquareDistr(df2) / df2
super(NoncentralFDistr, self).__init__(d1, d2)
self.df1 = df1
self.df2 = df2
self.lmbda = lmbda
def __str__(self):
return "NoncentralFDistr(df1={0},df2={1},lambda={2})#{3}".format(self.df1, self.df2, self.lmbda, self.id())
def getName(self):
return "NoncF({0},{1},{2})".format(self.df1, self.df2, self.lmbda)
<|fim▁end|> | return "NoncBeta({0},{1},{2})".format(self.alpha, self.beta, self.lmbda) |
<|file_name|>noncentral_distr.py<|end_file_name|><|fim▁begin|>from numpy import sqrt
from pacal.standard_distr import NormalDistr, ChiSquareDistr
from pacal.distr import Distr, SumDistr, DivDistr, InvDistr
from pacal.distr import sqrt as distr_sqrt
class NoncentralTDistr(DivDistr):
def __init__(self, df = 2, mu = 0):
d1 = NormalDistr(mu, 1)
d2 = distr_sqrt(ChiSquareDistr(df) / df)
super(NoncentralTDistr, self).__init__(d1, d2)
self.df = df
self.mu = mu
def __str__(self):
return "NoncentralTDistr(df={0},mu={1})#{2}".format(self.df, self.mu, self.id())
def getName(self):
return "NoncT({0},{1})".format(self.df, self.mu)
class NoncentralChiSquareDistr(SumDistr):
def __new__(cls, df, lmbda = 0):
assert df >= 1
d1 = NormalDistr(sqrt(lmbda))**2
if df == 1:
return d1
d2 = ChiSquareDistr(df - 1)
ncc2 = super(NoncentralChiSquareDistr, cls).__new__(cls, d1, d2)
super(NoncentralChiSquareDistr, ncc2).__init__(d1, d2)
ncc2.df = df
ncc2.lmbda = lmbda
return ncc2
def __init__(self, df, lmbda = 0):
pass
def __str__(self):
return "NoncentralChiSquare(df={0},lambda={1})#{2}".format(self.df, self.lmbda, self.id())
def getName(self):
return "NoncChi2({0},{1})".format(self.df, self.lmbda)
class NoncentralBetaDistr(InvDistr):
def __init__(self, alpha = 1, beta = 1, lmbda = 0):
d = 1 + ChiSquareDistr(2.0 * beta) / NoncentralChiSquareDistr(2 * alpha, lmbda)
super(NoncentralBetaDistr, self).__init__(d)
self.alpha = alpha
self.beta = beta
self.lmbda = lmbda
def __str__(self):
return "NoncentralBetaDistr(alpha={0},beta={1},lambda={2})#{3}".format(self.alpha, self.beta, self.lmbda, self.id())
def getName(self):
return "NoncBeta({0},{1},{2})".format(self.alpha, self.beta, self.lmbda)
class NoncentralFDistr(DivDistr):
<|fim_middle|>
<|fim▁end|> | def __init__(self, df1 = 1, df2 = 1, lmbda = 0):
d1 = NoncentralChiSquareDistr(df1, lmbda) / df1
d2 = ChiSquareDistr(df2) / df2
super(NoncentralFDistr, self).__init__(d1, d2)
self.df1 = df1
self.df2 = df2
self.lmbda = lmbda
def __str__(self):
return "NoncentralFDistr(df1={0},df2={1},lambda={2})#{3}".format(self.df1, self.df2, self.lmbda, self.id())
def getName(self):
return "NoncF({0},{1},{2})".format(self.df1, self.df2, self.lmbda) |
<|file_name|>noncentral_distr.py<|end_file_name|><|fim▁begin|>from numpy import sqrt
from pacal.standard_distr import NormalDistr, ChiSquareDistr
from pacal.distr import Distr, SumDistr, DivDistr, InvDistr
from pacal.distr import sqrt as distr_sqrt
class NoncentralTDistr(DivDistr):
def __init__(self, df = 2, mu = 0):
d1 = NormalDistr(mu, 1)
d2 = distr_sqrt(ChiSquareDistr(df) / df)
super(NoncentralTDistr, self).__init__(d1, d2)
self.df = df
self.mu = mu
def __str__(self):
return "NoncentralTDistr(df={0},mu={1})#{2}".format(self.df, self.mu, self.id())
def getName(self):
return "NoncT({0},{1})".format(self.df, self.mu)
class NoncentralChiSquareDistr(SumDistr):
def __new__(cls, df, lmbda = 0):
assert df >= 1
d1 = NormalDistr(sqrt(lmbda))**2
if df == 1:
return d1
d2 = ChiSquareDistr(df - 1)
ncc2 = super(NoncentralChiSquareDistr, cls).__new__(cls, d1, d2)
super(NoncentralChiSquareDistr, ncc2).__init__(d1, d2)
ncc2.df = df
ncc2.lmbda = lmbda
return ncc2
def __init__(self, df, lmbda = 0):
pass
def __str__(self):
return "NoncentralChiSquare(df={0},lambda={1})#{2}".format(self.df, self.lmbda, self.id())
def getName(self):
return "NoncChi2({0},{1})".format(self.df, self.lmbda)
class NoncentralBetaDistr(InvDistr):
def __init__(self, alpha = 1, beta = 1, lmbda = 0):
d = 1 + ChiSquareDistr(2.0 * beta) / NoncentralChiSquareDistr(2 * alpha, lmbda)
super(NoncentralBetaDistr, self).__init__(d)
self.alpha = alpha
self.beta = beta
self.lmbda = lmbda
def __str__(self):
return "NoncentralBetaDistr(alpha={0},beta={1},lambda={2})#{3}".format(self.alpha, self.beta, self.lmbda, self.id())
def getName(self):
return "NoncBeta({0},{1},{2})".format(self.alpha, self.beta, self.lmbda)
class NoncentralFDistr(DivDistr):
def __init__(self, df1 = 1, df2 = 1, lmbda = 0):
<|fim_middle|>
def __str__(self):
return "NoncentralFDistr(df1={0},df2={1},lambda={2})#{3}".format(self.df1, self.df2, self.lmbda, self.id())
def getName(self):
return "NoncF({0},{1},{2})".format(self.df1, self.df2, self.lmbda)
<|fim▁end|> | d1 = NoncentralChiSquareDistr(df1, lmbda) / df1
d2 = ChiSquareDistr(df2) / df2
super(NoncentralFDistr, self).__init__(d1, d2)
self.df1 = df1
self.df2 = df2
self.lmbda = lmbda |
<|file_name|>noncentral_distr.py<|end_file_name|><|fim▁begin|>from numpy import sqrt
from pacal.standard_distr import NormalDistr, ChiSquareDistr
from pacal.distr import Distr, SumDistr, DivDistr, InvDistr
from pacal.distr import sqrt as distr_sqrt
class NoncentralTDistr(DivDistr):
def __init__(self, df = 2, mu = 0):
d1 = NormalDistr(mu, 1)
d2 = distr_sqrt(ChiSquareDistr(df) / df)
super(NoncentralTDistr, self).__init__(d1, d2)
self.df = df
self.mu = mu
def __str__(self):
return "NoncentralTDistr(df={0},mu={1})#{2}".format(self.df, self.mu, self.id())
def getName(self):
return "NoncT({0},{1})".format(self.df, self.mu)
class NoncentralChiSquareDistr(SumDistr):
def __new__(cls, df, lmbda = 0):
assert df >= 1
d1 = NormalDistr(sqrt(lmbda))**2
if df == 1:
return d1
d2 = ChiSquareDistr(df - 1)
ncc2 = super(NoncentralChiSquareDistr, cls).__new__(cls, d1, d2)
super(NoncentralChiSquareDistr, ncc2).__init__(d1, d2)
ncc2.df = df
ncc2.lmbda = lmbda
return ncc2
def __init__(self, df, lmbda = 0):
pass
def __str__(self):
return "NoncentralChiSquare(df={0},lambda={1})#{2}".format(self.df, self.lmbda, self.id())
def getName(self):
return "NoncChi2({0},{1})".format(self.df, self.lmbda)
class NoncentralBetaDistr(InvDistr):
def __init__(self, alpha = 1, beta = 1, lmbda = 0):
d = 1 + ChiSquareDistr(2.0 * beta) / NoncentralChiSquareDistr(2 * alpha, lmbda)
super(NoncentralBetaDistr, self).__init__(d)
self.alpha = alpha
self.beta = beta
self.lmbda = lmbda
def __str__(self):
return "NoncentralBetaDistr(alpha={0},beta={1},lambda={2})#{3}".format(self.alpha, self.beta, self.lmbda, self.id())
def getName(self):
return "NoncBeta({0},{1},{2})".format(self.alpha, self.beta, self.lmbda)
class NoncentralFDistr(DivDistr):
def __init__(self, df1 = 1, df2 = 1, lmbda = 0):
d1 = NoncentralChiSquareDistr(df1, lmbda) / df1
d2 = ChiSquareDistr(df2) / df2
super(NoncentralFDistr, self).__init__(d1, d2)
self.df1 = df1
self.df2 = df2
self.lmbda = lmbda
def __str__(self):
<|fim_middle|>
def getName(self):
return "NoncF({0},{1},{2})".format(self.df1, self.df2, self.lmbda)
<|fim▁end|> | return "NoncentralFDistr(df1={0},df2={1},lambda={2})#{3}".format(self.df1, self.df2, self.lmbda, self.id()) |
<|file_name|>noncentral_distr.py<|end_file_name|><|fim▁begin|>from numpy import sqrt
from pacal.standard_distr import NormalDistr, ChiSquareDistr
from pacal.distr import Distr, SumDistr, DivDistr, InvDistr
from pacal.distr import sqrt as distr_sqrt
class NoncentralTDistr(DivDistr):
def __init__(self, df = 2, mu = 0):
d1 = NormalDistr(mu, 1)
d2 = distr_sqrt(ChiSquareDistr(df) / df)
super(NoncentralTDistr, self).__init__(d1, d2)
self.df = df
self.mu = mu
def __str__(self):
return "NoncentralTDistr(df={0},mu={1})#{2}".format(self.df, self.mu, self.id())
def getName(self):
return "NoncT({0},{1})".format(self.df, self.mu)
class NoncentralChiSquareDistr(SumDistr):
def __new__(cls, df, lmbda = 0):
assert df >= 1
d1 = NormalDistr(sqrt(lmbda))**2
if df == 1:
return d1
d2 = ChiSquareDistr(df - 1)
ncc2 = super(NoncentralChiSquareDistr, cls).__new__(cls, d1, d2)
super(NoncentralChiSquareDistr, ncc2).__init__(d1, d2)
ncc2.df = df
ncc2.lmbda = lmbda
return ncc2
def __init__(self, df, lmbda = 0):
pass
def __str__(self):
return "NoncentralChiSquare(df={0},lambda={1})#{2}".format(self.df, self.lmbda, self.id())
def getName(self):
return "NoncChi2({0},{1})".format(self.df, self.lmbda)
class NoncentralBetaDistr(InvDistr):
def __init__(self, alpha = 1, beta = 1, lmbda = 0):
d = 1 + ChiSquareDistr(2.0 * beta) / NoncentralChiSquareDistr(2 * alpha, lmbda)
super(NoncentralBetaDistr, self).__init__(d)
self.alpha = alpha
self.beta = beta
self.lmbda = lmbda
def __str__(self):
return "NoncentralBetaDistr(alpha={0},beta={1},lambda={2})#{3}".format(self.alpha, self.beta, self.lmbda, self.id())
def getName(self):
return "NoncBeta({0},{1},{2})".format(self.alpha, self.beta, self.lmbda)
class NoncentralFDistr(DivDistr):
def __init__(self, df1 = 1, df2 = 1, lmbda = 0):
d1 = NoncentralChiSquareDistr(df1, lmbda) / df1
d2 = ChiSquareDistr(df2) / df2
super(NoncentralFDistr, self).__init__(d1, d2)
self.df1 = df1
self.df2 = df2
self.lmbda = lmbda
def __str__(self):
return "NoncentralFDistr(df1={0},df2={1},lambda={2})#{3}".format(self.df1, self.df2, self.lmbda, self.id())
def getName(self):
<|fim_middle|>
<|fim▁end|> | return "NoncF({0},{1},{2})".format(self.df1, self.df2, self.lmbda) |
<|file_name|>noncentral_distr.py<|end_file_name|><|fim▁begin|>from numpy import sqrt
from pacal.standard_distr import NormalDistr, ChiSquareDistr
from pacal.distr import Distr, SumDistr, DivDistr, InvDistr
from pacal.distr import sqrt as distr_sqrt
class NoncentralTDistr(DivDistr):
def __init__(self, df = 2, mu = 0):
d1 = NormalDistr(mu, 1)
d2 = distr_sqrt(ChiSquareDistr(df) / df)
super(NoncentralTDistr, self).__init__(d1, d2)
self.df = df
self.mu = mu
def __str__(self):
return "NoncentralTDistr(df={0},mu={1})#{2}".format(self.df, self.mu, self.id())
def getName(self):
return "NoncT({0},{1})".format(self.df, self.mu)
class NoncentralChiSquareDistr(SumDistr):
def __new__(cls, df, lmbda = 0):
assert df >= 1
d1 = NormalDistr(sqrt(lmbda))**2
if df == 1:
<|fim_middle|>
d2 = ChiSquareDistr(df - 1)
ncc2 = super(NoncentralChiSquareDistr, cls).__new__(cls, d1, d2)
super(NoncentralChiSquareDistr, ncc2).__init__(d1, d2)
ncc2.df = df
ncc2.lmbda = lmbda
return ncc2
def __init__(self, df, lmbda = 0):
pass
def __str__(self):
return "NoncentralChiSquare(df={0},lambda={1})#{2}".format(self.df, self.lmbda, self.id())
def getName(self):
return "NoncChi2({0},{1})".format(self.df, self.lmbda)
class NoncentralBetaDistr(InvDistr):
def __init__(self, alpha = 1, beta = 1, lmbda = 0):
d = 1 + ChiSquareDistr(2.0 * beta) / NoncentralChiSquareDistr(2 * alpha, lmbda)
super(NoncentralBetaDistr, self).__init__(d)
self.alpha = alpha
self.beta = beta
self.lmbda = lmbda
def __str__(self):
return "NoncentralBetaDistr(alpha={0},beta={1},lambda={2})#{3}".format(self.alpha, self.beta, self.lmbda, self.id())
def getName(self):
return "NoncBeta({0},{1},{2})".format(self.alpha, self.beta, self.lmbda)
class NoncentralFDistr(DivDistr):
def __init__(self, df1 = 1, df2 = 1, lmbda = 0):
d1 = NoncentralChiSquareDistr(df1, lmbda) / df1
d2 = ChiSquareDistr(df2) / df2
super(NoncentralFDistr, self).__init__(d1, d2)
self.df1 = df1
self.df2 = df2
self.lmbda = lmbda
def __str__(self):
return "NoncentralFDistr(df1={0},df2={1},lambda={2})#{3}".format(self.df1, self.df2, self.lmbda, self.id())
def getName(self):
return "NoncF({0},{1},{2})".format(self.df1, self.df2, self.lmbda)
<|fim▁end|> | return d1 |
<|file_name|>noncentral_distr.py<|end_file_name|><|fim▁begin|>from numpy import sqrt
from pacal.standard_distr import NormalDistr, ChiSquareDistr
from pacal.distr import Distr, SumDistr, DivDistr, InvDistr
from pacal.distr import sqrt as distr_sqrt
class NoncentralTDistr(DivDistr):
def <|fim_middle|>(self, df = 2, mu = 0):
d1 = NormalDistr(mu, 1)
d2 = distr_sqrt(ChiSquareDistr(df) / df)
super(NoncentralTDistr, self).__init__(d1, d2)
self.df = df
self.mu = mu
def __str__(self):
return "NoncentralTDistr(df={0},mu={1})#{2}".format(self.df, self.mu, self.id())
def getName(self):
return "NoncT({0},{1})".format(self.df, self.mu)
class NoncentralChiSquareDistr(SumDistr):
def __new__(cls, df, lmbda = 0):
assert df >= 1
d1 = NormalDistr(sqrt(lmbda))**2
if df == 1:
return d1
d2 = ChiSquareDistr(df - 1)
ncc2 = super(NoncentralChiSquareDistr, cls).__new__(cls, d1, d2)
super(NoncentralChiSquareDistr, ncc2).__init__(d1, d2)
ncc2.df = df
ncc2.lmbda = lmbda
return ncc2
def __init__(self, df, lmbda = 0):
pass
def __str__(self):
return "NoncentralChiSquare(df={0},lambda={1})#{2}".format(self.df, self.lmbda, self.id())
def getName(self):
return "NoncChi2({0},{1})".format(self.df, self.lmbda)
class NoncentralBetaDistr(InvDistr):
def __init__(self, alpha = 1, beta = 1, lmbda = 0):
d = 1 + ChiSquareDistr(2.0 * beta) / NoncentralChiSquareDistr(2 * alpha, lmbda)
super(NoncentralBetaDistr, self).__init__(d)
self.alpha = alpha
self.beta = beta
self.lmbda = lmbda
def __str__(self):
return "NoncentralBetaDistr(alpha={0},beta={1},lambda={2})#{3}".format(self.alpha, self.beta, self.lmbda, self.id())
def getName(self):
return "NoncBeta({0},{1},{2})".format(self.alpha, self.beta, self.lmbda)
class NoncentralFDistr(DivDistr):
def __init__(self, df1 = 1, df2 = 1, lmbda = 0):
d1 = NoncentralChiSquareDistr(df1, lmbda) / df1
d2 = ChiSquareDistr(df2) / df2
super(NoncentralFDistr, self).__init__(d1, d2)
self.df1 = df1
self.df2 = df2
self.lmbda = lmbda
def __str__(self):
return "NoncentralFDistr(df1={0},df2={1},lambda={2})#{3}".format(self.df1, self.df2, self.lmbda, self.id())
def getName(self):
return "NoncF({0},{1},{2})".format(self.df1, self.df2, self.lmbda)
<|fim▁end|> | __init__ |
<|file_name|>noncentral_distr.py<|end_file_name|><|fim▁begin|>from numpy import sqrt
from pacal.standard_distr import NormalDistr, ChiSquareDistr
from pacal.distr import Distr, SumDistr, DivDistr, InvDistr
from pacal.distr import sqrt as distr_sqrt
class NoncentralTDistr(DivDistr):
def __init__(self, df = 2, mu = 0):
d1 = NormalDistr(mu, 1)
d2 = distr_sqrt(ChiSquareDistr(df) / df)
super(NoncentralTDistr, self).__init__(d1, d2)
self.df = df
self.mu = mu
def <|fim_middle|>(self):
return "NoncentralTDistr(df={0},mu={1})#{2}".format(self.df, self.mu, self.id())
def getName(self):
return "NoncT({0},{1})".format(self.df, self.mu)
class NoncentralChiSquareDistr(SumDistr):
def __new__(cls, df, lmbda = 0):
assert df >= 1
d1 = NormalDistr(sqrt(lmbda))**2
if df == 1:
return d1
d2 = ChiSquareDistr(df - 1)
ncc2 = super(NoncentralChiSquareDistr, cls).__new__(cls, d1, d2)
super(NoncentralChiSquareDistr, ncc2).__init__(d1, d2)
ncc2.df = df
ncc2.lmbda = lmbda
return ncc2
def __init__(self, df, lmbda = 0):
pass
def __str__(self):
return "NoncentralChiSquare(df={0},lambda={1})#{2}".format(self.df, self.lmbda, self.id())
def getName(self):
return "NoncChi2({0},{1})".format(self.df, self.lmbda)
class NoncentralBetaDistr(InvDistr):
def __init__(self, alpha = 1, beta = 1, lmbda = 0):
d = 1 + ChiSquareDistr(2.0 * beta) / NoncentralChiSquareDistr(2 * alpha, lmbda)
super(NoncentralBetaDistr, self).__init__(d)
self.alpha = alpha
self.beta = beta
self.lmbda = lmbda
def __str__(self):
return "NoncentralBetaDistr(alpha={0},beta={1},lambda={2})#{3}".format(self.alpha, self.beta, self.lmbda, self.id())
def getName(self):
return "NoncBeta({0},{1},{2})".format(self.alpha, self.beta, self.lmbda)
class NoncentralFDistr(DivDistr):
def __init__(self, df1 = 1, df2 = 1, lmbda = 0):
d1 = NoncentralChiSquareDistr(df1, lmbda) / df1
d2 = ChiSquareDistr(df2) / df2
super(NoncentralFDistr, self).__init__(d1, d2)
self.df1 = df1
self.df2 = df2
self.lmbda = lmbda
def __str__(self):
return "NoncentralFDistr(df1={0},df2={1},lambda={2})#{3}".format(self.df1, self.df2, self.lmbda, self.id())
def getName(self):
return "NoncF({0},{1},{2})".format(self.df1, self.df2, self.lmbda)
<|fim▁end|> | __str__ |
<|file_name|>noncentral_distr.py<|end_file_name|><|fim▁begin|>from numpy import sqrt
from pacal.standard_distr import NormalDistr, ChiSquareDistr
from pacal.distr import Distr, SumDistr, DivDistr, InvDistr
from pacal.distr import sqrt as distr_sqrt
class NoncentralTDistr(DivDistr):
def __init__(self, df = 2, mu = 0):
d1 = NormalDistr(mu, 1)
d2 = distr_sqrt(ChiSquareDistr(df) / df)
super(NoncentralTDistr, self).__init__(d1, d2)
self.df = df
self.mu = mu
def __str__(self):
return "NoncentralTDistr(df={0},mu={1})#{2}".format(self.df, self.mu, self.id())
def <|fim_middle|>(self):
return "NoncT({0},{1})".format(self.df, self.mu)
class NoncentralChiSquareDistr(SumDistr):
def __new__(cls, df, lmbda = 0):
assert df >= 1
d1 = NormalDistr(sqrt(lmbda))**2
if df == 1:
return d1
d2 = ChiSquareDistr(df - 1)
ncc2 = super(NoncentralChiSquareDistr, cls).__new__(cls, d1, d2)
super(NoncentralChiSquareDistr, ncc2).__init__(d1, d2)
ncc2.df = df
ncc2.lmbda = lmbda
return ncc2
def __init__(self, df, lmbda = 0):
pass
def __str__(self):
return "NoncentralChiSquare(df={0},lambda={1})#{2}".format(self.df, self.lmbda, self.id())
def getName(self):
return "NoncChi2({0},{1})".format(self.df, self.lmbda)
class NoncentralBetaDistr(InvDistr):
def __init__(self, alpha = 1, beta = 1, lmbda = 0):
d = 1 + ChiSquareDistr(2.0 * beta) / NoncentralChiSquareDistr(2 * alpha, lmbda)
super(NoncentralBetaDistr, self).__init__(d)
self.alpha = alpha
self.beta = beta
self.lmbda = lmbda
def __str__(self):
return "NoncentralBetaDistr(alpha={0},beta={1},lambda={2})#{3}".format(self.alpha, self.beta, self.lmbda, self.id())
def getName(self):
return "NoncBeta({0},{1},{2})".format(self.alpha, self.beta, self.lmbda)
class NoncentralFDistr(DivDistr):
def __init__(self, df1 = 1, df2 = 1, lmbda = 0):
d1 = NoncentralChiSquareDistr(df1, lmbda) / df1
d2 = ChiSquareDistr(df2) / df2
super(NoncentralFDistr, self).__init__(d1, d2)
self.df1 = df1
self.df2 = df2
self.lmbda = lmbda
def __str__(self):
return "NoncentralFDistr(df1={0},df2={1},lambda={2})#{3}".format(self.df1, self.df2, self.lmbda, self.id())
def getName(self):
return "NoncF({0},{1},{2})".format(self.df1, self.df2, self.lmbda)
<|fim▁end|> | getName |
<|file_name|>noncentral_distr.py<|end_file_name|><|fim▁begin|>from numpy import sqrt
from pacal.standard_distr import NormalDistr, ChiSquareDistr
from pacal.distr import Distr, SumDistr, DivDistr, InvDistr
from pacal.distr import sqrt as distr_sqrt
class NoncentralTDistr(DivDistr):
def __init__(self, df = 2, mu = 0):
d1 = NormalDistr(mu, 1)
d2 = distr_sqrt(ChiSquareDistr(df) / df)
super(NoncentralTDistr, self).__init__(d1, d2)
self.df = df
self.mu = mu
def __str__(self):
return "NoncentralTDistr(df={0},mu={1})#{2}".format(self.df, self.mu, self.id())
def getName(self):
return "NoncT({0},{1})".format(self.df, self.mu)
class NoncentralChiSquareDistr(SumDistr):
def <|fim_middle|>(cls, df, lmbda = 0):
assert df >= 1
d1 = NormalDistr(sqrt(lmbda))**2
if df == 1:
return d1
d2 = ChiSquareDistr(df - 1)
ncc2 = super(NoncentralChiSquareDistr, cls).__new__(cls, d1, d2)
super(NoncentralChiSquareDistr, ncc2).__init__(d1, d2)
ncc2.df = df
ncc2.lmbda = lmbda
return ncc2
def __init__(self, df, lmbda = 0):
pass
def __str__(self):
return "NoncentralChiSquare(df={0},lambda={1})#{2}".format(self.df, self.lmbda, self.id())
def getName(self):
return "NoncChi2({0},{1})".format(self.df, self.lmbda)
class NoncentralBetaDistr(InvDistr):
def __init__(self, alpha = 1, beta = 1, lmbda = 0):
d = 1 + ChiSquareDistr(2.0 * beta) / NoncentralChiSquareDistr(2 * alpha, lmbda)
super(NoncentralBetaDistr, self).__init__(d)
self.alpha = alpha
self.beta = beta
self.lmbda = lmbda
def __str__(self):
return "NoncentralBetaDistr(alpha={0},beta={1},lambda={2})#{3}".format(self.alpha, self.beta, self.lmbda, self.id())
def getName(self):
return "NoncBeta({0},{1},{2})".format(self.alpha, self.beta, self.lmbda)
class NoncentralFDistr(DivDistr):
def __init__(self, df1 = 1, df2 = 1, lmbda = 0):
d1 = NoncentralChiSquareDistr(df1, lmbda) / df1
d2 = ChiSquareDistr(df2) / df2
super(NoncentralFDistr, self).__init__(d1, d2)
self.df1 = df1
self.df2 = df2
self.lmbda = lmbda
def __str__(self):
return "NoncentralFDistr(df1={0},df2={1},lambda={2})#{3}".format(self.df1, self.df2, self.lmbda, self.id())
def getName(self):
return "NoncF({0},{1},{2})".format(self.df1, self.df2, self.lmbda)
<|fim▁end|> | __new__ |
<|file_name|>noncentral_distr.py<|end_file_name|><|fim▁begin|>from numpy import sqrt
from pacal.standard_distr import NormalDistr, ChiSquareDistr
from pacal.distr import Distr, SumDistr, DivDistr, InvDistr
from pacal.distr import sqrt as distr_sqrt
class NoncentralTDistr(DivDistr):
def __init__(self, df = 2, mu = 0):
d1 = NormalDistr(mu, 1)
d2 = distr_sqrt(ChiSquareDistr(df) / df)
super(NoncentralTDistr, self).__init__(d1, d2)
self.df = df
self.mu = mu
def __str__(self):
return "NoncentralTDistr(df={0},mu={1})#{2}".format(self.df, self.mu, self.id())
def getName(self):
return "NoncT({0},{1})".format(self.df, self.mu)
class NoncentralChiSquareDistr(SumDistr):
def __new__(cls, df, lmbda = 0):
assert df >= 1
d1 = NormalDistr(sqrt(lmbda))**2
if df == 1:
return d1
d2 = ChiSquareDistr(df - 1)
ncc2 = super(NoncentralChiSquareDistr, cls).__new__(cls, d1, d2)
super(NoncentralChiSquareDistr, ncc2).__init__(d1, d2)
ncc2.df = df
ncc2.lmbda = lmbda
return ncc2
def <|fim_middle|>(self, df, lmbda = 0):
pass
def __str__(self):
return "NoncentralChiSquare(df={0},lambda={1})#{2}".format(self.df, self.lmbda, self.id())
def getName(self):
return "NoncChi2({0},{1})".format(self.df, self.lmbda)
class NoncentralBetaDistr(InvDistr):
def __init__(self, alpha = 1, beta = 1, lmbda = 0):
d = 1 + ChiSquareDistr(2.0 * beta) / NoncentralChiSquareDistr(2 * alpha, lmbda)
super(NoncentralBetaDistr, self).__init__(d)
self.alpha = alpha
self.beta = beta
self.lmbda = lmbda
def __str__(self):
return "NoncentralBetaDistr(alpha={0},beta={1},lambda={2})#{3}".format(self.alpha, self.beta, self.lmbda, self.id())
def getName(self):
return "NoncBeta({0},{1},{2})".format(self.alpha, self.beta, self.lmbda)
class NoncentralFDistr(DivDistr):
def __init__(self, df1 = 1, df2 = 1, lmbda = 0):
d1 = NoncentralChiSquareDistr(df1, lmbda) / df1
d2 = ChiSquareDistr(df2) / df2
super(NoncentralFDistr, self).__init__(d1, d2)
self.df1 = df1
self.df2 = df2
self.lmbda = lmbda
def __str__(self):
return "NoncentralFDistr(df1={0},df2={1},lambda={2})#{3}".format(self.df1, self.df2, self.lmbda, self.id())
def getName(self):
return "NoncF({0},{1},{2})".format(self.df1, self.df2, self.lmbda)
<|fim▁end|> | __init__ |
<|file_name|>noncentral_distr.py<|end_file_name|><|fim▁begin|>from numpy import sqrt
from pacal.standard_distr import NormalDistr, ChiSquareDistr
from pacal.distr import Distr, SumDistr, DivDistr, InvDistr
from pacal.distr import sqrt as distr_sqrt
class NoncentralTDistr(DivDistr):
def __init__(self, df = 2, mu = 0):
d1 = NormalDistr(mu, 1)
d2 = distr_sqrt(ChiSquareDistr(df) / df)
super(NoncentralTDistr, self).__init__(d1, d2)
self.df = df
self.mu = mu
def __str__(self):
return "NoncentralTDistr(df={0},mu={1})#{2}".format(self.df, self.mu, self.id())
def getName(self):
return "NoncT({0},{1})".format(self.df, self.mu)
class NoncentralChiSquareDistr(SumDistr):
def __new__(cls, df, lmbda = 0):
assert df >= 1
d1 = NormalDistr(sqrt(lmbda))**2
if df == 1:
return d1
d2 = ChiSquareDistr(df - 1)
ncc2 = super(NoncentralChiSquareDistr, cls).__new__(cls, d1, d2)
super(NoncentralChiSquareDistr, ncc2).__init__(d1, d2)
ncc2.df = df
ncc2.lmbda = lmbda
return ncc2
def __init__(self, df, lmbda = 0):
pass
def <|fim_middle|>(self):
return "NoncentralChiSquare(df={0},lambda={1})#{2}".format(self.df, self.lmbda, self.id())
def getName(self):
return "NoncChi2({0},{1})".format(self.df, self.lmbda)
class NoncentralBetaDistr(InvDistr):
def __init__(self, alpha = 1, beta = 1, lmbda = 0):
d = 1 + ChiSquareDistr(2.0 * beta) / NoncentralChiSquareDistr(2 * alpha, lmbda)
super(NoncentralBetaDistr, self).__init__(d)
self.alpha = alpha
self.beta = beta
self.lmbda = lmbda
def __str__(self):
return "NoncentralBetaDistr(alpha={0},beta={1},lambda={2})#{3}".format(self.alpha, self.beta, self.lmbda, self.id())
def getName(self):
return "NoncBeta({0},{1},{2})".format(self.alpha, self.beta, self.lmbda)
class NoncentralFDistr(DivDistr):
def __init__(self, df1 = 1, df2 = 1, lmbda = 0):
d1 = NoncentralChiSquareDistr(df1, lmbda) / df1
d2 = ChiSquareDistr(df2) / df2
super(NoncentralFDistr, self).__init__(d1, d2)
self.df1 = df1
self.df2 = df2
self.lmbda = lmbda
def __str__(self):
return "NoncentralFDistr(df1={0},df2={1},lambda={2})#{3}".format(self.df1, self.df2, self.lmbda, self.id())
def getName(self):
return "NoncF({0},{1},{2})".format(self.df1, self.df2, self.lmbda)
<|fim▁end|> | __str__ |
<|file_name|>noncentral_distr.py<|end_file_name|><|fim▁begin|>from numpy import sqrt
from pacal.standard_distr import NormalDistr, ChiSquareDistr
from pacal.distr import Distr, SumDistr, DivDistr, InvDistr
from pacal.distr import sqrt as distr_sqrt
class NoncentralTDistr(DivDistr):
def __init__(self, df = 2, mu = 0):
d1 = NormalDistr(mu, 1)
d2 = distr_sqrt(ChiSquareDistr(df) / df)
super(NoncentralTDistr, self).__init__(d1, d2)
self.df = df
self.mu = mu
def __str__(self):
return "NoncentralTDistr(df={0},mu={1})#{2}".format(self.df, self.mu, self.id())
def getName(self):
return "NoncT({0},{1})".format(self.df, self.mu)
class NoncentralChiSquareDistr(SumDistr):
def __new__(cls, df, lmbda = 0):
assert df >= 1
d1 = NormalDistr(sqrt(lmbda))**2
if df == 1:
return d1
d2 = ChiSquareDistr(df - 1)
ncc2 = super(NoncentralChiSquareDistr, cls).__new__(cls, d1, d2)
super(NoncentralChiSquareDistr, ncc2).__init__(d1, d2)
ncc2.df = df
ncc2.lmbda = lmbda
return ncc2
def __init__(self, df, lmbda = 0):
pass
def __str__(self):
return "NoncentralChiSquare(df={0},lambda={1})#{2}".format(self.df, self.lmbda, self.id())
def <|fim_middle|>(self):
return "NoncChi2({0},{1})".format(self.df, self.lmbda)
class NoncentralBetaDistr(InvDistr):
def __init__(self, alpha = 1, beta = 1, lmbda = 0):
d = 1 + ChiSquareDistr(2.0 * beta) / NoncentralChiSquareDistr(2 * alpha, lmbda)
super(NoncentralBetaDistr, self).__init__(d)
self.alpha = alpha
self.beta = beta
self.lmbda = lmbda
def __str__(self):
return "NoncentralBetaDistr(alpha={0},beta={1},lambda={2})#{3}".format(self.alpha, self.beta, self.lmbda, self.id())
def getName(self):
return "NoncBeta({0},{1},{2})".format(self.alpha, self.beta, self.lmbda)
class NoncentralFDistr(DivDistr):
def __init__(self, df1 = 1, df2 = 1, lmbda = 0):
d1 = NoncentralChiSquareDistr(df1, lmbda) / df1
d2 = ChiSquareDistr(df2) / df2
super(NoncentralFDistr, self).__init__(d1, d2)
self.df1 = df1
self.df2 = df2
self.lmbda = lmbda
def __str__(self):
return "NoncentralFDistr(df1={0},df2={1},lambda={2})#{3}".format(self.df1, self.df2, self.lmbda, self.id())
def getName(self):
return "NoncF({0},{1},{2})".format(self.df1, self.df2, self.lmbda)
<|fim▁end|> | getName |
<|file_name|>noncentral_distr.py<|end_file_name|><|fim▁begin|>from numpy import sqrt
from pacal.standard_distr import NormalDistr, ChiSquareDistr
from pacal.distr import Distr, SumDistr, DivDistr, InvDistr
from pacal.distr import sqrt as distr_sqrt
class NoncentralTDistr(DivDistr):
def __init__(self, df = 2, mu = 0):
d1 = NormalDistr(mu, 1)
d2 = distr_sqrt(ChiSquareDistr(df) / df)
super(NoncentralTDistr, self).__init__(d1, d2)
self.df = df
self.mu = mu
def __str__(self):
return "NoncentralTDistr(df={0},mu={1})#{2}".format(self.df, self.mu, self.id())
def getName(self):
return "NoncT({0},{1})".format(self.df, self.mu)
class NoncentralChiSquareDistr(SumDistr):
def __new__(cls, df, lmbda = 0):
assert df >= 1
d1 = NormalDistr(sqrt(lmbda))**2
if df == 1:
return d1
d2 = ChiSquareDistr(df - 1)
ncc2 = super(NoncentralChiSquareDistr, cls).__new__(cls, d1, d2)
super(NoncentralChiSquareDistr, ncc2).__init__(d1, d2)
ncc2.df = df
ncc2.lmbda = lmbda
return ncc2
def __init__(self, df, lmbda = 0):
pass
def __str__(self):
return "NoncentralChiSquare(df={0},lambda={1})#{2}".format(self.df, self.lmbda, self.id())
def getName(self):
return "NoncChi2({0},{1})".format(self.df, self.lmbda)
class NoncentralBetaDistr(InvDistr):
def <|fim_middle|>(self, alpha = 1, beta = 1, lmbda = 0):
d = 1 + ChiSquareDistr(2.0 * beta) / NoncentralChiSquareDistr(2 * alpha, lmbda)
super(NoncentralBetaDistr, self).__init__(d)
self.alpha = alpha
self.beta = beta
self.lmbda = lmbda
def __str__(self):
return "NoncentralBetaDistr(alpha={0},beta={1},lambda={2})#{3}".format(self.alpha, self.beta, self.lmbda, self.id())
def getName(self):
return "NoncBeta({0},{1},{2})".format(self.alpha, self.beta, self.lmbda)
class NoncentralFDistr(DivDistr):
def __init__(self, df1 = 1, df2 = 1, lmbda = 0):
d1 = NoncentralChiSquareDistr(df1, lmbda) / df1
d2 = ChiSquareDistr(df2) / df2
super(NoncentralFDistr, self).__init__(d1, d2)
self.df1 = df1
self.df2 = df2
self.lmbda = lmbda
def __str__(self):
return "NoncentralFDistr(df1={0},df2={1},lambda={2})#{3}".format(self.df1, self.df2, self.lmbda, self.id())
def getName(self):
return "NoncF({0},{1},{2})".format(self.df1, self.df2, self.lmbda)
<|fim▁end|> | __init__ |
<|file_name|>noncentral_distr.py<|end_file_name|><|fim▁begin|>from numpy import sqrt
from pacal.standard_distr import NormalDistr, ChiSquareDistr
from pacal.distr import Distr, SumDistr, DivDistr, InvDistr
from pacal.distr import sqrt as distr_sqrt
class NoncentralTDistr(DivDistr):
def __init__(self, df = 2, mu = 0):
d1 = NormalDistr(mu, 1)
d2 = distr_sqrt(ChiSquareDistr(df) / df)
super(NoncentralTDistr, self).__init__(d1, d2)
self.df = df
self.mu = mu
def __str__(self):
return "NoncentralTDistr(df={0},mu={1})#{2}".format(self.df, self.mu, self.id())
def getName(self):
return "NoncT({0},{1})".format(self.df, self.mu)
class NoncentralChiSquareDistr(SumDistr):
def __new__(cls, df, lmbda = 0):
assert df >= 1
d1 = NormalDistr(sqrt(lmbda))**2
if df == 1:
return d1
d2 = ChiSquareDistr(df - 1)
ncc2 = super(NoncentralChiSquareDistr, cls).__new__(cls, d1, d2)
super(NoncentralChiSquareDistr, ncc2).__init__(d1, d2)
ncc2.df = df
ncc2.lmbda = lmbda
return ncc2
def __init__(self, df, lmbda = 0):
pass
def __str__(self):
return "NoncentralChiSquare(df={0},lambda={1})#{2}".format(self.df, self.lmbda, self.id())
def getName(self):
return "NoncChi2({0},{1})".format(self.df, self.lmbda)
class NoncentralBetaDistr(InvDistr):
def __init__(self, alpha = 1, beta = 1, lmbda = 0):
d = 1 + ChiSquareDistr(2.0 * beta) / NoncentralChiSquareDistr(2 * alpha, lmbda)
super(NoncentralBetaDistr, self).__init__(d)
self.alpha = alpha
self.beta = beta
self.lmbda = lmbda
def <|fim_middle|>(self):
return "NoncentralBetaDistr(alpha={0},beta={1},lambda={2})#{3}".format(self.alpha, self.beta, self.lmbda, self.id())
def getName(self):
return "NoncBeta({0},{1},{2})".format(self.alpha, self.beta, self.lmbda)
class NoncentralFDistr(DivDistr):
def __init__(self, df1 = 1, df2 = 1, lmbda = 0):
d1 = NoncentralChiSquareDistr(df1, lmbda) / df1
d2 = ChiSquareDistr(df2) / df2
super(NoncentralFDistr, self).__init__(d1, d2)
self.df1 = df1
self.df2 = df2
self.lmbda = lmbda
def __str__(self):
return "NoncentralFDistr(df1={0},df2={1},lambda={2})#{3}".format(self.df1, self.df2, self.lmbda, self.id())
def getName(self):
return "NoncF({0},{1},{2})".format(self.df1, self.df2, self.lmbda)
<|fim▁end|> | __str__ |
<|file_name|>noncentral_distr.py<|end_file_name|><|fim▁begin|>from numpy import sqrt
from pacal.standard_distr import NormalDistr, ChiSquareDistr
from pacal.distr import Distr, SumDistr, DivDistr, InvDistr
from pacal.distr import sqrt as distr_sqrt
class NoncentralTDistr(DivDistr):
def __init__(self, df = 2, mu = 0):
d1 = NormalDistr(mu, 1)
d2 = distr_sqrt(ChiSquareDistr(df) / df)
super(NoncentralTDistr, self).__init__(d1, d2)
self.df = df
self.mu = mu
def __str__(self):
return "NoncentralTDistr(df={0},mu={1})#{2}".format(self.df, self.mu, self.id())
def getName(self):
return "NoncT({0},{1})".format(self.df, self.mu)
class NoncentralChiSquareDistr(SumDistr):
def __new__(cls, df, lmbda = 0):
assert df >= 1
d1 = NormalDistr(sqrt(lmbda))**2
if df == 1:
return d1
d2 = ChiSquareDistr(df - 1)
ncc2 = super(NoncentralChiSquareDistr, cls).__new__(cls, d1, d2)
super(NoncentralChiSquareDistr, ncc2).__init__(d1, d2)
ncc2.df = df
ncc2.lmbda = lmbda
return ncc2
def __init__(self, df, lmbda = 0):
pass
def __str__(self):
return "NoncentralChiSquare(df={0},lambda={1})#{2}".format(self.df, self.lmbda, self.id())
def getName(self):
return "NoncChi2({0},{1})".format(self.df, self.lmbda)
class NoncentralBetaDistr(InvDistr):
def __init__(self, alpha = 1, beta = 1, lmbda = 0):
d = 1 + ChiSquareDistr(2.0 * beta) / NoncentralChiSquareDistr(2 * alpha, lmbda)
super(NoncentralBetaDistr, self).__init__(d)
self.alpha = alpha
self.beta = beta
self.lmbda = lmbda
def __str__(self):
return "NoncentralBetaDistr(alpha={0},beta={1},lambda={2})#{3}".format(self.alpha, self.beta, self.lmbda, self.id())
def <|fim_middle|>(self):
return "NoncBeta({0},{1},{2})".format(self.alpha, self.beta, self.lmbda)
class NoncentralFDistr(DivDistr):
def __init__(self, df1 = 1, df2 = 1, lmbda = 0):
d1 = NoncentralChiSquareDistr(df1, lmbda) / df1
d2 = ChiSquareDistr(df2) / df2
super(NoncentralFDistr, self).__init__(d1, d2)
self.df1 = df1
self.df2 = df2
self.lmbda = lmbda
def __str__(self):
return "NoncentralFDistr(df1={0},df2={1},lambda={2})#{3}".format(self.df1, self.df2, self.lmbda, self.id())
def getName(self):
return "NoncF({0},{1},{2})".format(self.df1, self.df2, self.lmbda)
<|fim▁end|> | getName |
<|file_name|>noncentral_distr.py<|end_file_name|><|fim▁begin|>from numpy import sqrt
from pacal.standard_distr import NormalDistr, ChiSquareDistr
from pacal.distr import Distr, SumDistr, DivDistr, InvDistr
from pacal.distr import sqrt as distr_sqrt
class NoncentralTDistr(DivDistr):
def __init__(self, df = 2, mu = 0):
d1 = NormalDistr(mu, 1)
d2 = distr_sqrt(ChiSquareDistr(df) / df)
super(NoncentralTDistr, self).__init__(d1, d2)
self.df = df
self.mu = mu
def __str__(self):
return "NoncentralTDistr(df={0},mu={1})#{2}".format(self.df, self.mu, self.id())
def getName(self):
return "NoncT({0},{1})".format(self.df, self.mu)
class NoncentralChiSquareDistr(SumDistr):
def __new__(cls, df, lmbda = 0):
assert df >= 1
d1 = NormalDistr(sqrt(lmbda))**2
if df == 1:
return d1
d2 = ChiSquareDistr(df - 1)
ncc2 = super(NoncentralChiSquareDistr, cls).__new__(cls, d1, d2)
super(NoncentralChiSquareDistr, ncc2).__init__(d1, d2)
ncc2.df = df
ncc2.lmbda = lmbda
return ncc2
def __init__(self, df, lmbda = 0):
pass
def __str__(self):
return "NoncentralChiSquare(df={0},lambda={1})#{2}".format(self.df, self.lmbda, self.id())
def getName(self):
return "NoncChi2({0},{1})".format(self.df, self.lmbda)
class NoncentralBetaDistr(InvDistr):
def __init__(self, alpha = 1, beta = 1, lmbda = 0):
d = 1 + ChiSquareDistr(2.0 * beta) / NoncentralChiSquareDistr(2 * alpha, lmbda)
super(NoncentralBetaDistr, self).__init__(d)
self.alpha = alpha
self.beta = beta
self.lmbda = lmbda
def __str__(self):
return "NoncentralBetaDistr(alpha={0},beta={1},lambda={2})#{3}".format(self.alpha, self.beta, self.lmbda, self.id())
def getName(self):
return "NoncBeta({0},{1},{2})".format(self.alpha, self.beta, self.lmbda)
class NoncentralFDistr(DivDistr):
def <|fim_middle|>(self, df1 = 1, df2 = 1, lmbda = 0):
d1 = NoncentralChiSquareDistr(df1, lmbda) / df1
d2 = ChiSquareDistr(df2) / df2
super(NoncentralFDistr, self).__init__(d1, d2)
self.df1 = df1
self.df2 = df2
self.lmbda = lmbda
def __str__(self):
return "NoncentralFDistr(df1={0},df2={1},lambda={2})#{3}".format(self.df1, self.df2, self.lmbda, self.id())
def getName(self):
return "NoncF({0},{1},{2})".format(self.df1, self.df2, self.lmbda)
<|fim▁end|> | __init__ |
<|file_name|>noncentral_distr.py<|end_file_name|><|fim▁begin|>from numpy import sqrt
from pacal.standard_distr import NormalDistr, ChiSquareDistr
from pacal.distr import Distr, SumDistr, DivDistr, InvDistr
from pacal.distr import sqrt as distr_sqrt
class NoncentralTDistr(DivDistr):
def __init__(self, df = 2, mu = 0):
d1 = NormalDistr(mu, 1)
d2 = distr_sqrt(ChiSquareDistr(df) / df)
super(NoncentralTDistr, self).__init__(d1, d2)
self.df = df
self.mu = mu
def __str__(self):
return "NoncentralTDistr(df={0},mu={1})#{2}".format(self.df, self.mu, self.id())
def getName(self):
return "NoncT({0},{1})".format(self.df, self.mu)
class NoncentralChiSquareDistr(SumDistr):
def __new__(cls, df, lmbda = 0):
assert df >= 1
d1 = NormalDistr(sqrt(lmbda))**2
if df == 1:
return d1
d2 = ChiSquareDistr(df - 1)
ncc2 = super(NoncentralChiSquareDistr, cls).__new__(cls, d1, d2)
super(NoncentralChiSquareDistr, ncc2).__init__(d1, d2)
ncc2.df = df
ncc2.lmbda = lmbda
return ncc2
def __init__(self, df, lmbda = 0):
pass
def __str__(self):
return "NoncentralChiSquare(df={0},lambda={1})#{2}".format(self.df, self.lmbda, self.id())
def getName(self):
return "NoncChi2({0},{1})".format(self.df, self.lmbda)
class NoncentralBetaDistr(InvDistr):
def __init__(self, alpha = 1, beta = 1, lmbda = 0):
d = 1 + ChiSquareDistr(2.0 * beta) / NoncentralChiSquareDistr(2 * alpha, lmbda)
super(NoncentralBetaDistr, self).__init__(d)
self.alpha = alpha
self.beta = beta
self.lmbda = lmbda
def __str__(self):
return "NoncentralBetaDistr(alpha={0},beta={1},lambda={2})#{3}".format(self.alpha, self.beta, self.lmbda, self.id())
def getName(self):
return "NoncBeta({0},{1},{2})".format(self.alpha, self.beta, self.lmbda)
class NoncentralFDistr(DivDistr):
def __init__(self, df1 = 1, df2 = 1, lmbda = 0):
d1 = NoncentralChiSquareDistr(df1, lmbda) / df1
d2 = ChiSquareDistr(df2) / df2
super(NoncentralFDistr, self).__init__(d1, d2)
self.df1 = df1
self.df2 = df2
self.lmbda = lmbda
def <|fim_middle|>(self):
return "NoncentralFDistr(df1={0},df2={1},lambda={2})#{3}".format(self.df1, self.df2, self.lmbda, self.id())
def getName(self):
return "NoncF({0},{1},{2})".format(self.df1, self.df2, self.lmbda)
<|fim▁end|> | __str__ |
<|file_name|>noncentral_distr.py<|end_file_name|><|fim▁begin|>from numpy import sqrt
from pacal.standard_distr import NormalDistr, ChiSquareDistr
from pacal.distr import Distr, SumDistr, DivDistr, InvDistr
from pacal.distr import sqrt as distr_sqrt
class NoncentralTDistr(DivDistr):
def __init__(self, df = 2, mu = 0):
d1 = NormalDistr(mu, 1)
d2 = distr_sqrt(ChiSquareDistr(df) / df)
super(NoncentralTDistr, self).__init__(d1, d2)
self.df = df
self.mu = mu
def __str__(self):
return "NoncentralTDistr(df={0},mu={1})#{2}".format(self.df, self.mu, self.id())
def getName(self):
return "NoncT({0},{1})".format(self.df, self.mu)
class NoncentralChiSquareDistr(SumDistr):
def __new__(cls, df, lmbda = 0):
assert df >= 1
d1 = NormalDistr(sqrt(lmbda))**2
if df == 1:
return d1
d2 = ChiSquareDistr(df - 1)
ncc2 = super(NoncentralChiSquareDistr, cls).__new__(cls, d1, d2)
super(NoncentralChiSquareDistr, ncc2).__init__(d1, d2)
ncc2.df = df
ncc2.lmbda = lmbda
return ncc2
def __init__(self, df, lmbda = 0):
pass
def __str__(self):
return "NoncentralChiSquare(df={0},lambda={1})#{2}".format(self.df, self.lmbda, self.id())
def getName(self):
return "NoncChi2({0},{1})".format(self.df, self.lmbda)
class NoncentralBetaDistr(InvDistr):
def __init__(self, alpha = 1, beta = 1, lmbda = 0):
d = 1 + ChiSquareDistr(2.0 * beta) / NoncentralChiSquareDistr(2 * alpha, lmbda)
super(NoncentralBetaDistr, self).__init__(d)
self.alpha = alpha
self.beta = beta
self.lmbda = lmbda
def __str__(self):
return "NoncentralBetaDistr(alpha={0},beta={1},lambda={2})#{3}".format(self.alpha, self.beta, self.lmbda, self.id())
def getName(self):
return "NoncBeta({0},{1},{2})".format(self.alpha, self.beta, self.lmbda)
class NoncentralFDistr(DivDistr):
def __init__(self, df1 = 1, df2 = 1, lmbda = 0):
d1 = NoncentralChiSquareDistr(df1, lmbda) / df1
d2 = ChiSquareDistr(df2) / df2
super(NoncentralFDistr, self).__init__(d1, d2)
self.df1 = df1
self.df2 = df2
self.lmbda = lmbda
def __str__(self):
return "NoncentralFDistr(df1={0},df2={1},lambda={2})#{3}".format(self.df1, self.df2, self.lmbda, self.id())
def <|fim_middle|>(self):
return "NoncF({0},{1},{2})".format(self.df1, self.df2, self.lmbda)
<|fim▁end|> | getName |
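The three noncentral_distr.py rows quote the same pacal module, which builds noncentral t, chi-square, beta and F distributions by arithmetic on NormalDistr and ChiSquareDistr objects (for example, the noncentral t is Normal(mu, 1) divided by sqrt(ChiSquare(df)/df)). A short usage sketch, assuming pacal and the quoted module are importable and that the constructors behave exactly as the quoted class definitions suggest; it is not part of the dataset.

# Illustrative only: exercises the constructors exactly as defined in the quoted file.
from noncentral_distr import NoncentralTDistr, NoncentralChiSquareDistr, NoncentralFDistr

nct = NoncentralTDistr(df=5, mu=1.5)        # Normal(1.5, 1) / sqrt(ChiSquare(5) / 5)
ncc = NoncentralChiSquareDistr(3, lmbda=2)  # Normal(sqrt(2))**2 + ChiSquare(2)
ncf = NoncentralFDistr(df1=4, df2=7, lmbda=1)

print(nct.getName())   # "NoncT(5,1.5)"
print(ncc.getName())   # "NoncChi2(3,2)"
print(ncf.getName())   # "NoncF(4,7,1)"
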
<|file_name|>pythonutil.py<|end_file_name|><|fim▁begin|>from socket import inet_ntoa
from struct import pack
<|fim▁hole|> for i in xrange(32 - mask, 32):
bits |= (1 << i)
packed_value = pack('!I', bits)
addr = inet_ntoa(packed_value)
return addr<|fim▁end|> |
def calcDottedNetmask(mask):
bits = 0 |
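The pythonutil.py rows centre on calcDottedNetmask, which turns a CIDR prefix length into a dotted-quad netmask by setting the top `mask` bits of a 32-bit integer and packing it as an IPv4 address; the quoted code is Python 2 (xrange). A Python 3 restatement of the same idea, included only to make the bit manipulation concrete and not part of the dataset.

# Python 3 re-statement of the netmask calculation quoted above.
from socket import inet_ntoa
from struct import pack

def dotted_netmask(prefix_len: int) -> str:
    """Return e.g. '255.255.255.0' for prefix_len=24."""
    bits = 0
    for i in range(32 - prefix_len, 32):   # set the top prefix_len bits
        bits |= 1 << i
    return inet_ntoa(pack("!I", bits))

assert dotted_netmask(24) == "255.255.255.0"
assert dotted_netmask(16) == "255.255.0.0"
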
<|file_name|>pythonutil.py<|end_file_name|><|fim▁begin|>from socket import inet_ntoa
from struct import pack
def calcDottedNetmask(mask):
<|fim_middle|>
<|fim▁end|> | bits = 0
for i in xrange(32 - mask, 32):
bits |= (1 << i)
packed_value = pack('!I', bits)
addr = inet_ntoa(packed_value)
return addr |
<|file_name|>pythonutil.py<|end_file_name|><|fim▁begin|>from socket import inet_ntoa
from struct import pack
def <|fim_middle|>(mask):
bits = 0
for i in xrange(32 - mask, 32):
bits |= (1 << i)
packed_value = pack('!I', bits)
addr = inet_ntoa(packed_value)
return addr
<|fim▁end|> | calcDottedNetmask |
<|file_name|>TurtleCommands.py<|end_file_name|><|fim▁begin|>__author__ = 'Alex'
from Movement import Movement
class BaseCommand:
def __init__(self, movement):
assert isinstance(movement, Movement)
self.name = 'unknown'
self.m = movement
def execute(selfself):pass
class Forward(BaseCommand):
def __init__(self, movement):
assert isinstance(movement, Movement)
self.name = 'forward'
self.m = movement
def execute(self):
self.m.moveCM(10)
class Reverse(BaseCommand):
def __init__(self, movement):
assert isinstance(movement, Movement)
self.name = 'reverse'
self.m = movement
def execute(self):
self.m.moveCM(10)<|fim▁hole|> self.name = 'left'
self.m = movement
def execute(self):
self.m.turnDegrees(-90)
class Right(BaseCommand):
def __init__(self, movement):
assert isinstance(movement, Movement)
self.name = 'right'
self.m = movement
def execute(self):
self.m.turnDegrees(90)<|fim▁end|> |
class Left(BaseCommand):
def __init__(self, movement):
assert isinstance(movement, Movement) |
<|file_name|>TurtleCommands.py<|end_file_name|><|fim▁begin|>__author__ = 'Alex'
from Movement import Movement
class BaseCommand:
<|fim_middle|>
class Forward(BaseCommand):
def __init__(self, movement):
assert isinstance(movement, Movement)
self.name = 'forward'
self.m = movement
def execute(self):
self.m.moveCM(10)
class Reverse(BaseCommand):
def __init__(self, movement):
assert isinstance(movement, Movement)
self.name = 'reverse'
self.m = movement
def execute(self):
self.m.moveCM(10)
class Left(BaseCommand):
def __init__(self, movement):
assert isinstance(movement, Movement)
self.name = 'left'
self.m = movement
def execute(self):
self.m.turnDegrees(-90)
class Right(BaseCommand):
def __init__(self, movement):
assert isinstance(movement, Movement)
self.name = 'right'
self.m = movement
def execute(self):
self.m.turnDegrees(90)
<|fim▁end|> | def __init__(self, movement):
assert isinstance(movement, Movement)
self.name = 'unknown'
self.m = movement
def execute(selfself):pass |
<|file_name|>TurtleCommands.py<|end_file_name|><|fim▁begin|>__author__ = 'Alex'
from Movement import Movement
class BaseCommand:
def __init__(self, movement):
<|fim_middle|>
def execute(selfself):pass
class Forward(BaseCommand):
def __init__(self, movement):
assert isinstance(movement, Movement)
self.name = 'forward'
self.m = movement
def execute(self):
self.m.moveCM(10)
class Reverse(BaseCommand):
def __init__(self, movement):
assert isinstance(movement, Movement)
self.name = 'reverse'
self.m = movement
def execute(self):
self.m.moveCM(10)
class Left(BaseCommand):
def __init__(self, movement):
assert isinstance(movement, Movement)
self.name = 'left'
self.m = movement
def execute(self):
self.m.turnDegrees(-90)
class Right(BaseCommand):
def __init__(self, movement):
assert isinstance(movement, Movement)
self.name = 'right'
self.m = movement
def execute(self):
self.m.turnDegrees(90)
<|fim▁end|> | assert isinstance(movement, Movement)
self.name = 'unknown'
self.m = movement |
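The TurtleCommands.py rows implement a small command pattern: every command class wraps a Movement object and exposes execute(), which forwards to moveCM or turnDegrees (the quoted Reverse.execute appears to move by the same +10 cm as Forward, which looks like a bug in the upstream source rather than in the dataset). A sketch of how a caller might dispatch these commands by name; the driver is hypothetical and the Movement constructor signature is assumed.

# Hypothetical driver for the command classes quoted above.
from Movement import Movement
from TurtleCommands import Forward, Reverse, Left, Right

def run_script(movement: Movement, names: list[str]) -> None:
    """Execute a sequence of commands such as ['forward', 'left', 'forward']."""
    # Build a name -> command lookup from the classes quoted above.
    commands = {c.name: c for c in (Forward(movement), Reverse(movement), Left(movement), Right(movement))}
    for name in names:
        commands[name].execute()   # e.g. 'left' -> Movement.turnDegrees(-90)

# Usage (assumes Movement() can be constructed without arguments):
# run_script(Movement(), ["forward", "left", "forward", "right"])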