Merge pull request #2911 from lbryio/file_list_claim_ids
support `claim_id`, `channel_claim_id`, and `outpoint` args given as lists of values to match in `file_list`
jackrobison authored Apr 13, 2020
2 parents b7e95ff + e81b51a commit 695eabd
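
In short, the scalar filters `claim_id`, `channel_claim_id`, and `outpoint` on `file_list` now also accept lists of values. A minimal sketch of how a caller might use this, modeled on the integration test added in this commit; the `daemon` instance and the claim ids are assumed/hypothetical:

    # Sketch only: assumes an initialized lbry Daemon instance named `daemon`
    # and two previously published files; the ids below are placeholders.
    async def list_by_ids(daemon, id_a, id_b):
        # match any file whose claim_id is one of the listed values
        multiple = await daemon.jsonrpc_file_list(claim_id=[id_a, id_b])
        # a single string still behaves as a plain equality filter
        single = await daemon.jsonrpc_file_list(claim_id=id_a)
        return multiple, single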
Showing 3 changed files with 42 additions and 8 deletions.
14 changes: 7 additions & 7 deletions lbry/extras/daemon/daemon.py
@@ -1917,9 +1917,8 @@ def jsonrpc_address_unused(self, account_id=None, wallet_id=None):
"""

@requires(STREAM_MANAGER_COMPONENT)
async def jsonrpc_file_list(
self, sort=None, reverse=False, comparison=None,
wallet_id=None, page=None, page_size=None, **kwargs):
async def jsonrpc_file_list(self, sort=None, reverse=False, comparison=None, wallet_id=None, page=None,
page_size=None, **kwargs):
"""
List files limited by optional filters
@@ -1940,17 +1939,17 @@ async def jsonrpc_file_list(
             --stream_hash=<stream_hash> : (str) get file with matching stream hash
             --rowid=<rowid> : (int) get file with matching row id
             --added_on=<added_on> : (int) get file with matching time of insertion
-            --claim_id=<claim_id> : (str) get file with matching claim id
-            --outpoint=<outpoint> : (str) get file with matching claim outpoint
+            --claim_id=<claim_id> : (str) get file with matching claim id(s)
+            --outpoint=<outpoint> : (str) get file with matching claim outpoint(s)
             --txid=<txid> : (str) get file with matching claim txid
             --nout=<nout> : (int) get file with matching claim nout
-            --channel_claim_id=<channel_claim_id> : (str) get file with matching channel claim id
+            --channel_claim_id=<channel_claim_id> : (str) get file with matching channel claim id(s)
             --channel_name=<channel_name> : (str) get file with matching channel name
             --claim_name=<claim_name> : (str) get file with matching claim name
             --blobs_in_stream<blobs_in_stream> : (int) get file with matching blobs in stream
             --blobs_remaining=<blobs_remaining> : (int) amount of remaining blobs to download
             --sort=<sort_by> : (str) field to sort by (one of the above filter fields)
-            --comparison=<comparison> : (str) logical comparison, (eq | ne | g | ge | l | le)
+            --comparison=<comparison> : (str) logical comparison, (eq | ne | g | ge | l | le | in)
             --page=<page> : (int) page to return during paginating
             --page_size=<page_size> : (int) number of items on page during pagination
             --wallet_id=<wallet_id> : (str) add purchase receipts from this wallet
@@ -1960,6 +1959,7 @@ async def jsonrpc_file_list(
         wallet = self.wallet_manager.get_wallet_or_default(wallet_id)
         sort = sort or 'rowid'
         comparison = comparison or 'eq'
+
         paginated = paginate_list(
             self.stream_manager.get_filtered_streams(sort, reverse, comparison, **kwargs), page, page_size
         )
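
The docstring change above also advertises a new `in` comparison. Per the operator table added to `lbry/stream/stream_manager.py` below, it is plain membership: the stream's field value must appear in the supplied collection. A small, self-contained illustration of those operator semantics (ordinary Python, local names and made-up values, no lbry objects involved):

    # Illustration of the comparison lambdas added in stream_manager.py;
    # the dict and values here are local to this example.
    operators = {
        'eq': lambda a, b: a == b,
        'in': lambda a, b: a in b,
    }
    claim_id = 'abc123'
    assert operators['eq'](claim_id, 'abc123')
    assert operators['in'](claim_id, ['abc123', 'def456'])
    assert not operators['in'](claim_id, ['def456'])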
31 changes: 30 additions & 1 deletion lbry/stream/stream_manager.py
@@ -23,6 +23,9 @@
 from lbry.extras.daemon.analytics import AnalyticsManager
 from lbry.extras.daemon.storage import SQLiteStorage, StoredContentClaim
 from lbry.extras.daemon.exchange_rate_manager import ExchangeRateManager
+from lbry.wallet.transaction import Transaction
+from lbry.wallet.manager import WalletManager
+from lbry.wallet.wallet import Wallet
 
 log = logging.getLogger(__name__)
 
@@ -46,13 +49,20 @@
     'blobs_in_stream'
 ]
 
+SET_FILTER_FIELDS = {
+    "claim_ids": "claim_id",
+    "channel_claim_ids": "channel_claim_id",
+    "outpoints": "outpoint"
+}
+
 COMPARISON_OPERATORS = {
     'eq': lambda a, b: a == b,
     'ne': lambda a, b: a != b,
     'g': lambda a, b: a > b,
     'l': lambda a, b: a < b,
     'ge': lambda a, b: a >= b,
     'le': lambda a, b: a <= b,
+    'in': lambda a, b: a in b
 }


@@ -276,15 +286,34 @@ def get_filtered_streams(self, sort_by: Optional[str] = None, reverse: Optional[
             raise ValueError(f"'{comparison}' is not a valid comparison")
         if 'full_status' in search_by:
             del search_by['full_status']
+
         for search in search_by:
             if search not in FILTER_FIELDS:
                 raise ValueError(f"'{search}' is not a valid search operation")
+
+        compare_sets = {}
+        if isinstance(search_by.get('claim_id'), list):
+            compare_sets['claim_ids'] = search_by.pop('claim_id')
+        if isinstance(search_by.get('outpoint'), list):
+            compare_sets['outpoints'] = search_by.pop('outpoint')
+        if isinstance(search_by.get('channel_claim_id'), list):
+            compare_sets['channel_claim_ids'] = search_by.pop('channel_claim_id')
+
         if search_by:
             comparison = comparison or 'eq'
             streams = []
             for stream in self.streams.values():
+                matched = False
+                for set_search, val in compare_sets.items():
+                    if COMPARISON_OPERATORS[comparison](getattr(stream, SET_FILTER_FIELDS[set_search]), val):
+                        streams.append(stream)
+                        matched = True
+                        break
+                if matched:
+                    continue
                 for search, val in search_by.items():
-                    if COMPARISON_OPERATORS[comparison](getattr(stream, search), val):
+                    this_stream = getattr(stream, search)
+                    if COMPARISON_OPERATORS[comparison](this_stream, val):
                         streams.append(stream)
                         break
         else:
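To summarize the filtering change above: list-valued `claim_id`, `outpoint`, and `channel_claim_id` arguments are popped out of `search_by` into `compare_sets`, keyed by the plural names in `SET_FILTER_FIELDS`, and each stream is checked against those sets first; a match short-circuits the remaining scalar filters. A simplified, self-contained sketch of that set matching follows; it applies membership directly rather than routing through `COMPARISON_OPERATORS`, and uses a stand-in object instead of `ManagedStream`:

    # Simplified sketch of the set-filter routing; SimpleNamespace stands in
    # for ManagedStream and the ids are placeholders.
    from types import SimpleNamespace

    SET_FILTER_FIELDS = {
        "claim_ids": "claim_id",
        "channel_claim_ids": "channel_claim_id",
        "outpoints": "outpoint",
    }

    streams = [
        SimpleNamespace(claim_id="aaa", channel_claim_id="ch1", outpoint="tx1:0"),
        SimpleNamespace(claim_id="bbb", channel_claim_id="ch2", outpoint="tx2:0"),
    ]
    search_by = {"claim_id": ["aaa", "bbb"]}   # list value, as file_list now accepts

    # move list-valued filters out of search_by into compare_sets
    compare_sets = {}
    for singular in ("claim_id", "outpoint", "channel_claim_id"):
        if isinstance(search_by.get(singular), list):
            compare_sets[singular + "s"] = search_by.pop(singular)

    # keep any stream whose field value is a member of one of the supplied lists
    matched = [
        stream for stream in streams
        if any(getattr(stream, SET_FILTER_FIELDS[plural]) in values
               for plural, values in compare_sets.items())
    ]
    assert len(matched) == 2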
5 changes: 5 additions & 0 deletions tests/integration/datanetwork/test_file_commands.py
@@ -21,6 +21,11 @@ async def test_file_management(self):
         self.assertEqual(file1['claim_name'], 'foo')
         self.assertEqual(file2['claim_name'], 'foo2')
 
+        self.assertItemCount(await self.daemon.jsonrpc_file_list(claim_id=[file1['claim_id'], file2['claim_id']]), 2)
+        self.assertItemCount(await self.daemon.jsonrpc_file_list(claim_id=file1['claim_id']), 1)
+        self.assertItemCount(await self.daemon.jsonrpc_file_list(outpoint=[file1['outpoint'], file2['outpoint']]), 2)
+        self.assertItemCount(await self.daemon.jsonrpc_file_list(outpoint=file1['outpoint']), 1)
+
         await self.daemon.jsonrpc_file_delete(claim_name='foo')
         self.assertItemCount(await self.daemon.jsonrpc_file_list(), 1)
         await self.daemon.jsonrpc_file_delete(claim_name='foo2')
