Add back password prompt, change default first page from 0 to 1, and always try to load archives

Hank Grabowski 2022-01-15 09:09:32 -05:00
parent 45e8cad1a0
commit 880ef5ad89

@@ -13,7 +13,7 @@ const defaultRequestDelayMilliseconds = 5000;
 const defaultMaxPostsQuery = 1000000000;
 const defaultItemsPerPage = 20;
 const defaultDownloadImages = true;
-const defaultStartPage = 0;
+const defaultStartPage = 1;
 
 void main(List<String> arguments) async {
   final argParser = _buildArgs();
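
The new default of 1 only matters when no start page is passed on the command line. Below is a minimal sketch of how a constant like this is typically wired into package:args as an option's defaultsTo value; the 'first-page' option name and the parsing code are assumptions for illustration, not taken from _buildArgs().

import 'package:args/args.dart';

const defaultStartPage = 1;

void main(List<String> arguments) {
  // Hypothetical option name; the real flag is defined in _buildArgs().
  final parser = ArgParser()
    ..addOption('first-page', defaultsTo: defaultStartPage.toString());
  final results = parser.parse(arguments);
  final firstPage = int.parse(results['first-page'] as String);
  print('First page to query: $firstPage');
}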
@@ -27,6 +27,7 @@ void main(List<String> arguments) async {
     return;
   }
 
+  stdout.write('Password: ');
   final password = stdin.readLineSync() ?? '';
   print('');
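
For reference, here is a standalone sketch of the prompt pattern restored above. The echoMode handling is not part of this commit; it is shown only as one way to keep the typed password off the screen, and it assumes stdin is attached to a terminal.

import 'dart:io';

String promptForPassword() {
  stdout.write('Password: ');
  final hadEcho = stdin.echoMode;
  stdin.echoMode = false; // suppress terminal echo while the password is typed
  final password = stdin.readLineSync() ?? '';
  stdin.echoMode = hadEcho; // restore the previous echo setting
  print(''); // move to a fresh line, matching the print('') in the diff
  return password;
}

void main() {
  final password = promptForPassword();
  print('Read a ${password.length}-character password');
}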
@@ -49,48 +50,44 @@
   print(
       "Max number of queries will be $maxQueries with $itemsPerPage items per page");
-  if (firstPage != 0) {
-    print(
-        "Starting page is not zero therefore attempting to load image and post/comment archives from disk");
-    if (File(postsJsonFile).existsSync()) {
-      try {
-        final oldEntriesJson =
-            jsonDecode(File(postsJsonFile).readAsStringSync()) as List<dynamic>;
-        final oldEntries =
-            oldEntriesJson.map((j) => FriendicaEntry.fromJson(j));
-        for (final entry in oldEntries) {
-          allEntries[entry.id] = entry;
-        }
-        print('Loading ${oldEntries.length} post/comment entries from disk');
-      } catch (e) {
-        print(
-            'Error loading old entries, will be starting from scratch file: $e');
-      }
-    } else {
-      print(
-          'Entries file did not exist at location therefore assuming starting from scratch: $postsJsonFile');
-    }
-    if (File(imageArchiveJsonFilePath).existsSync()) {
-      final oldEntriesJson =
-          jsonDecode(File(imageArchiveJsonFilePath).readAsStringSync())
-              as List<dynamic>;
-      final oldEntries = oldEntriesJson.map((j) => ImageEntry.fromJson(j));
-      for (final entry in oldEntries) {
-        final alreadyHadEntry = imageArchive.addDirectEntries(entry);
-        if (alreadyHadEntry) {
-          print("Image cache already had entry for: ${entry.url}");
-        }
-      }
-      print('Loading ${oldEntries.length} image entries from disk');
-    } else {
-      print(
-          'Image archive file did not exist at location so assuming starting from scratch: $imageArchiveJsonFilePath');
-    }
-  }
+  if (File(postsJsonFile).existsSync()) {
+    try {
+      final oldEntriesJson =
+          jsonDecode(File(postsJsonFile).readAsStringSync()) as List<dynamic>;
+      final oldEntries = oldEntriesJson.map((j) => FriendicaEntry.fromJson(j));
+      for (final entry in oldEntries) {
+        allEntries[entry.id] = entry;
+      }
+      print('Loading ${oldEntries.length} post/comment entries from disk');
+    } catch (e) {
+      print(
+          'Error loading old entries, will be starting from scratch file: $e');
+    }
+  } else {
+    print(
+        'Entries file did not exist at location therefore assuming starting from scratch: $postsJsonFile');
+  }
+  if (File(imageArchiveJsonFilePath).existsSync()) {
+    final oldEntriesJson =
+        jsonDecode(File(imageArchiveJsonFilePath).readAsStringSync())
+            as List<dynamic>;
+    final oldEntries = oldEntriesJson.map((j) => ImageEntry.fromJson(j));
+    for (final entry in oldEntries) {
+      final alreadyHadEntry = imageArchive.addDirectEntries(entry);
+      if (alreadyHadEntry) {
+        print("Image cache already had entry for: ${entry.url}");
+      }
+    }
+    print('Loading ${oldEntries.length} image entries from disk');
+  } else {
+    print(
+        'Image archive file did not exist at location so assuming starting from scratch: $imageArchiveJsonFilePath');
+  }
   print("Loading data from server");
-  for (var page = firstPage; page < maxQueries; page++) {
+  final maxPage = firstPage + maxQueries;
+  for (var page = firstPage; page < maxPage; page++) {
     print("Querying for posts/comments for $page");
     final timelineResult =
         await client.getTimeline(username, page, itemsPerPage);
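
The loop change at the end of the hunk is worth spelling out: with the old bound of maxQueries, a non-zero firstPage shrank the number of pages fetched, and the loop never ran once firstPage reached maxQueries; with maxPage = firstPage + maxQueries the tool always attempts maxQueries pages from wherever it starts. A small illustration with made-up numbers, not taken from the repository:

void main() {
  const firstPage = 5;
  const maxQueries = 3;

  // Old bound: stops at maxQueries, so starting at page 5 queries nothing.
  final oldPages = [for (var page = firstPage; page < maxQueries; page++) page];

  // New bound: always covers maxQueries pages starting at firstPage.
  final maxPage = firstPage + maxQueries;
  final newPages = [for (var page = firstPage; page < maxPage; page++) page];

  print('old bound: $oldPages'); // old bound: []
  print('new bound: $newPages'); // new bound: [5, 6, 7]
}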