Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 11 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -133,6 +133,17 @@ And to load:
scraper.load('yahoo-finance')
```

### Exporting results

You can easily export your scraped data to various formats (CSV, JSON, TXT) using the built-in exporter utility:

```python
from autoscraper.utils.exporter import export_results

data = ["item1", "item2"]
export_results(data, "results.csv", "csv")
```

## Tutorials

- See [this gist](https://gist.github.com/alirezamika/72083221891eecd991bbc0a2a2467673) for more advanced usages.
Expand Down
Empty file added autoscraper/utils/__init__.py
Empty file.
35 changes: 35 additions & 0 deletions autoscraper/utils/exporter.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
import csv
import json
from typing import List, Any

def export_results(data: List[Any], file_path: str = "output.csv", format: str = "csv") -> None:
    """
    Export scraped data to a specified format.

    Args:
        data (list): The list of items to export. Items may be scalars
            (one single-column CSV row each), lists/tuples (one CSV row
            each), or dicts (exported to CSV with a header row built from
            the union of keys).
        file_path (str): The destination file path.
        format (str): The format to save the data in ('csv', 'json', 'txt').
            Case-insensitive.

    Raises:
        ValueError: If the format is not supported. No file is created or
            truncated in that case.
    """
    # Rebind to a local name so the builtin `format` is not shadowed in the
    # body; the parameter name is kept for backward-compatible keyword calls.
    fmt = format.lower()

    if fmt == "csv":
        with open(file_path, 'w', newline='', encoding='utf-8') as f:
            if data and all(isinstance(item, dict) for item in data):
                # Dict rows: write a header so the output is usable CSV
                # (plain csv.writer would stringify each dict into one cell).
                # dict.fromkeys preserves first-seen key order across rows.
                fieldnames = list(dict.fromkeys(k for item in data for k in item))
                writer = csv.DictWriter(f, fieldnames=fieldnames)
                writer.writeheader()
                writer.writerows(data)
            else:
                writer = csv.writer(f)
                for item in data:
                    # Sequences map to one row each; scalars to a one-column row.
                    if isinstance(item, (list, tuple)):
                        writer.writerow(item)
                    else:
                        writer.writerow([item])
    elif fmt == "json":
        with open(file_path, 'w', encoding='utf-8') as f:
            json.dump(data, f, indent=4, ensure_ascii=False)
    elif fmt == "txt":
        with open(file_path, 'w', encoding='utf-8') as f:
            for item in data:
                f.write(f"{item}\n")
    else:
        raise ValueError(f"Unsupported format '{fmt}'. Supported formats are: 'csv', 'json', 'txt'.")