# -*- coding: utf-8 -*-

# Define here the models for your scraped items
#
# See documentation in:
# https://docs.scrapy.org/en/latest/topics/items.html
import scrapy
|
2019-11-28 22:59:01 +00:00
|
|
|
from scrapy.loader.processors import MapCompose
|
|
|
|
|
import re
|
2019-11-26 21:02:40 +00:00
|
|
|
|
|
|
|
|
class FmscraperItem(scrapy.Item):
    """Default item stub left over from the Scrapy project template.

    Unused by the spiders; the real models are ``show_Item`` and
    ``episode_Item`` defined below.
    """

    # define the fields for your item here like:
    # name = scrapy.Field()
    pass
|
|
|
|
|
|
2019-11-28 22:59:01 +00:00
|
|
|
def clean_text(value):
    """Item-loader processor: strip leading/trailing whitespace.

    Implemented as a generator so ``MapCompose`` receives the cleaned
    string as a single output value.
    """
    stripped = value.strip()
    yield stripped
|
|
|
|
|
|
|
|
|
|
def clean_html_script(value):
    """Item-loader processor: remove embedded ``<script>...</script>`` blocks.

    The previous pattern put the optional ``<script>`` group after a greedy
    catch-all group, so the greedy group always swallowed the script tag and
    nothing was ever removed (and inputs shorter than two characters raised
    AttributeError).  ``re.sub`` with a non-greedy script pattern removes
    every script block and copes with any input length.
    """
    yield re.sub(r"<script[\w\W]*?</script>", "", value)
|
|
|
|
|
|
2019-12-01 21:01:20 +00:00
|
|
|
def clean_file_url(value):
    """Item-loader processor: truncate an audio URL after the ``.mp3`` part.

    Greedy matching keeps everything up to the *last* ``.mp3`` occurrence,
    discarding any trailing query string.  The pattern is now a raw string
    (``"\\."`` in a plain literal is an invalid escape sequence on modern
    Python), and values without ``.mp3`` yield nothing — MapCompose simply
    drops them — instead of raising AttributeError.
    """
    match = re.match(r"(.+\.mp3)", value)
    if match:
        yield match.group(1)
|
|
|
|
|
|
2019-12-02 00:56:17 +00:00
|
|
|
def clean_emission_url(value):
    """Item-loader processor: strip the query string from an emission URL.

    The previous pattern placed an optional query-string group after a
    greedy ``(.+)``, so the greedy group always consumed the entire value
    and the query string was never removed.  Matching everything up to the
    first ``?`` does what was intended.
    """
    match = re.match(r"([^?]+)", value)
    if match:
        yield match.group(1)
|
|
|
|
|
|
|
|
|
|
def extract_id_episode(value):
    """Item-loader processor: pull the trailing numeric id out of a URL/slug.

    Matches the digits that follow the final ``-`` at the end of the value,
    e.g. ``"my-show-1234"`` -> ``"1234"``.  Pattern is now a raw string
    (``"\\d"`` in a plain literal is an invalid escape sequence on modern
    Python); a value with no trailing id still raises AttributeError, as
    before.
    """
    id_match = re.search(r"-(\d+$)", value)
    yield id_match.group(1)
|
|
|
|
|
|
2019-11-26 21:02:40 +00:00
|
|
|
class show_Item(scrapy.Item):
    """A show/programme scraped from its listing page.

    All fields are plain ``scrapy.Field()`` with no processors; the spider
    fills them directly.
    """

    # Show title (as displayed on the site — presumably; set by the spider).
    name = scrapy.Field()
    # URL of the show's own web page.
    url_page = scrapy.Field()
    # URL of the show's feed (RSS/podcast, judging by the name — confirm in spider).
    url_feed = scrapy.Field()
    # Tag/category strings attached to the show.
    tags = scrapy.Field()
|
|
|
|
|
|
|
|
|
|
class episode_Item(scrapy.Item):
    """A single episode of a show.

    Fields that carry ``input_processor`` metadata are intended to be
    populated through an ItemLoader using the ``MapCompose`` processors
    defined above in this module.

    NOTE(review): scrapy itself does not read a ``default`` key from Field
    metadata, and the stock ItemLoader has no default-value support —
    confirm some project code consumes ``default = 'null'``; otherwise it
    is inert metadata.
    """

    # Emission (show) URL with its query string stripped.
    url_emission = scrapy.Field(
        default = 'null',
        input_processor = MapCompose(clean_emission_url)
    )
    # Episode title, whitespace-trimmed.
    title = scrapy.Field(
        default = 'null',
        input_processor = MapCompose(clean_text)
    )
    # Numeric id extracted from the trailing "-<digits>" of a URL/slug.
    id_episode = scrapy.Field(
        default = 'null',
        input_processor = MapCompose(extract_id_episode)
    )
    # Short lead/summary text, whitespace-trimmed.
    description_lead = scrapy.Field(
        default = 'null',
        input_processor = MapCompose(clean_text)
    )
    # Full description HTML with <script> blocks removed.
    description_full = scrapy.Field(
        default = 'null',
        input_processor = MapCompose(clean_html_script)
    )
    # Audio file URL truncated after ".mp3".
    url_file = scrapy.Field(
        default = 'null',
        input_processor = MapCompose(clean_file_url)
    )
    # Episode page URL (no processing).
    url_page = scrapy.Field()
    # Broadcast date (no processing).
    date_diffusion = scrapy.Field()
    # Audio file size (units not established here — set by the spider).
    file_size= scrapy.Field()
    # Last-modified value for the audio file (presumably an HTTP header — confirm in spider).
    file_last_modified = scrapy.Field()
|
2019-11-26 21:02:40 +00:00
|
|
|
|