PREMATURE
OPTIMIZATION
The Root of ALL Evil
@akitaonrails
(CODE VERSION)
manga-downloadr \
  -u http://www.mangareader.net/onepunch-man \
  -d /tmp/manga/one-punch-man \
  -n one-punch-man
#!/usr/bin/env ruby
$LOAD_PATH.unshift File.join(File.dirname(__FILE__), '..', 'lib')
require 'optparse'

options = { test: false }
option_parser = OptionParser.new do |opts|
  opts.banner = "Usage: manga-downloadr [options]"
  opts.on("-t", "--test", "Test routine") do |t|
    options[:url]       = "http://www.mangareader.net/onepunch-man"
    options[:name]      = "one-punch-man"
    options[:directory] = "/tmp/manga-downloadr/one-punch-man"
    options[:test]      = true
  end
  opts.on("-u URL", "--url URL",
    "Full MangaReader.net manga homepage URL - required") do |v|
    options[:url] = v
  end
  opts.on("-n NAME", "--name NAME",
    "slug to be used for the sub-folder to store all manga files - required") do |n|
    options[:name] = n
  end
  opts.on("-d DIRECTORY", "--directory DIRECTORY",
    "main folder where all mangas will be stored - required") do |d|
    options[:directory] = d
  end
  opts.on("-h", "--help", "Show this message") do
    puts opts
    exit
  end
end
option_parser.parse!  # parse ARGV into the options hash
require 'manga-downloadr'

generator = MangaDownloadr::Workflow.create(options[:url], options[:name], options[:directory])
generator.fetch_chapter_urls!
generator.fetch_page_urls!
generator.fetch_image_urls!
generator.fetch_images!
generator.compile_ebooks!
require 'manga-downloadr'

generator = MangaDownloadr::Workflow.create(options[:url], options[:name], options[:directory])
puts "Massive parallel scanning of all chapters "
generator.fetch_chapter_urls!
puts "\nMassive parallel scanning of all pages "
generator.fetch_page_urls!
puts "\nMassive parallel scanning of all images "
generator.fetch_image_urls!
puts "\nTotal page links found: #{generator.chapter_pages_count}"
puts "\nMassive parallel download of all page images "
generator.fetch_images!
puts "\nCompiling all images into PDF volumes "
generator.compile_ebooks!
puts "\nProcess finished."
require 'manga-downloadr'

generator = MangaDownloadr::Workflow.create(options[:url], options[:name], options[:directory])

unless generator.state?(:chapter_urls)
  puts "Massive parallel scanning of all chapters "
  generator.fetch_chapter_urls!
end
unless generator.state?(:page_urls)
  puts "\nMassive parallel scanning of all pages "
  generator.fetch_page_urls!
end
unless generator.state?(:image_urls)
  puts "\nMassive parallel scanning of all images "
  generator.fetch_image_urls!
  puts "\nTotal page links found: #{generator.chapter_pages_count}"
end
unless generator.state?(:images)
  puts "\nMassive parallel download of all page images "
  generator.fetch_images!
end
unless options[:test]
  puts "\nCompiling all images into PDF volumes "
  generator.compile_ebooks!
end
puts "\nProcess finished."
MangaDownloadr::Workflow
module MangaDownloadr
  ImageData = Struct.new(:folder, :filename, :url)

  class Workflow
    def initialize(root_url = nil, manga_name = nil, manga_root = nil, options = {})
    end
    def fetch_chapter_urls!
    end
    def fetch_page_urls!
    end
    def fetch_image_urls!
    end
    def fetch_images!
    end
    def compile_ebooks!
    end
    def state?(state)
    end

    private

    def current_state(state)
    end
  end
end
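The state?/current_state pair is what makes the CLI restartable. The slides leave the bodies empty; as a minimal sketch (an assumption of mine, not the talk's actual implementation), each completed step could drop a marker file under the manga folder, and a real resume would also serialize the fetched lists (e.g. with Marshal):

def state?(state)
  # hypothetical marker file per completed step
  File.exists?(File.join(manga_root_folder, ".#{state}"))
end

private

def current_state(state)
  FileUtils.touch(File.join(manga_root_folder, ".#{state}"))
end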
fetch_chapter_urls!

def fetch_chapter_urls!
  doc = Nokogiri::HTML(open(manga_root_url))  # open-uri
  self.chapter_list = doc.css("#listing a").map { |l| l['href'] }
  self.manga_title  = doc.css("#mangaproperties h1").first.text
  current_state :chapter_urls
end
def fetch_page_urls!
  chapter_list.each do |chapter_link|
    response = Typhoeus.get "http://www.mangareader.net#{chapter_link}"
    chapter_doc = Nokogiri::HTML(response.body)
    pages = chapter_doc.xpath("//div[@id='selectpage']//select[@id='pageMenu']//option")
    chapter_pages.merge!(chapter_link => pages.map { |p| p['value'] })
    print '.'
  end
  self.chapter_pages_count = chapter_pages.values.inject(0) { |total, list| total += list.size }
  current_state :page_urls
end
def fetch_page_urls!
  chapter_list.each do |chapter_link|
    begin
      response = Typhoeus.get "http://www.mangareader.net#{chapter_link}"
      begin
        chapter_doc = Nokogiri::HTML(response.body)
        pages = chapter_doc.xpath("//div[@id='selectpage']//select[@id='pageMenu']//option")
        chapter_pages.merge!(chapter_link => pages.map { |p| p['value'] })
        print '.'
      rescue => e
        self.fetch_page_urls_errors << { url: chapter_link, error: e, body: response.body }
        print 'x'
      end
    rescue => e
      puts e
    end
  end
  unless fetch_page_urls_errors.empty?
    puts "\nErrors fetching page urls:"
    puts fetch_page_urls_errors
  end
  self.chapter_pages_count = chapter_pages.values.inject(0) { |total, list| total += list.size }
  current_state :page_urls
end
def fetch_page_urls!
  hydra = Typhoeus::Hydra.new(max_concurrency: hydra_concurrency)
  chapter_list.each do |chapter_link|
    begin
      request = Typhoeus::Request.new "http://www.mangareader.net#{chapter_link}"
      request.on_complete do |response|
        begin
          chapter_doc = Nokogiri::HTML(response.body)
          pages = chapter_doc.xpath("//div[@id='selectpage']//select[@id='pageMenu']//option")
          chapter_pages.merge!(chapter_link => pages.map { |p| p['value'] })
          print '.'
        rescue => e
          self.fetch_page_urls_errors << { url: chapter_link, error: e, body: response.body }
          print 'x'
        end
      end
      hydra.queue request
    rescue => e
      puts e
    end
  end
  hydra.run
  unless fetch_page_urls_errors.empty?
    puts "\nErrors fetching page urls:"
    puts fetch_page_urls_errors
  end
  self.chapter_pages_count = chapter_pages.values.inject(0) { |total, list| total += list.size }
  current_state :page_urls
end
def fetch_image_urls!
  hydra = Typhoeus::Hydra.new(max_concurrency: hydra_concurrency)
  chapter_list.each do |chapter_key|
    chapter_pages[chapter_key].each do |page_link|
      begin
        request = Typhoeus::Request.new "http://www.mangareader.net#{page_link}"
        request.on_complete do |response|
          begin
            chapter_doc = Nokogiri::HTML(response.body)
            image       = chapter_doc.css('#img').first
            tokens      = image['alt'].match("^(.*?)\s-\s(.*?)$")
            extension   = File.extname(URI.parse(image['src']).path)
            chapter_images.merge!(chapter_key => []) if chapter_images[chapter_key].nil?
            chapter_images[chapter_key] << ImageData.new(tokens[1], "#{tokens[2]}#{extension}", image['src'])
            print '.'
          rescue => e
            self.fetch_image_urls_errors << { url: page_link, error: e }
            print 'x'
          end
        end
        hydra.queue request
      rescue => e
        puts e
      end
    end
  end
  hydra.run
  unless fetch_image_urls_errors.empty?
    puts "\nErrors fetching image urls:"
    puts fetch_image_urls_errors
  end
  current_state :image_urls
end
def fetch_images!
  hydra = Typhoeus::Hydra.new(max_concurrency: hydra_concurrency)
  chapter_list.each_with_index do |chapter_key, chapter_index|
    chapter_images[chapter_key].each do |file|
      downloaded_filename = File.join(manga_root_folder, file.folder, file.filename)
      next if File.exists?(downloaded_filename) # effectively resumes the download list without re-downloading everything
      request = Typhoeus::Request.new file.url
      request.on_complete do |response|
        begin
          # download
          FileUtils.mkdir_p(File.join(manga_root_folder, file.folder))
          File.open(downloaded_filename, "wb+") { |f| f.write response.body }
          unless is_test
            # resize
            image = Magick::Image.read(downloaded_filename).first
            resized = image.resize_to_fit(600, 800)
            resized.write(downloaded_filename) { self.quality = 50 }
            GC.start # to avoid a leak too big (ImageMagick is notorious for that, specially on resizes)
          end
          print '.'
        rescue => e
          self.fetch_images_errors << { url: file.url, error: e }
          print '#'
        end
      end
      hydra.queue request
    end
  end
  hydra.run
  unless fetch_images_errors.empty?
    puts "\nErrors downloading images:"
    puts fetch_images_errors
  end
  current_state :images
end
def compile_ebooks!
  folders = Dir[manga_root_folder + "/*/"].sort_by { |element| element.split(" ").last.to_i }
  self.download_links = folders.inject([]) do |list, folder|
    list += Dir[folder + "*.*"].sort_by { |element| element.split(" ").last.to_i }
  end
  # concatenating PDF files (250 pages per volume)
  chapter_number = 0
  while !download_links.empty?
    chapter_number += 1
    pdf_file = File.join(manga_root_folder, "#{manga_title} #{chapter_number}.pdf")
    list = download_links.slice!(0..pages_per_volume)
    Prawn::Document.generate(pdf_file, page_size: page_size) do |pdf|
      list.each do |image_file|
        begin
          pdf.image image_file, position: :center, vposition: :center
        rescue => e
          puts "Error in #{image_file} - #{e}"
        end
      end
    end
    print '.'
  end
  current_state :ebooks
end
manga-downloadr -t
199.69s user 10.30s system 124% cpu 2:48.14 total
.
├── _build
│   └── ...
├── config
│   └── config.exs
├── deps
│   ├── ...
├── ex_manga_downloadr
├── lib
│   ├── ex_manga_downloadr
│   │   ├── cli.ex
│   │   ├── mangafox
│   │   │   ├── chapter_page.ex
│   │   │   ├── index_page.ex
│   │   │   └── page.ex
│   │   ├── mangareader
│   │   │   ├── chapter_page.ex
│   │   │   ├── index_page.ex
│   │   │   └── page.ex
│   │   ├── pool_management
│   │   │   ├── supervisor.ex
│   │   │   └── worker.ex
│   │   └── workflow.ex
│   ├── ex_manga_downloadr.ex
│   └── pool_management.ex
├── mix.exs
├── mix.lock
├── README.md
└── test
    ├── ex_manga_downloadr
    │   ├── mangafox_test.exs
    │   └── mangareader_test.exs
    ├── ex_manga_downloadr_test.exs
    └── test_helper.exs

61 directories, 281 files
mix.exs

defmodule ExMangaDownloadr.Mixfile do
  use Mix.Project

  def project do
    [app: :ex_manga_downloadr,
     version: "1.0.1",
     elixir: "~> 1.1",
     build_embedded: Mix.env == :prod,
     start_permanent: Mix.env == :prod,
     escript: [main_module: ExMangaDownloadr.CLI],
     deps: deps]
  end

  # Configuration for the OTP application
  #
  # Type "mix help compile.app" for more information
  def application do
    [applications: [:logger, :httpotion, :porcelain],
     mod: {PoolManagement, []}]
  end

  defp deps do
    [
      {:ibrowse, "~> 4.2.2"},
      {:httpotion, "~> 3.0.0"},
      {:floki, "~> 0.9.0"},
      {:porcelain, "~> 2.0.1"},
      {:poolboy, "~> 1.5.1"},
      {:mock, "~> 0.1.3", only: :test}
    ]
  end
end
PoolManagement

pool_management.ex
defmodule PoolManagement do
  use Application

  def start(_type, _args) do
    PoolManagement.Supervisor.start_link
  end
end
Supervisor

supervisor.ex
defmodule PoolManagement.Supervisor do
  use Supervisor

  def start_link do
    Supervisor.start_link(__MODULE__, [])
  end

  def init([]) do
    pool_size = System.get_env("POOL_SIZE") || "50"
    pool_options = [
      name: {:local, :worker_pool},
      worker_module: PoolManagement.Worker,
      size: String.to_integer(pool_size),
      max_overflow: 0
    ]
    children = [
      supervisor(Task.Supervisor, [[name: Fetcher.TaskSupervisor, strategy: :transient, max_restarts: 10]]),
      :poolboy.child_spec(:worker_pool, pool_options, [])
    ]
    supervise(children, strategy: :one_for_one)
  end
end
Worker
worker.ex
defmodule PoolManagement.Worker do
  use GenServer

  # Public APIs
  def index_page(url, source) do
  end
  def chapter_page([chapter_link, source]) do
  end
  def page_image([page_link, source]) do
  end
  def page_download_image(image_data, directory) do
  end

  # internal GenServer implementation
  def handle_call({:chapter_page, chapter_link, source}, _from, state) do
  end
  def handle_call({:page_image, page_link, source}, _from, state) do
  end
  def handle_call({:page_download_image, image_data, directory}, _from, state) do
  end

  ## Helper functions
  defp manga_source(source, module) do
    case source do
      "mangafox"    -> :"Elixir.ExMangaDownloadr.Mangafox.#{module}"
      "mangareader" -> :"Elixir.ExMangaDownloadr.MangaReader.#{module}"
    end
  end

  defp download_image({image_src, image_filename}, directory) do
  end
end
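The bodies are intentionally blank on this slide. As a rough sketch (my assumption, following the manga_source pattern shown later in the talk), index_page/2 could resolve the source module and delegate to its IndexPage, since the index is a single request and needs no pooling:

def index_page(url, source) do
  # assumed delegation - resolves ExMangaDownloadr.<Source>.IndexPage at runtime
  source
  |> manga_source("IndexPage")
  |> apply(:chapters, [url])
end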
POOL
defmodule PoolManagement.Worker do
  use GenServer
  ...
  def chapter_page([chapter_link, source]) do
    Task.Supervisor.async(Fetcher.TaskSupervisor, fn ->
      :poolboy.transaction :worker_pool, fn(server) ->
        GenServer.call(server, {:chapter_page, chapter_link, source}, @genserver_call_timeout)
      end, @task_async_timeout
    end)
  end
  ...
  def handle_call({:chapter_page, chapter_link, source}, _from, state) do
    links = source
      |> manga_source("ChapterPage")
      |> apply(:pages, [chapter_link])
    {:reply, links, state}
  end
  ...
  defp manga_source(source, module) do
    case source do
      "mangareader" -> :"Elixir.ExMangaDownloadr.MangaReader.#{module}"
      "mangafox"    -> :"Elixir.ExMangaDownloadr.Mangafox.#{module}"
    end
  end
end
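The @genserver_call_timeout and @task_async_timeout module attributes are not shown on the slides. They would sit near the top of the module; the values below are placeholders of mine, not the talk's (the point is simply that both the pooled call and the task get generous timeouts):

# assumed values - the actual numbers are not on the slides
@genserver_call_timeout 1_000_000
@task_async_timeout     1_000_000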
ChapterPage
defmodule ExMangaDownloadr.Mangafox.ChapterPage do
  require Logger
  require ExMangaDownloadr

  def pages(chapter_link) do
    ExMangaDownloadr.fetch chapter_link, do: fetch_pages(chapter_link)
  end

  defp fetch_pages(html, chapter_link) do
    [_page|link_template] = chapter_link |> String.split("/") |> Enum.reverse
    html
    |> Floki.find("div[id='top_center_bar'] option")
    |> Floki.attribute("value")
    |> Enum.reject(fn page_number -> page_number == "0" end)
    |> Enum.map(fn page_number ->
      ["#{page_number}.html"|link_template]
        |> Enum.reverse
        |> Enum.join("/")
    end)
  end
end
cli.ex
defmodule ExMangaDownloadr.CLI do
  alias ExMangaDownloadr.Workflow
  require ExMangaDownloadr

  def main(args) do
    args
    |> parse_args
    |> process
  end
  ...
  defp parse_args(args) do
  end
  defp process(:help) do
  end
  defp process(directory, url) do
    File.mkdir_p!(directory)
    File.mkdir_p!("/tmp/ex_manga_downloadr_cache")
    manga_name = directory |> String.split("/") |> Enum.reverse |> Enum.at(0)
    url
      |> Workflow.determine_source
      |> Workflow.chapters
      |> Workflow.pages
      |> Workflow.images_sources
      |> Workflow.process_downloads(directory)
      |> Workflow.optimize_images
      |> Workflow.compile_pdfs(manga_name)
      |> finish_process
  end
  defp process_test(directory, url) do
  end
  defp finish_process(directory) do
  end
end
Workflow

workflow.ex
defmodule ExMangaDownloadr.Workflow do
  alias PoolManagement.Worker
  require Logger

  def chapters({url, source}) do
  end

  def pages({chapter_list, source}) do
    pages_list = chapter_list
      |> Enum.map(&Worker.chapter_page([&1, source]))
      |> Enum.map(&Task.await(&1, @await_timeout_ms))
      |> Enum.reduce([], fn {:ok, list}, acc -> acc ++ list end)
    {pages_list, source}
  end

  def images_sources({pages_list, source}) do
  end

  def process_downloads(images_list, directory) do
  end

  def optimize_images(directory) do
    Porcelain.shell("mogrify -resize #{@image_dimensions} #{directory}/*.jpg")
    directory
  end

  def compile_pdfs(directory, manga_name) do
  end
end
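chapters/1 is left blank on the slide. A plausible sketch (my assumption, reusing Worker.index_page from the skeleton above and keeping the {result, source} threading the other steps use):

def chapters({url, source}) do
  # assumes index_page/2 returns the {title, chapter_links} tuple from IndexPage.chapters/1
  {_title, chapter_list} = Worker.index_page(url, source)
  {chapter_list, source}
end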
ex_manga_downloadr --test
28.36s user 15.57s system 33% cpu 2:10.28 total
.
├── cr_manga_downloadr
├── libs
│   ├── ...
├── LICENSE
├── README.md
├── shard.lock
├── shard.yml
├── spec
│   ├── cr_manga_downloadr
│   │   ├── chapters_spec.cr
│   │   ├── concurrency_spec.cr
│   │   ├── image_downloader_spec.cr
│   │   ├── page_image_spec.cr
│   │   └── pages_spec.cr
│   ├── fixtures
│   │   ├── ...
│   └── spec_helper.cr
└── src
    ├── cr_manga_downloadr
    │   ├── chapters.cr
    │   ├── concurrency.cr
    │   ├── downloadr_client.cr
    │   ├── image_downloader.cr
    │   ├── page_image.cr
    │   ├── pages.cr
    │   ├── records.cr
    │   ├── version.cr
    │   └── workflow.cr
    └── cr_manga_downloadr.cr
File.mkdir_p!(directory)
File.mkdir_p!("/tmp/ex_manga_downloadr_cache")
manga_name = directory |> String.split("/") |> Enum.reverse |> Enum.at(0)
url
  |> Workflow.determine_source
  |> Workflow.chapters
  |> Workflow.pages
  |> Workflow.images_sources
  |> Workflow.process_downloads(directory)
  |> Workflow.optimize_images
  |> Workflow.compile_pdfs(manga_name)
  |> finish_process
def run
  Dir.mkdir_p @config.download_directory
  pipe Steps.fetch_chapters(@config)
    .>> Steps.fetch_pages(@config)
    .>> Steps.fetch_images(@config)
    .>> Steps.download_images(@config)
    .>> Steps.optimize_images(@config)
    .>> Steps.prepare_volumes(@config)
    .>> unwrap
  puts "Done!"
end
defmodule ExMangaDownloadr.MangaReader.IndexPage do
  require Logger
  require ExMangaDownloadr

  def chapters(manga_root_url) do
    ExMangaDownloadr.fetch manga_root_url, do: collect
  end

  defp collect(html) do
    {fetch_manga_title(html), fetch_chapters(html)}
  end

  defp fetch_manga_title(html) do
    html
    |> Floki.find("#mangaproperties h1")
    |> Floki.text
  end

  defp fetch_chapters(html) do
    html
    |> Floki.find("#listing a")
    |> Floki.attribute("href")
  end
end
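For reference, a call like the following (hypothetical usage; the exact href format is an assumption) would return the manga title plus the list of chapter links that feed the rest of the pipeline:

ExMangaDownloadr.MangaReader.IndexPage.chapters("http://www.mangareader.net/onepunch-man")
# => {"Onepunch-Man", ["/onepunch-man/1", "/onepunch-man/2", ...]}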
require "./downloadr_client"
require "xml"

module CrMangaDownloadr
  class Chapters < DownloadrClient
    def initialize(@domain, @root_uri : String, @cache_http = false)
      super(@domain, @cache_http)
    end

    def fetch
      html = get(@root_uri)
      nodes = html.xpath_nodes(
        "//table[contains(@id, 'listing')]//td//a/@href")
      nodes.map { |node| node.text.as(String) }
    end
  end
end
DownloadrClient
module CrMangaDownloadr
  class DownloadrClient
    @http_client : HTTP::Client

    def initialize(@domain : String, @cache_http = false)
    end

    def get(uri : String)
      cache_path = "/tmp/cr_manga_downloadr_cache/#{cache_filename(uri)}"
      while true
        begin
          response = if @cache_http && File.exists?(cache_path)
            body = File.read(cache_path)
            HTTP::Client::Response.new(200, body)
          else
            @http_client.get(uri, headers: HTTP::Headers{
              "User-Agent" => CrMangaDownloadr::USER_AGENT })
          end
          case response.status_code
          when 301
            uri = response.headers["Location"]
          when 200
            if @cache_http && !File.exists?(cache_path)
              File.open(cache_path, "w") do |f|
                f.print response.body
              end
            end
            return XML.parse_html(response.body)
          end
        rescue IO::Timeout
          puts "Sleeping over #{uri}"
          sleep 1
        end
      end
    end
  end
end
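The cache_filename helper is not shown on the slides. A minimal sketch (my assumption, not the repository's code) hashes the URI so any path can be stored as a flat file name:

require "digest/md5"

# hypothetical helper - turns an arbitrary URI into a safe cache file name
private def cache_filename(uri : String)
  Digest::MD5.hexdigest(uri)
end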
require	"fiberpool"	
module	CrMangaDownloadr	
		struct	Concurrency	
				def	initialize(@config	:	Config,	@turn_on_engine	=	true);	end	
				def	fetch(collection	:	Array(A)?,	engine_class	:	E.class,	
&block	:	A,	E?	->	Array(B)?)	:	Array(B)	
						results	=	[]	of	B	
						if	collection	
								pool	=	Fiberpool.new(collection,		
										@config.download_batch_size)	
								pool.run	do	|item|	
										engine	=	if	@turn_on_engine	
																					engine_class.new(@config.domain,		
																							@config.cache_http)	
																			end	
										if	reply	=	block.call(item,	engine)	
												results.concat(reply)	
										end	
								end	
						end	
						results	
				end	
		end	
end
fetch
Concurrency
module	CrMangaDownloadr	
		class	Workflow	
		end	
		module	Steps	
				def	self.fetch_chapters(config	:	Config)	
				end	
				def	self.fetch_pages(chapters	:	Array(String)?,	config	:	Config)	
						puts	"Fetching	pages	from	all	chapters	..."	
						reactor	=	Concurrency.new(config)	
						reactor.fetch(chapters,	Pages)	do	|link,	engine|	
								engine.try(&.fetch(link))	
						end	
				end	
				def	self.fetch_images(pages	:	Array(String)?,	config	:	Config)	
				end	
				def	self.download_images(images	:	Array(Image)?,	config	:	Config)	
				end	
				def	self.optimize_images(downloads	:	Array(String),	config	:	Config)	
				end	
				def	self.prepare_volumes(downloads	:	Array(String),	config	:	Config)	
				end	
		end	
end
cr_manga_downloadr -t
0.28s user 0.53s system 0% cpu 1:52.45 total
.
├── _build
│   └── ...
├── config
│   └── config.exs
├── deps
│   ├── ...
├── ex_manga_downloadr
├── lib
│   ├── ex_manga_downloadr
│   │   ├── cli.ex
│   │   ├── mangafox
│   │   │   ├── chapter_page.ex
│   │   │   ├── index_page.ex
│   │   │   └── page.ex
│   │   ├── mangareader
│   │   │   ├── chapter_page.ex
│   │   │   ├── index_page.ex
│   │   │   └── page.ex
│   │   ├── pool_management
│   │   │   ├── supervisor.ex
│   │   │   └── worker.ex
│   │   └── workflow.ex
│   ├── ex_manga_downloadr.ex
│   └── pool_management.ex
├── mix.exs
├── mix.lock
├── README.md
└── test
    ├── ex_manga_downloadr
    │   ├── mangafox_test.exs
    │   └── mangareader_test.exs
    ├── ex_manga_downloadr_test.exs
    └── test_helper.exs

61 directories, 281 files
.
├── cr_manga_downloadr
├── libs
│   ├── ...
├── LICENSE
├── README.md
├── shard.lock
├── shard.yml
├── spec
│   ├── cr_manga_downloadr
│   │   ├── chapters_spec.cr
│   │   ├── concurrency_spec.cr
│   │   ├── image_downloader_spec.cr
│   │   ├── page_image_spec.cr
│   │   └── pages_spec.cr
│   ├── fixtures
│   │   ├── ...
│   └── spec_helper.cr
└── src
    ├── cr_manga_downloadr
    │   ├── chapters.cr
    │   ├── concurrency.cr
    │   ├── downloadr_client.cr
    │   ├── image_downloader.cr
    │   ├── page_image.cr
    │   ├── pages.cr
    │   ├── records.cr
    │   ├── version.cr
    │   └── workflow.cr
    └── cr_manga_downloadr.cr
.
├── bin
│   └── manga-downloadr
├── Gemfile
├── Gemfile.lock
├── lib
│   ├── manga-downloadr
│   │   ├── chapters.rb
│   │   ├── concurrency.rb
│   │   ├── downloadr_client.rb
│   │   ├── image_downloader.rb
│   │   ├── page_image.rb
│   │   ├── pages.rb
│   │   ├── records.rb
│   │   ├── version.rb
│   │   └── workflow.rb
│   └── manga-downloadr.rb
├── LICENSE.txt
├── manga-downloadr.gemspec
├── Rakefile
├── README.md
└── spec
    ├── fixtures
    │   ├── ...
    ├── manga-downloadr
    │   ├── chapters_spec.rb
    │   ├── concurrency_spec.rb
    │   ├── image_downloader_spec.rb
    │   ├── page_image_spec.rb
    │   └── pages_spec.rb
    └── spec_helper.rb
def	run	
		Dir.mkdir_p	@config.download_directory	
		pipe	Steps.fetch_chapters(@config)	
				.>>	Steps.fetch_pages(@config)	
				.>>	Steps.fetch_images(@config)	
				.>>	Steps.download_images(@config)	
				.>>	Steps.optimize_images(@config)	
				.>>	Steps.prepare_volumes(@config)	
				.>>	unwrap	
		puts	"Done!"	
end
def	self.run(config	=	Config.new)	
		FileUtils.mkdir_p	config.download_directory	
		CM(config,	Workflow)	
				.fetch_chapters	
				.fetch_pages(config)	
				.fetch_images(config)	
				.download_images(config)	
				.optimize_images(config)	
				.prepare_volumes(config)	
				.unwrap	
		puts	"Done!"	
end
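Both pipelines ride on the same trick (cr_chainable_methods in Crystal, chainable_methods in Ruby): wrap a value, forward each call to a module function with the value as the first argument, and re-wrap the result. A toy Ruby sketch of that idea, hypothetical and far simpler than the real gems:

# Toy sketch of the chainable-methods idea; hypothetical, not the gem's code.
class Chain
  def initialize(value, target)
    @value  = value
    @target = target
  end

  # Forward foo(args) as target.foo(value, *args) and re-wrap the result.
  def method_missing(name, *args, &block)
    Chain.new(@target.public_send(name, @value, *args, &block), @target)
  end

  def respond_to_missing?(name, include_private = false)
    @target.respond_to?(name) || super
  end

  def unwrap
    @value
  end
end

module Steps
  def self.double(n); n * 2; end
  def self.add(n, m); n + m; end
end

puts Chain.new(10, Steps).double.add(5).unwrap # => 25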
#	concurrency.cr	
pool	=	Fiberpool.new(collection,	@config.download_batch_size)	
pool.run	do	|item|	
		engine	=	if	@turn_on_engine	
													engine_class.new(@config.domain,	@config.cache_http)	
											end	
		if	reply	=	block.call(item,	engine)	
				results.concat(reply)	
		end	
end
# concurrency.rb
pool    = Thread.pool(@config.download_batch_size)
mutex			=	Mutex.new	
results	=	[]	
collection.each	do	|item|	
		pool.process	{	
				engine		=	@turn_on_engine	?	@engine_klass.new(@config.domain,	@config.cache_http)	:	nil	
				reply	=	block.call(item,	engine)&.flatten	
				mutex.synchronize	do	
						results	+=	(	reply	||	[]	)	
				end	
		}	
end	
pool.shutdown
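The Thread.pool / process / shutdown calls above come from the thread gem. A self-contained demo of that fan-out pattern, with made-up work items standing in for the HTTP fetches:

# Standalone demo of the thread-pool fan-out used above.
# Requires the "thread" gem (gem install thread).
require "thread/pool"

pool    = Thread.pool(4)   # plays the role of @config.download_batch_size
mutex   = Mutex.new
results = []

(1..10).each do |item|
  pool.process do
    reply = item * item    # stand-in for the HTTP fetch + parse work
    mutex.synchronize { results << reply }
  end
end

pool.shutdown              # waits for all queued work to finish
puts results.sort.inspect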
module	CrMangaDownloadr	
		class	Pages	<	DownloadrClient	
				def	fetch(chapter_link	:	String)	
						html	=	get(chapter_link)	
						nodes	=	html.xpath_nodes("//div[@id='selectpage']//select[@id='pageMenu']//option")	
						nodes.map	{	|node|	"#{chapter_link}/#{node.text}"	}	
				end	
		end	
end
module	MangaDownloadr	
		class	Pages	<	DownloadrClient	
				def	fetch(chapter_link)	
						get	chapter_link	do	|html|	
								nodes	=	html.xpath("//div[@id='selectpage']//select[@id='pageMenu']//option")	
								nodes.map	{	|node|	[chapter_link,	node.children.to_s].join("/")	}	
						end	
				end	
		end	
end
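A hypothetical call site for the Ruby version, assuming its DownloadrClient mirrors the Crystal initialize(domain, cache_http) signature shown earlier:

# Hypothetical usage; the constructor arguments are an assumption based on
# the Crystal signature, not confirmed by the Ruby sources shown here.
pages = MangaDownloadr::Pages.new("www.mangareader.net")
pages.fetch("/onepunch-man/1").each { |link| puts link }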
manga-downloadr -t
16.55s user 6.65s system 17% cpu 2:13.86 total
Ruby/Typhoeus 124% CPU 2:38 min
Elixir 33% CPU 2:10 min
Crystal 0% CPU 1:52 min
Ruby 17% CPU 2:13 min
Ruby Typhoeus libcurl
Elixir OTP Poolboy
Crystal Fibers Fiberpool
Ruby Thread Thread/Pool
manga-downloadr
ex_manga_downloadr
cr_manga_downloadr
fiberpool
cr_chainable_methods
chainable_methods
PREMATURE
OPTIMIZATION
The Root of ALL Evil
THANKS
@akitaonrails
slideshare.net/akitaonrails
