Mirror of https://github.com/servo/servo.git, synced 2025-08-06 14:10:11 +01:00
Remove CSS lexer's dependency on resource_task

parent 86ce867a1c
commit 37d45c6872

4 changed files with 61 additions and 48 deletions
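In outline: the lexer's entry points (parser and lex_css_from_bytes) now take a generic DataStream instead of a comm::Port<ProgressMsg>, the resource_task-specific plumbing (spawn_css_lexer_task and the port-to-stream adapter) moves into a new src/servo/html/cssparse.rs helper module behind spawn_css_parser, and the HTML parser's css_link_listener is simplified to call that helper.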
@@ -9,7 +9,7 @@ use pipes::{Port, Chan};
 use lexer_util::*;
 
 use std::net::url::Url;
-use resource::resource_task::{ResourceTask, ProgressMsg, Load, Payload, Done};
+use std::cell::Cell;
 
 enum ParserState {
     CssElement,
@@ -225,29 +225,20 @@ impl CssLexer : CssLexerMethods {
     }
 }
 
-fn resource_port_to_lexer_stream(input_port: comm::Port<ProgressMsg>) -> DataStream {
-    return || {
-        match input_port.recv() {
-            Payload(move data) => Some(move data),
-            Done(*) => None
-        }
-    }
-}
-
-fn parser(input_port: comm::Port<ProgressMsg>, state : ParserState) -> CssLexer {
+fn parser(input: DataStream, state : ParserState) -> CssLexer {
     return {
         input_state: {
             mut lookahead: None,
             mut buffer: ~[],
-            input: resource_port_to_lexer_stream(input_port),
+            input: input,
             mut eof: false
         },
         mut parser_state: state
     };
 }
 
-fn lex_css_from_bytes(input_port: comm::Port<ProgressMsg>, result_chan : &Chan<Token>) {
-    let lexer = parser(input_port, CssElement);
+pub fn lex_css_from_bytes(input_stream: DataStream, result_chan : &Chan<Token>) {
+    let lexer = parser(input_stream, CssElement);
 
     loop {
         let token = lexer.parse_css();
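For orientation, DataStream is defined in lexer_util (not shown in this diff); from its use above it is a callback the lexer pulls byte chunks from, returning Some(bytes) while data remains and None at end of input. The sketch below restates that contract in modern Rust; the type alias, function bodies, and the toy buffering loop are illustrative assumptions, not Servo's actual API.

    // Modern-Rust sketch of the pull-based input contract (illustrative names only).
    type DataStream = Box<dyn FnMut() -> Option<Vec<u8>>>;

    fn lex_css_from_bytes(mut input: DataStream, out: &mut Vec<u8>) {
        // Pull chunks until the stream reports end-of-input with None;
        // a real lexer would tokenize here instead of just buffering.
        while let Some(chunk) = input() {
            out.extend_from_slice(&chunk);
        }
    }

    fn main() {
        let mut chunks = vec![b"a { color: red }".to_vec()].into_iter();
        let stream: DataStream = Box::new(move || chunks.next());
        let mut bytes = Vec::new();
        lex_css_from_bytes(stream, &mut bytes);
        assert_eq!(bytes, b"a { color: red }".to_vec());
    }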
@@ -265,28 +256,11 @@ fn spawn_css_lexer_from_string(content : ~str) -> pipes::Port<Token> {
     let (result_chan, result_port) = pipes::stream();
 
     do task::spawn |move result_chan, move content| {
-        let input_port = comm::Port();
-        input_port.send(Payload(str::to_bytes(content)));
-        input_port.send(Done(Ok(())));
-
-        lex_css_from_bytes(input_port, &result_chan);
+        let content = str::to_bytes(content);
+        let content = Cell(copy content);
+        let input = |move content| if !content.is_empty() { Some(content.take()) } else { None };
+        lex_css_from_bytes(input, &result_chan);
     }
 
     return move result_port;
 }
-
-#[allow(non_implicitly_copyable_typarams)]
-pub fn spawn_css_lexer_task(url: Url, resource_task: ResourceTask) -> pipes::Port<Token> {
-    let (result_chan, result_port) = pipes::stream();
-
-    do task::spawn |move result_chan, move url| {
-        assert url.path.ends_with(".css");
-        let input_port = Port();
-        // TODO: change copy to move once the compiler permits it
-        resource_task.send(Load(copy url, input_port.chan()));
-
-        lex_css_from_bytes(input_port, &result_chan);
-    };
-
-    return move result_port;
-}
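The string path above wraps the byte buffer in a Cell so the closure can hand it out exactly once and then report end-of-input. In current Rust the same one-shot stream is usually written with Option::take; a minimal sketch, reusing the illustrative DataStream shape from the previous example:

    // One-shot stream over an in-memory string: yields the whole buffer on the
    // first call and None afterwards (the modern counterpart of the Cell trick).
    fn string_to_stream(content: String) -> Box<dyn FnMut() -> Option<Vec<u8>>> {
        let mut buf = Some(content.into_bytes());
        Box::new(move || buf.take())
    }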
src/servo/html/cssparse.rs (new file, 48 lines added)

@@ -0,0 +1,48 @@
+/*!
+Some little helpers for hooking up the HTML parser with the CSS parser
+*/
+
+use std::net::url::Url;
+use resource::resource_task::{ResourceTask, ProgressMsg, Load, Payload, Done};
+use newcss::values::Rule;
+use css::lexer_util::DataStream;
+use css::lexer::{Token, lex_css_from_bytes};
+
+pub fn spawn_css_parser(url: Url, resource_task: ResourceTask) -> comm::Port<~[~Rule]> {
+    let result_port = comm::Port();
+    let result_chan = comm::Chan(&result_port);
+    // TODO: change copy to move once we have match move
+    let url = copy url;
+    do task::spawn |move url, copy resource_task| {
+        let css_stream = spawn_css_lexer_task(copy url, resource_task);
+        let mut css_rules = css::parser::build_stylesheet(move css_stream);
+        result_chan.send(move css_rules);
+    }
+
+    return result_port;
+}
+
+#[allow(non_implicitly_copyable_typarams)]
+fn spawn_css_lexer_task(url: Url, resource_task: ResourceTask) -> pipes::Port<Token> {
+    let (result_chan, result_port) = pipes::stream();
+
+    do task::spawn |move result_chan, move url| {
+        assert url.path.ends_with(".css");
+        let input_port = Port();
+        // TODO: change copy to move once the compiler permits it
+        resource_task.send(Load(copy url, input_port.chan()));
+
+        lex_css_from_bytes(resource_port_to_lexer_stream(input_port), &result_chan);
+    };
+
+    return move result_port;
+}
+
+fn resource_port_to_lexer_stream(input_port: comm::Port<ProgressMsg>) -> DataStream {
+    return || {
+        match input_port.recv() {
+            Payload(move data) => Some(move data),
+            Done(*) => None
+        }
+    }
+}
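The adapter at the bottom of the new file, resource_port_to_lexer_stream, is what keeps resource_task knowledge out of the lexer: it blocks on the port and translates Payload messages into Some(bytes) and Done into end-of-stream. A hedged modern-Rust analogue using std::sync::mpsc follows; the ProgressMsg enum here is an illustrative stand-in for the real one in resource_task.

    use std::sync::mpsc::Receiver;

    // Illustrative stand-in for resource_task's ProgressMsg.
    enum ProgressMsg {
        Payload(Vec<u8>),
        Done(Result<(), ()>),
    }

    // Adapter in the spirit of resource_port_to_lexer_stream: block on the port,
    // map Payload to Some(bytes), and treat Done (or a closed channel) as EOF.
    fn port_to_stream(port: Receiver<ProgressMsg>) -> Box<dyn FnMut() -> Option<Vec<u8>>> {
        Box::new(move || match port.recv() {
            Ok(ProgressMsg::Payload(data)) => Some(data),
            Ok(ProgressMsg::Done(_)) | Err(_) => None,
        })
    }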
@@ -14,6 +14,7 @@ use hubbub::Attribute;
 
 use comm::{Chan, Port};
 use std::net::url::Url;
+use cssparse::spawn_css_parser;
 
 type JSResult = ~[~[u8]];
 
@@ -54,19 +55,8 @@ fn css_link_listener(to_parent : comm::Chan<Stylesheet>, from_parent : comm::Por
 
     loop {
         match from_parent.recv() {
-            CSSTaskNewFile(url) => {
-                let result_port = comm::Port();
-                let result_chan = comm::Chan(&result_port);
-                // TODO: change copy to move once we have match move
-                let url = copy url;
-                do task::spawn |move url, copy resource_task| {
-                    // TODO: change copy to move once we can move out of closures
-                    let css_stream = css::lexer::spawn_css_lexer_task(copy url, resource_task);
-                    let mut css_rules = css::parser::build_stylesheet(move css_stream);
-                    result_chan.send(move css_rules);
-                }
-
-                vec::push(&mut result_vec, result_port);
+            CSSTaskNewFile(move url) => {
+                result_vec.push(spawn_css_parser(move url, copy resource_task));
             }
             CSSTaskExit => {
                 break;
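The listener now spawns one parse per CSSTaskNewFile through spawn_css_parser and keeps the returned port in result_vec. As a rough modern-Rust illustration of that fan-out shape (one worker per stylesheet, one receiver per result; the parsing itself is a placeholder, not Servo's code):

    use std::sync::mpsc::{channel, Receiver};
    use std::thread;

    // One worker per stylesheet URL, one receiver per result, mirroring how the
    // listener collects a port per file. The "parsing" below is a stand-in.
    fn spawn_parser(url: String) -> Receiver<Vec<String>> {
        let (tx, rx) = channel();
        thread::spawn(move || {
            let rules = vec![format!("rules parsed from {url}")];
            let _ = tx.send(rules);
        });
        rx
    }

    fn main() {
        let results: Vec<_> = ["a.css", "b.css"]
            .iter()
            .map(|u| spawn_parser(u.to_string()))
            .collect();
        for rx in results {
            println!("{:?}", rx.recv().unwrap());
        }
    }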
@@ -92,6 +92,7 @@ pub mod image {
 
 pub mod html {
     pub mod hubbub_html_parser;
+    pub mod cssparse;
 }
 
 pub mod platform {