@article {1507271, title = {Centering Educators{\textquoteright} Expertise: Learning About Innovative Approaches to Social-Emotional Learning from School Partners in the Boston Charter Research Collaborative}, year = {Working Paper}, author = {Akira S. Gutierrez and Sara B. Krachman and Katie H. Buckley} } @article {1070936, title = {School District Reform in Newark: Within- and Between-School Changes in Achievement Growth}, journal = {NBER Working Paper 23922}, year = {Working Paper}, abstract = {In 2011-12, Newark launched a set of educational reforms supported by a \$200 million gift. Using data from 2009 through 2016, we evaluate the change in Newark students{\textquoteright} achievement growth relative to similar students and schools elsewhere in New Jersey. We measure achievement growth using a {\textquotedblleft}value-added{\textquotedblright} model, controlling for prior achievement, demographics and peer characteristics. By the fifth year of reform, Newark saw statistically significant gains in English and no significant change in math achievement growth. Perhaps due to the disruptive nature of the reforms, growth declined initially before rebounding in recent years. Aided by the closure of low value-added schools, much of the improvement was due to shifting enrollment from lower- to higher-growth district and charter schools. Shifting enrollment accounted for 62 percent of the improvement in English. In math, such shifts offset what would have been a decline in achievement growth.}, url = {http://www.nber.org/papers/w23922}, author = {Mark Chin and Tom Kane and W.
Kozakowski and Beth Schueler and Doug Staiger} } @article {639596, title = {Exploring Explanations for the "Weak" Relationship Between Value Added and Observation-Based Measures of Teacher Performance}, year = {Working Paper}, author = {Mark Chin and Dan Goldhaber} } @report {576916, title = {Explaining Teacher Effects on Achievement Using Measures from Multiple Research Traditions}, year = {Working Paper}, note = {

Presented at the 2014 American Educational Research Association (AERA) annual meeting. Philadelphia, PA.

}, abstract = {

Researchers have identified many characteristics of teachers and teaching that contribute to student outcomes. However, most studies investigate only a small number of these characteristics, likely underestimating the overall contribution. In this paper, we use a set of 28 teacher-level predictors drawn from multiple research traditions to explain teacher-level variation in student outcomes. These predictors collectively explain 28\% of teacher-level variability in state standardized math test scores and 40\% in a predictor-aligned math test. In addition, each individual predictor explains only a small, relatively unique portion of the total teacher-level variability. This first finding highlights the importance of choosing predictors and outcomes that are well aligned, and the second suggests that the phenomena underlying teacher effects are multidimensional.

}, author = {Andrew Bacher-Hicks and Mark Chin and Heather Hill and Douglas Staiger} } @report {576801, title = {The Generalizability of Item Parameters Across Lessons}, year = {Working Paper}, note = {

Presented at the 2014 National Council on Measurement in Education (NCME) Annual Meeting, Philadelphia, PA.

}, abstract = {

The purpose of this study is to investigate three aspects of construct validity for the Mathematical Quality of Instruction classroom observation instrument: (1) the dimensionality of scores, (2) the generalizability of these scores across districts, and (3) the predictive validity of these scores in terms of student achievement.

}, author = {Ben Kelcey and Dan McGinn and Heather C. Hill and Charalambos Charalambous} } @report {576926, title = {Using Item Response Theory to Learn about Observational Instruments}, year = {Working Paper}, note = {

Presented at the 2014 National Council on Measurement in Education (NCME) Annual Meeting, Philadelphia, PA.

}, abstract = {

As many states are slated to soon use scores derived from classroom observation instruments in high-stakes decisions, developers must cultivate methods for improving the functioning of these instruments. We show how multidimensional, multilevel item response theory models can yield information critical for improving the performance of observational instruments.

}, author = {Dan McGinn and Ben Kelcey and Heather Hill and Mark Chin} } @report {576901, title = {What Does It Mean to be Ranked a "High" or "Low" Value-Added Teacher? Observing Differences in Instructional Quality Across Districts}, year = {Working Paper}, note = {

Presented at the American Education Research Association (AERA) Annual Meeting, Philadelphia, PA.

}, abstract = {

Education agencies are evaluating teachers using student achievement data. However, very little is known about the comparability of test-based or "value-added" metrics across districts and the extent to which they capture variability in classroom practices. Drawing on data from four urban districts, we find that teachers are categorized differently when compared within versus across districts. In addition, analyses of scores from two observation instruments, as well as qualitative viewing of lesson videos, identify stark differences in instructional practices across districts among teachers who receive similar within-district value-added rankings. Exploratory analyses suggest that these patterns are not explained by observable background characteristics of teachers and that factors beyond labor market sorting likely play a key role.

}, author = {David Blazar and Erica Litke and Johanna Barmore and Claire Gogolen} } @report {1526721, title = {Summer School As a Learning Loss Recovery Strategy After COVID-19: Evidence from Summer 2022}, year = {2023}, abstract = {To make up for pandemic-related learning losses, many U.S. public school districts have increased enrollment in their summer school programs. We assess summer school as a strategy for COVID-19 learning recovery by tracking the academic progress of students who attended summer school in 2022 across eight districts serving 400,000 students. Based on students{\textquoteright} spring to fall progress, we find a positive impact for summer school on math test achievement (0.03 standard deviation, SD), but not on reading tests. These effects are predominantly driven by students in upper elementary grades. To put the results into perspective, if we assume that these districts have losses similar to those present at the end of the 2022{\textendash}23 school year (i.e., approximately -0.2 SD), we estimate summer programming closed approximately 2\% to 3\% of the districts{\textquoteright} total learning losses in math, but none in reading.}, author = {Ian Callen and Maria V. Carbonari and Michael DeArmond and Daniel Dewey and Elise Dizon-Ross and Dan Goldhaber and Jazmin Isaacs and Thomas J. Kane and Megan Kuhfield and Anna McDonald and Andrew McEachin and Emily Morton and Atsuko Muroga and Douglas O. Staiger} } @report {1526711, title = {School District and Community Factors Associated With Learning Loss During the COVID-19 Pandemic}, year = {2023}, abstract = {

We analyze data from approximately 7,800 school districts to describe variation in pandemic-related learning losses among communities and student subgroups. We attempt to understand mechanisms that led to learning losses, as well as explore how historical data from those districts can inform our expectations for how quickly districts will rebound from such losses. We show that learning losses during the pandemic were large and highly variable among communities. Similar to previous research, we find that losses were larger in lower-income and minority districts and in districts which remained remote or hybrid for longer periods during the 2020-21 school year. Among districts, the math learning loss per week of remote/hybrid instruction was larger in high-minority and high-poverty districts. Within districts, however, White students and non-economically disadvantaged students lost about the same amount of ground as Black, Hispanic and economically disadvantaged students. This suggests that the mechanisms driving losses operated at the district or community level, rather than household level. Several community-level characteristics were related to learning losses: broadband access, disruptions to social and economic activity, and trust in government institutions. However, no individual predictor provided strong explanatory power. Relative to historical years, losses during the pandemic were substantial, and an exploratory analysis of historical shocks to achievement suggests that the effects of the pandemic are likely to persist without continued concerted investments in student learning.

}, author = {Erin Fahle and Thomas J. Kane and Tyler Patterson and Sean Reardon and Douglas Staiger and Elizabeth A. Stuart} } @report {1517926, title = { The Challenge of Execution: District Leader Insights on Evaluating Academic Programs, Products, and Services}, year = {2023}, abstract = {

During the summer and fall of 2022, researchers at the Center for Education Policy Research (CEPR) at Harvard University conducted a series of interviews to explore district leaders{\textquoteright} willingness to participate in evaluation efforts for academic products and services. There were three primary research objectives:

  1. To determine what information district decision-makers need, want, and would find helpful in the identification and adoption of interventions and learning technology products and to better understand when in the procurement and decision-making process this information could be most useful.

  2. To determine what barriers exist for schools and districts to pilot and test interventions before rolling them out system-wide, and which evaluation designs districts would be most willing to implement.

  3. To understand what model of engagement is most desirable for districts to participate in rigorous evaluation work, including barriers to participation.

This study engaged 16 district leaders across the country in qualitative interviews. District leaders participating in this analysis represented 13 unique states with regional representation in the South (5), West (4), Southwest (4), Midwest (2), and New England (1). In total, leaders from participating districts serve 1.2 million students with individual district sizes ranging from about 25,000 to 200,000 students. All the leaders interviewed served a critical role in their district{\textquoteright}s decision-making processes for the procurement of academic products and services. Most participating leaders were either part of their district{\textquoteright}s leadership cabinet, or directly reported to a cabinet member. These interviews probed how district leaders currently incorporate evidence of effectiveness into their purchasing of academic products and services (i.e., academic interventions and core curricular materials), what evidence they wished they had, how willing they would be to have their district generate its own evidence through program evaluations, and what structures could be most helpful to support their decision-making moving forward.

Following the interviews, the research team also analyzed the studies that vendors provided to school districts during the procurement process. The purpose of this analysis was to better understand how vendors report on the effectiveness of their product, and to what degree these reports align with rigorous research methodologies.

Below, we summarize our findings and their implications for developing a network of districts focused on using high-quality evidence to select academic products and services. In Appendix A, we attach a brief outline of what such a network might look like and a timeline for its development.

}, author = {Jon Fullerton and Brittany Miller} } @report {1526716, title = {What Do Changes in State Test Scores Imply for Later Life Outcomes?}, year = {2022}, abstract = {In the three decades before the pandemic, mean achievement of U.S. 8th graders in math rose by more than half a standard deviation on the National Assessment of Educational Progress (NAEP). Between 2019 and 2022, U.S. students had forfeited 40 percent of that rise. To anticipate the consequences of the recent decline, we investigate the past relationship between NAEP scores and students{\textquoteright} later life outcomes by year and state of birth. We find that a standard deviation improvement in a birth cohort{\textquoteright}s 8th grade math achievement was associated with an 8 percent rise in income, as well as improved educational attainment and declines in teen motherhood, incarceration and arrest rates. If allowed to become permanent, our findings imply that the recent losses would represent a 1.6 percent decline in present value of lifetime earnings for the average K-12 student (or \$19,400), totaling \$900 billion for the 48 million students enrolled in public schools during the 2020-21 school year.}, url = {https://cepr.harvard.edu/sites/hwpi.harvard.edu/files/cepr/files/long_term_outcomes_11.18.pdf?m=1668789278}, author = {Thomas J. Kane and Elena Doty and Tyler Patterson and Douglas O. Staiger} } @report {1524471, title = {The Challenges of Implementing Academic COVID Recovery Interventions: Evidence From the Road to Recovery Project}, year = {2022}, abstract = {In this paper we examine academic recovery in 12 mid- to large-sized school districts across 10 states during the 2021{\textendash}22 school year. Our findings highlight the challenges that recovery efforts faced during the 2021{\textendash}22 school year. Although, on average, math and reading test score gains during the school year reached the pace of pre-pandemic school years, they were not accelerated beyond that pace.
This is not surprising given that we found that districts struggled to implement recovery programs at the scale they had planned. In the districts where we had detailed data on student participation in academic interventions, we found that recovery efforts often fell short of original expectations for program scale, intensity of treatment, and impact. Interviews with a subsample of district leaders revealed several implementation challenges, including difficulty engaging targeted students consistently across schools, issues with staffing and limitations to staff capacity, challenges with scheduling, and limited engagement of parents as partners in recovery initiatives. Our findings on the pace and trajectory of recovery and the challenges of implementing recovery initiatives raise important questions about the scale of district recovery efforts.}, url = {https://cepr.harvard.edu/sites/hwpi.harvard.edu/files/cepr/files/the_challenges_of_implementing_academic_covid_recovery.pdf?m=1677190353}, author = {Maria V. Carbonari and Miles Davison and Michael DeArmond and Daniel Dewey and Elise Dizon-Ross and Dan Goldhaber and Ayesha K. Hashim and Thomas J. Kane and Andrew McEachin and Emily Morton and Tyler Patterson and Douglas O. Staiger} } @report {1524466, title = {The Consequences of Remote and Hybrid Instruction During the Pandemic}, year = {2022}, abstract = {Using testing data from 2.1 million students in 10,000 schools in 49 states (plus D.C.), we investigate the role of remote and hybrid instruction in widening gaps in achievement by race and school poverty. We find that remote instruction was a primary driver of widening achievement gaps. Math gaps did not widen in areas that remained in-person (although there was some widening in reading gaps in those areas). 
We estimate that high-poverty districts that went remote in 2020-21 will need to spend nearly all of their federal aid on academic recovery to help students recover from pandemic-related achievement losses.}, author = {Dan Goldhaber and Thomas J. Kane and Andrew McEachin and Emily Morton and Tyler Patterson and Douglas O. Staiger} } @article {1421632, title = {Bridging the Gaps in Education Data}, journal = {American Enterprise Institute}, year = {2021}, abstract = {

Key Points

}, author = {Jon Fullerton} } @report {1333928, title = {The Response of Rural Districts to the COVID-19 Pandemic}, year = {2020}, abstract = {

Given the major disruptions to students{\textquoteright} daily lives as well as the education field more generally caused by the COVID-19 pandemic, NCRERN was interested in learning how its partner districts navigated mandatory school closures and the shift to online learning, as well as identifying ways that NCRERN could support the short- and long-term needs of rural educators. Throughout April 2020, NCRERN staff conducted semistructured phone interviews with district officials and other leaders from 40 out of its 49 partner rural districts in Ohio and New York. The majority of interviews took place when schools were 3{\textendash}5 weeks into shutdown. Notes from each interview were coded by two graduate research assistants to identify major themes that emerged from the conversations. Because interviews were semistructured, not all districts answered each question; as a result, counts should be interpreted with caution.

Click to read the full text.

}, author = {Tara Nicola and Alexis Gable and Jennifer Ash} } @article {1325307, title = {Strategic Data Use in Higher Education: Using Data to Improve Postsecondary Success}, year = {2020}, abstract = {

Like many other elements of the American economy, higher education is working to realize the potential of sophisticated data analytics to inform and transform how it operates. In August 2019, the Association for Institutional Research (AIR), EDUCAUSE (the association of campus information technology professionals), and the National Association of College and University Business Officers (NACUBO) released a joint statement with the provocative title {\textquotedblleft}Analytics can save higher education. Really.{\textquotedblright} Its purpose was to inspire a sense of urgency and provide direction for higher education leaders to harness data as a strategic organizational asset. The statement features the following rationale for investment in data analytics:

{\textquotedblleft}We strongly believe that using data to better understand our students and our own operations paves the way to developing new, innovative approaches for improved student recruiting, better student outcomes, greater institutional efficiency and cost-containment, and much more.{\textquotedblright}

However, progress has been uneven, with some state higher education agencies, university and college systems, and individual institutions leading the way while many others struggle to adapt. Why?

The Strategic Data Project (SDP) at the Center for Education Policy Research at Harvard University has a ten-year track record of developing data capacity in state and local PK-12 agencies and organizations and interviewed 40 leaders and analysts at 29 institutions of higher education and postsecondary organizations to explore their data needs to understand why some colleges and university systems are excelling in using data and others have yet to fully realize the potential of their data to inform strategic decisions that transform student success in school and the workforce.

Our key finding is that the missing link is not in the technical infrastructure but in human capacity. If higher education is to take advantage of data analytics to improve student outcomes and increase organizational effectiveness, it will have to find better ways to attract, train, and retain strategic data professionals who can inform policy and practice.

Download the white paper.

}, author = {Strategic Data Project} } @article {1324966, title = {Can Video Technology Improve Teacher Evaluations? An Experimental Study}, journal = {The MIT Press Journals}, volume = {15}, year = {2020}, pages = {397-427}, abstract = {

Teacher evaluation reform has been among the most controversial education reforms in recent years. It also is one of the costliest in terms of the time teachers and principals must spend on classroom observations. We conducted a randomized field trial at four sites to evaluate whether substituting teacher-collected videos for in-person observations could improve the value of teacher observations for teachers, administrators, or students. Relative to teachers in the control group who participated in standard in-person observations, teachers in the video-based treatment group reported that post-observation meetings were more {\textquotedblleft}supportive{\textquotedblright} and they were more able to identify a specific practice they changed afterward. Treatment principals were able to shift their observation work to noninstructional times. The program also substantially increased teacher retention. Nevertheless, the intervention did not improve students{\textquoteright} academic achievement or self-reported classroom experiences, either in the year of the intervention or for the next cohort of students. Following from the literature on observation and feedback cycles in low-stakes settings, we hypothesize that to improve student outcomes schools may need to pair video feedback with more specific supports for desired changes in practice.

Click to read full text on MIT Press Journals

}, author = {Thomas J. Kane and David Blazar and Hunter Gehlbach and Miriam Greenberg} } @report {1144329, title = {Remedial Math Goes to High School: An Evaluation of the Tennessee SAILS Program}, year = {2018}, author = {Thomas J. Kane and Angela Boatman and Whitney Kozakowski and Chris Bennett and Rachel Hitch and Dana Weisenfeld} } @report {1144330, title = {Remedial Math Goes to High School: The Impact of the Tennessee SAILS Program}, year = {2018}, author = {Thomas J. Kane and Angela Boatman and Whitney Kozakowski and Chris Bennett and Rachel Hitch and Dana Weisenfeld} } @report {1070931, title = {Assessing the Impact of the Newark Education Reforms}, year = {2017}, institution = {Center for Education Policy Research at Harvard University}, abstract = {Aided by \$200 million in private philanthropy, city and state leaders launched a major school reform effort in Newark, New Jersey, starting in the 2011{\textendash}2012 school year. In a coinciding National Bureau of Economic Research (NBER) working paper, we assessed the impact of those reforms on student achievement growth, comparing students in Newark Public Schools (NPS) district and charter schools to students with similar prior achievement, similar demographics, and similar peers elsewhere in New Jersey. This report includes key findings.}, author = {Mark Chin and Thomas Kane and W. Kozakowski and Beth Schueler and Doug Staiger} } @report {918006, title = {Developing Common Core Classrooms Through Rubric-Based Coaching}, year = {2016}, month = {10/21}, institution = {Center for Education Policy Research at Harvard University}, abstract = {

The project team is\ still awaiting student test data to complete the\ evaluation, but this brief provides a short update on\ survey results. Students of MQI-coached teachers report that their teachers ask more substantive questions, and require more use of mathematical vocabulary as compared to students of control teachers.\ Students in MQI-coached classrooms also reported more student talk in class. Teachers who received MQI Coaching tended to find their professional development significantly more useful than control teachers,\ and were also more likely to report that their mathematics instruction improved over the course of the year.

}, author = {Heather C. Hill and Matthew A. Kraft and Corinne Herlihy} } @presentation {914001, title = {The Track Record of Charter Schools in Massachusetts}, year = {2016}, abstract = {

Against the backdrop of a contentious ballot question, charter schools in Massachusetts have faced scrutiny across multiple dimensions. This event brings together several of the preeminent researchers on the topic to share their findings, followed by a period of directed questions, and audience Q\&A.

[[{"fid":"730011","view_mode":"default","type":"media","attributes":{"height":"360","width":"640","class":"wysiwyg-placeholder media-element file-default"}}]]

} } @report {908611, title = {Let the Numbers Have Their Say: Evidence on Massachusetts{\textquoteright} Charter Schools}, year = {2016}, month = {09/27}, institution = {Center for Education Policy Research at Harvard University}, abstract = {

In Massachusetts, the charter school debate has centered\ on four concerns:

This report summarizes the evidence pertaining to these four concerns.

}, author = {Thomas J. Kane} } @report {841431, title = {DreamBox Learning Achievement Growth in the Howard County Public School System and Rocketship Education}, year = {2016}, month = {05/31}, institution = {Center for Education Policy at Harvard University} } @report {841701, title = {Findings from a National Study on Research Use Among School and District Leaders (Technical Report No. 1)}, year = {2016}, month = {05/13}, institution = {National Center for Research in Policy and Practice}, url = {http://ncrpp.org/assets/documents/NCRPP_Technical-Report-1.pdf} } @report {787641, title = {Achievement Network{\textquoteright}s Investing in Innovation Expansion: Impacts on Educator Practice and Student Achievement}, year = {2016}, month = {03/07}, abstract = {

Achievement Network (ANet) was founded in 2005 as a school-level intervention to support the use of academic content standards and assessments to improve teaching and learning. Initially developed within the Boston charter school sector, it has expanded to serve over 500 schools in nine geographic networks across the United States. The program is based on the belief that if teachers are provided with timely data on student performance from interim assessments tied to state standards, if school leaders provide support and create structures that help them use that data to identify student weaknesses, and if teachers have knowledge of how to improve the performance of students who are falling behind, then they will become more effective at identifying and addressing gaps in student learning. This will, in turn, improve student performance, particularly for high-need students.

In 2010, ANet received a development grant from the U.S. Department of Education{\textquoteright}s Investing in Innovation (i3) Program. The grant funded both the expansion of the program to serve up to 60 additional schools in five school districts, as well as an external evaluation of the expansion. The Center for Education Policy Research (CEPR) at Harvard University partnered with ANet to design a matched-pair, school-randomized evaluation of their program{\textquoteright}s impact on educator practice and student achievement in schools participating in its i3-funded expansion.

}, author = {Martin R. West and Beth A. Morton and Corinne M. Herlihy} } @presentation {781616, title = {Askwith Forum {\textendash} Teaching Higher: Educators{\textquoteright} Perspectives on Common Core Implementation}, year = {2016}, abstract = {

With the debate over the federal role in education at rest with the passage of the Every Student Succeeds Act (ESSA), it is time to refocus attention on how to help the states move forward and succeed using the Common Core State Standards (CCSS). In this Askwith Forum, Professor Thomas Kane will share findings about CCSS implementation strategies from the Center for Education Policy Research at Harvard University. This will be followed by a panel of educators, who will share their experiences, pain points, and successes with the CCSS over this past year.

[[{"fid":592481,"view_mode":"default","type":"media","attributes":{"height":"315","width":"560","class":"wysiwyg-placeholder media-element file-default"}}]]

Learn more about Teaching Higher.

}, author = {Thomas J. Kane} } @report {771361, title = {Teaching Higher: Educators{\textquoteright} Perspectives on Common Core Implementation}, year = {2016}, month = {02/15}, author = {Thomas J. Kane and Antoniya M. Owens and William H. Marinell and Daniel R. C. Thal and Douglas O. Staiger} } @article {770256, title = {Surprising Ripple Effects: How Changing the SAT Score-Sending Policy for Low-Income Students Impacts College Access and Success}, journal = {Education Evaluation and Policy Analysis}, year = {2016}, month = {09/01}, abstract = {

Subtle policy adjustments can induce relatively large {\textquotedblleft}ripple effects.{\textquotedblright} We evaluate a College Board\ initiative that increased the number of free SAT score reports available to low-income students and\ changed the time horizon for using these score reports. Using a difference-in-differences analytic\ strategy, we estimate that targeted students were roughly 10 percentage points more likely to send\ eight or more reports. The policy improved on-time college attendance and 6-year bachelor{\textquoteright}s completion\ by about 2 percentage points. Impacts were realized primarily by students who were competitive\ candidates for 4-year college admission. The bachelor{\textquoteright}s completion impacts are larger than\ would be expected based on the number of students driven by the policy change to enroll in college\ and to shift into more selective colleges. The unexplained portion of the completion effects may result\ from improvements in nonacademic fit between students and the postsecondary institutions in which\ they enroll.

}, url = {http://epa.sagepub.com/content/early/2016/08/31/0162373716665198.full.pdf+html}, author = {Michael Hurwitz and Preeya P. Mbekeani and Margaret Nipson and Lindsay C. Page} } @report {721651, title = {SDP College-Going Diagnostic for Wake County Public School System}, year = {2015}, month = {10/16}, institution = {Strategic Data Project. }, abstract = {

In 2011, the Strategic Data Project (SDP) began a partnership with the Wake County Public School System (WCPSS). As part of this partnership, SDP collaborated with WCPSS to analyze patterns of high school students{\textquoteright} on-track status, graduation, college enrollment, and college persistence. This set of high-leverage, policy-relevant analyses constitutes the SDP College-Going Diagnostic.

} } @report {716546, title = {The Best Foot Forward Project: Substituting Teacher-Collected Video for In-Person Classroom Observations}, year = {2015}, month = {10/05}, author = {Thomas J. Kane and Hunter Gehlbach and Miriam Greenberg and David Quinn and Daniel Thal} } @report {716556, title = {Best Foot Forward: Video Observation Toolkit}, year = {2015}, month = {10/05}, abstract = {

This toolkit provides practical guidance for education practitioners on using video observations\ to help teachers accelerate their development.\ Inside you will find four sections to help you start video observations in your school community. Each section includes a\ discussion of important lessons from the Best Foot Forward project, a study of digital video\ in classroom observations, and adaptable tools for implementation.

}, url = {http://cepr.harvard.edu/video-observation-toolkit} } @report {716551, title = {Effects of a Video-Based Teacher Observation Program on the De-privatization of Instruction: Evidence from a Randomized Experiment}, year = {2015}, month = {10/05}, author = {David M. Quinn and Thomas J. Kane and Miriam Greenberg and Daniel Thal} } @report {679006, title = {SDP College-Going Diagnostic for Kentucky}, year = {2015}, month = {08/17}, institution = {Strategic Data Project, Center for Education Policy Research at Harvard University.}, abstract = {

The Strategic Data Project (SDP) and the Kentucky Department of Education (KDE) collaborated on the SDP College-Going Diagnostic{\textemdash}a set of policy-relevant analyses that track Kentucky public school students{\textquoteright} graduation from high school through enrollment and persistence in college. This interactive report highlights the key findings from this research collaboration and is designed to facilitate exploration across important student characteristics.

}, url = {http://sdp.cepr.harvard.edu/kentucky-college-going} } @article {639496, title = {Effective teaching in elementary mathematics: Identifying classroom practices that support student achievement}, journal = {Economics of Education Review}, volume = {48}, year = {2015}, pages = {16-29}, abstract = {

Recent investigations into the education production function have moved beyond traditional teacher inputs, such as education, certification, and salary, focusing instead on observational measures of teaching practice. However, challenges to identification mean that this work has yet to coalesce around specific instructional dimensions that increase student achievement. I build on this discussion by exploiting within-school, between-grade, and cross-cohort variation in scores from two observation instruments; further, I condition on a uniquely rich set of teacher characteristics, practices, and skills. Findings indicate that inquiry-oriented instruction positively predicts student achievement. Content errors and imprecisions are negatively related, though these estimates are sensitive to the set of covariates included in the model. Two other dimensions of instruction, classroom emotional support and classroom organization, are not related to this outcome. Findings can inform recruitment and development efforts aimed at improving the quality of the teacher workforce.\ 

}, url = {http://www.sciencedirect.com/science/article/pii/S0272775715000710}, author = {David Blazar} } @report {631486, title = {Research Project Overview: The Study of Pre-College Math Remediation Programs in Tennessee}, year = {2015}, month = {07/01}, institution = {Center for Education Policy Research at Harvard University. }, abstract = {

In an effort to promote college enrollment and degree completion, the state of Tennessee has invested in a student-centric, technology-based blended learning model of high school mathematics instruction, The Seamless Alignment and Integrated Learning Support (SAILS).

The SAILS program provides high school seniors likely to require math remediation in college with coursework equivalent to college-level developmental education classes. Eligible students who complete the program are able to satisfy math requirements for high school graduation and, upon postsecondary matriculation, to enroll directly in credit-bearing coursework toward a college degree.

Researchers at the Center for Education Policy Research at Harvard University and Vanderbilt Peabody College of Education are partnering with the SAILS Program and Tennessee state leadership to conduct an evaluation of SAILS. Using a range of quantitative and qualitative research methods, the study will examine the impact of participation in SAILS on students{\textquoteright} short- and long-term outcomes and investigate the mechanisms by which the program may promote students{\textquoteright} postsecondary success.

} } @presentation {618981, title = {Best Foot Forward Project: Research Findings from Year 1}, year = {2015}, month = {06/24}, author = {Miriam Greenberg} } @report {592576, title = {Changing the Culture of Data Use in Delaware: How State Leaders Used Analytics to Create Education Policies That Matter}, year = {2015}, month = {04/19}, institution = {Strategic Data Project}, abstract = {

This case illustrates how the work of leaders and analysts in the Delaware Department of Education (DDOE) and the agency{\textquoteright}s partnership with the Strategic Data Project (SDP), a program of the Center for Education Policy Research at Harvard University, created momentum for statewide policy change. \ By exploring Delaware leaders{\textquoteright} use of data and analytics to challenge assumptions and inform the development of better policies and practices, the case illustrates the importance of leadership, analytic and technical competency, and strategic partnerships when leading education reform. \ The case specifically highlights the power of human capital analytics to diagnose the current status of Delaware{\textquoteright}s educator pipeline, from preparation through development and retention, and how effectively communicating with these analyses built coalitions of support and drove a culture of data use at both the state and district level.
Download the case study [SDP website]

}, url = {https://sdp.cepr.harvard.edu/files/sdp/files/sdp-diagnostic-educator-delaware_01.pdf} } @report {587961, title = {Teacher Characteristics and Student Learning: Toward a More Comprehensive Examination of the Association}, year = {2015}, note = {

Presented at the 2015 American Education Research Association (AERA) Annual Meeting, Chicago, IL.

}, author = {Heather C. Hill and Charalambos Y. Charalambous and Mark Chin} } @report {587966, title = {Teachers{\textquoteright} Knowledge of Students: Defining a Domain}, year = {2015}, note = {

Presented at the 2015 American Educational Research Association annual meeting.\ 

}, author = {Heather C. Hill and Mark Chin and David Blazar} } @report {587726, title = {Attending to General and Content-Specific Dimensions of Teaching: Exploring Factors Across Two Observation Instruments}, year = {2015}, abstract = {

New observation instruments used in research and evaluation settings assess teachers along multiple domains of teaching practice, both general and content-specific. However, this work infrequently explores the relationship between these domains. In this study, we use exploratory and confirmatory factor analyses of two observation instruments - the Classroom Assessment Scoring System (CLASS) and the Mathematical Quality of Instruction (MQI) - to explore the extent to which we might integrate both general and content-specific views of teaching. Importantly, bi-factor analyses that account for instrument-specific variation enable more robust conclusions than in existing literature. Findings indicate that there is some overlap between instruments, but that the best factor structures include both general and content-specific practices. This suggests new approaches to measuring mathematics instruction for the purposes of evaluation and professional development.\ 

}, author = {David Blazar and David Braslow and Charalambos Charalambous and Heather C. Hill} } @report {587336, title = {SDP College-Going Diagnostic for the New York State Education Department}, year = {2015}, month = {02/02}, institution = {Strategic Data Project}, abstract = {

The Strategic Data Project (SDP) designed the College-Going Diagnostic to inform state and district leaders about high school graduation, college enrollment, and persistence rates; and to identify potential areas for action to increase student achievement in high school, preparedness for college, and postsecondary attainment. In 2012, the New York State Education Department (NYSED) and SDP launched an initiative to assess student attainment in New York State public schools, and identified a set of questions to understand how first-time ninth-grade students in New York State public schools progress through high school and into college. The interactive graphics supporting the findings in this brief are designed to promote deeper engagement with the analysis through data exploration.
View the interactive report on the SDP website

} } @report {584331, title = {SDP College-Going Diagnostic for Cleveland Metropolitan School District}, year = {2015}, month = {04/09}, institution = {Strategic Data Project}, abstract = {

The Strategic Data Project (SDP) partnered with the Cleveland Metropolitan School District (CMSD) to expand the use of data to inform policy and management decisions within the district. As part of this partnership, SDP collaborated with CMSD to analyze the high school graduation and college-going outcomes of CMSD students.

} } @report {584341, title = {SDP College-Going Diagnostic for Tennessee}, year = {2015}, month = {02/12}, institution = {Strategic Data Project}, abstract = {

SDP launched a College-Going Diagnostic research collaboration with the Tennessee Department of Education (TDOE) as part of a larger partnership between the two organizations. In defining the scope of work for this project, TDOE policymakers were particularly interested in investigating how students{\textquoteright} transition from high school to postsecondary education differed across schools, regions, and student subgroups. The SDP College-Going Diagnostic examines the extent to which Tennessee high school students faced specific barriers to postsecondary enrollment, such as inadequate academic preparation for college-level coursework and limited college access.

} } @report {584326, title = {SDP Educator Diagnostic for Delaware Department of Education}, year = {2015}, month = {04/10}, institution = {Strategic Data Project}, abstract = {

The Strategic Data Project (SDP) collaborated with the state of Delaware to illuminate patterns related to three critical areas of policy focus for the state: the recruitment, placement, and success of new and early career teachers; teacher impact on student learning; and teacher retention and the stability of the state{\textquoteright}s teacher workforce.

} } @report {584406, title = {SDP Human Capital Diagnostic for Colorado}, year = {2015}, month = {01/30}, institution = {Strategic Data Project}, abstract = {

The Strategic Data Project (SDP) collaborated with the Colorado Department of Education (CDE) and the Colorado Education Initiative (CEI) to conduct SDP{\textquoteright}s Human Capital Diagnostic{\textemdash}a series of high leverage, policy-relevant analyses related to the state{\textquoteright}s educator workforce. SDP{\textquoteright}s Human Capital Diagnostic investigates questions on five critical topics related to teachers and teacher effectiveness: recruitment, placement, development, evaluation, and retention.

} } @report {584366, title = {SDP Key Findings Report for Colorado: Mathematics Teacher Placement Patterns}, year = {2015}, month = {01/30}, institution = {Strategic Data Project}, abstract = {

The Strategic Data Project (SDP) partnered with the Colorado Department of Education (CDE) and the Colorado Education Initiative (CEI) to investigate whether Colorado public school students who are academically behind their peers are disproportionately placed with novice teachers.

} } @report {584346, title = {SDP Key Findings Report for Colorado: Students{\textquoteright} Progression from Ninth Grade to College, by Prior Academic Achievement}, year = {2015}, month = {01/30}, institution = {Strategic Data Project}, abstract = {

The Strategic Data Project (SDP) partnered with the Colorado Department of Education(CDE) and the Colorado Education Initiative (CEI) to investigate statewide trends in students{\textquoteright} high school graduation and their enrollment and persistence in college. This brief summarizes several of the key findings from this research collaboration.

} } @report {592581, title = {SDP Strategic Use of Data Rubric}, year = {2014}, institution = {Strategic Data Project}, address = {Cambridge, MA}, abstract = {

The SDP Strategic Use of Data Rubric is a resource to provide direction and support to education organizations in their efforts to transform their use of data. The rubric establishes a common language and framework to more clearly illustrate what effective data use at the system level can look like.
Learn more and download the rubric [SDP website]

} } @article {587981, title = {Approximate measurement invariance in cross-classified rater-mediated assessments}, journal = {Frontiers in Psychology}, volume = {5}, year = {2014}, abstract = {

An important assumption underlying meaningful comparisons of scores in rater-mediated assessments is that measurement is commensurate across raters. When raters differentially apply the standards established by an instrument, scores from different raters are on fundamentally different scales and no longer preserve a common meaning and basis for comparison. In this study, we developed a method to accommodate measurement noninvariance across raters when measurements are cross-classified within two distinct hierarchical units. We conceptualized random item effects cross-classified graded response models and used random discrimination and threshold effects to test, calibrate, and account for measurement noninvariance among raters. By leveraging empirical estimates of rater-specific deviations in the discrimination and threshold parameters, the proposed method allows us to identify noninvariant items and empirically estimate and directly adjust for this noninvariance within a cross-classified framework. Within the context of teaching evaluations, the results of a case study suggested substantial noninvariance across raters and that establishing an approximately invariant scale through random item effects improves model fit and predictive validity.

}, url = {http://www.ncbi.nlm.nih.gov/pmc/articles/PMC4274900/}, author = {Benjamin Kelcey and Heather C. Hill and Dan McGinn} } @report {587971, title = {Year-to-Year Stability in Measures of Teachers and Teaching}, year = {2014}, note = {

Presented at the 2014 American Education Research Association (AERA) Annual Meeting

}, author = {Heather C. Hill and Mark Chin} } @report {586846, title = {NCTE Newsletter - Fall 2014 [AUDIO]}, year = {2014}, month = {09/01}, institution = {National Center for Teacher Effectiveness}, abstract = {

In this newsletter we highlight a new NCTE study,\ Developing Common Core Classrooms Through Rubric-Based Coaching,\ and interview one of the Co-Principal Investigators, Dr. Matthew Kraft.\ 

This newsletter is part of a series originally intended for NCTE study participants.\ 

Listen to the full interview here:

[[{"fid":"401151","view_mode":"default","type":"media","attributes":{"width":"166","class":"wysiwyg-placeholder media-element file-default"}}]]

} } @report {585811, title = {SDP College-Going Diagnostic for Albuquerque Public Schools}, year = {2014}, month = {05/05}, institution = {Strategic Data Project}, abstract = {

This report examines Albuquerque Public Schools (APS) students{\textquoteright} high school performance, college enrollment, and college persistence patterns, and compares these patterns across a variety of student characteristics and academic experiences. To conduct the analyses, researchers connected APS administrative student data (including demographics and test scores) to college enrollment records and to student surveys conducted by SDP Data Fellows at several APS high schools. These data sources allowed the diagnostic to track students{\textquoteright} progress through high school to graduation, and to examine their college-going aspirations and actual college outcomes.

} } @report {585906, title = {SDP Course-Grades Diagnostic for Denver Public Schools}, year = {2014}, month = {04/30}, institution = {Strategic Data Project}, abstract = {

In the fall of the 2012{\textendash}13 school year, the Denver Public Schools and SDP engaged in a research collaboration aimed at providing the district with preliminary evidence that could inform some of these questions, potentially help refine existing reform efforts, and motivate support for future initiatives, such as establishing consistent grading policies rooted in new Common Core standards. This report presents findings from our research diagnostic, illuminating the extent to which average GPAs, standardized assessment scores, and college remediation rates vary across the district and within DPS high schools; how GPAs differ for students from different racial/ethnic backgrounds; and whether knowing a student{\textquoteright}s middle school helps predict her subsequent success.

} } @report {585916, title = {SDP Human Capital Diagnostic for Albuquerque Public Schools}, year = {2014}, month = {05/05}, institution = {Strategic Data Project}, abstract = {

The Human Capital Diagnostic is the result of a partnership between SDP and APS designed to bring data to bear on policy and management decisions to improve student outcomes. Researchers connected student demographics and test scores to teacher human resource data and calculated objective measures of teacher effectiveness that are linked to teacher characteristics. The diagnostic analyses leverage these measures of effectiveness to explore their relationship with characteristics of teachers, schools, and students. They are not intended to draw conclusions about the contribution made by any individual teacher but rather to understand overall district trends.

} } @report {584416, title = {SDP College-Going Diagnostic for Colorado}, year = {2014}, month = {10/21}, institution = {Strategic Data Project}, abstract = {

The Strategic Data Project (SDP) partnered with the Colorado Department of Education (CDE) and the Colorado Education Initiative (CEI) to produce the SDP College-Going Diagnostic. The diagnostic analyses focus on describing college enrollment and persistence rates of high school graduates across Colorado; describing patterns in students{\textquoteright} participation in college-level coursework (in particular, AP classes and concurrent enrollment participation) in high school; and investigating the extent to which students{\textquoteright} participation in AP classes and concurrent enrollment programs is associated with persistence at both two-year and four-year colleges.

} } @report {584426, title = {SDP College-Going Diagnostic for Delaware}, year = {2014}, month = {08/13}, institution = {Strategic Data Project}, abstract = {

The Delaware Department of Education collaborated with SDP to produce the SDP College-Going Diagnostic. The diagnostic analyses examine students{\textquoteright} high school performance, college enrollment, and college persistence patterns, and compares these patterns across a variety of student characteristics and academic experiences.

} } @report {584431, title = {SDP College-Going Diagnostic for Massachusetts}, year = {2014}, month = {07/16}, institution = {Strategic Data Project}, abstract = {

The Massachusetts Department of Elementary and Secondary Education collaborated with the Strategic Data Project (SDP) to conduct the SDP College-Going Diagnostic.\ This report examines key findings related to students{\textquoteright} college enrollment and college persistence patterns, and compares these patterns across a variety of student characteristics and academic experiences. The report highlights results primarily at the state level and, for illustrative purposes, for a handful of school districts and high schools.

} } @report {584411, title = {SDP Human Capital Diagnostic for Denver Public Schools}, year = {2014}, month = {11/06}, institution = {Strategic Data Project}, abstract = {

Denver Public Schools (DPS) collaborated with the Strategic Data Project to conduct the SDP Human Capital Diagnostic. The analysis focused on teacher recruitment, placement, development, evaluation/compensation, and retention/turnover. Because of the uniqueness of the data from DPS{\textquoteright} ProComp teacher compensation system and its relevance to current policy discussions, we describe the evaluation/compensation analyses in depth and summarize the key findings from the other components of the diagnostic. These analyses have the potential to inform important education policy both in Denver and across the nation as education agencies consider revising traditional {\textquotedblleft}lockstep{\textquotedblright} pay systems.

} } @report {584421, title = {SDP Human Capital Diagnostic for the New York State Education Department}, year = {2014}, month = {10/20}, institution = {Strategic Data Project}, abstract = {

The New York State Education Department collaborated with SDP to produce the SDP Human Capital Diagnostic. The diagnostic is designed to identify patterns of teacher effectiveness and areas for policy change that could leverage teacher effectiveness to improve student achievement. It is also intended to demonstrate how education agencies can capitalize on existing data to understand its current performance, set future goals, and strategically plan responses.

} } @article {576721, title = {Can Professional Environments in Schools Promote Teacher Development? Explaining Heterogeneity in Returns to Teaching Experience}, journal = {Educational Evaluation and Policy Analysis}, volume = {36}, year = {2014}, pages = {476-500}, abstract = {

Although wide variation in teacher effectiveness is well established, much less is known about differences in teacher improvement over time. We document that average returns to teaching experience mask large variation across individual teachers, and across groups of teachers working in different schools. We examine the role of school context in explaining these differences using a measure of the professional environment constructed from teachers{\textquoteright} responses to state-wide surveys. Our analyses show that teachers working in more supportive professional environments improve their effectiveness more over time than teachers working in less supportive contexts. On average, teachers working in schools at the 75th percentile of professional environment ratings improved 38\% more than teachers in schools at the 25th percentile after ten years.

}, url = {http://epa.sagepub.com/content/36/4/476.abstract}, author = {Matthew A. Kraft and John P. Papay} } @report {576786, title = {Predictors of Teachers{\textquoteright} Instructional Practices}, year = {2014}, note = {

Manuscript in preparation for the American Educational Research Association annual meeting

}, abstract = {

We extend this line of research by investigating teacher career and background characteristics, personal resources, and school and district resources that predict an array of instructional practices identified on a mathematics-specific observational instrument, MQI, and a general instrument, CLASS. To understand these relationships, we use correlation and regression analyses. For a subset of teachers for whom we have data from multiple school years, we exploit within-teacher, cross-year variation to examine the relationship between class composition and instructional quality that is not confounded with the sorting of "better" students to "better" teachers. We conclude that multiple teacher- and school-level characteristics--rather than a single factor--are related to teachers{\textquoteright} classroom practices.

}, author = {David Blazar and Claire Gogolen and Heather C. Hill and Andrea Humez and Kathleen Lynch} } @report {576831, title = {Promise and Paradox: Measuring Students{\textquoteright} Non-cognitive Skills and the Impact of Schooling}, year = {2014}, month = {03/01}, abstract = {

The authors used self-report surveys to gather information on a broad set of non-cognitive skills from 1,368 eighth-grade students attending Boston Public Schools and linked this information to administrative data on their demographics and test scores. At the student level, scales measuring conscientiousness, self-control, grit, and growth mindset are positively correlated with attendance, behavior, and test-score gains between fourth- and eighth-grade. Conscientiousness, self-control, and grit are unrelated to test-score gains at the school level, however, and students attending over-subscribed charter schools with higher average test-score gains score lower on these scales than do students attending district schools. Exploiting charter school admissions lotteries, the authors replicate previous findings indicating positive impacts of charter school attendance on math achievement, but find negative impacts on these non-cognitive skills. The authors provide suggestive evidence that these paradoxical results are driven by reference bias, or the tendency for survey responses to be influenced by social context. The results therefore highlight the importance of improved measurement of non-cognitive skills in order to capitalize on their promise as a tool to inform education practice and policy.

}, author = {Martin R. West and Matthew A. Kraft and Amy S. Finn and Angela L. Duckworth and Christopher F.O. Gabrieli and John D. E. Gabrieli} } @report {576946, title = {Using Surveys as Proxies for Observations in Measuring Mathematics Instruction}, year = {2014}, note = {

Manuscript in preparation, National Council on Measurement in Education Annual Meeting

}, abstract = {

Using data from elementary mathematics teachers, we examine the correspondence between self-reports and observational measures of two instructional dimensions--reform-orientation and classroom climate--and the relative ability of these measures to predict teachers{\textquoteright} contributions to student learning.

}, author = {David Braslow and Andrea Humez} } @proceedings {574296, title = {Lessons Learned from Instruction | Results from a Study of Upper-Elementary Mathematics Classrooms}, year = {2014}, abstract = {

While research has generated substantial information regarding the characteristics of effective mathematics teachers and classrooms, scholars have rarely tested multiple aspects of teachers or teaching within a single study. Without testing multiple variables simultaneously, it is difficult to identify specific aspects of mathematics teachers and teaching that may be particularly impactful on student learning, and to understand the degree to which these characteristics are related to one another. This plenary draws on data from a three-year study measuring multiple components of teacher and teaching quality to investigate these issues.

}, author = {Heather Hill} } @report {592586, title = {SDP Toolkit for Effective Data Use}, year = {2013}, month = {10/09}, institution = {Strategic Data Project}, abstract = {

The SDP Toolkit for Effective Data Use is a resource guide for education agency analysts who collect and analyze data on student achievement. Completing the toolkit produces a set of basic, yet essential, human capital and college-going analyses that every education agency should have as a foundation to inform strategic management and policy decisions.

}, url = {http://sdp.cepr.harvard.edu/toolkit-effective-data-use} } @report {590551, title = {SDP Summer Melt Handbook: A Guide to Investigating and Responding to Summer Melt}, year = {2013}, month = {07/16}, institution = {Strategic Data Project}, abstract = {

The SDP Summer Melt Handbook is a resource for education leaders interested in examining whether summer melt is occurring in their agency. The handbook not only serves to diagnose the phenomenon, but also helps leaders understand what they can do to address it.
Learn more about the Summer Melt Handbook on the SDP Website

}, author = {Benjamin L. Castleman and Lindsay C. Page and Ashley L. Snowdon} } @report {585826, title = {SDP College-Going Diagnostic for Boston Public Schools}, year = {2013}, month = {09/16}, institution = {Strategic Data Project}, abstract = {

Boston Public Schools collaborated with SDP to produce the SDP College-Going Diagnostic for its district. The diagnostic is designed to identify potential areas for action to increase students{\textquoteright} levels of academic achievement, preparedness for college, and postsecondary attainment. It is also intended to demonstrate how districts can capitalize on existing data to understand its current performance, set future goals, and strategically plan responses.

} } @report {585861, title = {SDP College-Going Diagnostic for The School District of Philadelphia}, year = {2013}, month = {10/11}, institution = {Strategic Data Project}, abstract = {

The School District of Philadelphia partnered with SDP to produce the SDP College-Going Diagnostic. The diagnostic analyses summarized in this report focus on 1) student performance in the district during high school and into college, 2) critical junctures along the way that affect student success, and 3) student characteristics and other factors that are most strongly related to college enrollment and persistence.

} } @report {585901, title = {SDP College-Readiness Diagnostic for Los Angeles Unified School District}, year = {2013}, month = {04/08}, institution = {Strategic Data Project}, abstract = {

Los Angeles Unified School District (LAUSD) partnered with SDP to produce the SDP College-Readiness Diagnostic for its district. The diagnostic analyses focus on 1) how students across the district progress toward high school graduation, 2) whether and how students who fall off track for graduation recover and go on to graduate, and 3) the progress of students toward the completion of A-G requirements.

} } @report {585936, title = {SDP Human Capital Diagnostic for Boston Public Schools}, year = {2013}, month = {08/30}, institution = {Strategic Data Project}, abstract = {

Boston Public Schools collaborated with SDP to produce the SDP Human Capital Diagnostic for its district. The diagnostic is designed to identify patterns of teacher effectiveness and areas for policy change that could leverage teacher effectiveness to improve student achievement. It is also intended to demonstrate how districts can capitalize on existing data to understand its current performance, set future goals, and strategically plan responses.

} } @conference {576861, title = {Examining High and Low Value-Added Mathematics: Can Expert Observers Tell the Difference?}, booktitle = {Association for Public Policy Analysis \& Management Fall Research Conference}, year = {2013}, month = {November 7, 2013}, address = {Washington, DC}, abstract = {

In this study, we use value-added scores and video data in order to mount an exploratory study of high- and low-VAM teachers{\textquoteright} instruction. Specifically, we seek to answer two research questions: First, can expert observers of mathematics instruction distinguish between high- and low-VAM teachers solely by observing their instruction? Second, what instructional practices, if any, consistently characterize high but not low-VAM teacher classrooms? To answer these questions, we use data generated by 250 fourth- and fifth-grade math teachers and their students in four large public school districts. Preliminary analyses indicate that a teacher{\textquoteright}s value-added rank was often not obvious to this team of expert observers.

}, author = {Heather C. Hill and Claire Gogolen and Erica Litke and Andrea Humez and David Blazar and Douglas Corey and Johanna Barmore and Mark Chin and Mary Beisiegel and Sara Roesler and Lucas Salzman and David Braslow and Samantha Rabinowicz} } @report {576781, title = {Graduate Exit Survey Toolkit}, year = {2013}, institution = {Strategic Data Project}, abstract = {

This toolkit provides useful resources for designing and rolling out a high school graduate exit survey, as well as effectively analyzing survey results in a school district. Anyone who is interested in implementing a high school exit survey, reworking a current exit survey, or effectively analyzing survey results in a school district can leverage this resource.

Learn more about the [Graduate Exit Survey].

} } @conference {576941, title = {How Well Do Teacher Observations Predict Value-Added? Exploring Variability Across Districts}, booktitle = {Association for Public Policy Analysis \& Management Fall Research Conference}, year = {2013}, month = {November 7, 2013}, address = {Washington, DC}, abstract = {

In this study we ask: Do observational instruments predict teachers{\textquoteright} value-added equally well across different state tests and district/state contexts? And, to what extent are differences in these correlations a function of the match between the observation instrument and tested content? We use data from the Gates Foundation-funded Measures of Effective Teaching (MET) Project (N=1,333) study of elementary and middle school teachers from six large public school districts, and from a smaller (N=250) study of fourth- and fifth-grade math teachers from four large public school districts. Early results indicate that estimates of the relationship between teachers{\textquoteright} value-added scores and their observed classroom instructional quality differ considerably by district.

}, author = {Kathleen Lynch and Mark Chin and David Blazar} } @article {576771, title = {Learning from Teacher Observations: Challenges and Opportunities Posed by New Teacher Evaluation Systems}, journal = {Harvard Educational Review}, year = {2013}, abstract = {

In this article, Heather Hill and Pam Grossman discuss the current focus on using teacher observation instruments as part of new teacher evaluation systems being considered and implemented by states and districts. They argue that if these teacher observation instruments are to achieve the goal of supporting teachers in improving instructional practice, they must be subject-specific, involve content experts in the process of observation, and provide information that is both accurate and useful for teachers. They discuss the instruments themselves, raters and system design, and timing of and feedback from the observations. They conclude by outlining the challenges that policy makers face in designing observation systems that will work to improve instructional practice at scale.

}, author = {Heather C. Hill and Pam Grossman} } @report {576896, title = {Using Validity Criteria to Enable Model Selection: An Exploratory Analysis}, year = {2013}, month = {11/07}, abstract = {

In this paper, the authors propose that an important determinant of value-added model choice should be alignment with alternative indicators of teacher and teaching quality. Such alignment makes sense from a theoretical perspective because better alignment is thought to indicate more valid systems. To provide initial evidence on this issue,\ they first calculated value-added scores for all fourth and fifth grade teachers within four districts, then extracted scores for 160 intensively studied teachers. Initial analyses using a subset of alternative indicators suggest that alignment between value-added scores and alternative indicators differ by model, though not significantly.

}, author = {Mark Chin and Heather Hill and Dan McGinn and Douglas Staiger and Katie Buckley} } @report {601516, title = {Technical Report: Creation and Dissemination of Upper-Elementary Mathematics Assessment Modules}, year = {2012}, abstract = {

This document provides information on the grades 4 and 5 mathematics test modules developed as a joint Harvard/ETS venture from 2009 to 2012. Hundreds of items that were initially developed were then culled by a series of reviews, revisions, pilot tests, cognitive interviews, and field tests.\ 

}, author = {Judy J. Hickman and Jianbin Fu and Heather C. Hill} } @report {585841, title = {SDP College-Going Diagnostic for Gwinnett County Public Schools}, year = {2012}, month = {08/09}, institution = {Strategic Data Project}, abstract = {

Gwinnett County Public Schools worked with SDP to create the SDP College-Going Diagnostic for its district. The diagnostic is designed to identify potential areas for action to increase students{\textquoteright} levels of academic achievement, preparedness for college, and postsecondary attainment. It is also intended to demonstrate how districts can capitalize on existing data to understand its current performance, set future goals, and strategically plan responses.

} } @report {585956, title = {SDP Human Capital Diagnostic for Gwinnett County Public Schools}, year = {2012}, month = {08/09}, institution = {Strategic Data Project}, abstract = {

Gwinnett County Public Schools worked with SDP to produce the SDP Human Capital Diagnostic for its district. The diagnostic is designed to identify patterns of teacher effectiveness and areas for policy change that could leverage teacher effectiveness to improve student achievement. It is also intended to demonstrate how districts can capitalize on existing data to understand its current performance, set future goals, and strategically plan responses.

} } @report {585966, title = {SDP Human Capital Diagnostic for Los Angeles Unified School District}, year = {2012}, month = {12/14}, institution = {Strategic Data Project}, abstract = {

Los Angeles Unified School District (LAUSD) partnered with SDP to produce the SDP Human Capital Diagnostic for its district. The diagnostic is designed to identify patterns of teacher effectiveness and areas for policy change that could leverage teacher effectiveness to improve student achievement. It is also intended to demonstrate how districts can capitalize on existing data to understand its current performance, set future goals, and strategically plan responses.

} } @case {576746, title = {Increasing College-Going Rates in Fulton County Schools: A Summer Intervention Based on the Strategic Use of Data}, year = {2012}, abstract = {

This case study, published by Harvard Education Press, describes how to use data to challenge assumptions, reveal student needs, address these needs programmatically, and evaluate results. It shows a team of data specialists and educators working together, across institutional and departmental boundaries, to determine why some high school seniors who intend to go to college after graduation do not enroll in the fall. Together, they develop, implement, and evaluate a summer counseling intervention program called Summer PACE to ensure that more students enroll seamlessly in college.

}, url = {http://www.hepg.org/hep/case/98/IncreasingCollegeGoingRatesInFultonCountySchools}, author = {Lynn Jenkins and Michelle Wisdom and Sarah Glover} } @article {576766, title = {Knowledge, Tests, and Fadeout in Educational Interventions}, year = {2012}, abstract = {

Educational interventions are often evaluated and compared on the basis of their impacts on test scores. Decades of research have produced two empirical regularities: interventions in later grades tend to have smaller effects than the same interventions in earlier grades, and the test score impacts of early educational interventions almost universally {\textquotedblleft}fade out{\textquotedblright} over time. This paper explores whether these empirical regularities are an artifact of the common practice of rescaling test scores in terms of a student{\textquoteright}s position in a widening distribution of knowledge. If a standard deviation in test scores in later grades translates into a larger difference in knowledge, an intervention{\textquoteright}s effect on normalized test scores may fall even as its effect on knowledge does not. We evaluate this hypothesis by fitting a model of education production to correlations in test scores across grades and with college-going using both administrative and survey data. Our results imply that the variance in knowledge does indeed rise as children progress through school, but not enough for test score normalization to fully explain these empirical regularities.

}, url = {http://www.nber.org/papers/w18038}, author = {Elizabeth U. Cascio and Douglas O. Staiger} } @article {576966, title = {Validating Arguments for Observational Instruments: Attending to Multiple Sources of Variation}, journal = {Educational Assessment}, volume = {17}, year = {2012}, pages = {1--19}, abstract = {

Measurement scholars have recently constructed validity arguments in support of a variety of educational assessments, including classroom observation instruments. In this article, we note that users must examine the robustness of validity arguments to variation in the implementation of these instruments. We illustrate how such an analysis might be used to assess a validity argument constructed for the Mathematical Quality of Instruction instrument, focusing in particular on the effects of varying the rater pool, subject matter content, observation procedure, and district context. Variation in the subject matter content of lessons did not affect rater agreement with master scores, but the evaluation of other portions of the validity argument varied according to the composition of the rater pool, observation procedure, and district context. These results demonstrate the need for conducting such analyses, especially for classroom observation instruments that are subject to multiple sources of variation.

}, author = {Heather C. Hill and Charalambos Y. Charalambous and David Blazar and Daniel McGinn and Matthew A. Kraft and Mary Beisiegel and Andrea Humez and Erica Litke and Katherine Lynch} } @report {557666, title = {Are Practice-Based Teacher Evaluations and Teacher Effectiveness Linked in TNTP{\textquoteright}s "Performance Assessment System (PAS)"?}, year = {2012}, month = {06/19}, institution = {Center for Education Policy Research at Harvard University}, abstract = {

The CEPR report, {\textquotedblleft}Are Practice-Based Teacher Evaluations and Teacher Effectiveness Linked in TNTP{\textquoteright}s Performance Assessment System (PAS)?{\textquotedblright} examines the evaluation system for first-year Louisiana teachers trained by TNTP, a national nonprofit organization focused on improving teacher performance.\  The authors conclude that there is a modest positive relationship between teachers{\textquoteright} PAS scores and actual student achievement growth in math and reading.\  The analysis also suggests that, with some technical improvements, the PAS could become an even better predictor of student academic outcomes.

}, author = {John H. Tyler and Brian A. Jacob and Shaun M. Dougherty and Havala J. Hanson and Jon B. Fullerton and Corinne M. Herlihy} } @report {587501, title = {SDP College-Going Diagnostic for Fulton County Schools}, year = {2011}, month = {05/15}, institution = {Strategic Data Project}, abstract = {

Fulton County Schools (FCS) partnered with SDP to produce the SDP College-Going and Human Capital Diagnostic for its district. The diagnostics are meant to demonstrate how districts can capitalize on existing data to understand its current performance, set future goals, and strategically plan responses. \ The College-Going Diagnostic report illuminates students{\textquoteright} enrollment over time and compares these patterns across a variety of student characteristics and academic experiences. \ The Human Capital Diagnostic report investigates teacher effectiveness with the intention of informing district leaders about patterns of teacher effectiveness and identifying areas for policy change that could leverage teacher effectiveness to improve student achievement.

} } @report {587506, title = {SDP Human Capital Diagnostic for Fulton County Schools}, year = {2011}, month = {12/15}, institution = {Strategic Data Project}, abstract = {

Fulton County Schools (FCS) partnered with SDP to produce the SDP College-Going and Human Capital Diagnostic for its district. The diagnostics are meant to demonstrate how districts can capitalize on existing data to understand its current performance, set future goals, and strategically plan responses. \ The College-Going Diagnostic report illuminates students{\textquoteright} enrollment over time and compares these patterns across a variety of student characteristics and academic experiences. \ The Human Capital Diagnostic report investigates teacher effectiveness with the intention of informing district leaders about patterns of teacher effectiveness and identifying areas for policy change that could leverage teacher effectiveness to improve student achievement.

} } @report {585891, title = {SDP College-Going Diagnostic for Fort Worth Independent School District}, year = {2011}, month = {12/1}, institution = {Strategic Data Project}, abstract = {

Fort Worth Independent School District (FWISD) collaborated with SDP to create the SDP College-Going Diagnostic to examine the district{\textquoteright}s college-going enrollment and persistence rates. \ The diagnostic is designed to identify potential areas for action to increase students{\textquoteright} levels of academic achievement, preparedness for college, and postsecondary attainment.

} } @article {576761, title = {Can You Recognize an Effective Teacher When You Recruit One?}, journal = {Education Finance and Policy}, volume = {6}, year = {2011}, pages = {43--74}, abstract = {

The authors administered an in-depth survey to new math teachers in New York City and collected information on a number of non-traditional predictors of effectiveness: teaching specific content knowledge, cognitive ability, personality traits, feelings of self-efficacy, and scores on a commercially available teacher selection instrument. They find that a number of these predictors have statistically and economically significant relationships with student and teacher outcomes. The authors conclude that, while there may be no single factor that can predict success in teaching, using a broad set of measures can help schools improve the quality of their teachers.

}, url = {http://www.mitpressjournals.org/doi/abs/10.1162/EDFP_a_00022}, author = {T. J. Kane and B. Jacob and J. Rockoff and D. O. Staiger} } @article {576711, title = {Does Practice-Based Teacher Preparation Increase Student Achievement? Early Evidence from the Boston Teacher Residency}, year = {2011}, month = {12/15}, abstract = {

Center researchers John Papay, Martin West, Jon Fullerton, and Thomas Kane investigate the effectiveness of the Boston Teacher Residency (BTR) in their working paper, Does Practice-Based Teacher Preparation Increase Student Achievement? Early Evidence from the Boston Teacher Residency.\  BTR is an innovative practice-based preparation program in which candidates work alongside a mentor teacher for a year before becoming a teacher of record in Boston Public Schools.

}, author = {John Papay and Martin West and Jon Fullerton and Thomas Kane} } @article {576886, title = {The Effect of Evaluation on Performance: Evidence from Longitudinal Student Achievement Data of Mid-career Teachers}, year = {2011}, month = {03/01}, abstract = {

The effect of evaluation on employee performance is traditionally studied in the context of the principal-agent problem. Evaluation can, however, also be characterized as an investment in the evaluated employee{\textquoteright}s human capital. We study a sample of mid-career public school teachers where we can consider these two types of evaluation effect separately. Employee evaluation is a particularly salient topic in public schools where teacher effectiveness varies substantially and where teacher evaluation itself is increasingly a focus of public policy proposals. We find evidence that a quality classroom-observation-based evaluation and performance measures can improve mid-career teacher performance both during the period of evaluation, consistent with the traditional predictions; and in subsequent years, consistent with human capital investment. However the estimated improvements during evaluation are less precise. Additionally, the effects sizes represent a substantial gain in welfare given the program{\textquoteright}s costs.

}, url = {http://www.nber.org/papers/w16877}, author = {Eric S. Taylor and John H. Tyler} } @article {576736, title = {Identifying Effective Classroom Practices Using Student Achievement Data}, journal = {The Journal of Human Resources}, volume = {46}, year = {2011}, pages = {587--613}, abstract = {

This paper combines information from classroom-based observations and measures of teachers{\textquoteright} ability to improve student achievement as a step toward addressing the challenge of identifying effective teachers and teaching practices. The authors find that classroom-based measures of teaching effectiveness are related in substantial ways to student achievement growth. The authors conclude that the results point to the promise of teacher evaluation systems that would use information from both classroom observations and student test scores to identify effective teachers. Information on the types of practices that are most effective at raising achievement is also highlighted.

}, author = {Thomas J. Kane and Eric Taylor and John Tyler and Amy Wooten} } @report {576791, title = {Prioritizing Teaching Quality in a New System of Teacher Evaluation}, year = {2011}, abstract = {

Teachers are the most important school-level factor in student success--but as any parent knows, all teachers are not created equal. Reforms to the current quite cursory teacher evaluation system, if done well, have the potential to remove the worst-performing teachers and, even more important, to assist the majority in improving their craft. However, the US educational system often cannibalizes its own innovations, destroying their potential with a steady drip of rules, regulations, bureaucracy, and accommodations to the status quo. Because that status quo sets an unacceptably low bar for teaching quality, missing this opportunity now means new generations of students may suffer mediocre{\textemdash}or worse{\textemdash}classrooms.

}, author = {Heather Hill and Corinne Herlihy} } @report {576866, title = {Student Achievement in Massachusetts{\textquoteright} Charter Schools}, year = {2011}, month = {02/04}, abstract = {

Researchers from the Harvard Graduate School of Education, MIT, and the University of Michigan have released the results of a new study that suggests that urban charter schools in Massachusetts have large positive effects on student achievement at both the middle and high school levels. Results for nonurban charter schools were less clear; some analyses indicated positive effects on student achievement at the high school level, while results for middle school students were much less encouraging.

View the Press Release

View the PowerPoint Presentation

}, author = {Joshua D. Angrist and Sarah R. Cohodes and Susan M. Dynarski and Jon B. Fullerton and Thomas J. Kane and Parag A. Pathak and Christopher R. Walters} } @report {576756, title = {Informing the Debate: Comparing Boston{\textquoteright}s Charter, Pilot, and Traditional Schools}, year = {2009}, abstract = {

Whether using the randomized lotteries or statistical controls for measured background characteristics, we generally find large positive effects for Charter Schools, at both the middle school and high school levels. For each year of attendance in middle school, we estimate that Charter Schools raise student achievement .09 to .17 standard deviations in English Language Arts and .18 to .54 standard deviations in math relative to those attending traditional schools in the Boston Public Schools. The estimated impact on math achievement for Charter middle schools is extraordinarily large. Increasing performance by .5 standard deviations is the same as moving from the 50th to the 69th percentile in student performance. This is roughly half the size of the black-white achievement gap. In high school, the estimated gains are somewhat smaller than in middle school: .16 to .19 standard deviations in English Language Arts; .16 to .19 in mathematics; .2 to .28 in writing topic development; and .13 to .17 in writing composition with the lottery-based results. The estimated impacts of middle schools and high school Charters are similar in both the {\textquotedblleft}observational{\textquotedblright} and {\textquotedblleft}lottery-based{\textquotedblright} results.

}, author = {Atila Abdulkadiroglu and Josh Angrist and Sarah Cohodes and Susan Dynarski and Jon Fullerton and Thomas Kane and Parag Pathak} } @report {576726, title = {Estimating Teacher Impacts on Student Achievement: An Experimental Evaluation}, year = {2008}, abstract = {

The authors used a random-assignment experiment in Los Angeles Unified School District to evaluate various non-experimental methods for estimating teacher effects on student test scores. Having estimated teacher effects during a pre-experimental period, they used these estimates to predict student achievement following random assignment of teachers to classrooms. While all of the teacher effect estimates considered were significant predictors of student achievement under random assignment, those that controlled for prior student test scores yielded unbiased predictions and those that further controlled for mean classroom characteristics yielded the best prediction accuracy. In both the experimental and non-experimental data, the authors found that teacher effects faded out by roughly 50 percent per year in the two years following teacher assignment.

}, author = {Thomas J. Kane and Douglas O. Staiger} } @report {576776, title = {National Board Certification and Teacher Effectiveness: Evidence from a Random Assignment Experiment}, year = {2008}, abstract = {

The National Board for Professional Teaching Standards (NBPTS) assesses teaching practice based on videos and essays submitted by teachers. For this study, the authors compared the performance of classrooms of elementary students in Los Angeles randomly assigned to NBPTS applicants and to comparison teachers. The authors conclude that students assigned to highly-rated applicants outperformed those in the comparison classrooms by more than those assigned to poorly-rated teachers. Moreover, the estimates with and without random assignment were similar.

}, author = {Steven Cantrell and Jon Fullerton and Thomas J. Kane and Douglas O. Staiger} }